1 // Generated by the protocol buffer compiler. DO NOT EDIT! 2 // source: Admin.proto 3 4 package org.apache.hadoop.hbase.protobuf.generated; 5 6 public final class AdminProtos { AdminProtos()7 private AdminProtos() {} registerAllExtensions( com.google.protobuf.ExtensionRegistry registry)8 public static void registerAllExtensions( 9 com.google.protobuf.ExtensionRegistry registry) { 10 } 11 public interface GetRegionInfoRequestOrBuilder 12 extends com.google.protobuf.MessageOrBuilder { 13 14 // required .RegionSpecifier region = 1; 15 /** 16 * <code>required .RegionSpecifier region = 1;</code> 17 */ hasRegion()18 boolean hasRegion(); 19 /** 20 * <code>required .RegionSpecifier region = 1;</code> 21 */ getRegion()22 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); 23 /** 24 * <code>required .RegionSpecifier region = 1;</code> 25 */ getRegionOrBuilder()26 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); 27 28 // optional bool compaction_state = 2; 29 /** 30 * <code>optional bool compaction_state = 2;</code> 31 */ hasCompactionState()32 boolean hasCompactionState(); 33 /** 34 * <code>optional bool compaction_state = 2;</code> 35 */ getCompactionState()36 boolean getCompactionState(); 37 } 38 /** 39 * Protobuf type {@code GetRegionInfoRequest} 40 */ 41 public static final class GetRegionInfoRequest extends 42 com.google.protobuf.GeneratedMessage 43 implements GetRegionInfoRequestOrBuilder { 44 // Use GetRegionInfoRequest.newBuilder() to construct. 
GetRegionInfoRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder)45 private GetRegionInfoRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 46 super(builder); 47 this.unknownFields = builder.getUnknownFields(); 48 } GetRegionInfoRequest(boolean noInit)49 private GetRegionInfoRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 50 51 private static final GetRegionInfoRequest defaultInstance; getDefaultInstance()52 public static GetRegionInfoRequest getDefaultInstance() { 53 return defaultInstance; 54 } 55 getDefaultInstanceForType()56 public GetRegionInfoRequest getDefaultInstanceForType() { 57 return defaultInstance; 58 } 59 60 private final com.google.protobuf.UnknownFieldSet unknownFields; 61 @java.lang.Override 62 public final com.google.protobuf.UnknownFieldSet getUnknownFields()63 getUnknownFields() { 64 return this.unknownFields; 65 } GetRegionInfoRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)66 private GetRegionInfoRequest( 67 com.google.protobuf.CodedInputStream input, 68 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 69 throws com.google.protobuf.InvalidProtocolBufferException { 70 initFields(); 71 int mutable_bitField0_ = 0; 72 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 73 com.google.protobuf.UnknownFieldSet.newBuilder(); 74 try { 75 boolean done = false; 76 while (!done) { 77 int tag = input.readTag(); 78 switch (tag) { 79 case 0: 80 done = true; 81 break; 82 default: { 83 if (!parseUnknownField(input, unknownFields, 84 extensionRegistry, tag)) { 85 done = true; 86 } 87 break; 88 } 89 case 10: { 90 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; 91 if (((bitField0_ & 0x00000001) == 0x00000001)) { 92 subBuilder = region_.toBuilder(); 93 } 94 region_ = 
input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); 95 if (subBuilder != null) { 96 subBuilder.mergeFrom(region_); 97 region_ = subBuilder.buildPartial(); 98 } 99 bitField0_ |= 0x00000001; 100 break; 101 } 102 case 16: { 103 bitField0_ |= 0x00000002; 104 compactionState_ = input.readBool(); 105 break; 106 } 107 } 108 } 109 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 110 throw e.setUnfinishedMessage(this); 111 } catch (java.io.IOException e) { 112 throw new com.google.protobuf.InvalidProtocolBufferException( 113 e.getMessage()).setUnfinishedMessage(this); 114 } finally { 115 this.unknownFields = unknownFields.build(); 116 makeExtensionsImmutable(); 117 } 118 } 119 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()120 getDescriptor() { 121 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_descriptor; 122 } 123 124 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()125 internalGetFieldAccessorTable() { 126 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable 127 .ensureFieldAccessorsInitialized( 128 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.Builder.class); 129 } 130 131 public static com.google.protobuf.Parser<GetRegionInfoRequest> PARSER = 132 new com.google.protobuf.AbstractParser<GetRegionInfoRequest>() { 133 public GetRegionInfoRequest parsePartialFrom( 134 com.google.protobuf.CodedInputStream input, 135 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 136 throws com.google.protobuf.InvalidProtocolBufferException { 137 return new GetRegionInfoRequest(input, extensionRegistry); 138 } 139 }; 140 141 @java.lang.Override getParserForType()142 public 
com.google.protobuf.Parser<GetRegionInfoRequest> getParserForType() { 143 return PARSER; 144 } 145 146 private int bitField0_; 147 // required .RegionSpecifier region = 1; 148 public static final int REGION_FIELD_NUMBER = 1; 149 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; 150 /** 151 * <code>required .RegionSpecifier region = 1;</code> 152 */ hasRegion()153 public boolean hasRegion() { 154 return ((bitField0_ & 0x00000001) == 0x00000001); 155 } 156 /** 157 * <code>required .RegionSpecifier region = 1;</code> 158 */ getRegion()159 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { 160 return region_; 161 } 162 /** 163 * <code>required .RegionSpecifier region = 1;</code> 164 */ getRegionOrBuilder()165 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { 166 return region_; 167 } 168 169 // optional bool compaction_state = 2; 170 public static final int COMPACTION_STATE_FIELD_NUMBER = 2; 171 private boolean compactionState_; 172 /** 173 * <code>optional bool compaction_state = 2;</code> 174 */ hasCompactionState()175 public boolean hasCompactionState() { 176 return ((bitField0_ & 0x00000002) == 0x00000002); 177 } 178 /** 179 * <code>optional bool compaction_state = 2;</code> 180 */ getCompactionState()181 public boolean getCompactionState() { 182 return compactionState_; 183 } 184 initFields()185 private void initFields() { 186 region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); 187 compactionState_ = false; 188 } 189 private byte memoizedIsInitialized = -1; isInitialized()190 public final boolean isInitialized() { 191 byte isInitialized = memoizedIsInitialized; 192 if (isInitialized != -1) return isInitialized == 1; 193 194 if (!hasRegion()) { 195 memoizedIsInitialized = 0; 196 return false; 197 } 198 if (!getRegion().isInitialized()) { 199 memoizedIsInitialized = 0; 200 return 
false; 201 } 202 memoizedIsInitialized = 1; 203 return true; 204 } 205 writeTo(com.google.protobuf.CodedOutputStream output)206 public void writeTo(com.google.protobuf.CodedOutputStream output) 207 throws java.io.IOException { 208 getSerializedSize(); 209 if (((bitField0_ & 0x00000001) == 0x00000001)) { 210 output.writeMessage(1, region_); 211 } 212 if (((bitField0_ & 0x00000002) == 0x00000002)) { 213 output.writeBool(2, compactionState_); 214 } 215 getUnknownFields().writeTo(output); 216 } 217 218 private int memoizedSerializedSize = -1; getSerializedSize()219 public int getSerializedSize() { 220 int size = memoizedSerializedSize; 221 if (size != -1) return size; 222 223 size = 0; 224 if (((bitField0_ & 0x00000001) == 0x00000001)) { 225 size += com.google.protobuf.CodedOutputStream 226 .computeMessageSize(1, region_); 227 } 228 if (((bitField0_ & 0x00000002) == 0x00000002)) { 229 size += com.google.protobuf.CodedOutputStream 230 .computeBoolSize(2, compactionState_); 231 } 232 size += getUnknownFields().getSerializedSize(); 233 memoizedSerializedSize = size; 234 return size; 235 } 236 237 private static final long serialVersionUID = 0L; 238 @java.lang.Override writeReplace()239 protected java.lang.Object writeReplace() 240 throws java.io.ObjectStreamException { 241 return super.writeReplace(); 242 } 243 244 @java.lang.Override equals(final java.lang.Object obj)245 public boolean equals(final java.lang.Object obj) { 246 if (obj == this) { 247 return true; 248 } 249 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)) { 250 return super.equals(obj); 251 } 252 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest) obj; 253 254 boolean result = true; 255 result = result && (hasRegion() == other.hasRegion()); 256 if (hasRegion()) { 257 result = result && getRegion() 258 .equals(other.getRegion()); 259 } 260 result = 
result && (hasCompactionState() == other.hasCompactionState()); 261 if (hasCompactionState()) { 262 result = result && (getCompactionState() 263 == other.getCompactionState()); 264 } 265 result = result && 266 getUnknownFields().equals(other.getUnknownFields()); 267 return result; 268 } 269 270 private int memoizedHashCode = 0; 271 @java.lang.Override hashCode()272 public int hashCode() { 273 if (memoizedHashCode != 0) { 274 return memoizedHashCode; 275 } 276 int hash = 41; 277 hash = (19 * hash) + getDescriptorForType().hashCode(); 278 if (hasRegion()) { 279 hash = (37 * hash) + REGION_FIELD_NUMBER; 280 hash = (53 * hash) + getRegion().hashCode(); 281 } 282 if (hasCompactionState()) { 283 hash = (37 * hash) + COMPACTION_STATE_FIELD_NUMBER; 284 hash = (53 * hash) + hashBoolean(getCompactionState()); 285 } 286 hash = (29 * hash) + getUnknownFields().hashCode(); 287 memoizedHashCode = hash; 288 return hash; 289 } 290 parseFrom( com.google.protobuf.ByteString data)291 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( 292 com.google.protobuf.ByteString data) 293 throws com.google.protobuf.InvalidProtocolBufferException { 294 return PARSER.parseFrom(data); 295 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)296 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( 297 com.google.protobuf.ByteString data, 298 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 299 throws com.google.protobuf.InvalidProtocolBufferException { 300 return PARSER.parseFrom(data, extensionRegistry); 301 } parseFrom(byte[] data)302 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(byte[] data) 303 throws com.google.protobuf.InvalidProtocolBufferException { 304 return PARSER.parseFrom(data); 305 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry)306 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( 307 byte[] data, 308 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 309 throws com.google.protobuf.InvalidProtocolBufferException { 310 return PARSER.parseFrom(data, extensionRegistry); 311 } parseFrom(java.io.InputStream input)312 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom(java.io.InputStream input) 313 throws java.io.IOException { 314 return PARSER.parseFrom(input); 315 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)316 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( 317 java.io.InputStream input, 318 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 319 throws java.io.IOException { 320 return PARSER.parseFrom(input, extensionRegistry); 321 } parseDelimitedFrom(java.io.InputStream input)322 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseDelimitedFrom(java.io.InputStream input) 323 throws java.io.IOException { 324 return PARSER.parseDelimitedFrom(input); 325 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)326 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseDelimitedFrom( 327 java.io.InputStream input, 328 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 329 throws java.io.IOException { 330 return PARSER.parseDelimitedFrom(input, extensionRegistry); 331 } parseFrom( com.google.protobuf.CodedInputStream input)332 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( 333 com.google.protobuf.CodedInputStream input) 334 throws java.io.IOException { 335 return PARSER.parseFrom(input); 336 } parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry)337 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parseFrom( 338 com.google.protobuf.CodedInputStream input, 339 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 340 throws java.io.IOException { 341 return PARSER.parseFrom(input, extensionRegistry); 342 } 343 newBuilder()344 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()345 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest prototype)346 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest prototype) { 347 return newBuilder().mergeFrom(prototype); 348 } toBuilder()349 public Builder toBuilder() { return newBuilder(this); } 350 351 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)352 protected Builder newBuilderForType( 353 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 354 Builder builder = new Builder(parent); 355 return builder; 356 } 357 /** 358 * Protobuf type {@code GetRegionInfoRequest} 359 */ 360 public static final class Builder extends 361 com.google.protobuf.GeneratedMessage.Builder<Builder> 362 implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequestOrBuilder { 363 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()364 getDescriptor() { 365 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_descriptor; 366 } 367 368 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()369 internalGetFieldAccessorTable() { 370 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_fieldAccessorTable 371 .ensureFieldAccessorsInitialized( 372 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.Builder.class); 373 } 374 375 // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.newBuilder() Builder()376 private Builder() { 377 maybeForceBuilderInitialization(); 378 } 379 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)380 private Builder( 381 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 382 super(parent); 383 maybeForceBuilderInitialization(); 384 } maybeForceBuilderInitialization()385 private void maybeForceBuilderInitialization() { 386 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 387 getRegionFieldBuilder(); 388 } 389 } create()390 private static Builder create() { 391 return new Builder(); 392 } 393 clear()394 public Builder clear() { 395 super.clear(); 396 if (regionBuilder_ == null) { 397 region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); 398 } else { 399 regionBuilder_.clear(); 400 } 401 bitField0_ = (bitField0_ & ~0x00000001); 402 compactionState_ = false; 403 bitField0_ = (bitField0_ & ~0x00000002); 404 return this; 405 } 406 clone()407 public Builder clone() { 408 return create().mergeFrom(buildPartial()); 409 } 410 411 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()412 getDescriptorForType() { 413 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoRequest_descriptor; 414 } 415 getDefaultInstanceForType()416 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest getDefaultInstanceForType() { 417 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance(); 418 } 419 build()420 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest build() { 421 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = buildPartial(); 422 if (!result.isInitialized()) { 423 throw newUninitializedMessageException(result); 424 } 425 return result; 426 } 427 buildPartial()428 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest buildPartial() { 429 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest(this); 430 int from_bitField0_ = bitField0_; 431 int to_bitField0_ = 0; 432 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 433 to_bitField0_ |= 0x00000001; 434 } 435 if (regionBuilder_ == null) { 436 result.region_ = region_; 437 } else { 438 result.region_ = regionBuilder_.build(); 439 } 440 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 441 to_bitField0_ |= 0x00000002; 442 } 443 result.compactionState_ = compactionState_; 444 result.bitField0_ = to_bitField0_; 445 onBuilt(); 446 return result; 447 } 448 mergeFrom(com.google.protobuf.Message other)449 public Builder mergeFrom(com.google.protobuf.Message other) { 450 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest) { 451 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest)other); 452 } else { 453 super.mergeFrom(other); 454 return this; 455 } 456 } 457 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest other)458 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest other) { 459 if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest.getDefaultInstance()) return this; 460 if (other.hasRegion()) { 461 mergeRegion(other.getRegion()); 462 } 463 if (other.hasCompactionState()) { 464 setCompactionState(other.getCompactionState()); 465 } 466 this.mergeUnknownFields(other.getUnknownFields()); 467 return 
this; 468 } 469 isInitialized()470 public final boolean isInitialized() { 471 if (!hasRegion()) { 472 473 return false; 474 } 475 if (!getRegion().isInitialized()) { 476 477 return false; 478 } 479 return true; 480 } 481 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)482 public Builder mergeFrom( 483 com.google.protobuf.CodedInputStream input, 484 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 485 throws java.io.IOException { 486 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest parsedMessage = null; 487 try { 488 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 489 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 490 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoRequest) e.getUnfinishedMessage(); 491 throw e; 492 } finally { 493 if (parsedMessage != null) { 494 mergeFrom(parsedMessage); 495 } 496 } 497 return this; 498 } 499 private int bitField0_; 500 501 // required .RegionSpecifier region = 1; 502 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); 503 private com.google.protobuf.SingleFieldBuilder< 504 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; 505 /** 506 * <code>required .RegionSpecifier region = 1;</code> 507 */ hasRegion()508 public boolean hasRegion() { 509 return ((bitField0_ & 0x00000001) == 0x00000001); 510 } 511 /** 512 * <code>required .RegionSpecifier region = 1;</code> 513 */ getRegion()514 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { 515 if (regionBuilder_ == null) { 516 return region_; 517 } 
else { 518 return regionBuilder_.getMessage(); 519 } 520 } 521 /** 522 * <code>required .RegionSpecifier region = 1;</code> 523 */ setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value)524 public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { 525 if (regionBuilder_ == null) { 526 if (value == null) { 527 throw new NullPointerException(); 528 } 529 region_ = value; 530 onChanged(); 531 } else { 532 regionBuilder_.setMessage(value); 533 } 534 bitField0_ |= 0x00000001; 535 return this; 536 } 537 /** 538 * <code>required .RegionSpecifier region = 1;</code> 539 */ setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue)540 public Builder setRegion( 541 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { 542 if (regionBuilder_ == null) { 543 region_ = builderForValue.build(); 544 onChanged(); 545 } else { 546 regionBuilder_.setMessage(builderForValue.build()); 547 } 548 bitField0_ |= 0x00000001; 549 return this; 550 } 551 /** 552 * <code>required .RegionSpecifier region = 1;</code> 553 */ mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value)554 public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { 555 if (regionBuilder_ == null) { 556 if (((bitField0_ & 0x00000001) == 0x00000001) && 557 region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { 558 region_ = 559 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); 560 } else { 561 region_ = value; 562 } 563 onChanged(); 564 } else { 565 regionBuilder_.mergeFrom(value); 566 } 567 bitField0_ |= 0x00000001; 568 return this; 569 } 570 /** 571 * <code>required .RegionSpecifier region = 1;</code> 572 */ clearRegion()573 public Builder 
clearRegion() { 574 if (regionBuilder_ == null) { 575 region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); 576 onChanged(); 577 } else { 578 regionBuilder_.clear(); 579 } 580 bitField0_ = (bitField0_ & ~0x00000001); 581 return this; 582 } 583 /** 584 * <code>required .RegionSpecifier region = 1;</code> 585 */ getRegionBuilder()586 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { 587 bitField0_ |= 0x00000001; 588 onChanged(); 589 return getRegionFieldBuilder().getBuilder(); 590 } 591 /** 592 * <code>required .RegionSpecifier region = 1;</code> 593 */ getRegionOrBuilder()594 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { 595 if (regionBuilder_ != null) { 596 return regionBuilder_.getMessageOrBuilder(); 597 } else { 598 return region_; 599 } 600 } 601 /** 602 * <code>required .RegionSpecifier region = 1;</code> 603 */ 604 private com.google.protobuf.SingleFieldBuilder< 605 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder()606 getRegionFieldBuilder() { 607 if (regionBuilder_ == null) { 608 regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< 609 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( 610 region_, 611 getParentForChildren(), 612 isClean()); 613 region_ = null; 614 } 615 return regionBuilder_; 616 } 617 618 // optional bool compaction_state = 2; 619 private boolean compactionState_ ; 620 /** 621 * <code>optional bool compaction_state = 2;</code> 622 */ hasCompactionState()623 public boolean 
hasCompactionState() { 624 return ((bitField0_ & 0x00000002) == 0x00000002); 625 } 626 /** 627 * <code>optional bool compaction_state = 2;</code> 628 */ getCompactionState()629 public boolean getCompactionState() { 630 return compactionState_; 631 } 632 /** 633 * <code>optional bool compaction_state = 2;</code> 634 */ setCompactionState(boolean value)635 public Builder setCompactionState(boolean value) { 636 bitField0_ |= 0x00000002; 637 compactionState_ = value; 638 onChanged(); 639 return this; 640 } 641 /** 642 * <code>optional bool compaction_state = 2;</code> 643 */ clearCompactionState()644 public Builder clearCompactionState() { 645 bitField0_ = (bitField0_ & ~0x00000002); 646 compactionState_ = false; 647 onChanged(); 648 return this; 649 } 650 651 // @@protoc_insertion_point(builder_scope:GetRegionInfoRequest) 652 } 653 654 static { 655 defaultInstance = new GetRegionInfoRequest(true); defaultInstance.initFields()656 defaultInstance.initFields(); 657 } 658 659 // @@protoc_insertion_point(class_scope:GetRegionInfoRequest) 660 } 661 662 public interface GetRegionInfoResponseOrBuilder 663 extends com.google.protobuf.MessageOrBuilder { 664 665 // required .RegionInfo region_info = 1; 666 /** 667 * <code>required .RegionInfo region_info = 1;</code> 668 */ hasRegionInfo()669 boolean hasRegionInfo(); 670 /** 671 * <code>required .RegionInfo region_info = 1;</code> 672 */ getRegionInfo()673 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(); 674 /** 675 * <code>required .RegionInfo region_info = 1;</code> 676 */ getRegionInfoOrBuilder()677 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(); 678 679 // optional .GetRegionInfoResponse.CompactionState compaction_state = 2; 680 /** 681 * <code>optional .GetRegionInfoResponse.CompactionState compaction_state = 2;</code> 682 */ hasCompactionState()683 boolean hasCompactionState(); 684 /** 685 * <code>optional 
.GetRegionInfoResponse.CompactionState compaction_state = 2;</code> 686 */ getCompactionState()687 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState(); 688 689 // optional bool isRecovering = 3; 690 /** 691 * <code>optional bool isRecovering = 3;</code> 692 */ hasIsRecovering()693 boolean hasIsRecovering(); 694 /** 695 * <code>optional bool isRecovering = 3;</code> 696 */ getIsRecovering()697 boolean getIsRecovering(); 698 } 699 /** 700 * Protobuf type {@code GetRegionInfoResponse} 701 */ 702 public static final class GetRegionInfoResponse extends 703 com.google.protobuf.GeneratedMessage 704 implements GetRegionInfoResponseOrBuilder { 705 // Use GetRegionInfoResponse.newBuilder() to construct. GetRegionInfoResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)706 private GetRegionInfoResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 707 super(builder); 708 this.unknownFields = builder.getUnknownFields(); 709 } GetRegionInfoResponse(boolean noInit)710 private GetRegionInfoResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 711 712 private static final GetRegionInfoResponse defaultInstance; getDefaultInstance()713 public static GetRegionInfoResponse getDefaultInstance() { 714 return defaultInstance; 715 } 716 getDefaultInstanceForType()717 public GetRegionInfoResponse getDefaultInstanceForType() { 718 return defaultInstance; 719 } 720 721 private final com.google.protobuf.UnknownFieldSet unknownFields; 722 @java.lang.Override 723 public final com.google.protobuf.UnknownFieldSet getUnknownFields()724 getUnknownFields() { 725 return this.unknownFields; 726 } GetRegionInfoResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)727 private GetRegionInfoResponse( 728 com.google.protobuf.CodedInputStream input, 729 com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) 730 throws com.google.protobuf.InvalidProtocolBufferException { 731 initFields(); 732 int mutable_bitField0_ = 0; 733 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 734 com.google.protobuf.UnknownFieldSet.newBuilder(); 735 try { 736 boolean done = false; 737 while (!done) { 738 int tag = input.readTag(); 739 switch (tag) { 740 case 0: 741 done = true; 742 break; 743 default: { 744 if (!parseUnknownField(input, unknownFields, 745 extensionRegistry, tag)) { 746 done = true; 747 } 748 break; 749 } 750 case 10: { 751 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = null; 752 if (((bitField0_ & 0x00000001) == 0x00000001)) { 753 subBuilder = regionInfo_.toBuilder(); 754 } 755 regionInfo_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry); 756 if (subBuilder != null) { 757 subBuilder.mergeFrom(regionInfo_); 758 regionInfo_ = subBuilder.buildPartial(); 759 } 760 bitField0_ |= 0x00000001; 761 break; 762 } 763 case 16: { 764 int rawValue = input.readEnum(); 765 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.valueOf(rawValue); 766 if (value == null) { 767 unknownFields.mergeVarintField(2, rawValue); 768 } else { 769 bitField0_ |= 0x00000002; 770 compactionState_ = value; 771 } 772 break; 773 } 774 case 24: { 775 bitField0_ |= 0x00000004; 776 isRecovering_ = input.readBool(); 777 break; 778 } 779 } 780 } 781 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 782 throw e.setUnfinishedMessage(this); 783 } catch (java.io.IOException e) { 784 throw new com.google.protobuf.InvalidProtocolBufferException( 785 e.getMessage()).setUnfinishedMessage(this); 786 } finally { 787 this.unknownFields = unknownFields.build(); 788 makeExtensionsImmutable(); 789 } 790 } 791 public static final 
com.google.protobuf.Descriptors.Descriptor getDescriptor()792 getDescriptor() { 793 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_descriptor; 794 } 795 796 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()797 internalGetFieldAccessorTable() { 798 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable 799 .ensureFieldAccessorsInitialized( 800 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.Builder.class); 801 } 802 803 public static com.google.protobuf.Parser<GetRegionInfoResponse> PARSER = 804 new com.google.protobuf.AbstractParser<GetRegionInfoResponse>() { 805 public GetRegionInfoResponse parsePartialFrom( 806 com.google.protobuf.CodedInputStream input, 807 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 808 throws com.google.protobuf.InvalidProtocolBufferException { 809 return new GetRegionInfoResponse(input, extensionRegistry); 810 } 811 }; 812 813 @java.lang.Override getParserForType()814 public com.google.protobuf.Parser<GetRegionInfoResponse> getParserForType() { 815 return PARSER; 816 } 817 818 /** 819 * Protobuf enum {@code GetRegionInfoResponse.CompactionState} 820 */ 821 public enum CompactionState 822 implements com.google.protobuf.ProtocolMessageEnum { 823 /** 824 * <code>NONE = 0;</code> 825 */ 826 NONE(0, 0), 827 /** 828 * <code>MINOR = 1;</code> 829 */ 830 MINOR(1, 1), 831 /** 832 * <code>MAJOR = 2;</code> 833 */ 834 MAJOR(2, 2), 835 /** 836 * <code>MAJOR_AND_MINOR = 3;</code> 837 */ 838 MAJOR_AND_MINOR(3, 3), 839 ; 840 841 /** 842 * <code>NONE = 0;</code> 843 */ 844 public static final int NONE_VALUE = 0; 845 /** 846 * <code>MINOR = 1;</code> 847 */ 848 public static final int MINOR_VALUE = 1; 849 /** 850 * <code>MAJOR = 2;</code> 851 */ 852 public static final 
int MAJOR_VALUE = 2; 853 /** 854 * <code>MAJOR_AND_MINOR = 3;</code> 855 */ 856 public static final int MAJOR_AND_MINOR_VALUE = 3; 857 858 getNumber()859 public final int getNumber() { return value; } 860 valueOf(int value)861 public static CompactionState valueOf(int value) { 862 switch (value) { 863 case 0: return NONE; 864 case 1: return MINOR; 865 case 2: return MAJOR; 866 case 3: return MAJOR_AND_MINOR; 867 default: return null; 868 } 869 } 870 871 public static com.google.protobuf.Internal.EnumLiteMap<CompactionState> internalGetValueMap()872 internalGetValueMap() { 873 return internalValueMap; 874 } 875 private static com.google.protobuf.Internal.EnumLiteMap<CompactionState> 876 internalValueMap = 877 new com.google.protobuf.Internal.EnumLiteMap<CompactionState>() { 878 public CompactionState findValueByNumber(int number) { 879 return CompactionState.valueOf(number); 880 } 881 }; 882 883 public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor()884 getValueDescriptor() { 885 return getDescriptor().getValues().get(index); 886 } 887 public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType()888 getDescriptorForType() { 889 return getDescriptor(); 890 } 891 public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor()892 getDescriptor() { 893 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDescriptor().getEnumTypes().get(0); 894 } 895 896 private static final CompactionState[] VALUES = values(); 897 valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc)898 public static CompactionState valueOf( 899 com.google.protobuf.Descriptors.EnumValueDescriptor desc) { 900 if (desc.getType() != getDescriptor()) { 901 throw new java.lang.IllegalArgumentException( 902 "EnumValueDescriptor is not for this type."); 903 } 904 return VALUES[desc.getIndex()]; 905 } 906 907 private final int index; 908 private final int value; 909 CompactionState(int index, int 
value)910 private CompactionState(int index, int value) { 911 this.index = index; 912 this.value = value; 913 } 914 915 // @@protoc_insertion_point(enum_scope:GetRegionInfoResponse.CompactionState) 916 } 917 918 private int bitField0_; 919 // required .RegionInfo region_info = 1; 920 public static final int REGION_INFO_FIELD_NUMBER = 1; 921 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_; 922 /** 923 * <code>required .RegionInfo region_info = 1;</code> 924 */ hasRegionInfo()925 public boolean hasRegionInfo() { 926 return ((bitField0_ & 0x00000001) == 0x00000001); 927 } 928 /** 929 * <code>required .RegionInfo region_info = 1;</code> 930 */ getRegionInfo()931 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { 932 return regionInfo_; 933 } 934 /** 935 * <code>required .RegionInfo region_info = 1;</code> 936 */ getRegionInfoOrBuilder()937 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { 938 return regionInfo_; 939 } 940 941 // optional .GetRegionInfoResponse.CompactionState compaction_state = 2; 942 public static final int COMPACTION_STATE_FIELD_NUMBER = 2; 943 private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState compactionState_; 944 /** 945 * <code>optional .GetRegionInfoResponse.CompactionState compaction_state = 2;</code> 946 */ hasCompactionState()947 public boolean hasCompactionState() { 948 return ((bitField0_ & 0x00000002) == 0x00000002); 949 } 950 /** 951 * <code>optional .GetRegionInfoResponse.CompactionState compaction_state = 2;</code> 952 */ getCompactionState()953 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState() { 954 return compactionState_; 955 } 956 957 // optional bool isRecovering = 3; 958 public static final int ISRECOVERING_FIELD_NUMBER = 3; 959 private boolean isRecovering_; 960 /** 961 * 
<code>optional bool isRecovering = 3;</code> 962 */ hasIsRecovering()963 public boolean hasIsRecovering() { 964 return ((bitField0_ & 0x00000004) == 0x00000004); 965 } 966 /** 967 * <code>optional bool isRecovering = 3;</code> 968 */ getIsRecovering()969 public boolean getIsRecovering() { 970 return isRecovering_; 971 } 972 initFields()973 private void initFields() { 974 regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); 975 compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE; 976 isRecovering_ = false; 977 } 978 private byte memoizedIsInitialized = -1; isInitialized()979 public final boolean isInitialized() { 980 byte isInitialized = memoizedIsInitialized; 981 if (isInitialized != -1) return isInitialized == 1; 982 983 if (!hasRegionInfo()) { 984 memoizedIsInitialized = 0; 985 return false; 986 } 987 if (!getRegionInfo().isInitialized()) { 988 memoizedIsInitialized = 0; 989 return false; 990 } 991 memoizedIsInitialized = 1; 992 return true; 993 } 994 writeTo(com.google.protobuf.CodedOutputStream output)995 public void writeTo(com.google.protobuf.CodedOutputStream output) 996 throws java.io.IOException { 997 getSerializedSize(); 998 if (((bitField0_ & 0x00000001) == 0x00000001)) { 999 output.writeMessage(1, regionInfo_); 1000 } 1001 if (((bitField0_ & 0x00000002) == 0x00000002)) { 1002 output.writeEnum(2, compactionState_.getNumber()); 1003 } 1004 if (((bitField0_ & 0x00000004) == 0x00000004)) { 1005 output.writeBool(3, isRecovering_); 1006 } 1007 getUnknownFields().writeTo(output); 1008 } 1009 1010 private int memoizedSerializedSize = -1; getSerializedSize()1011 public int getSerializedSize() { 1012 int size = memoizedSerializedSize; 1013 if (size != -1) return size; 1014 1015 size = 0; 1016 if (((bitField0_ & 0x00000001) == 0x00000001)) { 1017 size += com.google.protobuf.CodedOutputStream 1018 .computeMessageSize(1, regionInfo_); 1019 } 1020 if 
(((bitField0_ & 0x00000002) == 0x00000002)) { 1021 size += com.google.protobuf.CodedOutputStream 1022 .computeEnumSize(2, compactionState_.getNumber()); 1023 } 1024 if (((bitField0_ & 0x00000004) == 0x00000004)) { 1025 size += com.google.protobuf.CodedOutputStream 1026 .computeBoolSize(3, isRecovering_); 1027 } 1028 size += getUnknownFields().getSerializedSize(); 1029 memoizedSerializedSize = size; 1030 return size; 1031 } 1032 1033 private static final long serialVersionUID = 0L; 1034 @java.lang.Override writeReplace()1035 protected java.lang.Object writeReplace() 1036 throws java.io.ObjectStreamException { 1037 return super.writeReplace(); 1038 } 1039 1040 @java.lang.Override equals(final java.lang.Object obj)1041 public boolean equals(final java.lang.Object obj) { 1042 if (obj == this) { 1043 return true; 1044 } 1045 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse)) { 1046 return super.equals(obj); 1047 } 1048 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) obj; 1049 1050 boolean result = true; 1051 result = result && (hasRegionInfo() == other.hasRegionInfo()); 1052 if (hasRegionInfo()) { 1053 result = result && getRegionInfo() 1054 .equals(other.getRegionInfo()); 1055 } 1056 result = result && (hasCompactionState() == other.hasCompactionState()); 1057 if (hasCompactionState()) { 1058 result = result && 1059 (getCompactionState() == other.getCompactionState()); 1060 } 1061 result = result && (hasIsRecovering() == other.hasIsRecovering()); 1062 if (hasIsRecovering()) { 1063 result = result && (getIsRecovering() 1064 == other.getIsRecovering()); 1065 } 1066 result = result && 1067 getUnknownFields().equals(other.getUnknownFields()); 1068 return result; 1069 } 1070 1071 private int memoizedHashCode = 0; 1072 @java.lang.Override hashCode()1073 public int hashCode() { 1074 if (memoizedHashCode != 0) { 
1075 return memoizedHashCode; 1076 } 1077 int hash = 41; 1078 hash = (19 * hash) + getDescriptorForType().hashCode(); 1079 if (hasRegionInfo()) { 1080 hash = (37 * hash) + REGION_INFO_FIELD_NUMBER; 1081 hash = (53 * hash) + getRegionInfo().hashCode(); 1082 } 1083 if (hasCompactionState()) { 1084 hash = (37 * hash) + COMPACTION_STATE_FIELD_NUMBER; 1085 hash = (53 * hash) + hashEnum(getCompactionState()); 1086 } 1087 if (hasIsRecovering()) { 1088 hash = (37 * hash) + ISRECOVERING_FIELD_NUMBER; 1089 hash = (53 * hash) + hashBoolean(getIsRecovering()); 1090 } 1091 hash = (29 * hash) + getUnknownFields().hashCode(); 1092 memoizedHashCode = hash; 1093 return hash; 1094 } 1095 parseFrom( com.google.protobuf.ByteString data)1096 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( 1097 com.google.protobuf.ByteString data) 1098 throws com.google.protobuf.InvalidProtocolBufferException { 1099 return PARSER.parseFrom(data); 1100 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1101 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( 1102 com.google.protobuf.ByteString data, 1103 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1104 throws com.google.protobuf.InvalidProtocolBufferException { 1105 return PARSER.parseFrom(data, extensionRegistry); 1106 } parseFrom(byte[] data)1107 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(byte[] data) 1108 throws com.google.protobuf.InvalidProtocolBufferException { 1109 return PARSER.parseFrom(data); 1110 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1111 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( 1112 byte[] data, 1113 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1114 throws 
com.google.protobuf.InvalidProtocolBufferException { 1115 return PARSER.parseFrom(data, extensionRegistry); 1116 } parseFrom(java.io.InputStream input)1117 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom(java.io.InputStream input) 1118 throws java.io.IOException { 1119 return PARSER.parseFrom(input); 1120 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1121 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( 1122 java.io.InputStream input, 1123 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1124 throws java.io.IOException { 1125 return PARSER.parseFrom(input, extensionRegistry); 1126 } parseDelimitedFrom(java.io.InputStream input)1127 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseDelimitedFrom(java.io.InputStream input) 1128 throws java.io.IOException { 1129 return PARSER.parseDelimitedFrom(input); 1130 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1131 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseDelimitedFrom( 1132 java.io.InputStream input, 1133 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1134 throws java.io.IOException { 1135 return PARSER.parseDelimitedFrom(input, extensionRegistry); 1136 } parseFrom( com.google.protobuf.CodedInputStream input)1137 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( 1138 com.google.protobuf.CodedInputStream input) 1139 throws java.io.IOException { 1140 return PARSER.parseFrom(input); 1141 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1142 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parseFrom( 1143 
com.google.protobuf.CodedInputStream input, 1144 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1145 throws java.io.IOException { 1146 return PARSER.parseFrom(input, extensionRegistry); 1147 } 1148 newBuilder()1149 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()1150 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse prototype)1151 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse prototype) { 1152 return newBuilder().mergeFrom(prototype); 1153 } toBuilder()1154 public Builder toBuilder() { return newBuilder(this); } 1155 1156 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)1157 protected Builder newBuilderForType( 1158 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 1159 Builder builder = new Builder(parent); 1160 return builder; 1161 } 1162 /** 1163 * Protobuf type {@code GetRegionInfoResponse} 1164 */ 1165 public static final class Builder extends 1166 com.google.protobuf.GeneratedMessage.Builder<Builder> 1167 implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponseOrBuilder { 1168 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()1169 getDescriptor() { 1170 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_descriptor; 1171 } 1172 1173 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()1174 internalGetFieldAccessorTable() { 1175 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_fieldAccessorTable 1176 .ensureFieldAccessorsInitialized( 1177 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.class, 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.Builder.class); 1178 } 1179 1180 // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.newBuilder() Builder()1181 private Builder() { 1182 maybeForceBuilderInitialization(); 1183 } 1184 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)1185 private Builder( 1186 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 1187 super(parent); 1188 maybeForceBuilderInitialization(); 1189 } maybeForceBuilderInitialization()1190 private void maybeForceBuilderInitialization() { 1191 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 1192 getRegionInfoFieldBuilder(); 1193 } 1194 } create()1195 private static Builder create() { 1196 return new Builder(); 1197 } 1198 clear()1199 public Builder clear() { 1200 super.clear(); 1201 if (regionInfoBuilder_ == null) { 1202 regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); 1203 } else { 1204 regionInfoBuilder_.clear(); 1205 } 1206 bitField0_ = (bitField0_ & ~0x00000001); 1207 compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE; 1208 bitField0_ = (bitField0_ & ~0x00000002); 1209 isRecovering_ = false; 1210 bitField0_ = (bitField0_ & ~0x00000004); 1211 return this; 1212 } 1213 clone()1214 public Builder clone() { 1215 return create().mergeFrom(buildPartial()); 1216 } 1217 1218 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()1219 getDescriptorForType() { 1220 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetRegionInfoResponse_descriptor; 1221 } 1222 getDefaultInstanceForType()1223 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse getDefaultInstanceForType() { 1224 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance(); 1225 } 1226 
build()1227 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse build() { 1228 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = buildPartial(); 1229 if (!result.isInitialized()) { 1230 throw newUninitializedMessageException(result); 1231 } 1232 return result; 1233 } 1234 buildPartial()1235 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse buildPartial() { 1236 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse(this); 1237 int from_bitField0_ = bitField0_; 1238 int to_bitField0_ = 0; 1239 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 1240 to_bitField0_ |= 0x00000001; 1241 } 1242 if (regionInfoBuilder_ == null) { 1243 result.regionInfo_ = regionInfo_; 1244 } else { 1245 result.regionInfo_ = regionInfoBuilder_.build(); 1246 } 1247 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 1248 to_bitField0_ |= 0x00000002; 1249 } 1250 result.compactionState_ = compactionState_; 1251 if (((from_bitField0_ & 0x00000004) == 0x00000004)) { 1252 to_bitField0_ |= 0x00000004; 1253 } 1254 result.isRecovering_ = isRecovering_; 1255 result.bitField0_ = to_bitField0_; 1256 onBuilt(); 1257 return result; 1258 } 1259 mergeFrom(com.google.protobuf.Message other)1260 public Builder mergeFrom(com.google.protobuf.Message other) { 1261 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) { 1262 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse)other); 1263 } else { 1264 super.mergeFrom(other); 1265 return this; 1266 } 1267 } 1268 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse other)1269 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse other) { 1270 if (other == 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.getDefaultInstance()) return this; 1271 if (other.hasRegionInfo()) { 1272 mergeRegionInfo(other.getRegionInfo()); 1273 } 1274 if (other.hasCompactionState()) { 1275 setCompactionState(other.getCompactionState()); 1276 } 1277 if (other.hasIsRecovering()) { 1278 setIsRecovering(other.getIsRecovering()); 1279 } 1280 this.mergeUnknownFields(other.getUnknownFields()); 1281 return this; 1282 } 1283 isInitialized()1284 public final boolean isInitialized() { 1285 if (!hasRegionInfo()) { 1286 1287 return false; 1288 } 1289 if (!getRegionInfo().isInitialized()) { 1290 1291 return false; 1292 } 1293 return true; 1294 } 1295 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1296 public Builder mergeFrom( 1297 com.google.protobuf.CodedInputStream input, 1298 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1299 throws java.io.IOException { 1300 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse parsedMessage = null; 1301 try { 1302 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 1303 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 1304 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse) e.getUnfinishedMessage(); 1305 throw e; 1306 } finally { 1307 if (parsedMessage != null) { 1308 mergeFrom(parsedMessage); 1309 } 1310 } 1311 return this; 1312 } 1313 private int bitField0_; 1314 1315 // required .RegionInfo region_info = 1; 1316 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); 1317 private com.google.protobuf.SingleFieldBuilder< 1318 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; 1319 /** 1320 * <code>required .RegionInfo region_info = 1;</code> 1321 */ hasRegionInfo()1322 public boolean hasRegionInfo() { 1323 return ((bitField0_ & 0x00000001) == 0x00000001); 1324 } 1325 /** 1326 * <code>required .RegionInfo region_info = 1;</code> 1327 */ getRegionInfo()1328 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { 1329 if (regionInfoBuilder_ == null) { 1330 return regionInfo_; 1331 } else { 1332 return regionInfoBuilder_.getMessage(); 1333 } 1334 } 1335 /** 1336 * <code>required .RegionInfo region_info = 1;</code> 1337 */ setRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value)1338 public Builder setRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { 1339 if (regionInfoBuilder_ == null) { 1340 if (value == null) { 1341 throw new NullPointerException(); 1342 } 1343 regionInfo_ = value; 1344 onChanged(); 1345 } else { 1346 regionInfoBuilder_.setMessage(value); 1347 } 1348 bitField0_ |= 0x00000001; 1349 return this; 1350 } 1351 /** 1352 * <code>required .RegionInfo region_info = 1;</code> 1353 */ setRegionInfo( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue)1354 public Builder setRegionInfo( 1355 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { 1356 if (regionInfoBuilder_ == null) { 1357 regionInfo_ = builderForValue.build(); 1358 onChanged(); 1359 } else { 1360 regionInfoBuilder_.setMessage(builderForValue.build()); 1361 } 1362 bitField0_ |= 0x00000001; 1363 return this; 1364 } 1365 /** 1366 * <code>required .RegionInfo region_info = 1;</code> 1367 */ mergeRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value)1368 public Builder mergeRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { 1369 if 
(regionInfoBuilder_ == null) { 1370 if (((bitField0_ & 0x00000001) == 0x00000001) && 1371 regionInfo_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) { 1372 regionInfo_ = 1373 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(regionInfo_).mergeFrom(value).buildPartial(); 1374 } else { 1375 regionInfo_ = value; 1376 } 1377 onChanged(); 1378 } else { 1379 regionInfoBuilder_.mergeFrom(value); 1380 } 1381 bitField0_ |= 0x00000001; 1382 return this; 1383 } 1384 /** 1385 * <code>required .RegionInfo region_info = 1;</code> 1386 */ clearRegionInfo()1387 public Builder clearRegionInfo() { 1388 if (regionInfoBuilder_ == null) { 1389 regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); 1390 onChanged(); 1391 } else { 1392 regionInfoBuilder_.clear(); 1393 } 1394 bitField0_ = (bitField0_ & ~0x00000001); 1395 return this; 1396 } 1397 /** 1398 * <code>required .RegionInfo region_info = 1;</code> 1399 */ getRegionInfoBuilder()1400 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder() { 1401 bitField0_ |= 0x00000001; 1402 onChanged(); 1403 return getRegionInfoFieldBuilder().getBuilder(); 1404 } 1405 /** 1406 * <code>required .RegionInfo region_info = 1;</code> 1407 */ getRegionInfoOrBuilder()1408 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { 1409 if (regionInfoBuilder_ != null) { 1410 return regionInfoBuilder_.getMessageOrBuilder(); 1411 } else { 1412 return regionInfo_; 1413 } 1414 } 1415 /** 1416 * <code>required .RegionInfo region_info = 1;</code> 1417 */ 1418 private com.google.protobuf.SingleFieldBuilder< 1419 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> 
getRegionInfoFieldBuilder()1420 getRegionInfoFieldBuilder() { 1421 if (regionInfoBuilder_ == null) { 1422 regionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder< 1423 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( 1424 regionInfo_, 1425 getParentForChildren(), 1426 isClean()); 1427 regionInfo_ = null; 1428 } 1429 return regionInfoBuilder_; 1430 } 1431 1432 // optional .GetRegionInfoResponse.CompactionState compaction_state = 2; 1433 private org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE; 1434 /** 1435 * <code>optional .GetRegionInfoResponse.CompactionState compaction_state = 2;</code> 1436 */ hasCompactionState()1437 public boolean hasCompactionState() { 1438 return ((bitField0_ & 0x00000002) == 0x00000002); 1439 } 1440 /** 1441 * <code>optional .GetRegionInfoResponse.CompactionState compaction_state = 2;</code> 1442 */ getCompactionState()1443 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState getCompactionState() { 1444 return compactionState_; 1445 } 1446 /** 1447 * <code>optional .GetRegionInfoResponse.CompactionState compaction_state = 2;</code> 1448 */ setCompactionState(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState value)1449 public Builder setCompactionState(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState value) { 1450 if (value == null) { 1451 throw new NullPointerException(); 1452 } 1453 bitField0_ |= 0x00000002; 1454 compactionState_ = value; 1455 onChanged(); 1456 return this; 1457 } 1458 /** 1459 * <code>optional .GetRegionInfoResponse.CompactionState compaction_state = 
2;</code> 1460 */ clearCompactionState()1461 public Builder clearCompactionState() { 1462 bitField0_ = (bitField0_ & ~0x00000002); 1463 compactionState_ = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetRegionInfoResponse.CompactionState.NONE; 1464 onChanged(); 1465 return this; 1466 } 1467 1468 // optional bool isRecovering = 3; 1469 private boolean isRecovering_ ; 1470 /** 1471 * <code>optional bool isRecovering = 3;</code> 1472 */ hasIsRecovering()1473 public boolean hasIsRecovering() { 1474 return ((bitField0_ & 0x00000004) == 0x00000004); 1475 } 1476 /** 1477 * <code>optional bool isRecovering = 3;</code> 1478 */ getIsRecovering()1479 public boolean getIsRecovering() { 1480 return isRecovering_; 1481 } 1482 /** 1483 * <code>optional bool isRecovering = 3;</code> 1484 */ setIsRecovering(boolean value)1485 public Builder setIsRecovering(boolean value) { 1486 bitField0_ |= 0x00000004; 1487 isRecovering_ = value; 1488 onChanged(); 1489 return this; 1490 } 1491 /** 1492 * <code>optional bool isRecovering = 3;</code> 1493 */ clearIsRecovering()1494 public Builder clearIsRecovering() { 1495 bitField0_ = (bitField0_ & ~0x00000004); 1496 isRecovering_ = false; 1497 onChanged(); 1498 return this; 1499 } 1500 1501 // @@protoc_insertion_point(builder_scope:GetRegionInfoResponse) 1502 } 1503 1504 static { 1505 defaultInstance = new GetRegionInfoResponse(true); defaultInstance.initFields()1506 defaultInstance.initFields(); 1507 } 1508 1509 // @@protoc_insertion_point(class_scope:GetRegionInfoResponse) 1510 } 1511 1512 public interface GetStoreFileRequestOrBuilder 1513 extends com.google.protobuf.MessageOrBuilder { 1514 1515 // required .RegionSpecifier region = 1; 1516 /** 1517 * <code>required .RegionSpecifier region = 1;</code> 1518 */ hasRegion()1519 boolean hasRegion(); 1520 /** 1521 * <code>required .RegionSpecifier region = 1;</code> 1522 */ getRegion()1523 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); 1524 /** 1525 * 
<code>required .RegionSpecifier region = 1;</code> 1526 */ getRegionOrBuilder()1527 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); 1528 1529 // repeated bytes family = 2; 1530 /** 1531 * <code>repeated bytes family = 2;</code> 1532 */ getFamilyList()1533 java.util.List<com.google.protobuf.ByteString> getFamilyList(); 1534 /** 1535 * <code>repeated bytes family = 2;</code> 1536 */ getFamilyCount()1537 int getFamilyCount(); 1538 /** 1539 * <code>repeated bytes family = 2;</code> 1540 */ getFamily(int index)1541 com.google.protobuf.ByteString getFamily(int index); 1542 } 1543 /** 1544 * Protobuf type {@code GetStoreFileRequest} 1545 * 1546 * <pre> 1547 ** 1548 * Get a list of store files for a set of column families in a particular region. 1549 * If no column family is specified, get the store files for all column families. 1550 * </pre> 1551 */ 1552 public static final class GetStoreFileRequest extends 1553 com.google.protobuf.GeneratedMessage 1554 implements GetStoreFileRequestOrBuilder { 1555 // Use GetStoreFileRequest.newBuilder() to construct. 
    // NOTE: protoc-generated message code (source: Admin.proto). Logic must not be
    // hand-modified; comments below only explain the generated structure.
    private GetStoreFileRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit ctor: used only by the static initializer to create defaultInstance.
    private GetStoreFileRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetStoreFileRequest defaultInstance;
    public static GetStoreFileRequest getDefaultInstance() {
      return defaultInstance;
    }

    public GetStoreFileRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: tag 10 = required message field `region` (1),
    // tag 18 = repeated bytes field `family` (2); unrecognized tags are preserved in
    // unknownFields. On any parse error the partially-built message is attached to
    // the thrown InvalidProtocolBufferException.
    private GetStoreFileRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = region_.toBuilder();
              }
              region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(region_);
                region_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
                family_ = new java.util.ArrayList<com.google.protobuf.ByteString>();
                mutable_bitField0_ |= 0x00000002;
              }
              family_.add(input.readBytes());
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Seal the repeated field and unknown-field set even on error paths.
        if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) {
          family_ = java.util.Collections.unmodifiableList(family_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.Builder.class);
    }

    public static com.google.protobuf.Parser<GetStoreFileRequest> PARSER =
        new com.google.protobuf.AbstractParser<GetStoreFileRequest>() {
      public GetStoreFileRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetStoreFileRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetStoreFileRequest> getParserForType() {
      return PARSER;
    }

    // bitField0_ tracks field presence: bit 0x00000001 = `region` is set.
    private int bitField0_;
    // required .RegionSpecifier region = 1;
    public static final int REGION_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public boolean hasRegion() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
      return region_;
    }
    /**
     * <code>required .RegionSpecifier region = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
      return region_;
    }

    // repeated bytes family = 2;
    public static final int FAMILY_FIELD_NUMBER = 2;
    private java.util.List<com.google.protobuf.ByteString> family_;
    /**
     * <code>repeated bytes family = 2;</code>
     */
    public java.util.List<com.google.protobuf.ByteString>
        getFamilyList() {
      return family_;
    }
    /**
     * <code>repeated bytes family = 2;</code>
     */
    public int getFamilyCount() {
      return family_.size();
    }
    /**
     * <code>repeated bytes family = 2;</code>
     */
    public com.google.protobuf.ByteString getFamily(int index) {
      return family_.get(index);
    }

    private void initFields() {
      region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      family_ = java.util.Collections.emptyList();
    }
    // Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // `region` is a required field and must itself be fully initialized.
      if (!hasRegion()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getRegion().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, region_);
      }
      for (int i = 0; i < family_.size(); i++) {
        output.writeBytes(2, family_.get(i));
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, region_);
      }
      {
        int dataSize = 0;
        for (int i = 0; i < family_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(family_.get(i));
        }
        size += dataSize;
        // One byte of tag overhead per repeated `family` element.
        size += 1 * getFamilyList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest) obj;

      boolean result = true;
      result = result && (hasRegion() == other.hasRegion());
      if (hasRegion()) {
        result = result && getRegion()
            .equals(other.getRegion());
      }
      result = result && getFamilyList()
          .equals(other.getFamilyList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasRegion()) {
        hash = (37 * hash) + REGION_FIELD_NUMBER;
        hash = (53 * hash) + getRegion().hashCode();
      }
      if (getFamilyCount() > 0) {
        hash = (37 * hash) + FAMILY_FIELD_NUMBER;
        hash = (53 * hash) + getFamilyList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code GetStoreFileRequest}
     *
     * <pre>
     **
     * Get a list of store files for a set of column families in a particular region.
     * If no column family is specified, get the store files for all column families.
     * </pre>
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getRegionFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        family_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        if (regionBuilder_ == null) {
          result.region_ = region_;
        } else {
          result.region_ = regionBuilder_.build();
        }
        // Hand ownership of the family list to the message and drop the
        // mutability bit so subsequent builder use re-copies the list.
        if (((bitField0_ & 0x00000002) == 0x00000002)) {
          family_ = java.util.Collections.unmodifiableList(family_);
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.family_ = family_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest.getDefaultInstance()) return this;
        if (other.hasRegion()) {
          mergeRegion(other.getRegion());
        }
        if (!other.family_.isEmpty()) {
          if (family_.isEmpty()) {
            family_ = other.family_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureFamilyIsMutable();
            family_.addAll(other.family_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasRegion()) {
          
          return false;
        }
        if (!getRegion().isInitialized()) {
          
          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Merge whatever was parsed before the failure, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required .RegionSpecifier region = 1;
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public boolean hasRegion() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
        if (regionBuilder_ == null) {
          return region_;
        } else {
          return regionBuilder_.getMessage();
        }
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          region_ = value;
          onChanged();
        } else {
          regionBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder setRegion(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
        if (regionBuilder_ == null) {
          region_ = builderForValue.build();
          onChanged();
        } else {
          regionBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
        if (regionBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
            region_ =
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
          } else {
            region_ = value;
          }
          onChanged();
        } else {
          regionBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public Builder clearRegion() {
        if (regionBuilder_ == null) {
          region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
          onChanged();
        } else {
          regionBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getRegionFieldBuilder().getBuilder();
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
        if (regionBuilder_ != null) {
          return regionBuilder_.getMessageOrBuilder();
        } else {
          return region_;
        }
      }
      /**
       * <code>required .RegionSpecifier region = 1;</code>
       */
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> 
          getRegionFieldBuilder() {
        if (regionBuilder_ == null) {
          regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
                  region_,
                  getParentForChildren(),
                  isClean());
          region_ = null;
        }
        return regionBuilder_;
      }

      // repeated bytes family = 2;
      private java.util.List<com.google.protobuf.ByteString> family_ = java.util.Collections.emptyList();
      // Copy-on-write guard: bit 0x00000002 means family_ is a private mutable list.
      private void ensureFamilyIsMutable() {
        if (!((bitField0_ & 0x00000002) == 0x00000002)) {
          family_ = new java.util.ArrayList<com.google.protobuf.ByteString>(family_);
          bitField0_ |= 0x00000002;
         }
      }
      /**
       * <code>repeated bytes family = 2;</code>
       */
      public java.util.List<com.google.protobuf.ByteString>
          getFamilyList() {
        return java.util.Collections.unmodifiableList(family_);
      }
      /**
       * <code>repeated bytes family = 2;</code>
       */
      public int getFamilyCount() {
        return family_.size();
      }
      /**
       * <code>repeated bytes family = 2;</code>
       */
      public com.google.protobuf.ByteString getFamily(int index) {
        return family_.get(index);
      }
      /**
       * <code>repeated bytes family = 2;</code>
       */
      public Builder setFamily(
          int index, com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureFamilyIsMutable();
        family_.set(index, value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated bytes family = 2;</code>
       */
      public Builder addFamily(com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureFamilyIsMutable();
        family_.add(value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated bytes family = 2;</code>
       */
      public Builder addAllFamily(
          java.lang.Iterable<? extends com.google.protobuf.ByteString> values) {
        ensureFamilyIsMutable();
        super.addAll(values, family_);
        onChanged();
        return this;
      }
      /**
       * <code>repeated bytes family = 2;</code>
       */
      public Builder clearFamily() {
        family_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:GetStoreFileRequest)
    }

    static {
      defaultInstance = new GetStoreFileRequest(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:GetStoreFileRequest)
  }

  public interface GetStoreFileResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated string store_file = 1;
    /**
     * <code>repeated string store_file = 1;</code>
     */
    java.util.List<java.lang.String>
    getStoreFileList();
    /**
     * <code>repeated string store_file = 1;</code>
     */
    int getStoreFileCount();
    /**
     * <code>repeated string store_file = 1;</code>
     */
    java.lang.String getStoreFile(int index);
    /**
     * <code>repeated string store_file = 1;</code>
     */
    com.google.protobuf.ByteString
        getStoreFileBytes(int index);
  }
  /**
   * Protobuf type {@code GetStoreFileResponse}
   */
  public static final class GetStoreFileResponse extends
      com.google.protobuf.GeneratedMessage
      implements GetStoreFileResponseOrBuilder {
    // Use GetStoreFileResponse.newBuilder() to construct.
    // NOTE(review): protoc-generated code (file header says "DO NOT EDIT!").
    // Comments below are review annotations only; the code itself is kept
    // token-identical to the protocol buffer compiler's output.

    // Use GetStoreFileResponse.newBuilder() to construct.
    private GetStoreFileResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to build the singleton default instance (no parsing, no builder).
    private GetStoreFileResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetStoreFileResponse defaultInstance;
    public static GetStoreFileResponse getDefaultInstance() {
      return defaultInstance;
    }

    public GetStoreFileResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tags until 0 (end of stream),
    // collecting field 1 into storeFile_ and anything unrecognized into
    // unknownFields. The default: arm before case 10: is generator-intended
    // switch ordering, not a mistake.
    private GetStoreFileResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Tag 10 = field number 1, wire type 2 (length-delimited):
              // one occurrence of the repeated "store_file" string.
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                storeFile_ = new com.google.protobuf.LazyStringArrayList();
                mutable_bitField0_ |= 0x00000001;
              }
              storeFile_.add(input.readBytes());
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Runs even on parse failure so the partially-built message is frozen.
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList(storeFile_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.Builder.class);
    }

    // NOTE(review): non-final public static PARSER is the protobuf 2.5
    // generated pattern; do not hand-tighten it.
    public static com.google.protobuf.Parser<GetStoreFileResponse> PARSER =
        new com.google.protobuf.AbstractParser<GetStoreFileResponse>() {
      public GetStoreFileResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetStoreFileResponse(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetStoreFileResponse> getParserForType() {
      return PARSER;
    }

    // repeated string store_file = 1;
    public static final int STORE_FILE_FIELD_NUMBER = 1;
    private com.google.protobuf.LazyStringList storeFile_;
    /**
     * <code>repeated string store_file = 1;</code>
     */
    public java.util.List<java.lang.String>
        getStoreFileList() {
      return storeFile_;
    }
    /**
     * <code>repeated string store_file = 1;</code>
     */
    public int getStoreFileCount() {
      return storeFile_.size();
    }
    /**
     * <code>repeated string store_file = 1;</code>
     */
    public java.lang.String getStoreFile(int index) {
      return storeFile_.get(index);
    }
    /**
     * <code>repeated string store_file = 1;</code>
     */
    public com.google.protobuf.ByteString
        getStoreFileBytes(int index) {
      return storeFile_.getByteString(index);
    }

    private void initFields() {
      storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
    }
    // Memoized tri-state: -1 unknown, 0 false, 1 true. No required fields here,
    // so the message is always initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      for (int i = 0; i < storeFile_.size(); i++) {
        output.writeBytes(1, storeFile_.getByteString(i));
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      {
        int dataSize = 0;
        for (int i = 0; i < storeFile_.size(); i++) {
          dataSize += com.google.protobuf.CodedOutputStream
            .computeBytesSizeNoTag(storeFile_.getByteString(i));
        }
        size += dataSize;
        // One byte of tag overhead per element (field 1 => 1-byte tag).
        size += 1 * getStoreFileList().size();
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse) obj;

      boolean result = true;
      result = result && getStoreFileList()
          .equals(other.getStoreFileList());
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (getStoreFileCount() > 0) {
        hash = (37 * hash) + STORE_FILE_FIELD_NUMBER;
        hash = (53 * hash) + getStoreFileList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parse helpers: all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code GetStoreFileResponse}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponseOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetStoreFileResponse_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse build() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Transfers ownership of storeFile_ to the built message: the builder's
      // list is frozen and the builder's "has" bit cleared so a later add()
      // copies rather than mutates the built message.
      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse(this);
        int from_bitField0_ = bitField0_;
        if (((bitField0_ & 0x00000001) == 0x00000001)) {
          storeFile_ = new com.google.protobuf.UnmodifiableLazyStringList(
              storeFile_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.storeFile_ = storeFile_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse.getDefaultInstance()) return this;
        if (!other.storeFile_.isEmpty()) {
          if (storeFile_.isEmpty()) {
            // Share the other message's (immutable) list; copy lazily on write.
            storeFile_ = other.storeFile_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureStoreFileIsMutable();
            storeFile_.addAll(other.storeFile_);
          }
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was parsed before the failure, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetStoreFileResponse) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // repeated string store_file = 1;
      private com.google.protobuf.LazyStringList storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
      // Copy-on-write guard: replaces a shared/immutable list with a private
      // mutable copy before the first modification.
      private void ensureStoreFileIsMutable() {
        if (!((bitField0_ & 0x00000001) == 0x00000001)) {
          storeFile_ = new com.google.protobuf.LazyStringArrayList(storeFile_);
          bitField0_ |= 0x00000001;
        }
      }
      /**
       * <code>repeated string store_file = 1;</code>
       */
      public java.util.List<java.lang.String>
          getStoreFileList() {
        return java.util.Collections.unmodifiableList(storeFile_);
      }
      /**
       * <code>repeated string store_file = 1;</code>
       */
      public int getStoreFileCount() {
        return storeFile_.size();
      }
      /**
       * <code>repeated string store_file = 1;</code>
       */
      public java.lang.String getStoreFile(int index) {
        return storeFile_.get(index);
      }
      /**
       * <code>repeated string store_file = 1;</code>
       */
      public com.google.protobuf.ByteString
          getStoreFileBytes(int index) {
        return storeFile_.getByteString(index);
      }
      /**
       * <code>repeated string store_file = 1;</code>
       */
      public Builder setStoreFile(
          int index, java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureStoreFileIsMutable();
        storeFile_.set(index, value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string store_file = 1;</code>
       */
      public Builder addStoreFile(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureStoreFileIsMutable();
        storeFile_.add(value);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string store_file = 1;</code>
       */
      public Builder addAllStoreFile(
          java.lang.Iterable<java.lang.String> values) {
        ensureStoreFileIsMutable();
        super.addAll(values, storeFile_);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string store_file = 1;</code>
       */
      public Builder clearStoreFile() {
        storeFile_ = com.google.protobuf.LazyStringArrayList.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
        return this;
      }
      /**
       * <code>repeated string store_file = 1;</code>
       */
      public Builder addStoreFileBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  ensureStoreFileIsMutable();
        storeFile_.add(value);
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:GetStoreFileResponse)
    }

    static {
      defaultInstance = new GetStoreFileResponse(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:GetStoreFileResponse)
  }

  // Marker-only OrBuilder: GetOnlineRegionRequest declares no fields.
  public interface GetOnlineRegionRequestOrBuilder
      extends com.google.protobuf.MessageOrBuilder {
  }
  /**
   * Protobuf type {@code GetOnlineRegionRequest}
   */
  public static final class GetOnlineRegionRequest extends
      com.google.protobuf.GeneratedMessage
      implements GetOnlineRegionRequestOrBuilder {
    // Use GetOnlineRegionRequest.newBuilder() to construct.
    // NOTE(review): protoc-generated code ("DO NOT EDIT!"); annotations only.
    private GetOnlineRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to build the singleton default instance.
    private GetOnlineRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetOnlineRegionRequest defaultInstance;
    public static GetOnlineRegionRequest getDefaultInstance() {
      return defaultInstance;
    }

    public GetOnlineRegionRequest getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Parsing constructor: this message declares no fields, so every tag other
    // than 0 (end of stream) is routed to parseUnknownField.
    private GetOnlineRegionRequest(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.Builder.class);
    }

    public static com.google.protobuf.Parser<GetOnlineRegionRequest> PARSER =
        new com.google.protobuf.AbstractParser<GetOnlineRegionRequest>() {
      public GetOnlineRegionRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new GetOnlineRegionRequest(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<GetOnlineRegionRequest> getParserForType() {
      return PARSER;
    }

    private void initFields() {
    }
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    // Only unknown fields can be present, so serialization is just those.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest) obj;

      boolean result = true;
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parse helpers: all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code GetOnlineRegionRequest}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequestOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionRequest_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance();
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest build() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest(this);
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest.getDefaultInstance()) return this;
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was parsed before the failure, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionRequest) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }

      // @@protoc_insertion_point(builder_scope:GetOnlineRegionRequest)
    }

    static {
      defaultInstance = new GetOnlineRegionRequest(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:GetOnlineRegionRequest)
  }

  public interface GetOnlineRegionResponseOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // repeated .RegionInfo region_info = 1;
    /**
     * <code>repeated .RegionInfo region_info = 1;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> 
        getRegionInfoList();
    /**
     * <code>repeated .RegionInfo region_info = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index);
    /**
     * <code>repeated .RegionInfo region_info = 1;</code>
     */
    int getRegionInfoCount();
    /**
     * <code>repeated .RegionInfo region_info = 1;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> 
        getRegionInfoOrBuilderList();
    /**
     * <code>repeated .RegionInfo region_info = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(
        int index);
  }
  /**
   * Protobuf type {@code GetOnlineRegionResponse}
   */
  public static final class GetOnlineRegionResponse extends
      com.google.protobuf.GeneratedMessage
      implements GetOnlineRegionResponseOrBuilder {
    // Use GetOnlineRegionResponse.newBuilder() to construct.
    private GetOnlineRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to build the singleton default instance.
    private GetOnlineRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final GetOnlineRegionResponse defaultInstance;
    public static GetOnlineRegionResponse getDefaultInstance() {
      return defaultInstance;
    }

    public GetOnlineRegionResponse getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor; tag 10 = field 1 (repeated RegionInfo,
    // length-delimited sub-messages).
    private GetOnlineRegionResponse(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
                regionInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo>();
                mutable_bitField0_ |= 0x00000001;
              }
              regionInfo_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Runs even on parse failure so the partially-built message is frozen.
        if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) {
          regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.Builder.class); 3223 } 3224 3225 public static com.google.protobuf.Parser<GetOnlineRegionResponse> PARSER = 3226 new com.google.protobuf.AbstractParser<GetOnlineRegionResponse>() { 3227 public GetOnlineRegionResponse parsePartialFrom( 3228 com.google.protobuf.CodedInputStream input, 3229 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3230 throws com.google.protobuf.InvalidProtocolBufferException { 3231 return new GetOnlineRegionResponse(input, extensionRegistry); 3232 } 3233 }; 3234 3235 @java.lang.Override getParserForType()3236 public com.google.protobuf.Parser<GetOnlineRegionResponse> getParserForType() { 3237 return PARSER; 3238 } 3239 3240 // repeated .RegionInfo region_info = 1; 3241 public static final int REGION_INFO_FIELD_NUMBER = 1; 3242 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> regionInfo_; 3243 /** 3244 * <code>repeated .RegionInfo region_info = 1;</code> 3245 */ getRegionInfoList()3246 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> getRegionInfoList() { 3247 return regionInfo_; 3248 } 3249 /** 3250 * <code>repeated .RegionInfo region_info = 1;</code> 3251 */ 3252 public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionInfoOrBuilderList()3253 getRegionInfoOrBuilderList() { 3254 return regionInfo_; 3255 } 3256 /** 3257 * <code>repeated .RegionInfo region_info = 1;</code> 3258 */ getRegionInfoCount()3259 public int getRegionInfoCount() { 3260 return regionInfo_.size(); 3261 } 3262 /** 3263 * <code>repeated .RegionInfo region_info = 1;</code> 3264 */ getRegionInfo(int index)3265 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) { 3266 return regionInfo_.get(index); 3267 } 3268 /** 3269 * <code>repeated .RegionInfo region_info = 1;</code> 3270 */ getRegionInfoOrBuilder( int index)3271 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder( 3272 int index) { 3273 return regionInfo_.get(index); 3274 } 3275 initFields()3276 private void initFields() { 3277 regionInfo_ = java.util.Collections.emptyList(); 3278 } 3279 private byte memoizedIsInitialized = -1; isInitialized()3280 public final boolean isInitialized() { 3281 byte isInitialized = memoizedIsInitialized; 3282 if (isInitialized != -1) return isInitialized == 1; 3283 3284 for (int i = 0; i < getRegionInfoCount(); i++) { 3285 if (!getRegionInfo(i).isInitialized()) { 3286 memoizedIsInitialized = 0; 3287 return false; 3288 } 3289 } 3290 memoizedIsInitialized = 1; 3291 return true; 3292 } 3293 writeTo(com.google.protobuf.CodedOutputStream output)3294 public void writeTo(com.google.protobuf.CodedOutputStream output) 3295 throws java.io.IOException { 3296 getSerializedSize(); 3297 for (int i = 0; i < regionInfo_.size(); i++) { 3298 output.writeMessage(1, regionInfo_.get(i)); 3299 } 3300 getUnknownFields().writeTo(output); 3301 } 3302 3303 private int memoizedSerializedSize = -1; getSerializedSize()3304 public int getSerializedSize() { 3305 int size = memoizedSerializedSize; 3306 if (size != -1) return size; 3307 3308 size = 0; 3309 for (int i = 0; i 
< regionInfo_.size(); i++) { 3310 size += com.google.protobuf.CodedOutputStream 3311 .computeMessageSize(1, regionInfo_.get(i)); 3312 } 3313 size += getUnknownFields().getSerializedSize(); 3314 memoizedSerializedSize = size; 3315 return size; 3316 } 3317 3318 private static final long serialVersionUID = 0L; 3319 @java.lang.Override writeReplace()3320 protected java.lang.Object writeReplace() 3321 throws java.io.ObjectStreamException { 3322 return super.writeReplace(); 3323 } 3324 3325 @java.lang.Override equals(final java.lang.Object obj)3326 public boolean equals(final java.lang.Object obj) { 3327 if (obj == this) { 3328 return true; 3329 } 3330 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse)) { 3331 return super.equals(obj); 3332 } 3333 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) obj; 3334 3335 boolean result = true; 3336 result = result && getRegionInfoList() 3337 .equals(other.getRegionInfoList()); 3338 result = result && 3339 getUnknownFields().equals(other.getUnknownFields()); 3340 return result; 3341 } 3342 3343 private int memoizedHashCode = 0; 3344 @java.lang.Override hashCode()3345 public int hashCode() { 3346 if (memoizedHashCode != 0) { 3347 return memoizedHashCode; 3348 } 3349 int hash = 41; 3350 hash = (19 * hash) + getDescriptorForType().hashCode(); 3351 if (getRegionInfoCount() > 0) { 3352 hash = (37 * hash) + REGION_INFO_FIELD_NUMBER; 3353 hash = (53 * hash) + getRegionInfoList().hashCode(); 3354 } 3355 hash = (29 * hash) + getUnknownFields().hashCode(); 3356 memoizedHashCode = hash; 3357 return hash; 3358 } 3359 parseFrom( com.google.protobuf.ByteString data)3360 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( 3361 com.google.protobuf.ByteString data) 3362 throws com.google.protobuf.InvalidProtocolBufferException { 
3363 return PARSER.parseFrom(data); 3364 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3365 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( 3366 com.google.protobuf.ByteString data, 3367 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3368 throws com.google.protobuf.InvalidProtocolBufferException { 3369 return PARSER.parseFrom(data, extensionRegistry); 3370 } parseFrom(byte[] data)3371 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(byte[] data) 3372 throws com.google.protobuf.InvalidProtocolBufferException { 3373 return PARSER.parseFrom(data); 3374 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3375 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( 3376 byte[] data, 3377 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3378 throws com.google.protobuf.InvalidProtocolBufferException { 3379 return PARSER.parseFrom(data, extensionRegistry); 3380 } parseFrom(java.io.InputStream input)3381 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom(java.io.InputStream input) 3382 throws java.io.IOException { 3383 return PARSER.parseFrom(input); 3384 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3385 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( 3386 java.io.InputStream input, 3387 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3388 throws java.io.IOException { 3389 return PARSER.parseFrom(input, extensionRegistry); 3390 } parseDelimitedFrom(java.io.InputStream input)3391 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseDelimitedFrom(java.io.InputStream input) 3392 
throws java.io.IOException { 3393 return PARSER.parseDelimitedFrom(input); 3394 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3395 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseDelimitedFrom( 3396 java.io.InputStream input, 3397 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3398 throws java.io.IOException { 3399 return PARSER.parseDelimitedFrom(input, extensionRegistry); 3400 } parseFrom( com.google.protobuf.CodedInputStream input)3401 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( 3402 com.google.protobuf.CodedInputStream input) 3403 throws java.io.IOException { 3404 return PARSER.parseFrom(input); 3405 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3406 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parseFrom( 3407 com.google.protobuf.CodedInputStream input, 3408 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3409 throws java.io.IOException { 3410 return PARSER.parseFrom(input, extensionRegistry); 3411 } 3412 newBuilder()3413 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()3414 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse prototype)3415 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse prototype) { 3416 return newBuilder().mergeFrom(prototype); 3417 } toBuilder()3418 public Builder toBuilder() { return newBuilder(this); } 3419 3420 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)3421 protected Builder newBuilderForType( 3422 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 3423 Builder builder 
= new Builder(parent); 3424 return builder; 3425 } 3426 /** 3427 * Protobuf type {@code GetOnlineRegionResponse} 3428 */ 3429 public static final class Builder extends 3430 com.google.protobuf.GeneratedMessage.Builder<Builder> 3431 implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponseOrBuilder { 3432 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()3433 getDescriptor() { 3434 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_descriptor; 3435 } 3436 3437 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()3438 internalGetFieldAccessorTable() { 3439 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_fieldAccessorTable 3440 .ensureFieldAccessorsInitialized( 3441 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.Builder.class); 3442 } 3443 3444 // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.newBuilder() Builder()3445 private Builder() { 3446 maybeForceBuilderInitialization(); 3447 } 3448 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)3449 private Builder( 3450 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 3451 super(parent); 3452 maybeForceBuilderInitialization(); 3453 } maybeForceBuilderInitialization()3454 private void maybeForceBuilderInitialization() { 3455 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 3456 getRegionInfoFieldBuilder(); 3457 } 3458 } create()3459 private static Builder create() { 3460 return new Builder(); 3461 } 3462 clear()3463 public Builder clear() { 3464 super.clear(); 3465 if (regionInfoBuilder_ == null) { 3466 regionInfo_ = java.util.Collections.emptyList(); 3467 bitField0_ = (bitField0_ & ~0x00000001); 3468 } else 
{ 3469 regionInfoBuilder_.clear(); 3470 } 3471 return this; 3472 } 3473 clone()3474 public Builder clone() { 3475 return create().mergeFrom(buildPartial()); 3476 } 3477 3478 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()3479 getDescriptorForType() { 3480 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_GetOnlineRegionResponse_descriptor; 3481 } 3482 getDefaultInstanceForType()3483 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse getDefaultInstanceForType() { 3484 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance(); 3485 } 3486 build()3487 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse build() { 3488 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse result = buildPartial(); 3489 if (!result.isInitialized()) { 3490 throw newUninitializedMessageException(result); 3491 } 3492 return result; 3493 } 3494 buildPartial()3495 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse buildPartial() { 3496 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse(this); 3497 int from_bitField0_ = bitField0_; 3498 if (regionInfoBuilder_ == null) { 3499 if (((bitField0_ & 0x00000001) == 0x00000001)) { 3500 regionInfo_ = java.util.Collections.unmodifiableList(regionInfo_); 3501 bitField0_ = (bitField0_ & ~0x00000001); 3502 } 3503 result.regionInfo_ = regionInfo_; 3504 } else { 3505 result.regionInfo_ = regionInfoBuilder_.build(); 3506 } 3507 onBuilt(); 3508 return result; 3509 } 3510 mergeFrom(com.google.protobuf.Message other)3511 public Builder mergeFrom(com.google.protobuf.Message other) { 3512 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) { 3513 return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse)other); 3514 } else { 3515 super.mergeFrom(other); 3516 return this; 3517 } 3518 } 3519 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse other)3520 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse other) { 3521 if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse.getDefaultInstance()) return this; 3522 if (regionInfoBuilder_ == null) { 3523 if (!other.regionInfo_.isEmpty()) { 3524 if (regionInfo_.isEmpty()) { 3525 regionInfo_ = other.regionInfo_; 3526 bitField0_ = (bitField0_ & ~0x00000001); 3527 } else { 3528 ensureRegionInfoIsMutable(); 3529 regionInfo_.addAll(other.regionInfo_); 3530 } 3531 onChanged(); 3532 } 3533 } else { 3534 if (!other.regionInfo_.isEmpty()) { 3535 if (regionInfoBuilder_.isEmpty()) { 3536 regionInfoBuilder_.dispose(); 3537 regionInfoBuilder_ = null; 3538 regionInfo_ = other.regionInfo_; 3539 bitField0_ = (bitField0_ & ~0x00000001); 3540 regionInfoBuilder_ = 3541 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
3542 getRegionInfoFieldBuilder() : null; 3543 } else { 3544 regionInfoBuilder_.addAllMessages(other.regionInfo_); 3545 } 3546 } 3547 } 3548 this.mergeUnknownFields(other.getUnknownFields()); 3549 return this; 3550 } 3551 isInitialized()3552 public final boolean isInitialized() { 3553 for (int i = 0; i < getRegionInfoCount(); i++) { 3554 if (!getRegionInfo(i).isInitialized()) { 3555 3556 return false; 3557 } 3558 } 3559 return true; 3560 } 3561 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3562 public Builder mergeFrom( 3563 com.google.protobuf.CodedInputStream input, 3564 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3565 throws java.io.IOException { 3566 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse parsedMessage = null; 3567 try { 3568 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 3569 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 3570 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.GetOnlineRegionResponse) e.getUnfinishedMessage(); 3571 throw e; 3572 } finally { 3573 if (parsedMessage != null) { 3574 mergeFrom(parsedMessage); 3575 } 3576 } 3577 return this; 3578 } 3579 private int bitField0_; 3580 3581 // repeated .RegionInfo region_info = 1; 3582 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> regionInfo_ = 3583 java.util.Collections.emptyList(); ensureRegionInfoIsMutable()3584 private void ensureRegionInfoIsMutable() { 3585 if (!((bitField0_ & 0x00000001) == 0x00000001)) { 3586 regionInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo>(regionInfo_); 3587 bitField0_ |= 0x00000001; 3588 } 3589 } 3590 3591 private com.google.protobuf.RepeatedFieldBuilder< 3592 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_; 3593 3594 /** 3595 * <code>repeated .RegionInfo region_info = 1;</code> 3596 */ getRegionInfoList()3597 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> getRegionInfoList() { 3598 if (regionInfoBuilder_ == null) { 3599 return java.util.Collections.unmodifiableList(regionInfo_); 3600 } else { 3601 return regionInfoBuilder_.getMessageList(); 3602 } 3603 } 3604 /** 3605 * <code>repeated .RegionInfo region_info = 1;</code> 3606 */ getRegionInfoCount()3607 public int getRegionInfoCount() { 3608 if (regionInfoBuilder_ == null) { 3609 return regionInfo_.size(); 3610 } else { 3611 return regionInfoBuilder_.getCount(); 3612 } 3613 } 3614 /** 3615 * <code>repeated .RegionInfo region_info = 1;</code> 3616 */ getRegionInfo(int index)3617 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(int index) { 3618 if (regionInfoBuilder_ == null) { 3619 return regionInfo_.get(index); 3620 } else { 3621 return regionInfoBuilder_.getMessage(index); 3622 } 3623 } 3624 /** 3625 * <code>repeated .RegionInfo region_info = 1;</code> 3626 */ setRegionInfo( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value)3627 public Builder setRegionInfo( 3628 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { 3629 if (regionInfoBuilder_ == null) { 3630 if (value == null) { 3631 throw new NullPointerException(); 3632 } 3633 ensureRegionInfoIsMutable(); 3634 regionInfo_.set(index, value); 3635 onChanged(); 3636 } else { 3637 regionInfoBuilder_.setMessage(index, value); 3638 } 3639 return this; 3640 } 3641 /** 3642 * <code>repeated .RegionInfo region_info = 1;</code> 3643 */ setRegionInfo( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue)3644 public Builder setRegionInfo( 3645 int index, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { 3646 if (regionInfoBuilder_ == null) { 3647 ensureRegionInfoIsMutable(); 3648 regionInfo_.set(index, builderForValue.build()); 3649 onChanged(); 3650 } else { 3651 regionInfoBuilder_.setMessage(index, builderForValue.build()); 3652 } 3653 return this; 3654 } 3655 /** 3656 * <code>repeated .RegionInfo region_info = 1;</code> 3657 */ addRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value)3658 public Builder addRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { 3659 if (regionInfoBuilder_ == null) { 3660 if (value == null) { 3661 throw new NullPointerException(); 3662 } 3663 ensureRegionInfoIsMutable(); 3664 regionInfo_.add(value); 3665 onChanged(); 3666 } else { 3667 regionInfoBuilder_.addMessage(value); 3668 } 3669 return this; 3670 } 3671 /** 3672 * <code>repeated .RegionInfo region_info = 1;</code> 3673 */ addRegionInfo( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value)3674 public Builder addRegionInfo( 3675 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { 3676 if (regionInfoBuilder_ == null) { 3677 if (value == null) { 3678 throw new NullPointerException(); 3679 } 3680 ensureRegionInfoIsMutable(); 3681 regionInfo_.add(index, value); 3682 onChanged(); 3683 } else { 3684 regionInfoBuilder_.addMessage(index, value); 3685 } 3686 return this; 3687 } 3688 /** 3689 * <code>repeated .RegionInfo region_info = 1;</code> 3690 */ addRegionInfo( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue)3691 public Builder addRegionInfo( 3692 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { 3693 if (regionInfoBuilder_ == null) { 3694 ensureRegionInfoIsMutable(); 3695 regionInfo_.add(builderForValue.build()); 3696 onChanged(); 3697 } else { 3698 
regionInfoBuilder_.addMessage(builderForValue.build()); 3699 } 3700 return this; 3701 } 3702 /** 3703 * <code>repeated .RegionInfo region_info = 1;</code> 3704 */ addRegionInfo( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue)3705 public Builder addRegionInfo( 3706 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { 3707 if (regionInfoBuilder_ == null) { 3708 ensureRegionInfoIsMutable(); 3709 regionInfo_.add(index, builderForValue.build()); 3710 onChanged(); 3711 } else { 3712 regionInfoBuilder_.addMessage(index, builderForValue.build()); 3713 } 3714 return this; 3715 } 3716 /** 3717 * <code>repeated .RegionInfo region_info = 1;</code> 3718 */ addAllRegionInfo( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> values)3719 public Builder addAllRegionInfo( 3720 java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo> values) { 3721 if (regionInfoBuilder_ == null) { 3722 ensureRegionInfoIsMutable(); 3723 super.addAll(values, regionInfo_); 3724 onChanged(); 3725 } else { 3726 regionInfoBuilder_.addAllMessages(values); 3727 } 3728 return this; 3729 } 3730 /** 3731 * <code>repeated .RegionInfo region_info = 1;</code> 3732 */ clearRegionInfo()3733 public Builder clearRegionInfo() { 3734 if (regionInfoBuilder_ == null) { 3735 regionInfo_ = java.util.Collections.emptyList(); 3736 bitField0_ = (bitField0_ & ~0x00000001); 3737 onChanged(); 3738 } else { 3739 regionInfoBuilder_.clear(); 3740 } 3741 return this; 3742 } 3743 /** 3744 * <code>repeated .RegionInfo region_info = 1;</code> 3745 */ removeRegionInfo(int index)3746 public Builder removeRegionInfo(int index) { 3747 if (regionInfoBuilder_ == null) { 3748 ensureRegionInfoIsMutable(); 3749 regionInfo_.remove(index); 3750 onChanged(); 3751 } else { 3752 regionInfoBuilder_.remove(index); 3753 } 3754 return this; 3755 } 3756 /** 3757 * 
<code>repeated .RegionInfo region_info = 1;</code> 3758 */ getRegionInfoBuilder( int index)3759 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder( 3760 int index) { 3761 return getRegionInfoFieldBuilder().getBuilder(index); 3762 } 3763 /** 3764 * <code>repeated .RegionInfo region_info = 1;</code> 3765 */ getRegionInfoOrBuilder( int index)3766 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder( 3767 int index) { 3768 if (regionInfoBuilder_ == null) { 3769 return regionInfo_.get(index); } else { 3770 return regionInfoBuilder_.getMessageOrBuilder(index); 3771 } 3772 } 3773 /** 3774 * <code>repeated .RegionInfo region_info = 1;</code> 3775 */ 3776 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionInfoOrBuilderList()3777 getRegionInfoOrBuilderList() { 3778 if (regionInfoBuilder_ != null) { 3779 return regionInfoBuilder_.getMessageOrBuilderList(); 3780 } else { 3781 return java.util.Collections.unmodifiableList(regionInfo_); 3782 } 3783 } 3784 /** 3785 * <code>repeated .RegionInfo region_info = 1;</code> 3786 */ addRegionInfoBuilder()3787 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder() { 3788 return getRegionInfoFieldBuilder().addBuilder( 3789 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()); 3790 } 3791 /** 3792 * <code>repeated .RegionInfo region_info = 1;</code> 3793 */ addRegionInfoBuilder( int index)3794 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder addRegionInfoBuilder( 3795 int index) { 3796 return getRegionInfoFieldBuilder().addBuilder( 3797 index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()); 3798 } 3799 /** 3800 * <code>repeated .RegionInfo region_info = 1;</code> 3801 */ 3802 public 
java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder> getRegionInfoBuilderList()3803 getRegionInfoBuilderList() { 3804 return getRegionInfoFieldBuilder().getBuilderList(); 3805 } 3806 private com.google.protobuf.RepeatedFieldBuilder< 3807 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionInfoFieldBuilder()3808 getRegionInfoFieldBuilder() { 3809 if (regionInfoBuilder_ == null) { 3810 regionInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< 3811 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( 3812 regionInfo_, 3813 ((bitField0_ & 0x00000001) == 0x00000001), 3814 getParentForChildren(), 3815 isClean()); 3816 regionInfo_ = null; 3817 } 3818 return regionInfoBuilder_; 3819 } 3820 3821 // @@protoc_insertion_point(builder_scope:GetOnlineRegionResponse) 3822 } 3823 3824 static { 3825 defaultInstance = new GetOnlineRegionResponse(true); defaultInstance.initFields()3826 defaultInstance.initFields(); 3827 } 3828 3829 // @@protoc_insertion_point(class_scope:GetOnlineRegionResponse) 3830 } 3831 3832 public interface OpenRegionRequestOrBuilder 3833 extends com.google.protobuf.MessageOrBuilder { 3834 3835 // repeated .OpenRegionRequest.RegionOpenInfo open_info = 1; 3836 /** 3837 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 3838 */ 3839 java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo> getOpenInfoList()3840 getOpenInfoList(); 3841 /** 3842 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 3843 */ getOpenInfo(int index)3844 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getOpenInfo(int index); 3845 /** 3846 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 3847 */ getOpenInfoCount()3848 int getOpenInfoCount(); 3849 /** 3850 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 3851 */ 3852 java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder> getOpenInfoOrBuilderList()3853 getOpenInfoOrBuilderList(); 3854 /** 3855 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 3856 */ getOpenInfoOrBuilder( int index)3857 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder getOpenInfoOrBuilder( 3858 int index); 3859 3860 // optional uint64 serverStartCode = 2; 3861 /** 3862 * <code>optional uint64 serverStartCode = 2;</code> 3863 * 3864 * <pre> 3865 * the intended server for this RPC. 3866 * </pre> 3867 */ hasServerStartCode()3868 boolean hasServerStartCode(); 3869 /** 3870 * <code>optional uint64 serverStartCode = 2;</code> 3871 * 3872 * <pre> 3873 * the intended server for this RPC. 3874 * </pre> 3875 */ getServerStartCode()3876 long getServerStartCode(); 3877 3878 // optional uint64 master_system_time = 5; 3879 /** 3880 * <code>optional uint64 master_system_time = 5;</code> 3881 * 3882 * <pre> 3883 * wall clock time from master 3884 * </pre> 3885 */ hasMasterSystemTime()3886 boolean hasMasterSystemTime(); 3887 /** 3888 * <code>optional uint64 master_system_time = 5;</code> 3889 * 3890 * <pre> 3891 * wall clock time from master 3892 * </pre> 3893 */ getMasterSystemTime()3894 long getMasterSystemTime(); 3895 } 3896 /** 3897 * Protobuf type {@code OpenRegionRequest} 3898 */ 3899 public static final class OpenRegionRequest extends 3900 com.google.protobuf.GeneratedMessage 3901 implements OpenRegionRequestOrBuilder { 3902 // Use OpenRegionRequest.newBuilder() to construct. 
OpenRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder)3903 private OpenRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 3904 super(builder); 3905 this.unknownFields = builder.getUnknownFields(); 3906 } OpenRegionRequest(boolean noInit)3907 private OpenRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 3908 3909 private static final OpenRegionRequest defaultInstance; getDefaultInstance()3910 public static OpenRegionRequest getDefaultInstance() { 3911 return defaultInstance; 3912 } 3913 getDefaultInstanceForType()3914 public OpenRegionRequest getDefaultInstanceForType() { 3915 return defaultInstance; 3916 } 3917 3918 private final com.google.protobuf.UnknownFieldSet unknownFields; 3919 @java.lang.Override 3920 public final com.google.protobuf.UnknownFieldSet getUnknownFields()3921 getUnknownFields() { 3922 return this.unknownFields; 3923 } OpenRegionRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3924 private OpenRegionRequest( 3925 com.google.protobuf.CodedInputStream input, 3926 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3927 throws com.google.protobuf.InvalidProtocolBufferException { 3928 initFields(); 3929 int mutable_bitField0_ = 0; 3930 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 3931 com.google.protobuf.UnknownFieldSet.newBuilder(); 3932 try { 3933 boolean done = false; 3934 while (!done) { 3935 int tag = input.readTag(); 3936 switch (tag) { 3937 case 0: 3938 done = true; 3939 break; 3940 default: { 3941 if (!parseUnknownField(input, unknownFields, 3942 extensionRegistry, tag)) { 3943 done = true; 3944 } 3945 break; 3946 } 3947 case 10: { 3948 if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { 3949 openInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo>(); 3950 mutable_bitField0_ |= 
0x00000001; 3951 } 3952 openInfo_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.PARSER, extensionRegistry)); 3953 break; 3954 } 3955 case 16: { 3956 bitField0_ |= 0x00000001; 3957 serverStartCode_ = input.readUInt64(); 3958 break; 3959 } 3960 case 40: { 3961 bitField0_ |= 0x00000002; 3962 masterSystemTime_ = input.readUInt64(); 3963 break; 3964 } 3965 } 3966 } 3967 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 3968 throw e.setUnfinishedMessage(this); 3969 } catch (java.io.IOException e) { 3970 throw new com.google.protobuf.InvalidProtocolBufferException( 3971 e.getMessage()).setUnfinishedMessage(this); 3972 } finally { 3973 if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { 3974 openInfo_ = java.util.Collections.unmodifiableList(openInfo_); 3975 } 3976 this.unknownFields = unknownFields.build(); 3977 makeExtensionsImmutable(); 3978 } 3979 } 3980 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()3981 getDescriptor() { 3982 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_descriptor; 3983 } 3984 3985 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()3986 internalGetFieldAccessorTable() { 3987 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_fieldAccessorTable 3988 .ensureFieldAccessorsInitialized( 3989 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.Builder.class); 3990 } 3991 3992 public static com.google.protobuf.Parser<OpenRegionRequest> PARSER = 3993 new com.google.protobuf.AbstractParser<OpenRegionRequest>() { 3994 public OpenRegionRequest parsePartialFrom( 3995 com.google.protobuf.CodedInputStream input, 3996 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3997 throws 
com.google.protobuf.InvalidProtocolBufferException { 3998 return new OpenRegionRequest(input, extensionRegistry); 3999 } 4000 }; 4001 4002 @java.lang.Override getParserForType()4003 public com.google.protobuf.Parser<OpenRegionRequest> getParserForType() { 4004 return PARSER; 4005 } 4006 4007 public interface RegionOpenInfoOrBuilder 4008 extends com.google.protobuf.MessageOrBuilder { 4009 4010 // required .RegionInfo region = 1; 4011 /** 4012 * <code>required .RegionInfo region = 1;</code> 4013 */ hasRegion()4014 boolean hasRegion(); 4015 /** 4016 * <code>required .RegionInfo region = 1;</code> 4017 */ getRegion()4018 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion(); 4019 /** 4020 * <code>required .RegionInfo region = 1;</code> 4021 */ getRegionOrBuilder()4022 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder(); 4023 4024 // optional uint32 version_of_offline_node = 2; 4025 /** 4026 * <code>optional uint32 version_of_offline_node = 2;</code> 4027 */ hasVersionOfOfflineNode()4028 boolean hasVersionOfOfflineNode(); 4029 /** 4030 * <code>optional uint32 version_of_offline_node = 2;</code> 4031 */ getVersionOfOfflineNode()4032 int getVersionOfOfflineNode(); 4033 4034 // repeated .ServerName favored_nodes = 3; 4035 /** 4036 * <code>repeated .ServerName favored_nodes = 3;</code> 4037 */ 4038 java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getFavoredNodesList()4039 getFavoredNodesList(); 4040 /** 4041 * <code>repeated .ServerName favored_nodes = 3;</code> 4042 */ getFavoredNodes(int index)4043 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getFavoredNodes(int index); 4044 /** 4045 * <code>repeated .ServerName favored_nodes = 3;</code> 4046 */ getFavoredNodesCount()4047 int getFavoredNodesCount(); 4048 /** 4049 * <code>repeated .ServerName favored_nodes = 3;</code> 4050 */ 4051 java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getFavoredNodesOrBuilderList()4052 getFavoredNodesOrBuilderList(); 4053 /** 4054 * <code>repeated .ServerName favored_nodes = 3;</code> 4055 */ getFavoredNodesOrBuilder( int index)4056 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getFavoredNodesOrBuilder( 4057 int index); 4058 4059 // optional bool openForDistributedLogReplay = 4; 4060 /** 4061 * <code>optional bool openForDistributedLogReplay = 4;</code> 4062 * 4063 * <pre> 4064 * open region for distributedLogReplay 4065 * </pre> 4066 */ hasOpenForDistributedLogReplay()4067 boolean hasOpenForDistributedLogReplay(); 4068 /** 4069 * <code>optional bool openForDistributedLogReplay = 4;</code> 4070 * 4071 * <pre> 4072 * open region for distributedLogReplay 4073 * </pre> 4074 */ getOpenForDistributedLogReplay()4075 boolean getOpenForDistributedLogReplay(); 4076 } 4077 /** 4078 * Protobuf type {@code OpenRegionRequest.RegionOpenInfo} 4079 */ 4080 public static final class RegionOpenInfo extends 4081 com.google.protobuf.GeneratedMessage 4082 implements RegionOpenInfoOrBuilder { 4083 // Use RegionOpenInfo.newBuilder() to construct. 
RegionOpenInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder)4084 private RegionOpenInfo(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 4085 super(builder); 4086 this.unknownFields = builder.getUnknownFields(); 4087 } RegionOpenInfo(boolean noInit)4088 private RegionOpenInfo(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 4089 4090 private static final RegionOpenInfo defaultInstance; getDefaultInstance()4091 public static RegionOpenInfo getDefaultInstance() { 4092 return defaultInstance; 4093 } 4094 getDefaultInstanceForType()4095 public RegionOpenInfo getDefaultInstanceForType() { 4096 return defaultInstance; 4097 } 4098 4099 private final com.google.protobuf.UnknownFieldSet unknownFields; 4100 @java.lang.Override 4101 public final com.google.protobuf.UnknownFieldSet getUnknownFields()4102 getUnknownFields() { 4103 return this.unknownFields; 4104 } RegionOpenInfo( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4105 private RegionOpenInfo( 4106 com.google.protobuf.CodedInputStream input, 4107 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4108 throws com.google.protobuf.InvalidProtocolBufferException { 4109 initFields(); 4110 int mutable_bitField0_ = 0; 4111 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 4112 com.google.protobuf.UnknownFieldSet.newBuilder(); 4113 try { 4114 boolean done = false; 4115 while (!done) { 4116 int tag = input.readTag(); 4117 switch (tag) { 4118 case 0: 4119 done = true; 4120 break; 4121 default: { 4122 if (!parseUnknownField(input, unknownFields, 4123 extensionRegistry, tag)) { 4124 done = true; 4125 } 4126 break; 4127 } 4128 case 10: { 4129 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = null; 4130 if (((bitField0_ & 0x00000001) == 0x00000001)) { 4131 subBuilder = region_.toBuilder(); 4132 } 4133 region_ = 
input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry); 4134 if (subBuilder != null) { 4135 subBuilder.mergeFrom(region_); 4136 region_ = subBuilder.buildPartial(); 4137 } 4138 bitField0_ |= 0x00000001; 4139 break; 4140 } 4141 case 16: { 4142 bitField0_ |= 0x00000002; 4143 versionOfOfflineNode_ = input.readUInt32(); 4144 break; 4145 } 4146 case 26: { 4147 if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) { 4148 favoredNodes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName>(); 4149 mutable_bitField0_ |= 0x00000004; 4150 } 4151 favoredNodes_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry)); 4152 break; 4153 } 4154 case 32: { 4155 bitField0_ |= 0x00000004; 4156 openForDistributedLogReplay_ = input.readBool(); 4157 break; 4158 } 4159 } 4160 } 4161 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 4162 throw e.setUnfinishedMessage(this); 4163 } catch (java.io.IOException e) { 4164 throw new com.google.protobuf.InvalidProtocolBufferException( 4165 e.getMessage()).setUnfinishedMessage(this); 4166 } finally { 4167 if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) { 4168 favoredNodes_ = java.util.Collections.unmodifiableList(favoredNodes_); 4169 } 4170 this.unknownFields = unknownFields.build(); 4171 makeExtensionsImmutable(); 4172 } 4173 } 4174 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()4175 getDescriptor() { 4176 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_RegionOpenInfo_descriptor; 4177 } 4178 4179 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()4180 internalGetFieldAccessorTable() { 4181 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable 4182 
.ensureFieldAccessorsInitialized( 4183 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder.class); 4184 } 4185 4186 public static com.google.protobuf.Parser<RegionOpenInfo> PARSER = 4187 new com.google.protobuf.AbstractParser<RegionOpenInfo>() { 4188 public RegionOpenInfo parsePartialFrom( 4189 com.google.protobuf.CodedInputStream input, 4190 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4191 throws com.google.protobuf.InvalidProtocolBufferException { 4192 return new RegionOpenInfo(input, extensionRegistry); 4193 } 4194 }; 4195 4196 @java.lang.Override getParserForType()4197 public com.google.protobuf.Parser<RegionOpenInfo> getParserForType() { 4198 return PARSER; 4199 } 4200 4201 private int bitField0_; 4202 // required .RegionInfo region = 1; 4203 public static final int REGION_FIELD_NUMBER = 1; 4204 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo region_; 4205 /** 4206 * <code>required .RegionInfo region = 1;</code> 4207 */ hasRegion()4208 public boolean hasRegion() { 4209 return ((bitField0_ & 0x00000001) == 0x00000001); 4210 } 4211 /** 4212 * <code>required .RegionInfo region = 1;</code> 4213 */ getRegion()4214 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion() { 4215 return region_; 4216 } 4217 /** 4218 * <code>required .RegionInfo region = 1;</code> 4219 */ getRegionOrBuilder()4220 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder() { 4221 return region_; 4222 } 4223 4224 // optional uint32 version_of_offline_node = 2; 4225 public static final int VERSION_OF_OFFLINE_NODE_FIELD_NUMBER = 2; 4226 private int versionOfOfflineNode_; 4227 /** 4228 * <code>optional uint32 version_of_offline_node = 2;</code> 4229 */ hasVersionOfOfflineNode()4230 public boolean hasVersionOfOfflineNode() { 4231 return 
((bitField0_ & 0x00000002) == 0x00000002); 4232 } 4233 /** 4234 * <code>optional uint32 version_of_offline_node = 2;</code> 4235 */ getVersionOfOfflineNode()4236 public int getVersionOfOfflineNode() { 4237 return versionOfOfflineNode_; 4238 } 4239 4240 // repeated .ServerName favored_nodes = 3; 4241 public static final int FAVORED_NODES_FIELD_NUMBER = 3; 4242 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> favoredNodes_; 4243 /** 4244 * <code>repeated .ServerName favored_nodes = 3;</code> 4245 */ getFavoredNodesList()4246 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getFavoredNodesList() { 4247 return favoredNodes_; 4248 } 4249 /** 4250 * <code>repeated .ServerName favored_nodes = 3;</code> 4251 */ 4252 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getFavoredNodesOrBuilderList()4253 getFavoredNodesOrBuilderList() { 4254 return favoredNodes_; 4255 } 4256 /** 4257 * <code>repeated .ServerName favored_nodes = 3;</code> 4258 */ getFavoredNodesCount()4259 public int getFavoredNodesCount() { 4260 return favoredNodes_.size(); 4261 } 4262 /** 4263 * <code>repeated .ServerName favored_nodes = 3;</code> 4264 */ getFavoredNodes(int index)4265 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getFavoredNodes(int index) { 4266 return favoredNodes_.get(index); 4267 } 4268 /** 4269 * <code>repeated .ServerName favored_nodes = 3;</code> 4270 */ getFavoredNodesOrBuilder( int index)4271 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getFavoredNodesOrBuilder( 4272 int index) { 4273 return favoredNodes_.get(index); 4274 } 4275 4276 // optional bool openForDistributedLogReplay = 4; 4277 public static final int OPENFORDISTRIBUTEDLOGREPLAY_FIELD_NUMBER = 4; 4278 private boolean openForDistributedLogReplay_; 4279 /** 4280 * <code>optional bool openForDistributedLogReplay = 4;</code> 
4281 * 4282 * <pre> 4283 * open region for distributedLogReplay 4284 * </pre> 4285 */ hasOpenForDistributedLogReplay()4286 public boolean hasOpenForDistributedLogReplay() { 4287 return ((bitField0_ & 0x00000004) == 0x00000004); 4288 } 4289 /** 4290 * <code>optional bool openForDistributedLogReplay = 4;</code> 4291 * 4292 * <pre> 4293 * open region for distributedLogReplay 4294 * </pre> 4295 */ getOpenForDistributedLogReplay()4296 public boolean getOpenForDistributedLogReplay() { 4297 return openForDistributedLogReplay_; 4298 } 4299 initFields()4300 private void initFields() { 4301 region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); 4302 versionOfOfflineNode_ = 0; 4303 favoredNodes_ = java.util.Collections.emptyList(); 4304 openForDistributedLogReplay_ = false; 4305 } 4306 private byte memoizedIsInitialized = -1; isInitialized()4307 public final boolean isInitialized() { 4308 byte isInitialized = memoizedIsInitialized; 4309 if (isInitialized != -1) return isInitialized == 1; 4310 4311 if (!hasRegion()) { 4312 memoizedIsInitialized = 0; 4313 return false; 4314 } 4315 if (!getRegion().isInitialized()) { 4316 memoizedIsInitialized = 0; 4317 return false; 4318 } 4319 for (int i = 0; i < getFavoredNodesCount(); i++) { 4320 if (!getFavoredNodes(i).isInitialized()) { 4321 memoizedIsInitialized = 0; 4322 return false; 4323 } 4324 } 4325 memoizedIsInitialized = 1; 4326 return true; 4327 } 4328 writeTo(com.google.protobuf.CodedOutputStream output)4329 public void writeTo(com.google.protobuf.CodedOutputStream output) 4330 throws java.io.IOException { 4331 getSerializedSize(); 4332 if (((bitField0_ & 0x00000001) == 0x00000001)) { 4333 output.writeMessage(1, region_); 4334 } 4335 if (((bitField0_ & 0x00000002) == 0x00000002)) { 4336 output.writeUInt32(2, versionOfOfflineNode_); 4337 } 4338 for (int i = 0; i < favoredNodes_.size(); i++) { 4339 output.writeMessage(3, favoredNodes_.get(i)); 4340 } 4341 if (((bitField0_ & 0x00000004) == 
0x00000004)) { 4342 output.writeBool(4, openForDistributedLogReplay_); 4343 } 4344 getUnknownFields().writeTo(output); 4345 } 4346 4347 private int memoizedSerializedSize = -1; getSerializedSize()4348 public int getSerializedSize() { 4349 int size = memoizedSerializedSize; 4350 if (size != -1) return size; 4351 4352 size = 0; 4353 if (((bitField0_ & 0x00000001) == 0x00000001)) { 4354 size += com.google.protobuf.CodedOutputStream 4355 .computeMessageSize(1, region_); 4356 } 4357 if (((bitField0_ & 0x00000002) == 0x00000002)) { 4358 size += com.google.protobuf.CodedOutputStream 4359 .computeUInt32Size(2, versionOfOfflineNode_); 4360 } 4361 for (int i = 0; i < favoredNodes_.size(); i++) { 4362 size += com.google.protobuf.CodedOutputStream 4363 .computeMessageSize(3, favoredNodes_.get(i)); 4364 } 4365 if (((bitField0_ & 0x00000004) == 0x00000004)) { 4366 size += com.google.protobuf.CodedOutputStream 4367 .computeBoolSize(4, openForDistributedLogReplay_); 4368 } 4369 size += getUnknownFields().getSerializedSize(); 4370 memoizedSerializedSize = size; 4371 return size; 4372 } 4373 4374 private static final long serialVersionUID = 0L; 4375 @java.lang.Override writeReplace()4376 protected java.lang.Object writeReplace() 4377 throws java.io.ObjectStreamException { 4378 return super.writeReplace(); 4379 } 4380 4381 @java.lang.Override equals(final java.lang.Object obj)4382 public boolean equals(final java.lang.Object obj) { 4383 if (obj == this) { 4384 return true; 4385 } 4386 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo)) { 4387 return super.equals(obj); 4388 } 4389 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo) obj; 4390 4391 boolean result = true; 4392 result = result && (hasRegion() == other.hasRegion()); 4393 if (hasRegion()) { 4394 result = result && getRegion() 4395 
.equals(other.getRegion()); 4396 } 4397 result = result && (hasVersionOfOfflineNode() == other.hasVersionOfOfflineNode()); 4398 if (hasVersionOfOfflineNode()) { 4399 result = result && (getVersionOfOfflineNode() 4400 == other.getVersionOfOfflineNode()); 4401 } 4402 result = result && getFavoredNodesList() 4403 .equals(other.getFavoredNodesList()); 4404 result = result && (hasOpenForDistributedLogReplay() == other.hasOpenForDistributedLogReplay()); 4405 if (hasOpenForDistributedLogReplay()) { 4406 result = result && (getOpenForDistributedLogReplay() 4407 == other.getOpenForDistributedLogReplay()); 4408 } 4409 result = result && 4410 getUnknownFields().equals(other.getUnknownFields()); 4411 return result; 4412 } 4413 4414 private int memoizedHashCode = 0; 4415 @java.lang.Override hashCode()4416 public int hashCode() { 4417 if (memoizedHashCode != 0) { 4418 return memoizedHashCode; 4419 } 4420 int hash = 41; 4421 hash = (19 * hash) + getDescriptorForType().hashCode(); 4422 if (hasRegion()) { 4423 hash = (37 * hash) + REGION_FIELD_NUMBER; 4424 hash = (53 * hash) + getRegion().hashCode(); 4425 } 4426 if (hasVersionOfOfflineNode()) { 4427 hash = (37 * hash) + VERSION_OF_OFFLINE_NODE_FIELD_NUMBER; 4428 hash = (53 * hash) + getVersionOfOfflineNode(); 4429 } 4430 if (getFavoredNodesCount() > 0) { 4431 hash = (37 * hash) + FAVORED_NODES_FIELD_NUMBER; 4432 hash = (53 * hash) + getFavoredNodesList().hashCode(); 4433 } 4434 if (hasOpenForDistributedLogReplay()) { 4435 hash = (37 * hash) + OPENFORDISTRIBUTEDLOGREPLAY_FIELD_NUMBER; 4436 hash = (53 * hash) + hashBoolean(getOpenForDistributedLogReplay()); 4437 } 4438 hash = (29 * hash) + getUnknownFields().hashCode(); 4439 memoizedHashCode = hash; 4440 return hash; 4441 } 4442 parseFrom( com.google.protobuf.ByteString data)4443 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( 4444 com.google.protobuf.ByteString data) 4445 throws 
com.google.protobuf.InvalidProtocolBufferException { 4446 return PARSER.parseFrom(data); 4447 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4448 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( 4449 com.google.protobuf.ByteString data, 4450 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4451 throws com.google.protobuf.InvalidProtocolBufferException { 4452 return PARSER.parseFrom(data, extensionRegistry); 4453 } parseFrom(byte[] data)4454 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(byte[] data) 4455 throws com.google.protobuf.InvalidProtocolBufferException { 4456 return PARSER.parseFrom(data); 4457 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4458 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( 4459 byte[] data, 4460 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4461 throws com.google.protobuf.InvalidProtocolBufferException { 4462 return PARSER.parseFrom(data, extensionRegistry); 4463 } parseFrom(java.io.InputStream input)4464 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom(java.io.InputStream input) 4465 throws java.io.IOException { 4466 return PARSER.parseFrom(input); 4467 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4468 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( 4469 java.io.InputStream input, 4470 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4471 throws java.io.IOException { 4472 return PARSER.parseFrom(input, extensionRegistry); 4473 } parseDelimitedFrom(java.io.InputStream input)4474 public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseDelimitedFrom(java.io.InputStream input) 4475 throws java.io.IOException { 4476 return PARSER.parseDelimitedFrom(input); 4477 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4478 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseDelimitedFrom( 4479 java.io.InputStream input, 4480 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4481 throws java.io.IOException { 4482 return PARSER.parseDelimitedFrom(input, extensionRegistry); 4483 } parseFrom( com.google.protobuf.CodedInputStream input)4484 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( 4485 com.google.protobuf.CodedInputStream input) 4486 throws java.io.IOException { 4487 return PARSER.parseFrom(input); 4488 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4489 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parseFrom( 4490 com.google.protobuf.CodedInputStream input, 4491 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4492 throws java.io.IOException { 4493 return PARSER.parseFrom(input, extensionRegistry); 4494 } 4495 newBuilder()4496 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()4497 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo prototype)4498 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo prototype) { 4499 return newBuilder().mergeFrom(prototype); 4500 } toBuilder()4501 public Builder toBuilder() { return newBuilder(this); } 4502 4503 @java.lang.Override newBuilderForType( 
// NOTE(review): protoc-generated file (header says "DO NOT EDIT!") rendered with the original
// line numbers interleaved in the text. Do not hand-modify logic; regenerate from Admin.proto.
// Section: OpenRegionRequest.RegionOpenInfo.Builder — descriptor wiring and builder construction.
com.google.protobuf.GeneratedMessage.BuilderParent parent)4504 protected Builder newBuilderForType( 4505 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 4506 Builder builder = new Builder(parent); 4507 return builder; 4508 } 4509 /** 4510 * Protobuf type {@code OpenRegionRequest.RegionOpenInfo} 4511 */ 4512 public static final class Builder extends 4513 com.google.protobuf.GeneratedMessage.Builder<Builder> 4514 implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder { 4515 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()4516 getDescriptor() { 4517 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_RegionOpenInfo_descriptor; 4518 } 4519 4520 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()4521 internalGetFieldAccessorTable() { 4522 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_RegionOpenInfo_fieldAccessorTable 4523 .ensureFieldAccessorsInitialized( 4524 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder.class); 4525 } 4526 4527 // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.newBuilder() Builder()4528 private Builder() { 4529 maybeForceBuilderInitialization(); 4530 } 4531 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)4532 private Builder( 4533 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 4534 super(parent); 4535 maybeForceBuilderInitialization(); 4536 } maybeForceBuilderInitialization()4537 private void maybeForceBuilderInitialization() { 4538 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 4539 getRegionFieldBuilder(); 4540 getFavoredNodesFieldBuilder(); 4541 } 4542 } create()4543 
// Builder lifecycle below: create/clear/clone/build/buildPartial. Field presence is tracked in
// bitField0_ (bit 0x1 = region, 0x2 = version_of_offline_node, 0x4 = favored_nodes list mutability,
// 0x8 = openForDistributedLogReplay), as seen in the clear() resets that follow.
private static Builder create() { 4544 return new Builder(); 4545 } 4546 clear()4547 public Builder clear() { 4548 super.clear(); 4549 if (regionBuilder_ == null) { 4550 region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); 4551 } else { 4552 regionBuilder_.clear(); 4553 } 4554 bitField0_ = (bitField0_ & ~0x00000001); 4555 versionOfOfflineNode_ = 0; 4556 bitField0_ = (bitField0_ & ~0x00000002); 4557 if (favoredNodesBuilder_ == null) { 4558 favoredNodes_ = java.util.Collections.emptyList(); 4559 bitField0_ = (bitField0_ & ~0x00000004); 4560 } else { 4561 favoredNodesBuilder_.clear(); 4562 } 4563 openForDistributedLogReplay_ = false; 4564 bitField0_ = (bitField0_ & ~0x00000008); 4565 return this; 4566 } 4567 clone()4568 public Builder clone() { 4569 return create().mergeFrom(buildPartial()); 4570 } 4571 4572 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()4573 getDescriptorForType() { 4574 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_RegionOpenInfo_descriptor; 4575 } 4576 getDefaultInstanceForType()4577 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getDefaultInstanceForType() { 4578 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDefaultInstance(); 4579 } 4580 build()4581 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo build() { 4582 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo result = buildPartial(); 4583 if (!result.isInitialized()) { 4584 throw newUninitializedMessageException(result); 4585 } 4586 return result; 4587 } 4588 buildPartial()4589 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo buildPartial() { 4590 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo result = new 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo(this); 4591 int from_bitField0_ = bitField0_; 4592 int to_bitField0_ = 0; 4593 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 4594 to_bitField0_ |= 0x00000001; 4595 } 4596 if (regionBuilder_ == null) { 4597 result.region_ = region_; 4598 } else { 4599 result.region_ = regionBuilder_.build(); 4600 } 4601 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 4602 to_bitField0_ |= 0x00000002; 4603 } 4604 result.versionOfOfflineNode_ = versionOfOfflineNode_; 4605 if (favoredNodesBuilder_ == null) { 4606 if (((bitField0_ & 0x00000004) == 0x00000004)) { 4607 favoredNodes_ = java.util.Collections.unmodifiableList(favoredNodes_); 4608 bitField0_ = (bitField0_ & ~0x00000004); 4609 } 4610 result.favoredNodes_ = favoredNodes_; 4611 } else { 4612 result.favoredNodes_ = favoredNodesBuilder_.build(); 4613 } 4614 if (((from_bitField0_ & 0x00000008) == 0x00000008)) { 4615 to_bitField0_ |= 0x00000004; 4616 } 4617 result.openForDistributedLogReplay_ = openForDistributedLogReplay_; 4618 result.bitField0_ = to_bitField0_; 4619 onBuilt(); 4620 return result; 4621 } 4622 mergeFrom(com.google.protobuf.Message other)4623 public Builder mergeFrom(com.google.protobuf.Message other) { 4624 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo) { 4625 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo)other); 4626 } else { 4627 super.mergeFrom(other); 4628 return this; 4629 } 4630 } 4631 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo other)4632 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo other) { 4633 if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDefaultInstance()) return this; 4634 if (other.hasRegion()) { 4635 
// Section: RegionOpenInfo.Builder merge logic (message-level and CodedInputStream mergeFrom),
// isInitialized check (region is required; nested messages validated recursively), plus accessors
// for the required '.RegionInfo region = 1' field (presence bit 0x1 of bitField0_) and the
// optional 'uint32 version_of_offline_node = 2' field (presence bit 0x2).
// NOTE(review): generated code — do not hand-edit; regenerate from Admin.proto.
mergeRegion(other.getRegion()); 4636 } 4637 if (other.hasVersionOfOfflineNode()) { 4638 setVersionOfOfflineNode(other.getVersionOfOfflineNode()); 4639 } 4640 if (favoredNodesBuilder_ == null) { 4641 if (!other.favoredNodes_.isEmpty()) { 4642 if (favoredNodes_.isEmpty()) { 4643 favoredNodes_ = other.favoredNodes_; 4644 bitField0_ = (bitField0_ & ~0x00000004); 4645 } else { 4646 ensureFavoredNodesIsMutable(); 4647 favoredNodes_.addAll(other.favoredNodes_); 4648 } 4649 onChanged(); 4650 } 4651 } else { 4652 if (!other.favoredNodes_.isEmpty()) { 4653 if (favoredNodesBuilder_.isEmpty()) { 4654 favoredNodesBuilder_.dispose(); 4655 favoredNodesBuilder_ = null; 4656 favoredNodes_ = other.favoredNodes_; 4657 bitField0_ = (bitField0_ & ~0x00000004); 4658 favoredNodesBuilder_ = 4659 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 4660 getFavoredNodesFieldBuilder() : null; 4661 } else { 4662 favoredNodesBuilder_.addAllMessages(other.favoredNodes_); 4663 } 4664 } 4665 } 4666 if (other.hasOpenForDistributedLogReplay()) { 4667 setOpenForDistributedLogReplay(other.getOpenForDistributedLogReplay()); 4668 } 4669 this.mergeUnknownFields(other.getUnknownFields()); 4670 return this; 4671 } 4672 isInitialized()4673 public final boolean isInitialized() { 4674 if (!hasRegion()) { 4675 4676 return false; 4677 } 4678 if (!getRegion().isInitialized()) { 4679 4680 return false; 4681 } 4682 for (int i = 0; i < getFavoredNodesCount(); i++) { 4683 if (!getFavoredNodes(i).isInitialized()) { 4684 4685 return false; 4686 } 4687 } 4688 return true; 4689 } 4690 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4691 public Builder mergeFrom( 4692 com.google.protobuf.CodedInputStream input, 4693 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4694 throws java.io.IOException { 4695 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo parsedMessage = null; 4696 try { 4697 
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 4698 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 4699 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo) e.getUnfinishedMessage(); 4700 throw e; 4701 } finally { 4702 if (parsedMessage != null) { 4703 mergeFrom(parsedMessage); 4704 } 4705 } 4706 return this; 4707 } 4708 private int bitField0_; 4709 4710 // required .RegionInfo region = 1; 4711 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); 4712 private com.google.protobuf.SingleFieldBuilder< 4713 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionBuilder_; 4714 /** 4715 * <code>required .RegionInfo region = 1;</code> 4716 */ hasRegion()4717 public boolean hasRegion() { 4718 return ((bitField0_ & 0x00000001) == 0x00000001); 4719 } 4720 /** 4721 * <code>required .RegionInfo region = 1;</code> 4722 */ getRegion()4723 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegion() { 4724 if (regionBuilder_ == null) { 4725 return region_; 4726 } else { 4727 return regionBuilder_.getMessage(); 4728 } 4729 } 4730 /** 4731 * <code>required .RegionInfo region = 1;</code> 4732 */ setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value)4733 public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { 4734 if (regionBuilder_ == null) { 4735 if (value == null) { 4736 throw new NullPointerException(); 4737 } 4738 region_ = value; 4739 onChanged(); 4740 } else { 4741 regionBuilder_.setMessage(value); 4742 } 4743 bitField0_ |= 0x00000001; 4744 return this; 4745 } 4746 /** 4747 * <code>required .RegionInfo region = 
1;</code> 4748 */ setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue)4749 public Builder setRegion( 4750 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) { 4751 if (regionBuilder_ == null) { 4752 region_ = builderForValue.build(); 4753 onChanged(); 4754 } else { 4755 regionBuilder_.setMessage(builderForValue.build()); 4756 } 4757 bitField0_ |= 0x00000001; 4758 return this; 4759 } 4760 /** 4761 * <code>required .RegionInfo region = 1;</code> 4762 */ mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value)4763 public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) { 4764 if (regionBuilder_ == null) { 4765 if (((bitField0_ & 0x00000001) == 0x00000001) && 4766 region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) { 4767 region_ = 4768 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(region_).mergeFrom(value).buildPartial(); 4769 } else { 4770 region_ = value; 4771 } 4772 onChanged(); 4773 } else { 4774 regionBuilder_.mergeFrom(value); 4775 } 4776 bitField0_ |= 0x00000001; 4777 return this; 4778 } 4779 /** 4780 * <code>required .RegionInfo region = 1;</code> 4781 */ clearRegion()4782 public Builder clearRegion() { 4783 if (regionBuilder_ == null) { 4784 region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); 4785 onChanged(); 4786 } else { 4787 regionBuilder_.clear(); 4788 } 4789 bitField0_ = (bitField0_ & ~0x00000001); 4790 return this; 4791 } 4792 /** 4793 * <code>required .RegionInfo region = 1;</code> 4794 */ getRegionBuilder()4795 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionBuilder() { 4796 bitField0_ |= 0x00000001; 4797 onChanged(); 4798 return getRegionFieldBuilder().getBuilder(); 4799 } 4800 /** 4801 * <code>required .RegionInfo region = 
1;</code> 4802 */ getRegionOrBuilder()4803 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionOrBuilder() { 4804 if (regionBuilder_ != null) { 4805 return regionBuilder_.getMessageOrBuilder(); 4806 } else { 4807 return region_; 4808 } 4809 } 4810 /** 4811 * <code>required .RegionInfo region = 1;</code> 4812 */ 4813 private com.google.protobuf.SingleFieldBuilder< 4814 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> getRegionFieldBuilder()4815 getRegionFieldBuilder() { 4816 if (regionBuilder_ == null) { 4817 regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< 4818 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>( 4819 region_, 4820 getParentForChildren(), 4821 isClean()); 4822 region_ = null; 4823 } 4824 return regionBuilder_; 4825 } 4826 4827 // optional uint32 version_of_offline_node = 2; 4828 private int versionOfOfflineNode_ ; 4829 /** 4830 * <code>optional uint32 version_of_offline_node = 2;</code> 4831 */ hasVersionOfOfflineNode()4832 public boolean hasVersionOfOfflineNode() { 4833 return ((bitField0_ & 0x00000002) == 0x00000002); 4834 } 4835 /** 4836 * <code>optional uint32 version_of_offline_node = 2;</code> 4837 */ getVersionOfOfflineNode()4838 public int getVersionOfOfflineNode() { 4839 return versionOfOfflineNode_; 4840 } 4841 /** 4842 * <code>optional uint32 version_of_offline_node = 2;</code> 4843 */ setVersionOfOfflineNode(int value)4844 public Builder setVersionOfOfflineNode(int value) { 4845 bitField0_ |= 0x00000002; 4846 versionOfOfflineNode_ = value; 4847 onChanged(); 4848 return this; 4849 } 4850 /** 4851 * <code>optional uint32 version_of_offline_node = 2;</code> 
// Section: accessors for 'repeated .ServerName favored_nodes = 3' (plain-ArrayList path vs
// RepeatedFieldBuilder path; bit 0x4 of bitField0_ marks the local list as mutable) and for
// 'optional bool openForDistributedLogReplay = 4' (bit 0x8); closes RegionOpenInfo.Builder and
// begins the static default-instance initializer of RegionOpenInfo.
// NOTE(review): generated code — do not hand-edit; regenerate from Admin.proto.
4852 */ clearVersionOfOfflineNode()4853 public Builder clearVersionOfOfflineNode() { 4854 bitField0_ = (bitField0_ & ~0x00000002); 4855 versionOfOfflineNode_ = 0; 4856 onChanged(); 4857 return this; 4858 } 4859 4860 // repeated .ServerName favored_nodes = 3; 4861 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> favoredNodes_ = 4862 java.util.Collections.emptyList(); ensureFavoredNodesIsMutable()4863 private void ensureFavoredNodesIsMutable() { 4864 if (!((bitField0_ & 0x00000004) == 0x00000004)) { 4865 favoredNodes_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName>(favoredNodes_); 4866 bitField0_ |= 0x00000004; 4867 } 4868 } 4869 4870 private com.google.protobuf.RepeatedFieldBuilder< 4871 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> favoredNodesBuilder_; 4872 4873 /** 4874 * <code>repeated .ServerName favored_nodes = 3;</code> 4875 */ getFavoredNodesList()4876 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> getFavoredNodesList() { 4877 if (favoredNodesBuilder_ == null) { 4878 return java.util.Collections.unmodifiableList(favoredNodes_); 4879 } else { 4880 return favoredNodesBuilder_.getMessageList(); 4881 } 4882 } 4883 /** 4884 * <code>repeated .ServerName favored_nodes = 3;</code> 4885 */ getFavoredNodesCount()4886 public int getFavoredNodesCount() { 4887 if (favoredNodesBuilder_ == null) { 4888 return favoredNodes_.size(); 4889 } else { 4890 return favoredNodesBuilder_.getCount(); 4891 } 4892 } 4893 /** 4894 * <code>repeated .ServerName favored_nodes = 3;</code> 4895 */ getFavoredNodes(int index)4896 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getFavoredNodes(int index) { 4897 if (favoredNodesBuilder_ == null) { 4898 return 
favoredNodes_.get(index); 4899 } else { 4900 return favoredNodesBuilder_.getMessage(index); 4901 } 4902 } 4903 /** 4904 * <code>repeated .ServerName favored_nodes = 3;</code> 4905 */ setFavoredNodes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value)4906 public Builder setFavoredNodes( 4907 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { 4908 if (favoredNodesBuilder_ == null) { 4909 if (value == null) { 4910 throw new NullPointerException(); 4911 } 4912 ensureFavoredNodesIsMutable(); 4913 favoredNodes_.set(index, value); 4914 onChanged(); 4915 } else { 4916 favoredNodesBuilder_.setMessage(index, value); 4917 } 4918 return this; 4919 } 4920 /** 4921 * <code>repeated .ServerName favored_nodes = 3;</code> 4922 */ setFavoredNodes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue)4923 public Builder setFavoredNodes( 4924 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { 4925 if (favoredNodesBuilder_ == null) { 4926 ensureFavoredNodesIsMutable(); 4927 favoredNodes_.set(index, builderForValue.build()); 4928 onChanged(); 4929 } else { 4930 favoredNodesBuilder_.setMessage(index, builderForValue.build()); 4931 } 4932 return this; 4933 } 4934 /** 4935 * <code>repeated .ServerName favored_nodes = 3;</code> 4936 */ addFavoredNodes(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value)4937 public Builder addFavoredNodes(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { 4938 if (favoredNodesBuilder_ == null) { 4939 if (value == null) { 4940 throw new NullPointerException(); 4941 } 4942 ensureFavoredNodesIsMutable(); 4943 favoredNodes_.add(value); 4944 onChanged(); 4945 } else { 4946 favoredNodesBuilder_.addMessage(value); 4947 } 4948 return this; 4949 } 4950 /** 4951 * <code>repeated .ServerName favored_nodes = 3;</code> 4952 */ addFavoredNodes( int index, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value)4953 public Builder addFavoredNodes( 4954 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { 4955 if (favoredNodesBuilder_ == null) { 4956 if (value == null) { 4957 throw new NullPointerException(); 4958 } 4959 ensureFavoredNodesIsMutable(); 4960 favoredNodes_.add(index, value); 4961 onChanged(); 4962 } else { 4963 favoredNodesBuilder_.addMessage(index, value); 4964 } 4965 return this; 4966 } 4967 /** 4968 * <code>repeated .ServerName favored_nodes = 3;</code> 4969 */ addFavoredNodes( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue)4970 public Builder addFavoredNodes( 4971 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { 4972 if (favoredNodesBuilder_ == null) { 4973 ensureFavoredNodesIsMutable(); 4974 favoredNodes_.add(builderForValue.build()); 4975 onChanged(); 4976 } else { 4977 favoredNodesBuilder_.addMessage(builderForValue.build()); 4978 } 4979 return this; 4980 } 4981 /** 4982 * <code>repeated .ServerName favored_nodes = 3;</code> 4983 */ addFavoredNodes( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue)4984 public Builder addFavoredNodes( 4985 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { 4986 if (favoredNodesBuilder_ == null) { 4987 ensureFavoredNodesIsMutable(); 4988 favoredNodes_.add(index, builderForValue.build()); 4989 onChanged(); 4990 } else { 4991 favoredNodesBuilder_.addMessage(index, builderForValue.build()); 4992 } 4993 return this; 4994 } 4995 /** 4996 * <code>repeated .ServerName favored_nodes = 3;</code> 4997 */ addAllFavoredNodes( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> values)4998 public Builder addAllFavoredNodes( 4999 java.lang.Iterable<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName> values) { 5000 if (favoredNodesBuilder_ == null) { 5001 ensureFavoredNodesIsMutable(); 5002 super.addAll(values, favoredNodes_); 5003 onChanged(); 5004 } else { 5005 favoredNodesBuilder_.addAllMessages(values); 5006 } 5007 return this; 5008 } 5009 /** 5010 * <code>repeated .ServerName favored_nodes = 3;</code> 5011 */ clearFavoredNodes()5012 public Builder clearFavoredNodes() { 5013 if (favoredNodesBuilder_ == null) { 5014 favoredNodes_ = java.util.Collections.emptyList(); 5015 bitField0_ = (bitField0_ & ~0x00000004); 5016 onChanged(); 5017 } else { 5018 favoredNodesBuilder_.clear(); 5019 } 5020 return this; 5021 } 5022 /** 5023 * <code>repeated .ServerName favored_nodes = 3;</code> 5024 */ removeFavoredNodes(int index)5025 public Builder removeFavoredNodes(int index) { 5026 if (favoredNodesBuilder_ == null) { 5027 ensureFavoredNodesIsMutable(); 5028 favoredNodes_.remove(index); 5029 onChanged(); 5030 } else { 5031 favoredNodesBuilder_.remove(index); 5032 } 5033 return this; 5034 } 5035 /** 5036 * <code>repeated .ServerName favored_nodes = 3;</code> 5037 */ getFavoredNodesBuilder( int index)5038 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getFavoredNodesBuilder( 5039 int index) { 5040 return getFavoredNodesFieldBuilder().getBuilder(index); 5041 } 5042 /** 5043 * <code>repeated .ServerName favored_nodes = 3;</code> 5044 */ getFavoredNodesOrBuilder( int index)5045 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getFavoredNodesOrBuilder( 5046 int index) { 5047 if (favoredNodesBuilder_ == null) { 5048 return favoredNodes_.get(index); } else { 5049 return favoredNodesBuilder_.getMessageOrBuilder(index); 5050 } 5051 } 5052 /** 5053 * <code>repeated .ServerName favored_nodes = 3;</code> 5054 */ 5055 public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getFavoredNodesOrBuilderList()5056 getFavoredNodesOrBuilderList() { 5057 if (favoredNodesBuilder_ != null) { 5058 return favoredNodesBuilder_.getMessageOrBuilderList(); 5059 } else { 5060 return java.util.Collections.unmodifiableList(favoredNodes_); 5061 } 5062 } 5063 /** 5064 * <code>repeated .ServerName favored_nodes = 3;</code> 5065 */ addFavoredNodesBuilder()5066 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addFavoredNodesBuilder() { 5067 return getFavoredNodesFieldBuilder().addBuilder( 5068 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()); 5069 } 5070 /** 5071 * <code>repeated .ServerName favored_nodes = 3;</code> 5072 */ addFavoredNodesBuilder( int index)5073 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder addFavoredNodesBuilder( 5074 int index) { 5075 return getFavoredNodesFieldBuilder().addBuilder( 5076 index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()); 5077 } 5078 /** 5079 * <code>repeated .ServerName favored_nodes = 3;</code> 5080 */ 5081 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder> getFavoredNodesBuilderList()5082 getFavoredNodesBuilderList() { 5083 return getFavoredNodesFieldBuilder().getBuilderList(); 5084 } 5085 private com.google.protobuf.RepeatedFieldBuilder< 5086 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getFavoredNodesFieldBuilder()5087 getFavoredNodesFieldBuilder() { 5088 if (favoredNodesBuilder_ == null) { 5089 favoredNodesBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< 5090 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( 5091 favoredNodes_, 5092 ((bitField0_ & 0x00000004) == 0x00000004), 5093 getParentForChildren(), 5094 isClean()); 5095 favoredNodes_ = null; 5096 } 5097 return favoredNodesBuilder_; 5098 } 5099 5100 // optional bool openForDistributedLogReplay = 4; 5101 private boolean openForDistributedLogReplay_ ; 5102 /** 5103 * <code>optional bool openForDistributedLogReplay = 4;</code> 5104 * 5105 * <pre> 5106 * open region for distributedLogReplay 5107 * </pre> 5108 */ hasOpenForDistributedLogReplay()5109 public boolean hasOpenForDistributedLogReplay() { 5110 return ((bitField0_ & 0x00000008) == 0x00000008); 5111 } 5112 /** 5113 * <code>optional bool openForDistributedLogReplay = 4;</code> 5114 * 5115 * <pre> 5116 * open region for distributedLogReplay 5117 * </pre> 5118 */ getOpenForDistributedLogReplay()5119 public boolean getOpenForDistributedLogReplay() { 5120 return openForDistributedLogReplay_; 5121 } 5122 /** 5123 * <code>optional bool openForDistributedLogReplay = 4;</code> 5124 * 5125 * <pre> 5126 * open region for distributedLogReplay 5127 * </pre> 5128 */ setOpenForDistributedLogReplay(boolean value)5129 public Builder setOpenForDistributedLogReplay(boolean value) { 5130 bitField0_ |= 0x00000008; 5131 openForDistributedLogReplay_ = value; 5132 onChanged(); 5133 return this; 5134 } 5135 /** 5136 * <code>optional bool openForDistributedLogReplay = 4;</code> 5137 * 5138 * <pre> 5139 * open region for distributedLogReplay 5140 * </pre> 5141 */ clearOpenForDistributedLogReplay()5142 public Builder clearOpenForDistributedLogReplay() { 5143 bitField0_ = (bitField0_ & ~0x00000008); 5144 openForDistributedLogReplay_ = false; 5145 onChanged(); 5146 return this; 5147 } 5148 5149 // @@protoc_insertion_point(builder_scope:OpenRegionRequest.RegionOpenInfo) 5150 } 5151 5152 static { 5153 defaultInstance = new RegionOpenInfo(true); 
// Section: finishes the RegionOpenInfo static default-instance init and closes the class; then
// OpenRegionRequest message members — 'repeated .OpenRegionRequest.RegionOpenInfo open_info = 1',
// 'optional uint64 serverStartCode = 2' (presence bit 0x1), 'optional uint64 master_system_time = 5'
// (presence bit 0x2) — followed by initFields, memoized isInitialized (validates each open_info),
// and wire serialization (writeTo / getSerializedSize via CodedOutputStream).
// NOTE(review): generated code — do not hand-edit; regenerate from Admin.proto.
defaultInstance.initFields()5154 defaultInstance.initFields(); 5155 } 5156 5157 // @@protoc_insertion_point(class_scope:OpenRegionRequest.RegionOpenInfo) 5158 } 5159 5160 private int bitField0_; 5161 // repeated .OpenRegionRequest.RegionOpenInfo open_info = 1; 5162 public static final int OPEN_INFO_FIELD_NUMBER = 1; 5163 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo> openInfo_; 5164 /** 5165 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 5166 */ getOpenInfoList()5167 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo> getOpenInfoList() { 5168 return openInfo_; 5169 } 5170 /** 5171 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 5172 */ 5173 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder> getOpenInfoOrBuilderList()5174 getOpenInfoOrBuilderList() { 5175 return openInfo_; 5176 } 5177 /** 5178 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 5179 */ getOpenInfoCount()5180 public int getOpenInfoCount() { 5181 return openInfo_.size(); 5182 } 5183 /** 5184 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 5185 */ getOpenInfo(int index)5186 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getOpenInfo(int index) { 5187 return openInfo_.get(index); 5188 } 5189 /** 5190 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 5191 */ getOpenInfoOrBuilder( int index)5192 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder getOpenInfoOrBuilder( 5193 int index) { 5194 return openInfo_.get(index); 5195 } 5196 5197 // optional uint64 serverStartCode = 2; 5198 public static final int SERVERSTARTCODE_FIELD_NUMBER = 2; 5199 private long serverStartCode_; 5200 /** 5201 
<code>optional uint64 serverStartCode = 2;</code> 5202 * 5203 * <pre> 5204 * the intended server for this RPC. 5205 * </pre> 5206 */ hasServerStartCode()5207 public boolean hasServerStartCode() { 5208 return ((bitField0_ & 0x00000001) == 0x00000001); 5209 } 5210 /** 5211 * <code>optional uint64 serverStartCode = 2;</code> 5212 * 5213 * <pre> 5214 * the intended server for this RPC. 5215 * </pre> 5216 */ getServerStartCode()5217 public long getServerStartCode() { 5218 return serverStartCode_; 5219 } 5220 5221 // optional uint64 master_system_time = 5; 5222 public static final int MASTER_SYSTEM_TIME_FIELD_NUMBER = 5; 5223 private long masterSystemTime_; 5224 /** 5225 * <code>optional uint64 master_system_time = 5;</code> 5226 * 5227 * <pre> 5228 * wall clock time from master 5229 * </pre> 5230 */ hasMasterSystemTime()5231 public boolean hasMasterSystemTime() { 5232 return ((bitField0_ & 0x00000002) == 0x00000002); 5233 } 5234 /** 5235 * <code>optional uint64 master_system_time = 5;</code> 5236 * 5237 * <pre> 5238 * wall clock time from master 5239 * </pre> 5240 */ getMasterSystemTime()5241 public long getMasterSystemTime() { 5242 return masterSystemTime_; 5243 } 5244 initFields()5245 private void initFields() { 5246 openInfo_ = java.util.Collections.emptyList(); 5247 serverStartCode_ = 0L; 5248 masterSystemTime_ = 0L; 5249 } 5250 private byte memoizedIsInitialized = -1; isInitialized()5251 public final boolean isInitialized() { 5252 byte isInitialized = memoizedIsInitialized; 5253 if (isInitialized != -1) return isInitialized == 1; 5254 5255 for (int i = 0; i < getOpenInfoCount(); i++) { 5256 if (!getOpenInfo(i).isInitialized()) { 5257 memoizedIsInitialized = 0; 5258 return false; 5259 } 5260 } 5261 memoizedIsInitialized = 1; 5262 return true; 5263 } 5264 writeTo(com.google.protobuf.CodedOutputStream output)5265 public void writeTo(com.google.protobuf.CodedOutputStream output) 5266 throws java.io.IOException { 5267 getSerializedSize(); 5268 for (int i = 0; i < 
openInfo_.size(); i++) { 5269 output.writeMessage(1, openInfo_.get(i)); 5270 } 5271 if (((bitField0_ & 0x00000001) == 0x00000001)) { 5272 output.writeUInt64(2, serverStartCode_); 5273 } 5274 if (((bitField0_ & 0x00000002) == 0x00000002)) { 5275 output.writeUInt64(5, masterSystemTime_); 5276 } 5277 getUnknownFields().writeTo(output); 5278 } 5279 5280 private int memoizedSerializedSize = -1; getSerializedSize()5281 public int getSerializedSize() { 5282 int size = memoizedSerializedSize; 5283 if (size != -1) return size; 5284 5285 size = 0; 5286 for (int i = 0; i < openInfo_.size(); i++) { 5287 size += com.google.protobuf.CodedOutputStream 5288 .computeMessageSize(1, openInfo_.get(i)); 5289 } 5290 if (((bitField0_ & 0x00000001) == 0x00000001)) { 5291 size += com.google.protobuf.CodedOutputStream 5292 .computeUInt64Size(2, serverStartCode_); 5293 } 5294 if (((bitField0_ & 0x00000002) == 0x00000002)) { 5295 size += com.google.protobuf.CodedOutputStream 5296 .computeUInt64Size(5, masterSystemTime_); 5297 } 5298 size += getUnknownFields().getSerializedSize(); 5299 memoizedSerializedSize = size; 5300 return size; 5301 } 5302 5303 private static final long serialVersionUID = 0L; 5304 @java.lang.Override writeReplace()5305 protected java.lang.Object writeReplace() 5306 throws java.io.ObjectStreamException { 5307 return super.writeReplace(); 5308 } 5309 5310 @java.lang.Override equals(final java.lang.Object obj)5311 public boolean equals(final java.lang.Object obj) { 5312 if (obj == this) { 5313 return true; 5314 } 5315 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)) { 5316 return super.equals(obj); 5317 } 5318 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest) obj; 5319 5320 boolean result = true; 5321 result = result && getOpenInfoList() 5322 .equals(other.getOpenInfoList()); 5323 result = result && (hasServerStartCode() == 
// Section: remainder of OpenRegionRequest equals (field-presence plus value comparison including
// unknown fields), memoized hashCode, static PARSER-backed parseFrom/parseDelimitedFrom overloads,
// newBuilder/toBuilder factories, and the start of the OpenRegionRequest.Builder class
// (descriptor wiring and constructors; the class continues past this chunk).
// NOTE(review): generated code — do not hand-edit; regenerate from Admin.proto.
other.hasServerStartCode()); 5324 if (hasServerStartCode()) { 5325 result = result && (getServerStartCode() 5326 == other.getServerStartCode()); 5327 } 5328 result = result && (hasMasterSystemTime() == other.hasMasterSystemTime()); 5329 if (hasMasterSystemTime()) { 5330 result = result && (getMasterSystemTime() 5331 == other.getMasterSystemTime()); 5332 } 5333 result = result && 5334 getUnknownFields().equals(other.getUnknownFields()); 5335 return result; 5336 } 5337 5338 private int memoizedHashCode = 0; 5339 @java.lang.Override hashCode()5340 public int hashCode() { 5341 if (memoizedHashCode != 0) { 5342 return memoizedHashCode; 5343 } 5344 int hash = 41; 5345 hash = (19 * hash) + getDescriptorForType().hashCode(); 5346 if (getOpenInfoCount() > 0) { 5347 hash = (37 * hash) + OPEN_INFO_FIELD_NUMBER; 5348 hash = (53 * hash) + getOpenInfoList().hashCode(); 5349 } 5350 if (hasServerStartCode()) { 5351 hash = (37 * hash) + SERVERSTARTCODE_FIELD_NUMBER; 5352 hash = (53 * hash) + hashLong(getServerStartCode()); 5353 } 5354 if (hasMasterSystemTime()) { 5355 hash = (37 * hash) + MASTER_SYSTEM_TIME_FIELD_NUMBER; 5356 hash = (53 * hash) + hashLong(getMasterSystemTime()); 5357 } 5358 hash = (29 * hash) + getUnknownFields().hashCode(); 5359 memoizedHashCode = hash; 5360 return hash; 5361 } 5362 parseFrom( com.google.protobuf.ByteString data)5363 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( 5364 com.google.protobuf.ByteString data) 5365 throws com.google.protobuf.InvalidProtocolBufferException { 5366 return PARSER.parseFrom(data); 5367 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5368 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( 5369 com.google.protobuf.ByteString data, 5370 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5371 throws com.google.protobuf.InvalidProtocolBufferException { 5372 
return PARSER.parseFrom(data, extensionRegistry); 5373 } parseFrom(byte[] data)5374 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(byte[] data) 5375 throws com.google.protobuf.InvalidProtocolBufferException { 5376 return PARSER.parseFrom(data); 5377 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5378 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( 5379 byte[] data, 5380 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5381 throws com.google.protobuf.InvalidProtocolBufferException { 5382 return PARSER.parseFrom(data, extensionRegistry); 5383 } parseFrom(java.io.InputStream input)5384 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom(java.io.InputStream input) 5385 throws java.io.IOException { 5386 return PARSER.parseFrom(input); 5387 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5388 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( 5389 java.io.InputStream input, 5390 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5391 throws java.io.IOException { 5392 return PARSER.parseFrom(input, extensionRegistry); 5393 } parseDelimitedFrom(java.io.InputStream input)5394 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseDelimitedFrom(java.io.InputStream input) 5395 throws java.io.IOException { 5396 return PARSER.parseDelimitedFrom(input); 5397 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5398 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseDelimitedFrom( 5399 java.io.InputStream input, 5400 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5401 throws java.io.IOException { 5402 return 
PARSER.parseDelimitedFrom(input, extensionRegistry); 5403 } parseFrom( com.google.protobuf.CodedInputStream input)5404 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( 5405 com.google.protobuf.CodedInputStream input) 5406 throws java.io.IOException { 5407 return PARSER.parseFrom(input); 5408 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5409 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parseFrom( 5410 com.google.protobuf.CodedInputStream input, 5411 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5412 throws java.io.IOException { 5413 return PARSER.parseFrom(input, extensionRegistry); 5414 } 5415 newBuilder()5416 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()5417 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest prototype)5418 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest prototype) { 5419 return newBuilder().mergeFrom(prototype); 5420 } toBuilder()5421 public Builder toBuilder() { return newBuilder(this); } 5422 5423 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)5424 protected Builder newBuilderForType( 5425 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 5426 Builder builder = new Builder(parent); 5427 return builder; 5428 } 5429 /** 5430 * Protobuf type {@code OpenRegionRequest} 5431 */ 5432 public static final class Builder extends 5433 com.google.protobuf.GeneratedMessage.Builder<Builder> 5434 implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequestOrBuilder { 5435 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()5436 getDescriptor() { 5437 return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_descriptor; 5438 } 5439 5440 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()5441 internalGetFieldAccessorTable() { 5442 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_fieldAccessorTable 5443 .ensureFieldAccessorsInitialized( 5444 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.Builder.class); 5445 } 5446 5447 // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.newBuilder() Builder()5448 private Builder() { 5449 maybeForceBuilderInitialization(); 5450 } 5451 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)5452 private Builder( 5453 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 5454 super(parent); 5455 maybeForceBuilderInitialization(); 5456 } maybeForceBuilderInitialization()5457 private void maybeForceBuilderInitialization() { 5458 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 5459 getOpenInfoFieldBuilder(); 5460 } 5461 } create()5462 private static Builder create() { 5463 return new Builder(); 5464 } 5465 clear()5466 public Builder clear() { 5467 super.clear(); 5468 if (openInfoBuilder_ == null) { 5469 openInfo_ = java.util.Collections.emptyList(); 5470 bitField0_ = (bitField0_ & ~0x00000001); 5471 } else { 5472 openInfoBuilder_.clear(); 5473 } 5474 serverStartCode_ = 0L; 5475 bitField0_ = (bitField0_ & ~0x00000002); 5476 masterSystemTime_ = 0L; 5477 bitField0_ = (bitField0_ & ~0x00000004); 5478 return this; 5479 } 5480 clone()5481 public Builder clone() { 5482 return create().mergeFrom(buildPartial()); 5483 } 5484 5485 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()5486 getDescriptorForType() { 5487 return 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionRequest_descriptor; 5488 } 5489 getDefaultInstanceForType()5490 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest getDefaultInstanceForType() { 5491 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance(); 5492 } 5493 build()5494 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest build() { 5495 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest result = buildPartial(); 5496 if (!result.isInitialized()) { 5497 throw newUninitializedMessageException(result); 5498 } 5499 return result; 5500 } 5501 buildPartial()5502 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest buildPartial() { 5503 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest(this); 5504 int from_bitField0_ = bitField0_; 5505 int to_bitField0_ = 0; 5506 if (openInfoBuilder_ == null) { 5507 if (((bitField0_ & 0x00000001) == 0x00000001)) { 5508 openInfo_ = java.util.Collections.unmodifiableList(openInfo_); 5509 bitField0_ = (bitField0_ & ~0x00000001); 5510 } 5511 result.openInfo_ = openInfo_; 5512 } else { 5513 result.openInfo_ = openInfoBuilder_.build(); 5514 } 5515 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 5516 to_bitField0_ |= 0x00000001; 5517 } 5518 result.serverStartCode_ = serverStartCode_; 5519 if (((from_bitField0_ & 0x00000004) == 0x00000004)) { 5520 to_bitField0_ |= 0x00000002; 5521 } 5522 result.masterSystemTime_ = masterSystemTime_; 5523 result.bitField0_ = to_bitField0_; 5524 onBuilt(); 5525 return result; 5526 } 5527 mergeFrom(com.google.protobuf.Message other)5528 public Builder mergeFrom(com.google.protobuf.Message other) { 5529 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest) { 5530 return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest)other); 5531 } else { 5532 super.mergeFrom(other); 5533 return this; 5534 } 5535 } 5536 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest other)5537 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest other) { 5538 if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.getDefaultInstance()) return this; 5539 if (openInfoBuilder_ == null) { 5540 if (!other.openInfo_.isEmpty()) { 5541 if (openInfo_.isEmpty()) { 5542 openInfo_ = other.openInfo_; 5543 bitField0_ = (bitField0_ & ~0x00000001); 5544 } else { 5545 ensureOpenInfoIsMutable(); 5546 openInfo_.addAll(other.openInfo_); 5547 } 5548 onChanged(); 5549 } 5550 } else { 5551 if (!other.openInfo_.isEmpty()) { 5552 if (openInfoBuilder_.isEmpty()) { 5553 openInfoBuilder_.dispose(); 5554 openInfoBuilder_ = null; 5555 openInfo_ = other.openInfo_; 5556 bitField0_ = (bitField0_ & ~0x00000001); 5557 openInfoBuilder_ = 5558 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
5559 getOpenInfoFieldBuilder() : null; 5560 } else { 5561 openInfoBuilder_.addAllMessages(other.openInfo_); 5562 } 5563 } 5564 } 5565 if (other.hasServerStartCode()) { 5566 setServerStartCode(other.getServerStartCode()); 5567 } 5568 if (other.hasMasterSystemTime()) { 5569 setMasterSystemTime(other.getMasterSystemTime()); 5570 } 5571 this.mergeUnknownFields(other.getUnknownFields()); 5572 return this; 5573 } 5574 isInitialized()5575 public final boolean isInitialized() { 5576 for (int i = 0; i < getOpenInfoCount(); i++) { 5577 if (!getOpenInfo(i).isInitialized()) { 5578 5579 return false; 5580 } 5581 } 5582 return true; 5583 } 5584 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5585 public Builder mergeFrom( 5586 com.google.protobuf.CodedInputStream input, 5587 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5588 throws java.io.IOException { 5589 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest parsedMessage = null; 5590 try { 5591 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 5592 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 5593 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest) e.getUnfinishedMessage(); 5594 throw e; 5595 } finally { 5596 if (parsedMessage != null) { 5597 mergeFrom(parsedMessage); 5598 } 5599 } 5600 return this; 5601 } 5602 private int bitField0_; 5603 5604 // repeated .OpenRegionRequest.RegionOpenInfo open_info = 1; 5605 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo> openInfo_ = 5606 java.util.Collections.emptyList(); ensureOpenInfoIsMutable()5607 private void ensureOpenInfoIsMutable() { 5608 if (!((bitField0_ & 0x00000001) == 0x00000001)) { 5609 openInfo_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo>(openInfo_); 5610 bitField0_ |= 
0x00000001; 5611 } 5612 } 5613 5614 private com.google.protobuf.RepeatedFieldBuilder< 5615 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder> openInfoBuilder_; 5616 5617 /** 5618 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 5619 */ getOpenInfoList()5620 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo> getOpenInfoList() { 5621 if (openInfoBuilder_ == null) { 5622 return java.util.Collections.unmodifiableList(openInfo_); 5623 } else { 5624 return openInfoBuilder_.getMessageList(); 5625 } 5626 } 5627 /** 5628 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 5629 */ getOpenInfoCount()5630 public int getOpenInfoCount() { 5631 if (openInfoBuilder_ == null) { 5632 return openInfo_.size(); 5633 } else { 5634 return openInfoBuilder_.getCount(); 5635 } 5636 } 5637 /** 5638 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 5639 */ getOpenInfo(int index)5640 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo getOpenInfo(int index) { 5641 if (openInfoBuilder_ == null) { 5642 return openInfo_.get(index); 5643 } else { 5644 return openInfoBuilder_.getMessage(index); 5645 } 5646 } 5647 /** 5648 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 5649 */ setOpenInfo( int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo value)5650 public Builder setOpenInfo( 5651 int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo value) { 5652 if (openInfoBuilder_ == null) { 5653 if (value == null) { 5654 throw new NullPointerException(); 5655 } 5656 ensureOpenInfoIsMutable(); 
5657 openInfo_.set(index, value); 5658 onChanged(); 5659 } else { 5660 openInfoBuilder_.setMessage(index, value); 5661 } 5662 return this; 5663 } 5664 /** 5665 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 5666 */ setOpenInfo( int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder builderForValue)5667 public Builder setOpenInfo( 5668 int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder builderForValue) { 5669 if (openInfoBuilder_ == null) { 5670 ensureOpenInfoIsMutable(); 5671 openInfo_.set(index, builderForValue.build()); 5672 onChanged(); 5673 } else { 5674 openInfoBuilder_.setMessage(index, builderForValue.build()); 5675 } 5676 return this; 5677 } 5678 /** 5679 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 5680 */ addOpenInfo(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo value)5681 public Builder addOpenInfo(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo value) { 5682 if (openInfoBuilder_ == null) { 5683 if (value == null) { 5684 throw new NullPointerException(); 5685 } 5686 ensureOpenInfoIsMutable(); 5687 openInfo_.add(value); 5688 onChanged(); 5689 } else { 5690 openInfoBuilder_.addMessage(value); 5691 } 5692 return this; 5693 } 5694 /** 5695 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 5696 */ addOpenInfo( int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo value)5697 public Builder addOpenInfo( 5698 int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo value) { 5699 if (openInfoBuilder_ == null) { 5700 if (value == null) { 5701 throw new NullPointerException(); 5702 } 5703 ensureOpenInfoIsMutable(); 5704 openInfo_.add(index, value); 5705 onChanged(); 5706 } else { 5707 
openInfoBuilder_.addMessage(index, value); 5708 } 5709 return this; 5710 } 5711 /** 5712 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 5713 */ addOpenInfo( org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder builderForValue)5714 public Builder addOpenInfo( 5715 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder builderForValue) { 5716 if (openInfoBuilder_ == null) { 5717 ensureOpenInfoIsMutable(); 5718 openInfo_.add(builderForValue.build()); 5719 onChanged(); 5720 } else { 5721 openInfoBuilder_.addMessage(builderForValue.build()); 5722 } 5723 return this; 5724 } 5725 /** 5726 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 5727 */ addOpenInfo( int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder builderForValue)5728 public Builder addOpenInfo( 5729 int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder builderForValue) { 5730 if (openInfoBuilder_ == null) { 5731 ensureOpenInfoIsMutable(); 5732 openInfo_.add(index, builderForValue.build()); 5733 onChanged(); 5734 } else { 5735 openInfoBuilder_.addMessage(index, builderForValue.build()); 5736 } 5737 return this; 5738 } 5739 /** 5740 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 5741 */ addAllOpenInfo( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo> values)5742 public Builder addAllOpenInfo( 5743 java.lang.Iterable<? 
extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo> values) { 5744 if (openInfoBuilder_ == null) { 5745 ensureOpenInfoIsMutable(); 5746 super.addAll(values, openInfo_); 5747 onChanged(); 5748 } else { 5749 openInfoBuilder_.addAllMessages(values); 5750 } 5751 return this; 5752 } 5753 /** 5754 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 5755 */ clearOpenInfo()5756 public Builder clearOpenInfo() { 5757 if (openInfoBuilder_ == null) { 5758 openInfo_ = java.util.Collections.emptyList(); 5759 bitField0_ = (bitField0_ & ~0x00000001); 5760 onChanged(); 5761 } else { 5762 openInfoBuilder_.clear(); 5763 } 5764 return this; 5765 } 5766 /** 5767 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 5768 */ removeOpenInfo(int index)5769 public Builder removeOpenInfo(int index) { 5770 if (openInfoBuilder_ == null) { 5771 ensureOpenInfoIsMutable(); 5772 openInfo_.remove(index); 5773 onChanged(); 5774 } else { 5775 openInfoBuilder_.remove(index); 5776 } 5777 return this; 5778 } 5779 /** 5780 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 5781 */ getOpenInfoBuilder( int index)5782 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder getOpenInfoBuilder( 5783 int index) { 5784 return getOpenInfoFieldBuilder().getBuilder(index); 5785 } 5786 /** 5787 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 5788 */ getOpenInfoOrBuilder( int index)5789 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder getOpenInfoOrBuilder( 5790 int index) { 5791 if (openInfoBuilder_ == null) { 5792 return openInfo_.get(index); } else { 5793 return openInfoBuilder_.getMessageOrBuilder(index); 5794 } 5795 } 5796 /** 5797 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 5798 */ 5799 public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder> getOpenInfoOrBuilderList()5800 getOpenInfoOrBuilderList() { 5801 if (openInfoBuilder_ != null) { 5802 return openInfoBuilder_.getMessageOrBuilderList(); 5803 } else { 5804 return java.util.Collections.unmodifiableList(openInfo_); 5805 } 5806 } 5807 /** 5808 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 5809 */ addOpenInfoBuilder()5810 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder addOpenInfoBuilder() { 5811 return getOpenInfoFieldBuilder().addBuilder( 5812 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDefaultInstance()); 5813 } 5814 /** 5815 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 5816 */ addOpenInfoBuilder( int index)5817 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder addOpenInfoBuilder( 5818 int index) { 5819 return getOpenInfoFieldBuilder().addBuilder( 5820 index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.getDefaultInstance()); 5821 } 5822 /** 5823 * <code>repeated .OpenRegionRequest.RegionOpenInfo open_info = 1;</code> 5824 */ 5825 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder> getOpenInfoBuilderList()5826 getOpenInfoBuilderList() { 5827 return getOpenInfoFieldBuilder().getBuilderList(); 5828 } 5829 private com.google.protobuf.RepeatedFieldBuilder< 5830 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder> getOpenInfoFieldBuilder()5831 getOpenInfoFieldBuilder() { 5832 if (openInfoBuilder_ == null) { 
5833 openInfoBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< 5834 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionRequest.RegionOpenInfoOrBuilder>( 5835 openInfo_, 5836 ((bitField0_ & 0x00000001) == 0x00000001), 5837 getParentForChildren(), 5838 isClean()); 5839 openInfo_ = null; 5840 } 5841 return openInfoBuilder_; 5842 } 5843 5844 // optional uint64 serverStartCode = 2; 5845 private long serverStartCode_ ; 5846 /** 5847 * <code>optional uint64 serverStartCode = 2;</code> 5848 * 5849 * <pre> 5850 * the intended server for this RPC. 5851 * </pre> 5852 */ hasServerStartCode()5853 public boolean hasServerStartCode() { 5854 return ((bitField0_ & 0x00000002) == 0x00000002); 5855 } 5856 /** 5857 * <code>optional uint64 serverStartCode = 2;</code> 5858 * 5859 * <pre> 5860 * the intended server for this RPC. 5861 * </pre> 5862 */ getServerStartCode()5863 public long getServerStartCode() { 5864 return serverStartCode_; 5865 } 5866 /** 5867 * <code>optional uint64 serverStartCode = 2;</code> 5868 * 5869 * <pre> 5870 * the intended server for this RPC. 5871 * </pre> 5872 */ setServerStartCode(long value)5873 public Builder setServerStartCode(long value) { 5874 bitField0_ |= 0x00000002; 5875 serverStartCode_ = value; 5876 onChanged(); 5877 return this; 5878 } 5879 /** 5880 * <code>optional uint64 serverStartCode = 2;</code> 5881 * 5882 * <pre> 5883 * the intended server for this RPC. 
5884 * </pre> 5885 */ clearServerStartCode()5886 public Builder clearServerStartCode() { 5887 bitField0_ = (bitField0_ & ~0x00000002); 5888 serverStartCode_ = 0L; 5889 onChanged(); 5890 return this; 5891 } 5892 5893 // optional uint64 master_system_time = 5; 5894 private long masterSystemTime_ ; 5895 /** 5896 * <code>optional uint64 master_system_time = 5;</code> 5897 * 5898 * <pre> 5899 * wall clock time from master 5900 * </pre> 5901 */ hasMasterSystemTime()5902 public boolean hasMasterSystemTime() { 5903 return ((bitField0_ & 0x00000004) == 0x00000004); 5904 } 5905 /** 5906 * <code>optional uint64 master_system_time = 5;</code> 5907 * 5908 * <pre> 5909 * wall clock time from master 5910 * </pre> 5911 */ getMasterSystemTime()5912 public long getMasterSystemTime() { 5913 return masterSystemTime_; 5914 } 5915 /** 5916 * <code>optional uint64 master_system_time = 5;</code> 5917 * 5918 * <pre> 5919 * wall clock time from master 5920 * </pre> 5921 */ setMasterSystemTime(long value)5922 public Builder setMasterSystemTime(long value) { 5923 bitField0_ |= 0x00000004; 5924 masterSystemTime_ = value; 5925 onChanged(); 5926 return this; 5927 } 5928 /** 5929 * <code>optional uint64 master_system_time = 5;</code> 5930 * 5931 * <pre> 5932 * wall clock time from master 5933 * </pre> 5934 */ clearMasterSystemTime()5935 public Builder clearMasterSystemTime() { 5936 bitField0_ = (bitField0_ & ~0x00000004); 5937 masterSystemTime_ = 0L; 5938 onChanged(); 5939 return this; 5940 } 5941 5942 // @@protoc_insertion_point(builder_scope:OpenRegionRequest) 5943 } 5944 5945 static { 5946 defaultInstance = new OpenRegionRequest(true); defaultInstance.initFields()5947 defaultInstance.initFields(); 5948 } 5949 5950 // @@protoc_insertion_point(class_scope:OpenRegionRequest) 5951 } 5952 5953 public interface OpenRegionResponseOrBuilder 5954 extends com.google.protobuf.MessageOrBuilder { 5955 5956 // repeated .OpenRegionResponse.RegionOpeningState opening_state = 1; 5957 /** 5958 * <code>repeated 
.OpenRegionResponse.RegionOpeningState opening_state = 1;</code> 5959 */ getOpeningStateList()5960 java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> getOpeningStateList(); 5961 /** 5962 * <code>repeated .OpenRegionResponse.RegionOpeningState opening_state = 1;</code> 5963 */ getOpeningStateCount()5964 int getOpeningStateCount(); 5965 /** 5966 * <code>repeated .OpenRegionResponse.RegionOpeningState opening_state = 1;</code> 5967 */ getOpeningState(int index)5968 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index); 5969 } 5970 /** 5971 * Protobuf type {@code OpenRegionResponse} 5972 */ 5973 public static final class OpenRegionResponse extends 5974 com.google.protobuf.GeneratedMessage 5975 implements OpenRegionResponseOrBuilder { 5976 // Use OpenRegionResponse.newBuilder() to construct. OpenRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)5977 private OpenRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 5978 super(builder); 5979 this.unknownFields = builder.getUnknownFields(); 5980 } OpenRegionResponse(boolean noInit)5981 private OpenRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 5982 5983 private static final OpenRegionResponse defaultInstance; getDefaultInstance()5984 public static OpenRegionResponse getDefaultInstance() { 5985 return defaultInstance; 5986 } 5987 getDefaultInstanceForType()5988 public OpenRegionResponse getDefaultInstanceForType() { 5989 return defaultInstance; 5990 } 5991 5992 private final com.google.protobuf.UnknownFieldSet unknownFields; 5993 @java.lang.Override 5994 public final com.google.protobuf.UnknownFieldSet getUnknownFields()5995 getUnknownFields() { 5996 return this.unknownFields; 5997 } OpenRegionResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry)5998 private OpenRegionResponse( 5999 com.google.protobuf.CodedInputStream input, 6000 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6001 throws com.google.protobuf.InvalidProtocolBufferException { 6002 initFields(); 6003 int mutable_bitField0_ = 0; 6004 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 6005 com.google.protobuf.UnknownFieldSet.newBuilder(); 6006 try { 6007 boolean done = false; 6008 while (!done) { 6009 int tag = input.readTag(); 6010 switch (tag) { 6011 case 0: 6012 done = true; 6013 break; 6014 default: { 6015 if (!parseUnknownField(input, unknownFields, 6016 extensionRegistry, tag)) { 6017 done = true; 6018 } 6019 break; 6020 } 6021 case 8: { 6022 int rawValue = input.readEnum(); 6023 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(rawValue); 6024 if (value == null) { 6025 unknownFields.mergeVarintField(1, rawValue); 6026 } else { 6027 if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { 6028 openingState_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState>(); 6029 mutable_bitField0_ |= 0x00000001; 6030 } 6031 openingState_.add(value); 6032 } 6033 break; 6034 } 6035 case 10: { 6036 int length = input.readRawVarint32(); 6037 int oldLimit = input.pushLimit(length); 6038 while(input.getBytesUntilLimit() > 0) { 6039 int rawValue = input.readEnum(); 6040 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value = org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState.valueOf(rawValue); 6041 if (value == null) { 6042 unknownFields.mergeVarintField(1, rawValue); 6043 } else { 6044 if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { 6045 openingState_ = new 
java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState>(); 6046 mutable_bitField0_ |= 0x00000001; 6047 } 6048 openingState_.add(value); 6049 } 6050 } 6051 input.popLimit(oldLimit); 6052 break; 6053 } 6054 } 6055 } 6056 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 6057 throw e.setUnfinishedMessage(this); 6058 } catch (java.io.IOException e) { 6059 throw new com.google.protobuf.InvalidProtocolBufferException( 6060 e.getMessage()).setUnfinishedMessage(this); 6061 } finally { 6062 if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { 6063 openingState_ = java.util.Collections.unmodifiableList(openingState_); 6064 } 6065 this.unknownFields = unknownFields.build(); 6066 makeExtensionsImmutable(); 6067 } 6068 } 6069 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()6070 getDescriptor() { 6071 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_descriptor; 6072 } 6073 6074 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()6075 internalGetFieldAccessorTable() { 6076 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_fieldAccessorTable 6077 .ensureFieldAccessorsInitialized( 6078 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.Builder.class); 6079 } 6080 6081 public static com.google.protobuf.Parser<OpenRegionResponse> PARSER = 6082 new com.google.protobuf.AbstractParser<OpenRegionResponse>() { 6083 public OpenRegionResponse parsePartialFrom( 6084 com.google.protobuf.CodedInputStream input, 6085 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6086 throws com.google.protobuf.InvalidProtocolBufferException { 6087 return new OpenRegionResponse(input, extensionRegistry); 6088 } 6089 }; 6090 6091 @java.lang.Override 
getParserForType()6092 public com.google.protobuf.Parser<OpenRegionResponse> getParserForType() { 6093 return PARSER; 6094 } 6095 6096 /** 6097 * Protobuf enum {@code OpenRegionResponse.RegionOpeningState} 6098 */ 6099 public enum RegionOpeningState 6100 implements com.google.protobuf.ProtocolMessageEnum { 6101 /** 6102 * <code>OPENED = 0;</code> 6103 */ 6104 OPENED(0, 0), 6105 /** 6106 * <code>ALREADY_OPENED = 1;</code> 6107 */ 6108 ALREADY_OPENED(1, 1), 6109 /** 6110 * <code>FAILED_OPENING = 2;</code> 6111 */ 6112 FAILED_OPENING(2, 2), 6113 ; 6114 6115 /** 6116 * <code>OPENED = 0;</code> 6117 */ 6118 public static final int OPENED_VALUE = 0; 6119 /** 6120 * <code>ALREADY_OPENED = 1;</code> 6121 */ 6122 public static final int ALREADY_OPENED_VALUE = 1; 6123 /** 6124 * <code>FAILED_OPENING = 2;</code> 6125 */ 6126 public static final int FAILED_OPENING_VALUE = 2; 6127 6128 getNumber()6129 public final int getNumber() { return value; } 6130 valueOf(int value)6131 public static RegionOpeningState valueOf(int value) { 6132 switch (value) { 6133 case 0: return OPENED; 6134 case 1: return ALREADY_OPENED; 6135 case 2: return FAILED_OPENING; 6136 default: return null; 6137 } 6138 } 6139 6140 public static com.google.protobuf.Internal.EnumLiteMap<RegionOpeningState> internalGetValueMap()6141 internalGetValueMap() { 6142 return internalValueMap; 6143 } 6144 private static com.google.protobuf.Internal.EnumLiteMap<RegionOpeningState> 6145 internalValueMap = 6146 new com.google.protobuf.Internal.EnumLiteMap<RegionOpeningState>() { 6147 public RegionOpeningState findValueByNumber(int number) { 6148 return RegionOpeningState.valueOf(number); 6149 } 6150 }; 6151 6152 public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor()6153 getValueDescriptor() { 6154 return getDescriptor().getValues().get(index); 6155 } 6156 public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType()6157 getDescriptorForType() { 6158 return getDescriptor(); 
6159 } 6160 public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor()6161 getDescriptor() { 6162 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDescriptor().getEnumTypes().get(0); 6163 } 6164 6165 private static final RegionOpeningState[] VALUES = values(); 6166 valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc)6167 public static RegionOpeningState valueOf( 6168 com.google.protobuf.Descriptors.EnumValueDescriptor desc) { 6169 if (desc.getType() != getDescriptor()) { 6170 throw new java.lang.IllegalArgumentException( 6171 "EnumValueDescriptor is not for this type."); 6172 } 6173 return VALUES[desc.getIndex()]; 6174 } 6175 6176 private final int index; 6177 private final int value; 6178 RegionOpeningState(int index, int value)6179 private RegionOpeningState(int index, int value) { 6180 this.index = index; 6181 this.value = value; 6182 } 6183 6184 // @@protoc_insertion_point(enum_scope:OpenRegionResponse.RegionOpeningState) 6185 } 6186 6187 // repeated .OpenRegionResponse.RegionOpeningState opening_state = 1; 6188 public static final int OPENING_STATE_FIELD_NUMBER = 1; 6189 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> openingState_; 6190 /** 6191 * <code>repeated .OpenRegionResponse.RegionOpeningState opening_state = 1;</code> 6192 */ getOpeningStateList()6193 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> getOpeningStateList() { 6194 return openingState_; 6195 } 6196 /** 6197 * <code>repeated .OpenRegionResponse.RegionOpeningState opening_state = 1;</code> 6198 */ getOpeningStateCount()6199 public int getOpeningStateCount() { 6200 return openingState_.size(); 6201 } 6202 /** 6203 * <code>repeated .OpenRegionResponse.RegionOpeningState opening_state = 1;</code> 6204 */ getOpeningState(int index)6205 public 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index) { 6206 return openingState_.get(index); 6207 } 6208 initFields()6209 private void initFields() { 6210 openingState_ = java.util.Collections.emptyList(); 6211 } 6212 private byte memoizedIsInitialized = -1; isInitialized()6213 public final boolean isInitialized() { 6214 byte isInitialized = memoizedIsInitialized; 6215 if (isInitialized != -1) return isInitialized == 1; 6216 6217 memoizedIsInitialized = 1; 6218 return true; 6219 } 6220 writeTo(com.google.protobuf.CodedOutputStream output)6221 public void writeTo(com.google.protobuf.CodedOutputStream output) 6222 throws java.io.IOException { 6223 getSerializedSize(); 6224 for (int i = 0; i < openingState_.size(); i++) { 6225 output.writeEnum(1, openingState_.get(i).getNumber()); 6226 } 6227 getUnknownFields().writeTo(output); 6228 } 6229 6230 private int memoizedSerializedSize = -1; getSerializedSize()6231 public int getSerializedSize() { 6232 int size = memoizedSerializedSize; 6233 if (size != -1) return size; 6234 6235 size = 0; 6236 { 6237 int dataSize = 0; 6238 for (int i = 0; i < openingState_.size(); i++) { 6239 dataSize += com.google.protobuf.CodedOutputStream 6240 .computeEnumSizeNoTag(openingState_.get(i).getNumber()); 6241 } 6242 size += dataSize; 6243 size += 1 * openingState_.size(); 6244 } 6245 size += getUnknownFields().getSerializedSize(); 6246 memoizedSerializedSize = size; 6247 return size; 6248 } 6249 6250 private static final long serialVersionUID = 0L; 6251 @java.lang.Override writeReplace()6252 protected java.lang.Object writeReplace() 6253 throws java.io.ObjectStreamException { 6254 return super.writeReplace(); 6255 } 6256 6257 @java.lang.Override equals(final java.lang.Object obj)6258 public boolean equals(final java.lang.Object obj) { 6259 if (obj == this) { 6260 return true; 6261 } 6262 if (!(obj instanceof 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse)) { 6263 return super.equals(obj); 6264 } 6265 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) obj; 6266 6267 boolean result = true; 6268 result = result && getOpeningStateList() 6269 .equals(other.getOpeningStateList()); 6270 result = result && 6271 getUnknownFields().equals(other.getUnknownFields()); 6272 return result; 6273 } 6274 6275 private int memoizedHashCode = 0; 6276 @java.lang.Override hashCode()6277 public int hashCode() { 6278 if (memoizedHashCode != 0) { 6279 return memoizedHashCode; 6280 } 6281 int hash = 41; 6282 hash = (19 * hash) + getDescriptorForType().hashCode(); 6283 if (getOpeningStateCount() > 0) { 6284 hash = (37 * hash) + OPENING_STATE_FIELD_NUMBER; 6285 hash = (53 * hash) + hashEnumList(getOpeningStateList()); 6286 } 6287 hash = (29 * hash) + getUnknownFields().hashCode(); 6288 memoizedHashCode = hash; 6289 return hash; 6290 } 6291 parseFrom( com.google.protobuf.ByteString data)6292 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( 6293 com.google.protobuf.ByteString data) 6294 throws com.google.protobuf.InvalidProtocolBufferException { 6295 return PARSER.parseFrom(data); 6296 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6297 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( 6298 com.google.protobuf.ByteString data, 6299 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6300 throws com.google.protobuf.InvalidProtocolBufferException { 6301 return PARSER.parseFrom(data, extensionRegistry); 6302 } parseFrom(byte[] data)6303 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(byte[] data) 6304 throws 
com.google.protobuf.InvalidProtocolBufferException { 6305 return PARSER.parseFrom(data); 6306 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6307 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( 6308 byte[] data, 6309 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6310 throws com.google.protobuf.InvalidProtocolBufferException { 6311 return PARSER.parseFrom(data, extensionRegistry); 6312 } parseFrom(java.io.InputStream input)6313 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom(java.io.InputStream input) 6314 throws java.io.IOException { 6315 return PARSER.parseFrom(input); 6316 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6317 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( 6318 java.io.InputStream input, 6319 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6320 throws java.io.IOException { 6321 return PARSER.parseFrom(input, extensionRegistry); 6322 } parseDelimitedFrom(java.io.InputStream input)6323 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseDelimitedFrom(java.io.InputStream input) 6324 throws java.io.IOException { 6325 return PARSER.parseDelimitedFrom(input); 6326 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6327 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseDelimitedFrom( 6328 java.io.InputStream input, 6329 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6330 throws java.io.IOException { 6331 return PARSER.parseDelimitedFrom(input, extensionRegistry); 6332 } parseFrom( com.google.protobuf.CodedInputStream input)6333 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( 6334 
com.google.protobuf.CodedInputStream input) 6335 throws java.io.IOException { 6336 return PARSER.parseFrom(input); 6337 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6338 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parseFrom( 6339 com.google.protobuf.CodedInputStream input, 6340 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6341 throws java.io.IOException { 6342 return PARSER.parseFrom(input, extensionRegistry); 6343 } 6344 newBuilder()6345 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()6346 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse prototype)6347 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse prototype) { 6348 return newBuilder().mergeFrom(prototype); 6349 } toBuilder()6350 public Builder toBuilder() { return newBuilder(this); } 6351 6352 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)6353 protected Builder newBuilderForType( 6354 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 6355 Builder builder = new Builder(parent); 6356 return builder; 6357 } 6358 /** 6359 * Protobuf type {@code OpenRegionResponse} 6360 */ 6361 public static final class Builder extends 6362 com.google.protobuf.GeneratedMessage.Builder<Builder> 6363 implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponseOrBuilder { 6364 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()6365 getDescriptor() { 6366 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_descriptor; 6367 } 6368 6369 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()6370 internalGetFieldAccessorTable() 
{ 6371 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_fieldAccessorTable 6372 .ensureFieldAccessorsInitialized( 6373 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.Builder.class); 6374 } 6375 6376 // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.newBuilder() Builder()6377 private Builder() { 6378 maybeForceBuilderInitialization(); 6379 } 6380 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)6381 private Builder( 6382 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 6383 super(parent); 6384 maybeForceBuilderInitialization(); 6385 } maybeForceBuilderInitialization()6386 private void maybeForceBuilderInitialization() { 6387 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 6388 } 6389 } create()6390 private static Builder create() { 6391 return new Builder(); 6392 } 6393 clear()6394 public Builder clear() { 6395 super.clear(); 6396 openingState_ = java.util.Collections.emptyList(); 6397 bitField0_ = (bitField0_ & ~0x00000001); 6398 return this; 6399 } 6400 clone()6401 public Builder clone() { 6402 return create().mergeFrom(buildPartial()); 6403 } 6404 6405 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()6406 getDescriptorForType() { 6407 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_OpenRegionResponse_descriptor; 6408 } 6409 getDefaultInstanceForType()6410 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse getDefaultInstanceForType() { 6411 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance(); 6412 } 6413 build()6414 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse build() { 6415 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse result 
= buildPartial(); 6416 if (!result.isInitialized()) { 6417 throw newUninitializedMessageException(result); 6418 } 6419 return result; 6420 } 6421 buildPartial()6422 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse buildPartial() { 6423 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse(this); 6424 int from_bitField0_ = bitField0_; 6425 if (((bitField0_ & 0x00000001) == 0x00000001)) { 6426 openingState_ = java.util.Collections.unmodifiableList(openingState_); 6427 bitField0_ = (bitField0_ & ~0x00000001); 6428 } 6429 result.openingState_ = openingState_; 6430 onBuilt(); 6431 return result; 6432 } 6433 mergeFrom(com.google.protobuf.Message other)6434 public Builder mergeFrom(com.google.protobuf.Message other) { 6435 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) { 6436 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse)other); 6437 } else { 6438 super.mergeFrom(other); 6439 return this; 6440 } 6441 } 6442 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse other)6443 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse other) { 6444 if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.getDefaultInstance()) return this; 6445 if (!other.openingState_.isEmpty()) { 6446 if (openingState_.isEmpty()) { 6447 openingState_ = other.openingState_; 6448 bitField0_ = (bitField0_ & ~0x00000001); 6449 } else { 6450 ensureOpeningStateIsMutable(); 6451 openingState_.addAll(other.openingState_); 6452 } 6453 onChanged(); 6454 } 6455 this.mergeUnknownFields(other.getUnknownFields()); 6456 return this; 6457 } 6458 isInitialized()6459 public final boolean isInitialized() { 6460 return true; 6461 } 6462 mergeFrom( com.google.protobuf.CodedInputStream 
input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6463 public Builder mergeFrom( 6464 com.google.protobuf.CodedInputStream input, 6465 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6466 throws java.io.IOException { 6467 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse parsedMessage = null; 6468 try { 6469 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 6470 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 6471 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse) e.getUnfinishedMessage(); 6472 throw e; 6473 } finally { 6474 if (parsedMessage != null) { 6475 mergeFrom(parsedMessage); 6476 } 6477 } 6478 return this; 6479 } 6480 private int bitField0_; 6481 6482 // repeated .OpenRegionResponse.RegionOpeningState opening_state = 1; 6483 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> openingState_ = 6484 java.util.Collections.emptyList(); ensureOpeningStateIsMutable()6485 private void ensureOpeningStateIsMutable() { 6486 if (!((bitField0_ & 0x00000001) == 0x00000001)) { 6487 openingState_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState>(openingState_); 6488 bitField0_ |= 0x00000001; 6489 } 6490 } 6491 /** 6492 * <code>repeated .OpenRegionResponse.RegionOpeningState opening_state = 1;</code> 6493 */ getOpeningStateList()6494 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> getOpeningStateList() { 6495 return java.util.Collections.unmodifiableList(openingState_); 6496 } 6497 /** 6498 * <code>repeated .OpenRegionResponse.RegionOpeningState opening_state = 1;</code> 6499 */ getOpeningStateCount()6500 public int getOpeningStateCount() { 6501 return openingState_.size(); 6502 } 6503 /** 6504 * <code>repeated 
.OpenRegionResponse.RegionOpeningState opening_state = 1;</code> 6505 */ getOpeningState(int index)6506 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState getOpeningState(int index) { 6507 return openingState_.get(index); 6508 } 6509 /** 6510 * <code>repeated .OpenRegionResponse.RegionOpeningState opening_state = 1;</code> 6511 */ setOpeningState( int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value)6512 public Builder setOpeningState( 6513 int index, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value) { 6514 if (value == null) { 6515 throw new NullPointerException(); 6516 } 6517 ensureOpeningStateIsMutable(); 6518 openingState_.set(index, value); 6519 onChanged(); 6520 return this; 6521 } 6522 /** 6523 * <code>repeated .OpenRegionResponse.RegionOpeningState opening_state = 1;</code> 6524 */ addOpeningState(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value)6525 public Builder addOpeningState(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState value) { 6526 if (value == null) { 6527 throw new NullPointerException(); 6528 } 6529 ensureOpeningStateIsMutable(); 6530 openingState_.add(value); 6531 onChanged(); 6532 return this; 6533 } 6534 /** 6535 * <code>repeated .OpenRegionResponse.RegionOpeningState opening_state = 1;</code> 6536 */ addAllOpeningState( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> values)6537 public Builder addAllOpeningState( 6538 java.lang.Iterable<? 
extends org.apache.hadoop.hbase.protobuf.generated.AdminProtos.OpenRegionResponse.RegionOpeningState> values) { 6539 ensureOpeningStateIsMutable(); 6540 super.addAll(values, openingState_); 6541 onChanged(); 6542 return this; 6543 } 6544 /** 6545 * <code>repeated .OpenRegionResponse.RegionOpeningState opening_state = 1;</code> 6546 */ clearOpeningState()6547 public Builder clearOpeningState() { 6548 openingState_ = java.util.Collections.emptyList(); 6549 bitField0_ = (bitField0_ & ~0x00000001); 6550 onChanged(); 6551 return this; 6552 } 6553 6554 // @@protoc_insertion_point(builder_scope:OpenRegionResponse) 6555 } 6556 6557 static { 6558 defaultInstance = new OpenRegionResponse(true); defaultInstance.initFields()6559 defaultInstance.initFields(); 6560 } 6561 6562 // @@protoc_insertion_point(class_scope:OpenRegionResponse) 6563 } 6564 6565 public interface WarmupRegionRequestOrBuilder 6566 extends com.google.protobuf.MessageOrBuilder { 6567 6568 // required .RegionInfo regionInfo = 1; 6569 /** 6570 * <code>required .RegionInfo regionInfo = 1;</code> 6571 */ hasRegionInfo()6572 boolean hasRegionInfo(); 6573 /** 6574 * <code>required .RegionInfo regionInfo = 1;</code> 6575 */ getRegionInfo()6576 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo(); 6577 /** 6578 * <code>required .RegionInfo regionInfo = 1;</code> 6579 */ getRegionInfoOrBuilder()6580 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder(); 6581 } 6582 /** 6583 * Protobuf type {@code WarmupRegionRequest} 6584 */ 6585 public static final class WarmupRegionRequest extends 6586 com.google.protobuf.GeneratedMessage 6587 implements WarmupRegionRequestOrBuilder { 6588 // Use WarmupRegionRequest.newBuilder() to construct. 
WarmupRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder)6589 private WarmupRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 6590 super(builder); 6591 this.unknownFields = builder.getUnknownFields(); 6592 } WarmupRegionRequest(boolean noInit)6593 private WarmupRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 6594 6595 private static final WarmupRegionRequest defaultInstance; getDefaultInstance()6596 public static WarmupRegionRequest getDefaultInstance() { 6597 return defaultInstance; 6598 } 6599 getDefaultInstanceForType()6600 public WarmupRegionRequest getDefaultInstanceForType() { 6601 return defaultInstance; 6602 } 6603 6604 private final com.google.protobuf.UnknownFieldSet unknownFields; 6605 @java.lang.Override 6606 public final com.google.protobuf.UnknownFieldSet getUnknownFields()6607 getUnknownFields() { 6608 return this.unknownFields; 6609 } WarmupRegionRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6610 private WarmupRegionRequest( 6611 com.google.protobuf.CodedInputStream input, 6612 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6613 throws com.google.protobuf.InvalidProtocolBufferException { 6614 initFields(); 6615 int mutable_bitField0_ = 0; 6616 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 6617 com.google.protobuf.UnknownFieldSet.newBuilder(); 6618 try { 6619 boolean done = false; 6620 while (!done) { 6621 int tag = input.readTag(); 6622 switch (tag) { 6623 case 0: 6624 done = true; 6625 break; 6626 default: { 6627 if (!parseUnknownField(input, unknownFields, 6628 extensionRegistry, tag)) { 6629 done = true; 6630 } 6631 break; 6632 } 6633 case 10: { 6634 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder subBuilder = null; 6635 if (((bitField0_ & 0x00000001) == 0x00000001)) { 6636 subBuilder = regionInfo_.toBuilder(); 6637 } 6638 
regionInfo_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.PARSER, extensionRegistry); 6639 if (subBuilder != null) { 6640 subBuilder.mergeFrom(regionInfo_); 6641 regionInfo_ = subBuilder.buildPartial(); 6642 } 6643 bitField0_ |= 0x00000001; 6644 break; 6645 } 6646 } 6647 } 6648 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 6649 throw e.setUnfinishedMessage(this); 6650 } catch (java.io.IOException e) { 6651 throw new com.google.protobuf.InvalidProtocolBufferException( 6652 e.getMessage()).setUnfinishedMessage(this); 6653 } finally { 6654 this.unknownFields = unknownFields.build(); 6655 makeExtensionsImmutable(); 6656 } 6657 } 6658 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()6659 getDescriptor() { 6660 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WarmupRegionRequest_descriptor; 6661 } 6662 6663 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()6664 internalGetFieldAccessorTable() { 6665 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WarmupRegionRequest_fieldAccessorTable 6666 .ensureFieldAccessorsInitialized( 6667 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest.Builder.class); 6668 } 6669 6670 public static com.google.protobuf.Parser<WarmupRegionRequest> PARSER = 6671 new com.google.protobuf.AbstractParser<WarmupRegionRequest>() { 6672 public WarmupRegionRequest parsePartialFrom( 6673 com.google.protobuf.CodedInputStream input, 6674 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6675 throws com.google.protobuf.InvalidProtocolBufferException { 6676 return new WarmupRegionRequest(input, extensionRegistry); 6677 } 6678 }; 6679 6680 @java.lang.Override getParserForType()6681 public com.google.protobuf.Parser<WarmupRegionRequest> getParserForType() { 
6682 return PARSER; 6683 } 6684 6685 private int bitField0_; 6686 // required .RegionInfo regionInfo = 1; 6687 public static final int REGIONINFO_FIELD_NUMBER = 1; 6688 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_; 6689 /** 6690 * <code>required .RegionInfo regionInfo = 1;</code> 6691 */ hasRegionInfo()6692 public boolean hasRegionInfo() { 6693 return ((bitField0_ & 0x00000001) == 0x00000001); 6694 } 6695 /** 6696 * <code>required .RegionInfo regionInfo = 1;</code> 6697 */ getRegionInfo()6698 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() { 6699 return regionInfo_; 6700 } 6701 /** 6702 * <code>required .RegionInfo regionInfo = 1;</code> 6703 */ getRegionInfoOrBuilder()6704 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() { 6705 return regionInfo_; 6706 } 6707 initFields()6708 private void initFields() { 6709 regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance(); 6710 } 6711 private byte memoizedIsInitialized = -1; isInitialized()6712 public final boolean isInitialized() { 6713 byte isInitialized = memoizedIsInitialized; 6714 if (isInitialized != -1) return isInitialized == 1; 6715 6716 if (!hasRegionInfo()) { 6717 memoizedIsInitialized = 0; 6718 return false; 6719 } 6720 if (!getRegionInfo().isInitialized()) { 6721 memoizedIsInitialized = 0; 6722 return false; 6723 } 6724 memoizedIsInitialized = 1; 6725 return true; 6726 } 6727 writeTo(com.google.protobuf.CodedOutputStream output)6728 public void writeTo(com.google.protobuf.CodedOutputStream output) 6729 throws java.io.IOException { 6730 getSerializedSize(); 6731 if (((bitField0_ & 0x00000001) == 0x00000001)) { 6732 output.writeMessage(1, regionInfo_); 6733 } 6734 getUnknownFields().writeTo(output); 6735 } 6736 6737 private int memoizedSerializedSize = -1; getSerializedSize()6738 public int getSerializedSize() { 6739 int size = 
memoizedSerializedSize; 6740 if (size != -1) return size; 6741 6742 size = 0; 6743 if (((bitField0_ & 0x00000001) == 0x00000001)) { 6744 size += com.google.protobuf.CodedOutputStream 6745 .computeMessageSize(1, regionInfo_); 6746 } 6747 size += getUnknownFields().getSerializedSize(); 6748 memoizedSerializedSize = size; 6749 return size; 6750 } 6751 6752 private static final long serialVersionUID = 0L; 6753 @java.lang.Override writeReplace()6754 protected java.lang.Object writeReplace() 6755 throws java.io.ObjectStreamException { 6756 return super.writeReplace(); 6757 } 6758 6759 @java.lang.Override equals(final java.lang.Object obj)6760 public boolean equals(final java.lang.Object obj) { 6761 if (obj == this) { 6762 return true; 6763 } 6764 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest)) { 6765 return super.equals(obj); 6766 } 6767 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest) obj; 6768 6769 boolean result = true; 6770 result = result && (hasRegionInfo() == other.hasRegionInfo()); 6771 if (hasRegionInfo()) { 6772 result = result && getRegionInfo() 6773 .equals(other.getRegionInfo()); 6774 } 6775 result = result && 6776 getUnknownFields().equals(other.getUnknownFields()); 6777 return result; 6778 } 6779 6780 private int memoizedHashCode = 0; 6781 @java.lang.Override hashCode()6782 public int hashCode() { 6783 if (memoizedHashCode != 0) { 6784 return memoizedHashCode; 6785 } 6786 int hash = 41; 6787 hash = (19 * hash) + getDescriptorForType().hashCode(); 6788 if (hasRegionInfo()) { 6789 hash = (37 * hash) + REGIONINFO_FIELD_NUMBER; 6790 hash = (53 * hash) + getRegionInfo().hashCode(); 6791 } 6792 hash = (29 * hash) + getUnknownFields().hashCode(); 6793 memoizedHashCode = hash; 6794 return hash; 6795 } 6796 parseFrom( com.google.protobuf.ByteString data)6797 public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest parseFrom( 6798 com.google.protobuf.ByteString data) 6799 throws com.google.protobuf.InvalidProtocolBufferException { 6800 return PARSER.parseFrom(data); 6801 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6802 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest parseFrom( 6803 com.google.protobuf.ByteString data, 6804 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6805 throws com.google.protobuf.InvalidProtocolBufferException { 6806 return PARSER.parseFrom(data, extensionRegistry); 6807 } parseFrom(byte[] data)6808 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest parseFrom(byte[] data) 6809 throws com.google.protobuf.InvalidProtocolBufferException { 6810 return PARSER.parseFrom(data); 6811 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6812 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest parseFrom( 6813 byte[] data, 6814 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6815 throws com.google.protobuf.InvalidProtocolBufferException { 6816 return PARSER.parseFrom(data, extensionRegistry); 6817 } parseFrom(java.io.InputStream input)6818 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest parseFrom(java.io.InputStream input) 6819 throws java.io.IOException { 6820 return PARSER.parseFrom(input); 6821 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6822 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest parseFrom( 6823 java.io.InputStream input, 6824 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6825 throws java.io.IOException { 6826 return PARSER.parseFrom(input, extensionRegistry); 6827 } 
parseDelimitedFrom(java.io.InputStream input)6828 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest parseDelimitedFrom(java.io.InputStream input) 6829 throws java.io.IOException { 6830 return PARSER.parseDelimitedFrom(input); 6831 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6832 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest parseDelimitedFrom( 6833 java.io.InputStream input, 6834 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6835 throws java.io.IOException { 6836 return PARSER.parseDelimitedFrom(input, extensionRegistry); 6837 } parseFrom( com.google.protobuf.CodedInputStream input)6838 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest parseFrom( 6839 com.google.protobuf.CodedInputStream input) 6840 throws java.io.IOException { 6841 return PARSER.parseFrom(input); 6842 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6843 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest parseFrom( 6844 com.google.protobuf.CodedInputStream input, 6845 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6846 throws java.io.IOException { 6847 return PARSER.parseFrom(input, extensionRegistry); 6848 } 6849 newBuilder()6850 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()6851 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest prototype)6852 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest prototype) { 6853 return newBuilder().mergeFrom(prototype); 6854 } toBuilder()6855 public Builder toBuilder() { return newBuilder(this); } 6856 6857 @java.lang.Override newBuilderForType( 
// NOTE(review): protoc-generated code (protobuf 2.5 GeneratedMessage API) — DO NOT hand-edit.
// Any change belongs in Admin.proto followed by regeneration. Comments below added for review only.
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessage.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * Protobuf type {@code WarmupRegionRequest}
 */
public static final class Builder extends
    com.google.protobuf.GeneratedMessage.Builder<Builder>
   implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequestOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WarmupRegionRequest_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WarmupRegionRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest.Builder.class);
  }

  // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  private void maybeForceBuilderInitialization() {
    // Eagerly create the nested-message field builder when the runtime asks for it
    // (only true inside the protobuf runtime's nested-builder mode).
    if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
      getRegionInfoFieldBuilder();
    }
  }
  private static Builder create() {
    return new Builder();
  }

  public Builder clear() {
    super.clear();
    if (regionInfoBuilder_ == null) {
      regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
    } else {
      regionInfoBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000001);
    return this;
  }

  public Builder clone() {
    return create().mergeFrom(buildPartial());
  }

  public com.google.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WarmupRegionRequest_descriptor;
  }

  public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest getDefaultInstanceForType() {
    return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest.getDefaultInstance();
  }

  public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest build() {
    org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest buildPartial() {
    org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest(this);
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
      to_bitField0_ |= 0x00000001;
    }
    if (regionInfoBuilder_ == null) {
      result.regionInfo_ = regionInfo_;
    } else {
      result.regionInfo_ = regionInfoBuilder_.build();
    }
    result.bitField0_ = to_bitField0_;
    onBuilt();
    return result;
  }

  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest) {
      return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest other) {
    if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest.getDefaultInstance()) return this;
    if (other.hasRegionInfo()) {
      mergeRegionInfo(other.getRegionInfo());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    return this;
  }

  public final boolean isInitialized() {
    // regionInfo is a `required` proto2 field and must itself be fully initialized.
    if (!hasRegionInfo()) {

      return false;
    }
    if (!getRegionInfo().isInitialized()) {

      return false;
    }
    return true;
  }

  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      // Keep whatever was parsed before the failure so it can still be merged in `finally`.
      parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionRequest) e.getUnfinishedMessage();
      throw e;
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }
  private int bitField0_;

  // required .RegionInfo regionInfo = 1;
  private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
  private com.google.protobuf.SingleFieldBuilder<
      org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder> regionInfoBuilder_;
  /**
   * <code>required .RegionInfo regionInfo = 1;</code>
   */
  public boolean hasRegionInfo() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  /**
   * <code>required .RegionInfo regionInfo = 1;</code>
   */
  public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo getRegionInfo() {
    if (regionInfoBuilder_ == null) {
      return regionInfo_;
    } else {
      return regionInfoBuilder_.getMessage();
    }
  }
  /**
   * <code>required .RegionInfo regionInfo = 1;</code>
   */
  public Builder setRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
    if (regionInfoBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      regionInfo_ = value;
      onChanged();
    } else {
      regionInfoBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000001;
    return this;
  }
  /**
   * <code>required .RegionInfo regionInfo = 1;</code>
   */
  public Builder setRegionInfo(
      org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder builderForValue) {
    if (regionInfoBuilder_ == null) {
      regionInfo_ = builderForValue.build();
      onChanged();
    } else {
      regionInfoBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000001;
    return this;
  }
  /**
   * <code>required .RegionInfo regionInfo = 1;</code>
   */
  public Builder mergeRegionInfo(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo value) {
    if (regionInfoBuilder_ == null) {
      // Merge field-wise only when a non-default value is already present; otherwise replace.
      if (((bitField0_ & 0x00000001) == 0x00000001) &&
          regionInfo_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance()) {
        regionInfo_ =
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.newBuilder(regionInfo_).mergeFrom(value).buildPartial();
      } else {
        regionInfo_ = value;
      }
      onChanged();
    } else {
      regionInfoBuilder_.mergeFrom(value);
    }
    bitField0_ |= 0x00000001;
    return this;
  }
  /**
   * <code>required .RegionInfo regionInfo = 1;</code>
   */
  public Builder clearRegionInfo() {
    if (regionInfoBuilder_ == null) {
      regionInfo_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.getDefaultInstance();
      onChanged();
    } else {
      regionInfoBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000001);
    return this;
  }
  /**
   * <code>required .RegionInfo regionInfo = 1;</code>
   */
  public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder getRegionInfoBuilder() {
    bitField0_ |= 0x00000001;
    onChanged();
    return getRegionInfoFieldBuilder().getBuilder();
  }
  /**
   * <code>required .RegionInfo regionInfo = 1;</code>
   */
  public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder getRegionInfoOrBuilder() {
    if (regionInfoBuilder_ != null) {
      return regionInfoBuilder_.getMessageOrBuilder();
    } else {
      return regionInfo_;
    }
  }
  /**
   * <code>required .RegionInfo regionInfo = 1;</code>
   */
  private com.google.protobuf.SingleFieldBuilder<
      org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>
      getRegionInfoFieldBuilder() {
    if (regionInfoBuilder_ == null) {
      regionInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfoOrBuilder>(
              regionInfo_,
              getParentForChildren(),
              isClean());
      // Once the field builder owns the message, the plain field must be nulled out.
      regionInfo_ = null;
    }
    return regionInfoBuilder_;
  }

  // @@protoc_insertion_point(builder_scope:WarmupRegionRequest)
}

static {
  defaultInstance = new WarmupRegionRequest(true);
  defaultInstance.initFields();
}

// @@protoc_insertion_point(class_scope:WarmupRegionRequest)
}

public interface WarmupRegionResponseOrBuilder
    extends com.google.protobuf.MessageOrBuilder {
}
/**
 * Protobuf type {@code WarmupRegionResponse}
 */
public static final class WarmupRegionResponse extends
    com.google.protobuf.GeneratedMessage
    implements WarmupRegionResponseOrBuilder {
  // Use WarmupRegionResponse.newBuilder() to construct.
// NOTE(review): protoc-generated code — DO NOT hand-edit; regenerate from Admin.proto instead.
private WarmupRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
  super(builder);
  this.unknownFields = builder.getUnknownFields();
}
private WarmupRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

private static final WarmupRegionResponse defaultInstance;
public static WarmupRegionResponse getDefaultInstance() {
  return defaultInstance;
}

public WarmupRegionResponse getDefaultInstanceForType() {
  return defaultInstance;
}

private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
    getUnknownFields() {
  return this.unknownFields;
}
// Wire-format parsing constructor: the message declares no fields, so everything
// read here lands in unknownFields.
private WarmupRegionResponse(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  initFields();
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        default: {
          if (!parseUnknownField(input, unknownFields,
                                 extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(
        e.getMessage()).setUnfinishedMessage(this);
  } finally {
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WarmupRegionResponse_descriptor;
}

protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WarmupRegionResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse.Builder.class);
}

public static com.google.protobuf.Parser<WarmupRegionResponse> PARSER =
    new com.google.protobuf.AbstractParser<WarmupRegionResponse>() {
  public WarmupRegionResponse parsePartialFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return new WarmupRegionResponse(input, extensionRegistry);
  }
};

@java.lang.Override
public com.google.protobuf.Parser<WarmupRegionResponse> getParserForType() {
  return PARSER;
}

private void initFields() {
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized != -1) return isInitialized == 1;

  memoizedIsInitialized = 1;
  return true;
}

public void writeTo(com.google.protobuf.CodedOutputStream output)
                    throws java.io.IOException {
  getSerializedSize();
  getUnknownFields().writeTo(output);
}

private int memoizedSerializedSize = -1;
public int getSerializedSize() {
  int size = memoizedSerializedSize;
  if (size != -1) return size;

  size = 0;
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}

private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
    throws java.io.ObjectStreamException {
  return super.writeReplace();
}

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
   return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse) obj;

  boolean result = true;
  result = result &&
      getUnknownFields().equals(other.getUnknownFields());
  return result;
}

private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptorForType().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}

public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}

public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse prototype) {
  return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }

@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessage.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * Protobuf type {@code WarmupRegionResponse}
 */
public static final class Builder extends
    com.google.protobuf.GeneratedMessage.Builder<Builder>
   implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponseOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WarmupRegionResponse_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WarmupRegionResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse.Builder.class);
  }

  // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }
  private void maybeForceBuilderInitialization() {
    // No message-typed fields, so nothing to pre-initialize.
    if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
    }
  }
  private static Builder create() {
    return new Builder();
  }

  public Builder clear() {
    super.clear();
    return this;
  }

  public Builder clone() {
    return create().mergeFrom(buildPartial());
  }

  public com.google.protobuf.Descriptors.Descriptor
      getDescriptorForType() {
    return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_WarmupRegionResponse_descriptor;
  }

  public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse getDefaultInstanceForType() {
    return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse.getDefaultInstance();
  }

  public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse build() {
    org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse buildPartial() {
    org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse(this);
    onBuilt();
    return result;
  }

  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse) {
      return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse)other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse other) {
    if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse.getDefaultInstance()) return this;
    this.mergeUnknownFields(other.getUnknownFields());
    return this;
  }

  public final boolean isInitialized() {
    return true;
  }

  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse parsedMessage = null;
    try {
      parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WarmupRegionResponse) e.getUnfinishedMessage();
      throw e;
    } finally {
      if (parsedMessage != null) {
        mergeFrom(parsedMessage);
      }
    }
    return this;
  }

  // @@protoc_insertion_point(builder_scope:WarmupRegionResponse)
}

static {
  defaultInstance = new WarmupRegionResponse(true);
  defaultInstance.initFields();
}

// @@protoc_insertion_point(class_scope:WarmupRegionResponse)
}

public interface CloseRegionRequestOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // required .RegionSpecifier region = 1;
  /**
   * <code>required .RegionSpecifier region = 1;</code>
   */
  boolean hasRegion();
  /**
   * <code>required .RegionSpecifier region = 1;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
  /**
   * <code>required .RegionSpecifier region = 1;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();

  // optional uint32 version_of_closing_node = 2;
  /**
   * <code>optional uint32 version_of_closing_node = 2;</code>
   */
  boolean hasVersionOfClosingNode();
  /**
   * <code>optional uint32
version_of_closing_node = 2;</code>
   */
  int getVersionOfClosingNode();

  // optional bool transition_in_ZK = 3 [default = true];
  /**
   * <code>optional bool transition_in_ZK = 3 [default = true];</code>
   */
  boolean hasTransitionInZK();
  /**
   * <code>optional bool transition_in_ZK = 3 [default = true];</code>
   */
  boolean getTransitionInZK();

  // optional .ServerName destination_server = 4;
  /**
   * <code>optional .ServerName destination_server = 4;</code>
   */
  boolean hasDestinationServer();
  /**
   * <code>optional .ServerName destination_server = 4;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestinationServer();
  /**
   * <code>optional .ServerName destination_server = 4;</code>
   */
  org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestinationServerOrBuilder();

  // optional uint64 serverStartCode = 5;
  /**
   * <code>optional uint64 serverStartCode = 5;</code>
   *
   * <pre>
   * the intended server for this RPC.
   * </pre>
   */
  boolean hasServerStartCode();
  /**
   * <code>optional uint64 serverStartCode = 5;</code>
   *
   * <pre>
   * the intended server for this RPC.
   * </pre>
   */
  long getServerStartCode();
}
/**
 * Protobuf type {@code CloseRegionRequest}
 *
 * <pre>
 **
 * Closes the specified region and will use or not use ZK during the close
 * according to the specified flag.
 * </pre>
 */
public static final class CloseRegionRequest extends
    com.google.protobuf.GeneratedMessage
    implements CloseRegionRequestOrBuilder {
  // NOTE(review): protoc-generated code — DO NOT hand-edit; regenerate from Admin.proto instead.
  // Use CloseRegionRequest.newBuilder() to construct.
  private CloseRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
    super(builder);
    this.unknownFields = builder.getUnknownFields();
  }
  private CloseRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

  private static final CloseRegionRequest defaultInstance;
  public static CloseRegionRequest getDefaultInstance() {
    return defaultInstance;
  }

  public CloseRegionRequest getDefaultInstanceForType() {
    return defaultInstance;
  }

  private final com.google.protobuf.UnknownFieldSet unknownFields;
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
      getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-format parsing constructor; each `case` handles one field tag
  // (tag = field_number << 3 | wire_type).
  private CloseRegionRequest(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    initFields();
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(input, unknownFields,
                                   extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
          case 10: {
            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null;
            if (((bitField0_ & 0x00000001) == 0x00000001)) {
              subBuilder = region_.toBuilder();
            }
            region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(region_);
              region_ = subBuilder.buildPartial();
            }
            bitField0_ |= 0x00000001;
            break;
          }
          case 16: {
            bitField0_ |= 0x00000002;
            versionOfClosingNode_ = input.readUInt32();
            break;
          }
          case 24: {
            bitField0_ |= 0x00000004;
            transitionInZK_ = input.readBool();
            break;
          }
          case 34: {
            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null;
            if (((bitField0_ & 0x00000008) == 0x00000008)) {
              subBuilder = destinationServer_.toBuilder();
            }
            destinationServer_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(destinationServer_);
              destinationServer_ = subBuilder.buildPartial();
            }
            bitField0_ |= 0x00000008;
            break;
          }
          case 40: {
            bitField0_ |= 0x00000010;
            serverStartCode_ = input.readUInt64();
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e.getMessage()).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_descriptor;
  }

  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.Builder.class);
  }

  public static com.google.protobuf.Parser<CloseRegionRequest> PARSER =
      new com.google.protobuf.AbstractParser<CloseRegionRequest>() {
    public CloseRegionRequest parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new CloseRegionRequest(input, extensionRegistry);
    }
  };

  @java.lang.Override
  public com.google.protobuf.Parser<CloseRegionRequest> getParserForType() {
    return PARSER;
  }

  private int bitField0_;
  // required .RegionSpecifier region = 1;
  public static final int REGION_FIELD_NUMBER = 1;
  private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
  /**
   * <code>required .RegionSpecifier region = 1;</code>
   */
  public boolean hasRegion() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  /**
   * <code>required .RegionSpecifier region = 1;</code>
   */
  public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
    return region_;
  }
  /**
   * <code>required .RegionSpecifier region = 1;</code>
   */
  public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
    return region_;
  }

  // optional uint32 version_of_closing_node = 2;
  public static final int VERSION_OF_CLOSING_NODE_FIELD_NUMBER = 2;
  private int versionOfClosingNode_;
  /**
   * <code>optional uint32 version_of_closing_node = 2;</code>
   */
  public boolean hasVersionOfClosingNode() {
    return ((bitField0_ & 0x00000002) == 0x00000002);
  }
  /**
   * <code>optional uint32 version_of_closing_node = 2;</code>
   */
  public int getVersionOfClosingNode() {
    return versionOfClosingNode_;
  }

  // optional bool transition_in_ZK = 3 [default = true];
  public static final int TRANSITION_IN_ZK_FIELD_NUMBER = 3;
  private boolean transitionInZK_;
  /**
   * <code>optional bool transition_in_ZK = 3 [default = true];</code>
   */
  public boolean hasTransitionInZK() {
    return ((bitField0_ & 0x00000004) == 0x00000004);
  }
  /**
   * <code>optional bool transition_in_ZK = 3 [default = true];</code>
   */
  public boolean getTransitionInZK() {
    return transitionInZK_;
  }

  // optional .ServerName destination_server = 4;
  public static final int DESTINATION_SERVER_FIELD_NUMBER = 4;
  private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destinationServer_;
  /**
   * <code>optional .ServerName destination_server = 4;</code>
   */
  public boolean hasDestinationServer() {
    return ((bitField0_ & 0x00000008) == 0x00000008);
  }
  /**
   * <code>optional .ServerName destination_server = 4;</code>
   */
  public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestinationServer() {
    return destinationServer_;
  }
  /**
   * <code>optional .ServerName destination_server = 4;</code>
   */
  public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestinationServerOrBuilder() {
    return destinationServer_;
  }

  // optional uint64 serverStartCode = 5;
  public static final int SERVERSTARTCODE_FIELD_NUMBER = 5;
  private long serverStartCode_;
  /**
   * <code>optional uint64 serverStartCode = 5;</code>
   *
   * <pre>
   * the intended server for this RPC.
   * </pre>
   */
  public boolean hasServerStartCode() {
    return ((bitField0_ & 0x00000010) == 0x00000010);
  }
  /**
   * <code>optional uint64 serverStartCode = 5;</code>
   *
   * <pre>
   * the intended server for this RPC.
   * </pre>
   */
  public long getServerStartCode() {
    return serverStartCode_;
  }

  private void initFields() {
    region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
    versionOfClosingNode_ = 0;
    transitionInZK_ = true;
    destinationServer_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
    serverStartCode_ = 0L;
  }
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized != -1) return isInitialized == 1;

    if (!hasRegion()) {
      memoizedIsInitialized = 0;
      return false;
    }
    if (!getRegion().isInitialized()) {
      memoizedIsInitialized = 0;
      return false;
    }
    if (hasDestinationServer()) {
      if (!getDestinationServer().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
    }
    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    getSerializedSize();
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      output.writeMessage(1, region_);
    }
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
      output.writeUInt32(2, versionOfClosingNode_);
    }
    if (((bitField0_ & 0x00000004) == 0x00000004)) {
      output.writeBool(3, transitionInZK_);
    }
    if (((bitField0_ & 0x00000008) == 0x00000008)) {
      output.writeMessage(4, destinationServer_);
    }
    if (((bitField0_ & 0x00000010) == 0x00000010)) {
      output.writeUInt64(5, serverStartCode_);
    }
    getUnknownFields().writeTo(output);
  }

  private int memoizedSerializedSize = -1;
  public int getSerializedSize() {
    int size = memoizedSerializedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, region_);
    }
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
      size += com.google.protobuf.CodedOutputStream
        .computeUInt32Size(2, versionOfClosingNode_);
    }
    if (((bitField0_ & 0x00000004) == 0x00000004)) {
      size += com.google.protobuf.CodedOutputStream
        .computeBoolSize(3, transitionInZK_);
    }
    if (((bitField0_ & 0x00000008) == 0x00000008)) {
      size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(4, destinationServer_);
    }
    if (((bitField0_ & 0x00000010) == 0x00000010)) {
      size += com.google.protobuf.CodedOutputStream
        .computeUInt64Size(5, serverStartCode_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSerializedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  protected java.lang.Object writeReplace()
      throws java.io.ObjectStreamException {
    return super.writeReplace();
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)) {
      return super.equals(obj);
    }
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest) obj; 7869 7870 boolean result = true; 7871 result = result && (hasRegion() == other.hasRegion()); 7872 if (hasRegion()) { 7873 result = result && getRegion() 7874 .equals(other.getRegion()); 7875 } 7876 result = result && (hasVersionOfClosingNode() == other.hasVersionOfClosingNode()); 7877 if (hasVersionOfClosingNode()) { 7878 result = result && (getVersionOfClosingNode() 7879 == other.getVersionOfClosingNode()); 7880 } 7881 result = result && (hasTransitionInZK() == other.hasTransitionInZK()); 7882 if (hasTransitionInZK()) { 7883 result = result && (getTransitionInZK() 7884 == other.getTransitionInZK()); 7885 } 7886 result = result && (hasDestinationServer() == other.hasDestinationServer()); 7887 if (hasDestinationServer()) { 7888 result = result && getDestinationServer() 7889 .equals(other.getDestinationServer()); 7890 } 7891 result = result && (hasServerStartCode() == other.hasServerStartCode()); 7892 if (hasServerStartCode()) { 7893 result = result && (getServerStartCode() 7894 == other.getServerStartCode()); 7895 } 7896 result = result && 7897 getUnknownFields().equals(other.getUnknownFields()); 7898 return result; 7899 } 7900 7901 private int memoizedHashCode = 0; 7902 @java.lang.Override hashCode()7903 public int hashCode() { 7904 if (memoizedHashCode != 0) { 7905 return memoizedHashCode; 7906 } 7907 int hash = 41; 7908 hash = (19 * hash) + getDescriptorForType().hashCode(); 7909 if (hasRegion()) { 7910 hash = (37 * hash) + REGION_FIELD_NUMBER; 7911 hash = (53 * hash) + getRegion().hashCode(); 7912 } 7913 if (hasVersionOfClosingNode()) { 7914 hash = (37 * hash) + VERSION_OF_CLOSING_NODE_FIELD_NUMBER; 7915 hash = (53 * hash) + getVersionOfClosingNode(); 7916 } 7917 if (hasTransitionInZK()) { 7918 hash = (37 * hash) + TRANSITION_IN_ZK_FIELD_NUMBER; 7919 hash = (53 * hash) + 
hashBoolean(getTransitionInZK()); 7920 } 7921 if (hasDestinationServer()) { 7922 hash = (37 * hash) + DESTINATION_SERVER_FIELD_NUMBER; 7923 hash = (53 * hash) + getDestinationServer().hashCode(); 7924 } 7925 if (hasServerStartCode()) { 7926 hash = (37 * hash) + SERVERSTARTCODE_FIELD_NUMBER; 7927 hash = (53 * hash) + hashLong(getServerStartCode()); 7928 } 7929 hash = (29 * hash) + getUnknownFields().hashCode(); 7930 memoizedHashCode = hash; 7931 return hash; 7932 } 7933 parseFrom( com.google.protobuf.ByteString data)7934 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( 7935 com.google.protobuf.ByteString data) 7936 throws com.google.protobuf.InvalidProtocolBufferException { 7937 return PARSER.parseFrom(data); 7938 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7939 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( 7940 com.google.protobuf.ByteString data, 7941 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7942 throws com.google.protobuf.InvalidProtocolBufferException { 7943 return PARSER.parseFrom(data, extensionRegistry); 7944 } parseFrom(byte[] data)7945 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom(byte[] data) 7946 throws com.google.protobuf.InvalidProtocolBufferException { 7947 return PARSER.parseFrom(data); 7948 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7949 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( 7950 byte[] data, 7951 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7952 throws com.google.protobuf.InvalidProtocolBufferException { 7953 return PARSER.parseFrom(data, extensionRegistry); 7954 } parseFrom(java.io.InputStream input)7955 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest 
parseFrom(java.io.InputStream input) 7956 throws java.io.IOException { 7957 return PARSER.parseFrom(input); 7958 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7959 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( 7960 java.io.InputStream input, 7961 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7962 throws java.io.IOException { 7963 return PARSER.parseFrom(input, extensionRegistry); 7964 } parseDelimitedFrom(java.io.InputStream input)7965 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseDelimitedFrom(java.io.InputStream input) 7966 throws java.io.IOException { 7967 return PARSER.parseDelimitedFrom(input); 7968 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7969 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseDelimitedFrom( 7970 java.io.InputStream input, 7971 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7972 throws java.io.IOException { 7973 return PARSER.parseDelimitedFrom(input, extensionRegistry); 7974 } parseFrom( com.google.protobuf.CodedInputStream input)7975 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( 7976 com.google.protobuf.CodedInputStream input) 7977 throws java.io.IOException { 7978 return PARSER.parseFrom(input); 7979 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7980 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parseFrom( 7981 com.google.protobuf.CodedInputStream input, 7982 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7983 throws java.io.IOException { 7984 return PARSER.parseFrom(input, extensionRegistry); 7985 } 7986 newBuilder()7987 public static Builder newBuilder() { return 
Builder.create(); } newBuilderForType()7988 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest prototype)7989 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest prototype) { 7990 return newBuilder().mergeFrom(prototype); 7991 } toBuilder()7992 public Builder toBuilder() { return newBuilder(this); } 7993 7994 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)7995 protected Builder newBuilderForType( 7996 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 7997 Builder builder = new Builder(parent); 7998 return builder; 7999 } 8000 /** 8001 * Protobuf type {@code CloseRegionRequest} 8002 * 8003 * <pre> 8004 ** 8005 * Closes the specified region and will use or not use ZK during the close 8006 * according to the specified flag. 8007 * </pre> 8008 */ 8009 public static final class Builder extends 8010 com.google.protobuf.GeneratedMessage.Builder<Builder> 8011 implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequestOrBuilder { 8012 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()8013 getDescriptor() { 8014 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_descriptor; 8015 } 8016 8017 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()8018 internalGetFieldAccessorTable() { 8019 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_fieldAccessorTable 8020 .ensureFieldAccessorsInitialized( 8021 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.Builder.class); 8022 } 8023 8024 // Construct using 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.newBuilder() Builder()8025 private Builder() { 8026 maybeForceBuilderInitialization(); 8027 } 8028 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)8029 private Builder( 8030 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 8031 super(parent); 8032 maybeForceBuilderInitialization(); 8033 } maybeForceBuilderInitialization()8034 private void maybeForceBuilderInitialization() { 8035 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 8036 getRegionFieldBuilder(); 8037 getDestinationServerFieldBuilder(); 8038 } 8039 } create()8040 private static Builder create() { 8041 return new Builder(); 8042 } 8043 clear()8044 public Builder clear() { 8045 super.clear(); 8046 if (regionBuilder_ == null) { 8047 region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); 8048 } else { 8049 regionBuilder_.clear(); 8050 } 8051 bitField0_ = (bitField0_ & ~0x00000001); 8052 versionOfClosingNode_ = 0; 8053 bitField0_ = (bitField0_ & ~0x00000002); 8054 transitionInZK_ = true; 8055 bitField0_ = (bitField0_ & ~0x00000004); 8056 if (destinationServerBuilder_ == null) { 8057 destinationServer_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); 8058 } else { 8059 destinationServerBuilder_.clear(); 8060 } 8061 bitField0_ = (bitField0_ & ~0x00000008); 8062 serverStartCode_ = 0L; 8063 bitField0_ = (bitField0_ & ~0x00000010); 8064 return this; 8065 } 8066 clone()8067 public Builder clone() { 8068 return create().mergeFrom(buildPartial()); 8069 } 8070 8071 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()8072 getDescriptorForType() { 8073 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionRequest_descriptor; 8074 } 8075 getDefaultInstanceForType()8076 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest 
getDefaultInstanceForType() { 8077 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance(); 8078 } 8079 build()8080 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest build() { 8081 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest result = buildPartial(); 8082 if (!result.isInitialized()) { 8083 throw newUninitializedMessageException(result); 8084 } 8085 return result; 8086 } 8087 buildPartial()8088 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest buildPartial() { 8089 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest(this); 8090 int from_bitField0_ = bitField0_; 8091 int to_bitField0_ = 0; 8092 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 8093 to_bitField0_ |= 0x00000001; 8094 } 8095 if (regionBuilder_ == null) { 8096 result.region_ = region_; 8097 } else { 8098 result.region_ = regionBuilder_.build(); 8099 } 8100 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 8101 to_bitField0_ |= 0x00000002; 8102 } 8103 result.versionOfClosingNode_ = versionOfClosingNode_; 8104 if (((from_bitField0_ & 0x00000004) == 0x00000004)) { 8105 to_bitField0_ |= 0x00000004; 8106 } 8107 result.transitionInZK_ = transitionInZK_; 8108 if (((from_bitField0_ & 0x00000008) == 0x00000008)) { 8109 to_bitField0_ |= 0x00000008; 8110 } 8111 if (destinationServerBuilder_ == null) { 8112 result.destinationServer_ = destinationServer_; 8113 } else { 8114 result.destinationServer_ = destinationServerBuilder_.build(); 8115 } 8116 if (((from_bitField0_ & 0x00000010) == 0x00000010)) { 8117 to_bitField0_ |= 0x00000010; 8118 } 8119 result.serverStartCode_ = serverStartCode_; 8120 result.bitField0_ = to_bitField0_; 8121 onBuilt(); 8122 return result; 8123 } 8124 mergeFrom(com.google.protobuf.Message other)8125 public Builder 
mergeFrom(com.google.protobuf.Message other) { 8126 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest) { 8127 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest)other); 8128 } else { 8129 super.mergeFrom(other); 8130 return this; 8131 } 8132 } 8133 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest other)8134 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest other) { 8135 if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest.getDefaultInstance()) return this; 8136 if (other.hasRegion()) { 8137 mergeRegion(other.getRegion()); 8138 } 8139 if (other.hasVersionOfClosingNode()) { 8140 setVersionOfClosingNode(other.getVersionOfClosingNode()); 8141 } 8142 if (other.hasTransitionInZK()) { 8143 setTransitionInZK(other.getTransitionInZK()); 8144 } 8145 if (other.hasDestinationServer()) { 8146 mergeDestinationServer(other.getDestinationServer()); 8147 } 8148 if (other.hasServerStartCode()) { 8149 setServerStartCode(other.getServerStartCode()); 8150 } 8151 this.mergeUnknownFields(other.getUnknownFields()); 8152 return this; 8153 } 8154 isInitialized()8155 public final boolean isInitialized() { 8156 if (!hasRegion()) { 8157 8158 return false; 8159 } 8160 if (!getRegion().isInitialized()) { 8161 8162 return false; 8163 } 8164 if (hasDestinationServer()) { 8165 if (!getDestinationServer().isInitialized()) { 8166 8167 return false; 8168 } 8169 } 8170 return true; 8171 } 8172 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8173 public Builder mergeFrom( 8174 com.google.protobuf.CodedInputStream input, 8175 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8176 throws java.io.IOException { 8177 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest parsedMessage = null; 8178 try { 8179 
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 8180 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 8181 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest) e.getUnfinishedMessage(); 8182 throw e; 8183 } finally { 8184 if (parsedMessage != null) { 8185 mergeFrom(parsedMessage); 8186 } 8187 } 8188 return this; 8189 } 8190 private int bitField0_; 8191 8192 // required .RegionSpecifier region = 1; 8193 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); 8194 private com.google.protobuf.SingleFieldBuilder< 8195 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; 8196 /** 8197 * <code>required .RegionSpecifier region = 1;</code> 8198 */ hasRegion()8199 public boolean hasRegion() { 8200 return ((bitField0_ & 0x00000001) == 0x00000001); 8201 } 8202 /** 8203 * <code>required .RegionSpecifier region = 1;</code> 8204 */ getRegion()8205 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { 8206 if (regionBuilder_ == null) { 8207 return region_; 8208 } else { 8209 return regionBuilder_.getMessage(); 8210 } 8211 } 8212 /** 8213 * <code>required .RegionSpecifier region = 1;</code> 8214 */ setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value)8215 public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { 8216 if (regionBuilder_ == null) { 8217 if (value == null) { 8218 throw new NullPointerException(); 8219 } 8220 region_ = value; 8221 onChanged(); 8222 } else { 8223 regionBuilder_.setMessage(value); 8224 } 8225 bitField0_ |= 0x00000001; 8226 return this; 8227 } 8228 
/** 8229 * <code>required .RegionSpecifier region = 1;</code> 8230 */ setRegion( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue)8231 public Builder setRegion( 8232 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { 8233 if (regionBuilder_ == null) { 8234 region_ = builderForValue.build(); 8235 onChanged(); 8236 } else { 8237 regionBuilder_.setMessage(builderForValue.build()); 8238 } 8239 bitField0_ |= 0x00000001; 8240 return this; 8241 } 8242 /** 8243 * <code>required .RegionSpecifier region = 1;</code> 8244 */ mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value)8245 public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { 8246 if (regionBuilder_ == null) { 8247 if (((bitField0_ & 0x00000001) == 0x00000001) && 8248 region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { 8249 region_ = 8250 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); 8251 } else { 8252 region_ = value; 8253 } 8254 onChanged(); 8255 } else { 8256 regionBuilder_.mergeFrom(value); 8257 } 8258 bitField0_ |= 0x00000001; 8259 return this; 8260 } 8261 /** 8262 * <code>required .RegionSpecifier region = 1;</code> 8263 */ clearRegion()8264 public Builder clearRegion() { 8265 if (regionBuilder_ == null) { 8266 region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); 8267 onChanged(); 8268 } else { 8269 regionBuilder_.clear(); 8270 } 8271 bitField0_ = (bitField0_ & ~0x00000001); 8272 return this; 8273 } 8274 /** 8275 * <code>required .RegionSpecifier region = 1;</code> 8276 */ getRegionBuilder()8277 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { 8278 bitField0_ |= 0x00000001; 8279 onChanged(); 8280 
return getRegionFieldBuilder().getBuilder(); 8281 } 8282 /** 8283 * <code>required .RegionSpecifier region = 1;</code> 8284 */ getRegionOrBuilder()8285 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { 8286 if (regionBuilder_ != null) { 8287 return regionBuilder_.getMessageOrBuilder(); 8288 } else { 8289 return region_; 8290 } 8291 } 8292 /** 8293 * <code>required .RegionSpecifier region = 1;</code> 8294 */ 8295 private com.google.protobuf.SingleFieldBuilder< 8296 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder()8297 getRegionFieldBuilder() { 8298 if (regionBuilder_ == null) { 8299 regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< 8300 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( 8301 region_, 8302 getParentForChildren(), 8303 isClean()); 8304 region_ = null; 8305 } 8306 return regionBuilder_; 8307 } 8308 8309 // optional uint32 version_of_closing_node = 2; 8310 private int versionOfClosingNode_ ; 8311 /** 8312 * <code>optional uint32 version_of_closing_node = 2;</code> 8313 */ hasVersionOfClosingNode()8314 public boolean hasVersionOfClosingNode() { 8315 return ((bitField0_ & 0x00000002) == 0x00000002); 8316 } 8317 /** 8318 * <code>optional uint32 version_of_closing_node = 2;</code> 8319 */ getVersionOfClosingNode()8320 public int getVersionOfClosingNode() { 8321 return versionOfClosingNode_; 8322 } 8323 /** 8324 * <code>optional uint32 version_of_closing_node = 2;</code> 8325 */ setVersionOfClosingNode(int value)8326 public Builder setVersionOfClosingNode(int value) { 8327 bitField0_ |= 0x00000002; 8328 
versionOfClosingNode_ = value; 8329 onChanged(); 8330 return this; 8331 } 8332 /** 8333 * <code>optional uint32 version_of_closing_node = 2;</code> 8334 */ clearVersionOfClosingNode()8335 public Builder clearVersionOfClosingNode() { 8336 bitField0_ = (bitField0_ & ~0x00000002); 8337 versionOfClosingNode_ = 0; 8338 onChanged(); 8339 return this; 8340 } 8341 8342 // optional bool transition_in_ZK = 3 [default = true]; 8343 private boolean transitionInZK_ = true; 8344 /** 8345 * <code>optional bool transition_in_ZK = 3 [default = true];</code> 8346 */ hasTransitionInZK()8347 public boolean hasTransitionInZK() { 8348 return ((bitField0_ & 0x00000004) == 0x00000004); 8349 } 8350 /** 8351 * <code>optional bool transition_in_ZK = 3 [default = true];</code> 8352 */ getTransitionInZK()8353 public boolean getTransitionInZK() { 8354 return transitionInZK_; 8355 } 8356 /** 8357 * <code>optional bool transition_in_ZK = 3 [default = true];</code> 8358 */ setTransitionInZK(boolean value)8359 public Builder setTransitionInZK(boolean value) { 8360 bitField0_ |= 0x00000004; 8361 transitionInZK_ = value; 8362 onChanged(); 8363 return this; 8364 } 8365 /** 8366 * <code>optional bool transition_in_ZK = 3 [default = true];</code> 8367 */ clearTransitionInZK()8368 public Builder clearTransitionInZK() { 8369 bitField0_ = (bitField0_ & ~0x00000004); 8370 transitionInZK_ = true; 8371 onChanged(); 8372 return this; 8373 } 8374 8375 // optional .ServerName destination_server = 4; 8376 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName destinationServer_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); 8377 private com.google.protobuf.SingleFieldBuilder< 8378 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> destinationServerBuilder_; 8379 /** 8380 * <code>optional 
.ServerName destination_server = 4;</code> 8381 */ hasDestinationServer()8382 public boolean hasDestinationServer() { 8383 return ((bitField0_ & 0x00000008) == 0x00000008); 8384 } 8385 /** 8386 * <code>optional .ServerName destination_server = 4;</code> 8387 */ getDestinationServer()8388 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getDestinationServer() { 8389 if (destinationServerBuilder_ == null) { 8390 return destinationServer_; 8391 } else { 8392 return destinationServerBuilder_.getMessage(); 8393 } 8394 } 8395 /** 8396 * <code>optional .ServerName destination_server = 4;</code> 8397 */ setDestinationServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value)8398 public Builder setDestinationServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { 8399 if (destinationServerBuilder_ == null) { 8400 if (value == null) { 8401 throw new NullPointerException(); 8402 } 8403 destinationServer_ = value; 8404 onChanged(); 8405 } else { 8406 destinationServerBuilder_.setMessage(value); 8407 } 8408 bitField0_ |= 0x00000008; 8409 return this; 8410 } 8411 /** 8412 * <code>optional .ServerName destination_server = 4;</code> 8413 */ setDestinationServer( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue)8414 public Builder setDestinationServer( 8415 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) { 8416 if (destinationServerBuilder_ == null) { 8417 destinationServer_ = builderForValue.build(); 8418 onChanged(); 8419 } else { 8420 destinationServerBuilder_.setMessage(builderForValue.build()); 8421 } 8422 bitField0_ |= 0x00000008; 8423 return this; 8424 } 8425 /** 8426 * <code>optional .ServerName destination_server = 4;</code> 8427 */ mergeDestinationServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value)8428 public Builder 
mergeDestinationServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) { 8429 if (destinationServerBuilder_ == null) { 8430 if (((bitField0_ & 0x00000008) == 0x00000008) && 8431 destinationServer_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) { 8432 destinationServer_ = 8433 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(destinationServer_).mergeFrom(value).buildPartial(); 8434 } else { 8435 destinationServer_ = value; 8436 } 8437 onChanged(); 8438 } else { 8439 destinationServerBuilder_.mergeFrom(value); 8440 } 8441 bitField0_ |= 0x00000008; 8442 return this; 8443 } 8444 /** 8445 * <code>optional .ServerName destination_server = 4;</code> 8446 */ clearDestinationServer()8447 public Builder clearDestinationServer() { 8448 if (destinationServerBuilder_ == null) { 8449 destinationServer_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance(); 8450 onChanged(); 8451 } else { 8452 destinationServerBuilder_.clear(); 8453 } 8454 bitField0_ = (bitField0_ & ~0x00000008); 8455 return this; 8456 } 8457 /** 8458 * <code>optional .ServerName destination_server = 4;</code> 8459 */ getDestinationServerBuilder()8460 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getDestinationServerBuilder() { 8461 bitField0_ |= 0x00000008; 8462 onChanged(); 8463 return getDestinationServerFieldBuilder().getBuilder(); 8464 } 8465 /** 8466 * <code>optional .ServerName destination_server = 4;</code> 8467 */ getDestinationServerOrBuilder()8468 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getDestinationServerOrBuilder() { 8469 if (destinationServerBuilder_ != null) { 8470 return destinationServerBuilder_.getMessageOrBuilder(); 8471 } else { 8472 return destinationServer_; 8473 } 8474 } 8475 /** 8476 * <code>optional .ServerName destination_server = 4;</code> 8477 */ 8478 private 
com.google.protobuf.SingleFieldBuilder< 8479 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> getDestinationServerFieldBuilder()8480 getDestinationServerFieldBuilder() { 8481 if (destinationServerBuilder_ == null) { 8482 destinationServerBuilder_ = new com.google.protobuf.SingleFieldBuilder< 8483 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>( 8484 destinationServer_, 8485 getParentForChildren(), 8486 isClean()); 8487 destinationServer_ = null; 8488 } 8489 return destinationServerBuilder_; 8490 } 8491 8492 // optional uint64 serverStartCode = 5; 8493 private long serverStartCode_ ; 8494 /** 8495 * <code>optional uint64 serverStartCode = 5;</code> 8496 * 8497 * <pre> 8498 * the intended server for this RPC. 8499 * </pre> 8500 */ hasServerStartCode()8501 public boolean hasServerStartCode() { 8502 return ((bitField0_ & 0x00000010) == 0x00000010); 8503 } 8504 /** 8505 * <code>optional uint64 serverStartCode = 5;</code> 8506 * 8507 * <pre> 8508 * the intended server for this RPC. 8509 * </pre> 8510 */ getServerStartCode()8511 public long getServerStartCode() { 8512 return serverStartCode_; 8513 } 8514 /** 8515 * <code>optional uint64 serverStartCode = 5;</code> 8516 * 8517 * <pre> 8518 * the intended server for this RPC. 8519 * </pre> 8520 */ setServerStartCode(long value)8521 public Builder setServerStartCode(long value) { 8522 bitField0_ |= 0x00000010; 8523 serverStartCode_ = value; 8524 onChanged(); 8525 return this; 8526 } 8527 /** 8528 * <code>optional uint64 serverStartCode = 5;</code> 8529 * 8530 * <pre> 8531 * the intended server for this RPC. 
8532 * </pre> 8533 */ clearServerStartCode()8534 public Builder clearServerStartCode() { 8535 bitField0_ = (bitField0_ & ~0x00000010); 8536 serverStartCode_ = 0L; 8537 onChanged(); 8538 return this; 8539 } 8540 8541 // @@protoc_insertion_point(builder_scope:CloseRegionRequest) 8542 } 8543 8544 static { 8545 defaultInstance = new CloseRegionRequest(true); defaultInstance.initFields()8546 defaultInstance.initFields(); 8547 } 8548 8549 // @@protoc_insertion_point(class_scope:CloseRegionRequest) 8550 } 8551 8552 public interface CloseRegionResponseOrBuilder 8553 extends com.google.protobuf.MessageOrBuilder { 8554 8555 // required bool closed = 1; 8556 /** 8557 * <code>required bool closed = 1;</code> 8558 */ hasClosed()8559 boolean hasClosed(); 8560 /** 8561 * <code>required bool closed = 1;</code> 8562 */ getClosed()8563 boolean getClosed(); 8564 } 8565 /** 8566 * Protobuf type {@code CloseRegionResponse} 8567 */ 8568 public static final class CloseRegionResponse extends 8569 com.google.protobuf.GeneratedMessage 8570 implements CloseRegionResponseOrBuilder { 8571 // Use CloseRegionResponse.newBuilder() to construct. 
CloseRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)8572 private CloseRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 8573 super(builder); 8574 this.unknownFields = builder.getUnknownFields(); 8575 } CloseRegionResponse(boolean noInit)8576 private CloseRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 8577 8578 private static final CloseRegionResponse defaultInstance; getDefaultInstance()8579 public static CloseRegionResponse getDefaultInstance() { 8580 return defaultInstance; 8581 } 8582 getDefaultInstanceForType()8583 public CloseRegionResponse getDefaultInstanceForType() { 8584 return defaultInstance; 8585 } 8586 8587 private final com.google.protobuf.UnknownFieldSet unknownFields; 8588 @java.lang.Override 8589 public final com.google.protobuf.UnknownFieldSet getUnknownFields()8590 getUnknownFields() { 8591 return this.unknownFields; 8592 } CloseRegionResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8593 private CloseRegionResponse( 8594 com.google.protobuf.CodedInputStream input, 8595 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8596 throws com.google.protobuf.InvalidProtocolBufferException { 8597 initFields(); 8598 int mutable_bitField0_ = 0; 8599 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 8600 com.google.protobuf.UnknownFieldSet.newBuilder(); 8601 try { 8602 boolean done = false; 8603 while (!done) { 8604 int tag = input.readTag(); 8605 switch (tag) { 8606 case 0: 8607 done = true; 8608 break; 8609 default: { 8610 if (!parseUnknownField(input, unknownFields, 8611 extensionRegistry, tag)) { 8612 done = true; 8613 } 8614 break; 8615 } 8616 case 8: { 8617 bitField0_ |= 0x00000001; 8618 closed_ = input.readBool(); 8619 break; 8620 } 8621 } 8622 } 8623 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 8624 throw e.setUnfinishedMessage(this); 
8625 } catch (java.io.IOException e) { 8626 throw new com.google.protobuf.InvalidProtocolBufferException( 8627 e.getMessage()).setUnfinishedMessage(this); 8628 } finally { 8629 this.unknownFields = unknownFields.build(); 8630 makeExtensionsImmutable(); 8631 } 8632 } 8633 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()8634 getDescriptor() { 8635 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_descriptor; 8636 } 8637 8638 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()8639 internalGetFieldAccessorTable() { 8640 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_fieldAccessorTable 8641 .ensureFieldAccessorsInitialized( 8642 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.Builder.class); 8643 } 8644 8645 public static com.google.protobuf.Parser<CloseRegionResponse> PARSER = 8646 new com.google.protobuf.AbstractParser<CloseRegionResponse>() { 8647 public CloseRegionResponse parsePartialFrom( 8648 com.google.protobuf.CodedInputStream input, 8649 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8650 throws com.google.protobuf.InvalidProtocolBufferException { 8651 return new CloseRegionResponse(input, extensionRegistry); 8652 } 8653 }; 8654 8655 @java.lang.Override getParserForType()8656 public com.google.protobuf.Parser<CloseRegionResponse> getParserForType() { 8657 return PARSER; 8658 } 8659 8660 private int bitField0_; 8661 // required bool closed = 1; 8662 public static final int CLOSED_FIELD_NUMBER = 1; 8663 private boolean closed_; 8664 /** 8665 * <code>required bool closed = 1;</code> 8666 */ hasClosed()8667 public boolean hasClosed() { 8668 return ((bitField0_ & 0x00000001) == 0x00000001); 8669 } 8670 /** 8671 * <code>required bool closed = 1;</code> 8672 */ 
getClosed()8673 public boolean getClosed() { 8674 return closed_; 8675 } 8676 initFields()8677 private void initFields() { 8678 closed_ = false; 8679 } 8680 private byte memoizedIsInitialized = -1; isInitialized()8681 public final boolean isInitialized() { 8682 byte isInitialized = memoizedIsInitialized; 8683 if (isInitialized != -1) return isInitialized == 1; 8684 8685 if (!hasClosed()) { 8686 memoizedIsInitialized = 0; 8687 return false; 8688 } 8689 memoizedIsInitialized = 1; 8690 return true; 8691 } 8692 writeTo(com.google.protobuf.CodedOutputStream output)8693 public void writeTo(com.google.protobuf.CodedOutputStream output) 8694 throws java.io.IOException { 8695 getSerializedSize(); 8696 if (((bitField0_ & 0x00000001) == 0x00000001)) { 8697 output.writeBool(1, closed_); 8698 } 8699 getUnknownFields().writeTo(output); 8700 } 8701 8702 private int memoizedSerializedSize = -1; getSerializedSize()8703 public int getSerializedSize() { 8704 int size = memoizedSerializedSize; 8705 if (size != -1) return size; 8706 8707 size = 0; 8708 if (((bitField0_ & 0x00000001) == 0x00000001)) { 8709 size += com.google.protobuf.CodedOutputStream 8710 .computeBoolSize(1, closed_); 8711 } 8712 size += getUnknownFields().getSerializedSize(); 8713 memoizedSerializedSize = size; 8714 return size; 8715 } 8716 8717 private static final long serialVersionUID = 0L; 8718 @java.lang.Override writeReplace()8719 protected java.lang.Object writeReplace() 8720 throws java.io.ObjectStreamException { 8721 return super.writeReplace(); 8722 } 8723 8724 @java.lang.Override equals(final java.lang.Object obj)8725 public boolean equals(final java.lang.Object obj) { 8726 if (obj == this) { 8727 return true; 8728 } 8729 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse)) { 8730 return super.equals(obj); 8731 } 8732 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse other = 
(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) obj; 8733 8734 boolean result = true; 8735 result = result && (hasClosed() == other.hasClosed()); 8736 if (hasClosed()) { 8737 result = result && (getClosed() 8738 == other.getClosed()); 8739 } 8740 result = result && 8741 getUnknownFields().equals(other.getUnknownFields()); 8742 return result; 8743 } 8744 8745 private int memoizedHashCode = 0; 8746 @java.lang.Override hashCode()8747 public int hashCode() { 8748 if (memoizedHashCode != 0) { 8749 return memoizedHashCode; 8750 } 8751 int hash = 41; 8752 hash = (19 * hash) + getDescriptorForType().hashCode(); 8753 if (hasClosed()) { 8754 hash = (37 * hash) + CLOSED_FIELD_NUMBER; 8755 hash = (53 * hash) + hashBoolean(getClosed()); 8756 } 8757 hash = (29 * hash) + getUnknownFields().hashCode(); 8758 memoizedHashCode = hash; 8759 return hash; 8760 } 8761 parseFrom( com.google.protobuf.ByteString data)8762 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( 8763 com.google.protobuf.ByteString data) 8764 throws com.google.protobuf.InvalidProtocolBufferException { 8765 return PARSER.parseFrom(data); 8766 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8767 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( 8768 com.google.protobuf.ByteString data, 8769 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8770 throws com.google.protobuf.InvalidProtocolBufferException { 8771 return PARSER.parseFrom(data, extensionRegistry); 8772 } parseFrom(byte[] data)8773 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(byte[] data) 8774 throws com.google.protobuf.InvalidProtocolBufferException { 8775 return PARSER.parseFrom(data); 8776 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8777 public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( 8778 byte[] data, 8779 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8780 throws com.google.protobuf.InvalidProtocolBufferException { 8781 return PARSER.parseFrom(data, extensionRegistry); 8782 } parseFrom(java.io.InputStream input)8783 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom(java.io.InputStream input) 8784 throws java.io.IOException { 8785 return PARSER.parseFrom(input); 8786 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8787 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( 8788 java.io.InputStream input, 8789 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8790 throws java.io.IOException { 8791 return PARSER.parseFrom(input, extensionRegistry); 8792 } parseDelimitedFrom(java.io.InputStream input)8793 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseDelimitedFrom(java.io.InputStream input) 8794 throws java.io.IOException { 8795 return PARSER.parseDelimitedFrom(input); 8796 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8797 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseDelimitedFrom( 8798 java.io.InputStream input, 8799 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8800 throws java.io.IOException { 8801 return PARSER.parseDelimitedFrom(input, extensionRegistry); 8802 } parseFrom( com.google.protobuf.CodedInputStream input)8803 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( 8804 com.google.protobuf.CodedInputStream input) 8805 throws java.io.IOException { 8806 return PARSER.parseFrom(input); 8807 } parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry)8808 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parseFrom( 8809 com.google.protobuf.CodedInputStream input, 8810 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8811 throws java.io.IOException { 8812 return PARSER.parseFrom(input, extensionRegistry); 8813 } 8814 newBuilder()8815 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()8816 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse prototype)8817 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse prototype) { 8818 return newBuilder().mergeFrom(prototype); 8819 } toBuilder()8820 public Builder toBuilder() { return newBuilder(this); } 8821 8822 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)8823 protected Builder newBuilderForType( 8824 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 8825 Builder builder = new Builder(parent); 8826 return builder; 8827 } 8828 /** 8829 * Protobuf type {@code CloseRegionResponse} 8830 */ 8831 public static final class Builder extends 8832 com.google.protobuf.GeneratedMessage.Builder<Builder> 8833 implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponseOrBuilder { 8834 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()8835 getDescriptor() { 8836 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_descriptor; 8837 } 8838 8839 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()8840 internalGetFieldAccessorTable() { 8841 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_fieldAccessorTable 8842 .ensureFieldAccessorsInitialized( 8843 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.Builder.class); 8844 } 8845 8846 // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.newBuilder() Builder()8847 private Builder() { 8848 maybeForceBuilderInitialization(); 8849 } 8850 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)8851 private Builder( 8852 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 8853 super(parent); 8854 maybeForceBuilderInitialization(); 8855 } maybeForceBuilderInitialization()8856 private void maybeForceBuilderInitialization() { 8857 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 8858 } 8859 } create()8860 private static Builder create() { 8861 return new Builder(); 8862 } 8863 clear()8864 public Builder clear() { 8865 super.clear(); 8866 closed_ = false; 8867 bitField0_ = (bitField0_ & ~0x00000001); 8868 return this; 8869 } 8870 clone()8871 public Builder clone() { 8872 return create().mergeFrom(buildPartial()); 8873 } 8874 8875 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()8876 getDescriptorForType() { 8877 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_CloseRegionResponse_descriptor; 8878 } 8879 getDefaultInstanceForType()8880 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse getDefaultInstanceForType() { 8881 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance(); 8882 } 8883 build()8884 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse build() { 8885 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse result = buildPartial(); 8886 if (!result.isInitialized()) { 8887 throw newUninitializedMessageException(result); 8888 } 8889 return result; 8890 } 8891 buildPartial()8892 public 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse buildPartial() { 8893 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse(this); 8894 int from_bitField0_ = bitField0_; 8895 int to_bitField0_ = 0; 8896 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 8897 to_bitField0_ |= 0x00000001; 8898 } 8899 result.closed_ = closed_; 8900 result.bitField0_ = to_bitField0_; 8901 onBuilt(); 8902 return result; 8903 } 8904 mergeFrom(com.google.protobuf.Message other)8905 public Builder mergeFrom(com.google.protobuf.Message other) { 8906 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) { 8907 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse)other); 8908 } else { 8909 super.mergeFrom(other); 8910 return this; 8911 } 8912 } 8913 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse other)8914 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse other) { 8915 if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse.getDefaultInstance()) return this; 8916 if (other.hasClosed()) { 8917 setClosed(other.getClosed()); 8918 } 8919 this.mergeUnknownFields(other.getUnknownFields()); 8920 return this; 8921 } 8922 isInitialized()8923 public final boolean isInitialized() { 8924 if (!hasClosed()) { 8925 8926 return false; 8927 } 8928 return true; 8929 } 8930 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8931 public Builder mergeFrom( 8932 com.google.protobuf.CodedInputStream input, 8933 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8934 throws java.io.IOException { 8935 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse parsedMessage = null; 8936 try { 
8937 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 8938 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 8939 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionResponse) e.getUnfinishedMessage(); 8940 throw e; 8941 } finally { 8942 if (parsedMessage != null) { 8943 mergeFrom(parsedMessage); 8944 } 8945 } 8946 return this; 8947 } 8948 private int bitField0_; 8949 8950 // required bool closed = 1; 8951 private boolean closed_ ; 8952 /** 8953 * <code>required bool closed = 1;</code> 8954 */ hasClosed()8955 public boolean hasClosed() { 8956 return ((bitField0_ & 0x00000001) == 0x00000001); 8957 } 8958 /** 8959 * <code>required bool closed = 1;</code> 8960 */ getClosed()8961 public boolean getClosed() { 8962 return closed_; 8963 } 8964 /** 8965 * <code>required bool closed = 1;</code> 8966 */ setClosed(boolean value)8967 public Builder setClosed(boolean value) { 8968 bitField0_ |= 0x00000001; 8969 closed_ = value; 8970 onChanged(); 8971 return this; 8972 } 8973 /** 8974 * <code>required bool closed = 1;</code> 8975 */ clearClosed()8976 public Builder clearClosed() { 8977 bitField0_ = (bitField0_ & ~0x00000001); 8978 closed_ = false; 8979 onChanged(); 8980 return this; 8981 } 8982 8983 // @@protoc_insertion_point(builder_scope:CloseRegionResponse) 8984 } 8985 8986 static { 8987 defaultInstance = new CloseRegionResponse(true); defaultInstance.initFields()8988 defaultInstance.initFields(); 8989 } 8990 8991 // @@protoc_insertion_point(class_scope:CloseRegionResponse) 8992 } 8993 8994 public interface FlushRegionRequestOrBuilder 8995 extends com.google.protobuf.MessageOrBuilder { 8996 8997 // required .RegionSpecifier region = 1; 8998 /** 8999 * <code>required .RegionSpecifier region = 1;</code> 9000 */ hasRegion()9001 boolean hasRegion(); 9002 /** 9003 * <code>required .RegionSpecifier region = 1;</code> 9004 */ getRegion()9005 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier 
getRegion(); 9006 /** 9007 * <code>required .RegionSpecifier region = 1;</code> 9008 */ getRegionOrBuilder()9009 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); 9010 9011 // optional uint64 if_older_than_ts = 2; 9012 /** 9013 * <code>optional uint64 if_older_than_ts = 2;</code> 9014 */ hasIfOlderThanTs()9015 boolean hasIfOlderThanTs(); 9016 /** 9017 * <code>optional uint64 if_older_than_ts = 2;</code> 9018 */ getIfOlderThanTs()9019 long getIfOlderThanTs(); 9020 9021 // optional bool write_flush_wal_marker = 3; 9022 /** 9023 * <code>optional bool write_flush_wal_marker = 3;</code> 9024 * 9025 * <pre> 9026 * whether to write a marker to WAL even if not flushed 9027 * </pre> 9028 */ hasWriteFlushWalMarker()9029 boolean hasWriteFlushWalMarker(); 9030 /** 9031 * <code>optional bool write_flush_wal_marker = 3;</code> 9032 * 9033 * <pre> 9034 * whether to write a marker to WAL even if not flushed 9035 * </pre> 9036 */ getWriteFlushWalMarker()9037 boolean getWriteFlushWalMarker(); 9038 } 9039 /** 9040 * Protobuf type {@code FlushRegionRequest} 9041 * 9042 * <pre> 9043 ** 9044 * Flushes the MemStore of the specified region. 9045 * <p> 9046 * This method is synchronous. 9047 * </pre> 9048 */ 9049 public static final class FlushRegionRequest extends 9050 com.google.protobuf.GeneratedMessage 9051 implements FlushRegionRequestOrBuilder { 9052 // Use FlushRegionRequest.newBuilder() to construct. 
FlushRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder)9053 private FlushRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 9054 super(builder); 9055 this.unknownFields = builder.getUnknownFields(); 9056 } FlushRegionRequest(boolean noInit)9057 private FlushRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 9058 9059 private static final FlushRegionRequest defaultInstance; getDefaultInstance()9060 public static FlushRegionRequest getDefaultInstance() { 9061 return defaultInstance; 9062 } 9063 getDefaultInstanceForType()9064 public FlushRegionRequest getDefaultInstanceForType() { 9065 return defaultInstance; 9066 } 9067 9068 private final com.google.protobuf.UnknownFieldSet unknownFields; 9069 @java.lang.Override 9070 public final com.google.protobuf.UnknownFieldSet getUnknownFields()9071 getUnknownFields() { 9072 return this.unknownFields; 9073 } FlushRegionRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9074 private FlushRegionRequest( 9075 com.google.protobuf.CodedInputStream input, 9076 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9077 throws com.google.protobuf.InvalidProtocolBufferException { 9078 initFields(); 9079 int mutable_bitField0_ = 0; 9080 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 9081 com.google.protobuf.UnknownFieldSet.newBuilder(); 9082 try { 9083 boolean done = false; 9084 while (!done) { 9085 int tag = input.readTag(); 9086 switch (tag) { 9087 case 0: 9088 done = true; 9089 break; 9090 default: { 9091 if (!parseUnknownField(input, unknownFields, 9092 extensionRegistry, tag)) { 9093 done = true; 9094 } 9095 break; 9096 } 9097 case 10: { 9098 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; 9099 if (((bitField0_ & 0x00000001) == 0x00000001)) { 9100 subBuilder = region_.toBuilder(); 9101 } 9102 region_ 
= input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); 9103 if (subBuilder != null) { 9104 subBuilder.mergeFrom(region_); 9105 region_ = subBuilder.buildPartial(); 9106 } 9107 bitField0_ |= 0x00000001; 9108 break; 9109 } 9110 case 16: { 9111 bitField0_ |= 0x00000002; 9112 ifOlderThanTs_ = input.readUInt64(); 9113 break; 9114 } 9115 case 24: { 9116 bitField0_ |= 0x00000004; 9117 writeFlushWalMarker_ = input.readBool(); 9118 break; 9119 } 9120 } 9121 } 9122 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 9123 throw e.setUnfinishedMessage(this); 9124 } catch (java.io.IOException e) { 9125 throw new com.google.protobuf.InvalidProtocolBufferException( 9126 e.getMessage()).setUnfinishedMessage(this); 9127 } finally { 9128 this.unknownFields = unknownFields.build(); 9129 makeExtensionsImmutable(); 9130 } 9131 } 9132 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()9133 getDescriptor() { 9134 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_descriptor; 9135 } 9136 9137 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()9138 internalGetFieldAccessorTable() { 9139 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_fieldAccessorTable 9140 .ensureFieldAccessorsInitialized( 9141 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.Builder.class); 9142 } 9143 9144 public static com.google.protobuf.Parser<FlushRegionRequest> PARSER = 9145 new com.google.protobuf.AbstractParser<FlushRegionRequest>() { 9146 public FlushRegionRequest parsePartialFrom( 9147 com.google.protobuf.CodedInputStream input, 9148 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9149 throws com.google.protobuf.InvalidProtocolBufferException { 9150 
return new FlushRegionRequest(input, extensionRegistry); 9151 } 9152 }; 9153 9154 @java.lang.Override getParserForType()9155 public com.google.protobuf.Parser<FlushRegionRequest> getParserForType() { 9156 return PARSER; 9157 } 9158 9159 private int bitField0_; 9160 // required .RegionSpecifier region = 1; 9161 public static final int REGION_FIELD_NUMBER = 1; 9162 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; 9163 /** 9164 * <code>required .RegionSpecifier region = 1;</code> 9165 */ hasRegion()9166 public boolean hasRegion() { 9167 return ((bitField0_ & 0x00000001) == 0x00000001); 9168 } 9169 /** 9170 * <code>required .RegionSpecifier region = 1;</code> 9171 */ getRegion()9172 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { 9173 return region_; 9174 } 9175 /** 9176 * <code>required .RegionSpecifier region = 1;</code> 9177 */ getRegionOrBuilder()9178 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { 9179 return region_; 9180 } 9181 9182 // optional uint64 if_older_than_ts = 2; 9183 public static final int IF_OLDER_THAN_TS_FIELD_NUMBER = 2; 9184 private long ifOlderThanTs_; 9185 /** 9186 * <code>optional uint64 if_older_than_ts = 2;</code> 9187 */ hasIfOlderThanTs()9188 public boolean hasIfOlderThanTs() { 9189 return ((bitField0_ & 0x00000002) == 0x00000002); 9190 } 9191 /** 9192 * <code>optional uint64 if_older_than_ts = 2;</code> 9193 */ getIfOlderThanTs()9194 public long getIfOlderThanTs() { 9195 return ifOlderThanTs_; 9196 } 9197 9198 // optional bool write_flush_wal_marker = 3; 9199 public static final int WRITE_FLUSH_WAL_MARKER_FIELD_NUMBER = 3; 9200 private boolean writeFlushWalMarker_; 9201 /** 9202 * <code>optional bool write_flush_wal_marker = 3;</code> 9203 * 9204 * <pre> 9205 * whether to write a marker to WAL even if not flushed 9206 * </pre> 9207 */ hasWriteFlushWalMarker()9208 public boolean 
hasWriteFlushWalMarker() { 9209 return ((bitField0_ & 0x00000004) == 0x00000004); 9210 } 9211 /** 9212 * <code>optional bool write_flush_wal_marker = 3;</code> 9213 * 9214 * <pre> 9215 * whether to write a marker to WAL even if not flushed 9216 * </pre> 9217 */ getWriteFlushWalMarker()9218 public boolean getWriteFlushWalMarker() { 9219 return writeFlushWalMarker_; 9220 } 9221 initFields()9222 private void initFields() { 9223 region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); 9224 ifOlderThanTs_ = 0L; 9225 writeFlushWalMarker_ = false; 9226 } 9227 private byte memoizedIsInitialized = -1; isInitialized()9228 public final boolean isInitialized() { 9229 byte isInitialized = memoizedIsInitialized; 9230 if (isInitialized != -1) return isInitialized == 1; 9231 9232 if (!hasRegion()) { 9233 memoizedIsInitialized = 0; 9234 return false; 9235 } 9236 if (!getRegion().isInitialized()) { 9237 memoizedIsInitialized = 0; 9238 return false; 9239 } 9240 memoizedIsInitialized = 1; 9241 return true; 9242 } 9243 writeTo(com.google.protobuf.CodedOutputStream output)9244 public void writeTo(com.google.protobuf.CodedOutputStream output) 9245 throws java.io.IOException { 9246 getSerializedSize(); 9247 if (((bitField0_ & 0x00000001) == 0x00000001)) { 9248 output.writeMessage(1, region_); 9249 } 9250 if (((bitField0_ & 0x00000002) == 0x00000002)) { 9251 output.writeUInt64(2, ifOlderThanTs_); 9252 } 9253 if (((bitField0_ & 0x00000004) == 0x00000004)) { 9254 output.writeBool(3, writeFlushWalMarker_); 9255 } 9256 getUnknownFields().writeTo(output); 9257 } 9258 9259 private int memoizedSerializedSize = -1; getSerializedSize()9260 public int getSerializedSize() { 9261 int size = memoizedSerializedSize; 9262 if (size != -1) return size; 9263 9264 size = 0; 9265 if (((bitField0_ & 0x00000001) == 0x00000001)) { 9266 size += com.google.protobuf.CodedOutputStream 9267 .computeMessageSize(1, region_); 9268 } 9269 if (((bitField0_ & 0x00000002) == 
0x00000002)) { 9270 size += com.google.protobuf.CodedOutputStream 9271 .computeUInt64Size(2, ifOlderThanTs_); 9272 } 9273 if (((bitField0_ & 0x00000004) == 0x00000004)) { 9274 size += com.google.protobuf.CodedOutputStream 9275 .computeBoolSize(3, writeFlushWalMarker_); 9276 } 9277 size += getUnknownFields().getSerializedSize(); 9278 memoizedSerializedSize = size; 9279 return size; 9280 } 9281 9282 private static final long serialVersionUID = 0L; 9283 @java.lang.Override writeReplace()9284 protected java.lang.Object writeReplace() 9285 throws java.io.ObjectStreamException { 9286 return super.writeReplace(); 9287 } 9288 9289 @java.lang.Override equals(final java.lang.Object obj)9290 public boolean equals(final java.lang.Object obj) { 9291 if (obj == this) { 9292 return true; 9293 } 9294 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)) { 9295 return super.equals(obj); 9296 } 9297 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest) obj; 9298 9299 boolean result = true; 9300 result = result && (hasRegion() == other.hasRegion()); 9301 if (hasRegion()) { 9302 result = result && getRegion() 9303 .equals(other.getRegion()); 9304 } 9305 result = result && (hasIfOlderThanTs() == other.hasIfOlderThanTs()); 9306 if (hasIfOlderThanTs()) { 9307 result = result && (getIfOlderThanTs() 9308 == other.getIfOlderThanTs()); 9309 } 9310 result = result && (hasWriteFlushWalMarker() == other.hasWriteFlushWalMarker()); 9311 if (hasWriteFlushWalMarker()) { 9312 result = result && (getWriteFlushWalMarker() 9313 == other.getWriteFlushWalMarker()); 9314 } 9315 result = result && 9316 getUnknownFields().equals(other.getUnknownFields()); 9317 return result; 9318 } 9319 9320 private int memoizedHashCode = 0; 9321 @java.lang.Override hashCode()9322 public int hashCode() { 9323 if (memoizedHashCode != 0) { 9324 return memoizedHashCode; 9325 } 9326 
int hash = 41; 9327 hash = (19 * hash) + getDescriptorForType().hashCode(); 9328 if (hasRegion()) { 9329 hash = (37 * hash) + REGION_FIELD_NUMBER; 9330 hash = (53 * hash) + getRegion().hashCode(); 9331 } 9332 if (hasIfOlderThanTs()) { 9333 hash = (37 * hash) + IF_OLDER_THAN_TS_FIELD_NUMBER; 9334 hash = (53 * hash) + hashLong(getIfOlderThanTs()); 9335 } 9336 if (hasWriteFlushWalMarker()) { 9337 hash = (37 * hash) + WRITE_FLUSH_WAL_MARKER_FIELD_NUMBER; 9338 hash = (53 * hash) + hashBoolean(getWriteFlushWalMarker()); 9339 } 9340 hash = (29 * hash) + getUnknownFields().hashCode(); 9341 memoizedHashCode = hash; 9342 return hash; 9343 } 9344 parseFrom( com.google.protobuf.ByteString data)9345 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( 9346 com.google.protobuf.ByteString data) 9347 throws com.google.protobuf.InvalidProtocolBufferException { 9348 return PARSER.parseFrom(data); 9349 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9350 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( 9351 com.google.protobuf.ByteString data, 9352 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9353 throws com.google.protobuf.InvalidProtocolBufferException { 9354 return PARSER.parseFrom(data, extensionRegistry); 9355 } parseFrom(byte[] data)9356 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(byte[] data) 9357 throws com.google.protobuf.InvalidProtocolBufferException { 9358 return PARSER.parseFrom(data); 9359 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9360 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( 9361 byte[] data, 9362 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9363 throws com.google.protobuf.InvalidProtocolBufferException { 9364 return 
PARSER.parseFrom(data, extensionRegistry); 9365 } parseFrom(java.io.InputStream input)9366 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom(java.io.InputStream input) 9367 throws java.io.IOException { 9368 return PARSER.parseFrom(input); 9369 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9370 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( 9371 java.io.InputStream input, 9372 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9373 throws java.io.IOException { 9374 return PARSER.parseFrom(input, extensionRegistry); 9375 } parseDelimitedFrom(java.io.InputStream input)9376 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseDelimitedFrom(java.io.InputStream input) 9377 throws java.io.IOException { 9378 return PARSER.parseDelimitedFrom(input); 9379 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9380 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseDelimitedFrom( 9381 java.io.InputStream input, 9382 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9383 throws java.io.IOException { 9384 return PARSER.parseDelimitedFrom(input, extensionRegistry); 9385 } parseFrom( com.google.protobuf.CodedInputStream input)9386 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( 9387 com.google.protobuf.CodedInputStream input) 9388 throws java.io.IOException { 9389 return PARSER.parseFrom(input); 9390 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9391 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parseFrom( 9392 com.google.protobuf.CodedInputStream input, 9393 com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) 9394 throws java.io.IOException { 9395 return PARSER.parseFrom(input, extensionRegistry); 9396 } 9397 newBuilder()9398 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()9399 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest prototype)9400 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest prototype) { 9401 return newBuilder().mergeFrom(prototype); 9402 } toBuilder()9403 public Builder toBuilder() { return newBuilder(this); } 9404 9405 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)9406 protected Builder newBuilderForType( 9407 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 9408 Builder builder = new Builder(parent); 9409 return builder; 9410 } 9411 /** 9412 * Protobuf type {@code FlushRegionRequest} 9413 * 9414 * <pre> 9415 ** 9416 * Flushes the MemStore of the specified region. 9417 * <p> 9418 * This method is synchronous. 
9419 * </pre> 9420 */ 9421 public static final class Builder extends 9422 com.google.protobuf.GeneratedMessage.Builder<Builder> 9423 implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequestOrBuilder { 9424 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()9425 getDescriptor() { 9426 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_descriptor; 9427 } 9428 9429 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()9430 internalGetFieldAccessorTable() { 9431 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_fieldAccessorTable 9432 .ensureFieldAccessorsInitialized( 9433 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.Builder.class); 9434 } 9435 9436 // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.newBuilder() Builder()9437 private Builder() { 9438 maybeForceBuilderInitialization(); 9439 } 9440 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)9441 private Builder( 9442 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 9443 super(parent); 9444 maybeForceBuilderInitialization(); 9445 } maybeForceBuilderInitialization()9446 private void maybeForceBuilderInitialization() { 9447 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 9448 getRegionFieldBuilder(); 9449 } 9450 } create()9451 private static Builder create() { 9452 return new Builder(); 9453 } 9454 clear()9455 public Builder clear() { 9456 super.clear(); 9457 if (regionBuilder_ == null) { 9458 region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); 9459 } else { 9460 regionBuilder_.clear(); 9461 } 9462 bitField0_ = (bitField0_ & ~0x00000001); 9463 ifOlderThanTs_ = 0L; 9464 bitField0_ = 
(bitField0_ & ~0x00000002); 9465 writeFlushWalMarker_ = false; 9466 bitField0_ = (bitField0_ & ~0x00000004); 9467 return this; 9468 } 9469 clone()9470 public Builder clone() { 9471 return create().mergeFrom(buildPartial()); 9472 } 9473 9474 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()9475 getDescriptorForType() { 9476 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionRequest_descriptor; 9477 } 9478 getDefaultInstanceForType()9479 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest getDefaultInstanceForType() { 9480 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance(); 9481 } 9482 build()9483 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest build() { 9484 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest result = buildPartial(); 9485 if (!result.isInitialized()) { 9486 throw newUninitializedMessageException(result); 9487 } 9488 return result; 9489 } 9490 buildPartial()9491 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest buildPartial() { 9492 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest(this); 9493 int from_bitField0_ = bitField0_; 9494 int to_bitField0_ = 0; 9495 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 9496 to_bitField0_ |= 0x00000001; 9497 } 9498 if (regionBuilder_ == null) { 9499 result.region_ = region_; 9500 } else { 9501 result.region_ = regionBuilder_.build(); 9502 } 9503 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 9504 to_bitField0_ |= 0x00000002; 9505 } 9506 result.ifOlderThanTs_ = ifOlderThanTs_; 9507 if (((from_bitField0_ & 0x00000004) == 0x00000004)) { 9508 to_bitField0_ |= 0x00000004; 9509 } 9510 result.writeFlushWalMarker_ = writeFlushWalMarker_; 9511 result.bitField0_ = to_bitField0_; 
9512 onBuilt(); 9513 return result; 9514 } 9515 mergeFrom(com.google.protobuf.Message other)9516 public Builder mergeFrom(com.google.protobuf.Message other) { 9517 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest) { 9518 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest)other); 9519 } else { 9520 super.mergeFrom(other); 9521 return this; 9522 } 9523 } 9524 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest other)9525 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest other) { 9526 if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest.getDefaultInstance()) return this; 9527 if (other.hasRegion()) { 9528 mergeRegion(other.getRegion()); 9529 } 9530 if (other.hasIfOlderThanTs()) { 9531 setIfOlderThanTs(other.getIfOlderThanTs()); 9532 } 9533 if (other.hasWriteFlushWalMarker()) { 9534 setWriteFlushWalMarker(other.getWriteFlushWalMarker()); 9535 } 9536 this.mergeUnknownFields(other.getUnknownFields()); 9537 return this; 9538 } 9539 isInitialized()9540 public final boolean isInitialized() { 9541 if (!hasRegion()) { 9542 9543 return false; 9544 } 9545 if (!getRegion().isInitialized()) { 9546 9547 return false; 9548 } 9549 return true; 9550 } 9551 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9552 public Builder mergeFrom( 9553 com.google.protobuf.CodedInputStream input, 9554 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9555 throws java.io.IOException { 9556 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest parsedMessage = null; 9557 try { 9558 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 9559 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 9560 parsedMessage = 
(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionRequest) e.getUnfinishedMessage(); 9561 throw e; 9562 } finally { 9563 if (parsedMessage != null) { 9564 mergeFrom(parsedMessage); 9565 } 9566 } 9567 return this; 9568 } 9569 private int bitField0_; 9570 9571 // required .RegionSpecifier region = 1; 9572 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); 9573 private com.google.protobuf.SingleFieldBuilder< 9574 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_; 9575 /** 9576 * <code>required .RegionSpecifier region = 1;</code> 9577 */ hasRegion()9578 public boolean hasRegion() { 9579 return ((bitField0_ & 0x00000001) == 0x00000001); 9580 } 9581 /** 9582 * <code>required .RegionSpecifier region = 1;</code> 9583 */ getRegion()9584 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { 9585 if (regionBuilder_ == null) { 9586 return region_; 9587 } else { 9588 return regionBuilder_.getMessage(); 9589 } 9590 } 9591 /** 9592 * <code>required .RegionSpecifier region = 1;</code> 9593 */ setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value)9594 public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { 9595 if (regionBuilder_ == null) { 9596 if (value == null) { 9597 throw new NullPointerException(); 9598 } 9599 region_ = value; 9600 onChanged(); 9601 } else { 9602 regionBuilder_.setMessage(value); 9603 } 9604 bitField0_ |= 0x00000001; 9605 return this; 9606 } 9607 /** 9608 * <code>required .RegionSpecifier region = 1;</code> 9609 */ setRegion( 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue)9610 public Builder setRegion( 9611 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) { 9612 if (regionBuilder_ == null) { 9613 region_ = builderForValue.build(); 9614 onChanged(); 9615 } else { 9616 regionBuilder_.setMessage(builderForValue.build()); 9617 } 9618 bitField0_ |= 0x00000001; 9619 return this; 9620 } 9621 /** 9622 * <code>required .RegionSpecifier region = 1;</code> 9623 */ mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value)9624 public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) { 9625 if (regionBuilder_ == null) { 9626 if (((bitField0_ & 0x00000001) == 0x00000001) && 9627 region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) { 9628 region_ = 9629 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial(); 9630 } else { 9631 region_ = value; 9632 } 9633 onChanged(); 9634 } else { 9635 regionBuilder_.mergeFrom(value); 9636 } 9637 bitField0_ |= 0x00000001; 9638 return this; 9639 } 9640 /** 9641 * <code>required .RegionSpecifier region = 1;</code> 9642 */ clearRegion()9643 public Builder clearRegion() { 9644 if (regionBuilder_ == null) { 9645 region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); 9646 onChanged(); 9647 } else { 9648 regionBuilder_.clear(); 9649 } 9650 bitField0_ = (bitField0_ & ~0x00000001); 9651 return this; 9652 } 9653 /** 9654 * <code>required .RegionSpecifier region = 1;</code> 9655 */ getRegionBuilder()9656 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() { 9657 bitField0_ |= 0x00000001; 9658 onChanged(); 9659 return getRegionFieldBuilder().getBuilder(); 9660 } 9661 /** 9662 * <code>required 
.RegionSpecifier region = 1;</code> 9663 */ getRegionOrBuilder()9664 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { 9665 if (regionBuilder_ != null) { 9666 return regionBuilder_.getMessageOrBuilder(); 9667 } else { 9668 return region_; 9669 } 9670 } 9671 /** 9672 * <code>required .RegionSpecifier region = 1;</code> 9673 */ 9674 private com.google.protobuf.SingleFieldBuilder< 9675 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> getRegionFieldBuilder()9676 getRegionFieldBuilder() { 9677 if (regionBuilder_ == null) { 9678 regionBuilder_ = new com.google.protobuf.SingleFieldBuilder< 9679 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>( 9680 region_, 9681 getParentForChildren(), 9682 isClean()); 9683 region_ = null; 9684 } 9685 return regionBuilder_; 9686 } 9687 9688 // optional uint64 if_older_than_ts = 2; 9689 private long ifOlderThanTs_ ; 9690 /** 9691 * <code>optional uint64 if_older_than_ts = 2;</code> 9692 */ hasIfOlderThanTs()9693 public boolean hasIfOlderThanTs() { 9694 return ((bitField0_ & 0x00000002) == 0x00000002); 9695 } 9696 /** 9697 * <code>optional uint64 if_older_than_ts = 2;</code> 9698 */ getIfOlderThanTs()9699 public long getIfOlderThanTs() { 9700 return ifOlderThanTs_; 9701 } 9702 /** 9703 * <code>optional uint64 if_older_than_ts = 2;</code> 9704 */ setIfOlderThanTs(long value)9705 public Builder setIfOlderThanTs(long value) { 9706 bitField0_ |= 0x00000002; 9707 ifOlderThanTs_ = value; 9708 onChanged(); 9709 return this; 9710 } 9711 /** 9712 * <code>optional uint64 if_older_than_ts = 2;</code> 9713 */ 
clearIfOlderThanTs()9714 public Builder clearIfOlderThanTs() { 9715 bitField0_ = (bitField0_ & ~0x00000002); 9716 ifOlderThanTs_ = 0L; 9717 onChanged(); 9718 return this; 9719 } 9720 9721 // optional bool write_flush_wal_marker = 3; 9722 private boolean writeFlushWalMarker_ ; 9723 /** 9724 * <code>optional bool write_flush_wal_marker = 3;</code> 9725 * 9726 * <pre> 9727 * whether to write a marker to WAL even if not flushed 9728 * </pre> 9729 */ hasWriteFlushWalMarker()9730 public boolean hasWriteFlushWalMarker() { 9731 return ((bitField0_ & 0x00000004) == 0x00000004); 9732 } 9733 /** 9734 * <code>optional bool write_flush_wal_marker = 3;</code> 9735 * 9736 * <pre> 9737 * whether to write a marker to WAL even if not flushed 9738 * </pre> 9739 */ getWriteFlushWalMarker()9740 public boolean getWriteFlushWalMarker() { 9741 return writeFlushWalMarker_; 9742 } 9743 /** 9744 * <code>optional bool write_flush_wal_marker = 3;</code> 9745 * 9746 * <pre> 9747 * whether to write a marker to WAL even if not flushed 9748 * </pre> 9749 */ setWriteFlushWalMarker(boolean value)9750 public Builder setWriteFlushWalMarker(boolean value) { 9751 bitField0_ |= 0x00000004; 9752 writeFlushWalMarker_ = value; 9753 onChanged(); 9754 return this; 9755 } 9756 /** 9757 * <code>optional bool write_flush_wal_marker = 3;</code> 9758 * 9759 * <pre> 9760 * whether to write a marker to WAL even if not flushed 9761 * </pre> 9762 */ clearWriteFlushWalMarker()9763 public Builder clearWriteFlushWalMarker() { 9764 bitField0_ = (bitField0_ & ~0x00000004); 9765 writeFlushWalMarker_ = false; 9766 onChanged(); 9767 return this; 9768 } 9769 9770 // @@protoc_insertion_point(builder_scope:FlushRegionRequest) 9771 } 9772 9773 static { 9774 defaultInstance = new FlushRegionRequest(true); defaultInstance.initFields()9775 defaultInstance.initFields(); 9776 } 9777 9778 // @@protoc_insertion_point(class_scope:FlushRegionRequest) 9779 } 9780 9781 public interface FlushRegionResponseOrBuilder 9782 extends 
com.google.protobuf.MessageOrBuilder { 9783 9784 // required uint64 last_flush_time = 1; 9785 /** 9786 * <code>required uint64 last_flush_time = 1;</code> 9787 */ hasLastFlushTime()9788 boolean hasLastFlushTime(); 9789 /** 9790 * <code>required uint64 last_flush_time = 1;</code> 9791 */ getLastFlushTime()9792 long getLastFlushTime(); 9793 9794 // optional bool flushed = 2; 9795 /** 9796 * <code>optional bool flushed = 2;</code> 9797 */ hasFlushed()9798 boolean hasFlushed(); 9799 /** 9800 * <code>optional bool flushed = 2;</code> 9801 */ getFlushed()9802 boolean getFlushed(); 9803 9804 // optional bool wrote_flush_wal_marker = 3; 9805 /** 9806 * <code>optional bool wrote_flush_wal_marker = 3;</code> 9807 */ hasWroteFlushWalMarker()9808 boolean hasWroteFlushWalMarker(); 9809 /** 9810 * <code>optional bool wrote_flush_wal_marker = 3;</code> 9811 */ getWroteFlushWalMarker()9812 boolean getWroteFlushWalMarker(); 9813 } 9814 /** 9815 * Protobuf type {@code FlushRegionResponse} 9816 */ 9817 public static final class FlushRegionResponse extends 9818 com.google.protobuf.GeneratedMessage 9819 implements FlushRegionResponseOrBuilder { 9820 // Use FlushRegionResponse.newBuilder() to construct. 
FlushRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder)9821 private FlushRegionResponse(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 9822 super(builder); 9823 this.unknownFields = builder.getUnknownFields(); 9824 } FlushRegionResponse(boolean noInit)9825 private FlushRegionResponse(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 9826 9827 private static final FlushRegionResponse defaultInstance; getDefaultInstance()9828 public static FlushRegionResponse getDefaultInstance() { 9829 return defaultInstance; 9830 } 9831 getDefaultInstanceForType()9832 public FlushRegionResponse getDefaultInstanceForType() { 9833 return defaultInstance; 9834 } 9835 9836 private final com.google.protobuf.UnknownFieldSet unknownFields; 9837 @java.lang.Override 9838 public final com.google.protobuf.UnknownFieldSet getUnknownFields()9839 getUnknownFields() { 9840 return this.unknownFields; 9841 } FlushRegionResponse( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)9842 private FlushRegionResponse( 9843 com.google.protobuf.CodedInputStream input, 9844 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9845 throws com.google.protobuf.InvalidProtocolBufferException { 9846 initFields(); 9847 int mutable_bitField0_ = 0; 9848 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 9849 com.google.protobuf.UnknownFieldSet.newBuilder(); 9850 try { 9851 boolean done = false; 9852 while (!done) { 9853 int tag = input.readTag(); 9854 switch (tag) { 9855 case 0: 9856 done = true; 9857 break; 9858 default: { 9859 if (!parseUnknownField(input, unknownFields, 9860 extensionRegistry, tag)) { 9861 done = true; 9862 } 9863 break; 9864 } 9865 case 8: { 9866 bitField0_ |= 0x00000001; 9867 lastFlushTime_ = input.readUInt64(); 9868 break; 9869 } 9870 case 16: { 9871 bitField0_ |= 0x00000002; 9872 flushed_ = input.readBool(); 9873 break; 9874 } 9875 case 24: { 
9876 bitField0_ |= 0x00000004; 9877 wroteFlushWalMarker_ = input.readBool(); 9878 break; 9879 } 9880 } 9881 } 9882 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 9883 throw e.setUnfinishedMessage(this); 9884 } catch (java.io.IOException e) { 9885 throw new com.google.protobuf.InvalidProtocolBufferException( 9886 e.getMessage()).setUnfinishedMessage(this); 9887 } finally { 9888 this.unknownFields = unknownFields.build(); 9889 makeExtensionsImmutable(); 9890 } 9891 } 9892 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()9893 getDescriptor() { 9894 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_descriptor; 9895 } 9896 9897 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()9898 internalGetFieldAccessorTable() { 9899 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_fieldAccessorTable 9900 .ensureFieldAccessorsInitialized( 9901 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.Builder.class); 9902 } 9903 9904 public static com.google.protobuf.Parser<FlushRegionResponse> PARSER = 9905 new com.google.protobuf.AbstractParser<FlushRegionResponse>() { 9906 public FlushRegionResponse parsePartialFrom( 9907 com.google.protobuf.CodedInputStream input, 9908 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 9909 throws com.google.protobuf.InvalidProtocolBufferException { 9910 return new FlushRegionResponse(input, extensionRegistry); 9911 } 9912 }; 9913 9914 @java.lang.Override getParserForType()9915 public com.google.protobuf.Parser<FlushRegionResponse> getParserForType() { 9916 return PARSER; 9917 } 9918 9919 private int bitField0_; 9920 // required uint64 last_flush_time = 1; 9921 public static final int LAST_FLUSH_TIME_FIELD_NUMBER = 1; 9922 private long lastFlushTime_; 
9923 /** 9924 * <code>required uint64 last_flush_time = 1;</code> 9925 */ hasLastFlushTime()9926 public boolean hasLastFlushTime() { 9927 return ((bitField0_ & 0x00000001) == 0x00000001); 9928 } 9929 /** 9930 * <code>required uint64 last_flush_time = 1;</code> 9931 */ getLastFlushTime()9932 public long getLastFlushTime() { 9933 return lastFlushTime_; 9934 } 9935 9936 // optional bool flushed = 2; 9937 public static final int FLUSHED_FIELD_NUMBER = 2; 9938 private boolean flushed_; 9939 /** 9940 * <code>optional bool flushed = 2;</code> 9941 */ hasFlushed()9942 public boolean hasFlushed() { 9943 return ((bitField0_ & 0x00000002) == 0x00000002); 9944 } 9945 /** 9946 * <code>optional bool flushed = 2;</code> 9947 */ getFlushed()9948 public boolean getFlushed() { 9949 return flushed_; 9950 } 9951 9952 // optional bool wrote_flush_wal_marker = 3; 9953 public static final int WROTE_FLUSH_WAL_MARKER_FIELD_NUMBER = 3; 9954 private boolean wroteFlushWalMarker_; 9955 /** 9956 * <code>optional bool wrote_flush_wal_marker = 3;</code> 9957 */ hasWroteFlushWalMarker()9958 public boolean hasWroteFlushWalMarker() { 9959 return ((bitField0_ & 0x00000004) == 0x00000004); 9960 } 9961 /** 9962 * <code>optional bool wrote_flush_wal_marker = 3;</code> 9963 */ getWroteFlushWalMarker()9964 public boolean getWroteFlushWalMarker() { 9965 return wroteFlushWalMarker_; 9966 } 9967 initFields()9968 private void initFields() { 9969 lastFlushTime_ = 0L; 9970 flushed_ = false; 9971 wroteFlushWalMarker_ = false; 9972 } 9973 private byte memoizedIsInitialized = -1; isInitialized()9974 public final boolean isInitialized() { 9975 byte isInitialized = memoizedIsInitialized; 9976 if (isInitialized != -1) return isInitialized == 1; 9977 9978 if (!hasLastFlushTime()) { 9979 memoizedIsInitialized = 0; 9980 return false; 9981 } 9982 memoizedIsInitialized = 1; 9983 return true; 9984 } 9985 writeTo(com.google.protobuf.CodedOutputStream output)9986 public void writeTo(com.google.protobuf.CodedOutputStream 
output) 9987 throws java.io.IOException { 9988 getSerializedSize(); 9989 if (((bitField0_ & 0x00000001) == 0x00000001)) { 9990 output.writeUInt64(1, lastFlushTime_); 9991 } 9992 if (((bitField0_ & 0x00000002) == 0x00000002)) { 9993 output.writeBool(2, flushed_); 9994 } 9995 if (((bitField0_ & 0x00000004) == 0x00000004)) { 9996 output.writeBool(3, wroteFlushWalMarker_); 9997 } 9998 getUnknownFields().writeTo(output); 9999 } 10000 10001 private int memoizedSerializedSize = -1; getSerializedSize()10002 public int getSerializedSize() { 10003 int size = memoizedSerializedSize; 10004 if (size != -1) return size; 10005 10006 size = 0; 10007 if (((bitField0_ & 0x00000001) == 0x00000001)) { 10008 size += com.google.protobuf.CodedOutputStream 10009 .computeUInt64Size(1, lastFlushTime_); 10010 } 10011 if (((bitField0_ & 0x00000002) == 0x00000002)) { 10012 size += com.google.protobuf.CodedOutputStream 10013 .computeBoolSize(2, flushed_); 10014 } 10015 if (((bitField0_ & 0x00000004) == 0x00000004)) { 10016 size += com.google.protobuf.CodedOutputStream 10017 .computeBoolSize(3, wroteFlushWalMarker_); 10018 } 10019 size += getUnknownFields().getSerializedSize(); 10020 memoizedSerializedSize = size; 10021 return size; 10022 } 10023 10024 private static final long serialVersionUID = 0L; 10025 @java.lang.Override writeReplace()10026 protected java.lang.Object writeReplace() 10027 throws java.io.ObjectStreamException { 10028 return super.writeReplace(); 10029 } 10030 10031 @java.lang.Override equals(final java.lang.Object obj)10032 public boolean equals(final java.lang.Object obj) { 10033 if (obj == this) { 10034 return true; 10035 } 10036 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse)) { 10037 return super.equals(obj); 10038 } 10039 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) obj; 10040 10041 boolean result = true; 
10042 result = result && (hasLastFlushTime() == other.hasLastFlushTime()); 10043 if (hasLastFlushTime()) { 10044 result = result && (getLastFlushTime() 10045 == other.getLastFlushTime()); 10046 } 10047 result = result && (hasFlushed() == other.hasFlushed()); 10048 if (hasFlushed()) { 10049 result = result && (getFlushed() 10050 == other.getFlushed()); 10051 } 10052 result = result && (hasWroteFlushWalMarker() == other.hasWroteFlushWalMarker()); 10053 if (hasWroteFlushWalMarker()) { 10054 result = result && (getWroteFlushWalMarker() 10055 == other.getWroteFlushWalMarker()); 10056 } 10057 result = result && 10058 getUnknownFields().equals(other.getUnknownFields()); 10059 return result; 10060 } 10061 10062 private int memoizedHashCode = 0; 10063 @java.lang.Override hashCode()10064 public int hashCode() { 10065 if (memoizedHashCode != 0) { 10066 return memoizedHashCode; 10067 } 10068 int hash = 41; 10069 hash = (19 * hash) + getDescriptorForType().hashCode(); 10070 if (hasLastFlushTime()) { 10071 hash = (37 * hash) + LAST_FLUSH_TIME_FIELD_NUMBER; 10072 hash = (53 * hash) + hashLong(getLastFlushTime()); 10073 } 10074 if (hasFlushed()) { 10075 hash = (37 * hash) + FLUSHED_FIELD_NUMBER; 10076 hash = (53 * hash) + hashBoolean(getFlushed()); 10077 } 10078 if (hasWroteFlushWalMarker()) { 10079 hash = (37 * hash) + WROTE_FLUSH_WAL_MARKER_FIELD_NUMBER; 10080 hash = (53 * hash) + hashBoolean(getWroteFlushWalMarker()); 10081 } 10082 hash = (29 * hash) + getUnknownFields().hashCode(); 10083 memoizedHashCode = hash; 10084 return hash; 10085 } 10086 parseFrom( com.google.protobuf.ByteString data)10087 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( 10088 com.google.protobuf.ByteString data) 10089 throws com.google.protobuf.InvalidProtocolBufferException { 10090 return PARSER.parseFrom(data); 10091 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10092 public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( 10093 com.google.protobuf.ByteString data, 10094 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10095 throws com.google.protobuf.InvalidProtocolBufferException { 10096 return PARSER.parseFrom(data, extensionRegistry); 10097 } parseFrom(byte[] data)10098 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(byte[] data) 10099 throws com.google.protobuf.InvalidProtocolBufferException { 10100 return PARSER.parseFrom(data); 10101 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10102 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( 10103 byte[] data, 10104 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10105 throws com.google.protobuf.InvalidProtocolBufferException { 10106 return PARSER.parseFrom(data, extensionRegistry); 10107 } parseFrom(java.io.InputStream input)10108 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom(java.io.InputStream input) 10109 throws java.io.IOException { 10110 return PARSER.parseFrom(input); 10111 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10112 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( 10113 java.io.InputStream input, 10114 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10115 throws java.io.IOException { 10116 return PARSER.parseFrom(input, extensionRegistry); 10117 } parseDelimitedFrom(java.io.InputStream input)10118 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseDelimitedFrom(java.io.InputStream input) 10119 throws java.io.IOException { 10120 return PARSER.parseDelimitedFrom(input); 10121 } parseDelimitedFrom( java.io.InputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry)10122 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseDelimitedFrom( 10123 java.io.InputStream input, 10124 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10125 throws java.io.IOException { 10126 return PARSER.parseDelimitedFrom(input, extensionRegistry); 10127 } parseFrom( com.google.protobuf.CodedInputStream input)10128 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( 10129 com.google.protobuf.CodedInputStream input) 10130 throws java.io.IOException { 10131 return PARSER.parseFrom(input); 10132 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10133 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parseFrom( 10134 com.google.protobuf.CodedInputStream input, 10135 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10136 throws java.io.IOException { 10137 return PARSER.parseFrom(input, extensionRegistry); 10138 } 10139 newBuilder()10140 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()10141 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse prototype)10142 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse prototype) { 10143 return newBuilder().mergeFrom(prototype); 10144 } toBuilder()10145 public Builder toBuilder() { return newBuilder(this); } 10146 10147 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)10148 protected Builder newBuilderForType( 10149 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 10150 Builder builder = new Builder(parent); 10151 return builder; 10152 } 10153 /** 10154 * Protobuf type {@code FlushRegionResponse} 10155 
*/ 10156 public static final class Builder extends 10157 com.google.protobuf.GeneratedMessage.Builder<Builder> 10158 implements org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponseOrBuilder { 10159 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()10160 getDescriptor() { 10161 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_descriptor; 10162 } 10163 10164 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()10165 internalGetFieldAccessorTable() { 10166 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_fieldAccessorTable 10167 .ensureFieldAccessorsInitialized( 10168 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.Builder.class); 10169 } 10170 10171 // Construct using org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.newBuilder() Builder()10172 private Builder() { 10173 maybeForceBuilderInitialization(); 10174 } 10175 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)10176 private Builder( 10177 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 10178 super(parent); 10179 maybeForceBuilderInitialization(); 10180 } maybeForceBuilderInitialization()10181 private void maybeForceBuilderInitialization() { 10182 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 10183 } 10184 } create()10185 private static Builder create() { 10186 return new Builder(); 10187 } 10188 clear()10189 public Builder clear() { 10190 super.clear(); 10191 lastFlushTime_ = 0L; 10192 bitField0_ = (bitField0_ & ~0x00000001); 10193 flushed_ = false; 10194 bitField0_ = (bitField0_ & ~0x00000002); 10195 wroteFlushWalMarker_ = false; 10196 bitField0_ = (bitField0_ & ~0x00000004); 10197 return this; 10198 } 10199 clone()10200 public Builder 
clone() { 10201 return create().mergeFrom(buildPartial()); 10202 } 10203 10204 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()10205 getDescriptorForType() { 10206 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_FlushRegionResponse_descriptor; 10207 } 10208 getDefaultInstanceForType()10209 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse getDefaultInstanceForType() { 10210 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance(); 10211 } 10212 build()10213 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse build() { 10214 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse result = buildPartial(); 10215 if (!result.isInitialized()) { 10216 throw newUninitializedMessageException(result); 10217 } 10218 return result; 10219 } 10220 buildPartial()10221 public org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse buildPartial() { 10222 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse result = new org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse(this); 10223 int from_bitField0_ = bitField0_; 10224 int to_bitField0_ = 0; 10225 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 10226 to_bitField0_ |= 0x00000001; 10227 } 10228 result.lastFlushTime_ = lastFlushTime_; 10229 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 10230 to_bitField0_ |= 0x00000002; 10231 } 10232 result.flushed_ = flushed_; 10233 if (((from_bitField0_ & 0x00000004) == 0x00000004)) { 10234 to_bitField0_ |= 0x00000004; 10235 } 10236 result.wroteFlushWalMarker_ = wroteFlushWalMarker_; 10237 result.bitField0_ = to_bitField0_; 10238 onBuilt(); 10239 return result; 10240 } 10241 mergeFrom(com.google.protobuf.Message other)10242 public Builder mergeFrom(com.google.protobuf.Message other) { 10243 if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) { 10244 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse)other); 10245 } else { 10246 super.mergeFrom(other); 10247 return this; 10248 } 10249 } 10250 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse other)10251 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse other) { 10252 if (other == org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse.getDefaultInstance()) return this; 10253 if (other.hasLastFlushTime()) { 10254 setLastFlushTime(other.getLastFlushTime()); 10255 } 10256 if (other.hasFlushed()) { 10257 setFlushed(other.getFlushed()); 10258 } 10259 if (other.hasWroteFlushWalMarker()) { 10260 setWroteFlushWalMarker(other.getWroteFlushWalMarker()); 10261 } 10262 this.mergeUnknownFields(other.getUnknownFields()); 10263 return this; 10264 } 10265 isInitialized()10266 public final boolean isInitialized() { 10267 if (!hasLastFlushTime()) { 10268 10269 return false; 10270 } 10271 return true; 10272 } 10273 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10274 public Builder mergeFrom( 10275 com.google.protobuf.CodedInputStream input, 10276 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10277 throws java.io.IOException { 10278 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse parsedMessage = null; 10279 try { 10280 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 10281 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 10282 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.FlushRegionResponse) e.getUnfinishedMessage(); 10283 throw e; 10284 } finally { 10285 if (parsedMessage != null) { 10286 mergeFrom(parsedMessage); 10287 } 10288 } 10289 return this; 10290 } 10291 private int bitField0_; 
10292 10293 // required uint64 last_flush_time = 1; 10294 private long lastFlushTime_ ; 10295 /** 10296 * <code>required uint64 last_flush_time = 1;</code> 10297 */ hasLastFlushTime()10298 public boolean hasLastFlushTime() { 10299 return ((bitField0_ & 0x00000001) == 0x00000001); 10300 } 10301 /** 10302 * <code>required uint64 last_flush_time = 1;</code> 10303 */ getLastFlushTime()10304 public long getLastFlushTime() { 10305 return lastFlushTime_; 10306 } 10307 /** 10308 * <code>required uint64 last_flush_time = 1;</code> 10309 */ setLastFlushTime(long value)10310 public Builder setLastFlushTime(long value) { 10311 bitField0_ |= 0x00000001; 10312 lastFlushTime_ = value; 10313 onChanged(); 10314 return this; 10315 } 10316 /** 10317 * <code>required uint64 last_flush_time = 1;</code> 10318 */ clearLastFlushTime()10319 public Builder clearLastFlushTime() { 10320 bitField0_ = (bitField0_ & ~0x00000001); 10321 lastFlushTime_ = 0L; 10322 onChanged(); 10323 return this; 10324 } 10325 10326 // optional bool flushed = 2; 10327 private boolean flushed_ ; 10328 /** 10329 * <code>optional bool flushed = 2;</code> 10330 */ hasFlushed()10331 public boolean hasFlushed() { 10332 return ((bitField0_ & 0x00000002) == 0x00000002); 10333 } 10334 /** 10335 * <code>optional bool flushed = 2;</code> 10336 */ getFlushed()10337 public boolean getFlushed() { 10338 return flushed_; 10339 } 10340 /** 10341 * <code>optional bool flushed = 2;</code> 10342 */ setFlushed(boolean value)10343 public Builder setFlushed(boolean value) { 10344 bitField0_ |= 0x00000002; 10345 flushed_ = value; 10346 onChanged(); 10347 return this; 10348 } 10349 /** 10350 * <code>optional bool flushed = 2;</code> 10351 */ clearFlushed()10352 public Builder clearFlushed() { 10353 bitField0_ = (bitField0_ & ~0x00000002); 10354 flushed_ = false; 10355 onChanged(); 10356 return this; 10357 } 10358 10359 // optional bool wrote_flush_wal_marker = 3; 10360 private boolean wroteFlushWalMarker_ ; 10361 /** 10362 * <code>optional 
bool wrote_flush_wal_marker = 3;</code> 10363 */ hasWroteFlushWalMarker()10364 public boolean hasWroteFlushWalMarker() { 10365 return ((bitField0_ & 0x00000004) == 0x00000004); 10366 } 10367 /** 10368 * <code>optional bool wrote_flush_wal_marker = 3;</code> 10369 */ getWroteFlushWalMarker()10370 public boolean getWroteFlushWalMarker() { 10371 return wroteFlushWalMarker_; 10372 } 10373 /** 10374 * <code>optional bool wrote_flush_wal_marker = 3;</code> 10375 */ setWroteFlushWalMarker(boolean value)10376 public Builder setWroteFlushWalMarker(boolean value) { 10377 bitField0_ |= 0x00000004; 10378 wroteFlushWalMarker_ = value; 10379 onChanged(); 10380 return this; 10381 } 10382 /** 10383 * <code>optional bool wrote_flush_wal_marker = 3;</code> 10384 */ clearWroteFlushWalMarker()10385 public Builder clearWroteFlushWalMarker() { 10386 bitField0_ = (bitField0_ & ~0x00000004); 10387 wroteFlushWalMarker_ = false; 10388 onChanged(); 10389 return this; 10390 } 10391 10392 // @@protoc_insertion_point(builder_scope:FlushRegionResponse) 10393 } 10394 10395 static { 10396 defaultInstance = new FlushRegionResponse(true); defaultInstance.initFields()10397 defaultInstance.initFields(); 10398 } 10399 10400 // @@protoc_insertion_point(class_scope:FlushRegionResponse) 10401 } 10402 10403 public interface SplitRegionRequestOrBuilder 10404 extends com.google.protobuf.MessageOrBuilder { 10405 10406 // required .RegionSpecifier region = 1; 10407 /** 10408 * <code>required .RegionSpecifier region = 1;</code> 10409 */ hasRegion()10410 boolean hasRegion(); 10411 /** 10412 * <code>required .RegionSpecifier region = 1;</code> 10413 */ getRegion()10414 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion(); 10415 /** 10416 * <code>required .RegionSpecifier region = 1;</code> 10417 */ getRegionOrBuilder()10418 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder(); 10419 10420 // optional bytes split_point = 2; 10421 /** 
10422 * <code>optional bytes split_point = 2;</code> 10423 */ hasSplitPoint()10424 boolean hasSplitPoint(); 10425 /** 10426 * <code>optional bytes split_point = 2;</code> 10427 */ getSplitPoint()10428 com.google.protobuf.ByteString getSplitPoint(); 10429 } 10430 /** 10431 * Protobuf type {@code SplitRegionRequest} 10432 * 10433 * <pre> 10434 ** 10435 * Splits the specified region. 10436 * <p> 10437 * This method currently flushes the region and then forces a compaction which 10438 * will then trigger a split. The flush is done synchronously but the 10439 * compaction is asynchronous. 10440 * </pre> 10441 */ 10442 public static final class SplitRegionRequest extends 10443 com.google.protobuf.GeneratedMessage 10444 implements SplitRegionRequestOrBuilder { 10445 // Use SplitRegionRequest.newBuilder() to construct. SplitRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder)10446 private SplitRegionRequest(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 10447 super(builder); 10448 this.unknownFields = builder.getUnknownFields(); 10449 } SplitRegionRequest(boolean noInit)10450 private SplitRegionRequest(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 10451 10452 private static final SplitRegionRequest defaultInstance; getDefaultInstance()10453 public static SplitRegionRequest getDefaultInstance() { 10454 return defaultInstance; 10455 } 10456 getDefaultInstanceForType()10457 public SplitRegionRequest getDefaultInstanceForType() { 10458 return defaultInstance; 10459 } 10460 10461 private final com.google.protobuf.UnknownFieldSet unknownFields; 10462 @java.lang.Override 10463 public final com.google.protobuf.UnknownFieldSet getUnknownFields()10464 getUnknownFields() { 10465 return this.unknownFields; 10466 } SplitRegionRequest( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10467 private SplitRegionRequest( 10468 
com.google.protobuf.CodedInputStream input, 10469 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10470 throws com.google.protobuf.InvalidProtocolBufferException { 10471 initFields(); 10472 int mutable_bitField0_ = 0; 10473 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 10474 com.google.protobuf.UnknownFieldSet.newBuilder(); 10475 try { 10476 boolean done = false; 10477 while (!done) { 10478 int tag = input.readTag(); 10479 switch (tag) { 10480 case 0: 10481 done = true; 10482 break; 10483 default: { 10484 if (!parseUnknownField(input, unknownFields, 10485 extensionRegistry, tag)) { 10486 done = true; 10487 } 10488 break; 10489 } 10490 case 10: { 10491 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = null; 10492 if (((bitField0_ & 0x00000001) == 0x00000001)) { 10493 subBuilder = region_.toBuilder(); 10494 } 10495 region_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.PARSER, extensionRegistry); 10496 if (subBuilder != null) { 10497 subBuilder.mergeFrom(region_); 10498 region_ = subBuilder.buildPartial(); 10499 } 10500 bitField0_ |= 0x00000001; 10501 break; 10502 } 10503 case 18: { 10504 bitField0_ |= 0x00000002; 10505 splitPoint_ = input.readBytes(); 10506 break; 10507 } 10508 } 10509 } 10510 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 10511 throw e.setUnfinishedMessage(this); 10512 } catch (java.io.IOException e) { 10513 throw new com.google.protobuf.InvalidProtocolBufferException( 10514 e.getMessage()).setUnfinishedMessage(this); 10515 } finally { 10516 this.unknownFields = unknownFields.build(); 10517 makeExtensionsImmutable(); 10518 } 10519 } 10520 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()10521 getDescriptor() { 10522 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_descriptor; 10523 } 10524 10525 protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()10526 internalGetFieldAccessorTable() { 10527 return org.apache.hadoop.hbase.protobuf.generated.AdminProtos.internal_static_SplitRegionRequest_fieldAccessorTable 10528 .ensureFieldAccessorsInitialized( 10529 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.class, org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest.Builder.class); 10530 } 10531 10532 public static com.google.protobuf.Parser<SplitRegionRequest> PARSER = 10533 new com.google.protobuf.AbstractParser<SplitRegionRequest>() { 10534 public SplitRegionRequest parsePartialFrom( 10535 com.google.protobuf.CodedInputStream input, 10536 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10537 throws com.google.protobuf.InvalidProtocolBufferException { 10538 return new SplitRegionRequest(input, extensionRegistry); 10539 } 10540 }; 10541 10542 @java.lang.Override getParserForType()10543 public com.google.protobuf.Parser<SplitRegionRequest> getParserForType() { 10544 return PARSER; 10545 } 10546 10547 private int bitField0_; 10548 // required .RegionSpecifier region = 1; 10549 public static final int REGION_FIELD_NUMBER = 1; 10550 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_; 10551 /** 10552 * <code>required .RegionSpecifier region = 1;</code> 10553 */ hasRegion()10554 public boolean hasRegion() { 10555 return ((bitField0_ & 0x00000001) == 0x00000001); 10556 } 10557 /** 10558 * <code>required .RegionSpecifier region = 1;</code> 10559 */ getRegion()10560 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() { 10561 return region_; 10562 } 10563 /** 10564 * <code>required .RegionSpecifier region = 1;</code> 10565 */ getRegionOrBuilder()10566 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() { 10567 return region_; 10568 } 10569 10570 // 
optional bytes split_point = 2; 10571 public static final int SPLIT_POINT_FIELD_NUMBER = 2; 10572 private com.google.protobuf.ByteString splitPoint_; 10573 /** 10574 * <code>optional bytes split_point = 2;</code> 10575 */ hasSplitPoint()10576 public boolean hasSplitPoint() { 10577 return ((bitField0_ & 0x00000002) == 0x00000002); 10578 } 10579 /** 10580 * <code>optional bytes split_point = 2;</code> 10581 */ getSplitPoint()10582 public com.google.protobuf.ByteString getSplitPoint() { 10583 return splitPoint_; 10584 } 10585 initFields()10586 private void initFields() { 10587 region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance(); 10588 splitPoint_ = com.google.protobuf.ByteString.EMPTY; 10589 } 10590 private byte memoizedIsInitialized = -1; isInitialized()10591 public final boolean isInitialized() { 10592 byte isInitialized = memoizedIsInitialized; 10593 if (isInitialized != -1) return isInitialized == 1; 10594 10595 if (!hasRegion()) { 10596 memoizedIsInitialized = 0; 10597 return false; 10598 } 10599 if (!getRegion().isInitialized()) { 10600 memoizedIsInitialized = 0; 10601 return false; 10602 } 10603 memoizedIsInitialized = 1; 10604 return true; 10605 } 10606 writeTo(com.google.protobuf.CodedOutputStream output)10607 public void writeTo(com.google.protobuf.CodedOutputStream output) 10608 throws java.io.IOException { 10609 getSerializedSize(); 10610 if (((bitField0_ & 0x00000001) == 0x00000001)) { 10611 output.writeMessage(1, region_); 10612 } 10613 if (((bitField0_ & 0x00000002) == 0x00000002)) { 10614 output.writeBytes(2, splitPoint_); 10615 } 10616 getUnknownFields().writeTo(output); 10617 } 10618 10619 private int memoizedSerializedSize = -1; getSerializedSize()10620 public int getSerializedSize() { 10621 int size = memoizedSerializedSize; 10622 if (size != -1) return size; 10623 10624 size = 0; 10625 if (((bitField0_ & 0x00000001) == 0x00000001)) { 10626 size += com.google.protobuf.CodedOutputStream 10627 
.computeMessageSize(1, region_); 10628 } 10629 if (((bitField0_ & 0x00000002) == 0x00000002)) { 10630 size += com.google.protobuf.CodedOutputStream 10631 .computeBytesSize(2, splitPoint_); 10632 } 10633 size += getUnknownFields().getSerializedSize(); 10634 memoizedSerializedSize = size; 10635 return size; 10636 } 10637 10638 private static final long serialVersionUID = 0L; 10639 @java.lang.Override writeReplace()10640 protected java.lang.Object writeReplace() 10641 throws java.io.ObjectStreamException { 10642 return super.writeReplace(); 10643 } 10644 10645 @java.lang.Override equals(final java.lang.Object obj)10646 public boolean equals(final java.lang.Object obj) { 10647 if (obj == this) { 10648 return true; 10649 } 10650 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest)) { 10651 return super.equals(obj); 10652 } 10653 org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest other = (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest) obj; 10654 10655 boolean result = true; 10656 result = result && (hasRegion() == other.hasRegion()); 10657 if (hasRegion()) { 10658 result = result && getRegion() 10659 .equals(other.getRegion()); 10660 } 10661 result = result && (hasSplitPoint() == other.hasSplitPoint()); 10662 if (hasSplitPoint()) { 10663 result = result && getSplitPoint() 10664 .equals(other.getSplitPoint()); 10665 } 10666 result = result && 10667 getUnknownFields().equals(other.getUnknownFields()); 10668 return result; 10669 } 10670 10671 private int memoizedHashCode = 0; 10672 @java.lang.Override hashCode()10673 public int hashCode() { 10674 if (memoizedHashCode != 0) { 10675 return memoizedHashCode; 10676 } 10677 int hash = 41; 10678 hash = (19 * hash) + getDescriptorForType().hashCode(); 10679 if (hasRegion()) { 10680 hash = (37 * hash) + REGION_FIELD_NUMBER; 10681 hash = (53 * hash) + getRegion().hashCode(); 10682 } 10683 if (hasSplitPoint()) { 10684 hash = (37 * hash) + 
SPLIT_POINT_FIELD_NUMBER; 10685 hash = (53 * hash) + getSplitPoint().hashCode(); 10686 } 10687 hash = (29 * hash) + getUnknownFields().hashCode(); 10688 memoizedHashCode = hash; 10689 return hash; 10690 } 10691 parseFrom( com.google.protobuf.ByteString data)10692 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( 10693 com.google.protobuf.ByteString data) 10694 throws com.google.protobuf.InvalidProtocolBufferException { 10695 return PARSER.parseFrom(data); 10696 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10697 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( 10698 com.google.protobuf.ByteString data, 10699 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10700 throws com.google.protobuf.InvalidProtocolBufferException { 10701 return PARSER.parseFrom(data, extensionRegistry); 10702 } parseFrom(byte[] data)10703 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(byte[] data) 10704 throws com.google.protobuf.InvalidProtocolBufferException { 10705 return PARSER.parseFrom(data); 10706 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10707 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( 10708 byte[] data, 10709 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10710 throws com.google.protobuf.InvalidProtocolBufferException { 10711 return PARSER.parseFrom(data, extensionRegistry); 10712 } parseFrom(java.io.InputStream input)10713 public static org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom(java.io.InputStream input) 10714 throws java.io.IOException { 10715 return PARSER.parseFrom(input); 10716 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)10717 public static 
org.apache.hadoop.hbase.protobuf.generated.AdminProtos.SplitRegionRequest parseFrom( 10718 java.io.InputStream input, 10719 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 10720 throws java.io.IOException { 10721 return PARSER.parseFrom(input, extensionRegistry); 10722 } parseDelimitedFrom(java.io.InputStream input)10723