1 // Generated by the protocol buffer compiler. DO NOT EDIT! 2 // source: Filter.proto 3 4 package org.apache.hadoop.hbase.protobuf.generated; 5 6 public final class FilterProtos { FilterProtos()7 private FilterProtos() {} registerAllExtensions( com.google.protobuf.ExtensionRegistry registry)8 public static void registerAllExtensions( 9 com.google.protobuf.ExtensionRegistry registry) { 10 } 11 public interface FilterOrBuilder 12 extends com.google.protobuf.MessageOrBuilder { 13 14 // required string name = 1; 15 /** 16 * <code>required string name = 1;</code> 17 */ hasName()18 boolean hasName(); 19 /** 20 * <code>required string name = 1;</code> 21 */ getName()22 java.lang.String getName(); 23 /** 24 * <code>required string name = 1;</code> 25 */ 26 com.google.protobuf.ByteString getNameBytes()27 getNameBytes(); 28 29 // optional bytes serialized_filter = 2; 30 /** 31 * <code>optional bytes serialized_filter = 2;</code> 32 */ hasSerializedFilter()33 boolean hasSerializedFilter(); 34 /** 35 * <code>optional bytes serialized_filter = 2;</code> 36 */ getSerializedFilter()37 com.google.protobuf.ByteString getSerializedFilter(); 38 } 39 /** 40 * Protobuf type {@code Filter} 41 */ 42 public static final class Filter extends 43 com.google.protobuf.GeneratedMessage 44 implements FilterOrBuilder { 45 // Use Filter.newBuilder() to construct. 
Filter(com.google.protobuf.GeneratedMessage.Builder<?> builder)46 private Filter(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 47 super(builder); 48 this.unknownFields = builder.getUnknownFields(); 49 } Filter(boolean noInit)50 private Filter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 51 52 private static final Filter defaultInstance; getDefaultInstance()53 public static Filter getDefaultInstance() { 54 return defaultInstance; 55 } 56 getDefaultInstanceForType()57 public Filter getDefaultInstanceForType() { 58 return defaultInstance; 59 } 60 61 private final com.google.protobuf.UnknownFieldSet unknownFields; 62 @java.lang.Override 63 public final com.google.protobuf.UnknownFieldSet getUnknownFields()64 getUnknownFields() { 65 return this.unknownFields; 66 } Filter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)67 private Filter( 68 com.google.protobuf.CodedInputStream input, 69 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 70 throws com.google.protobuf.InvalidProtocolBufferException { 71 initFields(); 72 int mutable_bitField0_ = 0; 73 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 74 com.google.protobuf.UnknownFieldSet.newBuilder(); 75 try { 76 boolean done = false; 77 while (!done) { 78 int tag = input.readTag(); 79 switch (tag) { 80 case 0: 81 done = true; 82 break; 83 default: { 84 if (!parseUnknownField(input, unknownFields, 85 extensionRegistry, tag)) { 86 done = true; 87 } 88 break; 89 } 90 case 10: { 91 bitField0_ |= 0x00000001; 92 name_ = input.readBytes(); 93 break; 94 } 95 case 18: { 96 bitField0_ |= 0x00000002; 97 serializedFilter_ = input.readBytes(); 98 break; 99 } 100 } 101 } 102 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 103 throw e.setUnfinishedMessage(this); 104 } catch (java.io.IOException e) { 105 throw new com.google.protobuf.InvalidProtocolBufferException( 106 
e.getMessage()).setUnfinishedMessage(this); 107 } finally { 108 this.unknownFields = unknownFields.build(); 109 makeExtensionsImmutable(); 110 } 111 } 112 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()113 getDescriptor() { 114 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_Filter_descriptor; 115 } 116 117 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()118 internalGetFieldAccessorTable() { 119 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_Filter_fieldAccessorTable 120 .ensureFieldAccessorsInitialized( 121 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder.class); 122 } 123 124 public static com.google.protobuf.Parser<Filter> PARSER = 125 new com.google.protobuf.AbstractParser<Filter>() { 126 public Filter parsePartialFrom( 127 com.google.protobuf.CodedInputStream input, 128 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 129 throws com.google.protobuf.InvalidProtocolBufferException { 130 return new Filter(input, extensionRegistry); 131 } 132 }; 133 134 @java.lang.Override getParserForType()135 public com.google.protobuf.Parser<Filter> getParserForType() { 136 return PARSER; 137 } 138 139 private int bitField0_; 140 // required string name = 1; 141 public static final int NAME_FIELD_NUMBER = 1; 142 private java.lang.Object name_; 143 /** 144 * <code>required string name = 1;</code> 145 */ hasName()146 public boolean hasName() { 147 return ((bitField0_ & 0x00000001) == 0x00000001); 148 } 149 /** 150 * <code>required string name = 1;</code> 151 */ getName()152 public java.lang.String getName() { 153 java.lang.Object ref = name_; 154 if (ref instanceof java.lang.String) { 155 return (java.lang.String) ref; 156 } else { 157 com.google.protobuf.ByteString bs = 158 (com.google.protobuf.ByteString) ref; 159 java.lang.String s = 
bs.toStringUtf8(); 160 if (bs.isValidUtf8()) { 161 name_ = s; 162 } 163 return s; 164 } 165 } 166 /** 167 * <code>required string name = 1;</code> 168 */ 169 public com.google.protobuf.ByteString getNameBytes()170 getNameBytes() { 171 java.lang.Object ref = name_; 172 if (ref instanceof java.lang.String) { 173 com.google.protobuf.ByteString b = 174 com.google.protobuf.ByteString.copyFromUtf8( 175 (java.lang.String) ref); 176 name_ = b; 177 return b; 178 } else { 179 return (com.google.protobuf.ByteString) ref; 180 } 181 } 182 183 // optional bytes serialized_filter = 2; 184 public static final int SERIALIZED_FILTER_FIELD_NUMBER = 2; 185 private com.google.protobuf.ByteString serializedFilter_; 186 /** 187 * <code>optional bytes serialized_filter = 2;</code> 188 */ hasSerializedFilter()189 public boolean hasSerializedFilter() { 190 return ((bitField0_ & 0x00000002) == 0x00000002); 191 } 192 /** 193 * <code>optional bytes serialized_filter = 2;</code> 194 */ getSerializedFilter()195 public com.google.protobuf.ByteString getSerializedFilter() { 196 return serializedFilter_; 197 } 198 initFields()199 private void initFields() { 200 name_ = ""; 201 serializedFilter_ = com.google.protobuf.ByteString.EMPTY; 202 } 203 private byte memoizedIsInitialized = -1; isInitialized()204 public final boolean isInitialized() { 205 byte isInitialized = memoizedIsInitialized; 206 if (isInitialized != -1) return isInitialized == 1; 207 208 if (!hasName()) { 209 memoizedIsInitialized = 0; 210 return false; 211 } 212 memoizedIsInitialized = 1; 213 return true; 214 } 215 writeTo(com.google.protobuf.CodedOutputStream output)216 public void writeTo(com.google.protobuf.CodedOutputStream output) 217 throws java.io.IOException { 218 getSerializedSize(); 219 if (((bitField0_ & 0x00000001) == 0x00000001)) { 220 output.writeBytes(1, getNameBytes()); 221 } 222 if (((bitField0_ & 0x00000002) == 0x00000002)) { 223 output.writeBytes(2, serializedFilter_); 224 } 225 getUnknownFields().writeTo(output); 
226 } 227 228 private int memoizedSerializedSize = -1; getSerializedSize()229 public int getSerializedSize() { 230 int size = memoizedSerializedSize; 231 if (size != -1) return size; 232 233 size = 0; 234 if (((bitField0_ & 0x00000001) == 0x00000001)) { 235 size += com.google.protobuf.CodedOutputStream 236 .computeBytesSize(1, getNameBytes()); 237 } 238 if (((bitField0_ & 0x00000002) == 0x00000002)) { 239 size += com.google.protobuf.CodedOutputStream 240 .computeBytesSize(2, serializedFilter_); 241 } 242 size += getUnknownFields().getSerializedSize(); 243 memoizedSerializedSize = size; 244 return size; 245 } 246 247 private static final long serialVersionUID = 0L; 248 @java.lang.Override writeReplace()249 protected java.lang.Object writeReplace() 250 throws java.io.ObjectStreamException { 251 return super.writeReplace(); 252 } 253 254 @java.lang.Override equals(final java.lang.Object obj)255 public boolean equals(final java.lang.Object obj) { 256 if (obj == this) { 257 return true; 258 } 259 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter)) { 260 return super.equals(obj); 261 } 262 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter) obj; 263 264 boolean result = true; 265 result = result && (hasName() == other.hasName()); 266 if (hasName()) { 267 result = result && getName() 268 .equals(other.getName()); 269 } 270 result = result && (hasSerializedFilter() == other.hasSerializedFilter()); 271 if (hasSerializedFilter()) { 272 result = result && getSerializedFilter() 273 .equals(other.getSerializedFilter()); 274 } 275 result = result && 276 getUnknownFields().equals(other.getUnknownFields()); 277 return result; 278 } 279 280 private int memoizedHashCode = 0; 281 @java.lang.Override hashCode()282 public int hashCode() { 283 if (memoizedHashCode != 0) { 284 return memoizedHashCode; 285 } 286 int hash = 41; 287 hash = (19 * hash) + 
getDescriptorForType().hashCode(); 288 if (hasName()) { 289 hash = (37 * hash) + NAME_FIELD_NUMBER; 290 hash = (53 * hash) + getName().hashCode(); 291 } 292 if (hasSerializedFilter()) { 293 hash = (37 * hash) + SERIALIZED_FILTER_FIELD_NUMBER; 294 hash = (53 * hash) + getSerializedFilter().hashCode(); 295 } 296 hash = (29 * hash) + getUnknownFields().hashCode(); 297 memoizedHashCode = hash; 298 return hash; 299 } 300 parseFrom( com.google.protobuf.ByteString data)301 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom( 302 com.google.protobuf.ByteString data) 303 throws com.google.protobuf.InvalidProtocolBufferException { 304 return PARSER.parseFrom(data); 305 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)306 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom( 307 com.google.protobuf.ByteString data, 308 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 309 throws com.google.protobuf.InvalidProtocolBufferException { 310 return PARSER.parseFrom(data, extensionRegistry); 311 } parseFrom(byte[] data)312 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(byte[] data) 313 throws com.google.protobuf.InvalidProtocolBufferException { 314 return PARSER.parseFrom(data); 315 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)316 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom( 317 byte[] data, 318 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 319 throws com.google.protobuf.InvalidProtocolBufferException { 320 return PARSER.parseFrom(data, extensionRegistry); 321 } parseFrom(java.io.InputStream input)322 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom(java.io.InputStream input) 323 throws java.io.IOException { 324 return PARSER.parseFrom(input); 325 } parseFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)326 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom( 327 java.io.InputStream input, 328 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 329 throws java.io.IOException { 330 return PARSER.parseFrom(input, extensionRegistry); 331 } parseDelimitedFrom(java.io.InputStream input)332 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseDelimitedFrom(java.io.InputStream input) 333 throws java.io.IOException { 334 return PARSER.parseDelimitedFrom(input); 335 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)336 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseDelimitedFrom( 337 java.io.InputStream input, 338 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 339 throws java.io.IOException { 340 return PARSER.parseDelimitedFrom(input, extensionRegistry); 341 } parseFrom( com.google.protobuf.CodedInputStream input)342 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom( 343 com.google.protobuf.CodedInputStream input) 344 throws java.io.IOException { 345 return PARSER.parseFrom(input); 346 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)347 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parseFrom( 348 com.google.protobuf.CodedInputStream input, 349 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 350 throws java.io.IOException { 351 return PARSER.parseFrom(input, extensionRegistry); 352 } 353 newBuilder()354 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()355 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter prototype)356 public static Builder 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter prototype) { 357 return newBuilder().mergeFrom(prototype); 358 } toBuilder()359 public Builder toBuilder() { return newBuilder(this); } 360 361 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)362 protected Builder newBuilderForType( 363 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 364 Builder builder = new Builder(parent); 365 return builder; 366 } 367 /** 368 * Protobuf type {@code Filter} 369 */ 370 public static final class Builder extends 371 com.google.protobuf.GeneratedMessage.Builder<Builder> 372 implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder { 373 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()374 getDescriptor() { 375 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_Filter_descriptor; 376 } 377 378 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()379 internalGetFieldAccessorTable() { 380 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_Filter_fieldAccessorTable 381 .ensureFieldAccessorsInitialized( 382 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder.class); 383 } 384 385 // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder() Builder()386 private Builder() { 387 maybeForceBuilderInitialization(); 388 } 389 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)390 private Builder( 391 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 392 super(parent); 393 maybeForceBuilderInitialization(); 394 } maybeForceBuilderInitialization()395 private void maybeForceBuilderInitialization() { 396 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 397 } 398 } create()399 private static Builder 
create() { 400 return new Builder(); 401 } 402 clear()403 public Builder clear() { 404 super.clear(); 405 name_ = ""; 406 bitField0_ = (bitField0_ & ~0x00000001); 407 serializedFilter_ = com.google.protobuf.ByteString.EMPTY; 408 bitField0_ = (bitField0_ & ~0x00000002); 409 return this; 410 } 411 clone()412 public Builder clone() { 413 return create().mergeFrom(buildPartial()); 414 } 415 416 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()417 getDescriptorForType() { 418 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_Filter_descriptor; 419 } 420 getDefaultInstanceForType()421 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getDefaultInstanceForType() { 422 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); 423 } 424 build()425 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter build() { 426 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter result = buildPartial(); 427 if (!result.isInitialized()) { 428 throw newUninitializedMessageException(result); 429 } 430 return result; 431 } 432 buildPartial()433 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter buildPartial() { 434 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter(this); 435 int from_bitField0_ = bitField0_; 436 int to_bitField0_ = 0; 437 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 438 to_bitField0_ |= 0x00000001; 439 } 440 result.name_ = name_; 441 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 442 to_bitField0_ |= 0x00000002; 443 } 444 result.serializedFilter_ = serializedFilter_; 445 result.bitField0_ = to_bitField0_; 446 onBuilt(); 447 return result; 448 } 449 mergeFrom(com.google.protobuf.Message other)450 public Builder mergeFrom(com.google.protobuf.Message other) { 451 if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter) { 452 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter)other); 453 } else { 454 super.mergeFrom(other); 455 return this; 456 } 457 } 458 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter other)459 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter other) { 460 if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) return this; 461 if (other.hasName()) { 462 bitField0_ |= 0x00000001; 463 name_ = other.name_; 464 onChanged(); 465 } 466 if (other.hasSerializedFilter()) { 467 setSerializedFilter(other.getSerializedFilter()); 468 } 469 this.mergeUnknownFields(other.getUnknownFields()); 470 return this; 471 } 472 isInitialized()473 public final boolean isInitialized() { 474 if (!hasName()) { 475 476 return false; 477 } 478 return true; 479 } 480 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)481 public Builder mergeFrom( 482 com.google.protobuf.CodedInputStream input, 483 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 484 throws java.io.IOException { 485 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter parsedMessage = null; 486 try { 487 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 488 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 489 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter) e.getUnfinishedMessage(); 490 throw e; 491 } finally { 492 if (parsedMessage != null) { 493 mergeFrom(parsedMessage); 494 } 495 } 496 return this; 497 } 498 private int bitField0_; 499 500 // required string name = 1; 501 private java.lang.Object name_ = ""; 502 /** 503 * <code>required string name = 1;</code> 504 */ hasName()505 public boolean hasName() { 506 return ((bitField0_ & 0x00000001) == 0x00000001); 507 } 508 /** 509 * 
<code>required string name = 1;</code> 510 */ getName()511 public java.lang.String getName() { 512 java.lang.Object ref = name_; 513 if (!(ref instanceof java.lang.String)) { 514 java.lang.String s = ((com.google.protobuf.ByteString) ref) 515 .toStringUtf8(); 516 name_ = s; 517 return s; 518 } else { 519 return (java.lang.String) ref; 520 } 521 } 522 /** 523 * <code>required string name = 1;</code> 524 */ 525 public com.google.protobuf.ByteString getNameBytes()526 getNameBytes() { 527 java.lang.Object ref = name_; 528 if (ref instanceof String) { 529 com.google.protobuf.ByteString b = 530 com.google.protobuf.ByteString.copyFromUtf8( 531 (java.lang.String) ref); 532 name_ = b; 533 return b; 534 } else { 535 return (com.google.protobuf.ByteString) ref; 536 } 537 } 538 /** 539 * <code>required string name = 1;</code> 540 */ setName( java.lang.String value)541 public Builder setName( 542 java.lang.String value) { 543 if (value == null) { 544 throw new NullPointerException(); 545 } 546 bitField0_ |= 0x00000001; 547 name_ = value; 548 onChanged(); 549 return this; 550 } 551 /** 552 * <code>required string name = 1;</code> 553 */ clearName()554 public Builder clearName() { 555 bitField0_ = (bitField0_ & ~0x00000001); 556 name_ = getDefaultInstance().getName(); 557 onChanged(); 558 return this; 559 } 560 /** 561 * <code>required string name = 1;</code> 562 */ setNameBytes( com.google.protobuf.ByteString value)563 public Builder setNameBytes( 564 com.google.protobuf.ByteString value) { 565 if (value == null) { 566 throw new NullPointerException(); 567 } 568 bitField0_ |= 0x00000001; 569 name_ = value; 570 onChanged(); 571 return this; 572 } 573 574 // optional bytes serialized_filter = 2; 575 private com.google.protobuf.ByteString serializedFilter_ = com.google.protobuf.ByteString.EMPTY; 576 /** 577 * <code>optional bytes serialized_filter = 2;</code> 578 */ hasSerializedFilter()579 public boolean hasSerializedFilter() { 580 return ((bitField0_ & 0x00000002) == 0x00000002); 
581 } 582 /** 583 * <code>optional bytes serialized_filter = 2;</code> 584 */ getSerializedFilter()585 public com.google.protobuf.ByteString getSerializedFilter() { 586 return serializedFilter_; 587 } 588 /** 589 * <code>optional bytes serialized_filter = 2;</code> 590 */ setSerializedFilter(com.google.protobuf.ByteString value)591 public Builder setSerializedFilter(com.google.protobuf.ByteString value) { 592 if (value == null) { 593 throw new NullPointerException(); 594 } 595 bitField0_ |= 0x00000002; 596 serializedFilter_ = value; 597 onChanged(); 598 return this; 599 } 600 /** 601 * <code>optional bytes serialized_filter = 2;</code> 602 */ clearSerializedFilter()603 public Builder clearSerializedFilter() { 604 bitField0_ = (bitField0_ & ~0x00000002); 605 serializedFilter_ = getDefaultInstance().getSerializedFilter(); 606 onChanged(); 607 return this; 608 } 609 610 // @@protoc_insertion_point(builder_scope:Filter) 611 } 612 613 static { 614 defaultInstance = new Filter(true); defaultInstance.initFields()615 defaultInstance.initFields(); 616 } 617 618 // @@protoc_insertion_point(class_scope:Filter) 619 } 620 621 public interface ColumnCountGetFilterOrBuilder 622 extends com.google.protobuf.MessageOrBuilder { 623 624 // required int32 limit = 1; 625 /** 626 * <code>required int32 limit = 1;</code> 627 */ hasLimit()628 boolean hasLimit(); 629 /** 630 * <code>required int32 limit = 1;</code> 631 */ getLimit()632 int getLimit(); 633 } 634 /** 635 * Protobuf type {@code ColumnCountGetFilter} 636 */ 637 public static final class ColumnCountGetFilter extends 638 com.google.protobuf.GeneratedMessage 639 implements ColumnCountGetFilterOrBuilder { 640 // Use ColumnCountGetFilter.newBuilder() to construct. 
ColumnCountGetFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder)641 private ColumnCountGetFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 642 super(builder); 643 this.unknownFields = builder.getUnknownFields(); 644 } ColumnCountGetFilter(boolean noInit)645 private ColumnCountGetFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 646 647 private static final ColumnCountGetFilter defaultInstance; getDefaultInstance()648 public static ColumnCountGetFilter getDefaultInstance() { 649 return defaultInstance; 650 } 651 getDefaultInstanceForType()652 public ColumnCountGetFilter getDefaultInstanceForType() { 653 return defaultInstance; 654 } 655 656 private final com.google.protobuf.UnknownFieldSet unknownFields; 657 @java.lang.Override 658 public final com.google.protobuf.UnknownFieldSet getUnknownFields()659 getUnknownFields() { 660 return this.unknownFields; 661 } ColumnCountGetFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)662 private ColumnCountGetFilter( 663 com.google.protobuf.CodedInputStream input, 664 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 665 throws com.google.protobuf.InvalidProtocolBufferException { 666 initFields(); 667 int mutable_bitField0_ = 0; 668 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 669 com.google.protobuf.UnknownFieldSet.newBuilder(); 670 try { 671 boolean done = false; 672 while (!done) { 673 int tag = input.readTag(); 674 switch (tag) { 675 case 0: 676 done = true; 677 break; 678 default: { 679 if (!parseUnknownField(input, unknownFields, 680 extensionRegistry, tag)) { 681 done = true; 682 } 683 break; 684 } 685 case 8: { 686 bitField0_ |= 0x00000001; 687 limit_ = input.readInt32(); 688 break; 689 } 690 } 691 } 692 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 693 throw e.setUnfinishedMessage(this); 694 } catch (java.io.IOException e) { 695 
throw new com.google.protobuf.InvalidProtocolBufferException( 696 e.getMessage()).setUnfinishedMessage(this); 697 } finally { 698 this.unknownFields = unknownFields.build(); 699 makeExtensionsImmutable(); 700 } 701 } 702 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()703 getDescriptor() { 704 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_descriptor; 705 } 706 707 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()708 internalGetFieldAccessorTable() { 709 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_fieldAccessorTable 710 .ensureFieldAccessorsInitialized( 711 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.Builder.class); 712 } 713 714 public static com.google.protobuf.Parser<ColumnCountGetFilter> PARSER = 715 new com.google.protobuf.AbstractParser<ColumnCountGetFilter>() { 716 public ColumnCountGetFilter parsePartialFrom( 717 com.google.protobuf.CodedInputStream input, 718 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 719 throws com.google.protobuf.InvalidProtocolBufferException { 720 return new ColumnCountGetFilter(input, extensionRegistry); 721 } 722 }; 723 724 @java.lang.Override getParserForType()725 public com.google.protobuf.Parser<ColumnCountGetFilter> getParserForType() { 726 return PARSER; 727 } 728 729 private int bitField0_; 730 // required int32 limit = 1; 731 public static final int LIMIT_FIELD_NUMBER = 1; 732 private int limit_; 733 /** 734 * <code>required int32 limit = 1;</code> 735 */ hasLimit()736 public boolean hasLimit() { 737 return ((bitField0_ & 0x00000001) == 0x00000001); 738 } 739 /** 740 * <code>required int32 limit = 1;</code> 741 */ getLimit()742 public int getLimit() { 743 return limit_; 744 } 745 initFields()746 private void 
initFields() { 747 limit_ = 0; 748 } 749 private byte memoizedIsInitialized = -1; isInitialized()750 public final boolean isInitialized() { 751 byte isInitialized = memoizedIsInitialized; 752 if (isInitialized != -1) return isInitialized == 1; 753 754 if (!hasLimit()) { 755 memoizedIsInitialized = 0; 756 return false; 757 } 758 memoizedIsInitialized = 1; 759 return true; 760 } 761 writeTo(com.google.protobuf.CodedOutputStream output)762 public void writeTo(com.google.protobuf.CodedOutputStream output) 763 throws java.io.IOException { 764 getSerializedSize(); 765 if (((bitField0_ & 0x00000001) == 0x00000001)) { 766 output.writeInt32(1, limit_); 767 } 768 getUnknownFields().writeTo(output); 769 } 770 771 private int memoizedSerializedSize = -1; getSerializedSize()772 public int getSerializedSize() { 773 int size = memoizedSerializedSize; 774 if (size != -1) return size; 775 776 size = 0; 777 if (((bitField0_ & 0x00000001) == 0x00000001)) { 778 size += com.google.protobuf.CodedOutputStream 779 .computeInt32Size(1, limit_); 780 } 781 size += getUnknownFields().getSerializedSize(); 782 memoizedSerializedSize = size; 783 return size; 784 } 785 786 private static final long serialVersionUID = 0L; 787 @java.lang.Override writeReplace()788 protected java.lang.Object writeReplace() 789 throws java.io.ObjectStreamException { 790 return super.writeReplace(); 791 } 792 793 @java.lang.Override equals(final java.lang.Object obj)794 public boolean equals(final java.lang.Object obj) { 795 if (obj == this) { 796 return true; 797 } 798 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter)) { 799 return super.equals(obj); 800 } 801 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter) obj; 802 803 boolean result = true; 804 result = result && (hasLimit() == other.hasLimit()); 805 if (hasLimit()) { 806 result = result && (getLimit() 
807 == other.getLimit()); 808 } 809 result = result && 810 getUnknownFields().equals(other.getUnknownFields()); 811 return result; 812 } 813 814 private int memoizedHashCode = 0; 815 @java.lang.Override hashCode()816 public int hashCode() { 817 if (memoizedHashCode != 0) { 818 return memoizedHashCode; 819 } 820 int hash = 41; 821 hash = (19 * hash) + getDescriptorForType().hashCode(); 822 if (hasLimit()) { 823 hash = (37 * hash) + LIMIT_FIELD_NUMBER; 824 hash = (53 * hash) + getLimit(); 825 } 826 hash = (29 * hash) + getUnknownFields().hashCode(); 827 memoizedHashCode = hash; 828 return hash; 829 } 830 parseFrom( com.google.protobuf.ByteString data)831 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( 832 com.google.protobuf.ByteString data) 833 throws com.google.protobuf.InvalidProtocolBufferException { 834 return PARSER.parseFrom(data); 835 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)836 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( 837 com.google.protobuf.ByteString data, 838 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 839 throws com.google.protobuf.InvalidProtocolBufferException { 840 return PARSER.parseFrom(data, extensionRegistry); 841 } parseFrom(byte[] data)842 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(byte[] data) 843 throws com.google.protobuf.InvalidProtocolBufferException { 844 return PARSER.parseFrom(data); 845 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)846 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom( 847 byte[] data, 848 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 849 throws com.google.protobuf.InvalidProtocolBufferException { 850 return PARSER.parseFrom(data, extensionRegistry); 851 } 
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code ColumnCountGetFilter}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilterOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // Intentionally empty body: this message has no sub-message fields,
        // so there are no field builders to force-initialize.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        limit_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnCountGetFilter_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.getDefaultInstance();
      }

      // build() enforces required fields; buildPartial() does not.
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        // Copy the has-bit for 'limit' (bit 0x1) from builder to message.
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.limit_ = limit_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter)other);
        } else {
          // Fall back to reflective merge for foreign message types.
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter.getDefaultInstance()) return this;
        if (other.hasLimit()) {
          setLimit(other.getLimit());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasLimit()) {
          // required field 'limit' is not set
          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was parsed before the failure, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnCountGetFilter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Tracks which fields have been explicitly set (bit 0x1 = limit).
      private int bitField0_;

      // required int32 limit = 1;
      private int limit_ ;
      /**
       * <code>required int32 limit = 1;</code>
       */
      public boolean hasLimit() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required int32 limit = 1;</code>
       */
      public int getLimit() {
        return limit_;
      }
      /**
       * <code>required int32 limit = 1;</code>
       */
      public Builder setLimit(int value) {
        bitField0_ |= 0x00000001;
        limit_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required int32 limit = 1;</code>
       */
      public Builder clearLimit() {
        bitField0_ = (bitField0_ & ~0x00000001);
        limit_ = 0;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:ColumnCountGetFilter)
    }

    static {
      defaultInstance = new ColumnCountGetFilter(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:ColumnCountGetFilter)
  }

  public interface ColumnPaginationFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required int32 limit = 1;
    /**
     * <code>required int32 limit = 1;</code>
     */
    boolean hasLimit();
    /**
     * <code>required int32 limit = 1;</code>
     */
    int getLimit();

    // optional int32 offset = 2;
    /**
     * <code>optional int32 offset = 2;</code>
     */
    boolean hasOffset();
    /**
     * <code>optional int32 offset = 2;</code>
     */
    int getOffset();

    // optional bytes column_offset = 3;
    /**
     * <code>optional bytes column_offset = 3;</code>
     */
    boolean hasColumnOffset();
    /**
     * <code>optional bytes column_offset = 3;</code>
     */
    com.google.protobuf.ByteString
        getColumnOffset();
  }
  /**
   * Protobuf type {@code ColumnPaginationFilter}
   */
  public static final class ColumnPaginationFilter extends
      com.google.protobuf.GeneratedMessage
      implements ColumnPaginationFilterOrBuilder {
    // Use ColumnPaginationFilter.newBuilder() to construct.
    private ColumnPaginationFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit path is used only for the shared defaultInstance below.
    private ColumnPaginationFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final ColumnPaginationFilter defaultInstance;
    public static ColumnPaginationFilter getDefaultInstance() {
      return defaultInstance;
    }

    public ColumnPaginationFilter getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0).
    private ColumnPaginationFilter(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // NOTE: the 'default' label precedes the field cases; Java switch
          // dispatch is by value, so case ordering has no effect.
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              // field 1 (limit), wire type 0 (varint)
              bitField0_ |= 0x00000001;
              limit_ = input.readInt32();
              break;
            }
            case 16: {
              // field 2 (offset), wire type 0 (varint)
              bitField0_ |= 0x00000002;
              offset_ = input.readInt32();
              break;
            }
            case 26: {
              // field 3 (column_offset), wire type 2 (length-delimited)
              bitField0_ |= 0x00000004;
              columnOffset_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.Builder.class);
    }

    // Shared parser singleton; delegates to the parsing constructor above.
    public static com.google.protobuf.Parser<ColumnPaginationFilter> PARSER =
        new com.google.protobuf.AbstractParser<ColumnPaginationFilter>() {
      public ColumnPaginationFilter parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ColumnPaginationFilter(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ColumnPaginationFilter> getParserForType() {
      return PARSER;
    }

    // Has-bits: 0x1 = limit, 0x2 = offset, 0x4 = column_offset.
    private int bitField0_;
    // required int32 limit = 1;
    public static final int LIMIT_FIELD_NUMBER = 1;
    private int limit_;
    /**
     * <code>required int32 limit = 1;</code>
     */
    public boolean hasLimit() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required int32 limit = 1;</code>
     */
    public int getLimit() {
      return limit_;
    }

    // optional int32 offset = 2;
    public static final int OFFSET_FIELD_NUMBER = 2;
    private int offset_;
    /**
     * <code>optional int32 offset = 2;</code>
     */
    public boolean hasOffset() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional int32 offset = 2;</code>
     */
    public int getOffset() {
      return offset_;
    }

    // optional bytes column_offset = 3;
    public static final int COLUMN_OFFSET_FIELD_NUMBER = 3;
    private com.google.protobuf.ByteString columnOffset_;
    /**
     * <code>optional bytes column_offset = 3;</code>
     */
    public boolean hasColumnOffset() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional bytes column_offset = 3;</code>
     */
    public com.google.protobuf.ByteString getColumnOffset() {
      return columnOffset_;
    }

    private void initFields() {
      limit_ = 0;
      offset_ = 0;
      columnOffset_ = com.google.protobuf.ByteString.EMPTY;
    }
    // -1 = unknown, 0 = missing required fields, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasLimit()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
        throws java.io.IOException {
      // Ensures memoizedSerializedSize is populated before writing.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeInt32(1, limit_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeInt32(2, offset_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, columnOffset_);
      }
      getUnknownFields().writeTo(output);
    }

    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(1, limit_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt32Size(2, offset_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(3, columnOffset_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter) obj;

      // Field-by-field comparison: presence must match, and values must match
      // only for fields that are present.
      boolean result = true;
      result = result && (hasLimit() == other.hasLimit());
      if (hasLimit()) {
        result = result && (getLimit()
            == other.getLimit());
      }
      result = result && (hasOffset() == other.hasOffset());
      if (hasOffset()) {
        result = result && (getOffset()
            == other.getOffset());
      }
      result = result && (hasColumnOffset() == other.hasColumnOffset());
      if (hasColumnOffset()) {
        result = result && getColumnOffset()
            .equals(other.getColumnOffset());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash code; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasLimit()) {
        hash = (37 * hash) + LIMIT_FIELD_NUMBER;
        hash = (53 * hash) + getLimit();
      }
      if (hasOffset()) {
        hash = (37 * hash) + OFFSET_FIELD_NUMBER;
        hash = (53 * hash) + getOffset();
      }
      if (hasColumnOffset()) {
        hash = (37 * hash) + COLUMN_OFFSET_FIELD_NUMBER;
        hash = (53 * hash) + getColumnOffset().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parse entry points; all delegate to the PARSER singleton.
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code ColumnPaginationFilter}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
        implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilterOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      private void maybeForceBuilderInitialization() {
        // Intentionally empty: no sub-message fields, hence no field builders.
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      public Builder clear() {
        super.clear();
        limit_ = 0;
        bitField0_ = (bitField0_ & ~0x00000001);
        offset_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
        columnOffset_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPaginationFilter_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.getDefaultInstance();
      }

      // build() enforces required fields; buildPartial() does not.
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        // Copy has-bits (0x1 limit, 0x2 offset, 0x4 column_offset) to the message.
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.limit_ = limit_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.offset_ = offset_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.columnOffset_ = columnOffset_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter)other);
        } else {
          // Fall back to reflective merge for foreign message types.
          super.mergeFrom(other);
          return this;
        }
      }

      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter.getDefaultInstance()) return this;
        if (other.hasLimit()) {
          setLimit(other.getLimit());
        }
        if (other.hasOffset()) {
          setOffset(other.getOffset());
        }
        if (other.hasColumnOffset()) {
          setColumnOffset(other.getColumnOffset());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasLimit()) {
          // required field 'limit' is not set
          return false;
        }
        return true;
      }

      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was parsed before the failure, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPaginationFilter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Has-bits: 0x1 = limit, 0x2 = offset, 0x4 = column_offset.
      private int bitField0_;

      // required int32 limit = 1;
      private int limit_ ;
      /**
       * <code>required int32 limit = 1;</code>
       */
      public boolean hasLimit() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required int32 limit = 1;</code>
       */
      public int getLimit() {
        return limit_;
      }
      /**
       * <code>required int32 limit = 1;</code>
       */
      public Builder setLimit(int value) {
        bitField0_ |= 0x00000001;
        limit_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required int32 limit = 1;</code>
       */
      public Builder clearLimit() {
        bitField0_ = (bitField0_ & ~0x00000001);
        limit_ = 0;
        onChanged();
        return this;
      }

      // optional int32 offset = 2;
      private int offset_ ;
      /**
       * <code>optional int32 offset = 2;</code>
       */
      public boolean hasOffset() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional int32 offset = 2;</code>
       */
      public int getOffset() {
        return offset_;
      }
      /**
       * <code>optional int32 offset = 2;</code>
       */
      public Builder setOffset(int value) {
        bitField0_ |= 0x00000002;
        offset_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional int32 offset = 2;</code>
       */
      public Builder clearOffset() {
        bitField0_ = (bitField0_ & ~0x00000002);
        offset_ = 0;
        onChanged();
        return this;
      }

      // optional bytes column_offset = 3;
      private com.google.protobuf.ByteString columnOffset_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>optional bytes column_offset = 3;</code>
       */
      public boolean hasColumnOffset() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /**
       * <code>optional bytes column_offset = 3;</code>
       */
      public com.google.protobuf.ByteString getColumnOffset() {
        return columnOffset_;
      }
      /**
       * <code>optional bytes column_offset = 3;</code>
       */
      public Builder setColumnOffset(com.google.protobuf.ByteString value) {
        // bytes fields reject null explicitly (generated contract).
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000004;
        columnOffset_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional bytes column_offset = 3;</code>
       */
      public Builder clearColumnOffset() {
        bitField0_ = (bitField0_ & ~0x00000004);
        columnOffset_ = getDefaultInstance().getColumnOffset();
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:ColumnPaginationFilter)
    }

    static {
      defaultInstance = new ColumnPaginationFilter(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:ColumnPaginationFilter)
  }

  public interface ColumnPrefixFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required bytes prefix = 1;
    /**
     * <code>required bytes prefix = 1;</code>
     */
    boolean hasPrefix();
    /**
     * <code>required bytes prefix = 1;</code>
     */
    com.google.protobuf.ByteString getPrefix();
  }
  /**
   * Protobuf type {@code ColumnPrefixFilter}
   */
  public static final class ColumnPrefixFilter extends
      com.google.protobuf.GeneratedMessage
      implements ColumnPrefixFilterOrBuilder {
    // Use ColumnPrefixFilter.newBuilder() to construct.
    private ColumnPrefixFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit path is used only for the shared defaultInstance.
    private ColumnPrefixFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    private static final ColumnPrefixFilter defaultInstance;
    public static ColumnPrefixFilter getDefaultInstance() {
      return defaultInstance;
    }

    public ColumnPrefixFilter getDefaultInstanceForType() {
      return defaultInstance;
    }

    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until EOF (tag 0).
    private ColumnPrefixFilter(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // 'default' precedes the field case; switch dispatch is by value.
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // field 1 (prefix), wire type 2 (length-delimited)
              bitField0_ |= 0x00000001;
              prefix_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.Builder.class);
    }

    // Shared parser singleton; delegates to the parsing constructor above.
    public static com.google.protobuf.Parser<ColumnPrefixFilter> PARSER =
        new com.google.protobuf.AbstractParser<ColumnPrefixFilter>() {
      public ColumnPrefixFilter parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ColumnPrefixFilter(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ColumnPrefixFilter> getParserForType() {
      return PARSER;
    }

    // Has-bit 0x1 = prefix.
    private int bitField0_;
    // required bytes prefix = 1;
    public static final int PREFIX_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString prefix_;
    /**
     * <code>required bytes prefix = 1;</code>
     */
    public boolean hasPrefix() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required bytes prefix = 1;</code>
     */
getPrefix()1809 public com.google.protobuf.ByteString getPrefix() { 1810 return prefix_; 1811 } 1812 initFields()1813 private void initFields() { 1814 prefix_ = com.google.protobuf.ByteString.EMPTY; 1815 } 1816 private byte memoizedIsInitialized = -1; isInitialized()1817 public final boolean isInitialized() { 1818 byte isInitialized = memoizedIsInitialized; 1819 if (isInitialized != -1) return isInitialized == 1; 1820 1821 if (!hasPrefix()) { 1822 memoizedIsInitialized = 0; 1823 return false; 1824 } 1825 memoizedIsInitialized = 1; 1826 return true; 1827 } 1828 writeTo(com.google.protobuf.CodedOutputStream output)1829 public void writeTo(com.google.protobuf.CodedOutputStream output) 1830 throws java.io.IOException { 1831 getSerializedSize(); 1832 if (((bitField0_ & 0x00000001) == 0x00000001)) { 1833 output.writeBytes(1, prefix_); 1834 } 1835 getUnknownFields().writeTo(output); 1836 } 1837 1838 private int memoizedSerializedSize = -1; getSerializedSize()1839 public int getSerializedSize() { 1840 int size = memoizedSerializedSize; 1841 if (size != -1) return size; 1842 1843 size = 0; 1844 if (((bitField0_ & 0x00000001) == 0x00000001)) { 1845 size += com.google.protobuf.CodedOutputStream 1846 .computeBytesSize(1, prefix_); 1847 } 1848 size += getUnknownFields().getSerializedSize(); 1849 memoizedSerializedSize = size; 1850 return size; 1851 } 1852 1853 private static final long serialVersionUID = 0L; 1854 @java.lang.Override writeReplace()1855 protected java.lang.Object writeReplace() 1856 throws java.io.ObjectStreamException { 1857 return super.writeReplace(); 1858 } 1859 1860 @java.lang.Override equals(final java.lang.Object obj)1861 public boolean equals(final java.lang.Object obj) { 1862 if (obj == this) { 1863 return true; 1864 } 1865 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter)) { 1866 return super.equals(obj); 1867 } 1868 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter other = 
(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter) obj; 1869 1870 boolean result = true; 1871 result = result && (hasPrefix() == other.hasPrefix()); 1872 if (hasPrefix()) { 1873 result = result && getPrefix() 1874 .equals(other.getPrefix()); 1875 } 1876 result = result && 1877 getUnknownFields().equals(other.getUnknownFields()); 1878 return result; 1879 } 1880 1881 private int memoizedHashCode = 0; 1882 @java.lang.Override hashCode()1883 public int hashCode() { 1884 if (memoizedHashCode != 0) { 1885 return memoizedHashCode; 1886 } 1887 int hash = 41; 1888 hash = (19 * hash) + getDescriptorForType().hashCode(); 1889 if (hasPrefix()) { 1890 hash = (37 * hash) + PREFIX_FIELD_NUMBER; 1891 hash = (53 * hash) + getPrefix().hashCode(); 1892 } 1893 hash = (29 * hash) + getUnknownFields().hashCode(); 1894 memoizedHashCode = hash; 1895 return hash; 1896 } 1897 parseFrom( com.google.protobuf.ByteString data)1898 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom( 1899 com.google.protobuf.ByteString data) 1900 throws com.google.protobuf.InvalidProtocolBufferException { 1901 return PARSER.parseFrom(data); 1902 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1903 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom( 1904 com.google.protobuf.ByteString data, 1905 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1906 throws com.google.protobuf.InvalidProtocolBufferException { 1907 return PARSER.parseFrom(data, extensionRegistry); 1908 } parseFrom(byte[] data)1909 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(byte[] data) 1910 throws com.google.protobuf.InvalidProtocolBufferException { 1911 return PARSER.parseFrom(data); 1912 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1913 public static 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom( 1914 byte[] data, 1915 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1916 throws com.google.protobuf.InvalidProtocolBufferException { 1917 return PARSER.parseFrom(data, extensionRegistry); 1918 } parseFrom(java.io.InputStream input)1919 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(java.io.InputStream input) 1920 throws java.io.IOException { 1921 return PARSER.parseFrom(input); 1922 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1923 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom( 1924 java.io.InputStream input, 1925 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1926 throws java.io.IOException { 1927 return PARSER.parseFrom(input, extensionRegistry); 1928 } parseDelimitedFrom(java.io.InputStream input)1929 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseDelimitedFrom(java.io.InputStream input) 1930 throws java.io.IOException { 1931 return PARSER.parseDelimitedFrom(input); 1932 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1933 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseDelimitedFrom( 1934 java.io.InputStream input, 1935 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 1936 throws java.io.IOException { 1937 return PARSER.parseDelimitedFrom(input, extensionRegistry); 1938 } parseFrom( com.google.protobuf.CodedInputStream input)1939 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom( 1940 com.google.protobuf.CodedInputStream input) 1941 throws java.io.IOException { 1942 return PARSER.parseFrom(input); 1943 } parseFrom( com.google.protobuf.CodedInputStream input, 
// Final static parse overload, builder factories, nested Builder, and the
    // static initializer for ColumnPrefixFilter (generated by protoc).
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code ColumnPrefixFilter}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilterOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No nested-message fields, so there is nothing to eagerly initialize.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets `prefix` to its default and clears its presence bit.
      public Builder clear() {
        super.clear();
        prefix_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnPrefixFilter_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.getDefaultInstance();
      }

      // build() enforces required-field initialization; buildPartial() does not.
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies builder state (field value + presence bit) into a new message.
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.prefix_ = prefix_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merges only the fields `other` actually has set, plus its unknown fields.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter.getDefaultInstance()) return this;
        if (other.hasPrefix()) {
          setPrefix(other.getPrefix());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasPrefix()) {
          // required field `prefix` is missing
          return false;
        }
        return true;
      }

      // Parses from the stream and merges the result; on parse failure the
      // partially-parsed message is still merged before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnPrefixFilter) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required bytes prefix = 1;
      private com.google.protobuf.ByteString prefix_ = com.google.protobuf.ByteString.EMPTY;
      /**
       * <code>required bytes prefix = 1;</code>
       */
      public boolean hasPrefix() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required bytes prefix = 1;</code>
       */
      public com.google.protobuf.ByteString getPrefix() {
        return prefix_;
      }
      /**
       * <code>required bytes prefix = 1;</code>
       */
      public Builder setPrefix(com.google.protobuf.ByteString value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        prefix_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required bytes prefix = 1;</code>
       */
      public Builder clearPrefix() {
        bitField0_ = (bitField0_ & ~0x00000001);
        prefix_ = getDefaultInstance().getPrefix();
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:ColumnPrefixFilter)
    }

    // Creates and initializes the singleton default instance.
    static {
      defaultInstance = new ColumnPrefixFilter(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:ColumnPrefixFilter)
  }

  /** Read-only accessor interface shared by ColumnRangeFilter and its Builder. */
  public interface ColumnRangeFilterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional bytes min_column = 1;
    /**
     *
<code>optional bytes min_column = 1;</code>
     */
    boolean hasMinColumn();
    /**
     * <code>optional bytes min_column = 1;</code>
     */
    com.google.protobuf.ByteString getMinColumn();

    // optional bool min_column_inclusive = 2;
    /**
     * <code>optional bool min_column_inclusive = 2;</code>
     */
    boolean hasMinColumnInclusive();
    /**
     * <code>optional bool min_column_inclusive = 2;</code>
     */
    boolean getMinColumnInclusive();

    // optional bytes max_column = 3;
    /**
     * <code>optional bytes max_column = 3;</code>
     */
    boolean hasMaxColumn();
    /**
     * <code>optional bytes max_column = 3;</code>
     */
    com.google.protobuf.ByteString getMaxColumn();

    // optional bool max_column_inclusive = 4;
    /**
     * <code>optional bool max_column_inclusive = 4;</code>
     */
    boolean hasMaxColumnInclusive();
    /**
     * <code>optional bool max_column_inclusive = 4;</code>
     */
    boolean getMaxColumnInclusive();
  }
  /**
   * Protobuf type {@code ColumnRangeFilter}
   */
  public static final class ColumnRangeFilter extends
      com.google.protobuf.GeneratedMessage
      implements ColumnRangeFilterOrBuilder {
    // Use ColumnRangeFilter.newBuilder() to construct.
// --- ColumnRangeFilter: generated protobuf message (protoc, Filter.proto; DO NOT EDIT by hand) ---

    // Builder-based constructor: adopts the builder's unknown-field set.
    private ColumnRangeFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Used only to create the singleton default instance.
    private ColumnRangeFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance; created and initialized in a static block
    // (outside the visible portion of this chunk).
    private static final ColumnRangeFilter defaultInstance;
    public static ColumnRangeFilter getDefaultInstance() {
      return defaultInstance;
    }

    public ColumnRangeFilter getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that arrived on the wire but are not defined in Filter.proto.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-parsing constructor. Tags: 10 = min_column (bytes), 16 =
    // min_column_inclusive (bool), 26 = max_column (bytes), 32 =
    // max_column_inclusive (bool); tag 0 ends the message, anything else is
    // preserved via parseUnknownField().
    private ColumnRangeFilter(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0; // unused here; protoc emits it for repeated-field bookkeeping
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              bitField0_ |= 0x00000001;
              minColumn_ = input.readBytes();
              break;
            }
            case 16: {
              bitField0_ |= 0x00000002;
              minColumnInclusive_ = input.readBool();
              break;
            }
            case 26: {
              bitField0_ |= 0x00000004;
              maxColumn_ = input.readBytes();
              break;
            }
            case 32: {
              bitField0_ |= 0x00000008;
              maxColumnInclusive_ = input.readBool();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        // NOTE(review): only e.getMessage() is kept; the IOException cause is dropped
        // (protobuf 2.x generated style).
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.Builder.class);
    }

    // Parser used by every parseFrom()/parseDelimitedFrom() entry point below.
    public static com.google.protobuf.Parser<ColumnRangeFilter> PARSER =
        new com.google.protobuf.AbstractParser<ColumnRangeFilter>() {
      public ColumnRangeFilter parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ColumnRangeFilter(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ColumnRangeFilter> getParserForType() {
      return PARSER;
    }

    // Presence bitmask: bits 0-3 track min_column, min_column_inclusive,
    // max_column, max_column_inclusive respectively.
    private int bitField0_;
    // optional bytes min_column = 1;
    public static final int MIN_COLUMN_FIELD_NUMBER = 1;
    private com.google.protobuf.ByteString minColumn_;
    /**
     * <code>optional bytes min_column = 1;</code>
     */
    public boolean hasMinColumn() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional bytes min_column = 1;</code>
     */
    public com.google.protobuf.ByteString getMinColumn() {
      return minColumn_;
    }

    // optional bool min_column_inclusive = 2;
    public static final int MIN_COLUMN_INCLUSIVE_FIELD_NUMBER = 2;
    private boolean minColumnInclusive_;
    /**
     * <code>optional bool min_column_inclusive = 2;</code>
     */
    public boolean hasMinColumnInclusive() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional bool min_column_inclusive = 2;</code>
     */
    public boolean getMinColumnInclusive() {
      return minColumnInclusive_;
    }

    // optional bytes max_column = 3;
    public static final int MAX_COLUMN_FIELD_NUMBER = 3;
    private com.google.protobuf.ByteString maxColumn_;
    /**
     * <code>optional bytes max_column = 3;</code>
     */
    public boolean hasMaxColumn() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional bytes max_column = 3;</code>
     */
    public com.google.protobuf.ByteString getMaxColumn() {
      return maxColumn_;
    }

    // optional bool max_column_inclusive = 4;
    public static final int MAX_COLUMN_INCLUSIVE_FIELD_NUMBER = 4;
    private boolean maxColumnInclusive_;
    /**
     * <code>optional bool max_column_inclusive = 4;</code>
     */
    public boolean hasMaxColumnInclusive() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional bool max_column_inclusive = 4;</code>
     */
    public boolean getMaxColumnInclusive() {
      return maxColumnInclusive_;
    }

    // Resets all fields to their proto defaults.
    private void initFields() {
      minColumn_ = com.google.protobuf.ByteString.EMPTY;
      minColumnInclusive_ = false;
      maxColumn_ = com.google.protobuf.ByteString.EMPTY;
      maxColumnInclusive_ = false;
    }
    // Memoized isInitialized() result: -1 unknown, 0 false, 1 true.
    // All fields are optional, so the message is always initialized.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      memoizedIsInitialized = 1;
      return true;
    }

    // Serializes the set fields (then any unknown fields) to the output stream.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
        throws java.io.IOException {
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, minColumn_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBool(2, minColumnInclusive_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeBytes(3, maxColumn_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeBool(4, maxColumnInclusive_);
      }
      getUnknownFields().writeTo(output);
    }

    // Memoized serialized size (-1 means "not yet computed").
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(1, minColumn_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
            .computeBoolSize(2, minColumnInclusive_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
            .computeBytesSize(3, maxColumn_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
            .computeBoolSize(4, maxColumnInclusive_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    // Value equality over all four fields (presence + content) and unknown fields.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
        return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter) obj;

      boolean result = true;
      result = result && (hasMinColumn() == other.hasMinColumn());
      if (hasMinColumn()) {
        result = result && getMinColumn()
            .equals(other.getMinColumn());
      }
      result = result && (hasMinColumnInclusive() == other.hasMinColumnInclusive());
      if (hasMinColumnInclusive()) {
        result = result && (getMinColumnInclusive()
            == other.getMinColumnInclusive());
      }
      result = result && (hasMaxColumn() == other.hasMaxColumn());
      if (hasMaxColumn()) {
        result = result && getMaxColumn()
            .equals(other.getMaxColumn());
      }
      result = result && (hasMaxColumnInclusive() == other.hasMaxColumnInclusive());
      if (hasMaxColumnInclusive()) {
        result = result && (getMaxColumnInclusive()
            == other.getMaxColumnInclusive());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Memoized hash code (0 means "not yet computed"), consistent with equals().
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasMinColumn()) {
        hash = (37 * hash) + MIN_COLUMN_FIELD_NUMBER;
        hash = (53 * hash) + getMinColumn().hashCode();
      }
      if (hasMinColumnInclusive()) {
        hash = (37 * hash) + MIN_COLUMN_INCLUSIVE_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getMinColumnInclusive());
      }
      if (hasMaxColumn()) {
        hash = (37 * hash) + MAX_COLUMN_FIELD_NUMBER;
        hash = (53 * hash) + getMaxColumn().hashCode();
      }
      if (hasMaxColumnInclusive()) {
        hash = (37 * hash) + MAX_COLUMN_INCLUSIVE_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getMaxColumnInclusive());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Static parse entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    // Builder factory methods.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code ColumnRangeFilter}
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilterOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No nested-message fields, so there is nothing to eagerly initialize.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets all four fields to their defaults and clears their presence bits.
      public Builder clear() {
        super.clear();
        minColumn_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000001);
        minColumnInclusive_ = false;
        bitField0_ = (bitField0_ & ~0x00000002);
        maxColumn_ = com.google.protobuf.ByteString.EMPTY;
        bitField0_ = (bitField0_ & ~0x00000004);
        maxColumnInclusive_ = false;
        bitField0_ = (bitField0_ & ~0x00000008);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_ColumnRangeFilter_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.getDefaultInstance();
      }

      // build() enforces initialization; all fields here are optional.
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter build() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies builder state (field values + presence bits) into a new message.
      public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.minColumn_ = minColumn_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.minColumnInclusive_ = minColumnInclusive_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.maxColumn_ = maxColumn_;
        if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
          to_bitField0_ |= 0x00000008;
        }
        result.maxColumnInclusive_ = maxColumnInclusive_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merges only the fields `other` has set. (Method continues past the end
      // of this chunk.)
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter.getDefaultInstance()) return this;
2655 if (other.hasMinColumn()) { 2656 setMinColumn(other.getMinColumn()); 2657 } 2658 if (other.hasMinColumnInclusive()) { 2659 setMinColumnInclusive(other.getMinColumnInclusive()); 2660 } 2661 if (other.hasMaxColumn()) { 2662 setMaxColumn(other.getMaxColumn()); 2663 } 2664 if (other.hasMaxColumnInclusive()) { 2665 setMaxColumnInclusive(other.getMaxColumnInclusive()); 2666 } 2667 this.mergeUnknownFields(other.getUnknownFields()); 2668 return this; 2669 } 2670 isInitialized()2671 public final boolean isInitialized() { 2672 return true; 2673 } 2674 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2675 public Builder mergeFrom( 2676 com.google.protobuf.CodedInputStream input, 2677 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2678 throws java.io.IOException { 2679 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter parsedMessage = null; 2680 try { 2681 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 2682 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 2683 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.ColumnRangeFilter) e.getUnfinishedMessage(); 2684 throw e; 2685 } finally { 2686 if (parsedMessage != null) { 2687 mergeFrom(parsedMessage); 2688 } 2689 } 2690 return this; 2691 } 2692 private int bitField0_; 2693 2694 // optional bytes min_column = 1; 2695 private com.google.protobuf.ByteString minColumn_ = com.google.protobuf.ByteString.EMPTY; 2696 /** 2697 * <code>optional bytes min_column = 1;</code> 2698 */ hasMinColumn()2699 public boolean hasMinColumn() { 2700 return ((bitField0_ & 0x00000001) == 0x00000001); 2701 } 2702 /** 2703 * <code>optional bytes min_column = 1;</code> 2704 */ getMinColumn()2705 public com.google.protobuf.ByteString getMinColumn() { 2706 return minColumn_; 2707 } 2708 /** 2709 * <code>optional bytes min_column = 1;</code> 2710 */ setMinColumn(com.google.protobuf.ByteString 
value)2711 public Builder setMinColumn(com.google.protobuf.ByteString value) { 2712 if (value == null) { 2713 throw new NullPointerException(); 2714 } 2715 bitField0_ |= 0x00000001; 2716 minColumn_ = value; 2717 onChanged(); 2718 return this; 2719 } 2720 /** 2721 * <code>optional bytes min_column = 1;</code> 2722 */ clearMinColumn()2723 public Builder clearMinColumn() { 2724 bitField0_ = (bitField0_ & ~0x00000001); 2725 minColumn_ = getDefaultInstance().getMinColumn(); 2726 onChanged(); 2727 return this; 2728 } 2729 2730 // optional bool min_column_inclusive = 2; 2731 private boolean minColumnInclusive_ ; 2732 /** 2733 * <code>optional bool min_column_inclusive = 2;</code> 2734 */ hasMinColumnInclusive()2735 public boolean hasMinColumnInclusive() { 2736 return ((bitField0_ & 0x00000002) == 0x00000002); 2737 } 2738 /** 2739 * <code>optional bool min_column_inclusive = 2;</code> 2740 */ getMinColumnInclusive()2741 public boolean getMinColumnInclusive() { 2742 return minColumnInclusive_; 2743 } 2744 /** 2745 * <code>optional bool min_column_inclusive = 2;</code> 2746 */ setMinColumnInclusive(boolean value)2747 public Builder setMinColumnInclusive(boolean value) { 2748 bitField0_ |= 0x00000002; 2749 minColumnInclusive_ = value; 2750 onChanged(); 2751 return this; 2752 } 2753 /** 2754 * <code>optional bool min_column_inclusive = 2;</code> 2755 */ clearMinColumnInclusive()2756 public Builder clearMinColumnInclusive() { 2757 bitField0_ = (bitField0_ & ~0x00000002); 2758 minColumnInclusive_ = false; 2759 onChanged(); 2760 return this; 2761 } 2762 2763 // optional bytes max_column = 3; 2764 private com.google.protobuf.ByteString maxColumn_ = com.google.protobuf.ByteString.EMPTY; 2765 /** 2766 * <code>optional bytes max_column = 3;</code> 2767 */ hasMaxColumn()2768 public boolean hasMaxColumn() { 2769 return ((bitField0_ & 0x00000004) == 0x00000004); 2770 } 2771 /** 2772 * <code>optional bytes max_column = 3;</code> 2773 */ getMaxColumn()2774 public 
com.google.protobuf.ByteString getMaxColumn() { 2775 return maxColumn_; 2776 } 2777 /** 2778 * <code>optional bytes max_column = 3;</code> 2779 */ setMaxColumn(com.google.protobuf.ByteString value)2780 public Builder setMaxColumn(com.google.protobuf.ByteString value) { 2781 if (value == null) { 2782 throw new NullPointerException(); 2783 } 2784 bitField0_ |= 0x00000004; 2785 maxColumn_ = value; 2786 onChanged(); 2787 return this; 2788 } 2789 /** 2790 * <code>optional bytes max_column = 3;</code> 2791 */ clearMaxColumn()2792 public Builder clearMaxColumn() { 2793 bitField0_ = (bitField0_ & ~0x00000004); 2794 maxColumn_ = getDefaultInstance().getMaxColumn(); 2795 onChanged(); 2796 return this; 2797 } 2798 2799 // optional bool max_column_inclusive = 4; 2800 private boolean maxColumnInclusive_ ; 2801 /** 2802 * <code>optional bool max_column_inclusive = 4;</code> 2803 */ hasMaxColumnInclusive()2804 public boolean hasMaxColumnInclusive() { 2805 return ((bitField0_ & 0x00000008) == 0x00000008); 2806 } 2807 /** 2808 * <code>optional bool max_column_inclusive = 4;</code> 2809 */ getMaxColumnInclusive()2810 public boolean getMaxColumnInclusive() { 2811 return maxColumnInclusive_; 2812 } 2813 /** 2814 * <code>optional bool max_column_inclusive = 4;</code> 2815 */ setMaxColumnInclusive(boolean value)2816 public Builder setMaxColumnInclusive(boolean value) { 2817 bitField0_ |= 0x00000008; 2818 maxColumnInclusive_ = value; 2819 onChanged(); 2820 return this; 2821 } 2822 /** 2823 * <code>optional bool max_column_inclusive = 4;</code> 2824 */ clearMaxColumnInclusive()2825 public Builder clearMaxColumnInclusive() { 2826 bitField0_ = (bitField0_ & ~0x00000008); 2827 maxColumnInclusive_ = false; 2828 onChanged(); 2829 return this; 2830 } 2831 2832 // @@protoc_insertion_point(builder_scope:ColumnRangeFilter) 2833 } 2834 2835 static { 2836 defaultInstance = new ColumnRangeFilter(true); defaultInstance.initFields()2837 defaultInstance.initFields(); 2838 } 2839 2840 // 
@@protoc_insertion_point(class_scope:ColumnRangeFilter) 2841 } 2842 2843 public interface CompareFilterOrBuilder 2844 extends com.google.protobuf.MessageOrBuilder { 2845 2846 // required .CompareType compare_op = 1; 2847 /** 2848 * <code>required .CompareType compare_op = 1;</code> 2849 */ hasCompareOp()2850 boolean hasCompareOp(); 2851 /** 2852 * <code>required .CompareType compare_op = 1;</code> 2853 */ getCompareOp()2854 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp(); 2855 2856 // optional .Comparator comparator = 2; 2857 /** 2858 * <code>optional .Comparator comparator = 2;</code> 2859 */ hasComparator()2860 boolean hasComparator(); 2861 /** 2862 * <code>optional .Comparator comparator = 2;</code> 2863 */ getComparator()2864 org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator(); 2865 /** 2866 * <code>optional .Comparator comparator = 2;</code> 2867 */ getComparatorOrBuilder()2868 org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder(); 2869 } 2870 /** 2871 * Protobuf type {@code CompareFilter} 2872 */ 2873 public static final class CompareFilter extends 2874 com.google.protobuf.GeneratedMessage 2875 implements CompareFilterOrBuilder { 2876 // Use CompareFilter.newBuilder() to construct. 
CompareFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder)2877 private CompareFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 2878 super(builder); 2879 this.unknownFields = builder.getUnknownFields(); 2880 } CompareFilter(boolean noInit)2881 private CompareFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 2882 2883 private static final CompareFilter defaultInstance; getDefaultInstance()2884 public static CompareFilter getDefaultInstance() { 2885 return defaultInstance; 2886 } 2887 getDefaultInstanceForType()2888 public CompareFilter getDefaultInstanceForType() { 2889 return defaultInstance; 2890 } 2891 2892 private final com.google.protobuf.UnknownFieldSet unknownFields; 2893 @java.lang.Override 2894 public final com.google.protobuf.UnknownFieldSet getUnknownFields()2895 getUnknownFields() { 2896 return this.unknownFields; 2897 } CompareFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2898 private CompareFilter( 2899 com.google.protobuf.CodedInputStream input, 2900 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2901 throws com.google.protobuf.InvalidProtocolBufferException { 2902 initFields(); 2903 int mutable_bitField0_ = 0; 2904 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 2905 com.google.protobuf.UnknownFieldSet.newBuilder(); 2906 try { 2907 boolean done = false; 2908 while (!done) { 2909 int tag = input.readTag(); 2910 switch (tag) { 2911 case 0: 2912 done = true; 2913 break; 2914 default: { 2915 if (!parseUnknownField(input, unknownFields, 2916 extensionRegistry, tag)) { 2917 done = true; 2918 } 2919 break; 2920 } 2921 case 8: { 2922 int rawValue = input.readEnum(); 2923 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.valueOf(rawValue); 2924 if (value == null) { 2925 
unknownFields.mergeVarintField(1, rawValue); 2926 } else { 2927 bitField0_ |= 0x00000001; 2928 compareOp_ = value; 2929 } 2930 break; 2931 } 2932 case 18: { 2933 org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder subBuilder = null; 2934 if (((bitField0_ & 0x00000002) == 0x00000002)) { 2935 subBuilder = comparator_.toBuilder(); 2936 } 2937 comparator_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.PARSER, extensionRegistry); 2938 if (subBuilder != null) { 2939 subBuilder.mergeFrom(comparator_); 2940 comparator_ = subBuilder.buildPartial(); 2941 } 2942 bitField0_ |= 0x00000002; 2943 break; 2944 } 2945 } 2946 } 2947 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 2948 throw e.setUnfinishedMessage(this); 2949 } catch (java.io.IOException e) { 2950 throw new com.google.protobuf.InvalidProtocolBufferException( 2951 e.getMessage()).setUnfinishedMessage(this); 2952 } finally { 2953 this.unknownFields = unknownFields.build(); 2954 makeExtensionsImmutable(); 2955 } 2956 } 2957 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()2958 getDescriptor() { 2959 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_descriptor; 2960 } 2961 2962 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()2963 internalGetFieldAccessorTable() { 2964 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_fieldAccessorTable 2965 .ensureFieldAccessorsInitialized( 2966 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder.class); 2967 } 2968 2969 public static com.google.protobuf.Parser<CompareFilter> PARSER = 2970 new com.google.protobuf.AbstractParser<CompareFilter>() { 2971 public CompareFilter parsePartialFrom( 2972 com.google.protobuf.CodedInputStream input, 
2973 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 2974 throws com.google.protobuf.InvalidProtocolBufferException { 2975 return new CompareFilter(input, extensionRegistry); 2976 } 2977 }; 2978 2979 @java.lang.Override getParserForType()2980 public com.google.protobuf.Parser<CompareFilter> getParserForType() { 2981 return PARSER; 2982 } 2983 2984 private int bitField0_; 2985 // required .CompareType compare_op = 1; 2986 public static final int COMPARE_OP_FIELD_NUMBER = 1; 2987 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_; 2988 /** 2989 * <code>required .CompareType compare_op = 1;</code> 2990 */ hasCompareOp()2991 public boolean hasCompareOp() { 2992 return ((bitField0_ & 0x00000001) == 0x00000001); 2993 } 2994 /** 2995 * <code>required .CompareType compare_op = 1;</code> 2996 */ getCompareOp()2997 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() { 2998 return compareOp_; 2999 } 3000 3001 // optional .Comparator comparator = 2; 3002 public static final int COMPARATOR_FIELD_NUMBER = 2; 3003 private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_; 3004 /** 3005 * <code>optional .Comparator comparator = 2;</code> 3006 */ hasComparator()3007 public boolean hasComparator() { 3008 return ((bitField0_ & 0x00000002) == 0x00000002); 3009 } 3010 /** 3011 * <code>optional .Comparator comparator = 2;</code> 3012 */ getComparator()3013 public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() { 3014 return comparator_; 3015 } 3016 /** 3017 * <code>optional .Comparator comparator = 2;</code> 3018 */ getComparatorOrBuilder()3019 public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { 3020 return comparator_; 3021 } 3022 initFields()3023 private void initFields() { 3024 compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; 3025 
comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); 3026 } 3027 private byte memoizedIsInitialized = -1; isInitialized()3028 public final boolean isInitialized() { 3029 byte isInitialized = memoizedIsInitialized; 3030 if (isInitialized != -1) return isInitialized == 1; 3031 3032 if (!hasCompareOp()) { 3033 memoizedIsInitialized = 0; 3034 return false; 3035 } 3036 if (hasComparator()) { 3037 if (!getComparator().isInitialized()) { 3038 memoizedIsInitialized = 0; 3039 return false; 3040 } 3041 } 3042 memoizedIsInitialized = 1; 3043 return true; 3044 } 3045 writeTo(com.google.protobuf.CodedOutputStream output)3046 public void writeTo(com.google.protobuf.CodedOutputStream output) 3047 throws java.io.IOException { 3048 getSerializedSize(); 3049 if (((bitField0_ & 0x00000001) == 0x00000001)) { 3050 output.writeEnum(1, compareOp_.getNumber()); 3051 } 3052 if (((bitField0_ & 0x00000002) == 0x00000002)) { 3053 output.writeMessage(2, comparator_); 3054 } 3055 getUnknownFields().writeTo(output); 3056 } 3057 3058 private int memoizedSerializedSize = -1; getSerializedSize()3059 public int getSerializedSize() { 3060 int size = memoizedSerializedSize; 3061 if (size != -1) return size; 3062 3063 size = 0; 3064 if (((bitField0_ & 0x00000001) == 0x00000001)) { 3065 size += com.google.protobuf.CodedOutputStream 3066 .computeEnumSize(1, compareOp_.getNumber()); 3067 } 3068 if (((bitField0_ & 0x00000002) == 0x00000002)) { 3069 size += com.google.protobuf.CodedOutputStream 3070 .computeMessageSize(2, comparator_); 3071 } 3072 size += getUnknownFields().getSerializedSize(); 3073 memoizedSerializedSize = size; 3074 return size; 3075 } 3076 3077 private static final long serialVersionUID = 0L; 3078 @java.lang.Override writeReplace()3079 protected java.lang.Object writeReplace() 3080 throws java.io.ObjectStreamException { 3081 return super.writeReplace(); 3082 } 3083 3084 @java.lang.Override equals(final java.lang.Object obj)3085 public 
boolean equals(final java.lang.Object obj) { 3086 if (obj == this) { 3087 return true; 3088 } 3089 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter)) { 3090 return super.equals(obj); 3091 } 3092 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter) obj; 3093 3094 boolean result = true; 3095 result = result && (hasCompareOp() == other.hasCompareOp()); 3096 if (hasCompareOp()) { 3097 result = result && 3098 (getCompareOp() == other.getCompareOp()); 3099 } 3100 result = result && (hasComparator() == other.hasComparator()); 3101 if (hasComparator()) { 3102 result = result && getComparator() 3103 .equals(other.getComparator()); 3104 } 3105 result = result && 3106 getUnknownFields().equals(other.getUnknownFields()); 3107 return result; 3108 } 3109 3110 private int memoizedHashCode = 0; 3111 @java.lang.Override hashCode()3112 public int hashCode() { 3113 if (memoizedHashCode != 0) { 3114 return memoizedHashCode; 3115 } 3116 int hash = 41; 3117 hash = (19 * hash) + getDescriptorForType().hashCode(); 3118 if (hasCompareOp()) { 3119 hash = (37 * hash) + COMPARE_OP_FIELD_NUMBER; 3120 hash = (53 * hash) + hashEnum(getCompareOp()); 3121 } 3122 if (hasComparator()) { 3123 hash = (37 * hash) + COMPARATOR_FIELD_NUMBER; 3124 hash = (53 * hash) + getComparator().hashCode(); 3125 } 3126 hash = (29 * hash) + getUnknownFields().hashCode(); 3127 memoizedHashCode = hash; 3128 return hash; 3129 } 3130 parseFrom( com.google.protobuf.ByteString data)3131 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom( 3132 com.google.protobuf.ByteString data) 3133 throws com.google.protobuf.InvalidProtocolBufferException { 3134 return PARSER.parseFrom(data); 3135 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3136 public static 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom( 3137 com.google.protobuf.ByteString data, 3138 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3139 throws com.google.protobuf.InvalidProtocolBufferException { 3140 return PARSER.parseFrom(data, extensionRegistry); 3141 } parseFrom(byte[] data)3142 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(byte[] data) 3143 throws com.google.protobuf.InvalidProtocolBufferException { 3144 return PARSER.parseFrom(data); 3145 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3146 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom( 3147 byte[] data, 3148 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3149 throws com.google.protobuf.InvalidProtocolBufferException { 3150 return PARSER.parseFrom(data, extensionRegistry); 3151 } parseFrom(java.io.InputStream input)3152 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom(java.io.InputStream input) 3153 throws java.io.IOException { 3154 return PARSER.parseFrom(input); 3155 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3156 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom( 3157 java.io.InputStream input, 3158 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3159 throws java.io.IOException { 3160 return PARSER.parseFrom(input, extensionRegistry); 3161 } parseDelimitedFrom(java.io.InputStream input)3162 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseDelimitedFrom(java.io.InputStream input) 3163 throws java.io.IOException { 3164 return PARSER.parseDelimitedFrom(input); 3165 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3166 public static 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseDelimitedFrom( 3167 java.io.InputStream input, 3168 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3169 throws java.io.IOException { 3170 return PARSER.parseDelimitedFrom(input, extensionRegistry); 3171 } parseFrom( com.google.protobuf.CodedInputStream input)3172 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom( 3173 com.google.protobuf.CodedInputStream input) 3174 throws java.io.IOException { 3175 return PARSER.parseFrom(input); 3176 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3177 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parseFrom( 3178 com.google.protobuf.CodedInputStream input, 3179 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3180 throws java.io.IOException { 3181 return PARSER.parseFrom(input, extensionRegistry); 3182 } 3183 newBuilder()3184 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()3185 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter prototype)3186 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter prototype) { 3187 return newBuilder().mergeFrom(prototype); 3188 } toBuilder()3189 public Builder toBuilder() { return newBuilder(this); } 3190 3191 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)3192 protected Builder newBuilderForType( 3193 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 3194 Builder builder = new Builder(parent); 3195 return builder; 3196 } 3197 /** 3198 * Protobuf type {@code CompareFilter} 3199 */ 3200 public static final class Builder extends 3201 com.google.protobuf.GeneratedMessage.Builder<Builder> 3202 implements 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder { 3203 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()3204 getDescriptor() { 3205 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_descriptor; 3206 } 3207 3208 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()3209 internalGetFieldAccessorTable() { 3210 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_fieldAccessorTable 3211 .ensureFieldAccessorsInitialized( 3212 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder.class); 3213 } 3214 3215 // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder() Builder()3216 private Builder() { 3217 maybeForceBuilderInitialization(); 3218 } 3219 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)3220 private Builder( 3221 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 3222 super(parent); 3223 maybeForceBuilderInitialization(); 3224 } maybeForceBuilderInitialization()3225 private void maybeForceBuilderInitialization() { 3226 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 3227 getComparatorFieldBuilder(); 3228 } 3229 } create()3230 private static Builder create() { 3231 return new Builder(); 3232 } 3233 clear()3234 public Builder clear() { 3235 super.clear(); 3236 compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; 3237 bitField0_ = (bitField0_ & ~0x00000001); 3238 if (comparatorBuilder_ == null) { 3239 comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); 3240 } else { 3241 comparatorBuilder_.clear(); 3242 } 3243 bitField0_ = (bitField0_ & ~0x00000002); 3244 return this; 3245 } 3246 clone()3247 public Builder 
clone() { 3248 return create().mergeFrom(buildPartial()); 3249 } 3250 3251 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()3252 getDescriptorForType() { 3253 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_CompareFilter_descriptor; 3254 } 3255 getDefaultInstanceForType()3256 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getDefaultInstanceForType() { 3257 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); 3258 } 3259 build()3260 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter build() { 3261 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter result = buildPartial(); 3262 if (!result.isInitialized()) { 3263 throw newUninitializedMessageException(result); 3264 } 3265 return result; 3266 } 3267 buildPartial()3268 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter buildPartial() { 3269 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter(this); 3270 int from_bitField0_ = bitField0_; 3271 int to_bitField0_ = 0; 3272 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 3273 to_bitField0_ |= 0x00000001; 3274 } 3275 result.compareOp_ = compareOp_; 3276 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 3277 to_bitField0_ |= 0x00000002; 3278 } 3279 if (comparatorBuilder_ == null) { 3280 result.comparator_ = comparator_; 3281 } else { 3282 result.comparator_ = comparatorBuilder_.build(); 3283 } 3284 result.bitField0_ = to_bitField0_; 3285 onBuilt(); 3286 return result; 3287 } 3288 mergeFrom(com.google.protobuf.Message other)3289 public Builder mergeFrom(com.google.protobuf.Message other) { 3290 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter) { 3291 return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter)other); 3292 } else { 3293 super.mergeFrom(other); 3294 return this; 3295 } 3296 } 3297 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter other)3298 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter other) { 3299 if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) return this; 3300 if (other.hasCompareOp()) { 3301 setCompareOp(other.getCompareOp()); 3302 } 3303 if (other.hasComparator()) { 3304 mergeComparator(other.getComparator()); 3305 } 3306 this.mergeUnknownFields(other.getUnknownFields()); 3307 return this; 3308 } 3309 isInitialized()3310 public final boolean isInitialized() { 3311 if (!hasCompareOp()) { 3312 3313 return false; 3314 } 3315 if (hasComparator()) { 3316 if (!getComparator().isInitialized()) { 3317 3318 return false; 3319 } 3320 } 3321 return true; 3322 } 3323 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3324 public Builder mergeFrom( 3325 com.google.protobuf.CodedInputStream input, 3326 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3327 throws java.io.IOException { 3328 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter parsedMessage = null; 3329 try { 3330 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 3331 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 3332 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter) e.getUnfinishedMessage(); 3333 throw e; 3334 } finally { 3335 if (parsedMessage != null) { 3336 mergeFrom(parsedMessage); 3337 } 3338 } 3339 return this; 3340 } 3341 private int bitField0_; 3342 3343 // required .CompareType compare_op = 1; 3344 private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType compareOp_ = 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; 3345 /** 3346 * <code>required .CompareType compare_op = 1;</code> 3347 */ hasCompareOp()3348 public boolean hasCompareOp() { 3349 return ((bitField0_ & 0x00000001) == 0x00000001); 3350 } 3351 /** 3352 * <code>required .CompareType compare_op = 1;</code> 3353 */ getCompareOp()3354 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType getCompareOp() { 3355 return compareOp_; 3356 } 3357 /** 3358 * <code>required .CompareType compare_op = 1;</code> 3359 */ setCompareOp(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value)3360 public Builder setCompareOp(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType value) { 3361 if (value == null) { 3362 throw new NullPointerException(); 3363 } 3364 bitField0_ |= 0x00000001; 3365 compareOp_ = value; 3366 onChanged(); 3367 return this; 3368 } 3369 /** 3370 * <code>required .CompareType compare_op = 1;</code> 3371 */ clearCompareOp()3372 public Builder clearCompareOp() { 3373 bitField0_ = (bitField0_ & ~0x00000001); 3374 compareOp_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.CompareType.LESS; 3375 onChanged(); 3376 return this; 3377 } 3378 3379 // optional .Comparator comparator = 2; 3380 private org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); 3381 private com.google.protobuf.SingleFieldBuilder< 3382 org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> comparatorBuilder_; 3383 /** 3384 * <code>optional .Comparator comparator = 2;</code> 3385 */ hasComparator()3386 public boolean hasComparator() { 3387 return ((bitField0_ & 0x00000002) == 0x00000002); 3388 } 3389 /** 3390 * <code>optional 
.Comparator comparator = 2;</code> 3391 */ getComparator()3392 public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator getComparator() { 3393 if (comparatorBuilder_ == null) { 3394 return comparator_; 3395 } else { 3396 return comparatorBuilder_.getMessage(); 3397 } 3398 } 3399 /** 3400 * <code>optional .Comparator comparator = 2;</code> 3401 */ setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value)3402 public Builder setComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) { 3403 if (comparatorBuilder_ == null) { 3404 if (value == null) { 3405 throw new NullPointerException(); 3406 } 3407 comparator_ = value; 3408 onChanged(); 3409 } else { 3410 comparatorBuilder_.setMessage(value); 3411 } 3412 bitField0_ |= 0x00000002; 3413 return this; 3414 } 3415 /** 3416 * <code>optional .Comparator comparator = 2;</code> 3417 */ setComparator( org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue)3418 public Builder setComparator( 3419 org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder builderForValue) { 3420 if (comparatorBuilder_ == null) { 3421 comparator_ = builderForValue.build(); 3422 onChanged(); 3423 } else { 3424 comparatorBuilder_.setMessage(builderForValue.build()); 3425 } 3426 bitField0_ |= 0x00000002; 3427 return this; 3428 } 3429 /** 3430 * <code>optional .Comparator comparator = 2;</code> 3431 */ mergeComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value)3432 public Builder mergeComparator(org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator value) { 3433 if (comparatorBuilder_ == null) { 3434 if (((bitField0_ & 0x00000002) == 0x00000002) && 3435 comparator_ != org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance()) { 3436 comparator_ = 3437 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.newBuilder(comparator_).mergeFrom(value).buildPartial(); 3438 } else { 3439 comparator_ = value; 3440 } 3441 onChanged(); 3442 } else { 3443 comparatorBuilder_.mergeFrom(value); 3444 } 3445 bitField0_ |= 0x00000002; 3446 return this; 3447 } 3448 /** 3449 * <code>optional .Comparator comparator = 2;</code> 3450 */ clearComparator()3451 public Builder clearComparator() { 3452 if (comparatorBuilder_ == null) { 3453 comparator_ = org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.getDefaultInstance(); 3454 onChanged(); 3455 } else { 3456 comparatorBuilder_.clear(); 3457 } 3458 bitField0_ = (bitField0_ & ~0x00000002); 3459 return this; 3460 } 3461 /** 3462 * <code>optional .Comparator comparator = 2;</code> 3463 */ getComparatorBuilder()3464 public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder getComparatorBuilder() { 3465 bitField0_ |= 0x00000002; 3466 onChanged(); 3467 return getComparatorFieldBuilder().getBuilder(); 3468 } 3469 /** 3470 * <code>optional .Comparator comparator = 2;</code> 3471 */ getComparatorOrBuilder()3472 public org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder getComparatorOrBuilder() { 3473 if (comparatorBuilder_ != null) { 3474 return comparatorBuilder_.getMessageOrBuilder(); 3475 } else { 3476 return comparator_; 3477 } 3478 } 3479 /** 3480 * <code>optional .Comparator comparator = 2;</code> 3481 */ 3482 private com.google.protobuf.SingleFieldBuilder< 3483 org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder> getComparatorFieldBuilder()3484 getComparatorFieldBuilder() { 3485 if (comparatorBuilder_ == null) { 3486 comparatorBuilder_ = new com.google.protobuf.SingleFieldBuilder< 3487 
org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.Comparator.Builder, org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos.ComparatorOrBuilder>( 3488 comparator_, 3489 getParentForChildren(), 3490 isClean()); 3491 comparator_ = null; 3492 } 3493 return comparatorBuilder_; 3494 } 3495 3496 // @@protoc_insertion_point(builder_scope:CompareFilter) 3497 } 3498 3499 static { 3500 defaultInstance = new CompareFilter(true); defaultInstance.initFields()3501 defaultInstance.initFields(); 3502 } 3503 3504 // @@protoc_insertion_point(class_scope:CompareFilter) 3505 } 3506 3507 public interface DependentColumnFilterOrBuilder 3508 extends com.google.protobuf.MessageOrBuilder { 3509 3510 // required .CompareFilter compare_filter = 1; 3511 /** 3512 * <code>required .CompareFilter compare_filter = 1;</code> 3513 */ hasCompareFilter()3514 boolean hasCompareFilter(); 3515 /** 3516 * <code>required .CompareFilter compare_filter = 1;</code> 3517 */ getCompareFilter()3518 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter(); 3519 /** 3520 * <code>required .CompareFilter compare_filter = 1;</code> 3521 */ getCompareFilterOrBuilder()3522 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder(); 3523 3524 // optional bytes column_family = 2; 3525 /** 3526 * <code>optional bytes column_family = 2;</code> 3527 */ hasColumnFamily()3528 boolean hasColumnFamily(); 3529 /** 3530 * <code>optional bytes column_family = 2;</code> 3531 */ getColumnFamily()3532 com.google.protobuf.ByteString getColumnFamily(); 3533 3534 // optional bytes column_qualifier = 3; 3535 /** 3536 * <code>optional bytes column_qualifier = 3;</code> 3537 */ hasColumnQualifier()3538 boolean hasColumnQualifier(); 3539 /** 3540 * <code>optional bytes column_qualifier = 3;</code> 3541 */ getColumnQualifier()3542 com.google.protobuf.ByteString 
getColumnQualifier(); 3543 3544 // optional bool drop_dependent_column = 4; 3545 /** 3546 * <code>optional bool drop_dependent_column = 4;</code> 3547 */ hasDropDependentColumn()3548 boolean hasDropDependentColumn(); 3549 /** 3550 * <code>optional bool drop_dependent_column = 4;</code> 3551 */ getDropDependentColumn()3552 boolean getDropDependentColumn(); 3553 } 3554 /** 3555 * Protobuf type {@code DependentColumnFilter} 3556 */ 3557 public static final class DependentColumnFilter extends 3558 com.google.protobuf.GeneratedMessage 3559 implements DependentColumnFilterOrBuilder { 3560 // Use DependentColumnFilter.newBuilder() to construct. DependentColumnFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder)3561 private DependentColumnFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 3562 super(builder); 3563 this.unknownFields = builder.getUnknownFields(); 3564 } DependentColumnFilter(boolean noInit)3565 private DependentColumnFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 3566 3567 private static final DependentColumnFilter defaultInstance; getDefaultInstance()3568 public static DependentColumnFilter getDefaultInstance() { 3569 return defaultInstance; 3570 } 3571 getDefaultInstanceForType()3572 public DependentColumnFilter getDefaultInstanceForType() { 3573 return defaultInstance; 3574 } 3575 3576 private final com.google.protobuf.UnknownFieldSet unknownFields; 3577 @java.lang.Override 3578 public final com.google.protobuf.UnknownFieldSet getUnknownFields()3579 getUnknownFields() { 3580 return this.unknownFields; 3581 } DependentColumnFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3582 private DependentColumnFilter( 3583 com.google.protobuf.CodedInputStream input, 3584 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3585 throws com.google.protobuf.InvalidProtocolBufferException { 3586 initFields(); 3587 
int mutable_bitField0_ = 0; 3588 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 3589 com.google.protobuf.UnknownFieldSet.newBuilder(); 3590 try { 3591 boolean done = false; 3592 while (!done) { 3593 int tag = input.readTag(); 3594 switch (tag) { 3595 case 0: 3596 done = true; 3597 break; 3598 default: { 3599 if (!parseUnknownField(input, unknownFields, 3600 extensionRegistry, tag)) { 3601 done = true; 3602 } 3603 break; 3604 } 3605 case 10: { 3606 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = null; 3607 if (((bitField0_ & 0x00000001) == 0x00000001)) { 3608 subBuilder = compareFilter_.toBuilder(); 3609 } 3610 compareFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.PARSER, extensionRegistry); 3611 if (subBuilder != null) { 3612 subBuilder.mergeFrom(compareFilter_); 3613 compareFilter_ = subBuilder.buildPartial(); 3614 } 3615 bitField0_ |= 0x00000001; 3616 break; 3617 } 3618 case 18: { 3619 bitField0_ |= 0x00000002; 3620 columnFamily_ = input.readBytes(); 3621 break; 3622 } 3623 case 26: { 3624 bitField0_ |= 0x00000004; 3625 columnQualifier_ = input.readBytes(); 3626 break; 3627 } 3628 case 32: { 3629 bitField0_ |= 0x00000008; 3630 dropDependentColumn_ = input.readBool(); 3631 break; 3632 } 3633 } 3634 } 3635 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 3636 throw e.setUnfinishedMessage(this); 3637 } catch (java.io.IOException e) { 3638 throw new com.google.protobuf.InvalidProtocolBufferException( 3639 e.getMessage()).setUnfinishedMessage(this); 3640 } finally { 3641 this.unknownFields = unknownFields.build(); 3642 makeExtensionsImmutable(); 3643 } 3644 } 3645 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()3646 getDescriptor() { 3647 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_descriptor; 3648 } 3649 3650 protected 
com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()3651 internalGetFieldAccessorTable() { 3652 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_fieldAccessorTable 3653 .ensureFieldAccessorsInitialized( 3654 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.Builder.class); 3655 } 3656 3657 public static com.google.protobuf.Parser<DependentColumnFilter> PARSER = 3658 new com.google.protobuf.AbstractParser<DependentColumnFilter>() { 3659 public DependentColumnFilter parsePartialFrom( 3660 com.google.protobuf.CodedInputStream input, 3661 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3662 throws com.google.protobuf.InvalidProtocolBufferException { 3663 return new DependentColumnFilter(input, extensionRegistry); 3664 } 3665 }; 3666 3667 @java.lang.Override getParserForType()3668 public com.google.protobuf.Parser<DependentColumnFilter> getParserForType() { 3669 return PARSER; 3670 } 3671 3672 private int bitField0_; 3673 // required .CompareFilter compare_filter = 1; 3674 public static final int COMPARE_FILTER_FIELD_NUMBER = 1; 3675 private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_; 3676 /** 3677 * <code>required .CompareFilter compare_filter = 1;</code> 3678 */ hasCompareFilter()3679 public boolean hasCompareFilter() { 3680 return ((bitField0_ & 0x00000001) == 0x00000001); 3681 } 3682 /** 3683 * <code>required .CompareFilter compare_filter = 1;</code> 3684 */ getCompareFilter()3685 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { 3686 return compareFilter_; 3687 } 3688 /** 3689 * <code>required .CompareFilter compare_filter = 1;</code> 3690 */ getCompareFilterOrBuilder()3691 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder 
getCompareFilterOrBuilder() { 3692 return compareFilter_; 3693 } 3694 3695 // optional bytes column_family = 2; 3696 public static final int COLUMN_FAMILY_FIELD_NUMBER = 2; 3697 private com.google.protobuf.ByteString columnFamily_; 3698 /** 3699 * <code>optional bytes column_family = 2;</code> 3700 */ hasColumnFamily()3701 public boolean hasColumnFamily() { 3702 return ((bitField0_ & 0x00000002) == 0x00000002); 3703 } 3704 /** 3705 * <code>optional bytes column_family = 2;</code> 3706 */ getColumnFamily()3707 public com.google.protobuf.ByteString getColumnFamily() { 3708 return columnFamily_; 3709 } 3710 3711 // optional bytes column_qualifier = 3; 3712 public static final int COLUMN_QUALIFIER_FIELD_NUMBER = 3; 3713 private com.google.protobuf.ByteString columnQualifier_; 3714 /** 3715 * <code>optional bytes column_qualifier = 3;</code> 3716 */ hasColumnQualifier()3717 public boolean hasColumnQualifier() { 3718 return ((bitField0_ & 0x00000004) == 0x00000004); 3719 } 3720 /** 3721 * <code>optional bytes column_qualifier = 3;</code> 3722 */ getColumnQualifier()3723 public com.google.protobuf.ByteString getColumnQualifier() { 3724 return columnQualifier_; 3725 } 3726 3727 // optional bool drop_dependent_column = 4; 3728 public static final int DROP_DEPENDENT_COLUMN_FIELD_NUMBER = 4; 3729 private boolean dropDependentColumn_; 3730 /** 3731 * <code>optional bool drop_dependent_column = 4;</code> 3732 */ hasDropDependentColumn()3733 public boolean hasDropDependentColumn() { 3734 return ((bitField0_ & 0x00000008) == 0x00000008); 3735 } 3736 /** 3737 * <code>optional bool drop_dependent_column = 4;</code> 3738 */ getDropDependentColumn()3739 public boolean getDropDependentColumn() { 3740 return dropDependentColumn_; 3741 } 3742 initFields()3743 private void initFields() { 3744 compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); 3745 columnFamily_ = com.google.protobuf.ByteString.EMPTY; 3746 columnQualifier_ = 
com.google.protobuf.ByteString.EMPTY; 3747 dropDependentColumn_ = false; 3748 } 3749 private byte memoizedIsInitialized = -1; isInitialized()3750 public final boolean isInitialized() { 3751 byte isInitialized = memoizedIsInitialized; 3752 if (isInitialized != -1) return isInitialized == 1; 3753 3754 if (!hasCompareFilter()) { 3755 memoizedIsInitialized = 0; 3756 return false; 3757 } 3758 if (!getCompareFilter().isInitialized()) { 3759 memoizedIsInitialized = 0; 3760 return false; 3761 } 3762 memoizedIsInitialized = 1; 3763 return true; 3764 } 3765 writeTo(com.google.protobuf.CodedOutputStream output)3766 public void writeTo(com.google.protobuf.CodedOutputStream output) 3767 throws java.io.IOException { 3768 getSerializedSize(); 3769 if (((bitField0_ & 0x00000001) == 0x00000001)) { 3770 output.writeMessage(1, compareFilter_); 3771 } 3772 if (((bitField0_ & 0x00000002) == 0x00000002)) { 3773 output.writeBytes(2, columnFamily_); 3774 } 3775 if (((bitField0_ & 0x00000004) == 0x00000004)) { 3776 output.writeBytes(3, columnQualifier_); 3777 } 3778 if (((bitField0_ & 0x00000008) == 0x00000008)) { 3779 output.writeBool(4, dropDependentColumn_); 3780 } 3781 getUnknownFields().writeTo(output); 3782 } 3783 3784 private int memoizedSerializedSize = -1; getSerializedSize()3785 public int getSerializedSize() { 3786 int size = memoizedSerializedSize; 3787 if (size != -1) return size; 3788 3789 size = 0; 3790 if (((bitField0_ & 0x00000001) == 0x00000001)) { 3791 size += com.google.protobuf.CodedOutputStream 3792 .computeMessageSize(1, compareFilter_); 3793 } 3794 if (((bitField0_ & 0x00000002) == 0x00000002)) { 3795 size += com.google.protobuf.CodedOutputStream 3796 .computeBytesSize(2, columnFamily_); 3797 } 3798 if (((bitField0_ & 0x00000004) == 0x00000004)) { 3799 size += com.google.protobuf.CodedOutputStream 3800 .computeBytesSize(3, columnQualifier_); 3801 } 3802 if (((bitField0_ & 0x00000008) == 0x00000008)) { 3803 size += com.google.protobuf.CodedOutputStream 3804 
.computeBoolSize(4, dropDependentColumn_); 3805 } 3806 size += getUnknownFields().getSerializedSize(); 3807 memoizedSerializedSize = size; 3808 return size; 3809 } 3810 3811 private static final long serialVersionUID = 0L; 3812 @java.lang.Override writeReplace()3813 protected java.lang.Object writeReplace() 3814 throws java.io.ObjectStreamException { 3815 return super.writeReplace(); 3816 } 3817 3818 @java.lang.Override equals(final java.lang.Object obj)3819 public boolean equals(final java.lang.Object obj) { 3820 if (obj == this) { 3821 return true; 3822 } 3823 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter)) { 3824 return super.equals(obj); 3825 } 3826 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter) obj; 3827 3828 boolean result = true; 3829 result = result && (hasCompareFilter() == other.hasCompareFilter()); 3830 if (hasCompareFilter()) { 3831 result = result && getCompareFilter() 3832 .equals(other.getCompareFilter()); 3833 } 3834 result = result && (hasColumnFamily() == other.hasColumnFamily()); 3835 if (hasColumnFamily()) { 3836 result = result && getColumnFamily() 3837 .equals(other.getColumnFamily()); 3838 } 3839 result = result && (hasColumnQualifier() == other.hasColumnQualifier()); 3840 if (hasColumnQualifier()) { 3841 result = result && getColumnQualifier() 3842 .equals(other.getColumnQualifier()); 3843 } 3844 result = result && (hasDropDependentColumn() == other.hasDropDependentColumn()); 3845 if (hasDropDependentColumn()) { 3846 result = result && (getDropDependentColumn() 3847 == other.getDropDependentColumn()); 3848 } 3849 result = result && 3850 getUnknownFields().equals(other.getUnknownFields()); 3851 return result; 3852 } 3853 3854 private int memoizedHashCode = 0; 3855 @java.lang.Override hashCode()3856 public int hashCode() { 3857 if (memoizedHashCode != 0) { 3858 return 
memoizedHashCode; 3859 } 3860 int hash = 41; 3861 hash = (19 * hash) + getDescriptorForType().hashCode(); 3862 if (hasCompareFilter()) { 3863 hash = (37 * hash) + COMPARE_FILTER_FIELD_NUMBER; 3864 hash = (53 * hash) + getCompareFilter().hashCode(); 3865 } 3866 if (hasColumnFamily()) { 3867 hash = (37 * hash) + COLUMN_FAMILY_FIELD_NUMBER; 3868 hash = (53 * hash) + getColumnFamily().hashCode(); 3869 } 3870 if (hasColumnQualifier()) { 3871 hash = (37 * hash) + COLUMN_QUALIFIER_FIELD_NUMBER; 3872 hash = (53 * hash) + getColumnQualifier().hashCode(); 3873 } 3874 if (hasDropDependentColumn()) { 3875 hash = (37 * hash) + DROP_DEPENDENT_COLUMN_FIELD_NUMBER; 3876 hash = (53 * hash) + hashBoolean(getDropDependentColumn()); 3877 } 3878 hash = (29 * hash) + getUnknownFields().hashCode(); 3879 memoizedHashCode = hash; 3880 return hash; 3881 } 3882 parseFrom( com.google.protobuf.ByteString data)3883 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( 3884 com.google.protobuf.ByteString data) 3885 throws com.google.protobuf.InvalidProtocolBufferException { 3886 return PARSER.parseFrom(data); 3887 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3888 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( 3889 com.google.protobuf.ByteString data, 3890 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3891 throws com.google.protobuf.InvalidProtocolBufferException { 3892 return PARSER.parseFrom(data, extensionRegistry); 3893 } parseFrom(byte[] data)3894 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(byte[] data) 3895 throws com.google.protobuf.InvalidProtocolBufferException { 3896 return PARSER.parseFrom(data); 3897 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3898 public static 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( 3899 byte[] data, 3900 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3901 throws com.google.protobuf.InvalidProtocolBufferException { 3902 return PARSER.parseFrom(data, extensionRegistry); 3903 } parseFrom(java.io.InputStream input)3904 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom(java.io.InputStream input) 3905 throws java.io.IOException { 3906 return PARSER.parseFrom(input); 3907 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3908 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( 3909 java.io.InputStream input, 3910 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3911 throws java.io.IOException { 3912 return PARSER.parseFrom(input, extensionRegistry); 3913 } parseDelimitedFrom(java.io.InputStream input)3914 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseDelimitedFrom(java.io.InputStream input) 3915 throws java.io.IOException { 3916 return PARSER.parseDelimitedFrom(input); 3917 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3918 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseDelimitedFrom( 3919 java.io.InputStream input, 3920 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3921 throws java.io.IOException { 3922 return PARSER.parseDelimitedFrom(input, extensionRegistry); 3923 } parseFrom( com.google.protobuf.CodedInputStream input)3924 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( 3925 com.google.protobuf.CodedInputStream input) 3926 throws java.io.IOException { 3927 return PARSER.parseFrom(input); 3928 } parseFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry)3929 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parseFrom( 3930 com.google.protobuf.CodedInputStream input, 3931 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 3932 throws java.io.IOException { 3933 return PARSER.parseFrom(input, extensionRegistry); 3934 } 3935 newBuilder()3936 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()3937 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter prototype)3938 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter prototype) { 3939 return newBuilder().mergeFrom(prototype); 3940 } toBuilder()3941 public Builder toBuilder() { return newBuilder(this); } 3942 3943 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)3944 protected Builder newBuilderForType( 3945 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 3946 Builder builder = new Builder(parent); 3947 return builder; 3948 } 3949 /** 3950 * Protobuf type {@code DependentColumnFilter} 3951 */ 3952 public static final class Builder extends 3953 com.google.protobuf.GeneratedMessage.Builder<Builder> 3954 implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilterOrBuilder { 3955 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()3956 getDescriptor() { 3957 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_descriptor; 3958 } 3959 3960 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()3961 internalGetFieldAccessorTable() { 3962 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_fieldAccessorTable 3963 
.ensureFieldAccessorsInitialized( 3964 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.Builder.class); 3965 } 3966 3967 // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.newBuilder() Builder()3968 private Builder() { 3969 maybeForceBuilderInitialization(); 3970 } 3971 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)3972 private Builder( 3973 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 3974 super(parent); 3975 maybeForceBuilderInitialization(); 3976 } maybeForceBuilderInitialization()3977 private void maybeForceBuilderInitialization() { 3978 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 3979 getCompareFilterFieldBuilder(); 3980 } 3981 } create()3982 private static Builder create() { 3983 return new Builder(); 3984 } 3985 clear()3986 public Builder clear() { 3987 super.clear(); 3988 if (compareFilterBuilder_ == null) { 3989 compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); 3990 } else { 3991 compareFilterBuilder_.clear(); 3992 } 3993 bitField0_ = (bitField0_ & ~0x00000001); 3994 columnFamily_ = com.google.protobuf.ByteString.EMPTY; 3995 bitField0_ = (bitField0_ & ~0x00000002); 3996 columnQualifier_ = com.google.protobuf.ByteString.EMPTY; 3997 bitField0_ = (bitField0_ & ~0x00000004); 3998 dropDependentColumn_ = false; 3999 bitField0_ = (bitField0_ & ~0x00000008); 4000 return this; 4001 } 4002 clone()4003 public Builder clone() { 4004 return create().mergeFrom(buildPartial()); 4005 } 4006 4007 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()4008 getDescriptorForType() { 4009 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_DependentColumnFilter_descriptor; 4010 } 4011 getDefaultInstanceForType()4012 public 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter getDefaultInstanceForType() { 4013 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.getDefaultInstance(); 4014 } 4015 build()4016 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter build() { 4017 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter result = buildPartial(); 4018 if (!result.isInitialized()) { 4019 throw newUninitializedMessageException(result); 4020 } 4021 return result; 4022 } 4023 buildPartial()4024 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter buildPartial() { 4025 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter(this); 4026 int from_bitField0_ = bitField0_; 4027 int to_bitField0_ = 0; 4028 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 4029 to_bitField0_ |= 0x00000001; 4030 } 4031 if (compareFilterBuilder_ == null) { 4032 result.compareFilter_ = compareFilter_; 4033 } else { 4034 result.compareFilter_ = compareFilterBuilder_.build(); 4035 } 4036 if (((from_bitField0_ & 0x00000002) == 0x00000002)) { 4037 to_bitField0_ |= 0x00000002; 4038 } 4039 result.columnFamily_ = columnFamily_; 4040 if (((from_bitField0_ & 0x00000004) == 0x00000004)) { 4041 to_bitField0_ |= 0x00000004; 4042 } 4043 result.columnQualifier_ = columnQualifier_; 4044 if (((from_bitField0_ & 0x00000008) == 0x00000008)) { 4045 to_bitField0_ |= 0x00000008; 4046 } 4047 result.dropDependentColumn_ = dropDependentColumn_; 4048 result.bitField0_ = to_bitField0_; 4049 onBuilt(); 4050 return result; 4051 } 4052 mergeFrom(com.google.protobuf.Message other)4053 public Builder mergeFrom(com.google.protobuf.Message other) { 4054 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter) { 4055 return 
mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter)other); 4056 } else { 4057 super.mergeFrom(other); 4058 return this; 4059 } 4060 } 4061 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter other)4062 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter other) { 4063 if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter.getDefaultInstance()) return this; 4064 if (other.hasCompareFilter()) { 4065 mergeCompareFilter(other.getCompareFilter()); 4066 } 4067 if (other.hasColumnFamily()) { 4068 setColumnFamily(other.getColumnFamily()); 4069 } 4070 if (other.hasColumnQualifier()) { 4071 setColumnQualifier(other.getColumnQualifier()); 4072 } 4073 if (other.hasDropDependentColumn()) { 4074 setDropDependentColumn(other.getDropDependentColumn()); 4075 } 4076 this.mergeUnknownFields(other.getUnknownFields()); 4077 return this; 4078 } 4079 isInitialized()4080 public final boolean isInitialized() { 4081 if (!hasCompareFilter()) { 4082 4083 return false; 4084 } 4085 if (!getCompareFilter().isInitialized()) { 4086 4087 return false; 4088 } 4089 return true; 4090 } 4091 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4092 public Builder mergeFrom( 4093 com.google.protobuf.CodedInputStream input, 4094 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4095 throws java.io.IOException { 4096 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter parsedMessage = null; 4097 try { 4098 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 4099 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 4100 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.DependentColumnFilter) e.getUnfinishedMessage(); 4101 throw e; 4102 } finally { 4103 if (parsedMessage != null) { 4104 
mergeFrom(parsedMessage); 4105 } 4106 } 4107 return this; 4108 } 4109 private int bitField0_; 4110 4111 // required .CompareFilter compare_filter = 1; 4112 private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); 4113 private com.google.protobuf.SingleFieldBuilder< 4114 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_; 4115 /** 4116 * <code>required .CompareFilter compare_filter = 1;</code> 4117 */ hasCompareFilter()4118 public boolean hasCompareFilter() { 4119 return ((bitField0_ & 0x00000001) == 0x00000001); 4120 } 4121 /** 4122 * <code>required .CompareFilter compare_filter = 1;</code> 4123 */ getCompareFilter()4124 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { 4125 if (compareFilterBuilder_ == null) { 4126 return compareFilter_; 4127 } else { 4128 return compareFilterBuilder_.getMessage(); 4129 } 4130 } 4131 /** 4132 * <code>required .CompareFilter compare_filter = 1;</code> 4133 */ setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value)4134 public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { 4135 if (compareFilterBuilder_ == null) { 4136 if (value == null) { 4137 throw new NullPointerException(); 4138 } 4139 compareFilter_ = value; 4140 onChanged(); 4141 } else { 4142 compareFilterBuilder_.setMessage(value); 4143 } 4144 bitField0_ |= 0x00000001; 4145 return this; 4146 } 4147 /** 4148 * <code>required .CompareFilter compare_filter = 1;</code> 4149 */ setCompareFilter( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue)4150 public Builder 
setCompareFilter( 4151 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) { 4152 if (compareFilterBuilder_ == null) { 4153 compareFilter_ = builderForValue.build(); 4154 onChanged(); 4155 } else { 4156 compareFilterBuilder_.setMessage(builderForValue.build()); 4157 } 4158 bitField0_ |= 0x00000001; 4159 return this; 4160 } 4161 /** 4162 * <code>required .CompareFilter compare_filter = 1;</code> 4163 */ mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value)4164 public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { 4165 if (compareFilterBuilder_ == null) { 4166 if (((bitField0_ & 0x00000001) == 0x00000001) && 4167 compareFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) { 4168 compareFilter_ = 4169 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial(); 4170 } else { 4171 compareFilter_ = value; 4172 } 4173 onChanged(); 4174 } else { 4175 compareFilterBuilder_.mergeFrom(value); 4176 } 4177 bitField0_ |= 0x00000001; 4178 return this; 4179 } 4180 /** 4181 * <code>required .CompareFilter compare_filter = 1;</code> 4182 */ clearCompareFilter()4183 public Builder clearCompareFilter() { 4184 if (compareFilterBuilder_ == null) { 4185 compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); 4186 onChanged(); 4187 } else { 4188 compareFilterBuilder_.clear(); 4189 } 4190 bitField0_ = (bitField0_ & ~0x00000001); 4191 return this; 4192 } 4193 /** 4194 * <code>required .CompareFilter compare_filter = 1;</code> 4195 */ getCompareFilterBuilder()4196 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() { 4197 bitField0_ |= 0x00000001; 4198 onChanged(); 4199 return getCompareFilterFieldBuilder().getBuilder(); 
4200 } 4201 /** 4202 * <code>required .CompareFilter compare_filter = 1;</code> 4203 */ getCompareFilterOrBuilder()4204 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { 4205 if (compareFilterBuilder_ != null) { 4206 return compareFilterBuilder_.getMessageOrBuilder(); 4207 } else { 4208 return compareFilter_; 4209 } 4210 } 4211 /** 4212 * <code>required .CompareFilter compare_filter = 1;</code> 4213 */ 4214 private com.google.protobuf.SingleFieldBuilder< 4215 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> getCompareFilterFieldBuilder()4216 getCompareFilterFieldBuilder() { 4217 if (compareFilterBuilder_ == null) { 4218 compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder< 4219 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>( 4220 compareFilter_, 4221 getParentForChildren(), 4222 isClean()); 4223 compareFilter_ = null; 4224 } 4225 return compareFilterBuilder_; 4226 } 4227 4228 // optional bytes column_family = 2; 4229 private com.google.protobuf.ByteString columnFamily_ = com.google.protobuf.ByteString.EMPTY; 4230 /** 4231 * <code>optional bytes column_family = 2;</code> 4232 */ hasColumnFamily()4233 public boolean hasColumnFamily() { 4234 return ((bitField0_ & 0x00000002) == 0x00000002); 4235 } 4236 /** 4237 * <code>optional bytes column_family = 2;</code> 4238 */ getColumnFamily()4239 public com.google.protobuf.ByteString getColumnFamily() { 4240 return columnFamily_; 4241 } 4242 /** 4243 * <code>optional bytes column_family = 2;</code> 4244 */ setColumnFamily(com.google.protobuf.ByteString value)4245 public Builder 
setColumnFamily(com.google.protobuf.ByteString value) { 4246 if (value == null) { 4247 throw new NullPointerException(); 4248 } 4249 bitField0_ |= 0x00000002; 4250 columnFamily_ = value; 4251 onChanged(); 4252 return this; 4253 } 4254 /** 4255 * <code>optional bytes column_family = 2;</code> 4256 */ clearColumnFamily()4257 public Builder clearColumnFamily() { 4258 bitField0_ = (bitField0_ & ~0x00000002); 4259 columnFamily_ = getDefaultInstance().getColumnFamily(); 4260 onChanged(); 4261 return this; 4262 } 4263 4264 // optional bytes column_qualifier = 3; 4265 private com.google.protobuf.ByteString columnQualifier_ = com.google.protobuf.ByteString.EMPTY; 4266 /** 4267 * <code>optional bytes column_qualifier = 3;</code> 4268 */ hasColumnQualifier()4269 public boolean hasColumnQualifier() { 4270 return ((bitField0_ & 0x00000004) == 0x00000004); 4271 } 4272 /** 4273 * <code>optional bytes column_qualifier = 3;</code> 4274 */ getColumnQualifier()4275 public com.google.protobuf.ByteString getColumnQualifier() { 4276 return columnQualifier_; 4277 } 4278 /** 4279 * <code>optional bytes column_qualifier = 3;</code> 4280 */ setColumnQualifier(com.google.protobuf.ByteString value)4281 public Builder setColumnQualifier(com.google.protobuf.ByteString value) { 4282 if (value == null) { 4283 throw new NullPointerException(); 4284 } 4285 bitField0_ |= 0x00000004; 4286 columnQualifier_ = value; 4287 onChanged(); 4288 return this; 4289 } 4290 /** 4291 * <code>optional bytes column_qualifier = 3;</code> 4292 */ clearColumnQualifier()4293 public Builder clearColumnQualifier() { 4294 bitField0_ = (bitField0_ & ~0x00000004); 4295 columnQualifier_ = getDefaultInstance().getColumnQualifier(); 4296 onChanged(); 4297 return this; 4298 } 4299 4300 // optional bool drop_dependent_column = 4; 4301 private boolean dropDependentColumn_ ; 4302 /** 4303 * <code>optional bool drop_dependent_column = 4;</code> 4304 */ hasDropDependentColumn()4305 public boolean hasDropDependentColumn() { 4306 
return ((bitField0_ & 0x00000008) == 0x00000008); 4307 } 4308 /** 4309 * <code>optional bool drop_dependent_column = 4;</code> 4310 */ getDropDependentColumn()4311 public boolean getDropDependentColumn() { 4312 return dropDependentColumn_; 4313 } 4314 /** 4315 * <code>optional bool drop_dependent_column = 4;</code> 4316 */ setDropDependentColumn(boolean value)4317 public Builder setDropDependentColumn(boolean value) { 4318 bitField0_ |= 0x00000008; 4319 dropDependentColumn_ = value; 4320 onChanged(); 4321 return this; 4322 } 4323 /** 4324 * <code>optional bool drop_dependent_column = 4;</code> 4325 */ clearDropDependentColumn()4326 public Builder clearDropDependentColumn() { 4327 bitField0_ = (bitField0_ & ~0x00000008); 4328 dropDependentColumn_ = false; 4329 onChanged(); 4330 return this; 4331 } 4332 4333 // @@protoc_insertion_point(builder_scope:DependentColumnFilter) 4334 } 4335 4336 static { 4337 defaultInstance = new DependentColumnFilter(true); defaultInstance.initFields()4338 defaultInstance.initFields(); 4339 } 4340 4341 // @@protoc_insertion_point(class_scope:DependentColumnFilter) 4342 } 4343 4344 public interface FamilyFilterOrBuilder 4345 extends com.google.protobuf.MessageOrBuilder { 4346 4347 // required .CompareFilter compare_filter = 1; 4348 /** 4349 * <code>required .CompareFilter compare_filter = 1;</code> 4350 */ hasCompareFilter()4351 boolean hasCompareFilter(); 4352 /** 4353 * <code>required .CompareFilter compare_filter = 1;</code> 4354 */ getCompareFilter()4355 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter(); 4356 /** 4357 * <code>required .CompareFilter compare_filter = 1;</code> 4358 */ getCompareFilterOrBuilder()4359 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder(); 4360 } 4361 /** 4362 * Protobuf type {@code FamilyFilter} 4363 */ 4364 public static final class FamilyFilter extends 4365 com.google.protobuf.GeneratedMessage 4366 implements 
FamilyFilterOrBuilder { 4367 // Use FamilyFilter.newBuilder() to construct. FamilyFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder)4368 private FamilyFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 4369 super(builder); 4370 this.unknownFields = builder.getUnknownFields(); 4371 } FamilyFilter(boolean noInit)4372 private FamilyFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 4373 4374 private static final FamilyFilter defaultInstance; getDefaultInstance()4375 public static FamilyFilter getDefaultInstance() { 4376 return defaultInstance; 4377 } 4378 getDefaultInstanceForType()4379 public FamilyFilter getDefaultInstanceForType() { 4380 return defaultInstance; 4381 } 4382 4383 private final com.google.protobuf.UnknownFieldSet unknownFields; 4384 @java.lang.Override 4385 public final com.google.protobuf.UnknownFieldSet getUnknownFields()4386 getUnknownFields() { 4387 return this.unknownFields; 4388 } FamilyFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4389 private FamilyFilter( 4390 com.google.protobuf.CodedInputStream input, 4391 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4392 throws com.google.protobuf.InvalidProtocolBufferException { 4393 initFields(); 4394 int mutable_bitField0_ = 0; 4395 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 4396 com.google.protobuf.UnknownFieldSet.newBuilder(); 4397 try { 4398 boolean done = false; 4399 while (!done) { 4400 int tag = input.readTag(); 4401 switch (tag) { 4402 case 0: 4403 done = true; 4404 break; 4405 default: { 4406 if (!parseUnknownField(input, unknownFields, 4407 extensionRegistry, tag)) { 4408 done = true; 4409 } 4410 break; 4411 } 4412 case 10: { 4413 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder subBuilder = null; 4414 if (((bitField0_ & 0x00000001) == 0x00000001)) { 4415 subBuilder = 
compareFilter_.toBuilder(); 4416 } 4417 compareFilter_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.PARSER, extensionRegistry); 4418 if (subBuilder != null) { 4419 subBuilder.mergeFrom(compareFilter_); 4420 compareFilter_ = subBuilder.buildPartial(); 4421 } 4422 bitField0_ |= 0x00000001; 4423 break; 4424 } 4425 } 4426 } 4427 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 4428 throw e.setUnfinishedMessage(this); 4429 } catch (java.io.IOException e) { 4430 throw new com.google.protobuf.InvalidProtocolBufferException( 4431 e.getMessage()).setUnfinishedMessage(this); 4432 } finally { 4433 this.unknownFields = unknownFields.build(); 4434 makeExtensionsImmutable(); 4435 } 4436 } 4437 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()4438 getDescriptor() { 4439 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_descriptor; 4440 } 4441 4442 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()4443 internalGetFieldAccessorTable() { 4444 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_fieldAccessorTable 4445 .ensureFieldAccessorsInitialized( 4446 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.Builder.class); 4447 } 4448 4449 public static com.google.protobuf.Parser<FamilyFilter> PARSER = 4450 new com.google.protobuf.AbstractParser<FamilyFilter>() { 4451 public FamilyFilter parsePartialFrom( 4452 com.google.protobuf.CodedInputStream input, 4453 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4454 throws com.google.protobuf.InvalidProtocolBufferException { 4455 return new FamilyFilter(input, extensionRegistry); 4456 } 4457 }; 4458 4459 @java.lang.Override getParserForType()4460 public com.google.protobuf.Parser<FamilyFilter> getParserForType() { 4461 
return PARSER; 4462 } 4463 4464 private int bitField0_; 4465 // required .CompareFilter compare_filter = 1; 4466 public static final int COMPARE_FILTER_FIELD_NUMBER = 1; 4467 private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_; 4468 /** 4469 * <code>required .CompareFilter compare_filter = 1;</code> 4470 */ hasCompareFilter()4471 public boolean hasCompareFilter() { 4472 return ((bitField0_ & 0x00000001) == 0x00000001); 4473 } 4474 /** 4475 * <code>required .CompareFilter compare_filter = 1;</code> 4476 */ getCompareFilter()4477 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { 4478 return compareFilter_; 4479 } 4480 /** 4481 * <code>required .CompareFilter compare_filter = 1;</code> 4482 */ getCompareFilterOrBuilder()4483 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { 4484 return compareFilter_; 4485 } 4486 initFields()4487 private void initFields() { 4488 compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); 4489 } 4490 private byte memoizedIsInitialized = -1; isInitialized()4491 public final boolean isInitialized() { 4492 byte isInitialized = memoizedIsInitialized; 4493 if (isInitialized != -1) return isInitialized == 1; 4494 4495 if (!hasCompareFilter()) { 4496 memoizedIsInitialized = 0; 4497 return false; 4498 } 4499 if (!getCompareFilter().isInitialized()) { 4500 memoizedIsInitialized = 0; 4501 return false; 4502 } 4503 memoizedIsInitialized = 1; 4504 return true; 4505 } 4506 writeTo(com.google.protobuf.CodedOutputStream output)4507 public void writeTo(com.google.protobuf.CodedOutputStream output) 4508 throws java.io.IOException { 4509 getSerializedSize(); 4510 if (((bitField0_ & 0x00000001) == 0x00000001)) { 4511 output.writeMessage(1, compareFilter_); 4512 } 4513 getUnknownFields().writeTo(output); 4514 } 4515 4516 private int memoizedSerializedSize 
= -1; getSerializedSize()4517 public int getSerializedSize() { 4518 int size = memoizedSerializedSize; 4519 if (size != -1) return size; 4520 4521 size = 0; 4522 if (((bitField0_ & 0x00000001) == 0x00000001)) { 4523 size += com.google.protobuf.CodedOutputStream 4524 .computeMessageSize(1, compareFilter_); 4525 } 4526 size += getUnknownFields().getSerializedSize(); 4527 memoizedSerializedSize = size; 4528 return size; 4529 } 4530 4531 private static final long serialVersionUID = 0L; 4532 @java.lang.Override writeReplace()4533 protected java.lang.Object writeReplace() 4534 throws java.io.ObjectStreamException { 4535 return super.writeReplace(); 4536 } 4537 4538 @java.lang.Override equals(final java.lang.Object obj)4539 public boolean equals(final java.lang.Object obj) { 4540 if (obj == this) { 4541 return true; 4542 } 4543 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter)) { 4544 return super.equals(obj); 4545 } 4546 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter) obj; 4547 4548 boolean result = true; 4549 result = result && (hasCompareFilter() == other.hasCompareFilter()); 4550 if (hasCompareFilter()) { 4551 result = result && getCompareFilter() 4552 .equals(other.getCompareFilter()); 4553 } 4554 result = result && 4555 getUnknownFields().equals(other.getUnknownFields()); 4556 return result; 4557 } 4558 4559 private int memoizedHashCode = 0; 4560 @java.lang.Override hashCode()4561 public int hashCode() { 4562 if (memoizedHashCode != 0) { 4563 return memoizedHashCode; 4564 } 4565 int hash = 41; 4566 hash = (19 * hash) + getDescriptorForType().hashCode(); 4567 if (hasCompareFilter()) { 4568 hash = (37 * hash) + COMPARE_FILTER_FIELD_NUMBER; 4569 hash = (53 * hash) + getCompareFilter().hashCode(); 4570 } 4571 hash = (29 * hash) + getUnknownFields().hashCode(); 4572 memoizedHashCode = hash; 4573 return hash; 4574 } 4575 parseFrom( 
com.google.protobuf.ByteString data)4576 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( 4577 com.google.protobuf.ByteString data) 4578 throws com.google.protobuf.InvalidProtocolBufferException { 4579 return PARSER.parseFrom(data); 4580 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4581 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( 4582 com.google.protobuf.ByteString data, 4583 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4584 throws com.google.protobuf.InvalidProtocolBufferException { 4585 return PARSER.parseFrom(data, extensionRegistry); 4586 } parseFrom(byte[] data)4587 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(byte[] data) 4588 throws com.google.protobuf.InvalidProtocolBufferException { 4589 return PARSER.parseFrom(data); 4590 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4591 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( 4592 byte[] data, 4593 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4594 throws com.google.protobuf.InvalidProtocolBufferException { 4595 return PARSER.parseFrom(data, extensionRegistry); 4596 } parseFrom(java.io.InputStream input)4597 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom(java.io.InputStream input) 4598 throws java.io.IOException { 4599 return PARSER.parseFrom(input); 4600 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4601 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( 4602 java.io.InputStream input, 4603 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4604 throws java.io.IOException { 4605 return PARSER.parseFrom(input, extensionRegistry); 4606 } 
parseDelimitedFrom(java.io.InputStream input)4607 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseDelimitedFrom(java.io.InputStream input) 4608 throws java.io.IOException { 4609 return PARSER.parseDelimitedFrom(input); 4610 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4611 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseDelimitedFrom( 4612 java.io.InputStream input, 4613 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4614 throws java.io.IOException { 4615 return PARSER.parseDelimitedFrom(input, extensionRegistry); 4616 } parseFrom( com.google.protobuf.CodedInputStream input)4617 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( 4618 com.google.protobuf.CodedInputStream input) 4619 throws java.io.IOException { 4620 return PARSER.parseFrom(input); 4621 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4622 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parseFrom( 4623 com.google.protobuf.CodedInputStream input, 4624 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4625 throws java.io.IOException { 4626 return PARSER.parseFrom(input, extensionRegistry); 4627 } 4628 newBuilder()4629 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()4630 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter prototype)4631 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter prototype) { 4632 return newBuilder().mergeFrom(prototype); 4633 } toBuilder()4634 public Builder toBuilder() { return newBuilder(this); } 4635 4636 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent 
parent)4637 protected Builder newBuilderForType( 4638 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 4639 Builder builder = new Builder(parent); 4640 return builder; 4641 } 4642 /** 4643 * Protobuf type {@code FamilyFilter} 4644 */ 4645 public static final class Builder extends 4646 com.google.protobuf.GeneratedMessage.Builder<Builder> 4647 implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilterOrBuilder { 4648 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()4649 getDescriptor() { 4650 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_descriptor; 4651 } 4652 4653 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()4654 internalGetFieldAccessorTable() { 4655 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_fieldAccessorTable 4656 .ensureFieldAccessorsInitialized( 4657 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.Builder.class); 4658 } 4659 4660 // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.newBuilder() Builder()4661 private Builder() { 4662 maybeForceBuilderInitialization(); 4663 } 4664 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)4665 private Builder( 4666 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 4667 super(parent); 4668 maybeForceBuilderInitialization(); 4669 } maybeForceBuilderInitialization()4670 private void maybeForceBuilderInitialization() { 4671 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 4672 getCompareFilterFieldBuilder(); 4673 } 4674 } create()4675 private static Builder create() { 4676 return new Builder(); 4677 } 4678 clear()4679 public Builder clear() { 4680 super.clear(); 4681 if (compareFilterBuilder_ == null) { 4682 compareFilter_ = 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); 4683 } else { 4684 compareFilterBuilder_.clear(); 4685 } 4686 bitField0_ = (bitField0_ & ~0x00000001); 4687 return this; 4688 } 4689 clone()4690 public Builder clone() { 4691 return create().mergeFrom(buildPartial()); 4692 } 4693 4694 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()4695 getDescriptorForType() { 4696 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FamilyFilter_descriptor; 4697 } 4698 getDefaultInstanceForType()4699 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter getDefaultInstanceForType() { 4700 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.getDefaultInstance(); 4701 } 4702 build()4703 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter build() { 4704 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter result = buildPartial(); 4705 if (!result.isInitialized()) { 4706 throw newUninitializedMessageException(result); 4707 } 4708 return result; 4709 } 4710 buildPartial()4711 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter buildPartial() { 4712 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter(this); 4713 int from_bitField0_ = bitField0_; 4714 int to_bitField0_ = 0; 4715 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 4716 to_bitField0_ |= 0x00000001; 4717 } 4718 if (compareFilterBuilder_ == null) { 4719 result.compareFilter_ = compareFilter_; 4720 } else { 4721 result.compareFilter_ = compareFilterBuilder_.build(); 4722 } 4723 result.bitField0_ = to_bitField0_; 4724 onBuilt(); 4725 return result; 4726 } 4727 mergeFrom(com.google.protobuf.Message other)4728 public Builder mergeFrom(com.google.protobuf.Message other) { 4729 if (other instanceof 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter) { 4730 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter)other); 4731 } else { 4732 super.mergeFrom(other); 4733 return this; 4734 } 4735 } 4736 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter other)4737 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter other) { 4738 if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter.getDefaultInstance()) return this; 4739 if (other.hasCompareFilter()) { 4740 mergeCompareFilter(other.getCompareFilter()); 4741 } 4742 this.mergeUnknownFields(other.getUnknownFields()); 4743 return this; 4744 } 4745 isInitialized()4746 public final boolean isInitialized() { 4747 if (!hasCompareFilter()) { 4748 4749 return false; 4750 } 4751 if (!getCompareFilter().isInitialized()) { 4752 4753 return false; 4754 } 4755 return true; 4756 } 4757 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4758 public Builder mergeFrom( 4759 com.google.protobuf.CodedInputStream input, 4760 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4761 throws java.io.IOException { 4762 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter parsedMessage = null; 4763 try { 4764 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 4765 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 4766 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FamilyFilter) e.getUnfinishedMessage(); 4767 throw e; 4768 } finally { 4769 if (parsedMessage != null) { 4770 mergeFrom(parsedMessage); 4771 } 4772 } 4773 return this; 4774 } 4775 private int bitField0_; 4776 4777 // required .CompareFilter compare_filter = 1; 4778 private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter compareFilter_ = 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); 4779 private com.google.protobuf.SingleFieldBuilder< 4780 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> compareFilterBuilder_; 4781 /** 4782 * <code>required .CompareFilter compare_filter = 1;</code> 4783 */ hasCompareFilter()4784 public boolean hasCompareFilter() { 4785 return ((bitField0_ & 0x00000001) == 0x00000001); 4786 } 4787 /** 4788 * <code>required .CompareFilter compare_filter = 1;</code> 4789 */ getCompareFilter()4790 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter getCompareFilter() { 4791 if (compareFilterBuilder_ == null) { 4792 return compareFilter_; 4793 } else { 4794 return compareFilterBuilder_.getMessage(); 4795 } 4796 } 4797 /** 4798 * <code>required .CompareFilter compare_filter = 1;</code> 4799 */ setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value)4800 public Builder setCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { 4801 if (compareFilterBuilder_ == null) { 4802 if (value == null) { 4803 throw new NullPointerException(); 4804 } 4805 compareFilter_ = value; 4806 onChanged(); 4807 } else { 4808 compareFilterBuilder_.setMessage(value); 4809 } 4810 bitField0_ |= 0x00000001; 4811 return this; 4812 } 4813 /** 4814 * <code>required .CompareFilter compare_filter = 1;</code> 4815 */ setCompareFilter( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue)4816 public Builder setCompareFilter( 4817 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder builderForValue) { 4818 if (compareFilterBuilder_ == null) { 4819 compareFilter_ = builderForValue.build(); 4820 onChanged(); 4821 } else { 4822 
compareFilterBuilder_.setMessage(builderForValue.build()); 4823 } 4824 bitField0_ |= 0x00000001; 4825 return this; 4826 } 4827 /** 4828 * <code>required .CompareFilter compare_filter = 1;</code> 4829 */ mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value)4830 public Builder mergeCompareFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter value) { 4831 if (compareFilterBuilder_ == null) { 4832 if (((bitField0_ & 0x00000001) == 0x00000001) && 4833 compareFilter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance()) { 4834 compareFilter_ = 4835 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.newBuilder(compareFilter_).mergeFrom(value).buildPartial(); 4836 } else { 4837 compareFilter_ = value; 4838 } 4839 onChanged(); 4840 } else { 4841 compareFilterBuilder_.mergeFrom(value); 4842 } 4843 bitField0_ |= 0x00000001; 4844 return this; 4845 } 4846 /** 4847 * <code>required .CompareFilter compare_filter = 1;</code> 4848 */ clearCompareFilter()4849 public Builder clearCompareFilter() { 4850 if (compareFilterBuilder_ == null) { 4851 compareFilter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.getDefaultInstance(); 4852 onChanged(); 4853 } else { 4854 compareFilterBuilder_.clear(); 4855 } 4856 bitField0_ = (bitField0_ & ~0x00000001); 4857 return this; 4858 } 4859 /** 4860 * <code>required .CompareFilter compare_filter = 1;</code> 4861 */ getCompareFilterBuilder()4862 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder getCompareFilterBuilder() { 4863 bitField0_ |= 0x00000001; 4864 onChanged(); 4865 return getCompareFilterFieldBuilder().getBuilder(); 4866 } 4867 /** 4868 * <code>required .CompareFilter compare_filter = 1;</code> 4869 */ getCompareFilterOrBuilder()4870 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder getCompareFilterOrBuilder() { 4871 if 
(compareFilterBuilder_ != null) { 4872 return compareFilterBuilder_.getMessageOrBuilder(); 4873 } else { 4874 return compareFilter_; 4875 } 4876 } 4877 /** 4878 * <code>required .CompareFilter compare_filter = 1;</code> 4879 */ 4880 private com.google.protobuf.SingleFieldBuilder< 4881 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder> getCompareFilterFieldBuilder()4882 getCompareFilterFieldBuilder() { 4883 if (compareFilterBuilder_ == null) { 4884 compareFilterBuilder_ = new com.google.protobuf.SingleFieldBuilder< 4885 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.CompareFilterOrBuilder>( 4886 compareFilter_, 4887 getParentForChildren(), 4888 isClean()); 4889 compareFilter_ = null; 4890 } 4891 return compareFilterBuilder_; 4892 } 4893 4894 // @@protoc_insertion_point(builder_scope:FamilyFilter) 4895 } 4896 4897 static { 4898 defaultInstance = new FamilyFilter(true); defaultInstance.initFields()4899 defaultInstance.initFields(); 4900 } 4901 4902 // @@protoc_insertion_point(class_scope:FamilyFilter) 4903 } 4904 4905 public interface FilterListOrBuilder 4906 extends com.google.protobuf.MessageOrBuilder { 4907 4908 // required .FilterList.Operator operator = 1; 4909 /** 4910 * <code>required .FilterList.Operator operator = 1;</code> 4911 */ hasOperator()4912 boolean hasOperator(); 4913 /** 4914 * <code>required .FilterList.Operator operator = 1;</code> 4915 */ getOperator()4916 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator getOperator(); 4917 4918 // repeated .Filter filters = 2; 4919 /** 4920 * <code>repeated .Filter filters = 2;</code> 4921 */ 4922 
java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter> getFiltersList()4923 getFiltersList(); 4924 /** 4925 * <code>repeated .Filter filters = 2;</code> 4926 */ getFilters(int index)4927 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilters(int index); 4928 /** 4929 * <code>repeated .Filter filters = 2;</code> 4930 */ getFiltersCount()4931 int getFiltersCount(); 4932 /** 4933 * <code>repeated .Filter filters = 2;</code> 4934 */ 4935 java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> getFiltersOrBuilderList()4936 getFiltersOrBuilderList(); 4937 /** 4938 * <code>repeated .Filter filters = 2;</code> 4939 */ getFiltersOrBuilder( int index)4940 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFiltersOrBuilder( 4941 int index); 4942 } 4943 /** 4944 * Protobuf type {@code FilterList} 4945 */ 4946 public static final class FilterList extends 4947 com.google.protobuf.GeneratedMessage 4948 implements FilterListOrBuilder { 4949 // Use FilterList.newBuilder() to construct. 
FilterList(com.google.protobuf.GeneratedMessage.Builder<?> builder)4950 private FilterList(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 4951 super(builder); 4952 this.unknownFields = builder.getUnknownFields(); 4953 } FilterList(boolean noInit)4954 private FilterList(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 4955 4956 private static final FilterList defaultInstance; getDefaultInstance()4957 public static FilterList getDefaultInstance() { 4958 return defaultInstance; 4959 } 4960 getDefaultInstanceForType()4961 public FilterList getDefaultInstanceForType() { 4962 return defaultInstance; 4963 } 4964 4965 private final com.google.protobuf.UnknownFieldSet unknownFields; 4966 @java.lang.Override 4967 public final com.google.protobuf.UnknownFieldSet getUnknownFields()4968 getUnknownFields() { 4969 return this.unknownFields; 4970 } FilterList( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4971 private FilterList( 4972 com.google.protobuf.CodedInputStream input, 4973 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 4974 throws com.google.protobuf.InvalidProtocolBufferException { 4975 initFields(); 4976 int mutable_bitField0_ = 0; 4977 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 4978 com.google.protobuf.UnknownFieldSet.newBuilder(); 4979 try { 4980 boolean done = false; 4981 while (!done) { 4982 int tag = input.readTag(); 4983 switch (tag) { 4984 case 0: 4985 done = true; 4986 break; 4987 default: { 4988 if (!parseUnknownField(input, unknownFields, 4989 extensionRegistry, tag)) { 4990 done = true; 4991 } 4992 break; 4993 } 4994 case 8: { 4995 int rawValue = input.readEnum(); 4996 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator value = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.valueOf(rawValue); 4997 if (value == null) { 4998 unknownFields.mergeVarintField(1, 
rawValue); 4999 } else { 5000 bitField0_ |= 0x00000001; 5001 operator_ = value; 5002 } 5003 break; 5004 } 5005 case 18: { 5006 if (!((mutable_bitField0_ & 0x00000002) == 0x00000002)) { 5007 filters_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter>(); 5008 mutable_bitField0_ |= 0x00000002; 5009 } 5010 filters_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry)); 5011 break; 5012 } 5013 } 5014 } 5015 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 5016 throw e.setUnfinishedMessage(this); 5017 } catch (java.io.IOException e) { 5018 throw new com.google.protobuf.InvalidProtocolBufferException( 5019 e.getMessage()).setUnfinishedMessage(this); 5020 } finally { 5021 if (((mutable_bitField0_ & 0x00000002) == 0x00000002)) { 5022 filters_ = java.util.Collections.unmodifiableList(filters_); 5023 } 5024 this.unknownFields = unknownFields.build(); 5025 makeExtensionsImmutable(); 5026 } 5027 } 5028 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()5029 getDescriptor() { 5030 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_descriptor; 5031 } 5032 5033 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()5034 internalGetFieldAccessorTable() { 5035 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_fieldAccessorTable 5036 .ensureFieldAccessorsInitialized( 5037 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Builder.class); 5038 } 5039 5040 public static com.google.protobuf.Parser<FilterList> PARSER = 5041 new com.google.protobuf.AbstractParser<FilterList>() { 5042 public FilterList parsePartialFrom( 5043 com.google.protobuf.CodedInputStream input, 5044 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5045 throws 
com.google.protobuf.InvalidProtocolBufferException { 5046 return new FilterList(input, extensionRegistry); 5047 } 5048 }; 5049 5050 @java.lang.Override getParserForType()5051 public com.google.protobuf.Parser<FilterList> getParserForType() { 5052 return PARSER; 5053 } 5054 5055 /** 5056 * Protobuf enum {@code FilterList.Operator} 5057 */ 5058 public enum Operator 5059 implements com.google.protobuf.ProtocolMessageEnum { 5060 /** 5061 * <code>MUST_PASS_ALL = 1;</code> 5062 */ 5063 MUST_PASS_ALL(0, 1), 5064 /** 5065 * <code>MUST_PASS_ONE = 2;</code> 5066 */ 5067 MUST_PASS_ONE(1, 2), 5068 ; 5069 5070 /** 5071 * <code>MUST_PASS_ALL = 1;</code> 5072 */ 5073 public static final int MUST_PASS_ALL_VALUE = 1; 5074 /** 5075 * <code>MUST_PASS_ONE = 2;</code> 5076 */ 5077 public static final int MUST_PASS_ONE_VALUE = 2; 5078 5079 getNumber()5080 public final int getNumber() { return value; } 5081 valueOf(int value)5082 public static Operator valueOf(int value) { 5083 switch (value) { 5084 case 1: return MUST_PASS_ALL; 5085 case 2: return MUST_PASS_ONE; 5086 default: return null; 5087 } 5088 } 5089 5090 public static com.google.protobuf.Internal.EnumLiteMap<Operator> internalGetValueMap()5091 internalGetValueMap() { 5092 return internalValueMap; 5093 } 5094 private static com.google.protobuf.Internal.EnumLiteMap<Operator> 5095 internalValueMap = 5096 new com.google.protobuf.Internal.EnumLiteMap<Operator>() { 5097 public Operator findValueByNumber(int number) { 5098 return Operator.valueOf(number); 5099 } 5100 }; 5101 5102 public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor()5103 getValueDescriptor() { 5104 return getDescriptor().getValues().get(index); 5105 } 5106 public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType()5107 getDescriptorForType() { 5108 return getDescriptor(); 5109 } 5110 public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor()5111 getDescriptor() { 5112 return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.getDescriptor().getEnumTypes().get(0); 5113 } 5114 5115 private static final Operator[] VALUES = values(); 5116 valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc)5117 public static Operator valueOf( 5118 com.google.protobuf.Descriptors.EnumValueDescriptor desc) { 5119 if (desc.getType() != getDescriptor()) { 5120 throw new java.lang.IllegalArgumentException( 5121 "EnumValueDescriptor is not for this type."); 5122 } 5123 return VALUES[desc.getIndex()]; 5124 } 5125 5126 private final int index; 5127 private final int value; 5128 Operator(int index, int value)5129 private Operator(int index, int value) { 5130 this.index = index; 5131 this.value = value; 5132 } 5133 5134 // @@protoc_insertion_point(enum_scope:FilterList.Operator) 5135 } 5136 5137 private int bitField0_; 5138 // required .FilterList.Operator operator = 1; 5139 public static final int OPERATOR_FIELD_NUMBER = 1; 5140 private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator operator_; 5141 /** 5142 * <code>required .FilterList.Operator operator = 1;</code> 5143 */ hasOperator()5144 public boolean hasOperator() { 5145 return ((bitField0_ & 0x00000001) == 0x00000001); 5146 } 5147 /** 5148 * <code>required .FilterList.Operator operator = 1;</code> 5149 */ getOperator()5150 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator getOperator() { 5151 return operator_; 5152 } 5153 5154 // repeated .Filter filters = 2; 5155 public static final int FILTERS_FIELD_NUMBER = 2; 5156 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter> filters_; 5157 /** 5158 * <code>repeated .Filter filters = 2;</code> 5159 */ getFiltersList()5160 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter> getFiltersList() { 5161 return filters_; 5162 } 5163 /** 5164 * <code>repeated .Filter filters = 2;</code> 5165 */ 5166 public 
java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> getFiltersOrBuilderList()5167 getFiltersOrBuilderList() { 5168 return filters_; 5169 } 5170 /** 5171 * <code>repeated .Filter filters = 2;</code> 5172 */ getFiltersCount()5173 public int getFiltersCount() { 5174 return filters_.size(); 5175 } 5176 /** 5177 * <code>repeated .Filter filters = 2;</code> 5178 */ getFilters(int index)5179 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilters(int index) { 5180 return filters_.get(index); 5181 } 5182 /** 5183 * <code>repeated .Filter filters = 2;</code> 5184 */ getFiltersOrBuilder( int index)5185 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFiltersOrBuilder( 5186 int index) { 5187 return filters_.get(index); 5188 } 5189 initFields()5190 private void initFields() { 5191 operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL; 5192 filters_ = java.util.Collections.emptyList(); 5193 } 5194 private byte memoizedIsInitialized = -1; isInitialized()5195 public final boolean isInitialized() { 5196 byte isInitialized = memoizedIsInitialized; 5197 if (isInitialized != -1) return isInitialized == 1; 5198 5199 if (!hasOperator()) { 5200 memoizedIsInitialized = 0; 5201 return false; 5202 } 5203 for (int i = 0; i < getFiltersCount(); i++) { 5204 if (!getFilters(i).isInitialized()) { 5205 memoizedIsInitialized = 0; 5206 return false; 5207 } 5208 } 5209 memoizedIsInitialized = 1; 5210 return true; 5211 } 5212 writeTo(com.google.protobuf.CodedOutputStream output)5213 public void writeTo(com.google.protobuf.CodedOutputStream output) 5214 throws java.io.IOException { 5215 getSerializedSize(); 5216 if (((bitField0_ & 0x00000001) == 0x00000001)) { 5217 output.writeEnum(1, operator_.getNumber()); 5218 } 5219 for (int i = 0; i < filters_.size(); i++) { 5220 output.writeMessage(2, filters_.get(i)); 5221 } 5222 
getUnknownFields().writeTo(output); 5223 } 5224 5225 private int memoizedSerializedSize = -1; getSerializedSize()5226 public int getSerializedSize() { 5227 int size = memoizedSerializedSize; 5228 if (size != -1) return size; 5229 5230 size = 0; 5231 if (((bitField0_ & 0x00000001) == 0x00000001)) { 5232 size += com.google.protobuf.CodedOutputStream 5233 .computeEnumSize(1, operator_.getNumber()); 5234 } 5235 for (int i = 0; i < filters_.size(); i++) { 5236 size += com.google.protobuf.CodedOutputStream 5237 .computeMessageSize(2, filters_.get(i)); 5238 } 5239 size += getUnknownFields().getSerializedSize(); 5240 memoizedSerializedSize = size; 5241 return size; 5242 } 5243 5244 private static final long serialVersionUID = 0L; 5245 @java.lang.Override writeReplace()5246 protected java.lang.Object writeReplace() 5247 throws java.io.ObjectStreamException { 5248 return super.writeReplace(); 5249 } 5250 5251 @java.lang.Override equals(final java.lang.Object obj)5252 public boolean equals(final java.lang.Object obj) { 5253 if (obj == this) { 5254 return true; 5255 } 5256 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList)) { 5257 return super.equals(obj); 5258 } 5259 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList) obj; 5260 5261 boolean result = true; 5262 result = result && (hasOperator() == other.hasOperator()); 5263 if (hasOperator()) { 5264 result = result && 5265 (getOperator() == other.getOperator()); 5266 } 5267 result = result && getFiltersList() 5268 .equals(other.getFiltersList()); 5269 result = result && 5270 getUnknownFields().equals(other.getUnknownFields()); 5271 return result; 5272 } 5273 5274 private int memoizedHashCode = 0; 5275 @java.lang.Override hashCode()5276 public int hashCode() { 5277 if (memoizedHashCode != 0) { 5278 return memoizedHashCode; 5279 } 5280 int hash = 41; 5281 hash = (19 * hash) + 
getDescriptorForType().hashCode(); 5282 if (hasOperator()) { 5283 hash = (37 * hash) + OPERATOR_FIELD_NUMBER; 5284 hash = (53 * hash) + hashEnum(getOperator()); 5285 } 5286 if (getFiltersCount() > 0) { 5287 hash = (37 * hash) + FILTERS_FIELD_NUMBER; 5288 hash = (53 * hash) + getFiltersList().hashCode(); 5289 } 5290 hash = (29 * hash) + getUnknownFields().hashCode(); 5291 memoizedHashCode = hash; 5292 return hash; 5293 } 5294 parseFrom( com.google.protobuf.ByteString data)5295 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom( 5296 com.google.protobuf.ByteString data) 5297 throws com.google.protobuf.InvalidProtocolBufferException { 5298 return PARSER.parseFrom(data); 5299 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5300 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom( 5301 com.google.protobuf.ByteString data, 5302 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5303 throws com.google.protobuf.InvalidProtocolBufferException { 5304 return PARSER.parseFrom(data, extensionRegistry); 5305 } parseFrom(byte[] data)5306 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(byte[] data) 5307 throws com.google.protobuf.InvalidProtocolBufferException { 5308 return PARSER.parseFrom(data); 5309 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5310 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom( 5311 byte[] data, 5312 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5313 throws com.google.protobuf.InvalidProtocolBufferException { 5314 return PARSER.parseFrom(data, extensionRegistry); 5315 } parseFrom(java.io.InputStream input)5316 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom(java.io.InputStream input) 5317 throws java.io.IOException { 5318 return 
PARSER.parseFrom(input); 5319 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5320 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom( 5321 java.io.InputStream input, 5322 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5323 throws java.io.IOException { 5324 return PARSER.parseFrom(input, extensionRegistry); 5325 } parseDelimitedFrom(java.io.InputStream input)5326 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseDelimitedFrom(java.io.InputStream input) 5327 throws java.io.IOException { 5328 return PARSER.parseDelimitedFrom(input); 5329 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5330 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseDelimitedFrom( 5331 java.io.InputStream input, 5332 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5333 throws java.io.IOException { 5334 return PARSER.parseDelimitedFrom(input, extensionRegistry); 5335 } parseFrom( com.google.protobuf.CodedInputStream input)5336 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom( 5337 com.google.protobuf.CodedInputStream input) 5338 throws java.io.IOException { 5339 return PARSER.parseFrom(input); 5340 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5341 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parseFrom( 5342 com.google.protobuf.CodedInputStream input, 5343 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5344 throws java.io.IOException { 5345 return PARSER.parseFrom(input, extensionRegistry); 5346 } 5347 newBuilder()5348 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()5349 public Builder newBuilderForType() { return newBuilder(); } 
newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList prototype)5350 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList prototype) { 5351 return newBuilder().mergeFrom(prototype); 5352 } toBuilder()5353 public Builder toBuilder() { return newBuilder(this); } 5354 5355 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)5356 protected Builder newBuilderForType( 5357 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 5358 Builder builder = new Builder(parent); 5359 return builder; 5360 } 5361 /** 5362 * Protobuf type {@code FilterList} 5363 */ 5364 public static final class Builder extends 5365 com.google.protobuf.GeneratedMessage.Builder<Builder> 5366 implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterListOrBuilder { 5367 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()5368 getDescriptor() { 5369 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_descriptor; 5370 } 5371 5372 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()5373 internalGetFieldAccessorTable() { 5374 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_fieldAccessorTable 5375 .ensureFieldAccessorsInitialized( 5376 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Builder.class); 5377 } 5378 5379 // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.newBuilder() Builder()5380 private Builder() { 5381 maybeForceBuilderInitialization(); 5382 } 5383 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)5384 private Builder( 5385 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 5386 super(parent); 5387 maybeForceBuilderInitialization(); 5388 } 
maybeForceBuilderInitialization()5389 private void maybeForceBuilderInitialization() { 5390 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 5391 getFiltersFieldBuilder(); 5392 } 5393 } create()5394 private static Builder create() { 5395 return new Builder(); 5396 } 5397 clear()5398 public Builder clear() { 5399 super.clear(); 5400 operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL; 5401 bitField0_ = (bitField0_ & ~0x00000001); 5402 if (filtersBuilder_ == null) { 5403 filters_ = java.util.Collections.emptyList(); 5404 bitField0_ = (bitField0_ & ~0x00000002); 5405 } else { 5406 filtersBuilder_.clear(); 5407 } 5408 return this; 5409 } 5410 clone()5411 public Builder clone() { 5412 return create().mergeFrom(buildPartial()); 5413 } 5414 5415 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()5416 getDescriptorForType() { 5417 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterList_descriptor; 5418 } 5419 getDefaultInstanceForType()5420 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList getDefaultInstanceForType() { 5421 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.getDefaultInstance(); 5422 } 5423 build()5424 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList build() { 5425 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList result = buildPartial(); 5426 if (!result.isInitialized()) { 5427 throw newUninitializedMessageException(result); 5428 } 5429 return result; 5430 } 5431 buildPartial()5432 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList buildPartial() { 5433 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList(this); 5434 int from_bitField0_ = bitField0_; 5435 int to_bitField0_ = 0; 5436 if (((from_bitField0_ & 0x00000001) == 
0x00000001)) { 5437 to_bitField0_ |= 0x00000001; 5438 } 5439 result.operator_ = operator_; 5440 if (filtersBuilder_ == null) { 5441 if (((bitField0_ & 0x00000002) == 0x00000002)) { 5442 filters_ = java.util.Collections.unmodifiableList(filters_); 5443 bitField0_ = (bitField0_ & ~0x00000002); 5444 } 5445 result.filters_ = filters_; 5446 } else { 5447 result.filters_ = filtersBuilder_.build(); 5448 } 5449 result.bitField0_ = to_bitField0_; 5450 onBuilt(); 5451 return result; 5452 } 5453 mergeFrom(com.google.protobuf.Message other)5454 public Builder mergeFrom(com.google.protobuf.Message other) { 5455 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList) { 5456 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList)other); 5457 } else { 5458 super.mergeFrom(other); 5459 return this; 5460 } 5461 } 5462 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList other)5463 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList other) { 5464 if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.getDefaultInstance()) return this; 5465 if (other.hasOperator()) { 5466 setOperator(other.getOperator()); 5467 } 5468 if (filtersBuilder_ == null) { 5469 if (!other.filters_.isEmpty()) { 5470 if (filters_.isEmpty()) { 5471 filters_ = other.filters_; 5472 bitField0_ = (bitField0_ & ~0x00000002); 5473 } else { 5474 ensureFiltersIsMutable(); 5475 filters_.addAll(other.filters_); 5476 } 5477 onChanged(); 5478 } 5479 } else { 5480 if (!other.filters_.isEmpty()) { 5481 if (filtersBuilder_.isEmpty()) { 5482 filtersBuilder_.dispose(); 5483 filtersBuilder_ = null; 5484 filters_ = other.filters_; 5485 bitField0_ = (bitField0_ & ~0x00000002); 5486 filtersBuilder_ = 5487 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
5488 getFiltersFieldBuilder() : null; 5489 } else { 5490 filtersBuilder_.addAllMessages(other.filters_); 5491 } 5492 } 5493 } 5494 this.mergeUnknownFields(other.getUnknownFields()); 5495 return this; 5496 } 5497 isInitialized()5498 public final boolean isInitialized() { 5499 if (!hasOperator()) { 5500 5501 return false; 5502 } 5503 for (int i = 0; i < getFiltersCount(); i++) { 5504 if (!getFilters(i).isInitialized()) { 5505 5506 return false; 5507 } 5508 } 5509 return true; 5510 } 5511 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5512 public Builder mergeFrom( 5513 com.google.protobuf.CodedInputStream input, 5514 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5515 throws java.io.IOException { 5516 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList parsedMessage = null; 5517 try { 5518 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 5519 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 5520 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList) e.getUnfinishedMessage(); 5521 throw e; 5522 } finally { 5523 if (parsedMessage != null) { 5524 mergeFrom(parsedMessage); 5525 } 5526 } 5527 return this; 5528 } 5529 private int bitField0_; 5530 5531 // required .FilterList.Operator operator = 1; 5532 private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL; 5533 /** 5534 * <code>required .FilterList.Operator operator = 1;</code> 5535 */ hasOperator()5536 public boolean hasOperator() { 5537 return ((bitField0_ & 0x00000001) == 0x00000001); 5538 } 5539 /** 5540 * <code>required .FilterList.Operator operator = 1;</code> 5541 */ getOperator()5542 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator getOperator() { 5543 return operator_; 5544 } 5545 /** 5546 * 
<code>required .FilterList.Operator operator = 1;</code> 5547 */ setOperator(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator value)5548 public Builder setOperator(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator value) { 5549 if (value == null) { 5550 throw new NullPointerException(); 5551 } 5552 bitField0_ |= 0x00000001; 5553 operator_ = value; 5554 onChanged(); 5555 return this; 5556 } 5557 /** 5558 * <code>required .FilterList.Operator operator = 1;</code> 5559 */ clearOperator()5560 public Builder clearOperator() { 5561 bitField0_ = (bitField0_ & ~0x00000001); 5562 operator_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterList.Operator.MUST_PASS_ALL; 5563 onChanged(); 5564 return this; 5565 } 5566 5567 // repeated .Filter filters = 2; 5568 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter> filters_ = 5569 java.util.Collections.emptyList(); ensureFiltersIsMutable()5570 private void ensureFiltersIsMutable() { 5571 if (!((bitField0_ & 0x00000002) == 0x00000002)) { 5572 filters_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter>(filters_); 5573 bitField0_ |= 0x00000002; 5574 } 5575 } 5576 5577 private com.google.protobuf.RepeatedFieldBuilder< 5578 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> filtersBuilder_; 5579 5580 /** 5581 * <code>repeated .Filter filters = 2;</code> 5582 */ getFiltersList()5583 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter> getFiltersList() { 5584 if (filtersBuilder_ == null) { 5585 return java.util.Collections.unmodifiableList(filters_); 5586 } else { 5587 return filtersBuilder_.getMessageList(); 5588 } 5589 } 5590 /** 5591 * <code>repeated .Filter filters = 2;</code> 5592 */ getFiltersCount()5593 
public int getFiltersCount() { 5594 if (filtersBuilder_ == null) { 5595 return filters_.size(); 5596 } else { 5597 return filtersBuilder_.getCount(); 5598 } 5599 } 5600 /** 5601 * <code>repeated .Filter filters = 2;</code> 5602 */ getFilters(int index)5603 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilters(int index) { 5604 if (filtersBuilder_ == null) { 5605 return filters_.get(index); 5606 } else { 5607 return filtersBuilder_.getMessage(index); 5608 } 5609 } 5610 /** 5611 * <code>repeated .Filter filters = 2;</code> 5612 */ setFilters( int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value)5613 public Builder setFilters( 5614 int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) { 5615 if (filtersBuilder_ == null) { 5616 if (value == null) { 5617 throw new NullPointerException(); 5618 } 5619 ensureFiltersIsMutable(); 5620 filters_.set(index, value); 5621 onChanged(); 5622 } else { 5623 filtersBuilder_.setMessage(index, value); 5624 } 5625 return this; 5626 } 5627 /** 5628 * <code>repeated .Filter filters = 2;</code> 5629 */ setFilters( int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue)5630 public Builder setFilters( 5631 int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) { 5632 if (filtersBuilder_ == null) { 5633 ensureFiltersIsMutable(); 5634 filters_.set(index, builderForValue.build()); 5635 onChanged(); 5636 } else { 5637 filtersBuilder_.setMessage(index, builderForValue.build()); 5638 } 5639 return this; 5640 } 5641 /** 5642 * <code>repeated .Filter filters = 2;</code> 5643 */ addFilters(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value)5644 public Builder addFilters(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) { 5645 if (filtersBuilder_ == null) { 5646 if (value == null) { 5647 throw new NullPointerException(); 5648 } 5649 
ensureFiltersIsMutable(); 5650 filters_.add(value); 5651 onChanged(); 5652 } else { 5653 filtersBuilder_.addMessage(value); 5654 } 5655 return this; 5656 } 5657 /** 5658 * <code>repeated .Filter filters = 2;</code> 5659 */ addFilters( int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value)5660 public Builder addFilters( 5661 int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) { 5662 if (filtersBuilder_ == null) { 5663 if (value == null) { 5664 throw new NullPointerException(); 5665 } 5666 ensureFiltersIsMutable(); 5667 filters_.add(index, value); 5668 onChanged(); 5669 } else { 5670 filtersBuilder_.addMessage(index, value); 5671 } 5672 return this; 5673 } 5674 /** 5675 * <code>repeated .Filter filters = 2;</code> 5676 */ addFilters( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue)5677 public Builder addFilters( 5678 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) { 5679 if (filtersBuilder_ == null) { 5680 ensureFiltersIsMutable(); 5681 filters_.add(builderForValue.build()); 5682 onChanged(); 5683 } else { 5684 filtersBuilder_.addMessage(builderForValue.build()); 5685 } 5686 return this; 5687 } 5688 /** 5689 * <code>repeated .Filter filters = 2;</code> 5690 */ addFilters( int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue)5691 public Builder addFilters( 5692 int index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) { 5693 if (filtersBuilder_ == null) { 5694 ensureFiltersIsMutable(); 5695 filters_.add(index, builderForValue.build()); 5696 onChanged(); 5697 } else { 5698 filtersBuilder_.addMessage(index, builderForValue.build()); 5699 } 5700 return this; 5701 } 5702 /** 5703 * <code>repeated .Filter filters = 2;</code> 5704 */ addAllFilters( java.lang.Iterable<? 
extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter> values)5705 public Builder addAllFilters( 5706 java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter> values) { 5707 if (filtersBuilder_ == null) { 5708 ensureFiltersIsMutable(); 5709 super.addAll(values, filters_); 5710 onChanged(); 5711 } else { 5712 filtersBuilder_.addAllMessages(values); 5713 } 5714 return this; 5715 } 5716 /** 5717 * <code>repeated .Filter filters = 2;</code> 5718 */ clearFilters()5719 public Builder clearFilters() { 5720 if (filtersBuilder_ == null) { 5721 filters_ = java.util.Collections.emptyList(); 5722 bitField0_ = (bitField0_ & ~0x00000002); 5723 onChanged(); 5724 } else { 5725 filtersBuilder_.clear(); 5726 } 5727 return this; 5728 } 5729 /** 5730 * <code>repeated .Filter filters = 2;</code> 5731 */ removeFilters(int index)5732 public Builder removeFilters(int index) { 5733 if (filtersBuilder_ == null) { 5734 ensureFiltersIsMutable(); 5735 filters_.remove(index); 5736 onChanged(); 5737 } else { 5738 filtersBuilder_.remove(index); 5739 } 5740 return this; 5741 } 5742 /** 5743 * <code>repeated .Filter filters = 2;</code> 5744 */ getFiltersBuilder( int index)5745 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder getFiltersBuilder( 5746 int index) { 5747 return getFiltersFieldBuilder().getBuilder(index); 5748 } 5749 /** 5750 * <code>repeated .Filter filters = 2;</code> 5751 */ getFiltersOrBuilder( int index)5752 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFiltersOrBuilder( 5753 int index) { 5754 if (filtersBuilder_ == null) { 5755 return filters_.get(index); } else { 5756 return filtersBuilder_.getMessageOrBuilder(index); 5757 } 5758 } 5759 /** 5760 * <code>repeated .Filter filters = 2;</code> 5761 */ 5762 public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> getFiltersOrBuilderList()5763 getFiltersOrBuilderList() { 5764 if (filtersBuilder_ != null) { 5765 return filtersBuilder_.getMessageOrBuilderList(); 5766 } else { 5767 return java.util.Collections.unmodifiableList(filters_); 5768 } 5769 } 5770 /** 5771 * <code>repeated .Filter filters = 2;</code> 5772 */ addFiltersBuilder()5773 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder addFiltersBuilder() { 5774 return getFiltersFieldBuilder().addBuilder( 5775 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()); 5776 } 5777 /** 5778 * <code>repeated .Filter filters = 2;</code> 5779 */ addFiltersBuilder( int index)5780 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder addFiltersBuilder( 5781 int index) { 5782 return getFiltersFieldBuilder().addBuilder( 5783 index, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()); 5784 } 5785 /** 5786 * <code>repeated .Filter filters = 2;</code> 5787 */ 5788 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder> getFiltersBuilderList()5789 getFiltersBuilderList() { 5790 return getFiltersFieldBuilder().getBuilderList(); 5791 } 5792 private com.google.protobuf.RepeatedFieldBuilder< 5793 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> getFiltersFieldBuilder()5794 getFiltersFieldBuilder() { 5795 if (filtersBuilder_ == null) { 5796 filtersBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< 5797 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>( 5798 filters_, 5799 ((bitField0_ & 
0x00000002) == 0x00000002), 5800 getParentForChildren(), 5801 isClean()); 5802 filters_ = null; 5803 } 5804 return filtersBuilder_; 5805 } 5806 5807 // @@protoc_insertion_point(builder_scope:FilterList) 5808 } 5809 5810 static { 5811 defaultInstance = new FilterList(true); defaultInstance.initFields()5812 defaultInstance.initFields(); 5813 } 5814 5815 // @@protoc_insertion_point(class_scope:FilterList) 5816 } 5817 5818 public interface FilterWrapperOrBuilder 5819 extends com.google.protobuf.MessageOrBuilder { 5820 5821 // required .Filter filter = 1; 5822 /** 5823 * <code>required .Filter filter = 1;</code> 5824 */ hasFilter()5825 boolean hasFilter(); 5826 /** 5827 * <code>required .Filter filter = 1;</code> 5828 */ getFilter()5829 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter(); 5830 /** 5831 * <code>required .Filter filter = 1;</code> 5832 */ getFilterOrBuilder()5833 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder(); 5834 } 5835 /** 5836 * Protobuf type {@code FilterWrapper} 5837 */ 5838 public static final class FilterWrapper extends 5839 com.google.protobuf.GeneratedMessage 5840 implements FilterWrapperOrBuilder { 5841 // Use FilterWrapper.newBuilder() to construct. 
FilterWrapper(com.google.protobuf.GeneratedMessage.Builder<?> builder)5842 private FilterWrapper(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 5843 super(builder); 5844 this.unknownFields = builder.getUnknownFields(); 5845 } FilterWrapper(boolean noInit)5846 private FilterWrapper(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 5847 5848 private static final FilterWrapper defaultInstance; getDefaultInstance()5849 public static FilterWrapper getDefaultInstance() { 5850 return defaultInstance; 5851 } 5852 getDefaultInstanceForType()5853 public FilterWrapper getDefaultInstanceForType() { 5854 return defaultInstance; 5855 } 5856 5857 private final com.google.protobuf.UnknownFieldSet unknownFields; 5858 @java.lang.Override 5859 public final com.google.protobuf.UnknownFieldSet getUnknownFields()5860 getUnknownFields() { 5861 return this.unknownFields; 5862 } FilterWrapper( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)5863 private FilterWrapper( 5864 com.google.protobuf.CodedInputStream input, 5865 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5866 throws com.google.protobuf.InvalidProtocolBufferException { 5867 initFields(); 5868 int mutable_bitField0_ = 0; 5869 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 5870 com.google.protobuf.UnknownFieldSet.newBuilder(); 5871 try { 5872 boolean done = false; 5873 while (!done) { 5874 int tag = input.readTag(); 5875 switch (tag) { 5876 case 0: 5877 done = true; 5878 break; 5879 default: { 5880 if (!parseUnknownField(input, unknownFields, 5881 extensionRegistry, tag)) { 5882 done = true; 5883 } 5884 break; 5885 } 5886 case 10: { 5887 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder subBuilder = null; 5888 if (((bitField0_ & 0x00000001) == 0x00000001)) { 5889 subBuilder = filter_.toBuilder(); 5890 } 5891 filter_ = 
input.readMessage(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.PARSER, extensionRegistry); 5892 if (subBuilder != null) { 5893 subBuilder.mergeFrom(filter_); 5894 filter_ = subBuilder.buildPartial(); 5895 } 5896 bitField0_ |= 0x00000001; 5897 break; 5898 } 5899 } 5900 } 5901 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 5902 throw e.setUnfinishedMessage(this); 5903 } catch (java.io.IOException e) { 5904 throw new com.google.protobuf.InvalidProtocolBufferException( 5905 e.getMessage()).setUnfinishedMessage(this); 5906 } finally { 5907 this.unknownFields = unknownFields.build(); 5908 makeExtensionsImmutable(); 5909 } 5910 } 5911 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()5912 getDescriptor() { 5913 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_descriptor; 5914 } 5915 5916 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()5917 internalGetFieldAccessorTable() { 5918 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_fieldAccessorTable 5919 .ensureFieldAccessorsInitialized( 5920 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.Builder.class); 5921 } 5922 5923 public static com.google.protobuf.Parser<FilterWrapper> PARSER = 5924 new com.google.protobuf.AbstractParser<FilterWrapper>() { 5925 public FilterWrapper parsePartialFrom( 5926 com.google.protobuf.CodedInputStream input, 5927 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 5928 throws com.google.protobuf.InvalidProtocolBufferException { 5929 return new FilterWrapper(input, extensionRegistry); 5930 } 5931 }; 5932 5933 @java.lang.Override getParserForType()5934 public com.google.protobuf.Parser<FilterWrapper> getParserForType() { 5935 return PARSER; 5936 } 5937 5938 private int bitField0_; 5939 // 
required .Filter filter = 1; 5940 public static final int FILTER_FIELD_NUMBER = 1; 5941 private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_; 5942 /** 5943 * <code>required .Filter filter = 1;</code> 5944 */ hasFilter()5945 public boolean hasFilter() { 5946 return ((bitField0_ & 0x00000001) == 0x00000001); 5947 } 5948 /** 5949 * <code>required .Filter filter = 1;</code> 5950 */ getFilter()5951 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() { 5952 return filter_; 5953 } 5954 /** 5955 * <code>required .Filter filter = 1;</code> 5956 */ getFilterOrBuilder()5957 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() { 5958 return filter_; 5959 } 5960 initFields()5961 private void initFields() { 5962 filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); 5963 } 5964 private byte memoizedIsInitialized = -1; isInitialized()5965 public final boolean isInitialized() { 5966 byte isInitialized = memoizedIsInitialized; 5967 if (isInitialized != -1) return isInitialized == 1; 5968 5969 if (!hasFilter()) { 5970 memoizedIsInitialized = 0; 5971 return false; 5972 } 5973 if (!getFilter().isInitialized()) { 5974 memoizedIsInitialized = 0; 5975 return false; 5976 } 5977 memoizedIsInitialized = 1; 5978 return true; 5979 } 5980 writeTo(com.google.protobuf.CodedOutputStream output)5981 public void writeTo(com.google.protobuf.CodedOutputStream output) 5982 throws java.io.IOException { 5983 getSerializedSize(); 5984 if (((bitField0_ & 0x00000001) == 0x00000001)) { 5985 output.writeMessage(1, filter_); 5986 } 5987 getUnknownFields().writeTo(output); 5988 } 5989 5990 private int memoizedSerializedSize = -1; getSerializedSize()5991 public int getSerializedSize() { 5992 int size = memoizedSerializedSize; 5993 if (size != -1) return size; 5994 5995 size = 0; 5996 if (((bitField0_ & 0x00000001) == 0x00000001)) { 5997 size += 
com.google.protobuf.CodedOutputStream 5998 .computeMessageSize(1, filter_); 5999 } 6000 size += getUnknownFields().getSerializedSize(); 6001 memoizedSerializedSize = size; 6002 return size; 6003 } 6004 6005 private static final long serialVersionUID = 0L; 6006 @java.lang.Override writeReplace()6007 protected java.lang.Object writeReplace() 6008 throws java.io.ObjectStreamException { 6009 return super.writeReplace(); 6010 } 6011 6012 @java.lang.Override equals(final java.lang.Object obj)6013 public boolean equals(final java.lang.Object obj) { 6014 if (obj == this) { 6015 return true; 6016 } 6017 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper)) { 6018 return super.equals(obj); 6019 } 6020 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper) obj; 6021 6022 boolean result = true; 6023 result = result && (hasFilter() == other.hasFilter()); 6024 if (hasFilter()) { 6025 result = result && getFilter() 6026 .equals(other.getFilter()); 6027 } 6028 result = result && 6029 getUnknownFields().equals(other.getUnknownFields()); 6030 return result; 6031 } 6032 6033 private int memoizedHashCode = 0; 6034 @java.lang.Override hashCode()6035 public int hashCode() { 6036 if (memoizedHashCode != 0) { 6037 return memoizedHashCode; 6038 } 6039 int hash = 41; 6040 hash = (19 * hash) + getDescriptorForType().hashCode(); 6041 if (hasFilter()) { 6042 hash = (37 * hash) + FILTER_FIELD_NUMBER; 6043 hash = (53 * hash) + getFilter().hashCode(); 6044 } 6045 hash = (29 * hash) + getUnknownFields().hashCode(); 6046 memoizedHashCode = hash; 6047 return hash; 6048 } 6049 parseFrom( com.google.protobuf.ByteString data)6050 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( 6051 com.google.protobuf.ByteString data) 6052 throws com.google.protobuf.InvalidProtocolBufferException { 6053 return PARSER.parseFrom(data); 6054 } 
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6055 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( 6056 com.google.protobuf.ByteString data, 6057 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6058 throws com.google.protobuf.InvalidProtocolBufferException { 6059 return PARSER.parseFrom(data, extensionRegistry); 6060 } parseFrom(byte[] data)6061 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(byte[] data) 6062 throws com.google.protobuf.InvalidProtocolBufferException { 6063 return PARSER.parseFrom(data); 6064 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6065 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( 6066 byte[] data, 6067 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6068 throws com.google.protobuf.InvalidProtocolBufferException { 6069 return PARSER.parseFrom(data, extensionRegistry); 6070 } parseFrom(java.io.InputStream input)6071 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom(java.io.InputStream input) 6072 throws java.io.IOException { 6073 return PARSER.parseFrom(input); 6074 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6075 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( 6076 java.io.InputStream input, 6077 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6078 throws java.io.IOException { 6079 return PARSER.parseFrom(input, extensionRegistry); 6080 } parseDelimitedFrom(java.io.InputStream input)6081 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseDelimitedFrom(java.io.InputStream input) 6082 throws java.io.IOException { 6083 return PARSER.parseDelimitedFrom(input); 6084 } parseDelimitedFrom( 
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6085 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseDelimitedFrom( 6086 java.io.InputStream input, 6087 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6088 throws java.io.IOException { 6089 return PARSER.parseDelimitedFrom(input, extensionRegistry); 6090 } parseFrom( com.google.protobuf.CodedInputStream input)6091 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( 6092 com.google.protobuf.CodedInputStream input) 6093 throws java.io.IOException { 6094 return PARSER.parseFrom(input); 6095 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6096 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parseFrom( 6097 com.google.protobuf.CodedInputStream input, 6098 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6099 throws java.io.IOException { 6100 return PARSER.parseFrom(input, extensionRegistry); 6101 } 6102 newBuilder()6103 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()6104 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper prototype)6105 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper prototype) { 6106 return newBuilder().mergeFrom(prototype); 6107 } toBuilder()6108 public Builder toBuilder() { return newBuilder(this); } 6109 6110 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)6111 protected Builder newBuilderForType( 6112 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 6113 Builder builder = new Builder(parent); 6114 return builder; 6115 } 6116 /** 6117 * Protobuf type {@code FilterWrapper} 6118 */ 6119 public static final class 
Builder extends 6120 com.google.protobuf.GeneratedMessage.Builder<Builder> 6121 implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapperOrBuilder { 6122 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()6123 getDescriptor() { 6124 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_descriptor; 6125 } 6126 6127 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()6128 internalGetFieldAccessorTable() { 6129 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_fieldAccessorTable 6130 .ensureFieldAccessorsInitialized( 6131 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.Builder.class); 6132 } 6133 6134 // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.newBuilder() Builder()6135 private Builder() { 6136 maybeForceBuilderInitialization(); 6137 } 6138 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)6139 private Builder( 6140 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 6141 super(parent); 6142 maybeForceBuilderInitialization(); 6143 } maybeForceBuilderInitialization()6144 private void maybeForceBuilderInitialization() { 6145 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 6146 getFilterFieldBuilder(); 6147 } 6148 } create()6149 private static Builder create() { 6150 return new Builder(); 6151 } 6152 clear()6153 public Builder clear() { 6154 super.clear(); 6155 if (filterBuilder_ == null) { 6156 filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); 6157 } else { 6158 filterBuilder_.clear(); 6159 } 6160 bitField0_ = (bitField0_ & ~0x00000001); 6161 return this; 6162 } 6163 clone()6164 public Builder clone() { 6165 return create().mergeFrom(buildPartial()); 6166 } 6167 
6168 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()6169 getDescriptorForType() { 6170 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FilterWrapper_descriptor; 6171 } 6172 getDefaultInstanceForType()6173 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper getDefaultInstanceForType() { 6174 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.getDefaultInstance(); 6175 } 6176 build()6177 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper build() { 6178 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper result = buildPartial(); 6179 if (!result.isInitialized()) { 6180 throw newUninitializedMessageException(result); 6181 } 6182 return result; 6183 } 6184 buildPartial()6185 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper buildPartial() { 6186 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper(this); 6187 int from_bitField0_ = bitField0_; 6188 int to_bitField0_ = 0; 6189 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 6190 to_bitField0_ |= 0x00000001; 6191 } 6192 if (filterBuilder_ == null) { 6193 result.filter_ = filter_; 6194 } else { 6195 result.filter_ = filterBuilder_.build(); 6196 } 6197 result.bitField0_ = to_bitField0_; 6198 onBuilt(); 6199 return result; 6200 } 6201 mergeFrom(com.google.protobuf.Message other)6202 public Builder mergeFrom(com.google.protobuf.Message other) { 6203 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper) { 6204 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper)other); 6205 } else { 6206 super.mergeFrom(other); 6207 return this; 6208 } 6209 } 6210 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper other)6211 public Builder 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper other) { 6212 if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper.getDefaultInstance()) return this; 6213 if (other.hasFilter()) { 6214 mergeFilter(other.getFilter()); 6215 } 6216 this.mergeUnknownFields(other.getUnknownFields()); 6217 return this; 6218 } 6219 isInitialized()6220 public final boolean isInitialized() { 6221 if (!hasFilter()) { 6222 6223 return false; 6224 } 6225 if (!getFilter().isInitialized()) { 6226 6227 return false; 6228 } 6229 return true; 6230 } 6231 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6232 public Builder mergeFrom( 6233 com.google.protobuf.CodedInputStream input, 6234 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6235 throws java.io.IOException { 6236 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper parsedMessage = null; 6237 try { 6238 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 6239 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 6240 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterWrapper) e.getUnfinishedMessage(); 6241 throw e; 6242 } finally { 6243 if (parsedMessage != null) { 6244 mergeFrom(parsedMessage); 6245 } 6246 } 6247 return this; 6248 } 6249 private int bitField0_; 6250 6251 // required .Filter filter = 1; 6252 private org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); 6253 private com.google.protobuf.SingleFieldBuilder< 6254 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> filterBuilder_; 6255 /** 6256 * <code>required .Filter filter = 1;</code> 6257 */ hasFilter()6258 
public boolean hasFilter() { 6259 return ((bitField0_ & 0x00000001) == 0x00000001); 6260 } 6261 /** 6262 * <code>required .Filter filter = 1;</code> 6263 */ getFilter()6264 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter getFilter() { 6265 if (filterBuilder_ == null) { 6266 return filter_; 6267 } else { 6268 return filterBuilder_.getMessage(); 6269 } 6270 } 6271 /** 6272 * <code>required .Filter filter = 1;</code> 6273 */ setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value)6274 public Builder setFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) { 6275 if (filterBuilder_ == null) { 6276 if (value == null) { 6277 throw new NullPointerException(); 6278 } 6279 filter_ = value; 6280 onChanged(); 6281 } else { 6282 filterBuilder_.setMessage(value); 6283 } 6284 bitField0_ |= 0x00000001; 6285 return this; 6286 } 6287 /** 6288 * <code>required .Filter filter = 1;</code> 6289 */ setFilter( org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue)6290 public Builder setFilter( 6291 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder builderForValue) { 6292 if (filterBuilder_ == null) { 6293 filter_ = builderForValue.build(); 6294 onChanged(); 6295 } else { 6296 filterBuilder_.setMessage(builderForValue.build()); 6297 } 6298 bitField0_ |= 0x00000001; 6299 return this; 6300 } 6301 /** 6302 * <code>required .Filter filter = 1;</code> 6303 */ mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value)6304 public Builder mergeFilter(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter value) { 6305 if (filterBuilder_ == null) { 6306 if (((bitField0_ & 0x00000001) == 0x00000001) && 6307 filter_ != org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance()) { 6308 filter_ = 6309 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.newBuilder(filter_).mergeFrom(value).buildPartial(); 6310 } else 
{ 6311 filter_ = value; 6312 } 6313 onChanged(); 6314 } else { 6315 filterBuilder_.mergeFrom(value); 6316 } 6317 bitField0_ |= 0x00000001; 6318 return this; 6319 } 6320 /** 6321 * <code>required .Filter filter = 1;</code> 6322 */ clearFilter()6323 public Builder clearFilter() { 6324 if (filterBuilder_ == null) { 6325 filter_ = org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.getDefaultInstance(); 6326 onChanged(); 6327 } else { 6328 filterBuilder_.clear(); 6329 } 6330 bitField0_ = (bitField0_ & ~0x00000001); 6331 return this; 6332 } 6333 /** 6334 * <code>required .Filter filter = 1;</code> 6335 */ getFilterBuilder()6336 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder getFilterBuilder() { 6337 bitField0_ |= 0x00000001; 6338 onChanged(); 6339 return getFilterFieldBuilder().getBuilder(); 6340 } 6341 /** 6342 * <code>required .Filter filter = 1;</code> 6343 */ getFilterOrBuilder()6344 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder getFilterOrBuilder() { 6345 if (filterBuilder_ != null) { 6346 return filterBuilder_.getMessageOrBuilder(); 6347 } else { 6348 return filter_; 6349 } 6350 } 6351 /** 6352 * <code>required .Filter filter = 1;</code> 6353 */ 6354 private com.google.protobuf.SingleFieldBuilder< 6355 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder> getFilterFieldBuilder()6356 getFilterFieldBuilder() { 6357 if (filterBuilder_ == null) { 6358 filterBuilder_ = new com.google.protobuf.SingleFieldBuilder< 6359 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.Filter.Builder, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FilterOrBuilder>( 6360 filter_, 6361 getParentForChildren(), 6362 isClean()); 6363 filter_ = null; 6364 } 6365 return filterBuilder_; 6366 } 
6367 6368 // @@protoc_insertion_point(builder_scope:FilterWrapper) 6369 } 6370 6371 static { 6372 defaultInstance = new FilterWrapper(true); defaultInstance.initFields()6373 defaultInstance.initFields(); 6374 } 6375 6376 // @@protoc_insertion_point(class_scope:FilterWrapper) 6377 } 6378 6379 public interface FirstKeyOnlyFilterOrBuilder 6380 extends com.google.protobuf.MessageOrBuilder { 6381 } 6382 /** 6383 * Protobuf type {@code FirstKeyOnlyFilter} 6384 */ 6385 public static final class FirstKeyOnlyFilter extends 6386 com.google.protobuf.GeneratedMessage 6387 implements FirstKeyOnlyFilterOrBuilder { 6388 // Use FirstKeyOnlyFilter.newBuilder() to construct. FirstKeyOnlyFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder)6389 private FirstKeyOnlyFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 6390 super(builder); 6391 this.unknownFields = builder.getUnknownFields(); 6392 } FirstKeyOnlyFilter(boolean noInit)6393 private FirstKeyOnlyFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 6394 6395 private static final FirstKeyOnlyFilter defaultInstance; getDefaultInstance()6396 public static FirstKeyOnlyFilter getDefaultInstance() { 6397 return defaultInstance; 6398 } 6399 getDefaultInstanceForType()6400 public FirstKeyOnlyFilter getDefaultInstanceForType() { 6401 return defaultInstance; 6402 } 6403 6404 private final com.google.protobuf.UnknownFieldSet unknownFields; 6405 @java.lang.Override 6406 public final com.google.protobuf.UnknownFieldSet getUnknownFields()6407 getUnknownFields() { 6408 return this.unknownFields; 6409 } FirstKeyOnlyFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6410 private FirstKeyOnlyFilter( 6411 com.google.protobuf.CodedInputStream input, 6412 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6413 throws com.google.protobuf.InvalidProtocolBufferException { 6414 initFields(); 6415 
com.google.protobuf.UnknownFieldSet.Builder unknownFields = 6416 com.google.protobuf.UnknownFieldSet.newBuilder(); 6417 try { 6418 boolean done = false; 6419 while (!done) { 6420 int tag = input.readTag(); 6421 switch (tag) { 6422 case 0: 6423 done = true; 6424 break; 6425 default: { 6426 if (!parseUnknownField(input, unknownFields, 6427 extensionRegistry, tag)) { 6428 done = true; 6429 } 6430 break; 6431 } 6432 } 6433 } 6434 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 6435 throw e.setUnfinishedMessage(this); 6436 } catch (java.io.IOException e) { 6437 throw new com.google.protobuf.InvalidProtocolBufferException( 6438 e.getMessage()).setUnfinishedMessage(this); 6439 } finally { 6440 this.unknownFields = unknownFields.build(); 6441 makeExtensionsImmutable(); 6442 } 6443 } 6444 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()6445 getDescriptor() { 6446 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_descriptor; 6447 } 6448 6449 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()6450 internalGetFieldAccessorTable() { 6451 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_fieldAccessorTable 6452 .ensureFieldAccessorsInitialized( 6453 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.Builder.class); 6454 } 6455 6456 public static com.google.protobuf.Parser<FirstKeyOnlyFilter> PARSER = 6457 new com.google.protobuf.AbstractParser<FirstKeyOnlyFilter>() { 6458 public FirstKeyOnlyFilter parsePartialFrom( 6459 com.google.protobuf.CodedInputStream input, 6460 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6461 throws com.google.protobuf.InvalidProtocolBufferException { 6462 return new FirstKeyOnlyFilter(input, extensionRegistry); 6463 } 6464 }; 6465 6466 
@java.lang.Override getParserForType()6467 public com.google.protobuf.Parser<FirstKeyOnlyFilter> getParserForType() { 6468 return PARSER; 6469 } 6470 initFields()6471 private void initFields() { 6472 } 6473 private byte memoizedIsInitialized = -1; isInitialized()6474 public final boolean isInitialized() { 6475 byte isInitialized = memoizedIsInitialized; 6476 if (isInitialized != -1) return isInitialized == 1; 6477 6478 memoizedIsInitialized = 1; 6479 return true; 6480 } 6481 writeTo(com.google.protobuf.CodedOutputStream output)6482 public void writeTo(com.google.protobuf.CodedOutputStream output) 6483 throws java.io.IOException { 6484 getSerializedSize(); 6485 getUnknownFields().writeTo(output); 6486 } 6487 6488 private int memoizedSerializedSize = -1; getSerializedSize()6489 public int getSerializedSize() { 6490 int size = memoizedSerializedSize; 6491 if (size != -1) return size; 6492 6493 size = 0; 6494 size += getUnknownFields().getSerializedSize(); 6495 memoizedSerializedSize = size; 6496 return size; 6497 } 6498 6499 private static final long serialVersionUID = 0L; 6500 @java.lang.Override writeReplace()6501 protected java.lang.Object writeReplace() 6502 throws java.io.ObjectStreamException { 6503 return super.writeReplace(); 6504 } 6505 6506 @java.lang.Override equals(final java.lang.Object obj)6507 public boolean equals(final java.lang.Object obj) { 6508 if (obj == this) { 6509 return true; 6510 } 6511 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter)) { 6512 return super.equals(obj); 6513 } 6514 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter) obj; 6515 6516 boolean result = true; 6517 result = result && 6518 getUnknownFields().equals(other.getUnknownFields()); 6519 return result; 6520 } 6521 6522 private int memoizedHashCode = 0; 6523 @java.lang.Override hashCode()6524 public int hashCode() { 6525 if 
(memoizedHashCode != 0) { 6526 return memoizedHashCode; 6527 } 6528 int hash = 41; 6529 hash = (19 * hash) + getDescriptorForType().hashCode(); 6530 hash = (29 * hash) + getUnknownFields().hashCode(); 6531 memoizedHashCode = hash; 6532 return hash; 6533 } 6534 parseFrom( com.google.protobuf.ByteString data)6535 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( 6536 com.google.protobuf.ByteString data) 6537 throws com.google.protobuf.InvalidProtocolBufferException { 6538 return PARSER.parseFrom(data); 6539 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6540 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( 6541 com.google.protobuf.ByteString data, 6542 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6543 throws com.google.protobuf.InvalidProtocolBufferException { 6544 return PARSER.parseFrom(data, extensionRegistry); 6545 } parseFrom(byte[] data)6546 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(byte[] data) 6547 throws com.google.protobuf.InvalidProtocolBufferException { 6548 return PARSER.parseFrom(data); 6549 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6550 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( 6551 byte[] data, 6552 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6553 throws com.google.protobuf.InvalidProtocolBufferException { 6554 return PARSER.parseFrom(data, extensionRegistry); 6555 } parseFrom(java.io.InputStream input)6556 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom(java.io.InputStream input) 6557 throws java.io.IOException { 6558 return PARSER.parseFrom(input); 6559 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite 
extensionRegistry)6560 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( 6561 java.io.InputStream input, 6562 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6563 throws java.io.IOException { 6564 return PARSER.parseFrom(input, extensionRegistry); 6565 } parseDelimitedFrom(java.io.InputStream input)6566 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseDelimitedFrom(java.io.InputStream input) 6567 throws java.io.IOException { 6568 return PARSER.parseDelimitedFrom(input); 6569 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6570 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseDelimitedFrom( 6571 java.io.InputStream input, 6572 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6573 throws java.io.IOException { 6574 return PARSER.parseDelimitedFrom(input, extensionRegistry); 6575 } parseFrom( com.google.protobuf.CodedInputStream input)6576 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( 6577 com.google.protobuf.CodedInputStream input) 6578 throws java.io.IOException { 6579 return PARSER.parseFrom(input); 6580 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6581 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parseFrom( 6582 com.google.protobuf.CodedInputStream input, 6583 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6584 throws java.io.IOException { 6585 return PARSER.parseFrom(input, extensionRegistry); 6586 } 6587 newBuilder()6588 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()6589 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter 
prototype)6590 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter prototype) { 6591 return newBuilder().mergeFrom(prototype); 6592 } toBuilder()6593 public Builder toBuilder() { return newBuilder(this); } 6594 6595 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)6596 protected Builder newBuilderForType( 6597 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 6598 Builder builder = new Builder(parent); 6599 return builder; 6600 } 6601 /** 6602 * Protobuf type {@code FirstKeyOnlyFilter} 6603 */ 6604 public static final class Builder extends 6605 com.google.protobuf.GeneratedMessage.Builder<Builder> 6606 implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilterOrBuilder { 6607 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()6608 getDescriptor() { 6609 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_descriptor; 6610 } 6611 6612 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()6613 internalGetFieldAccessorTable() { 6614 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_fieldAccessorTable 6615 .ensureFieldAccessorsInitialized( 6616 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.Builder.class); 6617 } 6618 6619 // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.newBuilder() Builder()6620 private Builder() { 6621 maybeForceBuilderInitialization(); 6622 } 6623 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)6624 private Builder( 6625 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 6626 super(parent); 6627 maybeForceBuilderInitialization(); 6628 } 
maybeForceBuilderInitialization()6629 private void maybeForceBuilderInitialization() { 6630 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 6631 } 6632 } create()6633 private static Builder create() { 6634 return new Builder(); 6635 } 6636 clear()6637 public Builder clear() { 6638 super.clear(); 6639 return this; 6640 } 6641 clone()6642 public Builder clone() { 6643 return create().mergeFrom(buildPartial()); 6644 } 6645 6646 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()6647 getDescriptorForType() { 6648 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyOnlyFilter_descriptor; 6649 } 6650 getDefaultInstanceForType()6651 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter getDefaultInstanceForType() { 6652 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.getDefaultInstance(); 6653 } 6654 build()6655 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter build() { 6656 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter result = buildPartial(); 6657 if (!result.isInitialized()) { 6658 throw newUninitializedMessageException(result); 6659 } 6660 return result; 6661 } 6662 buildPartial()6663 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter buildPartial() { 6664 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter(this); 6665 onBuilt(); 6666 return result; 6667 } 6668 mergeFrom(com.google.protobuf.Message other)6669 public Builder mergeFrom(com.google.protobuf.Message other) { 6670 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter) { 6671 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter)other); 6672 } else { 6673 super.mergeFrom(other); 6674 return 
this; 6675 } 6676 } 6677 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter other)6678 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter other) { 6679 if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter.getDefaultInstance()) return this; 6680 this.mergeUnknownFields(other.getUnknownFields()); 6681 return this; 6682 } 6683 isInitialized()6684 public final boolean isInitialized() { 6685 return true; 6686 } 6687 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6688 public Builder mergeFrom( 6689 com.google.protobuf.CodedInputStream input, 6690 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6691 throws java.io.IOException { 6692 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter parsedMessage = null; 6693 try { 6694 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 6695 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 6696 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyOnlyFilter) e.getUnfinishedMessage(); 6697 throw e; 6698 } finally { 6699 if (parsedMessage != null) { 6700 mergeFrom(parsedMessage); 6701 } 6702 } 6703 return this; 6704 } 6705 6706 // @@protoc_insertion_point(builder_scope:FirstKeyOnlyFilter) 6707 } 6708 6709 static { 6710 defaultInstance = new FirstKeyOnlyFilter(true); defaultInstance.initFields()6711 defaultInstance.initFields(); 6712 } 6713 6714 // @@protoc_insertion_point(class_scope:FirstKeyOnlyFilter) 6715 } 6716 6717 public interface FirstKeyValueMatchingQualifiersFilterOrBuilder 6718 extends com.google.protobuf.MessageOrBuilder { 6719 6720 // repeated bytes qualifiers = 1; 6721 /** 6722 * <code>repeated bytes qualifiers = 1;</code> 6723 */ getQualifiersList()6724 java.util.List<com.google.protobuf.ByteString> getQualifiersList(); 6725 /** 6726 * 
<code>repeated bytes qualifiers = 1;</code> 6727 */ getQualifiersCount()6728 int getQualifiersCount(); 6729 /** 6730 * <code>repeated bytes qualifiers = 1;</code> 6731 */ getQualifiers(int index)6732 com.google.protobuf.ByteString getQualifiers(int index); 6733 } 6734 /** 6735 * Protobuf type {@code FirstKeyValueMatchingQualifiersFilter} 6736 */ 6737 public static final class FirstKeyValueMatchingQualifiersFilter extends 6738 com.google.protobuf.GeneratedMessage 6739 implements FirstKeyValueMatchingQualifiersFilterOrBuilder { 6740 // Use FirstKeyValueMatchingQualifiersFilter.newBuilder() to construct. FirstKeyValueMatchingQualifiersFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder)6741 private FirstKeyValueMatchingQualifiersFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 6742 super(builder); 6743 this.unknownFields = builder.getUnknownFields(); 6744 } FirstKeyValueMatchingQualifiersFilter(boolean noInit)6745 private FirstKeyValueMatchingQualifiersFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 6746 6747 private static final FirstKeyValueMatchingQualifiersFilter defaultInstance; getDefaultInstance()6748 public static FirstKeyValueMatchingQualifiersFilter getDefaultInstance() { 6749 return defaultInstance; 6750 } 6751 getDefaultInstanceForType()6752 public FirstKeyValueMatchingQualifiersFilter getDefaultInstanceForType() { 6753 return defaultInstance; 6754 } 6755 6756 private final com.google.protobuf.UnknownFieldSet unknownFields; 6757 @java.lang.Override 6758 public final com.google.protobuf.UnknownFieldSet getUnknownFields()6759 getUnknownFields() { 6760 return this.unknownFields; 6761 } FirstKeyValueMatchingQualifiersFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6762 private FirstKeyValueMatchingQualifiersFilter( 6763 com.google.protobuf.CodedInputStream input, 6764 com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) 6765 throws com.google.protobuf.InvalidProtocolBufferException { 6766 initFields(); 6767 int mutable_bitField0_ = 0; 6768 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 6769 com.google.protobuf.UnknownFieldSet.newBuilder(); 6770 try { 6771 boolean done = false; 6772 while (!done) { 6773 int tag = input.readTag(); 6774 switch (tag) { 6775 case 0: 6776 done = true; 6777 break; 6778 default: { 6779 if (!parseUnknownField(input, unknownFields, 6780 extensionRegistry, tag)) { 6781 done = true; 6782 } 6783 break; 6784 } 6785 case 10: { 6786 if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { 6787 qualifiers_ = new java.util.ArrayList<com.google.protobuf.ByteString>(); 6788 mutable_bitField0_ |= 0x00000001; 6789 } 6790 qualifiers_.add(input.readBytes()); 6791 break; 6792 } 6793 } 6794 } 6795 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 6796 throw e.setUnfinishedMessage(this); 6797 } catch (java.io.IOException e) { 6798 throw new com.google.protobuf.InvalidProtocolBufferException( 6799 e.getMessage()).setUnfinishedMessage(this); 6800 } finally { 6801 if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { 6802 qualifiers_ = java.util.Collections.unmodifiableList(qualifiers_); 6803 } 6804 this.unknownFields = unknownFields.build(); 6805 makeExtensionsImmutable(); 6806 } 6807 } 6808 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()6809 getDescriptor() { 6810 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_descriptor; 6811 } 6812 6813 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()6814 internalGetFieldAccessorTable() { 6815 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable 6816 .ensureFieldAccessorsInitialized( 6817 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.Builder.class); 6818 } 6819 6820 public static com.google.protobuf.Parser<FirstKeyValueMatchingQualifiersFilter> PARSER = 6821 new com.google.protobuf.AbstractParser<FirstKeyValueMatchingQualifiersFilter>() { 6822 public FirstKeyValueMatchingQualifiersFilter parsePartialFrom( 6823 com.google.protobuf.CodedInputStream input, 6824 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6825 throws com.google.protobuf.InvalidProtocolBufferException { 6826 return new FirstKeyValueMatchingQualifiersFilter(input, extensionRegistry); 6827 } 6828 }; 6829 6830 @java.lang.Override getParserForType()6831 public com.google.protobuf.Parser<FirstKeyValueMatchingQualifiersFilter> getParserForType() { 6832 return PARSER; 6833 } 6834 6835 // repeated bytes qualifiers = 1; 6836 public static final int QUALIFIERS_FIELD_NUMBER = 1; 6837 private java.util.List<com.google.protobuf.ByteString> qualifiers_; 6838 /** 6839 * <code>repeated bytes qualifiers = 1;</code> 6840 */ 6841 public java.util.List<com.google.protobuf.ByteString> getQualifiersList()6842 getQualifiersList() { 6843 return qualifiers_; 6844 } 6845 /** 6846 * <code>repeated bytes qualifiers = 1;</code> 6847 */ getQualifiersCount()6848 public int getQualifiersCount() { 6849 return qualifiers_.size(); 6850 } 6851 /** 6852 * <code>repeated bytes qualifiers = 1;</code> 6853 */ getQualifiers(int index)6854 public com.google.protobuf.ByteString getQualifiers(int index) { 6855 return qualifiers_.get(index); 6856 } 6857 initFields()6858 private void initFields() { 6859 qualifiers_ = java.util.Collections.emptyList(); 6860 } 6861 private byte memoizedIsInitialized = -1; isInitialized()6862 public final boolean isInitialized() { 6863 byte isInitialized = memoizedIsInitialized; 6864 if (isInitialized != -1) return isInitialized == 1; 6865 
6866 memoizedIsInitialized = 1; 6867 return true; 6868 } 6869 writeTo(com.google.protobuf.CodedOutputStream output)6870 public void writeTo(com.google.protobuf.CodedOutputStream output) 6871 throws java.io.IOException { 6872 getSerializedSize(); 6873 for (int i = 0; i < qualifiers_.size(); i++) { 6874 output.writeBytes(1, qualifiers_.get(i)); 6875 } 6876 getUnknownFields().writeTo(output); 6877 } 6878 6879 private int memoizedSerializedSize = -1; getSerializedSize()6880 public int getSerializedSize() { 6881 int size = memoizedSerializedSize; 6882 if (size != -1) return size; 6883 6884 size = 0; 6885 { 6886 int dataSize = 0; 6887 for (int i = 0; i < qualifiers_.size(); i++) { 6888 dataSize += com.google.protobuf.CodedOutputStream 6889 .computeBytesSizeNoTag(qualifiers_.get(i)); 6890 } 6891 size += dataSize; 6892 size += 1 * getQualifiersList().size(); 6893 } 6894 size += getUnknownFields().getSerializedSize(); 6895 memoizedSerializedSize = size; 6896 return size; 6897 } 6898 6899 private static final long serialVersionUID = 0L; 6900 @java.lang.Override writeReplace()6901 protected java.lang.Object writeReplace() 6902 throws java.io.ObjectStreamException { 6903 return super.writeReplace(); 6904 } 6905 6906 @java.lang.Override equals(final java.lang.Object obj)6907 public boolean equals(final java.lang.Object obj) { 6908 if (obj == this) { 6909 return true; 6910 } 6911 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter)) { 6912 return super.equals(obj); 6913 } 6914 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter) obj; 6915 6916 boolean result = true; 6917 result = result && getQualifiersList() 6918 .equals(other.getQualifiersList()); 6919 result = result && 6920 getUnknownFields().equals(other.getUnknownFields()); 6921 return result; 6922 } 6923 6924 private 
int memoizedHashCode = 0; 6925 @java.lang.Override hashCode()6926 public int hashCode() { 6927 if (memoizedHashCode != 0) { 6928 return memoizedHashCode; 6929 } 6930 int hash = 41; 6931 hash = (19 * hash) + getDescriptorForType().hashCode(); 6932 if (getQualifiersCount() > 0) { 6933 hash = (37 * hash) + QUALIFIERS_FIELD_NUMBER; 6934 hash = (53 * hash) + getQualifiersList().hashCode(); 6935 } 6936 hash = (29 * hash) + getUnknownFields().hashCode(); 6937 memoizedHashCode = hash; 6938 return hash; 6939 } 6940 parseFrom( com.google.protobuf.ByteString data)6941 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( 6942 com.google.protobuf.ByteString data) 6943 throws com.google.protobuf.InvalidProtocolBufferException { 6944 return PARSER.parseFrom(data); 6945 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6946 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( 6947 com.google.protobuf.ByteString data, 6948 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6949 throws com.google.protobuf.InvalidProtocolBufferException { 6950 return PARSER.parseFrom(data, extensionRegistry); 6951 } parseFrom(byte[] data)6952 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(byte[] data) 6953 throws com.google.protobuf.InvalidProtocolBufferException { 6954 return PARSER.parseFrom(data); 6955 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6956 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( 6957 byte[] data, 6958 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6959 throws com.google.protobuf.InvalidProtocolBufferException { 6960 return PARSER.parseFrom(data, extensionRegistry); 6961 } 
parseFrom(java.io.InputStream input)6962 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom(java.io.InputStream input) 6963 throws java.io.IOException { 6964 return PARSER.parseFrom(input); 6965 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6966 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( 6967 java.io.InputStream input, 6968 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6969 throws java.io.IOException { 6970 return PARSER.parseFrom(input, extensionRegistry); 6971 } parseDelimitedFrom(java.io.InputStream input)6972 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseDelimitedFrom(java.io.InputStream input) 6973 throws java.io.IOException { 6974 return PARSER.parseDelimitedFrom(input); 6975 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6976 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseDelimitedFrom( 6977 java.io.InputStream input, 6978 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6979 throws java.io.IOException { 6980 return PARSER.parseDelimitedFrom(input, extensionRegistry); 6981 } parseFrom( com.google.protobuf.CodedInputStream input)6982 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( 6983 com.google.protobuf.CodedInputStream input) 6984 throws java.io.IOException { 6985 return PARSER.parseFrom(input); 6986 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)6987 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parseFrom( 6988 
com.google.protobuf.CodedInputStream input, 6989 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 6990 throws java.io.IOException { 6991 return PARSER.parseFrom(input, extensionRegistry); 6992 } 6993 newBuilder()6994 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()6995 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter prototype)6996 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter prototype) { 6997 return newBuilder().mergeFrom(prototype); 6998 } toBuilder()6999 public Builder toBuilder() { return newBuilder(this); } 7000 7001 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)7002 protected Builder newBuilderForType( 7003 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 7004 Builder builder = new Builder(parent); 7005 return builder; 7006 } 7007 /** 7008 * Protobuf type {@code FirstKeyValueMatchingQualifiersFilter} 7009 */ 7010 public static final class Builder extends 7011 com.google.protobuf.GeneratedMessage.Builder<Builder> 7012 implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilterOrBuilder { 7013 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()7014 getDescriptor() { 7015 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_descriptor; 7016 } 7017 7018 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()7019 internalGetFieldAccessorTable() { 7020 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_fieldAccessorTable 7021 .ensureFieldAccessorsInitialized( 7022 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.Builder.class); 7023 } 7024 7025 // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.newBuilder() Builder()7026 private Builder() { 7027 maybeForceBuilderInitialization(); 7028 } 7029 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)7030 private Builder( 7031 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 7032 super(parent); 7033 maybeForceBuilderInitialization(); 7034 } maybeForceBuilderInitialization()7035 private void maybeForceBuilderInitialization() { 7036 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 7037 } 7038 } create()7039 private static Builder create() { 7040 return new Builder(); 7041 } 7042 clear()7043 public Builder clear() { 7044 super.clear(); 7045 qualifiers_ = java.util.Collections.emptyList(); 7046 bitField0_ = (bitField0_ & ~0x00000001); 7047 return this; 7048 } 7049 clone()7050 public Builder clone() { 7051 return create().mergeFrom(buildPartial()); 7052 } 7053 7054 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()7055 getDescriptorForType() { 7056 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FirstKeyValueMatchingQualifiersFilter_descriptor; 7057 } 7058 getDefaultInstanceForType()7059 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter getDefaultInstanceForType() { 7060 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.getDefaultInstance(); 7061 } 7062 build()7063 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter build() { 7064 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter result = 
buildPartial(); 7065 if (!result.isInitialized()) { 7066 throw newUninitializedMessageException(result); 7067 } 7068 return result; 7069 } 7070 buildPartial()7071 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter buildPartial() { 7072 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter(this); 7073 int from_bitField0_ = bitField0_; 7074 if (((bitField0_ & 0x00000001) == 0x00000001)) { 7075 qualifiers_ = java.util.Collections.unmodifiableList(qualifiers_); 7076 bitField0_ = (bitField0_ & ~0x00000001); 7077 } 7078 result.qualifiers_ = qualifiers_; 7079 onBuilt(); 7080 return result; 7081 } 7082 mergeFrom(com.google.protobuf.Message other)7083 public Builder mergeFrom(com.google.protobuf.Message other) { 7084 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter) { 7085 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter)other); 7086 } else { 7087 super.mergeFrom(other); 7088 return this; 7089 } 7090 } 7091 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter other)7092 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter other) { 7093 if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter.getDefaultInstance()) return this; 7094 if (!other.qualifiers_.isEmpty()) { 7095 if (qualifiers_.isEmpty()) { 7096 qualifiers_ = other.qualifiers_; 7097 bitField0_ = (bitField0_ & ~0x00000001); 7098 } else { 7099 ensureQualifiersIsMutable(); 7100 qualifiers_.addAll(other.qualifiers_); 7101 } 7102 onChanged(); 7103 } 7104 this.mergeUnknownFields(other.getUnknownFields()); 7105 return this; 7106 } 7107 
isInitialized()7108 public final boolean isInitialized() { 7109 return true; 7110 } 7111 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7112 public Builder mergeFrom( 7113 com.google.protobuf.CodedInputStream input, 7114 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7115 throws java.io.IOException { 7116 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter parsedMessage = null; 7117 try { 7118 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 7119 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 7120 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FirstKeyValueMatchingQualifiersFilter) e.getUnfinishedMessage(); 7121 throw e; 7122 } finally { 7123 if (parsedMessage != null) { 7124 mergeFrom(parsedMessage); 7125 } 7126 } 7127 return this; 7128 } 7129 private int bitField0_; 7130 7131 // repeated bytes qualifiers = 1; 7132 private java.util.List<com.google.protobuf.ByteString> qualifiers_ = java.util.Collections.emptyList(); ensureQualifiersIsMutable()7133 private void ensureQualifiersIsMutable() { 7134 if (!((bitField0_ & 0x00000001) == 0x00000001)) { 7135 qualifiers_ = new java.util.ArrayList<com.google.protobuf.ByteString>(qualifiers_); 7136 bitField0_ |= 0x00000001; 7137 } 7138 } 7139 /** 7140 * <code>repeated bytes qualifiers = 1;</code> 7141 */ 7142 public java.util.List<com.google.protobuf.ByteString> getQualifiersList()7143 getQualifiersList() { 7144 return java.util.Collections.unmodifiableList(qualifiers_); 7145 } 7146 /** 7147 * <code>repeated bytes qualifiers = 1;</code> 7148 */ getQualifiersCount()7149 public int getQualifiersCount() { 7150 return qualifiers_.size(); 7151 } 7152 /** 7153 * <code>repeated bytes qualifiers = 1;</code> 7154 */ getQualifiers(int index)7155 public com.google.protobuf.ByteString getQualifiers(int index) { 7156 return qualifiers_.get(index); 7157 } 
7158 /** 7159 * <code>repeated bytes qualifiers = 1;</code> 7160 */ setQualifiers( int index, com.google.protobuf.ByteString value)7161 public Builder setQualifiers( 7162 int index, com.google.protobuf.ByteString value) { 7163 if (value == null) { 7164 throw new NullPointerException(); 7165 } 7166 ensureQualifiersIsMutable(); 7167 qualifiers_.set(index, value); 7168 onChanged(); 7169 return this; 7170 } 7171 /** 7172 * <code>repeated bytes qualifiers = 1;</code> 7173 */ addQualifiers(com.google.protobuf.ByteString value)7174 public Builder addQualifiers(com.google.protobuf.ByteString value) { 7175 if (value == null) { 7176 throw new NullPointerException(); 7177 } 7178 ensureQualifiersIsMutable(); 7179 qualifiers_.add(value); 7180 onChanged(); 7181 return this; 7182 } 7183 /** 7184 * <code>repeated bytes qualifiers = 1;</code> 7185 */ addAllQualifiers( java.lang.Iterable<? extends com.google.protobuf.ByteString> values)7186 public Builder addAllQualifiers( 7187 java.lang.Iterable<? extends com.google.protobuf.ByteString> values) { 7188 ensureQualifiersIsMutable(); 7189 super.addAll(values, qualifiers_); 7190 onChanged(); 7191 return this; 7192 } 7193 /** 7194 * <code>repeated bytes qualifiers = 1;</code> 7195 */ clearQualifiers()7196 public Builder clearQualifiers() { 7197 qualifiers_ = java.util.Collections.emptyList(); 7198 bitField0_ = (bitField0_ & ~0x00000001); 7199 onChanged(); 7200 return this; 7201 } 7202 7203 // @@protoc_insertion_point(builder_scope:FirstKeyValueMatchingQualifiersFilter) 7204 } 7205 7206 static { 7207 defaultInstance = new FirstKeyValueMatchingQualifiersFilter(true); defaultInstance.initFields()7208 defaultInstance.initFields(); 7209 } 7210 7211 // @@protoc_insertion_point(class_scope:FirstKeyValueMatchingQualifiersFilter) 7212 } 7213 7214 public interface FuzzyRowFilterOrBuilder 7215 extends com.google.protobuf.MessageOrBuilder { 7216 7217 // repeated .BytesBytesPair fuzzy_keys_data = 1; 7218 /** 7219 * <code>repeated .BytesBytesPair 
fuzzy_keys_data = 1;</code> 7220 */ 7221 java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getFuzzyKeysDataList()7222 getFuzzyKeysDataList(); 7223 /** 7224 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7225 */ getFuzzyKeysData(int index)7226 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getFuzzyKeysData(int index); 7227 /** 7228 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7229 */ getFuzzyKeysDataCount()7230 int getFuzzyKeysDataCount(); 7231 /** 7232 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7233 */ 7234 java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getFuzzyKeysDataOrBuilderList()7235 getFuzzyKeysDataOrBuilderList(); 7236 /** 7237 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7238 */ getFuzzyKeysDataOrBuilder( int index)7239 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getFuzzyKeysDataOrBuilder( 7240 int index); 7241 } 7242 /** 7243 * Protobuf type {@code FuzzyRowFilter} 7244 */ 7245 public static final class FuzzyRowFilter extends 7246 com.google.protobuf.GeneratedMessage 7247 implements FuzzyRowFilterOrBuilder { 7248 // Use FuzzyRowFilter.newBuilder() to construct. 
FuzzyRowFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder)7249 private FuzzyRowFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 7250 super(builder); 7251 this.unknownFields = builder.getUnknownFields(); 7252 } FuzzyRowFilter(boolean noInit)7253 private FuzzyRowFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 7254 7255 private static final FuzzyRowFilter defaultInstance; getDefaultInstance()7256 public static FuzzyRowFilter getDefaultInstance() { 7257 return defaultInstance; 7258 } 7259 getDefaultInstanceForType()7260 public FuzzyRowFilter getDefaultInstanceForType() { 7261 return defaultInstance; 7262 } 7263 7264 private final com.google.protobuf.UnknownFieldSet unknownFields; 7265 @java.lang.Override 7266 public final com.google.protobuf.UnknownFieldSet getUnknownFields()7267 getUnknownFields() { 7268 return this.unknownFields; 7269 } FuzzyRowFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7270 private FuzzyRowFilter( 7271 com.google.protobuf.CodedInputStream input, 7272 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7273 throws com.google.protobuf.InvalidProtocolBufferException { 7274 initFields(); 7275 int mutable_bitField0_ = 0; 7276 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 7277 com.google.protobuf.UnknownFieldSet.newBuilder(); 7278 try { 7279 boolean done = false; 7280 while (!done) { 7281 int tag = input.readTag(); 7282 switch (tag) { 7283 case 0: 7284 done = true; 7285 break; 7286 default: { 7287 if (!parseUnknownField(input, unknownFields, 7288 extensionRegistry, tag)) { 7289 done = true; 7290 } 7291 break; 7292 } 7293 case 10: { 7294 if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { 7295 fuzzyKeysData_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>(); 7296 mutable_bitField0_ |= 0x00000001; 7297 } 7298 
fuzzyKeysData_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry)); 7299 break; 7300 } 7301 } 7302 } 7303 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 7304 throw e.setUnfinishedMessage(this); 7305 } catch (java.io.IOException e) { 7306 throw new com.google.protobuf.InvalidProtocolBufferException( 7307 e.getMessage()).setUnfinishedMessage(this); 7308 } finally { 7309 if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { 7310 fuzzyKeysData_ = java.util.Collections.unmodifiableList(fuzzyKeysData_); 7311 } 7312 this.unknownFields = unknownFields.build(); 7313 makeExtensionsImmutable(); 7314 } 7315 } 7316 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()7317 getDescriptor() { 7318 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_descriptor; 7319 } 7320 7321 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()7322 internalGetFieldAccessorTable() { 7323 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_fieldAccessorTable 7324 .ensureFieldAccessorsInitialized( 7325 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.Builder.class); 7326 } 7327 7328 public static com.google.protobuf.Parser<FuzzyRowFilter> PARSER = 7329 new com.google.protobuf.AbstractParser<FuzzyRowFilter>() { 7330 public FuzzyRowFilter parsePartialFrom( 7331 com.google.protobuf.CodedInputStream input, 7332 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7333 throws com.google.protobuf.InvalidProtocolBufferException { 7334 return new FuzzyRowFilter(input, extensionRegistry); 7335 } 7336 }; 7337 7338 @java.lang.Override getParserForType()7339 public com.google.protobuf.Parser<FuzzyRowFilter> getParserForType() { 7340 return PARSER; 7341 } 7342 7343 // 
repeated .BytesBytesPair fuzzy_keys_data = 1; 7344 public static final int FUZZY_KEYS_DATA_FIELD_NUMBER = 1; 7345 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> fuzzyKeysData_; 7346 /** 7347 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7348 */ getFuzzyKeysDataList()7349 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getFuzzyKeysDataList() { 7350 return fuzzyKeysData_; 7351 } 7352 /** 7353 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7354 */ 7355 public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getFuzzyKeysDataOrBuilderList()7356 getFuzzyKeysDataOrBuilderList() { 7357 return fuzzyKeysData_; 7358 } 7359 /** 7360 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7361 */ getFuzzyKeysDataCount()7362 public int getFuzzyKeysDataCount() { 7363 return fuzzyKeysData_.size(); 7364 } 7365 /** 7366 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7367 */ getFuzzyKeysData(int index)7368 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getFuzzyKeysData(int index) { 7369 return fuzzyKeysData_.get(index); 7370 } 7371 /** 7372 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7373 */ getFuzzyKeysDataOrBuilder( int index)7374 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getFuzzyKeysDataOrBuilder( 7375 int index) { 7376 return fuzzyKeysData_.get(index); 7377 } 7378 initFields()7379 private void initFields() { 7380 fuzzyKeysData_ = java.util.Collections.emptyList(); 7381 } 7382 private byte memoizedIsInitialized = -1; isInitialized()7383 public final boolean isInitialized() { 7384 byte isInitialized = memoizedIsInitialized; 7385 if (isInitialized != -1) return isInitialized == 1; 7386 7387 for (int i = 0; i < getFuzzyKeysDataCount(); i++) { 7388 if (!getFuzzyKeysData(i).isInitialized()) { 7389 
memoizedIsInitialized = 0; 7390 return false; 7391 } 7392 } 7393 memoizedIsInitialized = 1; 7394 return true; 7395 } 7396 writeTo(com.google.protobuf.CodedOutputStream output)7397 public void writeTo(com.google.protobuf.CodedOutputStream output) 7398 throws java.io.IOException { 7399 getSerializedSize(); 7400 for (int i = 0; i < fuzzyKeysData_.size(); i++) { 7401 output.writeMessage(1, fuzzyKeysData_.get(i)); 7402 } 7403 getUnknownFields().writeTo(output); 7404 } 7405 7406 private int memoizedSerializedSize = -1; getSerializedSize()7407 public int getSerializedSize() { 7408 int size = memoizedSerializedSize; 7409 if (size != -1) return size; 7410 7411 size = 0; 7412 for (int i = 0; i < fuzzyKeysData_.size(); i++) { 7413 size += com.google.protobuf.CodedOutputStream 7414 .computeMessageSize(1, fuzzyKeysData_.get(i)); 7415 } 7416 size += getUnknownFields().getSerializedSize(); 7417 memoizedSerializedSize = size; 7418 return size; 7419 } 7420 7421 private static final long serialVersionUID = 0L; 7422 @java.lang.Override writeReplace()7423 protected java.lang.Object writeReplace() 7424 throws java.io.ObjectStreamException { 7425 return super.writeReplace(); 7426 } 7427 7428 @java.lang.Override equals(final java.lang.Object obj)7429 public boolean equals(final java.lang.Object obj) { 7430 if (obj == this) { 7431 return true; 7432 } 7433 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter)) { 7434 return super.equals(obj); 7435 } 7436 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter) obj; 7437 7438 boolean result = true; 7439 result = result && getFuzzyKeysDataList() 7440 .equals(other.getFuzzyKeysDataList()); 7441 result = result && 7442 getUnknownFields().equals(other.getUnknownFields()); 7443 return result; 7444 } 7445 7446 private int memoizedHashCode = 0; 7447 @java.lang.Override hashCode()7448 public int hashCode() { 7449 if 
(memoizedHashCode != 0) { 7450 return memoizedHashCode; 7451 } 7452 int hash = 41; 7453 hash = (19 * hash) + getDescriptorForType().hashCode(); 7454 if (getFuzzyKeysDataCount() > 0) { 7455 hash = (37 * hash) + FUZZY_KEYS_DATA_FIELD_NUMBER; 7456 hash = (53 * hash) + getFuzzyKeysDataList().hashCode(); 7457 } 7458 hash = (29 * hash) + getUnknownFields().hashCode(); 7459 memoizedHashCode = hash; 7460 return hash; 7461 } 7462 parseFrom( com.google.protobuf.ByteString data)7463 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( 7464 com.google.protobuf.ByteString data) 7465 throws com.google.protobuf.InvalidProtocolBufferException { 7466 return PARSER.parseFrom(data); 7467 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7468 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( 7469 com.google.protobuf.ByteString data, 7470 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7471 throws com.google.protobuf.InvalidProtocolBufferException { 7472 return PARSER.parseFrom(data, extensionRegistry); 7473 } parseFrom(byte[] data)7474 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(byte[] data) 7475 throws com.google.protobuf.InvalidProtocolBufferException { 7476 return PARSER.parseFrom(data); 7477 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7478 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( 7479 byte[] data, 7480 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7481 throws com.google.protobuf.InvalidProtocolBufferException { 7482 return PARSER.parseFrom(data, extensionRegistry); 7483 } parseFrom(java.io.InputStream input)7484 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom(java.io.InputStream input) 7485 throws 
java.io.IOException { 7486 return PARSER.parseFrom(input); 7487 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7488 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( 7489 java.io.InputStream input, 7490 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7491 throws java.io.IOException { 7492 return PARSER.parseFrom(input, extensionRegistry); 7493 } parseDelimitedFrom(java.io.InputStream input)7494 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseDelimitedFrom(java.io.InputStream input) 7495 throws java.io.IOException { 7496 return PARSER.parseDelimitedFrom(input); 7497 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7498 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseDelimitedFrom( 7499 java.io.InputStream input, 7500 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7501 throws java.io.IOException { 7502 return PARSER.parseDelimitedFrom(input, extensionRegistry); 7503 } parseFrom( com.google.protobuf.CodedInputStream input)7504 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( 7505 com.google.protobuf.CodedInputStream input) 7506 throws java.io.IOException { 7507 return PARSER.parseFrom(input); 7508 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7509 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parseFrom( 7510 com.google.protobuf.CodedInputStream input, 7511 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7512 throws java.io.IOException { 7513 return PARSER.parseFrom(input, extensionRegistry); 7514 } 7515 newBuilder()7516 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()7517 public Builder 
newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter prototype)7518 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter prototype) { 7519 return newBuilder().mergeFrom(prototype); 7520 } toBuilder()7521 public Builder toBuilder() { return newBuilder(this); } 7522 7523 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)7524 protected Builder newBuilderForType( 7525 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 7526 Builder builder = new Builder(parent); 7527 return builder; 7528 } 7529 /** 7530 * Protobuf type {@code FuzzyRowFilter} 7531 */ 7532 public static final class Builder extends 7533 com.google.protobuf.GeneratedMessage.Builder<Builder> 7534 implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilterOrBuilder { 7535 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()7536 getDescriptor() { 7537 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_descriptor; 7538 } 7539 7540 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()7541 internalGetFieldAccessorTable() { 7542 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_fieldAccessorTable 7543 .ensureFieldAccessorsInitialized( 7544 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.Builder.class); 7545 } 7546 7547 // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.newBuilder() Builder()7548 private Builder() { 7549 maybeForceBuilderInitialization(); 7550 } 7551 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)7552 private Builder( 7553 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 7554 
super(parent); 7555 maybeForceBuilderInitialization(); 7556 } maybeForceBuilderInitialization()7557 private void maybeForceBuilderInitialization() { 7558 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 7559 getFuzzyKeysDataFieldBuilder(); 7560 } 7561 } create()7562 private static Builder create() { 7563 return new Builder(); 7564 } 7565 clear()7566 public Builder clear() { 7567 super.clear(); 7568 if (fuzzyKeysDataBuilder_ == null) { 7569 fuzzyKeysData_ = java.util.Collections.emptyList(); 7570 bitField0_ = (bitField0_ & ~0x00000001); 7571 } else { 7572 fuzzyKeysDataBuilder_.clear(); 7573 } 7574 return this; 7575 } 7576 clone()7577 public Builder clone() { 7578 return create().mergeFrom(buildPartial()); 7579 } 7580 7581 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()7582 getDescriptorForType() { 7583 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_FuzzyRowFilter_descriptor; 7584 } 7585 getDefaultInstanceForType()7586 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter getDefaultInstanceForType() { 7587 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.getDefaultInstance(); 7588 } 7589 build()7590 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter build() { 7591 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter result = buildPartial(); 7592 if (!result.isInitialized()) { 7593 throw newUninitializedMessageException(result); 7594 } 7595 return result; 7596 } 7597 buildPartial()7598 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter buildPartial() { 7599 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter(this); 7600 int from_bitField0_ = bitField0_; 7601 if (fuzzyKeysDataBuilder_ == null) { 7602 if (((bitField0_ & 0x00000001) == 0x00000001)) { 7603 fuzzyKeysData_ = 
java.util.Collections.unmodifiableList(fuzzyKeysData_); 7604 bitField0_ = (bitField0_ & ~0x00000001); 7605 } 7606 result.fuzzyKeysData_ = fuzzyKeysData_; 7607 } else { 7608 result.fuzzyKeysData_ = fuzzyKeysDataBuilder_.build(); 7609 } 7610 onBuilt(); 7611 return result; 7612 } 7613 mergeFrom(com.google.protobuf.Message other)7614 public Builder mergeFrom(com.google.protobuf.Message other) { 7615 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter) { 7616 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter)other); 7617 } else { 7618 super.mergeFrom(other); 7619 return this; 7620 } 7621 } 7622 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter other)7623 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter other) { 7624 if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter.getDefaultInstance()) return this; 7625 if (fuzzyKeysDataBuilder_ == null) { 7626 if (!other.fuzzyKeysData_.isEmpty()) { 7627 if (fuzzyKeysData_.isEmpty()) { 7628 fuzzyKeysData_ = other.fuzzyKeysData_; 7629 bitField0_ = (bitField0_ & ~0x00000001); 7630 } else { 7631 ensureFuzzyKeysDataIsMutable(); 7632 fuzzyKeysData_.addAll(other.fuzzyKeysData_); 7633 } 7634 onChanged(); 7635 } 7636 } else { 7637 if (!other.fuzzyKeysData_.isEmpty()) { 7638 if (fuzzyKeysDataBuilder_.isEmpty()) { 7639 fuzzyKeysDataBuilder_.dispose(); 7640 fuzzyKeysDataBuilder_ = null; 7641 fuzzyKeysData_ = other.fuzzyKeysData_; 7642 bitField0_ = (bitField0_ & ~0x00000001); 7643 fuzzyKeysDataBuilder_ = 7644 com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ? 
7645 getFuzzyKeysDataFieldBuilder() : null; 7646 } else { 7647 fuzzyKeysDataBuilder_.addAllMessages(other.fuzzyKeysData_); 7648 } 7649 } 7650 } 7651 this.mergeUnknownFields(other.getUnknownFields()); 7652 return this; 7653 } 7654 isInitialized()7655 public final boolean isInitialized() { 7656 for (int i = 0; i < getFuzzyKeysDataCount(); i++) { 7657 if (!getFuzzyKeysData(i).isInitialized()) { 7658 7659 return false; 7660 } 7661 } 7662 return true; 7663 } 7664 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7665 public Builder mergeFrom( 7666 com.google.protobuf.CodedInputStream input, 7667 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7668 throws java.io.IOException { 7669 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter parsedMessage = null; 7670 try { 7671 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 7672 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 7673 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.FuzzyRowFilter) e.getUnfinishedMessage(); 7674 throw e; 7675 } finally { 7676 if (parsedMessage != null) { 7677 mergeFrom(parsedMessage); 7678 } 7679 } 7680 return this; 7681 } 7682 private int bitField0_; 7683 7684 // repeated .BytesBytesPair fuzzy_keys_data = 1; 7685 private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> fuzzyKeysData_ = 7686 java.util.Collections.emptyList(); ensureFuzzyKeysDataIsMutable()7687 private void ensureFuzzyKeysDataIsMutable() { 7688 if (!((bitField0_ & 0x00000001) == 0x00000001)) { 7689 fuzzyKeysData_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>(fuzzyKeysData_); 7690 bitField0_ |= 0x00000001; 7691 } 7692 } 7693 7694 private com.google.protobuf.RepeatedFieldBuilder< 7695 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> fuzzyKeysDataBuilder_; 7696 7697 /** 7698 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7699 */ getFuzzyKeysDataList()7700 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getFuzzyKeysDataList() { 7701 if (fuzzyKeysDataBuilder_ == null) { 7702 return java.util.Collections.unmodifiableList(fuzzyKeysData_); 7703 } else { 7704 return fuzzyKeysDataBuilder_.getMessageList(); 7705 } 7706 } 7707 /** 7708 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7709 */ getFuzzyKeysDataCount()7710 public int getFuzzyKeysDataCount() { 7711 if (fuzzyKeysDataBuilder_ == null) { 7712 return fuzzyKeysData_.size(); 7713 } else { 7714 return fuzzyKeysDataBuilder_.getCount(); 7715 } 7716 } 7717 /** 7718 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7719 */ getFuzzyKeysData(int index)7720 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getFuzzyKeysData(int index) { 7721 if (fuzzyKeysDataBuilder_ == null) { 7722 return fuzzyKeysData_.get(index); 7723 } else { 7724 return fuzzyKeysDataBuilder_.getMessage(index); 7725 } 7726 } 7727 /** 7728 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7729 */ setFuzzyKeysData( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value)7730 public Builder setFuzzyKeysData( 7731 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { 7732 if (fuzzyKeysDataBuilder_ == null) { 7733 if (value == null) { 7734 throw new NullPointerException(); 7735 } 7736 ensureFuzzyKeysDataIsMutable(); 7737 fuzzyKeysData_.set(index, value); 7738 onChanged(); 7739 } else { 7740 fuzzyKeysDataBuilder_.setMessage(index, value); 7741 } 7742 return this; 7743 } 7744 /** 7745 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7746 */ 
setFuzzyKeysData( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue)7747 public Builder setFuzzyKeysData( 7748 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { 7749 if (fuzzyKeysDataBuilder_ == null) { 7750 ensureFuzzyKeysDataIsMutable(); 7751 fuzzyKeysData_.set(index, builderForValue.build()); 7752 onChanged(); 7753 } else { 7754 fuzzyKeysDataBuilder_.setMessage(index, builderForValue.build()); 7755 } 7756 return this; 7757 } 7758 /** 7759 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7760 */ addFuzzyKeysData(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value)7761 public Builder addFuzzyKeysData(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { 7762 if (fuzzyKeysDataBuilder_ == null) { 7763 if (value == null) { 7764 throw new NullPointerException(); 7765 } 7766 ensureFuzzyKeysDataIsMutable(); 7767 fuzzyKeysData_.add(value); 7768 onChanged(); 7769 } else { 7770 fuzzyKeysDataBuilder_.addMessage(value); 7771 } 7772 return this; 7773 } 7774 /** 7775 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7776 */ addFuzzyKeysData( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value)7777 public Builder addFuzzyKeysData( 7778 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) { 7779 if (fuzzyKeysDataBuilder_ == null) { 7780 if (value == null) { 7781 throw new NullPointerException(); 7782 } 7783 ensureFuzzyKeysDataIsMutable(); 7784 fuzzyKeysData_.add(index, value); 7785 onChanged(); 7786 } else { 7787 fuzzyKeysDataBuilder_.addMessage(index, value); 7788 } 7789 return this; 7790 } 7791 /** 7792 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7793 */ addFuzzyKeysData( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue)7794 public Builder addFuzzyKeysData( 7795 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { 7796 if (fuzzyKeysDataBuilder_ == null) { 7797 ensureFuzzyKeysDataIsMutable(); 7798 fuzzyKeysData_.add(builderForValue.build()); 7799 onChanged(); 7800 } else { 7801 fuzzyKeysDataBuilder_.addMessage(builderForValue.build()); 7802 } 7803 return this; 7804 } 7805 /** 7806 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7807 */ addFuzzyKeysData( int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue)7808 public Builder addFuzzyKeysData( 7809 int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) { 7810 if (fuzzyKeysDataBuilder_ == null) { 7811 ensureFuzzyKeysDataIsMutable(); 7812 fuzzyKeysData_.add(index, builderForValue.build()); 7813 onChanged(); 7814 } else { 7815 fuzzyKeysDataBuilder_.addMessage(index, builderForValue.build()); 7816 } 7817 return this; 7818 } 7819 /** 7820 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7821 */ addAllFuzzyKeysData( java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> values)7822 public Builder addAllFuzzyKeysData( 7823 java.lang.Iterable<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> values) { 7824 if (fuzzyKeysDataBuilder_ == null) { 7825 ensureFuzzyKeysDataIsMutable(); 7826 super.addAll(values, fuzzyKeysData_); 7827 onChanged(); 7828 } else { 7829 fuzzyKeysDataBuilder_.addAllMessages(values); 7830 } 7831 return this; 7832 } 7833 /** 7834 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7835 */ clearFuzzyKeysData()7836 public Builder clearFuzzyKeysData() { 7837 if (fuzzyKeysDataBuilder_ == null) { 7838 fuzzyKeysData_ = java.util.Collections.emptyList(); 7839 bitField0_ = (bitField0_ & ~0x00000001); 7840 onChanged(); 7841 } else { 7842 fuzzyKeysDataBuilder_.clear(); 7843 } 7844 return this; 7845 } 7846 /** 7847 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7848 */ removeFuzzyKeysData(int index)7849 public Builder removeFuzzyKeysData(int index) { 7850 if (fuzzyKeysDataBuilder_ == null) { 7851 ensureFuzzyKeysDataIsMutable(); 7852 fuzzyKeysData_.remove(index); 7853 onChanged(); 7854 } else { 7855 fuzzyKeysDataBuilder_.remove(index); 7856 } 7857 return this; 7858 } 7859 /** 7860 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7861 */ getFuzzyKeysDataBuilder( int index)7862 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder getFuzzyKeysDataBuilder( 7863 int index) { 7864 return getFuzzyKeysDataFieldBuilder().getBuilder(index); 7865 } 7866 /** 7867 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7868 */ getFuzzyKeysDataOrBuilder( int index)7869 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getFuzzyKeysDataOrBuilder( 7870 int index) { 7871 if (fuzzyKeysDataBuilder_ == null) { 7872 return fuzzyKeysData_.get(index); } else { 7873 return fuzzyKeysDataBuilder_.getMessageOrBuilder(index); 7874 } 7875 } 7876 /** 7877 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7878 */ 7879 public java.util.List<? 
extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getFuzzyKeysDataOrBuilderList()7880 getFuzzyKeysDataOrBuilderList() { 7881 if (fuzzyKeysDataBuilder_ != null) { 7882 return fuzzyKeysDataBuilder_.getMessageOrBuilderList(); 7883 } else { 7884 return java.util.Collections.unmodifiableList(fuzzyKeysData_); 7885 } 7886 } 7887 /** 7888 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7889 */ addFuzzyKeysDataBuilder()7890 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addFuzzyKeysDataBuilder() { 7891 return getFuzzyKeysDataFieldBuilder().addBuilder( 7892 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); 7893 } 7894 /** 7895 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7896 */ addFuzzyKeysDataBuilder( int index)7897 public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addFuzzyKeysDataBuilder( 7898 int index) { 7899 return getFuzzyKeysDataFieldBuilder().addBuilder( 7900 index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance()); 7901 } 7902 /** 7903 * <code>repeated .BytesBytesPair fuzzy_keys_data = 1;</code> 7904 */ 7905 public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder> getFuzzyKeysDataBuilderList()7906 getFuzzyKeysDataBuilderList() { 7907 return getFuzzyKeysDataFieldBuilder().getBuilderList(); 7908 } 7909 private com.google.protobuf.RepeatedFieldBuilder< 7910 org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> getFuzzyKeysDataFieldBuilder()7911 getFuzzyKeysDataFieldBuilder() { 7912 if (fuzzyKeysDataBuilder_ == null) { 7913 fuzzyKeysDataBuilder_ = new com.google.protobuf.RepeatedFieldBuilder< 7914 
org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>( 7915 fuzzyKeysData_, 7916 ((bitField0_ & 0x00000001) == 0x00000001), 7917 getParentForChildren(), 7918 isClean()); 7919 fuzzyKeysData_ = null; 7920 } 7921 return fuzzyKeysDataBuilder_; 7922 } 7923 7924 // @@protoc_insertion_point(builder_scope:FuzzyRowFilter) 7925 } 7926 7927 static { 7928 defaultInstance = new FuzzyRowFilter(true); defaultInstance.initFields()7929 defaultInstance.initFields(); 7930 } 7931 7932 // @@protoc_insertion_point(class_scope:FuzzyRowFilter) 7933 } 7934 7935 public interface InclusiveStopFilterOrBuilder 7936 extends com.google.protobuf.MessageOrBuilder { 7937 7938 // optional bytes stop_row_key = 1; 7939 /** 7940 * <code>optional bytes stop_row_key = 1;</code> 7941 */ hasStopRowKey()7942 boolean hasStopRowKey(); 7943 /** 7944 * <code>optional bytes stop_row_key = 1;</code> 7945 */ getStopRowKey()7946 com.google.protobuf.ByteString getStopRowKey(); 7947 } 7948 /** 7949 * Protobuf type {@code InclusiveStopFilter} 7950 */ 7951 public static final class InclusiveStopFilter extends 7952 com.google.protobuf.GeneratedMessage 7953 implements InclusiveStopFilterOrBuilder { 7954 // Use InclusiveStopFilter.newBuilder() to construct. 
InclusiveStopFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder)7955 private InclusiveStopFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 7956 super(builder); 7957 this.unknownFields = builder.getUnknownFields(); 7958 } InclusiveStopFilter(boolean noInit)7959 private InclusiveStopFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 7960 7961 private static final InclusiveStopFilter defaultInstance; getDefaultInstance()7962 public static InclusiveStopFilter getDefaultInstance() { 7963 return defaultInstance; 7964 } 7965 getDefaultInstanceForType()7966 public InclusiveStopFilter getDefaultInstanceForType() { 7967 return defaultInstance; 7968 } 7969 7970 private final com.google.protobuf.UnknownFieldSet unknownFields; 7971 @java.lang.Override 7972 public final com.google.protobuf.UnknownFieldSet getUnknownFields()7973 getUnknownFields() { 7974 return this.unknownFields; 7975 } InclusiveStopFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7976 private InclusiveStopFilter( 7977 com.google.protobuf.CodedInputStream input, 7978 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 7979 throws com.google.protobuf.InvalidProtocolBufferException { 7980 initFields(); 7981 int mutable_bitField0_ = 0; 7982 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 7983 com.google.protobuf.UnknownFieldSet.newBuilder(); 7984 try { 7985 boolean done = false; 7986 while (!done) { 7987 int tag = input.readTag(); 7988 switch (tag) { 7989 case 0: 7990 done = true; 7991 break; 7992 default: { 7993 if (!parseUnknownField(input, unknownFields, 7994 extensionRegistry, tag)) { 7995 done = true; 7996 } 7997 break; 7998 } 7999 case 10: { 8000 bitField0_ |= 0x00000001; 8001 stopRowKey_ = input.readBytes(); 8002 break; 8003 } 8004 } 8005 } 8006 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 8007 throw 
e.setUnfinishedMessage(this); 8008 } catch (java.io.IOException e) { 8009 throw new com.google.protobuf.InvalidProtocolBufferException( 8010 e.getMessage()).setUnfinishedMessage(this); 8011 } finally { 8012 this.unknownFields = unknownFields.build(); 8013 makeExtensionsImmutable(); 8014 } 8015 } 8016 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()8017 getDescriptor() { 8018 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_descriptor; 8019 } 8020 8021 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()8022 internalGetFieldAccessorTable() { 8023 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_fieldAccessorTable 8024 .ensureFieldAccessorsInitialized( 8025 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.Builder.class); 8026 } 8027 8028 public static com.google.protobuf.Parser<InclusiveStopFilter> PARSER = 8029 new com.google.protobuf.AbstractParser<InclusiveStopFilter>() { 8030 public InclusiveStopFilter parsePartialFrom( 8031 com.google.protobuf.CodedInputStream input, 8032 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8033 throws com.google.protobuf.InvalidProtocolBufferException { 8034 return new InclusiveStopFilter(input, extensionRegistry); 8035 } 8036 }; 8037 8038 @java.lang.Override getParserForType()8039 public com.google.protobuf.Parser<InclusiveStopFilter> getParserForType() { 8040 return PARSER; 8041 } 8042 8043 private int bitField0_; 8044 // optional bytes stop_row_key = 1; 8045 public static final int STOP_ROW_KEY_FIELD_NUMBER = 1; 8046 private com.google.protobuf.ByteString stopRowKey_; 8047 /** 8048 * <code>optional bytes stop_row_key = 1;</code> 8049 */ hasStopRowKey()8050 public boolean hasStopRowKey() { 8051 return ((bitField0_ & 0x00000001) == 
0x00000001); 8052 } 8053 /** 8054 * <code>optional bytes stop_row_key = 1;</code> 8055 */ getStopRowKey()8056 public com.google.protobuf.ByteString getStopRowKey() { 8057 return stopRowKey_; 8058 } 8059 initFields()8060 private void initFields() { 8061 stopRowKey_ = com.google.protobuf.ByteString.EMPTY; 8062 } 8063 private byte memoizedIsInitialized = -1; isInitialized()8064 public final boolean isInitialized() { 8065 byte isInitialized = memoizedIsInitialized; 8066 if (isInitialized != -1) return isInitialized == 1; 8067 8068 memoizedIsInitialized = 1; 8069 return true; 8070 } 8071 writeTo(com.google.protobuf.CodedOutputStream output)8072 public void writeTo(com.google.protobuf.CodedOutputStream output) 8073 throws java.io.IOException { 8074 getSerializedSize(); 8075 if (((bitField0_ & 0x00000001) == 0x00000001)) { 8076 output.writeBytes(1, stopRowKey_); 8077 } 8078 getUnknownFields().writeTo(output); 8079 } 8080 8081 private int memoizedSerializedSize = -1; getSerializedSize()8082 public int getSerializedSize() { 8083 int size = memoizedSerializedSize; 8084 if (size != -1) return size; 8085 8086 size = 0; 8087 if (((bitField0_ & 0x00000001) == 0x00000001)) { 8088 size += com.google.protobuf.CodedOutputStream 8089 .computeBytesSize(1, stopRowKey_); 8090 } 8091 size += getUnknownFields().getSerializedSize(); 8092 memoizedSerializedSize = size; 8093 return size; 8094 } 8095 8096 private static final long serialVersionUID = 0L; 8097 @java.lang.Override writeReplace()8098 protected java.lang.Object writeReplace() 8099 throws java.io.ObjectStreamException { 8100 return super.writeReplace(); 8101 } 8102 8103 @java.lang.Override equals(final java.lang.Object obj)8104 public boolean equals(final java.lang.Object obj) { 8105 if (obj == this) { 8106 return true; 8107 } 8108 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter)) { 8109 return super.equals(obj); 8110 } 8111 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter) obj; 8112 8113 boolean result = true; 8114 result = result && (hasStopRowKey() == other.hasStopRowKey()); 8115 if (hasStopRowKey()) { 8116 result = result && getStopRowKey() 8117 .equals(other.getStopRowKey()); 8118 } 8119 result = result && 8120 getUnknownFields().equals(other.getUnknownFields()); 8121 return result; 8122 } 8123 8124 private int memoizedHashCode = 0; 8125 @java.lang.Override hashCode()8126 public int hashCode() { 8127 if (memoizedHashCode != 0) { 8128 return memoizedHashCode; 8129 } 8130 int hash = 41; 8131 hash = (19 * hash) + getDescriptorForType().hashCode(); 8132 if (hasStopRowKey()) { 8133 hash = (37 * hash) + STOP_ROW_KEY_FIELD_NUMBER; 8134 hash = (53 * hash) + getStopRowKey().hashCode(); 8135 } 8136 hash = (29 * hash) + getUnknownFields().hashCode(); 8137 memoizedHashCode = hash; 8138 return hash; 8139 } 8140 parseFrom( com.google.protobuf.ByteString data)8141 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( 8142 com.google.protobuf.ByteString data) 8143 throws com.google.protobuf.InvalidProtocolBufferException { 8144 return PARSER.parseFrom(data); 8145 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8146 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( 8147 com.google.protobuf.ByteString data, 8148 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8149 throws com.google.protobuf.InvalidProtocolBufferException { 8150 return PARSER.parseFrom(data, extensionRegistry); 8151 } parseFrom(byte[] data)8152 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(byte[] data) 8153 throws com.google.protobuf.InvalidProtocolBufferException { 8154 return 
PARSER.parseFrom(data); 8155 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8156 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( 8157 byte[] data, 8158 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8159 throws com.google.protobuf.InvalidProtocolBufferException { 8160 return PARSER.parseFrom(data, extensionRegistry); 8161 } parseFrom(java.io.InputStream input)8162 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom(java.io.InputStream input) 8163 throws java.io.IOException { 8164 return PARSER.parseFrom(input); 8165 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8166 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( 8167 java.io.InputStream input, 8168 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8169 throws java.io.IOException { 8170 return PARSER.parseFrom(input, extensionRegistry); 8171 } parseDelimitedFrom(java.io.InputStream input)8172 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseDelimitedFrom(java.io.InputStream input) 8173 throws java.io.IOException { 8174 return PARSER.parseDelimitedFrom(input); 8175 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8176 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseDelimitedFrom( 8177 java.io.InputStream input, 8178 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8179 throws java.io.IOException { 8180 return PARSER.parseDelimitedFrom(input, extensionRegistry); 8181 } parseFrom( com.google.protobuf.CodedInputStream input)8182 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( 8183 com.google.protobuf.CodedInputStream input) 8184 throws 
java.io.IOException { 8185 return PARSER.parseFrom(input); 8186 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8187 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parseFrom( 8188 com.google.protobuf.CodedInputStream input, 8189 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8190 throws java.io.IOException { 8191 return PARSER.parseFrom(input, extensionRegistry); 8192 } 8193 newBuilder()8194 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()8195 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter prototype)8196 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter prototype) { 8197 return newBuilder().mergeFrom(prototype); 8198 } toBuilder()8199 public Builder toBuilder() { return newBuilder(this); } 8200 8201 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)8202 protected Builder newBuilderForType( 8203 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 8204 Builder builder = new Builder(parent); 8205 return builder; 8206 } 8207 /** 8208 * Protobuf type {@code InclusiveStopFilter} 8209 */ 8210 public static final class Builder extends 8211 com.google.protobuf.GeneratedMessage.Builder<Builder> 8212 implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilterOrBuilder { 8213 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()8214 getDescriptor() { 8215 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_descriptor; 8216 } 8217 8218 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()8219 internalGetFieldAccessorTable() { 8220 return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_fieldAccessorTable 8221 .ensureFieldAccessorsInitialized( 8222 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.Builder.class); 8223 } 8224 8225 // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.newBuilder() Builder()8226 private Builder() { 8227 maybeForceBuilderInitialization(); 8228 } 8229 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)8230 private Builder( 8231 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 8232 super(parent); 8233 maybeForceBuilderInitialization(); 8234 } maybeForceBuilderInitialization()8235 private void maybeForceBuilderInitialization() { 8236 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 8237 } 8238 } create()8239 private static Builder create() { 8240 return new Builder(); 8241 } 8242 clear()8243 public Builder clear() { 8244 super.clear(); 8245 stopRowKey_ = com.google.protobuf.ByteString.EMPTY; 8246 bitField0_ = (bitField0_ & ~0x00000001); 8247 return this; 8248 } 8249 clone()8250 public Builder clone() { 8251 return create().mergeFrom(buildPartial()); 8252 } 8253 8254 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()8255 getDescriptorForType() { 8256 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_InclusiveStopFilter_descriptor; 8257 } 8258 getDefaultInstanceForType()8259 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter getDefaultInstanceForType() { 8260 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.getDefaultInstance(); 8261 } 8262 build()8263 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter build() { 8264 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter 
result = buildPartial(); 8265 if (!result.isInitialized()) { 8266 throw newUninitializedMessageException(result); 8267 } 8268 return result; 8269 } 8270 buildPartial()8271 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter buildPartial() { 8272 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter(this); 8273 int from_bitField0_ = bitField0_; 8274 int to_bitField0_ = 0; 8275 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 8276 to_bitField0_ |= 0x00000001; 8277 } 8278 result.stopRowKey_ = stopRowKey_; 8279 result.bitField0_ = to_bitField0_; 8280 onBuilt(); 8281 return result; 8282 } 8283 mergeFrom(com.google.protobuf.Message other)8284 public Builder mergeFrom(com.google.protobuf.Message other) { 8285 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter) { 8286 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter)other); 8287 } else { 8288 super.mergeFrom(other); 8289 return this; 8290 } 8291 } 8292 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter other)8293 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter other) { 8294 if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter.getDefaultInstance()) return this; 8295 if (other.hasStopRowKey()) { 8296 setStopRowKey(other.getStopRowKey()); 8297 } 8298 this.mergeUnknownFields(other.getUnknownFields()); 8299 return this; 8300 } 8301 isInitialized()8302 public final boolean isInitialized() { 8303 return true; 8304 } 8305 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8306 public Builder mergeFrom( 8307 com.google.protobuf.CodedInputStream input, 8308 com.google.protobuf.ExtensionRegistryLite 
extensionRegistry) 8309 throws java.io.IOException { 8310 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter parsedMessage = null; 8311 try { 8312 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 8313 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 8314 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.InclusiveStopFilter) e.getUnfinishedMessage(); 8315 throw e; 8316 } finally { 8317 if (parsedMessage != null) { 8318 mergeFrom(parsedMessage); 8319 } 8320 } 8321 return this; 8322 } 8323 private int bitField0_; 8324 8325 // optional bytes stop_row_key = 1; 8326 private com.google.protobuf.ByteString stopRowKey_ = com.google.protobuf.ByteString.EMPTY; 8327 /** 8328 * <code>optional bytes stop_row_key = 1;</code> 8329 */ hasStopRowKey()8330 public boolean hasStopRowKey() { 8331 return ((bitField0_ & 0x00000001) == 0x00000001); 8332 } 8333 /** 8334 * <code>optional bytes stop_row_key = 1;</code> 8335 */ getStopRowKey()8336 public com.google.protobuf.ByteString getStopRowKey() { 8337 return stopRowKey_; 8338 } 8339 /** 8340 * <code>optional bytes stop_row_key = 1;</code> 8341 */ setStopRowKey(com.google.protobuf.ByteString value)8342 public Builder setStopRowKey(com.google.protobuf.ByteString value) { 8343 if (value == null) { 8344 throw new NullPointerException(); 8345 } 8346 bitField0_ |= 0x00000001; 8347 stopRowKey_ = value; 8348 onChanged(); 8349 return this; 8350 } 8351 /** 8352 * <code>optional bytes stop_row_key = 1;</code> 8353 */ clearStopRowKey()8354 public Builder clearStopRowKey() { 8355 bitField0_ = (bitField0_ & ~0x00000001); 8356 stopRowKey_ = getDefaultInstance().getStopRowKey(); 8357 onChanged(); 8358 return this; 8359 } 8360 8361 // @@protoc_insertion_point(builder_scope:InclusiveStopFilter) 8362 } 8363 8364 static { 8365 defaultInstance = new InclusiveStopFilter(true); defaultInstance.initFields()8366 defaultInstance.initFields(); 8367 } 8368 8369 // 
@@protoc_insertion_point(class_scope:InclusiveStopFilter) 8370 } 8371 8372 public interface KeyOnlyFilterOrBuilder 8373 extends com.google.protobuf.MessageOrBuilder { 8374 8375 // required bool len_as_val = 1; 8376 /** 8377 * <code>required bool len_as_val = 1;</code> 8378 */ hasLenAsVal()8379 boolean hasLenAsVal(); 8380 /** 8381 * <code>required bool len_as_val = 1;</code> 8382 */ getLenAsVal()8383 boolean getLenAsVal(); 8384 } 8385 /** 8386 * Protobuf type {@code KeyOnlyFilter} 8387 */ 8388 public static final class KeyOnlyFilter extends 8389 com.google.protobuf.GeneratedMessage 8390 implements KeyOnlyFilterOrBuilder { 8391 // Use KeyOnlyFilter.newBuilder() to construct. KeyOnlyFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder)8392 private KeyOnlyFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 8393 super(builder); 8394 this.unknownFields = builder.getUnknownFields(); 8395 } KeyOnlyFilter(boolean noInit)8396 private KeyOnlyFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 8397 8398 private static final KeyOnlyFilter defaultInstance; getDefaultInstance()8399 public static KeyOnlyFilter getDefaultInstance() { 8400 return defaultInstance; 8401 } 8402 getDefaultInstanceForType()8403 public KeyOnlyFilter getDefaultInstanceForType() { 8404 return defaultInstance; 8405 } 8406 8407 private final com.google.protobuf.UnknownFieldSet unknownFields; 8408 @java.lang.Override 8409 public final com.google.protobuf.UnknownFieldSet getUnknownFields()8410 getUnknownFields() { 8411 return this.unknownFields; 8412 } KeyOnlyFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8413 private KeyOnlyFilter( 8414 com.google.protobuf.CodedInputStream input, 8415 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8416 throws com.google.protobuf.InvalidProtocolBufferException { 8417 initFields(); 8418 int mutable_bitField0_ = 0; 8419 
com.google.protobuf.UnknownFieldSet.Builder unknownFields = 8420 com.google.protobuf.UnknownFieldSet.newBuilder(); 8421 try { 8422 boolean done = false; 8423 while (!done) { 8424 int tag = input.readTag(); 8425 switch (tag) { 8426 case 0: 8427 done = true; 8428 break; 8429 default: { 8430 if (!parseUnknownField(input, unknownFields, 8431 extensionRegistry, tag)) { 8432 done = true; 8433 } 8434 break; 8435 } 8436 case 8: { 8437 bitField0_ |= 0x00000001; 8438 lenAsVal_ = input.readBool(); 8439 break; 8440 } 8441 } 8442 } 8443 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 8444 throw e.setUnfinishedMessage(this); 8445 } catch (java.io.IOException e) { 8446 throw new com.google.protobuf.InvalidProtocolBufferException( 8447 e.getMessage()).setUnfinishedMessage(this); 8448 } finally { 8449 this.unknownFields = unknownFields.build(); 8450 makeExtensionsImmutable(); 8451 } 8452 } 8453 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()8454 getDescriptor() { 8455 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_descriptor; 8456 } 8457 8458 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()8459 internalGetFieldAccessorTable() { 8460 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_fieldAccessorTable 8461 .ensureFieldAccessorsInitialized( 8462 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.Builder.class); 8463 } 8464 8465 public static com.google.protobuf.Parser<KeyOnlyFilter> PARSER = 8466 new com.google.protobuf.AbstractParser<KeyOnlyFilter>() { 8467 public KeyOnlyFilter parsePartialFrom( 8468 com.google.protobuf.CodedInputStream input, 8469 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8470 throws com.google.protobuf.InvalidProtocolBufferException { 8471 return new 
KeyOnlyFilter(input, extensionRegistry); 8472 } 8473 }; 8474 8475 @java.lang.Override getParserForType()8476 public com.google.protobuf.Parser<KeyOnlyFilter> getParserForType() { 8477 return PARSER; 8478 } 8479 8480 private int bitField0_; 8481 // required bool len_as_val = 1; 8482 public static final int LEN_AS_VAL_FIELD_NUMBER = 1; 8483 private boolean lenAsVal_; 8484 /** 8485 * <code>required bool len_as_val = 1;</code> 8486 */ hasLenAsVal()8487 public boolean hasLenAsVal() { 8488 return ((bitField0_ & 0x00000001) == 0x00000001); 8489 } 8490 /** 8491 * <code>required bool len_as_val = 1;</code> 8492 */ getLenAsVal()8493 public boolean getLenAsVal() { 8494 return lenAsVal_; 8495 } 8496 initFields()8497 private void initFields() { 8498 lenAsVal_ = false; 8499 } 8500 private byte memoizedIsInitialized = -1; isInitialized()8501 public final boolean isInitialized() { 8502 byte isInitialized = memoizedIsInitialized; 8503 if (isInitialized != -1) return isInitialized == 1; 8504 8505 if (!hasLenAsVal()) { 8506 memoizedIsInitialized = 0; 8507 return false; 8508 } 8509 memoizedIsInitialized = 1; 8510 return true; 8511 } 8512 writeTo(com.google.protobuf.CodedOutputStream output)8513 public void writeTo(com.google.protobuf.CodedOutputStream output) 8514 throws java.io.IOException { 8515 getSerializedSize(); 8516 if (((bitField0_ & 0x00000001) == 0x00000001)) { 8517 output.writeBool(1, lenAsVal_); 8518 } 8519 getUnknownFields().writeTo(output); 8520 } 8521 8522 private int memoizedSerializedSize = -1; getSerializedSize()8523 public int getSerializedSize() { 8524 int size = memoizedSerializedSize; 8525 if (size != -1) return size; 8526 8527 size = 0; 8528 if (((bitField0_ & 0x00000001) == 0x00000001)) { 8529 size += com.google.protobuf.CodedOutputStream 8530 .computeBoolSize(1, lenAsVal_); 8531 } 8532 size += getUnknownFields().getSerializedSize(); 8533 memoizedSerializedSize = size; 8534 return size; 8535 } 8536 8537 private static final long serialVersionUID = 0L; 8538 
@java.lang.Override writeReplace()8539 protected java.lang.Object writeReplace() 8540 throws java.io.ObjectStreamException { 8541 return super.writeReplace(); 8542 } 8543 8544 @java.lang.Override equals(final java.lang.Object obj)8545 public boolean equals(final java.lang.Object obj) { 8546 if (obj == this) { 8547 return true; 8548 } 8549 if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter)) { 8550 return super.equals(obj); 8551 } 8552 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter) obj; 8553 8554 boolean result = true; 8555 result = result && (hasLenAsVal() == other.hasLenAsVal()); 8556 if (hasLenAsVal()) { 8557 result = result && (getLenAsVal() 8558 == other.getLenAsVal()); 8559 } 8560 result = result && 8561 getUnknownFields().equals(other.getUnknownFields()); 8562 return result; 8563 } 8564 8565 private int memoizedHashCode = 0; 8566 @java.lang.Override hashCode()8567 public int hashCode() { 8568 if (memoizedHashCode != 0) { 8569 return memoizedHashCode; 8570 } 8571 int hash = 41; 8572 hash = (19 * hash) + getDescriptorForType().hashCode(); 8573 if (hasLenAsVal()) { 8574 hash = (37 * hash) + LEN_AS_VAL_FIELD_NUMBER; 8575 hash = (53 * hash) + hashBoolean(getLenAsVal()); 8576 } 8577 hash = (29 * hash) + getUnknownFields().hashCode(); 8578 memoizedHashCode = hash; 8579 return hash; 8580 } 8581 parseFrom( com.google.protobuf.ByteString data)8582 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( 8583 com.google.protobuf.ByteString data) 8584 throws com.google.protobuf.InvalidProtocolBufferException { 8585 return PARSER.parseFrom(data); 8586 } parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8587 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( 8588 com.google.protobuf.ByteString 
data, 8589 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8590 throws com.google.protobuf.InvalidProtocolBufferException { 8591 return PARSER.parseFrom(data, extensionRegistry); 8592 } parseFrom(byte[] data)8593 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(byte[] data) 8594 throws com.google.protobuf.InvalidProtocolBufferException { 8595 return PARSER.parseFrom(data); 8596 } parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8597 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( 8598 byte[] data, 8599 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8600 throws com.google.protobuf.InvalidProtocolBufferException { 8601 return PARSER.parseFrom(data, extensionRegistry); 8602 } parseFrom(java.io.InputStream input)8603 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom(java.io.InputStream input) 8604 throws java.io.IOException { 8605 return PARSER.parseFrom(input); 8606 } parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8607 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( 8608 java.io.InputStream input, 8609 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8610 throws java.io.IOException { 8611 return PARSER.parseFrom(input, extensionRegistry); 8612 } parseDelimitedFrom(java.io.InputStream input)8613 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseDelimitedFrom(java.io.InputStream input) 8614 throws java.io.IOException { 8615 return PARSER.parseDelimitedFrom(input); 8616 } parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8617 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseDelimitedFrom( 8618 java.io.InputStream input, 8619 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8620 throws java.io.IOException { 8621 return PARSER.parseDelimitedFrom(input, extensionRegistry); 8622 } parseFrom( com.google.protobuf.CodedInputStream input)8623 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( 8624 com.google.protobuf.CodedInputStream input) 8625 throws java.io.IOException { 8626 return PARSER.parseFrom(input); 8627 } parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8628 public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parseFrom( 8629 com.google.protobuf.CodedInputStream input, 8630 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8631 throws java.io.IOException { 8632 return PARSER.parseFrom(input, extensionRegistry); 8633 } 8634 newBuilder()8635 public static Builder newBuilder() { return Builder.create(); } newBuilderForType()8636 public Builder newBuilderForType() { return newBuilder(); } newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter prototype)8637 public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter prototype) { 8638 return newBuilder().mergeFrom(prototype); 8639 } toBuilder()8640 public Builder toBuilder() { return newBuilder(this); } 8641 8642 @java.lang.Override newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)8643 protected Builder newBuilderForType( 8644 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 8645 Builder builder = new Builder(parent); 8646 return builder; 8647 } 8648 /** 8649 * Protobuf type {@code KeyOnlyFilter} 8650 */ 8651 public static final class Builder extends 8652 com.google.protobuf.GeneratedMessage.Builder<Builder> 8653 implements org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilterOrBuilder { 8654 public static final 
com.google.protobuf.Descriptors.Descriptor getDescriptor()8655 getDescriptor() { 8656 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_descriptor; 8657 } 8658 8659 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()8660 internalGetFieldAccessorTable() { 8661 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_fieldAccessorTable 8662 .ensureFieldAccessorsInitialized( 8663 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.Builder.class); 8664 } 8665 8666 // Construct using org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.newBuilder() Builder()8667 private Builder() { 8668 maybeForceBuilderInitialization(); 8669 } 8670 Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)8671 private Builder( 8672 com.google.protobuf.GeneratedMessage.BuilderParent parent) { 8673 super(parent); 8674 maybeForceBuilderInitialization(); 8675 } maybeForceBuilderInitialization()8676 private void maybeForceBuilderInitialization() { 8677 if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { 8678 } 8679 } create()8680 private static Builder create() { 8681 return new Builder(); 8682 } 8683 clear()8684 public Builder clear() { 8685 super.clear(); 8686 lenAsVal_ = false; 8687 bitField0_ = (bitField0_ & ~0x00000001); 8688 return this; 8689 } 8690 clone()8691 public Builder clone() { 8692 return create().mergeFrom(buildPartial()); 8693 } 8694 8695 public com.google.protobuf.Descriptors.Descriptor getDescriptorForType()8696 getDescriptorForType() { 8697 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_KeyOnlyFilter_descriptor; 8698 } 8699 getDefaultInstanceForType()8700 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter getDefaultInstanceForType() { 8701 return 
org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.getDefaultInstance(); 8702 } 8703 build()8704 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter build() { 8705 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter result = buildPartial(); 8706 if (!result.isInitialized()) { 8707 throw newUninitializedMessageException(result); 8708 } 8709 return result; 8710 } 8711 buildPartial()8712 public org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter buildPartial() { 8713 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter result = new org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter(this); 8714 int from_bitField0_ = bitField0_; 8715 int to_bitField0_ = 0; 8716 if (((from_bitField0_ & 0x00000001) == 0x00000001)) { 8717 to_bitField0_ |= 0x00000001; 8718 } 8719 result.lenAsVal_ = lenAsVal_; 8720 result.bitField0_ = to_bitField0_; 8721 onBuilt(); 8722 return result; 8723 } 8724 mergeFrom(com.google.protobuf.Message other)8725 public Builder mergeFrom(com.google.protobuf.Message other) { 8726 if (other instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter) { 8727 return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter)other); 8728 } else { 8729 super.mergeFrom(other); 8730 return this; 8731 } 8732 } 8733 mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter other)8734 public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter other) { 8735 if (other == org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter.getDefaultInstance()) return this; 8736 if (other.hasLenAsVal()) { 8737 setLenAsVal(other.getLenAsVal()); 8738 } 8739 this.mergeUnknownFields(other.getUnknownFields()); 8740 return this; 8741 } 8742 isInitialized()8743 public final boolean isInitialized() { 8744 if (!hasLenAsVal()) { 8745 8746 return false; 8747 } 8748 
return true; 8749 } 8750 mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8751 public Builder mergeFrom( 8752 com.google.protobuf.CodedInputStream input, 8753 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8754 throws java.io.IOException { 8755 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter parsedMessage = null; 8756 try { 8757 parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); 8758 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 8759 parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.KeyOnlyFilter) e.getUnfinishedMessage(); 8760 throw e; 8761 } finally { 8762 if (parsedMessage != null) { 8763 mergeFrom(parsedMessage); 8764 } 8765 } 8766 return this; 8767 } 8768 private int bitField0_; 8769 8770 // required bool len_as_val = 1; 8771 private boolean lenAsVal_ ; 8772 /** 8773 * <code>required bool len_as_val = 1;</code> 8774 */ hasLenAsVal()8775 public boolean hasLenAsVal() { 8776 return ((bitField0_ & 0x00000001) == 0x00000001); 8777 } 8778 /** 8779 * <code>required bool len_as_val = 1;</code> 8780 */ getLenAsVal()8781 public boolean getLenAsVal() { 8782 return lenAsVal_; 8783 } 8784 /** 8785 * <code>required bool len_as_val = 1;</code> 8786 */ setLenAsVal(boolean value)8787 public Builder setLenAsVal(boolean value) { 8788 bitField0_ |= 0x00000001; 8789 lenAsVal_ = value; 8790 onChanged(); 8791 return this; 8792 } 8793 /** 8794 * <code>required bool len_as_val = 1;</code> 8795 */ clearLenAsVal()8796 public Builder clearLenAsVal() { 8797 bitField0_ = (bitField0_ & ~0x00000001); 8798 lenAsVal_ = false; 8799 onChanged(); 8800 return this; 8801 } 8802 8803 // @@protoc_insertion_point(builder_scope:KeyOnlyFilter) 8804 } 8805 8806 static { 8807 defaultInstance = new KeyOnlyFilter(true); defaultInstance.initFields()8808 defaultInstance.initFields(); 8809 } 8810 8811 // 
@@protoc_insertion_point(class_scope:KeyOnlyFilter) 8812 } 8813 8814 public interface MultipleColumnPrefixFilterOrBuilder 8815 extends com.google.protobuf.MessageOrBuilder { 8816 8817 // repeated bytes sorted_prefixes = 1; 8818 /** 8819 * <code>repeated bytes sorted_prefixes = 1;</code> 8820 */ getSortedPrefixesList()8821 java.util.List<com.google.protobuf.ByteString> getSortedPrefixesList(); 8822 /** 8823 * <code>repeated bytes sorted_prefixes = 1;</code> 8824 */ getSortedPrefixesCount()8825 int getSortedPrefixesCount(); 8826 /** 8827 * <code>repeated bytes sorted_prefixes = 1;</code> 8828 */ getSortedPrefixes(int index)8829 com.google.protobuf.ByteString getSortedPrefixes(int index); 8830 } 8831 /** 8832 * Protobuf type {@code MultipleColumnPrefixFilter} 8833 */ 8834 public static final class MultipleColumnPrefixFilter extends 8835 com.google.protobuf.GeneratedMessage 8836 implements MultipleColumnPrefixFilterOrBuilder { 8837 // Use MultipleColumnPrefixFilter.newBuilder() to construct. 
MultipleColumnPrefixFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder)8838 private MultipleColumnPrefixFilter(com.google.protobuf.GeneratedMessage.Builder<?> builder) { 8839 super(builder); 8840 this.unknownFields = builder.getUnknownFields(); 8841 } MultipleColumnPrefixFilter(boolean noInit)8842 private MultipleColumnPrefixFilter(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } 8843 8844 private static final MultipleColumnPrefixFilter defaultInstance; getDefaultInstance()8845 public static MultipleColumnPrefixFilter getDefaultInstance() { 8846 return defaultInstance; 8847 } 8848 getDefaultInstanceForType()8849 public MultipleColumnPrefixFilter getDefaultInstanceForType() { 8850 return defaultInstance; 8851 } 8852 8853 private final com.google.protobuf.UnknownFieldSet unknownFields; 8854 @java.lang.Override 8855 public final com.google.protobuf.UnknownFieldSet getUnknownFields()8856 getUnknownFields() { 8857 return this.unknownFields; 8858 } MultipleColumnPrefixFilter( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8859 private MultipleColumnPrefixFilter( 8860 com.google.protobuf.CodedInputStream input, 8861 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8862 throws com.google.protobuf.InvalidProtocolBufferException { 8863 initFields(); 8864 int mutable_bitField0_ = 0; 8865 com.google.protobuf.UnknownFieldSet.Builder unknownFields = 8866 com.google.protobuf.UnknownFieldSet.newBuilder(); 8867 try { 8868 boolean done = false; 8869 while (!done) { 8870 int tag = input.readTag(); 8871 switch (tag) { 8872 case 0: 8873 done = true; 8874 break; 8875 default: { 8876 if (!parseUnknownField(input, unknownFields, 8877 extensionRegistry, tag)) { 8878 done = true; 8879 } 8880 break; 8881 } 8882 case 10: { 8883 if (!((mutable_bitField0_ & 0x00000001) == 0x00000001)) { 8884 sortedPrefixes_ = new 
java.util.ArrayList<com.google.protobuf.ByteString>(); 8885 mutable_bitField0_ |= 0x00000001; 8886 } 8887 sortedPrefixes_.add(input.readBytes()); 8888 break; 8889 } 8890 } 8891 } 8892 } catch (com.google.protobuf.InvalidProtocolBufferException e) { 8893 throw e.setUnfinishedMessage(this); 8894 } catch (java.io.IOException e) { 8895 throw new com.google.protobuf.InvalidProtocolBufferException( 8896 e.getMessage()).setUnfinishedMessage(this); 8897 } finally { 8898 if (((mutable_bitField0_ & 0x00000001) == 0x00000001)) { 8899 sortedPrefixes_ = java.util.Collections.unmodifiableList(sortedPrefixes_); 8900 } 8901 this.unknownFields = unknownFields.build(); 8902 makeExtensionsImmutable(); 8903 } 8904 } 8905 public static final com.google.protobuf.Descriptors.Descriptor getDescriptor()8906 getDescriptor() { 8907 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultipleColumnPrefixFilter_descriptor; 8908 } 8909 8910 protected com.google.protobuf.GeneratedMessage.FieldAccessorTable internalGetFieldAccessorTable()8911 internalGetFieldAccessorTable() { 8912 return org.apache.hadoop.hbase.protobuf.generated.FilterProtos.internal_static_MultipleColumnPrefixFilter_fieldAccessorTable 8913 .ensureFieldAccessorsInitialized( 8914 org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.class, org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter.Builder.class); 8915 } 8916 8917 public static com.google.protobuf.Parser<MultipleColumnPrefixFilter> PARSER = 8918 new com.google.protobuf.AbstractParser<MultipleColumnPrefixFilter>() { 8919 public MultipleColumnPrefixFilter parsePartialFrom( 8920 com.google.protobuf.CodedInputStream input, 8921 com.google.protobuf.ExtensionRegistryLite extensionRegistry) 8922 throws com.google.protobuf.InvalidProtocolBufferException { 8923 return new MultipleColumnPrefixFilter(input, extensionRegistry); 8924 } 8925 }; 8926 8927 @java.lang.Override getParserForType()8928 
public com.google.protobuf.Parser<MultipleColumnPrefixFilter> getParserForType() {
  return PARSER;
}

// repeated bytes sorted_prefixes = 1;
public static final int SORTED_PREFIXES_FIELD_NUMBER = 1;
// Effectively immutable after construction: either Collections.emptyList()
// (set by initFields) or the unmodifiable wrapper installed by the parsing
// constructor's finally block.
private java.util.List<com.google.protobuf.ByteString> sortedPrefixes_;
/**
 * <code>repeated bytes sorted_prefixes = 1;</code>
 */
public java.util.List<com.google.protobuf.ByteString>
    getSortedPrefixesList() {
  return sortedPrefixes_;
}
/**
 * <code>repeated bytes sorted_prefixes = 1;</code>
 */
public int getSortedPrefixesCount() {
  return sortedPrefixes_.size();
}
/**
 * <code>repeated bytes sorted_prefixes = 1;</code>
 */
public com.google.protobuf.ByteString getSortedPrefixes(int index) {
  return sortedPrefixes_.get(index);
}

// Resets all fields to their proto defaults (empty repeated list).
private void initFields() {
  sortedPrefixes_ = java.util.Collections.emptyList();
}
// Memoized tri-state: -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized != -1) return isInitialized == 1;

  // This message declares no required fields, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}

// Serializes each prefix as field 1 (length-delimited), then any unknown fields.
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  getSerializedSize(); // populate the memoized size before writing
  for (int i = 0; i < sortedPrefixes_.size(); i++) {
    output.writeBytes(1, sortedPrefixes_.get(i));
  }
  getUnknownFields().writeTo(output);
}

private int memoizedSerializedSize = -1;
public int getSerializedSize() {
  int size = memoizedSerializedSize;
  if (size != -1) return size;

  size = 0;
  {
    int dataSize = 0;
    for (int i = 0; i < sortedPrefixes_.size(); i++) {
      dataSize += com.google.protobuf.CodedOutputStream
          .computeBytesSizeNoTag(sortedPrefixes_.get(i));
    }
    size += dataSize;
    // One 1-byte tag (field 1, wire type 2) per repeated element.
    size += 1 * getSortedPrefixesList().size();
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}

private static final long serialVersionUID = 0L;
// Java serialization hook: delegates to GeneratedMessage's replacement object.
@java.lang.Override
protected java.lang.Object writeReplace()
    throws java.io.ObjectStreamException {
  return super.writeReplace();
}

// Equality is by the sorted_prefixes list plus unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter other = (org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter) obj;

  boolean result = true;
  result = result && getSortedPrefixesList()
      .equals(other.getSortedPrefixesList());
  result = result &&
      getUnknownFields().equals(other.getUnknownFields());
  return result;
}

// 0 doubles as the "not yet computed" sentinel; if the hash happens to
// compute to 0 it is simply recomputed on every call (benign).
private int memoizedHashCode = 0;
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptorForType().hashCode();
  if (getSortedPrefixesCount() > 0) {
    hash = (37 * hash) + SORTED_PREFIXES_FIELD_NUMBER;
    hash = (53 * hash) + getSortedPrefixesList().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}

// Static parse entry points; all delegate to PARSER (and thus to the
// wire-format parsing constructor).
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return PARSER.parseFrom(input);
}
public static org.apache.hadoop.hbase.protobuf.generated.FilterProtos.MultipleColumnPrefixFilter parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return PARSER.parseFrom(input, extensionRegistry);
}
parseDelimitedFrom(java.io.InputStream input)9069