1 // Generated by the protocol buffer compiler.  DO NOT EDIT!
2 // source: ZooKeeper.proto
3 
4 package org.apache.hadoop.hbase.protobuf.generated;
5 
6 public final class ZooKeeperProtos {
  /** Private constructor: this class is only a static holder for generated protobuf types. */
  private ZooKeeperProtos() {}
  /**
   * Registers protobuf extensions declared in ZooKeeper.proto.
   * No extensions are declared, so this is a no-op kept for generated-API compatibility.
   *
   * @param registry extension registry to populate
   */
  public static void registerAllExtensions(
      com.google.protobuf.ExtensionRegistry registry) {
  }
  /**
   * Read-only accessor interface for the {@code MetaRegionServer} message,
   * implemented by both the immutable message and its Builder.
   */
  public interface MetaRegionServerOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .ServerName server = 1;
    /**
     * <code>required .ServerName server = 1;</code>
     *
     * <pre>
     * The ServerName hosting the meta region currently, or destination server,
     * if meta region is in transition.
     * </pre>
     */
    boolean hasServer();
    /** <code>required .ServerName server = 1;</code> — see {@link #hasServer()}. */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer();
    /** <code>required .ServerName server = 1;</code> — see {@link #hasServer()}. */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder();

    // optional uint32 rpc_version = 2;
    /**
     * <code>optional uint32 rpc_version = 2;</code>
     *
     * <pre>
     * The major version of the rpc the server speaks.  This is used so that
     * clients connecting to the cluster can have prior knowledge of what version
     * to send to a RegionServer.  AsyncHBase will use this to detect versions.
     * </pre>
     */
    boolean hasRpcVersion();
    /** <code>optional uint32 rpc_version = 2;</code> — see {@link #hasRpcVersion()}. */
    int getRpcVersion();

    // optional .RegionState.State state = 3;
    /**
     * <code>optional .RegionState.State state = 3;</code>
     *
     * <pre>
     * State of the region transition. OPEN means fully operational 'hbase:meta'
     * </pre>
     */
    boolean hasState();
    /** <code>optional .RegionState.State state = 3;</code> — see {@link #hasState()}. */
    org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State getState();
  }
83   /**
84    * Protobuf type {@code MetaRegionServer}
85    *
86    * <pre>
87    **
88    * Content of the meta-region-server znode.
89    * </pre>
90    */
91   public static final class MetaRegionServer extends
92       com.google.protobuf.GeneratedMessage
93       implements MetaRegionServerOrBuilder {
94     // Use MetaRegionServer.newBuilder() to construct.
    // Use MetaRegionServer.newBuilder() to construct.
    /** Builds an instance from a Builder, adopting the builder's unknown-field set. */
    private MetaRegionServer(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    /** Constructs the singleton default instance: no fields set, empty unknown-field set. */
    private MetaRegionServer(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
100 
    // Singleton default instance; assigned in the class's static initializer
    // (outside this view), as is standard for protobuf-2.5 generated code.
    private static final MetaRegionServer defaultInstance;
    /** Returns the shared immutable default instance. */
    public static MetaRegionServer getDefaultInstance() {
      return defaultInstance;
    }

    /** Returns the default instance for this message type. */
    public MetaRegionServer getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Wire fields this generated class does not recognize; preserved for round-tripping.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Parses a MetaRegionServer from the wire.  Unknown fields are preserved,
     * and an unrecognized {@code state} enum value is stored as an unknown
     * varint field rather than rejected.
     *
     * @throws com.google.protobuf.InvalidProtocolBufferException on malformed input
     */
    private MetaRegionServer(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:  // tag 0 = end of stream
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {  // field 1, wire type 2: server (ServerName message)
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                // Field appeared more than once on the wire: merge into the earlier value.
                subBuilder = server_.toBuilder();
              }
              server_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(server_);
                server_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 16: {  // field 2, wire type 0: rpc_version (uint32)
              bitField0_ |= 0x00000002;
              rpcVersion_ = input.readUInt32();
              break;
            }
            case 24: {  // field 3, wire type 0: state (enum)
              int rawValue = input.readEnum();
              org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State value = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.valueOf(rawValue);
              if (value == null) {
                // Unknown enum number: keep it round-trippable as an unknown field.
                unknownFields.mergeVarintField(3, rawValue);
              } else {
                bitField0_ |= 0x00000004;
                state_ = value;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always attach whatever unknown fields were collected, even on failure.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    /** Returns the protobuf descriptor for this message type. */
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_MetaRegionServer_descriptor;
    }

    /** Binds descriptor fields to the generated accessors for reflective access. */
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_MetaRegionServer_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer.Builder.class);
    }

    // NOTE(review): a non-final public static PARSER is the protobuf-2.5 generated
    // shape; do not hand-edit — regenerate from ZooKeeper.proto if this must change.
    public static com.google.protobuf.Parser<MetaRegionServer> PARSER =
        new com.google.protobuf.AbstractParser<MetaRegionServer>() {
      public MetaRegionServer parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new MetaRegionServer(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<MetaRegionServer> getParserForType() {
      return PARSER;
    }
206 
    // Has-bits: bit0 = server, bit1 = rpc_version, bit2 = state.
    private int bitField0_;
    // required .ServerName server = 1;
    public static final int SERVER_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_;
    /**
     * <code>required .ServerName server = 1;</code>
     *
     * <pre>
     * The ServerName hosting the meta region currently, or destination server,
     * if meta region is in transition.
     * </pre>
     */
    public boolean hasServer() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /** Returns the server, or the ServerName default instance when unset. */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() {
      return server_;
    }
    /** Same value as {@link #getServer()}, exposed through the OrBuilder interface. */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() {
      return server_;
    }

    // optional uint32 rpc_version = 2;
    public static final int RPC_VERSION_FIELD_NUMBER = 2;
    private int rpcVersion_;
    /**
     * <code>optional uint32 rpc_version = 2;</code>
     *
     * <pre>
     * The major version of the rpc the server speaks.  This is used so that
     * clients connecting to the cluster can have prior knowledge of what version
     * to send to a RegionServer.  AsyncHBase will use this to detect versions.
     * </pre>
     */
    public boolean hasRpcVersion() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /** Returns rpc_version, or 0 when unset. */
    public int getRpcVersion() {
      return rpcVersion_;
    }

    // optional .RegionState.State state = 3;
    public static final int STATE_FIELD_NUMBER = 3;
    private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State state_;
    /**
     * <code>optional .RegionState.State state = 3;</code>
     *
     * <pre>
     * State of the region transition. OPEN means fully operational 'hbase:meta'
     * </pre>
     */
    public boolean hasState() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /** Returns state, or OFFLINE (the proto default) when unset. */
    public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State getState() {
      return state_;
    }
296 
    /** Resets all fields to their proto defaults; invoked before parsing. */
    private void initFields() {
      server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
      rpcVersion_ = 0;
      state_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE;
    }
    // Memoized isInitialized() result: -1 = unknown, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    /** True iff the required {@code server} field is set and itself initialized. */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasServer()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getServer().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
318 
    /** Serializes the set fields (plus unknown fields) to {@code output} in tag order. */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // populate memoizedSerializedSize before writing
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, server_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt32(2, rpcVersion_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeEnum(3, state_.getNumber());
      }
      getUnknownFields().writeTo(output);
    }

    // Cached wire size; -1 until first computed.
    private int memoizedSerializedSize = -1;
    /** Returns (and memoizes) the serialized byte size of this message. */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, server_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(2, rpcVersion_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(3, state_.getNumber());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
356 
    private static final long serialVersionUID = 0L;
    /** Java-serialization hook: delegates to GeneratedMessage's serialized proxy. */
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
363 
    /**
     * Field-wise equality: presence bits, each set field, and unknown fields
     * must all match.  Non-MetaRegionServer objects defer to the superclass.
     */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer) obj;

      boolean result = true;
      result = result && (hasServer() == other.hasServer());
      if (hasServer()) {
        result = result && getServer()
            .equals(other.getServer());
      }
      result = result && (hasRpcVersion() == other.hasRpcVersion());
      if (hasRpcVersion()) {
        result = result && (getRpcVersion()
            == other.getRpcVersion());
      }
      result = result && (hasState() == other.hasState());
      if (hasState()) {
        result = result &&
            (getState() == other.getState());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
394 
    // Cached hash; 0 means "not yet computed" (a genuinely-zero hash recomputes each call).
    private int memoizedHashCode = 0;
    /** Hash consistent with {@link #equals}: mixes the descriptor, set fields, and unknown fields. */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasServer()) {
        hash = (37 * hash) + SERVER_FIELD_NUMBER;
        hash = (53 * hash) + getServer().hashCode();
      }
      if (hasRpcVersion()) {
        hash = (37 * hash) + RPC_VERSION_FIELD_NUMBER;
        hash = (53 * hash) + getRpcVersion();
      }
      if (hasState()) {
        hash = (37 * hash) + STATE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getState());  // inherited helper hashes the enum's number
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
419 
    // Standard generated parse entry points; all delegate to PARSER.
    // The *From(data) forms throw InvalidProtocolBufferException on malformed
    // input; the stream forms also propagate I/O errors.
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited forms read a varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
472 
    /** Returns a fresh Builder with all fields at their defaults. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Returns a Builder pre-populated with {@code prototype}'s fields. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    /** Returns a Builder pre-populated with this message's fields. */
    public Builder toBuilder() { return newBuilder(this); }

    /** Framework hook: builds a Builder attached to {@code parent} for change notification. */
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
486     /**
487      * Protobuf type {@code MetaRegionServer}
488      *
489      * <pre>
490      **
491      * Content of the meta-region-server znode.
492      * </pre>
493      */
494     public static final class Builder extends
495         com.google.protobuf.GeneratedMessage.Builder<Builder>
496        implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServerOrBuilder {
      /** Returns the descriptor for the MetaRegionServer message type. */
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_MetaRegionServer_descriptor;
      }

      /** Binds descriptor fields to the generated accessors for reflective access. */
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_MetaRegionServer_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer.Builder.class);
      }
508 
      // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      /** Builder attached to a parent for invalidation callbacks (nested-builder support). */
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      /** Eagerly creates nested field builders when the runtime requires it. */
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getServerFieldBuilder();
        }
      }
      /** Factory used by {@code MetaRegionServer.newBuilder()}. */
      private static Builder create() {
        return new Builder();
      }
527 
      /** Resets every field (and its has-bit) to the proto default. */
      public Builder clear() {
        super.clear();
        if (serverBuilder_ == null) {
          server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
        } else {
          serverBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        rpcVersion_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
        state_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE;
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }

      /** Deep copy: snapshots current state via buildPartial and merges into a new Builder. */
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
546 
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_MetaRegionServer_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer.getDefaultInstance();
      }

      /**
       * Builds the message, throwing if the required {@code server} field is unset
       * or not fully initialized (use {@code buildPartial()} to skip the check).
       */
      public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer build() {
        org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
563 
      /**
       * Builds the message without the required-field check, copying each field
       * and translating builder has-bits into the message's has-bits.
       */
      public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        // server may live either in the plain field or in its nested builder.
        if (serverBuilder_ == null) {
          result.server_ = server_;
        } else {
          result.server_ = serverBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.rpcVersion_ = rpcVersion_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.state_ = state_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
588 
      /** Dispatches to the typed merge when possible; otherwise merges reflectively. */
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /** Merges each field that is set in {@code other}, plus its unknown fields. */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer.getDefaultInstance()) return this;
        if (other.hasServer()) {
          mergeServer(other.getServer());
        }
        if (other.hasRpcVersion()) {
          setRpcVersion(other.getRpcVersion());
        }
        if (other.hasState()) {
          setState(other.getState());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
612 
      /** True iff the required {@code server} field is set and itself initialized. */
      public final boolean isInitialized() {
        if (!hasServer()) {

          return false;
        }
        if (!getServer().isInitialized()) {

          return false;
        }
        return true;
      }
624 
      /**
       * Parses from {@code input} and merges into this builder.  On a parse
       * error, whatever was successfully read is still merged (see finally)
       * before the exception propagates.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MetaRegionServer) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Has-bits for the builder: bit0 = server, bit1 = rpc_version, bit2 = state.
      private int bitField0_;

      // required .ServerName server = 1;
      // server_ holds the value until a nested builder (serverBuilder_) takes over.
      private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverBuilder_;
      /**
       * <code>required .ServerName server = 1;</code>
       *
       * <pre>
       * The ServerName hosting the meta region currently, or destination server,
       * if meta region is in transition.
       * </pre>
       */
      public boolean hasServer() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /** Returns the server from whichever holds it: the plain field or the nested builder. */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServer() {
        if (serverBuilder_ == null) {
          return server_;
        } else {
          return serverBuilder_.getMessage();
        }
      }
      /**
       * Sets the server field (required .ServerName server = 1).
       *
       * @param value the new server; must not be null
       * @throws NullPointerException if {@code value} is null
       */
      public Builder setServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
        if (serverBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          server_ = value;
          onChanged();
        } else {
          serverBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      /**
       * Sets the server field (required .ServerName server = 1) from a
       * sub-builder, which is built immediately.
       */
      public Builder setServer(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
        if (serverBuilder_ == null) {
          server_ = builderForValue.build();
          onChanged();
        } else {
          serverBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
714       /**
715        * <code>required .ServerName server = 1;</code>
716        *
717        * <pre>
718        * The ServerName hosting the meta region currently, or destination server,
719        * if meta region is in transition.
720        * </pre>
721        */
mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value)722       public Builder mergeServer(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
723         if (serverBuilder_ == null) {
724           if (((bitField0_ & 0x00000001) == 0x00000001) &&
725               server_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) {
726             server_ =
727               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(server_).mergeFrom(value).buildPartial();
728           } else {
729             server_ = value;
730           }
731           onChanged();
732         } else {
733           serverBuilder_.mergeFrom(value);
734         }
735         bitField0_ |= 0x00000001;
736         return this;
737       }
738       /**
739        * <code>required .ServerName server = 1;</code>
740        *
741        * <pre>
742        * The ServerName hosting the meta region currently, or destination server,
743        * if meta region is in transition.
744        * </pre>
745        */
clearServer()746       public Builder clearServer() {
747         if (serverBuilder_ == null) {
748           server_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
749           onChanged();
750         } else {
751           serverBuilder_.clear();
752         }
753         bitField0_ = (bitField0_ & ~0x00000001);
754         return this;
755       }
756       /**
757        * <code>required .ServerName server = 1;</code>
758        *
759        * <pre>
760        * The ServerName hosting the meta region currently, or destination server,
761        * if meta region is in transition.
762        * </pre>
763        */
getServerBuilder()764       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerBuilder() {
765         bitField0_ |= 0x00000001;
766         onChanged();
767         return getServerFieldBuilder().getBuilder();
768       }
769       /**
770        * <code>required .ServerName server = 1;</code>
771        *
772        * <pre>
773        * The ServerName hosting the meta region currently, or destination server,
774        * if meta region is in transition.
775        * </pre>
776        */
getServerOrBuilder()777       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerOrBuilder() {
778         if (serverBuilder_ != null) {
779           return serverBuilder_.getMessageOrBuilder();
780         } else {
781           return server_;
782         }
783       }
784       /**
785        * <code>required .ServerName server = 1;</code>
786        *
787        * <pre>
788        * The ServerName hosting the meta region currently, or destination server,
789        * if meta region is in transition.
790        * </pre>
791        */
792       private com.google.protobuf.SingleFieldBuilder<
793           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>
getServerFieldBuilder()794           getServerFieldBuilder() {
795         if (serverBuilder_ == null) {
796           serverBuilder_ = new com.google.protobuf.SingleFieldBuilder<
797               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>(
798                   server_,
799                   getParentForChildren(),
800                   isClean());
801           server_ = null;
802         }
803         return serverBuilder_;
804       }
805 
      // optional uint32 rpc_version = 2;
      private int rpcVersion_ ;
      /**
       * <code>optional uint32 rpc_version = 2;</code>
       *
       * <pre>
       * The major version of the rpc the server speaks.  This is used so that
       * clients connecting to the cluster can have prior knowledge of what version
       * to send to a RegionServer.  AsyncHBase will use this to detect versions.
       * </pre>
       */
      public boolean hasRpcVersion() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /** <code>optional uint32 rpc_version = 2;</code> */
      public int getRpcVersion() {
        return rpcVersion_;
      }
      /** <code>optional uint32 rpc_version = 2;</code> — set the value and mark it present. */
      public Builder setRpcVersion(int value) {
        bitField0_ |= 0x00000002;
        rpcVersion_ = value;
        onChanged();
        return this;
      }
      /** <code>optional uint32 rpc_version = 2;</code> — reset to 0 and clear the presence bit. */
      public Builder clearRpcVersion() {
        bitField0_ = (bitField0_ & ~0x00000002);
        rpcVersion_ = 0;
        onChanged();
        return this;
      }
862 
      // optional .RegionState.State state = 3;
      private org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State state_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE;
      /**
       * <code>optional .RegionState.State state = 3;</code>
       *
       * <pre>
       * State of the region transition. OPEN means fully operational 'hbase:meta'
       * </pre>
       */
      public boolean hasState() {
        return ((bitField0_ & 0x00000004) == 0x00000004);
      }
      /** <code>optional .RegionState.State state = 3;</code> */
      public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State getState() {
        return state_;
      }
      /** <code>optional .RegionState.State state = 3;</code> — null is rejected; enum fields are never null. */
      public Builder setState(org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000004;
        state_ = value;
        onChanged();
        return this;
      }
      /** <code>optional .RegionState.State state = 3;</code> — reset to OFFLINE and clear the presence bit. */
      public Builder clearState() {
        bitField0_ = (bitField0_ & ~0x00000004);
        state_ = org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.RegionState.State.OFFLINE;
        onChanged();
        return this;
      }
914 
915       // @@protoc_insertion_point(builder_scope:MetaRegionServer)
916     }
917 
    // Eagerly builds the singleton returned by getDefaultInstance().
    static {
      defaultInstance = new MetaRegionServer(true);
      defaultInstance.initFields();
    }
922 
923     // @@protoc_insertion_point(class_scope:MetaRegionServer)
924   }
925 
  /**
   * Read-only accessor contract shared by {@code Master} and {@code Master.Builder}
   * (content of the master znode).
   */
  public interface MasterOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .ServerName master = 1;
    /**
     * <code>required .ServerName master = 1;</code>
     *
     * <pre>
     * The ServerName of the current Master
     * </pre>
     */
    boolean hasMaster();
    /** <code>required .ServerName master = 1;</code> */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getMaster();
    /** <code>required .ServerName master = 1;</code> */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder();

    // optional uint32 rpc_version = 2;
    /**
     * <code>optional uint32 rpc_version = 2;</code>
     *
     * <pre>
     * Major RPC version so that clients can know what version the master can accept.
     * </pre>
     */
    boolean hasRpcVersion();
    /** <code>optional uint32 rpc_version = 2;</code> */
    int getRpcVersion();

    // optional uint32 info_port = 3;
    /** <code>optional uint32 info_port = 3;</code> */
    boolean hasInfoPort();
    /** <code>optional uint32 info_port = 3;</code> */
    int getInfoPort();
  }
983   /**
984    * Protobuf type {@code Master}
985    *
986    * <pre>
987    **
988    * Content of the master znode.
989    * </pre>
990    */
991   public static final class Master extends
992       com.google.protobuf.GeneratedMessage
993       implements MasterOrBuilder {
    // Use Master.newBuilder() to construct.
    private Master(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor used for the default instance; fields are set later via initFields().
    private Master(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
1000 
    // Singleton default instance, assigned in this class's static initializer.
    private static final Master defaultInstance;
    public static Master getDefaultInstance() {
      return defaultInstance;
    }

    public Master getDefaultInstanceForType() {
      return defaultInstance;
    }
1009 
    // Fields seen on the wire that this schema version does not recognize.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor: reads tagged fields from {@code input}
     * until end-of-message, preserving anything unrecognized in unknownFields.
     */
    private Master(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:  // tag 0 signals end of the message
              done = true;
              break;
            default: {
              // Unknown tag: keep it if it parses, otherwise stop reading.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {  // field 1 ('master'), wire type 2 (length-delimited message)
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                // Field appeared twice on the wire: merge into the previous value.
                subBuilder = master_.toBuilder();
              }
              master_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(master_);
                master_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 16: {  // field 2 ('rpc_version'), wire type 0 (varint)
              bitField0_ |= 0x00000002;
              rpcVersion_ = input.readUInt32();
              break;
            }
            case 24: {  // field 3 ('info_port'), wire type 0 (varint)
              bitField0_ |= 0x00000004;
              infoPort_ = input.readUInt32();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always finalize unknown fields, even on a parse failure.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor and reflection accessor table for the Master message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Master_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Master_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.Builder.class);
    }
1085 
    // Stateless parser that delegates to the wire-format constructor above.
    public static com.google.protobuf.Parser<Master> PARSER =
        new com.google.protobuf.AbstractParser<Master>() {
      public Master parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new Master(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<Master> getParserForType() {
      return PARSER;
    }
1100 
    // Presence bits: bit 0 = master, bit 1 = rpc_version, bit 2 = info_port.
    private int bitField0_;
    // required .ServerName master = 1;
    public static final int MASTER_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName master_;
    /**
     * <code>required .ServerName master = 1;</code>
     *
     * <pre>
     * The ServerName of the current Master
     * </pre>
     */
    public boolean hasMaster() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /** <code>required .ServerName master = 1;</code> */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getMaster() {
      return master_;
    }
    /** <code>required .ServerName master = 1;</code> */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder() {
      return master_;
    }
1135 
    // optional uint32 rpc_version = 2;
    public static final int RPC_VERSION_FIELD_NUMBER = 2;
    private int rpcVersion_;
    /**
     * <code>optional uint32 rpc_version = 2;</code>
     *
     * <pre>
     * Major RPC version so that clients can know what version the master can accept.
     * </pre>
     */
    public boolean hasRpcVersion() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /** <code>optional uint32 rpc_version = 2;</code> */
    public int getRpcVersion() {
      return rpcVersion_;
    }
1159 
    // optional uint32 info_port = 3;
    public static final int INFO_PORT_FIELD_NUMBER = 3;
    private int infoPort_;
    /** <code>optional uint32 info_port = 3;</code> */
    public boolean hasInfoPort() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /** <code>optional uint32 info_port = 3;</code> */
    public int getInfoPort() {
      return infoPort_;
    }
1175 
    // Resets all fields to their proto defaults (used by both constructors).
    private void initFields() {
      master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
      rpcVersion_ = 0;
      infoPort_ = 0;
    }
    // Memoized result of isInitialized(): -1 unknown, 0 false, 1 true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // 'master' is required and must itself be fully initialized.
      if (!hasMaster()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getMaster().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
1197 
    /** Serializes only the fields whose presence bits are set, then any unknown fields. */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      getSerializedSize();  // populate the memoized size before writing
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, master_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeUInt32(2, rpcVersion_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt32(3, infoPort_);
      }
      getUnknownFields().writeTo(output);
    }
1212 
    private int memoizedSerializedSize = -1;
    /** Computes (and memoizes) the serialized byte size of this message. */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, master_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(2, rpcVersion_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(3, infoPort_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
1235 
    private static final long serialVersionUID = 0L;
    // Java serialization is routed through GeneratedMessage's serialization proxy.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
1242 
    /** Field-by-field equality: matching presence bits, equal values, equal unknown fields. */
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master) obj;

      boolean result = true;
      result = result && (hasMaster() == other.hasMaster());
      if (hasMaster()) {
        result = result && getMaster()
            .equals(other.getMaster());
      }
      result = result && (hasRpcVersion() == other.hasRpcVersion());
      if (hasRpcVersion()) {
        result = result && (getRpcVersion()
            == other.getRpcVersion());
      }
      result = result && (hasInfoPort() == other.hasInfoPort());
      if (hasInfoPort()) {
        result = result && (getInfoPort()
            == other.getInfoPort());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
1273 
    // Memoized hash; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    /** Hash over the descriptor plus each present field, consistent with equals(). */
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasMaster()) {
        hash = (37 * hash) + MASTER_FIELD_NUMBER;
        hash = (53 * hash) + getMaster().hashCode();
      }
      if (hasRpcVersion()) {
        hash = (37 * hash) + RPC_VERSION_FIELD_NUMBER;
        hash = (53 * hash) + getRpcVersion();
      }
      if (hasInfoPort()) {
        hash = (37 * hash) + INFO_PORT_FIELD_NUMBER;
        hash = (53 * hash) + getInfoPort();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
1298 
    // Static parse helpers for the usual input sources; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    // Delimited variants read a varint length prefix before the message body.
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
1351 
    // Builder factory methods; toBuilder() seeds a new builder from this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
1365     /**
1366      * Protobuf type {@code Master}
1367      *
1368      * <pre>
1369      **
1370      * Content of the master znode.
1371      * </pre>
1372      */
1373     public static final class Builder extends
1374         com.google.protobuf.GeneratedMessage.Builder<Builder>
1375        implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.MasterOrBuilder {
      // Descriptor and reflection accessor table (shared with the Master message class).
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Master_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Master_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.Builder.class);
      }
1387 
      // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Pre-creates nested field builders when the runtime is configured to always use them.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getMasterFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
1406 
      /** Resets every field to its default and clears all presence bits. */
      public Builder clear() {
        super.clear();
        if (masterBuilder_ == null) {
          master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
        } else {
          masterBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        rpcVersion_ = 0;
        bitField0_ = (bitField0_ & ~0x00000002);
        infoPort_ = 0;
        bitField0_ = (bitField0_ & ~0x00000004);
        return this;
      }

      // Deep copy via a round-trip through buildPartial().
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }
1425 
      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Master_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.getDefaultInstance();
      }
1434 
      /** Builds the message, throwing if required fields are missing. */
      public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master build() {
        org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      /** Builds without the required-field check, copying values and presence bits. */
      public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        // 'master' comes from the nested builder when one was created.
        if (masterBuilder_ == null) {
          result.master_ = master_;
        } else {
          result.master_ = masterBuilder_.build();
        }
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.rpcVersion_ = rpcVersion_;
        if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
          to_bitField0_ |= 0x00000004;
        }
        result.infoPort_ = infoPort_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
1467 
mergeFrom(com.google.protobuf.Message other)1468       public Builder mergeFrom(com.google.protobuf.Message other) {
        // Use the type-specific merge when the argument is a Master; fall back
        // to the reflective GeneratedMessage.Builder merge otherwise.
1469         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master) {
1470           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master)other);
1471         } else {
1472           super.mergeFrom(other);
1473           return this;
1474         }
1475       }
1476 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master other)1477       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master other) {
        // Merging the default instance is a no-op; only fields that are set
        // on 'other' overwrite (or merge into) this builder's fields.
1478         if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master.getDefaultInstance()) return this;
1479         if (other.hasMaster()) {
1480           mergeMaster(other.getMaster());
1481         }
1482         if (other.hasRpcVersion()) {
1483           setRpcVersion(other.getRpcVersion());
1484         }
1485         if (other.hasInfoPort()) {
1486           setInfoPort(other.getInfoPort());
1487         }
1488         this.mergeUnknownFields(other.getUnknownFields());
1489         return this;
1490       }
1491 
isInitialized()1492       public final boolean isInitialized() {
        // 'master' is a required field: it must be present and the nested
        // message must itself be initialized.
1493         if (!hasMaster()) {
1494 
1495           return false;
1496         }
1497         if (!getMaster().isInitialized()) {
1498 
1499           return false;
1500         }
1501         return true;
1502       }
1503 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1504       public Builder mergeFrom(
1505           com.google.protobuf.CodedInputStream input,
1506           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1507           throws java.io.IOException {
1508         org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master parsedMessage = null;
1509         try {
1510           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
1511         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Keep whatever was parsed before the failure so the finally block
          // can still merge it, then rethrow the original exception.
1512           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Master) e.getUnfinishedMessage();
1513           throw e;
1514         } finally {
1515           if (parsedMessage != null) {
1516             mergeFrom(parsedMessage);
1517           }
1518         }
1519         return this;
1520       }
1521       private int bitField0_;
      // bitField0_ holds field-presence bits: 0x1 = master, 0x2 = rpc_version,
      // 0x4 = info_port (see the has*/set*/clear* methods below).
1522 
1523       // required .ServerName master = 1;
1524       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
1525       private com.google.protobuf.SingleFieldBuilder<
1526           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> masterBuilder_;
      // masterBuilder_ is created lazily by getMasterFieldBuilder(); while it
      // is null, the plain master_ instance above is authoritative.
1527       /**
1528        * <code>required .ServerName master = 1;</code>
1529        *
1530        * <pre>
1531        * The ServerName of the current Master
1532        * </pre>
1533        */
hasMaster()1534       public boolean hasMaster() {
1535         return ((bitField0_ & 0x00000001) == 0x00000001);
1536       }
1537       /**
1538        * <code>required .ServerName master = 1;</code>
1539        *
1540        * <pre>
1541        * The ServerName of the current Master
1542        * </pre>
1543        */
getMaster()1544       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getMaster() {
1545         if (masterBuilder_ == null) {
1546           return master_;
1547         } else {
1548           return masterBuilder_.getMessage();
1549         }
1550       }
1551       /**
1552        * <code>required .ServerName master = 1;</code>
1553        *
1554        * <pre>
1555        * The ServerName of the current Master
1556        * </pre>
1557        */
setMaster(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value)1558       public Builder setMaster(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
1559         if (masterBuilder_ == null) {
1560           if (value == null) {
1561             throw new NullPointerException();
1562           }
1563           master_ = value;
1564           onChanged();
1565         } else {
1566           masterBuilder_.setMessage(value);
1567         }
        // Mark the field present regardless of which storage was used.
1568         bitField0_ |= 0x00000001;
1569         return this;
1570       }
1571       /**
1572        * <code>required .ServerName master = 1;</code>
1573        *
1574        * <pre>
1575        * The ServerName of the current Master
1576        * </pre>
1577        */
setMaster( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue)1578       public Builder setMaster(
1579           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
        // Convenience overload: builds the sub-message from the given builder.
1580         if (masterBuilder_ == null) {
1581           master_ = builderForValue.build();
1582           onChanged();
1583         } else {
1584           masterBuilder_.setMessage(builderForValue.build());
1585         }
1586         bitField0_ |= 0x00000001;
1587         return this;
1588       }
1589       /**
1590        * <code>required .ServerName master = 1;</code>
1591        *
1592        * <pre>
1593        * The ServerName of the current Master
1594        * </pre>
1595        */
mergeMaster(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value)1596       public Builder mergeMaster(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
1597         if (masterBuilder_ == null) {
          // Merge semantics: if a non-default value is already present, merge
          // the incoming message into it; otherwise simply adopt it.
1598           if (((bitField0_ & 0x00000001) == 0x00000001) &&
1599               master_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) {
1600             master_ =
1601               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(master_).mergeFrom(value).buildPartial();
1602           } else {
1603             master_ = value;
1604           }
1605           onChanged();
1606         } else {
1607           masterBuilder_.mergeFrom(value);
1608         }
1609         bitField0_ |= 0x00000001;
1610         return this;
1611       }
1612       /**
1613        * <code>required .ServerName master = 1;</code>
1614        *
1615        * <pre>
1616        * The ServerName of the current Master
1617        * </pre>
1618        */
clearMaster()1619       public Builder clearMaster() {
1620         if (masterBuilder_ == null) {
1621           master_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
1622           onChanged();
1623         } else {
1624           masterBuilder_.clear();
1625         }
        // Clear the presence bit regardless of which storage was reset.
1626         bitField0_ = (bitField0_ & ~0x00000001);
1627         return this;
1628       }
1629       /**
1630        * <code>required .ServerName master = 1;</code>
1631        *
1632        * <pre>
1633        * The ServerName of the current Master
1634        * </pre>
1635        */
getMasterBuilder()1636       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getMasterBuilder() {
        // Handing out a mutable builder marks the field present and dirty.
1637         bitField0_ |= 0x00000001;
1638         onChanged();
1639         return getMasterFieldBuilder().getBuilder();
1640       }
1641       /**
1642        * <code>required .ServerName master = 1;</code>
1643        *
1644        * <pre>
1645        * The ServerName of the current Master
1646        * </pre>
1647        */
getMasterOrBuilder()1648       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getMasterOrBuilder() {
        // Read-only view: no presence bit change, no onChanged().
1649         if (masterBuilder_ != null) {
1650           return masterBuilder_.getMessageOrBuilder();
1651         } else {
1652           return master_;
1653         }
1654       }
1655       /**
1656        * <code>required .ServerName master = 1;</code>
1657        *
1658        * <pre>
1659        * The ServerName of the current Master
1660        * </pre>
1661        */
1662       private com.google.protobuf.SingleFieldBuilder<
1663           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>
getMasterFieldBuilder()1664           getMasterFieldBuilder() {
        // Lazily replaces the plain master_ instance with a SingleFieldBuilder;
        // from then on masterBuilder_ is the single source of truth and
        // master_ is nulled out.
1665         if (masterBuilder_ == null) {
1666           masterBuilder_ = new com.google.protobuf.SingleFieldBuilder<
1667               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>(
1668                   master_,
1669                   getParentForChildren(),
1670                   isClean());
1671           master_ = null;
1672         }
1673         return masterBuilder_;
1674       }
1675 
1676       // optional uint32 rpc_version = 2;
1677       private int rpcVersion_ ;
1678       /**
1679        * <code>optional uint32 rpc_version = 2;</code>
1680        *
1681        * <pre>
1682        * Major RPC version so that clients can know what version the master can accept.
1683        * </pre>
1684        */
hasRpcVersion()1685       public boolean hasRpcVersion() {
        // Presence bit 0x2.
1686         return ((bitField0_ & 0x00000002) == 0x00000002);
1687       }
1688       /**
1689        * <code>optional uint32 rpc_version = 2;</code>
1690        *
1691        * <pre>
1692        * Major RPC version so that clients can know what version the master can accept.
1693        * </pre>
1694        */
getRpcVersion()1695       public int getRpcVersion() {
1696         return rpcVersion_;
1697       }
1698       /**
1699        * <code>optional uint32 rpc_version = 2;</code>
1700        *
1701        * <pre>
1702        * Major RPC version so that clients can know what version the master can accept.
1703        * </pre>
1704        */
setRpcVersion(int value)1705       public Builder setRpcVersion(int value) {
1706         bitField0_ |= 0x00000002;
1707         rpcVersion_ = value;
1708         onChanged();
1709         return this;
1710       }
1711       /**
1712        * <code>optional uint32 rpc_version = 2;</code>
1713        *
1714        * <pre>
1715        * Major RPC version so that clients can know what version the master can accept.
1716        * </pre>
1717        */
clearRpcVersion()1718       public Builder clearRpcVersion() {
1719         bitField0_ = (bitField0_ & ~0x00000002);
        // Restore the field's default value (0).
1720         rpcVersion_ = 0;
1721         onChanged();
1722         return this;
1723       }
1724 
1725       // optional uint32 info_port = 3;
1726       private int infoPort_ ;
1727       /**
1728        * <code>optional uint32 info_port = 3;</code>
1729        */
hasInfoPort()1730       public boolean hasInfoPort() {
        // Presence bit 0x4.
1731         return ((bitField0_ & 0x00000004) == 0x00000004);
1732       }
1733       /**
1734        * <code>optional uint32 info_port = 3;</code>
1735        */
getInfoPort()1736       public int getInfoPort() {
1737         return infoPort_;
1738       }
1739       /**
1740        * <code>optional uint32 info_port = 3;</code>
1741        */
setInfoPort(int value)1742       public Builder setInfoPort(int value) {
1743         bitField0_ |= 0x00000004;
1744         infoPort_ = value;
1745         onChanged();
1746         return this;
1747       }
1748       /**
1749        * <code>optional uint32 info_port = 3;</code>
1750        */
clearInfoPort()1751       public Builder clearInfoPort() {
1752         bitField0_ = (bitField0_ & ~0x00000004);
        // Restore the field's default value (0).
1753         infoPort_ = 0;
1754         onChanged();
1755         return this;
1756       }
1757 
1758       // @@protoc_insertion_point(builder_scope:Master)
1759     }
1760 
1761     static {
      // Eagerly create and initialize the shared default instance at class
      // load; the noInit constructor avoids the stream-parsing path.
1762       defaultInstance = new Master(true);
defaultInstance.initFields()1763       defaultInstance.initFields();
1764     }
1765 
1766     // @@protoc_insertion_point(class_scope:Master)
1767   }
1768 
1769   public interface ClusterUpOrBuilder
1770       extends com.google.protobuf.MessageOrBuilder {
    // Read-only view implemented by both ClusterUp and ClusterUp.Builder;
    // exposes presence plus String and raw-bytes accessors for start_date.
1771 
1772     // required string start_date = 1;
1773     /**
1774      * <code>required string start_date = 1;</code>
1775      *
1776      * <pre>
1777      * If this znode is present, cluster is up.  Currently
1778      * the data is cluster start_date.
1779      * </pre>
1780      */
hasStartDate()1781     boolean hasStartDate();
1782     /**
1783      * <code>required string start_date = 1;</code>
1784      *
1785      * <pre>
1786      * If this znode is present, cluster is up.  Currently
1787      * the data is cluster start_date.
1788      * </pre>
1789      */
getStartDate()1790     java.lang.String getStartDate();
1791     /**
1792      * <code>required string start_date = 1;</code>
1793      *
1794      * <pre>
1795      * If this znode is present, cluster is up.  Currently
1796      * the data is cluster start_date.
1797      * </pre>
1798      */
1799     com.google.protobuf.ByteString
getStartDateBytes()1800         getStartDateBytes();
1801   }
1802   /**
1803    * Protobuf type {@code ClusterUp}
1804    *
1805    * <pre>
1806    **
1807    * Content of the '/hbase/running', cluster state, znode.
1808    * </pre>
1809    */
1810   public static final class ClusterUp extends
1811       com.google.protobuf.GeneratedMessage
1812       implements ClusterUpOrBuilder {
1813     // Use ClusterUp.newBuilder() to construct.
ClusterUp(com.google.protobuf.GeneratedMessage.Builder<?> builder)1814     private ClusterUp(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
1815       super(builder);
1816       this.unknownFields = builder.getUnknownFields();
1817     }
      // noInit constructor: used only to create the singleton default
      // instance (see the static initializer at the end of the class).
ClusterUp(boolean noInit)1818     private ClusterUp(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
1819 
1820     private static final ClusterUp defaultInstance;
getDefaultInstance()1821     public static ClusterUp getDefaultInstance() {
1822       return defaultInstance;
1823     }
1824 
getDefaultInstanceForType()1825     public ClusterUp getDefaultInstanceForType() {
1826       return defaultInstance;
1827     }
1828 
1829     private final com.google.protobuf.UnknownFieldSet unknownFields;
1830     @java.lang.Override
1831     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()1832         getUnknownFields() {
1833       return this.unknownFields;
1834     }
ClusterUp( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)1835     private ClusterUp(
1836         com.google.protobuf.CodedInputStream input,
1837         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1838         throws com.google.protobuf.InvalidProtocolBufferException {
1839       initFields();
1840       int mutable_bitField0_ = 0;
1841       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
1842           com.google.protobuf.UnknownFieldSet.newBuilder();
1843       try {
1844         boolean done = false;
1845         while (!done) {
1846           int tag = input.readTag();
1847           switch (tag) {
            // Tag 0 signals end of input.
1848             case 0:
1849               done = true;
1850               break;
            // Unrecognized tags are preserved in unknownFields; parsing stops
            // when an unknown field cannot be consumed.
1851             default: {
1852               if (!parseUnknownField(input, unknownFields,
1853                                      extensionRegistry, tag)) {
1854                 done = true;
1855               }
1856               break;
1857             }
            // Tag 10 = field 1 (start_date), wire type 2 (length-delimited);
            // stored as raw bytes and lazily decoded by getStartDate().
1858             case 10: {
1859               bitField0_ |= 0x00000001;
1860               startDate_ = input.readBytes();
1861               break;
1862             }
1863           }
1864         }
1865       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
1866         throw e.setUnfinishedMessage(this);
1867       } catch (java.io.IOException e) {
1868         throw new com.google.protobuf.InvalidProtocolBufferException(
1869             e.getMessage()).setUnfinishedMessage(this);
1870       } finally {
        // Always freeze the collected unknown fields, even on failure.
1871         this.unknownFields = unknownFields.build();
1872         makeExtensionsImmutable();
1873       }
1874     }
1875     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()1876         getDescriptor() {
1877       return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ClusterUp_descriptor;
1878     }
1879 
1880     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()1881         internalGetFieldAccessorTable() {
1882       return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ClusterUp_fieldAccessorTable
1883           .ensureFieldAccessorsInitialized(
1884               org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.Builder.class);
1885     }
1886 
    // Stream parser delegated to by the static parseFrom overloads; it simply
    // invokes the parsing constructor above.
1887     public static com.google.protobuf.Parser<ClusterUp> PARSER =
1888         new com.google.protobuf.AbstractParser<ClusterUp>() {
1889       public ClusterUp parsePartialFrom(
1890           com.google.protobuf.CodedInputStream input,
1891           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
1892           throws com.google.protobuf.InvalidProtocolBufferException {
1893         return new ClusterUp(input, extensionRegistry);
1894       }
1895     };
1896 
1897     @java.lang.Override
getParserForType()1898     public com.google.protobuf.Parser<ClusterUp> getParserForType() {
1899       return PARSER;
1900     }
1901 
1902     private int bitField0_;
1903     // required string start_date = 1;
1904     public static final int START_DATE_FIELD_NUMBER = 1;
    // startDate_ holds either a String or a ByteString; the accessors below
    // convert between the two lazily and cache the result.
1905     private java.lang.Object startDate_;
1906     /**
1907      * <code>required string start_date = 1;</code>
1908      *
1909      * <pre>
1910      * If this znode is present, cluster is up.  Currently
1911      * the data is cluster start_date.
1912      * </pre>
1913      */
hasStartDate()1914     public boolean hasStartDate() {
1915       return ((bitField0_ & 0x00000001) == 0x00000001);
1916     }
1917     /**
1918      * <code>required string start_date = 1;</code>
1919      *
1920      * <pre>
1921      * If this znode is present, cluster is up.  Currently
1922      * the data is cluster start_date.
1923      * </pre>
1924      */
getStartDate()1925     public java.lang.String getStartDate() {
1926       java.lang.Object ref = startDate_;
1927       if (ref instanceof java.lang.String) {
1928         return (java.lang.String) ref;
1929       } else {
1930         com.google.protobuf.ByteString bs =
1931             (com.google.protobuf.ByteString) ref;
1932         java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String only when the bytes were valid UTF-8.
1933         if (bs.isValidUtf8()) {
1934           startDate_ = s;
1935         }
1936         return s;
1937       }
1938     }
1939     /**
1940      * <code>required string start_date = 1;</code>
1941      *
1942      * <pre>
1943      * If this znode is present, cluster is up.  Currently
1944      * the data is cluster start_date.
1945      * </pre>
1946      */
1947     public com.google.protobuf.ByteString
getStartDateBytes()1948         getStartDateBytes() {
1949       java.lang.Object ref = startDate_;
1950       if (ref instanceof java.lang.String) {
1951         com.google.protobuf.ByteString b =
1952             com.google.protobuf.ByteString.copyFromUtf8(
1953                 (java.lang.String) ref);
        // Cache the UTF-8 encoding for subsequent calls.
1954         startDate_ = b;
1955         return b;
1956       } else {
1957         return (com.google.protobuf.ByteString) ref;
1958       }
1959     }
1960 
initFields()1961     private void initFields() {
1962       startDate_ = "";
1963     }
    // Memoized tri-state: -1 = unknown, 0 = not initialized, 1 = initialized.
1964     private byte memoizedIsInitialized = -1;
isInitialized()1965     public final boolean isInitialized() {
1966       byte isInitialized = memoizedIsInitialized;
1967       if (isInitialized != -1) return isInitialized == 1;
1968 
      // start_date is the only required field checked here.
1969       if (!hasStartDate()) {
1970         memoizedIsInitialized = 0;
1971         return false;
1972       }
1973       memoizedIsInitialized = 1;
1974       return true;
1975     }
1976 
writeTo(com.google.protobuf.CodedOutputStream output)1977     public void writeTo(com.google.protobuf.CodedOutputStream output)
1978                         throws java.io.IOException {
      // Compute (and memoize) the size before writing.
1979       getSerializedSize();
1980       if (((bitField0_ & 0x00000001) == 0x00000001)) {
1981         output.writeBytes(1, getStartDateBytes());
1982       }
1983       getUnknownFields().writeTo(output);
1984     }
1985 
1986     private int memoizedSerializedSize = -1;
getSerializedSize()1987     public int getSerializedSize() {
      // -1 means "not computed yet"; the result is cached after first call.
1988       int size = memoizedSerializedSize;
1989       if (size != -1) return size;
1990 
1991       size = 0;
1992       if (((bitField0_ & 0x00000001) == 0x00000001)) {
1993         size += com.google.protobuf.CodedOutputStream
1994           .computeBytesSize(1, getStartDateBytes());
1995       }
1996       size += getUnknownFields().getSerializedSize();
1997       memoizedSerializedSize = size;
1998       return size;
1999     }
2000 
2001     private static final long serialVersionUID = 0L;
2002     @java.lang.Override
writeReplace()2003     protected java.lang.Object writeReplace()
2004         throws java.io.ObjectStreamException {
2005       return super.writeReplace();
2006     }
2007 
2008     @java.lang.Override
equals(final java.lang.Object obj)2009     public boolean equals(final java.lang.Object obj) {
2010       if (obj == this) {
2011        return true;
2012       }
2013       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp)) {
2014         return super.equals(obj);
2015       }
2016       org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp) obj;
2017 
      // Field-by-field comparison: presence must match, then the value, then
      // the unknown-field sets.
2018       boolean result = true;
2019       result = result && (hasStartDate() == other.hasStartDate());
2020       if (hasStartDate()) {
2021         result = result && getStartDate()
2022             .equals(other.getStartDate());
2023       }
2024       result = result &&
2025           getUnknownFields().equals(other.getUnknownFields());
2026       return result;
2027     }
2028 
    // 0 means "not computed yet"; hash is cached after first computation.
2029     private int memoizedHashCode = 0;
2030     @java.lang.Override
hashCode()2031     public int hashCode() {
2032       if (memoizedHashCode != 0) {
2033         return memoizedHashCode;
2034       }
2035       int hash = 41;
2036       hash = (19 * hash) + getDescriptorForType().hashCode();
2037       if (hasStartDate()) {
2038         hash = (37 * hash) + START_DATE_FIELD_NUMBER;
2039         hash = (53 * hash) + getStartDate().hashCode();
2040       }
2041       hash = (29 * hash) + getUnknownFields().hashCode();
2042       memoizedHashCode = hash;
2043       return hash;
2044     }
2045 
parseFrom( com.google.protobuf.ByteString data)2046     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom(
2047         com.google.protobuf.ByteString data)
2048         throws com.google.protobuf.InvalidProtocolBufferException {
      // This and every parse overload below delegate to the shared PARSER.
2049       return PARSER.parseFrom(data);
2050     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2051     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom(
2052         com.google.protobuf.ByteString data,
2053         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2054         throws com.google.protobuf.InvalidProtocolBufferException {
2055       return PARSER.parseFrom(data, extensionRegistry);
2056     }
parseFrom(byte[] data)2057     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom(byte[] data)
2058         throws com.google.protobuf.InvalidProtocolBufferException {
2059       return PARSER.parseFrom(data);
2060     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2061     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom(
2062         byte[] data,
2063         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2064         throws com.google.protobuf.InvalidProtocolBufferException {
2065       return PARSER.parseFrom(data, extensionRegistry);
2066     }
parseFrom(java.io.InputStream input)2067     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom(java.io.InputStream input)
2068         throws java.io.IOException {
2069       return PARSER.parseFrom(input);
2070     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2071     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom(
2072         java.io.InputStream input,
2073         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2074         throws java.io.IOException {
2075       return PARSER.parseFrom(input, extensionRegistry);
2076     }
parseDelimitedFrom(java.io.InputStream input)2077     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseDelimitedFrom(java.io.InputStream input)
2078         throws java.io.IOException {
2079       return PARSER.parseDelimitedFrom(input);
2080     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2081     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseDelimitedFrom(
2082         java.io.InputStream input,
2083         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2084         throws java.io.IOException {
2085       return PARSER.parseDelimitedFrom(input, extensionRegistry);
2086     }
parseFrom( com.google.protobuf.CodedInputStream input)2087     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom(
2088         com.google.protobuf.CodedInputStream input)
2089         throws java.io.IOException {
2090       return PARSER.parseFrom(input);
2091     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)2092     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parseFrom(
2093         com.google.protobuf.CodedInputStream input,
2094         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2095         throws java.io.IOException {
2096       return PARSER.parseFrom(input, extensionRegistry);
2097     }
2098 
newBuilder()2099     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()2100     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp prototype)2101     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp prototype) {
      // Seed a fresh builder with an existing message's fields.
2102       return newBuilder().mergeFrom(prototype);
2103     }
toBuilder()2104     public Builder toBuilder() { return newBuilder(this); }
2105 
2106     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)2107     protected Builder newBuilderForType(
2108         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
2109       Builder builder = new Builder(parent);
2110       return builder;
2111     }
2112     /**
2113      * Protobuf type {@code ClusterUp}
2114      *
2115      * <pre>
2116      **
2117      * Content of the '/hbase/running', cluster state, znode.
2118      * </pre>
2119      */
2120     public static final class Builder extends
2121         com.google.protobuf.GeneratedMessage.Builder<Builder>
2122        implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUpOrBuilder {
2123       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()2124           getDescriptor() {
2125         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ClusterUp_descriptor;
2126       }
2127 
2128       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()2129           internalGetFieldAccessorTable() {
2130         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ClusterUp_fieldAccessorTable
2131             .ensureFieldAccessorsInitialized(
2132                 org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.Builder.class);
2133       }
2134 
2135       // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.newBuilder()
Builder()2136       private Builder() {
2137         maybeForceBuilderInitialization();
2138       }
2139 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)2140       private Builder(
2141           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
2142         super(parent);
2143         maybeForceBuilderInitialization();
2144       }
maybeForceBuilderInitialization()2145       private void maybeForceBuilderInitialization() {
        // ClusterUp has no message-typed fields, so there are no nested field
        // builders to force-initialize; the generated body is intentionally
        // empty.
2146         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
2147         }
2148       }
create()2149       private static Builder create() {
2150         return new Builder();
2151       }
2152 
clear()2153       public Builder clear() {
2154         super.clear();
        // Reset start_date to its default ("") and clear its presence bit.
2155         startDate_ = "";
2156         bitField0_ = (bitField0_ & ~0x00000001);
2157         return this;
2158       }
2159 
clone()2160       public Builder clone() {
        // Deep copy via buildPartial()/mergeFrom round-trip.
2161         return create().mergeFrom(buildPartial());
2162       }
2163 
2164       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()2165           getDescriptorForType() {
2166         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ClusterUp_descriptor;
2167       }
2168 
getDefaultInstanceForType()2169       public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp getDefaultInstanceForType() {
2170         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.getDefaultInstance();
2171       }
2172 
build()2173       public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp build() {
        // Throws if the required start_date field is unset.
2174         org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp result = buildPartial();
2175         if (!result.isInitialized()) {
2176           throw newUninitializedMessageException(result);
2177         }
2178         return result;
2179       }
2180 
      /**
       * Builds the message without checking required fields.  Copies the
       * builder's field values and translates the builder's has-bits into
       * the message's bitField0_.
       */
      public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        // Propagate has-bit for start_date (bit 0) if set on the builder.
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.startDate_ = startDate_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
2193 
      /**
       * Generic merge entry point: dispatches to the typed overload when
       * {@code other} is a ClusterUp, otherwise falls back to reflective merge.
       */
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      /**
       * Merges the set fields of {@code other} into this builder.  Merging
       * the default instance is a no-op.
       */
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp.getDefaultInstance()) return this;
        if (other.hasStartDate()) {
          bitField0_ |= 0x00000001;
          startDate_ = other.startDate_;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      /** True iff the required field start_date has been set. */
      public final boolean isInitialized() {
        if (!hasStartDate()) {

          return false;
        }
        return true;
      }
2221 
      /**
       * Parses a ClusterUp from the stream and merges it into this builder.
       * On parse failure the partially-parsed message is still merged
       * (via the finally block) before the exception propagates, preserving
       * any fields read before the error.
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Recover whatever was parsed before the failure, then rethrow.
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ClusterUp) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
2239       private int bitField0_;
2240 
2241       // required string start_date = 1;
2242       private java.lang.Object startDate_ = "";
2243       /**
2244        * <code>required string start_date = 1;</code>
2245        *
2246        * <pre>
2247        * If this znode is present, cluster is up.  Currently
2248        * the data is cluster start_date.
2249        * </pre>
2250        */
hasStartDate()2251       public boolean hasStartDate() {
2252         return ((bitField0_ & 0x00000001) == 0x00000001);
2253       }
2254       /**
2255        * <code>required string start_date = 1;</code>
2256        *
2257        * <pre>
2258        * If this znode is present, cluster is up.  Currently
2259        * the data is cluster start_date.
2260        * </pre>
2261        */
getStartDate()2262       public java.lang.String getStartDate() {
2263         java.lang.Object ref = startDate_;
2264         if (!(ref instanceof java.lang.String)) {
2265           java.lang.String s = ((com.google.protobuf.ByteString) ref)
2266               .toStringUtf8();
2267           startDate_ = s;
2268           return s;
2269         } else {
2270           return (java.lang.String) ref;
2271         }
2272       }
2273       /**
2274        * <code>required string start_date = 1;</code>
2275        *
2276        * <pre>
2277        * If this znode is present, cluster is up.  Currently
2278        * the data is cluster start_date.
2279        * </pre>
2280        */
2281       public com.google.protobuf.ByteString
getStartDateBytes()2282           getStartDateBytes() {
2283         java.lang.Object ref = startDate_;
2284         if (ref instanceof String) {
2285           com.google.protobuf.ByteString b =
2286               com.google.protobuf.ByteString.copyFromUtf8(
2287                   (java.lang.String) ref);
2288           startDate_ = b;
2289           return b;
2290         } else {
2291           return (com.google.protobuf.ByteString) ref;
2292         }
2293       }
2294       /**
2295        * <code>required string start_date = 1;</code>
2296        *
2297        * <pre>
2298        * If this znode is present, cluster is up.  Currently
2299        * the data is cluster start_date.
2300        * </pre>
2301        */
setStartDate( java.lang.String value)2302       public Builder setStartDate(
2303           java.lang.String value) {
2304         if (value == null) {
2305     throw new NullPointerException();
2306   }
2307   bitField0_ |= 0x00000001;
2308         startDate_ = value;
2309         onChanged();
2310         return this;
2311       }
2312       /**
2313        * <code>required string start_date = 1;</code>
2314        *
2315        * <pre>
2316        * If this znode is present, cluster is up.  Currently
2317        * the data is cluster start_date.
2318        * </pre>
2319        */
clearStartDate()2320       public Builder clearStartDate() {
2321         bitField0_ = (bitField0_ & ~0x00000001);
2322         startDate_ = getDefaultInstance().getStartDate();
2323         onChanged();
2324         return this;
2325       }
2326       /**
2327        * <code>required string start_date = 1;</code>
2328        *
2329        * <pre>
2330        * If this znode is present, cluster is up.  Currently
2331        * the data is cluster start_date.
2332        * </pre>
2333        */
setStartDateBytes( com.google.protobuf.ByteString value)2334       public Builder setStartDateBytes(
2335           com.google.protobuf.ByteString value) {
2336         if (value == null) {
2337     throw new NullPointerException();
2338   }
2339   bitField0_ |= 0x00000001;
2340         startDate_ = value;
2341         onChanged();
2342         return this;
2343       }
2344 
2345       // @@protoc_insertion_point(builder_scope:ClusterUp)
2346     }
2347 
2348     static {
2349       defaultInstance = new ClusterUp(true);
defaultInstance.initFields()2350       defaultInstance.initFields();
2351     }
2352 
2353     // @@protoc_insertion_point(class_scope:ClusterUp)
2354   }
2355 
2356   public interface RegionTransitionOrBuilder
2357       extends com.google.protobuf.MessageOrBuilder {
2358 
2359     // required uint32 event_type_code = 1;
2360     /**
2361      * <code>required uint32 event_type_code = 1;</code>
2362      *
2363      * <pre>
2364      * Code for EventType gotten by doing o.a.h.h.EventHandler.EventType.getCode()
2365      * </pre>
2366      */
hasEventTypeCode()2367     boolean hasEventTypeCode();
2368     /**
2369      * <code>required uint32 event_type_code = 1;</code>
2370      *
2371      * <pre>
2372      * Code for EventType gotten by doing o.a.h.h.EventHandler.EventType.getCode()
2373      * </pre>
2374      */
getEventTypeCode()2375     int getEventTypeCode();
2376 
2377     // required bytes region_name = 2;
2378     /**
2379      * <code>required bytes region_name = 2;</code>
2380      *
2381      * <pre>
2382      * Full regionname in bytes
2383      * </pre>
2384      */
hasRegionName()2385     boolean hasRegionName();
2386     /**
2387      * <code>required bytes region_name = 2;</code>
2388      *
2389      * <pre>
2390      * Full regionname in bytes
2391      * </pre>
2392      */
getRegionName()2393     com.google.protobuf.ByteString getRegionName();
2394 
2395     // required uint64 create_time = 3;
2396     /**
2397      * <code>required uint64 create_time = 3;</code>
2398      */
hasCreateTime()2399     boolean hasCreateTime();
2400     /**
2401      * <code>required uint64 create_time = 3;</code>
2402      */
getCreateTime()2403     long getCreateTime();
2404 
2405     // required .ServerName server_name = 4;
2406     /**
2407      * <code>required .ServerName server_name = 4;</code>
2408      *
2409      * <pre>
2410      * The region server where the transition will happen or is happening
2411      * </pre>
2412      */
hasServerName()2413     boolean hasServerName();
2414     /**
2415      * <code>required .ServerName server_name = 4;</code>
2416      *
2417      * <pre>
2418      * The region server where the transition will happen or is happening
2419      * </pre>
2420      */
getServerName()2421     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName();
2422     /**
2423      * <code>required .ServerName server_name = 4;</code>
2424      *
2425      * <pre>
2426      * The region server where the transition will happen or is happening
2427      * </pre>
2428      */
getServerNameOrBuilder()2429     org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder();
2430 
2431     // optional bytes payload = 5;
2432     /**
2433      * <code>optional bytes payload = 5;</code>
2434      */
hasPayload()2435     boolean hasPayload();
2436     /**
2437      * <code>optional bytes payload = 5;</code>
2438      */
getPayload()2439     com.google.protobuf.ByteString getPayload();
2440   }
2441   /**
2442    * Protobuf type {@code RegionTransition}
2443    *
2444    * <pre>
2445    **
2446    * What we write under unassigned up in zookeeper as a region moves through
2447    * open/close, etc., regions.  Details a region in transition.
2448    * </pre>
2449    */
2450   public static final class RegionTransition extends
2451       com.google.protobuf.GeneratedMessage
2452       implements RegionTransitionOrBuilder {
2453     // Use RegionTransition.newBuilder() to construct.
    /** Builder-based constructor; captures the builder's unknown fields. */
    private RegionTransition(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    /** Constructs the singleton default instance; no parsing is performed. */
    private RegionTransition(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance, initialized in the static block at the
    // bottom of this class.
    private static final RegionTransition defaultInstance;
    public static RegionTransition getDefaultInstance() {
      return defaultInstance;
    }

    public RegionTransition getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields read off the wire that this message type does not recognize.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    /**
     * Wire-format parsing constructor.  Reads tag/value pairs until EOF
     * (tag 0) or an unrecognized, unskippable field.  Tags are
     * (field_number &lt;&lt; 3) | wire_type: 8=event_type_code varint,
     * 18=region_name bytes, 24=create_time varint, 34=server_name message,
     * 42=payload bytes.  Unknown fields are preserved in unknownFields.
     */
    private RegionTransition(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // End of stream / end of group.
              done = true;
              break;
            default: {
              // Not a known field: stash into unknownFields, stop if unskippable.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 8: {
              bitField0_ |= 0x00000001;
              eventTypeCode_ = input.readUInt32();
              break;
            }
            case 18: {
              bitField0_ |= 0x00000002;
              regionName_ = input.readBytes();
              break;
            }
            case 24: {
              bitField0_ |= 0x00000004;
              createTime_ = input.readUInt64();
              break;
            }
            case 34: {
              // If server_name was already seen, merge into the prior value
              // (protobuf semantics for repeated occurrences of a message field).
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null;
              if (((bitField0_ & 0x00000008) == 0x00000008)) {
                subBuilder = serverName_.toBuilder();
              }
              serverName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(serverName_);
                serverName_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000008;
              break;
            }
            case 42: {
              bitField0_ |= 0x00000010;
              payload_ = input.readBytes();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always freeze what was read, even on error, so the unfinished
        // message attached to the exception is usable.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
2543     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()2544         getDescriptor() {
2545       return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RegionTransition_descriptor;
2546     }
2547 
2548     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()2549         internalGetFieldAccessorTable() {
2550       return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RegionTransition_fieldAccessorTable
2551           .ensureFieldAccessorsInitialized(
2552               org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.Builder.class);
2553     }
2554 
2555     public static com.google.protobuf.Parser<RegionTransition> PARSER =
2556         new com.google.protobuf.AbstractParser<RegionTransition>() {
2557       public RegionTransition parsePartialFrom(
2558           com.google.protobuf.CodedInputStream input,
2559           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
2560           throws com.google.protobuf.InvalidProtocolBufferException {
2561         return new RegionTransition(input, extensionRegistry);
2562       }
2563     };
2564 
2565     @java.lang.Override
getParserForType()2566     public com.google.protobuf.Parser<RegionTransition> getParserForType() {
2567       return PARSER;
2568     }
2569 
2570     private int bitField0_;
2571     // required uint32 event_type_code = 1;
2572     public static final int EVENT_TYPE_CODE_FIELD_NUMBER = 1;
2573     private int eventTypeCode_;
2574     /**
2575      * <code>required uint32 event_type_code = 1;</code>
2576      *
2577      * <pre>
2578      * Code for EventType gotten by doing o.a.h.h.EventHandler.EventType.getCode()
2579      * </pre>
2580      */
hasEventTypeCode()2581     public boolean hasEventTypeCode() {
2582       return ((bitField0_ & 0x00000001) == 0x00000001);
2583     }
2584     /**
2585      * <code>required uint32 event_type_code = 1;</code>
2586      *
2587      * <pre>
2588      * Code for EventType gotten by doing o.a.h.h.EventHandler.EventType.getCode()
2589      * </pre>
2590      */
getEventTypeCode()2591     public int getEventTypeCode() {
2592       return eventTypeCode_;
2593     }
2594 
2595     // required bytes region_name = 2;
2596     public static final int REGION_NAME_FIELD_NUMBER = 2;
2597     private com.google.protobuf.ByteString regionName_;
2598     /**
2599      * <code>required bytes region_name = 2;</code>
2600      *
2601      * <pre>
2602      * Full regionname in bytes
2603      * </pre>
2604      */
hasRegionName()2605     public boolean hasRegionName() {
2606       return ((bitField0_ & 0x00000002) == 0x00000002);
2607     }
2608     /**
2609      * <code>required bytes region_name = 2;</code>
2610      *
2611      * <pre>
2612      * Full regionname in bytes
2613      * </pre>
2614      */
getRegionName()2615     public com.google.protobuf.ByteString getRegionName() {
2616       return regionName_;
2617     }
2618 
2619     // required uint64 create_time = 3;
2620     public static final int CREATE_TIME_FIELD_NUMBER = 3;
2621     private long createTime_;
2622     /**
2623      * <code>required uint64 create_time = 3;</code>
2624      */
hasCreateTime()2625     public boolean hasCreateTime() {
2626       return ((bitField0_ & 0x00000004) == 0x00000004);
2627     }
2628     /**
2629      * <code>required uint64 create_time = 3;</code>
2630      */
getCreateTime()2631     public long getCreateTime() {
2632       return createTime_;
2633     }
2634 
2635     // required .ServerName server_name = 4;
2636     public static final int SERVER_NAME_FIELD_NUMBER = 4;
2637     private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_;
2638     /**
2639      * <code>required .ServerName server_name = 4;</code>
2640      *
2641      * <pre>
2642      * The region server where the transition will happen or is happening
2643      * </pre>
2644      */
hasServerName()2645     public boolean hasServerName() {
2646       return ((bitField0_ & 0x00000008) == 0x00000008);
2647     }
2648     /**
2649      * <code>required .ServerName server_name = 4;</code>
2650      *
2651      * <pre>
2652      * The region server where the transition will happen or is happening
2653      * </pre>
2654      */
getServerName()2655     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() {
2656       return serverName_;
2657     }
2658     /**
2659      * <code>required .ServerName server_name = 4;</code>
2660      *
2661      * <pre>
2662      * The region server where the transition will happen or is happening
2663      * </pre>
2664      */
getServerNameOrBuilder()2665     public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() {
2666       return serverName_;
2667     }
2668 
2669     // optional bytes payload = 5;
2670     public static final int PAYLOAD_FIELD_NUMBER = 5;
2671     private com.google.protobuf.ByteString payload_;
2672     /**
2673      * <code>optional bytes payload = 5;</code>
2674      */
hasPayload()2675     public boolean hasPayload() {
2676       return ((bitField0_ & 0x00000010) == 0x00000010);
2677     }
2678     /**
2679      * <code>optional bytes payload = 5;</code>
2680      */
getPayload()2681     public com.google.protobuf.ByteString getPayload() {
2682       return payload_;
2683     }
2684 
    /** Sets every field to its proto default (used by constructors). */
    private void initFields() {
      eventTypeCode_ = 0;
      regionName_ = com.google.protobuf.ByteString.EMPTY;
      createTime_ = 0L;
      serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
      payload_ = com.google.protobuf.ByteString.EMPTY;
    }
    // Memoized result: -1 = unknown, 0 = not initialized, 1 = initialized.
    private byte memoizedIsInitialized = -1;
    /**
     * True iff all required fields (event_type_code, region_name,
     * create_time, server_name) are set and the nested server_name is
     * itself initialized.  Result is cached after first computation.
     */
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasEventTypeCode()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasRegionName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasCreateTime()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasServerName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getServerName().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
2720 
    /**
     * Serializes all set fields to the wire, in field-number order.
     * Unknown fields carried from parsing are appended at the end.
     */
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Forces size computation/memoization before writing.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeUInt32(1, eventTypeCode_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, regionName_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeUInt64(3, createTime_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeMessage(4, serverName_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeBytes(5, payload_);
      }
      getUnknownFields().writeTo(output);
    }

    // Cached serialized size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    /** Returns (and memoizes) the exact wire size of this message in bytes. */
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt32Size(1, eventTypeCode_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, regionName_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeUInt64Size(3, createTime_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(4, serverName_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(5, payload_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
2772 
2773     private static final long serialVersionUID = 0L;
2774     @java.lang.Override
writeReplace()2775     protected java.lang.Object writeReplace()
2776         throws java.io.ObjectStreamException {
2777       return super.writeReplace();
2778     }
2779 
2780     @java.lang.Override
equals(final java.lang.Object obj)2781     public boolean equals(final java.lang.Object obj) {
2782       if (obj == this) {
2783        return true;
2784       }
2785       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition)) {
2786         return super.equals(obj);
2787       }
2788       org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition) obj;
2789 
2790       boolean result = true;
2791       result = result && (hasEventTypeCode() == other.hasEventTypeCode());
2792       if (hasEventTypeCode()) {
2793         result = result && (getEventTypeCode()
2794             == other.getEventTypeCode());
2795       }
2796       result = result && (hasRegionName() == other.hasRegionName());
2797       if (hasRegionName()) {
2798         result = result && getRegionName()
2799             .equals(other.getRegionName());
2800       }
2801       result = result && (hasCreateTime() == other.hasCreateTime());
2802       if (hasCreateTime()) {
2803         result = result && (getCreateTime()
2804             == other.getCreateTime());
2805       }
2806       result = result && (hasServerName() == other.hasServerName());
2807       if (hasServerName()) {
2808         result = result && getServerName()
2809             .equals(other.getServerName());
2810       }
2811       result = result && (hasPayload() == other.hasPayload());
2812       if (hasPayload()) {
2813         result = result && getPayload()
2814             .equals(other.getPayload());
2815       }
2816       result = result &&
2817           getUnknownFields().equals(other.getUnknownFields());
2818       return result;
2819     }
2820 
2821     private int memoizedHashCode = 0;
2822     @java.lang.Override
hashCode()2823     public int hashCode() {
2824       if (memoizedHashCode != 0) {
2825         return memoizedHashCode;
2826       }
2827       int hash = 41;
2828       hash = (19 * hash) + getDescriptorForType().hashCode();
2829       if (hasEventTypeCode()) {
2830         hash = (37 * hash) + EVENT_TYPE_CODE_FIELD_NUMBER;
2831         hash = (53 * hash) + getEventTypeCode();
2832       }
2833       if (hasRegionName()) {
2834         hash = (37 * hash) + REGION_NAME_FIELD_NUMBER;
2835         hash = (53 * hash) + getRegionName().hashCode();
2836       }
2837       if (hasCreateTime()) {
2838         hash = (37 * hash) + CREATE_TIME_FIELD_NUMBER;
2839         hash = (53 * hash) + hashLong(getCreateTime());
2840       }
2841       if (hasServerName()) {
2842         hash = (37 * hash) + SERVER_NAME_FIELD_NUMBER;
2843         hash = (53 * hash) + getServerName().hashCode();
2844       }
2845       if (hasPayload()) {
2846         hash = (37 * hash) + PAYLOAD_FIELD_NUMBER;
2847         hash = (53 * hash) + getPayload().hashCode();
2848       }
2849       hash = (29 * hash) + getUnknownFields().hashCode();
2850       memoizedHashCode = hash;
2851       return hash;
2852     }
2853 
    // Static parse entry points; all delegate to PARSER.  The parseFrom
    // variants require a fully initialized message; parseDelimitedFrom
    // reads a length-prefixed message from the stream.
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
2906 
    /** Creates a new empty builder for RegionTransition. */
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    /** Creates a builder pre-populated with {@code prototype}'s fields. */
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    /** Creates a builder pre-populated with this message's fields. */
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: builder attached to a parent for change notification.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
2920     /**
2921      * Protobuf type {@code RegionTransition}
2922      *
2923      * <pre>
2924      **
2925      * What we write under unassigned up in zookeeper as a region moves through
2926      * open/close, etc., regions.  Details a region in transition.
2927      * </pre>
2928      */
2929     public static final class Builder extends
2930         com.google.protobuf.GeneratedMessage.Builder<Builder>
2931        implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransitionOrBuilder {
2932       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()2933           getDescriptor() {
2934         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RegionTransition_descriptor;
2935       }
2936 
2937       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()2938           internalGetFieldAccessorTable() {
2939         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RegionTransition_fieldAccessorTable
2940             .ensureFieldAccessorsInitialized(
2941                 org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.Builder.class);
2942       }
2943 
2944       // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.newBuilder()
Builder()2945       private Builder() {
2946         maybeForceBuilderInitialization();
2947       }
2948 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)2949       private Builder(
2950           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
2951         super(parent);
2952         maybeForceBuilderInitialization();
2953       }
maybeForceBuilderInitialization()2954       private void maybeForceBuilderInitialization() {
2955         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
2956           getServerNameFieldBuilder();
2957         }
2958       }
create()2959       private static Builder create() {
2960         return new Builder();
2961       }
2962 
clear()2963       public Builder clear() {
2964         super.clear();
2965         eventTypeCode_ = 0;
2966         bitField0_ = (bitField0_ & ~0x00000001);
2967         regionName_ = com.google.protobuf.ByteString.EMPTY;
2968         bitField0_ = (bitField0_ & ~0x00000002);
2969         createTime_ = 0L;
2970         bitField0_ = (bitField0_ & ~0x00000004);
2971         if (serverNameBuilder_ == null) {
2972           serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
2973         } else {
2974           serverNameBuilder_.clear();
2975         }
2976         bitField0_ = (bitField0_ & ~0x00000008);
2977         payload_ = com.google.protobuf.ByteString.EMPTY;
2978         bitField0_ = (bitField0_ & ~0x00000010);
2979         return this;
2980       }
2981 
clone()2982       public Builder clone() {
2983         return create().mergeFrom(buildPartial());
2984       }
2985 
2986       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()2987           getDescriptorForType() {
2988         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_RegionTransition_descriptor;
2989       }
2990 
getDefaultInstanceForType()2991       public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition getDefaultInstanceForType() {
2992         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.getDefaultInstance();
2993       }
2994 
build()2995       public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition build() {
2996         org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition result = buildPartial();
2997         if (!result.isInitialized()) {
2998           throw newUninitializedMessageException(result);
2999         }
3000         return result;
3001       }
3002 
buildPartial()3003       public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition buildPartial() {
3004         org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition(this);
3005         int from_bitField0_ = bitField0_;
3006         int to_bitField0_ = 0;
3007         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
3008           to_bitField0_ |= 0x00000001;
3009         }
3010         result.eventTypeCode_ = eventTypeCode_;
3011         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
3012           to_bitField0_ |= 0x00000002;
3013         }
3014         result.regionName_ = regionName_;
3015         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
3016           to_bitField0_ |= 0x00000004;
3017         }
3018         result.createTime_ = createTime_;
3019         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
3020           to_bitField0_ |= 0x00000008;
3021         }
3022         if (serverNameBuilder_ == null) {
3023           result.serverName_ = serverName_;
3024         } else {
3025           result.serverName_ = serverNameBuilder_.build();
3026         }
3027         if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
3028           to_bitField0_ |= 0x00000010;
3029         }
3030         result.payload_ = payload_;
3031         result.bitField0_ = to_bitField0_;
3032         onBuilt();
3033         return result;
3034       }
3035 
mergeFrom(com.google.protobuf.Message other)3036       public Builder mergeFrom(com.google.protobuf.Message other) {
3037         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition) {
3038           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition)other);
3039         } else {
3040           super.mergeFrom(other);
3041           return this;
3042         }
3043       }
3044 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition other)3045       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition other) {
3046         if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition.getDefaultInstance()) return this;
3047         if (other.hasEventTypeCode()) {
3048           setEventTypeCode(other.getEventTypeCode());
3049         }
3050         if (other.hasRegionName()) {
3051           setRegionName(other.getRegionName());
3052         }
3053         if (other.hasCreateTime()) {
3054           setCreateTime(other.getCreateTime());
3055         }
3056         if (other.hasServerName()) {
3057           mergeServerName(other.getServerName());
3058         }
3059         if (other.hasPayload()) {
3060           setPayload(other.getPayload());
3061         }
3062         this.mergeUnknownFields(other.getUnknownFields());
3063         return this;
3064       }
3065 
isInitialized()3066       public final boolean isInitialized() {
3067         if (!hasEventTypeCode()) {
3068 
3069           return false;
3070         }
3071         if (!hasRegionName()) {
3072 
3073           return false;
3074         }
3075         if (!hasCreateTime()) {
3076 
3077           return false;
3078         }
3079         if (!hasServerName()) {
3080 
3081           return false;
3082         }
3083         if (!getServerName().isInitialized()) {
3084 
3085           return false;
3086         }
3087         return true;
3088       }
3089 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3090       public Builder mergeFrom(
3091           com.google.protobuf.CodedInputStream input,
3092           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3093           throws java.io.IOException {
3094         org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition parsedMessage = null;
3095         try {
3096           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
3097         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3098           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.RegionTransition) e.getUnfinishedMessage();
3099           throw e;
3100         } finally {
3101           if (parsedMessage != null) {
3102             mergeFrom(parsedMessage);
3103           }
3104         }
3105         return this;
3106       }
3107       private int bitField0_;
3108 
3109       // required uint32 event_type_code = 1;
3110       private int eventTypeCode_ ;
3111       /**
3112        * <code>required uint32 event_type_code = 1;</code>
3113        *
3114        * <pre>
3115        * Code for EventType gotten by doing o.a.h.h.EventHandler.EventType.getCode()
3116        * </pre>
3117        */
hasEventTypeCode()3118       public boolean hasEventTypeCode() {
3119         return ((bitField0_ & 0x00000001) == 0x00000001);
3120       }
3121       /**
3122        * <code>required uint32 event_type_code = 1;</code>
3123        *
3124        * <pre>
3125        * Code for EventType gotten by doing o.a.h.h.EventHandler.EventType.getCode()
3126        * </pre>
3127        */
getEventTypeCode()3128       public int getEventTypeCode() {
3129         return eventTypeCode_;
3130       }
3131       /**
3132        * <code>required uint32 event_type_code = 1;</code>
3133        *
3134        * <pre>
3135        * Code for EventType gotten by doing o.a.h.h.EventHandler.EventType.getCode()
3136        * </pre>
3137        */
setEventTypeCode(int value)3138       public Builder setEventTypeCode(int value) {
3139         bitField0_ |= 0x00000001;
3140         eventTypeCode_ = value;
3141         onChanged();
3142         return this;
3143       }
3144       /**
3145        * <code>required uint32 event_type_code = 1;</code>
3146        *
3147        * <pre>
3148        * Code for EventType gotten by doing o.a.h.h.EventHandler.EventType.getCode()
3149        * </pre>
3150        */
clearEventTypeCode()3151       public Builder clearEventTypeCode() {
3152         bitField0_ = (bitField0_ & ~0x00000001);
3153         eventTypeCode_ = 0;
3154         onChanged();
3155         return this;
3156       }
3157 
3158       // required bytes region_name = 2;
3159       private com.google.protobuf.ByteString regionName_ = com.google.protobuf.ByteString.EMPTY;
3160       /**
3161        * <code>required bytes region_name = 2;</code>
3162        *
3163        * <pre>
3164        * Full regionname in bytes
3165        * </pre>
3166        */
hasRegionName()3167       public boolean hasRegionName() {
3168         return ((bitField0_ & 0x00000002) == 0x00000002);
3169       }
3170       /**
3171        * <code>required bytes region_name = 2;</code>
3172        *
3173        * <pre>
3174        * Full regionname in bytes
3175        * </pre>
3176        */
getRegionName()3177       public com.google.protobuf.ByteString getRegionName() {
3178         return regionName_;
3179       }
3180       /**
3181        * <code>required bytes region_name = 2;</code>
3182        *
3183        * <pre>
3184        * Full regionname in bytes
3185        * </pre>
3186        */
setRegionName(com.google.protobuf.ByteString value)3187       public Builder setRegionName(com.google.protobuf.ByteString value) {
3188         if (value == null) {
3189     throw new NullPointerException();
3190   }
3191   bitField0_ |= 0x00000002;
3192         regionName_ = value;
3193         onChanged();
3194         return this;
3195       }
3196       /**
3197        * <code>required bytes region_name = 2;</code>
3198        *
3199        * <pre>
3200        * Full regionname in bytes
3201        * </pre>
3202        */
clearRegionName()3203       public Builder clearRegionName() {
3204         bitField0_ = (bitField0_ & ~0x00000002);
3205         regionName_ = getDefaultInstance().getRegionName();
3206         onChanged();
3207         return this;
3208       }
3209 
3210       // required uint64 create_time = 3;
3211       private long createTime_ ;
3212       /**
3213        * <code>required uint64 create_time = 3;</code>
3214        */
hasCreateTime()3215       public boolean hasCreateTime() {
3216         return ((bitField0_ & 0x00000004) == 0x00000004);
3217       }
3218       /**
3219        * <code>required uint64 create_time = 3;</code>
3220        */
getCreateTime()3221       public long getCreateTime() {
3222         return createTime_;
3223       }
3224       /**
3225        * <code>required uint64 create_time = 3;</code>
3226        */
setCreateTime(long value)3227       public Builder setCreateTime(long value) {
3228         bitField0_ |= 0x00000004;
3229         createTime_ = value;
3230         onChanged();
3231         return this;
3232       }
3233       /**
3234        * <code>required uint64 create_time = 3;</code>
3235        */
clearCreateTime()3236       public Builder clearCreateTime() {
3237         bitField0_ = (bitField0_ & ~0x00000004);
3238         createTime_ = 0L;
3239         onChanged();
3240         return this;
3241       }
3242 
3243       // required .ServerName server_name = 4;
3244       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
3245       private com.google.protobuf.SingleFieldBuilder<
3246           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverNameBuilder_;
3247       /**
3248        * <code>required .ServerName server_name = 4;</code>
3249        *
3250        * <pre>
3251        * The region server where the transition will happen or is happening
3252        * </pre>
3253        */
hasServerName()3254       public boolean hasServerName() {
3255         return ((bitField0_ & 0x00000008) == 0x00000008);
3256       }
3257       /**
3258        * <code>required .ServerName server_name = 4;</code>
3259        *
3260        * <pre>
3261        * The region server where the transition will happen or is happening
3262        * </pre>
3263        */
getServerName()3264       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() {
3265         if (serverNameBuilder_ == null) {
3266           return serverName_;
3267         } else {
3268           return serverNameBuilder_.getMessage();
3269         }
3270       }
3271       /**
3272        * <code>required .ServerName server_name = 4;</code>
3273        *
3274        * <pre>
3275        * The region server where the transition will happen or is happening
3276        * </pre>
3277        */
setServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value)3278       public Builder setServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
3279         if (serverNameBuilder_ == null) {
3280           if (value == null) {
3281             throw new NullPointerException();
3282           }
3283           serverName_ = value;
3284           onChanged();
3285         } else {
3286           serverNameBuilder_.setMessage(value);
3287         }
3288         bitField0_ |= 0x00000008;
3289         return this;
3290       }
3291       /**
3292        * <code>required .ServerName server_name = 4;</code>
3293        *
3294        * <pre>
3295        * The region server where the transition will happen or is happening
3296        * </pre>
3297        */
setServerName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue)3298       public Builder setServerName(
3299           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
3300         if (serverNameBuilder_ == null) {
3301           serverName_ = builderForValue.build();
3302           onChanged();
3303         } else {
3304           serverNameBuilder_.setMessage(builderForValue.build());
3305         }
3306         bitField0_ |= 0x00000008;
3307         return this;
3308       }
3309       /**
3310        * <code>required .ServerName server_name = 4;</code>
3311        *
3312        * <pre>
3313        * The region server where the transition will happen or is happening
3314        * </pre>
3315        */
mergeServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value)3316       public Builder mergeServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
3317         if (serverNameBuilder_ == null) {
3318           if (((bitField0_ & 0x00000008) == 0x00000008) &&
3319               serverName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) {
3320             serverName_ =
3321               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(serverName_).mergeFrom(value).buildPartial();
3322           } else {
3323             serverName_ = value;
3324           }
3325           onChanged();
3326         } else {
3327           serverNameBuilder_.mergeFrom(value);
3328         }
3329         bitField0_ |= 0x00000008;
3330         return this;
3331       }
3332       /**
3333        * <code>required .ServerName server_name = 4;</code>
3334        *
3335        * <pre>
3336        * The region server where the transition will happen or is happening
3337        * </pre>
3338        */
clearServerName()3339       public Builder clearServerName() {
3340         if (serverNameBuilder_ == null) {
3341           serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
3342           onChanged();
3343         } else {
3344           serverNameBuilder_.clear();
3345         }
3346         bitField0_ = (bitField0_ & ~0x00000008);
3347         return this;
3348       }
3349       /**
3350        * <code>required .ServerName server_name = 4;</code>
3351        *
3352        * <pre>
3353        * The region server where the transition will happen or is happening
3354        * </pre>
3355        */
getServerNameBuilder()3356       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerNameBuilder() {
3357         bitField0_ |= 0x00000008;
3358         onChanged();
3359         return getServerNameFieldBuilder().getBuilder();
3360       }
3361       /**
3362        * <code>required .ServerName server_name = 4;</code>
3363        *
3364        * <pre>
3365        * The region server where the transition will happen or is happening
3366        * </pre>
3367        */
getServerNameOrBuilder()3368       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() {
3369         if (serverNameBuilder_ != null) {
3370           return serverNameBuilder_.getMessageOrBuilder();
3371         } else {
3372           return serverName_;
3373         }
3374       }
3375       /**
3376        * <code>required .ServerName server_name = 4;</code>
3377        *
3378        * <pre>
3379        * The region server where the transition will happen or is happening
3380        * </pre>
3381        */
3382       private com.google.protobuf.SingleFieldBuilder<
3383           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>
getServerNameFieldBuilder()3384           getServerNameFieldBuilder() {
3385         if (serverNameBuilder_ == null) {
3386           serverNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
3387               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>(
3388                   serverName_,
3389                   getParentForChildren(),
3390                   isClean());
3391           serverName_ = null;
3392         }
3393         return serverNameBuilder_;
3394       }
3395 
3396       // optional bytes payload = 5;
3397       private com.google.protobuf.ByteString payload_ = com.google.protobuf.ByteString.EMPTY;
3398       /**
3399        * <code>optional bytes payload = 5;</code>
3400        */
hasPayload()3401       public boolean hasPayload() {
3402         return ((bitField0_ & 0x00000010) == 0x00000010);
3403       }
3404       /**
3405        * <code>optional bytes payload = 5;</code>
3406        */
getPayload()3407       public com.google.protobuf.ByteString getPayload() {
3408         return payload_;
3409       }
3410       /**
3411        * <code>optional bytes payload = 5;</code>
3412        */
setPayload(com.google.protobuf.ByteString value)3413       public Builder setPayload(com.google.protobuf.ByteString value) {
3414         if (value == null) {
3415     throw new NullPointerException();
3416   }
3417   bitField0_ |= 0x00000010;
3418         payload_ = value;
3419         onChanged();
3420         return this;
3421       }
3422       /**
3423        * <code>optional bytes payload = 5;</code>
3424        */
clearPayload()3425       public Builder clearPayload() {
3426         bitField0_ = (bitField0_ & ~0x00000010);
3427         payload_ = getDefaultInstance().getPayload();
3428         onChanged();
3429         return this;
3430       }
3431 
3432       // @@protoc_insertion_point(builder_scope:RegionTransition)
3433     }
3434 
    static {
      // Eagerly create the shared default instance (the noInit constructor
      // skips parsing) and populate its field defaults.
      defaultInstance = new RegionTransition(true);
      defaultInstance.initFields();
    }
3439 
3440     // @@protoc_insertion_point(class_scope:RegionTransition)
3441   }
3442 
  /**
   * Read-only accessor view shared by {@code SplitLogTask} and its builder.
   */
  public interface SplitLogTaskOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .SplitLogTask.State state = 1;
    /**
     * <code>required .SplitLogTask.State state = 1;</code>
     */
    boolean hasState();
    /**
     * <code>required .SplitLogTask.State state = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State getState();

    // required .ServerName server_name = 2;
    /**
     * <code>required .ServerName server_name = 2;</code>
     */
    boolean hasServerName();
    /**
     * <code>required .ServerName server_name = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName();
    /**
     * <code>required .ServerName server_name = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder();

    // optional .SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN];
    /**
     * <code>optional .SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN];</code>
     */
    boolean hasMode();
    /**
     * <code>optional .SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN];</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode getMode();
  }
3480   /**
3481    * Protobuf type {@code SplitLogTask}
3482    *
3483    * <pre>
3484    **
3485    * WAL SplitLog directory znodes have this for content.  Used doing distributed
3486    * WAL splitting.  Holds current state and name of server that originated split.
3487    * </pre>
3488    */
3489   public static final class SplitLogTask extends
3490       com.google.protobuf.GeneratedMessage
3491       implements SplitLogTaskOrBuilder {
    // Use SplitLogTask.newBuilder() to construct.
    private SplitLogTask(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      // Capture any unknown fields accumulated on the builder.
      this.unknownFields = builder.getUnknownFields();
    }
SplitLogTask(boolean noInit)3497     private SplitLogTask(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
3498 
    // Shared immutable default instance, assigned in the class's static block.
    private static final SplitLogTask defaultInstance;
    /** Returns the singleton default (all-fields-default) instance. */
    public static SplitLogTask getDefaultInstance() {
      return defaultInstance;
    }

    public SplitLogTask getDefaultInstanceForType() {
      return defaultInstance;
    }
3507 
    // Fields seen on the wire that this generated class does not recognize.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
SplitLogTask( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3514     private SplitLogTask(
3515         com.google.protobuf.CodedInputStream input,
3516         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
3517         throws com.google.protobuf.InvalidProtocolBufferException {
3518       initFields();
3519       int mutable_bitField0_ = 0;
3520       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
3521           com.google.protobuf.UnknownFieldSet.newBuilder();
3522       try {
3523         boolean done = false;
3524         while (!done) {
3525           int tag = input.readTag();
3526           switch (tag) {
3527             case 0:
3528               done = true;
3529               break;
3530             default: {
3531               if (!parseUnknownField(input, unknownFields,
3532                                      extensionRegistry, tag)) {
3533                 done = true;
3534               }
3535               break;
3536             }
3537             case 8: {
3538               int rawValue = input.readEnum();
3539               org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State value = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.valueOf(rawValue);
3540               if (value == null) {
3541                 unknownFields.mergeVarintField(1, rawValue);
3542               } else {
3543                 bitField0_ |= 0x00000001;
3544                 state_ = value;
3545               }
3546               break;
3547             }
3548             case 18: {
3549               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null;
3550               if (((bitField0_ & 0x00000002) == 0x00000002)) {
3551                 subBuilder = serverName_.toBuilder();
3552               }
3553               serverName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry);
3554               if (subBuilder != null) {
3555                 subBuilder.mergeFrom(serverName_);
3556                 serverName_ = subBuilder.buildPartial();
3557               }
3558               bitField0_ |= 0x00000002;
3559               break;
3560             }
3561             case 24: {
3562               int rawValue = input.readEnum();
3563               org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode value = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode.valueOf(rawValue);
3564               if (value == null) {
3565                 unknownFields.mergeVarintField(3, rawValue);
3566               } else {
3567                 bitField0_ |= 0x00000004;
3568                 mode_ = value;
3569               }
3570               break;
3571             }
3572           }
3573         }
3574       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
3575         throw e.setUnfinishedMessage(this);
3576       } catch (java.io.IOException e) {
3577         throw new com.google.protobuf.InvalidProtocolBufferException(
3578             e.getMessage()).setUnfinishedMessage(this);
3579       } finally {
3580         this.unknownFields = unknownFields.build();
3581         makeExtensionsImmutable();
3582       }
3583     }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_SplitLogTask_descriptor;
    }
3588 
    // Hooks this class's fields into the reflection-based accessor machinery.
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_SplitLogTask_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.Builder.class);
    }
3595 
    // Stream parser used by the protobuf runtime; delegates to the parsing ctor.
    public static com.google.protobuf.Parser<SplitLogTask> PARSER =
        new com.google.protobuf.AbstractParser<SplitLogTask>() {
      public SplitLogTask parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new SplitLogTask(input, extensionRegistry);
      }
    };
3605 
    @java.lang.Override
    public com.google.protobuf.Parser<SplitLogTask> getParserForType() {
      return PARSER;
    }
3610 
3611     /**
3612      * Protobuf enum {@code SplitLogTask.State}
3613      */
3614     public enum State
3615         implements com.google.protobuf.ProtocolMessageEnum {
3616       /**
3617        * <code>UNASSIGNED = 0;</code>
3618        */
3619       UNASSIGNED(0, 0),
3620       /**
3621        * <code>OWNED = 1;</code>
3622        */
3623       OWNED(1, 1),
3624       /**
3625        * <code>RESIGNED = 2;</code>
3626        */
3627       RESIGNED(2, 2),
3628       /**
3629        * <code>DONE = 3;</code>
3630        */
3631       DONE(3, 3),
3632       /**
3633        * <code>ERR = 4;</code>
3634        */
3635       ERR(4, 4),
3636       ;
3637 
3638       /**
3639        * <code>UNASSIGNED = 0;</code>
3640        */
3641       public static final int UNASSIGNED_VALUE = 0;
3642       /**
3643        * <code>OWNED = 1;</code>
3644        */
3645       public static final int OWNED_VALUE = 1;
3646       /**
3647        * <code>RESIGNED = 2;</code>
3648        */
3649       public static final int RESIGNED_VALUE = 2;
3650       /**
3651        * <code>DONE = 3;</code>
3652        */
3653       public static final int DONE_VALUE = 3;
3654       /**
3655        * <code>ERR = 4;</code>
3656        */
3657       public static final int ERR_VALUE = 4;
3658 
3659 
getNumber()3660       public final int getNumber() { return value; }
3661 
valueOf(int value)3662       public static State valueOf(int value) {
3663         switch (value) {
3664           case 0: return UNASSIGNED;
3665           case 1: return OWNED;
3666           case 2: return RESIGNED;
3667           case 3: return DONE;
3668           case 4: return ERR;
3669           default: return null;
3670         }
3671       }
3672 
3673       public static com.google.protobuf.Internal.EnumLiteMap<State>
internalGetValueMap()3674           internalGetValueMap() {
3675         return internalValueMap;
3676       }
3677       private static com.google.protobuf.Internal.EnumLiteMap<State>
3678           internalValueMap =
3679             new com.google.protobuf.Internal.EnumLiteMap<State>() {
3680               public State findValueByNumber(int number) {
3681                 return State.valueOf(number);
3682               }
3683             };
3684 
3685       public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor()3686           getValueDescriptor() {
3687         return getDescriptor().getValues().get(index);
3688       }
3689       public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType()3690           getDescriptorForType() {
3691         return getDescriptor();
3692       }
3693       public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor()3694           getDescriptor() {
3695         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.getDescriptor().getEnumTypes().get(0);
3696       }
3697 
3698       private static final State[] VALUES = values();
3699 
valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc)3700       public static State valueOf(
3701           com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
3702         if (desc.getType() != getDescriptor()) {
3703           throw new java.lang.IllegalArgumentException(
3704             "EnumValueDescriptor is not for this type.");
3705         }
3706         return VALUES[desc.getIndex()];
3707       }
3708 
3709       private final int index;
3710       private final int value;
3711 
State(int index, int value)3712       private State(int index, int value) {
3713         this.index = index;
3714         this.value = value;
3715       }
3716 
3717       // @@protoc_insertion_point(enum_scope:SplitLogTask.State)
3718     }
3719 
    /**
     * Protobuf enum {@code SplitLogTask.RecoveryMode}
     */
    public enum RecoveryMode
        implements com.google.protobuf.ProtocolMessageEnum {
      /**
       * <code>UNKNOWN = 0;</code>
       */
      UNKNOWN(0, 0),
      /**
       * <code>LOG_SPLITTING = 1;</code>
       */
      LOG_SPLITTING(1, 1),
      /**
       * <code>LOG_REPLAY = 2;</code>
       */
      LOG_REPLAY(2, 2),
      ;

      /**
       * <code>UNKNOWN = 0;</code>
       */
      public static final int UNKNOWN_VALUE = 0;
      /**
       * <code>LOG_SPLITTING = 1;</code>
       */
      public static final int LOG_SPLITTING_VALUE = 1;
      /**
       * <code>LOG_REPLAY = 2;</code>
       */
      public static final int LOG_REPLAY_VALUE = 2;


      // Proto wire number of this constant.
      public final int getNumber() { return value; }

      // Resolves a wire number to a constant; returns null (not an exception)
      // for unrecognized numbers, as generated proto2 enums do.
      public static RecoveryMode valueOf(int value) {
        switch (value) {
          case 0: return UNKNOWN;
          case 1: return LOG_SPLITTING;
          case 2: return LOG_REPLAY;
          default: return null;
        }
      }

      // Map used by the protobuf runtime to resolve wire numbers to constants.
      public static com.google.protobuf.Internal.EnumLiteMap<RecoveryMode>
          internalGetValueMap() {
        return internalValueMap;
      }
      private static com.google.protobuf.Internal.EnumLiteMap<RecoveryMode>
          internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap<RecoveryMode>() {
              public RecoveryMode findValueByNumber(int number) {
                return RecoveryMode.valueOf(number);
              }
            };

      // Reflection support: descriptor for this constant, by declaration index.
      public final com.google.protobuf.Descriptors.EnumValueDescriptor
          getValueDescriptor() {
        return getDescriptor().getValues().get(index);
      }
      public final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptorForType() {
        return getDescriptor();
      }
      // RecoveryMode is the second (index 1) enum type declared inside SplitLogTask.
      public static final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.getDescriptor().getEnumTypes().get(1);
      }

      // Snapshot of constants in declaration order (indexed by descriptor index).
      private static final RecoveryMode[] VALUES = values();

      // Reflection variant of valueOf: maps an EnumValueDescriptor back to its constant.
      public static RecoveryMode valueOf(
          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
        if (desc.getType() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "EnumValueDescriptor is not for this type.");
        }
        return VALUES[desc.getIndex()];
      }

      // index: position in the descriptor's value list; value: proto wire number.
      private final int index;
      private final int value;

      private RecoveryMode(int index, int value) {
        this.index = index;
        this.value = value;
      }

      // @@protoc_insertion_point(enum_scope:SplitLogTask.RecoveryMode)
    }
3810 
    // Presence bits for the message fields: bit 0 = state, bit 1 = server_name,
    // bit 2 = mode (see the hasXxx() accessors below).
    private int bitField0_;
    // required .SplitLogTask.State state = 1;
    public static final int STATE_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State state_;
    /**
     * <code>required .SplitLogTask.State state = 1;</code>
     */
    public boolean hasState() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .SplitLogTask.State state = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State getState() {
      return state_;
    }
3827 
    // required .ServerName server_name = 2;
    public static final int SERVER_NAME_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_;
    /**
     * <code>required .ServerName server_name = 2;</code>
     */
    public boolean hasServerName() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>required .ServerName server_name = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() {
      return serverName_;
    }
    /**
     * <code>required .ServerName server_name = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() {
      // The message itself serves as its own OrBuilder view on an immutable instance.
      return serverName_;
    }
3849 
    // optional .SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN];
    public static final int MODE_FIELD_NUMBER = 3;
    private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode mode_;
    /**
     * <code>optional .SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN];</code>
     */
    public boolean hasMode() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional .SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN];</code>
     *
     * Returns RecoveryMode.UNKNOWN (the declared default) when the field is unset.
     */
    public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode getMode() {
      return mode_;
    }
3865 
initFields()3866     private void initFields() {
3867       state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.UNASSIGNED;
3868       serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
3869       mode_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode.UNKNOWN;
3870     }
    // Cached tri-state initialization result: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    // True only when both required fields (state, server_name) are present and the
    // nested ServerName is itself fully initialized.
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      if (!hasState()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!hasServerName()) {
        memoizedIsInitialized = 0;
        return false;
      }
      if (!getServerName().isInitialized()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }
3891 
writeTo(com.google.protobuf.CodedOutputStream output)3892     public void writeTo(com.google.protobuf.CodedOutputStream output)
3893                         throws java.io.IOException {
3894       getSerializedSize();
3895       if (((bitField0_ & 0x00000001) == 0x00000001)) {
3896         output.writeEnum(1, state_.getNumber());
3897       }
3898       if (((bitField0_ & 0x00000002) == 0x00000002)) {
3899         output.writeMessage(2, serverName_);
3900       }
3901       if (((bitField0_ & 0x00000004) == 0x00000004)) {
3902         output.writeEnum(3, mode_.getNumber());
3903       }
3904       getUnknownFields().writeTo(output);
3905     }
3906 
    // Cached wire size; -1 means not yet computed.
    private int memoizedSerializedSize = -1;
    // Sums the encoded size of each present field plus any unknown fields.
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(1, state_.getNumber());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, serverName_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(3, mode_.getNumber());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
3929 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; defers to the GeneratedMessage replacement object.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
3936 
    // Field-by-field equality: each field compared only when present on both sides
    // (presence flags must match too), plus unknown-field equality.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask) obj;

      boolean result = true;
      result = result && (hasState() == other.hasState());
      if (hasState()) {
        // Enum constants are singletons, so reference comparison is sufficient.
        result = result &&
            (getState() == other.getState());
      }
      result = result && (hasServerName() == other.hasServerName());
      if (hasServerName()) {
        result = result && getServerName()
            .equals(other.getServerName());
      }
      result = result && (hasMode() == other.hasMode());
      if (hasMode()) {
        result = result &&
            (getMode() == other.getMode());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
3967 
    // Cached hash; 0 doubles as the "not yet computed" sentinel, so a message whose
    // true hash is 0 would be recomputed on every call (benign).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasState()) {
        // Each present field mixes in its field number then its value hash.
        hash = (37 * hash) + STATE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getState());
      }
      if (hasServerName()) {
        hash = (37 * hash) + SERVER_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getServerName().hashCode();
      }
      if (hasMode()) {
        hash = (37 * hash) + MODE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getMode());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
3992 
parseFrom( com.google.protobuf.ByteString data)3993     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom(
3994         com.google.protobuf.ByteString data)
3995         throws com.google.protobuf.InvalidProtocolBufferException {
3996       return PARSER.parseFrom(data);
3997     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)3998     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom(
3999         com.google.protobuf.ByteString data,
4000         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4001         throws com.google.protobuf.InvalidProtocolBufferException {
4002       return PARSER.parseFrom(data, extensionRegistry);
4003     }
parseFrom(byte[] data)4004     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom(byte[] data)
4005         throws com.google.protobuf.InvalidProtocolBufferException {
4006       return PARSER.parseFrom(data);
4007     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4008     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom(
4009         byte[] data,
4010         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4011         throws com.google.protobuf.InvalidProtocolBufferException {
4012       return PARSER.parseFrom(data, extensionRegistry);
4013     }
parseFrom(java.io.InputStream input)4014     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom(java.io.InputStream input)
4015         throws java.io.IOException {
4016       return PARSER.parseFrom(input);
4017     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4018     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom(
4019         java.io.InputStream input,
4020         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4021         throws java.io.IOException {
4022       return PARSER.parseFrom(input, extensionRegistry);
4023     }
parseDelimitedFrom(java.io.InputStream input)4024     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseDelimitedFrom(java.io.InputStream input)
4025         throws java.io.IOException {
4026       return PARSER.parseDelimitedFrom(input);
4027     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4028     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseDelimitedFrom(
4029         java.io.InputStream input,
4030         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4031         throws java.io.IOException {
4032       return PARSER.parseDelimitedFrom(input, extensionRegistry);
4033     }
parseFrom( com.google.protobuf.CodedInputStream input)4034     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom(
4035         com.google.protobuf.CodedInputStream input)
4036         throws java.io.IOException {
4037       return PARSER.parseFrom(input);
4038     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4039     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parseFrom(
4040         com.google.protobuf.CodedInputStream input,
4041         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4042         throws java.io.IOException {
4043       return PARSER.parseFrom(input, extensionRegistry);
4044     }
4045 
newBuilder()4046     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()4047     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask prototype)4048     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask prototype) {
4049       return newBuilder().mergeFrom(prototype);
4050     }
toBuilder()4051     public Builder toBuilder() { return newBuilder(this); }
4052 
    // Runtime hook: creates a builder attached to a parent, so nested-builder
    // change notifications propagate upward.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
4059     /**
4060      * Protobuf type {@code SplitLogTask}
4061      *
4062      * <pre>
4063      **
4064      * WAL SplitLog directory znodes have this for content.  Used doing distributed
4065      * WAL splitting.  Holds current state and name of server that originated split.
4066      * </pre>
4067      */
4068     public static final class Builder extends
4069         com.google.protobuf.GeneratedMessage.Builder<Builder>
4070        implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTaskOrBuilder {
4071       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()4072           getDescriptor() {
4073         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_SplitLogTask_descriptor;
4074       }
4075 
4076       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()4077           internalGetFieldAccessorTable() {
4078         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_SplitLogTask_fieldAccessorTable
4079             .ensureFieldAccessorsInitialized(
4080                 org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.Builder.class);
4081       }
4082 
4083       // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.newBuilder()
Builder()4084       private Builder() {
4085         maybeForceBuilderInitialization();
4086       }
4087 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)4088       private Builder(
4089           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4090         super(parent);
4091         maybeForceBuilderInitialization();
4092       }
maybeForceBuilderInitialization()4093       private void maybeForceBuilderInitialization() {
4094         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
4095           getServerNameFieldBuilder();
4096         }
4097       }
create()4098       private static Builder create() {
4099         return new Builder();
4100       }
4101 
clear()4102       public Builder clear() {
4103         super.clear();
4104         state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.UNASSIGNED;
4105         bitField0_ = (bitField0_ & ~0x00000001);
4106         if (serverNameBuilder_ == null) {
4107           serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
4108         } else {
4109           serverNameBuilder_.clear();
4110         }
4111         bitField0_ = (bitField0_ & ~0x00000002);
4112         mode_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode.UNKNOWN;
4113         bitField0_ = (bitField0_ & ~0x00000004);
4114         return this;
4115       }
4116 
clone()4117       public Builder clone() {
4118         return create().mergeFrom(buildPartial());
4119       }
4120 
4121       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()4122           getDescriptorForType() {
4123         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_SplitLogTask_descriptor;
4124       }
4125 
getDefaultInstanceForType()4126       public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask getDefaultInstanceForType() {
4127         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.getDefaultInstance();
4128       }
4129 
build()4130       public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask build() {
4131         org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask result = buildPartial();
4132         if (!result.isInitialized()) {
4133           throw newUninitializedMessageException(result);
4134         }
4135         return result;
4136       }
4137 
buildPartial()4138       public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask buildPartial() {
4139         org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask(this);
4140         int from_bitField0_ = bitField0_;
4141         int to_bitField0_ = 0;
4142         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
4143           to_bitField0_ |= 0x00000001;
4144         }
4145         result.state_ = state_;
4146         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
4147           to_bitField0_ |= 0x00000002;
4148         }
4149         if (serverNameBuilder_ == null) {
4150           result.serverName_ = serverName_;
4151         } else {
4152           result.serverName_ = serverNameBuilder_.build();
4153         }
4154         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
4155           to_bitField0_ |= 0x00000004;
4156         }
4157         result.mode_ = mode_;
4158         result.bitField0_ = to_bitField0_;
4159         onBuilt();
4160         return result;
4161       }
4162 
mergeFrom(com.google.protobuf.Message other)4163       public Builder mergeFrom(com.google.protobuf.Message other) {
4164         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask) {
4165           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask)other);
4166         } else {
4167           super.mergeFrom(other);
4168           return this;
4169         }
4170       }
4171 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask other)4172       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask other) {
4173         if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.getDefaultInstance()) return this;
4174         if (other.hasState()) {
4175           setState(other.getState());
4176         }
4177         if (other.hasServerName()) {
4178           mergeServerName(other.getServerName());
4179         }
4180         if (other.hasMode()) {
4181           setMode(other.getMode());
4182         }
4183         this.mergeUnknownFields(other.getUnknownFields());
4184         return this;
4185       }
4186 
isInitialized()4187       public final boolean isInitialized() {
4188         if (!hasState()) {
4189 
4190           return false;
4191         }
4192         if (!hasServerName()) {
4193 
4194           return false;
4195         }
4196         if (!getServerName().isInitialized()) {
4197 
4198           return false;
4199         }
4200         return true;
4201       }
4202 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4203       public Builder mergeFrom(
4204           com.google.protobuf.CodedInputStream input,
4205           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4206           throws java.io.IOException {
4207         org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask parsedMessage = null;
4208         try {
4209           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
4210         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4211           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask) e.getUnfinishedMessage();
4212           throw e;
4213         } finally {
4214           if (parsedMessage != null) {
4215             mergeFrom(parsedMessage);
4216           }
4217         }
4218         return this;
4219       }
4220       private int bitField0_;
4221 
4222       // required .SplitLogTask.State state = 1;
4223       private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.UNASSIGNED;
4224       /**
4225        * <code>required .SplitLogTask.State state = 1;</code>
4226        */
hasState()4227       public boolean hasState() {
4228         return ((bitField0_ & 0x00000001) == 0x00000001);
4229       }
4230       /**
4231        * <code>required .SplitLogTask.State state = 1;</code>
4232        */
getState()4233       public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State getState() {
4234         return state_;
4235       }
4236       /**
4237        * <code>required .SplitLogTask.State state = 1;</code>
4238        */
setState(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State value)4239       public Builder setState(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State value) {
4240         if (value == null) {
4241           throw new NullPointerException();
4242         }
4243         bitField0_ |= 0x00000001;
4244         state_ = value;
4245         onChanged();
4246         return this;
4247       }
4248       /**
4249        * <code>required .SplitLogTask.State state = 1;</code>
4250        */
clearState()4251       public Builder clearState() {
4252         bitField0_ = (bitField0_ & ~0x00000001);
4253         state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.State.UNASSIGNED;
4254         onChanged();
4255         return this;
4256       }
4257 
4258       // required .ServerName server_name = 2;
4259       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
4260       private com.google.protobuf.SingleFieldBuilder<
4261           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> serverNameBuilder_;
4262       /**
4263        * <code>required .ServerName server_name = 2;</code>
4264        */
hasServerName()4265       public boolean hasServerName() {
4266         return ((bitField0_ & 0x00000002) == 0x00000002);
4267       }
4268       /**
4269        * <code>required .ServerName server_name = 2;</code>
4270        */
getServerName()4271       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getServerName() {
4272         if (serverNameBuilder_ == null) {
4273           return serverName_;
4274         } else {
4275           return serverNameBuilder_.getMessage();
4276         }
4277       }
4278       /**
4279        * <code>required .ServerName server_name = 2;</code>
4280        */
setServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value)4281       public Builder setServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
4282         if (serverNameBuilder_ == null) {
4283           if (value == null) {
4284             throw new NullPointerException();
4285           }
4286           serverName_ = value;
4287           onChanged();
4288         } else {
4289           serverNameBuilder_.setMessage(value);
4290         }
4291         bitField0_ |= 0x00000002;
4292         return this;
4293       }
4294       /**
4295        * <code>required .ServerName server_name = 2;</code>
4296        */
setServerName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue)4297       public Builder setServerName(
4298           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
4299         if (serverNameBuilder_ == null) {
4300           serverName_ = builderForValue.build();
4301           onChanged();
4302         } else {
4303           serverNameBuilder_.setMessage(builderForValue.build());
4304         }
4305         bitField0_ |= 0x00000002;
4306         return this;
4307       }
4308       /**
4309        * <code>required .ServerName server_name = 2;</code>
4310        */
mergeServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value)4311       public Builder mergeServerName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
4312         if (serverNameBuilder_ == null) {
4313           if (((bitField0_ & 0x00000002) == 0x00000002) &&
4314               serverName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) {
4315             serverName_ =
4316               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(serverName_).mergeFrom(value).buildPartial();
4317           } else {
4318             serverName_ = value;
4319           }
4320           onChanged();
4321         } else {
4322           serverNameBuilder_.mergeFrom(value);
4323         }
4324         bitField0_ |= 0x00000002;
4325         return this;
4326       }
4327       /**
4328        * <code>required .ServerName server_name = 2;</code>
4329        */
clearServerName()4330       public Builder clearServerName() {
4331         if (serverNameBuilder_ == null) {
4332           serverName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
4333           onChanged();
4334         } else {
4335           serverNameBuilder_.clear();
4336         }
4337         bitField0_ = (bitField0_ & ~0x00000002);
4338         return this;
4339       }
4340       /**
4341        * <code>required .ServerName server_name = 2;</code>
4342        */
getServerNameBuilder()4343       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getServerNameBuilder() {
4344         bitField0_ |= 0x00000002;
4345         onChanged();
4346         return getServerNameFieldBuilder().getBuilder();
4347       }
4348       /**
4349        * <code>required .ServerName server_name = 2;</code>
4350        */
getServerNameOrBuilder()4351       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getServerNameOrBuilder() {
4352         if (serverNameBuilder_ != null) {
4353           return serverNameBuilder_.getMessageOrBuilder();
4354         } else {
4355           return serverName_;
4356         }
4357       }
4358       /**
4359        * <code>required .ServerName server_name = 2;</code>
4360        */
4361       private com.google.protobuf.SingleFieldBuilder<
4362           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>
getServerNameFieldBuilder()4363           getServerNameFieldBuilder() {
4364         if (serverNameBuilder_ == null) {
4365           serverNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
4366               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>(
4367                   serverName_,
4368                   getParentForChildren(),
4369                   isClean());
4370           serverName_ = null;
4371         }
4372         return serverNameBuilder_;
4373       }
4374 
4375       // optional .SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN];
4376       private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode mode_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode.UNKNOWN;
4377       /**
4378        * <code>optional .SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN];</code>
4379        */
hasMode()4380       public boolean hasMode() {
4381         return ((bitField0_ & 0x00000004) == 0x00000004);
4382       }
4383       /**
4384        * <code>optional .SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN];</code>
4385        */
getMode()4386       public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode getMode() {
4387         return mode_;
4388       }
4389       /**
4390        * <code>optional .SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN];</code>
4391        */
setMode(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode value)4392       public Builder setMode(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode value) {
4393         if (value == null) {
4394           throw new NullPointerException();
4395         }
4396         bitField0_ |= 0x00000004;
4397         mode_ = value;
4398         onChanged();
4399         return this;
4400       }
4401       /**
4402        * <code>optional .SplitLogTask.RecoveryMode mode = 3 [default = UNKNOWN];</code>
4403        */
clearMode()4404       public Builder clearMode() {
4405         bitField0_ = (bitField0_ & ~0x00000004);
4406         mode_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.SplitLogTask.RecoveryMode.UNKNOWN;
4407         onChanged();
4408         return this;
4409       }
4410 
4411       // @@protoc_insertion_point(builder_scope:SplitLogTask)
4412     }
4413 
    static {
      // Eagerly build the shared singleton returned by getDefaultInstance().
      defaultInstance = new SplitLogTask(true);
      defaultInstance.initFields();
    }
4418 
4419     // @@protoc_insertion_point(class_scope:SplitLogTask)
4420   }
4421 
  /**
   * Read-side accessor interface for the {@code Table} protobuf message,
   * implemented by both the immutable message class and its builder.
   */
  public interface TableOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .Table.State state = 1 [default = ENABLED];
    /**
     * <code>required .Table.State state = 1 [default = ENABLED];</code>
     *
     * <pre>
     * This is the table's state.  If no znode for a table,
     * its state is presumed enabled.  See o.a.h.h.zookeeper.ZKTable class
     * for more.
     * </pre>
     *
     * @return true if the required 'state' field has been set
     */
    boolean hasState();
    /**
     * <code>required .Table.State state = 1 [default = ENABLED];</code>
     *
     * <pre>
     * This is the table's state.  If no znode for a table,
     * its state is presumed enabled.  See o.a.h.h.zookeeper.ZKTable class
     * for more.
     * </pre>
     *
     * @return the table's state; the declared default is ENABLED
     */
    org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State getState();
  }
4447   /**
4448    * Protobuf type {@code Table}
4449    *
4450    * <pre>
4451    **
4452    * The znode that holds state of table.
4453    * </pre>
4454    */
4455   public static final class Table extends
4456       com.google.protobuf.GeneratedMessage
4457       implements TableOrBuilder {
4458     // Use Table.newBuilder() to construct.
Table(com.google.protobuf.GeneratedMessage.Builder<?> builder)4459     private Table(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
4460       super(builder);
4461       this.unknownFields = builder.getUnknownFields();
4462     }
Table(boolean noInit)4463     private Table(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
4464 
4465     private static final Table defaultInstance;
getDefaultInstance()4466     public static Table getDefaultInstance() {
4467       return defaultInstance;
4468     }
4469 
getDefaultInstanceForType()4470     public Table getDefaultInstanceForType() {
4471       return defaultInstance;
4472     }
4473 
4474     private final com.google.protobuf.UnknownFieldSet unknownFields;
4475     @java.lang.Override
4476     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()4477         getUnknownFields() {
4478       return this.unknownFields;
4479     }
Table( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4480     private Table(
4481         com.google.protobuf.CodedInputStream input,
4482         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4483         throws com.google.protobuf.InvalidProtocolBufferException {
4484       initFields();
4485       int mutable_bitField0_ = 0;
4486       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
4487           com.google.protobuf.UnknownFieldSet.newBuilder();
4488       try {
4489         boolean done = false;
4490         while (!done) {
4491           int tag = input.readTag();
4492           switch (tag) {
4493             case 0:
4494               done = true;
4495               break;
4496             default: {
4497               if (!parseUnknownField(input, unknownFields,
4498                                      extensionRegistry, tag)) {
4499                 done = true;
4500               }
4501               break;
4502             }
4503             case 8: {
4504               int rawValue = input.readEnum();
4505               org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State value = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State.valueOf(rawValue);
4506               if (value == null) {
4507                 unknownFields.mergeVarintField(1, rawValue);
4508               } else {
4509                 bitField0_ |= 0x00000001;
4510                 state_ = value;
4511               }
4512               break;
4513             }
4514           }
4515         }
4516       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4517         throw e.setUnfinishedMessage(this);
4518       } catch (java.io.IOException e) {
4519         throw new com.google.protobuf.InvalidProtocolBufferException(
4520             e.getMessage()).setUnfinishedMessage(this);
4521       } finally {
4522         this.unknownFields = unknownFields.build();
4523         makeExtensionsImmutable();
4524       }
4525     }
4526     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()4527         getDescriptor() {
4528       return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Table_descriptor;
4529     }
4530 
4531     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()4532         internalGetFieldAccessorTable() {
4533       return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Table_fieldAccessorTable
4534           .ensureFieldAccessorsInitialized(
4535               org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.Builder.class);
4536     }
4537 
4538     public static com.google.protobuf.Parser<Table> PARSER =
4539         new com.google.protobuf.AbstractParser<Table>() {
4540       public Table parsePartialFrom(
4541           com.google.protobuf.CodedInputStream input,
4542           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4543           throws com.google.protobuf.InvalidProtocolBufferException {
4544         return new Table(input, extensionRegistry);
4545       }
4546     };
4547 
4548     @java.lang.Override
getParserForType()4549     public com.google.protobuf.Parser<Table> getParserForType() {
4550       return PARSER;
4551     }
4552 
4553     /**
4554      * Protobuf enum {@code Table.State}
4555      *
4556      * <pre>
4557      * Table's current state
4558      * </pre>
4559      */
4560     public enum State
4561         implements com.google.protobuf.ProtocolMessageEnum {
4562       /**
4563        * <code>ENABLED = 0;</code>
4564        */
4565       ENABLED(0, 0),
4566       /**
4567        * <code>DISABLED = 1;</code>
4568        */
4569       DISABLED(1, 1),
4570       /**
4571        * <code>DISABLING = 2;</code>
4572        */
4573       DISABLING(2, 2),
4574       /**
4575        * <code>ENABLING = 3;</code>
4576        */
4577       ENABLING(3, 3),
4578       ;
4579 
4580       /**
4581        * <code>ENABLED = 0;</code>
4582        */
4583       public static final int ENABLED_VALUE = 0;
4584       /**
4585        * <code>DISABLED = 1;</code>
4586        */
4587       public static final int DISABLED_VALUE = 1;
4588       /**
4589        * <code>DISABLING = 2;</code>
4590        */
4591       public static final int DISABLING_VALUE = 2;
4592       /**
4593        * <code>ENABLING = 3;</code>
4594        */
4595       public static final int ENABLING_VALUE = 3;
4596 
4597 
getNumber()4598       public final int getNumber() { return value; }
4599 
valueOf(int value)4600       public static State valueOf(int value) {
4601         switch (value) {
4602           case 0: return ENABLED;
4603           case 1: return DISABLED;
4604           case 2: return DISABLING;
4605           case 3: return ENABLING;
4606           default: return null;
4607         }
4608       }
4609 
4610       public static com.google.protobuf.Internal.EnumLiteMap<State>
internalGetValueMap()4611           internalGetValueMap() {
4612         return internalValueMap;
4613       }
4614       private static com.google.protobuf.Internal.EnumLiteMap<State>
4615           internalValueMap =
4616             new com.google.protobuf.Internal.EnumLiteMap<State>() {
4617               public State findValueByNumber(int number) {
4618                 return State.valueOf(number);
4619               }
4620             };
4621 
4622       public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor()4623           getValueDescriptor() {
4624         return getDescriptor().getValues().get(index);
4625       }
4626       public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType()4627           getDescriptorForType() {
4628         return getDescriptor();
4629       }
4630       public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor()4631           getDescriptor() {
4632         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.getDescriptor().getEnumTypes().get(0);
4633       }
4634 
4635       private static final State[] VALUES = values();
4636 
valueOf( com.google.protobuf.Descriptors.EnumValueDescriptor desc)4637       public static State valueOf(
4638           com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
4639         if (desc.getType() != getDescriptor()) {
4640           throw new java.lang.IllegalArgumentException(
4641             "EnumValueDescriptor is not for this type.");
4642         }
4643         return VALUES[desc.getIndex()];
4644       }
4645 
4646       private final int index;
4647       private final int value;
4648 
State(int index, int value)4649       private State(int index, int value) {
4650         this.index = index;
4651         this.value = value;
4652       }
4653 
4654       // @@protoc_insertion_point(enum_scope:Table.State)
4655     }
4656 
4657     private int bitField0_;
4658     // required .Table.State state = 1 [default = ENABLED];
4659     public static final int STATE_FIELD_NUMBER = 1;
4660     private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State state_;
4661     /**
4662      * <code>required .Table.State state = 1 [default = ENABLED];</code>
4663      *
4664      * <pre>
4665      * This is the table's state.  If no znode for a table,
4666      * its state is presumed enabled.  See o.a.h.h.zookeeper.ZKTable class
4667      * for more.
4668      * </pre>
4669      */
hasState()4670     public boolean hasState() {
4671       return ((bitField0_ & 0x00000001) == 0x00000001);
4672     }
4673     /**
4674      * <code>required .Table.State state = 1 [default = ENABLED];</code>
4675      *
4676      * <pre>
4677      * This is the table's state.  If no znode for a table,
4678      * its state is presumed enabled.  See o.a.h.h.zookeeper.ZKTable class
4679      * for more.
4680      * </pre>
4681      */
getState()4682     public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State getState() {
4683       return state_;
4684     }
4685 
initFields()4686     private void initFields() {
4687       state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State.ENABLED;
4688     }
4689     private byte memoizedIsInitialized = -1;
isInitialized()4690     public final boolean isInitialized() {
4691       byte isInitialized = memoizedIsInitialized;
4692       if (isInitialized != -1) return isInitialized == 1;
4693 
4694       if (!hasState()) {
4695         memoizedIsInitialized = 0;
4696         return false;
4697       }
4698       memoizedIsInitialized = 1;
4699       return true;
4700     }
4701 
writeTo(com.google.protobuf.CodedOutputStream output)4702     public void writeTo(com.google.protobuf.CodedOutputStream output)
4703                         throws java.io.IOException {
4704       getSerializedSize();
4705       if (((bitField0_ & 0x00000001) == 0x00000001)) {
4706         output.writeEnum(1, state_.getNumber());
4707       }
4708       getUnknownFields().writeTo(output);
4709     }
4710 
4711     private int memoizedSerializedSize = -1;
getSerializedSize()4712     public int getSerializedSize() {
4713       int size = memoizedSerializedSize;
4714       if (size != -1) return size;
4715 
4716       size = 0;
4717       if (((bitField0_ & 0x00000001) == 0x00000001)) {
4718         size += com.google.protobuf.CodedOutputStream
4719           .computeEnumSize(1, state_.getNumber());
4720       }
4721       size += getUnknownFields().getSerializedSize();
4722       memoizedSerializedSize = size;
4723       return size;
4724     }
4725 
4726     private static final long serialVersionUID = 0L;
4727     @java.lang.Override
writeReplace()4728     protected java.lang.Object writeReplace()
4729         throws java.io.ObjectStreamException {
4730       return super.writeReplace();
4731     }
4732 
4733     @java.lang.Override
equals(final java.lang.Object obj)4734     public boolean equals(final java.lang.Object obj) {
4735       if (obj == this) {
4736        return true;
4737       }
4738       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table)) {
4739         return super.equals(obj);
4740       }
4741       org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table) obj;
4742 
4743       boolean result = true;
4744       result = result && (hasState() == other.hasState());
4745       if (hasState()) {
4746         result = result &&
4747             (getState() == other.getState());
4748       }
4749       result = result &&
4750           getUnknownFields().equals(other.getUnknownFields());
4751       return result;
4752     }
4753 
4754     private int memoizedHashCode = 0;
4755     @java.lang.Override
hashCode()4756     public int hashCode() {
4757       if (memoizedHashCode != 0) {
4758         return memoizedHashCode;
4759       }
4760       int hash = 41;
4761       hash = (19 * hash) + getDescriptorForType().hashCode();
4762       if (hasState()) {
4763         hash = (37 * hash) + STATE_FIELD_NUMBER;
4764         hash = (53 * hash) + hashEnum(getState());
4765       }
4766       hash = (29 * hash) + getUnknownFields().hashCode();
4767       memoizedHashCode = hash;
4768       return hash;
4769     }
4770 
parseFrom( com.google.protobuf.ByteString data)4771     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom(
4772         com.google.protobuf.ByteString data)
4773         throws com.google.protobuf.InvalidProtocolBufferException {
4774       return PARSER.parseFrom(data);
4775     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4776     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom(
4777         com.google.protobuf.ByteString data,
4778         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4779         throws com.google.protobuf.InvalidProtocolBufferException {
4780       return PARSER.parseFrom(data, extensionRegistry);
4781     }
parseFrom(byte[] data)4782     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom(byte[] data)
4783         throws com.google.protobuf.InvalidProtocolBufferException {
4784       return PARSER.parseFrom(data);
4785     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4786     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom(
4787         byte[] data,
4788         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4789         throws com.google.protobuf.InvalidProtocolBufferException {
4790       return PARSER.parseFrom(data, extensionRegistry);
4791     }
parseFrom(java.io.InputStream input)4792     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom(java.io.InputStream input)
4793         throws java.io.IOException {
4794       return PARSER.parseFrom(input);
4795     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4796     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom(
4797         java.io.InputStream input,
4798         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4799         throws java.io.IOException {
4800       return PARSER.parseFrom(input, extensionRegistry);
4801     }
parseDelimitedFrom(java.io.InputStream input)4802     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseDelimitedFrom(java.io.InputStream input)
4803         throws java.io.IOException {
4804       return PARSER.parseDelimitedFrom(input);
4805     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4806     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseDelimitedFrom(
4807         java.io.InputStream input,
4808         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4809         throws java.io.IOException {
4810       return PARSER.parseDelimitedFrom(input, extensionRegistry);
4811     }
parseFrom( com.google.protobuf.CodedInputStream input)4812     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom(
4813         com.google.protobuf.CodedInputStream input)
4814         throws java.io.IOException {
4815       return PARSER.parseFrom(input);
4816     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4817     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parseFrom(
4818         com.google.protobuf.CodedInputStream input,
4819         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4820         throws java.io.IOException {
4821       return PARSER.parseFrom(input, extensionRegistry);
4822     }
4823 
newBuilder()4824     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()4825     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table prototype)4826     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table prototype) {
4827       return newBuilder().mergeFrom(prototype);
4828     }
toBuilder()4829     public Builder toBuilder() { return newBuilder(this); }
4830 
4831     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)4832     protected Builder newBuilderForType(
4833         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4834       Builder builder = new Builder(parent);
4835       return builder;
4836     }
4837     /**
4838      * Protobuf type {@code Table}
4839      *
4840      * <pre>
4841      **
4842      * The znode that holds state of table.
4843      * </pre>
4844      */
4845     public static final class Builder extends
4846         com.google.protobuf.GeneratedMessage.Builder<Builder>
4847        implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableOrBuilder {
4848       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()4849           getDescriptor() {
4850         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Table_descriptor;
4851       }
4852 
4853       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()4854           internalGetFieldAccessorTable() {
4855         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Table_fieldAccessorTable
4856             .ensureFieldAccessorsInitialized(
4857                 org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.Builder.class);
4858       }
4859 
4860       // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.newBuilder()
Builder()4861       private Builder() {
4862         maybeForceBuilderInitialization();
4863       }
4864 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)4865       private Builder(
4866           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
4867         super(parent);
4868         maybeForceBuilderInitialization();
4869       }
maybeForceBuilderInitialization()4870       private void maybeForceBuilderInitialization() {
4871         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
4872         }
4873       }
create()4874       private static Builder create() {
4875         return new Builder();
4876       }
4877 
clear()4878       public Builder clear() {
4879         super.clear();
4880         state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State.ENABLED;
4881         bitField0_ = (bitField0_ & ~0x00000001);
4882         return this;
4883       }
4884 
clone()4885       public Builder clone() {
4886         return create().mergeFrom(buildPartial());
4887       }
4888 
4889       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()4890           getDescriptorForType() {
4891         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_Table_descriptor;
4892       }
4893 
getDefaultInstanceForType()4894       public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table getDefaultInstanceForType() {
4895         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.getDefaultInstance();
4896       }
4897 
build()4898       public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table build() {
4899         org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table result = buildPartial();
4900         if (!result.isInitialized()) {
4901           throw newUninitializedMessageException(result);
4902         }
4903         return result;
4904       }
4905 
buildPartial()4906       public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table buildPartial() {
4907         org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table(this);
4908         int from_bitField0_ = bitField0_;
4909         int to_bitField0_ = 0;
4910         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
4911           to_bitField0_ |= 0x00000001;
4912         }
4913         result.state_ = state_;
4914         result.bitField0_ = to_bitField0_;
4915         onBuilt();
4916         return result;
4917       }
4918 
mergeFrom(com.google.protobuf.Message other)4919       public Builder mergeFrom(com.google.protobuf.Message other) {
4920         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table) {
4921           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table)other);
4922         } else {
4923           super.mergeFrom(other);
4924           return this;
4925         }
4926       }
4927 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table other)4928       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table other) {
4929         if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.getDefaultInstance()) return this;
4930         if (other.hasState()) {
4931           setState(other.getState());
4932         }
4933         this.mergeUnknownFields(other.getUnknownFields());
4934         return this;
4935       }
4936 
isInitialized()4937       public final boolean isInitialized() {
4938         if (!hasState()) {
4939 
4940           return false;
4941         }
4942         return true;
4943       }
4944 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)4945       public Builder mergeFrom(
4946           com.google.protobuf.CodedInputStream input,
4947           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
4948           throws java.io.IOException {
4949         org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table parsedMessage = null;
4950         try {
4951           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
4952         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
4953           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table) e.getUnfinishedMessage();
4954           throw e;
4955         } finally {
4956           if (parsedMessage != null) {
4957             mergeFrom(parsedMessage);
4958           }
4959         }
4960         return this;
4961       }
4962       private int bitField0_;
4963 
4964       // required .Table.State state = 1 [default = ENABLED];
4965       private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State.ENABLED;
4966       /**
4967        * <code>required .Table.State state = 1 [default = ENABLED];</code>
4968        *
4969        * <pre>
4970        * This is the table's state.  If no znode for a table,
4971        * its state is presumed enabled.  See o.a.h.h.zookeeper.ZKTable class
4972        * for more.
4973        * </pre>
4974        */
hasState()4975       public boolean hasState() {
4976         return ((bitField0_ & 0x00000001) == 0x00000001);
4977       }
4978       /**
4979        * <code>required .Table.State state = 1 [default = ENABLED];</code>
4980        *
4981        * <pre>
4982        * This is the table's state.  If no znode for a table,
4983        * its state is presumed enabled.  See o.a.h.h.zookeeper.ZKTable class
4984        * for more.
4985        * </pre>
4986        */
getState()4987       public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State getState() {
4988         return state_;
4989       }
4990       /**
4991        * <code>required .Table.State state = 1 [default = ENABLED];</code>
4992        *
4993        * <pre>
4994        * This is the table's state.  If no znode for a table,
4995        * its state is presumed enabled.  See o.a.h.h.zookeeper.ZKTable class
4996        * for more.
4997        * </pre>
4998        */
setState(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State value)4999       public Builder setState(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State value) {
5000         if (value == null) {
5001           throw new NullPointerException();
5002         }
5003         bitField0_ |= 0x00000001;
5004         state_ = value;
5005         onChanged();
5006         return this;
5007       }
5008       /**
5009        * <code>required .Table.State state = 1 [default = ENABLED];</code>
5010        *
5011        * <pre>
5012        * This is the table's state.  If no znode for a table,
5013        * its state is presumed enabled.  See o.a.h.h.zookeeper.ZKTable class
5014        * for more.
5015        * </pre>
5016        */
clearState()5017       public Builder clearState() {
5018         bitField0_ = (bitField0_ & ~0x00000001);
5019         state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.Table.State.ENABLED;
5020         onChanged();
5021         return this;
5022       }
5023 
5024       // @@protoc_insertion_point(builder_scope:Table)
5025     }
5026 
5027     static {
5028       defaultInstance = new Table(true);
defaultInstance.initFields()5029       defaultInstance.initFields();
5030     }
5031 
5032     // @@protoc_insertion_point(class_scope:Table)
5033   }
5034 
  /**
   * Read-side accessor interface for the {@code ReplicationPeer} protobuf
   * message, implemented by both the immutable message class and its builder.
   */
  public interface ReplicationPeerOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required string clusterkey = 1;
    /**
     * <code>required string clusterkey = 1;</code>
     *
     * <pre>
     * clusterkey is the concatenation of the slave cluster's
     * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
     * </pre>
     */
    boolean hasClusterkey();
    /**
     * <code>required string clusterkey = 1;</code>
     *
     * <pre>
     * clusterkey is the concatenation of the slave cluster's
     * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
     * </pre>
     */
    java.lang.String getClusterkey();
    /**
     * <code>required string clusterkey = 1;</code>
     *
     * <pre>
     * clusterkey is the concatenation of the slave cluster's
     * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
     * </pre>
     */
    com.google.protobuf.ByteString
        getClusterkeyBytes();

    // optional string replicationEndpointImpl = 2;
    /**
     * <code>optional string replicationEndpointImpl = 2;</code>
     */
    boolean hasReplicationEndpointImpl();
    /**
     * <code>optional string replicationEndpointImpl = 2;</code>
     */
    java.lang.String getReplicationEndpointImpl();
    /**
     * <code>optional string replicationEndpointImpl = 2;</code>
     */
    com.google.protobuf.ByteString
        getReplicationEndpointImplBytes();

    // repeated .BytesBytesPair data = 3;
    /**
     * <code>repeated .BytesBytesPair data = 3;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>
        getDataList();
    /**
     * <code>repeated .BytesBytesPair data = 3;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getData(int index);
    /**
     * <code>repeated .BytesBytesPair data = 3;</code>
     */
    int getDataCount();
    /**
     * <code>repeated .BytesBytesPair data = 3;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
        getDataOrBuilderList();
    /**
     * <code>repeated .BytesBytesPair data = 3;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getDataOrBuilder(
        int index);

    // repeated .NameStringPair configuration = 4;
    /**
     * <code>repeated .NameStringPair configuration = 4;</code>
     */
    java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>
        getConfigurationList();
    /**
     * <code>repeated .NameStringPair configuration = 4;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index);
    /**
     * <code>repeated .NameStringPair configuration = 4;</code>
     */
    int getConfigurationCount();
    /**
     * <code>repeated .NameStringPair configuration = 4;</code>
     */
    java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
        getConfigurationOrBuilderList();
    /**
     * <code>repeated .NameStringPair configuration = 4;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder(
        int index);
  }
5133   /**
5134    * Protobuf type {@code ReplicationPeer}
5135    *
5136    * <pre>
5137    **
5138    * Used by replication. Holds a replication peer key.
5139    * </pre>
5140    */
5141   public static final class ReplicationPeer extends
5142       com.google.protobuf.GeneratedMessage
5143       implements ReplicationPeerOrBuilder {
    // Use ReplicationPeer.newBuilder() to construct.
    // Builder-based constructor: copies the unknown-field set accumulated by
    // the builder into this immutable message instance.
    private ReplicationPeer(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // No-init constructor; installs an empty unknown-field set. Presumably
    // used only for the shared default instance created elsewhere in the
    // generated class — TODO confirm against the static initializer.
    private ReplicationPeer(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
5150 
    // Shared immutable default instance; assigned in a static initializer
    // that is outside this view.
    private static final ReplicationPeer defaultInstance;
    public static ReplicationPeer getDefaultInstance() {
      return defaultInstance;
    }

    public ReplicationPeer getDefaultInstanceForType() {
      return defaultInstance;
    }
5159 
    // Fields that were present on the wire but are not defined in this
    // message's schema; preserved so re-serialization round-trips them.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs from the input
    // stream until EOF (tag 0) and populates this message. Each case label is
    // the encoded tag (field_number << 3 | wire_type); all four fields here
    // use wire type 2 (length-delimited).
    private ReplicationPeer(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of the message.
              done = true;
              break;
            default: {
              // Unrecognized field: stash it in unknownFields; stop if the
              // parser reports end of input/group.
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1: required string clusterkey (kept as ByteString,
              // decoded lazily by getClusterkey()).
              bitField0_ |= 0x00000001;
              clusterkey_ = input.readBytes();
              break;
            }
            case 18: {
              // Field 2: optional string replicationEndpointImpl.
              bitField0_ |= 0x00000002;
              replicationEndpointImpl_ = input.readBytes();
              break;
            }
            case 26: {
              // Field 3: repeated BytesBytesPair data. The list is created
              // lazily on first occurrence; mutable_bitField0_ remembers that
              // it must be frozen in the finally block.
              if (!((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
                data_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>();
                mutable_bitField0_ |= 0x00000004;
              }
              data_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.PARSER, extensionRegistry));
              break;
            }
            case 34: {
              // Field 4: repeated NameStringPair configuration, same lazy
              // pattern as `data`.
              if (!((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
                configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>();
                mutable_bitField0_ |= 0x00000008;
              }
              configuration_.add(input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.PARSER, extensionRegistry));
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach the partially-built message so callers can inspect it.
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Freeze the repeated fields and unknown fields even on failure, so
        // the unfinished message handed to the exception is immutable.
        if (((mutable_bitField0_ & 0x00000004) == 0x00000004)) {
          data_ = java.util.Collections.unmodifiableList(data_);
        }
        if (((mutable_bitField0_ & 0x00000008) == 0x00000008)) {
          configuration_ = java.util.Collections.unmodifiableList(configuration_);
        }
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor plumbing: ties this generated class to the ReplicationPeer
    // entry of the compiled ZooKeeper.proto descriptor, enabling reflection.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationPeer_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationPeer_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.Builder.class);
    }
5244 
    // Singleton parser used by all the static parseFrom() helpers below; it
    // simply delegates to the wire-format parsing constructor.
    public static com.google.protobuf.Parser<ReplicationPeer> PARSER =
        new com.google.protobuf.AbstractParser<ReplicationPeer>() {
      public ReplicationPeer parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ReplicationPeer(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ReplicationPeer> getParserForType() {
      return PARSER;
    }
5259 
    // Presence bits for singular fields: bit 0x1 = clusterkey,
    // bit 0x2 = replicationEndpointImpl (see hasClusterkey /
    // hasReplicationEndpointImpl below).
    private int bitField0_;
    // required string clusterkey = 1;
    public static final int CLUSTERKEY_FIELD_NUMBER = 1;
    // Holds either a java.lang.String or a ByteString; converted lazily in
    // each direction by the two getters below.
    private java.lang.Object clusterkey_;
    /**
     * <code>required string clusterkey = 1;</code>
     *
     * <pre>
     * clusterkey is the concatenation of the slave cluster's
     * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
     * </pre>
     */
    public boolean hasClusterkey() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required string clusterkey = 1;</code>
     *
     * <pre>
     * clusterkey is the concatenation of the slave cluster's
     * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
     * </pre>
     */
    public java.lang.String getClusterkey() {
      java.lang.Object ref = clusterkey_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String only when the bytes were valid UTF-8, so
        // later calls skip re-decoding; invalid input stays a ByteString.
        if (bs.isValidUtf8()) {
          clusterkey_ = s;
        }
        return s;
      }
    }
    /**
     * <code>required string clusterkey = 1;</code>
     *
     * <pre>
     * clusterkey is the concatenation of the slave cluster's
     * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
     * </pre>
     */
    public com.google.protobuf.ByteString
        getClusterkeyBytes() {
      java.lang.Object ref = clusterkey_;
      if (ref instanceof java.lang.String) {
        // Encode and cache the ByteString form for serialization.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        clusterkey_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
5318 
    // optional string replicationEndpointImpl = 2;
    public static final int REPLICATIONENDPOINTIMPL_FIELD_NUMBER = 2;
    // String/ByteString lazy-conversion holder, same scheme as clusterkey_.
    private java.lang.Object replicationEndpointImpl_;
    /**
     * <code>optional string replicationEndpointImpl = 2;</code>
     */
    public boolean hasReplicationEndpointImpl() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional string replicationEndpointImpl = 2;</code>
     */
    public java.lang.String getReplicationEndpointImpl() {
      java.lang.Object ref = replicationEndpointImpl_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String only for valid UTF-8 bytes.
        if (bs.isValidUtf8()) {
          replicationEndpointImpl_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string replicationEndpointImpl = 2;</code>
     */
    public com.google.protobuf.ByteString
        getReplicationEndpointImplBytes() {
      java.lang.Object ref = replicationEndpointImpl_;
      if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        replicationEndpointImpl_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
5361 
    // repeated .BytesBytesPair data = 3;
    public static final int DATA_FIELD_NUMBER = 3;
    // Unmodifiable list (frozen by the parsing constructor / builder), so it
    // is safe to return directly from the accessors below.
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> data_;
    /**
     * <code>repeated .BytesBytesPair data = 3;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getDataList() {
      return data_;
    }
    /**
     * <code>repeated .BytesBytesPair data = 3;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
        getDataOrBuilderList() {
      return data_;
    }
    /**
     * <code>repeated .BytesBytesPair data = 3;</code>
     */
    public int getDataCount() {
      return data_.size();
    }
    /**
     * <code>repeated .BytesBytesPair data = 3;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getData(int index) {
      return data_.get(index);
    }
    /**
     * <code>repeated .BytesBytesPair data = 3;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getDataOrBuilder(
        int index) {
      return data_.get(index);
    }
5397 
    // repeated .NameStringPair configuration = 4;
    public static final int CONFIGURATION_FIELD_NUMBER = 4;
    // Unmodifiable list, same freezing discipline as data_.
    private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_;
    /**
     * <code>repeated .NameStringPair configuration = 4;</code>
     */
    public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() {
      return configuration_;
    }
    /**
     * <code>repeated .NameStringPair configuration = 4;</code>
     */
    public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
        getConfigurationOrBuilderList() {
      return configuration_;
    }
    /**
     * <code>repeated .NameStringPair configuration = 4;</code>
     */
    public int getConfigurationCount() {
      return configuration_.size();
    }
    /**
     * <code>repeated .NameStringPair configuration = 4;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) {
      return configuration_.get(index);
    }
    /**
     * <code>repeated .NameStringPair configuration = 4;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder(
        int index) {
      return configuration_.get(index);
    }
5433 
    // Resets every field to its proto default (empty strings, empty lists);
    // called before parsing begins.
    private void initFields() {
      clusterkey_ = "";
      replicationEndpointImpl_ = "";
      data_ = java.util.Collections.emptyList();
      configuration_ = java.util.Collections.emptyList();
    }
    // Memoized initialization check: -1 = not yet computed, 0 = false,
    // 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // The required clusterkey must be present...
      if (!hasClusterkey()) {
        memoizedIsInitialized = 0;
        return false;
      }
      // ...and every repeated sub-message must itself be initialized.
      for (int i = 0; i < getDataCount(); i++) {
        if (!getData(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      for (int i = 0; i < getConfigurationCount(); i++) {
        if (!getConfiguration(i).isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
5464 
    // Serializes set fields in field-number order, then any unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Called for its side effect of populating the memoized size used by
      // nested length-delimited encoding.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeBytes(1, getClusterkeyBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeBytes(2, getReplicationEndpointImplBytes());
      }
      for (int i = 0; i < data_.size(); i++) {
        output.writeMessage(3, data_.get(i));
      }
      for (int i = 0; i < configuration_.size(); i++) {
        output.writeMessage(4, configuration_.get(i));
      }
      getUnknownFields().writeTo(output);
    }
5482 
    // Memoized wire size; -1 means not yet computed. Safe to cache because
    // the message is immutable once built.
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(1, getClusterkeyBytes());
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(2, getReplicationEndpointImplBytes());
      }
      for (int i = 0; i < data_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(3, data_.get(i));
      }
      for (int i = 0; i < configuration_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(4, configuration_.get(i));
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
5509 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to GeneratedMessage's replacement
    // object so protobuf messages serialize via their wire form.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
5516 
5517     @java.lang.Override
equals(final java.lang.Object obj)5518     public boolean equals(final java.lang.Object obj) {
5519       if (obj == this) {
5520        return true;
5521       }
5522       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer)) {
5523         return super.equals(obj);
5524       }
5525       org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer) obj;
5526 
5527       boolean result = true;
5528       result = result && (hasClusterkey() == other.hasClusterkey());
5529       if (hasClusterkey()) {
5530         result = result && getClusterkey()
5531             .equals(other.getClusterkey());
5532       }
5533       result = result && (hasReplicationEndpointImpl() == other.hasReplicationEndpointImpl());
5534       if (hasReplicationEndpointImpl()) {
5535         result = result && getReplicationEndpointImpl()
5536             .equals(other.getReplicationEndpointImpl());
5537       }
5538       result = result && getDataList()
5539           .equals(other.getDataList());
5540       result = result && getConfigurationList()
5541           .equals(other.getConfigurationList());
5542       result = result &&
5543           getUnknownFields().equals(other.getUnknownFields());
5544       return result;
5545     }
5546 
    // Memoized hash (0 = not yet computed; a computed hash of exactly 0
    // would be recomputed each call, which is harmless).
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      // Standard generated-protobuf hash: fold in the descriptor, then each
      // present field as (field number, value) pairs, then unknown fields.
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasClusterkey()) {
        hash = (37 * hash) + CLUSTERKEY_FIELD_NUMBER;
        hash = (53 * hash) + getClusterkey().hashCode();
      }
      if (hasReplicationEndpointImpl()) {
        hash = (37 * hash) + REPLICATIONENDPOINTIMPL_FIELD_NUMBER;
        hash = (53 * hash) + getReplicationEndpointImpl().hashCode();
      }
      if (getDataCount() > 0) {
        hash = (37 * hash) + DATA_FIELD_NUMBER;
        hash = (53 * hash) + getDataList().hashCode();
      }
      if (getConfigurationCount() > 0) {
        hash = (37 * hash) + CONFIGURATION_FIELD_NUMBER;
        hash = (53 * hash) + getConfigurationList().hashCode();
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
5575 
    // Static parse helpers for the common input sources (ByteString, byte[],
    // InputStream, CodedInputStream, delimited streams), with and without an
    // extension registry. All delegate to the shared PARSER singleton.
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
5628 
    // Builder factories: empty builder, builder pre-populated from a
    // prototype message, and toBuilder() for this instance.
    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    // Framework hook: builder attached to a parent for change notification.
    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
5642     /**
5643      * Protobuf type {@code ReplicationPeer}
5644      *
5645      * <pre>
5646      **
5647      * Used by replication. Holds a replication peer key.
5648      * </pre>
5649      */
5650     public static final class Builder extends
5651         com.google.protobuf.GeneratedMessage.Builder<Builder>
5652        implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeerOrBuilder {
      // Descriptor plumbing for the Builder; mirrors the message class.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationPeer_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationPeer_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.Builder.class);
      }
5664 
      // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      // Builder attached to a parent, so field changes propagate upward.
      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // Eagerly creates the repeated-field builders when the runtime is
      // configured to always use field builders (reflection-heavy mode).
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
          getDataFieldBuilder();
          getConfigurationFieldBuilder();
        }
      }
      private static Builder create() {
        return new Builder();
      }
5684 
      // Resets every field to its default and clears the presence bits.
      // Repeated fields are cleared either directly (list mode) or through
      // their field builder when one has been created.
      public Builder clear() {
        super.clear();
        clusterkey_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        replicationEndpointImpl_ = "";
        bitField0_ = (bitField0_ & ~0x00000002);
        if (dataBuilder_ == null) {
          data_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000004);
        } else {
          dataBuilder_.clear();
        }
        if (configurationBuilder_ == null) {
          configuration_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000008);
        } else {
          configurationBuilder_.clear();
        }
        return this;
      }
5705 
      // Deep copy via a fresh builder merged with this builder's partial
      // message.
      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationPeer_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.getDefaultInstance();
      }
5718 
      // Builds the message, rejecting it if required fields (clusterkey) or
      // nested messages are missing/uninitialized.
      public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer build() {
        org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }
5726 
      // Builds without the initialization check. Copies the builder's
      // presence bits into the result, and freezes repeated lists in place
      // (clearing the builder's ownership bit so later builder mutation
      // re-copies instead of mutating the shared list).
      public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.clusterkey_ = clusterkey_;
        if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
          to_bitField0_ |= 0x00000002;
        }
        result.replicationEndpointImpl_ = replicationEndpointImpl_;
        if (dataBuilder_ == null) {
          if (((bitField0_ & 0x00000004) == 0x00000004)) {
            data_ = java.util.Collections.unmodifiableList(data_);
            bitField0_ = (bitField0_ & ~0x00000004);
          }
          result.data_ = data_;
        } else {
          result.data_ = dataBuilder_.build();
        }
        if (configurationBuilder_ == null) {
          if (((bitField0_ & 0x00000008) == 0x00000008)) {
            configuration_ = java.util.Collections.unmodifiableList(configuration_);
            bitField0_ = (bitField0_ & ~0x00000008);
          }
          result.configuration_ = configuration_;
        } else {
          result.configuration_ = configurationBuilder_.build();
        }
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }
5761 
mergeFrom(com.google.protobuf.Message other)5762       public Builder mergeFrom(com.google.protobuf.Message other) {
5763         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer) {
5764           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer)other);
5765         } else {
5766           super.mergeFrom(other);
5767           return this;
5768         }
5769       }
5770 
      // Typed merge: copies set singular fields from `other`, and appends
      // `other`'s repeated elements. Repeated fields take a fast path when
      // this builder's list is empty (share `other`'s immutable list) and
      // otherwise copy-on-write append.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer.getDefaultInstance()) return this;
        if (other.hasClusterkey()) {
          bitField0_ |= 0x00000001;
          clusterkey_ = other.clusterkey_;
          onChanged();
        }
        if (other.hasReplicationEndpointImpl()) {
          bitField0_ |= 0x00000002;
          replicationEndpointImpl_ = other.replicationEndpointImpl_;
          onChanged();
        }
        if (dataBuilder_ == null) {
          // List mode: share other's list when ours is empty, else append.
          if (!other.data_.isEmpty()) {
            if (data_.isEmpty()) {
              data_ = other.data_;
              bitField0_ = (bitField0_ & ~0x00000004);
            } else {
              ensureDataIsMutable();
              data_.addAll(other.data_);
            }
            onChanged();
          }
        } else {
          // Field-builder mode: if ours is empty, drop the builder and adopt
          // other's list (recreating the builder only when the runtime
          // forces field builders); otherwise append through the builder.
          if (!other.data_.isEmpty()) {
            if (dataBuilder_.isEmpty()) {
              dataBuilder_.dispose();
              dataBuilder_ = null;
              data_ = other.data_;
              bitField0_ = (bitField0_ & ~0x00000004);
              dataBuilder_ =
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getDataFieldBuilder() : null;
            } else {
              dataBuilder_.addAllMessages(other.data_);
            }
          }
        }
        if (configurationBuilder_ == null) {
          // Same two-mode merge for the configuration field.
          if (!other.configuration_.isEmpty()) {
            if (configuration_.isEmpty()) {
              configuration_ = other.configuration_;
              bitField0_ = (bitField0_ & ~0x00000008);
            } else {
              ensureConfigurationIsMutable();
              configuration_.addAll(other.configuration_);
            }
            onChanged();
          }
        } else {
          if (!other.configuration_.isEmpty()) {
            if (configurationBuilder_.isEmpty()) {
              configurationBuilder_.dispose();
              configurationBuilder_ = null;
              configuration_ = other.configuration_;
              bitField0_ = (bitField0_ & ~0x00000008);
              configurationBuilder_ =
                com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
                   getConfigurationFieldBuilder() : null;
            } else {
              configurationBuilder_.addAllMessages(other.configuration_);
            }
          }
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
5838 
      /**
       * A ReplicationPeer is initialized when the required 'clusterkey' field
       * has been set and every nested 'data' and 'configuration' message is
       * itself initialized.
       */
      public final boolean isInitialized() {
        if (!hasClusterkey()) {
          
          return false;
        }
        for (int i = 0; i < getDataCount(); i++) {
          if (!getData(i).isInitialized()) {
            
            return false;
          }
        }
        for (int i = 0; i < getConfigurationCount(); i++) {
          if (!getConfiguration(i).isInitialized()) {
            
            return false;
          }
        }
        return true;
      }
5858 
      /**
       * Parses a ReplicationPeer from the input stream and merges it into this
       * builder.  On {@code InvalidProtocolBufferException} the partially
       * parsed message (if any) is still merged in the finally block before
       * the exception is rethrown, so already-read fields are not lost.
       *
       * @throws java.io.IOException if reading fails or the data is malformed
       */
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationPeer) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      // Field-presence / list-ownership bits:
      //   0x00000001 clusterkey is set
      //   0x00000002 replicationEndpointImpl is set
      //   0x00000004 data_ list is privately owned and mutable
      //   0x00000008 configuration_ list is privately owned and mutable
      private int bitField0_;

      // required string clusterkey = 1;
      // Holds either a java.lang.String or a ByteString; lazily converted and
      // cached in whichever representation was requested last.
      private java.lang.Object clusterkey_ = "";
      /**
       * <code>required string clusterkey = 1;</code>
       *
       * <pre>
       * clusterkey is the concatenation of the slave cluster's
       * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
       * </pre>
       */
      public boolean hasClusterkey() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required string clusterkey = 1;</code>
       *
       * <pre>
       * clusterkey is the concatenation of the slave cluster's
       * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
       * </pre>
       */
      public java.lang.String getClusterkey() {
        java.lang.Object ref = clusterkey_;
        if (!(ref instanceof java.lang.String)) {
          // Stored as ByteString: decode as UTF-8 and cache the String.
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          clusterkey_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string clusterkey = 1;</code>
       *
       * <pre>
       * clusterkey is the concatenation of the slave cluster's
       * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
       * </pre>
       */
      public com.google.protobuf.ByteString
          getClusterkeyBytes() {
        java.lang.Object ref = clusterkey_;
        if (ref instanceof String) {
          // Stored as String: encode as UTF-8 and cache the ByteString.
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          clusterkey_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string clusterkey = 1;</code>
       *
       * <pre>
       * clusterkey is the concatenation of the slave cluster's
       * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
       * </pre>
       */
      public Builder setClusterkey(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        clusterkey_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required string clusterkey = 1;</code>
       *
       * <pre>
       * clusterkey is the concatenation of the slave cluster's
       * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
       * </pre>
       */
      public Builder clearClusterkey() {
        // Clears the presence bit and resets to the default value ("").
        bitField0_ = (bitField0_ & ~0x00000001);
        clusterkey_ = getDefaultInstance().getClusterkey();
        onChanged();
        return this;
      }
      /**
       * <code>required string clusterkey = 1;</code>
       *
       * <pre>
       * clusterkey is the concatenation of the slave cluster's
       * hbase.zookeeper.quorum:hbase.zookeeper.property.clientPort:zookeeper.znode.parent
       * </pre>
       */
      public Builder setClusterkeyBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        clusterkey_ = value;
        onChanged();
        return this;
      }
5981 
      // optional string replicationEndpointImpl = 2;
      // Holds either a java.lang.String or a ByteString; lazily converted and
      // cached in whichever representation was requested last.
      private java.lang.Object replicationEndpointImpl_ = "";
      /**
       * <code>optional string replicationEndpointImpl = 2;</code>
       */
      public boolean hasReplicationEndpointImpl() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      /**
       * <code>optional string replicationEndpointImpl = 2;</code>
       */
      public java.lang.String getReplicationEndpointImpl() {
        java.lang.Object ref = replicationEndpointImpl_;
        if (!(ref instanceof java.lang.String)) {
          // Stored as ByteString: decode as UTF-8 and cache the String.
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          replicationEndpointImpl_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>optional string replicationEndpointImpl = 2;</code>
       */
      public com.google.protobuf.ByteString
          getReplicationEndpointImplBytes() {
        java.lang.Object ref = replicationEndpointImpl_;
        if (ref instanceof String) {
          // Stored as String: encode as UTF-8 and cache the ByteString.
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          replicationEndpointImpl_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>optional string replicationEndpointImpl = 2;</code>
       */
      public Builder setReplicationEndpointImpl(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        replicationEndpointImpl_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>optional string replicationEndpointImpl = 2;</code>
       */
      public Builder clearReplicationEndpointImpl() {
        // Clears the presence bit and resets to the default value ("").
        bitField0_ = (bitField0_ & ~0x00000002);
        replicationEndpointImpl_ = getDefaultInstance().getReplicationEndpointImpl();
        onChanged();
        return this;
      }
      /**
       * <code>optional string replicationEndpointImpl = 2;</code>
       */
      public Builder setReplicationEndpointImplBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        replicationEndpointImpl_ = value;
        onChanged();
        return this;
      }
6055 
      // repeated .BytesBytesPair data = 3;
      // Dual representation: while dataBuilder_ is null the field lives in
      // data_ (possibly a shared immutable list); once nested builders are
      // requested, ownership moves into dataBuilder_ and data_ is set to null
      // (see getDataFieldBuilder).  Every accessor below branches on which
      // mode is active.
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> data_ =
        java.util.Collections.emptyList();
      // Copy-on-write guard: if the list is not privately owned (bit 0x4
      // clear, e.g. after adopting another message's list in mergeFrom),
      // replace it with a private ArrayList copy before mutating.
      private void ensureDataIsMutable() {
        if (!((bitField0_ & 0x00000004) == 0x00000004)) {
          data_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair>(data_);
          bitField0_ |= 0x00000004;
         }
      }

      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder> dataBuilder_;

      /**
       * <code>repeated .BytesBytesPair data = 3;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> getDataList() {
        if (dataBuilder_ == null) {
          return java.util.Collections.unmodifiableList(data_);
        } else {
          return dataBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .BytesBytesPair data = 3;</code>
       */
      public int getDataCount() {
        if (dataBuilder_ == null) {
          return data_.size();
        } else {
          return dataBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .BytesBytesPair data = 3;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair getData(int index) {
        if (dataBuilder_ == null) {
          return data_.get(index);
        } else {
          return dataBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .BytesBytesPair data = 3;</code>
       */
      public Builder setData(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) {
        if (dataBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureDataIsMutable();
          data_.set(index, value);
          onChanged();
        } else {
          dataBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .BytesBytesPair data = 3;</code>
       */
      public Builder setData(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) {
        if (dataBuilder_ == null) {
          ensureDataIsMutable();
          data_.set(index, builderForValue.build());
          onChanged();
        } else {
          dataBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .BytesBytesPair data = 3;</code>
       */
      public Builder addData(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) {
        if (dataBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureDataIsMutable();
          data_.add(value);
          onChanged();
        } else {
          dataBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .BytesBytesPair data = 3;</code>
       */
      public Builder addData(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair value) {
        if (dataBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureDataIsMutable();
          data_.add(index, value);
          onChanged();
        } else {
          dataBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .BytesBytesPair data = 3;</code>
       */
      public Builder addData(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) {
        if (dataBuilder_ == null) {
          ensureDataIsMutable();
          data_.add(builderForValue.build());
          onChanged();
        } else {
          dataBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .BytesBytesPair data = 3;</code>
       */
      public Builder addData(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder builderForValue) {
        if (dataBuilder_ == null) {
          ensureDataIsMutable();
          data_.add(index, builderForValue.build());
          onChanged();
        } else {
          dataBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .BytesBytesPair data = 3;</code>
       */
      public Builder addAllData(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair> values) {
        if (dataBuilder_ == null) {
          ensureDataIsMutable();
          // GeneratedMessage.Builder.addAll null-checks each element.
          super.addAll(values, data_);
          onChanged();
        } else {
          dataBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .BytesBytesPair data = 3;</code>
       */
      public Builder clearData() {
        if (dataBuilder_ == null) {
          data_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000004);
          onChanged();
        } else {
          dataBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .BytesBytesPair data = 3;</code>
       */
      public Builder removeData(int index) {
        if (dataBuilder_ == null) {
          ensureDataIsMutable();
          data_.remove(index);
          onChanged();
        } else {
          dataBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .BytesBytesPair data = 3;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder getDataBuilder(
          int index) {
        // Forces the field into builder mode (see getDataFieldBuilder).
        return getDataFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .BytesBytesPair data = 3;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder getDataOrBuilder(
          int index) {
        if (dataBuilder_ == null) {
          return data_.get(index);  } else {
          return dataBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .BytesBytesPair data = 3;</code>
       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
           getDataOrBuilderList() {
        if (dataBuilder_ != null) {
          return dataBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(data_);
        }
      }
      /**
       * <code>repeated .BytesBytesPair data = 3;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addDataBuilder() {
        return getDataFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance());
      }
      /**
       * <code>repeated .BytesBytesPair data = 3;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder addDataBuilder(
          int index) {
        return getDataFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.getDefaultInstance());
      }
      /**
       * <code>repeated .BytesBytesPair data = 3;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder>
           getDataBuilderList() {
        return getDataFieldBuilder().getBuilderList();
      }
      // Lazily creates the RepeatedFieldBuilder, seeding it from data_ and
      // handing it ownership (data_ is nulled out afterwards).
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>
          getDataFieldBuilder() {
        if (dataBuilder_ == null) {
          dataBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPairOrBuilder>(
                  data_,
                  ((bitField0_ & 0x00000004) == 0x00000004),
                  getParentForChildren(),
                  isClean());
          data_ = null;
        }
        return dataBuilder_;
      }
6295 
      // repeated .NameStringPair configuration = 4;
      // Dual representation, identical in structure to the 'data' field:
      // while configurationBuilder_ is null the field lives in
      // configuration_; once nested builders are requested, ownership moves
      // into configurationBuilder_ and configuration_ is set to null.
      private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> configuration_ =
        java.util.Collections.emptyList();
      // Copy-on-write guard: replaces a shared list with a private ArrayList
      // copy (and sets bit 0x8) before any mutation.
      private void ensureConfigurationIsMutable() {
        if (!((bitField0_ & 0x00000008) == 0x00000008)) {
          configuration_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>(configuration_);
          bitField0_ |= 0x00000008;
         }
      }

      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> configurationBuilder_;

      /**
       * <code>repeated .NameStringPair configuration = 4;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getConfigurationList() {
        if (configurationBuilder_ == null) {
          return java.util.Collections.unmodifiableList(configuration_);
        } else {
          return configurationBuilder_.getMessageList();
        }
      }
      /**
       * <code>repeated .NameStringPair configuration = 4;</code>
       */
      public int getConfigurationCount() {
        if (configurationBuilder_ == null) {
          return configuration_.size();
        } else {
          return configurationBuilder_.getCount();
        }
      }
      /**
       * <code>repeated .NameStringPair configuration = 4;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getConfiguration(int index) {
        if (configurationBuilder_ == null) {
          return configuration_.get(index);
        } else {
          return configurationBuilder_.getMessage(index);
        }
      }
      /**
       * <code>repeated .NameStringPair configuration = 4;</code>
       */
      public Builder setConfiguration(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
        if (configurationBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureConfigurationIsMutable();
          configuration_.set(index, value);
          onChanged();
        } else {
          configurationBuilder_.setMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .NameStringPair configuration = 4;</code>
       */
      public Builder setConfiguration(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
        if (configurationBuilder_ == null) {
          ensureConfigurationIsMutable();
          configuration_.set(index, builderForValue.build());
          onChanged();
        } else {
          configurationBuilder_.setMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .NameStringPair configuration = 4;</code>
       */
      public Builder addConfiguration(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
        if (configurationBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureConfigurationIsMutable();
          configuration_.add(value);
          onChanged();
        } else {
          configurationBuilder_.addMessage(value);
        }
        return this;
      }
      /**
       * <code>repeated .NameStringPair configuration = 4;</code>
       */
      public Builder addConfiguration(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
        if (configurationBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          ensureConfigurationIsMutable();
          configuration_.add(index, value);
          onChanged();
        } else {
          configurationBuilder_.addMessage(index, value);
        }
        return this;
      }
      /**
       * <code>repeated .NameStringPair configuration = 4;</code>
       */
      public Builder addConfiguration(
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
        if (configurationBuilder_ == null) {
          ensureConfigurationIsMutable();
          configuration_.add(builderForValue.build());
          onChanged();
        } else {
          configurationBuilder_.addMessage(builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .NameStringPair configuration = 4;</code>
       */
      public Builder addConfiguration(
          int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
        if (configurationBuilder_ == null) {
          ensureConfigurationIsMutable();
          configuration_.add(index, builderForValue.build());
          onChanged();
        } else {
          configurationBuilder_.addMessage(index, builderForValue.build());
        }
        return this;
      }
      /**
       * <code>repeated .NameStringPair configuration = 4;</code>
       */
      public Builder addAllConfiguration(
          java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> values) {
        if (configurationBuilder_ == null) {
          ensureConfigurationIsMutable();
          // GeneratedMessage.Builder.addAll null-checks each element.
          super.addAll(values, configuration_);
          onChanged();
        } else {
          configurationBuilder_.addAllMessages(values);
        }
        return this;
      }
      /**
       * <code>repeated .NameStringPair configuration = 4;</code>
       */
      public Builder clearConfiguration() {
        if (configurationBuilder_ == null) {
          configuration_ = java.util.Collections.emptyList();
          bitField0_ = (bitField0_ & ~0x00000008);
          onChanged();
        } else {
          configurationBuilder_.clear();
        }
        return this;
      }
      /**
       * <code>repeated .NameStringPair configuration = 4;</code>
       */
      public Builder removeConfiguration(int index) {
        if (configurationBuilder_ == null) {
          ensureConfigurationIsMutable();
          configuration_.remove(index);
          onChanged();
        } else {
          configurationBuilder_.remove(index);
        }
        return this;
      }
      /**
       * <code>repeated .NameStringPair configuration = 4;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder getConfigurationBuilder(
          int index) {
        // Forces the field into builder mode (see getConfigurationFieldBuilder).
        return getConfigurationFieldBuilder().getBuilder(index);
      }
      /**
       * <code>repeated .NameStringPair configuration = 4;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getConfigurationOrBuilder(
          int index) {
        if (configurationBuilder_ == null) {
          return configuration_.get(index);  } else {
          return configurationBuilder_.getMessageOrBuilder(index);
        }
      }
      /**
       * <code>repeated .NameStringPair configuration = 4;</code>
       */
      public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
           getConfigurationOrBuilderList() {
        if (configurationBuilder_ != null) {
          return configurationBuilder_.getMessageOrBuilderList();
        } else {
          return java.util.Collections.unmodifiableList(configuration_);
        }
      }
      /**
       * <code>repeated .NameStringPair configuration = 4;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder() {
        return getConfigurationFieldBuilder().addBuilder(
            org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance());
      }
      /**
       * <code>repeated .NameStringPair configuration = 4;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addConfigurationBuilder(
          int index) {
        return getConfigurationFieldBuilder().addBuilder(
            index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance());
      }
      /**
       * <code>repeated .NameStringPair configuration = 4;</code>
       */
      public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder>
           getConfigurationBuilderList() {
        return getConfigurationFieldBuilder().getBuilderList();
      }
      // Lazily creates the RepeatedFieldBuilder, seeding it from
      // configuration_ and handing it ownership (configuration_ is nulled
      // out afterwards).
      private com.google.protobuf.RepeatedFieldBuilder<
          org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
          getConfigurationFieldBuilder() {
        if (configurationBuilder_ == null) {
          configurationBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>(
                  configuration_,
                  ((bitField0_ & 0x00000008) == 0x00000008),
                  getParentForChildren(),
                  isClean());
          configuration_ = null;
        }
        return configurationBuilder_;
      }
6535 
6536       // @@protoc_insertion_point(builder_scope:ReplicationPeer)
6537     }
6538 
    // Eagerly builds the singleton returned by getDefaultInstance(); the
    // no-parse constructor skips wire parsing and initFields() sets defaults.
    static {
      defaultInstance = new ReplicationPeer(true);
      defaultInstance.initFields();
    }
6543 
6544     // @@protoc_insertion_point(class_scope:ReplicationPeer)
6545   }
6546 
  // Read-only accessor interface implemented by both the immutable
  // ReplicationState message and its Builder.
  public interface ReplicationStateOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required .ReplicationState.State state = 1;
    /**
     * <code>required .ReplicationState.State state = 1;</code>
     *
     * Returns true if the required 'state' field has been explicitly set.
     */
    boolean hasState();
    /**
     * <code>required .ReplicationState.State state = 1;</code>
     *
     * Returns the set value, or the default (ENABLED) if unset.
     */
    org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State getState();
  }
  /**
   * Protobuf type {@code ReplicationState}
   *
   * <pre>
   **
   * Used by replication. Holds whether enabled or disabled
   * </pre>
   */
  public static final class ReplicationState extends
      com.google.protobuf.GeneratedMessage
      implements ReplicationStateOrBuilder {
    // Use ReplicationState.newBuilder() to construct.
    private ReplicationState(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor used only for the singleton default instance; skips parsing.
    private ReplicationState(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance, created and initialized in the static block below.
    private static final ReplicationState defaultInstance;
    public static ReplicationState getDefaultInstance() {
      return defaultInstance;
    }

    public ReplicationState getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that arrived on the wire but are not defined in this message's schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until end of
    // stream (tag 0) and records unrecognized fields in unknownFields.
    private ReplicationState(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // The 'default' label precedes 'case 8'; label order does not affect
          // switch dispatch in Java.
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            // tag 8 = field number 1, wire type 0 (varint): the 'state' enum.
            case 8: {
              int rawValue = input.readEnum();
              org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State value = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State.valueOf(rawValue);
              if (value == null) {
                // Unrecognized enum number: preserve it as an unknown varint field.
                unknownFields.mergeVarintField(1, rawValue);
              } else {
                bitField0_ |= 0x00000001;
                state_ = value;
              }
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Even on failure, finish building unknown fields so the partially
        // parsed message attached to the exception is consistent.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationState_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationState_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.Builder.class);
    }

    // NOTE(review): public and non-final exactly as protoc 2.5 emits it; callers
    // must treat PARSER as read-only.
    public static com.google.protobuf.Parser<ReplicationState> PARSER =
        new com.google.protobuf.AbstractParser<ReplicationState>() {
      public ReplicationState parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ReplicationState(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ReplicationState> getParserForType() {
      return PARSER;
    }

    /**
     * Protobuf enum {@code ReplicationState.State}
     */
    public enum State
        implements com.google.protobuf.ProtocolMessageEnum {
      /**
       * <code>ENABLED = 0;</code>
       */
      ENABLED(0, 0),
      /**
       * <code>DISABLED = 1;</code>
       */
      DISABLED(1, 1),
      ;

      /**
       * <code>ENABLED = 0;</code>
       */
      public static final int ENABLED_VALUE = 0;
      /**
       * <code>DISABLED = 1;</code>
       */
      public static final int DISABLED_VALUE = 1;


      // Returns the proto-declared field number for this constant.
      public final int getNumber() { return value; }

      // Maps a proto field number to its enum constant; null if unrecognized.
      public static State valueOf(int value) {
        switch (value) {
          case 0: return ENABLED;
          case 1: return DISABLED;
          default: return null;
        }
      }

      public static com.google.protobuf.Internal.EnumLiteMap<State>
          internalGetValueMap() {
        return internalValueMap;
      }
      private static com.google.protobuf.Internal.EnumLiteMap<State>
          internalValueMap =
            new com.google.protobuf.Internal.EnumLiteMap<State>() {
              public State findValueByNumber(int number) {
                return State.valueOf(number);
              }
            };

      public final com.google.protobuf.Descriptors.EnumValueDescriptor
          getValueDescriptor() {
        return getDescriptor().getValues().get(index);
      }
      public final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptorForType() {
        return getDescriptor();
      }
      public static final com.google.protobuf.Descriptors.EnumDescriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.getDescriptor().getEnumTypes().get(0);
      }

      private static final State[] VALUES = values();

      // Resolves a descriptor back to the enum constant; rejects descriptors
      // that belong to a different enum type.
      public static State valueOf(
          com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
        if (desc.getType() != getDescriptor()) {
          throw new java.lang.IllegalArgumentException(
            "EnumValueDescriptor is not for this type.");
        }
        return VALUES[desc.getIndex()];
      }

      // index: position within the enum descriptor; value: proto field number.
      private final int index;
      private final int value;

      private State(int index, int value) {
        this.index = index;
        this.value = value;
      }

      // @@protoc_insertion_point(enum_scope:ReplicationState.State)
    }

    // Presence bits: bit 0 tracks whether 'state' was explicitly set.
    private int bitField0_;
    // required .ReplicationState.State state = 1;
    public static final int STATE_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State state_;
    /**
     * <code>required .ReplicationState.State state = 1;</code>
     */
    public boolean hasState() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>required .ReplicationState.State state = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State getState() {
      return state_;
    }

    // Resets fields to their proto defaults (state = ENABLED).
    private void initFields() {
      state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State.ENABLED;
    }
    // Memoized result: -1 = not yet computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // 'state' is a required field; the message is invalid without it.
      if (!hasState()) {
        memoizedIsInitialized = 0;
        return false;
      }
      memoizedIsInitialized = 1;
      return true;
    }

    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Ensure the memoized size is computed before serializing.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeEnum(1, state_.getNumber());
      }
      getUnknownFields().writeTo(output);
    }

    // Cached serialized size; -1 means "not yet computed".
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeEnumSize(1, state_.getNumber());
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }

    private static final long serialVersionUID = 0L;
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }

    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState)) {
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState) obj;

      // Equal iff presence, field value, and unknown fields all match.
      boolean result = true;
      result = result && (hasState() == other.hasState());
      if (hasState()) {
        result = result &&
            (getState() == other.getState());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }

    // Cached hash; 0 means "not yet computed".
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasState()) {
        hash = (37 * hash) + STATE_FIELD_NUMBER;
        hash = (53 * hash) + hashEnum(getState());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }

    // Standard parseFrom entry points; all delegate to PARSER.
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom(
        byte[] data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return PARSER.parseFrom(data, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseDelimitedFrom(java.io.InputStream input)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseDelimitedFrom(
        java.io.InputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseDelimitedFrom(input, extensionRegistry);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom(
        com.google.protobuf.CodedInputStream input)
        throws java.io.IOException {
      return PARSER.parseFrom(input);
    }
    public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      return PARSER.parseFrom(input, extensionRegistry);
    }

    public static Builder newBuilder() { return Builder.create(); }
    public Builder newBuilderForType() { return newBuilder(); }
    public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState prototype) {
      return newBuilder().mergeFrom(prototype);
    }
    public Builder toBuilder() { return newBuilder(this); }

    @java.lang.Override
    protected Builder newBuilderForType(
        com.google.protobuf.GeneratedMessage.BuilderParent parent) {
      Builder builder = new Builder(parent);
      return builder;
    }
    /**
     * Protobuf type {@code ReplicationState}
     *
     * <pre>
     **
     * Used by replication. Holds whether enabled or disabled
     * </pre>
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationStateOrBuilder {
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationState_descriptor;
      }

      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationState_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // No nested-message fields here, so there are no field builders to force.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets the builder to proto defaults and clears the presence bit.
      public Builder clear() {
        super.clear();
        state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State.ENABLED;
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationState_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.getDefaultInstance();
      }

      // Like buildPartial(), but throws if required fields are missing.
      public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState build() {
        org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Copies the field value and presence bit into a new message without
      // validating required fields.
      public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.state_ = state_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Merging the default instance is a no-op; otherwise set fields present
      // in 'other' and merge its unknown fields.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.getDefaultInstance()) return this;
        if (other.hasState()) {
          setState(other.getState());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      public final boolean isInitialized() {
        if (!hasState()) {
          
          return false;
        }
        return true;
      }

      // Parses from the wire into a temporary message; the finally block merges
      // whatever was parsed even when an exception is rethrown.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required .ReplicationState.State state = 1;
      private org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State.ENABLED;
      /**
       * <code>required .ReplicationState.State state = 1;</code>
       */
      public boolean hasState() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required .ReplicationState.State state = 1;</code>
       */
      public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State getState() {
        return state_;
      }
      /**
       * <code>required .ReplicationState.State state = 1;</code>
       */
      public Builder setState(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State value) {
        if (value == null) {
          throw new NullPointerException();
        }
        bitField0_ |= 0x00000001;
        state_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required .ReplicationState.State state = 1;</code>
       */
      public Builder clearState() {
        bitField0_ = (bitField0_ & ~0x00000001);
        state_ = org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationState.State.ENABLED;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:ReplicationState)
    }

    // Eagerly builds the singleton returned by getDefaultInstance().
    static {
      defaultInstance = new ReplicationState(true);
      defaultInstance.initFields();
    }

    // @@protoc_insertion_point(class_scope:ReplicationState)
  }
7089 
  // Read-only accessor interface implemented by both the immutable
  // ReplicationHLogPosition message and its Builder.
  public interface ReplicationHLogPositionOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // required int64 position = 1;
    /**
     * <code>required int64 position = 1;</code>
     *
     * Returns true if the required 'position' field has been explicitly set.
     */
    boolean hasPosition();
    /**
     * <code>required int64 position = 1;</code>
     *
     * Returns the set value, or 0 if unset.
     */
    long getPosition();
  }
7103   /**
7104    * Protobuf type {@code ReplicationHLogPosition}
7105    *
7106    * <pre>
7107    **
7108    * Used by replication. Holds the current position in an WAL file.
7109    * </pre>
7110    */
7111   public static final class ReplicationHLogPosition extends
7112       com.google.protobuf.GeneratedMessage
7113       implements ReplicationHLogPositionOrBuilder {
7114     // Use ReplicationHLogPosition.newBuilder() to construct.
    private ReplicationHLogPosition(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // Constructor used only for the singleton default instance; skips parsing.
    private ReplicationHLogPosition(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }

    // Singleton default instance; assigned in the class's static initializer
    // (outside this excerpt — presumably mirroring the other messages in this file).
    private static final ReplicationHLogPosition defaultInstance;
    public static ReplicationHLogPosition getDefaultInstance() {
      return defaultInstance;
    }

    public ReplicationHLogPosition getDefaultInstanceForType() {
      return defaultInstance;
    }

    // Fields that arrived on the wire but are not defined in this message's schema.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until end of
    // stream (tag 0) and records unrecognized fields in unknownFields.
    private ReplicationHLogPosition(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          // The 'default' label precedes 'case 8'; label order does not affect
          // switch dispatch in Java.
          switch (tag) {
            case 0:
              done = true;
              break;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            // tag 8 = field number 1, wire type 0 (varint): the 'position' int64.
            case 8: {
              bitField0_ |= 0x00000001;
              position_ = input.readInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Even on failure, finish building unknown fields so the partially
        // parsed message attached to the exception is consistent.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationHLogPosition_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationHLogPosition_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.Builder.class);
    }

    // NOTE(review): public and non-final exactly as protoc 2.5 emits it; callers
    // must treat PARSER as read-only.
    public static com.google.protobuf.Parser<ReplicationHLogPosition> PARSER =
        new com.google.protobuf.AbstractParser<ReplicationHLogPosition>() {
      public ReplicationHLogPosition parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new ReplicationHLogPosition(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<ReplicationHLogPosition> getParserForType() {
      return PARSER;
    }
7202 
7203     private int bitField0_;
7204     // required int64 position = 1;
7205     public static final int POSITION_FIELD_NUMBER = 1;
7206     private long position_;
7207     /**
7208      * <code>required int64 position = 1;</code>
7209      */
hasPosition()7210     public boolean hasPosition() {
7211       return ((bitField0_ & 0x00000001) == 0x00000001);
7212     }
7213     /**
7214      * <code>required int64 position = 1;</code>
7215      */
getPosition()7216     public long getPosition() {
7217       return position_;
7218     }
7219 
initFields()7220     private void initFields() {
7221       position_ = 0L;
7222     }
7223     private byte memoizedIsInitialized = -1;
isInitialized()7224     public final boolean isInitialized() {
7225       byte isInitialized = memoizedIsInitialized;
7226       if (isInitialized != -1) return isInitialized == 1;
7227 
7228       if (!hasPosition()) {
7229         memoizedIsInitialized = 0;
7230         return false;
7231       }
7232       memoizedIsInitialized = 1;
7233       return true;
7234     }
7235 
writeTo(com.google.protobuf.CodedOutputStream output)7236     public void writeTo(com.google.protobuf.CodedOutputStream output)
7237                         throws java.io.IOException {
7238       getSerializedSize();
7239       if (((bitField0_ & 0x00000001) == 0x00000001)) {
7240         output.writeInt64(1, position_);
7241       }
7242       getUnknownFields().writeTo(output);
7243     }
7244 
7245     private int memoizedSerializedSize = -1;
getSerializedSize()7246     public int getSerializedSize() {
7247       int size = memoizedSerializedSize;
7248       if (size != -1) return size;
7249 
7250       size = 0;
7251       if (((bitField0_ & 0x00000001) == 0x00000001)) {
7252         size += com.google.protobuf.CodedOutputStream
7253           .computeInt64Size(1, position_);
7254       }
7255       size += getUnknownFields().getSerializedSize();
7256       memoizedSerializedSize = size;
7257       return size;
7258     }
7259 
7260     private static final long serialVersionUID = 0L;
7261     @java.lang.Override
writeReplace()7262     protected java.lang.Object writeReplace()
7263         throws java.io.ObjectStreamException {
7264       return super.writeReplace();
7265     }
7266 
7267     @java.lang.Override
equals(final java.lang.Object obj)7268     public boolean equals(final java.lang.Object obj) {
7269       if (obj == this) {
7270        return true;
7271       }
7272       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition)) {
7273         return super.equals(obj);
7274       }
7275       org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition) obj;
7276 
7277       boolean result = true;
7278       result = result && (hasPosition() == other.hasPosition());
7279       if (hasPosition()) {
7280         result = result && (getPosition()
7281             == other.getPosition());
7282       }
7283       result = result &&
7284           getUnknownFields().equals(other.getUnknownFields());
7285       return result;
7286     }
7287 
7288     private int memoizedHashCode = 0;
7289     @java.lang.Override
hashCode()7290     public int hashCode() {
7291       if (memoizedHashCode != 0) {
7292         return memoizedHashCode;
7293       }
7294       int hash = 41;
7295       hash = (19 * hash) + getDescriptorForType().hashCode();
7296       if (hasPosition()) {
7297         hash = (37 * hash) + POSITION_FIELD_NUMBER;
7298         hash = (53 * hash) + hashLong(getPosition());
7299       }
7300       hash = (29 * hash) + getUnknownFields().hashCode();
7301       memoizedHashCode = hash;
7302       return hash;
7303     }
7304 
parseFrom( com.google.protobuf.ByteString data)7305     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom(
7306         com.google.protobuf.ByteString data)
7307         throws com.google.protobuf.InvalidProtocolBufferException {
7308       return PARSER.parseFrom(data);
7309     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7310     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom(
7311         com.google.protobuf.ByteString data,
7312         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7313         throws com.google.protobuf.InvalidProtocolBufferException {
7314       return PARSER.parseFrom(data, extensionRegistry);
7315     }
parseFrom(byte[] data)7316     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom(byte[] data)
7317         throws com.google.protobuf.InvalidProtocolBufferException {
7318       return PARSER.parseFrom(data);
7319     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7320     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom(
7321         byte[] data,
7322         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7323         throws com.google.protobuf.InvalidProtocolBufferException {
7324       return PARSER.parseFrom(data, extensionRegistry);
7325     }
parseFrom(java.io.InputStream input)7326     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom(java.io.InputStream input)
7327         throws java.io.IOException {
7328       return PARSER.parseFrom(input);
7329     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7330     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom(
7331         java.io.InputStream input,
7332         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7333         throws java.io.IOException {
7334       return PARSER.parseFrom(input, extensionRegistry);
7335     }
parseDelimitedFrom(java.io.InputStream input)7336     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseDelimitedFrom(java.io.InputStream input)
7337         throws java.io.IOException {
7338       return PARSER.parseDelimitedFrom(input);
7339     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7340     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseDelimitedFrom(
7341         java.io.InputStream input,
7342         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7343         throws java.io.IOException {
7344       return PARSER.parseDelimitedFrom(input, extensionRegistry);
7345     }
parseFrom( com.google.protobuf.CodedInputStream input)7346     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom(
7347         com.google.protobuf.CodedInputStream input)
7348         throws java.io.IOException {
7349       return PARSER.parseFrom(input);
7350     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7351     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parseFrom(
7352         com.google.protobuf.CodedInputStream input,
7353         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7354         throws java.io.IOException {
7355       return PARSER.parseFrom(input, extensionRegistry);
7356     }
7357 
newBuilder()7358     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()7359     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition prototype)7360     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition prototype) {
7361       return newBuilder().mergeFrom(prototype);
7362     }
toBuilder()7363     public Builder toBuilder() { return newBuilder(this); }
7364 
7365     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)7366     protected Builder newBuilderForType(
7367         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
7368       Builder builder = new Builder(parent);
7369       return builder;
7370     }
7371     /**
7372      * Protobuf type {@code ReplicationHLogPosition}
7373      *
7374      * <pre>
7375      **
7376      * Used by replication. Holds the current position in an WAL file.
7377      * </pre>
7378      */
7379     public static final class Builder extends
7380         com.google.protobuf.GeneratedMessage.Builder<Builder>
7381        implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPositionOrBuilder {
7382       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()7383           getDescriptor() {
7384         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationHLogPosition_descriptor;
7385       }
7386 
7387       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()7388           internalGetFieldAccessorTable() {
7389         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationHLogPosition_fieldAccessorTable
7390             .ensureFieldAccessorsInitialized(
7391                 org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.Builder.class);
7392       }
7393 
7394       // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.newBuilder()
Builder()7395       private Builder() {
7396         maybeForceBuilderInitialization();
7397       }
7398 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)7399       private Builder(
7400           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
7401         super(parent);
7402         maybeForceBuilderInitialization();
7403       }
maybeForceBuilderInitialization()7404       private void maybeForceBuilderInitialization() {
7405         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
7406         }
7407       }
create()7408       private static Builder create() {
7409         return new Builder();
7410       }
7411 
clear()7412       public Builder clear() {
7413         super.clear();
7414         position_ = 0L;
7415         bitField0_ = (bitField0_ & ~0x00000001);
7416         return this;
7417       }
7418 
clone()7419       public Builder clone() {
7420         return create().mergeFrom(buildPartial());
7421       }
7422 
7423       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()7424           getDescriptorForType() {
7425         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationHLogPosition_descriptor;
7426       }
7427 
getDefaultInstanceForType()7428       public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition getDefaultInstanceForType() {
7429         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.getDefaultInstance();
7430       }
7431 
build()7432       public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition build() {
7433         org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition result = buildPartial();
7434         if (!result.isInitialized()) {
7435           throw newUninitializedMessageException(result);
7436         }
7437         return result;
7438       }
7439 
buildPartial()7440       public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition buildPartial() {
7441         org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition(this);
7442         int from_bitField0_ = bitField0_;
7443         int to_bitField0_ = 0;
7444         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
7445           to_bitField0_ |= 0x00000001;
7446         }
7447         result.position_ = position_;
7448         result.bitField0_ = to_bitField0_;
7449         onBuilt();
7450         return result;
7451       }
7452 
mergeFrom(com.google.protobuf.Message other)7453       public Builder mergeFrom(com.google.protobuf.Message other) {
7454         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition) {
7455           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition)other);
7456         } else {
7457           super.mergeFrom(other);
7458           return this;
7459         }
7460       }
7461 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition other)7462       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition other) {
7463         if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition.getDefaultInstance()) return this;
7464         if (other.hasPosition()) {
7465           setPosition(other.getPosition());
7466         }
7467         this.mergeUnknownFields(other.getUnknownFields());
7468         return this;
7469       }
7470 
isInitialized()7471       public final boolean isInitialized() {
7472         if (!hasPosition()) {
7473 
7474           return false;
7475         }
7476         return true;
7477       }
7478 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7479       public Builder mergeFrom(
7480           com.google.protobuf.CodedInputStream input,
7481           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7482           throws java.io.IOException {
7483         org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition parsedMessage = null;
7484         try {
7485           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
7486         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
7487           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationHLogPosition) e.getUnfinishedMessage();
7488           throw e;
7489         } finally {
7490           if (parsedMessage != null) {
7491             mergeFrom(parsedMessage);
7492           }
7493         }
7494         return this;
7495       }
7496       private int bitField0_;
7497 
7498       // required int64 position = 1;
7499       private long position_ ;
7500       /**
7501        * <code>required int64 position = 1;</code>
7502        */
hasPosition()7503       public boolean hasPosition() {
7504         return ((bitField0_ & 0x00000001) == 0x00000001);
7505       }
7506       /**
7507        * <code>required int64 position = 1;</code>
7508        */
getPosition()7509       public long getPosition() {
7510         return position_;
7511       }
7512       /**
7513        * <code>required int64 position = 1;</code>
7514        */
setPosition(long value)7515       public Builder setPosition(long value) {
7516         bitField0_ |= 0x00000001;
7517         position_ = value;
7518         onChanged();
7519         return this;
7520       }
7521       /**
7522        * <code>required int64 position = 1;</code>
7523        */
clearPosition()7524       public Builder clearPosition() {
7525         bitField0_ = (bitField0_ & ~0x00000001);
7526         position_ = 0L;
7527         onChanged();
7528         return this;
7529       }
7530 
7531       // @@protoc_insertion_point(builder_scope:ReplicationHLogPosition)
7532     }
7533 
7534     static {
7535       defaultInstance = new ReplicationHLogPosition(true);
defaultInstance.initFields()7536       defaultInstance.initFields();
7537     }
7538 
7539     // @@protoc_insertion_point(class_scope:ReplicationHLogPosition)
7540   }
7541 
7542   public interface ReplicationLockOrBuilder
7543       extends com.google.protobuf.MessageOrBuilder {
7544 
7545     // required string lock_owner = 1;
7546     /**
7547      * <code>required string lock_owner = 1;</code>
7548      */
hasLockOwner()7549     boolean hasLockOwner();
7550     /**
7551      * <code>required string lock_owner = 1;</code>
7552      */
getLockOwner()7553     java.lang.String getLockOwner();
7554     /**
7555      * <code>required string lock_owner = 1;</code>
7556      */
7557     com.google.protobuf.ByteString
getLockOwnerBytes()7558         getLockOwnerBytes();
7559   }
7560   /**
7561    * Protobuf type {@code ReplicationLock}
7562    *
7563    * <pre>
7564    **
7565    * Used by replication. Used to lock a region server during failover.
7566    * </pre>
7567    */
7568   public static final class ReplicationLock extends
7569       com.google.protobuf.GeneratedMessage
7570       implements ReplicationLockOrBuilder {
7571     // Use ReplicationLock.newBuilder() to construct.
ReplicationLock(com.google.protobuf.GeneratedMessage.Builder<?> builder)7572     private ReplicationLock(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
7573       super(builder);
7574       this.unknownFields = builder.getUnknownFields();
7575     }
ReplicationLock(boolean noInit)7576     private ReplicationLock(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
7577 
7578     private static final ReplicationLock defaultInstance;
getDefaultInstance()7579     public static ReplicationLock getDefaultInstance() {
7580       return defaultInstance;
7581     }
7582 
getDefaultInstanceForType()7583     public ReplicationLock getDefaultInstanceForType() {
7584       return defaultInstance;
7585     }
7586 
7587     private final com.google.protobuf.UnknownFieldSet unknownFields;
7588     @java.lang.Override
7589     public final com.google.protobuf.UnknownFieldSet
getUnknownFields()7590         getUnknownFields() {
7591       return this.unknownFields;
7592     }
ReplicationLock( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7593     private ReplicationLock(
7594         com.google.protobuf.CodedInputStream input,
7595         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7596         throws com.google.protobuf.InvalidProtocolBufferException {
7597       initFields();
7598       int mutable_bitField0_ = 0;
7599       com.google.protobuf.UnknownFieldSet.Builder unknownFields =
7600           com.google.protobuf.UnknownFieldSet.newBuilder();
7601       try {
7602         boolean done = false;
7603         while (!done) {
7604           int tag = input.readTag();
7605           switch (tag) {
7606             case 0:
7607               done = true;
7608               break;
7609             default: {
7610               if (!parseUnknownField(input, unknownFields,
7611                                      extensionRegistry, tag)) {
7612                 done = true;
7613               }
7614               break;
7615             }
7616             case 10: {
7617               bitField0_ |= 0x00000001;
7618               lockOwner_ = input.readBytes();
7619               break;
7620             }
7621           }
7622         }
7623       } catch (com.google.protobuf.InvalidProtocolBufferException e) {
7624         throw e.setUnfinishedMessage(this);
7625       } catch (java.io.IOException e) {
7626         throw new com.google.protobuf.InvalidProtocolBufferException(
7627             e.getMessage()).setUnfinishedMessage(this);
7628       } finally {
7629         this.unknownFields = unknownFields.build();
7630         makeExtensionsImmutable();
7631       }
7632     }
7633     public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()7634         getDescriptor() {
7635       return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationLock_descriptor;
7636     }
7637 
7638     protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()7639         internalGetFieldAccessorTable() {
7640       return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationLock_fieldAccessorTable
7641           .ensureFieldAccessorsInitialized(
7642               org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.Builder.class);
7643     }
7644 
7645     public static com.google.protobuf.Parser<ReplicationLock> PARSER =
7646         new com.google.protobuf.AbstractParser<ReplicationLock>() {
7647       public ReplicationLock parsePartialFrom(
7648           com.google.protobuf.CodedInputStream input,
7649           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7650           throws com.google.protobuf.InvalidProtocolBufferException {
7651         return new ReplicationLock(input, extensionRegistry);
7652       }
7653     };
7654 
7655     @java.lang.Override
getParserForType()7656     public com.google.protobuf.Parser<ReplicationLock> getParserForType() {
7657       return PARSER;
7658     }
7659 
7660     private int bitField0_;
7661     // required string lock_owner = 1;
7662     public static final int LOCK_OWNER_FIELD_NUMBER = 1;
7663     private java.lang.Object lockOwner_;
7664     /**
7665      * <code>required string lock_owner = 1;</code>
7666      */
hasLockOwner()7667     public boolean hasLockOwner() {
7668       return ((bitField0_ & 0x00000001) == 0x00000001);
7669     }
7670     /**
7671      * <code>required string lock_owner = 1;</code>
7672      */
getLockOwner()7673     public java.lang.String getLockOwner() {
7674       java.lang.Object ref = lockOwner_;
7675       if (ref instanceof java.lang.String) {
7676         return (java.lang.String) ref;
7677       } else {
7678         com.google.protobuf.ByteString bs =
7679             (com.google.protobuf.ByteString) ref;
7680         java.lang.String s = bs.toStringUtf8();
7681         if (bs.isValidUtf8()) {
7682           lockOwner_ = s;
7683         }
7684         return s;
7685       }
7686     }
7687     /**
7688      * <code>required string lock_owner = 1;</code>
7689      */
7690     public com.google.protobuf.ByteString
getLockOwnerBytes()7691         getLockOwnerBytes() {
7692       java.lang.Object ref = lockOwner_;
7693       if (ref instanceof java.lang.String) {
7694         com.google.protobuf.ByteString b =
7695             com.google.protobuf.ByteString.copyFromUtf8(
7696                 (java.lang.String) ref);
7697         lockOwner_ = b;
7698         return b;
7699       } else {
7700         return (com.google.protobuf.ByteString) ref;
7701       }
7702     }
7703 
initFields()7704     private void initFields() {
7705       lockOwner_ = "";
7706     }
7707     private byte memoizedIsInitialized = -1;
isInitialized()7708     public final boolean isInitialized() {
7709       byte isInitialized = memoizedIsInitialized;
7710       if (isInitialized != -1) return isInitialized == 1;
7711 
7712       if (!hasLockOwner()) {
7713         memoizedIsInitialized = 0;
7714         return false;
7715       }
7716       memoizedIsInitialized = 1;
7717       return true;
7718     }
7719 
writeTo(com.google.protobuf.CodedOutputStream output)7720     public void writeTo(com.google.protobuf.CodedOutputStream output)
7721                         throws java.io.IOException {
7722       getSerializedSize();
7723       if (((bitField0_ & 0x00000001) == 0x00000001)) {
7724         output.writeBytes(1, getLockOwnerBytes());
7725       }
7726       getUnknownFields().writeTo(output);
7727     }
7728 
7729     private int memoizedSerializedSize = -1;
getSerializedSize()7730     public int getSerializedSize() {
7731       int size = memoizedSerializedSize;
7732       if (size != -1) return size;
7733 
7734       size = 0;
7735       if (((bitField0_ & 0x00000001) == 0x00000001)) {
7736         size += com.google.protobuf.CodedOutputStream
7737           .computeBytesSize(1, getLockOwnerBytes());
7738       }
7739       size += getUnknownFields().getSerializedSize();
7740       memoizedSerializedSize = size;
7741       return size;
7742     }
7743 
7744     private static final long serialVersionUID = 0L;
7745     @java.lang.Override
writeReplace()7746     protected java.lang.Object writeReplace()
7747         throws java.io.ObjectStreamException {
7748       return super.writeReplace();
7749     }
7750 
7751     @java.lang.Override
equals(final java.lang.Object obj)7752     public boolean equals(final java.lang.Object obj) {
7753       if (obj == this) {
7754        return true;
7755       }
7756       if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock)) {
7757         return super.equals(obj);
7758       }
7759       org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock) obj;
7760 
7761       boolean result = true;
7762       result = result && (hasLockOwner() == other.hasLockOwner());
7763       if (hasLockOwner()) {
7764         result = result && getLockOwner()
7765             .equals(other.getLockOwner());
7766       }
7767       result = result &&
7768           getUnknownFields().equals(other.getUnknownFields());
7769       return result;
7770     }
7771 
7772     private int memoizedHashCode = 0;
7773     @java.lang.Override
hashCode()7774     public int hashCode() {
7775       if (memoizedHashCode != 0) {
7776         return memoizedHashCode;
7777       }
7778       int hash = 41;
7779       hash = (19 * hash) + getDescriptorForType().hashCode();
7780       if (hasLockOwner()) {
7781         hash = (37 * hash) + LOCK_OWNER_FIELD_NUMBER;
7782         hash = (53 * hash) + getLockOwner().hashCode();
7783       }
7784       hash = (29 * hash) + getUnknownFields().hashCode();
7785       memoizedHashCode = hash;
7786       return hash;
7787     }
7788 
parseFrom( com.google.protobuf.ByteString data)7789     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom(
7790         com.google.protobuf.ByteString data)
7791         throws com.google.protobuf.InvalidProtocolBufferException {
7792       return PARSER.parseFrom(data);
7793     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7794     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom(
7795         com.google.protobuf.ByteString data,
7796         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7797         throws com.google.protobuf.InvalidProtocolBufferException {
7798       return PARSER.parseFrom(data, extensionRegistry);
7799     }
parseFrom(byte[] data)7800     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom(byte[] data)
7801         throws com.google.protobuf.InvalidProtocolBufferException {
7802       return PARSER.parseFrom(data);
7803     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7804     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom(
7805         byte[] data,
7806         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7807         throws com.google.protobuf.InvalidProtocolBufferException {
7808       return PARSER.parseFrom(data, extensionRegistry);
7809     }
parseFrom(java.io.InputStream input)7810     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom(java.io.InputStream input)
7811         throws java.io.IOException {
7812       return PARSER.parseFrom(input);
7813     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7814     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom(
7815         java.io.InputStream input,
7816         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7817         throws java.io.IOException {
7818       return PARSER.parseFrom(input, extensionRegistry);
7819     }
parseDelimitedFrom(java.io.InputStream input)7820     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseDelimitedFrom(java.io.InputStream input)
7821         throws java.io.IOException {
7822       return PARSER.parseDelimitedFrom(input);
7823     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7824     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseDelimitedFrom(
7825         java.io.InputStream input,
7826         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7827         throws java.io.IOException {
7828       return PARSER.parseDelimitedFrom(input, extensionRegistry);
7829     }
parseFrom( com.google.protobuf.CodedInputStream input)7830     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom(
7831         com.google.protobuf.CodedInputStream input)
7832         throws java.io.IOException {
7833       return PARSER.parseFrom(input);
7834     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)7835     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parseFrom(
7836         com.google.protobuf.CodedInputStream input,
7837         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
7838         throws java.io.IOException {
7839       return PARSER.parseFrom(input, extensionRegistry);
7840     }
7841 
newBuilder()7842     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()7843     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock prototype)7844     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock prototype) {
7845       return newBuilder().mergeFrom(prototype);
7846     }
toBuilder()7847     public Builder toBuilder() { return newBuilder(this); }
7848 
7849     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)7850     protected Builder newBuilderForType(
7851         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
7852       Builder builder = new Builder(parent);
7853       return builder;
7854     }
    /**
     * Protobuf type {@code ReplicationLock}
     *
     * <pre>
     **
     * Used by replication. Used to lock a region server during failover.
     * </pre>
     */
    public static final class Builder extends
        com.google.protobuf.GeneratedMessage.Builder<Builder>
       implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLockOrBuilder {
      // Descriptor for the ReplicationLock message type.
      public static final com.google.protobuf.Descriptors.Descriptor
          getDescriptor() {
        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationLock_descriptor;
      }

      // Maps the generated accessors onto descriptor fields (reflection support).
      protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
          internalGetFieldAccessorTable() {
        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationLock_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.Builder.class);
      }

      // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.newBuilder()
      private Builder() {
        maybeForceBuilderInitialization();
      }

      private Builder(
          com.google.protobuf.GeneratedMessage.BuilderParent parent) {
        super(parent);
        maybeForceBuilderInitialization();
      }
      // This message has no sub-message fields, so there are no nested
      // field builders to eagerly initialize; the body is intentionally empty.
      private void maybeForceBuilderInitialization() {
        if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
        }
      }
      private static Builder create() {
        return new Builder();
      }

      // Resets lock_owner to its default ("") and clears its has-bit.
      public Builder clear() {
        super.clear();
        lockOwner_ = "";
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }

      public Builder clone() {
        return create().mergeFrom(buildPartial());
      }

      public com.google.protobuf.Descriptors.Descriptor
          getDescriptorForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_ReplicationLock_descriptor;
      }

      public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock getDefaultInstanceForType() {
        return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.getDefaultInstance();
      }

      // Builds the message; throws if the required lock_owner field is unset.
      public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock build() {
        org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock result = buildPartial();
        if (!result.isInitialized()) {
          throw newUninitializedMessageException(result);
        }
        return result;
      }

      // Builds without the required-field check, copying the has-bit and value.
      public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock buildPartial() {
        org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock(this);
        int from_bitField0_ = bitField0_;
        int to_bitField0_ = 0;
        if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
          to_bitField0_ |= 0x00000001;
        }
        result.lockOwner_ = lockOwner_;
        result.bitField0_ = to_bitField0_;
        onBuilt();
        return result;
      }

      // Dispatches to the typed overload when possible; otherwise merges
      // reflectively via the superclass.
      public Builder mergeFrom(com.google.protobuf.Message other) {
        if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock) {
          return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock)other);
        } else {
          super.mergeFrom(other);
          return this;
        }
      }

      // Copies lock_owner (only if set on other) plus any unknown fields.
      public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock other) {
        if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock.getDefaultInstance()) return this;
        if (other.hasLockOwner()) {
          bitField0_ |= 0x00000001;
          lockOwner_ = other.lockOwner_;
          onChanged();
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }

      // lock_owner is the message's only required field.
      public final boolean isInitialized() {
        if (!hasLockOwner()) {

          return false;
        }
        return true;
      }

      // Parses from the stream; on parse failure, still merges whatever
      // partial message was read before rethrowing.
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock parsedMessage = null;
        try {
          parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.ReplicationLock) e.getUnfinishedMessage();
          throw e;
        } finally {
          if (parsedMessage != null) {
            mergeFrom(parsedMessage);
          }
        }
        return this;
      }
      private int bitField0_;

      // required string lock_owner = 1;
      // Holds either a String or a ByteString; converted and memoized lazily
      // by the accessors below.
      private java.lang.Object lockOwner_ = "";
      /**
       * <code>required string lock_owner = 1;</code>
       */
      public boolean hasLockOwner() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      /**
       * <code>required string lock_owner = 1;</code>
       */
      public java.lang.String getLockOwner() {
        java.lang.Object ref = lockOwner_;
        if (!(ref instanceof java.lang.String)) {
          // Decode the cached ByteString and memoize the String form.
          java.lang.String s = ((com.google.protobuf.ByteString) ref)
              .toStringUtf8();
          lockOwner_ = s;
          return s;
        } else {
          return (java.lang.String) ref;
        }
      }
      /**
       * <code>required string lock_owner = 1;</code>
       */
      public com.google.protobuf.ByteString
          getLockOwnerBytes() {
        java.lang.Object ref = lockOwner_;
        if (ref instanceof String) {
          // Encode the cached String and memoize the ByteString form.
          com.google.protobuf.ByteString b =
              com.google.protobuf.ByteString.copyFromUtf8(
                  (java.lang.String) ref);
          lockOwner_ = b;
          return b;
        } else {
          return (com.google.protobuf.ByteString) ref;
        }
      }
      /**
       * <code>required string lock_owner = 1;</code>
       */
      public Builder setLockOwner(
          java.lang.String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        lockOwner_ = value;
        onChanged();
        return this;
      }
      /**
       * <code>required string lock_owner = 1;</code>
       */
      public Builder clearLockOwner() {
        bitField0_ = (bitField0_ & ~0x00000001);
        lockOwner_ = getDefaultInstance().getLockOwner();
        onChanged();
        return this;
      }
      /**
       * <code>required string lock_owner = 1;</code>
       */
      public Builder setLockOwnerBytes(
          com.google.protobuf.ByteString value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
        lockOwner_ = value;
        onChanged();
        return this;
      }

      // @@protoc_insertion_point(builder_scope:ReplicationLock)
    }
8060 
    // Eagerly creates and initializes the singleton default instance.
    static {
      defaultInstance = new ReplicationLock(true);
      defaultInstance.initFields();
    }
8065 
8066     // @@protoc_insertion_point(class_scope:ReplicationLock)
8067   }
8068 
  /**
   * Accessor interface implemented by both {@code TableLock} and its
   * {@code Builder}: has-bit checks plus getters for every declared field.
   * All six fields are optional in the proto definition.
   */
  public interface TableLockOrBuilder
      extends com.google.protobuf.MessageOrBuilder {

    // optional .TableName table_name = 1;
    /**
     * <code>optional .TableName table_name = 1;</code>
     */
    boolean hasTableName();
    /**
     * <code>optional .TableName table_name = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName();
    /**
     * <code>optional .TableName table_name = 1;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder();

    // optional .ServerName lock_owner = 2;
    /**
     * <code>optional .ServerName lock_owner = 2;</code>
     */
    boolean hasLockOwner();
    /**
     * <code>optional .ServerName lock_owner = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getLockOwner();
    /**
     * <code>optional .ServerName lock_owner = 2;</code>
     */
    org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getLockOwnerOrBuilder();

    // optional int64 thread_id = 3;
    /**
     * <code>optional int64 thread_id = 3;</code>
     */
    boolean hasThreadId();
    /**
     * <code>optional int64 thread_id = 3;</code>
     */
    long getThreadId();

    // optional bool is_shared = 4;
    /**
     * <code>optional bool is_shared = 4;</code>
     */
    boolean hasIsShared();
    /**
     * <code>optional bool is_shared = 4;</code>
     */
    boolean getIsShared();

    // optional string purpose = 5;
    /**
     * <code>optional string purpose = 5;</code>
     */
    boolean hasPurpose();
    /**
     * <code>optional string purpose = 5;</code>
     */
    java.lang.String getPurpose();
    /**
     * <code>optional string purpose = 5;</code>
     */
    com.google.protobuf.ByteString
        getPurposeBytes();

    // optional int64 create_time = 6;
    /**
     * <code>optional int64 create_time = 6;</code>
     */
    boolean hasCreateTime();
    /**
     * <code>optional int64 create_time = 6;</code>
     */
    long getCreateTime();
  }
8145   /**
8146    * Protobuf type {@code TableLock}
8147    *
8148    * <pre>
8149    **
8150    * Metadata associated with a table lock in zookeeper
8151    * </pre>
8152    */
8153   public static final class TableLock extends
8154       com.google.protobuf.GeneratedMessage
8155       implements TableLockOrBuilder {
    // Use TableLock.newBuilder() to construct.
    private TableLock(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
      super(builder);
      this.unknownFields = builder.getUnknownFields();
    }
    // noInit constructor: used only for the default instance; fields are left
    // unset and unknownFields is the shared empty set.
    private TableLock(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
8162 
    // Singleton default instance (being static final, it is assigned in the
    // class's static initializer, which lies beyond this excerpt).
    private static final TableLock defaultInstance;
    public static TableLock getDefaultInstance() {
      return defaultInstance;
    }

    public TableLock getDefaultInstanceForType() {
      return defaultInstance;
    }
8171 
    // Fields not recognized during parsing; preserved so they round-trip on
    // reserialization.
    private final com.google.protobuf.UnknownFieldSet unknownFields;
    @java.lang.Override
    public final com.google.protobuf.UnknownFieldSet
        getUnknownFields() {
      return this.unknownFields;
    }
    // Wire-format parsing constructor: reads tag/value pairs until end of
    // stream, recording unrecognized fields into unknownFields.
    // (Tag = field_number << 3 | wire_type, hence case values 10, 18, 24, ...)
    private TableLock(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      initFields();
      int mutable_bitField0_ = 0;
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder();
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // Tag 0 marks end of input.
              done = true;
              break;
            default: {
              // Unrecognized field: preserve it; stop if it cannot be parsed
              // or skipped (e.g. an end-group tag).
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
            case 10: {
              // Field 1 (table_name), length-delimited: if the field was
              // already seen, merge the new value into the previous one.
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder subBuilder = null;
              if (((bitField0_ & 0x00000001) == 0x00000001)) {
                subBuilder = tableName_.toBuilder();
              }
              tableName_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(tableName_);
                tableName_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000001;
              break;
            }
            case 18: {
              // Field 2 (lock_owner), length-delimited: same merge-on-repeat
              // handling as table_name.
              org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder subBuilder = null;
              if (((bitField0_ & 0x00000002) == 0x00000002)) {
                subBuilder = lockOwner_.toBuilder();
              }
              lockOwner_ = input.readMessage(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.PARSER, extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(lockOwner_);
                lockOwner_ = subBuilder.buildPartial();
              }
              bitField0_ |= 0x00000002;
              break;
            }
            case 24: {
              // Field 3 (thread_id), varint.
              bitField0_ |= 0x00000004;
              threadId_ = input.readInt64();
              break;
            }
            case 32: {
              // Field 4 (is_shared), varint bool.
              bitField0_ |= 0x00000008;
              isShared_ = input.readBool();
              break;
            }
            case 42: {
              // Field 5 (purpose): stored as raw bytes, decoded to a String
              // lazily by getPurpose().
              bitField0_ |= 0x00000010;
              purpose_ = input.readBytes();
              break;
            }
            case 48: {
              // Field 6 (create_time), varint.
              bitField0_ |= 0x00000020;
              createTime_ = input.readInt64();
              break;
            }
          }
        }
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(this);
      } catch (java.io.IOException e) {
        throw new com.google.protobuf.InvalidProtocolBufferException(
            e.getMessage()).setUnfinishedMessage(this);
      } finally {
        // Always attach whatever unknown fields were collected, even when
        // parsing failed partway through.
        this.unknownFields = unknownFields.build();
        makeExtensionsImmutable();
      }
    }
    // Descriptor for the TableLock message type.
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_TableLock_descriptor;
    }

    // Maps the generated accessors onto descriptor fields (reflection support).
    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_TableLock_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock.Builder.class);
    }
8270 
    // Shared parser; each call delegates to the wire-format parsing
    // constructor. NOTE(review): generated as a public mutable static field —
    // a known quirk of this protobuf generator version, not to be "fixed" by
    // hand in generated code.
    public static com.google.protobuf.Parser<TableLock> PARSER =
        new com.google.protobuf.AbstractParser<TableLock>() {
      public TableLock parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        return new TableLock(input, extensionRegistry);
      }
    };

    @java.lang.Override
    public com.google.protobuf.Parser<TableLock> getParserForType() {
      return PARSER;
    }
8285 
    // Bit i of bitField0_ records whether field i+1 was explicitly set.
    private int bitField0_;
    // optional .TableName table_name = 1;
    public static final int TABLE_NAME_FIELD_NUMBER = 1;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_;
    /**
     * <code>optional .TableName table_name = 1;</code>
     */
    public boolean hasTableName() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    /**
     * <code>optional .TableName table_name = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
      return tableName_;
    }
    /**
     * <code>optional .TableName table_name = 1;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
      return tableName_;
    }

    // optional .ServerName lock_owner = 2;
    public static final int LOCK_OWNER_FIELD_NUMBER = 2;
    private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName lockOwner_;
    /**
     * <code>optional .ServerName lock_owner = 2;</code>
     */
    public boolean hasLockOwner() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    /**
     * <code>optional .ServerName lock_owner = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getLockOwner() {
      return lockOwner_;
    }
    /**
     * <code>optional .ServerName lock_owner = 2;</code>
     */
    public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getLockOwnerOrBuilder() {
      return lockOwner_;
    }
8330 
    // optional int64 thread_id = 3;
    public static final int THREAD_ID_FIELD_NUMBER = 3;
    private long threadId_;
    /**
     * <code>optional int64 thread_id = 3;</code>
     */
    public boolean hasThreadId() {
      return ((bitField0_ & 0x00000004) == 0x00000004);
    }
    /**
     * <code>optional int64 thread_id = 3;</code>
     */
    public long getThreadId() {
      return threadId_;
    }

    // optional bool is_shared = 4;
    public static final int IS_SHARED_FIELD_NUMBER = 4;
    private boolean isShared_;
    /**
     * <code>optional bool is_shared = 4;</code>
     */
    public boolean hasIsShared() {
      return ((bitField0_ & 0x00000008) == 0x00000008);
    }
    /**
     * <code>optional bool is_shared = 4;</code>
     */
    public boolean getIsShared() {
      return isShared_;
    }
8362 
    // optional string purpose = 5;
    public static final int PURPOSE_FIELD_NUMBER = 5;
    // Holds either a String or a ByteString; converted lazily by the two
    // accessors below.
    private java.lang.Object purpose_;
    /**
     * <code>optional string purpose = 5;</code>
     */
    public boolean hasPurpose() {
      return ((bitField0_ & 0x00000010) == 0x00000010);
    }
    /**
     * <code>optional string purpose = 5;</code>
     */
    public java.lang.String getPurpose() {
      java.lang.Object ref = purpose_;
      if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
      } else {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Only cache the decoded String when the bytes were valid UTF-8, so
        // getPurposeBytes() can still return the original raw bytes otherwise.
        if (bs.isValidUtf8()) {
          purpose_ = s;
        }
        return s;
      }
    }
    /**
     * <code>optional string purpose = 5;</code>
     */
    public com.google.protobuf.ByteString
        getPurposeBytes() {
      java.lang.Object ref = purpose_;
      if (ref instanceof java.lang.String) {
        // Encode the cached String and memoize the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        purpose_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
8405 
    // optional int64 create_time = 6;
    public static final int CREATE_TIME_FIELD_NUMBER = 6;
    private long createTime_;
    /**
     * <code>optional int64 create_time = 6;</code>
     */
    public boolean hasCreateTime() {
      return ((bitField0_ & 0x00000020) == 0x00000020);
    }
    /**
     * <code>optional int64 create_time = 6;</code>
     */
    public long getCreateTime() {
      return createTime_;
    }
8421 
    // Sets every field to its proto default; called by the parsing
    // constructor before any wire data is read.
    private void initFields() {
      tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
      lockOwner_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
      threadId_ = 0L;
      isShared_ = false;
      purpose_ = "";
      createTime_ = 0L;
    }
    // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
    private byte memoizedIsInitialized = -1;
    public final boolean isInitialized() {
      byte isInitialized = memoizedIsInitialized;
      if (isInitialized != -1) return isInitialized == 1;

      // TableLock itself has no required fields; only nested messages, when
      // present, must themselves be initialized.
      if (hasTableName()) {
        if (!getTableName().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      if (hasLockOwner()) {
        if (!getLockOwner().isInitialized()) {
          memoizedIsInitialized = 0;
          return false;
        }
      }
      memoizedIsInitialized = 1;
      return true;
    }
8450 
    // Serializes only the fields whose has-bit is set, in field-number order,
    // followed by any unknown fields.
    public void writeTo(com.google.protobuf.CodedOutputStream output)
                        throws java.io.IOException {
      // Called for its side effect of priming memoized sizes before writing.
      getSerializedSize();
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        output.writeMessage(1, tableName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        output.writeMessage(2, lockOwner_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        output.writeInt64(3, threadId_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        output.writeBool(4, isShared_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        output.writeBytes(5, getPurposeBytes());
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        output.writeInt64(6, createTime_);
      }
      getUnknownFields().writeTo(output);
    }
8474 
    // Memoized wire size: -1 = not yet computed (safe because the message is
    // immutable once constructed).
    private int memoizedSerializedSize = -1;
    public int getSerializedSize() {
      int size = memoizedSerializedSize;
      if (size != -1) return size;

      // Sum tag+value sizes of each set field, mirroring writeTo() exactly.
      size = 0;
      if (((bitField0_ & 0x00000001) == 0x00000001)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, tableName_);
      }
      if (((bitField0_ & 0x00000002) == 0x00000002)) {
        size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(2, lockOwner_);
      }
      if (((bitField0_ & 0x00000004) == 0x00000004)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(3, threadId_);
      }
      if (((bitField0_ & 0x00000008) == 0x00000008)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBoolSize(4, isShared_);
      }
      if (((bitField0_ & 0x00000010) == 0x00000010)) {
        size += com.google.protobuf.CodedOutputStream
          .computeBytesSize(5, getPurposeBytes());
      }
      if (((bitField0_ & 0x00000020) == 0x00000020)) {
        size += com.google.protobuf.CodedOutputStream
          .computeInt64Size(6, createTime_);
      }
      size += getUnknownFields().getSerializedSize();
      memoizedSerializedSize = size;
      return size;
    }
8509 
    private static final long serialVersionUID = 0L;
    // Java-serialization hook; delegates to GeneratedMessage's replacement.
    @java.lang.Override
    protected java.lang.Object writeReplace()
        throws java.io.ObjectStreamException {
      return super.writeReplace();
    }
8516 
    // Field-by-field equality: for each field, presence must match and, when
    // present, values must be equal; unknown fields are compared too.
    @java.lang.Override
    public boolean equals(final java.lang.Object obj) {
      if (obj == this) {
       return true;
      }
      if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock)) {
        // Defer to the superclass for non-TableLock messages.
        return super.equals(obj);
      }
      org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock other = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock) obj;

      boolean result = true;
      result = result && (hasTableName() == other.hasTableName());
      if (hasTableName()) {
        result = result && getTableName()
            .equals(other.getTableName());
      }
      result = result && (hasLockOwner() == other.hasLockOwner());
      if (hasLockOwner()) {
        result = result && getLockOwner()
            .equals(other.getLockOwner());
      }
      result = result && (hasThreadId() == other.hasThreadId());
      if (hasThreadId()) {
        result = result && (getThreadId()
            == other.getThreadId());
      }
      result = result && (hasIsShared() == other.hasIsShared());
      if (hasIsShared()) {
        result = result && (getIsShared()
            == other.getIsShared());
      }
      result = result && (hasPurpose() == other.hasPurpose());
      if (hasPurpose()) {
        result = result && getPurpose()
            .equals(other.getPurpose());
      }
      result = result && (hasCreateTime() == other.hasCreateTime());
      if (hasCreateTime()) {
        result = result && (getCreateTime()
            == other.getCreateTime());
      }
      result = result &&
          getUnknownFields().equals(other.getUnknownFields());
      return result;
    }
8562 
    // Memoized hash; 0 doubles as the "not yet computed" sentinel.
    private int memoizedHashCode = 0;
    @java.lang.Override
    public int hashCode() {
      if (memoizedHashCode != 0) {
        return memoizedHashCode;
      }
      // Standard generated-protobuf hash: mix descriptor, then each set
      // field's number and value, then unknown fields.
      int hash = 41;
      hash = (19 * hash) + getDescriptorForType().hashCode();
      if (hasTableName()) {
        hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
        hash = (53 * hash) + getTableName().hashCode();
      }
      if (hasLockOwner()) {
        hash = (37 * hash) + LOCK_OWNER_FIELD_NUMBER;
        hash = (53 * hash) + getLockOwner().hashCode();
      }
      if (hasThreadId()) {
        hash = (37 * hash) + THREAD_ID_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getThreadId());
      }
      if (hasIsShared()) {
        hash = (37 * hash) + IS_SHARED_FIELD_NUMBER;
        hash = (53 * hash) + hashBoolean(getIsShared());
      }
      if (hasPurpose()) {
        hash = (37 * hash) + PURPOSE_FIELD_NUMBER;
        hash = (53 * hash) + getPurpose().hashCode();
      }
      if (hasCreateTime()) {
        hash = (37 * hash) + CREATE_TIME_FIELD_NUMBER;
        hash = (53 * hash) + hashLong(getCreateTime());
      }
      hash = (29 * hash) + getUnknownFields().hashCode();
      memoizedHashCode = hash;
      return hash;
    }
8599 
parseFrom( com.google.protobuf.ByteString data)8600     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock parseFrom(
8601         com.google.protobuf.ByteString data)
8602         throws com.google.protobuf.InvalidProtocolBufferException {
8603       return PARSER.parseFrom(data);
8604     }
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8605     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock parseFrom(
8606         com.google.protobuf.ByteString data,
8607         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8608         throws com.google.protobuf.InvalidProtocolBufferException {
8609       return PARSER.parseFrom(data, extensionRegistry);
8610     }
parseFrom(byte[] data)8611     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock parseFrom(byte[] data)
8612         throws com.google.protobuf.InvalidProtocolBufferException {
8613       return PARSER.parseFrom(data);
8614     }
parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8615     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock parseFrom(
8616         byte[] data,
8617         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8618         throws com.google.protobuf.InvalidProtocolBufferException {
8619       return PARSER.parseFrom(data, extensionRegistry);
8620     }
parseFrom(java.io.InputStream input)8621     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock parseFrom(java.io.InputStream input)
8622         throws java.io.IOException {
8623       return PARSER.parseFrom(input);
8624     }
parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8625     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock parseFrom(
8626         java.io.InputStream input,
8627         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8628         throws java.io.IOException {
8629       return PARSER.parseFrom(input, extensionRegistry);
8630     }
parseDelimitedFrom(java.io.InputStream input)8631     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock parseDelimitedFrom(java.io.InputStream input)
8632         throws java.io.IOException {
8633       return PARSER.parseDelimitedFrom(input);
8634     }
parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8635     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock parseDelimitedFrom(
8636         java.io.InputStream input,
8637         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8638         throws java.io.IOException {
8639       return PARSER.parseDelimitedFrom(input, extensionRegistry);
8640     }
parseFrom( com.google.protobuf.CodedInputStream input)8641     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock parseFrom(
8642         com.google.protobuf.CodedInputStream input)
8643         throws java.io.IOException {
8644       return PARSER.parseFrom(input);
8645     }
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8646     public static org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock parseFrom(
8647         com.google.protobuf.CodedInputStream input,
8648         com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8649         throws java.io.IOException {
8650       return PARSER.parseFrom(input, extensionRegistry);
8651     }
8652 
newBuilder()8653     public static Builder newBuilder() { return Builder.create(); }
newBuilderForType()8654     public Builder newBuilderForType() { return newBuilder(); }
newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock prototype)8655     public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock prototype) {
8656       return newBuilder().mergeFrom(prototype);
8657     }
toBuilder()8658     public Builder toBuilder() { return newBuilder(this); }
8659 
8660     @java.lang.Override
newBuilderForType( com.google.protobuf.GeneratedMessage.BuilderParent parent)8661     protected Builder newBuilderForType(
8662         com.google.protobuf.GeneratedMessage.BuilderParent parent) {
8663       Builder builder = new Builder(parent);
8664       return builder;
8665     }
8666     /**
8667      * Protobuf type {@code TableLock}
8668      *
8669      * <pre>
8670      **
8671      * Metadata associated with a table lock in zookeeper
8672      * </pre>
8673      */
8674     public static final class Builder extends
8675         com.google.protobuf.GeneratedMessage.Builder<Builder>
8676        implements org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLockOrBuilder {
8677       public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor()8678           getDescriptor() {
8679         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_TableLock_descriptor;
8680       }
8681 
8682       protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable()8683           internalGetFieldAccessorTable() {
8684         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_TableLock_fieldAccessorTable
8685             .ensureFieldAccessorsInitialized(
8686                 org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock.class, org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock.Builder.class);
8687       }
8688 
8689       // Construct using org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock.newBuilder()
Builder()8690       private Builder() {
8691         maybeForceBuilderInitialization();
8692       }
8693 
Builder( com.google.protobuf.GeneratedMessage.BuilderParent parent)8694       private Builder(
8695           com.google.protobuf.GeneratedMessage.BuilderParent parent) {
8696         super(parent);
8697         maybeForceBuilderInitialization();
8698       }
maybeForceBuilderInitialization()8699       private void maybeForceBuilderInitialization() {
8700         if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
8701           getTableNameFieldBuilder();
8702           getLockOwnerFieldBuilder();
8703         }
8704       }
create()8705       private static Builder create() {
8706         return new Builder();
8707       }
8708 
clear()8709       public Builder clear() {
8710         super.clear();
8711         if (tableNameBuilder_ == null) {
8712           tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
8713         } else {
8714           tableNameBuilder_.clear();
8715         }
8716         bitField0_ = (bitField0_ & ~0x00000001);
8717         if (lockOwnerBuilder_ == null) {
8718           lockOwner_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
8719         } else {
8720           lockOwnerBuilder_.clear();
8721         }
8722         bitField0_ = (bitField0_ & ~0x00000002);
8723         threadId_ = 0L;
8724         bitField0_ = (bitField0_ & ~0x00000004);
8725         isShared_ = false;
8726         bitField0_ = (bitField0_ & ~0x00000008);
8727         purpose_ = "";
8728         bitField0_ = (bitField0_ & ~0x00000010);
8729         createTime_ = 0L;
8730         bitField0_ = (bitField0_ & ~0x00000020);
8731         return this;
8732       }
8733 
clone()8734       public Builder clone() {
8735         return create().mergeFrom(buildPartial());
8736       }
8737 
8738       public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType()8739           getDescriptorForType() {
8740         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.internal_static_TableLock_descriptor;
8741       }
8742 
getDefaultInstanceForType()8743       public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock getDefaultInstanceForType() {
8744         return org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock.getDefaultInstance();
8745       }
8746 
build()8747       public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock build() {
8748         org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock result = buildPartial();
8749         if (!result.isInitialized()) {
8750           throw newUninitializedMessageException(result);
8751         }
8752         return result;
8753       }
8754 
buildPartial()8755       public org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock buildPartial() {
8756         org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock result = new org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock(this);
8757         int from_bitField0_ = bitField0_;
8758         int to_bitField0_ = 0;
8759         if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
8760           to_bitField0_ |= 0x00000001;
8761         }
8762         if (tableNameBuilder_ == null) {
8763           result.tableName_ = tableName_;
8764         } else {
8765           result.tableName_ = tableNameBuilder_.build();
8766         }
8767         if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
8768           to_bitField0_ |= 0x00000002;
8769         }
8770         if (lockOwnerBuilder_ == null) {
8771           result.lockOwner_ = lockOwner_;
8772         } else {
8773           result.lockOwner_ = lockOwnerBuilder_.build();
8774         }
8775         if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
8776           to_bitField0_ |= 0x00000004;
8777         }
8778         result.threadId_ = threadId_;
8779         if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
8780           to_bitField0_ |= 0x00000008;
8781         }
8782         result.isShared_ = isShared_;
8783         if (((from_bitField0_ & 0x00000010) == 0x00000010)) {
8784           to_bitField0_ |= 0x00000010;
8785         }
8786         result.purpose_ = purpose_;
8787         if (((from_bitField0_ & 0x00000020) == 0x00000020)) {
8788           to_bitField0_ |= 0x00000020;
8789         }
8790         result.createTime_ = createTime_;
8791         result.bitField0_ = to_bitField0_;
8792         onBuilt();
8793         return result;
8794       }
8795 
mergeFrom(com.google.protobuf.Message other)8796       public Builder mergeFrom(com.google.protobuf.Message other) {
8797         if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock) {
8798           return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock)other);
8799         } else {
8800           super.mergeFrom(other);
8801           return this;
8802         }
8803       }
8804 
mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock other)8805       public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock other) {
8806         if (other == org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock.getDefaultInstance()) return this;
8807         if (other.hasTableName()) {
8808           mergeTableName(other.getTableName());
8809         }
8810         if (other.hasLockOwner()) {
8811           mergeLockOwner(other.getLockOwner());
8812         }
8813         if (other.hasThreadId()) {
8814           setThreadId(other.getThreadId());
8815         }
8816         if (other.hasIsShared()) {
8817           setIsShared(other.getIsShared());
8818         }
8819         if (other.hasPurpose()) {
8820           bitField0_ |= 0x00000010;
8821           purpose_ = other.purpose_;
8822           onChanged();
8823         }
8824         if (other.hasCreateTime()) {
8825           setCreateTime(other.getCreateTime());
8826         }
8827         this.mergeUnknownFields(other.getUnknownFields());
8828         return this;
8829       }
8830 
isInitialized()8831       public final boolean isInitialized() {
8832         if (hasTableName()) {
8833           if (!getTableName().isInitialized()) {
8834 
8835             return false;
8836           }
8837         }
8838         if (hasLockOwner()) {
8839           if (!getLockOwner().isInitialized()) {
8840 
8841             return false;
8842           }
8843         }
8844         return true;
8845       }
8846 
mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)8847       public Builder mergeFrom(
8848           com.google.protobuf.CodedInputStream input,
8849           com.google.protobuf.ExtensionRegistryLite extensionRegistry)
8850           throws java.io.IOException {
8851         org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock parsedMessage = null;
8852         try {
8853           parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
8854         } catch (com.google.protobuf.InvalidProtocolBufferException e) {
8855           parsedMessage = (org.apache.hadoop.hbase.protobuf.generated.ZooKeeperProtos.TableLock) e.getUnfinishedMessage();
8856           throw e;
8857         } finally {
8858           if (parsedMessage != null) {
8859             mergeFrom(parsedMessage);
8860           }
8861         }
8862         return this;
8863       }
8864       private int bitField0_;
8865 
8866       // optional .TableName table_name = 1;
8867       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
8868       private com.google.protobuf.SingleFieldBuilder<
8869           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder> tableNameBuilder_;
8870       /**
8871        * <code>optional .TableName table_name = 1;</code>
8872        */
hasTableName()8873       public boolean hasTableName() {
8874         return ((bitField0_ & 0x00000001) == 0x00000001);
8875       }
8876       /**
8877        * <code>optional .TableName table_name = 1;</code>
8878        */
getTableName()8879       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName getTableName() {
8880         if (tableNameBuilder_ == null) {
8881           return tableName_;
8882         } else {
8883           return tableNameBuilder_.getMessage();
8884         }
8885       }
8886       /**
8887        * <code>optional .TableName table_name = 1;</code>
8888        */
setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value)8889       public Builder setTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
8890         if (tableNameBuilder_ == null) {
8891           if (value == null) {
8892             throw new NullPointerException();
8893           }
8894           tableName_ = value;
8895           onChanged();
8896         } else {
8897           tableNameBuilder_.setMessage(value);
8898         }
8899         bitField0_ |= 0x00000001;
8900         return this;
8901       }
8902       /**
8903        * <code>optional .TableName table_name = 1;</code>
8904        */
setTableName( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue)8905       public Builder setTableName(
8906           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder builderForValue) {
8907         if (tableNameBuilder_ == null) {
8908           tableName_ = builderForValue.build();
8909           onChanged();
8910         } else {
8911           tableNameBuilder_.setMessage(builderForValue.build());
8912         }
8913         bitField0_ |= 0x00000001;
8914         return this;
8915       }
8916       /**
8917        * <code>optional .TableName table_name = 1;</code>
8918        */
mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value)8919       public Builder mergeTableName(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName value) {
8920         if (tableNameBuilder_ == null) {
8921           if (((bitField0_ & 0x00000001) == 0x00000001) &&
8922               tableName_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance()) {
8923             tableName_ =
8924               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.newBuilder(tableName_).mergeFrom(value).buildPartial();
8925           } else {
8926             tableName_ = value;
8927           }
8928           onChanged();
8929         } else {
8930           tableNameBuilder_.mergeFrom(value);
8931         }
8932         bitField0_ |= 0x00000001;
8933         return this;
8934       }
8935       /**
8936        * <code>optional .TableName table_name = 1;</code>
8937        */
clearTableName()8938       public Builder clearTableName() {
8939         if (tableNameBuilder_ == null) {
8940           tableName_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.getDefaultInstance();
8941           onChanged();
8942         } else {
8943           tableNameBuilder_.clear();
8944         }
8945         bitField0_ = (bitField0_ & ~0x00000001);
8946         return this;
8947       }
8948       /**
8949        * <code>optional .TableName table_name = 1;</code>
8950        */
getTableNameBuilder()8951       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder getTableNameBuilder() {
8952         bitField0_ |= 0x00000001;
8953         onChanged();
8954         return getTableNameFieldBuilder().getBuilder();
8955       }
8956       /**
8957        * <code>optional .TableName table_name = 1;</code>
8958        */
getTableNameOrBuilder()8959       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder getTableNameOrBuilder() {
8960         if (tableNameBuilder_ != null) {
8961           return tableNameBuilder_.getMessageOrBuilder();
8962         } else {
8963           return tableName_;
8964         }
8965       }
8966       /**
8967        * <code>optional .TableName table_name = 1;</code>
8968        */
8969       private com.google.protobuf.SingleFieldBuilder<
8970           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>
getTableNameFieldBuilder()8971           getTableNameFieldBuilder() {
8972         if (tableNameBuilder_ == null) {
8973           tableNameBuilder_ = new com.google.protobuf.SingleFieldBuilder<
8974               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.TableNameOrBuilder>(
8975                   tableName_,
8976                   getParentForChildren(),
8977                   isClean());
8978           tableName_ = null;
8979         }
8980         return tableNameBuilder_;
8981       }
8982 
8983       // optional .ServerName lock_owner = 2;
8984       private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName lockOwner_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
8985       private com.google.protobuf.SingleFieldBuilder<
8986           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder> lockOwnerBuilder_;
8987       /**
8988        * <code>optional .ServerName lock_owner = 2;</code>
8989        */
hasLockOwner()8990       public boolean hasLockOwner() {
8991         return ((bitField0_ & 0x00000002) == 0x00000002);
8992       }
8993       /**
8994        * <code>optional .ServerName lock_owner = 2;</code>
8995        */
getLockOwner()8996       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName getLockOwner() {
8997         if (lockOwnerBuilder_ == null) {
8998           return lockOwner_;
8999         } else {
9000           return lockOwnerBuilder_.getMessage();
9001         }
9002       }
9003       /**
9004        * <code>optional .ServerName lock_owner = 2;</code>
9005        */
setLockOwner(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value)9006       public Builder setLockOwner(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
9007         if (lockOwnerBuilder_ == null) {
9008           if (value == null) {
9009             throw new NullPointerException();
9010           }
9011           lockOwner_ = value;
9012           onChanged();
9013         } else {
9014           lockOwnerBuilder_.setMessage(value);
9015         }
9016         bitField0_ |= 0x00000002;
9017         return this;
9018       }
9019       /**
9020        * <code>optional .ServerName lock_owner = 2;</code>
9021        */
setLockOwner( org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue)9022       public Builder setLockOwner(
9023           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder builderForValue) {
9024         if (lockOwnerBuilder_ == null) {
9025           lockOwner_ = builderForValue.build();
9026           onChanged();
9027         } else {
9028           lockOwnerBuilder_.setMessage(builderForValue.build());
9029         }
9030         bitField0_ |= 0x00000002;
9031         return this;
9032       }
9033       /**
9034        * <code>optional .ServerName lock_owner = 2;</code>
9035        */
mergeLockOwner(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value)9036       public Builder mergeLockOwner(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName value) {
9037         if (lockOwnerBuilder_ == null) {
9038           if (((bitField0_ & 0x00000002) == 0x00000002) &&
9039               lockOwner_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance()) {
9040             lockOwner_ =
9041               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.newBuilder(lockOwner_).mergeFrom(value).buildPartial();
9042           } else {
9043             lockOwner_ = value;
9044           }
9045           onChanged();
9046         } else {
9047           lockOwnerBuilder_.mergeFrom(value);
9048         }
9049         bitField0_ |= 0x00000002;
9050         return this;
9051       }
9052       /**
9053        * <code>optional .ServerName lock_owner = 2;</code>
9054        */
clearLockOwner()9055       public Builder clearLockOwner() {
9056         if (lockOwnerBuilder_ == null) {
9057           lockOwner_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.getDefaultInstance();
9058           onChanged();
9059         } else {
9060           lockOwnerBuilder_.clear();
9061         }
9062         bitField0_ = (bitField0_ & ~0x00000002);
9063         return this;
9064       }
9065       /**
9066        * <code>optional .ServerName lock_owner = 2;</code>
9067        */
getLockOwnerBuilder()9068       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder getLockOwnerBuilder() {
9069         bitField0_ |= 0x00000002;
9070         onChanged();
9071         return getLockOwnerFieldBuilder().getBuilder();
9072       }
9073       /**
9074        * <code>optional .ServerName lock_owner = 2;</code>
9075        */
getLockOwnerOrBuilder()9076       public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder getLockOwnerOrBuilder() {
9077         if (lockOwnerBuilder_ != null) {
9078           return lockOwnerBuilder_.getMessageOrBuilder();
9079         } else {
9080           return lockOwner_;
9081         }
9082       }
9083       /**
9084        * <code>optional .ServerName lock_owner = 2;</code>
9085        */
9086       private com.google.protobuf.SingleFieldBuilder<
9087           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>
getLockOwnerFieldBuilder()9088           getLockOwnerFieldBuilder() {
9089         if (lockOwnerBuilder_ == null) {
9090           lockOwnerBuilder_ = new com.google.protobuf.SingleFieldBuilder<
9091               org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerName.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.ServerNameOrBuilder>(
9092                   lockOwner_,
9093                   getParentForChildren(),
9094                   isClean());
9095           lockOwner_ = null;
9096         }
9097         return lockOwnerBuilder_;
9098       }
9099 
9100       // optional int64 thread_id = 3;
9101       private long threadId_ ;
9102       /**
9103        * <code>optional int64 thread_id = 3;</code>
9104        */
hasThreadId()9105       public boolean hasThreadId() {
9106         return ((bitField0_ & 0x00000004) == 0x00000004);
9107       }
9108       /**
9109        * <code>optional int64 thread_id = 3;</code>
9110        */
getThreadId()9111       public long getThreadId() {
9112         return threadId_;
9113       }
9114       /**
9115        * <code>optional int64 thread_id = 3;</code>
9116        */
setThreadId(long value)9117       public Builder setThreadId(long value) {
9118         bitField0_ |= 0x00000004;
9119         threadId_ = value;
9120         onChanged();
9121         return this;
9122       }
9123       /**
9124        * <code>optional int64 thread_id = 3;</code>
9125        */
clearThreadId()9126       public Builder clearThreadId() {
9127         bitField0_ = (bitField0_ & ~0x00000004);
9128         threadId_ = 0L;
9129         onChanged();
9130         return this;
9131       }
9132 
9133       // optional bool is_shared = 4;
9134       private boolean isShared_ ;
9135       /**
9136        * <code>optional bool is_shared = 4;</code>
9137        */
hasIsShared()9138       public boolean hasIsShared() {
9139         return ((bitField0_ & 0x00000008) == 0x00000008);
9140       }
9141       /**
9142        * <code>optional bool is_shared = 4;</code>
9143        */
getIsShared()9144       public boolean getIsShared() {
9145         return isShared_;
9146       }
9147       /**
9148        * <code>optional bool is_shared = 4;</code>
9149        */
setIsShared(boolean value)9150       public Builder setIsShared(boolean value) {
9151         bitField0_ |= 0x00000008;
9152         isShared_ = value;
9153         onChanged();
9154         return this;
9155       }
9156       /**
9157        * <code>optional bool is_shared = 4;</code>
9158        */
clearIsShared()9159       public Builder clearIsShared() {
9160         bitField0_ = (bitField0_ & ~0x00000008);
9161         isShared_ = false;
9162         onChanged();
9163         return this;
9164       }
9165 
9166       // optional string purpose = 5;
9167       private java.lang.Object purpose_ = "";
9168       /**
9169        * <code>optional string purpose = 5;</code>
9170        */
hasPurpose()9171       public boolean hasPurpose() {
9172         return ((bitField0_ & 0x00000010) == 0x00000010);
9173       }
9174       /**
9175        * <code>optional string purpose = 5;</code>
9176        */
getPurpose()9177       public java.lang.String getPurpose() {
9178         java.lang.Object ref = purpose_;
9179         if (!(ref instanceof java.lang.String)) {
9180           java.lang.String s = ((com.google.protobuf.ByteString) ref)
9181               .toStringUtf8();
9182           purpose_ = s;
9183           return s;
9184         } else {
9185           return (java.lang.String) ref;
9186         }
9187       }
9188       /**
9189        * <code>optional string purpose = 5;</code>
9190        */
9191       public com.google.protobuf.ByteString
getPurposeBytes()9192           getPurposeBytes() {
9193         java.lang.Object ref = purpose_;
9194         if (ref instanceof String) {
9195           com.google.protobuf.ByteString b =
9196               com.google.protobuf.ByteString.copyFromUtf8(
9197                   (java.lang.String) ref);
9198           purpose_ = b;
9199           return b;
9200         } else {
9201           return (com.google.protobuf.ByteString) ref;
9202         }
9203       }
9204       /**
9205        * <code>optional string purpose = 5;</code>
9206        */
setPurpose( java.lang.String value)9207       public Builder setPurpose(
9208           java.lang.String value) {
9209         if (value == null) {
9210     throw new NullPointerException();
9211   }
9212   bitField0_ |= 0x00000010;
9213         purpose_ = value;
9214         onChanged();
9215         return this;
9216       }
9217       /**
9218        * <code>optional string purpose = 5;</code>
9219        */
clearPurpose()9220       public Builder clearPurpose() {
9221         bitField0_ = (bitField0_ & ~0x00000010);
9222         purpose_ = getDefaultInstance().getPurpose();
9223         onChanged();
9224         return this;
9225       }
9226       /**
9227        * <code>optional string purpose = 5;</code>
9228        */
setPurposeBytes( com.google.protobuf.ByteString value)9229       public Builder setPurposeBytes(
9230           com.google.protobuf.ByteString value) {
9231         if (value == null) {
9232     throw new NullPointerException();
9233   }
9234   bitField0_ |= 0x00000010;
9235         purpose_ = value;
9236         onChanged();
9237         return this;
9238       }
9239 
9240       // optional int64 create_time = 6;
9241       private long createTime_ ;
9242       /**
9243        * <code>optional int64 create_time = 6;</code>
9244        */
hasCreateTime()9245       public boolean hasCreateTime() {
9246         return ((bitField0_ & 0x00000020) == 0x00000020);
9247       }
9248       /**
9249        * <code>optional int64 create_time = 6;</code>
9250        */
getCreateTime()9251       public long getCreateTime() {
9252         return createTime_;
9253       }
9254       /**
9255        * <code>optional int64 create_time = 6;</code>
9256        */
setCreateTime(long value)9257       public Builder setCreateTime(long value) {
9258         bitField0_ |= 0x00000020;
9259         createTime_ = value;
9260         onChanged();
9261         return this;
9262       }
9263       /**
9264        * <code>optional int64 create_time = 6;</code>
9265        */
clearCreateTime()9266       public Builder clearCreateTime() {
9267         bitField0_ = (bitField0_ & ~0x00000020);
9268         createTime_ = 0L;
9269         onChanged();
9270         return this;
9271       }
9272 
9273       // @@protoc_insertion_point(builder_scope:TableLock)
9274     }
9275 
    // Eagerly creates the singleton default instance returned by
    // getDefaultInstance().
    static {
      defaultInstance = new TableLock(true);
      defaultInstance.initFields();
    }
9280 
9281     // @@protoc_insertion_point(class_scope:TableLock)
9282   }
9283 
  // Descriptor and reflective field-accessor tables for each message type
  // defined in ZooKeeper.proto; presumably populated by the file's static
  // descriptor initializer (outside this view) — confirm against it.
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_MetaRegionServer_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_MetaRegionServer_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_Master_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_Master_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ClusterUp_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ClusterUp_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_RegionTransition_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_RegionTransition_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_SplitLogTask_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_SplitLogTask_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_Table_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_Table_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ReplicationPeer_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ReplicationPeer_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ReplicationState_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ReplicationState_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ReplicationHLogPosition_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ReplicationHLogPosition_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_ReplicationLock_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_ReplicationLock_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_TableLock_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_TableLock_fieldAccessorTable;
9339 
9340   public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor()9341       getDescriptor() {
9342     return descriptor;
9343   }
9344   private static com.google.protobuf.Descriptors.FileDescriptor
9345       descriptor;
9346   static {
9347     java.lang.String[] descriptorData = {
9348       "\n\017ZooKeeper.proto\032\013HBase.proto\032\023ClusterS" +
9349       "tatus.proto\"g\n\020MetaRegionServer\022\033\n\006serve" +
9350       "r\030\001 \002(\0132\013.ServerName\022\023\n\013rpc_version\030\002 \001(" +
9351       "\r\022!\n\005state\030\003 \001(\0162\022.RegionState.State\"M\n\006" +
9352       "Master\022\033\n\006master\030\001 \002(\0132\013.ServerName\022\023\n\013r" +
9353       "pc_version\030\002 \001(\r\022\021\n\tinfo_port\030\003 \001(\r\"\037\n\tC" +
9354       "lusterUp\022\022\n\nstart_date\030\001 \002(\t\"\210\001\n\020RegionT" +
9355       "ransition\022\027\n\017event_type_code\030\001 \002(\r\022\023\n\013re" +
9356       "gion_name\030\002 \002(\014\022\023\n\013create_time\030\003 \002(\004\022 \n\013" +
9357       "server_name\030\004 \002(\0132\013.ServerName\022\017\n\007payloa",
9358       "d\030\005 \001(\014\"\214\002\n\014SplitLogTask\022\"\n\005state\030\001 \002(\0162" +
9359       "\023.SplitLogTask.State\022 \n\013server_name\030\002 \002(" +
9360       "\0132\013.ServerName\0221\n\004mode\030\003 \001(\0162\032.SplitLogT" +
9361       "ask.RecoveryMode:\007UNKNOWN\"C\n\005State\022\016\n\nUN" +
9362       "ASSIGNED\020\000\022\t\n\005OWNED\020\001\022\014\n\010RESIGNED\020\002\022\010\n\004D" +
9363       "ONE\020\003\022\007\n\003ERR\020\004\">\n\014RecoveryMode\022\013\n\007UNKNOW" +
9364       "N\020\000\022\021\n\rLOG_SPLITTING\020\001\022\016\n\nLOG_REPLAY\020\002\"n" +
9365       "\n\005Table\022$\n\005state\030\001 \002(\0162\014.Table.State:\007EN" +
9366       "ABLED\"?\n\005State\022\013\n\007ENABLED\020\000\022\014\n\010DISABLED\020" +
9367       "\001\022\r\n\tDISABLING\020\002\022\014\n\010ENABLING\020\003\"\215\001\n\017Repli",
9368       "cationPeer\022\022\n\nclusterkey\030\001 \002(\t\022\037\n\027replic" +
9369       "ationEndpointImpl\030\002 \001(\t\022\035\n\004data\030\003 \003(\0132\017." +
9370       "BytesBytesPair\022&\n\rconfiguration\030\004 \003(\0132\017." +
9371       "NameStringPair\"^\n\020ReplicationState\022&\n\005st" +
9372       "ate\030\001 \002(\0162\027.ReplicationState.State\"\"\n\005St" +
9373       "ate\022\013\n\007ENABLED\020\000\022\014\n\010DISABLED\020\001\"+\n\027Replic" +
9374       "ationHLogPosition\022\020\n\010position\030\001 \002(\003\"%\n\017R" +
9375       "eplicationLock\022\022\n\nlock_owner\030\001 \002(\t\"\230\001\n\tT" +
9376       "ableLock\022\036\n\ntable_name\030\001 \001(\0132\n.TableName" +
9377       "\022\037\n\nlock_owner\030\002 \001(\0132\013.ServerName\022\021\n\tthr",
9378       "ead_id\030\003 \001(\003\022\021\n\tis_shared\030\004 \001(\010\022\017\n\007purpo" +
9379       "se\030\005 \001(\t\022\023\n\013create_time\030\006 \001(\003BE\n*org.apa" +
9380       "che.hadoop.hbase.protobuf.generatedB\017Zoo" +
9381       "KeeperProtosH\001\210\001\001\240\001\001"
9382     };
9383     com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
9384       new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
9385         public com.google.protobuf.ExtensionRegistry assignDescriptors(
9386             com.google.protobuf.Descriptors.FileDescriptor root) {
9387           descriptor = root;
9388           internal_static_MetaRegionServer_descriptor =
9389             getDescriptor().getMessageTypes().get(0);
9390           internal_static_MetaRegionServer_fieldAccessorTable = new
9391             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
9392               internal_static_MetaRegionServer_descriptor,
9393               new java.lang.String[] { "Server", "RpcVersion", "State", });
9394           internal_static_Master_descriptor =
9395             getDescriptor().getMessageTypes().get(1);
9396           internal_static_Master_fieldAccessorTable = new
9397             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
9398               internal_static_Master_descriptor,
9399               new java.lang.String[] { "Master", "RpcVersion", "InfoPort", });
9400           internal_static_ClusterUp_descriptor =
9401             getDescriptor().getMessageTypes().get(2);
9402           internal_static_ClusterUp_fieldAccessorTable = new
9403             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
9404               internal_static_ClusterUp_descriptor,
9405               new java.lang.String[] { "StartDate", });
9406           internal_static_RegionTransition_descriptor =
9407             getDescriptor().getMessageTypes().get(3);
9408           internal_static_RegionTransition_fieldAccessorTable = new
9409             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
9410               internal_static_RegionTransition_descriptor,
9411               new java.lang.String[] { "EventTypeCode", "RegionName", "CreateTime", "ServerName", "Payload", });
9412           internal_static_SplitLogTask_descriptor =
9413             getDescriptor().getMessageTypes().get(4);
9414           internal_static_SplitLogTask_fieldAccessorTable = new
9415             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
9416               internal_static_SplitLogTask_descriptor,
9417               new java.lang.String[] { "State", "ServerName", "Mode", });
9418           internal_static_Table_descriptor =
9419             getDescriptor().getMessageTypes().get(5);
9420           internal_static_Table_fieldAccessorTable = new
9421             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
9422               internal_static_Table_descriptor,
9423               new java.lang.String[] { "State", });
9424           internal_static_ReplicationPeer_descriptor =
9425             getDescriptor().getMessageTypes().get(6);
9426           internal_static_ReplicationPeer_fieldAccessorTable = new
9427             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
9428               internal_static_ReplicationPeer_descriptor,
9429               new java.lang.String[] { "Clusterkey", "ReplicationEndpointImpl", "Data", "Configuration", });
9430           internal_static_ReplicationState_descriptor =
9431             getDescriptor().getMessageTypes().get(7);
9432           internal_static_ReplicationState_fieldAccessorTable = new
9433             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
9434               internal_static_ReplicationState_descriptor,
9435               new java.lang.String[] { "State", });
9436           internal_static_ReplicationHLogPosition_descriptor =
9437             getDescriptor().getMessageTypes().get(8);
9438           internal_static_ReplicationHLogPosition_fieldAccessorTable = new
9439             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
9440               internal_static_ReplicationHLogPosition_descriptor,
9441               new java.lang.String[] { "Position", });
9442           internal_static_ReplicationLock_descriptor =
9443             getDescriptor().getMessageTypes().get(9);
9444           internal_static_ReplicationLock_fieldAccessorTable = new
9445             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
9446               internal_static_ReplicationLock_descriptor,
9447               new java.lang.String[] { "LockOwner", });
9448           internal_static_TableLock_descriptor =
9449             getDescriptor().getMessageTypes().get(10);
9450           internal_static_TableLock_fieldAccessorTable = new
9451             com.google.protobuf.GeneratedMessage.FieldAccessorTable(
9452               internal_static_TableLock_descriptor,
9453               new java.lang.String[] { "TableName", "LockOwner", "ThreadId", "IsShared", "Purpose", "CreateTime", });
9454           return null;
9455         }
9456       };
9457     com.google.protobuf.Descriptors.FileDescriptor
internalBuildGeneratedFileFrom(descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(), org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.getDescriptor(), }, assigner)9458       .internalBuildGeneratedFileFrom(descriptorData,
9459         new com.google.protobuf.Descriptors.FileDescriptor[] {
9460           org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.getDescriptor(),
9461           org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.getDescriptor(),
9462         }, assigner);
9463   }
9464 
9465   // @@protoc_insertion_point(outer_class_scope)
9466 }
9467