001 // Generated by the protocol buffer compiler. DO NOT EDIT!
002 // source: IpcConnectionContext.proto
003
004 package org.apache.hadoop.ipc.protobuf;
005
006 public final class IpcConnectionContextProtos {
// Private constructor: this outer class is only a namespace for the
// generated message types and is never instantiated.
private IpcConnectionContextProtos() {}
// IpcConnectionContext.proto declares no extensions, so there is nothing
// to register; the method exists to satisfy the standard generated-code
// contract.
public static void registerAllExtensions(
    com.google.protobuf.ExtensionRegistry registry) {
}
/**
 * Accessor interface for {@code UserInformationProto}, implemented by both
 * the immutable message class and its Builder.  Both string fields are
 * optional; the {@code has*} methods report explicit presence.
 */
public interface UserInformationProtoOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // optional string effectiveUser = 1;
  boolean hasEffectiveUser();
  String getEffectiveUser();

  // optional string realUser = 2;
  boolean hasRealUser();
  String getRealUser();
}
/**
 * Protocol buffer message {@code UserInformationProto}: carries the
 * effective and real user names exchanged as part of the Hadoop IPC
 * connection context.  Generated by protoc — do not hand-edit logic;
 * comments here are review annotations only.
 */
public static final class UserInformationProto extends
    com.google.protobuf.GeneratedMessage
    implements UserInformationProtoOrBuilder {
  // Use UserInformationProto.newBuilder() to construct.
  private UserInformationProto(Builder builder) {
    super(builder);
  }
  // Used only to create the singleton default instance; its fields are
  // populated afterwards by initFields() in the static initializer below.
  private UserInformationProto(boolean noInit) {}

  private static final UserInformationProto defaultInstance;
  public static UserInformationProto getDefaultInstance() {
    return defaultInstance;
  }

  public UserInformationProto getDefaultInstanceForType() {
    return defaultInstance;
  }

  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.internal_static_UserInformationProto_descriptor;
  }

  // Backs the reflection-based field access used by GeneratedMessage;
  // the table is defined at the file (outer-class) level.
  protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.internal_static_UserInformationProto_fieldAccessorTable;
  }

  // Presence bits: bit 0 = effectiveUser, bit 1 = realUser.
  private int bitField0_;
  // optional string effectiveUser = 1;
  public static final int EFFECTIVEUSER_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; decoded lazily on first access.
  private java.lang.Object effectiveUser_;
  public boolean hasEffectiveUser() {
    return ((bitField0_ & 0x00000001) == 0x00000001);
  }
  public String getEffectiveUser() {
    java.lang.Object ref = effectiveUser_;
    if (ref instanceof String) {
      return (String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      String s = bs.toStringUtf8();
      // Cache the decoded String only when the bytes are valid UTF-8, so
      // that re-serialization of malformed input preserves the raw bytes.
      if (com.google.protobuf.Internal.isValidUtf8(bs)) {
        effectiveUser_ = s;
      }
      return s;
    }
  }
  // Returns (and caches) the UTF-8 encoded form used for serialization.
  private com.google.protobuf.ByteString getEffectiveUserBytes() {
    java.lang.Object ref = effectiveUser_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((String) ref);
      effectiveUser_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  // optional string realUser = 2;
  public static final int REALUSER_FIELD_NUMBER = 2;
  // Holds either a String or a ByteString; decoded lazily on first access.
  private java.lang.Object realUser_;
  public boolean hasRealUser() {
    return ((bitField0_ & 0x00000002) == 0x00000002);
  }
  public String getRealUser() {
    java.lang.Object ref = realUser_;
    if (ref instanceof String) {
      return (String) ref;
    } else {
      com.google.protobuf.ByteString bs =
          (com.google.protobuf.ByteString) ref;
      String s = bs.toStringUtf8();
      // Same lazy-caching rule as getEffectiveUser().
      if (com.google.protobuf.Internal.isValidUtf8(bs)) {
        realUser_ = s;
      }
      return s;
    }
  }
  private com.google.protobuf.ByteString getRealUserBytes() {
    java.lang.Object ref = realUser_;
    if (ref instanceof String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((String) ref);
      realUser_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  private void initFields() {
    effectiveUser_ = "";
    realUser_ = "";
  }
  // -1 = not computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized != -1) return isInitialized == 1;

    // Both fields are optional, so the message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  public void writeTo(com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    // Forces the size (and cached byte forms) to be computed first.
    getSerializedSize();
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      output.writeBytes(1, getEffectiveUserBytes());
    }
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
      output.writeBytes(2, getRealUserBytes());
    }
    getUnknownFields().writeTo(output);
  }

  private int memoizedSerializedSize = -1;
  public int getSerializedSize() {
    int size = memoizedSerializedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) == 0x00000001)) {
      size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(1, getEffectiveUserBytes());
    }
    if (((bitField0_ & 0x00000002) == 0x00000002)) {
      size += com.google.protobuf.CodedOutputStream
        .computeBytesSize(2, getRealUserBytes());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSerializedSize = size;
    return size;
  }

  private static final long serialVersionUID = 0L;
  @java.lang.Override
  protected java.lang.Object writeReplace()
      throws java.io.ObjectStreamException {
    return super.writeReplace();
  }

  // equals/hashCode compare field presence, set field values, and unknown
  // fields, matching standard generated-message semantics.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto)) {
      return super.equals(obj);
    }
    org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto other = (org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto) obj;

    boolean result = true;
    result = result && (hasEffectiveUser() == other.hasEffectiveUser());
    if (hasEffectiveUser()) {
      result = result && getEffectiveUser()
          .equals(other.getEffectiveUser());
    }
    result = result && (hasRealUser() == other.hasRealUser());
    if (hasRealUser()) {
      result = result && getRealUser()
          .equals(other.getRealUser());
    }
    result = result &&
        getUnknownFields().equals(other.getUnknownFields());
    return result;
  }

  @java.lang.Override
  public int hashCode() {
    int hash = 41;
    hash = (19 * hash) + getDescriptorForType().hashCode();
    if (hasEffectiveUser()) {
      hash = (37 * hash) + EFFECTIVEUSER_FIELD_NUMBER;
      hash = (53 * hash) + getEffectiveUser().hashCode();
    }
    if (hasRealUser()) {
      hash = (37 * hash) + REALUSER_FIELD_NUMBER;
      hash = (53 * hash) + getRealUser().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    return hash;
  }

  // Static parse helpers: each builds via a fresh Builder; buildParsed()
  // converts an uninitialized result into InvalidProtocolBufferException.
  public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return newBuilder().mergeFrom(data).buildParsed();
  }
  public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return newBuilder().mergeFrom(data, extensionRegistry)
             .buildParsed();
  }
  public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return newBuilder().mergeFrom(data).buildParsed();
  }
  public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return newBuilder().mergeFrom(data, extensionRegistry)
             .buildParsed();
  }
  public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return newBuilder().mergeFrom(input).buildParsed();
  }
  public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return newBuilder().mergeFrom(input, extensionRegistry)
             .buildParsed();
  }
  // Delimited variants return null on clean end-of-stream (no length
  // prefix read), per mergeDelimitedFrom's contract.
  public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    Builder builder = newBuilder();
    if (builder.mergeDelimitedFrom(input)) {
      return builder.buildParsed();
    } else {
      return null;
    }
  }
  public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    Builder builder = newBuilder();
    if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
      return builder.buildParsed();
    } else {
      return null;
    }
  }
  public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return newBuilder().mergeFrom(input).buildParsed();
  }
  public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return newBuilder().mergeFrom(input, extensionRegistry)
             .buildParsed();
  }

  public static Builder newBuilder() { return Builder.create(); }
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto prototype) {
    return newBuilder().mergeFrom(prototype);
  }
  public Builder toBuilder() { return newBuilder(this); }

  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessage.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * Builder for {@code UserInformationProto}.  Mirrors the message fields
   * as mutable state; presence is tracked in its own {@code bitField0_}.
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessage.Builder<Builder>
     implements org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProtoOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.internal_static_UserInformationProto_descriptor;
    }

    protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.internal_static_UserInformationProto_fieldAccessorTable;
    }

    // Construct using org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // No message-typed fields here, so nothing to eagerly initialize.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
      }
    }
    private static Builder create() {
      return new Builder();
    }

    public Builder clear() {
      super.clear();
      effectiveUser_ = "";
      bitField0_ = (bitField0_ & ~0x00000001);
      realUser_ = "";
      bitField0_ = (bitField0_ & ~0x00000002);
      return this;
    }

    public Builder clone() {
      return create().mergeFrom(buildPartial());
    }

    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.getDescriptor();
    }

    public org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto getDefaultInstanceForType() {
      return org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.getDefaultInstance();
    }

    public org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto build() {
      org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    // Like build(), but reports missing required fields as a checked
    // InvalidProtocolBufferException (used by the parseFrom helpers).
    private org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto buildParsed()
        throws com.google.protobuf.InvalidProtocolBufferException {
      org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(
          result).asInvalidProtocolBufferException();
      }
      return result;
    }

    public org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto buildPartial() {
      org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto result = new org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      // Copy presence bits and values from the builder into the message.
      if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
        to_bitField0_ |= 0x00000001;
      }
      result.effectiveUser_ = effectiveUser_;
      if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
        to_bitField0_ |= 0x00000002;
      }
      result.realUser_ = realUser_;
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto) {
        return mergeFrom((org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto)other);
      } else {
        // Fall back to reflection-based merging for foreign message types.
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto other) {
      if (other == org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.getDefaultInstance()) return this;
      if (other.hasEffectiveUser()) {
        setEffectiveUser(other.getEffectiveUser());
      }
      if (other.hasRealUser()) {
        setRealUser(other.getRealUser());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      return this;
    }

    public final boolean isInitialized() {
      // All fields optional: always initialized.
      return true;
    }

    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder(
          this.getUnknownFields());
      while (true) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 marks end of input.
            this.setUnknownFields(unknownFields.build());
            onChanged();
            return this;
          default: {
            // false return indicates an end-group tag: stop parsing.
            if (!parseUnknownField(input, unknownFields,
                                   extensionRegistry, tag)) {
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            }
            break;
          }
          case 10: {
            // Field 1 (effectiveUser), wire type 2: stored as raw bytes,
            // decoded lazily by the getter.
            bitField0_ |= 0x00000001;
            effectiveUser_ = input.readBytes();
            break;
          }
          case 18: {
            // Field 2 (realUser), wire type 2.
            bitField0_ |= 0x00000002;
            realUser_ = input.readBytes();
            break;
          }
        }
      }
    }

    // Presence bits: bit 0 = effectiveUser, bit 1 = realUser.
    private int bitField0_;

    // optional string effectiveUser = 1;
    private java.lang.Object effectiveUser_ = "";
    public boolean hasEffectiveUser() {
      return ((bitField0_ & 0x00000001) == 0x00000001);
    }
    public String getEffectiveUser() {
      java.lang.Object ref = effectiveUser_;
      if (!(ref instanceof String)) {
        // Builder getter always caches the decoded form (no UTF-8
        // validation here, unlike the message getter).
        String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
        effectiveUser_ = s;
        return s;
      } else {
        return (String) ref;
      }
    }
    public Builder setEffectiveUser(String value) {
      if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000001;
      effectiveUser_ = value;
      onChanged();
      return this;
    }
    public Builder clearEffectiveUser() {
      bitField0_ = (bitField0_ & ~0x00000001);
      effectiveUser_ = getDefaultInstance().getEffectiveUser();
      onChanged();
      return this;
    }
    // Package-private raw-bytes setter; presumably used by generated
    // parsing/serialization code elsewhere — bypasses UTF-8 validation.
    void setEffectiveUser(com.google.protobuf.ByteString value) {
      bitField0_ |= 0x00000001;
      effectiveUser_ = value;
      onChanged();
    }

    // optional string realUser = 2;
    private java.lang.Object realUser_ = "";
    public boolean hasRealUser() {
      return ((bitField0_ & 0x00000002) == 0x00000002);
    }
    public String getRealUser() {
      java.lang.Object ref = realUser_;
      if (!(ref instanceof String)) {
        String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
        realUser_ = s;
        return s;
      } else {
        return (String) ref;
      }
    }
    public Builder setRealUser(String value) {
      if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
      realUser_ = value;
      onChanged();
      return this;
    }
    public Builder clearRealUser() {
      bitField0_ = (bitField0_ & ~0x00000002);
      realUser_ = getDefaultInstance().getRealUser();
      onChanged();
      return this;
    }
    // Package-private raw-bytes setter; bypasses UTF-8 validation.
    void setRealUser(com.google.protobuf.ByteString value) {
      bitField0_ |= 0x00000002;
      realUser_ = value;
      onChanged();
    }

    // @@protoc_insertion_point(builder_scope:UserInformationProto)
  }

  static {
    // Build the singleton default instance and give both string fields
    // their empty-string defaults.
    defaultInstance = new UserInformationProto(true);
    defaultInstance.initFields();
  }

  // @@protoc_insertion_point(class_scope:UserInformationProto)
}
521
/**
 * Accessor interface for {@code IpcConnectionContextProto}, implemented by
 * both the immutable message class and its Builder.  Note the field
 * numbers start at 2; field 1 is not used by this message.
 */
public interface IpcConnectionContextProtoOrBuilder
    extends com.google.protobuf.MessageOrBuilder {

  // optional .UserInformationProto userInfo = 2;
  boolean hasUserInfo();
  org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto getUserInfo();
  org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProtoOrBuilder getUserInfoOrBuilder();

  // optional string protocol = 3;
  boolean hasProtocol();
  String getProtocol();
}
534 public static final class IpcConnectionContextProto extends
535 com.google.protobuf.GeneratedMessage
536 implements IpcConnectionContextProtoOrBuilder {
// Use IpcConnectionContextProto.newBuilder() to construct.
private IpcConnectionContextProto(Builder builder) {
  super(builder);
}
// Used only to create the singleton default instance; its fields are
// populated afterwards by initFields() in the static initializer.
private IpcConnectionContextProto(boolean noInit) {}

private static final IpcConnectionContextProto defaultInstance;
public static IpcConnectionContextProto getDefaultInstance() {
  return defaultInstance;
}

public IpcConnectionContextProto getDefaultInstanceForType() {
  return defaultInstance;
}

public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.internal_static_IpcConnectionContextProto_descriptor;
}

// Backs the reflection-based field access used by GeneratedMessage.
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.internal_static_IpcConnectionContextProto_fieldAccessorTable;
}
561
// Presence bits: bit 0 = userInfo, bit 1 = protocol.
private int bitField0_;
// optional .UserInformationProto userInfo = 2;
public static final int USERINFO_FIELD_NUMBER = 2;
private org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto userInfo_;
public boolean hasUserInfo() {
  return ((bitField0_ & 0x00000001) == 0x00000001);
}
public org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto getUserInfo() {
  return userInfo_;
}
public org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProtoOrBuilder getUserInfoOrBuilder() {
  return userInfo_;
}

// optional string protocol = 3;
public static final int PROTOCOL_FIELD_NUMBER = 3;
// Holds either a String or a ByteString; decoded lazily on first access.
private java.lang.Object protocol_;
public boolean hasProtocol() {
  return ((bitField0_ & 0x00000002) == 0x00000002);
}
public String getProtocol() {
  java.lang.Object ref = protocol_;
  if (ref instanceof String) {
    return (String) ref;
  } else {
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    String s = bs.toStringUtf8();
    // Cache the decoded String only when the bytes are valid UTF-8, so
    // that re-serialization of malformed input preserves the raw bytes.
    if (com.google.protobuf.Internal.isValidUtf8(bs)) {
      protocol_ = s;
    }
    return s;
  }
}
// Returns (and caches) the UTF-8 encoded form used for serialization.
private com.google.protobuf.ByteString getProtocolBytes() {
  java.lang.Object ref = protocol_;
  if (ref instanceof String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((String) ref);
    protocol_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
607
private void initFields() {
  userInfo_ = org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.getDefaultInstance();
  protocol_ = "";
}
// -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized != -1) return isInitialized == 1;

  // Both fields are optional, so the message is always initialized.
  memoizedIsInitialized = 1;
  return true;
}

public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  // Forces the size (and cached byte forms) to be computed first.
  getSerializedSize();
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeMessage(2, userInfo_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    output.writeBytes(3, getProtocolBytes());
  }
  getUnknownFields().writeTo(output);
}

private int memoizedSerializedSize = -1;
public int getSerializedSize() {
  int size = memoizedSerializedSize;
  if (size != -1) return size;

  size = 0;
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += com.google.protobuf.CodedOutputStream
      .computeMessageSize(2, userInfo_);
  }
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    size += com.google.protobuf.CodedOutputStream
      .computeBytesSize(3, getProtocolBytes());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}
651
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
    throws java.io.ObjectStreamException {
  return super.writeReplace();
}

// equals/hashCode compare field presence, set field values, and unknown
// fields, matching standard generated-message semantics.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto other = (org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto) obj;

  boolean result = true;
  result = result && (hasUserInfo() == other.hasUserInfo());
  if (hasUserInfo()) {
    result = result && getUserInfo()
        .equals(other.getUserInfo());
  }
  result = result && (hasProtocol() == other.hasProtocol());
  if (hasProtocol()) {
    result = result && getProtocol()
        .equals(other.getProtocol());
  }
  result = result &&
      getUnknownFields().equals(other.getUnknownFields());
  return result;
}

@java.lang.Override
public int hashCode() {
  int hash = 41;
  hash = (19 * hash) + getDescriptorForType().hashCode();
  if (hasUserInfo()) {
    hash = (37 * hash) + USERINFO_FIELD_NUMBER;
    hash = (53 * hash) + getUserInfo().hashCode();
  }
  if (hasProtocol()) {
    hash = (37 * hash) + PROTOCOL_FIELD_NUMBER;
    hash = (53 * hash) + getProtocol().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  return hash;
}
700
// Static parse helpers: each builds via a fresh Builder; buildParsed()
// converts an uninitialized result into InvalidProtocolBufferException.
public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return newBuilder().mergeFrom(data, extensionRegistry)
           .buildParsed();
}
public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return newBuilder().mergeFrom(data).buildParsed();
}
public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return newBuilder().mergeFrom(data, extensionRegistry)
           .buildParsed();
}
public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return newBuilder().mergeFrom(input, extensionRegistry)
           .buildParsed();
}
// Delimited variants return null on clean end-of-stream (no length
// prefix read), per mergeDelimitedFrom's contract.
public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  Builder builder = newBuilder();
  if (builder.mergeDelimitedFrom(input)) {
    return builder.buildParsed();
  } else {
    return null;
  }
}
public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  Builder builder = newBuilder();
  if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
    return builder.buildParsed();
  } else {
    return null;
  }
}
public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return newBuilder().mergeFrom(input).buildParsed();
}
public static org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return newBuilder().mergeFrom(input, extensionRegistry)
           .buildParsed();
}
767
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
// Builder pre-populated with the given prototype's field values.
public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto prototype) {
  return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }

@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessage.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
781 public static final class Builder extends
782 com.google.protobuf.GeneratedMessage.Builder<Builder>
783 implements org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProtoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.internal_static_IpcConnectionContextProto_descriptor;
}

// Backs the reflection-based field access used by GeneratedMessage.Builder.
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.internal_static_IpcConnectionContextProto_fieldAccessorTable;
}

// Construct using org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto.newBuilder()
private Builder() {
  maybeForceBuilderInitialization();
}

private Builder(BuilderParent parent) {
  super(parent);
  maybeForceBuilderInitialization();
}
// Eagerly creates the nested-message field builder when the runtime asks
// for field builders to always be used (nested-builder mode).
private void maybeForceBuilderInitialization() {
  if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
    getUserInfoFieldBuilder();
  }
}
private static Builder create() {
  return new Builder();
}
811
public Builder clear() {
  super.clear();
  // Reset userInfo either directly or through its field builder,
  // depending on which representation is active.
  if (userInfoBuilder_ == null) {
    userInfo_ = org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.getDefaultInstance();
  } else {
    userInfoBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000001);
  protocol_ = "";
  bitField0_ = (bitField0_ & ~0x00000002);
  return this;
}

public Builder clone() {
  return create().mergeFrom(buildPartial());
}

public com.google.protobuf.Descriptors.Descriptor
    getDescriptorForType() {
  return org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto.getDescriptor();
}

public org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto getDefaultInstanceForType() {
  return org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto.getDefaultInstance();
}

public org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto build() {
  org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}

// Like build(), but reports missing required fields as a checked
// InvalidProtocolBufferException (used by the parseFrom helpers).
private org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto buildParsed()
    throws com.google.protobuf.InvalidProtocolBufferException {
  org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(
      result).asInvalidProtocolBufferException();
  }
  return result;
}
855
856 public org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto buildPartial() {
857 org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto result = new org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto(this);
858 int from_bitField0_ = bitField0_;
859 int to_bitField0_ = 0;
860 if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
861 to_bitField0_ |= 0x00000001;
862 }
863 if (userInfoBuilder_ == null) {
864 result.userInfo_ = userInfo_;
865 } else {
866 result.userInfo_ = userInfoBuilder_.build();
867 }
868 if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
869 to_bitField0_ |= 0x00000002;
870 }
871 result.protocol_ = protocol_;
872 result.bitField0_ = to_bitField0_;
873 onBuilt();
874 return result;
875 }
876
877 public Builder mergeFrom(com.google.protobuf.Message other) {
878 if (other instanceof org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto) {
879 return mergeFrom((org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto)other);
880 } else {
881 super.mergeFrom(other);
882 return this;
883 }
884 }
885
      // Typed merge: copies only the fields that are set on `other`
      // (protobuf merge semantics — message fields are recursively merged,
      // scalar fields are overwritten). Merging the default instance is a
      // no-op short-circuit.
      public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto other) {
        if (other == org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto.getDefaultInstance()) return this;
        if (other.hasUserInfo()) {
          mergeUserInfo(other.getUserInfo());
        }
        if (other.hasProtocol()) {
          setProtocol(other.getProtocol());
        }
        this.mergeUnknownFields(other.getUnknownFields());
        return this;
      }
897
      // Always true: both fields of this message are optional, so there is
      // nothing to validate.
      public final boolean isInitialized() {
        return true;
      }
901
      // Parses wire-format bytes from `input` and merges them into this
      // builder. Tag 0 means end of stream; unrecognized tags are preserved
      // in the unknown-field set. Tag 18 = field 2 (userInfo, length-
      // delimited message), tag 26 = field 3 (protocol, length-delimited
      // string).
      public Builder mergeFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
        com.google.protobuf.UnknownFieldSet.Builder unknownFields =
          com.google.protobuf.UnknownFieldSet.newBuilder(
            this.getUnknownFields());
        while (true) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              // End of input: commit accumulated unknown fields and return.
              this.setUnknownFields(unknownFields.build());
              onChanged();
              return this;
            default: {
              if (!parseUnknownField(input, unknownFields,
                                     extensionRegistry, tag)) {
                // parseUnknownField returns false on an end-group tag;
                // treat that like end of input.
                this.setUnknownFields(unknownFields.build());
                onChanged();
                return this;
              }
              break;
            }
            case 18: {
              // Merge into any existing userInfo rather than replacing it,
              // per protobuf semantics for repeated occurrences of a
              // message field.
              org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.Builder subBuilder = org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.newBuilder();
              if (hasUserInfo()) {
                subBuilder.mergeFrom(getUserInfo());
              }
              input.readMessage(subBuilder, extensionRegistry);
              setUserInfo(subBuilder.buildPartial());
              break;
            }
            case 26: {
              bitField0_ |= 0x00000002;
              // Kept as a ByteString; decoded lazily by getProtocol().
              protocol_ = input.readBytes();
              break;
            }
          }
        }
      }
942
      // Has-bits for this builder: 0x01 = userInfo, 0x02 = protocol.
      private int bitField0_;

      // optional .UserInformationProto userInfo = 2;
      // userInfo_ holds the value until getUserInfoFieldBuilder() is first
      // called; after that, userInfoBuilder_ owns the value and userInfo_
      // is nulled out (see getUserInfoFieldBuilder below).
      private org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto userInfo_ = org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.getDefaultInstance();
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto, org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.Builder, org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProtoOrBuilder> userInfoBuilder_;
      // True iff userInfo has been explicitly set on this builder.
      public boolean hasUserInfo() {
        return ((bitField0_ & 0x00000001) == 0x00000001);
      }
      // Returns the current userInfo value, from whichever of the two
      // storage locations (plain field vs. sub-builder) is active.
      public org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto getUserInfo() {
        if (userInfoBuilder_ == null) {
          return userInfo_;
        } else {
          return userInfoBuilder_.getMessage();
        }
      }
      // Replaces userInfo with the given message and sets its has-bit.
      // Rejects null explicitly when storing into the plain field; the
      // sub-builder path relies on SingleFieldBuilder's own null handling.
      public Builder setUserInfo(org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto value) {
        if (userInfoBuilder_ == null) {
          if (value == null) {
            throw new NullPointerException();
          }
          userInfo_ = value;
          onChanged();
        } else {
          userInfoBuilder_.setMessage(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      // Convenience overload: builds the given sub-builder and stores the
      // result as userInfo, setting its has-bit.
      public Builder setUserInfo(
          org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.Builder builderForValue) {
        if (userInfoBuilder_ == null) {
          userInfo_ = builderForValue.build();
          onChanged();
        } else {
          userInfoBuilder_.setMessage(builderForValue.build());
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      // Merges `value` into the existing userInfo. If userInfo was already
      // set to something other than the default instance, the two messages
      // are field-merged; otherwise `value` simply replaces it. Always sets
      // the has-bit.
      public Builder mergeUserInfo(org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto value) {
        if (userInfoBuilder_ == null) {
          if (((bitField0_ & 0x00000001) == 0x00000001) &&
              userInfo_ != org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.getDefaultInstance()) {
            userInfo_ =
              org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.newBuilder(userInfo_).mergeFrom(value).buildPartial();
          } else {
            userInfo_ = value;
          }
          onChanged();
        } else {
          userInfoBuilder_.mergeFrom(value);
        }
        bitField0_ |= 0x00000001;
        return this;
      }
      // Resets userInfo to its default instance and clears its has-bit.
      public Builder clearUserInfo() {
        if (userInfoBuilder_ == null) {
          userInfo_ = org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.getDefaultInstance();
          onChanged();
        } else {
          userInfoBuilder_.clear();
        }
        bitField0_ = (bitField0_ & ~0x00000001);
        return this;
      }
      // Returns a mutable sub-builder for userInfo. Calling this sets the
      // has-bit (the caller is presumed to be about to populate the field)
      // and switches storage over to the sub-builder path.
      public org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.Builder getUserInfoBuilder() {
        bitField0_ |= 0x00000001;
        onChanged();
        return getUserInfoFieldBuilder().getBuilder();
      }
      // Read-only view of userInfo: the live sub-builder when one exists,
      // otherwise the stored message (which itself implements the
      // OrBuilder interface).
      public org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProtoOrBuilder getUserInfoOrBuilder() {
        if (userInfoBuilder_ != null) {
          return userInfoBuilder_.getMessageOrBuilder();
        } else {
          return userInfo_;
        }
      }
      // Lazily creates the SingleFieldBuilder for userInfo, seeding it with
      // the current value. After creation, userInfo_ is nulled — from then
      // on the sub-builder is the single source of truth for this field.
      private com.google.protobuf.SingleFieldBuilder<
          org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto, org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.Builder, org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProtoOrBuilder> 
          getUserInfoFieldBuilder() {
        if (userInfoBuilder_ == null) {
          userInfoBuilder_ = new com.google.protobuf.SingleFieldBuilder<
              org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto, org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.Builder, org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProtoOrBuilder>(
                  userInfo_,
                  getParentForChildren(),
                  isClean());
          userInfo_ = null;
        }
        return userInfoBuilder_;
      }
1034
      // optional string protocol = 3;
      // Stored as Object because it may hold either a String or a raw
      // ByteString fresh off the wire (see getProtocol's lazy decode).
      private java.lang.Object protocol_ = "";
      // True iff protocol has been explicitly set on this builder.
      public boolean hasProtocol() {
        return ((bitField0_ & 0x00000002) == 0x00000002);
      }
      // Returns protocol as a String, lazily decoding a wire-format
      // ByteString as UTF-8 and caching the decoded result back into
      // protocol_ so subsequent calls are free.
      public String getProtocol() {
        java.lang.Object ref = protocol_;
        if (!(ref instanceof String)) {
          String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
          protocol_ = s;
          return s;
        } else {
          return (String) ref;
        }
      }
      // Sets protocol to the given non-null String and sets its has-bit.
      public Builder setProtocol(String value) {
        if (value == null) {
    throw new NullPointerException();
  }
  bitField0_ |= 0x00000002;
        protocol_ = value;
        onChanged();
        return this;
      }
      // Clears protocol's has-bit and restores the default value ("" for
      // this field, fetched from the default instance).
      public Builder clearProtocol() {
        bitField0_ = (bitField0_ & ~0x00000002);
        protocol_ = getDefaultInstance().getProtocol();
        onChanged();
        return this;
      }
      // Package-private fast path used during parsing: stores the raw
      // ByteString without UTF-8 decoding (getProtocol decodes lazily).
      void setProtocol(com.google.protobuf.ByteString value) {
        bitField0_ |= 0x00000002;
        protocol_ = value;
        onChanged();
      }
1070
1071 // @@protoc_insertion_point(builder_scope:IpcConnectionContextProto)
1072 }
1073
    // Eagerly constructs the singleton default instance for
    // IpcConnectionContextProto (the noInit constructor skips field setup,
    // so initFields() must be called explicitly afterwards).
    static {
      defaultInstance = new IpcConnectionContextProto(true);
      defaultInstance.initFields();
    }
1078
1079 // @@protoc_insertion_point(class_scope:IpcConnectionContextProto)
1080 }
1081
  // Descriptor and reflective field-accessor table for each message type,
  // populated by the assigner in the static initializer below once the
  // file descriptor has been built.
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_UserInformationProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_UserInformationProto_fieldAccessorTable;
  private static com.google.protobuf.Descriptors.Descriptor
    internal_static_IpcConnectionContextProto_descriptor;
  private static
    com.google.protobuf.GeneratedMessage.FieldAccessorTable
      internal_static_IpcConnectionContextProto_fieldAccessorTable;
1092
  // Accessor for the FileDescriptor describing IpcConnectionContext.proto;
  // the backing field is assigned by the static initializer below.
  public static com.google.protobuf.Descriptors.FileDescriptor
      getDescriptor() {
    return descriptor;
  }
  private static com.google.protobuf.Descriptors.FileDescriptor
      descriptor;
  // Builds the FileDescriptor from the serialized .proto definition below
  // (the escaped string is the wire-format FileDescriptorProto and must not
  // be edited by hand). The assigner callback then caches each message
  // descriptor — index 0 = UserInformationProto, index 1 =
  // IpcConnectionContextProto — and its reflective accessor table.
  static {
    java.lang.String[] descriptorData = {
      "\n\032IpcConnectionContext.proto\"?\n\024UserInfo" +
      "rmationProto\022\025\n\reffectiveUser\030\001 \001(\t\022\020\n\010r" +
      "ealUser\030\002 \001(\t\"V\n\031IpcConnectionContextPro" +
      "to\022\'\n\010userInfo\030\002 \001(\0132\025.UserInformationPr" +
      "oto\022\020\n\010protocol\030\003 \001(\tB?\n\036org.apache.hado" +
      "op.ipc.protobufB\032IpcConnectionContextPro" +
      "tos\240\001\001"
    };
    com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
      new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
        public com.google.protobuf.ExtensionRegistry assignDescriptors(
            com.google.protobuf.Descriptors.FileDescriptor root) {
          descriptor = root;
          internal_static_UserInformationProto_descriptor =
            getDescriptor().getMessageTypes().get(0);
          internal_static_UserInformationProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_UserInformationProto_descriptor,
              new java.lang.String[] { "EffectiveUser", "RealUser", },
              org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.class,
              org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.UserInformationProto.Builder.class);
          internal_static_IpcConnectionContextProto_descriptor =
            getDescriptor().getMessageTypes().get(1);
          internal_static_IpcConnectionContextProto_fieldAccessorTable = new
            com.google.protobuf.GeneratedMessage.FieldAccessorTable(
              internal_static_IpcConnectionContextProto_descriptor,
              new java.lang.String[] { "UserInfo", "Protocol", },
              org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto.class,
              org.apache.hadoop.ipc.protobuf.IpcConnectionContextProtos.IpcConnectionContextProto.Builder.class);
          // No extensions to register for this file.
          return null;
        }
      };
    com.google.protobuf.Descriptors.FileDescriptor
      .internalBuildGeneratedFileFrom(descriptorData,
        new com.google.protobuf.Descriptors.FileDescriptor[] {
        }, assigner);
  }
1138
1139 // @@protoc_insertion_point(outer_class_scope)
1140 }