1 // Generated by the protocol buffer compiler.  DO NOT EDIT!
2 // source: tensorflow/core/protobuf/coordination_service.proto
3 
4 #ifndef GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto
5 #define GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto
6 
7 #include <cstdint>
8 #include <limits>
9 #include <string>
10 
11 #include <google/protobuf/port_def.inc>
12 #if PROTOBUF_VERSION < 3021000
13 #error This file was generated by a newer version of protoc which is
14 #error incompatible with your Protocol Buffer headers. Please update
15 #error your headers.
16 #endif
17 #if 3021012 < PROTOBUF_MIN_PROTOC_VERSION
18 #error This file was generated by an older version of protoc which is
19 #error incompatible with your Protocol Buffer headers. Please
20 #error regenerate this file with a newer version of protoc.
21 #endif
22 
23 #include <google/protobuf/port_undef.inc>
24 #include <google/protobuf/io/coded_stream.h>
25 #include <google/protobuf/arena.h>
26 #include <google/protobuf/arenastring.h>
27 #include <google/protobuf/generated_message_util.h>
28 #include <google/protobuf/metadata_lite.h>
29 #include <google/protobuf/message_lite.h>
30 #include <google/protobuf/repeated_field.h>  // IWYU pragma: export
31 #include <google/protobuf/extension_set.h>  // IWYU pragma: export
32 #include <google/protobuf/generated_enum_util.h>
33 #include "tensorflow/compiler/xla/pjrt/distributed/protocol.pb.h"
34 #include "tensorflow/core/framework/device_attributes.pb.h"
35 // @@protoc_insertion_point(includes)
36 #include <google/protobuf/port_def.inc>
37 #define PROTOBUF_INTERNAL_EXPORT_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto
38 PROTOBUF_NAMESPACE_OPEN
39 namespace internal {
40 class AnyMetadata;
41 }  // namespace internal
42 PROTOBUF_NAMESPACE_CLOSE
43 
44 // Internal implementation detail -- do not use these members.
// Per-file bookkeeping emitted by protoc; the offsets table is defined in the
// generated .cc and maps message fields to their byte offsets inside Impl_.
struct TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto {
  static const ::uint32_t offsets[];
};
48 namespace tensorflow {
49 class BarrierRequest;
50 struct BarrierRequestDefaultTypeInternal;
51 extern BarrierRequestDefaultTypeInternal _BarrierRequest_default_instance_;
52 class BarrierResponse;
53 struct BarrierResponseDefaultTypeInternal;
54 extern BarrierResponseDefaultTypeInternal _BarrierResponse_default_instance_;
55 class CancelBarrierRequest;
56 struct CancelBarrierRequestDefaultTypeInternal;
57 extern CancelBarrierRequestDefaultTypeInternal _CancelBarrierRequest_default_instance_;
58 class CancelBarrierResponse;
59 struct CancelBarrierResponseDefaultTypeInternal;
60 extern CancelBarrierResponseDefaultTypeInternal _CancelBarrierResponse_default_instance_;
61 class CoordinatedTask;
62 struct CoordinatedTaskDefaultTypeInternal;
63 extern CoordinatedTaskDefaultTypeInternal _CoordinatedTask_default_instance_;
64 class CoordinationServiceDeviceInfo;
65 struct CoordinationServiceDeviceInfoDefaultTypeInternal;
66 extern CoordinationServiceDeviceInfoDefaultTypeInternal _CoordinationServiceDeviceInfo_default_instance_;
67 class CoordinationServiceError;
68 struct CoordinationServiceErrorDefaultTypeInternal;
69 extern CoordinationServiceErrorDefaultTypeInternal _CoordinationServiceError_default_instance_;
70 class DeleteKeyValueRequest;
71 struct DeleteKeyValueRequestDefaultTypeInternal;
72 extern DeleteKeyValueRequestDefaultTypeInternal _DeleteKeyValueRequest_default_instance_;
73 class DeleteKeyValueResponse;
74 struct DeleteKeyValueResponseDefaultTypeInternal;
75 extern DeleteKeyValueResponseDefaultTypeInternal _DeleteKeyValueResponse_default_instance_;
76 class GetKeyValueDirRequest;
77 struct GetKeyValueDirRequestDefaultTypeInternal;
78 extern GetKeyValueDirRequestDefaultTypeInternal _GetKeyValueDirRequest_default_instance_;
79 class GetKeyValueDirResponse;
80 struct GetKeyValueDirResponseDefaultTypeInternal;
81 extern GetKeyValueDirResponseDefaultTypeInternal _GetKeyValueDirResponse_default_instance_;
82 class GetKeyValueRequest;
83 struct GetKeyValueRequestDefaultTypeInternal;
84 extern GetKeyValueRequestDefaultTypeInternal _GetKeyValueRequest_default_instance_;
85 class GetKeyValueResponse;
86 struct GetKeyValueResponseDefaultTypeInternal;
87 extern GetKeyValueResponseDefaultTypeInternal _GetKeyValueResponse_default_instance_;
88 class HeartbeatRequest;
89 struct HeartbeatRequestDefaultTypeInternal;
90 extern HeartbeatRequestDefaultTypeInternal _HeartbeatRequest_default_instance_;
91 class HeartbeatResponse;
92 struct HeartbeatResponseDefaultTypeInternal;
93 extern HeartbeatResponseDefaultTypeInternal _HeartbeatResponse_default_instance_;
94 class InsertKeyValueRequest;
95 struct InsertKeyValueRequestDefaultTypeInternal;
96 extern InsertKeyValueRequestDefaultTypeInternal _InsertKeyValueRequest_default_instance_;
97 class InsertKeyValueResponse;
98 struct InsertKeyValueResponseDefaultTypeInternal;
99 extern InsertKeyValueResponseDefaultTypeInternal _InsertKeyValueResponse_default_instance_;
100 class KeyValueEntry;
101 struct KeyValueEntryDefaultTypeInternal;
102 extern KeyValueEntryDefaultTypeInternal _KeyValueEntry_default_instance_;
103 class RegisterTaskRequest;
104 struct RegisterTaskRequestDefaultTypeInternal;
105 extern RegisterTaskRequestDefaultTypeInternal _RegisterTaskRequest_default_instance_;
106 class RegisterTaskResponse;
107 struct RegisterTaskResponseDefaultTypeInternal;
108 extern RegisterTaskResponseDefaultTypeInternal _RegisterTaskResponse_default_instance_;
109 class ReportErrorToServiceRequest;
110 struct ReportErrorToServiceRequestDefaultTypeInternal;
111 extern ReportErrorToServiceRequestDefaultTypeInternal _ReportErrorToServiceRequest_default_instance_;
112 class ReportErrorToServiceResponse;
113 struct ReportErrorToServiceResponseDefaultTypeInternal;
114 extern ReportErrorToServiceResponseDefaultTypeInternal _ReportErrorToServiceResponse_default_instance_;
115 class ReportErrorToTaskRequest;
116 struct ReportErrorToTaskRequestDefaultTypeInternal;
117 extern ReportErrorToTaskRequestDefaultTypeInternal _ReportErrorToTaskRequest_default_instance_;
118 class ReportErrorToTaskResponse;
119 struct ReportErrorToTaskResponseDefaultTypeInternal;
120 extern ReportErrorToTaskResponseDefaultTypeInternal _ReportErrorToTaskResponse_default_instance_;
121 class ResetTaskRequest;
122 struct ResetTaskRequestDefaultTypeInternal;
123 extern ResetTaskRequestDefaultTypeInternal _ResetTaskRequest_default_instance_;
124 class ResetTaskResponse;
125 struct ResetTaskResponseDefaultTypeInternal;
126 extern ResetTaskResponseDefaultTypeInternal _ResetTaskResponse_default_instance_;
127 class ShutdownTaskRequest;
128 struct ShutdownTaskRequestDefaultTypeInternal;
129 extern ShutdownTaskRequestDefaultTypeInternal _ShutdownTaskRequest_default_instance_;
130 class ShutdownTaskResponse;
131 struct ShutdownTaskResponseDefaultTypeInternal;
132 extern ShutdownTaskResponseDefaultTypeInternal _ShutdownTaskResponse_default_instance_;
133 class TfDeviceList;
134 struct TfDeviceListDefaultTypeInternal;
135 extern TfDeviceListDefaultTypeInternal _TfDeviceList_default_instance_;
136 class TryGetKeyValueRequest;
137 struct TryGetKeyValueRequestDefaultTypeInternal;
138 extern TryGetKeyValueRequestDefaultTypeInternal _TryGetKeyValueRequest_default_instance_;
139 class TryGetKeyValueResponse;
140 struct TryGetKeyValueResponseDefaultTypeInternal;
141 extern TryGetKeyValueResponseDefaultTypeInternal _TryGetKeyValueResponse_default_instance_;
142 class WaitForAllTasksRequest;
143 struct WaitForAllTasksRequestDefaultTypeInternal;
144 extern WaitForAllTasksRequestDefaultTypeInternal _WaitForAllTasksRequest_default_instance_;
145 class WaitForAllTasksResponse;
146 struct WaitForAllTasksResponseDefaultTypeInternal;
147 extern WaitForAllTasksResponseDefaultTypeInternal _WaitForAllTasksResponse_default_instance_;
148 class XlaDeviceList;
149 struct XlaDeviceListDefaultTypeInternal;
150 extern XlaDeviceListDefaultTypeInternal _XlaDeviceList_default_instance_;
151 }  // namespace tensorflow
152 PROTOBUF_NAMESPACE_OPEN
153 template<> ::tensorflow::BarrierRequest* Arena::CreateMaybeMessage<::tensorflow::BarrierRequest>(Arena*);
154 template<> ::tensorflow::BarrierResponse* Arena::CreateMaybeMessage<::tensorflow::BarrierResponse>(Arena*);
155 template<> ::tensorflow::CancelBarrierRequest* Arena::CreateMaybeMessage<::tensorflow::CancelBarrierRequest>(Arena*);
156 template<> ::tensorflow::CancelBarrierResponse* Arena::CreateMaybeMessage<::tensorflow::CancelBarrierResponse>(Arena*);
157 template<> ::tensorflow::CoordinatedTask* Arena::CreateMaybeMessage<::tensorflow::CoordinatedTask>(Arena*);
158 template<> ::tensorflow::CoordinationServiceDeviceInfo* Arena::CreateMaybeMessage<::tensorflow::CoordinationServiceDeviceInfo>(Arena*);
159 template<> ::tensorflow::CoordinationServiceError* Arena::CreateMaybeMessage<::tensorflow::CoordinationServiceError>(Arena*);
160 template<> ::tensorflow::DeleteKeyValueRequest* Arena::CreateMaybeMessage<::tensorflow::DeleteKeyValueRequest>(Arena*);
161 template<> ::tensorflow::DeleteKeyValueResponse* Arena::CreateMaybeMessage<::tensorflow::DeleteKeyValueResponse>(Arena*);
162 template<> ::tensorflow::GetKeyValueDirRequest* Arena::CreateMaybeMessage<::tensorflow::GetKeyValueDirRequest>(Arena*);
163 template<> ::tensorflow::GetKeyValueDirResponse* Arena::CreateMaybeMessage<::tensorflow::GetKeyValueDirResponse>(Arena*);
164 template<> ::tensorflow::GetKeyValueRequest* Arena::CreateMaybeMessage<::tensorflow::GetKeyValueRequest>(Arena*);
165 template<> ::tensorflow::GetKeyValueResponse* Arena::CreateMaybeMessage<::tensorflow::GetKeyValueResponse>(Arena*);
166 template<> ::tensorflow::HeartbeatRequest* Arena::CreateMaybeMessage<::tensorflow::HeartbeatRequest>(Arena*);
167 template<> ::tensorflow::HeartbeatResponse* Arena::CreateMaybeMessage<::tensorflow::HeartbeatResponse>(Arena*);
168 template<> ::tensorflow::InsertKeyValueRequest* Arena::CreateMaybeMessage<::tensorflow::InsertKeyValueRequest>(Arena*);
169 template<> ::tensorflow::InsertKeyValueResponse* Arena::CreateMaybeMessage<::tensorflow::InsertKeyValueResponse>(Arena*);
170 template<> ::tensorflow::KeyValueEntry* Arena::CreateMaybeMessage<::tensorflow::KeyValueEntry>(Arena*);
171 template<> ::tensorflow::RegisterTaskRequest* Arena::CreateMaybeMessage<::tensorflow::RegisterTaskRequest>(Arena*);
172 template<> ::tensorflow::RegisterTaskResponse* Arena::CreateMaybeMessage<::tensorflow::RegisterTaskResponse>(Arena*);
173 template<> ::tensorflow::ReportErrorToServiceRequest* Arena::CreateMaybeMessage<::tensorflow::ReportErrorToServiceRequest>(Arena*);
174 template<> ::tensorflow::ReportErrorToServiceResponse* Arena::CreateMaybeMessage<::tensorflow::ReportErrorToServiceResponse>(Arena*);
175 template<> ::tensorflow::ReportErrorToTaskRequest* Arena::CreateMaybeMessage<::tensorflow::ReportErrorToTaskRequest>(Arena*);
176 template<> ::tensorflow::ReportErrorToTaskResponse* Arena::CreateMaybeMessage<::tensorflow::ReportErrorToTaskResponse>(Arena*);
177 template<> ::tensorflow::ResetTaskRequest* Arena::CreateMaybeMessage<::tensorflow::ResetTaskRequest>(Arena*);
178 template<> ::tensorflow::ResetTaskResponse* Arena::CreateMaybeMessage<::tensorflow::ResetTaskResponse>(Arena*);
179 template<> ::tensorflow::ShutdownTaskRequest* Arena::CreateMaybeMessage<::tensorflow::ShutdownTaskRequest>(Arena*);
180 template<> ::tensorflow::ShutdownTaskResponse* Arena::CreateMaybeMessage<::tensorflow::ShutdownTaskResponse>(Arena*);
181 template<> ::tensorflow::TfDeviceList* Arena::CreateMaybeMessage<::tensorflow::TfDeviceList>(Arena*);
182 template<> ::tensorflow::TryGetKeyValueRequest* Arena::CreateMaybeMessage<::tensorflow::TryGetKeyValueRequest>(Arena*);
183 template<> ::tensorflow::TryGetKeyValueResponse* Arena::CreateMaybeMessage<::tensorflow::TryGetKeyValueResponse>(Arena*);
184 template<> ::tensorflow::WaitForAllTasksRequest* Arena::CreateMaybeMessage<::tensorflow::WaitForAllTasksRequest>(Arena*);
185 template<> ::tensorflow::WaitForAllTasksResponse* Arena::CreateMaybeMessage<::tensorflow::WaitForAllTasksResponse>(Arena*);
186 template<> ::tensorflow::XlaDeviceList* Arena::CreateMaybeMessage<::tensorflow::XlaDeviceList>(Arena*);
187 PROTOBUF_NAMESPACE_CLOSE
188 namespace tensorflow {
189 
// Proto enum tensorflow.CoordinatedTaskState (generated). Values mirror the
// enumerators declared in coordination_service.proto.
enum CoordinatedTaskState : int {
  TASKSTATE_UNSPECIFIED = 0,
  TASKSTATE_UNINITIALIZED = 1,
  TASKSTATE_DISCONNECTED = 2,
  TASKSTATE_CONNECTED = 3,
  TASKSTATE_ERROR = 4,
  // Sentinels (not valid values) force the enum's range to span full int32.
  CoordinatedTaskState_INT_MIN_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::int32_t>::min(),
  CoordinatedTaskState_INT_MAX_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::int32_t>::max()
};
// True iff |value| is one of the declared enumerators; defined in the .cc.
bool CoordinatedTaskState_IsValid(int value);
constexpr CoordinatedTaskState CoordinatedTaskState_MIN = TASKSTATE_UNSPECIFIED;
constexpr CoordinatedTaskState CoordinatedTaskState_MAX = TASKSTATE_ERROR;
// ARRAYSIZE = MAX + 1; usable to size lookup tables indexed by enum value.
constexpr int CoordinatedTaskState_ARRAYSIZE = CoordinatedTaskState_MAX + 1;

// Returns the enumerator's name; implementation lives in the generated .cc.
const std::string& CoordinatedTaskState_Name(CoordinatedTaskState value);
template<typename T>
inline const std::string& CoordinatedTaskState_Name(T enum_t_value) {
  // Accept the enum itself or any integral type; reject everything else at
  // compile time before casting into the enum overload above.
  static_assert(::std::is_same<T, CoordinatedTaskState>::value ||
    ::std::is_integral<T>::value,
    "Incorrect type passed to function CoordinatedTaskState_Name.");
  return CoordinatedTaskState_Name(static_cast<CoordinatedTaskState>(enum_t_value));
}
// Parses an enumerator name into *value; returns false if |name| is unknown.
bool CoordinatedTaskState_Parse(
    ::PROTOBUF_NAMESPACE_ID::ConstStringParam name, CoordinatedTaskState* value);
214 // ===================================================================
215 
// Generated message tensorflow.CoordinatedTask. Fields (from the .proto):
// string job_name = 1, int32 task_id = 2. Lite runtime: derives from
// MessageLite, so no descriptors/reflection are available.
class CoordinatedTask final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.CoordinatedTask) */ {
 public:
  inline CoordinatedTask() : CoordinatedTask(nullptr) {}
  ~CoordinatedTask() override;
  // Constant-initialized ctor used only for the global default instance.
  explicit PROTOBUF_CONSTEXPR CoordinatedTask(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  CoordinatedTask(const CoordinatedTask& from);
  CoordinatedTask(CoordinatedTask&& from) noexcept
    : CoordinatedTask() {
    *this = ::std::move(from);
  }

  inline CoordinatedTask& operator=(const CoordinatedTask& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline CoordinatedTask& operator=(CoordinatedTask&& from) noexcept {
    if (this == &from) return *this;
    // Move is a cheap internal swap only when both objects share an arena;
    // otherwise it degrades to a deep copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const CoordinatedTask& default_instance() {
    return *internal_default_instance();
  }
  static inline const CoordinatedTask* internal_default_instance() {
    return reinterpret_cast<const CoordinatedTask*>(
               &_CoordinatedTask_default_instance_);
  }
  // Index of this message within the generated file's message list.
  static constexpr int kIndexInFileMessages =
    0;

  friend void swap(CoordinatedTask& a, CoordinatedTask& b) {
    a.Swap(&b);
  }
  inline void Swap(CoordinatedTask* other) {
    if (other == this) return;
    // Same-arena swaps exchange internals; cross-arena swaps must copy.
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Caller must guarantee both messages live on the same arena.
  void UnsafeArenaSwap(CoordinatedTask* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  CoordinatedTask* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<CoordinatedTask>(arena);
  }
  CoordinatedTask* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const CoordinatedTask& from);
  void MergeFrom(const CoordinatedTask& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  // Returns the size cached by the last ByteSizeLong()/serialization pass.
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(CoordinatedTask* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.CoordinatedTask";
  }
  protected:
  explicit CoordinatedTask(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kJobNameFieldNumber = 1,
    kTaskIdFieldNumber = 2,
  };
  // string job_name = 1;
  void clear_job_name();
  const std::string& job_name() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_job_name(ArgT0&& arg0, ArgT... args);
  std::string* mutable_job_name();
  // Transfers ownership of the string to the caller.
  PROTOBUF_NODISCARD std::string* release_job_name();
  void set_allocated_job_name(std::string* job_name);
  private:
  const std::string& _internal_job_name() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_job_name(const std::string& value);
  std::string* _internal_mutable_job_name();
  public:

  // int32 task_id = 2;
  void clear_task_id();
  ::int32_t task_id() const;
  void set_task_id(::int32_t value);
  private:
  ::int32_t _internal_task_id() const;
  void _internal_set_task_id(::int32_t value);
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.CoordinatedTask)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage; layout must match the offsets table in the generated .cc.
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr job_name_;
    ::int32_t task_id_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
364 // -------------------------------------------------------------------
365 
// Generated message tensorflow.CoordinationServiceError. Fields (from the
// .proto): .tensorflow.CoordinatedTask source_task = 4,
// bool is_reported_error = 3. Lite runtime: derives from MessageLite.
class CoordinationServiceError final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.CoordinationServiceError) */ {
 public:
  inline CoordinationServiceError() : CoordinationServiceError(nullptr) {}
  ~CoordinationServiceError() override;
  // Constant-initialized ctor used only for the global default instance.
  explicit PROTOBUF_CONSTEXPR CoordinationServiceError(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  CoordinationServiceError(const CoordinationServiceError& from);
  CoordinationServiceError(CoordinationServiceError&& from) noexcept
    : CoordinationServiceError() {
    *this = ::std::move(from);
  }

  inline CoordinationServiceError& operator=(const CoordinationServiceError& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline CoordinationServiceError& operator=(CoordinationServiceError&& from) noexcept {
    if (this == &from) return *this;
    // Move is a cheap internal swap only when both objects share an arena;
    // otherwise it degrades to a deep copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const CoordinationServiceError& default_instance() {
    return *internal_default_instance();
  }
  static inline const CoordinationServiceError* internal_default_instance() {
    return reinterpret_cast<const CoordinationServiceError*>(
               &_CoordinationServiceError_default_instance_);
  }
  // Index of this message within the generated file's message list.
  static constexpr int kIndexInFileMessages =
    1;

  friend void swap(CoordinationServiceError& a, CoordinationServiceError& b) {
    a.Swap(&b);
  }
  inline void Swap(CoordinationServiceError* other) {
    if (other == this) return;
    // Same-arena swaps exchange internals; cross-arena swaps must copy.
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Caller must guarantee both messages live on the same arena.
  void UnsafeArenaSwap(CoordinationServiceError* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  CoordinationServiceError* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<CoordinationServiceError>(arena);
  }
  CoordinationServiceError* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const CoordinationServiceError& from);
  void MergeFrom(const CoordinationServiceError& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  // Returns the size cached by the last ByteSizeLong()/serialization pass.
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(CoordinationServiceError* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.CoordinationServiceError";
  }
  protected:
  explicit CoordinationServiceError(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kSourceTaskFieldNumber = 4,
    kIsReportedErrorFieldNumber = 3,
  };
  // .tensorflow.CoordinatedTask source_task = 4;
  bool has_source_task() const;
  private:
  bool _internal_has_source_task() const;
  public:
  void clear_source_task();
  const ::tensorflow::CoordinatedTask& source_task() const;
  // Transfers ownership of the submessage to the caller.
  PROTOBUF_NODISCARD ::tensorflow::CoordinatedTask* release_source_task();
  ::tensorflow::CoordinatedTask* mutable_source_task();
  void set_allocated_source_task(::tensorflow::CoordinatedTask* source_task);
  private:
  const ::tensorflow::CoordinatedTask& _internal_source_task() const;
  ::tensorflow::CoordinatedTask* _internal_mutable_source_task();
  public:
  // Arena-unsafe variants: skip ownership bookkeeping; caller manages arenas.
  void unsafe_arena_set_allocated_source_task(
      ::tensorflow::CoordinatedTask* source_task);
  ::tensorflow::CoordinatedTask* unsafe_arena_release_source_task();

  // bool is_reported_error = 3;
  void clear_is_reported_error();
  bool is_reported_error() const;
  void set_is_reported_error(bool value);
  private:
  bool _internal_is_reported_error() const;
  void _internal_set_is_reported_error(bool value);
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.CoordinationServiceError)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage; layout must match the offsets table in the generated .cc.
  struct Impl_ {
    ::tensorflow::CoordinatedTask* source_task_;
    bool is_reported_error_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
518 // -------------------------------------------------------------------
519 
// Generated message tensorflow.TfDeviceList. Single field (from the .proto):
// repeated .tensorflow.DeviceAttributes devices = 1. Lite runtime: derives
// from MessageLite.
class TfDeviceList final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.TfDeviceList) */ {
 public:
  inline TfDeviceList() : TfDeviceList(nullptr) {}
  ~TfDeviceList() override;
  // Constant-initialized ctor used only for the global default instance.
  explicit PROTOBUF_CONSTEXPR TfDeviceList(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  TfDeviceList(const TfDeviceList& from);
  TfDeviceList(TfDeviceList&& from) noexcept
    : TfDeviceList() {
    *this = ::std::move(from);
  }

  inline TfDeviceList& operator=(const TfDeviceList& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline TfDeviceList& operator=(TfDeviceList&& from) noexcept {
    if (this == &from) return *this;
    // Move is a cheap internal swap only when both objects share an arena;
    // otherwise it degrades to a deep copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const TfDeviceList& default_instance() {
    return *internal_default_instance();
  }
  static inline const TfDeviceList* internal_default_instance() {
    return reinterpret_cast<const TfDeviceList*>(
               &_TfDeviceList_default_instance_);
  }
  // Index of this message within the generated file's message list.
  static constexpr int kIndexInFileMessages =
    2;

  friend void swap(TfDeviceList& a, TfDeviceList& b) {
    a.Swap(&b);
  }
  inline void Swap(TfDeviceList* other) {
    if (other == this) return;
    // Same-arena swaps exchange internals; cross-arena swaps must copy.
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Caller must guarantee both messages live on the same arena.
  void UnsafeArenaSwap(TfDeviceList* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  TfDeviceList* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<TfDeviceList>(arena);
  }
  TfDeviceList* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const TfDeviceList& from);
  void MergeFrom(const TfDeviceList& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  // Returns the size cached by the last ByteSizeLong()/serialization pass.
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(TfDeviceList* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.TfDeviceList";
  }
  protected:
  explicit TfDeviceList(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kDevicesFieldNumber = 1,
  };
  // repeated .tensorflow.DeviceAttributes devices = 1;
  int devices_size() const;
  private:
  int _internal_devices_size() const;
  public:
  void clear_devices();
  ::tensorflow::DeviceAttributes* mutable_devices(int index);
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::DeviceAttributes >*
      mutable_devices();
  private:
  const ::tensorflow::DeviceAttributes& _internal_devices(int index) const;
  ::tensorflow::DeviceAttributes* _internal_add_devices();
  public:
  const ::tensorflow::DeviceAttributes& devices(int index) const;
  // Appends a new, default-initialized element and returns it.
  ::tensorflow::DeviceAttributes* add_devices();
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::DeviceAttributes >&
      devices() const;

  // @@protoc_insertion_point(class_scope:tensorflow.TfDeviceList)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage; layout must match the offsets table in the generated .cc.
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::DeviceAttributes > devices_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
661 // -------------------------------------------------------------------
662 
// tensorflow.XlaDeviceList: generated lite-runtime (MessageLite) message with
// a single submessage field `devices` (.xla.GlobalTopologyProto, field 1).
class XlaDeviceList final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.XlaDeviceList) */ {
 public:
  inline XlaDeviceList() : XlaDeviceList(nullptr) {}
  ~XlaDeviceList() override;
  explicit PROTOBUF_CONSTEXPR XlaDeviceList(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  XlaDeviceList(const XlaDeviceList& from);
  XlaDeviceList(XlaDeviceList&& from) noexcept
    : XlaDeviceList() {
    *this = ::std::move(from);
  }

  inline XlaDeviceList& operator=(const XlaDeviceList& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline XlaDeviceList& operator=(XlaDeviceList&& from) noexcept {
    if (this == &from) return *this;
    // Move is only a cheap internal swap when both messages share the same
    // owning arena; otherwise it falls back to a deep copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const XlaDeviceList& default_instance() {
    return *internal_default_instance();
  }
  static inline const XlaDeviceList* internal_default_instance() {
    return reinterpret_cast<const XlaDeviceList*>(
               &_XlaDeviceList_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    3;

  friend void swap(XlaDeviceList& a, XlaDeviceList& b) {
    a.Swap(&b);
  }
  inline void Swap(XlaDeviceList* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      // Cross-arena swap must copy through a temporary.
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(XlaDeviceList* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  XlaDeviceList* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<XlaDeviceList>(arena);
  }
  XlaDeviceList* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const XlaDeviceList& from);
  void MergeFrom(const XlaDeviceList& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(XlaDeviceList* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.XlaDeviceList";
  }
  protected:
  explicit XlaDeviceList(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kDevicesFieldNumber = 1,
  };
  // .xla.GlobalTopologyProto devices = 1;
  bool has_devices() const;
  private:
  bool _internal_has_devices() const;
  public:
  void clear_devices();
  const ::xla::GlobalTopologyProto& devices() const;
  PROTOBUF_NODISCARD ::xla::GlobalTopologyProto* release_devices();
  ::xla::GlobalTopologyProto* mutable_devices();
  void set_allocated_devices(::xla::GlobalTopologyProto* devices);
  private:
  const ::xla::GlobalTopologyProto& _internal_devices() const;
  ::xla::GlobalTopologyProto* _internal_mutable_devices();
  public:
  void unsafe_arena_set_allocated_devices(
      ::xla::GlobalTopologyProto* devices);
  ::xla::GlobalTopologyProto* unsafe_arena_release_devices();

  // @@protoc_insertion_point(class_scope:tensorflow.XlaDeviceList)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  struct Impl_ {
    ::xla::GlobalTopologyProto* devices_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
804 // -------------------------------------------------------------------
805 
// tensorflow.CoordinationServiceDeviceInfo: generated lite-runtime message
// whose payload is a oneof `type` holding either a tensorflow.TfDeviceList
// (`tf`, field 1) or a tensorflow.XlaDeviceList (`xla`, field 2).
class CoordinationServiceDeviceInfo final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.CoordinationServiceDeviceInfo) */ {
 public:
  inline CoordinationServiceDeviceInfo() : CoordinationServiceDeviceInfo(nullptr) {}
  ~CoordinationServiceDeviceInfo() override;
  explicit PROTOBUF_CONSTEXPR CoordinationServiceDeviceInfo(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  CoordinationServiceDeviceInfo(const CoordinationServiceDeviceInfo& from);
  CoordinationServiceDeviceInfo(CoordinationServiceDeviceInfo&& from) noexcept
    : CoordinationServiceDeviceInfo() {
    *this = ::std::move(from);
  }

  inline CoordinationServiceDeviceInfo& operator=(const CoordinationServiceDeviceInfo& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline CoordinationServiceDeviceInfo& operator=(CoordinationServiceDeviceInfo&& from) noexcept {
    if (this == &from) return *this;
    // Move is only a cheap internal swap when both messages share the same
    // owning arena; otherwise it falls back to a deep copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const CoordinationServiceDeviceInfo& default_instance() {
    return *internal_default_instance();
  }
  // Discriminator for the `type` oneof; enumerator values match the .proto
  // field numbers, with TYPE_NOT_SET meaning no member is set.
  enum TypeCase {
    kTf = 1,
    kXla = 2,
    TYPE_NOT_SET = 0,
  };

  static inline const CoordinationServiceDeviceInfo* internal_default_instance() {
    return reinterpret_cast<const CoordinationServiceDeviceInfo*>(
               &_CoordinationServiceDeviceInfo_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    4;

  friend void swap(CoordinationServiceDeviceInfo& a, CoordinationServiceDeviceInfo& b) {
    a.Swap(&b);
  }
  inline void Swap(CoordinationServiceDeviceInfo* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      // Cross-arena swap must copy through a temporary.
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(CoordinationServiceDeviceInfo* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  CoordinationServiceDeviceInfo* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<CoordinationServiceDeviceInfo>(arena);
  }
  CoordinationServiceDeviceInfo* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const CoordinationServiceDeviceInfo& from);
  void MergeFrom(const CoordinationServiceDeviceInfo& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(CoordinationServiceDeviceInfo* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.CoordinationServiceDeviceInfo";
  }
  protected:
  explicit CoordinationServiceDeviceInfo(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kTfFieldNumber = 1,
    kXlaFieldNumber = 2,
  };
  // .tensorflow.TfDeviceList tf = 1;
  bool has_tf() const;
  private:
  bool _internal_has_tf() const;
  public:
  void clear_tf();
  const ::tensorflow::TfDeviceList& tf() const;
  PROTOBUF_NODISCARD ::tensorflow::TfDeviceList* release_tf();
  ::tensorflow::TfDeviceList* mutable_tf();
  void set_allocated_tf(::tensorflow::TfDeviceList* tf);
  private:
  const ::tensorflow::TfDeviceList& _internal_tf() const;
  ::tensorflow::TfDeviceList* _internal_mutable_tf();
  public:
  void unsafe_arena_set_allocated_tf(
      ::tensorflow::TfDeviceList* tf);
  ::tensorflow::TfDeviceList* unsafe_arena_release_tf();

  // .tensorflow.XlaDeviceList xla = 2;
  bool has_xla() const;
  private:
  bool _internal_has_xla() const;
  public:
  void clear_xla();
  const ::tensorflow::XlaDeviceList& xla() const;
  PROTOBUF_NODISCARD ::tensorflow::XlaDeviceList* release_xla();
  ::tensorflow::XlaDeviceList* mutable_xla();
  void set_allocated_xla(::tensorflow::XlaDeviceList* xla);
  private:
  const ::tensorflow::XlaDeviceList& _internal_xla() const;
  ::tensorflow::XlaDeviceList* _internal_mutable_xla();
  public:
  void unsafe_arena_set_allocated_xla(
      ::tensorflow::XlaDeviceList* xla);
  ::tensorflow::XlaDeviceList* unsafe_arena_release_xla();

  void clear_type();
  TypeCase type_case() const;
  // @@protoc_insertion_point(class_scope:tensorflow.CoordinationServiceDeviceInfo)
 private:
  class _Internal;
  void set_has_tf();
  void set_has_xla();

  inline bool has_type() const;
  inline void clear_has_type();

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  struct Impl_ {
    // Storage for the `type` oneof: at most one pointer is active at a time,
    // selected by _oneof_case_[0].
    union TypeUnion {
      constexpr TypeUnion() : _constinit_{} {}
        ::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized _constinit_;
      ::tensorflow::TfDeviceList* tf_;
      ::tensorflow::XlaDeviceList* xla_;
    } type_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
    ::uint32_t _oneof_case_[1];

  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
986 // -------------------------------------------------------------------
987 
// tensorflow.RegisterTaskRequest: generated lite-runtime message with a
// submessage field `source_task` (.tensorflow.CoordinatedTask, field 5) and a
// fixed64 field `incarnation` (field 3).
class RegisterTaskRequest final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.RegisterTaskRequest) */ {
 public:
  inline RegisterTaskRequest() : RegisterTaskRequest(nullptr) {}
  ~RegisterTaskRequest() override;
  explicit PROTOBUF_CONSTEXPR RegisterTaskRequest(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  RegisterTaskRequest(const RegisterTaskRequest& from);
  RegisterTaskRequest(RegisterTaskRequest&& from) noexcept
    : RegisterTaskRequest() {
    *this = ::std::move(from);
  }

  inline RegisterTaskRequest& operator=(const RegisterTaskRequest& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline RegisterTaskRequest& operator=(RegisterTaskRequest&& from) noexcept {
    if (this == &from) return *this;
    // Move is only a cheap internal swap when both messages share the same
    // owning arena; otherwise it falls back to a deep copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const RegisterTaskRequest& default_instance() {
    return *internal_default_instance();
  }
  static inline const RegisterTaskRequest* internal_default_instance() {
    return reinterpret_cast<const RegisterTaskRequest*>(
               &_RegisterTaskRequest_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    5;

  friend void swap(RegisterTaskRequest& a, RegisterTaskRequest& b) {
    a.Swap(&b);
  }
  inline void Swap(RegisterTaskRequest* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      // Cross-arena swap must copy through a temporary.
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(RegisterTaskRequest* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  RegisterTaskRequest* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<RegisterTaskRequest>(arena);
  }
  RegisterTaskRequest* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const RegisterTaskRequest& from);
  void MergeFrom(const RegisterTaskRequest& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(RegisterTaskRequest* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.RegisterTaskRequest";
  }
  protected:
  explicit RegisterTaskRequest(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kSourceTaskFieldNumber = 5,
    kIncarnationFieldNumber = 3,
  };
  // .tensorflow.CoordinatedTask source_task = 5;
  bool has_source_task() const;
  private:
  bool _internal_has_source_task() const;
  public:
  void clear_source_task();
  const ::tensorflow::CoordinatedTask& source_task() const;
  PROTOBUF_NODISCARD ::tensorflow::CoordinatedTask* release_source_task();
  ::tensorflow::CoordinatedTask* mutable_source_task();
  void set_allocated_source_task(::tensorflow::CoordinatedTask* source_task);
  private:
  const ::tensorflow::CoordinatedTask& _internal_source_task() const;
  ::tensorflow::CoordinatedTask* _internal_mutable_source_task();
  public:
  void unsafe_arena_set_allocated_source_task(
      ::tensorflow::CoordinatedTask* source_task);
  ::tensorflow::CoordinatedTask* unsafe_arena_release_source_task();

  // fixed64 incarnation = 3;
  void clear_incarnation();
  ::uint64_t incarnation() const;
  void set_incarnation(::uint64_t value);
  private:
  ::uint64_t _internal_incarnation() const;
  void _internal_set_incarnation(::uint64_t value);
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.RegisterTaskRequest)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  struct Impl_ {
    ::tensorflow::CoordinatedTask* source_task_;
    ::uint64_t incarnation_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
1140 // -------------------------------------------------------------------
1141 
// tensorflow.RegisterTaskResponse: generated lite-runtime message with a
// single fixed64 field `leader_incarnation` (field 1).
class RegisterTaskResponse final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.RegisterTaskResponse) */ {
 public:
  inline RegisterTaskResponse() : RegisterTaskResponse(nullptr) {}
  ~RegisterTaskResponse() override;
  explicit PROTOBUF_CONSTEXPR RegisterTaskResponse(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  RegisterTaskResponse(const RegisterTaskResponse& from);
  RegisterTaskResponse(RegisterTaskResponse&& from) noexcept
    : RegisterTaskResponse() {
    *this = ::std::move(from);
  }

  inline RegisterTaskResponse& operator=(const RegisterTaskResponse& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline RegisterTaskResponse& operator=(RegisterTaskResponse&& from) noexcept {
    if (this == &from) return *this;
    // Move is only a cheap internal swap when both messages share the same
    // owning arena; otherwise it falls back to a deep copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const RegisterTaskResponse& default_instance() {
    return *internal_default_instance();
  }
  static inline const RegisterTaskResponse* internal_default_instance() {
    return reinterpret_cast<const RegisterTaskResponse*>(
               &_RegisterTaskResponse_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    6;

  friend void swap(RegisterTaskResponse& a, RegisterTaskResponse& b) {
    a.Swap(&b);
  }
  inline void Swap(RegisterTaskResponse* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      // Cross-arena swap must copy through a temporary.
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(RegisterTaskResponse* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  RegisterTaskResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<RegisterTaskResponse>(arena);
  }
  RegisterTaskResponse* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const RegisterTaskResponse& from);
  void MergeFrom(const RegisterTaskResponse& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(RegisterTaskResponse* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.RegisterTaskResponse";
  }
  protected:
  explicit RegisterTaskResponse(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kLeaderIncarnationFieldNumber = 1,
  };
  // fixed64 leader_incarnation = 1;
  void clear_leader_incarnation();
  ::uint64_t leader_incarnation() const;
  void set_leader_incarnation(::uint64_t value);
  private:
  ::uint64_t _internal_leader_incarnation() const;
  void _internal_set_leader_incarnation(::uint64_t value);
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.RegisterTaskResponse)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  struct Impl_ {
    ::uint64_t leader_incarnation_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
1274 // -------------------------------------------------------------------
1275 
// tensorflow.HeartbeatRequest: generated lite-runtime message with a
// submessage field `source_task` (.tensorflow.CoordinatedTask, field 4) and a
// fixed64 field `incarnation` (field 3).
class HeartbeatRequest final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.HeartbeatRequest) */ {
 public:
  inline HeartbeatRequest() : HeartbeatRequest(nullptr) {}
  ~HeartbeatRequest() override;
  explicit PROTOBUF_CONSTEXPR HeartbeatRequest(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  HeartbeatRequest(const HeartbeatRequest& from);
  HeartbeatRequest(HeartbeatRequest&& from) noexcept
    : HeartbeatRequest() {
    *this = ::std::move(from);
  }

  inline HeartbeatRequest& operator=(const HeartbeatRequest& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline HeartbeatRequest& operator=(HeartbeatRequest&& from) noexcept {
    if (this == &from) return *this;
    // Move is only a cheap internal swap when both messages share the same
    // owning arena; otherwise it falls back to a deep copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const HeartbeatRequest& default_instance() {
    return *internal_default_instance();
  }
  static inline const HeartbeatRequest* internal_default_instance() {
    return reinterpret_cast<const HeartbeatRequest*>(
               &_HeartbeatRequest_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    7;

  friend void swap(HeartbeatRequest& a, HeartbeatRequest& b) {
    a.Swap(&b);
  }
  inline void Swap(HeartbeatRequest* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      // Cross-arena swap must copy through a temporary.
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(HeartbeatRequest* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  HeartbeatRequest* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<HeartbeatRequest>(arena);
  }
  HeartbeatRequest* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const HeartbeatRequest& from);
  void MergeFrom(const HeartbeatRequest& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(HeartbeatRequest* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.HeartbeatRequest";
  }
  protected:
  explicit HeartbeatRequest(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kSourceTaskFieldNumber = 4,
    kIncarnationFieldNumber = 3,
  };
  // .tensorflow.CoordinatedTask source_task = 4;
  bool has_source_task() const;
  private:
  bool _internal_has_source_task() const;
  public:
  void clear_source_task();
  const ::tensorflow::CoordinatedTask& source_task() const;
  PROTOBUF_NODISCARD ::tensorflow::CoordinatedTask* release_source_task();
  ::tensorflow::CoordinatedTask* mutable_source_task();
  void set_allocated_source_task(::tensorflow::CoordinatedTask* source_task);
  private:
  const ::tensorflow::CoordinatedTask& _internal_source_task() const;
  ::tensorflow::CoordinatedTask* _internal_mutable_source_task();
  public:
  void unsafe_arena_set_allocated_source_task(
      ::tensorflow::CoordinatedTask* source_task);
  ::tensorflow::CoordinatedTask* unsafe_arena_release_source_task();

  // fixed64 incarnation = 3;
  void clear_incarnation();
  ::uint64_t incarnation() const;
  void set_incarnation(::uint64_t value);
  private:
  ::uint64_t _internal_incarnation() const;
  void _internal_set_incarnation(::uint64_t value);
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.HeartbeatRequest)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  struct Impl_ {
    ::tensorflow::CoordinatedTask* source_task_;
    ::uint64_t incarnation_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
1428 // -------------------------------------------------------------------
1429 
1430 class HeartbeatResponse final :
1431     public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.HeartbeatResponse) */ {
1432  public:
HeartbeatResponse()1433   inline HeartbeatResponse() : HeartbeatResponse(nullptr) {}
1434   ~HeartbeatResponse() override;
1435   explicit PROTOBUF_CONSTEXPR HeartbeatResponse(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
1436 
1437   HeartbeatResponse(const HeartbeatResponse& from);
HeartbeatResponse(HeartbeatResponse && from)1438   HeartbeatResponse(HeartbeatResponse&& from) noexcept
1439     : HeartbeatResponse() {
1440     *this = ::std::move(from);
1441   }
1442 
1443   inline HeartbeatResponse& operator=(const HeartbeatResponse& from) {
1444     if (this == &from) return *this;
1445     CopyFrom(from);
1446     return *this;
1447   }
1448   inline HeartbeatResponse& operator=(HeartbeatResponse&& from) noexcept {
1449     if (this == &from) return *this;
1450     if (GetOwningArena() == from.GetOwningArena()
1451   #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
1452         && GetOwningArena() != nullptr
1453   #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
1454     ) {
1455       InternalSwap(&from);
1456     } else {
1457       CopyFrom(from);
1458     }
1459     return *this;
1460   }
1461 
default_instance()1462   static const HeartbeatResponse& default_instance() {
1463     return *internal_default_instance();
1464   }
internal_default_instance()1465   static inline const HeartbeatResponse* internal_default_instance() {
1466     return reinterpret_cast<const HeartbeatResponse*>(
1467                &_HeartbeatResponse_default_instance_);
1468   }
1469   static constexpr int kIndexInFileMessages =
1470     8;
1471 
swap(HeartbeatResponse & a,HeartbeatResponse & b)1472   friend void swap(HeartbeatResponse& a, HeartbeatResponse& b) {
1473     a.Swap(&b);
1474   }
Swap(HeartbeatResponse * other)1475   inline void Swap(HeartbeatResponse* other) {
1476     if (other == this) return;
1477   #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
1478     if (GetOwningArena() != nullptr &&
1479         GetOwningArena() == other->GetOwningArena()) {
1480    #else  // PROTOBUF_FORCE_COPY_IN_SWAP
1481     if (GetOwningArena() == other->GetOwningArena()) {
1482   #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
1483       InternalSwap(other);
1484     } else {
1485       ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
1486     }
1487   }
1488   void UnsafeArenaSwap(HeartbeatResponse* other) {
1489     if (other == this) return;
1490     GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
1491     InternalSwap(other);
1492   }
1493 
1494   // implements Message ----------------------------------------------
1495 
1496   HeartbeatResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
1497     return CreateMaybeMessage<HeartbeatResponse>(arena);
1498   }
1499   HeartbeatResponse* New() const {
1500     return New(nullptr);
1501   }
1502   void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
1503   void CopyFrom(const HeartbeatResponse& from);
1504   void MergeFrom(const HeartbeatResponse& from);
1505   PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
1506   bool IsInitialized() const final;
1507 
1508   size_t ByteSizeLong() const final;
1509   const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
1510   ::uint8_t* _InternalSerialize(
1511       ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
1512   int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
1513 
1514   private:
1515   void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
1516   void SharedDtor();
1517   void SetCachedSize(int size) const;
1518   void InternalSwap(HeartbeatResponse* other);
1519 
1520   private:
1521   friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
1522   static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
1523     return "tensorflow.HeartbeatResponse";
1524   }
1525   protected:
1526   explicit HeartbeatResponse(::PROTOBUF_NAMESPACE_ID::Arena* arena,
1527                        bool is_message_owned = false);
1528   public:
1529 
1530   std::string GetTypeName() const final;
1531 
1532   // nested types ----------------------------------------------------
1533 
1534   // accessors -------------------------------------------------------
1535 
1536   enum : int {
1537     kLeaderIncarnationFieldNumber = 1,
1538   };
1539   // fixed64 leader_incarnation = 1;
1540   void clear_leader_incarnation();
1541   ::uint64_t leader_incarnation() const;
1542   void set_leader_incarnation(::uint64_t value);
1543   private:
1544   ::uint64_t _internal_leader_incarnation() const;
1545   void _internal_set_leader_incarnation(::uint64_t value);
1546   public:
1547 
1548   // @@protoc_insertion_point(class_scope:tensorflow.HeartbeatResponse)
1549  private:
1550   class _Internal;
1551 
1552   template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
1553   typedef void InternalArenaConstructable_;
1554   typedef void DestructorSkippable_;
1555   struct Impl_ {
1556     ::uint64_t leader_incarnation_;
1557     mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
1558   };
1559   union { Impl_ _impl_; };
1560   friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
1561 };
1562 // -------------------------------------------------------------------
1563 
1564 class WaitForAllTasksRequest final :
1565     public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.WaitForAllTasksRequest) */ {
1566  public:
WaitForAllTasksRequest()1567   inline WaitForAllTasksRequest() : WaitForAllTasksRequest(nullptr) {}
1568   ~WaitForAllTasksRequest() override;
1569   explicit PROTOBUF_CONSTEXPR WaitForAllTasksRequest(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
1570 
1571   WaitForAllTasksRequest(const WaitForAllTasksRequest& from);
WaitForAllTasksRequest(WaitForAllTasksRequest && from)1572   WaitForAllTasksRequest(WaitForAllTasksRequest&& from) noexcept
1573     : WaitForAllTasksRequest() {
1574     *this = ::std::move(from);
1575   }
1576 
1577   inline WaitForAllTasksRequest& operator=(const WaitForAllTasksRequest& from) {
1578     if (this == &from) return *this;
1579     CopyFrom(from);
1580     return *this;
1581   }
1582   inline WaitForAllTasksRequest& operator=(WaitForAllTasksRequest&& from) noexcept {
1583     if (this == &from) return *this;
1584     if (GetOwningArena() == from.GetOwningArena()
1585   #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
1586         && GetOwningArena() != nullptr
1587   #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
1588     ) {
1589       InternalSwap(&from);
1590     } else {
1591       CopyFrom(from);
1592     }
1593     return *this;
1594   }
1595 
default_instance()1596   static const WaitForAllTasksRequest& default_instance() {
1597     return *internal_default_instance();
1598   }
internal_default_instance()1599   static inline const WaitForAllTasksRequest* internal_default_instance() {
1600     return reinterpret_cast<const WaitForAllTasksRequest*>(
1601                &_WaitForAllTasksRequest_default_instance_);
1602   }
1603   static constexpr int kIndexInFileMessages =
1604     9;
1605 
swap(WaitForAllTasksRequest & a,WaitForAllTasksRequest & b)1606   friend void swap(WaitForAllTasksRequest& a, WaitForAllTasksRequest& b) {
1607     a.Swap(&b);
1608   }
Swap(WaitForAllTasksRequest * other)1609   inline void Swap(WaitForAllTasksRequest* other) {
1610     if (other == this) return;
1611   #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
1612     if (GetOwningArena() != nullptr &&
1613         GetOwningArena() == other->GetOwningArena()) {
1614    #else  // PROTOBUF_FORCE_COPY_IN_SWAP
1615     if (GetOwningArena() == other->GetOwningArena()) {
1616   #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
1617       InternalSwap(other);
1618     } else {
1619       ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
1620     }
1621   }
1622   void UnsafeArenaSwap(WaitForAllTasksRequest* other) {
1623     if (other == this) return;
1624     GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
1625     InternalSwap(other);
1626   }
1627 
1628   // implements Message ----------------------------------------------
1629 
1630   WaitForAllTasksRequest* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
1631     return CreateMaybeMessage<WaitForAllTasksRequest>(arena);
1632   }
1633   WaitForAllTasksRequest* New() const {
1634     return New(nullptr);
1635   }
1636   void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
1637   void CopyFrom(const WaitForAllTasksRequest& from);
1638   void MergeFrom(const WaitForAllTasksRequest& from);
1639   PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
1640   bool IsInitialized() const final;
1641 
1642   size_t ByteSizeLong() const final;
1643   const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
1644   ::uint8_t* _InternalSerialize(
1645       ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
1646   int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
1647 
1648   private:
1649   void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
1650   void SharedDtor();
1651   void SetCachedSize(int size) const;
1652   void InternalSwap(WaitForAllTasksRequest* other);
1653 
1654   private:
1655   friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
1656   static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
1657     return "tensorflow.WaitForAllTasksRequest";
1658   }
1659   protected:
1660   explicit WaitForAllTasksRequest(::PROTOBUF_NAMESPACE_ID::Arena* arena,
1661                        bool is_message_owned = false);
1662   public:
1663 
1664   std::string GetTypeName() const final;
1665 
1666   // nested types ----------------------------------------------------
1667 
1668   // accessors -------------------------------------------------------
1669 
1670   enum : int {
1671     kLocalDeviceInfoFieldNumber = 4,
1672     kSourceTaskFieldNumber = 5,
1673   };
1674   // .tensorflow.CoordinationServiceDeviceInfo local_device_info = 4;
1675   bool has_local_device_info() const;
1676   private:
1677   bool _internal_has_local_device_info() const;
1678   public:
1679   void clear_local_device_info();
1680   const ::tensorflow::CoordinationServiceDeviceInfo& local_device_info() const;
1681   PROTOBUF_NODISCARD ::tensorflow::CoordinationServiceDeviceInfo* release_local_device_info();
1682   ::tensorflow::CoordinationServiceDeviceInfo* mutable_local_device_info();
1683   void set_allocated_local_device_info(::tensorflow::CoordinationServiceDeviceInfo* local_device_info);
1684   private:
1685   const ::tensorflow::CoordinationServiceDeviceInfo& _internal_local_device_info() const;
1686   ::tensorflow::CoordinationServiceDeviceInfo* _internal_mutable_local_device_info();
1687   public:
1688   void unsafe_arena_set_allocated_local_device_info(
1689       ::tensorflow::CoordinationServiceDeviceInfo* local_device_info);
1690   ::tensorflow::CoordinationServiceDeviceInfo* unsafe_arena_release_local_device_info();
1691 
1692   // .tensorflow.CoordinatedTask source_task = 5;
1693   bool has_source_task() const;
1694   private:
1695   bool _internal_has_source_task() const;
1696   public:
1697   void clear_source_task();
1698   const ::tensorflow::CoordinatedTask& source_task() const;
1699   PROTOBUF_NODISCARD ::tensorflow::CoordinatedTask* release_source_task();
1700   ::tensorflow::CoordinatedTask* mutable_source_task();
1701   void set_allocated_source_task(::tensorflow::CoordinatedTask* source_task);
1702   private:
1703   const ::tensorflow::CoordinatedTask& _internal_source_task() const;
1704   ::tensorflow::CoordinatedTask* _internal_mutable_source_task();
1705   public:
1706   void unsafe_arena_set_allocated_source_task(
1707       ::tensorflow::CoordinatedTask* source_task);
1708   ::tensorflow::CoordinatedTask* unsafe_arena_release_source_task();
1709 
1710   // @@protoc_insertion_point(class_scope:tensorflow.WaitForAllTasksRequest)
1711  private:
1712   class _Internal;
1713 
1714   template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
1715   typedef void InternalArenaConstructable_;
1716   typedef void DestructorSkippable_;
1717   struct Impl_ {
1718     ::tensorflow::CoordinationServiceDeviceInfo* local_device_info_;
1719     ::tensorflow::CoordinatedTask* source_task_;
1720     mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
1721   };
1722   union { Impl_ _impl_; };
1723   friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
1724 };
1725 // -------------------------------------------------------------------
1726 
1727 class WaitForAllTasksResponse final :
1728     public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.WaitForAllTasksResponse) */ {
1729  public:
WaitForAllTasksResponse()1730   inline WaitForAllTasksResponse() : WaitForAllTasksResponse(nullptr) {}
1731   ~WaitForAllTasksResponse() override;
1732   explicit PROTOBUF_CONSTEXPR WaitForAllTasksResponse(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
1733 
1734   WaitForAllTasksResponse(const WaitForAllTasksResponse& from);
WaitForAllTasksResponse(WaitForAllTasksResponse && from)1735   WaitForAllTasksResponse(WaitForAllTasksResponse&& from) noexcept
1736     : WaitForAllTasksResponse() {
1737     *this = ::std::move(from);
1738   }
1739 
1740   inline WaitForAllTasksResponse& operator=(const WaitForAllTasksResponse& from) {
1741     if (this == &from) return *this;
1742     CopyFrom(from);
1743     return *this;
1744   }
1745   inline WaitForAllTasksResponse& operator=(WaitForAllTasksResponse&& from) noexcept {
1746     if (this == &from) return *this;
1747     if (GetOwningArena() == from.GetOwningArena()
1748   #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
1749         && GetOwningArena() != nullptr
1750   #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
1751     ) {
1752       InternalSwap(&from);
1753     } else {
1754       CopyFrom(from);
1755     }
1756     return *this;
1757   }
1758 
default_instance()1759   static const WaitForAllTasksResponse& default_instance() {
1760     return *internal_default_instance();
1761   }
internal_default_instance()1762   static inline const WaitForAllTasksResponse* internal_default_instance() {
1763     return reinterpret_cast<const WaitForAllTasksResponse*>(
1764                &_WaitForAllTasksResponse_default_instance_);
1765   }
1766   static constexpr int kIndexInFileMessages =
1767     10;
1768 
swap(WaitForAllTasksResponse & a,WaitForAllTasksResponse & b)1769   friend void swap(WaitForAllTasksResponse& a, WaitForAllTasksResponse& b) {
1770     a.Swap(&b);
1771   }
Swap(WaitForAllTasksResponse * other)1772   inline void Swap(WaitForAllTasksResponse* other) {
1773     if (other == this) return;
1774   #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
1775     if (GetOwningArena() != nullptr &&
1776         GetOwningArena() == other->GetOwningArena()) {
1777    #else  // PROTOBUF_FORCE_COPY_IN_SWAP
1778     if (GetOwningArena() == other->GetOwningArena()) {
1779   #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
1780       InternalSwap(other);
1781     } else {
1782       ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
1783     }
1784   }
1785   void UnsafeArenaSwap(WaitForAllTasksResponse* other) {
1786     if (other == this) return;
1787     GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
1788     InternalSwap(other);
1789   }
1790 
1791   // implements Message ----------------------------------------------
1792 
1793   WaitForAllTasksResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
1794     return CreateMaybeMessage<WaitForAllTasksResponse>(arena);
1795   }
1796   WaitForAllTasksResponse* New() const {
1797     return New(nullptr);
1798   }
1799   void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
1800   void CopyFrom(const WaitForAllTasksResponse& from);
1801   void MergeFrom(const WaitForAllTasksResponse& from);
1802   PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
1803   bool IsInitialized() const final;
1804 
1805   size_t ByteSizeLong() const final;
1806   const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
1807   ::uint8_t* _InternalSerialize(
1808       ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
1809   int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
1810 
1811   private:
1812   void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
1813   void SharedDtor();
1814   void SetCachedSize(int size) const;
1815   void InternalSwap(WaitForAllTasksResponse* other);
1816 
1817   private:
1818   friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
1819   static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
1820     return "tensorflow.WaitForAllTasksResponse";
1821   }
1822   protected:
1823   explicit WaitForAllTasksResponse(::PROTOBUF_NAMESPACE_ID::Arena* arena,
1824                        bool is_message_owned = false);
1825   public:
1826 
1827   std::string GetTypeName() const final;
1828 
1829   // nested types ----------------------------------------------------
1830 
1831   // accessors -------------------------------------------------------
1832 
1833   enum : int {
1834     kClusterDeviceInfoFieldNumber = 3,
1835     kLeaderIncarnationFieldNumber = 1,
1836   };
1837   // .tensorflow.CoordinationServiceDeviceInfo cluster_device_info = 3;
1838   bool has_cluster_device_info() const;
1839   private:
1840   bool _internal_has_cluster_device_info() const;
1841   public:
1842   void clear_cluster_device_info();
1843   const ::tensorflow::CoordinationServiceDeviceInfo& cluster_device_info() const;
1844   PROTOBUF_NODISCARD ::tensorflow::CoordinationServiceDeviceInfo* release_cluster_device_info();
1845   ::tensorflow::CoordinationServiceDeviceInfo* mutable_cluster_device_info();
1846   void set_allocated_cluster_device_info(::tensorflow::CoordinationServiceDeviceInfo* cluster_device_info);
1847   private:
1848   const ::tensorflow::CoordinationServiceDeviceInfo& _internal_cluster_device_info() const;
1849   ::tensorflow::CoordinationServiceDeviceInfo* _internal_mutable_cluster_device_info();
1850   public:
1851   void unsafe_arena_set_allocated_cluster_device_info(
1852       ::tensorflow::CoordinationServiceDeviceInfo* cluster_device_info);
1853   ::tensorflow::CoordinationServiceDeviceInfo* unsafe_arena_release_cluster_device_info();
1854 
1855   // fixed64 leader_incarnation = 1;
1856   void clear_leader_incarnation();
1857   ::uint64_t leader_incarnation() const;
1858   void set_leader_incarnation(::uint64_t value);
1859   private:
1860   ::uint64_t _internal_leader_incarnation() const;
1861   void _internal_set_leader_incarnation(::uint64_t value);
1862   public:
1863 
1864   // @@protoc_insertion_point(class_scope:tensorflow.WaitForAllTasksResponse)
1865  private:
1866   class _Internal;
1867 
1868   template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
1869   typedef void InternalArenaConstructable_;
1870   typedef void DestructorSkippable_;
1871   struct Impl_ {
1872     ::tensorflow::CoordinationServiceDeviceInfo* cluster_device_info_;
1873     ::uint64_t leader_incarnation_;
1874     mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
1875   };
1876   union { Impl_ _impl_; };
1877   friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
1878 };
1879 // -------------------------------------------------------------------
1880 
1881 class ShutdownTaskRequest final :
1882     public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.ShutdownTaskRequest) */ {
1883  public:
ShutdownTaskRequest()1884   inline ShutdownTaskRequest() : ShutdownTaskRequest(nullptr) {}
1885   ~ShutdownTaskRequest() override;
1886   explicit PROTOBUF_CONSTEXPR ShutdownTaskRequest(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
1887 
1888   ShutdownTaskRequest(const ShutdownTaskRequest& from);
ShutdownTaskRequest(ShutdownTaskRequest && from)1889   ShutdownTaskRequest(ShutdownTaskRequest&& from) noexcept
1890     : ShutdownTaskRequest() {
1891     *this = ::std::move(from);
1892   }
1893 
1894   inline ShutdownTaskRequest& operator=(const ShutdownTaskRequest& from) {
1895     if (this == &from) return *this;
1896     CopyFrom(from);
1897     return *this;
1898   }
1899   inline ShutdownTaskRequest& operator=(ShutdownTaskRequest&& from) noexcept {
1900     if (this == &from) return *this;
1901     if (GetOwningArena() == from.GetOwningArena()
1902   #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
1903         && GetOwningArena() != nullptr
1904   #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
1905     ) {
1906       InternalSwap(&from);
1907     } else {
1908       CopyFrom(from);
1909     }
1910     return *this;
1911   }
1912 
default_instance()1913   static const ShutdownTaskRequest& default_instance() {
1914     return *internal_default_instance();
1915   }
internal_default_instance()1916   static inline const ShutdownTaskRequest* internal_default_instance() {
1917     return reinterpret_cast<const ShutdownTaskRequest*>(
1918                &_ShutdownTaskRequest_default_instance_);
1919   }
1920   static constexpr int kIndexInFileMessages =
1921     11;
1922 
swap(ShutdownTaskRequest & a,ShutdownTaskRequest & b)1923   friend void swap(ShutdownTaskRequest& a, ShutdownTaskRequest& b) {
1924     a.Swap(&b);
1925   }
Swap(ShutdownTaskRequest * other)1926   inline void Swap(ShutdownTaskRequest* other) {
1927     if (other == this) return;
1928   #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
1929     if (GetOwningArena() != nullptr &&
1930         GetOwningArena() == other->GetOwningArena()) {
1931    #else  // PROTOBUF_FORCE_COPY_IN_SWAP
1932     if (GetOwningArena() == other->GetOwningArena()) {
1933   #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
1934       InternalSwap(other);
1935     } else {
1936       ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
1937     }
1938   }
1939   void UnsafeArenaSwap(ShutdownTaskRequest* other) {
1940     if (other == this) return;
1941     GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
1942     InternalSwap(other);
1943   }
1944 
1945   // implements Message ----------------------------------------------
1946 
1947   ShutdownTaskRequest* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
1948     return CreateMaybeMessage<ShutdownTaskRequest>(arena);
1949   }
1950   ShutdownTaskRequest* New() const {
1951     return New(nullptr);
1952   }
1953   void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
1954   void CopyFrom(const ShutdownTaskRequest& from);
1955   void MergeFrom(const ShutdownTaskRequest& from);
1956   PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
1957   bool IsInitialized() const final;
1958 
1959   size_t ByteSizeLong() const final;
1960   const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
1961   ::uint8_t* _InternalSerialize(
1962       ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
1963   int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
1964 
1965   private:
1966   void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
1967   void SharedDtor();
1968   void SetCachedSize(int size) const;
1969   void InternalSwap(ShutdownTaskRequest* other);
1970 
1971   private:
1972   friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
1973   static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
1974     return "tensorflow.ShutdownTaskRequest";
1975   }
1976   protected:
1977   explicit ShutdownTaskRequest(::PROTOBUF_NAMESPACE_ID::Arena* arena,
1978                        bool is_message_owned = false);
1979   public:
1980 
1981   std::string GetTypeName() const final;
1982 
1983   // nested types ----------------------------------------------------
1984 
1985   // accessors -------------------------------------------------------
1986 
1987   enum : int {
1988     kSourceTaskFieldNumber = 1,
1989   };
1990   // .tensorflow.CoordinatedTask source_task = 1;
1991   bool has_source_task() const;
1992   private:
1993   bool _internal_has_source_task() const;
1994   public:
1995   void clear_source_task();
1996   const ::tensorflow::CoordinatedTask& source_task() const;
1997   PROTOBUF_NODISCARD ::tensorflow::CoordinatedTask* release_source_task();
1998   ::tensorflow::CoordinatedTask* mutable_source_task();
1999   void set_allocated_source_task(::tensorflow::CoordinatedTask* source_task);
2000   private:
2001   const ::tensorflow::CoordinatedTask& _internal_source_task() const;
2002   ::tensorflow::CoordinatedTask* _internal_mutable_source_task();
2003   public:
2004   void unsafe_arena_set_allocated_source_task(
2005       ::tensorflow::CoordinatedTask* source_task);
2006   ::tensorflow::CoordinatedTask* unsafe_arena_release_source_task();
2007 
2008   // @@protoc_insertion_point(class_scope:tensorflow.ShutdownTaskRequest)
2009  private:
2010   class _Internal;
2011 
2012   template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
2013   typedef void InternalArenaConstructable_;
2014   typedef void DestructorSkippable_;
2015   struct Impl_ {
2016     ::tensorflow::CoordinatedTask* source_task_;
2017     mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
2018   };
2019   union { Impl_ _impl_; };
2020   friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
2021 };
2022 // -------------------------------------------------------------------
2023 
2024 class ShutdownTaskResponse final :
2025     public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.ShutdownTaskResponse) */ {
2026  public:
ShutdownTaskResponse()2027   inline ShutdownTaskResponse() : ShutdownTaskResponse(nullptr) {}
2028   ~ShutdownTaskResponse() override;
2029   explicit PROTOBUF_CONSTEXPR ShutdownTaskResponse(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
2030 
2031   ShutdownTaskResponse(const ShutdownTaskResponse& from);
ShutdownTaskResponse(ShutdownTaskResponse && from)2032   ShutdownTaskResponse(ShutdownTaskResponse&& from) noexcept
2033     : ShutdownTaskResponse() {
2034     *this = ::std::move(from);
2035   }
2036 
2037   inline ShutdownTaskResponse& operator=(const ShutdownTaskResponse& from) {
2038     if (this == &from) return *this;
2039     CopyFrom(from);
2040     return *this;
2041   }
2042   inline ShutdownTaskResponse& operator=(ShutdownTaskResponse&& from) noexcept {
2043     if (this == &from) return *this;
2044     if (GetOwningArena() == from.GetOwningArena()
2045   #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
2046         && GetOwningArena() != nullptr
2047   #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
2048     ) {
2049       InternalSwap(&from);
2050     } else {
2051       CopyFrom(from);
2052     }
2053     return *this;
2054   }
2055 
default_instance()2056   static const ShutdownTaskResponse& default_instance() {
2057     return *internal_default_instance();
2058   }
internal_default_instance()2059   static inline const ShutdownTaskResponse* internal_default_instance() {
2060     return reinterpret_cast<const ShutdownTaskResponse*>(
2061                &_ShutdownTaskResponse_default_instance_);
2062   }
2063   static constexpr int kIndexInFileMessages =
2064     12;
2065 
swap(ShutdownTaskResponse & a,ShutdownTaskResponse & b)2066   friend void swap(ShutdownTaskResponse& a, ShutdownTaskResponse& b) {
2067     a.Swap(&b);
2068   }
Swap(ShutdownTaskResponse * other)2069   inline void Swap(ShutdownTaskResponse* other) {
2070     if (other == this) return;
2071   #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
2072     if (GetOwningArena() != nullptr &&
2073         GetOwningArena() == other->GetOwningArena()) {
2074    #else  // PROTOBUF_FORCE_COPY_IN_SWAP
2075     if (GetOwningArena() == other->GetOwningArena()) {
2076   #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
2077       InternalSwap(other);
2078     } else {
2079       ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
2080     }
2081   }
2082   void UnsafeArenaSwap(ShutdownTaskResponse* other) {
2083     if (other == this) return;
2084     GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
2085     InternalSwap(other);
2086   }
2087 
2088   // implements Message ----------------------------------------------
2089 
2090   ShutdownTaskResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
2091     return CreateMaybeMessage<ShutdownTaskResponse>(arena);
2092   }
2093   ShutdownTaskResponse* New() const {
2094     return New(nullptr);
2095   }
2096   void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
2097   void CopyFrom(const ShutdownTaskResponse& from);
2098   void MergeFrom(const ShutdownTaskResponse& from);
2099   PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
2100   bool IsInitialized() const final;
2101 
2102   size_t ByteSizeLong() const final;
2103   const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
2104   ::uint8_t* _InternalSerialize(
2105       ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
2106   int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
2107 
2108   private:
2109   void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
2110   void SharedDtor();
2111   void SetCachedSize(int size) const;
2112   void InternalSwap(ShutdownTaskResponse* other);
2113 
2114   private:
2115   friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
2116   static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
2117     return "tensorflow.ShutdownTaskResponse";
2118   }
2119   protected:
2120   explicit ShutdownTaskResponse(::PROTOBUF_NAMESPACE_ID::Arena* arena,
2121                        bool is_message_owned = false);
2122   public:
2123 
2124   std::string GetTypeName() const final;
2125 
2126   // nested types ----------------------------------------------------
2127 
2128   // accessors -------------------------------------------------------
2129 
2130   // @@protoc_insertion_point(class_scope:tensorflow.ShutdownTaskResponse)
2131  private:
2132   class _Internal;
2133 
2134   template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
2135   typedef void InternalArenaConstructable_;
2136   typedef void DestructorSkippable_;
2137   struct Impl_ {
2138     mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
2139   };
2140   union { Impl_ _impl_; };
2141   friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
2142 };
2143 // -------------------------------------------------------------------
2144 
// Lite-runtime message generated from `tensorflow.ResetTaskRequest`.
// Declares exactly one field: `.tensorflow.CoordinatedTask source_task = 1`
// (accessor section below). Derives from MessageLite, so descriptor/
// reflection APIs are unavailable; GetTypeName() is the only runtime
// type identification.
class ResetTaskRequest final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.ResetTaskRequest) */ {
 public:
  inline ResetTaskRequest() : ResetTaskRequest(nullptr) {}
  ~ResetTaskRequest() override;
  // Constant-initialized constructor used for the global default instance.
  explicit PROTOBUF_CONSTEXPR ResetTaskRequest(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  ResetTaskRequest(const ResetTaskRequest& from);
  ResetTaskRequest(ResetTaskRequest&& from) noexcept
    : ResetTaskRequest() {
    *this = ::std::move(from);
  }

  inline ResetTaskRequest& operator=(const ResetTaskRequest& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline ResetTaskRequest& operator=(ResetTaskRequest&& from) noexcept {
    if (this == &from) return *this;
    // Stealing contents via InternalSwap is only valid when both messages
    // share the same owning arena; otherwise fall back to a deep copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const ResetTaskRequest& default_instance() {
    return *internal_default_instance();
  }
  static inline const ResetTaskRequest* internal_default_instance() {
    return reinterpret_cast<const ResetTaskRequest*>(
               &_ResetTaskRequest_default_instance_);
  }
  // Index of this message within the generated file's message table.
  static constexpr int kIndexInFileMessages =
    13;

  friend void swap(ResetTaskRequest& a, ResetTaskRequest& b) {
    a.Swap(&b);
  }
  inline void Swap(ResetTaskRequest* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      // Messages on different arenas cannot swap pointers; GenericSwap
      // copies through a temporary instead.
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Pointer-swap that skips the cross-arena fallback; caller must
  // guarantee both messages have the same owning arena (DCHECK'd).
  void UnsafeArenaSwap(ResetTaskRequest* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  ResetTaskRequest* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<ResetTaskRequest>(arena);
  }
  ResetTaskRequest* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const ResetTaskRequest& from);
  void MergeFrom(const ResetTaskRequest& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(ResetTaskRequest* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.ResetTaskRequest";
  }
  protected:
  explicit ResetTaskRequest(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kSourceTaskFieldNumber = 1,
  };
  // .tensorflow.CoordinatedTask source_task = 1;
  bool has_source_task() const;
  private:
  bool _internal_has_source_task() const;
  public:
  void clear_source_task();
  const ::tensorflow::CoordinatedTask& source_task() const;
  PROTOBUF_NODISCARD ::tensorflow::CoordinatedTask* release_source_task();
  ::tensorflow::CoordinatedTask* mutable_source_task();
  void set_allocated_source_task(::tensorflow::CoordinatedTask* source_task);
  private:
  const ::tensorflow::CoordinatedTask& _internal_source_task() const;
  ::tensorflow::CoordinatedTask* _internal_mutable_source_task();
  public:
  // unsafe_arena_* variants transfer raw pointers without arena-ownership
  // bookkeeping; see protobuf arena documentation before using.
  void unsafe_arena_set_allocated_source_task(
      ::tensorflow::CoordinatedTask* source_task);
  ::tensorflow::CoordinatedTask* unsafe_arena_release_source_task();

  // @@protoc_insertion_point(class_scope:tensorflow.ResetTaskRequest)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Per-instance state; wrapped in a union to support the
  // constant-initialized default instance.
  struct Impl_ {
    ::tensorflow::CoordinatedTask* source_task_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
2286 // -------------------------------------------------------------------
2287 
// Lite-runtime message generated from `tensorflow.ResetTaskResponse`.
// This message declares no fields (empty response type); the Impl_ struct
// below holds only the serializer's cached-size bookkeeping.
class ResetTaskResponse final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.ResetTaskResponse) */ {
 public:
  inline ResetTaskResponse() : ResetTaskResponse(nullptr) {}
  ~ResetTaskResponse() override;
  // Constant-initialized constructor used for the global default instance.
  explicit PROTOBUF_CONSTEXPR ResetTaskResponse(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  ResetTaskResponse(const ResetTaskResponse& from);
  ResetTaskResponse(ResetTaskResponse&& from) noexcept
    : ResetTaskResponse() {
    *this = ::std::move(from);
  }

  inline ResetTaskResponse& operator=(const ResetTaskResponse& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline ResetTaskResponse& operator=(ResetTaskResponse&& from) noexcept {
    if (this == &from) return *this;
    // Swap only when both messages share an owning arena; otherwise copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const ResetTaskResponse& default_instance() {
    return *internal_default_instance();
  }
  static inline const ResetTaskResponse* internal_default_instance() {
    return reinterpret_cast<const ResetTaskResponse*>(
               &_ResetTaskResponse_default_instance_);
  }
  // Index of this message within the generated file's message table.
  static constexpr int kIndexInFileMessages =
    14;

  friend void swap(ResetTaskResponse& a, ResetTaskResponse& b) {
    a.Swap(&b);
  }
  inline void Swap(ResetTaskResponse* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      // Cross-arena swap copies through a temporary.
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Pointer-swap that requires both messages to share an arena (DCHECK'd).
  void UnsafeArenaSwap(ResetTaskResponse* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  ResetTaskResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<ResetTaskResponse>(arena);
  }
  ResetTaskResponse* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const ResetTaskResponse& from);
  void MergeFrom(const ResetTaskResponse& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(ResetTaskResponse* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.ResetTaskResponse";
  }
  protected:
  explicit ResetTaskResponse(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  // @@protoc_insertion_point(class_scope:tensorflow.ResetTaskResponse)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Per-instance state; wrapped in a union to support the
  // constant-initialized default instance.
  struct Impl_ {
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
2407 // -------------------------------------------------------------------
2408 
// Lite-runtime message generated from `tensorflow.ReportErrorToTaskRequest`.
// Fields (see accessor section):
//   int32  error_code    = 1;
//   string error_message = 2;
//   .tensorflow.CoordinationServiceError error_payload = 5;
class ReportErrorToTaskRequest final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.ReportErrorToTaskRequest) */ {
 public:
  inline ReportErrorToTaskRequest() : ReportErrorToTaskRequest(nullptr) {}
  ~ReportErrorToTaskRequest() override;
  // Constant-initialized constructor used for the global default instance.
  explicit PROTOBUF_CONSTEXPR ReportErrorToTaskRequest(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  ReportErrorToTaskRequest(const ReportErrorToTaskRequest& from);
  ReportErrorToTaskRequest(ReportErrorToTaskRequest&& from) noexcept
    : ReportErrorToTaskRequest() {
    *this = ::std::move(from);
  }

  inline ReportErrorToTaskRequest& operator=(const ReportErrorToTaskRequest& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline ReportErrorToTaskRequest& operator=(ReportErrorToTaskRequest&& from) noexcept {
    if (this == &from) return *this;
    // Swap only when both messages share an owning arena; otherwise copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const ReportErrorToTaskRequest& default_instance() {
    return *internal_default_instance();
  }
  static inline const ReportErrorToTaskRequest* internal_default_instance() {
    return reinterpret_cast<const ReportErrorToTaskRequest*>(
               &_ReportErrorToTaskRequest_default_instance_);
  }
  // Index of this message within the generated file's message table.
  static constexpr int kIndexInFileMessages =
    15;

  friend void swap(ReportErrorToTaskRequest& a, ReportErrorToTaskRequest& b) {
    a.Swap(&b);
  }
  inline void Swap(ReportErrorToTaskRequest* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      // Cross-arena swap copies through a temporary.
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Pointer-swap that requires both messages to share an arena (DCHECK'd).
  void UnsafeArenaSwap(ReportErrorToTaskRequest* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  ReportErrorToTaskRequest* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<ReportErrorToTaskRequest>(arena);
  }
  ReportErrorToTaskRequest* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const ReportErrorToTaskRequest& from);
  void MergeFrom(const ReportErrorToTaskRequest& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(ReportErrorToTaskRequest* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.ReportErrorToTaskRequest";
  }
  protected:
  explicit ReportErrorToTaskRequest(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kErrorMessageFieldNumber = 2,
    kErrorPayloadFieldNumber = 5,
    kErrorCodeFieldNumber = 1,
  };
  // string error_message = 2;
  void clear_error_message();
  const std::string& error_message() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_error_message(ArgT0&& arg0, ArgT... args);
  std::string* mutable_error_message();
  PROTOBUF_NODISCARD std::string* release_error_message();
  void set_allocated_error_message(std::string* error_message);
  private:
  const std::string& _internal_error_message() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_error_message(const std::string& value);
  std::string* _internal_mutable_error_message();
  public:

  // .tensorflow.CoordinationServiceError error_payload = 5;
  bool has_error_payload() const;
  private:
  bool _internal_has_error_payload() const;
  public:
  void clear_error_payload();
  const ::tensorflow::CoordinationServiceError& error_payload() const;
  PROTOBUF_NODISCARD ::tensorflow::CoordinationServiceError* release_error_payload();
  ::tensorflow::CoordinationServiceError* mutable_error_payload();
  void set_allocated_error_payload(::tensorflow::CoordinationServiceError* error_payload);
  private:
  const ::tensorflow::CoordinationServiceError& _internal_error_payload() const;
  ::tensorflow::CoordinationServiceError* _internal_mutable_error_payload();
  public:
  // unsafe_arena_* variants transfer raw pointers without arena-ownership
  // bookkeeping; see protobuf arena documentation before using.
  void unsafe_arena_set_allocated_error_payload(
      ::tensorflow::CoordinationServiceError* error_payload);
  ::tensorflow::CoordinationServiceError* unsafe_arena_release_error_payload();

  // int32 error_code = 1;
  void clear_error_code();
  ::int32_t error_code() const;
  void set_error_code(::int32_t value);
  private:
  ::int32_t _internal_error_code() const;
  void _internal_set_error_code(::int32_t value);
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.ReportErrorToTaskRequest)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Per-instance state; wrapped in a union to support the
  // constant-initialized default instance.
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr error_message_;
    ::tensorflow::CoordinationServiceError* error_payload_;
    ::int32_t error_code_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
2577 // -------------------------------------------------------------------
2578 
// Lite-runtime message generated from `tensorflow.ReportErrorToTaskResponse`.
// This message declares no fields (empty response type); the Impl_ struct
// below holds only the serializer's cached-size bookkeeping.
class ReportErrorToTaskResponse final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.ReportErrorToTaskResponse) */ {
 public:
  inline ReportErrorToTaskResponse() : ReportErrorToTaskResponse(nullptr) {}
  ~ReportErrorToTaskResponse() override;
  // Constant-initialized constructor used for the global default instance.
  explicit PROTOBUF_CONSTEXPR ReportErrorToTaskResponse(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  ReportErrorToTaskResponse(const ReportErrorToTaskResponse& from);
  ReportErrorToTaskResponse(ReportErrorToTaskResponse&& from) noexcept
    : ReportErrorToTaskResponse() {
    *this = ::std::move(from);
  }

  inline ReportErrorToTaskResponse& operator=(const ReportErrorToTaskResponse& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline ReportErrorToTaskResponse& operator=(ReportErrorToTaskResponse&& from) noexcept {
    if (this == &from) return *this;
    // Swap only when both messages share an owning arena; otherwise copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const ReportErrorToTaskResponse& default_instance() {
    return *internal_default_instance();
  }
  static inline const ReportErrorToTaskResponse* internal_default_instance() {
    return reinterpret_cast<const ReportErrorToTaskResponse*>(
               &_ReportErrorToTaskResponse_default_instance_);
  }
  // Index of this message within the generated file's message table.
  static constexpr int kIndexInFileMessages =
    16;

  friend void swap(ReportErrorToTaskResponse& a, ReportErrorToTaskResponse& b) {
    a.Swap(&b);
  }
  inline void Swap(ReportErrorToTaskResponse* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      // Cross-arena swap copies through a temporary.
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Pointer-swap that requires both messages to share an arena (DCHECK'd).
  void UnsafeArenaSwap(ReportErrorToTaskResponse* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  ReportErrorToTaskResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<ReportErrorToTaskResponse>(arena);
  }
  ReportErrorToTaskResponse* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const ReportErrorToTaskResponse& from);
  void MergeFrom(const ReportErrorToTaskResponse& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(ReportErrorToTaskResponse* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.ReportErrorToTaskResponse";
  }
  protected:
  explicit ReportErrorToTaskResponse(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  // @@protoc_insertion_point(class_scope:tensorflow.ReportErrorToTaskResponse)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Per-instance state; wrapped in a union to support the
  // constant-initialized default instance.
  struct Impl_ {
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
2698 // -------------------------------------------------------------------
2699 
// Lite-runtime message generated from `tensorflow.ReportErrorToServiceRequest`.
// Fields (see accessor section):
//   int32  error_code    = 1;
//   string error_message = 2;
//   .tensorflow.CoordinatedTask error_origin = 5;
class ReportErrorToServiceRequest final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.ReportErrorToServiceRequest) */ {
 public:
  inline ReportErrorToServiceRequest() : ReportErrorToServiceRequest(nullptr) {}
  ~ReportErrorToServiceRequest() override;
  // Constant-initialized constructor used for the global default instance.
  explicit PROTOBUF_CONSTEXPR ReportErrorToServiceRequest(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  ReportErrorToServiceRequest(const ReportErrorToServiceRequest& from);
  ReportErrorToServiceRequest(ReportErrorToServiceRequest&& from) noexcept
    : ReportErrorToServiceRequest() {
    *this = ::std::move(from);
  }

  inline ReportErrorToServiceRequest& operator=(const ReportErrorToServiceRequest& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline ReportErrorToServiceRequest& operator=(ReportErrorToServiceRequest&& from) noexcept {
    if (this == &from) return *this;
    // Swap only when both messages share an owning arena; otherwise copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const ReportErrorToServiceRequest& default_instance() {
    return *internal_default_instance();
  }
  static inline const ReportErrorToServiceRequest* internal_default_instance() {
    return reinterpret_cast<const ReportErrorToServiceRequest*>(
               &_ReportErrorToServiceRequest_default_instance_);
  }
  // Index of this message within the generated file's message table.
  static constexpr int kIndexInFileMessages =
    17;

  friend void swap(ReportErrorToServiceRequest& a, ReportErrorToServiceRequest& b) {
    a.Swap(&b);
  }
  inline void Swap(ReportErrorToServiceRequest* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      // Cross-arena swap copies through a temporary.
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Pointer-swap that requires both messages to share an arena (DCHECK'd).
  void UnsafeArenaSwap(ReportErrorToServiceRequest* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  ReportErrorToServiceRequest* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<ReportErrorToServiceRequest>(arena);
  }
  ReportErrorToServiceRequest* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const ReportErrorToServiceRequest& from);
  void MergeFrom(const ReportErrorToServiceRequest& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(ReportErrorToServiceRequest* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.ReportErrorToServiceRequest";
  }
  protected:
  explicit ReportErrorToServiceRequest(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kErrorMessageFieldNumber = 2,
    kErrorOriginFieldNumber = 5,
    kErrorCodeFieldNumber = 1,
  };
  // string error_message = 2;
  void clear_error_message();
  const std::string& error_message() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_error_message(ArgT0&& arg0, ArgT... args);
  std::string* mutable_error_message();
  PROTOBUF_NODISCARD std::string* release_error_message();
  void set_allocated_error_message(std::string* error_message);
  private:
  const std::string& _internal_error_message() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_error_message(const std::string& value);
  std::string* _internal_mutable_error_message();
  public:

  // .tensorflow.CoordinatedTask error_origin = 5;
  bool has_error_origin() const;
  private:
  bool _internal_has_error_origin() const;
  public:
  void clear_error_origin();
  const ::tensorflow::CoordinatedTask& error_origin() const;
  PROTOBUF_NODISCARD ::tensorflow::CoordinatedTask* release_error_origin();
  ::tensorflow::CoordinatedTask* mutable_error_origin();
  void set_allocated_error_origin(::tensorflow::CoordinatedTask* error_origin);
  private:
  const ::tensorflow::CoordinatedTask& _internal_error_origin() const;
  ::tensorflow::CoordinatedTask* _internal_mutable_error_origin();
  public:
  // unsafe_arena_* variants transfer raw pointers without arena-ownership
  // bookkeeping; see protobuf arena documentation before using.
  void unsafe_arena_set_allocated_error_origin(
      ::tensorflow::CoordinatedTask* error_origin);
  ::tensorflow::CoordinatedTask* unsafe_arena_release_error_origin();

  // int32 error_code = 1;
  void clear_error_code();
  ::int32_t error_code() const;
  void set_error_code(::int32_t value);
  private:
  ::int32_t _internal_error_code() const;
  void _internal_set_error_code(::int32_t value);
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.ReportErrorToServiceRequest)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Per-instance state; wrapped in a union to support the
  // constant-initialized default instance.
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr error_message_;
    ::tensorflow::CoordinatedTask* error_origin_;
    ::int32_t error_code_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
2868 // -------------------------------------------------------------------
2869 
// Empty response message for ReportErrorToService. Generated against the
// protobuf *lite* runtime (MessageLite base: no descriptors/reflection).
class ReportErrorToServiceResponse final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.ReportErrorToServiceResponse) */ {
 public:
  inline ReportErrorToServiceResponse() : ReportErrorToServiceResponse(nullptr) {}
  ~ReportErrorToServiceResponse() override;
  explicit PROTOBUF_CONSTEXPR ReportErrorToServiceResponse(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  ReportErrorToServiceResponse(const ReportErrorToServiceResponse& from);
  ReportErrorToServiceResponse(ReportErrorToServiceResponse&& from) noexcept
    : ReportErrorToServiceResponse() {
    *this = ::std::move(from);
  }

  inline ReportErrorToServiceResponse& operator=(const ReportErrorToServiceResponse& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline ReportErrorToServiceResponse& operator=(ReportErrorToServiceResponse&& from) noexcept {
    if (this == &from) return *this;
    // Move is only a cheap swap when both messages share an owning arena;
    // across arenas it must fall back to a deep copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  // Immutable, globally shared default instance (constant-initialized).
  static const ReportErrorToServiceResponse& default_instance() {
    return *internal_default_instance();
  }
  static inline const ReportErrorToServiceResponse* internal_default_instance() {
    return reinterpret_cast<const ReportErrorToServiceResponse*>(
               &_ReportErrorToServiceResponse_default_instance_);
  }
  // Position of this message in the generated file-level tables.
  static constexpr int kIndexInFileMessages =
    18;

  friend void swap(ReportErrorToServiceResponse& a, ReportErrorToServiceResponse& b) {
    a.Swap(&b);
  }
  inline void Swap(ReportErrorToServiceResponse* other) {
    if (other == this) return;
    // Same-arena swaps exchange internals; cross-arena swaps go through
    // GenericSwap, which copies.
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(ReportErrorToServiceResponse* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  ReportErrorToServiceResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<ReportErrorToServiceResponse>(arena);
  }
  ReportErrorToServiceResponse* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const ReportErrorToServiceResponse& from);
  void MergeFrom(const ReportErrorToServiceResponse& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(ReportErrorToServiceResponse* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.ReportErrorToServiceResponse";
  }
  protected:
  explicit ReportErrorToServiceResponse(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  // @@protoc_insertion_point(class_scope:tensorflow.ReportErrorToServiceResponse)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage (no fields here beyond the cached serialized size).
  // Wrapped in a union so construction/destruction is driven explicitly
  // by SharedCtor/SharedDtor rather than implicitly.
  struct Impl_ {
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
2989 // -------------------------------------------------------------------
2990 
// A single key/value pair for the coordination service's KV store
// (string key = 1, bytes value = 2). Lite-runtime generated message.
class KeyValueEntry final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.KeyValueEntry) */ {
 public:
  inline KeyValueEntry() : KeyValueEntry(nullptr) {}
  ~KeyValueEntry() override;
  explicit PROTOBUF_CONSTEXPR KeyValueEntry(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  KeyValueEntry(const KeyValueEntry& from);
  KeyValueEntry(KeyValueEntry&& from) noexcept
    : KeyValueEntry() {
    *this = ::std::move(from);
  }

  inline KeyValueEntry& operator=(const KeyValueEntry& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline KeyValueEntry& operator=(KeyValueEntry&& from) noexcept {
    if (this == &from) return *this;
    // Move is only a cheap swap when both messages share an owning arena;
    // across arenas it must fall back to a deep copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  // Immutable, globally shared default instance (constant-initialized).
  static const KeyValueEntry& default_instance() {
    return *internal_default_instance();
  }
  static inline const KeyValueEntry* internal_default_instance() {
    return reinterpret_cast<const KeyValueEntry*>(
               &_KeyValueEntry_default_instance_);
  }
  // Position of this message in the generated file-level tables.
  static constexpr int kIndexInFileMessages =
    19;

  friend void swap(KeyValueEntry& a, KeyValueEntry& b) {
    a.Swap(&b);
  }
  inline void Swap(KeyValueEntry* other) {
    if (other == this) return;
    // Same-arena swaps exchange internals; cross-arena swaps go through
    // GenericSwap, which copies.
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(KeyValueEntry* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  KeyValueEntry* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<KeyValueEntry>(arena);
  }
  KeyValueEntry* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const KeyValueEntry& from);
  void MergeFrom(const KeyValueEntry& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(KeyValueEntry* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.KeyValueEntry";
  }
  protected:
  explicit KeyValueEntry(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kKeyFieldNumber = 1,
    kValueFieldNumber = 2,
  };
  // string key = 1;
  void clear_key();
  const std::string& key() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_key(ArgT0&& arg0, ArgT... args);
  std::string* mutable_key();
  PROTOBUF_NODISCARD std::string* release_key();
  void set_allocated_key(std::string* key);
  private:
  const std::string& _internal_key() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_key(const std::string& value);
  std::string* _internal_mutable_key();
  public:

  // bytes value = 2;
  void clear_value();
  const std::string& value() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_value(ArgT0&& arg0, ArgT... args);
  std::string* mutable_value();
  PROTOBUF_NODISCARD std::string* release_value();
  void set_allocated_value(std::string* value);
  private:
  const std::string& _internal_value() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_value(const std::string& value);
  std::string* _internal_mutable_value();
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.KeyValueEntry)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage; union member so construction/destruction is driven
  // explicitly by SharedCtor/SharedDtor rather than implicitly.
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr key_;
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr value_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
3144 // -------------------------------------------------------------------
3145 
// Request to insert one KeyValueEntry (.tensorflow.KeyValueEntry kv = 1)
// into the coordination service KV store. Lite-runtime generated message.
class InsertKeyValueRequest final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.InsertKeyValueRequest) */ {
 public:
  inline InsertKeyValueRequest() : InsertKeyValueRequest(nullptr) {}
  ~InsertKeyValueRequest() override;
  explicit PROTOBUF_CONSTEXPR InsertKeyValueRequest(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  InsertKeyValueRequest(const InsertKeyValueRequest& from);
  InsertKeyValueRequest(InsertKeyValueRequest&& from) noexcept
    : InsertKeyValueRequest() {
    *this = ::std::move(from);
  }

  inline InsertKeyValueRequest& operator=(const InsertKeyValueRequest& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline InsertKeyValueRequest& operator=(InsertKeyValueRequest&& from) noexcept {
    if (this == &from) return *this;
    // Move is only a cheap swap when both messages share an owning arena;
    // across arenas it must fall back to a deep copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  // Immutable, globally shared default instance (constant-initialized).
  static const InsertKeyValueRequest& default_instance() {
    return *internal_default_instance();
  }
  static inline const InsertKeyValueRequest* internal_default_instance() {
    return reinterpret_cast<const InsertKeyValueRequest*>(
               &_InsertKeyValueRequest_default_instance_);
  }
  // Position of this message in the generated file-level tables.
  static constexpr int kIndexInFileMessages =
    20;

  friend void swap(InsertKeyValueRequest& a, InsertKeyValueRequest& b) {
    a.Swap(&b);
  }
  inline void Swap(InsertKeyValueRequest* other) {
    if (other == this) return;
    // Same-arena swaps exchange internals; cross-arena swaps go through
    // GenericSwap, which copies.
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(InsertKeyValueRequest* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  InsertKeyValueRequest* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<InsertKeyValueRequest>(arena);
  }
  InsertKeyValueRequest* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const InsertKeyValueRequest& from);
  void MergeFrom(const InsertKeyValueRequest& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(InsertKeyValueRequest* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.InsertKeyValueRequest";
  }
  protected:
  explicit InsertKeyValueRequest(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kKvFieldNumber = 1,
  };
  // .tensorflow.KeyValueEntry kv = 1;
  bool has_kv() const;
  private:
  bool _internal_has_kv() const;
  public:
  void clear_kv();
  const ::tensorflow::KeyValueEntry& kv() const;
  PROTOBUF_NODISCARD ::tensorflow::KeyValueEntry* release_kv();
  ::tensorflow::KeyValueEntry* mutable_kv();
  void set_allocated_kv(::tensorflow::KeyValueEntry* kv);
  private:
  const ::tensorflow::KeyValueEntry& _internal_kv() const;
  ::tensorflow::KeyValueEntry* _internal_mutable_kv();
  public:
  // unsafe_arena_* variants skip arena-ownership bookkeeping; caller is
  // responsible for lifetime correctness.
  void unsafe_arena_set_allocated_kv(
      ::tensorflow::KeyValueEntry* kv);
  ::tensorflow::KeyValueEntry* unsafe_arena_release_kv();

  // @@protoc_insertion_point(class_scope:tensorflow.InsertKeyValueRequest)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage; union member so construction/destruction is driven
  // explicitly by SharedCtor/SharedDtor rather than implicitly.
  struct Impl_ {
    ::tensorflow::KeyValueEntry* kv_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
3287 // -------------------------------------------------------------------
3288 
// Empty response message for InsertKeyValue. Lite-runtime generated message
// (MessageLite base: no descriptors/reflection).
class InsertKeyValueResponse final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.InsertKeyValueResponse) */ {
 public:
  inline InsertKeyValueResponse() : InsertKeyValueResponse(nullptr) {}
  ~InsertKeyValueResponse() override;
  explicit PROTOBUF_CONSTEXPR InsertKeyValueResponse(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  InsertKeyValueResponse(const InsertKeyValueResponse& from);
  InsertKeyValueResponse(InsertKeyValueResponse&& from) noexcept
    : InsertKeyValueResponse() {
    *this = ::std::move(from);
  }

  inline InsertKeyValueResponse& operator=(const InsertKeyValueResponse& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline InsertKeyValueResponse& operator=(InsertKeyValueResponse&& from) noexcept {
    if (this == &from) return *this;
    // Move is only a cheap swap when both messages share an owning arena;
    // across arenas it must fall back to a deep copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  // Immutable, globally shared default instance (constant-initialized).
  static const InsertKeyValueResponse& default_instance() {
    return *internal_default_instance();
  }
  static inline const InsertKeyValueResponse* internal_default_instance() {
    return reinterpret_cast<const InsertKeyValueResponse*>(
               &_InsertKeyValueResponse_default_instance_);
  }
  // Position of this message in the generated file-level tables.
  static constexpr int kIndexInFileMessages =
    21;

  friend void swap(InsertKeyValueResponse& a, InsertKeyValueResponse& b) {
    a.Swap(&b);
  }
  inline void Swap(InsertKeyValueResponse* other) {
    if (other == this) return;
    // Same-arena swaps exchange internals; cross-arena swaps go through
    // GenericSwap, which copies.
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(InsertKeyValueResponse* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  InsertKeyValueResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<InsertKeyValueResponse>(arena);
  }
  InsertKeyValueResponse* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const InsertKeyValueResponse& from);
  void MergeFrom(const InsertKeyValueResponse& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(InsertKeyValueResponse* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.InsertKeyValueResponse";
  }
  protected:
  explicit InsertKeyValueResponse(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  // @@protoc_insertion_point(class_scope:tensorflow.InsertKeyValueResponse)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage (no fields here beyond the cached serialized size).
  // Wrapped in a union so construction/destruction is driven explicitly
  // by SharedCtor/SharedDtor rather than implicitly.
  struct Impl_ {
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
3408 // -------------------------------------------------------------------
3409 
// Request to look up a value by key (string key = 1) in the coordination
// service KV store. Lite-runtime generated message.
class GetKeyValueRequest final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.GetKeyValueRequest) */ {
 public:
  inline GetKeyValueRequest() : GetKeyValueRequest(nullptr) {}
  ~GetKeyValueRequest() override;
  explicit PROTOBUF_CONSTEXPR GetKeyValueRequest(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  GetKeyValueRequest(const GetKeyValueRequest& from);
  GetKeyValueRequest(GetKeyValueRequest&& from) noexcept
    : GetKeyValueRequest() {
    *this = ::std::move(from);
  }

  inline GetKeyValueRequest& operator=(const GetKeyValueRequest& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline GetKeyValueRequest& operator=(GetKeyValueRequest&& from) noexcept {
    if (this == &from) return *this;
    // Move is only a cheap swap when both messages share an owning arena;
    // across arenas it must fall back to a deep copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  // Immutable, globally shared default instance (constant-initialized).
  static const GetKeyValueRequest& default_instance() {
    return *internal_default_instance();
  }
  static inline const GetKeyValueRequest* internal_default_instance() {
    return reinterpret_cast<const GetKeyValueRequest*>(
               &_GetKeyValueRequest_default_instance_);
  }
  // Position of this message in the generated file-level tables.
  static constexpr int kIndexInFileMessages =
    22;

  friend void swap(GetKeyValueRequest& a, GetKeyValueRequest& b) {
    a.Swap(&b);
  }
  inline void Swap(GetKeyValueRequest* other) {
    if (other == this) return;
    // Same-arena swaps exchange internals; cross-arena swaps go through
    // GenericSwap, which copies.
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(GetKeyValueRequest* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  GetKeyValueRequest* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<GetKeyValueRequest>(arena);
  }
  GetKeyValueRequest* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const GetKeyValueRequest& from);
  void MergeFrom(const GetKeyValueRequest& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(GetKeyValueRequest* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.GetKeyValueRequest";
  }
  protected:
  explicit GetKeyValueRequest(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kKeyFieldNumber = 1,
  };
  // string key = 1;
  void clear_key();
  const std::string& key() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_key(ArgT0&& arg0, ArgT... args);
  std::string* mutable_key();
  PROTOBUF_NODISCARD std::string* release_key();
  void set_allocated_key(std::string* key);
  private:
  const std::string& _internal_key() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_key(const std::string& value);
  std::string* _internal_mutable_key();
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.GetKeyValueRequest)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage; union member so construction/destruction is driven
  // explicitly by SharedCtor/SharedDtor rather than implicitly.
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr key_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
3547 // -------------------------------------------------------------------
3548 
// Response for GetKeyValue carrying the found entry
// (.tensorflow.KeyValueEntry kv = 1). Lite-runtime generated message.
class GetKeyValueResponse final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.GetKeyValueResponse) */ {
 public:
  inline GetKeyValueResponse() : GetKeyValueResponse(nullptr) {}
  ~GetKeyValueResponse() override;
  explicit PROTOBUF_CONSTEXPR GetKeyValueResponse(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  GetKeyValueResponse(const GetKeyValueResponse& from);
  GetKeyValueResponse(GetKeyValueResponse&& from) noexcept
    : GetKeyValueResponse() {
    *this = ::std::move(from);
  }

  inline GetKeyValueResponse& operator=(const GetKeyValueResponse& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline GetKeyValueResponse& operator=(GetKeyValueResponse&& from) noexcept {
    if (this == &from) return *this;
    // Move is only a cheap swap when both messages share an owning arena;
    // across arenas it must fall back to a deep copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  // Immutable, globally shared default instance (constant-initialized).
  static const GetKeyValueResponse& default_instance() {
    return *internal_default_instance();
  }
  static inline const GetKeyValueResponse* internal_default_instance() {
    return reinterpret_cast<const GetKeyValueResponse*>(
               &_GetKeyValueResponse_default_instance_);
  }
  // Position of this message in the generated file-level tables.
  static constexpr int kIndexInFileMessages =
    23;

  friend void swap(GetKeyValueResponse& a, GetKeyValueResponse& b) {
    a.Swap(&b);
  }
  inline void Swap(GetKeyValueResponse* other) {
    if (other == this) return;
    // Same-arena swaps exchange internals; cross-arena swaps go through
    // GenericSwap, which copies.
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(GetKeyValueResponse* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  GetKeyValueResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<GetKeyValueResponse>(arena);
  }
  GetKeyValueResponse* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const GetKeyValueResponse& from);
  void MergeFrom(const GetKeyValueResponse& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(GetKeyValueResponse* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.GetKeyValueResponse";
  }
  protected:
  explicit GetKeyValueResponse(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kKvFieldNumber = 1,
  };
  // .tensorflow.KeyValueEntry kv = 1;
  bool has_kv() const;
  private:
  bool _internal_has_kv() const;
  public:
  void clear_kv();
  const ::tensorflow::KeyValueEntry& kv() const;
  PROTOBUF_NODISCARD ::tensorflow::KeyValueEntry* release_kv();
  ::tensorflow::KeyValueEntry* mutable_kv();
  void set_allocated_kv(::tensorflow::KeyValueEntry* kv);
  private:
  const ::tensorflow::KeyValueEntry& _internal_kv() const;
  ::tensorflow::KeyValueEntry* _internal_mutable_kv();
  public:
  // unsafe_arena_* variants skip arena-ownership bookkeeping; caller is
  // responsible for lifetime correctness.
  void unsafe_arena_set_allocated_kv(
      ::tensorflow::KeyValueEntry* kv);
  ::tensorflow::KeyValueEntry* unsafe_arena_release_kv();

  // @@protoc_insertion_point(class_scope:tensorflow.GetKeyValueResponse)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage; union member so construction/destruction is driven
  // explicitly by SharedCtor/SharedDtor rather than implicitly.
  struct Impl_ {
    ::tensorflow::KeyValueEntry* kv_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
3690 // -------------------------------------------------------------------
3691 
// Lite-runtime message `tensorflow.TryGetKeyValueRequest`, generated by the
// protocol buffer compiler from coordination_service.proto. Carries a single
// `string key = 1` field naming the key-value-store entry to look up.
// Generated code — do not hand-edit; regenerate from the .proto instead. The
// Impl_ layout below must stay in sync with the companion .pb.cc file.
class TryGetKeyValueRequest final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.TryGetKeyValueRequest) */ {
 public:
  inline TryGetKeyValueRequest() : TryGetKeyValueRequest(nullptr) {}
  ~TryGetKeyValueRequest() override;
  // Constructor used only to build the constexpr default instance.
  explicit PROTOBUF_CONSTEXPR TryGetKeyValueRequest(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  TryGetKeyValueRequest(const TryGetKeyValueRequest& from);
  TryGetKeyValueRequest(TryGetKeyValueRequest&& from) noexcept
    : TryGetKeyValueRequest() {
    *this = ::std::move(from);
  }

  inline TryGetKeyValueRequest& operator=(const TryGetKeyValueRequest& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  // Move-assignment steals contents (InternalSwap) only when both messages
  // share the same owning arena; otherwise it falls back to a copy.
  inline TryGetKeyValueRequest& operator=(TryGetKeyValueRequest&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const TryGetKeyValueRequest& default_instance() {
    return *internal_default_instance();
  }
  static inline const TryGetKeyValueRequest* internal_default_instance() {
    return reinterpret_cast<const TryGetKeyValueRequest*>(
               &_TryGetKeyValueRequest_default_instance_);
  }
  // Index of this message within this .proto file's generated message table.
  static constexpr int kIndexInFileMessages =
    24;

  friend void swap(TryGetKeyValueRequest& a, TryGetKeyValueRequest& b) {
    a.Swap(&b);
  }
  // Cheap pointer swap (InternalSwap) when both messages share an owning
  // arena; otherwise GenericSwap performs a copy-based exchange.
  inline void Swap(TryGetKeyValueRequest* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Precondition (DCHECK'd): both messages are owned by the same arena.
  void UnsafeArenaSwap(TryGetKeyValueRequest* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  TryGetKeyValueRequest* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<TryGetKeyValueRequest>(arena);
  }
  TryGetKeyValueRequest* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const TryGetKeyValueRequest& from);
  void MergeFrom(const TryGetKeyValueRequest& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(TryGetKeyValueRequest* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.TryGetKeyValueRequest";
  }
  protected:
  explicit TryGetKeyValueRequest(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kKeyFieldNumber = 1,
  };
  // string key = 1;
  void clear_key();
  const std::string& key() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_key(ArgT0&& arg0, ArgT... args);
  std::string* mutable_key();
  PROTOBUF_NODISCARD std::string* release_key();
  void set_allocated_key(std::string* key);
  private:
  const std::string& _internal_key() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_key(const std::string& value);
  std::string* _internal_mutable_key();
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.TryGetKeyValueRequest)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage; construction/destruction is managed manually via
  // SharedCtor/SharedDtor (hence the anonymous union wrapper below).
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr key_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
3829 // -------------------------------------------------------------------
3830 
// Lite-runtime message `tensorflow.TryGetKeyValueResponse`, generated by the
// protocol buffer compiler from coordination_service.proto. Carries a single
// submessage field `.tensorflow.KeyValueEntry kv = 1` holding the looked-up
// key/value pair. Generated code — do not hand-edit; regenerate instead.
class TryGetKeyValueResponse final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.TryGetKeyValueResponse) */ {
 public:
  inline TryGetKeyValueResponse() : TryGetKeyValueResponse(nullptr) {}
  ~TryGetKeyValueResponse() override;
  // Constructor used only to build the constexpr default instance.
  explicit PROTOBUF_CONSTEXPR TryGetKeyValueResponse(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  TryGetKeyValueResponse(const TryGetKeyValueResponse& from);
  TryGetKeyValueResponse(TryGetKeyValueResponse&& from) noexcept
    : TryGetKeyValueResponse() {
    *this = ::std::move(from);
  }

  inline TryGetKeyValueResponse& operator=(const TryGetKeyValueResponse& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  // Move-assignment steals contents (InternalSwap) only when both messages
  // share the same owning arena; otherwise it falls back to a copy.
  inline TryGetKeyValueResponse& operator=(TryGetKeyValueResponse&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const TryGetKeyValueResponse& default_instance() {
    return *internal_default_instance();
  }
  static inline const TryGetKeyValueResponse* internal_default_instance() {
    return reinterpret_cast<const TryGetKeyValueResponse*>(
               &_TryGetKeyValueResponse_default_instance_);
  }
  // Index of this message within this .proto file's generated message table.
  static constexpr int kIndexInFileMessages =
    25;

  friend void swap(TryGetKeyValueResponse& a, TryGetKeyValueResponse& b) {
    a.Swap(&b);
  }
  // Cheap pointer swap (InternalSwap) when both messages share an owning
  // arena; otherwise GenericSwap performs a copy-based exchange.
  inline void Swap(TryGetKeyValueResponse* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Precondition (DCHECK'd): both messages are owned by the same arena.
  void UnsafeArenaSwap(TryGetKeyValueResponse* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  TryGetKeyValueResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<TryGetKeyValueResponse>(arena);
  }
  TryGetKeyValueResponse* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const TryGetKeyValueResponse& from);
  void MergeFrom(const TryGetKeyValueResponse& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(TryGetKeyValueResponse* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.TryGetKeyValueResponse";
  }
  protected:
  explicit TryGetKeyValueResponse(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kKvFieldNumber = 1,
  };
  // .tensorflow.KeyValueEntry kv = 1;
  // Standard submessage accessors: release_* transfers ownership to the
  // caller; the unsafe_arena_* variants skip arena ownership bookkeeping.
  bool has_kv() const;
  private:
  bool _internal_has_kv() const;
  public:
  void clear_kv();
  const ::tensorflow::KeyValueEntry& kv() const;
  PROTOBUF_NODISCARD ::tensorflow::KeyValueEntry* release_kv();
  ::tensorflow::KeyValueEntry* mutable_kv();
  void set_allocated_kv(::tensorflow::KeyValueEntry* kv);
  private:
  const ::tensorflow::KeyValueEntry& _internal_kv() const;
  ::tensorflow::KeyValueEntry* _internal_mutable_kv();
  public:
  void unsafe_arena_set_allocated_kv(
      ::tensorflow::KeyValueEntry* kv);
  ::tensorflow::KeyValueEntry* unsafe_arena_release_kv();

  // @@protoc_insertion_point(class_scope:tensorflow.TryGetKeyValueResponse)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage; construction/destruction is managed manually via
  // SharedCtor/SharedDtor (hence the anonymous union wrapper below).
  struct Impl_ {
    ::tensorflow::KeyValueEntry* kv_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
3972 // -------------------------------------------------------------------
3973 
// Lite-runtime message `tensorflow.GetKeyValueDirRequest`, generated by the
// protocol buffer compiler from coordination_service.proto. Carries a single
// `string directory_key = 1` field naming the key-value-store directory to
// list. Generated code — do not hand-edit; regenerate from the .proto.
class GetKeyValueDirRequest final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.GetKeyValueDirRequest) */ {
 public:
  inline GetKeyValueDirRequest() : GetKeyValueDirRequest(nullptr) {}
  ~GetKeyValueDirRequest() override;
  // Constructor used only to build the constexpr default instance.
  explicit PROTOBUF_CONSTEXPR GetKeyValueDirRequest(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  GetKeyValueDirRequest(const GetKeyValueDirRequest& from);
  GetKeyValueDirRequest(GetKeyValueDirRequest&& from) noexcept
    : GetKeyValueDirRequest() {
    *this = ::std::move(from);
  }

  inline GetKeyValueDirRequest& operator=(const GetKeyValueDirRequest& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  // Move-assignment steals contents (InternalSwap) only when both messages
  // share the same owning arena; otherwise it falls back to a copy.
  inline GetKeyValueDirRequest& operator=(GetKeyValueDirRequest&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const GetKeyValueDirRequest& default_instance() {
    return *internal_default_instance();
  }
  static inline const GetKeyValueDirRequest* internal_default_instance() {
    return reinterpret_cast<const GetKeyValueDirRequest*>(
               &_GetKeyValueDirRequest_default_instance_);
  }
  // Index of this message within this .proto file's generated message table.
  static constexpr int kIndexInFileMessages =
    26;

  friend void swap(GetKeyValueDirRequest& a, GetKeyValueDirRequest& b) {
    a.Swap(&b);
  }
  // Cheap pointer swap (InternalSwap) when both messages share an owning
  // arena; otherwise GenericSwap performs a copy-based exchange.
  inline void Swap(GetKeyValueDirRequest* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Precondition (DCHECK'd): both messages are owned by the same arena.
  void UnsafeArenaSwap(GetKeyValueDirRequest* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  GetKeyValueDirRequest* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<GetKeyValueDirRequest>(arena);
  }
  GetKeyValueDirRequest* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const GetKeyValueDirRequest& from);
  void MergeFrom(const GetKeyValueDirRequest& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(GetKeyValueDirRequest* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.GetKeyValueDirRequest";
  }
  protected:
  explicit GetKeyValueDirRequest(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kDirectoryKeyFieldNumber = 1,
  };
  // string directory_key = 1;
  void clear_directory_key();
  const std::string& directory_key() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_directory_key(ArgT0&& arg0, ArgT... args);
  std::string* mutable_directory_key();
  PROTOBUF_NODISCARD std::string* release_directory_key();
  void set_allocated_directory_key(std::string* directory_key);
  private:
  const std::string& _internal_directory_key() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_directory_key(const std::string& value);
  std::string* _internal_mutable_directory_key();
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.GetKeyValueDirRequest)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage; construction/destruction is managed manually via
  // SharedCtor/SharedDtor (hence the anonymous union wrapper below).
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr directory_key_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
4111 // -------------------------------------------------------------------
4112 
// Lite-runtime message `tensorflow.GetKeyValueDirResponse`, generated by the
// protocol buffer compiler from coordination_service.proto. Echoes the
// requested `string directory_key = 1` and returns the directory contents as
// `repeated .tensorflow.KeyValueEntry kv = 2`. Generated code — do not
// hand-edit; regenerate from the .proto instead.
class GetKeyValueDirResponse final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.GetKeyValueDirResponse) */ {
 public:
  inline GetKeyValueDirResponse() : GetKeyValueDirResponse(nullptr) {}
  ~GetKeyValueDirResponse() override;
  // Constructor used only to build the constexpr default instance.
  explicit PROTOBUF_CONSTEXPR GetKeyValueDirResponse(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  GetKeyValueDirResponse(const GetKeyValueDirResponse& from);
  GetKeyValueDirResponse(GetKeyValueDirResponse&& from) noexcept
    : GetKeyValueDirResponse() {
    *this = ::std::move(from);
  }

  inline GetKeyValueDirResponse& operator=(const GetKeyValueDirResponse& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  // Move-assignment steals contents (InternalSwap) only when both messages
  // share the same owning arena; otherwise it falls back to a copy.
  inline GetKeyValueDirResponse& operator=(GetKeyValueDirResponse&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const GetKeyValueDirResponse& default_instance() {
    return *internal_default_instance();
  }
  static inline const GetKeyValueDirResponse* internal_default_instance() {
    return reinterpret_cast<const GetKeyValueDirResponse*>(
               &_GetKeyValueDirResponse_default_instance_);
  }
  // Index of this message within this .proto file's generated message table.
  static constexpr int kIndexInFileMessages =
    27;

  friend void swap(GetKeyValueDirResponse& a, GetKeyValueDirResponse& b) {
    a.Swap(&b);
  }
  // Cheap pointer swap (InternalSwap) when both messages share an owning
  // arena; otherwise GenericSwap performs a copy-based exchange.
  inline void Swap(GetKeyValueDirResponse* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Precondition (DCHECK'd): both messages are owned by the same arena.
  void UnsafeArenaSwap(GetKeyValueDirResponse* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  GetKeyValueDirResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<GetKeyValueDirResponse>(arena);
  }
  GetKeyValueDirResponse* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const GetKeyValueDirResponse& from);
  void MergeFrom(const GetKeyValueDirResponse& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(GetKeyValueDirResponse* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.GetKeyValueDirResponse";
  }
  protected:
  explicit GetKeyValueDirResponse(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kKvFieldNumber = 2,
    kDirectoryKeyFieldNumber = 1,
  };
  // repeated .tensorflow.KeyValueEntry kv = 2;
  int kv_size() const;
  private:
  int _internal_kv_size() const;
  public:
  void clear_kv();
  ::tensorflow::KeyValueEntry* mutable_kv(int index);
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::KeyValueEntry >*
      mutable_kv();
  private:
  const ::tensorflow::KeyValueEntry& _internal_kv(int index) const;
  ::tensorflow::KeyValueEntry* _internal_add_kv();
  public:
  const ::tensorflow::KeyValueEntry& kv(int index) const;
  ::tensorflow::KeyValueEntry* add_kv();
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::KeyValueEntry >&
      kv() const;

  // string directory_key = 1;
  void clear_directory_key();
  const std::string& directory_key() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_directory_key(ArgT0&& arg0, ArgT... args);
  std::string* mutable_directory_key();
  PROTOBUF_NODISCARD std::string* release_directory_key();
  void set_allocated_directory_key(std::string* directory_key);
  private:
  const std::string& _internal_directory_key() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_directory_key(const std::string& value);
  std::string* _internal_mutable_directory_key();
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.GetKeyValueDirResponse)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage; construction/destruction is managed manually via
  // SharedCtor/SharedDtor (hence the anonymous union wrapper below).
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::KeyValueEntry > kv_;
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr directory_key_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
4270 // -------------------------------------------------------------------
4271 
// Lite-runtime message `tensorflow.DeleteKeyValueRequest`, generated by the
// protocol buffer compiler from coordination_service.proto. Fields:
// `string key = 1` (entry or directory to delete) and `bool is_directory = 2`
// (when true, `key` is treated as a directory). Generated code — do not
// hand-edit; regenerate from the .proto instead.
class DeleteKeyValueRequest final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.DeleteKeyValueRequest) */ {
 public:
  inline DeleteKeyValueRequest() : DeleteKeyValueRequest(nullptr) {}
  ~DeleteKeyValueRequest() override;
  // Constructor used only to build the constexpr default instance.
  explicit PROTOBUF_CONSTEXPR DeleteKeyValueRequest(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  DeleteKeyValueRequest(const DeleteKeyValueRequest& from);
  DeleteKeyValueRequest(DeleteKeyValueRequest&& from) noexcept
    : DeleteKeyValueRequest() {
    *this = ::std::move(from);
  }

  inline DeleteKeyValueRequest& operator=(const DeleteKeyValueRequest& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  // Move-assignment steals contents (InternalSwap) only when both messages
  // share the same owning arena; otherwise it falls back to a copy.
  inline DeleteKeyValueRequest& operator=(DeleteKeyValueRequest&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const DeleteKeyValueRequest& default_instance() {
    return *internal_default_instance();
  }
  static inline const DeleteKeyValueRequest* internal_default_instance() {
    return reinterpret_cast<const DeleteKeyValueRequest*>(
               &_DeleteKeyValueRequest_default_instance_);
  }
  // Index of this message within this .proto file's generated message table.
  static constexpr int kIndexInFileMessages =
    28;

  friend void swap(DeleteKeyValueRequest& a, DeleteKeyValueRequest& b) {
    a.Swap(&b);
  }
  // Cheap pointer swap (InternalSwap) when both messages share an owning
  // arena; otherwise GenericSwap performs a copy-based exchange.
  inline void Swap(DeleteKeyValueRequest* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Precondition (DCHECK'd): both messages are owned by the same arena.
  void UnsafeArenaSwap(DeleteKeyValueRequest* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  DeleteKeyValueRequest* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<DeleteKeyValueRequest>(arena);
  }
  DeleteKeyValueRequest* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const DeleteKeyValueRequest& from);
  void MergeFrom(const DeleteKeyValueRequest& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(DeleteKeyValueRequest* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.DeleteKeyValueRequest";
  }
  protected:
  explicit DeleteKeyValueRequest(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kKeyFieldNumber = 1,
    kIsDirectoryFieldNumber = 2,
  };
  // string key = 1;
  void clear_key();
  const std::string& key() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_key(ArgT0&& arg0, ArgT... args);
  std::string* mutable_key();
  PROTOBUF_NODISCARD std::string* release_key();
  void set_allocated_key(std::string* key);
  private:
  const std::string& _internal_key() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_key(const std::string& value);
  std::string* _internal_mutable_key();
  public:

  // bool is_directory = 2;
  void clear_is_directory();
  bool is_directory() const;
  void set_is_directory(bool value);
  private:
  bool _internal_is_directory() const;
  void _internal_set_is_directory(bool value);
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.DeleteKeyValueRequest)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage; construction/destruction is managed manually via
  // SharedCtor/SharedDtor (hence the anonymous union wrapper below).
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr key_;
    bool is_directory_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
4420 // -------------------------------------------------------------------
4421 
// Generated lite-runtime message class for `tensorflow.DeleteKeyValueResponse`.
// This message declares no proto fields: Impl_ below holds only the cached
// serialized size. Do not hand-edit; regenerate from the .proto instead.
class DeleteKeyValueResponse final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.DeleteKeyValueResponse) */ {
 public:
  inline DeleteKeyValueResponse() : DeleteKeyValueResponse(nullptr) {}
  ~DeleteKeyValueResponse() override;
  explicit PROTOBUF_CONSTEXPR DeleteKeyValueResponse(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  DeleteKeyValueResponse(const DeleteKeyValueResponse& from);
  DeleteKeyValueResponse(DeleteKeyValueResponse&& from) noexcept
    : DeleteKeyValueResponse() {
    *this = ::std::move(from);
  }

  inline DeleteKeyValueResponse& operator=(const DeleteKeyValueResponse& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline DeleteKeyValueResponse& operator=(DeleteKeyValueResponse&& from) noexcept {
    if (this == &from) return *this;
    // A true move (InternalSwap) is only legal when both objects live on the
    // same arena; otherwise this falls back to a copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const DeleteKeyValueResponse& default_instance() {
    return *internal_default_instance();
  }
  static inline const DeleteKeyValueResponse* internal_default_instance() {
    return reinterpret_cast<const DeleteKeyValueResponse*>(
               &_DeleteKeyValueResponse_default_instance_);
  }
  // Index of this message within the generated file's message table.
  static constexpr int kIndexInFileMessages =
    29;

  friend void swap(DeleteKeyValueResponse& a, DeleteKeyValueResponse& b) {
    a.Swap(&b);
  }
  inline void Swap(DeleteKeyValueResponse* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      // Cross-arena swap must copy through a temporary.
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(DeleteKeyValueResponse* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  DeleteKeyValueResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<DeleteKeyValueResponse>(arena);
  }
  DeleteKeyValueResponse* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const DeleteKeyValueResponse& from);
  void MergeFrom(const DeleteKeyValueResponse& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(DeleteKeyValueResponse* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.DeleteKeyValueResponse";
  }
  protected:
  explicit DeleteKeyValueResponse(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  // @@protoc_insertion_point(class_scope:tensorflow.DeleteKeyValueResponse)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  struct Impl_ {
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
4541 // -------------------------------------------------------------------
4542 
// Generated lite-runtime message class for `tensorflow.BarrierRequest`.
// Fields (see accessor comments below):
//   string barrier_id = 1;  int64 barrier_timeout_in_ms = 2;
//   repeated CoordinatedTask tasks = 3;  CoordinatedTask source_task = 4;
// Do not hand-edit; regenerate from the .proto instead.
class BarrierRequest final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.BarrierRequest) */ {
 public:
  inline BarrierRequest() : BarrierRequest(nullptr) {}
  ~BarrierRequest() override;
  explicit PROTOBUF_CONSTEXPR BarrierRequest(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  BarrierRequest(const BarrierRequest& from);
  BarrierRequest(BarrierRequest&& from) noexcept
    : BarrierRequest() {
    *this = ::std::move(from);
  }

  inline BarrierRequest& operator=(const BarrierRequest& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline BarrierRequest& operator=(BarrierRequest&& from) noexcept {
    if (this == &from) return *this;
    // A true move (InternalSwap) is only legal when both objects live on the
    // same arena; otherwise this falls back to a copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const BarrierRequest& default_instance() {
    return *internal_default_instance();
  }
  static inline const BarrierRequest* internal_default_instance() {
    return reinterpret_cast<const BarrierRequest*>(
               &_BarrierRequest_default_instance_);
  }
  // Index of this message within the generated file's message table.
  static constexpr int kIndexInFileMessages =
    30;

  friend void swap(BarrierRequest& a, BarrierRequest& b) {
    a.Swap(&b);
  }
  inline void Swap(BarrierRequest* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      // Cross-arena swap must copy through a temporary.
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(BarrierRequest* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  BarrierRequest* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<BarrierRequest>(arena);
  }
  BarrierRequest* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const BarrierRequest& from);
  void MergeFrom(const BarrierRequest& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(BarrierRequest* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.BarrierRequest";
  }
  protected:
  explicit BarrierRequest(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kTasksFieldNumber = 3,
    kBarrierIdFieldNumber = 1,
    kSourceTaskFieldNumber = 4,
    kBarrierTimeoutInMsFieldNumber = 2,
  };
  // repeated .tensorflow.CoordinatedTask tasks = 3;
  int tasks_size() const;
  private:
  int _internal_tasks_size() const;
  public:
  void clear_tasks();
  ::tensorflow::CoordinatedTask* mutable_tasks(int index);
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::CoordinatedTask >*
      mutable_tasks();
  private:
  const ::tensorflow::CoordinatedTask& _internal_tasks(int index) const;
  ::tensorflow::CoordinatedTask* _internal_add_tasks();
  public:
  const ::tensorflow::CoordinatedTask& tasks(int index) const;
  ::tensorflow::CoordinatedTask* add_tasks();
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::CoordinatedTask >&
      tasks() const;

  // string barrier_id = 1;
  void clear_barrier_id();
  const std::string& barrier_id() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_barrier_id(ArgT0&& arg0, ArgT... args);
  std::string* mutable_barrier_id();
  PROTOBUF_NODISCARD std::string* release_barrier_id();
  void set_allocated_barrier_id(std::string* barrier_id);
  private:
  const std::string& _internal_barrier_id() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_barrier_id(const std::string& value);
  std::string* _internal_mutable_barrier_id();
  public:

  // .tensorflow.CoordinatedTask source_task = 4;
  bool has_source_task() const;
  private:
  bool _internal_has_source_task() const;
  public:
  void clear_source_task();
  const ::tensorflow::CoordinatedTask& source_task() const;
  PROTOBUF_NODISCARD ::tensorflow::CoordinatedTask* release_source_task();
  ::tensorflow::CoordinatedTask* mutable_source_task();
  void set_allocated_source_task(::tensorflow::CoordinatedTask* source_task);
  private:
  const ::tensorflow::CoordinatedTask& _internal_source_task() const;
  ::tensorflow::CoordinatedTask* _internal_mutable_source_task();
  public:
  // unsafe_arena_* variants skip arena-ownership bookkeeping; callers must
  // guarantee matching arena lifetimes themselves.
  void unsafe_arena_set_allocated_source_task(
      ::tensorflow::CoordinatedTask* source_task);
  ::tensorflow::CoordinatedTask* unsafe_arena_release_source_task();

  // int64 barrier_timeout_in_ms = 2;
  void clear_barrier_timeout_in_ms();
  ::int64_t barrier_timeout_in_ms() const;
  void set_barrier_timeout_in_ms(::int64_t value);
  private:
  ::int64_t _internal_barrier_timeout_in_ms() const;
  void _internal_set_barrier_timeout_in_ms(::int64_t value);
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.BarrierRequest)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::CoordinatedTask > tasks_;
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr barrier_id_;
    ::tensorflow::CoordinatedTask* source_task_;
    ::int64_t barrier_timeout_in_ms_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
4731 // -------------------------------------------------------------------
4732 
// Generated lite-runtime message class for `tensorflow.BarrierResponse`.
// This message declares no proto fields: Impl_ below holds only the cached
// serialized size. Do not hand-edit; regenerate from the .proto instead.
class BarrierResponse final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.BarrierResponse) */ {
 public:
  inline BarrierResponse() : BarrierResponse(nullptr) {}
  ~BarrierResponse() override;
  explicit PROTOBUF_CONSTEXPR BarrierResponse(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  BarrierResponse(const BarrierResponse& from);
  BarrierResponse(BarrierResponse&& from) noexcept
    : BarrierResponse() {
    *this = ::std::move(from);
  }

  inline BarrierResponse& operator=(const BarrierResponse& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline BarrierResponse& operator=(BarrierResponse&& from) noexcept {
    if (this == &from) return *this;
    // A true move (InternalSwap) is only legal when both objects live on the
    // same arena; otherwise this falls back to a copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const BarrierResponse& default_instance() {
    return *internal_default_instance();
  }
  static inline const BarrierResponse* internal_default_instance() {
    return reinterpret_cast<const BarrierResponse*>(
               &_BarrierResponse_default_instance_);
  }
  // Index of this message within the generated file's message table.
  static constexpr int kIndexInFileMessages =
    31;

  friend void swap(BarrierResponse& a, BarrierResponse& b) {
    a.Swap(&b);
  }
  inline void Swap(BarrierResponse* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      // Cross-arena swap must copy through a temporary.
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(BarrierResponse* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  BarrierResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<BarrierResponse>(arena);
  }
  BarrierResponse* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const BarrierResponse& from);
  void MergeFrom(const BarrierResponse& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(BarrierResponse* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.BarrierResponse";
  }
  protected:
  explicit BarrierResponse(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  // @@protoc_insertion_point(class_scope:tensorflow.BarrierResponse)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  struct Impl_ {
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
4852 // -------------------------------------------------------------------
4853 
// Generated lite-runtime message class for `tensorflow.CancelBarrierRequest`.
// Fields (see accessor comments below):
//   string barrier_id = 1;  CoordinatedTask source_task = 2;
// Do not hand-edit; regenerate from the .proto instead.
class CancelBarrierRequest final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.CancelBarrierRequest) */ {
 public:
  inline CancelBarrierRequest() : CancelBarrierRequest(nullptr) {}
  ~CancelBarrierRequest() override;
  explicit PROTOBUF_CONSTEXPR CancelBarrierRequest(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  CancelBarrierRequest(const CancelBarrierRequest& from);
  CancelBarrierRequest(CancelBarrierRequest&& from) noexcept
    : CancelBarrierRequest() {
    *this = ::std::move(from);
  }

  inline CancelBarrierRequest& operator=(const CancelBarrierRequest& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline CancelBarrierRequest& operator=(CancelBarrierRequest&& from) noexcept {
    if (this == &from) return *this;
    // A true move (InternalSwap) is only legal when both objects live on the
    // same arena; otherwise this falls back to a copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const CancelBarrierRequest& default_instance() {
    return *internal_default_instance();
  }
  static inline const CancelBarrierRequest* internal_default_instance() {
    return reinterpret_cast<const CancelBarrierRequest*>(
               &_CancelBarrierRequest_default_instance_);
  }
  // Index of this message within the generated file's message table.
  static constexpr int kIndexInFileMessages =
    32;

  friend void swap(CancelBarrierRequest& a, CancelBarrierRequest& b) {
    a.Swap(&b);
  }
  inline void Swap(CancelBarrierRequest* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      // Cross-arena swap must copy through a temporary.
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(CancelBarrierRequest* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  CancelBarrierRequest* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<CancelBarrierRequest>(arena);
  }
  CancelBarrierRequest* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const CancelBarrierRequest& from);
  void MergeFrom(const CancelBarrierRequest& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(CancelBarrierRequest* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.CancelBarrierRequest";
  }
  protected:
  explicit CancelBarrierRequest(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kBarrierIdFieldNumber = 1,
    kSourceTaskFieldNumber = 2,
  };
  // string barrier_id = 1;
  void clear_barrier_id();
  const std::string& barrier_id() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_barrier_id(ArgT0&& arg0, ArgT... args);
  std::string* mutable_barrier_id();
  PROTOBUF_NODISCARD std::string* release_barrier_id();
  void set_allocated_barrier_id(std::string* barrier_id);
  private:
  const std::string& _internal_barrier_id() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_barrier_id(const std::string& value);
  std::string* _internal_mutable_barrier_id();
  public:

  // .tensorflow.CoordinatedTask source_task = 2;
  bool has_source_task() const;
  private:
  bool _internal_has_source_task() const;
  public:
  void clear_source_task();
  const ::tensorflow::CoordinatedTask& source_task() const;
  PROTOBUF_NODISCARD ::tensorflow::CoordinatedTask* release_source_task();
  ::tensorflow::CoordinatedTask* mutable_source_task();
  void set_allocated_source_task(::tensorflow::CoordinatedTask* source_task);
  private:
  const ::tensorflow::CoordinatedTask& _internal_source_task() const;
  ::tensorflow::CoordinatedTask* _internal_mutable_source_task();
  public:
  // unsafe_arena_* variants skip arena-ownership bookkeeping; callers must
  // guarantee matching arena lifetimes themselves.
  void unsafe_arena_set_allocated_source_task(
      ::tensorflow::CoordinatedTask* source_task);
  ::tensorflow::CoordinatedTask* unsafe_arena_release_source_task();

  // @@protoc_insertion_point(class_scope:tensorflow.CancelBarrierRequest)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr barrier_id_;
    ::tensorflow::CoordinatedTask* source_task_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
5011 // -------------------------------------------------------------------
5012 
// Generated lite-runtime message class for `tensorflow.CancelBarrierResponse`.
// This message declares no proto fields: Impl_ below holds only the cached
// serialized size. Do not hand-edit; regenerate from the .proto instead.
class CancelBarrierResponse final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.CancelBarrierResponse) */ {
 public:
  inline CancelBarrierResponse() : CancelBarrierResponse(nullptr) {}
  ~CancelBarrierResponse() override;
  explicit PROTOBUF_CONSTEXPR CancelBarrierResponse(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  CancelBarrierResponse(const CancelBarrierResponse& from);
  CancelBarrierResponse(CancelBarrierResponse&& from) noexcept
    : CancelBarrierResponse() {
    *this = ::std::move(from);
  }

  inline CancelBarrierResponse& operator=(const CancelBarrierResponse& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline CancelBarrierResponse& operator=(CancelBarrierResponse&& from) noexcept {
    if (this == &from) return *this;
    // A true move (InternalSwap) is only legal when both objects live on the
    // same arena; otherwise this falls back to a copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const CancelBarrierResponse& default_instance() {
    return *internal_default_instance();
  }
  static inline const CancelBarrierResponse* internal_default_instance() {
    return reinterpret_cast<const CancelBarrierResponse*>(
               &_CancelBarrierResponse_default_instance_);
  }
  // Index of this message within the generated file's message table.
  static constexpr int kIndexInFileMessages =
    33;

  friend void swap(CancelBarrierResponse& a, CancelBarrierResponse& b) {
    a.Swap(&b);
  }
  inline void Swap(CancelBarrierResponse* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      // Cross-arena swap must copy through a temporary.
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(CancelBarrierResponse* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  CancelBarrierResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<CancelBarrierResponse>(arena);
  }
  CancelBarrierResponse* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const CancelBarrierResponse& from);
  void MergeFrom(const CancelBarrierResponse& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(CancelBarrierResponse* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.CancelBarrierResponse";
  }
  protected:
  explicit CancelBarrierResponse(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  // @@protoc_insertion_point(class_scope:tensorflow.CancelBarrierResponse)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  struct Impl_ {
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto;
};
5132 // ===================================================================
5133 
5134 
5135 // ===================================================================
5136 
5137 #ifdef __GNUC__
5138   #pragma GCC diagnostic push
5139   #pragma GCC diagnostic ignored "-Wstrict-aliasing"
5140 #endif  // __GNUC__
5141 // CoordinatedTask
5142 
5143 // string job_name = 1;
// Resets `job_name` (field 1) to the empty string.
inline void CoordinatedTask::clear_job_name() {
  _impl_.job_name_.ClearToEmpty();
}
job_name()5147 inline const std::string& CoordinatedTask::job_name() const {
5148   // @@protoc_insertion_point(field_get:tensorflow.CoordinatedTask.job_name)
5149   return _internal_job_name();
5150 }
5151 template <typename ArgT0, typename... ArgT>
5152 inline PROTOBUF_ALWAYS_INLINE
set_job_name(ArgT0 && arg0,ArgT...args)5153 void CoordinatedTask::set_job_name(ArgT0&& arg0, ArgT... args) {
5154 
5155  _impl_.job_name_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
5156   // @@protoc_insertion_point(field_set:tensorflow.CoordinatedTask.job_name)
5157 }
mutable_job_name()5158 inline std::string* CoordinatedTask::mutable_job_name() {
5159   std::string* _s = _internal_mutable_job_name();
5160   // @@protoc_insertion_point(field_mutable:tensorflow.CoordinatedTask.job_name)
5161   return _s;
5162 }
_internal_job_name()5163 inline const std::string& CoordinatedTask::_internal_job_name() const {
5164   return _impl_.job_name_.Get();
5165 }
_internal_set_job_name(const std::string & value)5166 inline void CoordinatedTask::_internal_set_job_name(const std::string& value) {
5167 
5168   _impl_.job_name_.Set(value, GetArenaForAllocation());
5169 }
_internal_mutable_job_name()5170 inline std::string* CoordinatedTask::_internal_mutable_job_name() {
5171 
5172   return _impl_.job_name_.Mutable(GetArenaForAllocation());
5173 }
release_job_name()5174 inline std::string* CoordinatedTask::release_job_name() {
5175   // @@protoc_insertion_point(field_release:tensorflow.CoordinatedTask.job_name)
5176   return _impl_.job_name_.Release();
5177 }
set_allocated_job_name(std::string * job_name)5178 inline void CoordinatedTask::set_allocated_job_name(std::string* job_name) {
5179   _impl_.job_name_.SetAllocated(job_name, GetArenaForAllocation());
5180 #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
5181   if (_impl_.job_name_.IsDefault()) {
5182     _impl_.job_name_.Set("", GetArenaForAllocation());
5183   }
5184 #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
5185   // @@protoc_insertion_point(field_set_allocated:tensorflow.CoordinatedTask.job_name)
5186 }
5187 
5188 // int32 task_id = 2;
// Resets task_id to its proto3 default of 0.
inline void CoordinatedTask::clear_task_id() {
  _impl_.task_id_ = 0;
}
inline ::int32_t CoordinatedTask::_internal_task_id() const {
  return _impl_.task_id_;
}
// Returns the current value of task_id.
inline ::int32_t CoordinatedTask::task_id() const {
  // @@protoc_insertion_point(field_get:tensorflow.CoordinatedTask.task_id)
  return _internal_task_id();
}
inline void CoordinatedTask::_internal_set_task_id(::int32_t value) {

  _impl_.task_id_ = value;
}
// Sets task_id to |value|.
inline void CoordinatedTask::set_task_id(::int32_t value) {
  _internal_set_task_id(value);
  // @@protoc_insertion_point(field_set:tensorflow.CoordinatedTask.task_id)
}
5207 
5208 // -------------------------------------------------------------------
5209 
5210 // CoordinationServiceError
5211 
5212 // bool is_reported_error = 3;
// Resets is_reported_error to its proto3 default of false.
inline void CoordinationServiceError::clear_is_reported_error() {
  _impl_.is_reported_error_ = false;
}
inline bool CoordinationServiceError::_internal_is_reported_error() const {
  return _impl_.is_reported_error_;
}
// Returns the current value of is_reported_error.
inline bool CoordinationServiceError::is_reported_error() const {
  // @@protoc_insertion_point(field_get:tensorflow.CoordinationServiceError.is_reported_error)
  return _internal_is_reported_error();
}
inline void CoordinationServiceError::_internal_set_is_reported_error(bool value) {

  _impl_.is_reported_error_ = value;
}
// Sets is_reported_error to |value|.
inline void CoordinationServiceError::set_is_reported_error(bool value) {
  _internal_set_is_reported_error(value);
  // @@protoc_insertion_point(field_set:tensorflow.CoordinationServiceError.is_reported_error)
}
5231 
5232 // .tensorflow.CoordinatedTask source_task = 4;
// True when the source_task submessage has been allocated (always false on
// the shared default instance).
inline bool CoordinationServiceError::_internal_has_source_task() const {
  return this != internal_default_instance() && _impl_.source_task_ != nullptr;
}
inline bool CoordinationServiceError::has_source_task() const {
  return _internal_has_source_task();
}
// Clears source_task. The submessage is deleted only when the message is
// heap-allocated; arena-owned submessages are simply dropped (the arena
// frees them later).
inline void CoordinationServiceError::clear_source_task() {
  if (GetArenaForAllocation() == nullptr && _impl_.source_task_ != nullptr) {
    delete _impl_.source_task_;
  }
  _impl_.source_task_ = nullptr;
}
// Returns the submessage, or the immutable CoordinatedTask default instance
// when unset.
inline const ::tensorflow::CoordinatedTask& CoordinationServiceError::_internal_source_task() const {
  const ::tensorflow::CoordinatedTask* p = _impl_.source_task_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::CoordinatedTask&>(
      ::tensorflow::_CoordinatedTask_default_instance_);
}
inline const ::tensorflow::CoordinatedTask& CoordinationServiceError::source_task() const {
  // @@protoc_insertion_point(field_get:tensorflow.CoordinationServiceError.source_task)
  return _internal_source_task();
}
// Adopts |source_task| with no cross-arena ownership fixup: caller must
// guarantee the pointer's lifetime matches this message's arena. The old
// value is deleted only when this message is heap-allocated.
inline void CoordinationServiceError::unsafe_arena_set_allocated_source_task(
    ::tensorflow::CoordinatedTask* source_task) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.source_task_);
  }
  _impl_.source_task_ = source_task;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.CoordinationServiceError.source_task)
}
// Transfers ownership of source_task to the caller (nullptr when unset).
// If the message lives on an arena, a heap copy is returned so the caller
// truly owns the result; PROTOBUF_FORCE_COPY_IN_RELEASE forces a copy even
// off-arena.
inline ::tensorflow::CoordinatedTask* CoordinationServiceError::release_source_task() {

  ::tensorflow::CoordinatedTask* temp = _impl_.source_task_;
  _impl_.source_task_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old =  reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else  // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
// Returns the raw submessage pointer without copying; the pointer may still
// be arena-owned, so the caller must respect the arena's lifetime.
inline ::tensorflow::CoordinatedTask* CoordinationServiceError::unsafe_arena_release_source_task() {
  // @@protoc_insertion_point(field_release:tensorflow.CoordinationServiceError.source_task)

  ::tensorflow::CoordinatedTask* temp = _impl_.source_task_;
  _impl_.source_task_ = nullptr;
  return temp;
}
// Lazily creates the submessage on this message's arena.
inline ::tensorflow::CoordinatedTask* CoordinationServiceError::_internal_mutable_source_task() {

  if (_impl_.source_task_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::CoordinatedTask>(GetArenaForAllocation());
    _impl_.source_task_ = p;
  }
  return _impl_.source_task_;
}
inline ::tensorflow::CoordinatedTask* CoordinationServiceError::mutable_source_task() {
  ::tensorflow::CoordinatedTask* _msg = _internal_mutable_source_task();
  // @@protoc_insertion_point(field_mutable:tensorflow.CoordinationServiceError.source_task)
  return _msg;
}
// Adopts |source_task|, deleting any previous heap-owned value. When the
// new submessage belongs to a different arena than this message, it is
// re-homed via GetOwnedMessage so both end up with compatible lifetimes.
inline void CoordinationServiceError::set_allocated_source_task(::tensorflow::CoordinatedTask* source_task) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete _impl_.source_task_;
  }
  if (source_task) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(source_task);
    if (message_arena != submessage_arena) {
      source_task = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, source_task, submessage_arena);
    }

  } else {

  }
  _impl_.source_task_ = source_task;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.CoordinationServiceError.source_task)
}
5316 
5317 // -------------------------------------------------------------------
5318 
5319 // TfDeviceList
5320 
5321 // repeated .tensorflow.DeviceAttributes devices = 1;
inline int TfDeviceList::_internal_devices_size() const {
  return _impl_.devices_.size();
}
// Number of elements in the repeated devices field.
inline int TfDeviceList::devices_size() const {
  return _internal_devices_size();
}
// Mutable access to the element at |index| (bounds are the caller's
// responsibility, as with RepeatedPtrField::Mutable).
inline ::tensorflow::DeviceAttributes* TfDeviceList::mutable_devices(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.TfDeviceList.devices)
  return _impl_.devices_.Mutable(index);
}
// Mutable access to the whole repeated field.
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::DeviceAttributes >*
TfDeviceList::mutable_devices() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.TfDeviceList.devices)
  return &_impl_.devices_;
}
inline const ::tensorflow::DeviceAttributes& TfDeviceList::_internal_devices(int index) const {
  return _impl_.devices_.Get(index);
}
// Read-only access to the element at |index|.
inline const ::tensorflow::DeviceAttributes& TfDeviceList::devices(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.TfDeviceList.devices)
  return _internal_devices(index);
}
inline ::tensorflow::DeviceAttributes* TfDeviceList::_internal_add_devices() {
  return _impl_.devices_.Add();
}
// Appends a new default-constructed element and returns it for the caller
// to fill in.
inline ::tensorflow::DeviceAttributes* TfDeviceList::add_devices() {
  ::tensorflow::DeviceAttributes* _add = _internal_add_devices();
  // @@protoc_insertion_point(field_add:tensorflow.TfDeviceList.devices)
  return _add;
}
// Read-only access to the whole repeated field.
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::DeviceAttributes >&
TfDeviceList::devices() const {
  // @@protoc_insertion_point(field_list:tensorflow.TfDeviceList.devices)
  return _impl_.devices_;
}
5357 
5358 // -------------------------------------------------------------------
5359 
5360 // XlaDeviceList
5361 
5362 // .xla.GlobalTopologyProto devices = 1;
// True when the devices submessage has been allocated (always false on the
// shared default instance).
inline bool XlaDeviceList::_internal_has_devices() const {
  return this != internal_default_instance() && _impl_.devices_ != nullptr;
}
inline bool XlaDeviceList::has_devices() const {
  return _internal_has_devices();
}
// Returns the submessage, or the immutable GlobalTopologyProto default
// instance when unset.
inline const ::xla::GlobalTopologyProto& XlaDeviceList::_internal_devices() const {
  const ::xla::GlobalTopologyProto* p = _impl_.devices_;
  return p != nullptr ? *p : reinterpret_cast<const ::xla::GlobalTopologyProto&>(
      ::xla::_GlobalTopologyProto_default_instance_);
}
inline const ::xla::GlobalTopologyProto& XlaDeviceList::devices() const {
  // @@protoc_insertion_point(field_get:tensorflow.XlaDeviceList.devices)
  return _internal_devices();
}
// Adopts |devices| with no cross-arena ownership fixup: caller must ensure
// the pointer's lifetime matches this message's arena. The old value is
// deleted only when this message is heap-allocated.
inline void XlaDeviceList::unsafe_arena_set_allocated_devices(
    ::xla::GlobalTopologyProto* devices) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.devices_);
  }
  _impl_.devices_ = devices;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.XlaDeviceList.devices)
}
// Transfers ownership of devices to the caller (nullptr when unset). When
// the message lives on an arena, a heap copy is returned so the caller
// truly owns the result; PROTOBUF_FORCE_COPY_IN_RELEASE forces a copy even
// off-arena.
inline ::xla::GlobalTopologyProto* XlaDeviceList::release_devices() {

  ::xla::GlobalTopologyProto* temp = _impl_.devices_;
  _impl_.devices_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old =  reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else  // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
// Returns the raw submessage pointer without copying; may still be
// arena-owned, so the caller must respect the arena's lifetime.
inline ::xla::GlobalTopologyProto* XlaDeviceList::unsafe_arena_release_devices() {
  // @@protoc_insertion_point(field_release:tensorflow.XlaDeviceList.devices)

  ::xla::GlobalTopologyProto* temp = _impl_.devices_;
  _impl_.devices_ = nullptr;
  return temp;
}
// Lazily creates the submessage on this message's arena.
inline ::xla::GlobalTopologyProto* XlaDeviceList::_internal_mutable_devices() {

  if (_impl_.devices_ == nullptr) {
    auto* p = CreateMaybeMessage<::xla::GlobalTopologyProto>(GetArenaForAllocation());
    _impl_.devices_ = p;
  }
  return _impl_.devices_;
}
inline ::xla::GlobalTopologyProto* XlaDeviceList::mutable_devices() {
  ::xla::GlobalTopologyProto* _msg = _internal_mutable_devices();
  // @@protoc_insertion_point(field_mutable:tensorflow.XlaDeviceList.devices)
  return _msg;
}
// Adopts |devices|, deleting any previous heap-owned value. When the new
// submessage belongs to a different arena than this message, it is re-homed
// via GetOwnedMessage so both end up with compatible lifetimes. (The cast
// to MessageLite here reflects that GlobalTopologyProto is a cross-DSO
// type in this generated header.)
inline void XlaDeviceList::set_allocated_devices(::xla::GlobalTopologyProto* devices) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.devices_);
  }
  if (devices) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(
                reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(devices));
    if (message_arena != submessage_arena) {
      devices = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, devices, submessage_arena);
    }

  } else {

  }
  _impl_.devices_ = devices;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.XlaDeviceList.devices)
}
5441 
5442 // -------------------------------------------------------------------
5443 
5444 // CoordinationServiceDeviceInfo
5445 
5446 // .tensorflow.TfDeviceList tf = 1;
// True when the `type` oneof currently holds the tf member.
inline bool CoordinationServiceDeviceInfo::_internal_has_tf() const {
  return type_case() == kTf;
}
inline bool CoordinationServiceDeviceInfo::has_tf() const {
  return _internal_has_tf();
}
// Records kTf as the active oneof member; does not allocate or free.
inline void CoordinationServiceDeviceInfo::set_has_tf() {
  _impl_._oneof_case_[0] = kTf;
}
// Clears tf if it is the active member: deletes the submessage when this
// message is heap-allocated (arena-owned objects are left to the arena),
// then resets the oneof to TYPE_NOT_SET.
inline void CoordinationServiceDeviceInfo::clear_tf() {
  if (_internal_has_tf()) {
    if (GetArenaForAllocation() == nullptr) {
      delete _impl_.type_.tf_;
    }
    clear_has_type();
  }
}
// Transfers ownership of tf to the caller, or returns nullptr when tf is
// not the active member. On an arena a heap copy is returned so the caller
// truly owns the result.
inline ::tensorflow::TfDeviceList* CoordinationServiceDeviceInfo::release_tf() {
  // @@protoc_insertion_point(field_release:tensorflow.CoordinationServiceDeviceInfo.tf)
  if (_internal_has_tf()) {
    clear_has_type();
    ::tensorflow::TfDeviceList* temp = _impl_.type_.tf_;
    if (GetArenaForAllocation() != nullptr) {
      temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
    }
    _impl_.type_.tf_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
// Returns the tf submessage, or the TfDeviceList default instance when
// another member (or none) is set.
inline const ::tensorflow::TfDeviceList& CoordinationServiceDeviceInfo::_internal_tf() const {
  return _internal_has_tf()
      ? *_impl_.type_.tf_
      : reinterpret_cast< ::tensorflow::TfDeviceList&>(::tensorflow::_TfDeviceList_default_instance_);
}
inline const ::tensorflow::TfDeviceList& CoordinationServiceDeviceInfo::tf() const {
  // @@protoc_insertion_point(field_get:tensorflow.CoordinationServiceDeviceInfo.tf)
  return _internal_tf();
}
// Returns the raw tf pointer (nullptr if not active) without copying; the
// pointer may still be arena-owned.
inline ::tensorflow::TfDeviceList* CoordinationServiceDeviceInfo::unsafe_arena_release_tf() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.CoordinationServiceDeviceInfo.tf)
  if (_internal_has_tf()) {
    clear_has_type();
    ::tensorflow::TfDeviceList* temp = _impl_.type_.tf_;
    _impl_.type_.tf_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
// Clears whatever member was set, then adopts |tf| (nullptr leaves the
// oneof unset) with no cross-arena ownership fixup.
inline void CoordinationServiceDeviceInfo::unsafe_arena_set_allocated_tf(::tensorflow::TfDeviceList* tf) {
  clear_type();
  if (tf) {
    set_has_tf();
    _impl_.type_.tf_ = tf;
  }
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.CoordinationServiceDeviceInfo.tf)
}
// Switches the oneof to tf, freeing any other active member and creating a
// fresh submessage on this message's arena if needed.
inline ::tensorflow::TfDeviceList* CoordinationServiceDeviceInfo::_internal_mutable_tf() {
  if (!_internal_has_tf()) {
    clear_type();
    set_has_tf();
    _impl_.type_.tf_ = CreateMaybeMessage< ::tensorflow::TfDeviceList >(GetArenaForAllocation());
  }
  return _impl_.type_.tf_;
}
inline ::tensorflow::TfDeviceList* CoordinationServiceDeviceInfo::mutable_tf() {
  ::tensorflow::TfDeviceList* _msg = _internal_mutable_tf();
  // @@protoc_insertion_point(field_mutable:tensorflow.CoordinationServiceDeviceInfo.tf)
  return _msg;
}
5519 
5520 // .tensorflow.XlaDeviceList xla = 2;
// True when the `type` oneof currently holds the xla member.
inline bool CoordinationServiceDeviceInfo::_internal_has_xla() const {
  return type_case() == kXla;
}
inline bool CoordinationServiceDeviceInfo::has_xla() const {
  return _internal_has_xla();
}
// Records kXla as the active oneof member; does not allocate or free.
inline void CoordinationServiceDeviceInfo::set_has_xla() {
  _impl_._oneof_case_[0] = kXla;
}
// Clears xla if it is the active member: deletes the submessage when this
// message is heap-allocated (arena-owned objects are left to the arena),
// then resets the oneof to TYPE_NOT_SET.
inline void CoordinationServiceDeviceInfo::clear_xla() {
  if (_internal_has_xla()) {
    if (GetArenaForAllocation() == nullptr) {
      delete _impl_.type_.xla_;
    }
    clear_has_type();
  }
}
// Transfers ownership of xla to the caller, or returns nullptr when xla is
// not the active member. On an arena a heap copy is returned so the caller
// truly owns the result.
inline ::tensorflow::XlaDeviceList* CoordinationServiceDeviceInfo::release_xla() {
  // @@protoc_insertion_point(field_release:tensorflow.CoordinationServiceDeviceInfo.xla)
  if (_internal_has_xla()) {
    clear_has_type();
    ::tensorflow::XlaDeviceList* temp = _impl_.type_.xla_;
    if (GetArenaForAllocation() != nullptr) {
      temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
    }
    _impl_.type_.xla_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
// Returns the xla submessage, or the XlaDeviceList default instance when
// another member (or none) is set.
inline const ::tensorflow::XlaDeviceList& CoordinationServiceDeviceInfo::_internal_xla() const {
  return _internal_has_xla()
      ? *_impl_.type_.xla_
      : reinterpret_cast< ::tensorflow::XlaDeviceList&>(::tensorflow::_XlaDeviceList_default_instance_);
}
inline const ::tensorflow::XlaDeviceList& CoordinationServiceDeviceInfo::xla() const {
  // @@protoc_insertion_point(field_get:tensorflow.CoordinationServiceDeviceInfo.xla)
  return _internal_xla();
}
// Returns the raw xla pointer (nullptr if not active) without copying; the
// pointer may still be arena-owned.
inline ::tensorflow::XlaDeviceList* CoordinationServiceDeviceInfo::unsafe_arena_release_xla() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.CoordinationServiceDeviceInfo.xla)
  if (_internal_has_xla()) {
    clear_has_type();
    ::tensorflow::XlaDeviceList* temp = _impl_.type_.xla_;
    _impl_.type_.xla_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
// Clears whatever member was set, then adopts |xla| (nullptr leaves the
// oneof unset) with no cross-arena ownership fixup.
inline void CoordinationServiceDeviceInfo::unsafe_arena_set_allocated_xla(::tensorflow::XlaDeviceList* xla) {
  clear_type();
  if (xla) {
    set_has_xla();
    _impl_.type_.xla_ = xla;
  }
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.CoordinationServiceDeviceInfo.xla)
}
// Switches the oneof to xla, freeing any other active member and creating a
// fresh submessage on this message's arena if needed.
inline ::tensorflow::XlaDeviceList* CoordinationServiceDeviceInfo::_internal_mutable_xla() {
  if (!_internal_has_xla()) {
    clear_type();
    set_has_xla();
    _impl_.type_.xla_ = CreateMaybeMessage< ::tensorflow::XlaDeviceList >(GetArenaForAllocation());
  }
  return _impl_.type_.xla_;
}
inline ::tensorflow::XlaDeviceList* CoordinationServiceDeviceInfo::mutable_xla() {
  ::tensorflow::XlaDeviceList* _msg = _internal_mutable_xla();
  // @@protoc_insertion_point(field_mutable:tensorflow.CoordinationServiceDeviceInfo.xla)
  return _msg;
}
5593 
// True when any member of the `type` oneof is set.
inline bool CoordinationServiceDeviceInfo::has_type() const {
  return type_case() != TYPE_NOT_SET;
}
// Marks the `type` oneof as unset. Does NOT free the active member; the
// per-member clear_*() helpers free it first when appropriate.
inline void CoordinationServiceDeviceInfo::clear_has_type() {
  _impl_._oneof_case_[0] = TYPE_NOT_SET;
}
// Reports which member of the `type` oneof is currently set.
inline CoordinationServiceDeviceInfo::TypeCase CoordinationServiceDeviceInfo::type_case() const {
  return CoordinationServiceDeviceInfo::TypeCase(_impl_._oneof_case_[0]);
}
5603 // -------------------------------------------------------------------
5604 
5605 // RegisterTaskRequest
5606 
5607 // fixed64 incarnation = 3;
// Resets incarnation to its proto3 default of 0.
inline void RegisterTaskRequest::clear_incarnation() {
  _impl_.incarnation_ = ::uint64_t{0u};
}
inline ::uint64_t RegisterTaskRequest::_internal_incarnation() const {
  return _impl_.incarnation_;
}
// Returns the current value of incarnation.
inline ::uint64_t RegisterTaskRequest::incarnation() const {
  // @@protoc_insertion_point(field_get:tensorflow.RegisterTaskRequest.incarnation)
  return _internal_incarnation();
}
inline void RegisterTaskRequest::_internal_set_incarnation(::uint64_t value) {

  _impl_.incarnation_ = value;
}
// Sets incarnation to |value|.
inline void RegisterTaskRequest::set_incarnation(::uint64_t value) {
  _internal_set_incarnation(value);
  // @@protoc_insertion_point(field_set:tensorflow.RegisterTaskRequest.incarnation)
}
5626 
5627 // .tensorflow.CoordinatedTask source_task = 5;
// True when the source_task submessage has been allocated (always false on
// the shared default instance).
inline bool RegisterTaskRequest::_internal_has_source_task() const {
  return this != internal_default_instance() && _impl_.source_task_ != nullptr;
}
inline bool RegisterTaskRequest::has_source_task() const {
  return _internal_has_source_task();
}
// Clears source_task. The submessage is deleted only when the message is
// heap-allocated; arena-owned submessages are simply dropped (the arena
// frees them later).
inline void RegisterTaskRequest::clear_source_task() {
  if (GetArenaForAllocation() == nullptr && _impl_.source_task_ != nullptr) {
    delete _impl_.source_task_;
  }
  _impl_.source_task_ = nullptr;
}
// Returns the submessage, or the immutable CoordinatedTask default instance
// when unset.
inline const ::tensorflow::CoordinatedTask& RegisterTaskRequest::_internal_source_task() const {
  const ::tensorflow::CoordinatedTask* p = _impl_.source_task_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::CoordinatedTask&>(
      ::tensorflow::_CoordinatedTask_default_instance_);
}
inline const ::tensorflow::CoordinatedTask& RegisterTaskRequest::source_task() const {
  // @@protoc_insertion_point(field_get:tensorflow.RegisterTaskRequest.source_task)
  return _internal_source_task();
}
// Adopts |source_task| with no cross-arena ownership fixup: caller must
// guarantee the pointer's lifetime matches this message's arena. The old
// value is deleted only when this message is heap-allocated.
inline void RegisterTaskRequest::unsafe_arena_set_allocated_source_task(
    ::tensorflow::CoordinatedTask* source_task) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.source_task_);
  }
  _impl_.source_task_ = source_task;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.RegisterTaskRequest.source_task)
}
// Transfers ownership of source_task to the caller (nullptr when unset).
// If the message lives on an arena, a heap copy is returned so the caller
// truly owns the result; PROTOBUF_FORCE_COPY_IN_RELEASE forces a copy even
// off-arena.
inline ::tensorflow::CoordinatedTask* RegisterTaskRequest::release_source_task() {

  ::tensorflow::CoordinatedTask* temp = _impl_.source_task_;
  _impl_.source_task_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old =  reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else  // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
// Returns the raw submessage pointer without copying; the pointer may still
// be arena-owned, so the caller must respect the arena's lifetime.
inline ::tensorflow::CoordinatedTask* RegisterTaskRequest::unsafe_arena_release_source_task() {
  // @@protoc_insertion_point(field_release:tensorflow.RegisterTaskRequest.source_task)

  ::tensorflow::CoordinatedTask* temp = _impl_.source_task_;
  _impl_.source_task_ = nullptr;
  return temp;
}
// Lazily creates the submessage on this message's arena.
inline ::tensorflow::CoordinatedTask* RegisterTaskRequest::_internal_mutable_source_task() {

  if (_impl_.source_task_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::CoordinatedTask>(GetArenaForAllocation());
    _impl_.source_task_ = p;
  }
  return _impl_.source_task_;
}
inline ::tensorflow::CoordinatedTask* RegisterTaskRequest::mutable_source_task() {
  ::tensorflow::CoordinatedTask* _msg = _internal_mutable_source_task();
  // @@protoc_insertion_point(field_mutable:tensorflow.RegisterTaskRequest.source_task)
  return _msg;
}
// Adopts |source_task|, deleting any previous heap-owned value. When the
// new submessage belongs to a different arena than this message, it is
// re-homed via GetOwnedMessage so both end up with compatible lifetimes.
inline void RegisterTaskRequest::set_allocated_source_task(::tensorflow::CoordinatedTask* source_task) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete _impl_.source_task_;
  }
  if (source_task) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(source_task);
    if (message_arena != submessage_arena) {
      source_task = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, source_task, submessage_arena);
    }

  } else {

  }
  _impl_.source_task_ = source_task;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.RegisterTaskRequest.source_task)
}
5711 
5712 // -------------------------------------------------------------------
5713 
5714 // RegisterTaskResponse
5715 
5716 // fixed64 leader_incarnation = 1;
// Resets leader_incarnation to its proto3 default of 0.
inline void RegisterTaskResponse::clear_leader_incarnation() {
  _impl_.leader_incarnation_ = ::uint64_t{0u};
}
inline ::uint64_t RegisterTaskResponse::_internal_leader_incarnation() const {
  return _impl_.leader_incarnation_;
}
// Returns the current value of leader_incarnation.
inline ::uint64_t RegisterTaskResponse::leader_incarnation() const {
  // @@protoc_insertion_point(field_get:tensorflow.RegisterTaskResponse.leader_incarnation)
  return _internal_leader_incarnation();
}
inline void RegisterTaskResponse::_internal_set_leader_incarnation(::uint64_t value) {

  _impl_.leader_incarnation_ = value;
}
// Sets leader_incarnation to |value|.
inline void RegisterTaskResponse::set_leader_incarnation(::uint64_t value) {
  _internal_set_leader_incarnation(value);
  // @@protoc_insertion_point(field_set:tensorflow.RegisterTaskResponse.leader_incarnation)
}
5735 
5736 // -------------------------------------------------------------------
5737 
5738 // HeartbeatRequest
5739 
5740 // fixed64 incarnation = 3;
// Resets incarnation to its proto3 default of 0.
inline void HeartbeatRequest::clear_incarnation() {
  _impl_.incarnation_ = ::uint64_t{0u};
}
inline ::uint64_t HeartbeatRequest::_internal_incarnation() const {
  return _impl_.incarnation_;
}
// Returns the current value of incarnation.
inline ::uint64_t HeartbeatRequest::incarnation() const {
  // @@protoc_insertion_point(field_get:tensorflow.HeartbeatRequest.incarnation)
  return _internal_incarnation();
}
inline void HeartbeatRequest::_internal_set_incarnation(::uint64_t value) {

  _impl_.incarnation_ = value;
}
// Sets incarnation to |value|.
inline void HeartbeatRequest::set_incarnation(::uint64_t value) {
  _internal_set_incarnation(value);
  // @@protoc_insertion_point(field_set:tensorflow.HeartbeatRequest.incarnation)
}
5759 
5760 // .tensorflow.CoordinatedTask source_task = 4;
// True when the source_task submessage has been allocated (always false on
// the shared default instance).
inline bool HeartbeatRequest::_internal_has_source_task() const {
  return this != internal_default_instance() && _impl_.source_task_ != nullptr;
}
inline bool HeartbeatRequest::has_source_task() const {
  return _internal_has_source_task();
}
// Clears source_task. The submessage is deleted only when the message is
// heap-allocated; arena-owned submessages are simply dropped (the arena
// frees them later).
inline void HeartbeatRequest::clear_source_task() {
  if (GetArenaForAllocation() == nullptr && _impl_.source_task_ != nullptr) {
    delete _impl_.source_task_;
  }
  _impl_.source_task_ = nullptr;
}
// Returns the submessage, or the immutable CoordinatedTask default instance
// when unset.
inline const ::tensorflow::CoordinatedTask& HeartbeatRequest::_internal_source_task() const {
  const ::tensorflow::CoordinatedTask* p = _impl_.source_task_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::CoordinatedTask&>(
      ::tensorflow::_CoordinatedTask_default_instance_);
}
inline const ::tensorflow::CoordinatedTask& HeartbeatRequest::source_task() const {
  // @@protoc_insertion_point(field_get:tensorflow.HeartbeatRequest.source_task)
  return _internal_source_task();
}
// Adopts |source_task| with no cross-arena ownership fixup: caller must
// guarantee the pointer's lifetime matches this message's arena. The old
// value is deleted only when this message is heap-allocated.
inline void HeartbeatRequest::unsafe_arena_set_allocated_source_task(
    ::tensorflow::CoordinatedTask* source_task) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.source_task_);
  }
  _impl_.source_task_ = source_task;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.HeartbeatRequest.source_task)
}
// Transfers ownership of source_task to the caller (nullptr when unset).
// If the message lives on an arena, a heap copy is returned so the caller
// truly owns the result; PROTOBUF_FORCE_COPY_IN_RELEASE forces a copy even
// off-arena.
inline ::tensorflow::CoordinatedTask* HeartbeatRequest::release_source_task() {

  ::tensorflow::CoordinatedTask* temp = _impl_.source_task_;
  _impl_.source_task_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old =  reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else  // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
// Returns the raw submessage pointer without copying; the pointer may still
// be arena-owned, so the caller must respect the arena's lifetime.
inline ::tensorflow::CoordinatedTask* HeartbeatRequest::unsafe_arena_release_source_task() {
  // @@protoc_insertion_point(field_release:tensorflow.HeartbeatRequest.source_task)

  ::tensorflow::CoordinatedTask* temp = _impl_.source_task_;
  _impl_.source_task_ = nullptr;
  return temp;
}
// Lazily creates the submessage on this message's arena.
inline ::tensorflow::CoordinatedTask* HeartbeatRequest::_internal_mutable_source_task() {

  if (_impl_.source_task_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::CoordinatedTask>(GetArenaForAllocation());
    _impl_.source_task_ = p;
  }
  return _impl_.source_task_;
}
inline ::tensorflow::CoordinatedTask* HeartbeatRequest::mutable_source_task() {
  ::tensorflow::CoordinatedTask* _msg = _internal_mutable_source_task();
  // @@protoc_insertion_point(field_mutable:tensorflow.HeartbeatRequest.source_task)
  return _msg;
}
// Adopts |source_task|, deleting any previous heap-owned value. When the
// new submessage belongs to a different arena than this message, it is
// re-homed via GetOwnedMessage so both end up with compatible lifetimes.
inline void HeartbeatRequest::set_allocated_source_task(::tensorflow::CoordinatedTask* source_task) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete _impl_.source_task_;
  }
  if (source_task) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(source_task);
    if (message_arena != submessage_arena) {
      source_task = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, source_task, submessage_arena);
    }

  } else {

  }
  _impl_.source_task_ = source_task;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.HeartbeatRequest.source_task)
}
5844 
5845 // -------------------------------------------------------------------
5846 
5847 // HeartbeatResponse
5848 
5849 // fixed64 leader_incarnation = 1;
// Accessors for the `fixed64 leader_incarnation` scalar of HeartbeatResponse.
// Plain value semantics: no arena or ownership concerns.
inline void HeartbeatResponse::clear_leader_incarnation() {
  _impl_.leader_incarnation_ = ::uint64_t{0u};  // proto3 default
}
inline ::uint64_t HeartbeatResponse::_internal_leader_incarnation() const {
  return _impl_.leader_incarnation_;
}
inline ::uint64_t HeartbeatResponse::leader_incarnation() const {
  // @@protoc_insertion_point(field_get:tensorflow.HeartbeatResponse.leader_incarnation)
  return _internal_leader_incarnation();
}
inline void HeartbeatResponse::_internal_set_leader_incarnation(::uint64_t value) {

  _impl_.leader_incarnation_ = value;
}
inline void HeartbeatResponse::set_leader_incarnation(::uint64_t value) {
  _internal_set_leader_incarnation(value);
  // @@protoc_insertion_point(field_set:tensorflow.HeartbeatResponse.leader_incarnation)
}
5868 
5869 // -------------------------------------------------------------------
5870 
5871 // WaitForAllTasksRequest
5872 
5873 // .tensorflow.CoordinationServiceDeviceInfo local_device_info = 4;
// Accessors for the `.tensorflow.CoordinationServiceDeviceInfo
// local_device_info` submessage of WaitForAllTasksRequest (generated code;
// standard protobuf arena contract).
// True iff the submessage pointer is set; always false on the default instance.
inline bool WaitForAllTasksRequest::_internal_has_local_device_info() const {
  return this != internal_default_instance() && _impl_.local_device_info_ != nullptr;
}
inline bool WaitForAllTasksRequest::has_local_device_info() const {
  return _internal_has_local_device_info();
}
// Deletes the submessage only when heap-owned; arena memory is reclaimed with
// the arena.
inline void WaitForAllTasksRequest::clear_local_device_info() {
  if (GetArenaForAllocation() == nullptr && _impl_.local_device_info_ != nullptr) {
    delete _impl_.local_device_info_;
  }
  _impl_.local_device_info_ = nullptr;
}
// Returns the stored submessage, or the immutable default instance when unset.
inline const ::tensorflow::CoordinationServiceDeviceInfo& WaitForAllTasksRequest::_internal_local_device_info() const {
  const ::tensorflow::CoordinationServiceDeviceInfo* p = _impl_.local_device_info_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::CoordinationServiceDeviceInfo&>(
      ::tensorflow::_CoordinationServiceDeviceInfo_default_instance_);
}
inline const ::tensorflow::CoordinationServiceDeviceInfo& WaitForAllTasksRequest::local_device_info() const {
  // @@protoc_insertion_point(field_get:tensorflow.WaitForAllTasksRequest.local_device_info)
  return _internal_local_device_info();
}
// "Unsafe" setter: stores the pointer verbatim; caller must ensure it lives on
// this message's arena (or heap when arena-less).
inline void WaitForAllTasksRequest::unsafe_arena_set_allocated_local_device_info(
    ::tensorflow::CoordinationServiceDeviceInfo* local_device_info) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.local_device_info_);
  }
  _impl_.local_device_info_ = local_device_info;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.WaitForAllTasksRequest.local_device_info)
}
// Transfers ownership to the caller; on an arena a heap copy is returned so
// the result is always deletable.
inline ::tensorflow::CoordinationServiceDeviceInfo* WaitForAllTasksRequest::release_local_device_info() {

  ::tensorflow::CoordinationServiceDeviceInfo* temp = _impl_.local_device_info_;
  _impl_.local_device_info_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old =  reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else  // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
// "Unsafe" release: raw pointer, no copy; arena-owned result must not be
// deleted by the caller.
inline ::tensorflow::CoordinationServiceDeviceInfo* WaitForAllTasksRequest::unsafe_arena_release_local_device_info() {
  // @@protoc_insertion_point(field_release:tensorflow.WaitForAllTasksRequest.local_device_info)

  ::tensorflow::CoordinationServiceDeviceInfo* temp = _impl_.local_device_info_;
  _impl_.local_device_info_ = nullptr;
  return temp;
}
// Lazily creates the submessage (on this message's arena when present).
inline ::tensorflow::CoordinationServiceDeviceInfo* WaitForAllTasksRequest::_internal_mutable_local_device_info() {

  if (_impl_.local_device_info_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::CoordinationServiceDeviceInfo>(GetArenaForAllocation());
    _impl_.local_device_info_ = p;
  }
  return _impl_.local_device_info_;
}
inline ::tensorflow::CoordinationServiceDeviceInfo* WaitForAllTasksRequest::mutable_local_device_info() {
  ::tensorflow::CoordinationServiceDeviceInfo* _msg = _internal_mutable_local_device_info();
  // @@protoc_insertion_point(field_mutable:tensorflow.WaitForAllTasksRequest.local_device_info)
  return _msg;
}
// Takes ownership; copies onto this message's arena when the arenas differ.
inline void WaitForAllTasksRequest::set_allocated_local_device_info(::tensorflow::CoordinationServiceDeviceInfo* local_device_info) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete _impl_.local_device_info_;
  }
  if (local_device_info) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(local_device_info);
    if (message_arena != submessage_arena) {
      local_device_info = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, local_device_info, submessage_arena);
    }

  } else {

  }
  _impl_.local_device_info_ = local_device_info;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.WaitForAllTasksRequest.local_device_info)
}
5957 
5958 // .tensorflow.CoordinatedTask source_task = 5;
// Accessors for the `.tensorflow.CoordinatedTask source_task` submessage of
// WaitForAllTasksRequest (generated code; standard protobuf arena contract).
// True iff the submessage pointer is set; always false on the default instance.
inline bool WaitForAllTasksRequest::_internal_has_source_task() const {
  return this != internal_default_instance() && _impl_.source_task_ != nullptr;
}
inline bool WaitForAllTasksRequest::has_source_task() const {
  return _internal_has_source_task();
}
// Deletes the submessage only when heap-owned; arena memory is reclaimed with
// the arena.
inline void WaitForAllTasksRequest::clear_source_task() {
  if (GetArenaForAllocation() == nullptr && _impl_.source_task_ != nullptr) {
    delete _impl_.source_task_;
  }
  _impl_.source_task_ = nullptr;
}
// Returns the stored submessage, or the immutable default instance when unset.
inline const ::tensorflow::CoordinatedTask& WaitForAllTasksRequest::_internal_source_task() const {
  const ::tensorflow::CoordinatedTask* p = _impl_.source_task_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::CoordinatedTask&>(
      ::tensorflow::_CoordinatedTask_default_instance_);
}
inline const ::tensorflow::CoordinatedTask& WaitForAllTasksRequest::source_task() const {
  // @@protoc_insertion_point(field_get:tensorflow.WaitForAllTasksRequest.source_task)
  return _internal_source_task();
}
// "Unsafe" setter: stores the pointer verbatim; caller must ensure it lives on
// this message's arena (or heap when arena-less).
inline void WaitForAllTasksRequest::unsafe_arena_set_allocated_source_task(
    ::tensorflow::CoordinatedTask* source_task) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.source_task_);
  }
  _impl_.source_task_ = source_task;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.WaitForAllTasksRequest.source_task)
}
// Transfers ownership to the caller; on an arena a heap copy is returned so
// the result is always deletable.
inline ::tensorflow::CoordinatedTask* WaitForAllTasksRequest::release_source_task() {

  ::tensorflow::CoordinatedTask* temp = _impl_.source_task_;
  _impl_.source_task_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old =  reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else  // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
// "Unsafe" release: raw pointer, no copy; arena-owned result must not be
// deleted by the caller.
inline ::tensorflow::CoordinatedTask* WaitForAllTasksRequest::unsafe_arena_release_source_task() {
  // @@protoc_insertion_point(field_release:tensorflow.WaitForAllTasksRequest.source_task)

  ::tensorflow::CoordinatedTask* temp = _impl_.source_task_;
  _impl_.source_task_ = nullptr;
  return temp;
}
// Lazily creates the submessage (on this message's arena when present).
inline ::tensorflow::CoordinatedTask* WaitForAllTasksRequest::_internal_mutable_source_task() {

  if (_impl_.source_task_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::CoordinatedTask>(GetArenaForAllocation());
    _impl_.source_task_ = p;
  }
  return _impl_.source_task_;
}
inline ::tensorflow::CoordinatedTask* WaitForAllTasksRequest::mutable_source_task() {
  ::tensorflow::CoordinatedTask* _msg = _internal_mutable_source_task();
  // @@protoc_insertion_point(field_mutable:tensorflow.WaitForAllTasksRequest.source_task)
  return _msg;
}
// Takes ownership; copies onto this message's arena when the arenas differ.
inline void WaitForAllTasksRequest::set_allocated_source_task(::tensorflow::CoordinatedTask* source_task) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete _impl_.source_task_;
  }
  if (source_task) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(source_task);
    if (message_arena != submessage_arena) {
      source_task = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, source_task, submessage_arena);
    }

  } else {

  }
  _impl_.source_task_ = source_task;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.WaitForAllTasksRequest.source_task)
}
6042 
6043 // -------------------------------------------------------------------
6044 
6045 // WaitForAllTasksResponse
6046 
6047 // fixed64 leader_incarnation = 1;
// Accessors for the `fixed64 leader_incarnation` scalar of
// WaitForAllTasksResponse. Plain value semantics.
inline void WaitForAllTasksResponse::clear_leader_incarnation() {
  _impl_.leader_incarnation_ = ::uint64_t{0u};  // proto3 default
}
inline ::uint64_t WaitForAllTasksResponse::_internal_leader_incarnation() const {
  return _impl_.leader_incarnation_;
}
inline ::uint64_t WaitForAllTasksResponse::leader_incarnation() const {
  // @@protoc_insertion_point(field_get:tensorflow.WaitForAllTasksResponse.leader_incarnation)
  return _internal_leader_incarnation();
}
inline void WaitForAllTasksResponse::_internal_set_leader_incarnation(::uint64_t value) {

  _impl_.leader_incarnation_ = value;
}
inline void WaitForAllTasksResponse::set_leader_incarnation(::uint64_t value) {
  _internal_set_leader_incarnation(value);
  // @@protoc_insertion_point(field_set:tensorflow.WaitForAllTasksResponse.leader_incarnation)
}
6066 
6067 // .tensorflow.CoordinationServiceDeviceInfo cluster_device_info = 3;
// Accessors for the `.tensorflow.CoordinationServiceDeviceInfo
// cluster_device_info` submessage of WaitForAllTasksResponse (generated code;
// standard protobuf arena contract).
// True iff the submessage pointer is set; always false on the default instance.
inline bool WaitForAllTasksResponse::_internal_has_cluster_device_info() const {
  return this != internal_default_instance() && _impl_.cluster_device_info_ != nullptr;
}
inline bool WaitForAllTasksResponse::has_cluster_device_info() const {
  return _internal_has_cluster_device_info();
}
// Deletes the submessage only when heap-owned; arena memory is reclaimed with
// the arena.
inline void WaitForAllTasksResponse::clear_cluster_device_info() {
  if (GetArenaForAllocation() == nullptr && _impl_.cluster_device_info_ != nullptr) {
    delete _impl_.cluster_device_info_;
  }
  _impl_.cluster_device_info_ = nullptr;
}
// Returns the stored submessage, or the immutable default instance when unset.
inline const ::tensorflow::CoordinationServiceDeviceInfo& WaitForAllTasksResponse::_internal_cluster_device_info() const {
  const ::tensorflow::CoordinationServiceDeviceInfo* p = _impl_.cluster_device_info_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::CoordinationServiceDeviceInfo&>(
      ::tensorflow::_CoordinationServiceDeviceInfo_default_instance_);
}
inline const ::tensorflow::CoordinationServiceDeviceInfo& WaitForAllTasksResponse::cluster_device_info() const {
  // @@protoc_insertion_point(field_get:tensorflow.WaitForAllTasksResponse.cluster_device_info)
  return _internal_cluster_device_info();
}
// "Unsafe" setter: stores the pointer verbatim; caller must ensure it lives on
// this message's arena (or heap when arena-less).
inline void WaitForAllTasksResponse::unsafe_arena_set_allocated_cluster_device_info(
    ::tensorflow::CoordinationServiceDeviceInfo* cluster_device_info) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.cluster_device_info_);
  }
  _impl_.cluster_device_info_ = cluster_device_info;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.WaitForAllTasksResponse.cluster_device_info)
}
// Transfers ownership to the caller; on an arena a heap copy is returned so
// the result is always deletable.
inline ::tensorflow::CoordinationServiceDeviceInfo* WaitForAllTasksResponse::release_cluster_device_info() {

  ::tensorflow::CoordinationServiceDeviceInfo* temp = _impl_.cluster_device_info_;
  _impl_.cluster_device_info_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old =  reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else  // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
// "Unsafe" release: raw pointer, no copy; arena-owned result must not be
// deleted by the caller.
inline ::tensorflow::CoordinationServiceDeviceInfo* WaitForAllTasksResponse::unsafe_arena_release_cluster_device_info() {
  // @@protoc_insertion_point(field_release:tensorflow.WaitForAllTasksResponse.cluster_device_info)

  ::tensorflow::CoordinationServiceDeviceInfo* temp = _impl_.cluster_device_info_;
  _impl_.cluster_device_info_ = nullptr;
  return temp;
}
// Lazily creates the submessage (on this message's arena when present).
inline ::tensorflow::CoordinationServiceDeviceInfo* WaitForAllTasksResponse::_internal_mutable_cluster_device_info() {

  if (_impl_.cluster_device_info_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::CoordinationServiceDeviceInfo>(GetArenaForAllocation());
    _impl_.cluster_device_info_ = p;
  }
  return _impl_.cluster_device_info_;
}
inline ::tensorflow::CoordinationServiceDeviceInfo* WaitForAllTasksResponse::mutable_cluster_device_info() {
  ::tensorflow::CoordinationServiceDeviceInfo* _msg = _internal_mutable_cluster_device_info();
  // @@protoc_insertion_point(field_mutable:tensorflow.WaitForAllTasksResponse.cluster_device_info)
  return _msg;
}
// Takes ownership; copies onto this message's arena when the arenas differ.
inline void WaitForAllTasksResponse::set_allocated_cluster_device_info(::tensorflow::CoordinationServiceDeviceInfo* cluster_device_info) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete _impl_.cluster_device_info_;
  }
  if (cluster_device_info) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(cluster_device_info);
    if (message_arena != submessage_arena) {
      cluster_device_info = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, cluster_device_info, submessage_arena);
    }

  } else {

  }
  _impl_.cluster_device_info_ = cluster_device_info;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.WaitForAllTasksResponse.cluster_device_info)
}
6151 
6152 // -------------------------------------------------------------------
6153 
6154 // ShutdownTaskRequest
6155 
6156 // .tensorflow.CoordinatedTask source_task = 1;
// Accessors for the `.tensorflow.CoordinatedTask source_task` submessage of
// ShutdownTaskRequest (generated code; standard protobuf arena contract).
// True iff the submessage pointer is set; always false on the default instance.
inline bool ShutdownTaskRequest::_internal_has_source_task() const {
  return this != internal_default_instance() && _impl_.source_task_ != nullptr;
}
inline bool ShutdownTaskRequest::has_source_task() const {
  return _internal_has_source_task();
}
// Deletes the submessage only when heap-owned; arena memory is reclaimed with
// the arena.
inline void ShutdownTaskRequest::clear_source_task() {
  if (GetArenaForAllocation() == nullptr && _impl_.source_task_ != nullptr) {
    delete _impl_.source_task_;
  }
  _impl_.source_task_ = nullptr;
}
// Returns the stored submessage, or the immutable default instance when unset.
inline const ::tensorflow::CoordinatedTask& ShutdownTaskRequest::_internal_source_task() const {
  const ::tensorflow::CoordinatedTask* p = _impl_.source_task_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::CoordinatedTask&>(
      ::tensorflow::_CoordinatedTask_default_instance_);
}
inline const ::tensorflow::CoordinatedTask& ShutdownTaskRequest::source_task() const {
  // @@protoc_insertion_point(field_get:tensorflow.ShutdownTaskRequest.source_task)
  return _internal_source_task();
}
// "Unsafe" setter: stores the pointer verbatim; caller must ensure it lives on
// this message's arena (or heap when arena-less).
inline void ShutdownTaskRequest::unsafe_arena_set_allocated_source_task(
    ::tensorflow::CoordinatedTask* source_task) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.source_task_);
  }
  _impl_.source_task_ = source_task;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.ShutdownTaskRequest.source_task)
}
// Transfers ownership to the caller; on an arena a heap copy is returned so
// the result is always deletable.
inline ::tensorflow::CoordinatedTask* ShutdownTaskRequest::release_source_task() {

  ::tensorflow::CoordinatedTask* temp = _impl_.source_task_;
  _impl_.source_task_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old =  reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else  // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
// "Unsafe" release: raw pointer, no copy; arena-owned result must not be
// deleted by the caller.
inline ::tensorflow::CoordinatedTask* ShutdownTaskRequest::unsafe_arena_release_source_task() {
  // @@protoc_insertion_point(field_release:tensorflow.ShutdownTaskRequest.source_task)

  ::tensorflow::CoordinatedTask* temp = _impl_.source_task_;
  _impl_.source_task_ = nullptr;
  return temp;
}
// Lazily creates the submessage (on this message's arena when present).
inline ::tensorflow::CoordinatedTask* ShutdownTaskRequest::_internal_mutable_source_task() {

  if (_impl_.source_task_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::CoordinatedTask>(GetArenaForAllocation());
    _impl_.source_task_ = p;
  }
  return _impl_.source_task_;
}
inline ::tensorflow::CoordinatedTask* ShutdownTaskRequest::mutable_source_task() {
  ::tensorflow::CoordinatedTask* _msg = _internal_mutable_source_task();
  // @@protoc_insertion_point(field_mutable:tensorflow.ShutdownTaskRequest.source_task)
  return _msg;
}
// Takes ownership; copies onto this message's arena when the arenas differ.
inline void ShutdownTaskRequest::set_allocated_source_task(::tensorflow::CoordinatedTask* source_task) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete _impl_.source_task_;
  }
  if (source_task) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(source_task);
    if (message_arena != submessage_arena) {
      source_task = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, source_task, submessage_arena);
    }

  } else {

  }
  _impl_.source_task_ = source_task;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.ShutdownTaskRequest.source_task)
}
6240 
6241 // -------------------------------------------------------------------
6242 
6243 // ShutdownTaskResponse
6244 
6245 // -------------------------------------------------------------------
6246 
6247 // ResetTaskRequest
6248 
6249 // .tensorflow.CoordinatedTask source_task = 1;
// Accessors for the `.tensorflow.CoordinatedTask source_task` submessage of
// ResetTaskRequest (generated code; standard protobuf arena contract).
// True iff the submessage pointer is set; always false on the default instance.
inline bool ResetTaskRequest::_internal_has_source_task() const {
  return this != internal_default_instance() && _impl_.source_task_ != nullptr;
}
inline bool ResetTaskRequest::has_source_task() const {
  return _internal_has_source_task();
}
// Deletes the submessage only when heap-owned; arena memory is reclaimed with
// the arena.
inline void ResetTaskRequest::clear_source_task() {
  if (GetArenaForAllocation() == nullptr && _impl_.source_task_ != nullptr) {
    delete _impl_.source_task_;
  }
  _impl_.source_task_ = nullptr;
}
// Returns the stored submessage, or the immutable default instance when unset.
inline const ::tensorflow::CoordinatedTask& ResetTaskRequest::_internal_source_task() const {
  const ::tensorflow::CoordinatedTask* p = _impl_.source_task_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::CoordinatedTask&>(
      ::tensorflow::_CoordinatedTask_default_instance_);
}
inline const ::tensorflow::CoordinatedTask& ResetTaskRequest::source_task() const {
  // @@protoc_insertion_point(field_get:tensorflow.ResetTaskRequest.source_task)
  return _internal_source_task();
}
// "Unsafe" setter: stores the pointer verbatim; caller must ensure it lives on
// this message's arena (or heap when arena-less).
inline void ResetTaskRequest::unsafe_arena_set_allocated_source_task(
    ::tensorflow::CoordinatedTask* source_task) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.source_task_);
  }
  _impl_.source_task_ = source_task;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.ResetTaskRequest.source_task)
}
// Transfers ownership to the caller; on an arena a heap copy is returned so
// the result is always deletable.
inline ::tensorflow::CoordinatedTask* ResetTaskRequest::release_source_task() {

  ::tensorflow::CoordinatedTask* temp = _impl_.source_task_;
  _impl_.source_task_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old =  reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else  // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
// "Unsafe" release: raw pointer, no copy; arena-owned result must not be
// deleted by the caller.
inline ::tensorflow::CoordinatedTask* ResetTaskRequest::unsafe_arena_release_source_task() {
  // @@protoc_insertion_point(field_release:tensorflow.ResetTaskRequest.source_task)

  ::tensorflow::CoordinatedTask* temp = _impl_.source_task_;
  _impl_.source_task_ = nullptr;
  return temp;
}
// Lazily creates the submessage (on this message's arena when present).
inline ::tensorflow::CoordinatedTask* ResetTaskRequest::_internal_mutable_source_task() {

  if (_impl_.source_task_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::CoordinatedTask>(GetArenaForAllocation());
    _impl_.source_task_ = p;
  }
  return _impl_.source_task_;
}
inline ::tensorflow::CoordinatedTask* ResetTaskRequest::mutable_source_task() {
  ::tensorflow::CoordinatedTask* _msg = _internal_mutable_source_task();
  // @@protoc_insertion_point(field_mutable:tensorflow.ResetTaskRequest.source_task)
  return _msg;
}
// Takes ownership; copies onto this message's arena when the arenas differ.
inline void ResetTaskRequest::set_allocated_source_task(::tensorflow::CoordinatedTask* source_task) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete _impl_.source_task_;
  }
  if (source_task) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(source_task);
    if (message_arena != submessage_arena) {
      source_task = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, source_task, submessage_arena);
    }

  } else {

  }
  _impl_.source_task_ = source_task;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.ResetTaskRequest.source_task)
}
6333 
6334 // -------------------------------------------------------------------
6335 
6336 // ResetTaskResponse
6337 
6338 // -------------------------------------------------------------------
6339 
6340 // ReportErrorToTaskRequest
6341 
6342 // int32 error_code = 1;
// Accessors for the `int32 error_code` scalar of ReportErrorToTaskRequest.
// Plain value semantics. NOTE(review): the value presumably maps to a
// canonical error-space code — confirm against the .proto definition.
inline void ReportErrorToTaskRequest::clear_error_code() {
  _impl_.error_code_ = 0;  // proto3 default
}
inline ::int32_t ReportErrorToTaskRequest::_internal_error_code() const {
  return _impl_.error_code_;
}
inline ::int32_t ReportErrorToTaskRequest::error_code() const {
  // @@protoc_insertion_point(field_get:tensorflow.ReportErrorToTaskRequest.error_code)
  return _internal_error_code();
}
inline void ReportErrorToTaskRequest::_internal_set_error_code(::int32_t value) {

  _impl_.error_code_ = value;
}
inline void ReportErrorToTaskRequest::set_error_code(::int32_t value) {
  _internal_set_error_code(value);
  // @@protoc_insertion_point(field_set:tensorflow.ReportErrorToTaskRequest.error_code)
}
6361 
6362 // string error_message = 2;
// Accessors for the `string error_message` field of ReportErrorToTaskRequest.
// Backed by ArenaStringPtr: storage may live on this message's arena.
inline void ReportErrorToTaskRequest::clear_error_message() {
  _impl_.error_message_.ClearToEmpty();
}
inline const std::string& ReportErrorToTaskRequest::error_message() const {
  // @@protoc_insertion_point(field_get:tensorflow.ReportErrorToTaskRequest.error_message)
  return _internal_error_message();
}
// Perfect-forwarding setter: accepts std::string, const char*, string_view,
// or (const char*, size_t) argument forms.
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void ReportErrorToTaskRequest::set_error_message(ArgT0&& arg0, ArgT... args) {

 _impl_.error_message_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.ReportErrorToTaskRequest.error_message)
}
inline std::string* ReportErrorToTaskRequest::mutable_error_message() {
  std::string* _s = _internal_mutable_error_message();
  // @@protoc_insertion_point(field_mutable:tensorflow.ReportErrorToTaskRequest.error_message)
  return _s;
}
inline const std::string& ReportErrorToTaskRequest::_internal_error_message() const {
  return _impl_.error_message_.Get();
}
inline void ReportErrorToTaskRequest::_internal_set_error_message(const std::string& value) {

  _impl_.error_message_.Set(value, GetArenaForAllocation());
}
inline std::string* ReportErrorToTaskRequest::_internal_mutable_error_message() {

  return _impl_.error_message_.Mutable(GetArenaForAllocation());
}
// Transfers ownership of the string to the caller and resets the field.
inline std::string* ReportErrorToTaskRequest::release_error_message() {
  // @@protoc_insertion_point(field_release:tensorflow.ReportErrorToTaskRequest.error_message)
  return _impl_.error_message_.Release();
}
// Takes ownership of a heap-allocated string (nullptr resets to default).
inline void ReportErrorToTaskRequest::set_allocated_error_message(std::string* error_message) {
  _impl_.error_message_.SetAllocated(error_message, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.error_message_.IsDefault()) {
    _impl_.error_message_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.ReportErrorToTaskRequest.error_message)
}
6406 
6407 // .tensorflow.CoordinationServiceError error_payload = 5;
// Accessors for the `.tensorflow.CoordinationServiceError error_payload`
// submessage of ReportErrorToTaskRequest (generated code; standard protobuf
// arena contract). The remaining accessors for this field continue below.
// True iff the submessage pointer is set; always false on the default instance.
inline bool ReportErrorToTaskRequest::_internal_has_error_payload() const {
  return this != internal_default_instance() && _impl_.error_payload_ != nullptr;
}
inline bool ReportErrorToTaskRequest::has_error_payload() const {
  return _internal_has_error_payload();
}
// Deletes the submessage only when heap-owned; arena memory is reclaimed with
// the arena.
inline void ReportErrorToTaskRequest::clear_error_payload() {
  if (GetArenaForAllocation() == nullptr && _impl_.error_payload_ != nullptr) {
    delete _impl_.error_payload_;
  }
  _impl_.error_payload_ = nullptr;
}
// Returns the stored submessage, or the immutable default instance when unset.
inline const ::tensorflow::CoordinationServiceError& ReportErrorToTaskRequest::_internal_error_payload() const {
  const ::tensorflow::CoordinationServiceError* p = _impl_.error_payload_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::CoordinationServiceError&>(
      ::tensorflow::_CoordinationServiceError_default_instance_);
}
inline const ::tensorflow::CoordinationServiceError& ReportErrorToTaskRequest::error_payload() const {
  // @@protoc_insertion_point(field_get:tensorflow.ReportErrorToTaskRequest.error_payload)
  return _internal_error_payload();
}
// "Unsafe" setter: stores the pointer verbatim; caller must ensure it lives on
// this message's arena (or heap when arena-less).
inline void ReportErrorToTaskRequest::unsafe_arena_set_allocated_error_payload(
    ::tensorflow::CoordinationServiceError* error_payload) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.error_payload_);
  }
  _impl_.error_payload_ = error_payload;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.ReportErrorToTaskRequest.error_payload)
}
release_error_payload()6437 inline ::tensorflow::CoordinationServiceError* ReportErrorToTaskRequest::release_error_payload() {
6438 
6439   ::tensorflow::CoordinationServiceError* temp = _impl_.error_payload_;
6440   _impl_.error_payload_ = nullptr;
6441 #ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
6442   auto* old =  reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
6443   temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
6444   if (GetArenaForAllocation() == nullptr) { delete old; }
6445 #else  // PROTOBUF_FORCE_COPY_IN_RELEASE
6446   if (GetArenaForAllocation() != nullptr) {
6447     temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
6448   }
6449 #endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
6450   return temp;
6451 }
unsafe_arena_release_error_payload()6452 inline ::tensorflow::CoordinationServiceError* ReportErrorToTaskRequest::unsafe_arena_release_error_payload() {
6453   // @@protoc_insertion_point(field_release:tensorflow.ReportErrorToTaskRequest.error_payload)
6454 
6455   ::tensorflow::CoordinationServiceError* temp = _impl_.error_payload_;
6456   _impl_.error_payload_ = nullptr;
6457   return temp;
6458 }
_internal_mutable_error_payload()6459 inline ::tensorflow::CoordinationServiceError* ReportErrorToTaskRequest::_internal_mutable_error_payload() {
6460 
6461   if (_impl_.error_payload_ == nullptr) {
6462     auto* p = CreateMaybeMessage<::tensorflow::CoordinationServiceError>(GetArenaForAllocation());
6463     _impl_.error_payload_ = p;
6464   }
6465   return _impl_.error_payload_;
6466 }
mutable_error_payload()6467 inline ::tensorflow::CoordinationServiceError* ReportErrorToTaskRequest::mutable_error_payload() {
6468   ::tensorflow::CoordinationServiceError* _msg = _internal_mutable_error_payload();
6469   // @@protoc_insertion_point(field_mutable:tensorflow.ReportErrorToTaskRequest.error_payload)
6470   return _msg;
6471 }
set_allocated_error_payload(::tensorflow::CoordinationServiceError * error_payload)6472 inline void ReportErrorToTaskRequest::set_allocated_error_payload(::tensorflow::CoordinationServiceError* error_payload) {
6473   ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
6474   if (message_arena == nullptr) {
6475     delete _impl_.error_payload_;
6476   }
6477   if (error_payload) {
6478     ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
6479         ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(error_payload);
6480     if (message_arena != submessage_arena) {
6481       error_payload = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
6482           message_arena, error_payload, submessage_arena);
6483     }
6484 
6485   } else {
6486 
6487   }
6488   _impl_.error_payload_ = error_payload;
6489   // @@protoc_insertion_point(field_set_allocated:tensorflow.ReportErrorToTaskRequest.error_payload)
6490 }
6491 
6492 // -------------------------------------------------------------------
6493 
6494 // ReportErrorToTaskResponse
6495 
6496 // -------------------------------------------------------------------
6497 
6498 // ReportErrorToServiceRequest
6499 
// int32 error_code = 1;
// Resets error_code to its proto3 default (0).
inline void ReportErrorToServiceRequest::clear_error_code() {
  _impl_.error_code_ = 0;
}
inline ::int32_t ReportErrorToServiceRequest::_internal_error_code() const {
  return _impl_.error_code_;
}
inline ::int32_t ReportErrorToServiceRequest::error_code() const {
  // @@protoc_insertion_point(field_get:tensorflow.ReportErrorToServiceRequest.error_code)
  return _internal_error_code();
}
inline void ReportErrorToServiceRequest::_internal_set_error_code(::int32_t value) {

  _impl_.error_code_ = value;
}
inline void ReportErrorToServiceRequest::set_error_code(::int32_t value) {
  _internal_set_error_code(value);
  // @@protoc_insertion_point(field_set:tensorflow.ReportErrorToServiceRequest.error_code)
}
6519 
// string error_message = 2;
// Resets error_message to the empty string.
inline void ReportErrorToServiceRequest::clear_error_message() {
  _impl_.error_message_.ClearToEmpty();
}
inline const std::string& ReportErrorToServiceRequest::error_message() const {
  // @@protoc_insertion_point(field_get:tensorflow.ReportErrorToServiceRequest.error_message)
  return _internal_error_message();
}
// Perfect-forwarding setter: accepts std::string, const char*, string_view,
// etc.; storage goes on this message's arena when one is present.
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void ReportErrorToServiceRequest::set_error_message(ArgT0&& arg0, ArgT... args) {

 _impl_.error_message_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.ReportErrorToServiceRequest.error_message)
}
// Returns a mutable pointer to error_message for in-place modification.
inline std::string* ReportErrorToServiceRequest::mutable_error_message() {
  std::string* _s = _internal_mutable_error_message();
  // @@protoc_insertion_point(field_mutable:tensorflow.ReportErrorToServiceRequest.error_message)
  return _s;
}
inline const std::string& ReportErrorToServiceRequest::_internal_error_message() const {
  return _impl_.error_message_.Get();
}
inline void ReportErrorToServiceRequest::_internal_set_error_message(const std::string& value) {

  _impl_.error_message_.Set(value, GetArenaForAllocation());
}
inline std::string* ReportErrorToServiceRequest::_internal_mutable_error_message() {

  return _impl_.error_message_.Mutable(GetArenaForAllocation());
}
// Releases ownership of the string to the caller; the field is left unset.
inline std::string* ReportErrorToServiceRequest::release_error_message() {
  // @@protoc_insertion_point(field_release:tensorflow.ReportErrorToServiceRequest.error_message)
  return _impl_.error_message_.Release();
}
// Takes ownership of a heap-allocated `error_message`. Under
// PROTOBUF_FORCE_COPY_DEFAULT_STRING a default-valued field is re-set to ""
// so it never aliases the shared default string.
inline void ReportErrorToServiceRequest::set_allocated_error_message(std::string* error_message) {
  _impl_.error_message_.SetAllocated(error_message, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.error_message_.IsDefault()) {
    _impl_.error_message_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.ReportErrorToServiceRequest.error_message)
}
6564 
// .tensorflow.CoordinatedTask error_origin = 5;
// True iff the submessage pointer is set (and this is not the default instance).
inline bool ReportErrorToServiceRequest::_internal_has_error_origin() const {
  return this != internal_default_instance() && _impl_.error_origin_ != nullptr;
}
inline bool ReportErrorToServiceRequest::has_error_origin() const {
  return _internal_has_error_origin();
}
// Clears the field; deletes the submessage only when heap-allocated (no arena).
inline void ReportErrorToServiceRequest::clear_error_origin() {
  if (GetArenaForAllocation() == nullptr && _impl_.error_origin_ != nullptr) {
    delete _impl_.error_origin_;
  }
  _impl_.error_origin_ = nullptr;
}
// Returns the stored submessage, or the shared default instance when unset.
inline const ::tensorflow::CoordinatedTask& ReportErrorToServiceRequest::_internal_error_origin() const {
  const ::tensorflow::CoordinatedTask* p = _impl_.error_origin_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::CoordinatedTask&>(
      ::tensorflow::_CoordinatedTask_default_instance_);
}
inline const ::tensorflow::CoordinatedTask& ReportErrorToServiceRequest::error_origin() const {
  // @@protoc_insertion_point(field_get:tensorflow.ReportErrorToServiceRequest.error_origin)
  return _internal_error_origin();
}
// Installs `error_origin` without arena-ownership transfer; frees the old
// value only when this message is heap-allocated. Caller is responsible for
// matching lifetimes (hence "unsafe").
inline void ReportErrorToServiceRequest::unsafe_arena_set_allocated_error_origin(
    ::tensorflow::CoordinatedTask* error_origin) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.error_origin_);
  }
  _impl_.error_origin_ = error_origin;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.ReportErrorToServiceRequest.error_origin)
}
// Transfers ownership to the caller. When this message lives on an arena
// (or PROTOBUF_FORCE_COPY_IN_RELEASE is defined) a heap copy is returned so
// the caller always owns the result.
inline ::tensorflow::CoordinatedTask* ReportErrorToServiceRequest::release_error_origin() {

  ::tensorflow::CoordinatedTask* temp = _impl_.error_origin_;
  _impl_.error_origin_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old =  reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else  // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
// Like release_error_origin() but never copies; the returned pointer may
// still be arena-owned.
inline ::tensorflow::CoordinatedTask* ReportErrorToServiceRequest::unsafe_arena_release_error_origin() {
  // @@protoc_insertion_point(field_release:tensorflow.ReportErrorToServiceRequest.error_origin)

  ::tensorflow::CoordinatedTask* temp = _impl_.error_origin_;
  _impl_.error_origin_ = nullptr;
  return temp;
}
// Lazily creates the submessage (on this message's arena when present).
inline ::tensorflow::CoordinatedTask* ReportErrorToServiceRequest::_internal_mutable_error_origin() {

  if (_impl_.error_origin_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::CoordinatedTask>(GetArenaForAllocation());
    _impl_.error_origin_ = p;
  }
  return _impl_.error_origin_;
}
inline ::tensorflow::CoordinatedTask* ReportErrorToServiceRequest::mutable_error_origin() {
  ::tensorflow::CoordinatedTask* _msg = _internal_mutable_error_origin();
  // @@protoc_insertion_point(field_mutable:tensorflow.ReportErrorToServiceRequest.error_origin)
  return _msg;
}
// Takes ownership of `error_origin`; deletes the previous heap-owned value
// and copies the new one onto this message's arena when the arenas differ.
inline void ReportErrorToServiceRequest::set_allocated_error_origin(::tensorflow::CoordinatedTask* error_origin) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete _impl_.error_origin_;
  }
  if (error_origin) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(error_origin);
    if (message_arena != submessage_arena) {
      error_origin = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, error_origin, submessage_arena);
    }

  } else {

  }
  _impl_.error_origin_ = error_origin;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.ReportErrorToServiceRequest.error_origin)
}
6649 
6650 // -------------------------------------------------------------------
6651 
6652 // ReportErrorToServiceResponse
6653 
6654 // -------------------------------------------------------------------
6655 
6656 // KeyValueEntry
6657 
// string key = 1;
// Resets key to the empty string.
inline void KeyValueEntry::clear_key() {
  _impl_.key_.ClearToEmpty();
}
inline const std::string& KeyValueEntry::key() const {
  // @@protoc_insertion_point(field_get:tensorflow.KeyValueEntry.key)
  return _internal_key();
}
// Perfect-forwarding setter: accepts std::string, const char*, string_view,
// etc.; storage goes on this message's arena when one is present.
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void KeyValueEntry::set_key(ArgT0&& arg0, ArgT... args) {

 _impl_.key_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.KeyValueEntry.key)
}
// Returns a mutable pointer to key for in-place modification.
inline std::string* KeyValueEntry::mutable_key() {
  std::string* _s = _internal_mutable_key();
  // @@protoc_insertion_point(field_mutable:tensorflow.KeyValueEntry.key)
  return _s;
}
inline const std::string& KeyValueEntry::_internal_key() const {
  return _impl_.key_.Get();
}
inline void KeyValueEntry::_internal_set_key(const std::string& value) {

  _impl_.key_.Set(value, GetArenaForAllocation());
}
inline std::string* KeyValueEntry::_internal_mutable_key() {

  return _impl_.key_.Mutable(GetArenaForAllocation());
}
// Releases ownership of the string to the caller; the field is left unset.
inline std::string* KeyValueEntry::release_key() {
  // @@protoc_insertion_point(field_release:tensorflow.KeyValueEntry.key)
  return _impl_.key_.Release();
}
// Takes ownership of a heap-allocated `key`. Under
// PROTOBUF_FORCE_COPY_DEFAULT_STRING a default-valued field is re-set to ""
// so it never aliases the shared default string.
inline void KeyValueEntry::set_allocated_key(std::string* key) {
  _impl_.key_.SetAllocated(key, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.key_.IsDefault()) {
    _impl_.key_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.KeyValueEntry.key)
}
6702 
// bytes value = 2;
// Resets value to the empty byte string.
inline void KeyValueEntry::clear_value() {
  _impl_.value_.ClearToEmpty();
}
inline const std::string& KeyValueEntry::value() const {
  // @@protoc_insertion_point(field_get:tensorflow.KeyValueEntry.value)
  return _internal_value();
}
// Perfect-forwarding setter. Note: uses SetBytes (not Set) because this is a
// `bytes` field — no UTF-8 handling is applied.
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void KeyValueEntry::set_value(ArgT0&& arg0, ArgT... args) {

 _impl_.value_.SetBytes(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.KeyValueEntry.value)
}
// Returns a mutable pointer to value for in-place modification.
inline std::string* KeyValueEntry::mutable_value() {
  std::string* _s = _internal_mutable_value();
  // @@protoc_insertion_point(field_mutable:tensorflow.KeyValueEntry.value)
  return _s;
}
inline const std::string& KeyValueEntry::_internal_value() const {
  return _impl_.value_.Get();
}
inline void KeyValueEntry::_internal_set_value(const std::string& value) {

  _impl_.value_.Set(value, GetArenaForAllocation());
}
inline std::string* KeyValueEntry::_internal_mutable_value() {

  return _impl_.value_.Mutable(GetArenaForAllocation());
}
// Releases ownership of the string to the caller; the field is left unset.
inline std::string* KeyValueEntry::release_value() {
  // @@protoc_insertion_point(field_release:tensorflow.KeyValueEntry.value)
  return _impl_.value_.Release();
}
// Takes ownership of a heap-allocated `value`. Under
// PROTOBUF_FORCE_COPY_DEFAULT_STRING a default-valued field is re-set to ""
// so it never aliases the shared default string.
inline void KeyValueEntry::set_allocated_value(std::string* value) {
  _impl_.value_.SetAllocated(value, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.value_.IsDefault()) {
    _impl_.value_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.KeyValueEntry.value)
}
6747 
6748 // -------------------------------------------------------------------
6749 
6750 // InsertKeyValueRequest
6751 
// .tensorflow.KeyValueEntry kv = 1;
// True iff the submessage pointer is set (and this is not the default instance).
inline bool InsertKeyValueRequest::_internal_has_kv() const {
  return this != internal_default_instance() && _impl_.kv_ != nullptr;
}
inline bool InsertKeyValueRequest::has_kv() const {
  return _internal_has_kv();
}
// Clears the field; deletes the submessage only when heap-allocated (no arena).
inline void InsertKeyValueRequest::clear_kv() {
  if (GetArenaForAllocation() == nullptr && _impl_.kv_ != nullptr) {
    delete _impl_.kv_;
  }
  _impl_.kv_ = nullptr;
}
// Returns the stored submessage, or the shared default instance when unset.
inline const ::tensorflow::KeyValueEntry& InsertKeyValueRequest::_internal_kv() const {
  const ::tensorflow::KeyValueEntry* p = _impl_.kv_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::KeyValueEntry&>(
      ::tensorflow::_KeyValueEntry_default_instance_);
}
inline const ::tensorflow::KeyValueEntry& InsertKeyValueRequest::kv() const {
  // @@protoc_insertion_point(field_get:tensorflow.InsertKeyValueRequest.kv)
  return _internal_kv();
}
// Installs `kv` without arena-ownership transfer; frees the old value only
// when this message is heap-allocated. Caller must guarantee lifetimes match.
inline void InsertKeyValueRequest::unsafe_arena_set_allocated_kv(
    ::tensorflow::KeyValueEntry* kv) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.kv_);
  }
  _impl_.kv_ = kv;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.InsertKeyValueRequest.kv)
}
// Transfers ownership to the caller. When this message lives on an arena
// (or PROTOBUF_FORCE_COPY_IN_RELEASE is defined) a heap copy is returned so
// the caller always owns the result.
inline ::tensorflow::KeyValueEntry* InsertKeyValueRequest::release_kv() {

  ::tensorflow::KeyValueEntry* temp = _impl_.kv_;
  _impl_.kv_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old =  reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else  // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
// Like release_kv() but never copies; the returned pointer may still be
// arena-owned.
inline ::tensorflow::KeyValueEntry* InsertKeyValueRequest::unsafe_arena_release_kv() {
  // @@protoc_insertion_point(field_release:tensorflow.InsertKeyValueRequest.kv)

  ::tensorflow::KeyValueEntry* temp = _impl_.kv_;
  _impl_.kv_ = nullptr;
  return temp;
}
// Lazily creates the submessage (on this message's arena when present).
inline ::tensorflow::KeyValueEntry* InsertKeyValueRequest::_internal_mutable_kv() {

  if (_impl_.kv_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::KeyValueEntry>(GetArenaForAllocation());
    _impl_.kv_ = p;
  }
  return _impl_.kv_;
}
inline ::tensorflow::KeyValueEntry* InsertKeyValueRequest::mutable_kv() {
  ::tensorflow::KeyValueEntry* _msg = _internal_mutable_kv();
  // @@protoc_insertion_point(field_mutable:tensorflow.InsertKeyValueRequest.kv)
  return _msg;
}
// Takes ownership of `kv`; deletes the previous heap-owned value and copies
// the new one onto this message's arena when the arenas differ.
inline void InsertKeyValueRequest::set_allocated_kv(::tensorflow::KeyValueEntry* kv) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete _impl_.kv_;
  }
  if (kv) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(kv);
    if (message_arena != submessage_arena) {
      kv = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, kv, submessage_arena);
    }

  } else {

  }
  _impl_.kv_ = kv;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.InsertKeyValueRequest.kv)
}
6836 
6837 // -------------------------------------------------------------------
6838 
6839 // InsertKeyValueResponse
6840 
6841 // -------------------------------------------------------------------
6842 
6843 // GetKeyValueRequest
6844 
// string key = 1;
// Resets key to the empty string.
inline void GetKeyValueRequest::clear_key() {
  _impl_.key_.ClearToEmpty();
}
inline const std::string& GetKeyValueRequest::key() const {
  // @@protoc_insertion_point(field_get:tensorflow.GetKeyValueRequest.key)
  return _internal_key();
}
// Perfect-forwarding setter: accepts std::string, const char*, string_view,
// etc.; storage goes on this message's arena when one is present.
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void GetKeyValueRequest::set_key(ArgT0&& arg0, ArgT... args) {

 _impl_.key_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.GetKeyValueRequest.key)
}
// Returns a mutable pointer to key for in-place modification.
inline std::string* GetKeyValueRequest::mutable_key() {
  std::string* _s = _internal_mutable_key();
  // @@protoc_insertion_point(field_mutable:tensorflow.GetKeyValueRequest.key)
  return _s;
}
inline const std::string& GetKeyValueRequest::_internal_key() const {
  return _impl_.key_.Get();
}
inline void GetKeyValueRequest::_internal_set_key(const std::string& value) {

  _impl_.key_.Set(value, GetArenaForAllocation());
}
inline std::string* GetKeyValueRequest::_internal_mutable_key() {

  return _impl_.key_.Mutable(GetArenaForAllocation());
}
// Releases ownership of the string to the caller; the field is left unset.
inline std::string* GetKeyValueRequest::release_key() {
  // @@protoc_insertion_point(field_release:tensorflow.GetKeyValueRequest.key)
  return _impl_.key_.Release();
}
// Takes ownership of a heap-allocated `key`. Under
// PROTOBUF_FORCE_COPY_DEFAULT_STRING a default-valued field is re-set to ""
// so it never aliases the shared default string.
inline void GetKeyValueRequest::set_allocated_key(std::string* key) {
  _impl_.key_.SetAllocated(key, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.key_.IsDefault()) {
    _impl_.key_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.GetKeyValueRequest.key)
}
6889 
6890 // -------------------------------------------------------------------
6891 
6892 // GetKeyValueResponse
6893 
// .tensorflow.KeyValueEntry kv = 1;
// True iff the submessage pointer is set (and this is not the default instance).
inline bool GetKeyValueResponse::_internal_has_kv() const {
  return this != internal_default_instance() && _impl_.kv_ != nullptr;
}
inline bool GetKeyValueResponse::has_kv() const {
  return _internal_has_kv();
}
// Clears the field; deletes the submessage only when heap-allocated (no arena).
inline void GetKeyValueResponse::clear_kv() {
  if (GetArenaForAllocation() == nullptr && _impl_.kv_ != nullptr) {
    delete _impl_.kv_;
  }
  _impl_.kv_ = nullptr;
}
// Returns the stored submessage, or the shared default instance when unset.
inline const ::tensorflow::KeyValueEntry& GetKeyValueResponse::_internal_kv() const {
  const ::tensorflow::KeyValueEntry* p = _impl_.kv_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::KeyValueEntry&>(
      ::tensorflow::_KeyValueEntry_default_instance_);
}
inline const ::tensorflow::KeyValueEntry& GetKeyValueResponse::kv() const {
  // @@protoc_insertion_point(field_get:tensorflow.GetKeyValueResponse.kv)
  return _internal_kv();
}
// Installs `kv` without arena-ownership transfer; frees the old value only
// when this message is heap-allocated. Caller must guarantee lifetimes match.
inline void GetKeyValueResponse::unsafe_arena_set_allocated_kv(
    ::tensorflow::KeyValueEntry* kv) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.kv_);
  }
  _impl_.kv_ = kv;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.GetKeyValueResponse.kv)
}
// Transfers ownership to the caller. When this message lives on an arena
// (or PROTOBUF_FORCE_COPY_IN_RELEASE is defined) a heap copy is returned so
// the caller always owns the result.
inline ::tensorflow::KeyValueEntry* GetKeyValueResponse::release_kv() {

  ::tensorflow::KeyValueEntry* temp = _impl_.kv_;
  _impl_.kv_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old =  reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else  // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
// Like release_kv() but never copies; the returned pointer may still be
// arena-owned.
inline ::tensorflow::KeyValueEntry* GetKeyValueResponse::unsafe_arena_release_kv() {
  // @@protoc_insertion_point(field_release:tensorflow.GetKeyValueResponse.kv)

  ::tensorflow::KeyValueEntry* temp = _impl_.kv_;
  _impl_.kv_ = nullptr;
  return temp;
}
// Lazily creates the submessage (on this message's arena when present).
inline ::tensorflow::KeyValueEntry* GetKeyValueResponse::_internal_mutable_kv() {

  if (_impl_.kv_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::KeyValueEntry>(GetArenaForAllocation());
    _impl_.kv_ = p;
  }
  return _impl_.kv_;
}
inline ::tensorflow::KeyValueEntry* GetKeyValueResponse::mutable_kv() {
  ::tensorflow::KeyValueEntry* _msg = _internal_mutable_kv();
  // @@protoc_insertion_point(field_mutable:tensorflow.GetKeyValueResponse.kv)
  return _msg;
}
// Takes ownership of `kv`; deletes the previous heap-owned value and copies
// the new one onto this message's arena when the arenas differ.
inline void GetKeyValueResponse::set_allocated_kv(::tensorflow::KeyValueEntry* kv) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete _impl_.kv_;
  }
  if (kv) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(kv);
    if (message_arena != submessage_arena) {
      kv = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, kv, submessage_arena);
    }

  } else {

  }
  _impl_.kv_ = kv;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.GetKeyValueResponse.kv)
}
6978 
6979 // -------------------------------------------------------------------
6980 
6981 // TryGetKeyValueRequest
6982 
// string key = 1;
// Resets key to the empty string.
inline void TryGetKeyValueRequest::clear_key() {
  _impl_.key_.ClearToEmpty();
}
inline const std::string& TryGetKeyValueRequest::key() const {
  // @@protoc_insertion_point(field_get:tensorflow.TryGetKeyValueRequest.key)
  return _internal_key();
}
// Perfect-forwarding setter: accepts std::string, const char*, string_view,
// etc.; storage goes on this message's arena when one is present.
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void TryGetKeyValueRequest::set_key(ArgT0&& arg0, ArgT... args) {

 _impl_.key_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.TryGetKeyValueRequest.key)
}
// Returns a mutable pointer to key for in-place modification.
inline std::string* TryGetKeyValueRequest::mutable_key() {
  std::string* _s = _internal_mutable_key();
  // @@protoc_insertion_point(field_mutable:tensorflow.TryGetKeyValueRequest.key)
  return _s;
}
inline const std::string& TryGetKeyValueRequest::_internal_key() const {
  return _impl_.key_.Get();
}
inline void TryGetKeyValueRequest::_internal_set_key(const std::string& value) {

  _impl_.key_.Set(value, GetArenaForAllocation());
}
inline std::string* TryGetKeyValueRequest::_internal_mutable_key() {

  return _impl_.key_.Mutable(GetArenaForAllocation());
}
// Releases ownership of the string to the caller; the field is left unset.
inline std::string* TryGetKeyValueRequest::release_key() {
  // @@protoc_insertion_point(field_release:tensorflow.TryGetKeyValueRequest.key)
  return _impl_.key_.Release();
}
// Takes ownership of a heap-allocated `key`. Under
// PROTOBUF_FORCE_COPY_DEFAULT_STRING a default-valued field is re-set to ""
// so it never aliases the shared default string.
inline void TryGetKeyValueRequest::set_allocated_key(std::string* key) {
  _impl_.key_.SetAllocated(key, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.key_.IsDefault()) {
    _impl_.key_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.TryGetKeyValueRequest.key)
}
7027 
7028 // -------------------------------------------------------------------
7029 
7030 // TryGetKeyValueResponse
7031 
// .tensorflow.KeyValueEntry kv = 1;
// True iff the submessage pointer is set (and this is not the default instance).
inline bool TryGetKeyValueResponse::_internal_has_kv() const {
  return this != internal_default_instance() && _impl_.kv_ != nullptr;
}
inline bool TryGetKeyValueResponse::has_kv() const {
  return _internal_has_kv();
}
// Clears the field; deletes the submessage only when heap-allocated (no arena).
inline void TryGetKeyValueResponse::clear_kv() {
  if (GetArenaForAllocation() == nullptr && _impl_.kv_ != nullptr) {
    delete _impl_.kv_;
  }
  _impl_.kv_ = nullptr;
}
// Returns the stored submessage, or the shared default instance when unset.
inline const ::tensorflow::KeyValueEntry& TryGetKeyValueResponse::_internal_kv() const {
  const ::tensorflow::KeyValueEntry* p = _impl_.kv_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::KeyValueEntry&>(
      ::tensorflow::_KeyValueEntry_default_instance_);
}
inline const ::tensorflow::KeyValueEntry& TryGetKeyValueResponse::kv() const {
  // @@protoc_insertion_point(field_get:tensorflow.TryGetKeyValueResponse.kv)
  return _internal_kv();
}
// Installs `kv` without arena-ownership transfer; frees the old value only
// when this message is heap-allocated. Caller must guarantee lifetimes match.
inline void TryGetKeyValueResponse::unsafe_arena_set_allocated_kv(
    ::tensorflow::KeyValueEntry* kv) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.kv_);
  }
  _impl_.kv_ = kv;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.TryGetKeyValueResponse.kv)
}
// Transfers ownership to the caller. When this message lives on an arena
// (or PROTOBUF_FORCE_COPY_IN_RELEASE is defined) a heap copy is returned so
// the caller always owns the result.
inline ::tensorflow::KeyValueEntry* TryGetKeyValueResponse::release_kv() {

  ::tensorflow::KeyValueEntry* temp = _impl_.kv_;
  _impl_.kv_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old =  reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else  // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
// Like release_kv() but never copies; the returned pointer may still be
// arena-owned.
inline ::tensorflow::KeyValueEntry* TryGetKeyValueResponse::unsafe_arena_release_kv() {
  // @@protoc_insertion_point(field_release:tensorflow.TryGetKeyValueResponse.kv)

  ::tensorflow::KeyValueEntry* temp = _impl_.kv_;
  _impl_.kv_ = nullptr;
  return temp;
}
// Lazily creates the submessage (on this message's arena when present).
inline ::tensorflow::KeyValueEntry* TryGetKeyValueResponse::_internal_mutable_kv() {

  if (_impl_.kv_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::KeyValueEntry>(GetArenaForAllocation());
    _impl_.kv_ = p;
  }
  return _impl_.kv_;
}
inline ::tensorflow::KeyValueEntry* TryGetKeyValueResponse::mutable_kv() {
  ::tensorflow::KeyValueEntry* _msg = _internal_mutable_kv();
  // @@protoc_insertion_point(field_mutable:tensorflow.TryGetKeyValueResponse.kv)
  return _msg;
}
// Takes ownership of `kv`; deletes the previous heap-owned value and copies
// the new one onto this message's arena when the arenas differ.
inline void TryGetKeyValueResponse::set_allocated_kv(::tensorflow::KeyValueEntry* kv) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete _impl_.kv_;
  }
  if (kv) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(kv);
    if (message_arena != submessage_arena) {
      kv = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, kv, submessage_arena);
    }

  } else {

  }
  _impl_.kv_ = kv;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.TryGetKeyValueResponse.kv)
}
7116 
7117 // -------------------------------------------------------------------
7118 
7119 // GetKeyValueDirRequest
7120 
// string directory_key = 1;
//
// Inline accessors for the singular string field `directory_key`.
// Generated code: ArenaStringPtr handles arena-vs-heap storage internally.
inline void GetKeyValueDirRequest::clear_directory_key() {
  _impl_.directory_key_.ClearToEmpty();
}
inline const std::string& GetKeyValueDirRequest::directory_key() const {
  // @@protoc_insertion_point(field_get:tensorflow.GetKeyValueDirRequest.directory_key)
  return _internal_directory_key();
}
// Accepts whatever ArenaStringPtr::Set accepts: std::string (copy or move),
// const char*, or (const char*, size_t).
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void GetKeyValueDirRequest::set_directory_key(ArgT0&& arg0, ArgT... args) {

 _impl_.directory_key_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.GetKeyValueDirRequest.directory_key)
}
inline std::string* GetKeyValueDirRequest::mutable_directory_key() {
  std::string* _s = _internal_mutable_directory_key();
  // @@protoc_insertion_point(field_mutable:tensorflow.GetKeyValueDirRequest.directory_key)
  return _s;
}
inline const std::string& GetKeyValueDirRequest::_internal_directory_key() const {
  return _impl_.directory_key_.Get();
}
inline void GetKeyValueDirRequest::_internal_set_directory_key(const std::string& value) {

  _impl_.directory_key_.Set(value, GetArenaForAllocation());
}
inline std::string* GetKeyValueDirRequest::_internal_mutable_directory_key() {

  return _impl_.directory_key_.Mutable(GetArenaForAllocation());
}
// Transfers ownership of the stored string to the caller.
inline std::string* GetKeyValueDirRequest::release_directory_key() {
  // @@protoc_insertion_point(field_release:tensorflow.GetKeyValueDirRequest.directory_key)
  return _impl_.directory_key_.Release();
}
// Takes ownership of `directory_key`; passing nullptr resets to empty.
inline void GetKeyValueDirRequest::set_allocated_directory_key(std::string* directory_key) {
  _impl_.directory_key_.SetAllocated(directory_key, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // Under forced copying, never leave the field aliasing the global default.
  if (_impl_.directory_key_.IsDefault()) {
    _impl_.directory_key_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.GetKeyValueDirRequest.directory_key)
}
7165 
7166 // -------------------------------------------------------------------
7167 
7168 // GetKeyValueDirResponse
7169 
// string directory_key = 1;
//
// Inline accessors for the singular string field `directory_key`.
// Generated code: ArenaStringPtr handles arena-vs-heap storage internally.
inline void GetKeyValueDirResponse::clear_directory_key() {
  _impl_.directory_key_.ClearToEmpty();
}
inline const std::string& GetKeyValueDirResponse::directory_key() const {
  // @@protoc_insertion_point(field_get:tensorflow.GetKeyValueDirResponse.directory_key)
  return _internal_directory_key();
}
// Accepts whatever ArenaStringPtr::Set accepts: std::string (copy or move),
// const char*, or (const char*, size_t).
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void GetKeyValueDirResponse::set_directory_key(ArgT0&& arg0, ArgT... args) {

 _impl_.directory_key_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.GetKeyValueDirResponse.directory_key)
}
inline std::string* GetKeyValueDirResponse::mutable_directory_key() {
  std::string* _s = _internal_mutable_directory_key();
  // @@protoc_insertion_point(field_mutable:tensorflow.GetKeyValueDirResponse.directory_key)
  return _s;
}
inline const std::string& GetKeyValueDirResponse::_internal_directory_key() const {
  return _impl_.directory_key_.Get();
}
inline void GetKeyValueDirResponse::_internal_set_directory_key(const std::string& value) {

  _impl_.directory_key_.Set(value, GetArenaForAllocation());
}
inline std::string* GetKeyValueDirResponse::_internal_mutable_directory_key() {

  return _impl_.directory_key_.Mutable(GetArenaForAllocation());
}
// Transfers ownership of the stored string to the caller.
inline std::string* GetKeyValueDirResponse::release_directory_key() {
  // @@protoc_insertion_point(field_release:tensorflow.GetKeyValueDirResponse.directory_key)
  return _impl_.directory_key_.Release();
}
// Takes ownership of `directory_key`; passing nullptr resets to empty.
inline void GetKeyValueDirResponse::set_allocated_directory_key(std::string* directory_key) {
  _impl_.directory_key_.SetAllocated(directory_key, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // Under forced copying, never leave the field aliasing the global default.
  if (_impl_.directory_key_.IsDefault()) {
    _impl_.directory_key_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.GetKeyValueDirResponse.directory_key)
}
7214 
// repeated .tensorflow.KeyValueEntry kv = 2;
//
// Inline accessors for the repeated submessage field `kv`, backed by a
// RepeatedPtrField (generated code).
inline int GetKeyValueDirResponse::_internal_kv_size() const {
  return _impl_.kv_.size();
}
inline int GetKeyValueDirResponse::kv_size() const {
  return _internal_kv_size();
}
inline void GetKeyValueDirResponse::clear_kv() {
  _impl_.kv_.Clear();
}
// Mutable access to one element; `index` must be < kv_size().
inline ::tensorflow::KeyValueEntry* GetKeyValueDirResponse::mutable_kv(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.GetKeyValueDirResponse.kv)
  return _impl_.kv_.Mutable(index);
}
// Mutable access to the whole repeated field.
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::KeyValueEntry >*
GetKeyValueDirResponse::mutable_kv() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.GetKeyValueDirResponse.kv)
  return &_impl_.kv_;
}
inline const ::tensorflow::KeyValueEntry& GetKeyValueDirResponse::_internal_kv(int index) const {
  return _impl_.kv_.Get(index);
}
inline const ::tensorflow::KeyValueEntry& GetKeyValueDirResponse::kv(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.GetKeyValueDirResponse.kv)
  return _internal_kv(index);
}
inline ::tensorflow::KeyValueEntry* GetKeyValueDirResponse::_internal_add_kv() {
  return _impl_.kv_.Add();
}
// Appends a new default-constructed entry and returns it for filling in.
inline ::tensorflow::KeyValueEntry* GetKeyValueDirResponse::add_kv() {
  ::tensorflow::KeyValueEntry* _add = _internal_add_kv();
  // @@protoc_insertion_point(field_add:tensorflow.GetKeyValueDirResponse.kv)
  return _add;
}
// Read-only access to the whole repeated field.
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::KeyValueEntry >&
GetKeyValueDirResponse::kv() const {
  // @@protoc_insertion_point(field_list:tensorflow.GetKeyValueDirResponse.kv)
  return _impl_.kv_;
}
7254 
7255 // -------------------------------------------------------------------
7256 
7257 // DeleteKeyValueRequest
7258 
// string key = 1;
//
// Inline accessors for the singular string field `key`.
// Generated code: ArenaStringPtr handles arena-vs-heap storage internally.
inline void DeleteKeyValueRequest::clear_key() {
  _impl_.key_.ClearToEmpty();
}
inline const std::string& DeleteKeyValueRequest::key() const {
  // @@protoc_insertion_point(field_get:tensorflow.DeleteKeyValueRequest.key)
  return _internal_key();
}
// Accepts whatever ArenaStringPtr::Set accepts: std::string (copy or move),
// const char*, or (const char*, size_t).
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void DeleteKeyValueRequest::set_key(ArgT0&& arg0, ArgT... args) {

 _impl_.key_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.DeleteKeyValueRequest.key)
}
inline std::string* DeleteKeyValueRequest::mutable_key() {
  std::string* _s = _internal_mutable_key();
  // @@protoc_insertion_point(field_mutable:tensorflow.DeleteKeyValueRequest.key)
  return _s;
}
inline const std::string& DeleteKeyValueRequest::_internal_key() const {
  return _impl_.key_.Get();
}
inline void DeleteKeyValueRequest::_internal_set_key(const std::string& value) {

  _impl_.key_.Set(value, GetArenaForAllocation());
}
inline std::string* DeleteKeyValueRequest::_internal_mutable_key() {

  return _impl_.key_.Mutable(GetArenaForAllocation());
}
// Transfers ownership of the stored string to the caller.
inline std::string* DeleteKeyValueRequest::release_key() {
  // @@protoc_insertion_point(field_release:tensorflow.DeleteKeyValueRequest.key)
  return _impl_.key_.Release();
}
// Takes ownership of `key`; passing nullptr resets to empty.
inline void DeleteKeyValueRequest::set_allocated_key(std::string* key) {
  _impl_.key_.SetAllocated(key, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // Under forced copying, never leave the field aliasing the global default.
  if (_impl_.key_.IsDefault()) {
    _impl_.key_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.DeleteKeyValueRequest.key)
}
7303 
// bool is_directory = 2;
//
// Inline accessors for the scalar bool field `is_directory` (default false).
inline void DeleteKeyValueRequest::clear_is_directory() {
  _impl_.is_directory_ = false;
}
inline bool DeleteKeyValueRequest::_internal_is_directory() const {
  return _impl_.is_directory_;
}
inline bool DeleteKeyValueRequest::is_directory() const {
  // @@protoc_insertion_point(field_get:tensorflow.DeleteKeyValueRequest.is_directory)
  return _internal_is_directory();
}
inline void DeleteKeyValueRequest::_internal_set_is_directory(bool value) {

  _impl_.is_directory_ = value;
}
inline void DeleteKeyValueRequest::set_is_directory(bool value) {
  _internal_set_is_directory(value);
  // @@protoc_insertion_point(field_set:tensorflow.DeleteKeyValueRequest.is_directory)
}
7323 
7324 // -------------------------------------------------------------------
7325 
7326 // DeleteKeyValueResponse
7327 
7328 // -------------------------------------------------------------------
7329 
7330 // BarrierRequest
7331 
// string barrier_id = 1;
//
// Inline accessors for the singular string field `barrier_id`.
// Generated code: ArenaStringPtr handles arena-vs-heap storage internally.
inline void BarrierRequest::clear_barrier_id() {
  _impl_.barrier_id_.ClearToEmpty();
}
inline const std::string& BarrierRequest::barrier_id() const {
  // @@protoc_insertion_point(field_get:tensorflow.BarrierRequest.barrier_id)
  return _internal_barrier_id();
}
// Accepts whatever ArenaStringPtr::Set accepts: std::string (copy or move),
// const char*, or (const char*, size_t).
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void BarrierRequest::set_barrier_id(ArgT0&& arg0, ArgT... args) {

 _impl_.barrier_id_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.BarrierRequest.barrier_id)
}
inline std::string* BarrierRequest::mutable_barrier_id() {
  std::string* _s = _internal_mutable_barrier_id();
  // @@protoc_insertion_point(field_mutable:tensorflow.BarrierRequest.barrier_id)
  return _s;
}
inline const std::string& BarrierRequest::_internal_barrier_id() const {
  return _impl_.barrier_id_.Get();
}
inline void BarrierRequest::_internal_set_barrier_id(const std::string& value) {

  _impl_.barrier_id_.Set(value, GetArenaForAllocation());
}
inline std::string* BarrierRequest::_internal_mutable_barrier_id() {

  return _impl_.barrier_id_.Mutable(GetArenaForAllocation());
}
// Transfers ownership of the stored string to the caller.
inline std::string* BarrierRequest::release_barrier_id() {
  // @@protoc_insertion_point(field_release:tensorflow.BarrierRequest.barrier_id)
  return _impl_.barrier_id_.Release();
}
// Takes ownership of `barrier_id`; passing nullptr resets to empty.
inline void BarrierRequest::set_allocated_barrier_id(std::string* barrier_id) {
  _impl_.barrier_id_.SetAllocated(barrier_id, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // Under forced copying, never leave the field aliasing the global default.
  if (_impl_.barrier_id_.IsDefault()) {
    _impl_.barrier_id_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.BarrierRequest.barrier_id)
}
7376 
// int64 barrier_timeout_in_ms = 2;
//
// Inline accessors for the scalar int64 field `barrier_timeout_in_ms`
// (default 0).
inline void BarrierRequest::clear_barrier_timeout_in_ms() {
  _impl_.barrier_timeout_in_ms_ = ::int64_t{0};
}
inline ::int64_t BarrierRequest::_internal_barrier_timeout_in_ms() const {
  return _impl_.barrier_timeout_in_ms_;
}
inline ::int64_t BarrierRequest::barrier_timeout_in_ms() const {
  // @@protoc_insertion_point(field_get:tensorflow.BarrierRequest.barrier_timeout_in_ms)
  return _internal_barrier_timeout_in_ms();
}
inline void BarrierRequest::_internal_set_barrier_timeout_in_ms(::int64_t value) {

  _impl_.barrier_timeout_in_ms_ = value;
}
inline void BarrierRequest::set_barrier_timeout_in_ms(::int64_t value) {
  _internal_set_barrier_timeout_in_ms(value);
  // @@protoc_insertion_point(field_set:tensorflow.BarrierRequest.barrier_timeout_in_ms)
}
7396 
// repeated .tensorflow.CoordinatedTask tasks = 3;
//
// Inline accessors for the repeated submessage field `tasks`, backed by a
// RepeatedPtrField (generated code).
inline int BarrierRequest::_internal_tasks_size() const {
  return _impl_.tasks_.size();
}
inline int BarrierRequest::tasks_size() const {
  return _internal_tasks_size();
}
inline void BarrierRequest::clear_tasks() {
  _impl_.tasks_.Clear();
}
// Mutable access to one element; `index` must be < tasks_size().
inline ::tensorflow::CoordinatedTask* BarrierRequest::mutable_tasks(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.BarrierRequest.tasks)
  return _impl_.tasks_.Mutable(index);
}
// Mutable access to the whole repeated field.
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::CoordinatedTask >*
BarrierRequest::mutable_tasks() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.BarrierRequest.tasks)
  return &_impl_.tasks_;
}
inline const ::tensorflow::CoordinatedTask& BarrierRequest::_internal_tasks(int index) const {
  return _impl_.tasks_.Get(index);
}
inline const ::tensorflow::CoordinatedTask& BarrierRequest::tasks(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.BarrierRequest.tasks)
  return _internal_tasks(index);
}
inline ::tensorflow::CoordinatedTask* BarrierRequest::_internal_add_tasks() {
  return _impl_.tasks_.Add();
}
// Appends a new default-constructed task and returns it for filling in.
inline ::tensorflow::CoordinatedTask* BarrierRequest::add_tasks() {
  ::tensorflow::CoordinatedTask* _add = _internal_add_tasks();
  // @@protoc_insertion_point(field_add:tensorflow.BarrierRequest.tasks)
  return _add;
}
// Read-only access to the whole repeated field.
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::CoordinatedTask >&
BarrierRequest::tasks() const {
  // @@protoc_insertion_point(field_list:tensorflow.BarrierRequest.tasks)
  return _impl_.tasks_;
}
7436 
// .tensorflow.CoordinatedTask source_task = 4;
//
// Inline accessors for the singular submessage field `source_task`.
// NOTE: generated code — the arena-ownership logic below mirrors protoc's
// output exactly; do not hand-modify.
inline bool BarrierRequest::_internal_has_source_task() const {
  // The shared default instance must never report the field as present.
  return this != internal_default_instance() && _impl_.source_task_ != nullptr;
}
inline bool BarrierRequest::has_source_task() const {
  return _internal_has_source_task();
}
inline void BarrierRequest::clear_source_task() {
  // Only heap-allocated submessages are deleted; arena storage is reclaimed
  // when the arena itself is destroyed.
  if (GetArenaForAllocation() == nullptr && _impl_.source_task_ != nullptr) {
    delete _impl_.source_task_;
  }
  _impl_.source_task_ = nullptr;
}
inline const ::tensorflow::CoordinatedTask& BarrierRequest::_internal_source_task() const {
  // Falls back to the shared default instance when the field is unset.
  const ::tensorflow::CoordinatedTask* p = _impl_.source_task_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::CoordinatedTask&>(
      ::tensorflow::_CoordinatedTask_default_instance_);
}
inline const ::tensorflow::CoordinatedTask& BarrierRequest::source_task() const {
  // @@protoc_insertion_point(field_get:tensorflow.BarrierRequest.source_task)
  return _internal_source_task();
}
// "Unsafe" variant: caller guarantees `source_task` is compatible with this
// message's arena (no cross-arena copy is performed).
inline void BarrierRequest::unsafe_arena_set_allocated_source_task(
    ::tensorflow::CoordinatedTask* source_task) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.source_task_);
  }
  _impl_.source_task_ = source_task;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.BarrierRequest.source_task)
}
// Transfers ownership to the caller.  When this message lives on an arena, a
// heap-allocated duplicate is returned so the caller always owns the result.
inline ::tensorflow::CoordinatedTask* BarrierRequest::release_source_task() {

  ::tensorflow::CoordinatedTask* temp = _impl_.source_task_;
  _impl_.source_task_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old =  reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else  // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
// Release without the arena-safety copy: the returned pointer may still be
// arena-owned, in which case the caller must not delete it.
inline ::tensorflow::CoordinatedTask* BarrierRequest::unsafe_arena_release_source_task() {
  // @@protoc_insertion_point(field_release:tensorflow.BarrierRequest.source_task)

  ::tensorflow::CoordinatedTask* temp = _impl_.source_task_;
  _impl_.source_task_ = nullptr;
  return temp;
}
inline ::tensorflow::CoordinatedTask* BarrierRequest::_internal_mutable_source_task() {
  // Lazily creates the submessage on first mutable access, on this message's
  // arena when one exists.
  if (_impl_.source_task_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::CoordinatedTask>(GetArenaForAllocation());
    _impl_.source_task_ = p;
  }
  return _impl_.source_task_;
}
inline ::tensorflow::CoordinatedTask* BarrierRequest::mutable_source_task() {
  ::tensorflow::CoordinatedTask* _msg = _internal_mutable_source_task();
  // @@protoc_insertion_point(field_mutable:tensorflow.BarrierRequest.source_task)
  return _msg;
}
// Takes ownership of `source_task`.  If it is owned by a different arena it
// is copied so that message and submessage always share an arena.
inline void BarrierRequest::set_allocated_source_task(::tensorflow::CoordinatedTask* source_task) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete _impl_.source_task_;
  }
  if (source_task) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(source_task);
    if (message_arena != submessage_arena) {
      source_task = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, source_task, submessage_arena);
    }

  } else {

  }
  _impl_.source_task_ = source_task;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.BarrierRequest.source_task)
}
7521 
7522 // -------------------------------------------------------------------
7523 
7524 // BarrierResponse
7525 
7526 // -------------------------------------------------------------------
7527 
7528 // CancelBarrierRequest
7529 
// string barrier_id = 1;
//
// Inline accessors for the singular string field `barrier_id`.
// Generated code: ArenaStringPtr handles arena-vs-heap storage internally.
inline void CancelBarrierRequest::clear_barrier_id() {
  _impl_.barrier_id_.ClearToEmpty();
}
inline const std::string& CancelBarrierRequest::barrier_id() const {
  // @@protoc_insertion_point(field_get:tensorflow.CancelBarrierRequest.barrier_id)
  return _internal_barrier_id();
}
// Accepts whatever ArenaStringPtr::Set accepts: std::string (copy or move),
// const char*, or (const char*, size_t).
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void CancelBarrierRequest::set_barrier_id(ArgT0&& arg0, ArgT... args) {

 _impl_.barrier_id_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.CancelBarrierRequest.barrier_id)
}
inline std::string* CancelBarrierRequest::mutable_barrier_id() {
  std::string* _s = _internal_mutable_barrier_id();
  // @@protoc_insertion_point(field_mutable:tensorflow.CancelBarrierRequest.barrier_id)
  return _s;
}
inline const std::string& CancelBarrierRequest::_internal_barrier_id() const {
  return _impl_.barrier_id_.Get();
}
inline void CancelBarrierRequest::_internal_set_barrier_id(const std::string& value) {

  _impl_.barrier_id_.Set(value, GetArenaForAllocation());
}
inline std::string* CancelBarrierRequest::_internal_mutable_barrier_id() {

  return _impl_.barrier_id_.Mutable(GetArenaForAllocation());
}
// Transfers ownership of the stored string to the caller.
inline std::string* CancelBarrierRequest::release_barrier_id() {
  // @@protoc_insertion_point(field_release:tensorflow.CancelBarrierRequest.barrier_id)
  return _impl_.barrier_id_.Release();
}
// Takes ownership of `barrier_id`; passing nullptr resets to empty.
inline void CancelBarrierRequest::set_allocated_barrier_id(std::string* barrier_id) {
  _impl_.barrier_id_.SetAllocated(barrier_id, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // Under forced copying, never leave the field aliasing the global default.
  if (_impl_.barrier_id_.IsDefault()) {
    _impl_.barrier_id_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.CancelBarrierRequest.barrier_id)
}
7574 
// .tensorflow.CoordinatedTask source_task = 2;
//
// Inline accessors for the singular submessage field `source_task`.
// NOTE: generated code — the arena-ownership logic below mirrors protoc's
// output exactly; do not hand-modify.
inline bool CancelBarrierRequest::_internal_has_source_task() const {
  // The shared default instance must never report the field as present.
  return this != internal_default_instance() && _impl_.source_task_ != nullptr;
}
inline bool CancelBarrierRequest::has_source_task() const {
  return _internal_has_source_task();
}
inline void CancelBarrierRequest::clear_source_task() {
  // Only heap-allocated submessages are deleted; arena storage is reclaimed
  // when the arena itself is destroyed.
  if (GetArenaForAllocation() == nullptr && _impl_.source_task_ != nullptr) {
    delete _impl_.source_task_;
  }
  _impl_.source_task_ = nullptr;
}
inline const ::tensorflow::CoordinatedTask& CancelBarrierRequest::_internal_source_task() const {
  // Falls back to the shared default instance when the field is unset.
  const ::tensorflow::CoordinatedTask* p = _impl_.source_task_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::CoordinatedTask&>(
      ::tensorflow::_CoordinatedTask_default_instance_);
}
inline const ::tensorflow::CoordinatedTask& CancelBarrierRequest::source_task() const {
  // @@protoc_insertion_point(field_get:tensorflow.CancelBarrierRequest.source_task)
  return _internal_source_task();
}
// "Unsafe" variant: caller guarantees `source_task` is compatible with this
// message's arena (no cross-arena copy is performed).
inline void CancelBarrierRequest::unsafe_arena_set_allocated_source_task(
    ::tensorflow::CoordinatedTask* source_task) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.source_task_);
  }
  _impl_.source_task_ = source_task;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.CancelBarrierRequest.source_task)
}
// Transfers ownership to the caller.  When this message lives on an arena, a
// heap-allocated duplicate is returned so the caller always owns the result.
inline ::tensorflow::CoordinatedTask* CancelBarrierRequest::release_source_task() {

  ::tensorflow::CoordinatedTask* temp = _impl_.source_task_;
  _impl_.source_task_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old =  reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else  // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
// Release without the arena-safety copy: the returned pointer may still be
// arena-owned, in which case the caller must not delete it.
inline ::tensorflow::CoordinatedTask* CancelBarrierRequest::unsafe_arena_release_source_task() {
  // @@protoc_insertion_point(field_release:tensorflow.CancelBarrierRequest.source_task)

  ::tensorflow::CoordinatedTask* temp = _impl_.source_task_;
  _impl_.source_task_ = nullptr;
  return temp;
}
inline ::tensorflow::CoordinatedTask* CancelBarrierRequest::_internal_mutable_source_task() {
  // Lazily creates the submessage on first mutable access, on this message's
  // arena when one exists.
  if (_impl_.source_task_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::CoordinatedTask>(GetArenaForAllocation());
    _impl_.source_task_ = p;
  }
  return _impl_.source_task_;
}
inline ::tensorflow::CoordinatedTask* CancelBarrierRequest::mutable_source_task() {
  ::tensorflow::CoordinatedTask* _msg = _internal_mutable_source_task();
  // @@protoc_insertion_point(field_mutable:tensorflow.CancelBarrierRequest.source_task)
  return _msg;
}
// Takes ownership of `source_task`.  If it is owned by a different arena it
// is copied so that message and submessage always share an arena.
inline void CancelBarrierRequest::set_allocated_source_task(::tensorflow::CoordinatedTask* source_task) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete _impl_.source_task_;
  }
  if (source_task) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(source_task);
    if (message_arena != submessage_arena) {
      source_task = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, source_task, submessage_arena);
    }

  } else {

  }
  _impl_.source_task_ = source_task;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.CancelBarrierRequest.source_task)
}
7659 
7660 // -------------------------------------------------------------------
7661 
7662 // CancelBarrierResponse
7663 
7664 #ifdef __GNUC__
7665   #pragma GCC diagnostic pop
7666 #endif  // __GNUC__
7667 // -------------------------------------------------------------------
7668 
7669 // -------------------------------------------------------------------
7670 
7671 // -------------------------------------------------------------------
7672 
7673 // -------------------------------------------------------------------
7674 
7675 // -------------------------------------------------------------------
7676 
7677 // -------------------------------------------------------------------
7678 
7679 // -------------------------------------------------------------------
7680 
7681 // -------------------------------------------------------------------
7682 
7683 // -------------------------------------------------------------------
7684 
7685 // -------------------------------------------------------------------
7686 
7687 // -------------------------------------------------------------------
7688 
7689 // -------------------------------------------------------------------
7690 
7691 // -------------------------------------------------------------------
7692 
7693 // -------------------------------------------------------------------
7694 
7695 // -------------------------------------------------------------------
7696 
7697 // -------------------------------------------------------------------
7698 
7699 // -------------------------------------------------------------------
7700 
7701 // -------------------------------------------------------------------
7702 
7703 // -------------------------------------------------------------------
7704 
7705 // -------------------------------------------------------------------
7706 
7707 // -------------------------------------------------------------------
7708 
7709 // -------------------------------------------------------------------
7710 
7711 // -------------------------------------------------------------------
7712 
7713 // -------------------------------------------------------------------
7714 
7715 // -------------------------------------------------------------------
7716 
7717 // -------------------------------------------------------------------
7718 
7719 // -------------------------------------------------------------------
7720 
7721 // -------------------------------------------------------------------
7722 
7723 // -------------------------------------------------------------------
7724 
7725 // -------------------------------------------------------------------
7726 
7727 // -------------------------------------------------------------------
7728 
7729 // -------------------------------------------------------------------
7730 
7731 // -------------------------------------------------------------------
7732 
7733 
7734 // @@protoc_insertion_point(namespace_scope)
7735 
7736 }  // namespace tensorflow
7737 
7738 PROTOBUF_NAMESPACE_OPEN
7739 
7740 template <> struct is_proto_enum< ::tensorflow::CoordinatedTaskState> : ::std::true_type {};
7741 
7742 PROTOBUF_NAMESPACE_CLOSE
7743 
7744 // @@protoc_insertion_point(global_scope)
7745 
7746 #include <google/protobuf/port_undef.inc>
#endif  // GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fprotobuf_2fcoordination_5fservice_2eproto
7748