1 // Generated by the protocol buffer compiler. DO NOT EDIT!
2 // source: tensorflow/core/protobuf/eager_service.proto
3
4 #ifndef GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto
5 #define GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto
6
7 #include <cstdint>
8 #include <limits>
9 #include <string>
10
11 #include <google/protobuf/port_def.inc>
12 #if PROTOBUF_VERSION < 3021000
13 #error This file was generated by a newer version of protoc which is
14 #error incompatible with your Protocol Buffer headers. Please update
15 #error your headers.
16 #endif
17 #if 3021012 < PROTOBUF_MIN_PROTOC_VERSION
18 #error This file was generated by an older version of protoc which is
19 #error incompatible with your Protocol Buffer headers. Please
20 #error regenerate this file with a newer version of protoc.
21 #endif
22
23 #include <google/protobuf/port_undef.inc>
24 #include <google/protobuf/io/coded_stream.h>
25 #include <google/protobuf/arena.h>
26 #include <google/protobuf/arenastring.h>
27 #include <google/protobuf/generated_message_util.h>
28 #include <google/protobuf/metadata_lite.h>
29 #include <google/protobuf/message_lite.h>
30 #include <google/protobuf/repeated_field.h> // IWYU pragma: export
31 #include <google/protobuf/extension_set.h> // IWYU pragma: export
32 #include <google/protobuf/map.h> // IWYU pragma: export
33 #include <google/protobuf/map_entry_lite.h>
34 #include <google/protobuf/map_field_lite.h>
35 #include "tensorflow/core/framework/attr_value.pb.h"
36 #include "tensorflow/core/framework/device_attributes.pb.h"
37 #include "tensorflow/core/framework/function.pb.h"
38 #include "tensorflow/core/framework/tensor.pb.h"
39 #include "tensorflow/core/framework/tensor_shape.pb.h"
40 #include "tensorflow/core/framework/versions.pb.h"
41 #include "tensorflow/core/protobuf/remote_tensor_handle.pb.h"
42 #include "tensorflow/core/protobuf/tensorflow_server.pb.h"
43 // @@protoc_insertion_point(includes)
44 #include <google/protobuf/port_def.inc>
45 #define PROTOBUF_INTERNAL_EXPORT_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto
46 PROTOBUF_NAMESPACE_OPEN
47 namespace internal {
48 class AnyMetadata;
49 } // namespace internal
50 PROTOBUF_NAMESPACE_CLOSE
51
52 // Internal implementation detail -- do not use these members.
53 struct TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto {
// Per-message field-offset table for this .proto file (declaration only;
// the array is defined in the matching generated .pb.cc).
54 static const ::uint32_t offsets[];
55 };
56 namespace tensorflow {
57 namespace eager {
58 class CleanupFunctionOp;
59 struct CleanupFunctionOpDefaultTypeInternal;
60 extern CleanupFunctionOpDefaultTypeInternal _CleanupFunctionOp_default_instance_;
61 class CloseContextRequest;
62 struct CloseContextRequestDefaultTypeInternal;
63 extern CloseContextRequestDefaultTypeInternal _CloseContextRequest_default_instance_;
64 class CloseContextResponse;
65 struct CloseContextResponseDefaultTypeInternal;
66 extern CloseContextResponseDefaultTypeInternal _CloseContextResponse_default_instance_;
67 class CreateContextRequest;
68 struct CreateContextRequestDefaultTypeInternal;
69 extern CreateContextRequestDefaultTypeInternal _CreateContextRequest_default_instance_;
70 class CreateContextResponse;
71 struct CreateContextResponseDefaultTypeInternal;
72 extern CreateContextResponseDefaultTypeInternal _CreateContextResponse_default_instance_;
73 class EnqueueRequest;
74 struct EnqueueRequestDefaultTypeInternal;
75 extern EnqueueRequestDefaultTypeInternal _EnqueueRequest_default_instance_;
76 class EnqueueResponse;
77 struct EnqueueResponseDefaultTypeInternal;
78 extern EnqueueResponseDefaultTypeInternal _EnqueueResponse_default_instance_;
79 class KeepAliveRequest;
80 struct KeepAliveRequestDefaultTypeInternal;
81 extern KeepAliveRequestDefaultTypeInternal _KeepAliveRequest_default_instance_;
82 class KeepAliveResponse;
83 struct KeepAliveResponseDefaultTypeInternal;
84 extern KeepAliveResponseDefaultTypeInternal _KeepAliveResponse_default_instance_;
85 class Operation;
86 struct OperationDefaultTypeInternal;
87 extern OperationDefaultTypeInternal _Operation_default_instance_;
88 class Operation_AttrsEntry_DoNotUse;
89 struct Operation_AttrsEntry_DoNotUseDefaultTypeInternal;
90 extern Operation_AttrsEntry_DoNotUseDefaultTypeInternal _Operation_AttrsEntry_DoNotUse_default_instance_;
91 class Operation_Input;
92 struct Operation_InputDefaultTypeInternal;
93 extern Operation_InputDefaultTypeInternal _Operation_Input_default_instance_;
94 class QueueItem;
95 struct QueueItemDefaultTypeInternal;
96 extern QueueItemDefaultTypeInternal _QueueItem_default_instance_;
97 class QueueResponse;
98 struct QueueResponseDefaultTypeInternal;
99 extern QueueResponseDefaultTypeInternal _QueueResponse_default_instance_;
100 class RegisterFunctionOp;
101 struct RegisterFunctionOpDefaultTypeInternal;
102 extern RegisterFunctionOpDefaultTypeInternal _RegisterFunctionOp_default_instance_;
103 class RunComponentFunctionRequest;
104 struct RunComponentFunctionRequestDefaultTypeInternal;
105 extern RunComponentFunctionRequestDefaultTypeInternal _RunComponentFunctionRequest_default_instance_;
106 class RunComponentFunctionResponse;
107 struct RunComponentFunctionResponseDefaultTypeInternal;
108 extern RunComponentFunctionResponseDefaultTypeInternal _RunComponentFunctionResponse_default_instance_;
109 class SendPackedHandleOp;
110 struct SendPackedHandleOpDefaultTypeInternal;
111 extern SendPackedHandleOpDefaultTypeInternal _SendPackedHandleOp_default_instance_;
112 class SendPackedHandleOp_Handle;
113 struct SendPackedHandleOp_HandleDefaultTypeInternal;
114 extern SendPackedHandleOp_HandleDefaultTypeInternal _SendPackedHandleOp_Handle_default_instance_;
115 class SendPackedHandleOp_LocalTensorHandle;
116 struct SendPackedHandleOp_LocalTensorHandleDefaultTypeInternal;
117 extern SendPackedHandleOp_LocalTensorHandleDefaultTypeInternal _SendPackedHandleOp_LocalTensorHandle_default_instance_;
118 class SendTensorOp;
119 struct SendTensorOpDefaultTypeInternal;
120 extern SendTensorOpDefaultTypeInternal _SendTensorOp_default_instance_;
121 class SyncRemoteExecutorForStream;
122 struct SyncRemoteExecutorForStreamDefaultTypeInternal;
123 extern SyncRemoteExecutorForStreamDefaultTypeInternal _SyncRemoteExecutorForStream_default_instance_;
124 class UpdateContextRequest;
125 struct UpdateContextRequestDefaultTypeInternal;
126 extern UpdateContextRequestDefaultTypeInternal _UpdateContextRequest_default_instance_;
127 class UpdateContextResponse;
128 struct UpdateContextResponseDefaultTypeInternal;
129 extern UpdateContextResponseDefaultTypeInternal _UpdateContextResponse_default_instance_;
130 class WaitQueueDoneRequest;
131 struct WaitQueueDoneRequestDefaultTypeInternal;
132 extern WaitQueueDoneRequestDefaultTypeInternal _WaitQueueDoneRequest_default_instance_;
133 class WaitQueueDoneResponse;
134 struct WaitQueueDoneResponseDefaultTypeInternal;
135 extern WaitQueueDoneResponseDefaultTypeInternal _WaitQueueDoneResponse_default_instance_;
136 } // namespace eager
137 } // namespace tensorflow
138 PROTOBUF_NAMESPACE_OPEN
139 template<> ::tensorflow::eager::CleanupFunctionOp* Arena::CreateMaybeMessage<::tensorflow::eager::CleanupFunctionOp>(Arena*);
140 template<> ::tensorflow::eager::CloseContextRequest* Arena::CreateMaybeMessage<::tensorflow::eager::CloseContextRequest>(Arena*);
141 template<> ::tensorflow::eager::CloseContextResponse* Arena::CreateMaybeMessage<::tensorflow::eager::CloseContextResponse>(Arena*);
142 template<> ::tensorflow::eager::CreateContextRequest* Arena::CreateMaybeMessage<::tensorflow::eager::CreateContextRequest>(Arena*);
143 template<> ::tensorflow::eager::CreateContextResponse* Arena::CreateMaybeMessage<::tensorflow::eager::CreateContextResponse>(Arena*);
144 template<> ::tensorflow::eager::EnqueueRequest* Arena::CreateMaybeMessage<::tensorflow::eager::EnqueueRequest>(Arena*);
145 template<> ::tensorflow::eager::EnqueueResponse* Arena::CreateMaybeMessage<::tensorflow::eager::EnqueueResponse>(Arena*);
146 template<> ::tensorflow::eager::KeepAliveRequest* Arena::CreateMaybeMessage<::tensorflow::eager::KeepAliveRequest>(Arena*);
147 template<> ::tensorflow::eager::KeepAliveResponse* Arena::CreateMaybeMessage<::tensorflow::eager::KeepAliveResponse>(Arena*);
148 template<> ::tensorflow::eager::Operation* Arena::CreateMaybeMessage<::tensorflow::eager::Operation>(Arena*);
149 template<> ::tensorflow::eager::Operation_AttrsEntry_DoNotUse* Arena::CreateMaybeMessage<::tensorflow::eager::Operation_AttrsEntry_DoNotUse>(Arena*);
150 template<> ::tensorflow::eager::Operation_Input* Arena::CreateMaybeMessage<::tensorflow::eager::Operation_Input>(Arena*);
151 template<> ::tensorflow::eager::QueueItem* Arena::CreateMaybeMessage<::tensorflow::eager::QueueItem>(Arena*);
152 template<> ::tensorflow::eager::QueueResponse* Arena::CreateMaybeMessage<::tensorflow::eager::QueueResponse>(Arena*);
153 template<> ::tensorflow::eager::RegisterFunctionOp* Arena::CreateMaybeMessage<::tensorflow::eager::RegisterFunctionOp>(Arena*);
154 template<> ::tensorflow::eager::RunComponentFunctionRequest* Arena::CreateMaybeMessage<::tensorflow::eager::RunComponentFunctionRequest>(Arena*);
155 template<> ::tensorflow::eager::RunComponentFunctionResponse* Arena::CreateMaybeMessage<::tensorflow::eager::RunComponentFunctionResponse>(Arena*);
156 template<> ::tensorflow::eager::SendPackedHandleOp* Arena::CreateMaybeMessage<::tensorflow::eager::SendPackedHandleOp>(Arena*);
157 template<> ::tensorflow::eager::SendPackedHandleOp_Handle* Arena::CreateMaybeMessage<::tensorflow::eager::SendPackedHandleOp_Handle>(Arena*);
158 template<> ::tensorflow::eager::SendPackedHandleOp_LocalTensorHandle* Arena::CreateMaybeMessage<::tensorflow::eager::SendPackedHandleOp_LocalTensorHandle>(Arena*);
159 template<> ::tensorflow::eager::SendTensorOp* Arena::CreateMaybeMessage<::tensorflow::eager::SendTensorOp>(Arena*);
160 template<> ::tensorflow::eager::SyncRemoteExecutorForStream* Arena::CreateMaybeMessage<::tensorflow::eager::SyncRemoteExecutorForStream>(Arena*);
161 template<> ::tensorflow::eager::UpdateContextRequest* Arena::CreateMaybeMessage<::tensorflow::eager::UpdateContextRequest>(Arena*);
162 template<> ::tensorflow::eager::UpdateContextResponse* Arena::CreateMaybeMessage<::tensorflow::eager::UpdateContextResponse>(Arena*);
163 template<> ::tensorflow::eager::WaitQueueDoneRequest* Arena::CreateMaybeMessage<::tensorflow::eager::WaitQueueDoneRequest>(Arena*);
164 template<> ::tensorflow::eager::WaitQueueDoneResponse* Arena::CreateMaybeMessage<::tensorflow::eager::WaitQueueDoneResponse>(Arena*);
165 PROTOBUF_NAMESPACE_CLOSE
166 namespace tensorflow {
167 namespace eager {
168
169 // ===================================================================
170
// Generated lite-runtime (MessageLite) class for the proto3 message
// tensorflow.eager.Operation.Input. It is a `oneof item` holding either a
// RemoteTensorHandle (field 1) or an inline TensorProto (field 2).
// NOTE(review): protoc-generated code -- do not hand-edit; regenerate from
// tensorflow/core/protobuf/eager_service.proto instead.
171 class Operation_Input final :
172 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.Operation.Input) */ {
173 public:
174 Operation_Input()174 inline Operation_Input() : Operation_Input(nullptr) {}
175 ~Operation_Input() override;
176 explicit PROTOBUF_CONSTEXPR Operation_Input(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
177
178 Operation_Input(const Operation_Input& from);
179 Operation_Input(Operation_Input && from)179 Operation_Input(Operation_Input&& from) noexcept
180 : Operation_Input() {
181 *this = ::std::move(from);
182 }
183
184 inline Operation_Input& operator=(const Operation_Input& from) {
185 if (this == &from) return *this;
186 CopyFrom(from);
187 return *this;
188 }
// Move-assign: swap in place when both objects live on the same arena,
// otherwise fall back to a deep copy (arena memory cannot be transferred).
189 inline Operation_Input& operator=(Operation_Input&& from) noexcept {
190 if (this == &from) return *this;
191 if (GetOwningArena() == from.GetOwningArena()
192 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
193 && GetOwningArena() != nullptr
194 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
195 ) {
196 InternalSwap(&from);
197 } else {
198 CopyFrom(from);
199 }
200 return *this;
201 }
202
203 default_instance()203 static const Operation_Input& default_instance() {
204 return *internal_default_instance();
205 }
// Discriminator for the `item` oneof; enumerator values equal the .proto
// field numbers, with 0 meaning no member is set.
206 enum ItemCase {
207 kRemoteHandle = 1,
208 kTensor = 2,
209 ITEM_NOT_SET = 0,
210 };
211
212 internal_default_instance()212 static inline const Operation_Input* internal_default_instance() {
213 return reinterpret_cast<const Operation_Input*>(
214 &_Operation_Input_default_instance_);
215 }
216 static constexpr int kIndexInFileMessages =
217 0;
218
219 swap(Operation_Input & a,Operation_Input & b)219 friend void swap(Operation_Input& a, Operation_Input& b) {
220 a.Swap(&b);
221 }
// Swap contents; only same-arena objects may swap internals, otherwise a
// generic (copying) swap is performed.
222 Swap(Operation_Input * other)222 inline void Swap(Operation_Input* other) {
223 if (other == this) return;
224 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
225 if (GetOwningArena() != nullptr &&
226 GetOwningArena() == other->GetOwningArena()) {
227 #else // PROTOBUF_FORCE_COPY_IN_SWAP
228 if (GetOwningArena() == other->GetOwningArena()) {
229 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
230 InternalSwap(other);
231 } else {
232 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
233 }
234 }
235 void UnsafeArenaSwap(Operation_Input* other) {
236 if (other == this) return;
237 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
238 InternalSwap(other);
239 }
240
241 // implements Message ----------------------------------------------
242
243 Operation_Input* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
244 return CreateMaybeMessage<Operation_Input>(arena);
245 }
246 Operation_Input* New() const {
247 return New(nullptr);
248 }
249 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
250 void CopyFrom(const Operation_Input& from);
251 void MergeFrom(const Operation_Input& from);
252 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
253 bool IsInitialized() const final;
254
255 size_t ByteSizeLong() const final;
256 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
257 ::uint8_t* _InternalSerialize(
258 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
259 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
260
261 private:
262 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
263 void SharedDtor();
264 void SetCachedSize(int size) const;
265 void InternalSwap(Operation_Input* other);
266
267 private:
268 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
269 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
270 return "tensorflow.eager.Operation.Input";
271 }
272 protected:
273 explicit Operation_Input(::PROTOBUF_NAMESPACE_ID::Arena* arena,
274 bool is_message_owned = false);
275 public:
276
277 std::string GetTypeName() const final;
278
279 // nested types ----------------------------------------------------
280
281 // accessors -------------------------------------------------------
282
283 enum : int {
284 kRemoteHandleFieldNumber = 1,
285 kTensorFieldNumber = 2,
286 };
287 // .tensorflow.eager.RemoteTensorHandle remote_handle = 1;
288 bool has_remote_handle() const;
289 private:
290 bool _internal_has_remote_handle() const;
291 public:
292 void clear_remote_handle();
293 const ::tensorflow::eager::RemoteTensorHandle& remote_handle() const;
294 PROTOBUF_NODISCARD ::tensorflow::eager::RemoteTensorHandle* release_remote_handle();
295 ::tensorflow::eager::RemoteTensorHandle* mutable_remote_handle();
296 void set_allocated_remote_handle(::tensorflow::eager::RemoteTensorHandle* remote_handle);
297 private:
298 const ::tensorflow::eager::RemoteTensorHandle& _internal_remote_handle() const;
299 ::tensorflow::eager::RemoteTensorHandle* _internal_mutable_remote_handle();
300 public:
301 void unsafe_arena_set_allocated_remote_handle(
302 ::tensorflow::eager::RemoteTensorHandle* remote_handle);
303 ::tensorflow::eager::RemoteTensorHandle* unsafe_arena_release_remote_handle();
304
305 // .tensorflow.TensorProto tensor = 2;
306 bool has_tensor() const;
307 private:
308 bool _internal_has_tensor() const;
309 public:
310 void clear_tensor();
311 const ::tensorflow::TensorProto& tensor() const;
312 PROTOBUF_NODISCARD ::tensorflow::TensorProto* release_tensor();
313 ::tensorflow::TensorProto* mutable_tensor();
314 void set_allocated_tensor(::tensorflow::TensorProto* tensor);
315 private:
316 const ::tensorflow::TensorProto& _internal_tensor() const;
317 ::tensorflow::TensorProto* _internal_mutable_tensor();
318 public:
319 void unsafe_arena_set_allocated_tensor(
320 ::tensorflow::TensorProto* tensor);
321 ::tensorflow::TensorProto* unsafe_arena_release_tensor();
322
323 void clear_item();
324 ItemCase item_case() const;
325 // @@protoc_insertion_point(class_scope:tensorflow.eager.Operation.Input)
326 private:
327 class _Internal;
328 void set_has_remote_handle();
329 void set_has_tensor();
330
331 inline bool has_item() const;
332 inline void clear_has_item();
333
334 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
335 typedef void InternalArenaConstructable_;
336 typedef void DestructorSkippable_;
// Internal storage for the oneof: the payload union, the cached serialized
// size returned by GetCachedSize(), and the active-field case word.
337 struct Impl_ {
338 union ItemUnion {
339 constexpr ItemUnion() : _constinit_{} {}
340 ::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized _constinit_;
341 ::tensorflow::eager::RemoteTensorHandle* remote_handle_;
342 ::tensorflow::TensorProto* tensor_;
343 } item_;
344 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
345 ::uint32_t _oneof_case_[1];
346
347 };
348 union { Impl_ _impl_; };
349 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
350 };
351 // -------------------------------------------------------------------
352
// Generated map-entry type backing Operation's
// `map<string, .tensorflow.AttrValue> attrs = 5;` field (used via
// MapFieldLite in Operation::Impl_). Implementation detail of the map
// field -- not part of the public API, hence the DoNotUse suffix.
353 class Operation_AttrsEntry_DoNotUse : public ::PROTOBUF_NAMESPACE_ID::internal::MapEntryLite<Operation_AttrsEntry_DoNotUse,
354 std::string, ::tensorflow::AttrValue,
355 ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_STRING,
356 ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_MESSAGE> {
357 public:
358 typedef ::PROTOBUF_NAMESPACE_ID::internal::MapEntryLite<Operation_AttrsEntry_DoNotUse,
359 std::string, ::tensorflow::AttrValue,
360 ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_STRING,
361 ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_MESSAGE> SuperType;
362 Operation_AttrsEntry_DoNotUse();
363 explicit PROTOBUF_CONSTEXPR Operation_AttrsEntry_DoNotUse(
364 ::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
365 explicit Operation_AttrsEntry_DoNotUse(::PROTOBUF_NAMESPACE_ID::Arena* arena);
366 void MergeFrom(const Operation_AttrsEntry_DoNotUse& other);
367 internal_default_instance()367 static const Operation_AttrsEntry_DoNotUse* internal_default_instance() { return reinterpret_cast<const Operation_AttrsEntry_DoNotUse*>(&_Operation_AttrsEntry_DoNotUse_default_instance_); }
// Parse-time UTF-8 validation for the string map key.
368 ValidateKey(std::string * s)368 static bool ValidateKey(std::string* s) {
369 return ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String(s->data(), static_cast<int>(s->size()), ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::PARSE, "tensorflow.eager.Operation.AttrsEntry.key");
370 }
// Message-typed values require no extra validation.
371 ValidateValue(void *)371 static bool ValidateValue(void*) { return true; }
372 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
373 };
374
375 // -------------------------------------------------------------------
376
// Generated lite-runtime (MessageLite) class for the proto3 message
// tensorflow.eager.Operation. Fields (per the accessor comments below):
// int64 id (1), string name (2), repeated int64 control_op_ids (4),
// map<string, AttrValue> attrs (5), string device (6),
// bool is_component_function (7), int64 func_step_id (8),
// bool is_function (9), repeated Operation.Input op_inputs (10).
// NOTE(review): protoc-generated code -- do not hand-edit; regenerate from
// tensorflow/core/protobuf/eager_service.proto instead.
377 class Operation final :
378 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.Operation) */ {
379 public:
380 Operation()380 inline Operation() : Operation(nullptr) {}
381 ~Operation() override;
382 explicit PROTOBUF_CONSTEXPR Operation(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
383
384 Operation(const Operation& from);
385 Operation(Operation && from)385 Operation(Operation&& from) noexcept
386 : Operation() {
387 *this = ::std::move(from);
388 }
389
390 inline Operation& operator=(const Operation& from) {
391 if (this == &from) return *this;
392 CopyFrom(from);
393 return *this;
394 }
// Move-assign: swap in place when both objects live on the same arena,
// otherwise fall back to a deep copy (arena memory cannot be transferred).
395 inline Operation& operator=(Operation&& from) noexcept {
396 if (this == &from) return *this;
397 if (GetOwningArena() == from.GetOwningArena()
398 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
399 && GetOwningArena() != nullptr
400 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
401 ) {
402 InternalSwap(&from);
403 } else {
404 CopyFrom(from);
405 }
406 return *this;
407 }
408
409 default_instance()409 static const Operation& default_instance() {
410 return *internal_default_instance();
411 }
412 internal_default_instance()412 static inline const Operation* internal_default_instance() {
413 return reinterpret_cast<const Operation*>(
414 &_Operation_default_instance_);
415 }
416 static constexpr int kIndexInFileMessages =
417 2;
418
419 swap(Operation & a,Operation & b)419 friend void swap(Operation& a, Operation& b) {
420 a.Swap(&b);
421 }
// Swap contents; only same-arena objects may swap internals, otherwise a
// generic (copying) swap is performed.
422 Swap(Operation * other)422 inline void Swap(Operation* other) {
423 if (other == this) return;
424 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
425 if (GetOwningArena() != nullptr &&
426 GetOwningArena() == other->GetOwningArena()) {
427 #else // PROTOBUF_FORCE_COPY_IN_SWAP
428 if (GetOwningArena() == other->GetOwningArena()) {
429 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
430 InternalSwap(other);
431 } else {
432 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
433 }
434 }
435 void UnsafeArenaSwap(Operation* other) {
436 if (other == this) return;
437 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
438 InternalSwap(other);
439 }
440
441 // implements Message ----------------------------------------------
442
443 Operation* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
444 return CreateMaybeMessage<Operation>(arena);
445 }
446 Operation* New() const {
447 return New(nullptr);
448 }
449 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
450 void CopyFrom(const Operation& from);
451 void MergeFrom(const Operation& from);
452 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
453 bool IsInitialized() const final;
454
455 size_t ByteSizeLong() const final;
456 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
457 ::uint8_t* _InternalSerialize(
458 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
459 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
460
461 private:
462 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
463 void SharedDtor();
464 void SetCachedSize(int size) const;
465 void InternalSwap(Operation* other);
466
467 private:
468 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
469 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
470 return "tensorflow.eager.Operation";
471 }
472 protected:
473 explicit Operation(::PROTOBUF_NAMESPACE_ID::Arena* arena,
474 bool is_message_owned = false);
475 public:
476
477 std::string GetTypeName() const final;
478
479 // nested types ----------------------------------------------------
480
481 typedef Operation_Input Input;
482
483 // accessors -------------------------------------------------------
484
// .proto field numbers for each field (see the accessor comments below).
485 enum : int {
486 kControlOpIdsFieldNumber = 4,
487 kAttrsFieldNumber = 5,
488 kOpInputsFieldNumber = 10,
489 kNameFieldNumber = 2,
490 kDeviceFieldNumber = 6,
491 kIdFieldNumber = 1,
492 kFuncStepIdFieldNumber = 8,
493 kIsComponentFunctionFieldNumber = 7,
494 kIsFunctionFieldNumber = 9,
495 };
496 // repeated int64 control_op_ids = 4;
497 int control_op_ids_size() const;
498 private:
499 int _internal_control_op_ids_size() const;
500 public:
501 void clear_control_op_ids();
502 private:
503 ::int64_t _internal_control_op_ids(int index) const;
504 const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
505 _internal_control_op_ids() const;
506 void _internal_add_control_op_ids(::int64_t value);
507 ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
508 _internal_mutable_control_op_ids();
509 public:
510 ::int64_t control_op_ids(int index) const;
511 void set_control_op_ids(int index, ::int64_t value);
512 void add_control_op_ids(::int64_t value);
513 const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
514 control_op_ids() const;
515 ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
516 mutable_control_op_ids();
517
518 // map<string, .tensorflow.AttrValue> attrs = 5;
519 int attrs_size() const;
520 private:
521 int _internal_attrs_size() const;
522 public:
523 void clear_attrs();
524 private:
525 const ::PROTOBUF_NAMESPACE_ID::Map< std::string, ::tensorflow::AttrValue >&
526 _internal_attrs() const;
527 ::PROTOBUF_NAMESPACE_ID::Map< std::string, ::tensorflow::AttrValue >*
528 _internal_mutable_attrs();
529 public:
530 const ::PROTOBUF_NAMESPACE_ID::Map< std::string, ::tensorflow::AttrValue >&
531 attrs() const;
532 ::PROTOBUF_NAMESPACE_ID::Map< std::string, ::tensorflow::AttrValue >*
533 mutable_attrs();
534
535 // repeated .tensorflow.eager.Operation.Input op_inputs = 10;
536 int op_inputs_size() const;
537 private:
538 int _internal_op_inputs_size() const;
539 public:
540 void clear_op_inputs();
541 ::tensorflow::eager::Operation_Input* mutable_op_inputs(int index);
542 ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::eager::Operation_Input >*
543 mutable_op_inputs();
544 private:
545 const ::tensorflow::eager::Operation_Input& _internal_op_inputs(int index) const;
546 ::tensorflow::eager::Operation_Input* _internal_add_op_inputs();
547 public:
548 const ::tensorflow::eager::Operation_Input& op_inputs(int index) const;
549 ::tensorflow::eager::Operation_Input* add_op_inputs();
550 const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::eager::Operation_Input >&
551 op_inputs() const;
552
553 // string name = 2;
554 void clear_name();
555 const std::string& name() const;
556 template <typename ArgT0 = const std::string&, typename... ArgT>
557 void set_name(ArgT0&& arg0, ArgT... args);
558 std::string* mutable_name();
559 PROTOBUF_NODISCARD std::string* release_name();
560 void set_allocated_name(std::string* name);
561 private:
562 const std::string& _internal_name() const;
563 inline PROTOBUF_ALWAYS_INLINE void _internal_set_name(const std::string& value);
564 std::string* _internal_mutable_name();
565 public:
566
567 // string device = 6;
568 void clear_device();
569 const std::string& device() const;
570 template <typename ArgT0 = const std::string&, typename... ArgT>
571 void set_device(ArgT0&& arg0, ArgT... args);
572 std::string* mutable_device();
573 PROTOBUF_NODISCARD std::string* release_device();
574 void set_allocated_device(std::string* device);
575 private:
576 const std::string& _internal_device() const;
577 inline PROTOBUF_ALWAYS_INLINE void _internal_set_device(const std::string& value);
578 std::string* _internal_mutable_device();
579 public:
580
581 // int64 id = 1;
582 void clear_id();
583 ::int64_t id() const;
584 void set_id(::int64_t value);
585 private:
586 ::int64_t _internal_id() const;
587 void _internal_set_id(::int64_t value);
588 public:
589
590 // int64 func_step_id = 8;
591 void clear_func_step_id();
592 ::int64_t func_step_id() const;
593 void set_func_step_id(::int64_t value);
594 private:
595 ::int64_t _internal_func_step_id() const;
596 void _internal_set_func_step_id(::int64_t value);
597 public:
598
599 // bool is_component_function = 7;
600 void clear_is_component_function();
601 bool is_component_function() const;
602 void set_is_component_function(bool value);
603 private:
604 bool _internal_is_component_function() const;
605 void _internal_set_is_component_function(bool value);
606 public:
607
608 // bool is_function = 9;
609 void clear_is_function();
610 bool is_function() const;
611 void set_is_function(bool value);
612 private:
613 bool _internal_is_function() const;
614 void _internal_set_is_function(bool value);
615 public:
616
617 // @@protoc_insertion_point(class_scope:tensorflow.eager.Operation)
618 private:
619 class _Internal;
620
621 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
622 typedef void InternalArenaConstructable_;
623 typedef void DestructorSkippable_;
// Internal field storage; _cached_size_ backs GetCachedSize() above.
624 struct Impl_ {
625 ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t > control_op_ids_;
626 mutable std::atomic<int> _control_op_ids_cached_byte_size_;
627 ::PROTOBUF_NAMESPACE_ID::internal::MapFieldLite<
628 Operation_AttrsEntry_DoNotUse,
629 std::string, ::tensorflow::AttrValue,
630 ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_STRING,
631 ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_MESSAGE> attrs_;
632 ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::eager::Operation_Input > op_inputs_;
633 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr name_;
634 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr device_;
635 ::int64_t id_;
636 ::int64_t func_step_id_;
637 bool is_component_function_;
638 bool is_function_;
639 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
640 };
641 union { Impl_ _impl_; };
642 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
643 };
644 // -------------------------------------------------------------------
645
646 class QueueItem final :
647 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.QueueItem) */ {
648 public:
QueueItem()649 inline QueueItem() : QueueItem(nullptr) {}
650 ~QueueItem() override;
651 explicit PROTOBUF_CONSTEXPR QueueItem(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
652
653 QueueItem(const QueueItem& from);
QueueItem(QueueItem && from)654 QueueItem(QueueItem&& from) noexcept
655 : QueueItem() {
656 *this = ::std::move(from);
657 }
658
659 inline QueueItem& operator=(const QueueItem& from) {
660 if (this == &from) return *this;
661 CopyFrom(from);
662 return *this;
663 }
664 inline QueueItem& operator=(QueueItem&& from) noexcept {
665 if (this == &from) return *this;
666 if (GetOwningArena() == from.GetOwningArena()
667 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
668 && GetOwningArena() != nullptr
669 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
670 ) {
671 InternalSwap(&from);
672 } else {
673 CopyFrom(from);
674 }
675 return *this;
676 }
677
default_instance()678 static const QueueItem& default_instance() {
679 return *internal_default_instance();
680 }
681 enum ItemCase {
682 kHandleToDecref = 1,
683 kOperation = 2,
684 kSendTensor = 3,
685 kRegisterFunction = 4,
686 kCleanupFunction = 5,
687 kSyncRemoteExecutorForStream = 6,
688 kSendPackedHandle = 7,
689 ITEM_NOT_SET = 0,
690 };
691
internal_default_instance()692 static inline const QueueItem* internal_default_instance() {
693 return reinterpret_cast<const QueueItem*>(
694 &_QueueItem_default_instance_);
695 }
696 static constexpr int kIndexInFileMessages =
697 3;
698
swap(QueueItem & a,QueueItem & b)699 friend void swap(QueueItem& a, QueueItem& b) {
700 a.Swap(&b);
701 }
Swap(QueueItem * other)702 inline void Swap(QueueItem* other) {
703 if (other == this) return;
704 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
705 if (GetOwningArena() != nullptr &&
706 GetOwningArena() == other->GetOwningArena()) {
707 #else // PROTOBUF_FORCE_COPY_IN_SWAP
708 if (GetOwningArena() == other->GetOwningArena()) {
709 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
710 InternalSwap(other);
711 } else {
712 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
713 }
714 }
715 void UnsafeArenaSwap(QueueItem* other) {
716 if (other == this) return;
717 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
718 InternalSwap(other);
719 }
720
721 // implements Message ----------------------------------------------
722
723 QueueItem* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
724 return CreateMaybeMessage<QueueItem>(arena);
725 }
726 QueueItem* New() const {
727 return New(nullptr);
728 }
729 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
730 void CopyFrom(const QueueItem& from);
731 void MergeFrom(const QueueItem& from);
732 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
733 bool IsInitialized() const final;
734
735 size_t ByteSizeLong() const final;
736 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
737 ::uint8_t* _InternalSerialize(
738 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
739 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
740
741 private:
742 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
743 void SharedDtor();
744 void SetCachedSize(int size) const;
745 void InternalSwap(QueueItem* other);
746
747 private:
748 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  // Fully-qualified proto message name; private, reachable by
  // internal::AnyMetadata via the friend declaration above.
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.eager.QueueItem";
  }
752 protected:
753 explicit QueueItem(::PROTOBUF_NAMESPACE_ID::Arena* arena,
754 bool is_message_owned = false);
755 public:
756
757 std::string GetTypeName() const final;
758
759 // nested types ----------------------------------------------------
760
761 // accessors -------------------------------------------------------
762
763 enum : int {
764 kHandleToDecrefFieldNumber = 1,
765 kOperationFieldNumber = 2,
766 kSendTensorFieldNumber = 3,
767 kRegisterFunctionFieldNumber = 4,
768 kCleanupFunctionFieldNumber = 5,
769 kSyncRemoteExecutorForStreamFieldNumber = 6,
770 kSendPackedHandleFieldNumber = 7,
771 };
772 // .tensorflow.eager.RemoteTensorHandle handle_to_decref = 1;
773 bool has_handle_to_decref() const;
774 private:
775 bool _internal_has_handle_to_decref() const;
776 public:
777 void clear_handle_to_decref();
778 const ::tensorflow::eager::RemoteTensorHandle& handle_to_decref() const;
779 PROTOBUF_NODISCARD ::tensorflow::eager::RemoteTensorHandle* release_handle_to_decref();
780 ::tensorflow::eager::RemoteTensorHandle* mutable_handle_to_decref();
781 void set_allocated_handle_to_decref(::tensorflow::eager::RemoteTensorHandle* handle_to_decref);
782 private:
783 const ::tensorflow::eager::RemoteTensorHandle& _internal_handle_to_decref() const;
784 ::tensorflow::eager::RemoteTensorHandle* _internal_mutable_handle_to_decref();
785 public:
786 void unsafe_arena_set_allocated_handle_to_decref(
787 ::tensorflow::eager::RemoteTensorHandle* handle_to_decref);
788 ::tensorflow::eager::RemoteTensorHandle* unsafe_arena_release_handle_to_decref();
789
790 // .tensorflow.eager.Operation operation = 2;
791 bool has_operation() const;
792 private:
793 bool _internal_has_operation() const;
794 public:
795 void clear_operation();
796 const ::tensorflow::eager::Operation& operation() const;
797 PROTOBUF_NODISCARD ::tensorflow::eager::Operation* release_operation();
798 ::tensorflow::eager::Operation* mutable_operation();
799 void set_allocated_operation(::tensorflow::eager::Operation* operation);
800 private:
801 const ::tensorflow::eager::Operation& _internal_operation() const;
802 ::tensorflow::eager::Operation* _internal_mutable_operation();
803 public:
804 void unsafe_arena_set_allocated_operation(
805 ::tensorflow::eager::Operation* operation);
806 ::tensorflow::eager::Operation* unsafe_arena_release_operation();
807
808 // .tensorflow.eager.SendTensorOp send_tensor = 3;
809 bool has_send_tensor() const;
810 private:
811 bool _internal_has_send_tensor() const;
812 public:
813 void clear_send_tensor();
814 const ::tensorflow::eager::SendTensorOp& send_tensor() const;
815 PROTOBUF_NODISCARD ::tensorflow::eager::SendTensorOp* release_send_tensor();
816 ::tensorflow::eager::SendTensorOp* mutable_send_tensor();
817 void set_allocated_send_tensor(::tensorflow::eager::SendTensorOp* send_tensor);
818 private:
819 const ::tensorflow::eager::SendTensorOp& _internal_send_tensor() const;
820 ::tensorflow::eager::SendTensorOp* _internal_mutable_send_tensor();
821 public:
822 void unsafe_arena_set_allocated_send_tensor(
823 ::tensorflow::eager::SendTensorOp* send_tensor);
824 ::tensorflow::eager::SendTensorOp* unsafe_arena_release_send_tensor();
825
826 // .tensorflow.eager.RegisterFunctionOp register_function = 4;
827 bool has_register_function() const;
828 private:
829 bool _internal_has_register_function() const;
830 public:
831 void clear_register_function();
832 const ::tensorflow::eager::RegisterFunctionOp& register_function() const;
833 PROTOBUF_NODISCARD ::tensorflow::eager::RegisterFunctionOp* release_register_function();
834 ::tensorflow::eager::RegisterFunctionOp* mutable_register_function();
835 void set_allocated_register_function(::tensorflow::eager::RegisterFunctionOp* register_function);
836 private:
837 const ::tensorflow::eager::RegisterFunctionOp& _internal_register_function() const;
838 ::tensorflow::eager::RegisterFunctionOp* _internal_mutable_register_function();
839 public:
840 void unsafe_arena_set_allocated_register_function(
841 ::tensorflow::eager::RegisterFunctionOp* register_function);
842 ::tensorflow::eager::RegisterFunctionOp* unsafe_arena_release_register_function();
843
844 // .tensorflow.eager.CleanupFunctionOp cleanup_function = 5;
845 bool has_cleanup_function() const;
846 private:
847 bool _internal_has_cleanup_function() const;
848 public:
849 void clear_cleanup_function();
850 const ::tensorflow::eager::CleanupFunctionOp& cleanup_function() const;
851 PROTOBUF_NODISCARD ::tensorflow::eager::CleanupFunctionOp* release_cleanup_function();
852 ::tensorflow::eager::CleanupFunctionOp* mutable_cleanup_function();
853 void set_allocated_cleanup_function(::tensorflow::eager::CleanupFunctionOp* cleanup_function);
854 private:
855 const ::tensorflow::eager::CleanupFunctionOp& _internal_cleanup_function() const;
856 ::tensorflow::eager::CleanupFunctionOp* _internal_mutable_cleanup_function();
857 public:
858 void unsafe_arena_set_allocated_cleanup_function(
859 ::tensorflow::eager::CleanupFunctionOp* cleanup_function);
860 ::tensorflow::eager::CleanupFunctionOp* unsafe_arena_release_cleanup_function();
861
862 // .tensorflow.eager.SyncRemoteExecutorForStream sync_remote_executor_for_stream = 6;
863 bool has_sync_remote_executor_for_stream() const;
864 private:
865 bool _internal_has_sync_remote_executor_for_stream() const;
866 public:
867 void clear_sync_remote_executor_for_stream();
868 const ::tensorflow::eager::SyncRemoteExecutorForStream& sync_remote_executor_for_stream() const;
869 PROTOBUF_NODISCARD ::tensorflow::eager::SyncRemoteExecutorForStream* release_sync_remote_executor_for_stream();
870 ::tensorflow::eager::SyncRemoteExecutorForStream* mutable_sync_remote_executor_for_stream();
871 void set_allocated_sync_remote_executor_for_stream(::tensorflow::eager::SyncRemoteExecutorForStream* sync_remote_executor_for_stream);
872 private:
873 const ::tensorflow::eager::SyncRemoteExecutorForStream& _internal_sync_remote_executor_for_stream() const;
874 ::tensorflow::eager::SyncRemoteExecutorForStream* _internal_mutable_sync_remote_executor_for_stream();
875 public:
876 void unsafe_arena_set_allocated_sync_remote_executor_for_stream(
877 ::tensorflow::eager::SyncRemoteExecutorForStream* sync_remote_executor_for_stream);
878 ::tensorflow::eager::SyncRemoteExecutorForStream* unsafe_arena_release_sync_remote_executor_for_stream();
879
880 // .tensorflow.eager.SendPackedHandleOp send_packed_handle = 7;
881 bool has_send_packed_handle() const;
882 private:
883 bool _internal_has_send_packed_handle() const;
884 public:
885 void clear_send_packed_handle();
886 const ::tensorflow::eager::SendPackedHandleOp& send_packed_handle() const;
887 PROTOBUF_NODISCARD ::tensorflow::eager::SendPackedHandleOp* release_send_packed_handle();
888 ::tensorflow::eager::SendPackedHandleOp* mutable_send_packed_handle();
889 void set_allocated_send_packed_handle(::tensorflow::eager::SendPackedHandleOp* send_packed_handle);
890 private:
891 const ::tensorflow::eager::SendPackedHandleOp& _internal_send_packed_handle() const;
892 ::tensorflow::eager::SendPackedHandleOp* _internal_mutable_send_packed_handle();
893 public:
894 void unsafe_arena_set_allocated_send_packed_handle(
895 ::tensorflow::eager::SendPackedHandleOp* send_packed_handle);
896 ::tensorflow::eager::SendPackedHandleOp* unsafe_arena_release_send_packed_handle();
897
898 void clear_item();
899 ItemCase item_case() const;
900 // @@protoc_insertion_point(class_scope:tensorflow.eager.QueueItem)
901 private:
902 class _Internal;
903 void set_has_handle_to_decref();
904 void set_has_operation();
905 void set_has_send_tensor();
906 void set_has_register_function();
907 void set_has_cleanup_function();
908 void set_has_sync_remote_executor_for_stream();
909 void set_has_send_packed_handle();
910
911 inline bool has_item() const;
912 inline void clear_has_item();
913
914 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
915 typedef void InternalArenaConstructable_;
916 typedef void DestructorSkippable_;
  // Field storage: a union over the seven oneof alternatives (all held by
  // pointer), plus the cached wire size and the oneof case discriminant.
  struct Impl_ {
    union ItemUnion {
      // ConstantInitialized member allows constexpr construction of the
      // union for the global default instance.
      constexpr ItemUnion() : _constinit_{} {}
      ::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized _constinit_;
      ::tensorflow::eager::RemoteTensorHandle* handle_to_decref_;
      ::tensorflow::eager::Operation* operation_;
      ::tensorflow::eager::SendTensorOp* send_tensor_;
      ::tensorflow::eager::RegisterFunctionOp* register_function_;
      ::tensorflow::eager::CleanupFunctionOp* cleanup_function_;
      ::tensorflow::eager::SyncRemoteExecutorForStream* sync_remote_executor_for_stream_;
      ::tensorflow::eager::SendPackedHandleOp* send_packed_handle_;
    } item_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
    // One word of oneof-case state (which ItemCase is active).
    ::uint32_t _oneof_case_[1];

  };
  // Wrapped in an anonymous union so Impl_'s constructor/destructor do not
  // run automatically; construction/destruction is handled explicitly.
  union { Impl_ _impl_; };
934 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
935 };
936 // -------------------------------------------------------------------
937
// Generated lite-runtime message for tensorflow.eager.QueueResponse
// (tensorflow/core/protobuf/eager_service.proto). Holds three repeated
// fields: TensorShapeProto `shape` (1), TensorProto `tensor` (2), and
// string `device` (3) — presumably one entry per enqueued op output;
// confirm against the .proto definition.
class QueueResponse final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.QueueResponse) */ {
 public:
  // Default construction delegates to the arena constructor with a null
  // arena (heap allocation).
  inline QueueResponse() : QueueResponse(nullptr) {}
  ~QueueResponse() override;
  // Constant-initialized constructor used for the global default instance.
  explicit PROTOBUF_CONSTEXPR QueueResponse(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  QueueResponse(const QueueResponse& from);
  // Move construction: default-construct, then delegate to move-assignment.
  QueueResponse(QueueResponse&& from) noexcept
    : QueueResponse() {
    *this = ::std::move(from);
  }

  inline QueueResponse& operator=(const QueueResponse& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  // Move assignment is a cheap InternalSwap only when both messages share
  // an owning arena; otherwise it degrades to a deep CopyFrom.
  inline QueueResponse& operator=(QueueResponse&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
#ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
#endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  // Process-wide immutable default instance.
  static const QueueResponse& default_instance() {
    return *internal_default_instance();
  }
  static inline const QueueResponse* internal_default_instance() {
    return reinterpret_cast<const QueueResponse*>(
               &_QueueResponse_default_instance_);
  }
  // Index of this message within the generated file's message table.
  static constexpr int kIndexInFileMessages =
    4;

  friend void swap(QueueResponse& a, QueueResponse& b) {
    a.Swap(&b);
  }
  // Pointer-cheap swap only within one arena; cross-arena swaps go through
  // GenericSwap (copies via a temporary).
  inline void Swap(QueueResponse* other) {
    if (other == this) return;
#ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
#else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
#endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Precondition (DCHECKed): both messages share the same owning arena.
  void UnsafeArenaSwap(QueueResponse* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  QueueResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<QueueResponse>(arena);
  }
  QueueResponse* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
  void CopyFrom(const QueueResponse& from);
  void MergeFrom(const QueueResponse& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(QueueResponse* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.eager.QueueResponse";
  }
  protected:
  explicit QueueResponse(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kShapeFieldNumber = 1,
    kTensorFieldNumber = 2,
    kDeviceFieldNumber = 3,
  };
  // repeated .tensorflow.TensorShapeProto shape = 1;
  int shape_size() const;
  private:
  int _internal_shape_size() const;
  public:
  void clear_shape();
  ::tensorflow::TensorShapeProto* mutable_shape(int index);
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorShapeProto >*
      mutable_shape();
  private:
  const ::tensorflow::TensorShapeProto& _internal_shape(int index) const;
  ::tensorflow::TensorShapeProto* _internal_add_shape();
  public:
  const ::tensorflow::TensorShapeProto& shape(int index) const;
  ::tensorflow::TensorShapeProto* add_shape();
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorShapeProto >&
      shape() const;

  // repeated .tensorflow.TensorProto tensor = 2;
  int tensor_size() const;
  private:
  int _internal_tensor_size() const;
  public:
  void clear_tensor();
  ::tensorflow::TensorProto* mutable_tensor(int index);
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorProto >*
      mutable_tensor();
  private:
  const ::tensorflow::TensorProto& _internal_tensor(int index) const;
  ::tensorflow::TensorProto* _internal_add_tensor();
  public:
  const ::tensorflow::TensorProto& tensor(int index) const;
  ::tensorflow::TensorProto* add_tensor();
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorProto >&
      tensor() const;

  // repeated string device = 3;
  int device_size() const;
  private:
  int _internal_device_size() const;
  public:
  void clear_device();
  const std::string& device(int index) const;
  std::string* mutable_device(int index);
  void set_device(int index, const std::string& value);
  void set_device(int index, std::string&& value);
  void set_device(int index, const char* value);
  void set_device(int index, const char* value, size_t size);
  std::string* add_device();
  void add_device(const std::string& value);
  void add_device(std::string&& value);
  void add_device(const char* value);
  void add_device(const char* value, size_t size);
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>& device() const;
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>* mutable_device();
  private:
  const std::string& _internal_device(int index) const;
  std::string* _internal_add_device();
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.eager.QueueResponse)
  private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage; kept in a nested struct inside a union so the message
  // can be constant-initialized without running this constructor.
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorShapeProto > shape_;
    ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorProto > tensor_;
    ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string> device_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
};
1125 // -------------------------------------------------------------------
1126
// Generated lite-runtime message for tensorflow.eager.CreateContextRequest
// (tensorflow/core/protobuf/eager_service.proto). Carries cluster/server
// configuration (server_def, version_def, repeated cluster_device_attributes)
// plus scalar context options (keep_alive_secs, context_id, context_view_id,
// async, lazy_copy_remote_function_inputs).
class CreateContextRequest final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.CreateContextRequest) */ {
 public:
  // Default construction delegates to the arena constructor with a null
  // arena (heap allocation).
  inline CreateContextRequest() : CreateContextRequest(nullptr) {}
  ~CreateContextRequest() override;
  // Constant-initialized constructor used for the global default instance.
  explicit PROTOBUF_CONSTEXPR CreateContextRequest(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  CreateContextRequest(const CreateContextRequest& from);
  // Move construction: default-construct, then delegate to move-assignment.
  CreateContextRequest(CreateContextRequest&& from) noexcept
    : CreateContextRequest() {
    *this = ::std::move(from);
  }

  inline CreateContextRequest& operator=(const CreateContextRequest& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  // Move assignment is a cheap InternalSwap only when both messages share
  // an owning arena; otherwise it degrades to a deep CopyFrom.
  inline CreateContextRequest& operator=(CreateContextRequest&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
#ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
#endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  // Process-wide immutable default instance.
  static const CreateContextRequest& default_instance() {
    return *internal_default_instance();
  }
  static inline const CreateContextRequest* internal_default_instance() {
    return reinterpret_cast<const CreateContextRequest*>(
               &_CreateContextRequest_default_instance_);
  }
  // Index of this message within the generated file's message table.
  static constexpr int kIndexInFileMessages =
    5;

  friend void swap(CreateContextRequest& a, CreateContextRequest& b) {
    a.Swap(&b);
  }
  // Pointer-cheap swap only within one arena; cross-arena swaps go through
  // GenericSwap (copies via a temporary).
  inline void Swap(CreateContextRequest* other) {
    if (other == this) return;
#ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
#else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
#endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Precondition (DCHECKed): both messages share the same owning arena.
  void UnsafeArenaSwap(CreateContextRequest* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  CreateContextRequest* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<CreateContextRequest>(arena);
  }
  CreateContextRequest* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
  void CopyFrom(const CreateContextRequest& from);
  void MergeFrom(const CreateContextRequest& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(CreateContextRequest* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.eager.CreateContextRequest";
  }
  protected:
  explicit CreateContextRequest(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  // Field-number constants; ordering here follows generated layout, not
  // field-number order.
  enum : int {
    kClusterDeviceAttributesFieldNumber = 6,
    kServerDefFieldNumber = 1,
    kVersionDefFieldNumber = 4,
    kKeepAliveSecsFieldNumber = 3,
    kContextIdFieldNumber = 7,
    kContextViewIdFieldNumber = 8,
    kAsyncFieldNumber = 2,
    kLazyCopyRemoteFunctionInputsFieldNumber = 9,
  };
  // repeated .tensorflow.DeviceAttributes cluster_device_attributes = 6;
  int cluster_device_attributes_size() const;
  private:
  int _internal_cluster_device_attributes_size() const;
  public:
  void clear_cluster_device_attributes();
  ::tensorflow::DeviceAttributes* mutable_cluster_device_attributes(int index);
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::DeviceAttributes >*
      mutable_cluster_device_attributes();
  private:
  const ::tensorflow::DeviceAttributes& _internal_cluster_device_attributes(int index) const;
  ::tensorflow::DeviceAttributes* _internal_add_cluster_device_attributes();
  public:
  const ::tensorflow::DeviceAttributes& cluster_device_attributes(int index) const;
  ::tensorflow::DeviceAttributes* add_cluster_device_attributes();
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::DeviceAttributes >&
      cluster_device_attributes() const;

  // .tensorflow.ServerDef server_def = 1;
  bool has_server_def() const;
  private:
  bool _internal_has_server_def() const;
  public:
  void clear_server_def();
  const ::tensorflow::ServerDef& server_def() const;
  PROTOBUF_NODISCARD ::tensorflow::ServerDef* release_server_def();
  ::tensorflow::ServerDef* mutable_server_def();
  void set_allocated_server_def(::tensorflow::ServerDef* server_def);
  private:
  const ::tensorflow::ServerDef& _internal_server_def() const;
  ::tensorflow::ServerDef* _internal_mutable_server_def();
  public:
  void unsafe_arena_set_allocated_server_def(
      ::tensorflow::ServerDef* server_def);
  ::tensorflow::ServerDef* unsafe_arena_release_server_def();

  // .tensorflow.VersionDef version_def = 4;
  bool has_version_def() const;
  private:
  bool _internal_has_version_def() const;
  public:
  void clear_version_def();
  const ::tensorflow::VersionDef& version_def() const;
  PROTOBUF_NODISCARD ::tensorflow::VersionDef* release_version_def();
  ::tensorflow::VersionDef* mutable_version_def();
  void set_allocated_version_def(::tensorflow::VersionDef* version_def);
  private:
  const ::tensorflow::VersionDef& _internal_version_def() const;
  ::tensorflow::VersionDef* _internal_mutable_version_def();
  public:
  void unsafe_arena_set_allocated_version_def(
      ::tensorflow::VersionDef* version_def);
  ::tensorflow::VersionDef* unsafe_arena_release_version_def();

  // int64 keep_alive_secs = 3;
  void clear_keep_alive_secs();
  ::int64_t keep_alive_secs() const;
  void set_keep_alive_secs(::int64_t value);
  private:
  ::int64_t _internal_keep_alive_secs() const;
  void _internal_set_keep_alive_secs(::int64_t value);
  public:

  // fixed64 context_id = 7;
  void clear_context_id();
  ::uint64_t context_id() const;
  void set_context_id(::uint64_t value);
  private:
  ::uint64_t _internal_context_id() const;
  void _internal_set_context_id(::uint64_t value);
  public:

  // fixed64 context_view_id = 8;
  void clear_context_view_id();
  ::uint64_t context_view_id() const;
  void set_context_view_id(::uint64_t value);
  private:
  ::uint64_t _internal_context_view_id() const;
  void _internal_set_context_view_id(::uint64_t value);
  public:

  // bool async = 2;
  void clear_async();
  bool async() const;
  void set_async(bool value);
  private:
  bool _internal_async() const;
  void _internal_set_async(bool value);
  public:

  // bool lazy_copy_remote_function_inputs = 9;
  void clear_lazy_copy_remote_function_inputs();
  bool lazy_copy_remote_function_inputs() const;
  void set_lazy_copy_remote_function_inputs(bool value);
  private:
  bool _internal_lazy_copy_remote_function_inputs() const;
  void _internal_set_lazy_copy_remote_function_inputs(bool value);
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.eager.CreateContextRequest)
  private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage; sub-messages are held by pointer (null when unset),
  // kept in a nested struct inside a union so the message can be
  // constant-initialized without running this constructor.
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::DeviceAttributes > cluster_device_attributes_;
    ::tensorflow::ServerDef* server_def_;
    ::tensorflow::VersionDef* version_def_;
    ::int64_t keep_alive_secs_;
    ::uint64_t context_id_;
    ::uint64_t context_view_id_;
    bool async_;
    bool lazy_copy_remote_function_inputs_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
};
1363 // -------------------------------------------------------------------
1364
// Generated lite-runtime message for tensorflow.eager.CreateContextResponse
// (tensorflow/core/protobuf/eager_service.proto). Single field: repeated
// DeviceAttributes device_attributes = 2.
class CreateContextResponse final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.CreateContextResponse) */ {
 public:
  // Default construction delegates to the arena constructor with a null
  // arena (heap allocation).
  inline CreateContextResponse() : CreateContextResponse(nullptr) {}
  ~CreateContextResponse() override;
  // Constant-initialized constructor used for the global default instance.
  explicit PROTOBUF_CONSTEXPR CreateContextResponse(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  CreateContextResponse(const CreateContextResponse& from);
  // Move construction: default-construct, then delegate to move-assignment.
  CreateContextResponse(CreateContextResponse&& from) noexcept
    : CreateContextResponse() {
    *this = ::std::move(from);
  }

  inline CreateContextResponse& operator=(const CreateContextResponse& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  // Move assignment is a cheap InternalSwap only when both messages share
  // an owning arena; otherwise it degrades to a deep CopyFrom.
  inline CreateContextResponse& operator=(CreateContextResponse&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
#ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
#endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  // Process-wide immutable default instance.
  static const CreateContextResponse& default_instance() {
    return *internal_default_instance();
  }
  static inline const CreateContextResponse* internal_default_instance() {
    return reinterpret_cast<const CreateContextResponse*>(
               &_CreateContextResponse_default_instance_);
  }
  // Index of this message within the generated file's message table.
  static constexpr int kIndexInFileMessages =
    6;

  friend void swap(CreateContextResponse& a, CreateContextResponse& b) {
    a.Swap(&b);
  }
  // Pointer-cheap swap only within one arena; cross-arena swaps go through
  // GenericSwap (copies via a temporary).
  inline void Swap(CreateContextResponse* other) {
    if (other == this) return;
#ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
#else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
#endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Precondition (DCHECKed): both messages share the same owning arena.
  void UnsafeArenaSwap(CreateContextResponse* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  CreateContextResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<CreateContextResponse>(arena);
  }
  CreateContextResponse* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
  void CopyFrom(const CreateContextResponse& from);
  void MergeFrom(const CreateContextResponse& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(CreateContextResponse* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.eager.CreateContextResponse";
  }
  protected:
  explicit CreateContextResponse(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kDeviceAttributesFieldNumber = 2,
  };
  // repeated .tensorflow.DeviceAttributes device_attributes = 2;
  int device_attributes_size() const;
  private:
  int _internal_device_attributes_size() const;
  public:
  void clear_device_attributes();
  ::tensorflow::DeviceAttributes* mutable_device_attributes(int index);
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::DeviceAttributes >*
      mutable_device_attributes();
  private:
  const ::tensorflow::DeviceAttributes& _internal_device_attributes(int index) const;
  ::tensorflow::DeviceAttributes* _internal_add_device_attributes();
  public:
  const ::tensorflow::DeviceAttributes& device_attributes(int index) const;
  ::tensorflow::DeviceAttributes* add_device_attributes();
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::DeviceAttributes >&
      device_attributes() const;

  // @@protoc_insertion_point(class_scope:tensorflow.eager.CreateContextResponse)
  private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage; kept in a nested struct inside a union so the message
  // can be constant-initialized without running this constructor.
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::DeviceAttributes > device_attributes_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
};
1506 // -------------------------------------------------------------------
1507
// Protoc-generated lite message for tensorflow.eager.UpdateContextRequest:
// asks a remote worker to update an existing eager context (new ServerDef and
// refreshed cluster device set), identified by context_id / context_view_id.
// NOTE(review): generated code — every declaration and the Impl_ member layout
// below must stay in sync with the generated .cc; change the .proto instead of
// editing this file by hand.
1508 class UpdateContextRequest final :
1509     public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.UpdateContextRequest) */ {
1510  public:
UpdateContextRequest()1511   inline UpdateContextRequest() : UpdateContextRequest(nullptr) {}
1512   ~UpdateContextRequest() override;
1513   explicit PROTOBUF_CONSTEXPR UpdateContextRequest(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
1514
1515   UpdateContextRequest(const UpdateContextRequest& from);
UpdateContextRequest(UpdateContextRequest && from)1516   UpdateContextRequest(UpdateContextRequest&& from) noexcept
1517     : UpdateContextRequest() {
1518     *this = ::std::move(from);
1519   }
1520
1521   inline UpdateContextRequest& operator=(const UpdateContextRequest& from) {
1522     if (this == &from) return *this;
1523     CopyFrom(from);
1524     return *this;
1525   }
  // Move-assign steals the contents only when both messages live on the same
  // owning arena (cross-arena moves are unsafe); otherwise it deep-copies.
1526   inline UpdateContextRequest& operator=(UpdateContextRequest&& from) noexcept {
1527     if (this == &from) return *this;
1528     if (GetOwningArena() == from.GetOwningArena()
1529   #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
1530         && GetOwningArena() != nullptr
1531   #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
1532     ) {
1533       InternalSwap(&from);
1534     } else {
1535       CopyFrom(from);
1536     }
1537     return *this;
1538   }
1539
default_instance()1540   static const UpdateContextRequest& default_instance() {
1541     return *internal_default_instance();
1542   }
internal_default_instance()1543   static inline const UpdateContextRequest* internal_default_instance() {
1544     return reinterpret_cast<const UpdateContextRequest*>(
1545                &_UpdateContextRequest_default_instance_);
1546   }
  // Index of this message within the generated file's message table; must
  // match the ordering emitted into the .cc.
1547   static constexpr int kIndexInFileMessages =
1548     7;
1549
swap(UpdateContextRequest & a,UpdateContextRequest & b)1550   friend void swap(UpdateContextRequest& a, UpdateContextRequest& b) {
1551     a.Swap(&b);
1552   }
  // O(1) pointer swap when both messages share an owning arena; otherwise
  // falls back to GenericSwap, which may copy through a temporary.
Swap(UpdateContextRequest * other)1553   inline void Swap(UpdateContextRequest* other) {
1554     if (other == this) return;
1555   #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
1556     if (GetOwningArena() != nullptr &&
1557         GetOwningArena() == other->GetOwningArena()) {
1558    #else  // PROTOBUF_FORCE_COPY_IN_SWAP
1559     if (GetOwningArena() == other->GetOwningArena()) {
1560   #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
1561       InternalSwap(other);
1562     } else {
1563       ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
1564     }
1565   }
  // Caller must guarantee both messages are on the same arena (DCHECK'd).
1566   void UnsafeArenaSwap(UpdateContextRequest* other) {
1567     if (other == this) return;
1568     GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
1569     InternalSwap(other);
1570   }
1571
1572   // implements Message ----------------------------------------------
1573
1574   UpdateContextRequest* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
1575     return CreateMaybeMessage<UpdateContextRequest>(arena);
1576   }
1577   UpdateContextRequest* New() const {
1578     return New(nullptr);
1579   }
1580   void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
1581   void CopyFrom(const UpdateContextRequest& from);
1582   void MergeFrom(const UpdateContextRequest& from);
1583   PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
1584   bool IsInitialized() const final;
1585
1586   size_t ByteSizeLong() const final;
1587   const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
1588   ::uint8_t* _InternalSerialize(
1589       ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
1590   int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
1591
1592   private:
1593   void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
1594   void SharedDtor();
1595   void SetCachedSize(int size) const;
1596   void InternalSwap(UpdateContextRequest* other);
1597
1598   private:
1599   friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
1600   static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
1601     return "tensorflow.eager.UpdateContextRequest";
1602   }
1603   protected:
1604   explicit UpdateContextRequest(::PROTOBUF_NAMESPACE_ID::Arena* arena,
1605                        bool is_message_owned = false);
1606   public:
1607
1608   std::string GetTypeName() const final;
1609
1610   // nested types ----------------------------------------------------
1611
1612   // accessors -------------------------------------------------------
1613
  // Field numbers as declared in eager_service.proto.
1614   enum : int {
1615     kClusterDeviceAttributesFieldNumber = 2,
1616     kServerDefFieldNumber = 1,
1617     kContextIdFieldNumber = 3,
1618     kContextViewIdFieldNumber = 4,
1619   };
1620   // repeated .tensorflow.DeviceAttributes cluster_device_attributes = 2;
1621   int cluster_device_attributes_size() const;
1622   private:
1623   int _internal_cluster_device_attributes_size() const;
1624   public:
1625   void clear_cluster_device_attributes();
1626   ::tensorflow::DeviceAttributes* mutable_cluster_device_attributes(int index);
1627   ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::DeviceAttributes >*
1628       mutable_cluster_device_attributes();
1629   private:
1630   const ::tensorflow::DeviceAttributes& _internal_cluster_device_attributes(int index) const;
1631   ::tensorflow::DeviceAttributes* _internal_add_cluster_device_attributes();
1632   public:
1633   const ::tensorflow::DeviceAttributes& cluster_device_attributes(int index) const;
1634   ::tensorflow::DeviceAttributes* add_cluster_device_attributes();
1635   const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::DeviceAttributes >&
1636       cluster_device_attributes() const;
1637
1638   // .tensorflow.ServerDef server_def = 1;
1639   bool has_server_def() const;
1640   private:
1641   bool _internal_has_server_def() const;
1642   public:
1643   void clear_server_def();
1644   const ::tensorflow::ServerDef& server_def() const;
1645   PROTOBUF_NODISCARD ::tensorflow::ServerDef* release_server_def();
1646   ::tensorflow::ServerDef* mutable_server_def();
  // Takes ownership of 'server_def' (arena rules apply).
1647   void set_allocated_server_def(::tensorflow::ServerDef* server_def);
1648   private:
1649   const ::tensorflow::ServerDef& _internal_server_def() const;
1650   ::tensorflow::ServerDef* _internal_mutable_server_def();
1651   public:
1652   void unsafe_arena_set_allocated_server_def(
1653       ::tensorflow::ServerDef* server_def);
1654   ::tensorflow::ServerDef* unsafe_arena_release_server_def();
1655
1656   // fixed64 context_id = 3;
1657   void clear_context_id();
1658   ::uint64_t context_id() const;
1659   void set_context_id(::uint64_t value);
1660   private:
1661   ::uint64_t _internal_context_id() const;
1662   void _internal_set_context_id(::uint64_t value);
1663   public:
1664
1665   // fixed64 context_view_id = 4;
1666   void clear_context_view_id();
1667   ::uint64_t context_view_id() const;
1668   void set_context_view_id(::uint64_t value);
1669   private:
1670   ::uint64_t _internal_context_view_id() const;
1671   void _internal_set_context_view_id(::uint64_t value);
1672   public:
1673
1674   // @@protoc_insertion_point(class_scope:tensorflow.eager.UpdateContextRequest)
1675   private:
1676   class _Internal;
1677
1678   template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
1679   typedef void InternalArenaConstructable_;
1680   typedef void DestructorSkippable_;
  // Field storage; member order/layout must match the generated .cc exactly.
1681   struct Impl_ {
1682     ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::DeviceAttributes > cluster_device_attributes_;
1683     ::tensorflow::ServerDef* server_def_;
1684     ::uint64_t context_id_;
1685     ::uint64_t context_view_id_;
1686     mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
1687   };
1688   union { Impl_ _impl_; };
1689   friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
1690 };
1691 // -------------------------------------------------------------------
1692
// Protoc-generated lite message for tensorflow.eager.UpdateContextResponse:
// carries the (possibly updated) list of device attributes back to the caller
// after an UpdateContext RPC.
// NOTE(review): generated code — declarations and Impl_ layout must stay in
// sync with the generated .cc; change the .proto instead of editing here.
1693 class UpdateContextResponse final :
1694     public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.UpdateContextResponse) */ {
1695  public:
UpdateContextResponse()1696   inline UpdateContextResponse() : UpdateContextResponse(nullptr) {}
1697   ~UpdateContextResponse() override;
1698   explicit PROTOBUF_CONSTEXPR UpdateContextResponse(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
1699
1700   UpdateContextResponse(const UpdateContextResponse& from);
UpdateContextResponse(UpdateContextResponse && from)1701   UpdateContextResponse(UpdateContextResponse&& from) noexcept
1702     : UpdateContextResponse() {
1703     *this = ::std::move(from);
1704   }
1705
1706   inline UpdateContextResponse& operator=(const UpdateContextResponse& from) {
1707     if (this == &from) return *this;
1708     CopyFrom(from);
1709     return *this;
1710   }
  // Move-assign: swap only when both messages share an owning arena,
  // otherwise fall back to a deep copy.
1711   inline UpdateContextResponse& operator=(UpdateContextResponse&& from) noexcept {
1712     if (this == &from) return *this;
1713     if (GetOwningArena() == from.GetOwningArena()
1714   #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
1715         && GetOwningArena() != nullptr
1716   #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
1717     ) {
1718       InternalSwap(&from);
1719     } else {
1720       CopyFrom(from);
1721     }
1722     return *this;
1723   }
1724
default_instance()1725   static const UpdateContextResponse& default_instance() {
1726     return *internal_default_instance();
1727   }
internal_default_instance()1728   static inline const UpdateContextResponse* internal_default_instance() {
1729     return reinterpret_cast<const UpdateContextResponse*>(
1730                &_UpdateContextResponse_default_instance_);
1731   }
  // Index within the generated file's message table (matches the .cc).
1732   static constexpr int kIndexInFileMessages =
1733     8;
1734
swap(UpdateContextResponse & a,UpdateContextResponse & b)1735   friend void swap(UpdateContextResponse& a, UpdateContextResponse& b) {
1736     a.Swap(&b);
1737   }
  // O(1) swap when arenas match; GenericSwap (may copy) otherwise.
Swap(UpdateContextResponse * other)1738   inline void Swap(UpdateContextResponse* other) {
1739     if (other == this) return;
1740   #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
1741     if (GetOwningArena() != nullptr &&
1742         GetOwningArena() == other->GetOwningArena()) {
1743    #else  // PROTOBUF_FORCE_COPY_IN_SWAP
1744     if (GetOwningArena() == other->GetOwningArena()) {
1745   #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
1746       InternalSwap(other);
1747     } else {
1748       ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
1749     }
1750   }
  // Caller must guarantee both messages are on the same arena (DCHECK'd).
1751   void UnsafeArenaSwap(UpdateContextResponse* other) {
1752     if (other == this) return;
1753     GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
1754     InternalSwap(other);
1755   }
1756
1757   // implements Message ----------------------------------------------
1758
1759   UpdateContextResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
1760     return CreateMaybeMessage<UpdateContextResponse>(arena);
1761   }
1762   UpdateContextResponse* New() const {
1763     return New(nullptr);
1764   }
1765   void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
1766   void CopyFrom(const UpdateContextResponse& from);
1767   void MergeFrom(const UpdateContextResponse& from);
1768   PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
1769   bool IsInitialized() const final;
1770
1771   size_t ByteSizeLong() const final;
1772   const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
1773   ::uint8_t* _InternalSerialize(
1774       ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
1775   int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
1776
1777   private:
1778   void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
1779   void SharedDtor();
1780   void SetCachedSize(int size) const;
1781   void InternalSwap(UpdateContextResponse* other);
1782
1783   private:
1784   friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
1785   static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
1786     return "tensorflow.eager.UpdateContextResponse";
1787   }
1788   protected:
1789   explicit UpdateContextResponse(::PROTOBUF_NAMESPACE_ID::Arena* arena,
1790                        bool is_message_owned = false);
1791   public:
1792
1793   std::string GetTypeName() const final;
1794
1795   // nested types ----------------------------------------------------
1796
1797   // accessors -------------------------------------------------------
1798
  // Field numbers as declared in eager_service.proto.
1799   enum : int {
1800     kDeviceAttributesFieldNumber = 1,
1801   };
1802   // repeated .tensorflow.DeviceAttributes device_attributes = 1;
1803   int device_attributes_size() const;
1804   private:
1805   int _internal_device_attributes_size() const;
1806   public:
1807   void clear_device_attributes();
1808   ::tensorflow::DeviceAttributes* mutable_device_attributes(int index);
1809   ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::DeviceAttributes >*
1810       mutable_device_attributes();
1811   private:
1812   const ::tensorflow::DeviceAttributes& _internal_device_attributes(int index) const;
1813   ::tensorflow::DeviceAttributes* _internal_add_device_attributes();
1814   public:
1815   const ::tensorflow::DeviceAttributes& device_attributes(int index) const;
1816   ::tensorflow::DeviceAttributes* add_device_attributes();
1817   const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::DeviceAttributes >&
1818       device_attributes() const;
1819
1820   // @@protoc_insertion_point(class_scope:tensorflow.eager.UpdateContextResponse)
1821   private:
1822   class _Internal;
1823
1824   template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
1825   typedef void InternalArenaConstructable_;
1826   typedef void DestructorSkippable_;
  // Field storage; member order/layout must match the generated .cc exactly.
1827   struct Impl_ {
1828     ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::DeviceAttributes > device_attributes_;
1829     mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
1830   };
1831   union { Impl_ _impl_; };
1832   friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
1833 };
1834 // -------------------------------------------------------------------
1835
// Protoc-generated lite message for tensorflow.eager.EnqueueRequest:
// a batch of QueueItem operations to enqueue on the remote eager context
// identified by context_id.
// NOTE(review): generated code — declarations and Impl_ layout must stay in
// sync with the generated .cc; change the .proto instead of editing here.
1836 class EnqueueRequest final :
1837     public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.EnqueueRequest) */ {
1838  public:
EnqueueRequest()1839   inline EnqueueRequest() : EnqueueRequest(nullptr) {}
1840   ~EnqueueRequest() override;
1841   explicit PROTOBUF_CONSTEXPR EnqueueRequest(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
1842
1843   EnqueueRequest(const EnqueueRequest& from);
EnqueueRequest(EnqueueRequest && from)1844   EnqueueRequest(EnqueueRequest&& from) noexcept
1845     : EnqueueRequest() {
1846     *this = ::std::move(from);
1847   }
1848
1849   inline EnqueueRequest& operator=(const EnqueueRequest& from) {
1850     if (this == &from) return *this;
1851     CopyFrom(from);
1852     return *this;
1853   }
  // Move-assign: swap only when both messages share an owning arena,
  // otherwise fall back to a deep copy.
1854   inline EnqueueRequest& operator=(EnqueueRequest&& from) noexcept {
1855     if (this == &from) return *this;
1856     if (GetOwningArena() == from.GetOwningArena()
1857   #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
1858         && GetOwningArena() != nullptr
1859   #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
1860     ) {
1861       InternalSwap(&from);
1862     } else {
1863       CopyFrom(from);
1864     }
1865     return *this;
1866   }
1867
default_instance()1868   static const EnqueueRequest& default_instance() {
1869     return *internal_default_instance();
1870   }
internal_default_instance()1871   static inline const EnqueueRequest* internal_default_instance() {
1872     return reinterpret_cast<const EnqueueRequest*>(
1873                &_EnqueueRequest_default_instance_);
1874   }
  // Index within the generated file's message table (matches the .cc).
1875   static constexpr int kIndexInFileMessages =
1876     9;
1877
swap(EnqueueRequest & a,EnqueueRequest & b)1878   friend void swap(EnqueueRequest& a, EnqueueRequest& b) {
1879     a.Swap(&b);
1880   }
  // O(1) swap when arenas match; GenericSwap (may copy) otherwise.
Swap(EnqueueRequest * other)1881   inline void Swap(EnqueueRequest* other) {
1882     if (other == this) return;
1883   #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
1884     if (GetOwningArena() != nullptr &&
1885         GetOwningArena() == other->GetOwningArena()) {
1886    #else  // PROTOBUF_FORCE_COPY_IN_SWAP
1887     if (GetOwningArena() == other->GetOwningArena()) {
1888   #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
1889       InternalSwap(other);
1890     } else {
1891       ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
1892     }
1893   }
  // Caller must guarantee both messages are on the same arena (DCHECK'd).
1894   void UnsafeArenaSwap(EnqueueRequest* other) {
1895     if (other == this) return;
1896     GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
1897     InternalSwap(other);
1898   }
1899
1900   // implements Message ----------------------------------------------
1901
1902   EnqueueRequest* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
1903     return CreateMaybeMessage<EnqueueRequest>(arena);
1904   }
1905   EnqueueRequest* New() const {
1906     return New(nullptr);
1907   }
1908   void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
1909   void CopyFrom(const EnqueueRequest& from);
1910   void MergeFrom(const EnqueueRequest& from);
1911   PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
1912   bool IsInitialized() const final;
1913
1914   size_t ByteSizeLong() const final;
1915   const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
1916   ::uint8_t* _InternalSerialize(
1917       ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
1918   int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
1919
1920   private:
1921   void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
1922   void SharedDtor();
1923   void SetCachedSize(int size) const;
1924   void InternalSwap(EnqueueRequest* other);
1925
1926   private:
1927   friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
1928   static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
1929     return "tensorflow.eager.EnqueueRequest";
1930   }
1931   protected:
1932   explicit EnqueueRequest(::PROTOBUF_NAMESPACE_ID::Arena* arena,
1933                        bool is_message_owned = false);
1934   public:
1935
1936   std::string GetTypeName() const final;
1937
1938   // nested types ----------------------------------------------------
1939
1940   // accessors -------------------------------------------------------
1941
  // Field numbers as declared in eager_service.proto.
1942   enum : int {
1943     kQueueFieldNumber = 3,
1944     kContextIdFieldNumber = 1,
1945   };
1946   // repeated .tensorflow.eager.QueueItem queue = 3;
1947   int queue_size() const;
1948   private:
1949   int _internal_queue_size() const;
1950   public:
1951   void clear_queue();
1952   ::tensorflow::eager::QueueItem* mutable_queue(int index);
1953   ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::eager::QueueItem >*
1954       mutable_queue();
1955   private:
1956   const ::tensorflow::eager::QueueItem& _internal_queue(int index) const;
1957   ::tensorflow::eager::QueueItem* _internal_add_queue();
1958   public:
1959   const ::tensorflow::eager::QueueItem& queue(int index) const;
1960   ::tensorflow::eager::QueueItem* add_queue();
1961   const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::eager::QueueItem >&
1962       queue() const;
1963
1964   // fixed64 context_id = 1;
1965   void clear_context_id();
1966   ::uint64_t context_id() const;
1967   void set_context_id(::uint64_t value);
1968   private:
1969   ::uint64_t _internal_context_id() const;
1970   void _internal_set_context_id(::uint64_t value);
1971   public:
1972
1973   // @@protoc_insertion_point(class_scope:tensorflow.eager.EnqueueRequest)
1974   private:
1975   class _Internal;
1976
1977   template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
1978   typedef void InternalArenaConstructable_;
1979   typedef void DestructorSkippable_;
  // Field storage; member order/layout must match the generated .cc exactly.
1980   struct Impl_ {
1981     ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::eager::QueueItem > queue_;
1982     ::uint64_t context_id_;
1983     mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
1984   };
1985   union { Impl_ _impl_; };
1986   friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
1987 };
1988 // -------------------------------------------------------------------
1989
// Protoc-generated lite message for tensorflow.eager.EnqueueResponse:
// per-item results (queue_response) for a previously sent EnqueueRequest.
// NOTE(review): generated code — declarations and Impl_ layout must stay in
// sync with the generated .cc; change the .proto instead of editing here.
1990 class EnqueueResponse final :
1991     public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.EnqueueResponse) */ {
1992  public:
EnqueueResponse()1993   inline EnqueueResponse() : EnqueueResponse(nullptr) {}
1994   ~EnqueueResponse() override;
1995   explicit PROTOBUF_CONSTEXPR EnqueueResponse(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
1996
1997   EnqueueResponse(const EnqueueResponse& from);
EnqueueResponse(EnqueueResponse && from)1998   EnqueueResponse(EnqueueResponse&& from) noexcept
1999     : EnqueueResponse() {
2000     *this = ::std::move(from);
2001   }
2002
2003   inline EnqueueResponse& operator=(const EnqueueResponse& from) {
2004     if (this == &from) return *this;
2005     CopyFrom(from);
2006     return *this;
2007   }
  // Move-assign: swap only when both messages share an owning arena,
  // otherwise fall back to a deep copy.
2008   inline EnqueueResponse& operator=(EnqueueResponse&& from) noexcept {
2009     if (this == &from) return *this;
2010     if (GetOwningArena() == from.GetOwningArena()
2011   #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
2012         && GetOwningArena() != nullptr
2013   #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
2014     ) {
2015       InternalSwap(&from);
2016     } else {
2017       CopyFrom(from);
2018     }
2019     return *this;
2020   }
2021
default_instance()2022   static const EnqueueResponse& default_instance() {
2023     return *internal_default_instance();
2024   }
internal_default_instance()2025   static inline const EnqueueResponse* internal_default_instance() {
2026     return reinterpret_cast<const EnqueueResponse*>(
2027                &_EnqueueResponse_default_instance_);
2028   }
  // Index within the generated file's message table (matches the .cc).
2029   static constexpr int kIndexInFileMessages =
2030     10;
2031
swap(EnqueueResponse & a,EnqueueResponse & b)2032   friend void swap(EnqueueResponse& a, EnqueueResponse& b) {
2033     a.Swap(&b);
2034   }
  // O(1) swap when arenas match; GenericSwap (may copy) otherwise.
Swap(EnqueueResponse * other)2035   inline void Swap(EnqueueResponse* other) {
2036     if (other == this) return;
2037   #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
2038     if (GetOwningArena() != nullptr &&
2039         GetOwningArena() == other->GetOwningArena()) {
2040    #else  // PROTOBUF_FORCE_COPY_IN_SWAP
2041     if (GetOwningArena() == other->GetOwningArena()) {
2042   #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
2043       InternalSwap(other);
2044     } else {
2045       ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
2046     }
2047   }
  // Caller must guarantee both messages are on the same arena (DCHECK'd).
2048   void UnsafeArenaSwap(EnqueueResponse* other) {
2049     if (other == this) return;
2050     GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
2051     InternalSwap(other);
2052   }
2053
2054   // implements Message ----------------------------------------------
2055
2056   EnqueueResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
2057     return CreateMaybeMessage<EnqueueResponse>(arena);
2058   }
2059   EnqueueResponse* New() const {
2060     return New(nullptr);
2061   }
2062   void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
2063   void CopyFrom(const EnqueueResponse& from);
2064   void MergeFrom(const EnqueueResponse& from);
2065   PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
2066   bool IsInitialized() const final;
2067
2068   size_t ByteSizeLong() const final;
2069   const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
2070   ::uint8_t* _InternalSerialize(
2071       ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
2072   int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
2073
2074   private:
2075   void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
2076   void SharedDtor();
2077   void SetCachedSize(int size) const;
2078   void InternalSwap(EnqueueResponse* other);
2079
2080   private:
2081   friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
2082   static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
2083     return "tensorflow.eager.EnqueueResponse";
2084   }
2085   protected:
2086   explicit EnqueueResponse(::PROTOBUF_NAMESPACE_ID::Arena* arena,
2087                        bool is_message_owned = false);
2088   public:
2089
2090   std::string GetTypeName() const final;
2091
2092   // nested types ----------------------------------------------------
2093
2094   // accessors -------------------------------------------------------
2095
  // Field numbers as declared in eager_service.proto.
2096   enum : int {
2097     kQueueResponseFieldNumber = 1,
2098   };
2099   // repeated .tensorflow.eager.QueueResponse queue_response = 1;
2100   int queue_response_size() const;
2101   private:
2102   int _internal_queue_response_size() const;
2103   public:
2104   void clear_queue_response();
2105   ::tensorflow::eager::QueueResponse* mutable_queue_response(int index);
2106   ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::eager::QueueResponse >*
2107       mutable_queue_response();
2108   private:
2109   const ::tensorflow::eager::QueueResponse& _internal_queue_response(int index) const;
2110   ::tensorflow::eager::QueueResponse* _internal_add_queue_response();
2111   public:
2112   const ::tensorflow::eager::QueueResponse& queue_response(int index) const;
2113   ::tensorflow::eager::QueueResponse* add_queue_response();
2114   const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::eager::QueueResponse >&
2115       queue_response() const;
2116
2117   // @@protoc_insertion_point(class_scope:tensorflow.eager.EnqueueResponse)
2118   private:
2119   class _Internal;
2120
2121   template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
2122   typedef void InternalArenaConstructable_;
2123   typedef void DestructorSkippable_;
  // Field storage; member order/layout must match the generated .cc exactly.
2124   struct Impl_ {
2125     ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::eager::QueueResponse > queue_response_;
2126     mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
2127   };
2128   union { Impl_ _impl_; };
2129   friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
2130 };
2131 // -------------------------------------------------------------------
2132
// Protoc-generated lite message for tensorflow.eager.WaitQueueDoneRequest:
// asks the remote worker to block until the given op_ids on the context
// identified by context_id have finished executing.
// NOTE(review): generated code — declarations and Impl_ layout must stay in
// sync with the generated .cc; change the .proto instead of editing here.
2133 class WaitQueueDoneRequest final :
2134     public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.WaitQueueDoneRequest) */ {
2135  public:
WaitQueueDoneRequest()2136   inline WaitQueueDoneRequest() : WaitQueueDoneRequest(nullptr) {}
2137   ~WaitQueueDoneRequest() override;
2138   explicit PROTOBUF_CONSTEXPR WaitQueueDoneRequest(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
2139
2140   WaitQueueDoneRequest(const WaitQueueDoneRequest& from);
WaitQueueDoneRequest(WaitQueueDoneRequest && from)2141   WaitQueueDoneRequest(WaitQueueDoneRequest&& from) noexcept
2142     : WaitQueueDoneRequest() {
2143     *this = ::std::move(from);
2144   }
2145
2146   inline WaitQueueDoneRequest& operator=(const WaitQueueDoneRequest& from) {
2147     if (this == &from) return *this;
2148     CopyFrom(from);
2149     return *this;
2150   }
  // Move-assign: swap only when both messages share an owning arena,
  // otherwise fall back to a deep copy.
2151   inline WaitQueueDoneRequest& operator=(WaitQueueDoneRequest&& from) noexcept {
2152     if (this == &from) return *this;
2153     if (GetOwningArena() == from.GetOwningArena()
2154   #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
2155         && GetOwningArena() != nullptr
2156   #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
2157     ) {
2158       InternalSwap(&from);
2159     } else {
2160       CopyFrom(from);
2161     }
2162     return *this;
2163   }
2164
default_instance()2165   static const WaitQueueDoneRequest& default_instance() {
2166     return *internal_default_instance();
2167   }
internal_default_instance()2168   static inline const WaitQueueDoneRequest* internal_default_instance() {
2169     return reinterpret_cast<const WaitQueueDoneRequest*>(
2170                &_WaitQueueDoneRequest_default_instance_);
2171   }
  // Index within the generated file's message table (matches the .cc).
2172   static constexpr int kIndexInFileMessages =
2173     11;
2174
swap(WaitQueueDoneRequest & a,WaitQueueDoneRequest & b)2175   friend void swap(WaitQueueDoneRequest& a, WaitQueueDoneRequest& b) {
2176     a.Swap(&b);
2177   }
  // O(1) swap when arenas match; GenericSwap (may copy) otherwise.
Swap(WaitQueueDoneRequest * other)2178   inline void Swap(WaitQueueDoneRequest* other) {
2179     if (other == this) return;
2180   #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
2181     if (GetOwningArena() != nullptr &&
2182         GetOwningArena() == other->GetOwningArena()) {
2183    #else  // PROTOBUF_FORCE_COPY_IN_SWAP
2184     if (GetOwningArena() == other->GetOwningArena()) {
2185   #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
2186       InternalSwap(other);
2187     } else {
2188       ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
2189     }
2190   }
  // Caller must guarantee both messages are on the same arena (DCHECK'd).
2191   void UnsafeArenaSwap(WaitQueueDoneRequest* other) {
2192     if (other == this) return;
2193     GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
2194     InternalSwap(other);
2195   }
2196
2197   // implements Message ----------------------------------------------
2198
2199   WaitQueueDoneRequest* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
2200     return CreateMaybeMessage<WaitQueueDoneRequest>(arena);
2201   }
2202   WaitQueueDoneRequest* New() const {
2203     return New(nullptr);
2204   }
2205   void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
2206   void CopyFrom(const WaitQueueDoneRequest& from);
2207   void MergeFrom(const WaitQueueDoneRequest& from);
2208   PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
2209   bool IsInitialized() const final;
2210
2211   size_t ByteSizeLong() const final;
2212   const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
2213   ::uint8_t* _InternalSerialize(
2214       ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
2215   int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
2216
2217   private:
2218   void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
2219   void SharedDtor();
2220   void SetCachedSize(int size) const;
2221   void InternalSwap(WaitQueueDoneRequest* other);
2222
2223   private:
2224   friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
2225   static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
2226     return "tensorflow.eager.WaitQueueDoneRequest";
2227   }
2228   protected:
2229   explicit WaitQueueDoneRequest(::PROTOBUF_NAMESPACE_ID::Arena* arena,
2230                        bool is_message_owned = false);
2231   public:
2232
2233   std::string GetTypeName() const final;
2234
2235   // nested types ----------------------------------------------------
2236
2237   // accessors -------------------------------------------------------
2238
  // Field numbers as declared in eager_service.proto.
2239   enum : int {
2240     kOpIdFieldNumber = 2,
2241     kContextIdFieldNumber = 1,
2242   };
2243   // repeated int64 op_id = 2;
2244   int op_id_size() const;
2245   private:
2246   int _internal_op_id_size() const;
2247   public:
2248   void clear_op_id();
2249   private:
2250   ::int64_t _internal_op_id(int index) const;
2251   const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
2252       _internal_op_id() const;
2253   void _internal_add_op_id(::int64_t value);
2254   ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
2255       _internal_mutable_op_id();
2256   public:
2257   ::int64_t op_id(int index) const;
2258   void set_op_id(int index, ::int64_t value);
2259   void add_op_id(::int64_t value);
2260   const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
2261       op_id() const;
2262   ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
2263       mutable_op_id();
2264
2265   // fixed64 context_id = 1;
2266   void clear_context_id();
2267   ::uint64_t context_id() const;
2268   void set_context_id(::uint64_t value);
2269   private:
2270   ::uint64_t _internal_context_id() const;
2271   void _internal_set_context_id(::uint64_t value);
2272   public:
2273
2274   // @@protoc_insertion_point(class_scope:tensorflow.eager.WaitQueueDoneRequest)
2275   private:
2276   class _Internal;
2277
2278   template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
2279   typedef void InternalArenaConstructable_;
2280   typedef void DestructorSkippable_;
  // Field storage; member order/layout must match the generated .cc exactly.
  // _op_id_cached_byte_size_ caches the packed-varint byte size of op_id
  // between ByteSizeLong() and serialization.
2281   struct Impl_ {
2282     ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t > op_id_;
2283     mutable std::atomic<int> _op_id_cached_byte_size_;
2284     ::uint64_t context_id_;
2285     mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
2286   };
2287   union { Impl_ _impl_; };
2288   friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
2289 };
2290 // -------------------------------------------------------------------
2291
// Generated MessageLite class for the tensorflow.eager.WaitQueueDoneResponse
// proto message — the response of the WaitQueueDone RPC.  It declares no
// fields: everything below is the boilerplate the protobuf runtime requires
// (copy/move, arena support, parse/serialize hooks).  DO NOT EDIT by hand;
// regenerate from tensorflow/core/protobuf/eager_service.proto.
// NOTE(review): the stray "NNNN" tokens and duplicated declarator fragments at
// the start of some lines look like line numbers fused in by a source
// extraction tool; they are not valid C++ and are absent from the pristine
// generated header — confirm and regenerate rather than hand-fix.
2292 class WaitQueueDoneResponse final :
2293 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.WaitQueueDoneResponse) */ {
2294 public:
WaitQueueDoneResponse()2295 inline WaitQueueDoneResponse() : WaitQueueDoneResponse(nullptr) {}
2296 ~WaitQueueDoneResponse() override;
2297 explicit PROTOBUF_CONSTEXPR WaitQueueDoneResponse(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
2298
2299 WaitQueueDoneResponse(const WaitQueueDoneResponse& from);
WaitQueueDoneResponse(WaitQueueDoneResponse && from)2300 WaitQueueDoneResponse(WaitQueueDoneResponse&& from) noexcept
2301 : WaitQueueDoneResponse() {
2302 *this = ::std::move(from);
2303 }
2304
2305 inline WaitQueueDoneResponse& operator=(const WaitQueueDoneResponse& from) {
2306 if (this == &from) return *this;
2307 CopyFrom(from);
2308 return *this;
2309 }
// Arena-aware move assignment: steal contents via swap when both objects live
// on the same arena, otherwise fall back to a deep copy.
2310 inline WaitQueueDoneResponse& operator=(WaitQueueDoneResponse&& from) noexcept {
2311 if (this == &from) return *this;
2312 if (GetOwningArena() == from.GetOwningArena()
2313 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
2314 && GetOwningArena() != nullptr
2315 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
2316 ) {
2317 InternalSwap(&from);
2318 } else {
2319 CopyFrom(from);
2320 }
2321 return *this;
2322 }
2323
default_instance()2324 static const WaitQueueDoneResponse& default_instance() {
2325 return *internal_default_instance();
2326 }
internal_default_instance()2327 static inline const WaitQueueDoneResponse* internal_default_instance() {
2328 return reinterpret_cast<const WaitQueueDoneResponse*>(
2329 &_WaitQueueDoneResponse_default_instance_);
2330 }
2331 static constexpr int kIndexInFileMessages =
2332 12;
2333
// Swap: cheap pointer swap when both objects share an arena; otherwise a
// copy-based GenericSwap.
swap(WaitQueueDoneResponse & a,WaitQueueDoneResponse & b)2334 friend void swap(WaitQueueDoneResponse& a, WaitQueueDoneResponse& b) {
2335 a.Swap(&b);
2336 }
Swap(WaitQueueDoneResponse * other)2337 inline void Swap(WaitQueueDoneResponse* other) {
2338 if (other == this) return;
2339 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
2340 if (GetOwningArena() != nullptr &&
2341 GetOwningArena() == other->GetOwningArena()) {
2342 #else // PROTOBUF_FORCE_COPY_IN_SWAP
2343 if (GetOwningArena() == other->GetOwningArena()) {
2344 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
2345 InternalSwap(other);
2346 } else {
2347 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
2348 }
2349 }
// Caller must guarantee both objects are on the same arena (DCHECK'd).
2350 void UnsafeArenaSwap(WaitQueueDoneResponse* other) {
2351 if (other == this) return;
2352 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
2353 InternalSwap(other);
2354 }
2355
2356 // implements Message ----------------------------------------------
2357
2358 WaitQueueDoneResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
2359 return CreateMaybeMessage<WaitQueueDoneResponse>(arena);
2360 }
2361 WaitQueueDoneResponse* New() const {
2362 return New(nullptr);
2363 }
2364 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
2365 void CopyFrom(const WaitQueueDoneResponse& from);
2366 void MergeFrom(const WaitQueueDoneResponse& from);
2367 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
2368 bool IsInitialized() const final;
2369
2370 size_t ByteSizeLong() const final;
2371 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
2372 ::uint8_t* _InternalSerialize(
2373 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
2374 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
2375
2376 private:
2377 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
2378 void SharedDtor();
2379 void SetCachedSize(int size) const;
2380 void InternalSwap(WaitQueueDoneResponse* other);
2381
2382 private:
2383 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
2384 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
2385 return "tensorflow.eager.WaitQueueDoneResponse";
2386 }
2387 protected:
2388 explicit WaitQueueDoneResponse(::PROTOBUF_NAMESPACE_ID::Arena* arena,
2389 bool is_message_owned = false);
2390 public:
2391
2392 std::string GetTypeName() const final;
2393
2394 // nested types ----------------------------------------------------
2395
2396 // accessors -------------------------------------------------------
2397
2398 // @@protoc_insertion_point(class_scope:tensorflow.eager.WaitQueueDoneResponse)
2399 private:
2400 class _Internal;
2401
2402 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
2403 typedef void InternalArenaConstructable_;
2404 typedef void DestructorSkippable_;
// Internal storage: only the cached serialized-size slot (no proto fields).
2405 struct Impl_ {
2406 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
2407 };
2408 union { Impl_ _impl_; };
2409 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
2410 };
2411 // -------------------------------------------------------------------
2412
// Generated MessageLite class for tensorflow.eager.RunComponentFunctionRequest
// — the request of the RunComponentFunction RPC.  Fields (see accessors
// below): fixed64 context_id = 1, .tensorflow.eager.Operation operation = 2,
// repeated int32 output_num = 3.  DO NOT EDIT by hand; regenerate from
// tensorflow/core/protobuf/eager_service.proto.
// NOTE(review): stray "NNNN" tokens / duplicated declarator fragments at some
// line starts look like extraction-tool artifacts, not valid C++ — confirm
// and regenerate rather than hand-fix.
2413 class RunComponentFunctionRequest final :
2414 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.RunComponentFunctionRequest) */ {
2415 public:
RunComponentFunctionRequest()2416 inline RunComponentFunctionRequest() : RunComponentFunctionRequest(nullptr) {}
2417 ~RunComponentFunctionRequest() override;
2418 explicit PROTOBUF_CONSTEXPR RunComponentFunctionRequest(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
2419
2420 RunComponentFunctionRequest(const RunComponentFunctionRequest& from);
RunComponentFunctionRequest(RunComponentFunctionRequest && from)2421 RunComponentFunctionRequest(RunComponentFunctionRequest&& from) noexcept
2422 : RunComponentFunctionRequest() {
2423 *this = ::std::move(from);
2424 }
2425
2426 inline RunComponentFunctionRequest& operator=(const RunComponentFunctionRequest& from) {
2427 if (this == &from) return *this;
2428 CopyFrom(from);
2429 return *this;
2430 }
// Arena-aware move assignment: swap when on the same arena, else deep copy.
2431 inline RunComponentFunctionRequest& operator=(RunComponentFunctionRequest&& from) noexcept {
2432 if (this == &from) return *this;
2433 if (GetOwningArena() == from.GetOwningArena()
2434 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
2435 && GetOwningArena() != nullptr
2436 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
2437 ) {
2438 InternalSwap(&from);
2439 } else {
2440 CopyFrom(from);
2441 }
2442 return *this;
2443 }
2444
default_instance()2445 static const RunComponentFunctionRequest& default_instance() {
2446 return *internal_default_instance();
2447 }
internal_default_instance()2448 static inline const RunComponentFunctionRequest* internal_default_instance() {
2449 return reinterpret_cast<const RunComponentFunctionRequest*>(
2450 &_RunComponentFunctionRequest_default_instance_);
2451 }
2452 static constexpr int kIndexInFileMessages =
2453 13;
2454
// Swap: pointer swap within one arena, copy-based GenericSwap across arenas.
swap(RunComponentFunctionRequest & a,RunComponentFunctionRequest & b)2455 friend void swap(RunComponentFunctionRequest& a, RunComponentFunctionRequest& b) {
2456 a.Swap(&b);
2457 }
Swap(RunComponentFunctionRequest * other)2458 inline void Swap(RunComponentFunctionRequest* other) {
2459 if (other == this) return;
2460 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
2461 if (GetOwningArena() != nullptr &&
2462 GetOwningArena() == other->GetOwningArena()) {
2463 #else // PROTOBUF_FORCE_COPY_IN_SWAP
2464 if (GetOwningArena() == other->GetOwningArena()) {
2465 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
2466 InternalSwap(other);
2467 } else {
2468 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
2469 }
2470 }
// Caller must guarantee both objects are on the same arena (DCHECK'd).
2471 void UnsafeArenaSwap(RunComponentFunctionRequest* other) {
2472 if (other == this) return;
2473 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
2474 InternalSwap(other);
2475 }
2476
2477 // implements Message ----------------------------------------------
2478
2479 RunComponentFunctionRequest* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
2480 return CreateMaybeMessage<RunComponentFunctionRequest>(arena);
2481 }
2482 RunComponentFunctionRequest* New() const {
2483 return New(nullptr);
2484 }
2485 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
2486 void CopyFrom(const RunComponentFunctionRequest& from);
2487 void MergeFrom(const RunComponentFunctionRequest& from);
2488 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
2489 bool IsInitialized() const final;
2490
2491 size_t ByteSizeLong() const final;
2492 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
2493 ::uint8_t* _InternalSerialize(
2494 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
2495 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
2496
2497 private:
2498 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
2499 void SharedDtor();
2500 void SetCachedSize(int size) const;
2501 void InternalSwap(RunComponentFunctionRequest* other);
2502
2503 private:
2504 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
2505 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
2506 return "tensorflow.eager.RunComponentFunctionRequest";
2507 }
2508 protected:
2509 explicit RunComponentFunctionRequest(::PROTOBUF_NAMESPACE_ID::Arena* arena,
2510 bool is_message_owned = false);
2511 public:
2512
2513 std::string GetTypeName() const final;
2514
2515 // nested types ----------------------------------------------------
2516
2517 // accessors -------------------------------------------------------
2518
2519 enum : int {
2520 kOutputNumFieldNumber = 3,
2521 kOperationFieldNumber = 2,
2522 kContextIdFieldNumber = 1,
2523 };
2524 // repeated int32 output_num = 3;
2525 int output_num_size() const;
2526 private:
2527 int _internal_output_num_size() const;
2528 public:
2529 void clear_output_num();
2530 private:
2531 ::int32_t _internal_output_num(int index) const;
2532 const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >&
2533 _internal_output_num() const;
2534 void _internal_add_output_num(::int32_t value);
2535 ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >*
2536 _internal_mutable_output_num();
2537 public:
2538 ::int32_t output_num(int index) const;
2539 void set_output_num(int index, ::int32_t value);
2540 void add_output_num(::int32_t value);
2541 const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >&
2542 output_num() const;
2543 ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >*
2544 mutable_output_num();
2545
2546 // .tensorflow.eager.Operation operation = 2;
2547 bool has_operation() const;
2548 private:
2549 bool _internal_has_operation() const;
2550 public:
2551 void clear_operation();
2552 const ::tensorflow::eager::Operation& operation() const;
// release_operation() transfers ownership of the submessage to the caller.
2553 PROTOBUF_NODISCARD ::tensorflow::eager::Operation* release_operation();
2554 ::tensorflow::eager::Operation* mutable_operation();
2555 void set_allocated_operation(::tensorflow::eager::Operation* operation);
2556 private:
2557 const ::tensorflow::eager::Operation& _internal_operation() const;
2558 ::tensorflow::eager::Operation* _internal_mutable_operation();
2559 public:
// unsafe_arena_* variants skip arena ownership bookkeeping; caller must keep
// the usual protobuf arena invariants.
2560 void unsafe_arena_set_allocated_operation(
2561 ::tensorflow::eager::Operation* operation);
2562 ::tensorflow::eager::Operation* unsafe_arena_release_operation();
2563
2564 // fixed64 context_id = 1;
2565 void clear_context_id();
2566 ::uint64_t context_id() const;
2567 void set_context_id(::uint64_t value);
2568 private:
2569 ::uint64_t _internal_context_id() const;
2570 void _internal_set_context_id(::uint64_t value);
2571 public:
2572
2573 // @@protoc_insertion_point(class_scope:tensorflow.eager.RunComponentFunctionRequest)
2574 private:
2575 class _Internal;
2576
2577 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
2578 typedef void InternalArenaConstructable_;
2579 typedef void DestructorSkippable_;
// Internal storage: repeated output_num (with its cached byte size), the
// owned/arena-owned operation submessage pointer, context_id, and the cached
// serialized size.
2580 struct Impl_ {
2581 ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t > output_num_;
2582 mutable std::atomic<int> _output_num_cached_byte_size_;
2583 ::tensorflow::eager::Operation* operation_;
2584 ::uint64_t context_id_;
2585 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
2586 };
2587 union { Impl_ _impl_; };
2588 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
2589 };
2590 // -------------------------------------------------------------------
2591
// Generated MessageLite class for tensorflow.eager.RunComponentFunctionResponse
// — the response of the RunComponentFunction RPC.  Fields (see accessors
// below): repeated .tensorflow.TensorShapeProto shape = 1, repeated
// .tensorflow.TensorProto tensor = 2.  DO NOT EDIT by hand; regenerate from
// tensorflow/core/protobuf/eager_service.proto.
// NOTE(review): stray "NNNN" tokens / duplicated declarator fragments at some
// line starts look like extraction-tool artifacts, not valid C++ — confirm
// and regenerate rather than hand-fix.
2592 class RunComponentFunctionResponse final :
2593 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.RunComponentFunctionResponse) */ {
2594 public:
RunComponentFunctionResponse()2595 inline RunComponentFunctionResponse() : RunComponentFunctionResponse(nullptr) {}
2596 ~RunComponentFunctionResponse() override;
2597 explicit PROTOBUF_CONSTEXPR RunComponentFunctionResponse(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
2598
2599 RunComponentFunctionResponse(const RunComponentFunctionResponse& from);
RunComponentFunctionResponse(RunComponentFunctionResponse && from)2600 RunComponentFunctionResponse(RunComponentFunctionResponse&& from) noexcept
2601 : RunComponentFunctionResponse() {
2602 *this = ::std::move(from);
2603 }
2604
2605 inline RunComponentFunctionResponse& operator=(const RunComponentFunctionResponse& from) {
2606 if (this == &from) return *this;
2607 CopyFrom(from);
2608 return *this;
2609 }
// Arena-aware move assignment: swap when on the same arena, else deep copy.
2610 inline RunComponentFunctionResponse& operator=(RunComponentFunctionResponse&& from) noexcept {
2611 if (this == &from) return *this;
2612 if (GetOwningArena() == from.GetOwningArena()
2613 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
2614 && GetOwningArena() != nullptr
2615 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
2616 ) {
2617 InternalSwap(&from);
2618 } else {
2619 CopyFrom(from);
2620 }
2621 return *this;
2622 }
2623
default_instance()2624 static const RunComponentFunctionResponse& default_instance() {
2625 return *internal_default_instance();
2626 }
internal_default_instance()2627 static inline const RunComponentFunctionResponse* internal_default_instance() {
2628 return reinterpret_cast<const RunComponentFunctionResponse*>(
2629 &_RunComponentFunctionResponse_default_instance_);
2630 }
2631 static constexpr int kIndexInFileMessages =
2632 14;
2633
// Swap: pointer swap within one arena, copy-based GenericSwap across arenas.
swap(RunComponentFunctionResponse & a,RunComponentFunctionResponse & b)2634 friend void swap(RunComponentFunctionResponse& a, RunComponentFunctionResponse& b) {
2635 a.Swap(&b);
2636 }
Swap(RunComponentFunctionResponse * other)2637 inline void Swap(RunComponentFunctionResponse* other) {
2638 if (other == this) return;
2639 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
2640 if (GetOwningArena() != nullptr &&
2641 GetOwningArena() == other->GetOwningArena()) {
2642 #else // PROTOBUF_FORCE_COPY_IN_SWAP
2643 if (GetOwningArena() == other->GetOwningArena()) {
2644 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
2645 InternalSwap(other);
2646 } else {
2647 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
2648 }
2649 }
// Caller must guarantee both objects are on the same arena (DCHECK'd).
2650 void UnsafeArenaSwap(RunComponentFunctionResponse* other) {
2651 if (other == this) return;
2652 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
2653 InternalSwap(other);
2654 }
2655
2656 // implements Message ----------------------------------------------
2657
2658 RunComponentFunctionResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
2659 return CreateMaybeMessage<RunComponentFunctionResponse>(arena);
2660 }
2661 RunComponentFunctionResponse* New() const {
2662 return New(nullptr);
2663 }
2664 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
2665 void CopyFrom(const RunComponentFunctionResponse& from);
2666 void MergeFrom(const RunComponentFunctionResponse& from);
2667 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
2668 bool IsInitialized() const final;
2669
2670 size_t ByteSizeLong() const final;
2671 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
2672 ::uint8_t* _InternalSerialize(
2673 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
2674 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
2675
2676 private:
2677 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
2678 void SharedDtor();
2679 void SetCachedSize(int size) const;
2680 void InternalSwap(RunComponentFunctionResponse* other);
2681
2682 private:
2683 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
2684 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
2685 return "tensorflow.eager.RunComponentFunctionResponse";
2686 }
2687 protected:
2688 explicit RunComponentFunctionResponse(::PROTOBUF_NAMESPACE_ID::Arena* arena,
2689 bool is_message_owned = false);
2690 public:
2691
2692 std::string GetTypeName() const final;
2693
2694 // nested types ----------------------------------------------------
2695
2696 // accessors -------------------------------------------------------
2697
2698 enum : int {
2699 kShapeFieldNumber = 1,
2700 kTensorFieldNumber = 2,
2701 };
2702 // repeated .tensorflow.TensorShapeProto shape = 1;
2703 int shape_size() const;
2704 private:
2705 int _internal_shape_size() const;
2706 public:
2707 void clear_shape();
2708 ::tensorflow::TensorShapeProto* mutable_shape(int index);
2709 ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorShapeProto >*
2710 mutable_shape();
2711 private:
2712 const ::tensorflow::TensorShapeProto& _internal_shape(int index) const;
2713 ::tensorflow::TensorShapeProto* _internal_add_shape();
2714 public:
2715 const ::tensorflow::TensorShapeProto& shape(int index) const;
2716 ::tensorflow::TensorShapeProto* add_shape();
2717 const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorShapeProto >&
2718 shape() const;
2719
2720 // repeated .tensorflow.TensorProto tensor = 2;
2721 int tensor_size() const;
2722 private:
2723 int _internal_tensor_size() const;
2724 public:
2725 void clear_tensor();
2726 ::tensorflow::TensorProto* mutable_tensor(int index);
2727 ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorProto >*
2728 mutable_tensor();
2729 private:
2730 const ::tensorflow::TensorProto& _internal_tensor(int index) const;
2731 ::tensorflow::TensorProto* _internal_add_tensor();
2732 public:
2733 const ::tensorflow::TensorProto& tensor(int index) const;
2734 ::tensorflow::TensorProto* add_tensor();
2735 const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorProto >&
2736 tensor() const;
2737
2738 // @@protoc_insertion_point(class_scope:tensorflow.eager.RunComponentFunctionResponse)
2739 private:
2740 class _Internal;
2741
2742 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
2743 typedef void InternalArenaConstructable_;
2744 typedef void DestructorSkippable_;
// Internal storage: the two repeated submessage fields plus the cached
// serialized size.
2745 struct Impl_ {
2746 ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorShapeProto > shape_;
2747 ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorProto > tensor_;
2748 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
2749 };
2750 union { Impl_ _impl_; };
2751 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
2752 };
2753 // -------------------------------------------------------------------
2754
// Generated MessageLite class for the tensorflow.eager.KeepAliveRequest proto
// message — the request of the KeepAlive RPC.  Single field (see accessors
// below): fixed64 context_id = 1.  DO NOT EDIT by hand; regenerate from
// tensorflow/core/protobuf/eager_service.proto.
// NOTE(review): stray "NNNN" tokens / duplicated declarator fragments at some
// line starts look like extraction-tool artifacts, not valid C++ — confirm
// and regenerate rather than hand-fix.
2755 class KeepAliveRequest final :
2756 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.KeepAliveRequest) */ {
2757 public:
KeepAliveRequest()2758 inline KeepAliveRequest() : KeepAliveRequest(nullptr) {}
2759 ~KeepAliveRequest() override;
2760 explicit PROTOBUF_CONSTEXPR KeepAliveRequest(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
2761
2762 KeepAliveRequest(const KeepAliveRequest& from);
KeepAliveRequest(KeepAliveRequest && from)2763 KeepAliveRequest(KeepAliveRequest&& from) noexcept
2764 : KeepAliveRequest() {
2765 *this = ::std::move(from);
2766 }
2767
2768 inline KeepAliveRequest& operator=(const KeepAliveRequest& from) {
2769 if (this == &from) return *this;
2770 CopyFrom(from);
2771 return *this;
2772 }
// Arena-aware move assignment: swap when on the same arena, else deep copy.
2773 inline KeepAliveRequest& operator=(KeepAliveRequest&& from) noexcept {
2774 if (this == &from) return *this;
2775 if (GetOwningArena() == from.GetOwningArena()
2776 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
2777 && GetOwningArena() != nullptr
2778 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
2779 ) {
2780 InternalSwap(&from);
2781 } else {
2782 CopyFrom(from);
2783 }
2784 return *this;
2785 }
2786
default_instance()2787 static const KeepAliveRequest& default_instance() {
2788 return *internal_default_instance();
2789 }
internal_default_instance()2790 static inline const KeepAliveRequest* internal_default_instance() {
2791 return reinterpret_cast<const KeepAliveRequest*>(
2792 &_KeepAliveRequest_default_instance_);
2793 }
2794 static constexpr int kIndexInFileMessages =
2795 15;
2796
// Swap: pointer swap within one arena, copy-based GenericSwap across arenas.
swap(KeepAliveRequest & a,KeepAliveRequest & b)2797 friend void swap(KeepAliveRequest& a, KeepAliveRequest& b) {
2798 a.Swap(&b);
2799 }
Swap(KeepAliveRequest * other)2800 inline void Swap(KeepAliveRequest* other) {
2801 if (other == this) return;
2802 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
2803 if (GetOwningArena() != nullptr &&
2804 GetOwningArena() == other->GetOwningArena()) {
2805 #else // PROTOBUF_FORCE_COPY_IN_SWAP
2806 if (GetOwningArena() == other->GetOwningArena()) {
2807 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
2808 InternalSwap(other);
2809 } else {
2810 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
2811 }
2812 }
// Caller must guarantee both objects are on the same arena (DCHECK'd).
2813 void UnsafeArenaSwap(KeepAliveRequest* other) {
2814 if (other == this) return;
2815 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
2816 InternalSwap(other);
2817 }
2818
2819 // implements Message ----------------------------------------------
2820
2821 KeepAliveRequest* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
2822 return CreateMaybeMessage<KeepAliveRequest>(arena);
2823 }
2824 KeepAliveRequest* New() const {
2825 return New(nullptr);
2826 }
2827 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
2828 void CopyFrom(const KeepAliveRequest& from);
2829 void MergeFrom(const KeepAliveRequest& from);
2830 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
2831 bool IsInitialized() const final;
2832
2833 size_t ByteSizeLong() const final;
2834 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
2835 ::uint8_t* _InternalSerialize(
2836 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
2837 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
2838
2839 private:
2840 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
2841 void SharedDtor();
2842 void SetCachedSize(int size) const;
2843 void InternalSwap(KeepAliveRequest* other);
2844
2845 private:
2846 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
2847 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
2848 return "tensorflow.eager.KeepAliveRequest";
2849 }
2850 protected:
2851 explicit KeepAliveRequest(::PROTOBUF_NAMESPACE_ID::Arena* arena,
2852 bool is_message_owned = false);
2853 public:
2854
2855 std::string GetTypeName() const final;
2856
2857 // nested types ----------------------------------------------------
2858
2859 // accessors -------------------------------------------------------
2860
2861 enum : int {
2862 kContextIdFieldNumber = 1,
2863 };
2864 // fixed64 context_id = 1;
2865 void clear_context_id();
2866 ::uint64_t context_id() const;
2867 void set_context_id(::uint64_t value);
2868 private:
2869 ::uint64_t _internal_context_id() const;
2870 void _internal_set_context_id(::uint64_t value);
2871 public:
2872
2873 // @@protoc_insertion_point(class_scope:tensorflow.eager.KeepAliveRequest)
2874 private:
2875 class _Internal;
2876
2877 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
2878 typedef void InternalArenaConstructable_;
2879 typedef void DestructorSkippable_;
// Internal storage: the context_id scalar plus the cached serialized size.
2880 struct Impl_ {
2881 ::uint64_t context_id_;
2882 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
2883 };
2884 union { Impl_ _impl_; };
2885 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
2886 };
2887 // -------------------------------------------------------------------
2888
// Generated MessageLite class for the tensorflow.eager.KeepAliveResponse
// proto message — the response of the KeepAlive RPC.  Single field (see
// accessors below): fixed64 context_view_id = 1.  DO NOT EDIT by hand;
// regenerate from tensorflow/core/protobuf/eager_service.proto.
// NOTE(review): stray "NNNN" tokens / duplicated declarator fragments at some
// line starts look like extraction-tool artifacts, not valid C++ — confirm
// and regenerate rather than hand-fix.
2889 class KeepAliveResponse final :
2890 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.KeepAliveResponse) */ {
2891 public:
KeepAliveResponse()2892 inline KeepAliveResponse() : KeepAliveResponse(nullptr) {}
2893 ~KeepAliveResponse() override;
2894 explicit PROTOBUF_CONSTEXPR KeepAliveResponse(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
2895
2896 KeepAliveResponse(const KeepAliveResponse& from);
KeepAliveResponse(KeepAliveResponse && from)2897 KeepAliveResponse(KeepAliveResponse&& from) noexcept
2898 : KeepAliveResponse() {
2899 *this = ::std::move(from);
2900 }
2901
2902 inline KeepAliveResponse& operator=(const KeepAliveResponse& from) {
2903 if (this == &from) return *this;
2904 CopyFrom(from);
2905 return *this;
2906 }
// Arena-aware move assignment: swap when on the same arena, else deep copy.
2907 inline KeepAliveResponse& operator=(KeepAliveResponse&& from) noexcept {
2908 if (this == &from) return *this;
2909 if (GetOwningArena() == from.GetOwningArena()
2910 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
2911 && GetOwningArena() != nullptr
2912 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
2913 ) {
2914 InternalSwap(&from);
2915 } else {
2916 CopyFrom(from);
2917 }
2918 return *this;
2919 }
2920
default_instance()2921 static const KeepAliveResponse& default_instance() {
2922 return *internal_default_instance();
2923 }
internal_default_instance()2924 static inline const KeepAliveResponse* internal_default_instance() {
2925 return reinterpret_cast<const KeepAliveResponse*>(
2926 &_KeepAliveResponse_default_instance_);
2927 }
2928 static constexpr int kIndexInFileMessages =
2929 16;
2930
// Swap: pointer swap within one arena, copy-based GenericSwap across arenas.
swap(KeepAliveResponse & a,KeepAliveResponse & b)2931 friend void swap(KeepAliveResponse& a, KeepAliveResponse& b) {
2932 a.Swap(&b);
2933 }
Swap(KeepAliveResponse * other)2934 inline void Swap(KeepAliveResponse* other) {
2935 if (other == this) return;
2936 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
2937 if (GetOwningArena() != nullptr &&
2938 GetOwningArena() == other->GetOwningArena()) {
2939 #else // PROTOBUF_FORCE_COPY_IN_SWAP
2940 if (GetOwningArena() == other->GetOwningArena()) {
2941 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
2942 InternalSwap(other);
2943 } else {
2944 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
2945 }
2946 }
// Caller must guarantee both objects are on the same arena (DCHECK'd).
2947 void UnsafeArenaSwap(KeepAliveResponse* other) {
2948 if (other == this) return;
2949 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
2950 InternalSwap(other);
2951 }
2952
2953 // implements Message ----------------------------------------------
2954
2955 KeepAliveResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
2956 return CreateMaybeMessage<KeepAliveResponse>(arena);
2957 }
2958 KeepAliveResponse* New() const {
2959 return New(nullptr);
2960 }
2961 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
2962 void CopyFrom(const KeepAliveResponse& from);
2963 void MergeFrom(const KeepAliveResponse& from);
2964 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
2965 bool IsInitialized() const final;
2966
2967 size_t ByteSizeLong() const final;
2968 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
2969 ::uint8_t* _InternalSerialize(
2970 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
2971 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
2972
2973 private:
2974 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
2975 void SharedDtor();
2976 void SetCachedSize(int size) const;
2977 void InternalSwap(KeepAliveResponse* other);
2978
2979 private:
2980 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
2981 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
2982 return "tensorflow.eager.KeepAliveResponse";
2983 }
2984 protected:
2985 explicit KeepAliveResponse(::PROTOBUF_NAMESPACE_ID::Arena* arena,
2986 bool is_message_owned = false);
2987 public:
2988
2989 std::string GetTypeName() const final;
2990
2991 // nested types ----------------------------------------------------
2992
2993 // accessors -------------------------------------------------------
2994
2995 enum : int {
2996 kContextViewIdFieldNumber = 1,
2997 };
2998 // fixed64 context_view_id = 1;
2999 void clear_context_view_id();
3000 ::uint64_t context_view_id() const;
3001 void set_context_view_id(::uint64_t value);
3002 private:
3003 ::uint64_t _internal_context_view_id() const;
3004 void _internal_set_context_view_id(::uint64_t value);
3005 public:
3006
3007 // @@protoc_insertion_point(class_scope:tensorflow.eager.KeepAliveResponse)
3008 private:
3009 class _Internal;
3010
3011 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
3012 typedef void InternalArenaConstructable_;
3013 typedef void DestructorSkippable_;
// Internal storage: the context_view_id scalar plus the cached serialized
// size.
3014 struct Impl_ {
3015 ::uint64_t context_view_id_;
3016 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
3017 };
3018 union { Impl_ _impl_; };
3019 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
3020 };
3021 // -------------------------------------------------------------------
3022
3023 class CloseContextRequest final :
3024 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.CloseContextRequest) */ {
3025 public:
CloseContextRequest()3026 inline CloseContextRequest() : CloseContextRequest(nullptr) {}
3027 ~CloseContextRequest() override;
3028 explicit PROTOBUF_CONSTEXPR CloseContextRequest(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
3029
3030 CloseContextRequest(const CloseContextRequest& from);
CloseContextRequest(CloseContextRequest && from)3031 CloseContextRequest(CloseContextRequest&& from) noexcept
3032 : CloseContextRequest() {
3033 *this = ::std::move(from);
3034 }
3035
3036 inline CloseContextRequest& operator=(const CloseContextRequest& from) {
3037 if (this == &from) return *this;
3038 CopyFrom(from);
3039 return *this;
3040 }
3041 inline CloseContextRequest& operator=(CloseContextRequest&& from) noexcept {
3042 if (this == &from) return *this;
3043 if (GetOwningArena() == from.GetOwningArena()
3044 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
3045 && GetOwningArena() != nullptr
3046 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
3047 ) {
3048 InternalSwap(&from);
3049 } else {
3050 CopyFrom(from);
3051 }
3052 return *this;
3053 }
3054
default_instance()3055 static const CloseContextRequest& default_instance() {
3056 return *internal_default_instance();
3057 }
internal_default_instance()3058 static inline const CloseContextRequest* internal_default_instance() {
3059 return reinterpret_cast<const CloseContextRequest*>(
3060 &_CloseContextRequest_default_instance_);
3061 }
3062 static constexpr int kIndexInFileMessages =
3063 17;
3064
swap(CloseContextRequest & a,CloseContextRequest & b)3065 friend void swap(CloseContextRequest& a, CloseContextRequest& b) {
3066 a.Swap(&b);
3067 }
Swap(CloseContextRequest * other)3068 inline void Swap(CloseContextRequest* other) {
3069 if (other == this) return;
3070 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
3071 if (GetOwningArena() != nullptr &&
3072 GetOwningArena() == other->GetOwningArena()) {
3073 #else // PROTOBUF_FORCE_COPY_IN_SWAP
3074 if (GetOwningArena() == other->GetOwningArena()) {
3075 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
3076 InternalSwap(other);
3077 } else {
3078 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
3079 }
3080 }
3081 void UnsafeArenaSwap(CloseContextRequest* other) {
3082 if (other == this) return;
3083 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
3084 InternalSwap(other);
3085 }
3086
  // implements Message ----------------------------------------------

  // Creates a new CloseContextRequest, allocated on `arena` when non-null.
  CloseContextRequest* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<CloseContextRequest>(arena);
  }
  CloseContextRequest* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
  void CopyFrom(const CloseContextRequest& from);
  void MergeFrom(const CloseContextRequest& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  // Wire-format parse/serialize entry points invoked by the protobuf runtime;
  // bodies are emitted in the generated .pb.cc.
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  // Returns the byte size cached by the last ByteSizeLong() computation.
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
3106
  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(CloseContextRequest* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  // Fully-qualified proto type name, used by the runtime for diagnostics and
  // type identification.
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.eager.CloseContextRequest";
  }
  protected:
  // Arena constructor; `is_message_owned` is an internal protobuf flag for
  // messages living in message-owned arenas.
  explicit CloseContextRequest(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;
3124
  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kContextIdFieldNumber = 1,
    kContextViewIdFieldNumber = 2,
  };
  // fixed64 context_id = 1;
  // Scalar accessors; implementations are emitted inline further down in the
  // generated header.
  void clear_context_id();
  ::uint64_t context_id() const;
  void set_context_id(::uint64_t value);
  private:
  ::uint64_t _internal_context_id() const;
  void _internal_set_context_id(::uint64_t value);
  public:

  // fixed64 context_view_id = 2;
  void clear_context_view_id();
  ::uint64_t context_view_id() const;
  void set_context_view_id(::uint64_t value);
  private:
  ::uint64_t _internal_context_view_id() const;
  void _internal_set_context_view_id(::uint64_t value);
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.eager.CloseContextRequest)
  private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // All field storage lives in Impl_; the union wrapper is part of protoc's
  // constant-initialization scheme for the default instance.
  struct Impl_ {
    ::uint64_t context_id_;
    ::uint64_t context_view_id_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
};
3166 // -------------------------------------------------------------------
3167
// Generated lite-runtime (MessageLite, no reflection) message for
// tensorflow.eager.CloseContextResponse. This message carries no fields;
// Impl_ holds only the cached serialized-size bookkeeping.
class CloseContextResponse final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.CloseContextResponse) */ {
 public:
  inline CloseContextResponse() : CloseContextResponse(nullptr) {}
  ~CloseContextResponse() override;
  // Constant-initialized constructor used only for the global default instance.
  explicit PROTOBUF_CONSTEXPR CloseContextResponse(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  CloseContextResponse(const CloseContextResponse& from);
  CloseContextResponse(CloseContextResponse&& from) noexcept
    : CloseContextResponse() {
    *this = ::std::move(from);
  }

  inline CloseContextResponse& operator=(const CloseContextResponse& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline CloseContextResponse& operator=(CloseContextResponse&& from) noexcept {
    if (this == &from) return *this;
    // Move is a cheap swap only when source and target share an owning arena;
    // otherwise it degrades to a deep copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  // Shared, immutable default instance.
  static const CloseContextResponse& default_instance() {
    return *internal_default_instance();
  }
  static inline const CloseContextResponse* internal_default_instance() {
    return reinterpret_cast<const CloseContextResponse*>(
               &_CloseContextResponse_default_instance_);
  }
  // Index of this message within the generated file's message table.
  static constexpr int kIndexInFileMessages =
    18;

  friend void swap(CloseContextResponse& a, CloseContextResponse& b) {
    a.Swap(&b);
  }
  // Pointer swap when both messages share an arena; copy-based GenericSwap
  // otherwise.
  inline void Swap(CloseContextResponse* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
  #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Precondition (DCHECK'd only): both messages are owned by the same arena.
  void UnsafeArenaSwap(CloseContextResponse* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  CloseContextResponse* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<CloseContextResponse>(arena);
  }
  CloseContextResponse* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
  void CopyFrom(const CloseContextResponse& from);
  void MergeFrom(const CloseContextResponse& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(CloseContextResponse* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.eager.CloseContextResponse";
  }
  protected:
  explicit CloseContextResponse(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  // @@protoc_insertion_point(class_scope:tensorflow.eager.CloseContextResponse)
  private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // No proto fields: only the cached-size bookkeeping.
  struct Impl_ {
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
};
3287 // -------------------------------------------------------------------
3288
// Generated lite-runtime (MessageLite, no reflection) message for
// tensorflow.eager.RegisterFunctionOp. Fields: function_def (message, 1),
// is_component_function (bool, 2), library (message, 3).
class RegisterFunctionOp final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.RegisterFunctionOp) */ {
 public:
  inline RegisterFunctionOp() : RegisterFunctionOp(nullptr) {}
  ~RegisterFunctionOp() override;
  // Constant-initialized constructor used only for the global default instance.
  explicit PROTOBUF_CONSTEXPR RegisterFunctionOp(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  RegisterFunctionOp(const RegisterFunctionOp& from);
  RegisterFunctionOp(RegisterFunctionOp&& from) noexcept
    : RegisterFunctionOp() {
    *this = ::std::move(from);
  }

  inline RegisterFunctionOp& operator=(const RegisterFunctionOp& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline RegisterFunctionOp& operator=(RegisterFunctionOp&& from) noexcept {
    if (this == &from) return *this;
    // Move is a cheap swap only when source and target share an owning arena;
    // otherwise it degrades to a deep copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  // Shared, immutable default instance.
  static const RegisterFunctionOp& default_instance() {
    return *internal_default_instance();
  }
  static inline const RegisterFunctionOp* internal_default_instance() {
    return reinterpret_cast<const RegisterFunctionOp*>(
               &_RegisterFunctionOp_default_instance_);
  }
  // Index of this message within the generated file's message table.
  static constexpr int kIndexInFileMessages =
    19;

  friend void swap(RegisterFunctionOp& a, RegisterFunctionOp& b) {
    a.Swap(&b);
  }
  // Pointer swap when both messages share an arena; copy-based GenericSwap
  // otherwise.
  inline void Swap(RegisterFunctionOp* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
  #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Precondition (DCHECK'd only): both messages are owned by the same arena.
  void UnsafeArenaSwap(RegisterFunctionOp* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  RegisterFunctionOp* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<RegisterFunctionOp>(arena);
  }
  RegisterFunctionOp* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
  void CopyFrom(const RegisterFunctionOp& from);
  void MergeFrom(const RegisterFunctionOp& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(RegisterFunctionOp* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.eager.RegisterFunctionOp";
  }
  protected:
  explicit RegisterFunctionOp(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kFunctionDefFieldNumber = 1,
    kLibraryFieldNumber = 3,
    kIsComponentFunctionFieldNumber = 2,
  };
  // .tensorflow.FunctionDef function_def = 1;
  // Singular message field; release_/set_allocated_/unsafe_arena_* follow the
  // standard protobuf generated-API ownership conventions.
  bool has_function_def() const;
  private:
  bool _internal_has_function_def() const;
  public:
  void clear_function_def();
  const ::tensorflow::FunctionDef& function_def() const;
  PROTOBUF_NODISCARD ::tensorflow::FunctionDef* release_function_def();
  ::tensorflow::FunctionDef* mutable_function_def();
  void set_allocated_function_def(::tensorflow::FunctionDef* function_def);
  private:
  const ::tensorflow::FunctionDef& _internal_function_def() const;
  ::tensorflow::FunctionDef* _internal_mutable_function_def();
  public:
  void unsafe_arena_set_allocated_function_def(
      ::tensorflow::FunctionDef* function_def);
  ::tensorflow::FunctionDef* unsafe_arena_release_function_def();

  // .tensorflow.FunctionDefLibrary library = 3;
  bool has_library() const;
  private:
  bool _internal_has_library() const;
  public:
  void clear_library();
  const ::tensorflow::FunctionDefLibrary& library() const;
  PROTOBUF_NODISCARD ::tensorflow::FunctionDefLibrary* release_library();
  ::tensorflow::FunctionDefLibrary* mutable_library();
  void set_allocated_library(::tensorflow::FunctionDefLibrary* library);
  private:
  const ::tensorflow::FunctionDefLibrary& _internal_library() const;
  ::tensorflow::FunctionDefLibrary* _internal_mutable_library();
  public:
  void unsafe_arena_set_allocated_library(
      ::tensorflow::FunctionDefLibrary* library);
  ::tensorflow::FunctionDefLibrary* unsafe_arena_release_library();

  // bool is_component_function = 2;
  void clear_is_component_function();
  bool is_component_function() const;
  void set_is_component_function(bool value);
  private:
  bool _internal_is_component_function() const;
  void _internal_set_is_component_function(bool value);
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.eager.RegisterFunctionOp)
  private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage (protoc-generated layout).
  struct Impl_ {
    ::tensorflow::FunctionDef* function_def_;
    ::tensorflow::FunctionDefLibrary* library_;
    bool is_component_function_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
};
3461 // -------------------------------------------------------------------
3462
// Generated lite-runtime (MessageLite, no reflection) message for
// tensorflow.eager.CleanupFunctionOp. Single field: step_id (int64, 1).
class CleanupFunctionOp final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.CleanupFunctionOp) */ {
 public:
  inline CleanupFunctionOp() : CleanupFunctionOp(nullptr) {}
  ~CleanupFunctionOp() override;
  // Constant-initialized constructor used only for the global default instance.
  explicit PROTOBUF_CONSTEXPR CleanupFunctionOp(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  CleanupFunctionOp(const CleanupFunctionOp& from);
  CleanupFunctionOp(CleanupFunctionOp&& from) noexcept
    : CleanupFunctionOp() {
    *this = ::std::move(from);
  }

  inline CleanupFunctionOp& operator=(const CleanupFunctionOp& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline CleanupFunctionOp& operator=(CleanupFunctionOp&& from) noexcept {
    if (this == &from) return *this;
    // Move is a cheap swap only when source and target share an owning arena;
    // otherwise it degrades to a deep copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  // Shared, immutable default instance.
  static const CleanupFunctionOp& default_instance() {
    return *internal_default_instance();
  }
  static inline const CleanupFunctionOp* internal_default_instance() {
    return reinterpret_cast<const CleanupFunctionOp*>(
               &_CleanupFunctionOp_default_instance_);
  }
  // Index of this message within the generated file's message table.
  static constexpr int kIndexInFileMessages =
    20;

  friend void swap(CleanupFunctionOp& a, CleanupFunctionOp& b) {
    a.Swap(&b);
  }
  // Pointer swap when both messages share an arena; copy-based GenericSwap
  // otherwise.
  inline void Swap(CleanupFunctionOp* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
  #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Precondition (DCHECK'd only): both messages are owned by the same arena.
  void UnsafeArenaSwap(CleanupFunctionOp* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  CleanupFunctionOp* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<CleanupFunctionOp>(arena);
  }
  CleanupFunctionOp* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
  void CopyFrom(const CleanupFunctionOp& from);
  void MergeFrom(const CleanupFunctionOp& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(CleanupFunctionOp* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.eager.CleanupFunctionOp";
  }
  protected:
  explicit CleanupFunctionOp(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kStepIdFieldNumber = 1,
  };
  // int64 step_id = 1;
  void clear_step_id();
  ::int64_t step_id() const;
  void set_step_id(::int64_t value);
  private:
  ::int64_t _internal_step_id() const;
  void _internal_set_step_id(::int64_t value);
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.eager.CleanupFunctionOp)
  private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage (protoc-generated layout).
  struct Impl_ {
    ::int64_t step_id_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
};
3595 // -------------------------------------------------------------------
3596
// Generated lite-runtime (MessageLite, no reflection) message for
// tensorflow.eager.SyncRemoteExecutorForStream. This message carries no
// fields; Impl_ holds only the cached serialized-size bookkeeping.
class SyncRemoteExecutorForStream final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.SyncRemoteExecutorForStream) */ {
 public:
  inline SyncRemoteExecutorForStream() : SyncRemoteExecutorForStream(nullptr) {}
  ~SyncRemoteExecutorForStream() override;
  // Constant-initialized constructor used only for the global default instance.
  explicit PROTOBUF_CONSTEXPR SyncRemoteExecutorForStream(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  SyncRemoteExecutorForStream(const SyncRemoteExecutorForStream& from);
  SyncRemoteExecutorForStream(SyncRemoteExecutorForStream&& from) noexcept
    : SyncRemoteExecutorForStream() {
    *this = ::std::move(from);
  }

  inline SyncRemoteExecutorForStream& operator=(const SyncRemoteExecutorForStream& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline SyncRemoteExecutorForStream& operator=(SyncRemoteExecutorForStream&& from) noexcept {
    if (this == &from) return *this;
    // Move is a cheap swap only when source and target share an owning arena;
    // otherwise it degrades to a deep copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  // Shared, immutable default instance.
  static const SyncRemoteExecutorForStream& default_instance() {
    return *internal_default_instance();
  }
  static inline const SyncRemoteExecutorForStream* internal_default_instance() {
    return reinterpret_cast<const SyncRemoteExecutorForStream*>(
               &_SyncRemoteExecutorForStream_default_instance_);
  }
  // Index of this message within the generated file's message table.
  static constexpr int kIndexInFileMessages =
    21;

  friend void swap(SyncRemoteExecutorForStream& a, SyncRemoteExecutorForStream& b) {
    a.Swap(&b);
  }
  // Pointer swap when both messages share an arena; copy-based GenericSwap
  // otherwise.
  inline void Swap(SyncRemoteExecutorForStream* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
  #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Precondition (DCHECK'd only): both messages are owned by the same arena.
  void UnsafeArenaSwap(SyncRemoteExecutorForStream* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  SyncRemoteExecutorForStream* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<SyncRemoteExecutorForStream>(arena);
  }
  SyncRemoteExecutorForStream* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
  void CopyFrom(const SyncRemoteExecutorForStream& from);
  void MergeFrom(const SyncRemoteExecutorForStream& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(SyncRemoteExecutorForStream* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.eager.SyncRemoteExecutorForStream";
  }
  protected:
  explicit SyncRemoteExecutorForStream(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  // @@protoc_insertion_point(class_scope:tensorflow.eager.SyncRemoteExecutorForStream)
  private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // No proto fields: only the cached-size bookkeeping.
  struct Impl_ {
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
};
3716 // -------------------------------------------------------------------
3717
// Generated lite-runtime (MessageLite, no reflection) message for
// tensorflow.eager.SendTensorOp. Fields: op_id (int64, 1),
// tensors (repeated TensorProto, 2), device_name (string, 3).
class SendTensorOp final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.SendTensorOp) */ {
 public:
  inline SendTensorOp() : SendTensorOp(nullptr) {}
  ~SendTensorOp() override;
  // Constant-initialized constructor used only for the global default instance.
  explicit PROTOBUF_CONSTEXPR SendTensorOp(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  SendTensorOp(const SendTensorOp& from);
  SendTensorOp(SendTensorOp&& from) noexcept
    : SendTensorOp() {
    *this = ::std::move(from);
  }

  inline SendTensorOp& operator=(const SendTensorOp& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline SendTensorOp& operator=(SendTensorOp&& from) noexcept {
    if (this == &from) return *this;
    // Move is a cheap swap only when source and target share an owning arena;
    // otherwise it degrades to a deep copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  // Shared, immutable default instance.
  static const SendTensorOp& default_instance() {
    return *internal_default_instance();
  }
  static inline const SendTensorOp* internal_default_instance() {
    return reinterpret_cast<const SendTensorOp*>(
               &_SendTensorOp_default_instance_);
  }
  // Index of this message within the generated file's message table.
  static constexpr int kIndexInFileMessages =
    22;

  friend void swap(SendTensorOp& a, SendTensorOp& b) {
    a.Swap(&b);
  }
  // Pointer swap when both messages share an arena; copy-based GenericSwap
  // otherwise.
  inline void Swap(SendTensorOp* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
  #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Precondition (DCHECK'd only): both messages are owned by the same arena.
  void UnsafeArenaSwap(SendTensorOp* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  SendTensorOp* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<SendTensorOp>(arena);
  }
  SendTensorOp* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
  void CopyFrom(const SendTensorOp& from);
  void MergeFrom(const SendTensorOp& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(SendTensorOp* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.eager.SendTensorOp";
  }
  protected:
  explicit SendTensorOp(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kTensorsFieldNumber = 2,
    kDeviceNameFieldNumber = 3,
    kOpIdFieldNumber = 1,
  };
  // repeated .tensorflow.TensorProto tensors = 2;
  int tensors_size() const;
  private:
  int _internal_tensors_size() const;
  public:
  void clear_tensors();
  ::tensorflow::TensorProto* mutable_tensors(int index);
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorProto >*
      mutable_tensors();
  private:
  const ::tensorflow::TensorProto& _internal_tensors(int index) const;
  ::tensorflow::TensorProto* _internal_add_tensors();
  public:
  const ::tensorflow::TensorProto& tensors(int index) const;
  ::tensorflow::TensorProto* add_tensors();
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorProto >&
      tensors() const;

  // string device_name = 3;
  // String field; release_/set_allocated_ follow the standard protobuf
  // generated-API ownership conventions.
  void clear_device_name();
  const std::string& device_name() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_device_name(ArgT0&& arg0, ArgT... args);
  std::string* mutable_device_name();
  PROTOBUF_NODISCARD std::string* release_device_name();
  void set_allocated_device_name(std::string* device_name);
  private:
  const std::string& _internal_device_name() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_device_name(const std::string& value);
  std::string* _internal_mutable_device_name();
  public:

  // int64 op_id = 1;
  void clear_op_id();
  ::int64_t op_id() const;
  void set_op_id(::int64_t value);
  private:
  ::int64_t _internal_op_id() const;
  void _internal_set_op_id(::int64_t value);
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.eager.SendTensorOp)
  private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage (protoc-generated layout).
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorProto > tensors_;
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr device_name_;
    ::int64_t op_id_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
};
3886 // -------------------------------------------------------------------
3887
// Generated lite-runtime message for
// tensorflow.eager.SendPackedHandleOp.LocalTensorHandle.
// Carries one locally-produced component of a packed handle: a TensorProto
// payload (`tensor`, field 1) and the name of the device holding it
// (`device`, field 2).
// NOTE: protoc-generated — do not hand-edit; regenerate from
// tensorflow/core/protobuf/eager_service.proto.
class SendPackedHandleOp_LocalTensorHandle final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.SendPackedHandleOp.LocalTensorHandle) */ {
 public:
  // Heap-allocated default construction (no arena).
  inline SendPackedHandleOp_LocalTensorHandle() : SendPackedHandleOp_LocalTensorHandle(nullptr) {}
  ~SendPackedHandleOp_LocalTensorHandle() override;
  // Constant-initialized ctor used only for the global default instance.
  explicit PROTOBUF_CONSTEXPR SendPackedHandleOp_LocalTensorHandle(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  SendPackedHandleOp_LocalTensorHandle(const SendPackedHandleOp_LocalTensorHandle& from);
  // Move construction delegates to the arena-aware move-assignment below.
  SendPackedHandleOp_LocalTensorHandle(SendPackedHandleOp_LocalTensorHandle&& from) noexcept
    : SendPackedHandleOp_LocalTensorHandle() {
    *this = ::std::move(from);
  }

  inline SendPackedHandleOp_LocalTensorHandle& operator=(const SendPackedHandleOp_LocalTensorHandle& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  // Move-assignment swaps only when both messages share the same owning
  // arena; otherwise it must deep-copy so arena lifetimes stay correct.
  inline SendPackedHandleOp_LocalTensorHandle& operator=(SendPackedHandleOp_LocalTensorHandle&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  // Shared immutable default instance (all fields unset/empty).
  static const SendPackedHandleOp_LocalTensorHandle& default_instance() {
    return *internal_default_instance();
  }
  static inline const SendPackedHandleOp_LocalTensorHandle* internal_default_instance() {
    return reinterpret_cast<const SendPackedHandleOp_LocalTensorHandle*>(
               &_SendPackedHandleOp_LocalTensorHandle_default_instance_);
  }
  // Position of this message in the file's generated message table.
  static constexpr int kIndexInFileMessages =
    23;

  friend void swap(SendPackedHandleOp_LocalTensorHandle& a, SendPackedHandleOp_LocalTensorHandle& b) {
    a.Swap(&b);
  }
  // Swap is O(1) only for same-arena messages; cross-arena falls back to
  // GenericSwap (which copies).
  inline void Swap(SendPackedHandleOp_LocalTensorHandle* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
  #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Precondition (checked by DCHECK): both messages share an owning arena.
  void UnsafeArenaSwap(SendPackedHandleOp_LocalTensorHandle* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  SendPackedHandleOp_LocalTensorHandle* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<SendPackedHandleOp_LocalTensorHandle>(arena);
  }
  SendPackedHandleOp_LocalTensorHandle* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
  void CopyFrom(const SendPackedHandleOp_LocalTensorHandle& from);
  void MergeFrom(const SendPackedHandleOp_LocalTensorHandle& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(SendPackedHandleOp_LocalTensorHandle* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.eager.SendPackedHandleOp.LocalTensorHandle";
  }
  protected:
  explicit SendPackedHandleOp_LocalTensorHandle(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kDeviceFieldNumber = 2,
    kTensorFieldNumber = 1,
  };
  // string device = 2;
  void clear_device();
  const std::string& device() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_device(ArgT0&& arg0, ArgT... args);
  std::string* mutable_device();
  PROTOBUF_NODISCARD std::string* release_device();
  void set_allocated_device(std::string* device);
  private:
  const std::string& _internal_device() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_device(const std::string& value);
  std::string* _internal_mutable_device();
  public:

  // .tensorflow.TensorProto tensor = 1;
  bool has_tensor() const;
  private:
  bool _internal_has_tensor() const;
  public:
  void clear_tensor();
  const ::tensorflow::TensorProto& tensor() const;
  PROTOBUF_NODISCARD ::tensorflow::TensorProto* release_tensor();
  ::tensorflow::TensorProto* mutable_tensor();
  void set_allocated_tensor(::tensorflow::TensorProto* tensor);
  private:
  const ::tensorflow::TensorProto& _internal_tensor() const;
  ::tensorflow::TensorProto* _internal_mutable_tensor();
  public:
  // unsafe_arena_* variants skip the arena-safety copy; caller must manage
  // lifetimes manually.
  void unsafe_arena_set_allocated_tensor(
      ::tensorflow::TensorProto* tensor);
  ::tensorflow::TensorProto* unsafe_arena_release_tensor();

  // @@protoc_insertion_point(class_scope:tensorflow.eager.SendPackedHandleOp.LocalTensorHandle)
  private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage: `device_` (arena string), owned `tensor_` submessage
  // pointer (null when unset), and the cached serialized-size word.
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr device_;
    ::tensorflow::TensorProto* tensor_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
};
4045 // -------------------------------------------------------------------
4046
// Generated lite-runtime message for tensorflow.eager.SendPackedHandleOp.Handle.
// A oneof `item` holding either a LocalTensorHandle (`local_handle`, field 1)
// or a RemoteTensorHandle (`remote_handle`, field 2); at most one is set.
// NOTE: protoc-generated — do not hand-edit; regenerate from
// tensorflow/core/protobuf/eager_service.proto.
class SendPackedHandleOp_Handle final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.SendPackedHandleOp.Handle) */ {
 public:
  // Heap-allocated default construction (no arena).
  inline SendPackedHandleOp_Handle() : SendPackedHandleOp_Handle(nullptr) {}
  ~SendPackedHandleOp_Handle() override;
  // Constant-initialized ctor used only for the global default instance.
  explicit PROTOBUF_CONSTEXPR SendPackedHandleOp_Handle(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  SendPackedHandleOp_Handle(const SendPackedHandleOp_Handle& from);
  // Move construction delegates to the arena-aware move-assignment below.
  SendPackedHandleOp_Handle(SendPackedHandleOp_Handle&& from) noexcept
    : SendPackedHandleOp_Handle() {
    *this = ::std::move(from);
  }

  inline SendPackedHandleOp_Handle& operator=(const SendPackedHandleOp_Handle& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  // Move-assignment swaps only when both messages share the same owning
  // arena; otherwise it must deep-copy so arena lifetimes stay correct.
  inline SendPackedHandleOp_Handle& operator=(SendPackedHandleOp_Handle&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  // Shared immutable default instance (oneof unset).
  static const SendPackedHandleOp_Handle& default_instance() {
    return *internal_default_instance();
  }
  // Discriminator values for the `item` oneof; the enumerators match the
  // field numbers, with 0 meaning "not set".
  enum ItemCase {
    kLocalHandle = 1,
    kRemoteHandle = 2,
    ITEM_NOT_SET = 0,
  };

  static inline const SendPackedHandleOp_Handle* internal_default_instance() {
    return reinterpret_cast<const SendPackedHandleOp_Handle*>(
               &_SendPackedHandleOp_Handle_default_instance_);
  }
  // Position of this message in the file's generated message table.
  static constexpr int kIndexInFileMessages =
    24;

  friend void swap(SendPackedHandleOp_Handle& a, SendPackedHandleOp_Handle& b) {
    a.Swap(&b);
  }
  // Swap is O(1) only for same-arena messages; cross-arena falls back to
  // GenericSwap (which copies).
  inline void Swap(SendPackedHandleOp_Handle* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
  #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Precondition (checked by DCHECK): both messages share an owning arena.
  void UnsafeArenaSwap(SendPackedHandleOp_Handle* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  SendPackedHandleOp_Handle* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<SendPackedHandleOp_Handle>(arena);
  }
  SendPackedHandleOp_Handle* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
  void CopyFrom(const SendPackedHandleOp_Handle& from);
  void MergeFrom(const SendPackedHandleOp_Handle& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(SendPackedHandleOp_Handle* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.eager.SendPackedHandleOp.Handle";
  }
  protected:
  explicit SendPackedHandleOp_Handle(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kLocalHandleFieldNumber = 1,
    kRemoteHandleFieldNumber = 2,
  };
  // .tensorflow.eager.SendPackedHandleOp.LocalTensorHandle local_handle = 1;
  bool has_local_handle() const;
  private:
  bool _internal_has_local_handle() const;
  public:
  void clear_local_handle();
  const ::tensorflow::eager::SendPackedHandleOp_LocalTensorHandle& local_handle() const;
  PROTOBUF_NODISCARD ::tensorflow::eager::SendPackedHandleOp_LocalTensorHandle* release_local_handle();
  ::tensorflow::eager::SendPackedHandleOp_LocalTensorHandle* mutable_local_handle();
  void set_allocated_local_handle(::tensorflow::eager::SendPackedHandleOp_LocalTensorHandle* local_handle);
  private:
  const ::tensorflow::eager::SendPackedHandleOp_LocalTensorHandle& _internal_local_handle() const;
  ::tensorflow::eager::SendPackedHandleOp_LocalTensorHandle* _internal_mutable_local_handle();
  public:
  // unsafe_arena_* variants skip the arena-safety copy; caller must manage
  // lifetimes manually.
  void unsafe_arena_set_allocated_local_handle(
      ::tensorflow::eager::SendPackedHandleOp_LocalTensorHandle* local_handle);
  ::tensorflow::eager::SendPackedHandleOp_LocalTensorHandle* unsafe_arena_release_local_handle();

  // .tensorflow.eager.RemoteTensorHandle remote_handle = 2;
  bool has_remote_handle() const;
  private:
  bool _internal_has_remote_handle() const;
  public:
  void clear_remote_handle();
  const ::tensorflow::eager::RemoteTensorHandle& remote_handle() const;
  PROTOBUF_NODISCARD ::tensorflow::eager::RemoteTensorHandle* release_remote_handle();
  ::tensorflow::eager::RemoteTensorHandle* mutable_remote_handle();
  void set_allocated_remote_handle(::tensorflow::eager::RemoteTensorHandle* remote_handle);
  private:
  const ::tensorflow::eager::RemoteTensorHandle& _internal_remote_handle() const;
  ::tensorflow::eager::RemoteTensorHandle* _internal_mutable_remote_handle();
  public:
  void unsafe_arena_set_allocated_remote_handle(
      ::tensorflow::eager::RemoteTensorHandle* remote_handle);
  ::tensorflow::eager::RemoteTensorHandle* unsafe_arena_release_remote_handle();

  // Clears whichever oneof member is currently set.
  void clear_item();
  ItemCase item_case() const;
  // @@protoc_insertion_point(class_scope:tensorflow.eager.SendPackedHandleOp.Handle)
  private:
  class _Internal;
  void set_has_local_handle();
  void set_has_remote_handle();

  inline bool has_item() const;
  inline void clear_has_item();

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Oneof storage: a tagged union of the two submessage pointers, with the
  // active member recorded in `_oneof_case_[0]`.
  struct Impl_ {
    union ItemUnion {
      constexpr ItemUnion() : _constinit_{} {}
      ::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized _constinit_;
      ::tensorflow::eager::SendPackedHandleOp_LocalTensorHandle* local_handle_;
      ::tensorflow::eager::RemoteTensorHandle* remote_handle_;
    } item_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
    ::uint32_t _oneof_case_[1];

  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
};
4227 // -------------------------------------------------------------------
4228
// Generated lite-runtime message for tensorflow.eager.SendPackedHandleOp.
// Fields: `op_id` (int64, field 1), repeated `handles` (Handle, field 2),
// and `device_name` (string, field 3). Nested message types are exposed via
// the LocalTensorHandle / Handle typedefs below.
// NOTE: protoc-generated — do not hand-edit; regenerate from
// tensorflow/core/protobuf/eager_service.proto.
class SendPackedHandleOp final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.eager.SendPackedHandleOp) */ {
 public:
  // Heap-allocated default construction (no arena).
  inline SendPackedHandleOp() : SendPackedHandleOp(nullptr) {}
  ~SendPackedHandleOp() override;
  // Constant-initialized ctor used only for the global default instance.
  explicit PROTOBUF_CONSTEXPR SendPackedHandleOp(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  SendPackedHandleOp(const SendPackedHandleOp& from);
  // Move construction delegates to the arena-aware move-assignment below.
  SendPackedHandleOp(SendPackedHandleOp&& from) noexcept
    : SendPackedHandleOp() {
    *this = ::std::move(from);
  }

  inline SendPackedHandleOp& operator=(const SendPackedHandleOp& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  // Move-assignment swaps only when both messages share the same owning
  // arena; otherwise it must deep-copy so arena lifetimes stay correct.
  inline SendPackedHandleOp& operator=(SendPackedHandleOp&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  // Shared immutable default instance (all fields unset/empty).
  static const SendPackedHandleOp& default_instance() {
    return *internal_default_instance();
  }
  static inline const SendPackedHandleOp* internal_default_instance() {
    return reinterpret_cast<const SendPackedHandleOp*>(
               &_SendPackedHandleOp_default_instance_);
  }
  // Position of this message in the file's generated message table.
  static constexpr int kIndexInFileMessages =
    25;

  friend void swap(SendPackedHandleOp& a, SendPackedHandleOp& b) {
    a.Swap(&b);
  }
  // Swap is O(1) only for same-arena messages; cross-arena falls back to
  // GenericSwap (which copies).
  inline void Swap(SendPackedHandleOp* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
  #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Precondition (checked by DCHECK): both messages share an owning arena.
  void UnsafeArenaSwap(SendPackedHandleOp* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  SendPackedHandleOp* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<SendPackedHandleOp>(arena);
  }
  SendPackedHandleOp* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
  void CopyFrom(const SendPackedHandleOp& from);
  void MergeFrom(const SendPackedHandleOp& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(SendPackedHandleOp* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.eager.SendPackedHandleOp";
  }
  protected:
  explicit SendPackedHandleOp(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // Convenience aliases for the nested generated message classes.
  typedef SendPackedHandleOp_LocalTensorHandle LocalTensorHandle;
  typedef SendPackedHandleOp_Handle Handle;

  // accessors -------------------------------------------------------

  enum : int {
    kHandlesFieldNumber = 2,
    kDeviceNameFieldNumber = 3,
    kOpIdFieldNumber = 1,
  };
  // repeated .tensorflow.eager.SendPackedHandleOp.Handle handles = 2;
  int handles_size() const;
  private:
  int _internal_handles_size() const;
  public:
  void clear_handles();
  ::tensorflow::eager::SendPackedHandleOp_Handle* mutable_handles(int index);
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::eager::SendPackedHandleOp_Handle >*
      mutable_handles();
  private:
  const ::tensorflow::eager::SendPackedHandleOp_Handle& _internal_handles(int index) const;
  ::tensorflow::eager::SendPackedHandleOp_Handle* _internal_add_handles();
  public:
  const ::tensorflow::eager::SendPackedHandleOp_Handle& handles(int index) const;
  ::tensorflow::eager::SendPackedHandleOp_Handle* add_handles();
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::eager::SendPackedHandleOp_Handle >&
      handles() const;

  // string device_name = 3;
  void clear_device_name();
  const std::string& device_name() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_device_name(ArgT0&& arg0, ArgT... args);
  std::string* mutable_device_name();
  PROTOBUF_NODISCARD std::string* release_device_name();
  void set_allocated_device_name(std::string* device_name);
  private:
  const std::string& _internal_device_name() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_device_name(const std::string& value);
  std::string* _internal_mutable_device_name();
  public:

  // int64 op_id = 1;
  void clear_op_id();
  ::int64_t op_id() const;
  void set_op_id(::int64_t value);
  private:
  ::int64_t _internal_op_id() const;
  void _internal_set_op_id(::int64_t value);
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.eager.SendPackedHandleOp)
  private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // Field storage: repeated `handles_`, arena string `device_name_`, scalar
  // `op_id_`, and the cached serialized-size word.
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::eager::SendPackedHandleOp_Handle > handles_;
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr device_name_;
    ::int64_t op_id_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2feager_5fservice_2eproto;
};
4400 // ===================================================================
4401
4402
4403 // ===================================================================
4404
4405 #ifdef __GNUC__
4406 #pragma GCC diagnostic push
4407 #pragma GCC diagnostic ignored "-Wstrict-aliasing"
4408 #endif // __GNUC__
4409 // Operation_Input
4410
// Inline accessors for the `remote_handle` member of the Operation.Input
// `item` oneof. The active member is tracked in _impl_._oneof_case_[0].
// .tensorflow.eager.RemoteTensorHandle remote_handle = 1;
inline bool Operation_Input::_internal_has_remote_handle() const {
  return item_case() == kRemoteHandle;
}
inline bool Operation_Input::has_remote_handle() const {
  return _internal_has_remote_handle();
}
// Marks `remote_handle` as the active oneof member (does not allocate).
inline void Operation_Input::set_has_remote_handle() {
  _impl_._oneof_case_[0] = kRemoteHandle;
}
// Transfers ownership of the submessage to the caller. If the message is
// arena-allocated, a heap copy is returned instead (the arena keeps the
// original alive), so the caller always owns the result. Returns nullptr
// when the oneof holds something else.
inline ::tensorflow::eager::RemoteTensorHandle* Operation_Input::release_remote_handle() {
  // @@protoc_insertion_point(field_release:tensorflow.eager.Operation.Input.remote_handle)
  if (_internal_has_remote_handle()) {
    clear_has_item();
    ::tensorflow::eager::RemoteTensorHandle* temp = _impl_.item_.remote_handle_;
    if (GetArenaForAllocation() != nullptr) {
      temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
    }
    _impl_.item_.remote_handle_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
// Returns the stored submessage, or the immutable default instance when the
// oneof is not set to remote_handle.
inline const ::tensorflow::eager::RemoteTensorHandle& Operation_Input::_internal_remote_handle() const {
  return _internal_has_remote_handle()
      ? *_impl_.item_.remote_handle_
      : reinterpret_cast< ::tensorflow::eager::RemoteTensorHandle&>(::tensorflow::eager::_RemoteTensorHandle_default_instance_);
}
inline const ::tensorflow::eager::RemoteTensorHandle& Operation_Input::remote_handle() const {
  // @@protoc_insertion_point(field_get:tensorflow.eager.Operation.Input.remote_handle)
  return _internal_remote_handle();
}
// Like release_remote_handle() but never copies: the raw pointer is handed
// out even on an arena, so the caller must not outlive/free arena memory.
inline ::tensorflow::eager::RemoteTensorHandle* Operation_Input::unsafe_arena_release_remote_handle() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.eager.Operation.Input.remote_handle)
  if (_internal_has_remote_handle()) {
    clear_has_item();
    ::tensorflow::eager::RemoteTensorHandle* temp = _impl_.item_.remote_handle_;
    _impl_.item_.remote_handle_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
// Stores `remote_handle` directly without any ownership transfer checks;
// passing nullptr simply clears the oneof.
inline void Operation_Input::unsafe_arena_set_allocated_remote_handle(::tensorflow::eager::RemoteTensorHandle* remote_handle) {
  clear_item();
  if (remote_handle) {
    set_has_remote_handle();
    _impl_.item_.remote_handle_ = remote_handle;
  }
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.eager.Operation.Input.remote_handle)
}
// Lazily switches the oneof to remote_handle, allocating a fresh submessage
// (on this message's arena, if any) when it was not already active.
inline ::tensorflow::eager::RemoteTensorHandle* Operation_Input::_internal_mutable_remote_handle() {
  if (!_internal_has_remote_handle()) {
    clear_item();
    set_has_remote_handle();
    _impl_.item_.remote_handle_ = CreateMaybeMessage< ::tensorflow::eager::RemoteTensorHandle >(GetArenaForAllocation());
  }
  return _impl_.item_.remote_handle_;
}
inline ::tensorflow::eager::RemoteTensorHandle* Operation_Input::mutable_remote_handle() {
  ::tensorflow::eager::RemoteTensorHandle* _msg = _internal_mutable_remote_handle();
  // @@protoc_insertion_point(field_mutable:tensorflow.eager.Operation.Input.remote_handle)
  return _msg;
}
4476
// Inline accessors for the `tensor` member of the Operation.Input `item`
// oneof; mirrors the remote_handle accessors above.
// .tensorflow.TensorProto tensor = 2;
inline bool Operation_Input::_internal_has_tensor() const {
  return item_case() == kTensor;
}
inline bool Operation_Input::has_tensor() const {
  return _internal_has_tensor();
}
// Marks `tensor` as the active oneof member (does not allocate).
inline void Operation_Input::set_has_tensor() {
  _impl_._oneof_case_[0] = kTensor;
}
// Transfers ownership to the caller; on an arena a heap copy is returned so
// the caller always owns the result. Returns nullptr when not set.
inline ::tensorflow::TensorProto* Operation_Input::release_tensor() {
  // @@protoc_insertion_point(field_release:tensorflow.eager.Operation.Input.tensor)
  if (_internal_has_tensor()) {
    clear_has_item();
    ::tensorflow::TensorProto* temp = _impl_.item_.tensor_;
    if (GetArenaForAllocation() != nullptr) {
      temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
    }
    _impl_.item_.tensor_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
// Returns the stored submessage, or the immutable default instance when the
// oneof is not set to tensor.
inline const ::tensorflow::TensorProto& Operation_Input::_internal_tensor() const {
  return _internal_has_tensor()
      ? *_impl_.item_.tensor_
      : reinterpret_cast< ::tensorflow::TensorProto&>(::tensorflow::_TensorProto_default_instance_);
}
inline const ::tensorflow::TensorProto& Operation_Input::tensor() const {
  // @@protoc_insertion_point(field_get:tensorflow.eager.Operation.Input.tensor)
  return _internal_tensor();
}
// Like release_tensor() but never copies; unsafe on arenas (caller must
// respect arena lifetime).
inline ::tensorflow::TensorProto* Operation_Input::unsafe_arena_release_tensor() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.eager.Operation.Input.tensor)
  if (_internal_has_tensor()) {
    clear_has_item();
    ::tensorflow::TensorProto* temp = _impl_.item_.tensor_;
    _impl_.item_.tensor_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
// Stores `tensor` directly without ownership checks; nullptr clears the oneof.
inline void Operation_Input::unsafe_arena_set_allocated_tensor(::tensorflow::TensorProto* tensor) {
  clear_item();
  if (tensor) {
    set_has_tensor();
    _impl_.item_.tensor_ = tensor;
  }
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.eager.Operation.Input.tensor)
}
// Lazily switches the oneof to tensor, allocating a fresh submessage (on
// this message's arena, if any) when it was not already active.
inline ::tensorflow::TensorProto* Operation_Input::_internal_mutable_tensor() {
  if (!_internal_has_tensor()) {
    clear_item();
    set_has_tensor();
    _impl_.item_.tensor_ = CreateMaybeMessage< ::tensorflow::TensorProto >(GetArenaForAllocation());
  }
  return _impl_.item_.tensor_;
}
inline ::tensorflow::TensorProto* Operation_Input::mutable_tensor() {
  ::tensorflow::TensorProto* _msg = _internal_mutable_tensor();
  // @@protoc_insertion_point(field_mutable:tensorflow.eager.Operation.Input.tensor)
  return _msg;
}
4542
// Oneof discriminator helpers: has_item() reports whether any member of the
// `item` oneof is set; clear_has_item() only resets the case word (it does
// NOT free the stored submessage — clear_item() does that).
inline bool Operation_Input::has_item() const {
  return item_case() != ITEM_NOT_SET;
}
inline void Operation_Input::clear_has_item() {
  _impl_._oneof_case_[0] = ITEM_NOT_SET;
}
// Returns which member of the oneof is currently set (ITEM_NOT_SET if none).
inline Operation_Input::ItemCase Operation_Input::item_case() const {
  return Operation_Input::ItemCase(_impl_._oneof_case_[0]);
}
4552 // -------------------------------------------------------------------
4553
4554 // -------------------------------------------------------------------
4555
4556 // Operation
4557
// Inline accessors for the scalar Operation.id field (plain int64 storage,
// default 0).
// int64 id = 1;
inline void Operation::clear_id() {
  _impl_.id_ = ::int64_t{0};
}
inline ::int64_t Operation::_internal_id() const {
  return _impl_.id_;
}
inline ::int64_t Operation::id() const {
  // @@protoc_insertion_point(field_get:tensorflow.eager.Operation.id)
  return _internal_id();
}
inline void Operation::_internal_set_id(::int64_t value) {

  _impl_.id_ = value;
}
inline void Operation::set_id(::int64_t value) {
  _internal_set_id(value);
  // @@protoc_insertion_point(field_set:tensorflow.eager.Operation.id)
}
4577
// Inline accessors for the Operation.name string field, backed by an
// arena-aware ArenaStringPtr.
// string name = 2;
inline void Operation::clear_name() {
  _impl_.name_.ClearToEmpty();
}
inline const std::string& Operation::name() const {
  // @@protoc_insertion_point(field_get:tensorflow.eager.Operation.name)
  return _internal_name();
}
// Accepts anything ArenaStringPtr::Set can take (std::string, const char*,
// string_view-style pairs, move).
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void Operation::set_name(ArgT0&& arg0, ArgT... args) {

  _impl_.name_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.eager.Operation.name)
}
inline std::string* Operation::mutable_name() {
  std::string* _s = _internal_mutable_name();
  // @@protoc_insertion_point(field_mutable:tensorflow.eager.Operation.name)
  return _s;
}
inline const std::string& Operation::_internal_name() const {
  return _impl_.name_.Get();
}
inline void Operation::_internal_set_name(const std::string& value) {

  _impl_.name_.Set(value, GetArenaForAllocation());
}
inline std::string* Operation::_internal_mutable_name() {

  return _impl_.name_.Mutable(GetArenaForAllocation());
}
// Caller takes ownership of the returned std::string (may be nullptr-like
// default handling inside ArenaStringPtr::Release).
inline std::string* Operation::release_name() {
  // @@protoc_insertion_point(field_release:tensorflow.eager.Operation.name)
  return _impl_.name_.Release();
}
// Takes ownership of `name`; under PROTOBUF_FORCE_COPY_DEFAULT_STRING the
// field is re-materialized so it never aliases the global empty default.
inline void Operation::set_allocated_name(std::string* name) {
  _impl_.name_.SetAllocated(name, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.name_.IsDefault()) {
    _impl_.name_.Set("", GetArenaForAllocation());
  }
#endif  // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.eager.Operation.name)
}
4622
// Inline accessors for the repeated Operation.op_inputs field; thin wrappers
// over RepeatedPtrField<Operation_Input>.
// repeated .tensorflow.eager.Operation.Input op_inputs = 10;
inline int Operation::_internal_op_inputs_size() const {
  return _impl_.op_inputs_.size();
}
inline int Operation::op_inputs_size() const {
  return _internal_op_inputs_size();
}
inline void Operation::clear_op_inputs() {
  _impl_.op_inputs_.Clear();
}
inline ::tensorflow::eager::Operation_Input* Operation::mutable_op_inputs(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.eager.Operation.op_inputs)
  return _impl_.op_inputs_.Mutable(index);
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::eager::Operation_Input >*
Operation::mutable_op_inputs() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.eager.Operation.op_inputs)
  return &_impl_.op_inputs_;
}
inline const ::tensorflow::eager::Operation_Input& Operation::_internal_op_inputs(int index) const {
  return _impl_.op_inputs_.Get(index);
}
inline const ::tensorflow::eager::Operation_Input& Operation::op_inputs(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.eager.Operation.op_inputs)
  return _internal_op_inputs(index);
}
// Appends a default-constructed element and returns a mutable pointer to it.
inline ::tensorflow::eager::Operation_Input* Operation::_internal_add_op_inputs() {
  return _impl_.op_inputs_.Add();
}
inline ::tensorflow::eager::Operation_Input* Operation::add_op_inputs() {
  ::tensorflow::eager::Operation_Input* _add = _internal_add_op_inputs();
  // @@protoc_insertion_point(field_add:tensorflow.eager.Operation.op_inputs)
  return _add;
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::eager::Operation_Input >&
Operation::op_inputs() const {
  // @@protoc_insertion_point(field_list:tensorflow.eager.Operation.op_inputs)
  return _impl_.op_inputs_;
}
4662
4663 // repeated int64 control_op_ids = 4;
_internal_control_op_ids_size()4664 inline int Operation::_internal_control_op_ids_size() const {
4665 return _impl_.control_op_ids_.size();
4666 }
control_op_ids_size()4667 inline int Operation::control_op_ids_size() const {
4668 return _internal_control_op_ids_size();
4669 }
clear_control_op_ids()4670 inline void Operation::clear_control_op_ids() {
4671 _impl_.control_op_ids_.Clear();
4672 }
_internal_control_op_ids(int index)4673 inline ::int64_t Operation::_internal_control_op_ids(int index) const {
4674 return _impl_.control_op_ids_.Get(index);
4675 }
control_op_ids(int index)4676 inline ::int64_t Operation::control_op_ids(int index) const {
4677 // @@protoc_insertion_point(field_get:tensorflow.eager.Operation.control_op_ids)
4678 return _internal_control_op_ids(index);
4679 }
set_control_op_ids(int index,::int64_t value)4680 inline void Operation::set_control_op_ids(int index, ::int64_t value) {
4681 _impl_.control_op_ids_.Set(index, value);
4682 // @@protoc_insertion_point(field_set:tensorflow.eager.Operation.control_op_ids)
4683 }
_internal_add_control_op_ids(::int64_t value)4684 inline void Operation::_internal_add_control_op_ids(::int64_t value) {
4685 _impl_.control_op_ids_.Add(value);
4686 }
add_control_op_ids(::int64_t value)4687 inline void Operation::add_control_op_ids(::int64_t value) {
4688 _internal_add_control_op_ids(value);
4689 // @@protoc_insertion_point(field_add:tensorflow.eager.Operation.control_op_ids)
4690 }
4691 inline const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
_internal_control_op_ids()4692 Operation::_internal_control_op_ids() const {
4693 return _impl_.control_op_ids_;
4694 }
4695 inline const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
control_op_ids()4696 Operation::control_op_ids() const {
4697 // @@protoc_insertion_point(field_list:tensorflow.eager.Operation.control_op_ids)
4698 return _internal_control_op_ids();
4699 }
4700 inline ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
_internal_mutable_control_op_ids()4701 Operation::_internal_mutable_control_op_ids() {
4702 return &_impl_.control_op_ids_;
4703 }
4704 inline ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
mutable_control_op_ids()4705 Operation::mutable_control_op_ids() {
4706 // @@protoc_insertion_point(field_mutable_list:tensorflow.eager.Operation.control_op_ids)
4707 return _internal_mutable_control_op_ids();
4708 }
4709
4710 // map<string, .tensorflow.AttrValue> attrs = 5;
_internal_attrs_size()4711 inline int Operation::_internal_attrs_size() const {
4712 return _impl_.attrs_.size();
4713 }
attrs_size()4714 inline int Operation::attrs_size() const {
4715 return _internal_attrs_size();
4716 }
4717 inline const ::PROTOBUF_NAMESPACE_ID::Map< std::string, ::tensorflow::AttrValue >&
_internal_attrs()4718 Operation::_internal_attrs() const {
4719 return _impl_.attrs_.GetMap();
4720 }
4721 inline const ::PROTOBUF_NAMESPACE_ID::Map< std::string, ::tensorflow::AttrValue >&
attrs()4722 Operation::attrs() const {
4723 // @@protoc_insertion_point(field_map:tensorflow.eager.Operation.attrs)
4724 return _internal_attrs();
4725 }
4726 inline ::PROTOBUF_NAMESPACE_ID::Map< std::string, ::tensorflow::AttrValue >*
_internal_mutable_attrs()4727 Operation::_internal_mutable_attrs() {
4728 return _impl_.attrs_.MutableMap();
4729 }
4730 inline ::PROTOBUF_NAMESPACE_ID::Map< std::string, ::tensorflow::AttrValue >*
mutable_attrs()4731 Operation::mutable_attrs() {
4732 // @@protoc_insertion_point(field_mutable_map:tensorflow.eager.Operation.attrs)
4733 return _internal_mutable_attrs();
4734 }
4735
4736 // string device = 6;
clear_device()4737 inline void Operation::clear_device() {
4738 _impl_.device_.ClearToEmpty();
4739 }
device()4740 inline const std::string& Operation::device() const {
4741 // @@protoc_insertion_point(field_get:tensorflow.eager.Operation.device)
4742 return _internal_device();
4743 }
4744 template <typename ArgT0, typename... ArgT>
4745 inline PROTOBUF_ALWAYS_INLINE
set_device(ArgT0 && arg0,ArgT...args)4746 void Operation::set_device(ArgT0&& arg0, ArgT... args) {
4747
4748 _impl_.device_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
4749 // @@protoc_insertion_point(field_set:tensorflow.eager.Operation.device)
4750 }
mutable_device()4751 inline std::string* Operation::mutable_device() {
4752 std::string* _s = _internal_mutable_device();
4753 // @@protoc_insertion_point(field_mutable:tensorflow.eager.Operation.device)
4754 return _s;
4755 }
_internal_device()4756 inline const std::string& Operation::_internal_device() const {
4757 return _impl_.device_.Get();
4758 }
_internal_set_device(const std::string & value)4759 inline void Operation::_internal_set_device(const std::string& value) {
4760
4761 _impl_.device_.Set(value, GetArenaForAllocation());
4762 }
_internal_mutable_device()4763 inline std::string* Operation::_internal_mutable_device() {
4764
4765 return _impl_.device_.Mutable(GetArenaForAllocation());
4766 }
release_device()4767 inline std::string* Operation::release_device() {
4768 // @@protoc_insertion_point(field_release:tensorflow.eager.Operation.device)
4769 return _impl_.device_.Release();
4770 }
set_allocated_device(std::string * device)4771 inline void Operation::set_allocated_device(std::string* device) {
4772 _impl_.device_.SetAllocated(device, GetArenaForAllocation());
4773 #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
4774 if (_impl_.device_.IsDefault()) {
4775 _impl_.device_.Set("", GetArenaForAllocation());
4776 }
4777 #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
4778 // @@protoc_insertion_point(field_set_allocated:tensorflow.eager.Operation.device)
4779 }
4780
4781 // bool is_component_function = 7;
clear_is_component_function()4782 inline void Operation::clear_is_component_function() {
4783 _impl_.is_component_function_ = false;
4784 }
_internal_is_component_function()4785 inline bool Operation::_internal_is_component_function() const {
4786 return _impl_.is_component_function_;
4787 }
is_component_function()4788 inline bool Operation::is_component_function() const {
4789 // @@protoc_insertion_point(field_get:tensorflow.eager.Operation.is_component_function)
4790 return _internal_is_component_function();
4791 }
_internal_set_is_component_function(bool value)4792 inline void Operation::_internal_set_is_component_function(bool value) {
4793
4794 _impl_.is_component_function_ = value;
4795 }
set_is_component_function(bool value)4796 inline void Operation::set_is_component_function(bool value) {
4797 _internal_set_is_component_function(value);
4798 // @@protoc_insertion_point(field_set:tensorflow.eager.Operation.is_component_function)
4799 }
4800
4801 // int64 func_step_id = 8;
clear_func_step_id()4802 inline void Operation::clear_func_step_id() {
4803 _impl_.func_step_id_ = ::int64_t{0};
4804 }
_internal_func_step_id()4805 inline ::int64_t Operation::_internal_func_step_id() const {
4806 return _impl_.func_step_id_;
4807 }
func_step_id()4808 inline ::int64_t Operation::func_step_id() const {
4809 // @@protoc_insertion_point(field_get:tensorflow.eager.Operation.func_step_id)
4810 return _internal_func_step_id();
4811 }
_internal_set_func_step_id(::int64_t value)4812 inline void Operation::_internal_set_func_step_id(::int64_t value) {
4813
4814 _impl_.func_step_id_ = value;
4815 }
set_func_step_id(::int64_t value)4816 inline void Operation::set_func_step_id(::int64_t value) {
4817 _internal_set_func_step_id(value);
4818 // @@protoc_insertion_point(field_set:tensorflow.eager.Operation.func_step_id)
4819 }
4820
4821 // bool is_function = 9;
clear_is_function()4822 inline void Operation::clear_is_function() {
4823 _impl_.is_function_ = false;
4824 }
_internal_is_function()4825 inline bool Operation::_internal_is_function() const {
4826 return _impl_.is_function_;
4827 }
is_function()4828 inline bool Operation::is_function() const {
4829 // @@protoc_insertion_point(field_get:tensorflow.eager.Operation.is_function)
4830 return _internal_is_function();
4831 }
_internal_set_is_function(bool value)4832 inline void Operation::_internal_set_is_function(bool value) {
4833
4834 _impl_.is_function_ = value;
4835 }
set_is_function(bool value)4836 inline void Operation::set_is_function(bool value) {
4837 _internal_set_is_function(value);
4838 // @@protoc_insertion_point(field_set:tensorflow.eager.Operation.is_function)
4839 }
4840
// -------------------------------------------------------------------

// QueueItem

// .tensorflow.eager.RemoteTensorHandle handle_to_decref = 1;
// Accessors for the `handle_to_decref` arm of the `item` oneof.  Presence is
// tracked by _oneof_case_[0]; only one arm's pointer in the item_ union is
// valid at a time.
inline bool QueueItem::_internal_has_handle_to_decref() const {
  return item_case() == kHandleToDecref;
}
inline bool QueueItem::has_handle_to_decref() const {
  return _internal_has_handle_to_decref();
}
inline void QueueItem::set_has_handle_to_decref() {
  _impl_._oneof_case_[0] = kHandleToDecref;
}
// Releases the submessage to the caller.  On an arena, returns a heap copy
// (the arena retains ownership of the original), so the caller may delete it.
inline ::tensorflow::eager::RemoteTensorHandle* QueueItem::release_handle_to_decref() {
  // @@protoc_insertion_point(field_release:tensorflow.eager.QueueItem.handle_to_decref)
  if (_internal_has_handle_to_decref()) {
    clear_has_item();
    ::tensorflow::eager::RemoteTensorHandle* temp = _impl_.item_.handle_to_decref_;
    if (GetArenaForAllocation() != nullptr) {
      temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
    }
    _impl_.item_.handle_to_decref_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
// Returns the submessage, or the class default instance when this arm of the
// oneof is not set.
inline const ::tensorflow::eager::RemoteTensorHandle& QueueItem::_internal_handle_to_decref() const {
  return _internal_has_handle_to_decref()
      ? *_impl_.item_.handle_to_decref_
      : reinterpret_cast< ::tensorflow::eager::RemoteTensorHandle&>(::tensorflow::eager::_RemoteTensorHandle_default_instance_);
}
inline const ::tensorflow::eager::RemoteTensorHandle& QueueItem::handle_to_decref() const {
  // @@protoc_insertion_point(field_get:tensorflow.eager.QueueItem.handle_to_decref)
  return _internal_handle_to_decref();
}
// Arena-unsafe release: hands back the stored pointer directly, with no copy
// even when it is arena-owned.
inline ::tensorflow::eager::RemoteTensorHandle* QueueItem::unsafe_arena_release_handle_to_decref() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.eager.QueueItem.handle_to_decref)
  if (_internal_has_handle_to_decref()) {
    clear_has_item();
    ::tensorflow::eager::RemoteTensorHandle* temp = _impl_.item_.handle_to_decref_;
    _impl_.item_.handle_to_decref_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
// Arena-unsafe adoption: clears any currently-set arm, then stores the given
// pointer without ownership/arena checks.
inline void QueueItem::unsafe_arena_set_allocated_handle_to_decref(::tensorflow::eager::RemoteTensorHandle* handle_to_decref) {
  clear_item();
  if (handle_to_decref) {
    set_has_handle_to_decref();
    _impl_.item_.handle_to_decref_ = handle_to_decref;
  }
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.eager.QueueItem.handle_to_decref)
}
// Lazily switches the oneof to this arm, constructing a new submessage if it
// is not already the active arm.
inline ::tensorflow::eager::RemoteTensorHandle* QueueItem::_internal_mutable_handle_to_decref() {
  if (!_internal_has_handle_to_decref()) {
    clear_item();
    set_has_handle_to_decref();
    _impl_.item_.handle_to_decref_ = CreateMaybeMessage< ::tensorflow::eager::RemoteTensorHandle >(GetArenaForAllocation());
  }
  return _impl_.item_.handle_to_decref_;
}
inline ::tensorflow::eager::RemoteTensorHandle* QueueItem::mutable_handle_to_decref() {
  ::tensorflow::eager::RemoteTensorHandle* _msg = _internal_mutable_handle_to_decref();
  // @@protoc_insertion_point(field_mutable:tensorflow.eager.QueueItem.handle_to_decref)
  return _msg;
}
4910
// .tensorflow.eager.Operation operation = 2;
// Accessors for the `operation` arm of the `item` oneof; same pattern as the
// `handle_to_decref` accessors.
inline bool QueueItem::_internal_has_operation() const {
  return item_case() == kOperation;
}
inline bool QueueItem::has_operation() const {
  return _internal_has_operation();
}
inline void QueueItem::set_has_operation() {
  _impl_._oneof_case_[0] = kOperation;
}
// Destroys the submessage only when heap-allocated (arena-owned instances are
// freed with the arena), then marks the oneof as unset.
inline void QueueItem::clear_operation() {
  if (_internal_has_operation()) {
    if (GetArenaForAllocation() == nullptr) {
      delete _impl_.item_.operation_;
    }
    clear_has_item();
  }
}
// Releases the submessage to the caller; on an arena, returns a heap copy so
// the caller may delete it.
inline ::tensorflow::eager::Operation* QueueItem::release_operation() {
  // @@protoc_insertion_point(field_release:tensorflow.eager.QueueItem.operation)
  if (_internal_has_operation()) {
    clear_has_item();
    ::tensorflow::eager::Operation* temp = _impl_.item_.operation_;
    if (GetArenaForAllocation() != nullptr) {
      temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
    }
    _impl_.item_.operation_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
// Returns the submessage, or the class default instance when unset.
inline const ::tensorflow::eager::Operation& QueueItem::_internal_operation() const {
  return _internal_has_operation()
      ? *_impl_.item_.operation_
      : reinterpret_cast< ::tensorflow::eager::Operation&>(::tensorflow::eager::_Operation_default_instance_);
}
inline const ::tensorflow::eager::Operation& QueueItem::operation() const {
  // @@protoc_insertion_point(field_get:tensorflow.eager.QueueItem.operation)
  return _internal_operation();
}
// Arena-unsafe release: hands back the stored pointer directly, no copy.
inline ::tensorflow::eager::Operation* QueueItem::unsafe_arena_release_operation() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.eager.QueueItem.operation)
  if (_internal_has_operation()) {
    clear_has_item();
    ::tensorflow::eager::Operation* temp = _impl_.item_.operation_;
    _impl_.item_.operation_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
// Arena-unsafe adoption of an externally allocated submessage.
inline void QueueItem::unsafe_arena_set_allocated_operation(::tensorflow::eager::Operation* operation) {
  clear_item();
  if (operation) {
    set_has_operation();
    _impl_.item_.operation_ = operation;
  }
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.eager.QueueItem.operation)
}
// Lazily switches the oneof to this arm, constructing the submessage if
// needed.
inline ::tensorflow::eager::Operation* QueueItem::_internal_mutable_operation() {
  if (!_internal_has_operation()) {
    clear_item();
    set_has_operation();
    _impl_.item_.operation_ = CreateMaybeMessage< ::tensorflow::eager::Operation >(GetArenaForAllocation());
  }
  return _impl_.item_.operation_;
}
inline ::tensorflow::eager::Operation* QueueItem::mutable_operation() {
  ::tensorflow::eager::Operation* _msg = _internal_mutable_operation();
  // @@protoc_insertion_point(field_mutable:tensorflow.eager.QueueItem.operation)
  return _msg;
}
4984
// .tensorflow.eager.SendTensorOp send_tensor = 3;
// Accessors for the `send_tensor` arm of the `item` oneof; same pattern as
// the other message arms above.
inline bool QueueItem::_internal_has_send_tensor() const {
  return item_case() == kSendTensor;
}
inline bool QueueItem::has_send_tensor() const {
  return _internal_has_send_tensor();
}
inline void QueueItem::set_has_send_tensor() {
  _impl_._oneof_case_[0] = kSendTensor;
}
// Deletes only heap-allocated instances (arena frees its own), then unsets.
inline void QueueItem::clear_send_tensor() {
  if (_internal_has_send_tensor()) {
    if (GetArenaForAllocation() == nullptr) {
      delete _impl_.item_.send_tensor_;
    }
    clear_has_item();
  }
}
// Releases to the caller; on an arena, returns a heap copy.
inline ::tensorflow::eager::SendTensorOp* QueueItem::release_send_tensor() {
  // @@protoc_insertion_point(field_release:tensorflow.eager.QueueItem.send_tensor)
  if (_internal_has_send_tensor()) {
    clear_has_item();
    ::tensorflow::eager::SendTensorOp* temp = _impl_.item_.send_tensor_;
    if (GetArenaForAllocation() != nullptr) {
      temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
    }
    _impl_.item_.send_tensor_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
// Returns the submessage, or the class default instance when unset.
inline const ::tensorflow::eager::SendTensorOp& QueueItem::_internal_send_tensor() const {
  return _internal_has_send_tensor()
      ? *_impl_.item_.send_tensor_
      : reinterpret_cast< ::tensorflow::eager::SendTensorOp&>(::tensorflow::eager::_SendTensorOp_default_instance_);
}
inline const ::tensorflow::eager::SendTensorOp& QueueItem::send_tensor() const {
  // @@protoc_insertion_point(field_get:tensorflow.eager.QueueItem.send_tensor)
  return _internal_send_tensor();
}
// Arena-unsafe release: returns the stored pointer directly, no copy.
inline ::tensorflow::eager::SendTensorOp* QueueItem::unsafe_arena_release_send_tensor() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.eager.QueueItem.send_tensor)
  if (_internal_has_send_tensor()) {
    clear_has_item();
    ::tensorflow::eager::SendTensorOp* temp = _impl_.item_.send_tensor_;
    _impl_.item_.send_tensor_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
// Arena-unsafe adoption of an externally allocated submessage.
inline void QueueItem::unsafe_arena_set_allocated_send_tensor(::tensorflow::eager::SendTensorOp* send_tensor) {
  clear_item();
  if (send_tensor) {
    set_has_send_tensor();
    _impl_.item_.send_tensor_ = send_tensor;
  }
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.eager.QueueItem.send_tensor)
}
// Lazily switches the oneof to this arm, constructing the submessage if
// needed.
inline ::tensorflow::eager::SendTensorOp* QueueItem::_internal_mutable_send_tensor() {
  if (!_internal_has_send_tensor()) {
    clear_item();
    set_has_send_tensor();
    _impl_.item_.send_tensor_ = CreateMaybeMessage< ::tensorflow::eager::SendTensorOp >(GetArenaForAllocation());
  }
  return _impl_.item_.send_tensor_;
}
inline ::tensorflow::eager::SendTensorOp* QueueItem::mutable_send_tensor() {
  ::tensorflow::eager::SendTensorOp* _msg = _internal_mutable_send_tensor();
  // @@protoc_insertion_point(field_mutable:tensorflow.eager.QueueItem.send_tensor)
  return _msg;
}
5058
// .tensorflow.eager.RegisterFunctionOp register_function = 4;
// Accessors for the `register_function` arm of the `item` oneof; same
// pattern as the other message arms above.
inline bool QueueItem::_internal_has_register_function() const {
  return item_case() == kRegisterFunction;
}
inline bool QueueItem::has_register_function() const {
  return _internal_has_register_function();
}
inline void QueueItem::set_has_register_function() {
  _impl_._oneof_case_[0] = kRegisterFunction;
}
// Deletes only heap-allocated instances (arena frees its own), then unsets.
inline void QueueItem::clear_register_function() {
  if (_internal_has_register_function()) {
    if (GetArenaForAllocation() == nullptr) {
      delete _impl_.item_.register_function_;
    }
    clear_has_item();
  }
}
// Releases to the caller; on an arena, returns a heap copy.
inline ::tensorflow::eager::RegisterFunctionOp* QueueItem::release_register_function() {
  // @@protoc_insertion_point(field_release:tensorflow.eager.QueueItem.register_function)
  if (_internal_has_register_function()) {
    clear_has_item();
    ::tensorflow::eager::RegisterFunctionOp* temp = _impl_.item_.register_function_;
    if (GetArenaForAllocation() != nullptr) {
      temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
    }
    _impl_.item_.register_function_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
// Returns the submessage, or the class default instance when unset.
inline const ::tensorflow::eager::RegisterFunctionOp& QueueItem::_internal_register_function() const {
  return _internal_has_register_function()
      ? *_impl_.item_.register_function_
      : reinterpret_cast< ::tensorflow::eager::RegisterFunctionOp&>(::tensorflow::eager::_RegisterFunctionOp_default_instance_);
}
inline const ::tensorflow::eager::RegisterFunctionOp& QueueItem::register_function() const {
  // @@protoc_insertion_point(field_get:tensorflow.eager.QueueItem.register_function)
  return _internal_register_function();
}
// Arena-unsafe release: returns the stored pointer directly, no copy.
inline ::tensorflow::eager::RegisterFunctionOp* QueueItem::unsafe_arena_release_register_function() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.eager.QueueItem.register_function)
  if (_internal_has_register_function()) {
    clear_has_item();
    ::tensorflow::eager::RegisterFunctionOp* temp = _impl_.item_.register_function_;
    _impl_.item_.register_function_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
// Arena-unsafe adoption of an externally allocated submessage.
inline void QueueItem::unsafe_arena_set_allocated_register_function(::tensorflow::eager::RegisterFunctionOp* register_function) {
  clear_item();
  if (register_function) {
    set_has_register_function();
    _impl_.item_.register_function_ = register_function;
  }
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.eager.QueueItem.register_function)
}
// Lazily switches the oneof to this arm, constructing the submessage if
// needed.
inline ::tensorflow::eager::RegisterFunctionOp* QueueItem::_internal_mutable_register_function() {
  if (!_internal_has_register_function()) {
    clear_item();
    set_has_register_function();
    _impl_.item_.register_function_ = CreateMaybeMessage< ::tensorflow::eager::RegisterFunctionOp >(GetArenaForAllocation());
  }
  return _impl_.item_.register_function_;
}
inline ::tensorflow::eager::RegisterFunctionOp* QueueItem::mutable_register_function() {
  ::tensorflow::eager::RegisterFunctionOp* _msg = _internal_mutable_register_function();
  // @@protoc_insertion_point(field_mutable:tensorflow.eager.QueueItem.register_function)
  return _msg;
}
5132
// .tensorflow.eager.CleanupFunctionOp cleanup_function = 5;
// Accessors for the `cleanup_function` arm of the `item` oneof; same pattern
// as the other message arms above.
inline bool QueueItem::_internal_has_cleanup_function() const {
  return item_case() == kCleanupFunction;
}
inline bool QueueItem::has_cleanup_function() const {
  return _internal_has_cleanup_function();
}
inline void QueueItem::set_has_cleanup_function() {
  _impl_._oneof_case_[0] = kCleanupFunction;
}
// Deletes only heap-allocated instances (arena frees its own), then unsets.
inline void QueueItem::clear_cleanup_function() {
  if (_internal_has_cleanup_function()) {
    if (GetArenaForAllocation() == nullptr) {
      delete _impl_.item_.cleanup_function_;
    }
    clear_has_item();
  }
}
// Releases to the caller; on an arena, returns a heap copy.
inline ::tensorflow::eager::CleanupFunctionOp* QueueItem::release_cleanup_function() {
  // @@protoc_insertion_point(field_release:tensorflow.eager.QueueItem.cleanup_function)
  if (_internal_has_cleanup_function()) {
    clear_has_item();
    ::tensorflow::eager::CleanupFunctionOp* temp = _impl_.item_.cleanup_function_;
    if (GetArenaForAllocation() != nullptr) {
      temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
    }
    _impl_.item_.cleanup_function_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
// Returns the submessage, or the class default instance when unset.
inline const ::tensorflow::eager::CleanupFunctionOp& QueueItem::_internal_cleanup_function() const {
  return _internal_has_cleanup_function()
      ? *_impl_.item_.cleanup_function_
      : reinterpret_cast< ::tensorflow::eager::CleanupFunctionOp&>(::tensorflow::eager::_CleanupFunctionOp_default_instance_);
}
inline const ::tensorflow::eager::CleanupFunctionOp& QueueItem::cleanup_function() const {
  // @@protoc_insertion_point(field_get:tensorflow.eager.QueueItem.cleanup_function)
  return _internal_cleanup_function();
}
// Arena-unsafe release: returns the stored pointer directly, no copy.
inline ::tensorflow::eager::CleanupFunctionOp* QueueItem::unsafe_arena_release_cleanup_function() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.eager.QueueItem.cleanup_function)
  if (_internal_has_cleanup_function()) {
    clear_has_item();
    ::tensorflow::eager::CleanupFunctionOp* temp = _impl_.item_.cleanup_function_;
    _impl_.item_.cleanup_function_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
// Arena-unsafe adoption of an externally allocated submessage.
inline void QueueItem::unsafe_arena_set_allocated_cleanup_function(::tensorflow::eager::CleanupFunctionOp* cleanup_function) {
  clear_item();
  if (cleanup_function) {
    set_has_cleanup_function();
    _impl_.item_.cleanup_function_ = cleanup_function;
  }
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.eager.QueueItem.cleanup_function)
}
// Lazily switches the oneof to this arm, constructing the submessage if
// needed.
inline ::tensorflow::eager::CleanupFunctionOp* QueueItem::_internal_mutable_cleanup_function() {
  if (!_internal_has_cleanup_function()) {
    clear_item();
    set_has_cleanup_function();
    _impl_.item_.cleanup_function_ = CreateMaybeMessage< ::tensorflow::eager::CleanupFunctionOp >(GetArenaForAllocation());
  }
  return _impl_.item_.cleanup_function_;
}
inline ::tensorflow::eager::CleanupFunctionOp* QueueItem::mutable_cleanup_function() {
  ::tensorflow::eager::CleanupFunctionOp* _msg = _internal_mutable_cleanup_function();
  // @@protoc_insertion_point(field_mutable:tensorflow.eager.QueueItem.cleanup_function)
  return _msg;
}
5206
// .tensorflow.eager.SyncRemoteExecutorForStream sync_remote_executor_for_stream = 6;
// Accessors for the `sync_remote_executor_for_stream` arm of the `item`
// oneof; same pattern as the other message arms above.
inline bool QueueItem::_internal_has_sync_remote_executor_for_stream() const {
  return item_case() == kSyncRemoteExecutorForStream;
}
inline bool QueueItem::has_sync_remote_executor_for_stream() const {
  return _internal_has_sync_remote_executor_for_stream();
}
inline void QueueItem::set_has_sync_remote_executor_for_stream() {
  _impl_._oneof_case_[0] = kSyncRemoteExecutorForStream;
}
// Deletes only heap-allocated instances (arena frees its own), then unsets.
inline void QueueItem::clear_sync_remote_executor_for_stream() {
  if (_internal_has_sync_remote_executor_for_stream()) {
    if (GetArenaForAllocation() == nullptr) {
      delete _impl_.item_.sync_remote_executor_for_stream_;
    }
    clear_has_item();
  }
}
// Releases to the caller; on an arena, returns a heap copy.
inline ::tensorflow::eager::SyncRemoteExecutorForStream* QueueItem::release_sync_remote_executor_for_stream() {
  // @@protoc_insertion_point(field_release:tensorflow.eager.QueueItem.sync_remote_executor_for_stream)
  if (_internal_has_sync_remote_executor_for_stream()) {
    clear_has_item();
    ::tensorflow::eager::SyncRemoteExecutorForStream* temp = _impl_.item_.sync_remote_executor_for_stream_;
    if (GetArenaForAllocation() != nullptr) {
      temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
    }
    _impl_.item_.sync_remote_executor_for_stream_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
// Returns the submessage, or the class default instance when unset.
inline const ::tensorflow::eager::SyncRemoteExecutorForStream& QueueItem::_internal_sync_remote_executor_for_stream() const {
  return _internal_has_sync_remote_executor_for_stream()
      ? *_impl_.item_.sync_remote_executor_for_stream_
      : reinterpret_cast< ::tensorflow::eager::SyncRemoteExecutorForStream&>(::tensorflow::eager::_SyncRemoteExecutorForStream_default_instance_);
}
inline const ::tensorflow::eager::SyncRemoteExecutorForStream& QueueItem::sync_remote_executor_for_stream() const {
  // @@protoc_insertion_point(field_get:tensorflow.eager.QueueItem.sync_remote_executor_for_stream)
  return _internal_sync_remote_executor_for_stream();
}
// Arena-unsafe release: returns the stored pointer directly, no copy.
inline ::tensorflow::eager::SyncRemoteExecutorForStream* QueueItem::unsafe_arena_release_sync_remote_executor_for_stream() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.eager.QueueItem.sync_remote_executor_for_stream)
  if (_internal_has_sync_remote_executor_for_stream()) {
    clear_has_item();
    ::tensorflow::eager::SyncRemoteExecutorForStream* temp = _impl_.item_.sync_remote_executor_for_stream_;
    _impl_.item_.sync_remote_executor_for_stream_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
// Arena-unsafe adoption of an externally allocated submessage.
inline void QueueItem::unsafe_arena_set_allocated_sync_remote_executor_for_stream(::tensorflow::eager::SyncRemoteExecutorForStream* sync_remote_executor_for_stream) {
  clear_item();
  if (sync_remote_executor_for_stream) {
    set_has_sync_remote_executor_for_stream();
    _impl_.item_.sync_remote_executor_for_stream_ = sync_remote_executor_for_stream;
  }
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.eager.QueueItem.sync_remote_executor_for_stream)
}
// Lazily switches the oneof to this arm, constructing the submessage if
// needed.
inline ::tensorflow::eager::SyncRemoteExecutorForStream* QueueItem::_internal_mutable_sync_remote_executor_for_stream() {
  if (!_internal_has_sync_remote_executor_for_stream()) {
    clear_item();
    set_has_sync_remote_executor_for_stream();
    _impl_.item_.sync_remote_executor_for_stream_ = CreateMaybeMessage< ::tensorflow::eager::SyncRemoteExecutorForStream >(GetArenaForAllocation());
  }
  return _impl_.item_.sync_remote_executor_for_stream_;
}
inline ::tensorflow::eager::SyncRemoteExecutorForStream* QueueItem::mutable_sync_remote_executor_for_stream() {
  ::tensorflow::eager::SyncRemoteExecutorForStream* _msg = _internal_mutable_sync_remote_executor_for_stream();
  // @@protoc_insertion_point(field_mutable:tensorflow.eager.QueueItem.sync_remote_executor_for_stream)
  return _msg;
}
5280
// .tensorflow.eager.SendPackedHandleOp send_packed_handle = 7;
// Accessors for the `send_packed_handle` arm of the `item` oneof; same
// pattern as the other message arms above.
inline bool QueueItem::_internal_has_send_packed_handle() const {
  return item_case() == kSendPackedHandle;
}
inline bool QueueItem::has_send_packed_handle() const {
  return _internal_has_send_packed_handle();
}
inline void QueueItem::set_has_send_packed_handle() {
  _impl_._oneof_case_[0] = kSendPackedHandle;
}
// Deletes only heap-allocated instances (arena frees its own), then unsets.
inline void QueueItem::clear_send_packed_handle() {
  if (_internal_has_send_packed_handle()) {
    if (GetArenaForAllocation() == nullptr) {
      delete _impl_.item_.send_packed_handle_;
    }
    clear_has_item();
  }
}
// Releases to the caller; on an arena, returns a heap copy.
inline ::tensorflow::eager::SendPackedHandleOp* QueueItem::release_send_packed_handle() {
  // @@protoc_insertion_point(field_release:tensorflow.eager.QueueItem.send_packed_handle)
  if (_internal_has_send_packed_handle()) {
    clear_has_item();
    ::tensorflow::eager::SendPackedHandleOp* temp = _impl_.item_.send_packed_handle_;
    if (GetArenaForAllocation() != nullptr) {
      temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
    }
    _impl_.item_.send_packed_handle_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
// Returns the submessage, or the class default instance when unset.
inline const ::tensorflow::eager::SendPackedHandleOp& QueueItem::_internal_send_packed_handle() const {
  return _internal_has_send_packed_handle()
      ? *_impl_.item_.send_packed_handle_
      : reinterpret_cast< ::tensorflow::eager::SendPackedHandleOp&>(::tensorflow::eager::_SendPackedHandleOp_default_instance_);
}
inline const ::tensorflow::eager::SendPackedHandleOp& QueueItem::send_packed_handle() const {
  // @@protoc_insertion_point(field_get:tensorflow.eager.QueueItem.send_packed_handle)
  return _internal_send_packed_handle();
}
unsafe_arena_release_send_packed_handle()5322 inline ::tensorflow::eager::SendPackedHandleOp* QueueItem::unsafe_arena_release_send_packed_handle() {
5323 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.eager.QueueItem.send_packed_handle)
5324 if (_internal_has_send_packed_handle()) {
5325 clear_has_item();
5326 ::tensorflow::eager::SendPackedHandleOp* temp = _impl_.item_.send_packed_handle_;
5327 _impl_.item_.send_packed_handle_ = nullptr;
5328 return temp;
5329 } else {
5330 return nullptr;
5331 }
5332 }
unsafe_arena_set_allocated_send_packed_handle(::tensorflow::eager::SendPackedHandleOp * send_packed_handle)5333 inline void QueueItem::unsafe_arena_set_allocated_send_packed_handle(::tensorflow::eager::SendPackedHandleOp* send_packed_handle) {
5334 clear_item();
5335 if (send_packed_handle) {
5336 set_has_send_packed_handle();
5337 _impl_.item_.send_packed_handle_ = send_packed_handle;
5338 }
5339 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.eager.QueueItem.send_packed_handle)
5340 }
_internal_mutable_send_packed_handle()5341 inline ::tensorflow::eager::SendPackedHandleOp* QueueItem::_internal_mutable_send_packed_handle() {
5342 if (!_internal_has_send_packed_handle()) {
5343 clear_item();
5344 set_has_send_packed_handle();
5345 _impl_.item_.send_packed_handle_ = CreateMaybeMessage< ::tensorflow::eager::SendPackedHandleOp >(GetArenaForAllocation());
5346 }
5347 return _impl_.item_.send_packed_handle_;
5348 }
mutable_send_packed_handle()5349 inline ::tensorflow::eager::SendPackedHandleOp* QueueItem::mutable_send_packed_handle() {
5350 ::tensorflow::eager::SendPackedHandleOp* _msg = _internal_mutable_send_packed_handle();
5351 // @@protoc_insertion_point(field_mutable:tensorflow.eager.QueueItem.send_packed_handle)
5352 return _msg;
5353 }
5354
// Generic helpers for the `item` oneof: presence test, case reset (does not
// free the active member — callers handle that first), and case query.
inline bool QueueItem::has_item() const {
  return item_case() != ITEM_NOT_SET;
}
inline void QueueItem::clear_has_item() {
  _impl_._oneof_case_[0] = ITEM_NOT_SET;
}
inline QueueItem::ItemCase QueueItem::item_case() const {
  return QueueItem::ItemCase(_impl_._oneof_case_[0]);
}
5364 // -------------------------------------------------------------------
5365
5366 // QueueResponse
5367
// repeated .tensorflow.TensorShapeProto shape = 1;
// Standard repeated-message accessors backed by _impl_.shape_.
inline int QueueResponse::_internal_shape_size() const {
  return _impl_.shape_.size();
}
inline int QueueResponse::shape_size() const {
  return _internal_shape_size();
}
inline ::tensorflow::TensorShapeProto* QueueResponse::mutable_shape(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.eager.QueueResponse.shape)
  return _impl_.shape_.Mutable(index);
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorShapeProto >*
QueueResponse::mutable_shape() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.eager.QueueResponse.shape)
  return &_impl_.shape_;
}
inline const ::tensorflow::TensorShapeProto& QueueResponse::_internal_shape(int index) const {
  return _impl_.shape_.Get(index);
}
inline const ::tensorflow::TensorShapeProto& QueueResponse::shape(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.eager.QueueResponse.shape)
  return _internal_shape(index);
}
// Appends a new default-constructed element; the field owns the result.
inline ::tensorflow::TensorShapeProto* QueueResponse::_internal_add_shape() {
  return _impl_.shape_.Add();
}
inline ::tensorflow::TensorShapeProto* QueueResponse::add_shape() {
  ::tensorflow::TensorShapeProto* _add = _internal_add_shape();
  // @@protoc_insertion_point(field_add:tensorflow.eager.QueueResponse.shape)
  return _add;
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorShapeProto >&
QueueResponse::shape() const {
  // @@protoc_insertion_point(field_list:tensorflow.eager.QueueResponse.shape)
  return _impl_.shape_;
}
5404
// repeated string device = 3;
// Standard repeated-string accessors backed by _impl_.device_.
inline int QueueResponse::_internal_device_size() const {
  return _impl_.device_.size();
}
inline int QueueResponse::device_size() const {
  return _internal_device_size();
}
inline void QueueResponse::clear_device() {
  _impl_.device_.Clear();
}
inline std::string* QueueResponse::add_device() {
  std::string* _s = _internal_add_device();
  // @@protoc_insertion_point(field_add_mutable:tensorflow.eager.QueueResponse.device)
  return _s;
}
inline const std::string& QueueResponse::_internal_device(int index) const {
  return _impl_.device_.Get(index);
}
inline const std::string& QueueResponse::device(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.eager.QueueResponse.device)
  return _internal_device(index);
}
inline std::string* QueueResponse::mutable_device(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.eager.QueueResponse.device)
  return _impl_.device_.Mutable(index);
}
// set_device overloads: copy-assign, move-assign, NUL-terminated C string,
// and explicit (pointer, size) for data that may contain embedded NULs.
inline void QueueResponse::set_device(int index, const std::string& value) {
  _impl_.device_.Mutable(index)->assign(value);
  // @@protoc_insertion_point(field_set:tensorflow.eager.QueueResponse.device)
}
inline void QueueResponse::set_device(int index, std::string&& value) {
  _impl_.device_.Mutable(index)->assign(std::move(value));
  // @@protoc_insertion_point(field_set:tensorflow.eager.QueueResponse.device)
}
inline void QueueResponse::set_device(int index, const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  _impl_.device_.Mutable(index)->assign(value);
  // @@protoc_insertion_point(field_set_char:tensorflow.eager.QueueResponse.device)
}
inline void QueueResponse::set_device(int index, const char* value, size_t size) {
  _impl_.device_.Mutable(index)->assign(
    reinterpret_cast<const char*>(value), size);
  // @@protoc_insertion_point(field_set_pointer:tensorflow.eager.QueueResponse.device)
}
inline std::string* QueueResponse::_internal_add_device() {
  return _impl_.device_.Add();
}
// add_device overloads mirror the set_device overload set.
inline void QueueResponse::add_device(const std::string& value) {
  _impl_.device_.Add()->assign(value);
  // @@protoc_insertion_point(field_add:tensorflow.eager.QueueResponse.device)
}
inline void QueueResponse::add_device(std::string&& value) {
  _impl_.device_.Add(std::move(value));
  // @@protoc_insertion_point(field_add:tensorflow.eager.QueueResponse.device)
}
inline void QueueResponse::add_device(const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  _impl_.device_.Add()->assign(value);
  // @@protoc_insertion_point(field_add_char:tensorflow.eager.QueueResponse.device)
}
inline void QueueResponse::add_device(const char* value, size_t size) {
  _impl_.device_.Add()->assign(reinterpret_cast<const char*>(value), size);
  // @@protoc_insertion_point(field_add_pointer:tensorflow.eager.QueueResponse.device)
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>&
QueueResponse::device() const {
  // @@protoc_insertion_point(field_list:tensorflow.eager.QueueResponse.device)
  return _impl_.device_;
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>*
QueueResponse::mutable_device() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.eager.QueueResponse.device)
  return &_impl_.device_;
}
5479
// repeated .tensorflow.TensorProto tensor = 2;
// Standard repeated-message accessors backed by _impl_.tensor_.
inline int QueueResponse::_internal_tensor_size() const {
  return _impl_.tensor_.size();
}
inline int QueueResponse::tensor_size() const {
  return _internal_tensor_size();
}
inline ::tensorflow::TensorProto* QueueResponse::mutable_tensor(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.eager.QueueResponse.tensor)
  return _impl_.tensor_.Mutable(index);
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorProto >*
QueueResponse::mutable_tensor() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.eager.QueueResponse.tensor)
  return &_impl_.tensor_;
}
inline const ::tensorflow::TensorProto& QueueResponse::_internal_tensor(int index) const {
  return _impl_.tensor_.Get(index);
}
inline const ::tensorflow::TensorProto& QueueResponse::tensor(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.eager.QueueResponse.tensor)
  return _internal_tensor(index);
}
// Appends a new default-constructed element; the field owns the result.
inline ::tensorflow::TensorProto* QueueResponse::_internal_add_tensor() {
  return _impl_.tensor_.Add();
}
inline ::tensorflow::TensorProto* QueueResponse::add_tensor() {
  ::tensorflow::TensorProto* _add = _internal_add_tensor();
  // @@protoc_insertion_point(field_add:tensorflow.eager.QueueResponse.tensor)
  return _add;
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorProto >&
QueueResponse::tensor() const {
  // @@protoc_insertion_point(field_list:tensorflow.eager.QueueResponse.tensor)
  return _impl_.tensor_;
}
5516
5517 // -------------------------------------------------------------------
5518
5519 // CreateContextRequest
5520
// .tensorflow.ServerDef server_def = 1;
// Singular-message accessors for `server_def` (cross-file submessage type,
// so the pointer is handled through MessageLite casts).
inline bool CreateContextRequest::_internal_has_server_def() const {
  // The default instance never reports presence.
  return this != internal_default_instance() && _impl_.server_def_ != nullptr;
}
inline bool CreateContextRequest::has_server_def() const {
  return _internal_has_server_def();
}
// Const accessor; falls back to the ServerDef default instance when unset.
inline const ::tensorflow::ServerDef& CreateContextRequest::_internal_server_def() const {
  const ::tensorflow::ServerDef* p = _impl_.server_def_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::ServerDef&>(
      ::tensorflow::_ServerDef_default_instance_);
}
inline const ::tensorflow::ServerDef& CreateContextRequest::server_def() const {
  // @@protoc_insertion_point(field_get:tensorflow.eager.CreateContextRequest.server_def)
  return _internal_server_def();
}
// Takes ownership without cross-arena checks; frees the previous value only
// when this message is heap-allocated.
inline void CreateContextRequest::unsafe_arena_set_allocated_server_def(
    ::tensorflow::ServerDef* server_def) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.server_def_);
  }
  _impl_.server_def_ = server_def;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.eager.CreateContextRequest.server_def)
}
// Transfers ownership to the caller; on-arena submessages are duplicated to
// the heap so the caller always owns the result. The FORCE_COPY build flag
// makes the copy unconditional to flush out lifetime bugs.
inline ::tensorflow::ServerDef* CreateContextRequest::release_server_def() {

  ::tensorflow::ServerDef* temp = _impl_.server_def_;
  _impl_.server_def_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old = reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else  // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
// Releases WITHOUT the arena-safety duplicate; caller must manage lifetime
// relative to the arena.
inline ::tensorflow::ServerDef* CreateContextRequest::unsafe_arena_release_server_def() {
  // @@protoc_insertion_point(field_release:tensorflow.eager.CreateContextRequest.server_def)

  ::tensorflow::ServerDef* temp = _impl_.server_def_;
  _impl_.server_def_ = nullptr;
  return temp;
}
// Lazily creates the submessage on this message's arena.
inline ::tensorflow::ServerDef* CreateContextRequest::_internal_mutable_server_def() {

  if (_impl_.server_def_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::ServerDef>(GetArenaForAllocation());
    _impl_.server_def_ = p;
  }
  return _impl_.server_def_;
}
inline ::tensorflow::ServerDef* CreateContextRequest::mutable_server_def() {
  ::tensorflow::ServerDef* _msg = _internal_mutable_server_def();
  // @@protoc_insertion_point(field_mutable:tensorflow.eager.CreateContextRequest.server_def)
  return _msg;
}
// Takes ownership with full arena handling: frees the old heap value, and if
// the new submessage lives on a different arena, GetOwnedMessage copies it
// onto this message's arena.
inline void CreateContextRequest::set_allocated_server_def(::tensorflow::ServerDef* server_def) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.server_def_);
  }
  if (server_def) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(
                reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(server_def));
    if (message_arena != submessage_arena) {
      server_def = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, server_def, submessage_arena);
    }

  } else {

  }
  _impl_.server_def_ = server_def;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.eager.CreateContextRequest.server_def)
}
5600
// bool async = 2;
// Plain scalar accessors; no presence bit, default is false.
inline void CreateContextRequest::clear_async() {
  _impl_.async_ = false;
}
inline bool CreateContextRequest::_internal_async() const {
  return _impl_.async_;
}
inline bool CreateContextRequest::async() const {
  // @@protoc_insertion_point(field_get:tensorflow.eager.CreateContextRequest.async)
  return _internal_async();
}
inline void CreateContextRequest::_internal_set_async(bool value) {

  _impl_.async_ = value;
}
inline void CreateContextRequest::set_async(bool value) {
  _internal_set_async(value);
  // @@protoc_insertion_point(field_set:tensorflow.eager.CreateContextRequest.async)
}
5620
5621 // int64 keep_alive_secs = 3;
clear_keep_alive_secs()5622 inline void CreateContextRequest::clear_keep_alive_secs() {
5623 _impl_.keep_alive_secs_ = ::int64_t{0};
5624 }
_internal_keep_alive_secs()5625 inline ::int64_t CreateContextRequest::_internal_keep_alive_secs() const {
5626 return _impl_.keep_alive_secs_;
5627 }
keep_alive_secs()5628 inline ::int64_t CreateContextRequest::keep_alive_secs() const {
5629 // @@protoc_insertion_point(field_get:tensorflow.eager.CreateContextRequest.keep_alive_secs)
5630 return _internal_keep_alive_secs();
5631 }
_internal_set_keep_alive_secs(::int64_t value)5632 inline void CreateContextRequest::_internal_set_keep_alive_secs(::int64_t value) {
5633
5634 _impl_.keep_alive_secs_ = value;
5635 }
set_keep_alive_secs(::int64_t value)5636 inline void CreateContextRequest::set_keep_alive_secs(::int64_t value) {
5637 _internal_set_keep_alive_secs(value);
5638 // @@protoc_insertion_point(field_set:tensorflow.eager.CreateContextRequest.keep_alive_secs)
5639 }
5640
// .tensorflow.VersionDef version_def = 4;
// Singular-message accessors for `version_def`; same arena/ownership scheme
// as the `server_def` accessors above.
inline bool CreateContextRequest::_internal_has_version_def() const {
  // The default instance never reports presence.
  return this != internal_default_instance() && _impl_.version_def_ != nullptr;
}
inline bool CreateContextRequest::has_version_def() const {
  return _internal_has_version_def();
}
// Const accessor; falls back to the VersionDef default instance when unset.
inline const ::tensorflow::VersionDef& CreateContextRequest::_internal_version_def() const {
  const ::tensorflow::VersionDef* p = _impl_.version_def_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::VersionDef&>(
      ::tensorflow::_VersionDef_default_instance_);
}
inline const ::tensorflow::VersionDef& CreateContextRequest::version_def() const {
  // @@protoc_insertion_point(field_get:tensorflow.eager.CreateContextRequest.version_def)
  return _internal_version_def();
}
// Takes ownership without cross-arena checks; frees the previous value only
// when this message is heap-allocated.
inline void CreateContextRequest::unsafe_arena_set_allocated_version_def(
    ::tensorflow::VersionDef* version_def) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.version_def_);
  }
  _impl_.version_def_ = version_def;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.eager.CreateContextRequest.version_def)
}
// Transfers ownership to the caller; on-arena submessages are duplicated to
// the heap so the caller always owns the result.
inline ::tensorflow::VersionDef* CreateContextRequest::release_version_def() {

  ::tensorflow::VersionDef* temp = _impl_.version_def_;
  _impl_.version_def_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old = reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else  // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
// Releases WITHOUT the arena-safety duplicate.
inline ::tensorflow::VersionDef* CreateContextRequest::unsafe_arena_release_version_def() {
  // @@protoc_insertion_point(field_release:tensorflow.eager.CreateContextRequest.version_def)

  ::tensorflow::VersionDef* temp = _impl_.version_def_;
  _impl_.version_def_ = nullptr;
  return temp;
}
// Lazily creates the submessage on this message's arena.
inline ::tensorflow::VersionDef* CreateContextRequest::_internal_mutable_version_def() {

  if (_impl_.version_def_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::VersionDef>(GetArenaForAllocation());
    _impl_.version_def_ = p;
  }
  return _impl_.version_def_;
}
inline ::tensorflow::VersionDef* CreateContextRequest::mutable_version_def() {
  ::tensorflow::VersionDef* _msg = _internal_mutable_version_def();
  // @@protoc_insertion_point(field_mutable:tensorflow.eager.CreateContextRequest.version_def)
  return _msg;
}
// Takes ownership with full arena handling: frees the old heap value, and if
// the new submessage lives on a different arena, GetOwnedMessage copies it
// onto this message's arena.
inline void CreateContextRequest::set_allocated_version_def(::tensorflow::VersionDef* version_def) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.version_def_);
  }
  if (version_def) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(
                reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(version_def));
    if (message_arena != submessage_arena) {
      version_def = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, version_def, submessage_arena);
    }

  } else {

  }
  _impl_.version_def_ = version_def;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.eager.CreateContextRequest.version_def)
}
5720
// repeated .tensorflow.DeviceAttributes cluster_device_attributes = 6;
// Standard repeated-message accessors backed by
// _impl_.cluster_device_attributes_.
inline int CreateContextRequest::_internal_cluster_device_attributes_size() const {
  return _impl_.cluster_device_attributes_.size();
}
inline int CreateContextRequest::cluster_device_attributes_size() const {
  return _internal_cluster_device_attributes_size();
}
inline ::tensorflow::DeviceAttributes* CreateContextRequest::mutable_cluster_device_attributes(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.eager.CreateContextRequest.cluster_device_attributes)
  return _impl_.cluster_device_attributes_.Mutable(index);
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::DeviceAttributes >*
CreateContextRequest::mutable_cluster_device_attributes() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.eager.CreateContextRequest.cluster_device_attributes)
  return &_impl_.cluster_device_attributes_;
}
inline const ::tensorflow::DeviceAttributes& CreateContextRequest::_internal_cluster_device_attributes(int index) const {
  return _impl_.cluster_device_attributes_.Get(index);
}
inline const ::tensorflow::DeviceAttributes& CreateContextRequest::cluster_device_attributes(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.eager.CreateContextRequest.cluster_device_attributes)
  return _internal_cluster_device_attributes(index);
}
// Appends a new default-constructed element; the field owns the result.
inline ::tensorflow::DeviceAttributes* CreateContextRequest::_internal_add_cluster_device_attributes() {
  return _impl_.cluster_device_attributes_.Add();
}
inline ::tensorflow::DeviceAttributes* CreateContextRequest::add_cluster_device_attributes() {
  ::tensorflow::DeviceAttributes* _add = _internal_add_cluster_device_attributes();
  // @@protoc_insertion_point(field_add:tensorflow.eager.CreateContextRequest.cluster_device_attributes)
  return _add;
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::DeviceAttributes >&
CreateContextRequest::cluster_device_attributes() const {
  // @@protoc_insertion_point(field_list:tensorflow.eager.CreateContextRequest.cluster_device_attributes)
  return _impl_.cluster_device_attributes_;
}
5757
// fixed64 context_id = 7;
// Plain scalar accessors; no presence bit, default is 0.
inline void CreateContextRequest::clear_context_id() {
  _impl_.context_id_ = ::uint64_t{0u};
}
inline ::uint64_t CreateContextRequest::_internal_context_id() const {
  return _impl_.context_id_;
}
inline ::uint64_t CreateContextRequest::context_id() const {
  // @@protoc_insertion_point(field_get:tensorflow.eager.CreateContextRequest.context_id)
  return _internal_context_id();
}
inline void CreateContextRequest::_internal_set_context_id(::uint64_t value) {

  _impl_.context_id_ = value;
}
inline void CreateContextRequest::set_context_id(::uint64_t value) {
  _internal_set_context_id(value);
  // @@protoc_insertion_point(field_set:tensorflow.eager.CreateContextRequest.context_id)
}
5777
// fixed64 context_view_id = 8;
// Resets the field to its proto3 scalar default (0).
inline void CreateContextRequest::clear_context_view_id() {
  _impl_.context_view_id_ = ::uint64_t{0u};
}
_internal_context_view_id()5782