// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: tensorflow/core/protobuf/debug_event.proto

#ifndef GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto
#define GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto

#include <cstdint>
#include <limits>
#include <string>

#include <google/protobuf/port_def.inc>
#if PROTOBUF_VERSION < 3021000
#error This file was generated by a newer version of protoc which is
#error incompatible with your Protocol Buffer headers. Please update
#error your headers.
#endif
#if 3021012 < PROTOBUF_MIN_PROTOC_VERSION
#error This file was generated by an older version of protoc which is
#error incompatible with your Protocol Buffer headers. Please
#error regenerate this file with a newer version of protoc.
#endif

#include <google/protobuf/port_undef.inc>
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/arena.h>
#include <google/protobuf/arenastring.h>
#include <google/protobuf/generated_message_util.h>
#include <google/protobuf/metadata_lite.h>
#include <google/protobuf/message_lite.h>
#include <google/protobuf/repeated_field.h>  // IWYU pragma: export
#include <google/protobuf/extension_set.h>  // IWYU pragma: export
#include <google/protobuf/generated_enum_util.h>
#include "tensorflow/core/framework/tensor.pb.h"
#include "tensorflow/core/protobuf/graph_debug_info.pb.h"
// @@protoc_insertion_point(includes)
#include <google/protobuf/port_def.inc>
#define PROTOBUF_INTERNAL_EXPORT_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto
PROTOBUF_NAMESPACE_OPEN
namespace internal {
class AnyMetadata;
}  // namespace internal
PROTOBUF_NAMESPACE_CLOSE

// Internal implementation detail -- do not use these members.
struct TableStruct_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto {
  static const ::uint32_t offsets[];
};
namespace tensorflow {
class CodeLocation;
struct CodeLocationDefaultTypeInternal;
extern CodeLocationDefaultTypeInternal _CodeLocation_default_instance_;
class DebugEvent;
struct DebugEventDefaultTypeInternal;
extern DebugEventDefaultTypeInternal _DebugEvent_default_instance_;
class DebugMetadata;
struct DebugMetadataDefaultTypeInternal;
extern DebugMetadataDefaultTypeInternal _DebugMetadata_default_instance_;
class DebuggedDevice;
struct DebuggedDeviceDefaultTypeInternal;
extern DebuggedDeviceDefaultTypeInternal _DebuggedDevice_default_instance_;
class DebuggedGraph;
struct DebuggedGraphDefaultTypeInternal;
extern DebuggedGraphDefaultTypeInternal _DebuggedGraph_default_instance_;
class Execution;
struct ExecutionDefaultTypeInternal;
extern ExecutionDefaultTypeInternal _Execution_default_instance_;
class GraphExecutionTrace;
struct GraphExecutionTraceDefaultTypeInternal;
extern GraphExecutionTraceDefaultTypeInternal _GraphExecutionTrace_default_instance_;
class GraphOpCreation;
struct GraphOpCreationDefaultTypeInternal;
extern GraphOpCreationDefaultTypeInternal _GraphOpCreation_default_instance_;
class SourceFile;
struct SourceFileDefaultTypeInternal;
extern SourceFileDefaultTypeInternal _SourceFile_default_instance_;
class StackFrameWithId;
struct StackFrameWithIdDefaultTypeInternal;
extern StackFrameWithIdDefaultTypeInternal _StackFrameWithId_default_instance_;
}  // namespace tensorflow
PROTOBUF_NAMESPACE_OPEN
template<> ::tensorflow::CodeLocation* Arena::CreateMaybeMessage<::tensorflow::CodeLocation>(Arena*);
template<> ::tensorflow::DebugEvent* Arena::CreateMaybeMessage<::tensorflow::DebugEvent>(Arena*);
template<> ::tensorflow::DebugMetadata* Arena::CreateMaybeMessage<::tensorflow::DebugMetadata>(Arena*);
template<> ::tensorflow::DebuggedDevice* Arena::CreateMaybeMessage<::tensorflow::DebuggedDevice>(Arena*);
template<> ::tensorflow::DebuggedGraph* Arena::CreateMaybeMessage<::tensorflow::DebuggedGraph>(Arena*);
template<> ::tensorflow::Execution* Arena::CreateMaybeMessage<::tensorflow::Execution>(Arena*);
template<> ::tensorflow::GraphExecutionTrace* Arena::CreateMaybeMessage<::tensorflow::GraphExecutionTrace>(Arena*);
template<> ::tensorflow::GraphOpCreation* Arena::CreateMaybeMessage<::tensorflow::GraphOpCreation>(Arena*);
template<> ::tensorflow::SourceFile* Arena::CreateMaybeMessage<::tensorflow::SourceFile>(Arena*);
template<> ::tensorflow::StackFrameWithId* Arena::CreateMaybeMessage<::tensorflow::StackFrameWithId>(Arena*);
PROTOBUF_NAMESPACE_CLOSE
namespace tensorflow {

enum TensorDebugMode : int {
  UNSPECIFIED = 0,
  NO_TENSOR = 1,
  CURT_HEALTH = 2,
  CONCISE_HEALTH = 3,
  FULL_HEALTH = 4,
  SHAPE = 5,
  FULL_NUMERICS = 6,
  FULL_TENSOR = 7,
  REDUCE_INF_NAN_THREE_SLOTS = 8,
  TensorDebugMode_INT_MIN_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::int32_t>::min(),
  TensorDebugMode_INT_MAX_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::int32_t>::max()
};
bool TensorDebugMode_IsValid(int value);
constexpr TensorDebugMode TensorDebugMode_MIN = UNSPECIFIED;
constexpr TensorDebugMode TensorDebugMode_MAX = REDUCE_INF_NAN_THREE_SLOTS;
constexpr int TensorDebugMode_ARRAYSIZE = TensorDebugMode_MAX + 1;

const std::string& TensorDebugMode_Name(TensorDebugMode value);
template<typename T>
inline const std::string& TensorDebugMode_Name(T enum_t_value) {
  static_assert(::std::is_same<T, TensorDebugMode>::value ||
    ::std::is_integral<T>::value,
    "Incorrect type passed to function TensorDebugMode_Name.");
  return TensorDebugMode_Name(static_cast<TensorDebugMode>(enum_t_value));
}
bool TensorDebugMode_Parse(
    ::PROTOBUF_NAMESPACE_ID::ConstStringParam name, TensorDebugMode* value);
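
// Illustrative usage sketch (not part of the generated file): the
// TensorDebugMode helpers above validate and name enum values, e.g. raw
// integers decoded from a debug-event stream:
//
//   int raw = 2;  // hypothetical value read from a debug-event record
//   if (tensorflow::TensorDebugMode_IsValid(raw)) {
//     auto mode = static_cast<tensorflow::TensorDebugMode>(raw);
//     const std::string& name = tensorflow::TensorDebugMode_Name(mode);
//     // name == "CURT_HEALTH"
//   }
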
// ===================================================================

class DebugEvent final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.DebugEvent) */ {
 public:
  inline DebugEvent() : DebugEvent(nullptr) {}
  ~DebugEvent() override;
  explicit PROTOBUF_CONSTEXPR DebugEvent(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  DebugEvent(const DebugEvent& from);
  DebugEvent(DebugEvent&& from) noexcept
    : DebugEvent() {
    *this = ::std::move(from);
  }

  inline DebugEvent& operator=(const DebugEvent& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline DebugEvent& operator=(DebugEvent&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const DebugEvent& default_instance() {
    return *internal_default_instance();
  }
  enum WhatCase {
    kDebugMetadata = 3,
    kSourceFile = 4,
    kStackFrameWithId = 6,
    kGraphOpCreation = 7,
    kDebuggedGraph = 8,
    kExecution = 9,
    kGraphExecutionTrace = 10,
    kGraphId = 11,
    kDebuggedDevice = 12,
    WHAT_NOT_SET = 0,
  };

  static inline const DebugEvent* internal_default_instance() {
    return reinterpret_cast<const DebugEvent*>(
               &_DebugEvent_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    0;

  friend void swap(DebugEvent& a, DebugEvent& b) {
    a.Swap(&b);
  }
  inline void Swap(DebugEvent* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(DebugEvent* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  DebugEvent* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<DebugEvent>(arena);
  }
  DebugEvent* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const DebugEvent& from);
  void MergeFrom(const DebugEvent& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(DebugEvent* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.DebugEvent";
  }
  protected:
  explicit DebugEvent(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kWallTimeFieldNumber = 1,
    kStepFieldNumber = 2,
    kDebugMetadataFieldNumber = 3,
    kSourceFileFieldNumber = 4,
    kStackFrameWithIdFieldNumber = 6,
    kGraphOpCreationFieldNumber = 7,
    kDebuggedGraphFieldNumber = 8,
    kExecutionFieldNumber = 9,
    kGraphExecutionTraceFieldNumber = 10,
    kGraphIdFieldNumber = 11,
    kDebuggedDeviceFieldNumber = 12,
  };
  // double wall_time = 1;
  void clear_wall_time();
  double wall_time() const;
  void set_wall_time(double value);
  private:
  double _internal_wall_time() const;
  void _internal_set_wall_time(double value);
  public:

  // int64 step = 2;
  void clear_step();
  ::int64_t step() const;
  void set_step(::int64_t value);
  private:
  ::int64_t _internal_step() const;
  void _internal_set_step(::int64_t value);
  public:

  // .tensorflow.DebugMetadata debug_metadata = 3;
  bool has_debug_metadata() const;
  private:
  bool _internal_has_debug_metadata() const;
  public:
  void clear_debug_metadata();
  const ::tensorflow::DebugMetadata& debug_metadata() const;
  PROTOBUF_NODISCARD ::tensorflow::DebugMetadata* release_debug_metadata();
  ::tensorflow::DebugMetadata* mutable_debug_metadata();
  void set_allocated_debug_metadata(::tensorflow::DebugMetadata* debug_metadata);
  private:
  const ::tensorflow::DebugMetadata& _internal_debug_metadata() const;
  ::tensorflow::DebugMetadata* _internal_mutable_debug_metadata();
  public:
  void unsafe_arena_set_allocated_debug_metadata(
      ::tensorflow::DebugMetadata* debug_metadata);
  ::tensorflow::DebugMetadata* unsafe_arena_release_debug_metadata();

  // .tensorflow.SourceFile source_file = 4;
  bool has_source_file() const;
  private:
  bool _internal_has_source_file() const;
  public:
  void clear_source_file();
  const ::tensorflow::SourceFile& source_file() const;
  PROTOBUF_NODISCARD ::tensorflow::SourceFile* release_source_file();
  ::tensorflow::SourceFile* mutable_source_file();
  void set_allocated_source_file(::tensorflow::SourceFile* source_file);
  private:
  const ::tensorflow::SourceFile& _internal_source_file() const;
  ::tensorflow::SourceFile* _internal_mutable_source_file();
  public:
  void unsafe_arena_set_allocated_source_file(
      ::tensorflow::SourceFile* source_file);
  ::tensorflow::SourceFile* unsafe_arena_release_source_file();

  // .tensorflow.StackFrameWithId stack_frame_with_id = 6;
  bool has_stack_frame_with_id() const;
  private:
  bool _internal_has_stack_frame_with_id() const;
  public:
  void clear_stack_frame_with_id();
  const ::tensorflow::StackFrameWithId& stack_frame_with_id() const;
  PROTOBUF_NODISCARD ::tensorflow::StackFrameWithId* release_stack_frame_with_id();
  ::tensorflow::StackFrameWithId* mutable_stack_frame_with_id();
  void set_allocated_stack_frame_with_id(::tensorflow::StackFrameWithId* stack_frame_with_id);
  private:
  const ::tensorflow::StackFrameWithId& _internal_stack_frame_with_id() const;
  ::tensorflow::StackFrameWithId* _internal_mutable_stack_frame_with_id();
  public:
  void unsafe_arena_set_allocated_stack_frame_with_id(
      ::tensorflow::StackFrameWithId* stack_frame_with_id);
  ::tensorflow::StackFrameWithId* unsafe_arena_release_stack_frame_with_id();

  // .tensorflow.GraphOpCreation graph_op_creation = 7;
  bool has_graph_op_creation() const;
  private:
  bool _internal_has_graph_op_creation() const;
  public:
  void clear_graph_op_creation();
  const ::tensorflow::GraphOpCreation& graph_op_creation() const;
  PROTOBUF_NODISCARD ::tensorflow::GraphOpCreation* release_graph_op_creation();
  ::tensorflow::GraphOpCreation* mutable_graph_op_creation();
  void set_allocated_graph_op_creation(::tensorflow::GraphOpCreation* graph_op_creation);
  private:
  const ::tensorflow::GraphOpCreation& _internal_graph_op_creation() const;
  ::tensorflow::GraphOpCreation* _internal_mutable_graph_op_creation();
  public:
  void unsafe_arena_set_allocated_graph_op_creation(
      ::tensorflow::GraphOpCreation* graph_op_creation);
  ::tensorflow::GraphOpCreation* unsafe_arena_release_graph_op_creation();

  // .tensorflow.DebuggedGraph debugged_graph = 8;
  bool has_debugged_graph() const;
  private:
  bool _internal_has_debugged_graph() const;
  public:
  void clear_debugged_graph();
  const ::tensorflow::DebuggedGraph& debugged_graph() const;
  PROTOBUF_NODISCARD ::tensorflow::DebuggedGraph* release_debugged_graph();
  ::tensorflow::DebuggedGraph* mutable_debugged_graph();
  void set_allocated_debugged_graph(::tensorflow::DebuggedGraph* debugged_graph);
  private:
  const ::tensorflow::DebuggedGraph& _internal_debugged_graph() const;
  ::tensorflow::DebuggedGraph* _internal_mutable_debugged_graph();
  public:
  void unsafe_arena_set_allocated_debugged_graph(
      ::tensorflow::DebuggedGraph* debugged_graph);
  ::tensorflow::DebuggedGraph* unsafe_arena_release_debugged_graph();

  // .tensorflow.Execution execution = 9;
  bool has_execution() const;
  private:
  bool _internal_has_execution() const;
  public:
  void clear_execution();
  const ::tensorflow::Execution& execution() const;
  PROTOBUF_NODISCARD ::tensorflow::Execution* release_execution();
  ::tensorflow::Execution* mutable_execution();
  void set_allocated_execution(::tensorflow::Execution* execution);
  private:
  const ::tensorflow::Execution& _internal_execution() const;
  ::tensorflow::Execution* _internal_mutable_execution();
  public:
  void unsafe_arena_set_allocated_execution(
      ::tensorflow::Execution* execution);
  ::tensorflow::Execution* unsafe_arena_release_execution();

  // .tensorflow.GraphExecutionTrace graph_execution_trace = 10;
  bool has_graph_execution_trace() const;
  private:
  bool _internal_has_graph_execution_trace() const;
  public:
  void clear_graph_execution_trace();
  const ::tensorflow::GraphExecutionTrace& graph_execution_trace() const;
  PROTOBUF_NODISCARD ::tensorflow::GraphExecutionTrace* release_graph_execution_trace();
  ::tensorflow::GraphExecutionTrace* mutable_graph_execution_trace();
  void set_allocated_graph_execution_trace(::tensorflow::GraphExecutionTrace* graph_execution_trace);
  private:
  const ::tensorflow::GraphExecutionTrace& _internal_graph_execution_trace() const;
  ::tensorflow::GraphExecutionTrace* _internal_mutable_graph_execution_trace();
  public:
  void unsafe_arena_set_allocated_graph_execution_trace(
      ::tensorflow::GraphExecutionTrace* graph_execution_trace);
  ::tensorflow::GraphExecutionTrace* unsafe_arena_release_graph_execution_trace();

  // string graph_id = 11;
  bool has_graph_id() const;
  private:
  bool _internal_has_graph_id() const;
  public:
  void clear_graph_id();
  const std::string& graph_id() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_graph_id(ArgT0&& arg0, ArgT... args);
  std::string* mutable_graph_id();
  PROTOBUF_NODISCARD std::string* release_graph_id();
  void set_allocated_graph_id(std::string* graph_id);
  private:
  const std::string& _internal_graph_id() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_graph_id(const std::string& value);
  std::string* _internal_mutable_graph_id();
  public:

  // .tensorflow.DebuggedDevice debugged_device = 12;
  bool has_debugged_device() const;
  private:
  bool _internal_has_debugged_device() const;
  public:
  void clear_debugged_device();
  const ::tensorflow::DebuggedDevice& debugged_device() const;
  PROTOBUF_NODISCARD ::tensorflow::DebuggedDevice* release_debugged_device();
  ::tensorflow::DebuggedDevice* mutable_debugged_device();
  void set_allocated_debugged_device(::tensorflow::DebuggedDevice* debugged_device);
  private:
  const ::tensorflow::DebuggedDevice& _internal_debugged_device() const;
  ::tensorflow::DebuggedDevice* _internal_mutable_debugged_device();
  public:
  void unsafe_arena_set_allocated_debugged_device(
      ::tensorflow::DebuggedDevice* debugged_device);
  ::tensorflow::DebuggedDevice* unsafe_arena_release_debugged_device();

  void clear_what();
  WhatCase what_case() const;
  // @@protoc_insertion_point(class_scope:tensorflow.DebugEvent)
 private:
  class _Internal;
  void set_has_debug_metadata();
  void set_has_source_file();
  void set_has_stack_frame_with_id();
  void set_has_graph_op_creation();
  void set_has_debugged_graph();
  void set_has_execution();
  void set_has_graph_execution_trace();
  void set_has_graph_id();
  void set_has_debugged_device();

  inline bool has_what() const;
  inline void clear_has_what();

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  struct Impl_ {
    double wall_time_;
    ::int64_t step_;
    union WhatUnion {
      constexpr WhatUnion() : _constinit_{} {}
        ::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized _constinit_;
      ::tensorflow::DebugMetadata* debug_metadata_;
      ::tensorflow::SourceFile* source_file_;
      ::tensorflow::StackFrameWithId* stack_frame_with_id_;
      ::tensorflow::GraphOpCreation* graph_op_creation_;
      ::tensorflow::DebuggedGraph* debugged_graph_;
      ::tensorflow::Execution* execution_;
      ::tensorflow::GraphExecutionTrace* graph_execution_trace_;
      ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr graph_id_;
      ::tensorflow::DebuggedDevice* debugged_device_;
    } what_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
    ::uint32_t _oneof_case_[1];

  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto;
};
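
// Illustrative usage sketch (not part of the generated file): DebugEvent
// carries its payload in a `what` oneof, so setting one member clears the
// others, and what_case() reports which member is active. A minimal example,
// assuming the standard protobuf set_*/mutable_*/has_* accessors declared above:
//
//   tensorflow::DebugEvent event;
//   event.set_wall_time(1.5);
//   event.mutable_source_file()->set_file_path("model.py");  // hypothetical path
//   // event.what_case() == tensorflow::DebugEvent::kSourceFile
//   event.set_graph_id("g1");  // replaces the oneof payload
//   // event.has_source_file() == false,
//   // event.what_case() == tensorflow::DebugEvent::kGraphId
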
// -------------------------------------------------------------------

class DebugMetadata final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.DebugMetadata) */ {
 public:
  inline DebugMetadata() : DebugMetadata(nullptr) {}
  ~DebugMetadata() override;
  explicit PROTOBUF_CONSTEXPR DebugMetadata(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  DebugMetadata(const DebugMetadata& from);
  DebugMetadata(DebugMetadata&& from) noexcept
    : DebugMetadata() {
    *this = ::std::move(from);
  }

  inline DebugMetadata& operator=(const DebugMetadata& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline DebugMetadata& operator=(DebugMetadata&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const DebugMetadata& default_instance() {
    return *internal_default_instance();
  }
  static inline const DebugMetadata* internal_default_instance() {
    return reinterpret_cast<const DebugMetadata*>(
               &_DebugMetadata_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    1;

  friend void swap(DebugMetadata& a, DebugMetadata& b) {
    a.Swap(&b);
  }
  inline void Swap(DebugMetadata* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(DebugMetadata* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  DebugMetadata* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<DebugMetadata>(arena);
  }
  DebugMetadata* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const DebugMetadata& from);
  void MergeFrom(const DebugMetadata& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(DebugMetadata* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.DebugMetadata";
  }
  protected:
  explicit DebugMetadata(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kTensorflowVersionFieldNumber = 1,
    kFileVersionFieldNumber = 2,
    kTfdbgRunIdFieldNumber = 3,
  };
  // string tensorflow_version = 1;
  void clear_tensorflow_version();
  const std::string& tensorflow_version() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_tensorflow_version(ArgT0&& arg0, ArgT... args);
  std::string* mutable_tensorflow_version();
  PROTOBUF_NODISCARD std::string* release_tensorflow_version();
  void set_allocated_tensorflow_version(std::string* tensorflow_version);
  private:
  const std::string& _internal_tensorflow_version() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_tensorflow_version(const std::string& value);
  std::string* _internal_mutable_tensorflow_version();
  public:

  // string file_version = 2;
  void clear_file_version();
  const std::string& file_version() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_file_version(ArgT0&& arg0, ArgT... args);
  std::string* mutable_file_version();
  PROTOBUF_NODISCARD std::string* release_file_version();
  void set_allocated_file_version(std::string* file_version);
  private:
  const std::string& _internal_file_version() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_file_version(const std::string& value);
  std::string* _internal_mutable_file_version();
  public:

  // string tfdbg_run_id = 3;
  void clear_tfdbg_run_id();
  const std::string& tfdbg_run_id() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_tfdbg_run_id(ArgT0&& arg0, ArgT... args);
  std::string* mutable_tfdbg_run_id();
  PROTOBUF_NODISCARD std::string* release_tfdbg_run_id();
  void set_allocated_tfdbg_run_id(std::string* tfdbg_run_id);
  private:
  const std::string& _internal_tfdbg_run_id() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_tfdbg_run_id(const std::string& value);
  std::string* _internal_mutable_tfdbg_run_id();
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.DebugMetadata)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr tensorflow_version_;
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr file_version_;
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr tfdbg_run_id_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto;
};
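
// Illustrative usage sketch (not part of the generated file): DebugMetadata
// holds three plain string fields and is typically carried as the
// debug_metadata member of a DebugEvent, e.g. (values are hypothetical):
//
//   tensorflow::DebugEvent event;
//   tensorflow::DebugMetadata* md = event.mutable_debug_metadata();
//   md->set_tensorflow_version("2.9.0");
//   md->set_file_version("debug_event_v2");
//   md->set_tfdbg_run_id("run_0");
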
// -------------------------------------------------------------------

class SourceFile final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.SourceFile) */ {
 public:
  inline SourceFile() : SourceFile(nullptr) {}
  ~SourceFile() override;
  explicit PROTOBUF_CONSTEXPR SourceFile(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  SourceFile(const SourceFile& from);
  SourceFile(SourceFile&& from) noexcept
    : SourceFile() {
    *this = ::std::move(from);
  }

  inline SourceFile& operator=(const SourceFile& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline SourceFile& operator=(SourceFile&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const SourceFile& default_instance() {
    return *internal_default_instance();
  }
  static inline const SourceFile* internal_default_instance() {
    return reinterpret_cast<const SourceFile*>(
               &_SourceFile_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    2;

  friend void swap(SourceFile& a, SourceFile& b) {
    a.Swap(&b);
  }
  inline void Swap(SourceFile* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(SourceFile* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  SourceFile* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<SourceFile>(arena);
  }
  SourceFile* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const SourceFile& from);
  void MergeFrom(const SourceFile& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(SourceFile* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.SourceFile";
  }
  protected:
  explicit SourceFile(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kLinesFieldNumber = 3,
    kFilePathFieldNumber = 1,
    kHostNameFieldNumber = 2,
  };
  // repeated string lines = 3;
  int lines_size() const;
  private:
  int _internal_lines_size() const;
  public:
  void clear_lines();
  const std::string& lines(int index) const;
  std::string* mutable_lines(int index);
  void set_lines(int index, const std::string& value);
  void set_lines(int index, std::string&& value);
  void set_lines(int index, const char* value);
  void set_lines(int index, const char* value, size_t size);
  std::string* add_lines();
  void add_lines(const std::string& value);
  void add_lines(std::string&& value);
  void add_lines(const char* value);
  void add_lines(const char* value, size_t size);
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>& lines() const;
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>* mutable_lines();
  private:
  const std::string& _internal_lines(int index) const;
  std::string* _internal_add_lines();
  public:

  // string file_path = 1;
  void clear_file_path();
  const std::string& file_path() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_file_path(ArgT0&& arg0, ArgT... args);
  std::string* mutable_file_path();
  PROTOBUF_NODISCARD std::string* release_file_path();
  void set_allocated_file_path(std::string* file_path);
  private:
  const std::string& _internal_file_path() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_file_path(const std::string& value);
  std::string* _internal_mutable_file_path();
  public:

  // string host_name = 2;
  void clear_host_name();
  const std::string& host_name() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_host_name(ArgT0&& arg0, ArgT... args);
  std::string* mutable_host_name();
  PROTOBUF_NODISCARD std::string* release_host_name();
  void set_allocated_host_name(std::string* host_name);
  private:
  const std::string& _internal_host_name() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_host_name(const std::string& value);
  std::string* _internal_mutable_host_name();
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.SourceFile)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string> lines_;
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr file_path_;
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr host_name_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto;
};
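
// Illustrative usage sketch (not part of the generated file): `lines` is a
// repeated string field, so it uses the usual add_*/size()/index accessors
// rather than a single setter (values below are hypothetical):
//
//   tensorflow::SourceFile src;
//   src.set_file_path("model.py");
//   src.set_host_name("localhost");
//   src.add_lines("import tensorflow as tf");
//   src.add_lines("x = tf.constant(1)");
//   // src.lines_size() == 2, src.lines(0) == "import tensorflow as tf"
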
// -------------------------------------------------------------------

class StackFrameWithId final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.StackFrameWithId) */ {
 public:
  inline StackFrameWithId() : StackFrameWithId(nullptr) {}
  ~StackFrameWithId() override;
  explicit PROTOBUF_CONSTEXPR StackFrameWithId(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  StackFrameWithId(const StackFrameWithId& from);
  StackFrameWithId(StackFrameWithId&& from) noexcept
    : StackFrameWithId() {
    *this = ::std::move(from);
  }

  inline StackFrameWithId& operator=(const StackFrameWithId& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline StackFrameWithId& operator=(StackFrameWithId&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const StackFrameWithId& default_instance() {
    return *internal_default_instance();
  }
  static inline const StackFrameWithId* internal_default_instance() {
    return reinterpret_cast<const StackFrameWithId*>(
               &_StackFrameWithId_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    3;

  friend void swap(StackFrameWithId& a, StackFrameWithId& b) {
    a.Swap(&b);
  }
  inline void Swap(StackFrameWithId* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(StackFrameWithId* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  StackFrameWithId* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<StackFrameWithId>(arena);
  }
  StackFrameWithId* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const StackFrameWithId& from);
  void MergeFrom(const StackFrameWithId& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(StackFrameWithId* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.StackFrameWithId";
  }
  protected:
  explicit StackFrameWithId(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kIdFieldNumber = 1,
    kFileLineColFieldNumber = 2,
  };
  // string id = 1;
  void clear_id();
  const std::string& id() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_id(ArgT0&& arg0, ArgT... args);
  std::string* mutable_id();
  PROTOBUF_NODISCARD std::string* release_id();
  void set_allocated_id(std::string* id);
  private:
  const std::string& _internal_id() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_id(const std::string& value);
  std::string* _internal_mutable_id();
  public:

  // .tensorflow.GraphDebugInfo.FileLineCol file_line_col = 2;
  bool has_file_line_col() const;
  private:
  bool _internal_has_file_line_col() const;
  public:
  void clear_file_line_col();
  const ::tensorflow::GraphDebugInfo_FileLineCol& file_line_col() const;
  PROTOBUF_NODISCARD ::tensorflow::GraphDebugInfo_FileLineCol* release_file_line_col();
  ::tensorflow::GraphDebugInfo_FileLineCol* mutable_file_line_col();
  void set_allocated_file_line_col(::tensorflow::GraphDebugInfo_FileLineCol* file_line_col);
  private:
  const ::tensorflow::GraphDebugInfo_FileLineCol& _internal_file_line_col() const;
  ::tensorflow::GraphDebugInfo_FileLineCol* _internal_mutable_file_line_col();
  public:
  void unsafe_arena_set_allocated_file_line_col(
      ::tensorflow::GraphDebugInfo_FileLineCol* file_line_col);
  ::tensorflow::GraphDebugInfo_FileLineCol* unsafe_arena_release_file_line_col();

  // @@protoc_insertion_point(class_scope:tensorflow.StackFrameWithId)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr id_;
    ::tensorflow::GraphDebugInfo_FileLineCol* file_line_col_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto;
};
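
// Illustrative usage sketch (not part of the generated file): the
// file_line_col submessage comes from graph_debug_info.pb.h (included above).
// Assuming the standard generated accessors on GraphDebugInfo.FileLineCol
// (field names per that proto), a frame could be populated like this:
//
//   tensorflow::StackFrameWithId frame;
//   frame.set_id("frame_0");  // hypothetical frame id
//   frame.mutable_file_line_col()->set_file_index(0);
//   frame.mutable_file_line_col()->set_line(42);
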
// -------------------------------------------------------------------

class CodeLocation final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.CodeLocation) */ {
 public:
  inline CodeLocation() : CodeLocation(nullptr) {}
  ~CodeLocation() override;
  explicit PROTOBUF_CONSTEXPR CodeLocation(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  CodeLocation(const CodeLocation& from);
  CodeLocation(CodeLocation&& from) noexcept
    : CodeLocation() {
    *this = ::std::move(from);
  }

  inline CodeLocation& operator=(const CodeLocation& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline CodeLocation& operator=(CodeLocation&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const CodeLocation& default_instance() {
    return *internal_default_instance();
  }
  static inline const CodeLocation* internal_default_instance() {
    return reinterpret_cast<const CodeLocation*>(
               &_CodeLocation_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    4;

  friend void swap(CodeLocation& a, CodeLocation& b) {
    a.Swap(&b);
  }
  inline void Swap(CodeLocation* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(CodeLocation* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  CodeLocation* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<CodeLocation>(arena);
  }
  CodeLocation* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const CodeLocation& from);
  void MergeFrom(const CodeLocation& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(CodeLocation* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.CodeLocation";
  }
  protected:
  explicit CodeLocation(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kStackFrameIdsFieldNumber = 2,
    kHostNameFieldNumber = 1,
  };
  // repeated string stack_frame_ids = 2;
  int stack_frame_ids_size() const;
  private:
  int _internal_stack_frame_ids_size() const;
  public:
  void clear_stack_frame_ids();
  const std::string& stack_frame_ids(int index) const;
  std::string* mutable_stack_frame_ids(int index);
  void set_stack_frame_ids(int index, const std::string& value);
  void set_stack_frame_ids(int index, std::string&& value);
  void set_stack_frame_ids(int index, const char* value);
  void set_stack_frame_ids(int index, const char* value, size_t size);
  std::string* add_stack_frame_ids();
  void add_stack_frame_ids(const std::string& value);
  void add_stack_frame_ids(std::string&& value);
  void add_stack_frame_ids(const char* value);
  void add_stack_frame_ids(const char* value, size_t size);
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>& stack_frame_ids() const;
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>* mutable_stack_frame_ids();
  private:
  const std::string& _internal_stack_frame_ids(int index) const;
  std::string* _internal_add_stack_frame_ids();
  public:

  // string host_name = 1;
  void clear_host_name();
  const std::string& host_name() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_host_name(ArgT0&& arg0, ArgT... args);
  std::string* mutable_host_name();
  PROTOBUF_NODISCARD std::string* release_host_name();
  void set_allocated_host_name(std::string* host_name);
  private:
  const std::string& _internal_host_name() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_host_name(const std::string& value);
  std::string* _internal_mutable_host_name();
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.CodeLocation)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string> stack_frame_ids_;
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr host_name_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto;
};
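
// Illustrative usage sketch (not part of the generated file): CodeLocation
// refers to stack frames by string id, typically the ids of previously
// emitted StackFrameWithId events (ids below are hypothetical):
//
//   tensorflow::CodeLocation loc;
//   loc.set_host_name("localhost");
//   loc.add_stack_frame_ids("frame_0");
//   loc.add_stack_frame_ids("frame_1");
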
// -------------------------------------------------------------------

class GraphOpCreation final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.GraphOpCreation) */ {
 public:
  inline GraphOpCreation() : GraphOpCreation(nullptr) {}
  ~GraphOpCreation() override;
  explicit PROTOBUF_CONSTEXPR GraphOpCreation(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  GraphOpCreation(const GraphOpCreation& from);
  GraphOpCreation(GraphOpCreation&& from) noexcept
    : GraphOpCreation() {
    *this = ::std::move(from);
  }

  inline GraphOpCreation& operator=(const GraphOpCreation& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline GraphOpCreation& operator=(GraphOpCreation&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const GraphOpCreation& default_instance() {
    return *internal_default_instance();
  }
  static inline const GraphOpCreation* internal_default_instance() {
    return reinterpret_cast<const GraphOpCreation*>(
               &_GraphOpCreation_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    5;

  friend void swap(GraphOpCreation& a, GraphOpCreation& b) {
    a.Swap(&b);
  }
  inline void Swap(GraphOpCreation* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
   #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(GraphOpCreation* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  GraphOpCreation* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<GraphOpCreation>(arena);
  }
  GraphOpCreation* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
  void CopyFrom(const GraphOpCreation& from);
  void MergeFrom(const GraphOpCreation& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(GraphOpCreation* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.GraphOpCreation";
  }
  protected:
  explicit GraphOpCreation(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kInputNamesFieldNumber = 6,
    kOutputTensorIdsFieldNumber = 9,
    kOpTypeFieldNumber = 1,
    kOpNameFieldNumber = 2,
    kGraphNameFieldNumber = 3,
    kGraphIdFieldNumber = 4,
    kDeviceNameFieldNumber = 5,
    kCodeLocationFieldNumber = 8,
    kNumOutputsFieldNumber = 7,
  };
  // repeated string input_names = 6;
  int input_names_size() const;
  private:
  int _internal_input_names_size() const;
  public:
  void clear_input_names();
  const std::string& input_names(int index) const;
  std::string* mutable_input_names(int index);
  void set_input_names(int index, const std::string& value);
  void set_input_names(int index, std::string&& value);
  void set_input_names(int index, const char* value);
  void set_input_names(int index, const char* value, size_t size);
  std::string* add_input_names();
  void add_input_names(const std::string& value);
  void add_input_names(std::string&& value);
  void add_input_names(const char* value);
  void add_input_names(const char* value, size_t size);
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>& input_names() const;
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>* mutable_input_names();
  private:
  const std::string& _internal_input_names(int index) const;
  std::string* _internal_add_input_names();
  public:

  // repeated int32 output_tensor_ids = 9;
  int output_tensor_ids_size() const;
  private:
  int _internal_output_tensor_ids_size() const;
  public:
  void clear_output_tensor_ids();
  private:
  ::int32_t _internal_output_tensor_ids(int index) const;
  const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >&
      _internal_output_tensor_ids() const;
  void _internal_add_output_tensor_ids(::int32_t value);
  ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >*
      _internal_mutable_output_tensor_ids();
  public:
  ::int32_t output_tensor_ids(int index) const;
  void set_output_tensor_ids(int index, ::int32_t value);
  void add_output_tensor_ids(::int32_t value);
  const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >&
      output_tensor_ids() const;
  ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >*
      mutable_output_tensor_ids();

  // string op_type = 1;
  void clear_op_type();
  const std::string& op_type() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_op_type(ArgT0&& arg0, ArgT... args);
  std::string* mutable_op_type();
  PROTOBUF_NODISCARD std::string* release_op_type();
  void set_allocated_op_type(std::string* op_type);
  private:
  const std::string& _internal_op_type() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_op_type(const std::string& value);
  std::string* _internal_mutable_op_type();
  public:

  // string op_name = 2;
  void clear_op_name();
  const std::string& op_name() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_op_name(ArgT0&& arg0, ArgT... args);
  std::string* mutable_op_name();
  PROTOBUF_NODISCARD std::string* release_op_name();
  void set_allocated_op_name(std::string* op_name);
  private:
  const std::string& _internal_op_name() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_op_name(const std::string& value);
  std::string* _internal_mutable_op_name();
  public:

  // string graph_name = 3;
  void clear_graph_name();
  const std::string& graph_name() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_graph_name(ArgT0&& arg0, ArgT... args);
  std::string* mutable_graph_name();
  PROTOBUF_NODISCARD std::string* release_graph_name();
  void set_allocated_graph_name(std::string* graph_name);
  private:
  const std::string& _internal_graph_name() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_graph_name(const std::string& value);
  std::string* _internal_mutable_graph_name();
  public:

  // string graph_id = 4;
  void clear_graph_id();
  const std::string& graph_id() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_graph_id(ArgT0&& arg0, ArgT... args);
  std::string* mutable_graph_id();
  PROTOBUF_NODISCARD std::string* release_graph_id();
  void set_allocated_graph_id(std::string* graph_id);
  private:
  const std::string& _internal_graph_id() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_graph_id(const std::string& value);
  std::string* _internal_mutable_graph_id();
  public:

  // string device_name = 5;
  void clear_device_name();
  const std::string& device_name() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_device_name(ArgT0&& arg0, ArgT... args);
  std::string* mutable_device_name();
  PROTOBUF_NODISCARD std::string* release_device_name();
  void set_allocated_device_name(std::string* device_name);
  private:
  const std::string& _internal_device_name() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_device_name(const std::string& value);
  std::string* _internal_mutable_device_name();
  public:

  // .tensorflow.CodeLocation code_location = 8;
  bool has_code_location() const;
  private:
  bool _internal_has_code_location() const;
  public:
  void clear_code_location();
  const ::tensorflow::CodeLocation& code_location() const;
  PROTOBUF_NODISCARD ::tensorflow::CodeLocation* release_code_location();
  ::tensorflow::CodeLocation* mutable_code_location();
  void set_allocated_code_location(::tensorflow::CodeLocation* code_location);
  private:
  const ::tensorflow::CodeLocation& _internal_code_location() const;
  ::tensorflow::CodeLocation* _internal_mutable_code_location();
  public:
  void unsafe_arena_set_allocated_code_location(
      ::tensorflow::CodeLocation* code_location);
  ::tensorflow::CodeLocation* unsafe_arena_release_code_location();

  // int32 num_outputs = 7;
  void clear_num_outputs();
  ::int32_t num_outputs() const;
  void set_num_outputs(::int32_t value);
  private:
  ::int32_t _internal_num_outputs() const;
  void _internal_set_num_outputs(::int32_t value);
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.GraphOpCreation)
 private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
1423   typedef void InternalArenaConstructable_;
1424   typedef void DestructorSkippable_;
1425   struct Impl_ {
1426     ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string> input_names_;
1427     ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t > output_tensor_ids_;
1428     mutable std::atomic<int> _output_tensor_ids_cached_byte_size_;
1429     ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr op_type_;
1430     ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr op_name_;
1431     ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr graph_name_;
1432     ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr graph_id_;
1433     ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr device_name_;
1434     ::tensorflow::CodeLocation* code_location_;
1435     ::int32_t num_outputs_;
1436     mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
1437   };
1438   union { Impl_ _impl_; };
1439   friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto;
1440 };
1441 // -------------------------------------------------------------------
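// Illustrative usage sketch (hypothetical, not produced by protoc): populating a
// tensorflow::GraphOpCreation message through the accessors declared above. The
// literal field values ("MatMul", "dense/MatMul", "g1", 42, ...) are placeholders.
//
//   tensorflow::GraphOpCreation op_creation;
//   op_creation.set_op_type("MatMul");
//   op_creation.set_op_name("dense/MatMul");
//   op_creation.set_graph_id("g1");
//   op_creation.add_input_names("dense/kernel");
//   op_creation.add_input_names("dense/input");
//   op_creation.set_num_outputs(1);
//   op_creation.add_output_tensor_ids(42);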
1442 
1443 class DebuggedGraph final :
1444     public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.DebuggedGraph) */ {
1445  public:
1446   inline DebuggedGraph() : DebuggedGraph(nullptr) {}
1447   ~DebuggedGraph() override;
1448   explicit PROTOBUF_CONSTEXPR DebuggedGraph(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
1449 
1450   DebuggedGraph(const DebuggedGraph& from);
1451   DebuggedGraph(DebuggedGraph&& from) noexcept
1452     : DebuggedGraph() {
1453     *this = ::std::move(from);
1454   }
1455 
1456   inline DebuggedGraph& operator=(const DebuggedGraph& from) {
1457     if (this == &from) return *this;
1458     CopyFrom(from);
1459     return *this;
1460   }
1461   inline DebuggedGraph& operator=(DebuggedGraph&& from) noexcept {
1462     if (this == &from) return *this;
1463     if (GetOwningArena() == from.GetOwningArena()
1464   #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
1465         && GetOwningArena() != nullptr
1466   #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
1467     ) {
1468       InternalSwap(&from);
1469     } else {
1470       CopyFrom(from);
1471     }
1472     return *this;
1473   }
1474 
1475   static const DebuggedGraph& default_instance() {
1476     return *internal_default_instance();
1477   }
1478   static inline const DebuggedGraph* internal_default_instance() {
1479     return reinterpret_cast<const DebuggedGraph*>(
1480                &_DebuggedGraph_default_instance_);
1481   }
1482   static constexpr int kIndexInFileMessages =
1483     6;
1484 
1485   friend void swap(DebuggedGraph& a, DebuggedGraph& b) {
1486     a.Swap(&b);
1487   }
1488   inline void Swap(DebuggedGraph* other) {
1489     if (other == this) return;
1490   #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
1491     if (GetOwningArena() != nullptr &&
1492         GetOwningArena() == other->GetOwningArena()) {
1493    #else  // PROTOBUF_FORCE_COPY_IN_SWAP
1494     if (GetOwningArena() == other->GetOwningArena()) {
1495   #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
1496       InternalSwap(other);
1497     } else {
1498       ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
1499     }
1500   }
1501   void UnsafeArenaSwap(DebuggedGraph* other) {
1502     if (other == this) return;
1503     GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
1504     InternalSwap(other);
1505   }
1506 
1507   // implements Message ----------------------------------------------
1508 
1509   DebuggedGraph* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
1510     return CreateMaybeMessage<DebuggedGraph>(arena);
1511   }
1512   DebuggedGraph* New() const {
1513     return New(nullptr);
1514   }
1515   void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
1516   void CopyFrom(const DebuggedGraph& from);
1517   void MergeFrom(const DebuggedGraph& from);
1518   PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
1519   bool IsInitialized() const final;
1520 
1521   size_t ByteSizeLong() const final;
1522   const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
1523   ::uint8_t* _InternalSerialize(
1524       ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
1525   int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
1526 
1527   private:
1528   void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
1529   void SharedDtor();
1530   void SetCachedSize(int size) const;
1531   void InternalSwap(DebuggedGraph* other);
1532 
1533   private:
1534   friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
1535   static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
1536     return "tensorflow.DebuggedGraph";
1537   }
1538   protected:
1539   explicit DebuggedGraph(::PROTOBUF_NAMESPACE_ID::Arena* arena,
1540                        bool is_message_owned = false);
1541   public:
1542 
1543   std::string GetTypeName() const final;
1544 
1545   // nested types ----------------------------------------------------
1546 
1547   // accessors -------------------------------------------------------
1548 
1549   enum : int {
1550     kInstrumentedOpsFieldNumber = 3,
1551     kGraphIdFieldNumber = 1,
1552     kGraphNameFieldNumber = 2,
1553     kOriginalGraphDefFieldNumber = 4,
1554     kInstrumentedGraphDefFieldNumber = 5,
1555     kOuterContextIdFieldNumber = 6,
1556   };
1557   // repeated string instrumented_ops = 3;
1558   int instrumented_ops_size() const;
1559   private:
1560   int _internal_instrumented_ops_size() const;
1561   public:
1562   void clear_instrumented_ops();
1563   const std::string& instrumented_ops(int index) const;
1564   std::string* mutable_instrumented_ops(int index);
1565   void set_instrumented_ops(int index, const std::string& value);
1566   void set_instrumented_ops(int index, std::string&& value);
1567   void set_instrumented_ops(int index, const char* value);
1568   void set_instrumented_ops(int index, const char* value, size_t size);
1569   std::string* add_instrumented_ops();
1570   void add_instrumented_ops(const std::string& value);
1571   void add_instrumented_ops(std::string&& value);
1572   void add_instrumented_ops(const char* value);
1573   void add_instrumented_ops(const char* value, size_t size);
1574   const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>& instrumented_ops() const;
1575   ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>* mutable_instrumented_ops();
1576   private:
1577   const std::string& _internal_instrumented_ops(int index) const;
1578   std::string* _internal_add_instrumented_ops();
1579   public:
1580 
1581   // string graph_id = 1;
1582   void clear_graph_id();
1583   const std::string& graph_id() const;
1584   template <typename ArgT0 = const std::string&, typename... ArgT>
1585   void set_graph_id(ArgT0&& arg0, ArgT... args);
1586   std::string* mutable_graph_id();
1587   PROTOBUF_NODISCARD std::string* release_graph_id();
1588   void set_allocated_graph_id(std::string* graph_id);
1589   private:
1590   const std::string& _internal_graph_id() const;
1591   inline PROTOBUF_ALWAYS_INLINE void _internal_set_graph_id(const std::string& value);
1592   std::string* _internal_mutable_graph_id();
1593   public:
1594 
1595   // string graph_name = 2;
1596   void clear_graph_name();
1597   const std::string& graph_name() const;
1598   template <typename ArgT0 = const std::string&, typename... ArgT>
1599   void set_graph_name(ArgT0&& arg0, ArgT... args);
1600   std::string* mutable_graph_name();
1601   PROTOBUF_NODISCARD std::string* release_graph_name();
1602   void set_allocated_graph_name(std::string* graph_name);
1603   private:
1604   const std::string& _internal_graph_name() const;
1605   inline PROTOBUF_ALWAYS_INLINE void _internal_set_graph_name(const std::string& value);
1606   std::string* _internal_mutable_graph_name();
1607   public:
1608 
1609   // bytes original_graph_def = 4;
1610   void clear_original_graph_def();
1611   const std::string& original_graph_def() const;
1612   template <typename ArgT0 = const std::string&, typename... ArgT>
1613   void set_original_graph_def(ArgT0&& arg0, ArgT... args);
1614   std::string* mutable_original_graph_def();
1615   PROTOBUF_NODISCARD std::string* release_original_graph_def();
1616   void set_allocated_original_graph_def(std::string* original_graph_def);
1617   private:
1618   const std::string& _internal_original_graph_def() const;
1619   inline PROTOBUF_ALWAYS_INLINE void _internal_set_original_graph_def(const std::string& value);
1620   std::string* _internal_mutable_original_graph_def();
1621   public:
1622 
1623   // bytes instrumented_graph_def = 5;
1624   void clear_instrumented_graph_def();
1625   const std::string& instrumented_graph_def() const;
1626   template <typename ArgT0 = const std::string&, typename... ArgT>
1627   void set_instrumented_graph_def(ArgT0&& arg0, ArgT... args);
1628   std::string* mutable_instrumented_graph_def();
1629   PROTOBUF_NODISCARD std::string* release_instrumented_graph_def();
1630   void set_allocated_instrumented_graph_def(std::string* instrumented_graph_def);
1631   private:
1632   const std::string& _internal_instrumented_graph_def() const;
1633   inline PROTOBUF_ALWAYS_INLINE void _internal_set_instrumented_graph_def(const std::string& value);
1634   std::string* _internal_mutable_instrumented_graph_def();
1635   public:
1636 
1637   // string outer_context_id = 6;
1638   void clear_outer_context_id();
1639   const std::string& outer_context_id() const;
1640   template <typename ArgT0 = const std::string&, typename... ArgT>
1641   void set_outer_context_id(ArgT0&& arg0, ArgT... args);
1642   std::string* mutable_outer_context_id();
1643   PROTOBUF_NODISCARD std::string* release_outer_context_id();
1644   void set_allocated_outer_context_id(std::string* outer_context_id);
1645   private:
1646   const std::string& _internal_outer_context_id() const;
1647   inline PROTOBUF_ALWAYS_INLINE void _internal_set_outer_context_id(const std::string& value);
1648   std::string* _internal_mutable_outer_context_id();
1649   public:
1650 
1651   // @@protoc_insertion_point(class_scope:tensorflow.DebuggedGraph)
1652  private:
1653   class _Internal;
1654 
1655   template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
1656   typedef void InternalArenaConstructable_;
1657   typedef void DestructorSkippable_;
1658   struct Impl_ {
1659     ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string> instrumented_ops_;
1660     ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr graph_id_;
1661     ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr graph_name_;
1662     ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr original_graph_def_;
1663     ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr instrumented_graph_def_;
1664     ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr outer_context_id_;
1665     mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
1666   };
1667   union { Impl_ _impl_; };
1668   friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto;
1669 };
1670 // -------------------------------------------------------------------
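// Illustrative usage sketch (hypothetical, not produced by protoc): registering a
// tensorflow::DebuggedGraph via the accessors declared above. The values and the
// variable original_graph_def_bytes (a std::string holding a serialized GraphDef)
// are placeholders.
//
//   tensorflow::DebuggedGraph debugged_graph;
//   debugged_graph.set_graph_id("g1");
//   debugged_graph.set_graph_name("train_step");
//   debugged_graph.add_instrumented_ops("dense/MatMul");
//   debugged_graph.set_original_graph_def(original_graph_def_bytes);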
1671 
1672 class DebuggedDevice final :
1673     public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.DebuggedDevice) */ {
1674  public:
1675   inline DebuggedDevice() : DebuggedDevice(nullptr) {}
1676   ~DebuggedDevice() override;
1677   explicit PROTOBUF_CONSTEXPR DebuggedDevice(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
1678 
1679   DebuggedDevice(const DebuggedDevice& from);
1680   DebuggedDevice(DebuggedDevice&& from) noexcept
1681     : DebuggedDevice() {
1682     *this = ::std::move(from);
1683   }
1684 
1685   inline DebuggedDevice& operator=(const DebuggedDevice& from) {
1686     if (this == &from) return *this;
1687     CopyFrom(from);
1688     return *this;
1689   }
1690   inline DebuggedDevice& operator=(DebuggedDevice&& from) noexcept {
1691     if (this == &from) return *this;
1692     if (GetOwningArena() == from.GetOwningArena()
1693   #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
1694         && GetOwningArena() != nullptr
1695   #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
1696     ) {
1697       InternalSwap(&from);
1698     } else {
1699       CopyFrom(from);
1700     }
1701     return *this;
1702   }
1703 
1704   static const DebuggedDevice& default_instance() {
1705     return *internal_default_instance();
1706   }
1707   static inline const DebuggedDevice* internal_default_instance() {
1708     return reinterpret_cast<const DebuggedDevice*>(
1709                &_DebuggedDevice_default_instance_);
1710   }
1711   static constexpr int kIndexInFileMessages =
1712     7;
1713 
1714   friend void swap(DebuggedDevice& a, DebuggedDevice& b) {
1715     a.Swap(&b);
1716   }
1717   inline void Swap(DebuggedDevice* other) {
1718     if (other == this) return;
1719   #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
1720     if (GetOwningArena() != nullptr &&
1721         GetOwningArena() == other->GetOwningArena()) {
1722    #else  // PROTOBUF_FORCE_COPY_IN_SWAP
1723     if (GetOwningArena() == other->GetOwningArena()) {
1724   #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
1725       InternalSwap(other);
1726     } else {
1727       ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
1728     }
1729   }
1730   void UnsafeArenaSwap(DebuggedDevice* other) {
1731     if (other == this) return;
1732     GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
1733     InternalSwap(other);
1734   }
1735 
1736   // implements Message ----------------------------------------------
1737 
1738   DebuggedDevice* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
1739     return CreateMaybeMessage<DebuggedDevice>(arena);
1740   }
1741   DebuggedDevice* New() const {
1742     return New(nullptr);
1743   }
1744   void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
1745   void CopyFrom(const DebuggedDevice& from);
1746   void MergeFrom(const DebuggedDevice& from);
1747   PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
1748   bool IsInitialized() const final;
1749 
1750   size_t ByteSizeLong() const final;
1751   const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
1752   ::uint8_t* _InternalSerialize(
1753       ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
1754   int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
1755 
1756   private:
1757   void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
1758   void SharedDtor();
1759   void SetCachedSize(int size) const;
1760   void InternalSwap(DebuggedDevice* other);
1761 
1762   private:
1763   friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
1764   static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
1765     return "tensorflow.DebuggedDevice";
1766   }
1767   protected:
1768   explicit DebuggedDevice(::PROTOBUF_NAMESPACE_ID::Arena* arena,
1769                        bool is_message_owned = false);
1770   public:
1771 
1772   std::string GetTypeName() const final;
1773 
1774   // nested types ----------------------------------------------------
1775 
1776   // accessors -------------------------------------------------------
1777 
1778   enum : int {
1779     kDeviceNameFieldNumber = 1,
1780     kDeviceIdFieldNumber = 2,
1781   };
1782   // string device_name = 1;
1783   void clear_device_name();
1784   const std::string& device_name() const;
1785   template <typename ArgT0 = const std::string&, typename... ArgT>
1786   void set_device_name(ArgT0&& arg0, ArgT... args);
1787   std::string* mutable_device_name();
1788   PROTOBUF_NODISCARD std::string* release_device_name();
1789   void set_allocated_device_name(std::string* device_name);
1790   private:
1791   const std::string& _internal_device_name() const;
1792   inline PROTOBUF_ALWAYS_INLINE void _internal_set_device_name(const std::string& value);
1793   std::string* _internal_mutable_device_name();
1794   public:
1795 
1796   // int32 device_id = 2;
1797   void clear_device_id();
1798   ::int32_t device_id() const;
1799   void set_device_id(::int32_t value);
1800   private:
1801   ::int32_t _internal_device_id() const;
1802   void _internal_set_device_id(::int32_t value);
1803   public:
1804 
1805   // @@protoc_insertion_point(class_scope:tensorflow.DebuggedDevice)
1806  private:
1807   class _Internal;
1808 
1809   template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
1810   typedef void InternalArenaConstructable_;
1811   typedef void DestructorSkippable_;
1812   struct Impl_ {
1813     ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr device_name_;
1814     ::int32_t device_id_;
1815     mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
1816   };
1817   union { Impl_ _impl_; };
1818   friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto;
1819 };
1820 // -------------------------------------------------------------------
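// Illustrative usage sketch (hypothetical, not produced by protoc): a
// tensorflow::DebuggedDevice pairs a device name with a debugger-assigned integer
// id (presumably the id that Execution.output_tensor_device_ids refers to). The
// values are placeholders.
//
//   tensorflow::DebuggedDevice device;
//   device.set_device_name("/job:localhost/replica:0/task:0/device:GPU:0");
//   device.set_device_id(0);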
1821 
1822 class Execution final :
1823     public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.Execution) */ {
1824  public:
1825   inline Execution() : Execution(nullptr) {}
1826   ~Execution() override;
1827   explicit PROTOBUF_CONSTEXPR Execution(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
1828 
1829   Execution(const Execution& from);
1830   Execution(Execution&& from) noexcept
1831     : Execution() {
1832     *this = ::std::move(from);
1833   }
1834 
1835   inline Execution& operator=(const Execution& from) {
1836     if (this == &from) return *this;
1837     CopyFrom(from);
1838     return *this;
1839   }
1840   inline Execution& operator=(Execution&& from) noexcept {
1841     if (this == &from) return *this;
1842     if (GetOwningArena() == from.GetOwningArena()
1843   #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
1844         && GetOwningArena() != nullptr
1845   #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
1846     ) {
1847       InternalSwap(&from);
1848     } else {
1849       CopyFrom(from);
1850     }
1851     return *this;
1852   }
1853 
1854   static const Execution& default_instance() {
1855     return *internal_default_instance();
1856   }
1857   static inline const Execution* internal_default_instance() {
1858     return reinterpret_cast<const Execution*>(
1859                &_Execution_default_instance_);
1860   }
1861   static constexpr int kIndexInFileMessages =
1862     8;
1863 
1864   friend void swap(Execution& a, Execution& b) {
1865     a.Swap(&b);
1866   }
1867   inline void Swap(Execution* other) {
1868     if (other == this) return;
1869   #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
1870     if (GetOwningArena() != nullptr &&
1871         GetOwningArena() == other->GetOwningArena()) {
1872    #else  // PROTOBUF_FORCE_COPY_IN_SWAP
1873     if (GetOwningArena() == other->GetOwningArena()) {
1874   #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
1875       InternalSwap(other);
1876     } else {
1877       ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
1878     }
1879   }
1880   void UnsafeArenaSwap(Execution* other) {
1881     if (other == this) return;
1882     GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
1883     InternalSwap(other);
1884   }
1885 
1886   // implements Message ----------------------------------------------
1887 
1888   Execution* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
1889     return CreateMaybeMessage<Execution>(arena);
1890   }
1891   Execution* New() const {
1892     return New(nullptr);
1893   }
1894   void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
1895   void CopyFrom(const Execution& from);
1896   void MergeFrom(const Execution& from);
1897   PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
1898   bool IsInitialized() const final;
1899 
1900   size_t ByteSizeLong() const final;
1901   const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
1902   ::uint8_t* _InternalSerialize(
1903       ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
1904   int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
1905 
1906   private:
1907   void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
1908   void SharedDtor();
1909   void SetCachedSize(int size) const;
1910   void InternalSwap(Execution* other);
1911 
1912   private:
1913   friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
1914   static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
1915     return "tensorflow.Execution";
1916   }
1917   protected:
1918   explicit Execution(::PROTOBUF_NAMESPACE_ID::Arena* arena,
1919                        bool is_message_owned = false);
1920   public:
1921 
1922   std::string GetTypeName() const final;
1923 
1924   // nested types ----------------------------------------------------
1925 
1926   // accessors -------------------------------------------------------
1927 
1928   enum : int {
1929     kInputTensorIdsFieldNumber = 4,
1930     kOutputTensorIdsFieldNumber = 5,
1931     kTensorProtosFieldNumber = 7,
1932     kOutputTensorDeviceIdsFieldNumber = 9,
1933     kOpTypeFieldNumber = 1,
1934     kGraphIdFieldNumber = 3,
1935     kCodeLocationFieldNumber = 8,
1936     kNumOutputsFieldNumber = 2,
1937     kTensorDebugModeFieldNumber = 6,
1938   };
1939   // repeated int64 input_tensor_ids = 4;
1940   int input_tensor_ids_size() const;
1941   private:
1942   int _internal_input_tensor_ids_size() const;
1943   public:
1944   void clear_input_tensor_ids();
1945   private:
1946   ::int64_t _internal_input_tensor_ids(int index) const;
1947   const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
1948       _internal_input_tensor_ids() const;
1949   void _internal_add_input_tensor_ids(::int64_t value);
1950   ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
1951       _internal_mutable_input_tensor_ids();
1952   public:
1953   ::int64_t input_tensor_ids(int index) const;
1954   void set_input_tensor_ids(int index, ::int64_t value);
1955   void add_input_tensor_ids(::int64_t value);
1956   const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
1957       input_tensor_ids() const;
1958   ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
1959       mutable_input_tensor_ids();
1960 
1961   // repeated int64 output_tensor_ids = 5;
1962   int output_tensor_ids_size() const;
1963   private:
1964   int _internal_output_tensor_ids_size() const;
1965   public:
1966   void clear_output_tensor_ids();
1967   private:
1968   ::int64_t _internal_output_tensor_ids(int index) const;
1969   const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
1970       _internal_output_tensor_ids() const;
1971   void _internal_add_output_tensor_ids(::int64_t value);
1972   ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
1973       _internal_mutable_output_tensor_ids();
1974   public:
1975   ::int64_t output_tensor_ids(int index) const;
1976   void set_output_tensor_ids(int index, ::int64_t value);
1977   void add_output_tensor_ids(::int64_t value);
1978   const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
1979       output_tensor_ids() const;
1980   ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
1981       mutable_output_tensor_ids();
1982 
1983   // repeated .tensorflow.TensorProto tensor_protos = 7;
1984   int tensor_protos_size() const;
1985   private:
1986   int _internal_tensor_protos_size() const;
1987   public:
1988   void clear_tensor_protos();
1989   ::tensorflow::TensorProto* mutable_tensor_protos(int index);
1990   ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorProto >*
1991       mutable_tensor_protos();
1992   private:
1993   const ::tensorflow::TensorProto& _internal_tensor_protos(int index) const;
1994   ::tensorflow::TensorProto* _internal_add_tensor_protos();
1995   public:
1996   const ::tensorflow::TensorProto& tensor_protos(int index) const;
1997   ::tensorflow::TensorProto* add_tensor_protos();
1998   const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorProto >&
1999       tensor_protos() const;
2000 
2001   // repeated int32 output_tensor_device_ids = 9;
2002   int output_tensor_device_ids_size() const;
2003   private:
2004   int _internal_output_tensor_device_ids_size() const;
2005   public:
2006   void clear_output_tensor_device_ids();
2007   private:
2008   ::int32_t _internal_output_tensor_device_ids(int index) const;
2009   const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >&
2010       _internal_output_tensor_device_ids() const;
2011   void _internal_add_output_tensor_device_ids(::int32_t value);
2012   ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >*
2013       _internal_mutable_output_tensor_device_ids();
2014   public:
2015   ::int32_t output_tensor_device_ids(int index) const;
2016   void set_output_tensor_device_ids(int index, ::int32_t value);
2017   void add_output_tensor_device_ids(::int32_t value);
2018   const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >&
2019       output_tensor_device_ids() const;
2020   ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >*
2021       mutable_output_tensor_device_ids();
2022 
2023   // string op_type = 1;
2024   void clear_op_type();
2025   const std::string& op_type() const;
2026   template <typename ArgT0 = const std::string&, typename... ArgT>
2027   void set_op_type(ArgT0&& arg0, ArgT... args);
2028   std::string* mutable_op_type();
2029   PROTOBUF_NODISCARD std::string* release_op_type();
2030   void set_allocated_op_type(std::string* op_type);
2031   private:
2032   const std::string& _internal_op_type() const;
2033   inline PROTOBUF_ALWAYS_INLINE void _internal_set_op_type(const std::string& value);
2034   std::string* _internal_mutable_op_type();
2035   public:
2036 
2037   // string graph_id = 3;
2038   void clear_graph_id();
2039   const std::string& graph_id() const;
2040   template <typename ArgT0 = const std::string&, typename... ArgT>
2041   void set_graph_id(ArgT0&& arg0, ArgT... args);
2042   std::string* mutable_graph_id();
2043   PROTOBUF_NODISCARD std::string* release_graph_id();
2044   void set_allocated_graph_id(std::string* graph_id);
2045   private:
2046   const std::string& _internal_graph_id() const;
2047   inline PROTOBUF_ALWAYS_INLINE void _internal_set_graph_id(const std::string& value);
2048   std::string* _internal_mutable_graph_id();
2049   public:
2050 
2051   // .tensorflow.CodeLocation code_location = 8;
2052   bool has_code_location() const;
2053   private:
2054   bool _internal_has_code_location() const;
2055   public:
2056   void clear_code_location();
2057   const ::tensorflow::CodeLocation& code_location() const;
2058   PROTOBUF_NODISCARD ::tensorflow::CodeLocation* release_code_location();
2059   ::tensorflow::CodeLocation* mutable_code_location();
2060   void set_allocated_code_location(::tensorflow::CodeLocation* code_location);
2061   private:
2062   const ::tensorflow::CodeLocation& _internal_code_location() const;
2063   ::tensorflow::CodeLocation* _internal_mutable_code_location();
2064   public:
2065   void unsafe_arena_set_allocated_code_location(
2066       ::tensorflow::CodeLocation* code_location);
2067   ::tensorflow::CodeLocation* unsafe_arena_release_code_location();
2068 
2069   // int32 num_outputs = 2;
2070   void clear_num_outputs();
2071   ::int32_t num_outputs() const;
2072   void set_num_outputs(::int32_t value);
2073   private:
2074   ::int32_t _internal_num_outputs() const;
2075   void _internal_set_num_outputs(::int32_t value);
2076   public:
2077 
2078   // .tensorflow.TensorDebugMode tensor_debug_mode = 6;
2079   void clear_tensor_debug_mode();
2080   ::tensorflow::TensorDebugMode tensor_debug_mode() const;
2081   void set_tensor_debug_mode(::tensorflow::TensorDebugMode value);
2082   private:
2083   ::tensorflow::TensorDebugMode _internal_tensor_debug_mode() const;
2084   void _internal_set_tensor_debug_mode(::tensorflow::TensorDebugMode value);
2085   public:
2086 
2087   // @@protoc_insertion_point(class_scope:tensorflow.Execution)
2088  private:
2089   class _Internal;
2090 
2091   template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
2092   typedef void InternalArenaConstructable_;
2093   typedef void DestructorSkippable_;
2094   struct Impl_ {
2095     ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t > input_tensor_ids_;
2096     mutable std::atomic<int> _input_tensor_ids_cached_byte_size_;
2097     ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t > output_tensor_ids_;
2098     mutable std::atomic<int> _output_tensor_ids_cached_byte_size_;
2099     ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorProto > tensor_protos_;
2100     ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t > output_tensor_device_ids_;
2101     mutable std::atomic<int> _output_tensor_device_ids_cached_byte_size_;
2102     ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr op_type_;
2103     ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr graph_id_;
2104     ::tensorflow::CodeLocation* code_location_;
2105     ::int32_t num_outputs_;
2106     int tensor_debug_mode_;
2107     mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
2108   };
2109   union { Impl_ _impl_; };
2110   friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto;
2111 };
2112 // -------------------------------------------------------------------
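// Illustrative usage sketch (hypothetical, not produced by protoc): recording an
// Execution event with the accessors declared above. The tensor ids, values, and
// the variable some_tensor_proto (a tensorflow::TensorProto) are placeholders;
// FULL_TENSOR is one of the TensorDebugMode enumerators defined elsewhere in this
// header.
//
//   tensorflow::Execution execution;
//   execution.set_op_type("AddV2");
//   execution.set_num_outputs(1);
//   execution.add_input_tensor_ids(7);
//   execution.add_input_tensor_ids(8);
//   execution.add_output_tensor_ids(9);
//   execution.add_output_tensor_device_ids(0);
//   execution.set_tensor_debug_mode(tensorflow::FULL_TENSOR);
//   *execution.add_tensor_protos() = some_tensor_proto;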
2113 
2114 class GraphExecutionTrace final :
2115     public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.GraphExecutionTrace) */ {
2116  public:
2117   inline GraphExecutionTrace() : GraphExecutionTrace(nullptr) {}
2118   ~GraphExecutionTrace() override;
2119   explicit PROTOBUF_CONSTEXPR GraphExecutionTrace(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
2120 
2121   GraphExecutionTrace(const GraphExecutionTrace& from);
2122   GraphExecutionTrace(GraphExecutionTrace&& from) noexcept
2123     : GraphExecutionTrace() {
2124     *this = ::std::move(from);
2125   }
2126 
2127   inline GraphExecutionTrace& operator=(const GraphExecutionTrace& from) {
2128     if (this == &from) return *this;
2129     CopyFrom(from);
2130     return *this;
2131   }
2132   inline GraphExecutionTrace& operator=(GraphExecutionTrace&& from) noexcept {
2133     if (this == &from) return *this;
2134     if (GetOwningArena() == from.GetOwningArena()
2135   #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
2136         && GetOwningArena() != nullptr
2137   #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
2138     ) {
2139       InternalSwap(&from);
2140     } else {
2141       CopyFrom(from);
2142     }
2143     return *this;
2144   }
2145 
2146   static const GraphExecutionTrace& default_instance() {
2147     return *internal_default_instance();
2148   }
2149   static inline const GraphExecutionTrace* internal_default_instance() {
2150     return reinterpret_cast<const GraphExecutionTrace*>(
2151                &_GraphExecutionTrace_default_instance_);
2152   }
2153   static constexpr int kIndexInFileMessages =
2154     9;
2155 
2156   friend void swap(GraphExecutionTrace& a, GraphExecutionTrace& b) {
2157     a.Swap(&b);
2158   }
2159   inline void Swap(GraphExecutionTrace* other) {
2160     if (other == this) return;
2161   #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
2162     if (GetOwningArena() != nullptr &&
2163         GetOwningArena() == other->GetOwningArena()) {
2164    #else  // PROTOBUF_FORCE_COPY_IN_SWAP
2165     if (GetOwningArena() == other->GetOwningArena()) {
2166   #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
2167       InternalSwap(other);
2168     } else {
2169       ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
2170     }
2171   }
2172   void UnsafeArenaSwap(GraphExecutionTrace* other) {
2173     if (other == this) return;
2174     GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
2175     InternalSwap(other);
2176   }
2177 
2178   // implements Message ----------------------------------------------
2179 
2180   GraphExecutionTrace* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
2181     return CreateMaybeMessage<GraphExecutionTrace>(arena);
2182   }
2183   GraphExecutionTrace* New() const {
2184     return New(nullptr);
2185   }
2186   void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from)  final;
2187   void CopyFrom(const GraphExecutionTrace& from);
2188   void MergeFrom(const GraphExecutionTrace& from);
2189   PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
2190   bool IsInitialized() const final;
2191 
2192   size_t ByteSizeLong() const final;
2193   const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
2194   ::uint8_t* _InternalSerialize(
2195       ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
2196   int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
2197 
2198   private:
2199   void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
2200   void SharedDtor();
2201   void SetCachedSize(int size) const;
2202   void InternalSwap(GraphExecutionTrace* other);
2203 
2204   private:
2205   friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
2206   static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
2207     return "tensorflow.GraphExecutionTrace";
2208   }
2209   protected:
2210   explicit GraphExecutionTrace(::PROTOBUF_NAMESPACE_ID::Arena* arena,
2211                        bool is_message_owned = false);
2212   public:
2213 
2214   std::string GetTypeName() const final;
2215 
2216   // nested types ----------------------------------------------------
2217 
2218   // accessors -------------------------------------------------------
2219 
2220   enum : int {
2221     kTfdbgContextIdFieldNumber = 1,
2222     kOpNameFieldNumber = 2,
2223     kDeviceNameFieldNumber = 6,
2224     kTensorProtoFieldNumber = 5,
2225     kOutputSlotFieldNumber = 3,
2226     kTensorDebugModeFieldNumber = 4,
2227   };
2228   // string tfdbg_context_id = 1;
2229   void clear_tfdbg_context_id();
2230   const std::string& tfdbg_context_id() const;
2231   template <typename ArgT0 = const std::string&, typename... ArgT>
2232   void set_tfdbg_context_id(ArgT0&& arg0, ArgT... args);
2233   std::string* mutable_tfdbg_context_id();
2234   PROTOBUF_NODISCARD std::string* release_tfdbg_context_id();
2235   void set_allocated_tfdbg_context_id(std::string* tfdbg_context_id);
2236   private:
2237   const std::string& _internal_tfdbg_context_id() const;
2238   inline PROTOBUF_ALWAYS_INLINE void _internal_set_tfdbg_context_id(const std::string& value);
2239   std::string* _internal_mutable_tfdbg_context_id();
2240   public:
2241 
2242   // string op_name = 2;
2243   void clear_op_name();
2244   const std::string& op_name() const;
2245   template <typename ArgT0 = const std::string&, typename... ArgT>
2246   void set_op_name(ArgT0&& arg0, ArgT... args);
2247   std::string* mutable_op_name();
2248   PROTOBUF_NODISCARD std::string* release_op_name();
2249   void set_allocated_op_name(std::string* op_name);
2250   private:
2251   const std::string& _internal_op_name() const;
2252   inline PROTOBUF_ALWAYS_INLINE void _internal_set_op_name(const std::string& value);
2253   std::string* _internal_mutable_op_name();
2254   public:
2255 
2256   // string device_name = 6;
2257   void clear_device_name();
2258   const std::string& device_name() const;
2259   template <typename ArgT0 = const std::string&, typename... ArgT>
2260   void set_device_name(ArgT0&& arg0, ArgT... args);
2261   std::string* mutable_device_name();
2262   PROTOBUF_NODISCARD std::string* release_device_name();
2263   void set_allocated_device_name(std::string* device_name);
2264   private:
2265   const std::string& _internal_device_name() const;
2266   inline PROTOBUF_ALWAYS_INLINE void _internal_set_device_name(const std::string& value);
2267   std::string* _internal_mutable_device_name();
2268   public:
2269 
2270   // .tensorflow.TensorProto tensor_proto = 5;
2271   bool has_tensor_proto() const;
2272   private:
2273   bool _internal_has_tensor_proto() const;
2274   public:
2275   void clear_tensor_proto();
2276   const ::tensorflow::TensorProto& tensor_proto() const;
2277   PROTOBUF_NODISCARD ::tensorflow::TensorProto* release_tensor_proto();
2278   ::tensorflow::TensorProto* mutable_tensor_proto();
2279   void set_allocated_tensor_proto(::tensorflow::TensorProto* tensor_proto);
2280   private:
2281   const ::tensorflow::TensorProto& _internal_tensor_proto() const;
2282   ::tensorflow::TensorProto* _internal_mutable_tensor_proto();
2283   public:
2284   void unsafe_arena_set_allocated_tensor_proto(
2285       ::tensorflow::TensorProto* tensor_proto);
2286   ::tensorflow::TensorProto* unsafe_arena_release_tensor_proto();
2287 
2288   // int32 output_slot = 3;
2289   void clear_output_slot();
2290   ::int32_t output_slot() const;
2291   void set_output_slot(::int32_t value);
2292   private:
2293   ::int32_t _internal_output_slot() const;
2294   void _internal_set_output_slot(::int32_t value);
2295   public:
2296 
2297   // .tensorflow.TensorDebugMode tensor_debug_mode = 4;
2298   void clear_tensor_debug_mode();
2299   ::tensorflow::TensorDebugMode tensor_debug_mode() const;
2300   void set_tensor_debug_mode(::tensorflow::TensorDebugMode value);
2301   private:
2302   ::tensorflow::TensorDebugMode _internal_tensor_debug_mode() const;
2303   void _internal_set_tensor_debug_mode(::tensorflow::TensorDebugMode value);
2304   public:
2305 
2306   // @@protoc_insertion_point(class_scope:tensorflow.GraphExecutionTrace)
2307  private:
2308   class _Internal;
2309 
2310   template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
2311   typedef void InternalArenaConstructable_;
2312   typedef void DestructorSkippable_;
2313   struct Impl_ {
2314     ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr tfdbg_context_id_;
2315     ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr op_name_;
2316     ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr device_name_;
2317     ::tensorflow::TensorProto* tensor_proto_;
2318     ::int32_t output_slot_;
2319     int tensor_debug_mode_;
2320     mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
2321   };
2322   union { Impl_ _impl_; };
2323   friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto;
2324 };
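// Illustrative usage sketch (hypothetical, not produced by protoc): populating a
// tensorflow::GraphExecutionTrace via the accessors declared above. The literal
// values and the variable some_tensor_proto (a tensorflow::TensorProto) are
// placeholders.
//
//   tensorflow::GraphExecutionTrace trace;
//   trace.set_tfdbg_context_id("g1");
//   trace.set_op_name("dense/MatMul");
//   trace.set_output_slot(0);
//   trace.set_device_name("/job:localhost/replica:0/task:0/device:GPU:0");
//   trace.set_tensor_debug_mode(tensorflow::FULL_TENSOR);
//   *trace.mutable_tensor_proto() = some_tensor_proto;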
2325 // ===================================================================
2326 
2327 
2328 // ===================================================================
2329 
2330 #ifdef __GNUC__
2331   #pragma GCC diagnostic push
2332   #pragma GCC diagnostic ignored "-Wstrict-aliasing"
2333 #endif  // __GNUC__
2334 // DebugEvent
2335 
2336 // double wall_time = 1;
2337 inline void DebugEvent::clear_wall_time() {
2338   _impl_.wall_time_ = 0;
2339 }
2340 inline double DebugEvent::_internal_wall_time() const {
2341   return _impl_.wall_time_;
2342 }
2343 inline double DebugEvent::wall_time() const {
2344   // @@protoc_insertion_point(field_get:tensorflow.DebugEvent.wall_time)
2345   return _internal_wall_time();
2346 }
2347 inline void DebugEvent::_internal_set_wall_time(double value) {
2348 
2349   _impl_.wall_time_ = value;
2350 }
2351 inline void DebugEvent::set_wall_time(double value) {
2352   _internal_set_wall_time(value);
2353   // @@protoc_insertion_point(field_set:tensorflow.DebugEvent.wall_time)
2354 }
2355 
2356 // int64 step = 2;
2357 inline void DebugEvent::clear_step() {
2358   _impl_.step_ = ::int64_t{0};
2359 }
2360 inline ::int64_t DebugEvent::_internal_step() const {
2361   return _impl_.step_;
2362 }
2363 inline ::int64_t DebugEvent::step() const {
2364   // @@protoc_insertion_point(field_get:tensorflow.DebugEvent.step)
2365   return _internal_step();
2366 }
2367 inline void DebugEvent::_internal_set_step(::int64_t value) {
2368 
2369   _impl_.step_ = value;
2370 }
2371 inline void DebugEvent::set_step(::int64_t value) {
2372   _internal_set_step(value);
2373   // @@protoc_insertion_point(field_set:tensorflow.DebugEvent.step)
2374 }
2375 
2376 // .tensorflow.DebugMetadata debug_metadata = 3;
2377 inline bool DebugEvent::_internal_has_debug_metadata() const {
2378   return what_case() == kDebugMetadata;
2379 }
2380 inline bool DebugEvent::has_debug_metadata() const {
2381   return _internal_has_debug_metadata();
2382 }
2383 inline void DebugEvent::set_has_debug_metadata() {
2384   _impl_._oneof_case_[0] = kDebugMetadata;
2385 }
2386 inline void DebugEvent::clear_debug_metadata() {
2387   if (_internal_has_debug_metadata()) {
2388     if (GetArenaForAllocation() == nullptr) {
2389       delete _impl_.what_.debug_metadata_;
2390     }
2391     clear_has_what();
2392   }
2393 }
2394 inline ::tensorflow::DebugMetadata* DebugEvent::release_debug_metadata() {
2395   // @@protoc_insertion_point(field_release:tensorflow.DebugEvent.debug_metadata)
2396   if (_internal_has_debug_metadata()) {
2397     clear_has_what();
2398     ::tensorflow::DebugMetadata* temp = _impl_.what_.debug_metadata_;
2399     if (GetArenaForAllocation() != nullptr) {
2400       temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
2401     }
2402     _impl_.what_.debug_metadata_ = nullptr;
2403     return temp;
2404   } else {
2405     return nullptr;
2406   }
2407 }
2408 inline const ::tensorflow::DebugMetadata& DebugEvent::_internal_debug_metadata() const {
2409   return _internal_has_debug_metadata()
2410       ? *_impl_.what_.debug_metadata_
2411       : reinterpret_cast< ::tensorflow::DebugMetadata&>(::tensorflow::_DebugMetadata_default_instance_);
2412 }
2413 inline const ::tensorflow::DebugMetadata& DebugEvent::debug_metadata() const {
2414   // @@protoc_insertion_point(field_get:tensorflow.DebugEvent.debug_metadata)
2415   return _internal_debug_metadata();
2416 }
2417 inline ::tensorflow::DebugMetadata* DebugEvent::unsafe_arena_release_debug_metadata() {
2418   // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.DebugEvent.debug_metadata)
2419   if (_internal_has_debug_metadata()) {
2420     clear_has_what();
2421     ::tensorflow::DebugMetadata* temp = _impl_.what_.debug_metadata_;
2422     _impl_.what_.debug_metadata_ = nullptr;
2423     return temp;
2424   } else {
2425     return nullptr;
2426   }
2427 }
2428 inline void DebugEvent::unsafe_arena_set_allocated_debug_metadata(::tensorflow::DebugMetadata* debug_metadata) {
2429   clear_what();
2430   if (debug_metadata) {
2431     set_has_debug_metadata();
2432     _impl_.what_.debug_metadata_ = debug_metadata;
2433   }
2434   // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.DebugEvent.debug_metadata)
2435 }
2436 inline ::tensorflow::DebugMetadata* DebugEvent::_internal_mutable_debug_metadata() {
2437   if (!_internal_has_debug_metadata()) {
2438     clear_what();
2439     set_has_debug_metadata();
2440     _impl_.what_.debug_metadata_ = CreateMaybeMessage< ::tensorflow::DebugMetadata >(GetArenaForAllocation());
2441   }
2442   return _impl_.what_.debug_metadata_;
2443 }
2444 inline ::tensorflow::DebugMetadata* DebugEvent::mutable_debug_metadata() {
2445   ::tensorflow::DebugMetadata* _msg = _internal_mutable_debug_metadata();
2446   // @@protoc_insertion_point(field_mutable:tensorflow.DebugEvent.debug_metadata)
2447   return _msg;
2448 }
2449 
2450 // .tensorflow.SourceFile source_file = 4;
2451 inline bool DebugEvent::_internal_has_source_file() const {
2452   return what_case() == kSourceFile;
2453 }
2454 inline bool DebugEvent::has_source_file() const {
2455   return _internal_has_source_file();
2456 }
2457 inline void DebugEvent::set_has_source_file() {
2458   _impl_._oneof_case_[0] = kSourceFile;
2459 }
2460 inline void DebugEvent::clear_source_file() {
2461   if (_internal_has_source_file()) {
2462     if (GetArenaForAllocation() == nullptr) {
2463       delete _impl_.what_.source_file_;
2464     }
2465     clear_has_what();
2466   }
2467 }
2468 inline ::tensorflow::SourceFile* DebugEvent::release_source_file() {
2469   // @@protoc_insertion_point(field_release:tensorflow.DebugEvent.source_file)
2470   if (_internal_has_source_file()) {
2471     clear_has_what();
2472     ::tensorflow::SourceFile* temp = _impl_.what_.source_file_;
2473     if (GetArenaForAllocation() != nullptr) {
2474       temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
2475     }
2476     _impl_.what_.source_file_ = nullptr;
2477     return temp;
2478   } else {
2479     return nullptr;
2480   }
2481 }
2482 inline const ::tensorflow::SourceFile& DebugEvent::_internal_source_file() const {
2483   return _internal_has_source_file()
2484       ? *_impl_.what_.source_file_
2485       : reinterpret_cast< ::tensorflow::SourceFile&>(::tensorflow::_SourceFile_default_instance_);
2486 }
2487 inline const ::tensorflow::SourceFile& DebugEvent::source_file() const {
2488   // @@protoc_insertion_point(field_get:tensorflow.DebugEvent.source_file)
2489   return _internal_source_file();
2490 }
2491 inline ::tensorflow::SourceFile* DebugEvent::unsafe_arena_release_source_file() {
2492   // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.DebugEvent.source_file)
2493   if (_internal_has_source_file()) {
2494     clear_has_what();
2495     ::tensorflow::SourceFile* temp = _impl_.what_.source_file_;
2496     _impl_.what_.source_file_ = nullptr;
2497     return temp;
2498   } else {
2499     return nullptr;
2500   }
2501 }
2502 inline void DebugEvent::unsafe_arena_set_allocated_source_file(::tensorflow::SourceFile* source_file) {
2503   clear_what();
2504   if (source_file) {
2505     set_has_source_file();
2506     _impl_.what_.source_file_ = source_file;
2507   }
2508   // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.DebugEvent.source_file)
2509 }
2510 inline ::tensorflow::SourceFile* DebugEvent::_internal_mutable_source_file() {
2511   if (!_internal_has_source_file()) {
2512     clear_what();
2513     set_has_source_file();
2514     _impl_.what_.source_file_ = CreateMaybeMessage< ::tensorflow::SourceFile >(GetArenaForAllocation());
2515   }
2516   return _impl_.what_.source_file_;
2517 }
2518 inline ::tensorflow::SourceFile* DebugEvent::mutable_source_file() {
2519   ::tensorflow::SourceFile* _msg = _internal_mutable_source_file();
2520   // @@protoc_insertion_point(field_mutable:tensorflow.DebugEvent.source_file)
2521   return _msg;
2522 }
2523 
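// Illustrative sketch (hypothetical, not produced by protoc) of the `what` oneof
// behavior implemented by the accessors above: creating one member clears any
// previously selected member, so at most one of debug_metadata, source_file,
// stack_frame_with_id, etc. is set on a DebugEvent at a time.
//
//   tensorflow::DebugEvent event;
//   event.set_wall_time(1.5);
//   event.set_step(100);
//   event.mutable_debug_metadata();   // selects the debug_metadata member
//   // event.has_debug_metadata() -> true
//   event.mutable_source_file();      // switches the oneof; debug_metadata is cleared
//   // event.has_debug_metadata() -> false, event.has_source_file() -> true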
2524 // .tensorflow.StackFrameWithId stack_frame_with_id = 6;
2525 inline bool DebugEvent::_internal_has_stack_frame_with_id() const {
2526   return what_case() == kStackFrameWithId;
2527 }
2528 inline bool DebugEvent::has_stack_frame_with_id() const {
2529   return _internal_has_stack_frame_with_id();
2530 }
2531 inline void DebugEvent::set_has_stack_frame_with_id() {
2532   _impl_._oneof_case_[0] = kStackFrameWithId;
2533 }
2534 inline void DebugEvent::clear_stack_frame_with_id() {
2535   if (_internal_has_stack_frame_with_id()) {
2536     if (GetArenaForAllocation() == nullptr) {
2537       delete _impl_.what_.stack_frame_with_id_;
2538     }
2539     clear_has_what();
2540   }
2541 }
2542 inline ::tensorflow::StackFrameWithId* DebugEvent::release_stack_frame_with_id() {
2543   // @@protoc_insertion_point(field_release:tensorflow.DebugEvent.stack_frame_with_id)
2544   if (_internal_has_stack_frame_with_id()) {
2545     clear_has_what();
2546     ::tensorflow::StackFrameWithId* temp = _impl_.what_.stack_frame_with_id_;
2547     if (GetArenaForAllocation() != nullptr) {
2548       temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
2549     }
2550     _impl_.what_.stack_frame_with_id_ = nullptr;
2551     return temp;
2552   } else {
2553     return nullptr;
2554   }
2555 }
2556 inline const ::tensorflow::StackFrameWithId& DebugEvent::_internal_stack_frame_with_id() const {
2557   return _internal_has_stack_frame_with_id()
2558       ? *_impl_.what_.stack_frame_with_id_
2559       : reinterpret_cast< ::tensorflow::StackFrameWithId&>(::tensorflow::_StackFrameWithId_default_instance_);
2560 }
2561 inline const ::tensorflow::StackFrameWithId& DebugEvent::stack_frame_with_id() const {
2562   // @@protoc_insertion_point(field_get:tensorflow.DebugEvent.stack_frame_with_id)
2563   return _internal_stack_frame_with_id();
2564 }
2565 inline ::tensorflow::StackFrameWithId* DebugEvent::unsafe_arena_release_stack_frame_with_id() {
2566   // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.DebugEvent.stack_frame_with_id)
2567   if (_internal_has_stack_frame_with_id()) {
2568     clear_has_what();
2569     ::tensorflow::StackFrameWithId* temp = _impl_.what_.stack_frame_with_id_;
2570     _impl_.what_.stack_frame_with_id_ = nullptr;
2571     return temp;
2572   } else {
2573     return nullptr;
2574   }
2575 }
2576 inline void DebugEvent::unsafe_arena_set_allocated_stack_frame_with_id(::tensorflow::StackFrameWithId* stack_frame_with_id) {
2577   clear_what();
2578   if (stack_frame_with_id) {
2579     set_has_stack_frame_with_id();
2580     _impl_.what_.stack_frame_with_id_ = stack_frame_with_id;
2581   }
2582   // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.DebugEvent.stack_frame_with_id)
2583 }
2584 inline ::tensorflow::StackFrameWithId* DebugEvent::_internal_mutable_stack_frame_with_id() {
2585   if (!_internal_has_stack_frame_with_id()) {
2586     clear_what();
2587     set_has_stack_frame_with_id();
2588     _impl_.what_.stack_frame_with_id_ = CreateMaybeMessage< ::tensorflow::StackFrameWithId >(GetArenaForAllocation());
2589   }
2590   return _impl_.what_.stack_frame_with_id_;
2591 }
2592 inline ::tensorflow::StackFrameWithId* DebugEvent::mutable_stack_frame_with_id() {
2593   ::tensorflow::StackFrameWithId* _msg = _internal_mutable_stack_frame_with_id();
2594   // @@protoc_insertion_point(field_mutable:tensorflow.DebugEvent.stack_frame_with_id)
2595   return _msg;
2596 }
2597 
2598 // .tensorflow.GraphOpCreation graph_op_creation = 7;
2599 inline bool DebugEvent::_internal_has_graph_op_creation() const {
2600   return what_case() == kGraphOpCreation;
2601 }
2602 inline bool DebugEvent::has_graph_op_creation() const {
2603   return _internal_has_graph_op_creation();
2604 }
2605 inline void DebugEvent::set_has_graph_op_creation() {
2606   _impl_._oneof_case_[0] = kGraphOpCreation;
2607 }
2608 inline void DebugEvent::clear_graph_op_creation() {
2609   if (_internal_has_graph_op_creation()) {
2610     if (GetArenaForAllocation() == nullptr) {
2611       delete _impl_.what_.graph_op_creation_;
2612     }
2613     clear_has_what();
2614   }
2615 }
2616 inline ::tensorflow::GraphOpCreation* DebugEvent::release_graph_op_creation() {
2617   // @@protoc_insertion_point(field_release:tensorflow.DebugEvent.graph_op_creation)
2618   if (_internal_has_graph_op_creation()) {
2619     clear_has_what();
2620     ::tensorflow::GraphOpCreation* temp = _impl_.what_.graph_op_creation_;
2621     if (GetArenaForAllocation() != nullptr) {
2622       temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
2623     }
2624     _impl_.what_.graph_op_creation_ = nullptr;
2625     return temp;
2626   } else {
2627     return nullptr;
2628   }
2629 }
_internal_graph_op_creation()2630 inline const ::tensorflow::GraphOpCreation& DebugEvent::_internal_graph_op_creation() const {
2631   return _internal_has_graph_op_creation()
2632       ? *_impl_.what_.graph_op_creation_
2633       : reinterpret_cast< ::tensorflow::GraphOpCreation&>(::tensorflow::_GraphOpCreation_default_instance_);
2634 }
graph_op_creation()2635 inline const ::tensorflow::GraphOpCreation& DebugEvent::graph_op_creation() const {
2636   // @@protoc_insertion_point(field_get:tensorflow.DebugEvent.graph_op_creation)
2637   return _internal_graph_op_creation();
2638 }
unsafe_arena_release_graph_op_creation()2639 inline ::tensorflow::GraphOpCreation* DebugEvent::unsafe_arena_release_graph_op_creation() {
2640   // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.DebugEvent.graph_op_creation)
2641   if (_internal_has_graph_op_creation()) {
2642     clear_has_what();
2643     ::tensorflow::GraphOpCreation* temp = _impl_.what_.graph_op_creation_;
2644     _impl_.what_.graph_op_creation_ = nullptr;
2645     return temp;
2646   } else {
2647     return nullptr;
2648   }
2649 }
unsafe_arena_set_allocated_graph_op_creation(::tensorflow::GraphOpCreation * graph_op_creation)2650 inline void DebugEvent::unsafe_arena_set_allocated_graph_op_creation(::tensorflow::GraphOpCreation* graph_op_creation) {
2651   clear_what();
2652   if (graph_op_creation) {
2653     set_has_graph_op_creation();
2654     _impl_.what_.graph_op_creation_ = graph_op_creation;
2655   }
2656   // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.DebugEvent.graph_op_creation)
2657 }
_internal_mutable_graph_op_creation()2658 inline ::tensorflow::GraphOpCreation* DebugEvent::_internal_mutable_graph_op_creation() {
2659   if (!_internal_has_graph_op_creation()) {
2660     clear_what();
2661     set_has_graph_op_creation();
2662     _impl_.what_.graph_op_creation_ = CreateMaybeMessage< ::tensorflow::GraphOpCreation >(GetArenaForAllocation());
2663   }
2664   return _impl_.what_.graph_op_creation_;
2665 }
mutable_graph_op_creation()2666 inline ::tensorflow::GraphOpCreation* DebugEvent::mutable_graph_op_creation() {
2667   ::tensorflow::GraphOpCreation* _msg = _internal_mutable_graph_op_creation();
2668   // @@protoc_insertion_point(field_mutable:tensorflow.DebugEvent.graph_op_creation)
2669   return _msg;
2670 }
2671 
// .tensorflow.DebuggedGraph debugged_graph = 8;
inline bool DebugEvent::_internal_has_debugged_graph() const {
  return what_case() == kDebuggedGraph;
}
inline bool DebugEvent::has_debugged_graph() const {
  return _internal_has_debugged_graph();
}
inline void DebugEvent::set_has_debugged_graph() {
  _impl_._oneof_case_[0] = kDebuggedGraph;
}
inline void DebugEvent::clear_debugged_graph() {
  if (_internal_has_debugged_graph()) {
    if (GetArenaForAllocation() == nullptr) {
      delete _impl_.what_.debugged_graph_;
    }
    clear_has_what();
  }
}
inline ::tensorflow::DebuggedGraph* DebugEvent::release_debugged_graph() {
  // @@protoc_insertion_point(field_release:tensorflow.DebugEvent.debugged_graph)
  if (_internal_has_debugged_graph()) {
    clear_has_what();
    ::tensorflow::DebuggedGraph* temp = _impl_.what_.debugged_graph_;
    if (GetArenaForAllocation() != nullptr) {
      temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
    }
    _impl_.what_.debugged_graph_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
inline const ::tensorflow::DebuggedGraph& DebugEvent::_internal_debugged_graph() const {
  return _internal_has_debugged_graph()
      ? *_impl_.what_.debugged_graph_
      : reinterpret_cast< ::tensorflow::DebuggedGraph&>(::tensorflow::_DebuggedGraph_default_instance_);
}
inline const ::tensorflow::DebuggedGraph& DebugEvent::debugged_graph() const {
  // @@protoc_insertion_point(field_get:tensorflow.DebugEvent.debugged_graph)
  return _internal_debugged_graph();
}
inline ::tensorflow::DebuggedGraph* DebugEvent::unsafe_arena_release_debugged_graph() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.DebugEvent.debugged_graph)
  if (_internal_has_debugged_graph()) {
    clear_has_what();
    ::tensorflow::DebuggedGraph* temp = _impl_.what_.debugged_graph_;
    _impl_.what_.debugged_graph_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
inline void DebugEvent::unsafe_arena_set_allocated_debugged_graph(::tensorflow::DebuggedGraph* debugged_graph) {
  clear_what();
  if (debugged_graph) {
    set_has_debugged_graph();
    _impl_.what_.debugged_graph_ = debugged_graph;
  }
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.DebugEvent.debugged_graph)
}
inline ::tensorflow::DebuggedGraph* DebugEvent::_internal_mutable_debugged_graph() {
  if (!_internal_has_debugged_graph()) {
    clear_what();
    set_has_debugged_graph();
    _impl_.what_.debugged_graph_ = CreateMaybeMessage< ::tensorflow::DebuggedGraph >(GetArenaForAllocation());
  }
  return _impl_.what_.debugged_graph_;
}
inline ::tensorflow::DebuggedGraph* DebugEvent::mutable_debugged_graph() {
  ::tensorflow::DebuggedGraph* _msg = _internal_mutable_debugged_graph();
  // @@protoc_insertion_point(field_mutable:tensorflow.DebugEvent.debugged_graph)
  return _msg;
}

// .tensorflow.Execution execution = 9;
inline bool DebugEvent::_internal_has_execution() const {
  return what_case() == kExecution;
}
inline bool DebugEvent::has_execution() const {
  return _internal_has_execution();
}
inline void DebugEvent::set_has_execution() {
  _impl_._oneof_case_[0] = kExecution;
}
inline void DebugEvent::clear_execution() {
  if (_internal_has_execution()) {
    if (GetArenaForAllocation() == nullptr) {
      delete _impl_.what_.execution_;
    }
    clear_has_what();
  }
}
inline ::tensorflow::Execution* DebugEvent::release_execution() {
  // @@protoc_insertion_point(field_release:tensorflow.DebugEvent.execution)
  if (_internal_has_execution()) {
    clear_has_what();
    ::tensorflow::Execution* temp = _impl_.what_.execution_;
    if (GetArenaForAllocation() != nullptr) {
      temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
    }
    _impl_.what_.execution_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
inline const ::tensorflow::Execution& DebugEvent::_internal_execution() const {
  return _internal_has_execution()
      ? *_impl_.what_.execution_
      : reinterpret_cast< ::tensorflow::Execution&>(::tensorflow::_Execution_default_instance_);
}
inline const ::tensorflow::Execution& DebugEvent::execution() const {
  // @@protoc_insertion_point(field_get:tensorflow.DebugEvent.execution)
  return _internal_execution();
}
inline ::tensorflow::Execution* DebugEvent::unsafe_arena_release_execution() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.DebugEvent.execution)
  if (_internal_has_execution()) {
    clear_has_what();
    ::tensorflow::Execution* temp = _impl_.what_.execution_;
    _impl_.what_.execution_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
inline void DebugEvent::unsafe_arena_set_allocated_execution(::tensorflow::Execution* execution) {
  clear_what();
  if (execution) {
    set_has_execution();
    _impl_.what_.execution_ = execution;
  }
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.DebugEvent.execution)
}
inline ::tensorflow::Execution* DebugEvent::_internal_mutable_execution() {
  if (!_internal_has_execution()) {
    clear_what();
    set_has_execution();
    _impl_.what_.execution_ = CreateMaybeMessage< ::tensorflow::Execution >(GetArenaForAllocation());
  }
  return _impl_.what_.execution_;
}
inline ::tensorflow::Execution* DebugEvent::mutable_execution() {
  ::tensorflow::Execution* _msg = _internal_mutable_execution();
  // @@protoc_insertion_point(field_mutable:tensorflow.DebugEvent.execution)
  return _msg;
}

// .tensorflow.GraphExecutionTrace graph_execution_trace = 10;
inline bool DebugEvent::_internal_has_graph_execution_trace() const {
  return what_case() == kGraphExecutionTrace;
}
inline bool DebugEvent::has_graph_execution_trace() const {
  return _internal_has_graph_execution_trace();
}
inline void DebugEvent::set_has_graph_execution_trace() {
  _impl_._oneof_case_[0] = kGraphExecutionTrace;
}
inline void DebugEvent::clear_graph_execution_trace() {
  if (_internal_has_graph_execution_trace()) {
    if (GetArenaForAllocation() == nullptr) {
      delete _impl_.what_.graph_execution_trace_;
    }
    clear_has_what();
  }
}
inline ::tensorflow::GraphExecutionTrace* DebugEvent::release_graph_execution_trace() {
  // @@protoc_insertion_point(field_release:tensorflow.DebugEvent.graph_execution_trace)
  if (_internal_has_graph_execution_trace()) {
    clear_has_what();
    ::tensorflow::GraphExecutionTrace* temp = _impl_.what_.graph_execution_trace_;
    if (GetArenaForAllocation() != nullptr) {
      temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
    }
    _impl_.what_.graph_execution_trace_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
inline const ::tensorflow::GraphExecutionTrace& DebugEvent::_internal_graph_execution_trace() const {
  return _internal_has_graph_execution_trace()
      ? *_impl_.what_.graph_execution_trace_
      : reinterpret_cast< ::tensorflow::GraphExecutionTrace&>(::tensorflow::_GraphExecutionTrace_default_instance_);
}
inline const ::tensorflow::GraphExecutionTrace& DebugEvent::graph_execution_trace() const {
  // @@protoc_insertion_point(field_get:tensorflow.DebugEvent.graph_execution_trace)
  return _internal_graph_execution_trace();
}
inline ::tensorflow::GraphExecutionTrace* DebugEvent::unsafe_arena_release_graph_execution_trace() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.DebugEvent.graph_execution_trace)
  if (_internal_has_graph_execution_trace()) {
    clear_has_what();
    ::tensorflow::GraphExecutionTrace* temp = _impl_.what_.graph_execution_trace_;
    _impl_.what_.graph_execution_trace_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
inline void DebugEvent::unsafe_arena_set_allocated_graph_execution_trace(::tensorflow::GraphExecutionTrace* graph_execution_trace) {
  clear_what();
  if (graph_execution_trace) {
    set_has_graph_execution_trace();
    _impl_.what_.graph_execution_trace_ = graph_execution_trace;
  }
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.DebugEvent.graph_execution_trace)
}
inline ::tensorflow::GraphExecutionTrace* DebugEvent::_internal_mutable_graph_execution_trace() {
  if (!_internal_has_graph_execution_trace()) {
    clear_what();
    set_has_graph_execution_trace();
    _impl_.what_.graph_execution_trace_ = CreateMaybeMessage< ::tensorflow::GraphExecutionTrace >(GetArenaForAllocation());
  }
  return _impl_.what_.graph_execution_trace_;
}
inline ::tensorflow::GraphExecutionTrace* DebugEvent::mutable_graph_execution_trace() {
  ::tensorflow::GraphExecutionTrace* _msg = _internal_mutable_graph_execution_trace();
  // @@protoc_insertion_point(field_mutable:tensorflow.DebugEvent.graph_execution_trace)
  return _msg;
}

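// Illustrative usage note (hand-written, not emitted by protoc): the oneof
// message members above expose two release flavors. release_*() always hands
// back a heap-owned object -- when the DebugEvent lives on an arena the value
// is first copied via DuplicateIfNonNull -- while unsafe_arena_release_*()
// returns the arena-owned pointer directly and is only safe when the caller
// manages arena lifetimes itself. A minimal sketch:
//
//   tensorflow::DebugEvent event;
//   event.mutable_graph_execution_trace();          // selects this oneof member
//   std::unique_ptr<tensorflow::GraphExecutionTrace> trace(
//       event.release_graph_execution_trace());     // heap-owned copy or original
//   // event.what_case() is now WHAT_NOT_SET.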
// string graph_id = 11;
inline bool DebugEvent::_internal_has_graph_id() const {
  return what_case() == kGraphId;
}
inline bool DebugEvent::has_graph_id() const {
  return _internal_has_graph_id();
}
inline void DebugEvent::set_has_graph_id() {
  _impl_._oneof_case_[0] = kGraphId;
}
inline void DebugEvent::clear_graph_id() {
  if (_internal_has_graph_id()) {
    _impl_.what_.graph_id_.Destroy();
    clear_has_what();
  }
}
inline const std::string& DebugEvent::graph_id() const {
  // @@protoc_insertion_point(field_get:tensorflow.DebugEvent.graph_id)
  return _internal_graph_id();
}
template <typename ArgT0, typename... ArgT>
inline void DebugEvent::set_graph_id(ArgT0&& arg0, ArgT... args) {
  if (!_internal_has_graph_id()) {
    clear_what();
    set_has_graph_id();
    _impl_.what_.graph_id_.InitDefault();
  }
  _impl_.what_.graph_id_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.DebugEvent.graph_id)
}
inline std::string* DebugEvent::mutable_graph_id() {
  std::string* _s = _internal_mutable_graph_id();
  // @@protoc_insertion_point(field_mutable:tensorflow.DebugEvent.graph_id)
  return _s;
}
inline const std::string& DebugEvent::_internal_graph_id() const {
  if (_internal_has_graph_id()) {
    return _impl_.what_.graph_id_.Get();
  }
  return ::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited();
}
inline void DebugEvent::_internal_set_graph_id(const std::string& value) {
  if (!_internal_has_graph_id()) {
    clear_what();
    set_has_graph_id();
    _impl_.what_.graph_id_.InitDefault();
  }
  _impl_.what_.graph_id_.Set(value, GetArenaForAllocation());
}
inline std::string* DebugEvent::_internal_mutable_graph_id() {
  if (!_internal_has_graph_id()) {
    clear_what();
    set_has_graph_id();
    _impl_.what_.graph_id_.InitDefault();
  }
  return _impl_.what_.graph_id_.Mutable(GetArenaForAllocation());
}
inline std::string* DebugEvent::release_graph_id() {
  // @@protoc_insertion_point(field_release:tensorflow.DebugEvent.graph_id)
  if (_internal_has_graph_id()) {
    clear_has_what();
    return _impl_.what_.graph_id_.Release();
  } else {
    return nullptr;
  }
}
inline void DebugEvent::set_allocated_graph_id(std::string* graph_id) {
  if (has_what()) {
    clear_what();
  }
  if (graph_id != nullptr) {
    set_has_graph_id();
    _impl_.what_.graph_id_.InitAllocated(graph_id, GetArenaForAllocation());
  }
  // @@protoc_insertion_point(field_set_allocated:tensorflow.DebugEvent.graph_id)
}

// .tensorflow.DebuggedDevice debugged_device = 12;
inline bool DebugEvent::_internal_has_debugged_device() const {
  return what_case() == kDebuggedDevice;
}
inline bool DebugEvent::has_debugged_device() const {
  return _internal_has_debugged_device();
}
inline void DebugEvent::set_has_debugged_device() {
  _impl_._oneof_case_[0] = kDebuggedDevice;
}
inline void DebugEvent::clear_debugged_device() {
  if (_internal_has_debugged_device()) {
    if (GetArenaForAllocation() == nullptr) {
      delete _impl_.what_.debugged_device_;
    }
    clear_has_what();
  }
}
inline ::tensorflow::DebuggedDevice* DebugEvent::release_debugged_device() {
  // @@protoc_insertion_point(field_release:tensorflow.DebugEvent.debugged_device)
  if (_internal_has_debugged_device()) {
    clear_has_what();
    ::tensorflow::DebuggedDevice* temp = _impl_.what_.debugged_device_;
    if (GetArenaForAllocation() != nullptr) {
      temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
    }
    _impl_.what_.debugged_device_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
inline const ::tensorflow::DebuggedDevice& DebugEvent::_internal_debugged_device() const {
  return _internal_has_debugged_device()
      ? *_impl_.what_.debugged_device_
      : reinterpret_cast< ::tensorflow::DebuggedDevice&>(::tensorflow::_DebuggedDevice_default_instance_);
}
inline const ::tensorflow::DebuggedDevice& DebugEvent::debugged_device() const {
  // @@protoc_insertion_point(field_get:tensorflow.DebugEvent.debugged_device)
  return _internal_debugged_device();
}
inline ::tensorflow::DebuggedDevice* DebugEvent::unsafe_arena_release_debugged_device() {
  // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.DebugEvent.debugged_device)
  if (_internal_has_debugged_device()) {
    clear_has_what();
    ::tensorflow::DebuggedDevice* temp = _impl_.what_.debugged_device_;
    _impl_.what_.debugged_device_ = nullptr;
    return temp;
  } else {
    return nullptr;
  }
}
inline void DebugEvent::unsafe_arena_set_allocated_debugged_device(::tensorflow::DebuggedDevice* debugged_device) {
  clear_what();
  if (debugged_device) {
    set_has_debugged_device();
    _impl_.what_.debugged_device_ = debugged_device;
  }
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.DebugEvent.debugged_device)
}
inline ::tensorflow::DebuggedDevice* DebugEvent::_internal_mutable_debugged_device() {
  if (!_internal_has_debugged_device()) {
    clear_what();
    set_has_debugged_device();
    _impl_.what_.debugged_device_ = CreateMaybeMessage< ::tensorflow::DebuggedDevice >(GetArenaForAllocation());
  }
  return _impl_.what_.debugged_device_;
}
inline ::tensorflow::DebuggedDevice* DebugEvent::mutable_debugged_device() {
  ::tensorflow::DebuggedDevice* _msg = _internal_mutable_debugged_device();
  // @@protoc_insertion_point(field_mutable:tensorflow.DebugEvent.debugged_device)
  return _msg;
}

inline bool DebugEvent::has_what() const {
  return what_case() != WHAT_NOT_SET;
}
inline void DebugEvent::clear_has_what() {
  _impl_._oneof_case_[0] = WHAT_NOT_SET;
}
inline DebugEvent::WhatCase DebugEvent::what_case() const {
  return DebugEvent::WhatCase(_impl_._oneof_case_[0]);
}
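// Illustrative usage sketch (hand-written, not emitted by protoc): the `what`
// oneof holds at most one of the members declared above, and setting or
// mutating one member clears whichever member was previously set.
//
//   tensorflow::DebugEvent event;
//   event.set_graph_id("g_1");        // what_case() == DebugEvent::kGraphId
//   event.mutable_execution();        // replaces graph_id; what_case() == kExecution
//   if (event.has_execution()) {
//     // ... populate the Execution payload ...
//   }
//   event.clear_execution();          // what_case() == DebugEvent::WHAT_NOT_SET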
// -------------------------------------------------------------------

// DebugMetadata

// string tensorflow_version = 1;
inline void DebugMetadata::clear_tensorflow_version() {
  _impl_.tensorflow_version_.ClearToEmpty();
}
inline const std::string& DebugMetadata::tensorflow_version() const {
  // @@protoc_insertion_point(field_get:tensorflow.DebugMetadata.tensorflow_version)
  return _internal_tensorflow_version();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void DebugMetadata::set_tensorflow_version(ArgT0&& arg0, ArgT... args) {
  _impl_.tensorflow_version_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.DebugMetadata.tensorflow_version)
}
inline std::string* DebugMetadata::mutable_tensorflow_version() {
  std::string* _s = _internal_mutable_tensorflow_version();
  // @@protoc_insertion_point(field_mutable:tensorflow.DebugMetadata.tensorflow_version)
  return _s;
}
inline const std::string& DebugMetadata::_internal_tensorflow_version() const {
  return _impl_.tensorflow_version_.Get();
}
inline void DebugMetadata::_internal_set_tensorflow_version(const std::string& value) {
  _impl_.tensorflow_version_.Set(value, GetArenaForAllocation());
}
inline std::string* DebugMetadata::_internal_mutable_tensorflow_version() {
  return _impl_.tensorflow_version_.Mutable(GetArenaForAllocation());
}
inline std::string* DebugMetadata::release_tensorflow_version() {
  // @@protoc_insertion_point(field_release:tensorflow.DebugMetadata.tensorflow_version)
  return _impl_.tensorflow_version_.Release();
}
inline void DebugMetadata::set_allocated_tensorflow_version(std::string* tensorflow_version) {
  _impl_.tensorflow_version_.SetAllocated(tensorflow_version, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.tensorflow_version_.IsDefault()) {
    _impl_.tensorflow_version_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.DebugMetadata.tensorflow_version)
}

// string file_version = 2;
inline void DebugMetadata::clear_file_version() {
  _impl_.file_version_.ClearToEmpty();
}
inline const std::string& DebugMetadata::file_version() const {
  // @@protoc_insertion_point(field_get:tensorflow.DebugMetadata.file_version)
  return _internal_file_version();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void DebugMetadata::set_file_version(ArgT0&& arg0, ArgT... args) {
  _impl_.file_version_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.DebugMetadata.file_version)
}
inline std::string* DebugMetadata::mutable_file_version() {
  std::string* _s = _internal_mutable_file_version();
  // @@protoc_insertion_point(field_mutable:tensorflow.DebugMetadata.file_version)
  return _s;
}
inline const std::string& DebugMetadata::_internal_file_version() const {
  return _impl_.file_version_.Get();
}
inline void DebugMetadata::_internal_set_file_version(const std::string& value) {
  _impl_.file_version_.Set(value, GetArenaForAllocation());
}
inline std::string* DebugMetadata::_internal_mutable_file_version() {
  return _impl_.file_version_.Mutable(GetArenaForAllocation());
}
inline std::string* DebugMetadata::release_file_version() {
  // @@protoc_insertion_point(field_release:tensorflow.DebugMetadata.file_version)
  return _impl_.file_version_.Release();
}
inline void DebugMetadata::set_allocated_file_version(std::string* file_version) {
  _impl_.file_version_.SetAllocated(file_version, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.file_version_.IsDefault()) {
    _impl_.file_version_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.DebugMetadata.file_version)
}

// string tfdbg_run_id = 3;
inline void DebugMetadata::clear_tfdbg_run_id() {
  _impl_.tfdbg_run_id_.ClearToEmpty();
}
inline const std::string& DebugMetadata::tfdbg_run_id() const {
  // @@protoc_insertion_point(field_get:tensorflow.DebugMetadata.tfdbg_run_id)
  return _internal_tfdbg_run_id();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void DebugMetadata::set_tfdbg_run_id(ArgT0&& arg0, ArgT... args) {
  _impl_.tfdbg_run_id_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.DebugMetadata.tfdbg_run_id)
}
inline std::string* DebugMetadata::mutable_tfdbg_run_id() {
  std::string* _s = _internal_mutable_tfdbg_run_id();
  // @@protoc_insertion_point(field_mutable:tensorflow.DebugMetadata.tfdbg_run_id)
  return _s;
}
inline const std::string& DebugMetadata::_internal_tfdbg_run_id() const {
  return _impl_.tfdbg_run_id_.Get();
}
inline void DebugMetadata::_internal_set_tfdbg_run_id(const std::string& value) {
  _impl_.tfdbg_run_id_.Set(value, GetArenaForAllocation());
}
inline std::string* DebugMetadata::_internal_mutable_tfdbg_run_id() {
  return _impl_.tfdbg_run_id_.Mutable(GetArenaForAllocation());
}
inline std::string* DebugMetadata::release_tfdbg_run_id() {
  // @@protoc_insertion_point(field_release:tensorflow.DebugMetadata.tfdbg_run_id)
  return _impl_.tfdbg_run_id_.Release();
}
inline void DebugMetadata::set_allocated_tfdbg_run_id(std::string* tfdbg_run_id) {
  _impl_.tfdbg_run_id_.SetAllocated(tfdbg_run_id, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.tfdbg_run_id_.IsDefault()) {
    _impl_.tfdbg_run_id_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.DebugMetadata.tfdbg_run_id)
}
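// Illustrative usage sketch (hand-written, not emitted by protoc): DebugMetadata
// carries three singular strings, so only the standard string accessors apply.
// The values below are placeholders.
//
//   tensorflow::DebugMetadata metadata;
//   metadata.set_tensorflow_version("2.x.y");
//   metadata.set_file_version("debug_event_vX");
//   metadata.set_tfdbg_run_id("run_0");
//   const std::string& run_id = metadata.tfdbg_run_id();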

// -------------------------------------------------------------------

// SourceFile

// string file_path = 1;
inline void SourceFile::clear_file_path() {
  _impl_.file_path_.ClearToEmpty();
}
inline const std::string& SourceFile::file_path() const {
  // @@protoc_insertion_point(field_get:tensorflow.SourceFile.file_path)
  return _internal_file_path();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void SourceFile::set_file_path(ArgT0&& arg0, ArgT... args) {
  _impl_.file_path_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.SourceFile.file_path)
}
inline std::string* SourceFile::mutable_file_path() {
  std::string* _s = _internal_mutable_file_path();
  // @@protoc_insertion_point(field_mutable:tensorflow.SourceFile.file_path)
  return _s;
}
inline const std::string& SourceFile::_internal_file_path() const {
  return _impl_.file_path_.Get();
}
inline void SourceFile::_internal_set_file_path(const std::string& value) {
  _impl_.file_path_.Set(value, GetArenaForAllocation());
}
inline std::string* SourceFile::_internal_mutable_file_path() {
  return _impl_.file_path_.Mutable(GetArenaForAllocation());
}
inline std::string* SourceFile::release_file_path() {
  // @@protoc_insertion_point(field_release:tensorflow.SourceFile.file_path)
  return _impl_.file_path_.Release();
}
inline void SourceFile::set_allocated_file_path(std::string* file_path) {
  _impl_.file_path_.SetAllocated(file_path, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.file_path_.IsDefault()) {
    _impl_.file_path_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.SourceFile.file_path)
}

// string host_name = 2;
inline void SourceFile::clear_host_name() {
  _impl_.host_name_.ClearToEmpty();
}
inline const std::string& SourceFile::host_name() const {
  // @@protoc_insertion_point(field_get:tensorflow.SourceFile.host_name)
  return _internal_host_name();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void SourceFile::set_host_name(ArgT0&& arg0, ArgT... args) {
  _impl_.host_name_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.SourceFile.host_name)
}
inline std::string* SourceFile::mutable_host_name() {
  std::string* _s = _internal_mutable_host_name();
  // @@protoc_insertion_point(field_mutable:tensorflow.SourceFile.host_name)
  return _s;
}
inline const std::string& SourceFile::_internal_host_name() const {
  return _impl_.host_name_.Get();
}
inline void SourceFile::_internal_set_host_name(const std::string& value) {
  _impl_.host_name_.Set(value, GetArenaForAllocation());
}
inline std::string* SourceFile::_internal_mutable_host_name() {
  return _impl_.host_name_.Mutable(GetArenaForAllocation());
}
inline std::string* SourceFile::release_host_name() {
  // @@protoc_insertion_point(field_release:tensorflow.SourceFile.host_name)
  return _impl_.host_name_.Release();
}
inline void SourceFile::set_allocated_host_name(std::string* host_name) {
  _impl_.host_name_.SetAllocated(host_name, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.host_name_.IsDefault()) {
    _impl_.host_name_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.SourceFile.host_name)
}

// repeated string lines = 3;
inline int SourceFile::_internal_lines_size() const {
  return _impl_.lines_.size();
}
inline int SourceFile::lines_size() const {
  return _internal_lines_size();
}
inline void SourceFile::clear_lines() {
  _impl_.lines_.Clear();
}
inline std::string* SourceFile::add_lines() {
  std::string* _s = _internal_add_lines();
  // @@protoc_insertion_point(field_add_mutable:tensorflow.SourceFile.lines)
  return _s;
}
inline const std::string& SourceFile::_internal_lines(int index) const {
  return _impl_.lines_.Get(index);
}
inline const std::string& SourceFile::lines(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.SourceFile.lines)
  return _internal_lines(index);
}
inline std::string* SourceFile::mutable_lines(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.SourceFile.lines)
  return _impl_.lines_.Mutable(index);
}
inline void SourceFile::set_lines(int index, const std::string& value) {
  _impl_.lines_.Mutable(index)->assign(value);
  // @@protoc_insertion_point(field_set:tensorflow.SourceFile.lines)
}
inline void SourceFile::set_lines(int index, std::string&& value) {
  _impl_.lines_.Mutable(index)->assign(std::move(value));
  // @@protoc_insertion_point(field_set:tensorflow.SourceFile.lines)
}
inline void SourceFile::set_lines(int index, const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  _impl_.lines_.Mutable(index)->assign(value);
  // @@protoc_insertion_point(field_set_char:tensorflow.SourceFile.lines)
}
inline void SourceFile::set_lines(int index, const char* value, size_t size) {
  _impl_.lines_.Mutable(index)->assign(
    reinterpret_cast<const char*>(value), size);
  // @@protoc_insertion_point(field_set_pointer:tensorflow.SourceFile.lines)
}
inline std::string* SourceFile::_internal_add_lines() {
  return _impl_.lines_.Add();
}
inline void SourceFile::add_lines(const std::string& value) {
  _impl_.lines_.Add()->assign(value);
  // @@protoc_insertion_point(field_add:tensorflow.SourceFile.lines)
}
inline void SourceFile::add_lines(std::string&& value) {
  _impl_.lines_.Add(std::move(value));
  // @@protoc_insertion_point(field_add:tensorflow.SourceFile.lines)
}
inline void SourceFile::add_lines(const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  _impl_.lines_.Add()->assign(value);
  // @@protoc_insertion_point(field_add_char:tensorflow.SourceFile.lines)
}
inline void SourceFile::add_lines(const char* value, size_t size) {
  _impl_.lines_.Add()->assign(reinterpret_cast<const char*>(value), size);
  // @@protoc_insertion_point(field_add_pointer:tensorflow.SourceFile.lines)
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>&
SourceFile::lines() const {
  // @@protoc_insertion_point(field_list:tensorflow.SourceFile.lines)
  return _impl_.lines_;
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>*
SourceFile::mutable_lines() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.SourceFile.lines)
  return &_impl_.lines_;
}
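// Illustrative usage sketch (hand-written, not emitted by protoc): `lines` is a
// repeated string, so it exposes add/size/index accessors plus the
// RepeatedPtrField view returned by lines() and mutable_lines(). The file path
// and host below are placeholders.
//
//   tensorflow::SourceFile source;
//   source.set_file_path("/tmp/example.py");
//   source.set_host_name("localhost");
//   source.add_lines("import tensorflow as tf");
//   source.add_lines("print(tf.__version__)");
//   for (const std::string& line : source.lines()) {
//     // lines are kept in insertion order
//   }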

// -------------------------------------------------------------------

// StackFrameWithId

// string id = 1;
inline void StackFrameWithId::clear_id() {
  _impl_.id_.ClearToEmpty();
}
inline const std::string& StackFrameWithId::id() const {
  // @@protoc_insertion_point(field_get:tensorflow.StackFrameWithId.id)
  return _internal_id();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void StackFrameWithId::set_id(ArgT0&& arg0, ArgT... args) {
  _impl_.id_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.StackFrameWithId.id)
}
inline std::string* StackFrameWithId::mutable_id() {
  std::string* _s = _internal_mutable_id();
  // @@protoc_insertion_point(field_mutable:tensorflow.StackFrameWithId.id)
  return _s;
}
inline const std::string& StackFrameWithId::_internal_id() const {
  return _impl_.id_.Get();
}
inline void StackFrameWithId::_internal_set_id(const std::string& value) {
  _impl_.id_.Set(value, GetArenaForAllocation());
}
inline std::string* StackFrameWithId::_internal_mutable_id() {
  return _impl_.id_.Mutable(GetArenaForAllocation());
}
inline std::string* StackFrameWithId::release_id() {
  // @@protoc_insertion_point(field_release:tensorflow.StackFrameWithId.id)
  return _impl_.id_.Release();
}
inline void StackFrameWithId::set_allocated_id(std::string* id) {
  _impl_.id_.SetAllocated(id, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.id_.IsDefault()) {
    _impl_.id_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.StackFrameWithId.id)
}

// .tensorflow.GraphDebugInfo.FileLineCol file_line_col = 2;
inline bool StackFrameWithId::_internal_has_file_line_col() const {
  return this != internal_default_instance() && _impl_.file_line_col_ != nullptr;
}
inline bool StackFrameWithId::has_file_line_col() const {
  return _internal_has_file_line_col();
}
inline const ::tensorflow::GraphDebugInfo_FileLineCol& StackFrameWithId::_internal_file_line_col() const {
  const ::tensorflow::GraphDebugInfo_FileLineCol* p = _impl_.file_line_col_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::GraphDebugInfo_FileLineCol&>(
      ::tensorflow::_GraphDebugInfo_FileLineCol_default_instance_);
}
inline const ::tensorflow::GraphDebugInfo_FileLineCol& StackFrameWithId::file_line_col() const {
  // @@protoc_insertion_point(field_get:tensorflow.StackFrameWithId.file_line_col)
  return _internal_file_line_col();
}
inline void StackFrameWithId::unsafe_arena_set_allocated_file_line_col(
    ::tensorflow::GraphDebugInfo_FileLineCol* file_line_col) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.file_line_col_);
  }
  _impl_.file_line_col_ = file_line_col;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.StackFrameWithId.file_line_col)
}
inline ::tensorflow::GraphDebugInfo_FileLineCol* StackFrameWithId::release_file_line_col() {
  ::tensorflow::GraphDebugInfo_FileLineCol* temp = _impl_.file_line_col_;
  _impl_.file_line_col_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old = reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else  // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
inline ::tensorflow::GraphDebugInfo_FileLineCol* StackFrameWithId::unsafe_arena_release_file_line_col() {
  // @@protoc_insertion_point(field_release:tensorflow.StackFrameWithId.file_line_col)
  ::tensorflow::GraphDebugInfo_FileLineCol* temp = _impl_.file_line_col_;
  _impl_.file_line_col_ = nullptr;
  return temp;
}
inline ::tensorflow::GraphDebugInfo_FileLineCol* StackFrameWithId::_internal_mutable_file_line_col() {
  if (_impl_.file_line_col_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::GraphDebugInfo_FileLineCol>(GetArenaForAllocation());
    _impl_.file_line_col_ = p;
  }
  return _impl_.file_line_col_;
}
inline ::tensorflow::GraphDebugInfo_FileLineCol* StackFrameWithId::mutable_file_line_col() {
  ::tensorflow::GraphDebugInfo_FileLineCol* _msg = _internal_mutable_file_line_col();
  // @@protoc_insertion_point(field_mutable:tensorflow.StackFrameWithId.file_line_col)
  return _msg;
}
inline void StackFrameWithId::set_allocated_file_line_col(::tensorflow::GraphDebugInfo_FileLineCol* file_line_col) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.file_line_col_);
  }
  if (file_line_col) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(
                reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(file_line_col));
    if (message_arena != submessage_arena) {
      file_line_col = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, file_line_col, submessage_arena);
    }
  }
  _impl_.file_line_col_ = file_line_col;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.StackFrameWithId.file_line_col)
}
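// Illustrative usage sketch (hand-written, not emitted by protoc):
// set_allocated_file_line_col() takes ownership of a heap-allocated submessage
// (copying it into the owning arena when the arenas differ, as GetOwnedMessage
// does above), whereas mutable_file_line_col() lazily creates the submessage in
// the message's own allocation domain.
//
//   tensorflow::StackFrameWithId frame;
//   frame.set_id("frame_0");   // placeholder id
//   frame.set_allocated_file_line_col(
//       new tensorflow::GraphDebugInfo_FileLineCol());   // frame now owns it
//   if (frame.has_file_line_col()) {
//     const auto& loc = frame.file_line_col();
//     (void)loc;
//   }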

// -------------------------------------------------------------------

// CodeLocation

// string host_name = 1;
inline void CodeLocation::clear_host_name() {
  _impl_.host_name_.ClearToEmpty();
}
inline const std::string& CodeLocation::host_name() const {
  // @@protoc_insertion_point(field_get:tensorflow.CodeLocation.host_name)
  return _internal_host_name();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void CodeLocation::set_host_name(ArgT0&& arg0, ArgT... args) {
  _impl_.host_name_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.CodeLocation.host_name)
}
inline std::string* CodeLocation::mutable_host_name() {
  std::string* _s = _internal_mutable_host_name();
  // @@protoc_insertion_point(field_mutable:tensorflow.CodeLocation.host_name)
  return _s;
}
inline const std::string& CodeLocation::_internal_host_name() const {
  return _impl_.host_name_.Get();
}
inline void CodeLocation::_internal_set_host_name(const std::string& value) {
  _impl_.host_name_.Set(value, GetArenaForAllocation());
}
inline std::string* CodeLocation::_internal_mutable_host_name() {
  return _impl_.host_name_.Mutable(GetArenaForAllocation());
}
inline std::string* CodeLocation::release_host_name() {
  // @@protoc_insertion_point(field_release:tensorflow.CodeLocation.host_name)
  return _impl_.host_name_.Release();
}
inline void CodeLocation::set_allocated_host_name(std::string* host_name) {
  _impl_.host_name_.SetAllocated(host_name, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.host_name_.IsDefault()) {
    _impl_.host_name_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.CodeLocation.host_name)
}

// repeated string stack_frame_ids = 2;
inline int CodeLocation::_internal_stack_frame_ids_size() const {
  return _impl_.stack_frame_ids_.size();
}
inline int CodeLocation::stack_frame_ids_size() const {
  return _internal_stack_frame_ids_size();
}
inline void CodeLocation::clear_stack_frame_ids() {
  _impl_.stack_frame_ids_.Clear();
}
inline std::string* CodeLocation::add_stack_frame_ids() {
  std::string* _s = _internal_add_stack_frame_ids();
  // @@protoc_insertion_point(field_add_mutable:tensorflow.CodeLocation.stack_frame_ids)
  return _s;
}
inline const std::string& CodeLocation::_internal_stack_frame_ids(int index) const {
  return _impl_.stack_frame_ids_.Get(index);
}
inline const std::string& CodeLocation::stack_frame_ids(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.CodeLocation.stack_frame_ids)
  return _internal_stack_frame_ids(index);
}
inline std::string* CodeLocation::mutable_stack_frame_ids(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.CodeLocation.stack_frame_ids)
  return _impl_.stack_frame_ids_.Mutable(index);
}
inline void CodeLocation::set_stack_frame_ids(int index, const std::string& value) {
  _impl_.stack_frame_ids_.Mutable(index)->assign(value);
  // @@protoc_insertion_point(field_set:tensorflow.CodeLocation.stack_frame_ids)
}
inline void CodeLocation::set_stack_frame_ids(int index, std::string&& value) {
  _impl_.stack_frame_ids_.Mutable(index)->assign(std::move(value));
  // @@protoc_insertion_point(field_set:tensorflow.CodeLocation.stack_frame_ids)
}
inline void CodeLocation::set_stack_frame_ids(int index, const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  _impl_.stack_frame_ids_.Mutable(index)->assign(value);
  // @@protoc_insertion_point(field_set_char:tensorflow.CodeLocation.stack_frame_ids)
}
inline void CodeLocation::set_stack_frame_ids(int index, const char* value, size_t size) {
  _impl_.stack_frame_ids_.Mutable(index)->assign(
    reinterpret_cast<const char*>(value), size);
  // @@protoc_insertion_point(field_set_pointer:tensorflow.CodeLocation.stack_frame_ids)
}
inline std::string* CodeLocation::_internal_add_stack_frame_ids() {
  return _impl_.stack_frame_ids_.Add();
}
inline void CodeLocation::add_stack_frame_ids(const std::string& value) {
  _impl_.stack_frame_ids_.Add()->assign(value);
  // @@protoc_insertion_point(field_add:tensorflow.CodeLocation.stack_frame_ids)
}
inline void CodeLocation::add_stack_frame_ids(std::string&& value) {
  _impl_.stack_frame_ids_.Add(std::move(value));
  // @@protoc_insertion_point(field_add:tensorflow.CodeLocation.stack_frame_ids)
}
inline void CodeLocation::add_stack_frame_ids(const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  _impl_.stack_frame_ids_.Add()->assign(value);
  // @@protoc_insertion_point(field_add_char:tensorflow.CodeLocation.stack_frame_ids)
}
inline void CodeLocation::add_stack_frame_ids(const char* value, size_t size) {
  _impl_.stack_frame_ids_.Add()->assign(reinterpret_cast<const char*>(value), size);
  // @@protoc_insertion_point(field_add_pointer:tensorflow.CodeLocation.stack_frame_ids)
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>&
CodeLocation::stack_frame_ids() const {
  // @@protoc_insertion_point(field_list:tensorflow.CodeLocation.stack_frame_ids)
  return _impl_.stack_frame_ids_;
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>*
CodeLocation::mutable_stack_frame_ids() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.CodeLocation.stack_frame_ids)
  return &_impl_.stack_frame_ids_;
}
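// Illustrative usage sketch (hand-written, not emitted by protoc): a
// CodeLocation pairs a host name with the ordered ids of the stack frames that
// form a call stack; the ids are plain strings that would typically match
// StackFrameWithId.id values recorded elsewhere. Placeholder values below.
//
//   tensorflow::CodeLocation location;
//   location.set_host_name("localhost");
//   location.add_stack_frame_ids("frame_0");
//   location.add_stack_frame_ids("frame_1");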

// -------------------------------------------------------------------

// GraphOpCreation

// string op_type = 1;
inline void GraphOpCreation::clear_op_type() {
  _impl_.op_type_.ClearToEmpty();
}
inline const std::string& GraphOpCreation::op_type() const {
  // @@protoc_insertion_point(field_get:tensorflow.GraphOpCreation.op_type)
  return _internal_op_type();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void GraphOpCreation::set_op_type(ArgT0&& arg0, ArgT... args) {
  _impl_.op_type_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.GraphOpCreation.op_type)
}
inline std::string* GraphOpCreation::mutable_op_type() {
  std::string* _s = _internal_mutable_op_type();
  // @@protoc_insertion_point(field_mutable:tensorflow.GraphOpCreation.op_type)
  return _s;
}
inline const std::string& GraphOpCreation::_internal_op_type() const {
  return _impl_.op_type_.Get();
}
inline void GraphOpCreation::_internal_set_op_type(const std::string& value) {
  _impl_.op_type_.Set(value, GetArenaForAllocation());
}
inline std::string* GraphOpCreation::_internal_mutable_op_type() {
  return _impl_.op_type_.Mutable(GetArenaForAllocation());
}
inline std::string* GraphOpCreation::release_op_type() {
  // @@protoc_insertion_point(field_release:tensorflow.GraphOpCreation.op_type)
  return _impl_.op_type_.Release();
}
inline void GraphOpCreation::set_allocated_op_type(std::string* op_type) {
  _impl_.op_type_.SetAllocated(op_type, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.op_type_.IsDefault()) {
    _impl_.op_type_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.GraphOpCreation.op_type)
}

// string op_name = 2;
inline void GraphOpCreation::clear_op_name() {
  _impl_.op_name_.ClearToEmpty();
}
inline const std::string& GraphOpCreation::op_name() const {
  // @@protoc_insertion_point(field_get:tensorflow.GraphOpCreation.op_name)
  return _internal_op_name();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void GraphOpCreation::set_op_name(ArgT0&& arg0, ArgT... args) {
  _impl_.op_name_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.GraphOpCreation.op_name)
}
inline std::string* GraphOpCreation::mutable_op_name() {
  std::string* _s = _internal_mutable_op_name();
  // @@protoc_insertion_point(field_mutable:tensorflow.GraphOpCreation.op_name)
  return _s;
}
inline const std::string& GraphOpCreation::_internal_op_name() const {
  return _impl_.op_name_.Get();
}
inline void GraphOpCreation::_internal_set_op_name(const std::string& value) {
  _impl_.op_name_.Set(value, GetArenaForAllocation());
}
inline std::string* GraphOpCreation::_internal_mutable_op_name() {
  return _impl_.op_name_.Mutable(GetArenaForAllocation());
}
inline std::string* GraphOpCreation::release_op_name() {
  // @@protoc_insertion_point(field_release:tensorflow.GraphOpCreation.op_name)
  return _impl_.op_name_.Release();
}
inline void GraphOpCreation::set_allocated_op_name(std::string* op_name) {
  _impl_.op_name_.SetAllocated(op_name, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.op_name_.IsDefault()) {
    _impl_.op_name_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.GraphOpCreation.op_name)
}

// string graph_name = 3;
inline void GraphOpCreation::clear_graph_name() {
  _impl_.graph_name_.ClearToEmpty();
}
inline const std::string& GraphOpCreation::graph_name() const {
  // @@protoc_insertion_point(field_get:tensorflow.GraphOpCreation.graph_name)
  return _internal_graph_name();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void GraphOpCreation::set_graph_name(ArgT0&& arg0, ArgT... args) {
  _impl_.graph_name_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.GraphOpCreation.graph_name)
}
inline std::string* GraphOpCreation::mutable_graph_name() {
  std::string* _s = _internal_mutable_graph_name();
  // @@protoc_insertion_point(field_mutable:tensorflow.GraphOpCreation.graph_name)
  return _s;
}
inline const std::string& GraphOpCreation::_internal_graph_name() const {
  return _impl_.graph_name_.Get();
}
inline void GraphOpCreation::_internal_set_graph_name(const std::string& value) {
  _impl_.graph_name_.Set(value, GetArenaForAllocation());
}
inline std::string* GraphOpCreation::_internal_mutable_graph_name() {
  return _impl_.graph_name_.Mutable(GetArenaForAllocation());
}
inline std::string* GraphOpCreation::release_graph_name() {
  // @@protoc_insertion_point(field_release:tensorflow.GraphOpCreation.graph_name)
  return _impl_.graph_name_.Release();
}
inline void GraphOpCreation::set_allocated_graph_name(std::string* graph_name) {
  _impl_.graph_name_.SetAllocated(graph_name, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.graph_name_.IsDefault()) {
    _impl_.graph_name_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.GraphOpCreation.graph_name)
}

3754 // string graph_id = 4;
clear_graph_id()3755 inline void GraphOpCreation::clear_graph_id() {
3756   _impl_.graph_id_.ClearToEmpty();
3757 }
graph_id()3758 inline const std::string& GraphOpCreation::graph_id() const {
3759   // @@protoc_insertion_point(field_get:tensorflow.GraphOpCreation.graph_id)
3760   return _internal_graph_id();
3761 }
3762 template <typename ArgT0, typename... ArgT>
3763 inline PROTOBUF_ALWAYS_INLINE
set_graph_id(ArgT0 && arg0,ArgT...args)3764 void GraphOpCreation::set_graph_id(ArgT0&& arg0, ArgT... args) {
3765 
3766  _impl_.graph_id_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
3767   // @@protoc_insertion_point(field_set:tensorflow.GraphOpCreation.graph_id)
3768 }
mutable_graph_id()3769 inline std::string* GraphOpCreation::mutable_graph_id() {
3770   std::string* _s = _internal_mutable_graph_id();
3771   // @@protoc_insertion_point(field_mutable:tensorflow.GraphOpCreation.graph_id)
3772   return _s;
3773 }
_internal_graph_id()3774 inline const std::string& GraphOpCreation::_internal_graph_id() const {
3775   return _impl_.graph_id_.Get();
3776 }
_internal_set_graph_id(const std::string & value)3777 inline void GraphOpCreation::_internal_set_graph_id(const std::string& value) {
3778 
3779   _impl_.graph_id_.Set(value, GetArenaForAllocation());
3780 }
_internal_mutable_graph_id()3781 inline std::string* GraphOpCreation::_internal_mutable_graph_id() {
3782 
3783   return _impl_.graph_id_.Mutable(GetArenaForAllocation());
3784 }
release_graph_id()3785 inline std::string* GraphOpCreation::release_graph_id() {
3786   // @@protoc_insertion_point(field_release:tensorflow.GraphOpCreation.graph_id)
3787   return _impl_.graph_id_.Release();
3788 }
set_allocated_graph_id(std::string * graph_id)3789 inline void GraphOpCreation::set_allocated_graph_id(std::string* graph_id) {
3790   _impl_.graph_id_.SetAllocated(graph_id, GetArenaForAllocation());
3791 #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
3792   if (_impl_.graph_id_.IsDefault()) {
3793     _impl_.graph_id_.Set("", GetArenaForAllocation());
3794   }
3795 #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
3796   // @@protoc_insertion_point(field_set_allocated:tensorflow.GraphOpCreation.graph_id)
3797 }
3798 
3799 // string device_name = 5;
clear_device_name()3800 inline void GraphOpCreation::clear_device_name() {
3801   _impl_.device_name_.ClearToEmpty();
3802 }
device_name()3803 inline const std::string& GraphOpCreation::device_name() const {
3804   // @@protoc_insertion_point(field_get:tensorflow.GraphOpCreation.device_name)
3805   return _internal_device_name();
3806 }
3807 template <typename ArgT0, typename... ArgT>
3808 inline PROTOBUF_ALWAYS_INLINE
set_device_name(ArgT0 && arg0,ArgT...args)3809 void GraphOpCreation::set_device_name(ArgT0&& arg0, ArgT... args) {
3810 
3811  _impl_.device_name_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
3812   // @@protoc_insertion_point(field_set:tensorflow.GraphOpCreation.device_name)
3813 }
mutable_device_name()3814 inline std::string* GraphOpCreation::mutable_device_name() {
3815   std::string* _s = _internal_mutable_device_name();
3816   // @@protoc_insertion_point(field_mutable:tensorflow.GraphOpCreation.device_name)
3817   return _s;
3818 }
_internal_device_name()3819 inline const std::string& GraphOpCreation::_internal_device_name() const {
3820   return _impl_.device_name_.Get();
3821 }
_internal_set_device_name(const std::string & value)3822 inline void GraphOpCreation::_internal_set_device_name(const std::string& value) {
3823 
3824   _impl_.device_name_.Set(value, GetArenaForAllocation());
3825 }
_internal_mutable_device_name()3826 inline std::string* GraphOpCreation::_internal_mutable_device_name() {
3827 
3828   return _impl_.device_name_.Mutable(GetArenaForAllocation());
3829 }
release_device_name()3830 inline std::string* GraphOpCreation::release_device_name() {
3831   // @@protoc_insertion_point(field_release:tensorflow.GraphOpCreation.device_name)
3832   return _impl_.device_name_.Release();
3833 }
set_allocated_device_name(std::string * device_name)3834 inline void GraphOpCreation::set_allocated_device_name(std::string* device_name) {
3835   _impl_.device_name_.SetAllocated(device_name, GetArenaForAllocation());
3836 #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
3837   if (_impl_.device_name_.IsDefault()) {
3838     _impl_.device_name_.Set("", GetArenaForAllocation());
3839   }
3840 #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
3841   // @@protoc_insertion_point(field_set_allocated:tensorflow.GraphOpCreation.device_name)
3842 }
3843 
3844 // repeated string input_names = 6;
_internal_input_names_size()3845 inline int GraphOpCreation::_internal_input_names_size() const {
3846   return _impl_.input_names_.size();
3847 }
input_names_size()3848 inline int GraphOpCreation::input_names_size() const {
3849   return _internal_input_names_size();
3850 }
clear_input_names()3851 inline void GraphOpCreation::clear_input_names() {
3852   _impl_.input_names_.Clear();
3853 }
add_input_names()3854 inline std::string* GraphOpCreation::add_input_names() {
3855   std::string* _s = _internal_add_input_names();
3856   // @@protoc_insertion_point(field_add_mutable:tensorflow.GraphOpCreation.input_names)
3857   return _s;
3858 }
_internal_input_names(int index)3859 inline const std::string& GraphOpCreation::_internal_input_names(int index) const {
3860   return _impl_.input_names_.Get(index);
3861 }
input_names(int index)3862 inline const std::string& GraphOpCreation::input_names(int index) const {
3863   // @@protoc_insertion_point(field_get:tensorflow.GraphOpCreation.input_names)
3864   return _internal_input_names(index);
3865 }
mutable_input_names(int index)3866 inline std::string* GraphOpCreation::mutable_input_names(int index) {
3867   // @@protoc_insertion_point(field_mutable:tensorflow.GraphOpCreation.input_names)
3868   return _impl_.input_names_.Mutable(index);
3869 }
set_input_names(int index,const std::string & value)3870 inline void GraphOpCreation::set_input_names(int index, const std::string& value) {
3871   _impl_.input_names_.Mutable(index)->assign(value);
3872   // @@protoc_insertion_point(field_set:tensorflow.GraphOpCreation.input_names)
3873 }
set_input_names(int index,std::string && value)3874 inline void GraphOpCreation::set_input_names(int index, std::string&& value) {
3875   _impl_.input_names_.Mutable(index)->assign(std::move(value));
3876   // @@protoc_insertion_point(field_set:tensorflow.GraphOpCreation.input_names)
3877 }
set_input_names(int index,const char * value)3878 inline void GraphOpCreation::set_input_names(int index, const char* value) {
3879   GOOGLE_DCHECK(value != nullptr);
3880   _impl_.input_names_.Mutable(index)->assign(value);
3881   // @@protoc_insertion_point(field_set_char:tensorflow.GraphOpCreation.input_names)
3882 }
set_input_names(int index,const char * value,size_t size)3883 inline void GraphOpCreation::set_input_names(int index, const char* value, size_t size) {
3884   _impl_.input_names_.Mutable(index)->assign(
3885     reinterpret_cast<const char*>(value), size);
3886   // @@protoc_insertion_point(field_set_pointer:tensorflow.GraphOpCreation.input_names)
3887 }
_internal_add_input_names()3888 inline std::string* GraphOpCreation::_internal_add_input_names() {
3889   return _impl_.input_names_.Add();
3890 }
add_input_names(const std::string & value)3891 inline void GraphOpCreation::add_input_names(const std::string& value) {
3892   _impl_.input_names_.Add()->assign(value);
3893   // @@protoc_insertion_point(field_add:tensorflow.GraphOpCreation.input_names)
3894 }
add_input_names(std::string && value)3895 inline void GraphOpCreation::add_input_names(std::string&& value) {
3896   _impl_.input_names_.Add(std::move(value));
3897   // @@protoc_insertion_point(field_add:tensorflow.GraphOpCreation.input_names)
3898 }
add_input_names(const char * value)3899 inline void GraphOpCreation::add_input_names(const char* value) {
3900   GOOGLE_DCHECK(value != nullptr);
3901   _impl_.input_names_.Add()->assign(value);
3902   // @@protoc_insertion_point(field_add_char:tensorflow.GraphOpCreation.input_names)
3903 }
add_input_names(const char * value,size_t size)3904 inline void GraphOpCreation::add_input_names(const char* value, size_t size) {
3905   _impl_.input_names_.Add()->assign(reinterpret_cast<const char*>(value), size);
3906   // @@protoc_insertion_point(field_add_pointer:tensorflow.GraphOpCreation.input_names)
3907 }
3908 inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>&
input_names()3909 GraphOpCreation::input_names() const {
3910   // @@protoc_insertion_point(field_list:tensorflow.GraphOpCreation.input_names)
3911   return _impl_.input_names_;
3912 }
3913 inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>*
mutable_input_names()3914 GraphOpCreation::mutable_input_names() {
3915   // @@protoc_insertion_point(field_mutable_list:tensorflow.GraphOpCreation.input_names)
3916   return &_impl_.input_names_;
3917 }
3918 
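// Illustrative usage of the repeated-string accessors above (a sketch, not
// part of the generated API; the tensor names below are hypothetical):
//
//   tensorflow::GraphOpCreation op_creation;
//   op_creation.add_input_names("x:0");               // append a copy
//   op_creation.add_input_names(std::string("y:0"));  // append by move
//   for (const std::string& name : op_creation.input_names()) {
//     // read-only iteration over the RepeatedPtrField<std::string> view
//   }
//   op_creation.set_input_names(0, "x_renamed:0");    // overwrite by index
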
// int32 num_outputs = 7;
inline void GraphOpCreation::clear_num_outputs() {
  _impl_.num_outputs_ = 0;
}
inline ::int32_t GraphOpCreation::_internal_num_outputs() const {
  return _impl_.num_outputs_;
}
inline ::int32_t GraphOpCreation::num_outputs() const {
  // @@protoc_insertion_point(field_get:tensorflow.GraphOpCreation.num_outputs)
  return _internal_num_outputs();
}
inline void GraphOpCreation::_internal_set_num_outputs(::int32_t value) {

  _impl_.num_outputs_ = value;
}
inline void GraphOpCreation::set_num_outputs(::int32_t value) {
  _internal_set_num_outputs(value);
  // @@protoc_insertion_point(field_set:tensorflow.GraphOpCreation.num_outputs)
}

// .tensorflow.CodeLocation code_location = 8;
inline bool GraphOpCreation::_internal_has_code_location() const {
  return this != internal_default_instance() && _impl_.code_location_ != nullptr;
}
inline bool GraphOpCreation::has_code_location() const {
  return _internal_has_code_location();
}
inline void GraphOpCreation::clear_code_location() {
  if (GetArenaForAllocation() == nullptr && _impl_.code_location_ != nullptr) {
    delete _impl_.code_location_;
  }
  _impl_.code_location_ = nullptr;
}
inline const ::tensorflow::CodeLocation& GraphOpCreation::_internal_code_location() const {
  const ::tensorflow::CodeLocation* p = _impl_.code_location_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::CodeLocation&>(
      ::tensorflow::_CodeLocation_default_instance_);
}
inline const ::tensorflow::CodeLocation& GraphOpCreation::code_location() const {
  // @@protoc_insertion_point(field_get:tensorflow.GraphOpCreation.code_location)
  return _internal_code_location();
}
inline void GraphOpCreation::unsafe_arena_set_allocated_code_location(
    ::tensorflow::CodeLocation* code_location) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.code_location_);
  }
  _impl_.code_location_ = code_location;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.GraphOpCreation.code_location)
}
inline ::tensorflow::CodeLocation* GraphOpCreation::release_code_location() {

  ::tensorflow::CodeLocation* temp = _impl_.code_location_;
  _impl_.code_location_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old =  reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else  // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
inline ::tensorflow::CodeLocation* GraphOpCreation::unsafe_arena_release_code_location() {
  // @@protoc_insertion_point(field_release:tensorflow.GraphOpCreation.code_location)

  ::tensorflow::CodeLocation* temp = _impl_.code_location_;
  _impl_.code_location_ = nullptr;
  return temp;
}
inline ::tensorflow::CodeLocation* GraphOpCreation::_internal_mutable_code_location() {

  if (_impl_.code_location_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::CodeLocation>(GetArenaForAllocation());
    _impl_.code_location_ = p;
  }
  return _impl_.code_location_;
}
inline ::tensorflow::CodeLocation* GraphOpCreation::mutable_code_location() {
  ::tensorflow::CodeLocation* _msg = _internal_mutable_code_location();
  // @@protoc_insertion_point(field_mutable:tensorflow.GraphOpCreation.code_location)
  return _msg;
}
inline void GraphOpCreation::set_allocated_code_location(::tensorflow::CodeLocation* code_location) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete _impl_.code_location_;
  }
  if (code_location) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(code_location);
    if (message_arena != submessage_arena) {
      code_location = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, code_location, submessage_arena);
    }

  } else {

  }
  _impl_.code_location_ = code_location;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.GraphOpCreation.code_location)
}

// repeated int32 output_tensor_ids = 9;
inline int GraphOpCreation::_internal_output_tensor_ids_size() const {
  return _impl_.output_tensor_ids_.size();
}
inline int GraphOpCreation::output_tensor_ids_size() const {
  return _internal_output_tensor_ids_size();
}
inline void GraphOpCreation::clear_output_tensor_ids() {
  _impl_.output_tensor_ids_.Clear();
}
inline ::int32_t GraphOpCreation::_internal_output_tensor_ids(int index) const {
  return _impl_.output_tensor_ids_.Get(index);
}
inline ::int32_t GraphOpCreation::output_tensor_ids(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.GraphOpCreation.output_tensor_ids)
  return _internal_output_tensor_ids(index);
}
inline void GraphOpCreation::set_output_tensor_ids(int index, ::int32_t value) {
  _impl_.output_tensor_ids_.Set(index, value);
  // @@protoc_insertion_point(field_set:tensorflow.GraphOpCreation.output_tensor_ids)
}
inline void GraphOpCreation::_internal_add_output_tensor_ids(::int32_t value) {
  _impl_.output_tensor_ids_.Add(value);
}
inline void GraphOpCreation::add_output_tensor_ids(::int32_t value) {
  _internal_add_output_tensor_ids(value);
  // @@protoc_insertion_point(field_add:tensorflow.GraphOpCreation.output_tensor_ids)
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >&
GraphOpCreation::_internal_output_tensor_ids() const {
  return _impl_.output_tensor_ids_;
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >&
GraphOpCreation::output_tensor_ids() const {
  // @@protoc_insertion_point(field_list:tensorflow.GraphOpCreation.output_tensor_ids)
  return _internal_output_tensor_ids();
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >*
GraphOpCreation::_internal_mutable_output_tensor_ids() {
  return &_impl_.output_tensor_ids_;
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >*
GraphOpCreation::mutable_output_tensor_ids() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.GraphOpCreation.output_tensor_ids)
  return _internal_mutable_output_tensor_ids();
}

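// A minimal end-to-end sketch of the GraphOpCreation accessors defined above
// (illustrative only; the values and the CodeLocation field used are assumed):
//
//   tensorflow::GraphOpCreation op_creation;
//   op_creation.set_op_name("MatMul_1");
//   op_creation.set_graph_id("g_1");
//   op_creation.set_num_outputs(1);
//   op_creation.add_output_tensor_ids(42);
//   // mutable_code_location() lazily creates the submessage on the owning arena.
//   op_creation.mutable_code_location()->set_host_name("worker0");
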
// -------------------------------------------------------------------

// DebuggedGraph

// string graph_id = 1;
inline void DebuggedGraph::clear_graph_id() {
  _impl_.graph_id_.ClearToEmpty();
}
inline const std::string& DebuggedGraph::graph_id() const {
  // @@protoc_insertion_point(field_get:tensorflow.DebuggedGraph.graph_id)
  return _internal_graph_id();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void DebuggedGraph::set_graph_id(ArgT0&& arg0, ArgT... args) {

 _impl_.graph_id_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.DebuggedGraph.graph_id)
}
inline std::string* DebuggedGraph::mutable_graph_id() {
  std::string* _s = _internal_mutable_graph_id();
  // @@protoc_insertion_point(field_mutable:tensorflow.DebuggedGraph.graph_id)
  return _s;
}
inline const std::string& DebuggedGraph::_internal_graph_id() const {
  return _impl_.graph_id_.Get();
}
inline void DebuggedGraph::_internal_set_graph_id(const std::string& value) {

  _impl_.graph_id_.Set(value, GetArenaForAllocation());
}
inline std::string* DebuggedGraph::_internal_mutable_graph_id() {

  return _impl_.graph_id_.Mutable(GetArenaForAllocation());
}
inline std::string* DebuggedGraph::release_graph_id() {
  // @@protoc_insertion_point(field_release:tensorflow.DebuggedGraph.graph_id)
  return _impl_.graph_id_.Release();
}
inline void DebuggedGraph::set_allocated_graph_id(std::string* graph_id) {
  _impl_.graph_id_.SetAllocated(graph_id, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.graph_id_.IsDefault()) {
    _impl_.graph_id_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.DebuggedGraph.graph_id)
}

// string graph_name = 2;
inline void DebuggedGraph::clear_graph_name() {
  _impl_.graph_name_.ClearToEmpty();
}
inline const std::string& DebuggedGraph::graph_name() const {
  // @@protoc_insertion_point(field_get:tensorflow.DebuggedGraph.graph_name)
  return _internal_graph_name();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void DebuggedGraph::set_graph_name(ArgT0&& arg0, ArgT... args) {

 _impl_.graph_name_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.DebuggedGraph.graph_name)
}
inline std::string* DebuggedGraph::mutable_graph_name() {
  std::string* _s = _internal_mutable_graph_name();
  // @@protoc_insertion_point(field_mutable:tensorflow.DebuggedGraph.graph_name)
  return _s;
}
inline const std::string& DebuggedGraph::_internal_graph_name() const {
  return _impl_.graph_name_.Get();
}
inline void DebuggedGraph::_internal_set_graph_name(const std::string& value) {

  _impl_.graph_name_.Set(value, GetArenaForAllocation());
}
inline std::string* DebuggedGraph::_internal_mutable_graph_name() {

  return _impl_.graph_name_.Mutable(GetArenaForAllocation());
}
inline std::string* DebuggedGraph::release_graph_name() {
  // @@protoc_insertion_point(field_release:tensorflow.DebuggedGraph.graph_name)
  return _impl_.graph_name_.Release();
}
inline void DebuggedGraph::set_allocated_graph_name(std::string* graph_name) {
  _impl_.graph_name_.SetAllocated(graph_name, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.graph_name_.IsDefault()) {
    _impl_.graph_name_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.DebuggedGraph.graph_name)
}

// repeated string instrumented_ops = 3;
inline int DebuggedGraph::_internal_instrumented_ops_size() const {
  return _impl_.instrumented_ops_.size();
}
inline int DebuggedGraph::instrumented_ops_size() const {
  return _internal_instrumented_ops_size();
}
inline void DebuggedGraph::clear_instrumented_ops() {
  _impl_.instrumented_ops_.Clear();
}
inline std::string* DebuggedGraph::add_instrumented_ops() {
  std::string* _s = _internal_add_instrumented_ops();
  // @@protoc_insertion_point(field_add_mutable:tensorflow.DebuggedGraph.instrumented_ops)
  return _s;
}
inline const std::string& DebuggedGraph::_internal_instrumented_ops(int index) const {
  return _impl_.instrumented_ops_.Get(index);
}
inline const std::string& DebuggedGraph::instrumented_ops(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.DebuggedGraph.instrumented_ops)
  return _internal_instrumented_ops(index);
}
inline std::string* DebuggedGraph::mutable_instrumented_ops(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.DebuggedGraph.instrumented_ops)
  return _impl_.instrumented_ops_.Mutable(index);
}
inline void DebuggedGraph::set_instrumented_ops(int index, const std::string& value) {
  _impl_.instrumented_ops_.Mutable(index)->assign(value);
  // @@protoc_insertion_point(field_set:tensorflow.DebuggedGraph.instrumented_ops)
}
inline void DebuggedGraph::set_instrumented_ops(int index, std::string&& value) {
  _impl_.instrumented_ops_.Mutable(index)->assign(std::move(value));
  // @@protoc_insertion_point(field_set:tensorflow.DebuggedGraph.instrumented_ops)
}
inline void DebuggedGraph::set_instrumented_ops(int index, const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  _impl_.instrumented_ops_.Mutable(index)->assign(value);
  // @@protoc_insertion_point(field_set_char:tensorflow.DebuggedGraph.instrumented_ops)
}
inline void DebuggedGraph::set_instrumented_ops(int index, const char* value, size_t size) {
  _impl_.instrumented_ops_.Mutable(index)->assign(
    reinterpret_cast<const char*>(value), size);
  // @@protoc_insertion_point(field_set_pointer:tensorflow.DebuggedGraph.instrumented_ops)
}
inline std::string* DebuggedGraph::_internal_add_instrumented_ops() {
  return _impl_.instrumented_ops_.Add();
}
inline void DebuggedGraph::add_instrumented_ops(const std::string& value) {
  _impl_.instrumented_ops_.Add()->assign(value);
  // @@protoc_insertion_point(field_add:tensorflow.DebuggedGraph.instrumented_ops)
}
inline void DebuggedGraph::add_instrumented_ops(std::string&& value) {
  _impl_.instrumented_ops_.Add(std::move(value));
  // @@protoc_insertion_point(field_add:tensorflow.DebuggedGraph.instrumented_ops)
}
inline void DebuggedGraph::add_instrumented_ops(const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  _impl_.instrumented_ops_.Add()->assign(value);
  // @@protoc_insertion_point(field_add_char:tensorflow.DebuggedGraph.instrumented_ops)
}
inline void DebuggedGraph::add_instrumented_ops(const char* value, size_t size) {
  _impl_.instrumented_ops_.Add()->assign(reinterpret_cast<const char*>(value), size);
  // @@protoc_insertion_point(field_add_pointer:tensorflow.DebuggedGraph.instrumented_ops)
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>&
DebuggedGraph::instrumented_ops() const {
  // @@protoc_insertion_point(field_list:tensorflow.DebuggedGraph.instrumented_ops)
  return _impl_.instrumented_ops_;
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>*
DebuggedGraph::mutable_instrumented_ops() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.DebuggedGraph.instrumented_ops)
  return &_impl_.instrumented_ops_;
}

// bytes original_graph_def = 4;
inline void DebuggedGraph::clear_original_graph_def() {
  _impl_.original_graph_def_.ClearToEmpty();
}
inline const std::string& DebuggedGraph::original_graph_def() const {
  // @@protoc_insertion_point(field_get:tensorflow.DebuggedGraph.original_graph_def)
  return _internal_original_graph_def();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void DebuggedGraph::set_original_graph_def(ArgT0&& arg0, ArgT... args) {

 _impl_.original_graph_def_.SetBytes(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.DebuggedGraph.original_graph_def)
}
inline std::string* DebuggedGraph::mutable_original_graph_def() {
  std::string* _s = _internal_mutable_original_graph_def();
  // @@protoc_insertion_point(field_mutable:tensorflow.DebuggedGraph.original_graph_def)
  return _s;
}
inline const std::string& DebuggedGraph::_internal_original_graph_def() const {
  return _impl_.original_graph_def_.Get();
}
inline void DebuggedGraph::_internal_set_original_graph_def(const std::string& value) {

  _impl_.original_graph_def_.Set(value, GetArenaForAllocation());
}
inline std::string* DebuggedGraph::_internal_mutable_original_graph_def() {

  return _impl_.original_graph_def_.Mutable(GetArenaForAllocation());
}
inline std::string* DebuggedGraph::release_original_graph_def() {
  // @@protoc_insertion_point(field_release:tensorflow.DebuggedGraph.original_graph_def)
  return _impl_.original_graph_def_.Release();
}
inline void DebuggedGraph::set_allocated_original_graph_def(std::string* original_graph_def) {
  _impl_.original_graph_def_.SetAllocated(original_graph_def, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.original_graph_def_.IsDefault()) {
    _impl_.original_graph_def_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.DebuggedGraph.original_graph_def)
}

// bytes instrumented_graph_def = 5;
inline void DebuggedGraph::clear_instrumented_graph_def() {
  _impl_.instrumented_graph_def_.ClearToEmpty();
}
inline const std::string& DebuggedGraph::instrumented_graph_def() const {
  // @@protoc_insertion_point(field_get:tensorflow.DebuggedGraph.instrumented_graph_def)
  return _internal_instrumented_graph_def();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void DebuggedGraph::set_instrumented_graph_def(ArgT0&& arg0, ArgT... args) {

 _impl_.instrumented_graph_def_.SetBytes(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.DebuggedGraph.instrumented_graph_def)
}
inline std::string* DebuggedGraph::mutable_instrumented_graph_def() {
  std::string* _s = _internal_mutable_instrumented_graph_def();
  // @@protoc_insertion_point(field_mutable:tensorflow.DebuggedGraph.instrumented_graph_def)
  return _s;
}
inline const std::string& DebuggedGraph::_internal_instrumented_graph_def() const {
  return _impl_.instrumented_graph_def_.Get();
}
inline void DebuggedGraph::_internal_set_instrumented_graph_def(const std::string& value) {

  _impl_.instrumented_graph_def_.Set(value, GetArenaForAllocation());
}
inline std::string* DebuggedGraph::_internal_mutable_instrumented_graph_def() {

  return _impl_.instrumented_graph_def_.Mutable(GetArenaForAllocation());
}
inline std::string* DebuggedGraph::release_instrumented_graph_def() {
  // @@protoc_insertion_point(field_release:tensorflow.DebuggedGraph.instrumented_graph_def)
  return _impl_.instrumented_graph_def_.Release();
}
inline void DebuggedGraph::set_allocated_instrumented_graph_def(std::string* instrumented_graph_def) {
  _impl_.instrumented_graph_def_.SetAllocated(instrumented_graph_def, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.instrumented_graph_def_.IsDefault()) {
    _impl_.instrumented_graph_def_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.DebuggedGraph.instrumented_graph_def)
}

// string outer_context_id = 6;
inline void DebuggedGraph::clear_outer_context_id() {
  _impl_.outer_context_id_.ClearToEmpty();
}
inline const std::string& DebuggedGraph::outer_context_id() const {
  // @@protoc_insertion_point(field_get:tensorflow.DebuggedGraph.outer_context_id)
  return _internal_outer_context_id();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void DebuggedGraph::set_outer_context_id(ArgT0&& arg0, ArgT... args) {

 _impl_.outer_context_id_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.DebuggedGraph.outer_context_id)
}
inline std::string* DebuggedGraph::mutable_outer_context_id() {
  std::string* _s = _internal_mutable_outer_context_id();
  // @@protoc_insertion_point(field_mutable:tensorflow.DebuggedGraph.outer_context_id)
  return _s;
}
inline const std::string& DebuggedGraph::_internal_outer_context_id() const {
  return _impl_.outer_context_id_.Get();
}
inline void DebuggedGraph::_internal_set_outer_context_id(const std::string& value) {

  _impl_.outer_context_id_.Set(value, GetArenaForAllocation());
}
inline std::string* DebuggedGraph::_internal_mutable_outer_context_id() {

  return _impl_.outer_context_id_.Mutable(GetArenaForAllocation());
}
inline std::string* DebuggedGraph::release_outer_context_id() {
  // @@protoc_insertion_point(field_release:tensorflow.DebuggedGraph.outer_context_id)
  return _impl_.outer_context_id_.Release();
}
inline void DebuggedGraph::set_allocated_outer_context_id(std::string* outer_context_id) {
  _impl_.outer_context_id_.SetAllocated(outer_context_id, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.outer_context_id_.IsDefault()) {
    _impl_.outer_context_id_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.DebuggedGraph.outer_context_id)
}

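// Sketch of typical DebuggedGraph usage (illustrative only; the serialized
// bytes variable is a hypothetical placeholder):
//
//   tensorflow::DebuggedGraph debugged_graph;
//   debugged_graph.set_graph_id("g_1");
//   debugged_graph.set_original_graph_def(original_graph_def_bytes);  // bytes field
//   // set_allocated_*() transfers ownership of a heap-allocated string; when the
//   // message lives on an arena the value is copied onto that arena.
//   debugged_graph.set_allocated_graph_name(new std::string("train_step"));
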
// -------------------------------------------------------------------

// DebuggedDevice

// string device_name = 1;
inline void DebuggedDevice::clear_device_name() {
  _impl_.device_name_.ClearToEmpty();
}
inline const std::string& DebuggedDevice::device_name() const {
  // @@protoc_insertion_point(field_get:tensorflow.DebuggedDevice.device_name)
  return _internal_device_name();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void DebuggedDevice::set_device_name(ArgT0&& arg0, ArgT... args) {

 _impl_.device_name_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.DebuggedDevice.device_name)
}
inline std::string* DebuggedDevice::mutable_device_name() {
  std::string* _s = _internal_mutable_device_name();
  // @@protoc_insertion_point(field_mutable:tensorflow.DebuggedDevice.device_name)
  return _s;
}
inline const std::string& DebuggedDevice::_internal_device_name() const {
  return _impl_.device_name_.Get();
}
inline void DebuggedDevice::_internal_set_device_name(const std::string& value) {

  _impl_.device_name_.Set(value, GetArenaForAllocation());
}
inline std::string* DebuggedDevice::_internal_mutable_device_name() {

  return _impl_.device_name_.Mutable(GetArenaForAllocation());
}
inline std::string* DebuggedDevice::release_device_name() {
  // @@protoc_insertion_point(field_release:tensorflow.DebuggedDevice.device_name)
  return _impl_.device_name_.Release();
}
inline void DebuggedDevice::set_allocated_device_name(std::string* device_name) {
  _impl_.device_name_.SetAllocated(device_name, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.device_name_.IsDefault()) {
    _impl_.device_name_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.DebuggedDevice.device_name)
}

// int32 device_id = 2;
inline void DebuggedDevice::clear_device_id() {
  _impl_.device_id_ = 0;
}
inline ::int32_t DebuggedDevice::_internal_device_id() const {
  return _impl_.device_id_;
}
inline ::int32_t DebuggedDevice::device_id() const {
  // @@protoc_insertion_point(field_get:tensorflow.DebuggedDevice.device_id)
  return _internal_device_id();
}
inline void DebuggedDevice::_internal_set_device_id(::int32_t value) {

  _impl_.device_id_ = value;
}
inline void DebuggedDevice::set_device_id(::int32_t value) {
  _internal_set_device_id(value);
  // @@protoc_insertion_point(field_set:tensorflow.DebuggedDevice.device_id)
}

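// Sketch of DebuggedDevice usage (illustrative only; the device string is a
// hypothetical placeholder):
//
//   tensorflow::DebuggedDevice device;
//   device.set_device_name("/job:localhost/replica:0/task:0/device:GPU:0");
//   device.set_device_id(0);
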
4444 // -------------------------------------------------------------------
4445 
4446 // Execution
4447 
4448 // string op_type = 1;
clear_op_type()4449 inline void Execution::clear_op_type() {
4450   _impl_.op_type_.ClearToEmpty();
4451 }
op_type()4452 inline const std::string& Execution::op_type() const {
4453   // @@protoc_insertion_point(field_get:tensorflow.Execution.op_type)
4454   return _internal_op_type();
4455 }
4456 template <typename ArgT0, typename... ArgT>
4457 inline PROTOBUF_ALWAYS_INLINE
set_op_type(ArgT0 && arg0,ArgT...args)4458 void Execution::set_op_type(ArgT0&& arg0, ArgT... args) {
4459 
4460  _impl_.op_type_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
4461   // @@protoc_insertion_point(field_set:tensorflow.Execution.op_type)
4462 }
mutable_op_type()4463 inline std::string* Execution::mutable_op_type() {
4464   std::string* _s = _internal_mutable_op_type();
4465   // @@protoc_insertion_point(field_mutable:tensorflow.Execution.op_type)
4466   return _s;
4467 }
_internal_op_type()4468 inline const std::string& Execution::_internal_op_type() const {
4469   return _impl_.op_type_.Get();
4470 }
_internal_set_op_type(const std::string & value)4471 inline void Execution::_internal_set_op_type(const std::string& value) {
4472 
4473   _impl_.op_type_.Set(value, GetArenaForAllocation());
4474 }
_internal_mutable_op_type()4475 inline std::string* Execution::_internal_mutable_op_type() {
4476 
4477   return _impl_.op_type_.Mutable(GetArenaForAllocation());
4478 }
release_op_type()4479 inline std::string* Execution::release_op_type() {
4480   // @@protoc_insertion_point(field_release:tensorflow.Execution.op_type)
4481   return _impl_.op_type_.Release();
4482 }
set_allocated_op_type(std::string * op_type)4483 inline void Execution::set_allocated_op_type(std::string* op_type) {
4484   _impl_.op_type_.SetAllocated(op_type, GetArenaForAllocation());
4485 #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
4486   if (_impl_.op_type_.IsDefault()) {
4487     _impl_.op_type_.Set("", GetArenaForAllocation());
4488   }
4489 #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
4490   // @@protoc_insertion_point(field_set_allocated:tensorflow.Execution.op_type)
4491 }
4492 
4493 // int32 num_outputs = 2;
clear_num_outputs()4494 inline void Execution::clear_num_outputs() {
4495   _impl_.num_outputs_ = 0;
4496 }
_internal_num_outputs()4497 inline ::int32_t Execution::_internal_num_outputs() const {
4498   return _impl_.num_outputs_;
4499 }
num_outputs()4500 inline ::int32_t Execution::num_outputs() const {
4501   // @@protoc_insertion_point(field_get:tensorflow.Execution.num_outputs)
4502   return _internal_num_outputs();
4503 }
_internal_set_num_outputs(::int32_t value)4504 inline void Execution::_internal_set_num_outputs(::int32_t value) {
4505 
4506   _impl_.num_outputs_ = value;
4507 }
set_num_outputs(::int32_t value)4508 inline void Execution::set_num_outputs(::int32_t value) {
4509   _internal_set_num_outputs(value);
4510   // @@protoc_insertion_point(field_set:tensorflow.Execution.num_outputs)
4511 }
4512 
4513 // string graph_id = 3;
clear_graph_id()4514 inline void Execution::clear_graph_id() {
4515   _impl_.graph_id_.ClearToEmpty();
4516 }
graph_id()4517 inline const std::string& Execution::graph_id() const {
4518   // @@protoc_insertion_point(field_get:tensorflow.Execution.graph_id)
4519   return _internal_graph_id();
4520 }
4521 template <typename ArgT0, typename... ArgT>
4522 inline PROTOBUF_ALWAYS_INLINE
set_graph_id(ArgT0 && arg0,ArgT...args)4523 void Execution::set_graph_id(ArgT0&& arg0, ArgT... args) {
4524 
4525  _impl_.graph_id_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
4526   // @@protoc_insertion_point(field_set:tensorflow.Execution.graph_id)
4527 }
mutable_graph_id()4528 inline std::string* Execution::mutable_graph_id() {
4529   std::string* _s = _internal_mutable_graph_id();
4530   // @@protoc_insertion_point(field_mutable:tensorflow.Execution.graph_id)
4531   return _s;
4532 }
_internal_graph_id()4533 inline const std::string& Execution::_internal_graph_id() const {
4534   return _impl_.graph_id_.Get();
4535 }
_internal_set_graph_id(const std::string & value)4536 inline void Execution::_internal_set_graph_id(const std::string& value) {
4537 
4538   _impl_.graph_id_.Set(value, GetArenaForAllocation());
4539 }
_internal_mutable_graph_id()4540 inline std::string* Execution::_internal_mutable_graph_id() {
4541 
4542   return _impl_.graph_id_.Mutable(GetArenaForAllocation());
4543 }
release_graph_id()4544 inline std::string* Execution::release_graph_id() {
4545   // @@protoc_insertion_point(field_release:tensorflow.Execution.graph_id)
4546   return _impl_.graph_id_.Release();
4547 }
set_allocated_graph_id(std::string * graph_id)4548 inline void Execution::set_allocated_graph_id(std::string* graph_id) {
4549   _impl_.graph_id_.SetAllocated(graph_id, GetArenaForAllocation());
4550 #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
4551   if (_impl_.graph_id_.IsDefault()) {
4552     _impl_.graph_id_.Set("", GetArenaForAllocation());
4553   }
4554 #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
4555   // @@protoc_insertion_point(field_set_allocated:tensorflow.Execution.graph_id)
4556 }
4557 
4558 // repeated int64 input_tensor_ids = 4;
_internal_input_tensor_ids_size()4559 inline int Execution::_internal_input_tensor_ids_size() const {
4560   return _impl_.input_tensor_ids_.size();
4561 }
input_tensor_ids_size()4562 inline int Execution::input_tensor_ids_size() const {
4563   return _internal_input_tensor_ids_size();
4564 }
clear_input_tensor_ids()4565 inline void Execution::clear_input_tensor_ids() {
4566   _impl_.input_tensor_ids_.Clear();
4567 }
_internal_input_tensor_ids(int index)4568 inline ::int64_t Execution::_internal_input_tensor_ids(int index) const {
4569   return _impl_.input_tensor_ids_.Get(index);
4570 }
input_tensor_ids(int index)4571 inline ::int64_t Execution::input_tensor_ids(int index) const {
4572   // @@protoc_insertion_point(field_get:tensorflow.Execution.input_tensor_ids)
4573   return _internal_input_tensor_ids(index);
4574 }
set_input_tensor_ids(int index,::int64_t value)4575 inline void Execution::set_input_tensor_ids(int index, ::int64_t value) {
4576   _impl_.input_tensor_ids_.Set(index, value);
4577   // @@protoc_insertion_point(field_set:tensorflow.Execution.input_tensor_ids)
4578 }
_internal_add_input_tensor_ids(::int64_t value)4579 inline void Execution::_internal_add_input_tensor_ids(::int64_t value) {
4580   _impl_.input_tensor_ids_.Add(value);
4581 }
add_input_tensor_ids(::int64_t value)4582 inline void Execution::add_input_tensor_ids(::int64_t value) {
4583   _internal_add_input_tensor_ids(value);
4584   // @@protoc_insertion_point(field_add:tensorflow.Execution.input_tensor_ids)
4585 }
4586 inline const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
_internal_input_tensor_ids()4587 Execution::_internal_input_tensor_ids() const {
4588   return _impl_.input_tensor_ids_;
4589 }
4590 inline const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
input_tensor_ids()4591 Execution::input_tensor_ids() const {
4592   // @@protoc_insertion_point(field_list:tensorflow.Execution.input_tensor_ids)
4593   return _internal_input_tensor_ids();
4594 }
4595 inline ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
_internal_mutable_input_tensor_ids()4596 Execution::_internal_mutable_input_tensor_ids() {
4597   return &_impl_.input_tensor_ids_;
4598 }
4599 inline ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
mutable_input_tensor_ids()4600 Execution::mutable_input_tensor_ids() {
4601   // @@protoc_insertion_point(field_mutable_list:tensorflow.Execution.input_tensor_ids)
4602   return _internal_mutable_input_tensor_ids();
4603 }
4604 
4605 // repeated int64 output_tensor_ids = 5;
_internal_output_tensor_ids_size()4606 inline int Execution::_internal_output_tensor_ids_size() const {
4607   return _impl_.output_tensor_ids_.size();
4608 }
output_tensor_ids_size()4609 inline int Execution::output_tensor_ids_size() const {
4610   return _internal_output_tensor_ids_size();
4611 }
clear_output_tensor_ids()4612 inline void Execution::clear_output_tensor_ids() {
4613   _impl_.output_tensor_ids_.Clear();
4614 }
_internal_output_tensor_ids(int index)4615 inline ::int64_t Execution::_internal_output_tensor_ids(int index) const {
4616   return _impl_.output_tensor_ids_.Get(index);
4617 }
output_tensor_ids(int index)4618 inline ::int64_t Execution::output_tensor_ids(int index) const {
4619   // @@protoc_insertion_point(field_get:tensorflow.Execution.output_tensor_ids)
4620   return _internal_output_tensor_ids(index);
4621 }
set_output_tensor_ids(int index,::int64_t value)4622 inline void Execution::set_output_tensor_ids(int index, ::int64_t value) {
4623   _impl_.output_tensor_ids_.Set(index, value);
4624   // @@protoc_insertion_point(field_set:tensorflow.Execution.output_tensor_ids)
4625 }
_internal_add_output_tensor_ids(::int64_t value)4626 inline void Execution::_internal_add_output_tensor_ids(::int64_t value) {
4627   _impl_.output_tensor_ids_.Add(value);
4628 }
add_output_tensor_ids(::int64_t value)4629 inline void Execution::add_output_tensor_ids(::int64_t value) {
4630   _internal_add_output_tensor_ids(value);
4631   // @@protoc_insertion_point(field_add:tensorflow.Execution.output_tensor_ids)
4632 }
4633 inline const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
_internal_output_tensor_ids()4634 Execution::_internal_output_tensor_ids() const {
4635   return _impl_.output_tensor_ids_;
4636 }
4637 inline const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
output_tensor_ids()4638 Execution::output_tensor_ids() const {
4639   // @@protoc_insertion_point(field_list:tensorflow.Execution.output_tensor_ids)
4640   return _internal_output_tensor_ids();
4641 }
4642 inline ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
_internal_mutable_output_tensor_ids()4643 Execution::_internal_mutable_output_tensor_ids() {
4644   return &_impl_.output_tensor_ids_;
4645 }
4646 inline ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
mutable_output_tensor_ids()4647 Execution::mutable_output_tensor_ids() {
4648   // @@protoc_insertion_point(field_mutable_list:tensorflow.Execution.output_tensor_ids)
4649   return _internal_mutable_output_tensor_ids();
4650 }
4651 
4652 // .tensorflow.TensorDebugMode tensor_debug_mode = 6;
clear_tensor_debug_mode()4653 inline void Execution::clear_tensor_debug_mode() {
4654   _impl_.tensor_debug_mode_ = 0;
4655 }
_internal_tensor_debug_mode()4656 inline ::tensorflow::TensorDebugMode Execution::_internal_tensor_debug_mode() const {
4657   return static_cast< ::tensorflow::TensorDebugMode >(_impl_.tensor_debug_mode_);
4658 }
tensor_debug_mode()4659 inline ::tensorflow::TensorDebugMode Execution::tensor_debug_mode() const {
4660   // @@protoc_insertion_point(field_get:tensorflow.Execution.tensor_debug_mode)
4661   return _internal_tensor_debug_mode();
4662 }
_internal_set_tensor_debug_mode(::tensorflow::TensorDebugMode value)4663 inline void Execution::_internal_set_tensor_debug_mode(::tensorflow::TensorDebugMode value) {
4664 
4665   _impl_.tensor_debug_mode_ = value;
4666 }
set_tensor_debug_mode(::tensorflow::TensorDebugMode value)4667 inline void Execution::set_tensor_debug_mode(::tensorflow::TensorDebugMode value) {
4668   _internal_set_tensor_debug_mode(value);
4669   // @@protoc_insertion_point(field_set:tensorflow.Execution.tensor_debug_mode)
4670 }
4671 
4672 // repeated .tensorflow.TensorProto tensor_protos = 7;
_internal_tensor_protos_size()4673 inline int Execution::_internal_tensor_protos_size() const {
4674   return _impl_.tensor_protos_.size();
4675 }
tensor_protos_size()4676 inline int Execution::tensor_protos_size() const {
4677   return _internal_tensor_protos_size();
4678 }
mutable_tensor_protos(int index)4679 inline ::tensorflow::TensorProto* Execution::mutable_tensor_protos(int index) {
4680   // @@protoc_insertion_point(field_mutable:tensorflow.Execution.tensor_protos)
4681   return _impl_.tensor_protos_.Mutable(index);
4682 }
4683 inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorProto >*
mutable_tensor_protos()4684 Execution::mutable_tensor_protos() {
4685   // @@protoc_insertion_point(field_mutable_list:tensorflow.Execution.tensor_protos)
4686   return &_impl_.tensor_protos_;
4687 }
_internal_tensor_protos(int index)4688 inline const ::tensorflow::TensorProto& Execution::_internal_tensor_protos(int index) const {
4689   return _impl_.tensor_protos_.Get(index);
4690 }
tensor_protos(int index)4691 inline const ::tensorflow::TensorProto& Execution::tensor_protos(int index) const {
4692   // @@protoc_insertion_point(field_get:tensorflow.Execution.tensor_protos)
4693   return _internal_tensor_protos(index);
4694 }
_internal_add_tensor_protos()4695 inline ::tensorflow::TensorProto* Execution::_internal_add_tensor_protos() {
4696   return _impl_.tensor_protos_.Add();
4697 }
add_tensor_protos()4698 inline ::tensorflow::TensorProto* Execution::add_tensor_protos() {
4699   ::tensorflow::TensorProto* _add = _internal_add_tensor_protos();
4700   // @@protoc_insertion_point(field_add:tensorflow.Execution.tensor_protos)
4701   return _add;
4702 }
4703 inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorProto >&
tensor_protos()4704 Execution::tensor_protos() const {
4705   // @@protoc_insertion_point(field_list:tensorflow.Execution.tensor_protos)
4706   return _impl_.tensor_protos_;
4707 }
4708 
4709 // .tensorflow.CodeLocation code_location = 8;
_internal_has_code_location()4710 inline bool Execution::_internal_has_code_location() const {
4711   return this != internal_default_instance() && _impl_.code_location_ != nullptr;
4712 }
has_code_location()4713 inline bool Execution::has_code_location() const {
4714   return _internal_has_code_location();
4715 }
clear_code_location()4716 inline void Execution::clear_code_location() {
4717   if (GetArenaForAllocation() == nullptr && _impl_.code_location_ != nullptr) {
4718     delete _impl_.code_location_;
4719   }
4720   _impl_.code_location_ = nullptr;
4721 }
_internal_code_location()4722 inline const ::tensorflow::CodeLocation& Execution::_internal_code_location() const {
4723   const ::tensorflow::CodeLocation* p = _impl_.code_location_;
4724   return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::CodeLocation&>(
4725       ::tensorflow::_CodeLocation_default_instance_);
4726 }
code_location()4727 inline const ::tensorflow::CodeLocation& Execution::code_location() const {
4728   // @@protoc_insertion_point(field_get:tensorflow.Execution.code_location)
4729   return _internal_code_location();
4730 }
unsafe_arena_set_allocated_code_location(::tensorflow::CodeLocation * code_location)4731 inline void Execution::unsafe_arena_set_allocated_code_location(
4732     ::tensorflow::CodeLocation* code_location) {
4733   if (GetArenaForAllocation() == nullptr) {
4734     delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.code_location_);
4735   }
4736   _impl_.code_location_ = code_location;
4737   // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.Execution.code_location)
4738 }
release_code_location()4739 inline ::tensorflow::CodeLocation* Execution::release_code_location() {
4740 
4741   ::tensorflow::CodeLocation* temp = _impl_.code_location_;
4742   _impl_.code_location_ = nullptr;
4743 #ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
4744   auto* old =  reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
4745   temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
4746   if (GetArenaForAllocation() == nullptr) { delete old; }
4747 #else  // PROTOBUF_FORCE_COPY_IN_RELEASE
4748   if (GetArenaForAllocation() != nullptr) {
4749     temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
4750   }
4751 #endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
4752   return temp;
4753 }
unsafe_arena_release_code_location()4754 inline ::tensorflow::CodeLocation* Execution::unsafe_arena_release_code_location() {
4755   // @@protoc_insertion_point(field_release:tensorflow.Execution.code_location)
4756 
4757   ::tensorflow::CodeLocation* temp = _impl_.code_location_;
4758   _impl_.code_location_ = nullptr;
4759   return temp;
4760 }
_internal_mutable_code_location()4761 inline ::tensorflow::CodeLocation* Execution::_internal_mutable_code_location() {
4762 
4763   if (_impl_.code_location_ == nullptr) {
4764     auto* p = CreateMaybeMessage<::tensorflow::CodeLocation>(GetArenaForAllocation());
4765     _impl_.code_location_ = p;
4766   }
4767   return _impl_.code_location_;
4768 }
mutable_code_location()4769 inline ::tensorflow::CodeLocation* Execution::mutable_code_location() {
4770   ::tensorflow::CodeLocation* _msg = _internal_mutable_code_location();
4771   // @@protoc_insertion_point(field_mutable:tensorflow.Execution.code_location)
4772   return _msg;
4773 }
set_allocated_code_location(::tensorflow::CodeLocation * code_location)4774 inline void Execution::set_allocated_code_location(::tensorflow::CodeLocation* code_location) {
4775   ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
4776   if (message_arena == nullptr) {
4777     delete _impl_.code_location_;
4778   }
4779   if (code_location) {
4780     ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
4781         ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(code_location);
4782     if (message_arena != submessage_arena) {
4783       code_location = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
4784           message_arena, code_location, submessage_arena);
4785     }
4786 
4787   } else {
4788 
4789   }
4790   _impl_.code_location_ = code_location;
4791   // @@protoc_insertion_point(field_set_allocated:tensorflow.Execution.code_location)
4792 }
4793 
// repeated int32 output_tensor_device_ids = 9;
inline int Execution::_internal_output_tensor_device_ids_size() const {
  return _impl_.output_tensor_device_ids_.size();
}
inline int Execution::output_tensor_device_ids_size() const {
  return _internal_output_tensor_device_ids_size();
}
inline void Execution::clear_output_tensor_device_ids() {
  _impl_.output_tensor_device_ids_.Clear();
}
inline ::int32_t Execution::_internal_output_tensor_device_ids(int index) const {
  return _impl_.output_tensor_device_ids_.Get(index);
}
inline ::int32_t Execution::output_tensor_device_ids(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.Execution.output_tensor_device_ids)
  return _internal_output_tensor_device_ids(index);
}
inline void Execution::set_output_tensor_device_ids(int index, ::int32_t value) {
  _impl_.output_tensor_device_ids_.Set(index, value);
  // @@protoc_insertion_point(field_set:tensorflow.Execution.output_tensor_device_ids)
}
inline void Execution::_internal_add_output_tensor_device_ids(::int32_t value) {
  _impl_.output_tensor_device_ids_.Add(value);
}
inline void Execution::add_output_tensor_device_ids(::int32_t value) {
  _internal_add_output_tensor_device_ids(value);
  // @@protoc_insertion_point(field_add:tensorflow.Execution.output_tensor_device_ids)
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >&
Execution::_internal_output_tensor_device_ids() const {
  return _impl_.output_tensor_device_ids_;
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >&
Execution::output_tensor_device_ids() const {
  // @@protoc_insertion_point(field_list:tensorflow.Execution.output_tensor_device_ids)
  return _internal_output_tensor_device_ids();
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >*
Execution::_internal_mutable_output_tensor_device_ids() {
  return &_impl_.output_tensor_device_ids_;
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >*
Execution::mutable_output_tensor_device_ids() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.Execution.output_tensor_device_ids)
  return _internal_mutable_output_tensor_device_ids();
}

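// Usage sketch (editorial note, not emitted by protoc): the repeated int32
// accessors above support appending, indexed reads/writes, and iteration over
// the underlying RepeatedField. For example:
//
//   tensorflow::Execution execution;
//   execution.add_output_tensor_device_ids(0);
//   execution.add_output_tensor_device_ids(1);
//   execution.set_output_tensor_device_ids(1, 2);        // overwrite index 1
//   for (::int32_t id : execution.output_tensor_device_ids()) {
//     // id is 0, then 2
//   }
//   int n = execution.output_tensor_device_ids_size();   // n == 2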
// -------------------------------------------------------------------

// GraphExecutionTrace

// string tfdbg_context_id = 1;
inline void GraphExecutionTrace::clear_tfdbg_context_id() {
  _impl_.tfdbg_context_id_.ClearToEmpty();
}
inline const std::string& GraphExecutionTrace::tfdbg_context_id() const {
  // @@protoc_insertion_point(field_get:tensorflow.GraphExecutionTrace.tfdbg_context_id)
  return _internal_tfdbg_context_id();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void GraphExecutionTrace::set_tfdbg_context_id(ArgT0&& arg0, ArgT... args) {

 _impl_.tfdbg_context_id_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.GraphExecutionTrace.tfdbg_context_id)
}
inline std::string* GraphExecutionTrace::mutable_tfdbg_context_id() {
  std::string* _s = _internal_mutable_tfdbg_context_id();
  // @@protoc_insertion_point(field_mutable:tensorflow.GraphExecutionTrace.tfdbg_context_id)
  return _s;
}
inline const std::string& GraphExecutionTrace::_internal_tfdbg_context_id() const {
  return _impl_.tfdbg_context_id_.Get();
}
inline void GraphExecutionTrace::_internal_set_tfdbg_context_id(const std::string& value) {

  _impl_.tfdbg_context_id_.Set(value, GetArenaForAllocation());
}
inline std::string* GraphExecutionTrace::_internal_mutable_tfdbg_context_id() {

  return _impl_.tfdbg_context_id_.Mutable(GetArenaForAllocation());
}
inline std::string* GraphExecutionTrace::release_tfdbg_context_id() {
  // @@protoc_insertion_point(field_release:tensorflow.GraphExecutionTrace.tfdbg_context_id)
  return _impl_.tfdbg_context_id_.Release();
}
inline void GraphExecutionTrace::set_allocated_tfdbg_context_id(std::string* tfdbg_context_id) {
  _impl_.tfdbg_context_id_.SetAllocated(tfdbg_context_id, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.tfdbg_context_id_.IsDefault()) {
    _impl_.tfdbg_context_id_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.GraphExecutionTrace.tfdbg_context_id)
}

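// Usage sketch (editorial note, not emitted by protoc): the string accessors
// above -- and the identical patterns for op_name and device_name further
// below -- take either std::string values or C strings through the
// perfect-forwarding setter, and expose the stored value by const reference:
//
//   tensorflow::GraphExecutionTrace trace;
//   trace.set_tfdbg_context_id("ctx-0");                  // from a C string
//   trace.set_tfdbg_context_id(std::string("ctx-1"));     // from a std::string
//   trace.mutable_tfdbg_context_id()->append("-suffix");  // in-place mutation
//   const std::string& id = trace.tfdbg_context_id();     // "ctx-1-suffix"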
// string op_name = 2;
inline void GraphExecutionTrace::clear_op_name() {
  _impl_.op_name_.ClearToEmpty();
}
inline const std::string& GraphExecutionTrace::op_name() const {
  // @@protoc_insertion_point(field_get:tensorflow.GraphExecutionTrace.op_name)
  return _internal_op_name();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void GraphExecutionTrace::set_op_name(ArgT0&& arg0, ArgT... args) {

 _impl_.op_name_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.GraphExecutionTrace.op_name)
}
inline std::string* GraphExecutionTrace::mutable_op_name() {
  std::string* _s = _internal_mutable_op_name();
  // @@protoc_insertion_point(field_mutable:tensorflow.GraphExecutionTrace.op_name)
  return _s;
}
inline const std::string& GraphExecutionTrace::_internal_op_name() const {
  return _impl_.op_name_.Get();
}
inline void GraphExecutionTrace::_internal_set_op_name(const std::string& value) {

  _impl_.op_name_.Set(value, GetArenaForAllocation());
}
inline std::string* GraphExecutionTrace::_internal_mutable_op_name() {

  return _impl_.op_name_.Mutable(GetArenaForAllocation());
}
inline std::string* GraphExecutionTrace::release_op_name() {
  // @@protoc_insertion_point(field_release:tensorflow.GraphExecutionTrace.op_name)
  return _impl_.op_name_.Release();
}
inline void GraphExecutionTrace::set_allocated_op_name(std::string* op_name) {
  _impl_.op_name_.SetAllocated(op_name, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.op_name_.IsDefault()) {
    _impl_.op_name_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.GraphExecutionTrace.op_name)
}

// int32 output_slot = 3;
inline void GraphExecutionTrace::clear_output_slot() {
  _impl_.output_slot_ = 0;
}
inline ::int32_t GraphExecutionTrace::_internal_output_slot() const {
  return _impl_.output_slot_;
}
inline ::int32_t GraphExecutionTrace::output_slot() const {
  // @@protoc_insertion_point(field_get:tensorflow.GraphExecutionTrace.output_slot)
  return _internal_output_slot();
}
inline void GraphExecutionTrace::_internal_set_output_slot(::int32_t value) {

  _impl_.output_slot_ = value;
}
inline void GraphExecutionTrace::set_output_slot(::int32_t value) {
  _internal_set_output_slot(value);
  // @@protoc_insertion_point(field_set:tensorflow.GraphExecutionTrace.output_slot)
}

// .tensorflow.TensorDebugMode tensor_debug_mode = 4;
inline void GraphExecutionTrace::clear_tensor_debug_mode() {
  _impl_.tensor_debug_mode_ = 0;
}
inline ::tensorflow::TensorDebugMode GraphExecutionTrace::_internal_tensor_debug_mode() const {
  return static_cast< ::tensorflow::TensorDebugMode >(_impl_.tensor_debug_mode_);
}
inline ::tensorflow::TensorDebugMode GraphExecutionTrace::tensor_debug_mode() const {
  // @@protoc_insertion_point(field_get:tensorflow.GraphExecutionTrace.tensor_debug_mode)
  return _internal_tensor_debug_mode();
}
inline void GraphExecutionTrace::_internal_set_tensor_debug_mode(::tensorflow::TensorDebugMode value) {

  _impl_.tensor_debug_mode_ = value;
}
inline void GraphExecutionTrace::set_tensor_debug_mode(::tensorflow::TensorDebugMode value) {
  _internal_set_tensor_debug_mode(value);
  // @@protoc_insertion_point(field_set:tensorflow.GraphExecutionTrace.tensor_debug_mode)
}

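// Usage sketch (editorial note, not emitted by protoc): the scalar int32 field
// (output_slot, above) and the enum field use plain value setters/getters; the
// enum is stored as an int internally and cast on read. The enumerator name
// below is an assumption taken from the TensorDebugMode enum declared earlier
// in this header:
//
//   tensorflow::GraphExecutionTrace trace;
//   trace.set_output_slot(0);
//   trace.set_tensor_debug_mode(tensorflow::FULL_TENSOR);  // assumed enumerator
//   if (trace.tensor_debug_mode() == tensorflow::FULL_TENSOR) {
//     // the full tensor value is expected in tensor_proto (field 5, below)
//   }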
// .tensorflow.TensorProto tensor_proto = 5;
inline bool GraphExecutionTrace::_internal_has_tensor_proto() const {
  return this != internal_default_instance() && _impl_.tensor_proto_ != nullptr;
}
inline bool GraphExecutionTrace::has_tensor_proto() const {
  return _internal_has_tensor_proto();
}
inline const ::tensorflow::TensorProto& GraphExecutionTrace::_internal_tensor_proto() const {
  const ::tensorflow::TensorProto* p = _impl_.tensor_proto_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::TensorProto&>(
      ::tensorflow::_TensorProto_default_instance_);
}
inline const ::tensorflow::TensorProto& GraphExecutionTrace::tensor_proto() const {
  // @@protoc_insertion_point(field_get:tensorflow.GraphExecutionTrace.tensor_proto)
  return _internal_tensor_proto();
}
inline void GraphExecutionTrace::unsafe_arena_set_allocated_tensor_proto(
    ::tensorflow::TensorProto* tensor_proto) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.tensor_proto_);
  }
  _impl_.tensor_proto_ = tensor_proto;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.GraphExecutionTrace.tensor_proto)
}
inline ::tensorflow::TensorProto* GraphExecutionTrace::release_tensor_proto() {

  ::tensorflow::TensorProto* temp = _impl_.tensor_proto_;
  _impl_.tensor_proto_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old =  reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else  // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif  // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
inline ::tensorflow::TensorProto* GraphExecutionTrace::unsafe_arena_release_tensor_proto() {
  // @@protoc_insertion_point(field_release:tensorflow.GraphExecutionTrace.tensor_proto)

  ::tensorflow::TensorProto* temp = _impl_.tensor_proto_;
  _impl_.tensor_proto_ = nullptr;
  return temp;
}
inline ::tensorflow::TensorProto* GraphExecutionTrace::_internal_mutable_tensor_proto() {

  if (_impl_.tensor_proto_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::TensorProto>(GetArenaForAllocation());
    _impl_.tensor_proto_ = p;
  }
  return _impl_.tensor_proto_;
}
inline ::tensorflow::TensorProto* GraphExecutionTrace::mutable_tensor_proto() {
  ::tensorflow::TensorProto* _msg = _internal_mutable_tensor_proto();
  // @@protoc_insertion_point(field_mutable:tensorflow.GraphExecutionTrace.tensor_proto)
  return _msg;
}
inline void GraphExecutionTrace::set_allocated_tensor_proto(::tensorflow::TensorProto* tensor_proto) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.tensor_proto_);
  }
  if (tensor_proto) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(
                reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(tensor_proto));
    if (message_arena != submessage_arena) {
      tensor_proto = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, tensor_proto, submessage_arena);
    }

  } else {

  }
  _impl_.tensor_proto_ = tensor_proto;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.GraphExecutionTrace.tensor_proto)
}

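// Usage sketch (editorial note, not emitted by protoc): release_tensor_proto()
// hands ownership of the submessage to the caller; when the parent message lives
// on an arena, the generated code returns a heap copy instead, so the returned
// pointer is always safe to delete. The dtype setter and DT_FLOAT enumerator
// below are assumptions taken from TensorProto in tensor.pb.h; <memory> is
// needed for std::unique_ptr:
//
//   tensorflow::GraphExecutionTrace trace;
//   trace.mutable_tensor_proto()->set_dtype(tensorflow::DT_FLOAT);
//   std::unique_ptr<tensorflow::TensorProto> owned(trace.release_tensor_proto());
//   // trace.has_tensor_proto() is now false; `owned` frees the copy on scope exit.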
// string device_name = 6;
inline void GraphExecutionTrace::clear_device_name() {
  _impl_.device_name_.ClearToEmpty();
}
inline const std::string& GraphExecutionTrace::device_name() const {
  // @@protoc_insertion_point(field_get:tensorflow.GraphExecutionTrace.device_name)
  return _internal_device_name();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void GraphExecutionTrace::set_device_name(ArgT0&& arg0, ArgT... args) {

 _impl_.device_name_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.GraphExecutionTrace.device_name)
}
inline std::string* GraphExecutionTrace::mutable_device_name() {
  std::string* _s = _internal_mutable_device_name();
  // @@protoc_insertion_point(field_mutable:tensorflow.GraphExecutionTrace.device_name)
  return _s;
}
inline const std::string& GraphExecutionTrace::_internal_device_name() const {
  return _impl_.device_name_.Get();
}
inline void GraphExecutionTrace::_internal_set_device_name(const std::string& value) {

  _impl_.device_name_.Set(value, GetArenaForAllocation());
}
inline std::string* GraphExecutionTrace::_internal_mutable_device_name() {

  return _impl_.device_name_.Mutable(GetArenaForAllocation());
}
inline std::string* GraphExecutionTrace::release_device_name() {
  // @@protoc_insertion_point(field_release:tensorflow.GraphExecutionTrace.device_name)
  return _impl_.device_name_.Release();
}
inline void GraphExecutionTrace::set_allocated_device_name(std::string* device_name) {
  _impl_.device_name_.SetAllocated(device_name, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.device_name_.IsDefault()) {
    _impl_.device_name_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.GraphExecutionTrace.device_name)
}

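// Usage sketch (editorial note, not emitted by protoc): putting the
// GraphExecutionTrace accessors together, a trace record can be filled in and
// round-tripped with the standard MessageLite serialization API:
//
//   tensorflow::GraphExecutionTrace trace;
//   trace.set_tfdbg_context_id("ctx-0");
//   trace.set_op_name("MatMul_1");
//   trace.set_output_slot(0);
//   trace.set_device_name("/job:localhost/replica:0/task:0/device:CPU:0");
//   std::string wire = trace.SerializeAsString();
//
//   tensorflow::GraphExecutionTrace parsed;
//   bool ok = parsed.ParseFromString(wire);  // ok == true; fields round-trip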
#ifdef __GNUC__
  #pragma GCC diagnostic pop
#endif  // __GNUC__
// -------------------------------------------------------------------

// -------------------------------------------------------------------

// -------------------------------------------------------------------

// -------------------------------------------------------------------

// -------------------------------------------------------------------

// -------------------------------------------------------------------

// -------------------------------------------------------------------

// -------------------------------------------------------------------

// -------------------------------------------------------------------


// @@protoc_insertion_point(namespace_scope)

}  // namespace tensorflow

PROTOBUF_NAMESPACE_OPEN

template <> struct is_proto_enum< ::tensorflow::TensorDebugMode> : ::std::true_type {};

PROTOBUF_NAMESPACE_CLOSE

// @@protoc_insertion_point(global_scope)

#include <google/protobuf/port_undef.inc>
#endif  // GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto