// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: tensorflow/core/protobuf/debug_event.proto

#ifndef GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto
#define GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto

#include <cstdint>
#include <limits>
#include <string>

#include <google/protobuf/port_def.inc>
#if PROTOBUF_VERSION < 3021000
#error This file was generated by a newer version of protoc which is
#error incompatible with your Protocol Buffer headers. Please update
#error your headers.
#endif
#if 3021012 < PROTOBUF_MIN_PROTOC_VERSION
#error This file was generated by an older version of protoc which is
#error incompatible with your Protocol Buffer headers. Please
#error regenerate this file with a newer version of protoc.
#endif

#include <google/protobuf/port_undef.inc>
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/arena.h>
#include <google/protobuf/arenastring.h>
#include <google/protobuf/generated_message_util.h>
#include <google/protobuf/metadata_lite.h>
#include <google/protobuf/message_lite.h>
#include <google/protobuf/repeated_field.h>  // IWYU pragma: export
#include <google/protobuf/extension_set.h>  // IWYU pragma: export
#include <google/protobuf/generated_enum_util.h>
#include "tensorflow/core/framework/tensor.pb.h"
#include "tensorflow/core/protobuf/graph_debug_info.pb.h"
// @@protoc_insertion_point(includes)
#include <google/protobuf/port_def.inc>
#define PROTOBUF_INTERNAL_EXPORT_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto
PROTOBUF_NAMESPACE_OPEN
namespace internal {
class AnyMetadata;
}  // namespace internal
PROTOBUF_NAMESPACE_CLOSE

// Internal implementation detail -- do not use these members.
struct TableStruct_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto {
  static const ::uint32_t offsets[];
};
namespace tensorflow {
class CodeLocation;
struct CodeLocationDefaultTypeInternal;
extern CodeLocationDefaultTypeInternal _CodeLocation_default_instance_;
class DebugEvent;
struct DebugEventDefaultTypeInternal;
extern DebugEventDefaultTypeInternal _DebugEvent_default_instance_;
class DebugMetadata;
struct DebugMetadataDefaultTypeInternal;
extern DebugMetadataDefaultTypeInternal _DebugMetadata_default_instance_;
class DebuggedDevice;
struct DebuggedDeviceDefaultTypeInternal;
extern DebuggedDeviceDefaultTypeInternal _DebuggedDevice_default_instance_;
class DebuggedGraph;
struct DebuggedGraphDefaultTypeInternal;
extern DebuggedGraphDefaultTypeInternal _DebuggedGraph_default_instance_;
class Execution;
struct ExecutionDefaultTypeInternal;
extern ExecutionDefaultTypeInternal _Execution_default_instance_;
class GraphExecutionTrace;
struct GraphExecutionTraceDefaultTypeInternal;
extern GraphExecutionTraceDefaultTypeInternal _GraphExecutionTrace_default_instance_;
class GraphOpCreation;
struct GraphOpCreationDefaultTypeInternal;
extern GraphOpCreationDefaultTypeInternal _GraphOpCreation_default_instance_;
class SourceFile;
struct SourceFileDefaultTypeInternal;
extern SourceFileDefaultTypeInternal _SourceFile_default_instance_;
class StackFrameWithId;
struct StackFrameWithIdDefaultTypeInternal;
extern StackFrameWithIdDefaultTypeInternal _StackFrameWithId_default_instance_;
}  // namespace tensorflow
PROTOBUF_NAMESPACE_OPEN
template<> ::tensorflow::CodeLocation* Arena::CreateMaybeMessage<::tensorflow::CodeLocation>(Arena*);
template<> ::tensorflow::DebugEvent* Arena::CreateMaybeMessage<::tensorflow::DebugEvent>(Arena*);
template<> ::tensorflow::DebugMetadata* Arena::CreateMaybeMessage<::tensorflow::DebugMetadata>(Arena*);
template<> ::tensorflow::DebuggedDevice* Arena::CreateMaybeMessage<::tensorflow::DebuggedDevice>(Arena*);
template<> ::tensorflow::DebuggedGraph* Arena::CreateMaybeMessage<::tensorflow::DebuggedGraph>(Arena*);
template<> ::tensorflow::Execution* Arena::CreateMaybeMessage<::tensorflow::Execution>(Arena*);
template<> ::tensorflow::GraphExecutionTrace* Arena::CreateMaybeMessage<::tensorflow::GraphExecutionTrace>(Arena*);
template<> ::tensorflow::GraphOpCreation* Arena::CreateMaybeMessage<::tensorflow::GraphOpCreation>(Arena*);
template<> ::tensorflow::SourceFile* Arena::CreateMaybeMessage<::tensorflow::SourceFile>(Arena*);
template<> ::tensorflow::StackFrameWithId* Arena::CreateMaybeMessage<::tensorflow::StackFrameWithId>(Arena*);
PROTOBUF_NAMESPACE_CLOSE
namespace tensorflow {

enum TensorDebugMode : int {
  UNSPECIFIED = 0,
  NO_TENSOR = 1,
  CURT_HEALTH = 2,
  CONCISE_HEALTH = 3,
  FULL_HEALTH = 4,
  SHAPE = 5,
  FULL_NUMERICS = 6,
  FULL_TENSOR = 7,
  REDUCE_INF_NAN_THREE_SLOTS = 8,
  TensorDebugMode_INT_MIN_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::int32_t>::min(),
  TensorDebugMode_INT_MAX_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::int32_t>::max()
};
bool TensorDebugMode_IsValid(int value);
constexpr TensorDebugMode TensorDebugMode_MIN = UNSPECIFIED;
constexpr TensorDebugMode TensorDebugMode_MAX = REDUCE_INF_NAN_THREE_SLOTS;
constexpr int TensorDebugMode_ARRAYSIZE = TensorDebugMode_MAX + 1;

const std::string& TensorDebugMode_Name(TensorDebugMode value);
template<typename T>
inline const std::string& TensorDebugMode_Name(T enum_t_value) {
  static_assert(::std::is_same<T, TensorDebugMode>::value ||
    ::std::is_integral<T>::value,
    "Incorrect type passed to function TensorDebugMode_Name.");
  return TensorDebugMode_Name(static_cast<TensorDebugMode>(enum_t_value));
}
bool TensorDebugMode_Parse(
    ::PROTOBUF_NAMESPACE_ID::ConstStringParam name, TensorDebugMode* value);
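// Illustrative usage sketch (editorial comment, not emitted by protoc): the
// enum helpers declared above can be used to validate, stringify, and parse
// TensorDebugMode values, e.g.
//
//   int raw = 3;
//   if (tensorflow::TensorDebugMode_IsValid(raw)) {
//     const std::string& name = tensorflow::TensorDebugMode_Name(
//         static_cast<tensorflow::TensorDebugMode>(raw));   // "CONCISE_HEALTH"
//     tensorflow::TensorDebugMode parsed;
//     tensorflow::TensorDebugMode_Parse(name, &parsed);     // round-trips the name
//   }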
// ===================================================================

class DebugEvent final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.DebugEvent) */ {
 public:
  inline DebugEvent() : DebugEvent(nullptr) {}
  ~DebugEvent() override;
  explicit PROTOBUF_CONSTEXPR DebugEvent(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  DebugEvent(const DebugEvent& from);
  DebugEvent(DebugEvent&& from) noexcept
    : DebugEvent() {
    *this = ::std::move(from);
  }

  inline DebugEvent& operator=(const DebugEvent& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline DebugEvent& operator=(DebugEvent&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const DebugEvent& default_instance() {
    return *internal_default_instance();
  }
  enum WhatCase {
    kDebugMetadata = 3,
    kSourceFile = 4,
    kStackFrameWithId = 6,
    kGraphOpCreation = 7,
    kDebuggedGraph = 8,
    kExecution = 9,
    kGraphExecutionTrace = 10,
    kGraphId = 11,
    kDebuggedDevice = 12,
    WHAT_NOT_SET = 0,
  };

  static inline const DebugEvent* internal_default_instance() {
    return reinterpret_cast<const DebugEvent*>(
               &_DebugEvent_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    0;

  friend void swap(DebugEvent& a, DebugEvent& b) {
    a.Swap(&b);
  }
  inline void Swap(DebugEvent* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
  #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(DebugEvent* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  DebugEvent* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<DebugEvent>(arena);
  }
  DebugEvent* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
  void CopyFrom(const DebugEvent& from);
  void MergeFrom(const DebugEvent& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(DebugEvent* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.DebugEvent";
  }
  protected:
  explicit DebugEvent(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                      bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kWallTimeFieldNumber = 1,
    kStepFieldNumber = 2,
    kDebugMetadataFieldNumber = 3,
    kSourceFileFieldNumber = 4,
    kStackFrameWithIdFieldNumber = 6,
    kGraphOpCreationFieldNumber = 7,
    kDebuggedGraphFieldNumber = 8,
    kExecutionFieldNumber = 9,
    kGraphExecutionTraceFieldNumber = 10,
    kGraphIdFieldNumber = 11,
    kDebuggedDeviceFieldNumber = 12,
  };
  // double wall_time = 1;
  void clear_wall_time();
  double wall_time() const;
  void set_wall_time(double value);
  private:
  double _internal_wall_time() const;
  void _internal_set_wall_time(double value);
  public:

  // int64 step = 2;
  void clear_step();
  ::int64_t step() const;
  void set_step(::int64_t value);
  private:
  ::int64_t _internal_step() const;
  void _internal_set_step(::int64_t value);
  public:

  // .tensorflow.DebugMetadata debug_metadata = 3;
  bool has_debug_metadata() const;
  private:
  bool _internal_has_debug_metadata() const;
  public:
  void clear_debug_metadata();
  const ::tensorflow::DebugMetadata& debug_metadata() const;
  PROTOBUF_NODISCARD ::tensorflow::DebugMetadata* release_debug_metadata();
  ::tensorflow::DebugMetadata* mutable_debug_metadata();
  void set_allocated_debug_metadata(::tensorflow::DebugMetadata* debug_metadata);
  private:
  const ::tensorflow::DebugMetadata& _internal_debug_metadata() const;
  ::tensorflow::DebugMetadata* _internal_mutable_debug_metadata();
  public:
  void unsafe_arena_set_allocated_debug_metadata(
      ::tensorflow::DebugMetadata* debug_metadata);
  ::tensorflow::DebugMetadata* unsafe_arena_release_debug_metadata();

  // .tensorflow.SourceFile source_file = 4;
  bool has_source_file() const;
  private:
  bool _internal_has_source_file() const;
  public:
  void clear_source_file();
  const ::tensorflow::SourceFile& source_file() const;
  PROTOBUF_NODISCARD ::tensorflow::SourceFile* release_source_file();
  ::tensorflow::SourceFile* mutable_source_file();
  void set_allocated_source_file(::tensorflow::SourceFile* source_file);
  private:
  const ::tensorflow::SourceFile& _internal_source_file() const;
  ::tensorflow::SourceFile* _internal_mutable_source_file();
  public:
  void unsafe_arena_set_allocated_source_file(
      ::tensorflow::SourceFile* source_file);
  ::tensorflow::SourceFile* unsafe_arena_release_source_file();

  // .tensorflow.StackFrameWithId stack_frame_with_id = 6;
  bool has_stack_frame_with_id() const;
  private:
  bool _internal_has_stack_frame_with_id() const;
  public:
  void clear_stack_frame_with_id();
  const ::tensorflow::StackFrameWithId& stack_frame_with_id() const;
  PROTOBUF_NODISCARD ::tensorflow::StackFrameWithId* release_stack_frame_with_id();
  ::tensorflow::StackFrameWithId* mutable_stack_frame_with_id();
  void set_allocated_stack_frame_with_id(::tensorflow::StackFrameWithId* stack_frame_with_id);
  private:
  const ::tensorflow::StackFrameWithId& _internal_stack_frame_with_id() const;
  ::tensorflow::StackFrameWithId* _internal_mutable_stack_frame_with_id();
  public:
  void unsafe_arena_set_allocated_stack_frame_with_id(
      ::tensorflow::StackFrameWithId* stack_frame_with_id);
  ::tensorflow::StackFrameWithId* unsafe_arena_release_stack_frame_with_id();

  // .tensorflow.GraphOpCreation graph_op_creation = 7;
  bool has_graph_op_creation() const;
  private:
  bool _internal_has_graph_op_creation() const;
  public:
  void clear_graph_op_creation();
  const ::tensorflow::GraphOpCreation& graph_op_creation() const;
  PROTOBUF_NODISCARD ::tensorflow::GraphOpCreation* release_graph_op_creation();
  ::tensorflow::GraphOpCreation* mutable_graph_op_creation();
  void set_allocated_graph_op_creation(::tensorflow::GraphOpCreation* graph_op_creation);
  private:
  const ::tensorflow::GraphOpCreation& _internal_graph_op_creation() const;
  ::tensorflow::GraphOpCreation* _internal_mutable_graph_op_creation();
  public:
  void unsafe_arena_set_allocated_graph_op_creation(
      ::tensorflow::GraphOpCreation* graph_op_creation);
  ::tensorflow::GraphOpCreation* unsafe_arena_release_graph_op_creation();

  // .tensorflow.DebuggedGraph debugged_graph = 8;
  bool has_debugged_graph() const;
  private:
  bool _internal_has_debugged_graph() const;
  public:
  void clear_debugged_graph();
  const ::tensorflow::DebuggedGraph& debugged_graph() const;
  PROTOBUF_NODISCARD ::tensorflow::DebuggedGraph* release_debugged_graph();
  ::tensorflow::DebuggedGraph* mutable_debugged_graph();
  void set_allocated_debugged_graph(::tensorflow::DebuggedGraph* debugged_graph);
  private:
  const ::tensorflow::DebuggedGraph& _internal_debugged_graph() const;
  ::tensorflow::DebuggedGraph* _internal_mutable_debugged_graph();
  public:
  void unsafe_arena_set_allocated_debugged_graph(
      ::tensorflow::DebuggedGraph* debugged_graph);
  ::tensorflow::DebuggedGraph* unsafe_arena_release_debugged_graph();

  // .tensorflow.Execution execution = 9;
  bool has_execution() const;
  private:
  bool _internal_has_execution() const;
  public:
  void clear_execution();
  const ::tensorflow::Execution& execution() const;
  PROTOBUF_NODISCARD ::tensorflow::Execution* release_execution();
  ::tensorflow::Execution* mutable_execution();
  void set_allocated_execution(::tensorflow::Execution* execution);
  private:
  const ::tensorflow::Execution& _internal_execution() const;
  ::tensorflow::Execution* _internal_mutable_execution();
  public:
  void unsafe_arena_set_allocated_execution(
      ::tensorflow::Execution* execution);
  ::tensorflow::Execution* unsafe_arena_release_execution();

  // .tensorflow.GraphExecutionTrace graph_execution_trace = 10;
  bool has_graph_execution_trace() const;
  private:
  bool _internal_has_graph_execution_trace() const;
  public:
  void clear_graph_execution_trace();
  const ::tensorflow::GraphExecutionTrace& graph_execution_trace() const;
  PROTOBUF_NODISCARD ::tensorflow::GraphExecutionTrace* release_graph_execution_trace();
  ::tensorflow::GraphExecutionTrace* mutable_graph_execution_trace();
  void set_allocated_graph_execution_trace(::tensorflow::GraphExecutionTrace* graph_execution_trace);
  private:
  const ::tensorflow::GraphExecutionTrace& _internal_graph_execution_trace() const;
  ::tensorflow::GraphExecutionTrace* _internal_mutable_graph_execution_trace();
  public:
  void unsafe_arena_set_allocated_graph_execution_trace(
      ::tensorflow::GraphExecutionTrace* graph_execution_trace);
  ::tensorflow::GraphExecutionTrace* unsafe_arena_release_graph_execution_trace();

  // string graph_id = 11;
  bool has_graph_id() const;
  private:
  bool _internal_has_graph_id() const;
  public:
  void clear_graph_id();
  const std::string& graph_id() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_graph_id(ArgT0&& arg0, ArgT... args);
  std::string* mutable_graph_id();
  PROTOBUF_NODISCARD std::string* release_graph_id();
  void set_allocated_graph_id(std::string* graph_id);
  private:
  const std::string& _internal_graph_id() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_graph_id(const std::string& value);
  std::string* _internal_mutable_graph_id();
  public:

  // .tensorflow.DebuggedDevice debugged_device = 12;
  bool has_debugged_device() const;
  private:
  bool _internal_has_debugged_device() const;
  public:
  void clear_debugged_device();
  const ::tensorflow::DebuggedDevice& debugged_device() const;
  PROTOBUF_NODISCARD ::tensorflow::DebuggedDevice* release_debugged_device();
  ::tensorflow::DebuggedDevice* mutable_debugged_device();
  void set_allocated_debugged_device(::tensorflow::DebuggedDevice* debugged_device);
  private:
  const ::tensorflow::DebuggedDevice& _internal_debugged_device() const;
  ::tensorflow::DebuggedDevice* _internal_mutable_debugged_device();
  public:
  void unsafe_arena_set_allocated_debugged_device(
      ::tensorflow::DebuggedDevice* debugged_device);
  ::tensorflow::DebuggedDevice* unsafe_arena_release_debugged_device();

  void clear_what();
  WhatCase what_case() const;
  // @@protoc_insertion_point(class_scope:tensorflow.DebugEvent)
  private:
  class _Internal;
  void set_has_debug_metadata();
  void set_has_source_file();
  void set_has_stack_frame_with_id();
  void set_has_graph_op_creation();
  void set_has_debugged_graph();
  void set_has_execution();
  void set_has_graph_execution_trace();
  void set_has_graph_id();
  void set_has_debugged_device();

  inline bool has_what() const;
  inline void clear_has_what();

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  struct Impl_ {
    double wall_time_;
    ::int64_t step_;
    union WhatUnion {
      constexpr WhatUnion() : _constinit_{} {}
      ::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized _constinit_;
      ::tensorflow::DebugMetadata* debug_metadata_;
      ::tensorflow::SourceFile* source_file_;
      ::tensorflow::StackFrameWithId* stack_frame_with_id_;
      ::tensorflow::GraphOpCreation* graph_op_creation_;
      ::tensorflow::DebuggedGraph* debugged_graph_;
      ::tensorflow::Execution* execution_;
      ::tensorflow::GraphExecutionTrace* graph_execution_trace_;
      ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr graph_id_;
      ::tensorflow::DebuggedDevice* debugged_device_;
    } what_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
    ::uint32_t _oneof_case_[1];

  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto;
};
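// Illustrative usage sketch (editorial comment, not emitted by protoc): a
// DebugEvent carries at most one payload from the `what` oneof, which can be
// dispatched on via what_case(); the handler names below are hypothetical.
//
//   void Handle(const tensorflow::DebugEvent& event) {
//     switch (event.what_case()) {
//       case tensorflow::DebugEvent::kExecution:
//         HandleExecution(event.execution());                    // hypothetical
//         break;
//       case tensorflow::DebugEvent::kGraphExecutionTrace:
//         HandleTrace(event.graph_execution_trace());            // hypothetical
//         break;
//       case tensorflow::DebugEvent::WHAT_NOT_SET:
//       default:
//         break;
//     }
//   }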
// -------------------------------------------------------------------

class DebugMetadata final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.DebugMetadata) */ {
 public:
  inline DebugMetadata() : DebugMetadata(nullptr) {}
  ~DebugMetadata() override;
  explicit PROTOBUF_CONSTEXPR DebugMetadata(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  DebugMetadata(const DebugMetadata& from);
  DebugMetadata(DebugMetadata&& from) noexcept
    : DebugMetadata() {
    *this = ::std::move(from);
  }

  inline DebugMetadata& operator=(const DebugMetadata& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline DebugMetadata& operator=(DebugMetadata&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const DebugMetadata& default_instance() {
    return *internal_default_instance();
  }
  static inline const DebugMetadata* internal_default_instance() {
    return reinterpret_cast<const DebugMetadata*>(
               &_DebugMetadata_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    1;

  friend void swap(DebugMetadata& a, DebugMetadata& b) {
    a.Swap(&b);
  }
  inline void Swap(DebugMetadata* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
  #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(DebugMetadata* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  DebugMetadata* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<DebugMetadata>(arena);
  }
  DebugMetadata* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
  void CopyFrom(const DebugMetadata& from);
  void MergeFrom(const DebugMetadata& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(DebugMetadata* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.DebugMetadata";
  }
  protected:
  explicit DebugMetadata(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                         bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kTensorflowVersionFieldNumber = 1,
    kFileVersionFieldNumber = 2,
    kTfdbgRunIdFieldNumber = 3,
  };
  // string tensorflow_version = 1;
  void clear_tensorflow_version();
  const std::string& tensorflow_version() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_tensorflow_version(ArgT0&& arg0, ArgT... args);
  std::string* mutable_tensorflow_version();
  PROTOBUF_NODISCARD std::string* release_tensorflow_version();
  void set_allocated_tensorflow_version(std::string* tensorflow_version);
  private:
  const std::string& _internal_tensorflow_version() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_tensorflow_version(const std::string& value);
  std::string* _internal_mutable_tensorflow_version();
  public:

  // string file_version = 2;
  void clear_file_version();
  const std::string& file_version() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_file_version(ArgT0&& arg0, ArgT... args);
  std::string* mutable_file_version();
  PROTOBUF_NODISCARD std::string* release_file_version();
  void set_allocated_file_version(std::string* file_version);
  private:
  const std::string& _internal_file_version() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_file_version(const std::string& value);
  std::string* _internal_mutable_file_version();
  public:

  // string tfdbg_run_id = 3;
  void clear_tfdbg_run_id();
  const std::string& tfdbg_run_id() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_tfdbg_run_id(ArgT0&& arg0, ArgT... args);
  std::string* mutable_tfdbg_run_id();
  PROTOBUF_NODISCARD std::string* release_tfdbg_run_id();
  void set_allocated_tfdbg_run_id(std::string* tfdbg_run_id);
  private:
  const std::string& _internal_tfdbg_run_id() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_tfdbg_run_id(const std::string& value);
  std::string* _internal_mutable_tfdbg_run_id();
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.DebugMetadata)
  private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr tensorflow_version_;
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr file_version_;
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr tfdbg_run_id_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto;
};
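// Illustrative usage sketch (editorial comment, not emitted by protoc): the
// singular string fields use the standard generated accessors; the literal
// values below are placeholders.
//
//   tensorflow::DebugMetadata metadata;
//   metadata.set_tensorflow_version("2.12.0");            // placeholder value
//   metadata.set_file_version("debug_event_file_v0");     // placeholder value
//   metadata.set_tfdbg_run_id("run_1");                   // placeholder value
//   const std::string& version = metadata.tensorflow_version();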
// -------------------------------------------------------------------

class SourceFile final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.SourceFile) */ {
 public:
  inline SourceFile() : SourceFile(nullptr) {}
  ~SourceFile() override;
  explicit PROTOBUF_CONSTEXPR SourceFile(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  SourceFile(const SourceFile& from);
  SourceFile(SourceFile&& from) noexcept
    : SourceFile() {
    *this = ::std::move(from);
  }

  inline SourceFile& operator=(const SourceFile& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline SourceFile& operator=(SourceFile&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const SourceFile& default_instance() {
    return *internal_default_instance();
  }
  static inline const SourceFile* internal_default_instance() {
    return reinterpret_cast<const SourceFile*>(
               &_SourceFile_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    2;

  friend void swap(SourceFile& a, SourceFile& b) {
    a.Swap(&b);
  }
  inline void Swap(SourceFile* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
  #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(SourceFile* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  SourceFile* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<SourceFile>(arena);
  }
  SourceFile* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
  void CopyFrom(const SourceFile& from);
  void MergeFrom(const SourceFile& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(SourceFile* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.SourceFile";
  }
  protected:
  explicit SourceFile(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                      bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kLinesFieldNumber = 3,
    kFilePathFieldNumber = 1,
    kHostNameFieldNumber = 2,
  };
  // repeated string lines = 3;
  int lines_size() const;
  private:
  int _internal_lines_size() const;
  public:
  void clear_lines();
  const std::string& lines(int index) const;
  std::string* mutable_lines(int index);
  void set_lines(int index, const std::string& value);
  void set_lines(int index, std::string&& value);
  void set_lines(int index, const char* value);
  void set_lines(int index, const char* value, size_t size);
  std::string* add_lines();
  void add_lines(const std::string& value);
  void add_lines(std::string&& value);
  void add_lines(const char* value);
  void add_lines(const char* value, size_t size);
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>& lines() const;
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>* mutable_lines();
  private:
  const std::string& _internal_lines(int index) const;
  std::string* _internal_add_lines();
  public:

  // string file_path = 1;
  void clear_file_path();
  const std::string& file_path() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_file_path(ArgT0&& arg0, ArgT... args);
  std::string* mutable_file_path();
  PROTOBUF_NODISCARD std::string* release_file_path();
  void set_allocated_file_path(std::string* file_path);
  private:
  const std::string& _internal_file_path() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_file_path(const std::string& value);
  std::string* _internal_mutable_file_path();
  public:

  // string host_name = 2;
  void clear_host_name();
  const std::string& host_name() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_host_name(ArgT0&& arg0, ArgT... args);
  std::string* mutable_host_name();
  PROTOBUF_NODISCARD std::string* release_host_name();
  void set_allocated_host_name(std::string* host_name);
  private:
  const std::string& _internal_host_name() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_host_name(const std::string& value);
  std::string* _internal_mutable_host_name();
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.SourceFile)
  private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string> lines_;
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr file_path_;
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr host_name_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto;
};
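// Illustrative usage sketch (editorial comment, not emitted by protoc):
// `lines` is a repeated string field, so it supports the usual
// add/size/index accessors; the path, host, and source text are placeholders.
//
//   tensorflow::SourceFile source;
//   source.set_file_path("/tmp/example.py");        // placeholder path
//   source.set_host_name("localhost");              // placeholder host
//   source.add_lines("import tensorflow as tf");
//   source.add_lines("print(tf.__version__)");
//   for (int i = 0; i < source.lines_size(); ++i) {
//     const std::string& line = source.lines(i);
//   }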
// -------------------------------------------------------------------

class StackFrameWithId final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.StackFrameWithId) */ {
 public:
  inline StackFrameWithId() : StackFrameWithId(nullptr) {}
  ~StackFrameWithId() override;
  explicit PROTOBUF_CONSTEXPR StackFrameWithId(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  StackFrameWithId(const StackFrameWithId& from);
  StackFrameWithId(StackFrameWithId&& from) noexcept
    : StackFrameWithId() {
    *this = ::std::move(from);
  }

  inline StackFrameWithId& operator=(const StackFrameWithId& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline StackFrameWithId& operator=(StackFrameWithId&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const StackFrameWithId& default_instance() {
    return *internal_default_instance();
  }
  static inline const StackFrameWithId* internal_default_instance() {
    return reinterpret_cast<const StackFrameWithId*>(
               &_StackFrameWithId_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    3;

  friend void swap(StackFrameWithId& a, StackFrameWithId& b) {
    a.Swap(&b);
  }
  inline void Swap(StackFrameWithId* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
  #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(StackFrameWithId* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  StackFrameWithId* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<StackFrameWithId>(arena);
  }
  StackFrameWithId* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
  void CopyFrom(const StackFrameWithId& from);
  void MergeFrom(const StackFrameWithId& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(StackFrameWithId* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.StackFrameWithId";
  }
  protected:
  explicit StackFrameWithId(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                            bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kIdFieldNumber = 1,
    kFileLineColFieldNumber = 2,
  };
  // string id = 1;
  void clear_id();
  const std::string& id() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_id(ArgT0&& arg0, ArgT... args);
  std::string* mutable_id();
  PROTOBUF_NODISCARD std::string* release_id();
  void set_allocated_id(std::string* id);
  private:
  const std::string& _internal_id() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_id(const std::string& value);
  std::string* _internal_mutable_id();
  public:

  // .tensorflow.GraphDebugInfo.FileLineCol file_line_col = 2;
  bool has_file_line_col() const;
  private:
  bool _internal_has_file_line_col() const;
  public:
  void clear_file_line_col();
  const ::tensorflow::GraphDebugInfo_FileLineCol& file_line_col() const;
  PROTOBUF_NODISCARD ::tensorflow::GraphDebugInfo_FileLineCol* release_file_line_col();
  ::tensorflow::GraphDebugInfo_FileLineCol* mutable_file_line_col();
  void set_allocated_file_line_col(::tensorflow::GraphDebugInfo_FileLineCol* file_line_col);
  private:
  const ::tensorflow::GraphDebugInfo_FileLineCol& _internal_file_line_col() const;
  ::tensorflow::GraphDebugInfo_FileLineCol* _internal_mutable_file_line_col();
  public:
  void unsafe_arena_set_allocated_file_line_col(
      ::tensorflow::GraphDebugInfo_FileLineCol* file_line_col);
  ::tensorflow::GraphDebugInfo_FileLineCol* unsafe_arena_release_file_line_col();

  // @@protoc_insertion_point(class_scope:tensorflow.StackFrameWithId)
  private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr id_;
    ::tensorflow::GraphDebugInfo_FileLineCol* file_line_col_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto;
};
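// Illustrative usage sketch (editorial comment, not emitted by protoc):
// `file_line_col` is a submessage, so mutable_file_line_col() lazily creates
// it and has_file_line_col() reports presence. The nested setters shown here
// assume the GraphDebugInfo.FileLineCol accessors declared in
// graph_debug_info.pb.h; the literal values are placeholders.
//
//   tensorflow::StackFrameWithId frame;
//   frame.set_id("frame_0");                          // placeholder id
//   frame.mutable_file_line_col()->set_line(42);      // assumed accessor
//   bool present = frame.has_file_line_col();         // true after mutation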
// -------------------------------------------------------------------

class CodeLocation final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.CodeLocation) */ {
 public:
  inline CodeLocation() : CodeLocation(nullptr) {}
  ~CodeLocation() override;
  explicit PROTOBUF_CONSTEXPR CodeLocation(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  CodeLocation(const CodeLocation& from);
  CodeLocation(CodeLocation&& from) noexcept
    : CodeLocation() {
    *this = ::std::move(from);
  }

  inline CodeLocation& operator=(const CodeLocation& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline CodeLocation& operator=(CodeLocation&& from) noexcept {
    if (this == &from) return *this;
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const CodeLocation& default_instance() {
    return *internal_default_instance();
  }
  static inline const CodeLocation* internal_default_instance() {
    return reinterpret_cast<const CodeLocation*>(
               &_CodeLocation_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    4;

  friend void swap(CodeLocation& a, CodeLocation& b) {
    a.Swap(&b);
  }
  inline void Swap(CodeLocation* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
  #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  void UnsafeArenaSwap(CodeLocation* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  CodeLocation* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<CodeLocation>(arena);
  }
  CodeLocation* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
  void CopyFrom(const CodeLocation& from);
  void MergeFrom(const CodeLocation& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(CodeLocation* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.CodeLocation";
  }
  protected:
  explicit CodeLocation(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                        bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  enum : int {
    kStackFrameIdsFieldNumber = 2,
    kHostNameFieldNumber = 1,
  };
  // repeated string stack_frame_ids = 2;
  int stack_frame_ids_size() const;
  private:
  int _internal_stack_frame_ids_size() const;
  public:
  void clear_stack_frame_ids();
  const std::string& stack_frame_ids(int index) const;
  std::string* mutable_stack_frame_ids(int index);
  void set_stack_frame_ids(int index, const std::string& value);
  void set_stack_frame_ids(int index, std::string&& value);
  void set_stack_frame_ids(int index, const char* value);
  void set_stack_frame_ids(int index, const char* value, size_t size);
  std::string* add_stack_frame_ids();
  void add_stack_frame_ids(const std::string& value);
  void add_stack_frame_ids(std::string&& value);
  void add_stack_frame_ids(const char* value);
  void add_stack_frame_ids(const char* value, size_t size);
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>& stack_frame_ids() const;
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>* mutable_stack_frame_ids();
  private:
  const std::string& _internal_stack_frame_ids(int index) const;
  std::string* _internal_add_stack_frame_ids();
  public:

  // string host_name = 1;
  void clear_host_name();
  const std::string& host_name() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_host_name(ArgT0&& arg0, ArgT... args);
  std::string* mutable_host_name();
  PROTOBUF_NODISCARD std::string* release_host_name();
  void set_allocated_host_name(std::string* host_name);
  private:
  const std::string& _internal_host_name() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_host_name(const std::string& value);
  std::string* _internal_mutable_host_name();
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.CodeLocation)
  private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string> stack_frame_ids_;
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr host_name_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto;
};
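// Illustrative usage sketch (editorial comment, not emitted by protoc): a
// CodeLocation ties a host name to an ordered list of stack-frame ids; the
// literal values below are placeholders.
//
//   tensorflow::CodeLocation location;
//   location.set_host_name("worker-0");         // placeholder host
//   location.add_stack_frame_ids("frame_0");    // placeholder ids
//   location.add_stack_frame_ids("frame_1");
//   int depth = location.stack_frame_ids_size();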
1156 // -------------------------------------------------------------------
1157
1158 class GraphOpCreation final :
1159 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.GraphOpCreation) */ {
1160 public:
GraphOpCreation()1161 inline GraphOpCreation() : GraphOpCreation(nullptr) {}
1162 ~GraphOpCreation() override;
1163 explicit PROTOBUF_CONSTEXPR GraphOpCreation(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
1164
1165 GraphOpCreation(const GraphOpCreation& from);
GraphOpCreation(GraphOpCreation && from)1166 GraphOpCreation(GraphOpCreation&& from) noexcept
1167 : GraphOpCreation() {
1168 *this = ::std::move(from);
1169 }
1170
1171 inline GraphOpCreation& operator=(const GraphOpCreation& from) {
1172 if (this == &from) return *this;
1173 CopyFrom(from);
1174 return *this;
1175 }
1176 inline GraphOpCreation& operator=(GraphOpCreation&& from) noexcept {
1177 if (this == &from) return *this;
1178 if (GetOwningArena() == from.GetOwningArena()
1179 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
1180 && GetOwningArena() != nullptr
1181 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
1182 ) {
1183 InternalSwap(&from);
1184 } else {
1185 CopyFrom(from);
1186 }
1187 return *this;
1188 }
1189
default_instance()1190 static const GraphOpCreation& default_instance() {
1191 return *internal_default_instance();
1192 }
internal_default_instance()1193 static inline const GraphOpCreation* internal_default_instance() {
1194 return reinterpret_cast<const GraphOpCreation*>(
1195 &_GraphOpCreation_default_instance_);
1196 }
1197 static constexpr int kIndexInFileMessages =
1198 5;
1199
swap(GraphOpCreation & a,GraphOpCreation & b)1200 friend void swap(GraphOpCreation& a, GraphOpCreation& b) {
1201 a.Swap(&b);
1202 }
Swap(GraphOpCreation * other)1203 inline void Swap(GraphOpCreation* other) {
1204 if (other == this) return;
1205 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
1206 if (GetOwningArena() != nullptr &&
1207 GetOwningArena() == other->GetOwningArena()) {
1208 #else // PROTOBUF_FORCE_COPY_IN_SWAP
1209 if (GetOwningArena() == other->GetOwningArena()) {
1210 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
1211 InternalSwap(other);
1212 } else {
1213 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
1214 }
1215 }
1216 void UnsafeArenaSwap(GraphOpCreation* other) {
1217 if (other == this) return;
1218 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
1219 InternalSwap(other);
1220 }
1221
1222 // implements Message ----------------------------------------------
1223
1224 GraphOpCreation* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
1225 return CreateMaybeMessage<GraphOpCreation>(arena);
1226 }
1227 GraphOpCreation* New() const {
1228 return New(nullptr);
1229 }
1230 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
1231 void CopyFrom(const GraphOpCreation& from);
1232 void MergeFrom(const GraphOpCreation& from);
1233 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
1234 bool IsInitialized() const final;
1235
1236 size_t ByteSizeLong() const final;
1237 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
1238 ::uint8_t* _InternalSerialize(
1239 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
1240 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
1241
1242 private:
1243 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
1244 void SharedDtor();
1245 void SetCachedSize(int size) const;
1246 void InternalSwap(GraphOpCreation* other);
1247
1248 private:
1249 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
1250 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
1251 return "tensorflow.GraphOpCreation";
1252 }
1253 protected:
1254 explicit GraphOpCreation(::PROTOBUF_NAMESPACE_ID::Arena* arena,
1255 bool is_message_owned = false);
1256 public:
1257
1258 std::string GetTypeName() const final;
1259
1260 // nested types ----------------------------------------------------
1261
1262 // accessors -------------------------------------------------------
1263
1264 enum : int {
1265 kInputNamesFieldNumber = 6,
1266 kOutputTensorIdsFieldNumber = 9,
1267 kOpTypeFieldNumber = 1,
1268 kOpNameFieldNumber = 2,
1269 kGraphNameFieldNumber = 3,
1270 kGraphIdFieldNumber = 4,
1271 kDeviceNameFieldNumber = 5,
1272 kCodeLocationFieldNumber = 8,
1273 kNumOutputsFieldNumber = 7,
1274 };
1275 // repeated string input_names = 6;
1276 int input_names_size() const;
1277 private:
1278 int _internal_input_names_size() const;
1279 public:
1280 void clear_input_names();
1281 const std::string& input_names(int index) const;
1282 std::string* mutable_input_names(int index);
1283 void set_input_names(int index, const std::string& value);
1284 void set_input_names(int index, std::string&& value);
1285 void set_input_names(int index, const char* value);
1286 void set_input_names(int index, const char* value, size_t size);
1287 std::string* add_input_names();
1288 void add_input_names(const std::string& value);
1289 void add_input_names(std::string&& value);
1290 void add_input_names(const char* value);
1291 void add_input_names(const char* value, size_t size);
1292 const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>& input_names() const;
1293 ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>* mutable_input_names();
1294 private:
1295 const std::string& _internal_input_names(int index) const;
1296 std::string* _internal_add_input_names();
1297 public:
1298
1299 // repeated int32 output_tensor_ids = 9;
1300 int output_tensor_ids_size() const;
1301 private:
1302 int _internal_output_tensor_ids_size() const;
1303 public:
1304 void clear_output_tensor_ids();
1305 private:
1306 ::int32_t _internal_output_tensor_ids(int index) const;
1307 const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >&
1308 _internal_output_tensor_ids() const;
1309 void _internal_add_output_tensor_ids(::int32_t value);
1310 ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >*
1311 _internal_mutable_output_tensor_ids();
1312 public:
1313 ::int32_t output_tensor_ids(int index) const;
1314 void set_output_tensor_ids(int index, ::int32_t value);
1315 void add_output_tensor_ids(::int32_t value);
1316 const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >&
1317 output_tensor_ids() const;
1318 ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >*
1319 mutable_output_tensor_ids();
1320
1321 // string op_type = 1;
1322 void clear_op_type();
1323 const std::string& op_type() const;
1324 template <typename ArgT0 = const std::string&, typename... ArgT>
1325 void set_op_type(ArgT0&& arg0, ArgT... args);
1326 std::string* mutable_op_type();
1327 PROTOBUF_NODISCARD std::string* release_op_type();
1328 void set_allocated_op_type(std::string* op_type);
1329 private:
1330 const std::string& _internal_op_type() const;
1331 inline PROTOBUF_ALWAYS_INLINE void _internal_set_op_type(const std::string& value);
1332 std::string* _internal_mutable_op_type();
1333 public:
1334
1335 // string op_name = 2;
1336 void clear_op_name();
1337 const std::string& op_name() const;
1338 template <typename ArgT0 = const std::string&, typename... ArgT>
1339 void set_op_name(ArgT0&& arg0, ArgT... args);
1340 std::string* mutable_op_name();
1341 PROTOBUF_NODISCARD std::string* release_op_name();
1342 void set_allocated_op_name(std::string* op_name);
1343 private:
1344 const std::string& _internal_op_name() const;
1345 inline PROTOBUF_ALWAYS_INLINE void _internal_set_op_name(const std::string& value);
1346 std::string* _internal_mutable_op_name();
1347 public:
1348
1349 // string graph_name = 3;
1350 void clear_graph_name();
1351 const std::string& graph_name() const;
1352 template <typename ArgT0 = const std::string&, typename... ArgT>
1353 void set_graph_name(ArgT0&& arg0, ArgT... args);
1354 std::string* mutable_graph_name();
1355 PROTOBUF_NODISCARD std::string* release_graph_name();
1356 void set_allocated_graph_name(std::string* graph_name);
1357 private:
1358 const std::string& _internal_graph_name() const;
1359 inline PROTOBUF_ALWAYS_INLINE void _internal_set_graph_name(const std::string& value);
1360 std::string* _internal_mutable_graph_name();
1361 public:
1362
1363 // string graph_id = 4;
1364 void clear_graph_id();
1365 const std::string& graph_id() const;
1366 template <typename ArgT0 = const std::string&, typename... ArgT>
1367 void set_graph_id(ArgT0&& arg0, ArgT... args);
1368 std::string* mutable_graph_id();
1369 PROTOBUF_NODISCARD std::string* release_graph_id();
1370 void set_allocated_graph_id(std::string* graph_id);
1371 private:
1372 const std::string& _internal_graph_id() const;
1373 inline PROTOBUF_ALWAYS_INLINE void _internal_set_graph_id(const std::string& value);
1374 std::string* _internal_mutable_graph_id();
1375 public:
1376
1377 // string device_name = 5;
1378 void clear_device_name();
1379 const std::string& device_name() const;
1380 template <typename ArgT0 = const std::string&, typename... ArgT>
1381 void set_device_name(ArgT0&& arg0, ArgT... args);
1382 std::string* mutable_device_name();
1383 PROTOBUF_NODISCARD std::string* release_device_name();
1384 void set_allocated_device_name(std::string* device_name);
1385 private:
1386 const std::string& _internal_device_name() const;
1387 inline PROTOBUF_ALWAYS_INLINE void _internal_set_device_name(const std::string& value);
1388 std::string* _internal_mutable_device_name();
1389 public:
1390
1391 // .tensorflow.CodeLocation code_location = 8;
1392 bool has_code_location() const;
1393 private:
1394 bool _internal_has_code_location() const;
1395 public:
1396 void clear_code_location();
1397 const ::tensorflow::CodeLocation& code_location() const;
1398 PROTOBUF_NODISCARD ::tensorflow::CodeLocation* release_code_location();
1399 ::tensorflow::CodeLocation* mutable_code_location();
1400 void set_allocated_code_location(::tensorflow::CodeLocation* code_location);
1401 private:
1402 const ::tensorflow::CodeLocation& _internal_code_location() const;
1403 ::tensorflow::CodeLocation* _internal_mutable_code_location();
1404 public:
1405 void unsafe_arena_set_allocated_code_location(
1406 ::tensorflow::CodeLocation* code_location);
1407 ::tensorflow::CodeLocation* unsafe_arena_release_code_location();
1408
1409 // int32 num_outputs = 7;
1410 void clear_num_outputs();
1411 ::int32_t num_outputs() const;
1412 void set_num_outputs(::int32_t value);
1413 private:
1414 ::int32_t _internal_num_outputs() const;
1415 void _internal_set_num_outputs(::int32_t value);
1416 public:
1417
1418 // @@protoc_insertion_point(class_scope:tensorflow.GraphOpCreation)
1419 private:
1420 class _Internal;
1421
1422 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
1423 typedef void InternalArenaConstructable_;
1424 typedef void DestructorSkippable_;
1425 struct Impl_ {
1426 ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string> input_names_;
1427 ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t > output_tensor_ids_;
1428 mutable std::atomic<int> _output_tensor_ids_cached_byte_size_;
1429 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr op_type_;
1430 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr op_name_;
1431 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr graph_name_;
1432 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr graph_id_;
1433 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr device_name_;
1434 ::tensorflow::CodeLocation* code_location_;
1435 ::int32_t num_outputs_;
1436 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
1437 };
1438 union { Impl_ _impl_; };
1439 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto;
1440 };
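// Illustrative usage sketch (editorial note, not emitted by protoc): populating a
// GraphOpCreation through the accessors declared above. Field values are hypothetical,
// and CodeLocation::set_host_name is assumed from the CodeLocation message declared
// earlier in this header.
//
//   tensorflow::GraphOpCreation op_creation;
//   op_creation.set_op_name("dense/MatMul");      // string op_name = 2
//   op_creation.set_graph_name("train_step");     // string graph_name = 3
//   op_creation.set_graph_id("g_0001");           // string graph_id = 4
//   op_creation.set_num_outputs(1);               // int32 num_outputs = 7
//   op_creation.mutable_code_location()->set_host_name("worker0");  // lazily creates the submessage
//   std::string wire = op_creation.SerializeAsString();             // MessageLite serialization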
1441 // -------------------------------------------------------------------
1442
1443 class DebuggedGraph final :
1444 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.DebuggedGraph) */ {
1445 public:
1446 inline DebuggedGraph() : DebuggedGraph(nullptr) {}
1447 ~DebuggedGraph() override;
1448 explicit PROTOBUF_CONSTEXPR DebuggedGraph(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
1449
1450 DebuggedGraph(const DebuggedGraph& from);
1451 DebuggedGraph(DebuggedGraph&& from) noexcept
1452 : DebuggedGraph() {
1453 *this = ::std::move(from);
1454 }
1455
1456 inline DebuggedGraph& operator=(const DebuggedGraph& from) {
1457 if (this == &from) return *this;
1458 CopyFrom(from);
1459 return *this;
1460 }
1461 inline DebuggedGraph& operator=(DebuggedGraph&& from) noexcept {
1462 if (this == &from) return *this;
1463 if (GetOwningArena() == from.GetOwningArena()
1464 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
1465 && GetOwningArena() != nullptr
1466 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
1467 ) {
1468 InternalSwap(&from);
1469 } else {
1470 CopyFrom(from);
1471 }
1472 return *this;
1473 }
1474
1475 static const DebuggedGraph& default_instance() {
1476 return *internal_default_instance();
1477 }
1478 static inline const DebuggedGraph* internal_default_instance() {
1479 return reinterpret_cast<const DebuggedGraph*>(
1480 &_DebuggedGraph_default_instance_);
1481 }
1482 static constexpr int kIndexInFileMessages =
1483 6;
1484
1485 friend void swap(DebuggedGraph& a, DebuggedGraph& b) {
1486 a.Swap(&b);
1487 }
1488 inline void Swap(DebuggedGraph* other) {
1489 if (other == this) return;
1490 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
1491 if (GetOwningArena() != nullptr &&
1492 GetOwningArena() == other->GetOwningArena()) {
1493 #else // PROTOBUF_FORCE_COPY_IN_SWAP
1494 if (GetOwningArena() == other->GetOwningArena()) {
1495 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
1496 InternalSwap(other);
1497 } else {
1498 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
1499 }
1500 }
1501 void UnsafeArenaSwap(DebuggedGraph* other) {
1502 if (other == this) return;
1503 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
1504 InternalSwap(other);
1505 }
1506
1507 // implements Message ----------------------------------------------
1508
1509 DebuggedGraph* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
1510 return CreateMaybeMessage<DebuggedGraph>(arena);
1511 }
1512 DebuggedGraph* New() const {
1513 return New(nullptr);
1514 }
1515 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
1516 void CopyFrom(const DebuggedGraph& from);
1517 void MergeFrom(const DebuggedGraph& from);
1518 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
1519 bool IsInitialized() const final;
1520
1521 size_t ByteSizeLong() const final;
1522 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
1523 ::uint8_t* _InternalSerialize(
1524 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
1525 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
1526
1527 private:
1528 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
1529 void SharedDtor();
1530 void SetCachedSize(int size) const;
1531 void InternalSwap(DebuggedGraph* other);
1532
1533 private:
1534 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
1535 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
1536 return "tensorflow.DebuggedGraph";
1537 }
1538 protected:
1539 explicit DebuggedGraph(::PROTOBUF_NAMESPACE_ID::Arena* arena,
1540 bool is_message_owned = false);
1541 public:
1542
1543 std::string GetTypeName() const final;
1544
1545 // nested types ----------------------------------------------------
1546
1547 // accessors -------------------------------------------------------
1548
1549 enum : int {
1550 kInstrumentedOpsFieldNumber = 3,
1551 kGraphIdFieldNumber = 1,
1552 kGraphNameFieldNumber = 2,
1553 kOriginalGraphDefFieldNumber = 4,
1554 kInstrumentedGraphDefFieldNumber = 5,
1555 kOuterContextIdFieldNumber = 6,
1556 };
1557 // repeated string instrumented_ops = 3;
1558 int instrumented_ops_size() const;
1559 private:
1560 int _internal_instrumented_ops_size() const;
1561 public:
1562 void clear_instrumented_ops();
1563 const std::string& instrumented_ops(int index) const;
1564 std::string* mutable_instrumented_ops(int index);
1565 void set_instrumented_ops(int index, const std::string& value);
1566 void set_instrumented_ops(int index, std::string&& value);
1567 void set_instrumented_ops(int index, const char* value);
1568 void set_instrumented_ops(int index, const char* value, size_t size);
1569 std::string* add_instrumented_ops();
1570 void add_instrumented_ops(const std::string& value);
1571 void add_instrumented_ops(std::string&& value);
1572 void add_instrumented_ops(const char* value);
1573 void add_instrumented_ops(const char* value, size_t size);
1574 const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>& instrumented_ops() const;
1575 ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>* mutable_instrumented_ops();
1576 private:
1577 const std::string& _internal_instrumented_ops(int index) const;
1578 std::string* _internal_add_instrumented_ops();
1579 public:
1580
1581 // string graph_id = 1;
1582 void clear_graph_id();
1583 const std::string& graph_id() const;
1584 template <typename ArgT0 = const std::string&, typename... ArgT>
1585 void set_graph_id(ArgT0&& arg0, ArgT... args);
1586 std::string* mutable_graph_id();
1587 PROTOBUF_NODISCARD std::string* release_graph_id();
1588 void set_allocated_graph_id(std::string* graph_id);
1589 private:
1590 const std::string& _internal_graph_id() const;
1591 inline PROTOBUF_ALWAYS_INLINE void _internal_set_graph_id(const std::string& value);
1592 std::string* _internal_mutable_graph_id();
1593 public:
1594
1595 // string graph_name = 2;
1596 void clear_graph_name();
1597 const std::string& graph_name() const;
1598 template <typename ArgT0 = const std::string&, typename... ArgT>
1599 void set_graph_name(ArgT0&& arg0, ArgT... args);
1600 std::string* mutable_graph_name();
1601 PROTOBUF_NODISCARD std::string* release_graph_name();
1602 void set_allocated_graph_name(std::string* graph_name);
1603 private:
1604 const std::string& _internal_graph_name() const;
1605 inline PROTOBUF_ALWAYS_INLINE void _internal_set_graph_name(const std::string& value);
1606 std::string* _internal_mutable_graph_name();
1607 public:
1608
1609 // bytes original_graph_def = 4;
1610 void clear_original_graph_def();
1611 const std::string& original_graph_def() const;
1612 template <typename ArgT0 = const std::string&, typename... ArgT>
1613 void set_original_graph_def(ArgT0&& arg0, ArgT... args);
1614 std::string* mutable_original_graph_def();
1615 PROTOBUF_NODISCARD std::string* release_original_graph_def();
1616 void set_allocated_original_graph_def(std::string* original_graph_def);
1617 private:
1618 const std::string& _internal_original_graph_def() const;
1619 inline PROTOBUF_ALWAYS_INLINE void _internal_set_original_graph_def(const std::string& value);
1620 std::string* _internal_mutable_original_graph_def();
1621 public:
1622
1623 // bytes instrumented_graph_def = 5;
1624 void clear_instrumented_graph_def();
1625 const std::string& instrumented_graph_def() const;
1626 template <typename ArgT0 = const std::string&, typename... ArgT>
1627 void set_instrumented_graph_def(ArgT0&& arg0, ArgT... args);
1628 std::string* mutable_instrumented_graph_def();
1629 PROTOBUF_NODISCARD std::string* release_instrumented_graph_def();
1630 void set_allocated_instrumented_graph_def(std::string* instrumented_graph_def);
1631 private:
1632 const std::string& _internal_instrumented_graph_def() const;
1633 inline PROTOBUF_ALWAYS_INLINE void _internal_set_instrumented_graph_def(const std::string& value);
1634 std::string* _internal_mutable_instrumented_graph_def();
1635 public:
1636
1637 // string outer_context_id = 6;
1638 void clear_outer_context_id();
1639 const std::string& outer_context_id() const;
1640 template <typename ArgT0 = const std::string&, typename... ArgT>
1641 void set_outer_context_id(ArgT0&& arg0, ArgT... args);
1642 std::string* mutable_outer_context_id();
1643 PROTOBUF_NODISCARD std::string* release_outer_context_id();
1644 void set_allocated_outer_context_id(std::string* outer_context_id);
1645 private:
1646 const std::string& _internal_outer_context_id() const;
1647 inline PROTOBUF_ALWAYS_INLINE void _internal_set_outer_context_id(const std::string& value);
1648 std::string* _internal_mutable_outer_context_id();
1649 public:
1650
1651 // @@protoc_insertion_point(class_scope:tensorflow.DebuggedGraph)
1652 private:
1653 class _Internal;
1654
1655 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
1656 typedef void InternalArenaConstructable_;
1657 typedef void DestructorSkippable_;
1658 struct Impl_ {
1659 ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string> instrumented_ops_;
1660 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr graph_id_;
1661 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr graph_name_;
1662 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr original_graph_def_;
1663 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr instrumented_graph_def_;
1664 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr outer_context_id_;
1665 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
1666 };
1667 union { Impl_ _impl_; };
1668 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto;
1669 };
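// Illustrative usage sketch (editorial note, not emitted by protoc): the repeated-string
// accessors on DebuggedGraph follow the standard generated pattern. Values are
// hypothetical; `serialized_graph_def` is assumed to hold a serialized GraphDef.
//
//   tensorflow::DebuggedGraph graph;
//   graph.set_graph_id("g_0001");                         // string graph_id = 1
//   graph.set_graph_name("train_step");                   // string graph_name = 2
//   graph.add_instrumented_ops("MatMul_1");               // repeated string instrumented_ops = 3
//   graph.add_instrumented_ops("Relu_1");
//   graph.set_original_graph_def(serialized_graph_def);   // bytes original_graph_def = 4
//   for (const std::string& op : graph.instrumented_ops()) { /* iterate instrumented ops */ }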
1670 // -------------------------------------------------------------------
1671
1672 class DebuggedDevice final :
1673 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.DebuggedDevice) */ {
1674 public:
1675 inline DebuggedDevice() : DebuggedDevice(nullptr) {}
1676 ~DebuggedDevice() override;
1677 explicit PROTOBUF_CONSTEXPR DebuggedDevice(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
1678
1679 DebuggedDevice(const DebuggedDevice& from);
1680 DebuggedDevice(DebuggedDevice&& from) noexcept
1681 : DebuggedDevice() {
1682 *this = ::std::move(from);
1683 }
1684
1685 inline DebuggedDevice& operator=(const DebuggedDevice& from) {
1686 if (this == &from) return *this;
1687 CopyFrom(from);
1688 return *this;
1689 }
1690 inline DebuggedDevice& operator=(DebuggedDevice&& from) noexcept {
1691 if (this == &from) return *this;
1692 if (GetOwningArena() == from.GetOwningArena()
1693 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
1694 && GetOwningArena() != nullptr
1695 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
1696 ) {
1697 InternalSwap(&from);
1698 } else {
1699 CopyFrom(from);
1700 }
1701 return *this;
1702 }
1703
1704 static const DebuggedDevice& default_instance() {
1705 return *internal_default_instance();
1706 }
1707 static inline const DebuggedDevice* internal_default_instance() {
1708 return reinterpret_cast<const DebuggedDevice*>(
1709 &_DebuggedDevice_default_instance_);
1710 }
1711 static constexpr int kIndexInFileMessages =
1712 7;
1713
1714 friend void swap(DebuggedDevice& a, DebuggedDevice& b) {
1715 a.Swap(&b);
1716 }
1717 inline void Swap(DebuggedDevice* other) {
1718 if (other == this) return;
1719 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
1720 if (GetOwningArena() != nullptr &&
1721 GetOwningArena() == other->GetOwningArena()) {
1722 #else // PROTOBUF_FORCE_COPY_IN_SWAP
1723 if (GetOwningArena() == other->GetOwningArena()) {
1724 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
1725 InternalSwap(other);
1726 } else {
1727 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
1728 }
1729 }
1730 void UnsafeArenaSwap(DebuggedDevice* other) {
1731 if (other == this) return;
1732 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
1733 InternalSwap(other);
1734 }
1735
1736 // implements Message ----------------------------------------------
1737
1738 DebuggedDevice* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
1739 return CreateMaybeMessage<DebuggedDevice>(arena);
1740 }
1741 DebuggedDevice* New() const {
1742 return New(nullptr);
1743 }
1744 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
1745 void CopyFrom(const DebuggedDevice& from);
1746 void MergeFrom(const DebuggedDevice& from);
1747 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
1748 bool IsInitialized() const final;
1749
1750 size_t ByteSizeLong() const final;
1751 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
1752 ::uint8_t* _InternalSerialize(
1753 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
1754 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
1755
1756 private:
1757 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
1758 void SharedDtor();
1759 void SetCachedSize(int size) const;
1760 void InternalSwap(DebuggedDevice* other);
1761
1762 private:
1763 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
1764 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
1765 return "tensorflow.DebuggedDevice";
1766 }
1767 protected:
1768 explicit DebuggedDevice(::PROTOBUF_NAMESPACE_ID::Arena* arena,
1769 bool is_message_owned = false);
1770 public:
1771
1772 std::string GetTypeName() const final;
1773
1774 // nested types ----------------------------------------------------
1775
1776 // accessors -------------------------------------------------------
1777
1778 enum : int {
1779 kDeviceNameFieldNumber = 1,
1780 kDeviceIdFieldNumber = 2,
1781 };
1782 // string device_name = 1;
1783 void clear_device_name();
1784 const std::string& device_name() const;
1785 template <typename ArgT0 = const std::string&, typename... ArgT>
1786 void set_device_name(ArgT0&& arg0, ArgT... args);
1787 std::string* mutable_device_name();
1788 PROTOBUF_NODISCARD std::string* release_device_name();
1789 void set_allocated_device_name(std::string* device_name);
1790 private:
1791 const std::string& _internal_device_name() const;
1792 inline PROTOBUF_ALWAYS_INLINE void _internal_set_device_name(const std::string& value);
1793 std::string* _internal_mutable_device_name();
1794 public:
1795
1796 // int32 device_id = 2;
1797 void clear_device_id();
1798 ::int32_t device_id() const;
1799 void set_device_id(::int32_t value);
1800 private:
1801 ::int32_t _internal_device_id() const;
1802 void _internal_set_device_id(::int32_t value);
1803 public:
1804
1805 // @@protoc_insertion_point(class_scope:tensorflow.DebuggedDevice)
1806 private:
1807 class _Internal;
1808
1809 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
1810 typedef void InternalArenaConstructable_;
1811 typedef void DestructorSkippable_;
1812 struct Impl_ {
1813 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr device_name_;
1814 ::int32_t device_id_;
1815 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
1816 };
1817 union { Impl_ _impl_; };
1818 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto;
1819 };
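// Illustrative usage sketch (editorial note, not emitted by protoc): DebuggedDevice
// pairs a device name with the integer id referenced by Execution.output_tensor_device_ids.
// Values are hypothetical.
//
//   tensorflow::DebuggedDevice device;
//   device.set_device_name("/job:localhost/replica:0/task:0/device:GPU:0");  // string device_name = 1
//   device.set_device_id(0);                                                 // int32 device_id = 2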
1820 // -------------------------------------------------------------------
1821
1822 class Execution final :
1823 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.Execution) */ {
1824 public:
1825 inline Execution() : Execution(nullptr) {}
1826 ~Execution() override;
1827 explicit PROTOBUF_CONSTEXPR Execution(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
1828
1829 Execution(const Execution& from);
1830 Execution(Execution&& from) noexcept
1831 : Execution() {
1832 *this = ::std::move(from);
1833 }
1834
1835 inline Execution& operator=(const Execution& from) {
1836 if (this == &from) return *this;
1837 CopyFrom(from);
1838 return *this;
1839 }
1840 inline Execution& operator=(Execution&& from) noexcept {
1841 if (this == &from) return *this;
1842 if (GetOwningArena() == from.GetOwningArena()
1843 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
1844 && GetOwningArena() != nullptr
1845 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
1846 ) {
1847 InternalSwap(&from);
1848 } else {
1849 CopyFrom(from);
1850 }
1851 return *this;
1852 }
1853
1854 static const Execution& default_instance() {
1855 return *internal_default_instance();
1856 }
1857 static inline const Execution* internal_default_instance() {
1858 return reinterpret_cast<const Execution*>(
1859 &_Execution_default_instance_);
1860 }
1861 static constexpr int kIndexInFileMessages =
1862 8;
1863
1864 friend void swap(Execution& a, Execution& b) {
1865 a.Swap(&b);
1866 }
1867 inline void Swap(Execution* other) {
1868 if (other == this) return;
1869 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
1870 if (GetOwningArena() != nullptr &&
1871 GetOwningArena() == other->GetOwningArena()) {
1872 #else // PROTOBUF_FORCE_COPY_IN_SWAP
1873 if (GetOwningArena() == other->GetOwningArena()) {
1874 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
1875 InternalSwap(other);
1876 } else {
1877 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
1878 }
1879 }
1880 void UnsafeArenaSwap(Execution* other) {
1881 if (other == this) return;
1882 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
1883 InternalSwap(other);
1884 }
1885
1886 // implements Message ----------------------------------------------
1887
1888 Execution* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
1889 return CreateMaybeMessage<Execution>(arena);
1890 }
1891 Execution* New() const {
1892 return New(nullptr);
1893 }
1894 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
1895 void CopyFrom(const Execution& from);
1896 void MergeFrom(const Execution& from);
1897 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
1898 bool IsInitialized() const final;
1899
1900 size_t ByteSizeLong() const final;
1901 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
1902 ::uint8_t* _InternalSerialize(
1903 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
1904 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
1905
1906 private:
1907 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
1908 void SharedDtor();
1909 void SetCachedSize(int size) const;
1910 void InternalSwap(Execution* other);
1911
1912 private:
1913 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
1914 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
1915 return "tensorflow.Execution";
1916 }
1917 protected:
1918 explicit Execution(::PROTOBUF_NAMESPACE_ID::Arena* arena,
1919 bool is_message_owned = false);
1920 public:
1921
1922 std::string GetTypeName() const final;
1923
1924 // nested types ----------------------------------------------------
1925
1926 // accessors -------------------------------------------------------
1927
1928 enum : int {
1929 kInputTensorIdsFieldNumber = 4,
1930 kOutputTensorIdsFieldNumber = 5,
1931 kTensorProtosFieldNumber = 7,
1932 kOutputTensorDeviceIdsFieldNumber = 9,
1933 kOpTypeFieldNumber = 1,
1934 kGraphIdFieldNumber = 3,
1935 kCodeLocationFieldNumber = 8,
1936 kNumOutputsFieldNumber = 2,
1937 kTensorDebugModeFieldNumber = 6,
1938 };
1939 // repeated int64 input_tensor_ids = 4;
1940 int input_tensor_ids_size() const;
1941 private:
1942 int _internal_input_tensor_ids_size() const;
1943 public:
1944 void clear_input_tensor_ids();
1945 private:
1946 ::int64_t _internal_input_tensor_ids(int index) const;
1947 const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
1948 _internal_input_tensor_ids() const;
1949 void _internal_add_input_tensor_ids(::int64_t value);
1950 ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
1951 _internal_mutable_input_tensor_ids();
1952 public:
1953 ::int64_t input_tensor_ids(int index) const;
1954 void set_input_tensor_ids(int index, ::int64_t value);
1955 void add_input_tensor_ids(::int64_t value);
1956 const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
1957 input_tensor_ids() const;
1958 ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
1959 mutable_input_tensor_ids();
1960
1961 // repeated int64 output_tensor_ids = 5;
1962 int output_tensor_ids_size() const;
1963 private:
1964 int _internal_output_tensor_ids_size() const;
1965 public:
1966 void clear_output_tensor_ids();
1967 private:
1968 ::int64_t _internal_output_tensor_ids(int index) const;
1969 const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
1970 _internal_output_tensor_ids() const;
1971 void _internal_add_output_tensor_ids(::int64_t value);
1972 ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
1973 _internal_mutable_output_tensor_ids();
1974 public:
1975 ::int64_t output_tensor_ids(int index) const;
1976 void set_output_tensor_ids(int index, ::int64_t value);
1977 void add_output_tensor_ids(::int64_t value);
1978 const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
1979 output_tensor_ids() const;
1980 ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
1981 mutable_output_tensor_ids();
1982
1983 // repeated .tensorflow.TensorProto tensor_protos = 7;
1984 int tensor_protos_size() const;
1985 private:
1986 int _internal_tensor_protos_size() const;
1987 public:
1988 void clear_tensor_protos();
1989 ::tensorflow::TensorProto* mutable_tensor_protos(int index);
1990 ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorProto >*
1991 mutable_tensor_protos();
1992 private:
1993 const ::tensorflow::TensorProto& _internal_tensor_protos(int index) const;
1994 ::tensorflow::TensorProto* _internal_add_tensor_protos();
1995 public:
1996 const ::tensorflow::TensorProto& tensor_protos(int index) const;
1997 ::tensorflow::TensorProto* add_tensor_protos();
1998 const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorProto >&
1999 tensor_protos() const;
2000
2001 // repeated int32 output_tensor_device_ids = 9;
2002 int output_tensor_device_ids_size() const;
2003 private:
2004 int _internal_output_tensor_device_ids_size() const;
2005 public:
2006 void clear_output_tensor_device_ids();
2007 private:
2008 ::int32_t _internal_output_tensor_device_ids(int index) const;
2009 const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >&
2010 _internal_output_tensor_device_ids() const;
2011 void _internal_add_output_tensor_device_ids(::int32_t value);
2012 ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >*
2013 _internal_mutable_output_tensor_device_ids();
2014 public:
2015 ::int32_t output_tensor_device_ids(int index) const;
2016 void set_output_tensor_device_ids(int index, ::int32_t value);
2017 void add_output_tensor_device_ids(::int32_t value);
2018 const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >&
2019 output_tensor_device_ids() const;
2020 ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t >*
2021 mutable_output_tensor_device_ids();
2022
2023 // string op_type = 1;
2024 void clear_op_type();
2025 const std::string& op_type() const;
2026 template <typename ArgT0 = const std::string&, typename... ArgT>
2027 void set_op_type(ArgT0&& arg0, ArgT... args);
2028 std::string* mutable_op_type();
2029 PROTOBUF_NODISCARD std::string* release_op_type();
2030 void set_allocated_op_type(std::string* op_type);
2031 private:
2032 const std::string& _internal_op_type() const;
2033 inline PROTOBUF_ALWAYS_INLINE void _internal_set_op_type(const std::string& value);
2034 std::string* _internal_mutable_op_type();
2035 public:
2036
2037 // string graph_id = 3;
2038 void clear_graph_id();
2039 const std::string& graph_id() const;
2040 template <typename ArgT0 = const std::string&, typename... ArgT>
2041 void set_graph_id(ArgT0&& arg0, ArgT... args);
2042 std::string* mutable_graph_id();
2043 PROTOBUF_NODISCARD std::string* release_graph_id();
2044 void set_allocated_graph_id(std::string* graph_id);
2045 private:
2046 const std::string& _internal_graph_id() const;
2047 inline PROTOBUF_ALWAYS_INLINE void _internal_set_graph_id(const std::string& value);
2048 std::string* _internal_mutable_graph_id();
2049 public:
2050
2051 // .tensorflow.CodeLocation code_location = 8;
2052 bool has_code_location() const;
2053 private:
2054 bool _internal_has_code_location() const;
2055 public:
2056 void clear_code_location();
2057 const ::tensorflow::CodeLocation& code_location() const;
2058 PROTOBUF_NODISCARD ::tensorflow::CodeLocation* release_code_location();
2059 ::tensorflow::CodeLocation* mutable_code_location();
2060 void set_allocated_code_location(::tensorflow::CodeLocation* code_location);
2061 private:
2062 const ::tensorflow::CodeLocation& _internal_code_location() const;
2063 ::tensorflow::CodeLocation* _internal_mutable_code_location();
2064 public:
2065 void unsafe_arena_set_allocated_code_location(
2066 ::tensorflow::CodeLocation* code_location);
2067 ::tensorflow::CodeLocation* unsafe_arena_release_code_location();
2068
2069 // int32 num_outputs = 2;
2070 void clear_num_outputs();
2071 ::int32_t num_outputs() const;
2072 void set_num_outputs(::int32_t value);
2073 private:
2074 ::int32_t _internal_num_outputs() const;
2075 void _internal_set_num_outputs(::int32_t value);
2076 public:
2077
2078 // .tensorflow.TensorDebugMode tensor_debug_mode = 6;
2079 void clear_tensor_debug_mode();
2080 ::tensorflow::TensorDebugMode tensor_debug_mode() const;
2081 void set_tensor_debug_mode(::tensorflow::TensorDebugMode value);
2082 private:
2083 ::tensorflow::TensorDebugMode _internal_tensor_debug_mode() const;
2084 void _internal_set_tensor_debug_mode(::tensorflow::TensorDebugMode value);
2085 public:
2086
2087 // @@protoc_insertion_point(class_scope:tensorflow.Execution)
2088 private:
2089 class _Internal;
2090
2091 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
2092 typedef void InternalArenaConstructable_;
2093 typedef void DestructorSkippable_;
2094 struct Impl_ {
2095 ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t > input_tensor_ids_;
2096 mutable std::atomic<int> _input_tensor_ids_cached_byte_size_;
2097 ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t > output_tensor_ids_;
2098 mutable std::atomic<int> _output_tensor_ids_cached_byte_size_;
2099 ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::TensorProto > tensor_protos_;
2100 ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int32_t > output_tensor_device_ids_;
2101 mutable std::atomic<int> _output_tensor_device_ids_cached_byte_size_;
2102 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr op_type_;
2103 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr graph_id_;
2104 ::tensorflow::CodeLocation* code_location_;
2105 ::int32_t num_outputs_;
2106 int tensor_debug_mode_;
2107 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
2108 };
2109 union { Impl_ _impl_; };
2110 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto;
2111 };
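// Illustrative usage sketch (editorial note, not emitted by protoc): recording an eager
// execution with the repeated and submessage accessors declared above. The TensorDebugMode
// enumerator FULL_TENSOR is assumed from the enum declared earlier in this header; the
// tensor ids and device index are hypothetical.
//
//   tensorflow::Execution execution;
//   execution.set_op_type("MatMul");                        // string op_type = 1
//   execution.set_num_outputs(1);                           // int32 num_outputs = 2
//   execution.add_input_tensor_ids(12);                     // repeated int64 input_tensor_ids = 4
//   execution.add_output_tensor_ids(13);                    // repeated int64 output_tensor_ids = 5
//   execution.set_tensor_debug_mode(tensorflow::FULL_TENSOR);
//   execution.add_tensor_protos();                          // appends an empty TensorProto (field 7)
//   execution.add_output_tensor_device_ids(0);              // index into DebuggedDevice ids (field 9)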
2112 // -------------------------------------------------------------------
2113
2114 class GraphExecutionTrace final :
2115 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.GraphExecutionTrace) */ {
2116 public:
2117 inline GraphExecutionTrace() : GraphExecutionTrace(nullptr) {}
2118 ~GraphExecutionTrace() override;
2119 explicit PROTOBUF_CONSTEXPR GraphExecutionTrace(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
2120
2121 GraphExecutionTrace(const GraphExecutionTrace& from);
2122 GraphExecutionTrace(GraphExecutionTrace&& from) noexcept
2123 : GraphExecutionTrace() {
2124 *this = ::std::move(from);
2125 }
2126
2127 inline GraphExecutionTrace& operator=(const GraphExecutionTrace& from) {
2128 if (this == &from) return *this;
2129 CopyFrom(from);
2130 return *this;
2131 }
2132 inline GraphExecutionTrace& operator=(GraphExecutionTrace&& from) noexcept {
2133 if (this == &from) return *this;
2134 if (GetOwningArena() == from.GetOwningArena()
2135 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
2136 && GetOwningArena() != nullptr
2137 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
2138 ) {
2139 InternalSwap(&from);
2140 } else {
2141 CopyFrom(from);
2142 }
2143 return *this;
2144 }
2145
2146 static const GraphExecutionTrace& default_instance() {
2147 return *internal_default_instance();
2148 }
2149 static inline const GraphExecutionTrace* internal_default_instance() {
2150 return reinterpret_cast<const GraphExecutionTrace*>(
2151 &_GraphExecutionTrace_default_instance_);
2152 }
2153 static constexpr int kIndexInFileMessages =
2154 9;
2155
2156 friend void swap(GraphExecutionTrace& a, GraphExecutionTrace& b) {
2157 a.Swap(&b);
2158 }
2159 inline void Swap(GraphExecutionTrace* other) {
2160 if (other == this) return;
2161 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
2162 if (GetOwningArena() != nullptr &&
2163 GetOwningArena() == other->GetOwningArena()) {
2164 #else // PROTOBUF_FORCE_COPY_IN_SWAP
2165 if (GetOwningArena() == other->GetOwningArena()) {
2166 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
2167 InternalSwap(other);
2168 } else {
2169 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
2170 }
2171 }
2172 void UnsafeArenaSwap(GraphExecutionTrace* other) {
2173 if (other == this) return;
2174 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
2175 InternalSwap(other);
2176 }
2177
2178 // implements Message ----------------------------------------------
2179
2180 GraphExecutionTrace* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
2181 return CreateMaybeMessage<GraphExecutionTrace>(arena);
2182 }
2183 GraphExecutionTrace* New() const {
2184 return New(nullptr);
2185 }
2186 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
2187 void CopyFrom(const GraphExecutionTrace& from);
2188 void MergeFrom(const GraphExecutionTrace& from);
2189 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
2190 bool IsInitialized() const final;
2191
2192 size_t ByteSizeLong() const final;
2193 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
2194 ::uint8_t* _InternalSerialize(
2195 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
2196 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
2197
2198 private:
2199 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
2200 void SharedDtor();
2201 void SetCachedSize(int size) const;
2202 void InternalSwap(GraphExecutionTrace* other);
2203
2204 private:
2205 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
2206 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
2207 return "tensorflow.GraphExecutionTrace";
2208 }
2209 protected:
2210 explicit GraphExecutionTrace(::PROTOBUF_NAMESPACE_ID::Arena* arena,
2211 bool is_message_owned = false);
2212 public:
2213
2214 std::string GetTypeName() const final;
2215
2216 // nested types ----------------------------------------------------
2217
2218 // accessors -------------------------------------------------------
2219
2220 enum : int {
2221 kTfdbgContextIdFieldNumber = 1,
2222 kOpNameFieldNumber = 2,
2223 kDeviceNameFieldNumber = 6,
2224 kTensorProtoFieldNumber = 5,
2225 kOutputSlotFieldNumber = 3,
2226 kTensorDebugModeFieldNumber = 4,
2227 };
2228 // string tfdbg_context_id = 1;
2229 void clear_tfdbg_context_id();
2230 const std::string& tfdbg_context_id() const;
2231 template <typename ArgT0 = const std::string&, typename... ArgT>
2232 void set_tfdbg_context_id(ArgT0&& arg0, ArgT... args);
2233 std::string* mutable_tfdbg_context_id();
2234 PROTOBUF_NODISCARD std::string* release_tfdbg_context_id();
2235 void set_allocated_tfdbg_context_id(std::string* tfdbg_context_id);
2236 private:
2237 const std::string& _internal_tfdbg_context_id() const;
2238 inline PROTOBUF_ALWAYS_INLINE void _internal_set_tfdbg_context_id(const std::string& value);
2239 std::string* _internal_mutable_tfdbg_context_id();
2240 public:
2241
2242 // string op_name = 2;
2243 void clear_op_name();
2244 const std::string& op_name() const;
2245 template <typename ArgT0 = const std::string&, typename... ArgT>
2246 void set_op_name(ArgT0&& arg0, ArgT... args);
2247 std::string* mutable_op_name();
2248 PROTOBUF_NODISCARD std::string* release_op_name();
2249 void set_allocated_op_name(std::string* op_name);
2250 private:
2251 const std::string& _internal_op_name() const;
2252 inline PROTOBUF_ALWAYS_INLINE void _internal_set_op_name(const std::string& value);
2253 std::string* _internal_mutable_op_name();
2254 public:
2255
2256 // string device_name = 6;
2257 void clear_device_name();
2258 const std::string& device_name() const;
2259 template <typename ArgT0 = const std::string&, typename... ArgT>
2260 void set_device_name(ArgT0&& arg0, ArgT... args);
2261 std::string* mutable_device_name();
2262 PROTOBUF_NODISCARD std::string* release_device_name();
2263 void set_allocated_device_name(std::string* device_name);
2264 private:
2265 const std::string& _internal_device_name() const;
2266 inline PROTOBUF_ALWAYS_INLINE void _internal_set_device_name(const std::string& value);
2267 std::string* _internal_mutable_device_name();
2268 public:
2269
2270 // .tensorflow.TensorProto tensor_proto = 5;
2271 bool has_tensor_proto() const;
2272 private:
2273 bool _internal_has_tensor_proto() const;
2274 public:
2275 void clear_tensor_proto();
2276 const ::tensorflow::TensorProto& tensor_proto() const;
2277 PROTOBUF_NODISCARD ::tensorflow::TensorProto* release_tensor_proto();
2278 ::tensorflow::TensorProto* mutable_tensor_proto();
2279 void set_allocated_tensor_proto(::tensorflow::TensorProto* tensor_proto);
2280 private:
2281 const ::tensorflow::TensorProto& _internal_tensor_proto() const;
2282 ::tensorflow::TensorProto* _internal_mutable_tensor_proto();
2283 public:
2284 void unsafe_arena_set_allocated_tensor_proto(
2285 ::tensorflow::TensorProto* tensor_proto);
2286 ::tensorflow::TensorProto* unsafe_arena_release_tensor_proto();
2287
2288 // int32 output_slot = 3;
2289 void clear_output_slot();
2290 ::int32_t output_slot() const;
2291 void set_output_slot(::int32_t value);
2292 private:
2293 ::int32_t _internal_output_slot() const;
2294 void _internal_set_output_slot(::int32_t value);
2295 public:
2296
2297 // .tensorflow.TensorDebugMode tensor_debug_mode = 4;
2298 void clear_tensor_debug_mode();
2299 ::tensorflow::TensorDebugMode tensor_debug_mode() const;
2300 void set_tensor_debug_mode(::tensorflow::TensorDebugMode value);
2301 private:
2302 ::tensorflow::TensorDebugMode _internal_tensor_debug_mode() const;
2303 void _internal_set_tensor_debug_mode(::tensorflow::TensorDebugMode value);
2304 public:
2305
2306 // @@protoc_insertion_point(class_scope:tensorflow.GraphExecutionTrace)
2307 private:
2308 class _Internal;
2309
2310 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
2311 typedef void InternalArenaConstructable_;
2312 typedef void DestructorSkippable_;
2313 struct Impl_ {
2314 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr tfdbg_context_id_;
2315 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr op_name_;
2316 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr device_name_;
2317 ::tensorflow::TensorProto* tensor_proto_;
2318 ::int32_t output_slot_;
2319 int tensor_debug_mode_;
2320 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
2321 };
2322 union { Impl_ _impl_; };
2323 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fdebug_5fevent_2eproto;
2324 };
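// Illustrative usage sketch (editorial note, not emitted by protoc): a graph-mode trace
// keyed by the tfdbg context id, using the accessors declared above. Values are hypothetical.
//
//   tensorflow::GraphExecutionTrace trace;
//   trace.set_tfdbg_context_id("ctx_42");         // string tfdbg_context_id = 1
//   trace.set_op_name("dense/MatMul");            // string op_name = 2
//   trace.set_output_slot(0);                     // int32 output_slot = 3
//   trace.mutable_tensor_proto();                 // lazily creates the TensorProto payload (field 5)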
2325 // ===================================================================
2326
2327
2328 // ===================================================================
2329
2330 #ifdef __GNUC__
2331 #pragma GCC diagnostic push
2332 #pragma GCC diagnostic ignored "-Wstrict-aliasing"
2333 #endif // __GNUC__
2334 // DebugEvent
2335
2336 // double wall_time = 1;
2337 inline void DebugEvent::clear_wall_time() {
2338 _impl_.wall_time_ = 0;
2339 }
2340 inline double DebugEvent::_internal_wall_time() const {
2341 return _impl_.wall_time_;
2342 }
2343 inline double DebugEvent::wall_time() const {
2344 // @@protoc_insertion_point(field_get:tensorflow.DebugEvent.wall_time)
2345 return _internal_wall_time();
2346 }
2347 inline void DebugEvent::_internal_set_wall_time(double value) {
2348
2349 _impl_.wall_time_ = value;
2350 }
2351 inline void DebugEvent::set_wall_time(double value) {
2352 _internal_set_wall_time(value);
2353 // @@protoc_insertion_point(field_set:tensorflow.DebugEvent.wall_time)
2354 }
2355
2356 // int64 step = 2;
2357 inline void DebugEvent::clear_step() {
2358 _impl_.step_ = ::int64_t{0};
2359 }
2360 inline ::int64_t DebugEvent::_internal_step() const {
2361 return _impl_.step_;
2362 }
2363 inline ::int64_t DebugEvent::step() const {
2364 // @@protoc_insertion_point(field_get:tensorflow.DebugEvent.step)
2365 return _internal_step();
2366 }
2367 inline void DebugEvent::_internal_set_step(::int64_t value) {
2368
2369 _impl_.step_ = value;
2370 }
2371 inline void DebugEvent::set_step(::int64_t value) {
2372 _internal_set_step(value);
2373 // @@protoc_insertion_point(field_set:tensorflow.DebugEvent.step)
2374 }
2375
2376 // .tensorflow.DebugMetadata debug_metadata = 3;
2377 inline bool DebugEvent::_internal_has_debug_metadata() const {
2378 return what_case() == kDebugMetadata;
2379 }
2380 inline bool DebugEvent::has_debug_metadata() const {
2381 return _internal_has_debug_metadata();
2382 }
2383 inline void DebugEvent::set_has_debug_metadata() {
2384 _impl_._oneof_case_[0] = kDebugMetadata;
2385 }
2386 inline void DebugEvent::clear_debug_metadata() {
2387 if (_internal_has_debug_metadata()) {
2388 if (GetArenaForAllocation() == nullptr) {
2389 delete _impl_.what_.debug_metadata_;
2390 }
2391 clear_has_what();
2392 }
2393 }
2394 inline ::tensorflow::DebugMetadata* DebugEvent::release_debug_metadata() {
2395 // @@protoc_insertion_point(field_release:tensorflow.DebugEvent.debug_metadata)
2396 if (_internal_has_debug_metadata()) {
2397 clear_has_what();
2398 ::tensorflow::DebugMetadata* temp = _impl_.what_.debug_metadata_;
2399 if (GetArenaForAllocation() != nullptr) {
2400 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
2401 }
2402 _impl_.what_.debug_metadata_ = nullptr;
2403 return temp;
2404 } else {
2405 return nullptr;
2406 }
2407 }
2408 inline const ::tensorflow::DebugMetadata& DebugEvent::_internal_debug_metadata() const {
2409 return _internal_has_debug_metadata()
2410 ? *_impl_.what_.debug_metadata_
2411 : reinterpret_cast< ::tensorflow::DebugMetadata&>(::tensorflow::_DebugMetadata_default_instance_);
2412 }
2413 inline const ::tensorflow::DebugMetadata& DebugEvent::debug_metadata() const {
2414 // @@protoc_insertion_point(field_get:tensorflow.DebugEvent.debug_metadata)
2415 return _internal_debug_metadata();
2416 }
2417 inline ::tensorflow::DebugMetadata* DebugEvent::unsafe_arena_release_debug_metadata() {
2418 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.DebugEvent.debug_metadata)
2419 if (_internal_has_debug_metadata()) {
2420 clear_has_what();
2421 ::tensorflow::DebugMetadata* temp = _impl_.what_.debug_metadata_;
2422 _impl_.what_.debug_metadata_ = nullptr;
2423 return temp;
2424 } else {
2425 return nullptr;
2426 }
2427 }
2428 inline void DebugEvent::unsafe_arena_set_allocated_debug_metadata(::tensorflow::DebugMetadata* debug_metadata) {
2429 clear_what();
2430 if (debug_metadata) {
2431 set_has_debug_metadata();
2432 _impl_.what_.debug_metadata_ = debug_metadata;
2433 }
2434 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.DebugEvent.debug_metadata)
2435 }
2436 inline ::tensorflow::DebugMetadata* DebugEvent::_internal_mutable_debug_metadata() {
2437 if (!_internal_has_debug_metadata()) {
2438 clear_what();
2439 set_has_debug_metadata();
2440 _impl_.what_.debug_metadata_ = CreateMaybeMessage< ::tensorflow::DebugMetadata >(GetArenaForAllocation());
2441 }
2442 return _impl_.what_.debug_metadata_;
2443 }
2444 inline ::tensorflow::DebugMetadata* DebugEvent::mutable_debug_metadata() {
2445 ::tensorflow::DebugMetadata* _msg = _internal_mutable_debug_metadata();
2446 // @@protoc_insertion_point(field_mutable:tensorflow.DebugEvent.debug_metadata)
2447 return _msg;
2448 }
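// Illustrative usage sketch (editorial note, not emitted by protoc): debug_metadata is one
// member of the `what` oneof, so setting it clears any previously set member.
// DebugMetadata::set_tfdbg_run_id is assumed from the DebugMetadata declaration earlier in
// this header; the values below are hypothetical.
//
//   tensorflow::DebugEvent event;
//   event.set_wall_time(1699999999.25);                         // double wall_time = 1
//   event.set_step(42);                                         // int64 step = 2
//   event.mutable_debug_metadata()->set_tfdbg_run_id("run_1");  // selects the kDebugMetadata case
//   // event.what_case() == tensorflow::DebugEvent::kDebugMetadata
//   // event.has_debug_metadata() == true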
2449
2450 // .tensorflow.SourceFile source_file = 4;
2451 inline bool DebugEvent::_internal_has_source_file() const {
2452 return what_case() == kSourceFile;
2453 }
2454 inline bool DebugEvent::has_source_file() const {
2455 return _internal_has_source_file();
2456 }
2457 inline void DebugEvent::set_has_source_file() {
2458 _impl_._oneof_case_[0] = kSourceFile;
2459 }
2460 inline void DebugEvent::clear_source_file() {
2461 if (_internal_has_source_file()) {
2462 if (GetArenaForAllocation() == nullptr) {
2463 delete _impl_.what_.source_file_;
2464 }
2465 clear_has_what();
2466 }
2467 }
2468 inline ::tensorflow::SourceFile* DebugEvent::release_source_file() {
2469 // @@protoc_insertion_point(field_release:tensorflow.DebugEvent.source_file)
2470 if (_internal_has_source_file()) {
2471 clear_has_what();
2472 ::tensorflow::SourceFile* temp = _impl_.what_.source_file_;
2473 if (GetArenaForAllocation() != nullptr) {
2474 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
2475 }
2476 _impl_.what_.source_file_ = nullptr;
2477 return temp;
2478 } else {
2479 return nullptr;
2480 }
2481 }
2482 inline const ::tensorflow::SourceFile& DebugEvent::_internal_source_file() const {
2483 return _internal_has_source_file()
2484 ? *_impl_.what_.source_file_
2485 : reinterpret_cast< ::tensorflow::SourceFile&>(::tensorflow::_SourceFile_default_instance_);
2486 }
2487 inline const ::tensorflow::SourceFile& DebugEvent::source_file() const {
2488 // @@protoc_insertion_point(field_get:tensorflow.DebugEvent.source_file)
2489 return _internal_source_file();
2490 }
2491 inline ::tensorflow::SourceFile* DebugEvent::unsafe_arena_release_source_file() {
2492 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.DebugEvent.source_file)
2493 if (_internal_has_source_file()) {
2494 clear_has_what();
2495 ::tensorflow::SourceFile* temp = _impl_.what_.source_file_;
2496 _impl_.what_.source_file_ = nullptr;
2497 return temp;
2498 } else {
2499 return nullptr;
2500 }
2501 }
2502 inline void DebugEvent::unsafe_arena_set_allocated_source_file(::tensorflow::SourceFile* source_file) {
2503 clear_what();
2504 if (source_file) {
2505 set_has_source_file();
2506 _impl_.what_.source_file_ = source_file;
2507 }
2508 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.DebugEvent.source_file)
2509 }
2510 inline ::tensorflow::SourceFile* DebugEvent::_internal_mutable_source_file() {
2511 if (!_internal_has_source_file()) {
2512 clear_what();
2513 set_has_source_file();
2514 _impl_.what_.source_file_ = CreateMaybeMessage< ::tensorflow::SourceFile >(GetArenaForAllocation());
2515 }
2516 return _impl_.what_.source_file_;
2517 }
2518 inline ::tensorflow::SourceFile* DebugEvent::mutable_source_file() {
2519 ::tensorflow::SourceFile* _msg = _internal_mutable_source_file();
2520 // @@protoc_insertion_point(field_mutable:tensorflow.DebugEvent.source_file)
2521 return _msg;
2522 }
2523
2524 // .tensorflow.StackFrameWithId stack_frame_with_id = 6;
2525 inline bool DebugEvent::_internal_has_stack_frame_with_id() const {
2526 return what_case() == kStackFrameWithId;
2527 }
2528 inline bool DebugEvent::has_stack_frame_with_id() const {
2529 return _internal_has_stack_frame_with_id();
2530 }
2531 inline void DebugEvent::set_has_stack_frame_with_id() {
2532 _impl_._oneof_case_[0] = kStackFrameWithId;
2533 }
2534 inline void DebugEvent::clear_stack_frame_with_id() {
2535 if (_internal_has_stack_frame_with_id()) {
2536 if (GetArenaForAllocation() == nullptr) {
2537 delete _impl_.what_.stack_frame_with_id_;
2538 }
2539 clear_has_what();
2540 }
2541 }
2542 inline ::tensorflow::StackFrameWithId* DebugEvent::release_stack_frame_with_id() {
2543 // @@protoc_insertion_point(field_release:tensorflow.DebugEvent.stack_frame_with_id)
2544 if (_internal_has_stack_frame_with_id()) {
2545 clear_has_what();
2546 ::tensorflow::StackFrameWithId* temp = _impl_.what_.stack_frame_with_id_;
2547 if (GetArenaForAllocation() != nullptr) {
2548 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
2549 }
2550 _impl_.what_.stack_frame_with_id_ = nullptr;
2551 return temp;
2552 } else {
2553 return nullptr;
2554 }
2555 }
2556 inline const ::tensorflow::StackFrameWithId& DebugEvent::_internal_stack_frame_with_id() const {
2557 return _internal_has_stack_frame_with_id()
2558 ? *_impl_.what_.stack_frame_with_id_
2559 : reinterpret_cast< ::tensorflow::StackFrameWithId&>(::tensorflow::_StackFrameWithId_default_instance_);
2560 }
2561 inline const ::tensorflow::StackFrameWithId& DebugEvent::stack_frame_with_id() const {
2562 // @@protoc_insertion_point(field_get:tensorflow.DebugEvent.stack_frame_with_id)
2563 return _internal_stack_frame_with_id();
2564 }
2565 inline ::tensorflow::StackFrameWithId* DebugEvent::unsafe_arena_release_stack_frame_with_id() {
2566 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.DebugEvent.stack_frame_with_id)
2567 if (_internal_has_stack_frame_with_id()) {
2568 clear_has_what();
2569 ::tensorflow::StackFrameWithId* temp = _impl_.what_.stack_frame_with_id_;
2570 _impl_.what_.stack_frame_with_id_ = nullptr;
2571 return temp;
2572 } else {
2573 return nullptr;
2574 }
2575 }
2576 inline void DebugEvent::unsafe_arena_set_allocated_stack_frame_with_id(::tensorflow::StackFrameWithId* stack_frame_with_id) {
2577 clear_what();
2578 if (stack_frame_with_id) {
2579 set_has_stack_frame_with_id();
2580 _impl_.what_.stack_frame_with_id_ = stack_frame_with_id;
2581 }
2582 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.DebugEvent.stack_frame_with_id)
2583 }
2584 inline ::tensorflow::StackFrameWithId* DebugEvent::_internal_mutable_stack_frame_with_id() {
2585 if (!_internal_has_stack_frame_with_id()) {
2586 clear_what();
2587 set_has_stack_frame_with_id();
2588 _impl_.what_.stack_frame_with_id_ = CreateMaybeMessage< ::tensorflow::StackFrameWithId >(GetArenaForAllocation());
2589 }
2590 return _impl_.what_.stack_frame_with_id_;
2591 }
2592 inline ::tensorflow::StackFrameWithId* DebugEvent::mutable_stack_frame_with_id() {
2593 ::tensorflow::StackFrameWithId* _msg = _internal_mutable_stack_frame_with_id();
2594 // @@protoc_insertion_point(field_mutable:tensorflow.DebugEvent.stack_frame_with_id)
2595 return _msg;
2596 }
2597
2598 // .tensorflow.GraphOpCreation graph_op_creation = 7;
2599 inline bool DebugEvent::_internal_has_graph_op_creation() const {
2600 return what_case() == kGraphOpCreation;
2601 }
2602 inline bool DebugEvent::has_graph_op_creation() const {
2603 return _internal_has_graph_op_creation();
2604 }
2605 inline void DebugEvent::set_has_graph_op_creation() {
2606 _impl_._oneof_case_[0] = kGraphOpCreation;
2607 }
2608 inline void DebugEvent::clear_graph_op_creation() {
2609 if (_internal_has_graph_op_creation()) {
2610 if (GetArenaForAllocation() == nullptr) {
2611 delete _impl_.what_.graph_op_creation_;
2612 }
2613 clear_has_what();
2614 }
2615 }
2616 inline ::tensorflow::GraphOpCreation* DebugEvent::release_graph_op_creation() {
2617 // @@protoc_insertion_point(field_release:tensorflow.DebugEvent.graph_op_creation)
2618 if (_internal_has_graph_op_creation()) {
2619 clear_has_what();
2620 ::tensorflow::GraphOpCreation* temp = _impl_.what_.graph_op_creation_;
2621 if (GetArenaForAllocation() != nullptr) {
2622 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
2623 }
2624 _impl_.what_.graph_op_creation_ = nullptr;
2625 return temp;
2626 } else {
2627 return nullptr;
2628 }
2629 }
2630 inline const ::tensorflow::GraphOpCreation& DebugEvent::_internal_graph_op_creation() const {
2631 return _internal_has_graph_op_creation()
2632 ? *_impl_.what_.graph_op_creation_
2633 : reinterpret_cast< ::tensorflow::GraphOpCreation&>(::tensorflow::_GraphOpCreation_default_instance_);
2634 }
2635 inline const ::tensorflow::GraphOpCreation& DebugEvent::graph_op_creation() const {
2636 // @@protoc_insertion_point(field_get:tensorflow.DebugEvent.graph_op_creation)
2637 return _internal_graph_op_creation();
2638 }
2639 inline ::tensorflow::GraphOpCreation* DebugEvent::unsafe_arena_release_graph_op_creation() {
2640 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.DebugEvent.graph_op_creation)
2641 if (_internal_has_graph_op_creation()) {
2642 clear_has_what();
2643 ::tensorflow::GraphOpCreation* temp = _impl_.what_.graph_op_creation_;
2644 _impl_.what_.graph_op_creation_ = nullptr;
2645 return temp;
2646 } else {
2647 return nullptr;
2648 }
2649 }
2650 inline void DebugEvent::unsafe_arena_set_allocated_graph_op_creation(::tensorflow::GraphOpCreation* graph_op_creation) {
2651 clear_what();
2652 if (graph_op_creation) {
2653 set_has_graph_op_creation();
2654 _impl_.what_.graph_op_creation_ = graph_op_creation;
2655 }
2656 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.DebugEvent.graph_op_creation)
2657 }
2658 inline ::tensorflow::GraphOpCreation* DebugEvent::_internal_mutable_graph_op_creation() {
2659 if (!_internal_has_graph_op_creation()) {
2660 clear_what();
2661 set_has_graph_op_creation();
2662 _impl_.what_.graph_op_creation_ = CreateMaybeMessage< ::tensorflow::GraphOpCreation >(GetArenaForAllocation());
2663 }
2664 return _impl_.what_.graph_op_creation_;
2665 }
2666 inline ::tensorflow::GraphOpCreation* DebugEvent::mutable_graph_op_creation() {
2667 ::tensorflow::GraphOpCreation* _msg = _internal_mutable_graph_op_creation();
2668 // @@protoc_insertion_point(field_mutable:tensorflow.DebugEvent.graph_op_creation)
2669 return _msg;
2670 }
2671
2672 // .tensorflow.DebuggedGraph debugged_graph = 8;
2673 inline bool DebugEvent::_internal_has_debugged_graph() const {
2674 return what_case() == kDebuggedGraph;
2675 }
2676 inline bool DebugEvent::has_debugged_graph() const {
2677 return _internal_has_debugged_graph();
2678 }
2679 inline void DebugEvent::set_has_debugged_graph() {
2680 _impl_._oneof_case_[0] = kDebuggedGraph;
2681 }
2682 inline void DebugEvent::clear_debugged_graph() {
2683 if (_internal_has_debugged_graph()) {
2684 if (GetArenaForAllocation() == nullptr) {
2685 delete _impl_.what_.debugged_graph_;
2686 }
2687 clear_has_what();
2688 }
2689 }
2690 inline ::tensorflow::DebuggedGraph* DebugEvent::release_debugged_graph() {
2691 // @@protoc_insertion_point(field_release:tensorflow.DebugEvent.debugged_graph)
2692 if (_internal_has_debugged_graph()) {
2693 clear_has_what();
2694 ::tensorflow::DebuggedGraph* temp = _impl_.what_.debugged_graph_;
2695 if (GetArenaForAllocation() != nullptr) {
2696 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
2697 }
2698 _impl_.what_.debugged_graph_ = nullptr;
2699 return temp;
2700 } else {
2701 return nullptr;
2702 }
2703 }
2704 inline const ::tensorflow::DebuggedGraph& DebugEvent::_internal_debugged_graph() const {
2705 return _internal_has_debugged_graph()
2706 ? *_impl_.what_.debugged_graph_
2707 : reinterpret_cast< ::tensorflow::DebuggedGraph&>(::tensorflow::_DebuggedGraph_default_instance_);
2708 }
2709 inline const ::tensorflow::DebuggedGraph& DebugEvent::debugged_graph() const {
2710 // @@protoc_insertion_point(field_get:tensorflow.DebugEvent.debugged_graph)
2711 return _internal_debugged_graph();
2712 }
2713 inline ::tensorflow::DebuggedGraph* DebugEvent::unsafe_arena_release_debugged_graph() {
2714 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.DebugEvent.debugged_graph)
2715 if (_internal_has_debugged_graph()) {
2716 clear_has_what();
2717 ::tensorflow::DebuggedGraph* temp = _impl_.what_.debugged_graph_;
2718 _impl_.what_.debugged_graph_ = nullptr;
2719 return temp;
2720 } else {
2721 return nullptr;
2722 }
2723 }
2724 inline void DebugEvent::unsafe_arena_set_allocated_debugged_graph(::tensorflow::DebuggedGraph* debugged_graph) {
2725 clear_what();
2726 if (debugged_graph) {
2727 set_has_debugged_graph();
2728 _impl_.what_.debugged_graph_ = debugged_graph;
2729 }
2730 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.DebugEvent.debugged_graph)
2731 }
2732 inline ::tensorflow::DebuggedGraph* DebugEvent::_internal_mutable_debugged_graph() {
2733 if (!_internal_has_debugged_graph()) {
2734 clear_what();
2735 set_has_debugged_graph();
2736 _impl_.what_.debugged_graph_ = CreateMaybeMessage< ::tensorflow::DebuggedGraph >(GetArenaForAllocation());
2737 }
2738 return _impl_.what_.debugged_graph_;
2739 }
2740 inline ::tensorflow::DebuggedGraph* DebugEvent::mutable_debugged_graph() {
2741 ::tensorflow::DebuggedGraph* _msg = _internal_mutable_debugged_graph();
2742 // @@protoc_insertion_point(field_mutable:tensorflow.DebugEvent.debugged_graph)
2743 return _msg;
2744 }
2745
2746 // .tensorflow.Execution execution = 9;
2747 inline bool DebugEvent::_internal_has_execution() const {
2748 return what_case() == kExecution;
2749 }
2750 inline bool DebugEvent::has_execution() const {
2751 return _internal_has_execution();
2752 }
2753 inline void DebugEvent::set_has_execution() {
2754 _impl_._oneof_case_[0] = kExecution;
2755 }
clear_execution()2756 inline void DebugEvent::clear_execution() {
2757 if (_internal_has_execution()) {
2758 if (GetArenaForAllocation() == nullptr) {
2759 delete _impl_.what_.execution_;
2760 }
2761 clear_has_what();
2762 }
2763 }
release_execution()2764 inline ::tensorflow::Execution* DebugEvent::release_execution() {
2765 // @@protoc_insertion_point(field_release:tensorflow.DebugEvent.execution)
2766 if (_internal_has_execution()) {
2767 clear_has_what();
2768 ::tensorflow::Execution* temp = _impl_.what_.execution_;
2769 if (GetArenaForAllocation() != nullptr) {
2770 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
2771 }
2772 _impl_.what_.execution_ = nullptr;
2773 return temp;
2774 } else {
2775 return nullptr;
2776 }
2777 }
_internal_execution()2778 inline const ::tensorflow::Execution& DebugEvent::_internal_execution() const {
2779 return _internal_has_execution()
2780 ? *_impl_.what_.execution_
2781 : reinterpret_cast< ::tensorflow::Execution&>(::tensorflow::_Execution_default_instance_);
2782 }
execution()2783 inline const ::tensorflow::Execution& DebugEvent::execution() const {
2784 // @@protoc_insertion_point(field_get:tensorflow.DebugEvent.execution)
2785 return _internal_execution();
2786 }
unsafe_arena_release_execution()2787 inline ::tensorflow::Execution* DebugEvent::unsafe_arena_release_execution() {
2788 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.DebugEvent.execution)
2789 if (_internal_has_execution()) {
2790 clear_has_what();
2791 ::tensorflow::Execution* temp = _impl_.what_.execution_;
2792 _impl_.what_.execution_ = nullptr;
2793 return temp;
2794 } else {
2795 return nullptr;
2796 }
2797 }
unsafe_arena_set_allocated_execution(::tensorflow::Execution * execution)2798 inline void DebugEvent::unsafe_arena_set_allocated_execution(::tensorflow::Execution* execution) {
2799 clear_what();
2800 if (execution) {
2801 set_has_execution();
2802 _impl_.what_.execution_ = execution;
2803 }
2804 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.DebugEvent.execution)
2805 }
_internal_mutable_execution()2806 inline ::tensorflow::Execution* DebugEvent::_internal_mutable_execution() {
2807 if (!_internal_has_execution()) {
2808 clear_what();
2809 set_has_execution();
2810 _impl_.what_.execution_ = CreateMaybeMessage< ::tensorflow::Execution >(GetArenaForAllocation());
2811 }
2812 return _impl_.what_.execution_;
2813 }
mutable_execution()2814 inline ::tensorflow::Execution* DebugEvent::mutable_execution() {
2815 ::tensorflow::Execution* _msg = _internal_mutable_execution();
2816 // @@protoc_insertion_point(field_mutable:tensorflow.DebugEvent.execution)
2817 return _msg;
2818 }
2819
2820 // .tensorflow.GraphExecutionTrace graph_execution_trace = 10;
_internal_has_graph_execution_trace()2821 inline bool DebugEvent::_internal_has_graph_execution_trace() const {
2822 return what_case() == kGraphExecutionTrace;
2823 }
has_graph_execution_trace()2824 inline bool DebugEvent::has_graph_execution_trace() const {
2825 return _internal_has_graph_execution_trace();
2826 }
set_has_graph_execution_trace()2827 inline void DebugEvent::set_has_graph_execution_trace() {
2828 _impl_._oneof_case_[0] = kGraphExecutionTrace;
2829 }
clear_graph_execution_trace()2830 inline void DebugEvent::clear_graph_execution_trace() {
2831 if (_internal_has_graph_execution_trace()) {
2832 if (GetArenaForAllocation() == nullptr) {
2833 delete _impl_.what_.graph_execution_trace_;
2834 }
2835 clear_has_what();
2836 }
2837 }
release_graph_execution_trace()2838 inline ::tensorflow::GraphExecutionTrace* DebugEvent::release_graph_execution_trace() {
2839 // @@protoc_insertion_point(field_release:tensorflow.DebugEvent.graph_execution_trace)
2840 if (_internal_has_graph_execution_trace()) {
2841 clear_has_what();
2842 ::tensorflow::GraphExecutionTrace* temp = _impl_.what_.graph_execution_trace_;
2843 if (GetArenaForAllocation() != nullptr) {
2844 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
2845 }
2846 _impl_.what_.graph_execution_trace_ = nullptr;
2847 return temp;
2848 } else {
2849 return nullptr;
2850 }
2851 }
_internal_graph_execution_trace()2852 inline const ::tensorflow::GraphExecutionTrace& DebugEvent::_internal_graph_execution_trace() const {
2853 return _internal_has_graph_execution_trace()
2854 ? *_impl_.what_.graph_execution_trace_
2855 : reinterpret_cast< ::tensorflow::GraphExecutionTrace&>(::tensorflow::_GraphExecutionTrace_default_instance_);
2856 }
graph_execution_trace()2857 inline const ::tensorflow::GraphExecutionTrace& DebugEvent::graph_execution_trace() const {
2858 // @@protoc_insertion_point(field_get:tensorflow.DebugEvent.graph_execution_trace)
2859 return _internal_graph_execution_trace();
2860 }
unsafe_arena_release_graph_execution_trace()2861 inline ::tensorflow::GraphExecutionTrace* DebugEvent::unsafe_arena_release_graph_execution_trace() {
2862 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.DebugEvent.graph_execution_trace)
2863 if (_internal_has_graph_execution_trace()) {
2864 clear_has_what();
2865 ::tensorflow::GraphExecutionTrace* temp = _impl_.what_.graph_execution_trace_;
2866 _impl_.what_.graph_execution_trace_ = nullptr;
2867 return temp;
2868 } else {
2869 return nullptr;
2870 }
2871 }
unsafe_arena_set_allocated_graph_execution_trace(::tensorflow::GraphExecutionTrace * graph_execution_trace)2872 inline void DebugEvent::unsafe_arena_set_allocated_graph_execution_trace(::tensorflow::GraphExecutionTrace* graph_execution_trace) {
2873 clear_what();
2874 if (graph_execution_trace) {
2875 set_has_graph_execution_trace();
2876 _impl_.what_.graph_execution_trace_ = graph_execution_trace;
2877 }
2878 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.DebugEvent.graph_execution_trace)
2879 }
_internal_mutable_graph_execution_trace()2880 inline ::tensorflow::GraphExecutionTrace* DebugEvent::_internal_mutable_graph_execution_trace() {
2881 if (!_internal_has_graph_execution_trace()) {
2882 clear_what();
2883 set_has_graph_execution_trace();
2884 _impl_.what_.graph_execution_trace_ = CreateMaybeMessage< ::tensorflow::GraphExecutionTrace >(GetArenaForAllocation());
2885 }
2886 return _impl_.what_.graph_execution_trace_;
2887 }
mutable_graph_execution_trace()2888 inline ::tensorflow::GraphExecutionTrace* DebugEvent::mutable_graph_execution_trace() {
2889 ::tensorflow::GraphExecutionTrace* _msg = _internal_mutable_graph_execution_trace();
2890 // @@protoc_insertion_point(field_mutable:tensorflow.DebugEvent.graph_execution_trace)
2891 return _msg;
2892 }
2893
2894 // string graph_id = 11;
_internal_has_graph_id()2895 inline bool DebugEvent::_internal_has_graph_id() const {
2896 return what_case() == kGraphId;
2897 }
has_graph_id()2898 inline bool DebugEvent::has_graph_id() const {
2899 return _internal_has_graph_id();
2900 }
set_has_graph_id()2901 inline void DebugEvent::set_has_graph_id() {
2902 _impl_._oneof_case_[0] = kGraphId;
2903 }
clear_graph_id()2904 inline void DebugEvent::clear_graph_id() {
2905 if (_internal_has_graph_id()) {
2906 _impl_.what_.graph_id_.Destroy();
2907 clear_has_what();
2908 }
2909 }
graph_id()2910 inline const std::string& DebugEvent::graph_id() const {
2911 // @@protoc_insertion_point(field_get:tensorflow.DebugEvent.graph_id)
2912 return _internal_graph_id();
2913 }
2914 template <typename ArgT0, typename... ArgT>
set_graph_id(ArgT0 && arg0,ArgT...args)2915 inline void DebugEvent::set_graph_id(ArgT0&& arg0, ArgT... args) {
2916 if (!_internal_has_graph_id()) {
2917 clear_what();
2918 set_has_graph_id();
2919 _impl_.what_.graph_id_.InitDefault();
2920 }
2921 _impl_.what_.graph_id_.Set( static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
2922 // @@protoc_insertion_point(field_set:tensorflow.DebugEvent.graph_id)
2923 }
mutable_graph_id()2924 inline std::string* DebugEvent::mutable_graph_id() {
2925 std::string* _s = _internal_mutable_graph_id();
2926 // @@protoc_insertion_point(field_mutable:tensorflow.DebugEvent.graph_id)
2927 return _s;
2928 }
_internal_graph_id()2929 inline const std::string& DebugEvent::_internal_graph_id() const {
2930 if (_internal_has_graph_id()) {
2931 return _impl_.what_.graph_id_.Get();
2932 }
2933 return ::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited();
2934 }
_internal_set_graph_id(const std::string & value)2935 inline void DebugEvent::_internal_set_graph_id(const std::string& value) {
2936 if (!_internal_has_graph_id()) {
2937 clear_what();
2938 set_has_graph_id();
2939 _impl_.what_.graph_id_.InitDefault();
2940 }
2941 _impl_.what_.graph_id_.Set(value, GetArenaForAllocation());
2942 }
_internal_mutable_graph_id()2943 inline std::string* DebugEvent::_internal_mutable_graph_id() {
2944 if (!_internal_has_graph_id()) {
2945 clear_what();
2946 set_has_graph_id();
2947 _impl_.what_.graph_id_.InitDefault();
2948 }
2949 return _impl_.what_.graph_id_.Mutable( GetArenaForAllocation());
2950 }
release_graph_id()2951 inline std::string* DebugEvent::release_graph_id() {
2952 // @@protoc_insertion_point(field_release:tensorflow.DebugEvent.graph_id)
2953 if (_internal_has_graph_id()) {
2954 clear_has_what();
2955 return _impl_.what_.graph_id_.Release();
2956 } else {
2957 return nullptr;
2958 }
2959 }
set_allocated_graph_id(std::string * graph_id)2960 inline void DebugEvent::set_allocated_graph_id(std::string* graph_id) {
2961 if (has_what()) {
2962 clear_what();
2963 }
2964 if (graph_id != nullptr) {
2965 set_has_graph_id();
2966 _impl_.what_.graph_id_.InitAllocated(graph_id, GetArenaForAllocation());
2967 }
2968 // @@protoc_insertion_point(field_set_allocated:tensorflow.DebugEvent.graph_id)
2969 }
2970
2971 // .tensorflow.DebuggedDevice debugged_device = 12;
_internal_has_debugged_device()2972 inline bool DebugEvent::_internal_has_debugged_device() const {
2973 return what_case() == kDebuggedDevice;
2974 }
has_debugged_device()2975 inline bool DebugEvent::has_debugged_device() const {
2976 return _internal_has_debugged_device();
2977 }
set_has_debugged_device()2978 inline void DebugEvent::set_has_debugged_device() {
2979 _impl_._oneof_case_[0] = kDebuggedDevice;
2980 }
clear_debugged_device()2981 inline void DebugEvent::clear_debugged_device() {
2982 if (_internal_has_debugged_device()) {
2983 if (GetArenaForAllocation() == nullptr) {
2984 delete _impl_.what_.debugged_device_;
2985 }
2986 clear_has_what();
2987 }
2988 }
release_debugged_device()2989 inline ::tensorflow::DebuggedDevice* DebugEvent::release_debugged_device() {
2990 // @@protoc_insertion_point(field_release:tensorflow.DebugEvent.debugged_device)
2991 if (_internal_has_debugged_device()) {
2992 clear_has_what();
2993 ::tensorflow::DebuggedDevice* temp = _impl_.what_.debugged_device_;
2994 if (GetArenaForAllocation() != nullptr) {
2995 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
2996 }
2997 _impl_.what_.debugged_device_ = nullptr;
2998 return temp;
2999 } else {
3000 return nullptr;
3001 }
3002 }
_internal_debugged_device()3003 inline const ::tensorflow::DebuggedDevice& DebugEvent::_internal_debugged_device() const {
3004 return _internal_has_debugged_device()
3005 ? *_impl_.what_.debugged_device_
3006 : reinterpret_cast< ::tensorflow::DebuggedDevice&>(::tensorflow::_DebuggedDevice_default_instance_);
3007 }
debugged_device()3008 inline const ::tensorflow::DebuggedDevice& DebugEvent::debugged_device() const {
3009 // @@protoc_insertion_point(field_get:tensorflow.DebugEvent.debugged_device)
3010 return _internal_debugged_device();
3011 }
unsafe_arena_release_debugged_device()3012 inline ::tensorflow::DebuggedDevice* DebugEvent::unsafe_arena_release_debugged_device() {
3013 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.DebugEvent.debugged_device)
3014 if (_internal_has_debugged_device()) {
3015 clear_has_what();
3016 ::tensorflow::DebuggedDevice* temp = _impl_.what_.debugged_device_;
3017 _impl_.what_.debugged_device_ = nullptr;
3018 return temp;
3019 } else {
3020 return nullptr;
3021 }
3022 }
unsafe_arena_set_allocated_debugged_device(::tensorflow::DebuggedDevice * debugged_device)3023 inline void DebugEvent::unsafe_arena_set_allocated_debugged_device(::tensorflow::DebuggedDevice* debugged_device) {
3024 clear_what();
3025 if (debugged_device) {
3026 set_has_debugged_device();
3027 _impl_.what_.debugged_device_ = debugged_device;
3028 }
3029 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.DebugEvent.debugged_device)
3030 }
_internal_mutable_debugged_device()3031 inline ::tensorflow::DebuggedDevice* DebugEvent::_internal_mutable_debugged_device() {
3032 if (!_internal_has_debugged_device()) {
3033 clear_what();
3034 set_has_debugged_device();
3035 _impl_.what_.debugged_device_ = CreateMaybeMessage< ::tensorflow::DebuggedDevice >(GetArenaForAllocation());
3036 }
3037 return _impl_.what_.debugged_device_;
3038 }
mutable_debugged_device()3039 inline ::tensorflow::DebuggedDevice* DebugEvent::mutable_debugged_device() {
3040 ::tensorflow::DebuggedDevice* _msg = _internal_mutable_debugged_device();
3041 // @@protoc_insertion_point(field_mutable:tensorflow.DebugEvent.debugged_device)
3042 return _msg;
3043 }
3044
has_what()3045 inline bool DebugEvent::has_what() const {
3046 return what_case() != WHAT_NOT_SET;
3047 }
clear_has_what()3048 inline void DebugEvent::clear_has_what() {
3049 _impl_._oneof_case_[0] = WHAT_NOT_SET;
3050 }
what_case()3051 inline DebugEvent::WhatCase DebugEvent::what_case() const {
3052 return DebugEvent::WhatCase(_impl_._oneof_case_[0]);
3053 }
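// Illustrative sketch (hand-written, not protoc output): consumers of a
// DebugEvent typically dispatch on the oneof case before reading a field.
// HandleGraph and HandleExecution below are hypothetical handlers.
//
//   switch (event.what_case()) {
//     case tensorflow::DebugEvent::kDebuggedGraph:
//       HandleGraph(event.debugged_graph());
//       break;
//     case tensorflow::DebugEvent::kExecution:
//       HandleExecution(event.execution());
//       break;
//     case tensorflow::DebugEvent::WHAT_NOT_SET:
//     default:
//       break;
//   }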
// -------------------------------------------------------------------

// DebugMetadata

// string tensorflow_version = 1;
inline void DebugMetadata::clear_tensorflow_version() {
  _impl_.tensorflow_version_.ClearToEmpty();
}
inline const std::string& DebugMetadata::tensorflow_version() const {
  // @@protoc_insertion_point(field_get:tensorflow.DebugMetadata.tensorflow_version)
  return _internal_tensorflow_version();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void DebugMetadata::set_tensorflow_version(ArgT0&& arg0, ArgT... args) {
  _impl_.tensorflow_version_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.DebugMetadata.tensorflow_version)
}
inline std::string* DebugMetadata::mutable_tensorflow_version() {
  std::string* _s = _internal_mutable_tensorflow_version();
  // @@protoc_insertion_point(field_mutable:tensorflow.DebugMetadata.tensorflow_version)
  return _s;
}
inline const std::string& DebugMetadata::_internal_tensorflow_version() const {
  return _impl_.tensorflow_version_.Get();
}
inline void DebugMetadata::_internal_set_tensorflow_version(const std::string& value) {
  _impl_.tensorflow_version_.Set(value, GetArenaForAllocation());
}
inline std::string* DebugMetadata::_internal_mutable_tensorflow_version() {
  return _impl_.tensorflow_version_.Mutable(GetArenaForAllocation());
}
inline std::string* DebugMetadata::release_tensorflow_version() {
  // @@protoc_insertion_point(field_release:tensorflow.DebugMetadata.tensorflow_version)
  return _impl_.tensorflow_version_.Release();
}
inline void DebugMetadata::set_allocated_tensorflow_version(std::string* tensorflow_version) {
  _impl_.tensorflow_version_.SetAllocated(tensorflow_version, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.tensorflow_version_.IsDefault()) {
    _impl_.tensorflow_version_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.DebugMetadata.tensorflow_version)
}

// string file_version = 2;
inline void DebugMetadata::clear_file_version() {
  _impl_.file_version_.ClearToEmpty();
}
inline const std::string& DebugMetadata::file_version() const {
  // @@protoc_insertion_point(field_get:tensorflow.DebugMetadata.file_version)
  return _internal_file_version();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void DebugMetadata::set_file_version(ArgT0&& arg0, ArgT... args) {
  _impl_.file_version_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.DebugMetadata.file_version)
}
inline std::string* DebugMetadata::mutable_file_version() {
  std::string* _s = _internal_mutable_file_version();
  // @@protoc_insertion_point(field_mutable:tensorflow.DebugMetadata.file_version)
  return _s;
}
inline const std::string& DebugMetadata::_internal_file_version() const {
  return _impl_.file_version_.Get();
}
inline void DebugMetadata::_internal_set_file_version(const std::string& value) {
  _impl_.file_version_.Set(value, GetArenaForAllocation());
}
inline std::string* DebugMetadata::_internal_mutable_file_version() {
  return _impl_.file_version_.Mutable(GetArenaForAllocation());
}
inline std::string* DebugMetadata::release_file_version() {
  // @@protoc_insertion_point(field_release:tensorflow.DebugMetadata.file_version)
  return _impl_.file_version_.Release();
}
inline void DebugMetadata::set_allocated_file_version(std::string* file_version) {
  _impl_.file_version_.SetAllocated(file_version, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.file_version_.IsDefault()) {
    _impl_.file_version_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.DebugMetadata.file_version)
}

// string tfdbg_run_id = 3;
inline void DebugMetadata::clear_tfdbg_run_id() {
  _impl_.tfdbg_run_id_.ClearToEmpty();
}
inline const std::string& DebugMetadata::tfdbg_run_id() const {
  // @@protoc_insertion_point(field_get:tensorflow.DebugMetadata.tfdbg_run_id)
  return _internal_tfdbg_run_id();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void DebugMetadata::set_tfdbg_run_id(ArgT0&& arg0, ArgT... args) {
  _impl_.tfdbg_run_id_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.DebugMetadata.tfdbg_run_id)
}
inline std::string* DebugMetadata::mutable_tfdbg_run_id() {
  std::string* _s = _internal_mutable_tfdbg_run_id();
  // @@protoc_insertion_point(field_mutable:tensorflow.DebugMetadata.tfdbg_run_id)
  return _s;
}
inline const std::string& DebugMetadata::_internal_tfdbg_run_id() const {
  return _impl_.tfdbg_run_id_.Get();
}
inline void DebugMetadata::_internal_set_tfdbg_run_id(const std::string& value) {
  _impl_.tfdbg_run_id_.Set(value, GetArenaForAllocation());
}
inline std::string* DebugMetadata::_internal_mutable_tfdbg_run_id() {
  return _impl_.tfdbg_run_id_.Mutable(GetArenaForAllocation());
}
inline std::string* DebugMetadata::release_tfdbg_run_id() {
  // @@protoc_insertion_point(field_release:tensorflow.DebugMetadata.tfdbg_run_id)
  return _impl_.tfdbg_run_id_.Release();
}
inline void DebugMetadata::set_allocated_tfdbg_run_id(std::string* tfdbg_run_id) {
  _impl_.tfdbg_run_id_.SetAllocated(tfdbg_run_id, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.tfdbg_run_id_.IsDefault()) {
    _impl_.tfdbg_run_id_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.DebugMetadata.tfdbg_run_id)
}

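// Usage note, summarized from the setters above (editorial, not protoc
// output): the templated set_*() overloads forward their arguments to
// ArenaStringPtr::Set(), so string fields such as tensorflow_version,
// file_version, and tfdbg_run_id can be assigned from a std::string, a
// const char*, or a pointer/length pair. A hypothetical call site:
//
//   tensorflow::DebugMetadata metadata;
//   metadata.set_tfdbg_run_id("run_1");
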
// -------------------------------------------------------------------

// SourceFile

// string file_path = 1;
inline void SourceFile::clear_file_path() {
  _impl_.file_path_.ClearToEmpty();
}
inline const std::string& SourceFile::file_path() const {
  // @@protoc_insertion_point(field_get:tensorflow.SourceFile.file_path)
  return _internal_file_path();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void SourceFile::set_file_path(ArgT0&& arg0, ArgT... args) {
  _impl_.file_path_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.SourceFile.file_path)
}
inline std::string* SourceFile::mutable_file_path() {
  std::string* _s = _internal_mutable_file_path();
  // @@protoc_insertion_point(field_mutable:tensorflow.SourceFile.file_path)
  return _s;
}
inline const std::string& SourceFile::_internal_file_path() const {
  return _impl_.file_path_.Get();
}
inline void SourceFile::_internal_set_file_path(const std::string& value) {
  _impl_.file_path_.Set(value, GetArenaForAllocation());
}
inline std::string* SourceFile::_internal_mutable_file_path() {
  return _impl_.file_path_.Mutable(GetArenaForAllocation());
}
inline std::string* SourceFile::release_file_path() {
  // @@protoc_insertion_point(field_release:tensorflow.SourceFile.file_path)
  return _impl_.file_path_.Release();
}
inline void SourceFile::set_allocated_file_path(std::string* file_path) {
  _impl_.file_path_.SetAllocated(file_path, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.file_path_.IsDefault()) {
    _impl_.file_path_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.SourceFile.file_path)
}

// string host_name = 2;
inline void SourceFile::clear_host_name() {
  _impl_.host_name_.ClearToEmpty();
}
inline const std::string& SourceFile::host_name() const {
  // @@protoc_insertion_point(field_get:tensorflow.SourceFile.host_name)
  return _internal_host_name();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void SourceFile::set_host_name(ArgT0&& arg0, ArgT... args) {
  _impl_.host_name_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.SourceFile.host_name)
}
inline std::string* SourceFile::mutable_host_name() {
  std::string* _s = _internal_mutable_host_name();
  // @@protoc_insertion_point(field_mutable:tensorflow.SourceFile.host_name)
  return _s;
}
inline const std::string& SourceFile::_internal_host_name() const {
  return _impl_.host_name_.Get();
}
inline void SourceFile::_internal_set_host_name(const std::string& value) {
  _impl_.host_name_.Set(value, GetArenaForAllocation());
}
inline std::string* SourceFile::_internal_mutable_host_name() {
  return _impl_.host_name_.Mutable(GetArenaForAllocation());
}
inline std::string* SourceFile::release_host_name() {
  // @@protoc_insertion_point(field_release:tensorflow.SourceFile.host_name)
  return _impl_.host_name_.Release();
}
inline void SourceFile::set_allocated_host_name(std::string* host_name) {
  _impl_.host_name_.SetAllocated(host_name, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.host_name_.IsDefault()) {
    _impl_.host_name_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.SourceFile.host_name)
}

// repeated string lines = 3;
inline int SourceFile::_internal_lines_size() const {
  return _impl_.lines_.size();
}
inline int SourceFile::lines_size() const {
  return _internal_lines_size();
}
inline void SourceFile::clear_lines() {
  _impl_.lines_.Clear();
}
inline std::string* SourceFile::add_lines() {
  std::string* _s = _internal_add_lines();
  // @@protoc_insertion_point(field_add_mutable:tensorflow.SourceFile.lines)
  return _s;
}
inline const std::string& SourceFile::_internal_lines(int index) const {
  return _impl_.lines_.Get(index);
}
inline const std::string& SourceFile::lines(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.SourceFile.lines)
  return _internal_lines(index);
}
inline std::string* SourceFile::mutable_lines(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.SourceFile.lines)
  return _impl_.lines_.Mutable(index);
}
inline void SourceFile::set_lines(int index, const std::string& value) {
  _impl_.lines_.Mutable(index)->assign(value);
  // @@protoc_insertion_point(field_set:tensorflow.SourceFile.lines)
}
inline void SourceFile::set_lines(int index, std::string&& value) {
  _impl_.lines_.Mutable(index)->assign(std::move(value));
  // @@protoc_insertion_point(field_set:tensorflow.SourceFile.lines)
}
inline void SourceFile::set_lines(int index, const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  _impl_.lines_.Mutable(index)->assign(value);
  // @@protoc_insertion_point(field_set_char:tensorflow.SourceFile.lines)
}
inline void SourceFile::set_lines(int index, const char* value, size_t size) {
  _impl_.lines_.Mutable(index)->assign(
      reinterpret_cast<const char*>(value), size);
  // @@protoc_insertion_point(field_set_pointer:tensorflow.SourceFile.lines)
}
inline std::string* SourceFile::_internal_add_lines() {
  return _impl_.lines_.Add();
}
inline void SourceFile::add_lines(const std::string& value) {
  _impl_.lines_.Add()->assign(value);
  // @@protoc_insertion_point(field_add:tensorflow.SourceFile.lines)
}
inline void SourceFile::add_lines(std::string&& value) {
  _impl_.lines_.Add(std::move(value));
  // @@protoc_insertion_point(field_add:tensorflow.SourceFile.lines)
}
inline void SourceFile::add_lines(const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  _impl_.lines_.Add()->assign(value);
  // @@protoc_insertion_point(field_add_char:tensorflow.SourceFile.lines)
}
inline void SourceFile::add_lines(const char* value, size_t size) {
  _impl_.lines_.Add()->assign(reinterpret_cast<const char*>(value), size);
  // @@protoc_insertion_point(field_add_pointer:tensorflow.SourceFile.lines)
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>&
SourceFile::lines() const {
  // @@protoc_insertion_point(field_list:tensorflow.SourceFile.lines)
  return _impl_.lines_;
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>*
SourceFile::mutable_lines() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.SourceFile.lines)
  return &_impl_.lines_;
}

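// A minimal sketch of the repeated-string accessors above (hand-written, not
// protoc output); "file" is a hypothetical local instance:
//
//   tensorflow::SourceFile file;
//   file.set_file_path("/tmp/model.py");
//   file.add_lines("import tensorflow as tf");
//   for (const std::string& line : file.lines()) {
//     // lines() exposes the underlying RepeatedPtrField<std::string>.
//   }
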
// -------------------------------------------------------------------

// StackFrameWithId

// string id = 1;
inline void StackFrameWithId::clear_id() {
  _impl_.id_.ClearToEmpty();
}
inline const std::string& StackFrameWithId::id() const {
  // @@protoc_insertion_point(field_get:tensorflow.StackFrameWithId.id)
  return _internal_id();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void StackFrameWithId::set_id(ArgT0&& arg0, ArgT... args) {
  _impl_.id_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.StackFrameWithId.id)
}
inline std::string* StackFrameWithId::mutable_id() {
  std::string* _s = _internal_mutable_id();
  // @@protoc_insertion_point(field_mutable:tensorflow.StackFrameWithId.id)
  return _s;
}
inline const std::string& StackFrameWithId::_internal_id() const {
  return _impl_.id_.Get();
}
inline void StackFrameWithId::_internal_set_id(const std::string& value) {
  _impl_.id_.Set(value, GetArenaForAllocation());
}
inline std::string* StackFrameWithId::_internal_mutable_id() {
  return _impl_.id_.Mutable(GetArenaForAllocation());
}
inline std::string* StackFrameWithId::release_id() {
  // @@protoc_insertion_point(field_release:tensorflow.StackFrameWithId.id)
  return _impl_.id_.Release();
}
inline void StackFrameWithId::set_allocated_id(std::string* id) {
  _impl_.id_.SetAllocated(id, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.id_.IsDefault()) {
    _impl_.id_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.StackFrameWithId.id)
}

// .tensorflow.GraphDebugInfo.FileLineCol file_line_col = 2;
inline bool StackFrameWithId::_internal_has_file_line_col() const {
  return this != internal_default_instance() && _impl_.file_line_col_ != nullptr;
}
inline bool StackFrameWithId::has_file_line_col() const {
  return _internal_has_file_line_col();
}
inline const ::tensorflow::GraphDebugInfo_FileLineCol& StackFrameWithId::_internal_file_line_col() const {
  const ::tensorflow::GraphDebugInfo_FileLineCol* p = _impl_.file_line_col_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::GraphDebugInfo_FileLineCol&>(
      ::tensorflow::_GraphDebugInfo_FileLineCol_default_instance_);
}
inline const ::tensorflow::GraphDebugInfo_FileLineCol& StackFrameWithId::file_line_col() const {
  // @@protoc_insertion_point(field_get:tensorflow.StackFrameWithId.file_line_col)
  return _internal_file_line_col();
}
inline void StackFrameWithId::unsafe_arena_set_allocated_file_line_col(
    ::tensorflow::GraphDebugInfo_FileLineCol* file_line_col) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.file_line_col_);
  }
  _impl_.file_line_col_ = file_line_col;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.StackFrameWithId.file_line_col)
}
inline ::tensorflow::GraphDebugInfo_FileLineCol* StackFrameWithId::release_file_line_col() {
  ::tensorflow::GraphDebugInfo_FileLineCol* temp = _impl_.file_line_col_;
  _impl_.file_line_col_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old = reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
inline ::tensorflow::GraphDebugInfo_FileLineCol* StackFrameWithId::unsafe_arena_release_file_line_col() {
  // @@protoc_insertion_point(field_release:tensorflow.StackFrameWithId.file_line_col)
  ::tensorflow::GraphDebugInfo_FileLineCol* temp = _impl_.file_line_col_;
  _impl_.file_line_col_ = nullptr;
  return temp;
}
inline ::tensorflow::GraphDebugInfo_FileLineCol* StackFrameWithId::_internal_mutable_file_line_col() {
  if (_impl_.file_line_col_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::GraphDebugInfo_FileLineCol>(GetArenaForAllocation());
    _impl_.file_line_col_ = p;
  }
  return _impl_.file_line_col_;
}
inline ::tensorflow::GraphDebugInfo_FileLineCol* StackFrameWithId::mutable_file_line_col() {
  ::tensorflow::GraphDebugInfo_FileLineCol* _msg = _internal_mutable_file_line_col();
  // @@protoc_insertion_point(field_mutable:tensorflow.StackFrameWithId.file_line_col)
  return _msg;
}
inline void StackFrameWithId::set_allocated_file_line_col(::tensorflow::GraphDebugInfo_FileLineCol* file_line_col) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.file_line_col_);
  }
  if (file_line_col) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(
            reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(file_line_col));
    if (message_arena != submessage_arena) {
      file_line_col = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, file_line_col, submessage_arena);
    }
  }
  _impl_.file_line_col_ = file_line_col;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.StackFrameWithId.file_line_col)
}

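// Ownership note, summarized from the accessors above (editorial, not protoc
// output): set_allocated_*() takes ownership of the passed submessage,
// deleting any previous heap-allocated value when this message is not on an
// arena and reconciling ownership via GetOwnedMessage when the owning arenas
// differ. unsafe_arena_set_allocated_*() only deletes the old heap value and
// assumes the caller already placed the replacement on the same arena.
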
// -------------------------------------------------------------------

// CodeLocation

// string host_name = 1;
inline void CodeLocation::clear_host_name() {
  _impl_.host_name_.ClearToEmpty();
}
inline const std::string& CodeLocation::host_name() const {
  // @@protoc_insertion_point(field_get:tensorflow.CodeLocation.host_name)
  return _internal_host_name();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void CodeLocation::set_host_name(ArgT0&& arg0, ArgT... args) {
  _impl_.host_name_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.CodeLocation.host_name)
}
inline std::string* CodeLocation::mutable_host_name() {
  std::string* _s = _internal_mutable_host_name();
  // @@protoc_insertion_point(field_mutable:tensorflow.CodeLocation.host_name)
  return _s;
}
inline const std::string& CodeLocation::_internal_host_name() const {
  return _impl_.host_name_.Get();
}
inline void CodeLocation::_internal_set_host_name(const std::string& value) {
  _impl_.host_name_.Set(value, GetArenaForAllocation());
}
inline std::string* CodeLocation::_internal_mutable_host_name() {
  return _impl_.host_name_.Mutable(GetArenaForAllocation());
}
inline std::string* CodeLocation::release_host_name() {
  // @@protoc_insertion_point(field_release:tensorflow.CodeLocation.host_name)
  return _impl_.host_name_.Release();
}
inline void CodeLocation::set_allocated_host_name(std::string* host_name) {
  _impl_.host_name_.SetAllocated(host_name, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.host_name_.IsDefault()) {
    _impl_.host_name_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.CodeLocation.host_name)
}

// repeated string stack_frame_ids = 2;
inline int CodeLocation::_internal_stack_frame_ids_size() const {
  return _impl_.stack_frame_ids_.size();
}
inline int CodeLocation::stack_frame_ids_size() const {
  return _internal_stack_frame_ids_size();
}
inline void CodeLocation::clear_stack_frame_ids() {
  _impl_.stack_frame_ids_.Clear();
}
inline std::string* CodeLocation::add_stack_frame_ids() {
  std::string* _s = _internal_add_stack_frame_ids();
  // @@protoc_insertion_point(field_add_mutable:tensorflow.CodeLocation.stack_frame_ids)
  return _s;
}
inline const std::string& CodeLocation::_internal_stack_frame_ids(int index) const {
  return _impl_.stack_frame_ids_.Get(index);
}
inline const std::string& CodeLocation::stack_frame_ids(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.CodeLocation.stack_frame_ids)
  return _internal_stack_frame_ids(index);
}
inline std::string* CodeLocation::mutable_stack_frame_ids(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.CodeLocation.stack_frame_ids)
  return _impl_.stack_frame_ids_.Mutable(index);
}
inline void CodeLocation::set_stack_frame_ids(int index, const std::string& value) {
  _impl_.stack_frame_ids_.Mutable(index)->assign(value);
  // @@protoc_insertion_point(field_set:tensorflow.CodeLocation.stack_frame_ids)
}
inline void CodeLocation::set_stack_frame_ids(int index, std::string&& value) {
  _impl_.stack_frame_ids_.Mutable(index)->assign(std::move(value));
  // @@protoc_insertion_point(field_set:tensorflow.CodeLocation.stack_frame_ids)
}
inline void CodeLocation::set_stack_frame_ids(int index, const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  _impl_.stack_frame_ids_.Mutable(index)->assign(value);
  // @@protoc_insertion_point(field_set_char:tensorflow.CodeLocation.stack_frame_ids)
}
inline void CodeLocation::set_stack_frame_ids(int index, const char* value, size_t size) {
  _impl_.stack_frame_ids_.Mutable(index)->assign(
      reinterpret_cast<const char*>(value), size);
  // @@protoc_insertion_point(field_set_pointer:tensorflow.CodeLocation.stack_frame_ids)
}
inline std::string* CodeLocation::_internal_add_stack_frame_ids() {
  return _impl_.stack_frame_ids_.Add();
}
inline void CodeLocation::add_stack_frame_ids(const std::string& value) {
  _impl_.stack_frame_ids_.Add()->assign(value);
  // @@protoc_insertion_point(field_add:tensorflow.CodeLocation.stack_frame_ids)
}
inline void CodeLocation::add_stack_frame_ids(std::string&& value) {
  _impl_.stack_frame_ids_.Add(std::move(value));
  // @@protoc_insertion_point(field_add:tensorflow.CodeLocation.stack_frame_ids)
}
inline void CodeLocation::add_stack_frame_ids(const char* value) {
  GOOGLE_DCHECK(value != nullptr);
  _impl_.stack_frame_ids_.Add()->assign(value);
  // @@protoc_insertion_point(field_add_char:tensorflow.CodeLocation.stack_frame_ids)
}
inline void CodeLocation::add_stack_frame_ids(const char* value, size_t size) {
  _impl_.stack_frame_ids_.Add()->assign(reinterpret_cast<const char*>(value), size);
  // @@protoc_insertion_point(field_add_pointer:tensorflow.CodeLocation.stack_frame_ids)
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>&
CodeLocation::stack_frame_ids() const {
  // @@protoc_insertion_point(field_list:tensorflow.CodeLocation.stack_frame_ids)
  return _impl_.stack_frame_ids_;
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField<std::string>*
CodeLocation::mutable_stack_frame_ids() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.CodeLocation.stack_frame_ids)
  return &_impl_.stack_frame_ids_;
}

// -------------------------------------------------------------------

// GraphOpCreation

// string op_type = 1;
inline void GraphOpCreation::clear_op_type() {
  _impl_.op_type_.ClearToEmpty();
}
inline const std::string& GraphOpCreation::op_type() const {
  // @@protoc_insertion_point(field_get:tensorflow.GraphOpCreation.op_type)
  return _internal_op_type();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void GraphOpCreation::set_op_type(ArgT0&& arg0, ArgT... args) {
  _impl_.op_type_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.GraphOpCreation.op_type)
}
inline std::string* GraphOpCreation::mutable_op_type() {
  std::string* _s = _internal_mutable_op_type();
  // @@protoc_insertion_point(field_mutable:tensorflow.GraphOpCreation.op_type)
  return _s;
}
inline const std::string& GraphOpCreation::_internal_op_type() const {
  return _impl_.op_type_.Get();
}
inline void GraphOpCreation::_internal_set_op_type(const std::string& value) {
  _impl_.op_type_.Set(value, GetArenaForAllocation());
}
inline std::string* GraphOpCreation::_internal_mutable_op_type() {
  return _impl_.op_type_.Mutable(GetArenaForAllocation());
}
inline std::string* GraphOpCreation::release_op_type() {
  // @@protoc_insertion_point(field_release:tensorflow.GraphOpCreation.op_type)
  return _impl_.op_type_.Release();
}
inline void GraphOpCreation::set_allocated_op_type(std::string* op_type) {
  _impl_.op_type_.SetAllocated(op_type, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.op_type_.IsDefault()) {
    _impl_.op_type_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.GraphOpCreation.op_type)
}

// string op_name = 2;
inline void GraphOpCreation::clear_op_name() {
  _impl_.op_name_.ClearToEmpty();
}
inline const std::string& GraphOpCreation::op_name() const {
  // @@protoc_insertion_point(field_get:tensorflow.GraphOpCreation.op_name)
  return _internal_op_name();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void GraphOpCreation::set_op_name(ArgT0&& arg0, ArgT... args) {
  _impl_.op_name_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.GraphOpCreation.op_name)
}
inline std::string* GraphOpCreation::mutable_op_name() {
  std::string* _s = _internal_mutable_op_name();
  // @@protoc_insertion_point(field_mutable:tensorflow.GraphOpCreation.op_name)
  return _s;
}
inline const std::string& GraphOpCreation::_internal_op_name() const {
  return _impl_.op_name_.Get();
}
inline void GraphOpCreation::_internal_set_op_name(const std::string& value) {
  _impl_.op_name_.Set(value, GetArenaForAllocation());
}
inline std::string* GraphOpCreation::_internal_mutable_op_name() {
  return _impl_.op_name_.Mutable(GetArenaForAllocation());
}
inline std::string* GraphOpCreation::release_op_name() {
  // @@protoc_insertion_point(field_release:tensorflow.GraphOpCreation.op_name)
  return _impl_.op_name_.Release();
}
inline void GraphOpCreation::set_allocated_op_name(std::string* op_name) {
  _impl_.op_name_.SetAllocated(op_name, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.op_name_.IsDefault()) {
    _impl_.op_name_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.GraphOpCreation.op_name)
}

// string graph_name = 3;
inline void GraphOpCreation::clear_graph_name() {
  _impl_.graph_name_.ClearToEmpty();
}
inline const std::string& GraphOpCreation::graph_name() const {
  // @@protoc_insertion_point(field_get:tensorflow.GraphOpCreation.graph_name)
  return _internal_graph_name();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void GraphOpCreation::set_graph_name(ArgT0&& arg0, ArgT... args) {
  _impl_.graph_name_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.GraphOpCreation.graph_name)
}
inline std::string* GraphOpCreation::mutable_graph_name() {
  std::string* _s = _internal_mutable_graph_name();
  // @@protoc_insertion_point(field_mutable:tensorflow.GraphOpCreation.graph_name)
  return _s;
}
inline const std::string& GraphOpCreation::_internal_graph_name() const {
  return _impl_.graph_name_.Get();
}
inline void GraphOpCreation::_internal_set_graph_name(const std::string& value) {
  _impl_.graph_name_.Set(value, GetArenaForAllocation());
}
inline std::string* GraphOpCreation::_internal_mutable_graph_name() {
  return _impl_.graph_name_.Mutable(GetArenaForAllocation());
}
inline std::string* GraphOpCreation::release_graph_name() {
  // @@protoc_insertion_point(field_release:tensorflow.GraphOpCreation.graph_name)
  return _impl_.graph_name_.Release();
}
inline void GraphOpCreation::set_allocated_graph_name(std::string* graph_name) {
  _impl_.graph_name_.SetAllocated(graph_name, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.graph_name_.IsDefault()) {
    _impl_.graph_name_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.GraphOpCreation.graph_name)
}

// string graph_id = 4;
inline void GraphOpCreation::clear_graph_id() {
  _impl_.graph_id_.ClearToEmpty();
}
inline const std::string& GraphOpCreation::graph_id() const {
  // @@protoc_insertion_point(field_get:tensorflow.GraphOpCreation.graph_id)
  return _internal_graph_id();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void GraphOpCreation::set_graph_id(ArgT0&& arg0, ArgT... args) {
  _impl_.graph_id_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.GraphOpCreation.graph_id)
}
inline std::string* GraphOpCreation::mutable_graph_id() {
  std::string* _s = _internal_mutable_graph_id();
  // @@protoc_insertion_point(field_mutable:tensorflow.GraphOpCreation.graph_id)
  return _s;
}
inline const std::string& GraphOpCreation::_internal_graph_id() const {
  return _impl_.graph_id_.Get();
}
inline void GraphOpCreation::_internal_set_graph_id(const std::string& value) {
  _impl_.graph_id_.Set(value, GetArenaForAllocation());
}
inline std::string* GraphOpCreation::_internal_mutable_graph_id() {
  return _impl_.graph_id_.Mutable(GetArenaForAllocation());
}
inline std::string* GraphOpCreation::release_graph_id() {
  // @@protoc_insertion_point(field_release:tensorflow.GraphOpCreation.graph_id)
  return _impl_.graph_id_.Release();
}
inline void GraphOpCreation::set_allocated_graph_id(std::string* graph_id) {
  _impl_.graph_id_.SetAllocated(graph_id, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.graph_id_.IsDefault()) {
    _impl_.graph_id_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.GraphOpCreation.graph_id)
}

// string device_name = 5;
inline void GraphOpCreation::clear_device_name() {
  _impl_.device_name_.ClearToEmpty();
}
inline const std::string& GraphOpCreation::device_name() const {
  // @@protoc_insertion_point(field_get:tensorflow.GraphOpCreation.device_name)
  return _internal_device_name();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void GraphOpCreation::set_device_name(ArgT0&& arg0, ArgT... args) {
  _impl_.device_name_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.GraphOpCreation.device_name)
}
inline std::string* GraphOpCreation::mutable_device_name() {
  std::string* _s = _internal_mutable_device_name();
  // @@protoc_insertion_point(field_mutable:tensorflow.GraphOpCreation.device_name)
  return _s;
}
inline const std::string& GraphOpCreation::_internal_device_name() const {
  return _impl_.device_name_.Get();
}
inline void GraphOpCreation::_internal_set_device_name(const std::string& value) {
  _impl_.device_name_.Set(value, GetArenaForAllocation());
}
inline std::string* GraphOpCreation::_internal_mutable_device_name() {
  return _impl_.device_name_.Mutable(GetArenaForAllocation());
}
inline std::string* GraphOpCreation::release_device_name() {
  // @@protoc_insertion_point(field_release:tensorflow.GraphOpCreation.device_name)
  return _impl_.device_name_.Release();
}
inline void GraphOpCreation::set_allocated_device_name(std::string* device_name) {
  _impl_.device_name_.SetAllocated(device_name, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.device_name_.IsDefault()) {
    _impl_.device_name_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.GraphOpCreation.device_name)
}

// repeated string input_names = 6;
inline int GraphOpCreation::_internal_input_names_size() const {
  return _impl_.input_names_.size();
}
inline int GraphOpCreation::input_names_size() const {
  return _internal_input_names_size();
}
inline void GraphOpCreation::clear_input_names() {
  _impl_.input_names_.Clear();
}
inline std::string* GraphOpCreation::add_input_names() {
  std::string* _s = _internal_add_input_names();
  // @@protoc_insertion_point(field_add_mutable:tensorflow.GraphOpCreation.input_names)
  return _s;
}
inline const std::string& GraphOpCreation::_internal_input_names(int index) const {
  return _impl_.input_names_.Get(index);
}
inline const std::string& GraphOpCreation::input_names(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.GraphOpCreation.input_names)
  return _internal_input_names(index);
}
inline std::string* GraphOpCreation::mutable_input_names(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.GraphOpCreation.input_names)
  return _impl_.input_names_.Mutable(index);
}
inline void GraphOpCreation::set_input_names(int index, const std::string& value) {