1 // Generated by the protocol buffer compiler. DO NOT EDIT!
2 // source: tensorflow/core/framework/step_stats.proto
3
4 #ifndef GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fframework_2fstep_5fstats_2eproto
5 #define GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fframework_2fstep_5fstats_2eproto
6
7 #include <cstdint>
8 #include <limits>
9 #include <string>
10
11 #include <google/protobuf/port_def.inc>
12 #if PROTOBUF_VERSION < 3021000
13 #error This file was generated by a newer version of protoc which is
14 #error incompatible with your Protocol Buffer headers. Please update
15 #error your headers.
16 #endif
17 #if 3021012 < PROTOBUF_MIN_PROTOC_VERSION
18 #error This file was generated by an older version of protoc which is
19 #error incompatible with your Protocol Buffer headers. Please
20 #error regenerate this file with a newer version of protoc.
21 #endif
22
23 #include <google/protobuf/port_undef.inc>
24 #include <google/protobuf/io/coded_stream.h>
25 #include <google/protobuf/arena.h>
26 #include <google/protobuf/arenastring.h>
27 #include <google/protobuf/generated_message_util.h>
28 #include <google/protobuf/metadata_lite.h>
29 #include <google/protobuf/message_lite.h>
30 #include <google/protobuf/repeated_field.h> // IWYU pragma: export
31 #include <google/protobuf/extension_set.h> // IWYU pragma: export
32 #include <google/protobuf/map.h> // IWYU pragma: export
33 #include <google/protobuf/map_entry_lite.h>
34 #include <google/protobuf/map_field_lite.h>
35 #include "tensorflow/core/framework/allocation_description.pb.h"
36 #include "tensorflow/core/framework/tensor_description.pb.h"
37 // @@protoc_insertion_point(includes)
38 #include <google/protobuf/port_def.inc>
39 #define PROTOBUF_INTERNAL_EXPORT_tensorflow_2fcore_2fframework_2fstep_5fstats_2eproto
40 PROTOBUF_NAMESPACE_OPEN
41 namespace internal {
42 class AnyMetadata;
43 } // namespace internal
44 PROTOBUF_NAMESPACE_CLOSE
45
46 // Internal implementation detail -- do not use these members.
47 struct TableStruct_tensorflow_2fcore_2fframework_2fstep_5fstats_2eproto {
48 static const ::uint32_t offsets[];
49 };
50 namespace tensorflow {
51 class AllocationRecord;
52 struct AllocationRecordDefaultTypeInternal;
53 extern AllocationRecordDefaultTypeInternal _AllocationRecord_default_instance_;
54 class AllocatorMemoryUsed;
55 struct AllocatorMemoryUsedDefaultTypeInternal;
56 extern AllocatorMemoryUsedDefaultTypeInternal _AllocatorMemoryUsed_default_instance_;
57 class DeviceStepStats;
58 struct DeviceStepStatsDefaultTypeInternal;
59 extern DeviceStepStatsDefaultTypeInternal _DeviceStepStats_default_instance_;
60 class DeviceStepStats_ThreadNamesEntry_DoNotUse;
61 struct DeviceStepStats_ThreadNamesEntry_DoNotUseDefaultTypeInternal;
62 extern DeviceStepStats_ThreadNamesEntry_DoNotUseDefaultTypeInternal _DeviceStepStats_ThreadNamesEntry_DoNotUse_default_instance_;
63 class MemoryStats;
64 struct MemoryStatsDefaultTypeInternal;
65 extern MemoryStatsDefaultTypeInternal _MemoryStats_default_instance_;
66 class NodeExecStats;
67 struct NodeExecStatsDefaultTypeInternal;
68 extern NodeExecStatsDefaultTypeInternal _NodeExecStats_default_instance_;
69 class NodeOutput;
70 struct NodeOutputDefaultTypeInternal;
71 extern NodeOutputDefaultTypeInternal _NodeOutput_default_instance_;
72 class StepStats;
73 struct StepStatsDefaultTypeInternal;
74 extern StepStatsDefaultTypeInternal _StepStats_default_instance_;
75 } // namespace tensorflow
76 PROTOBUF_NAMESPACE_OPEN
77 template<> ::tensorflow::AllocationRecord* Arena::CreateMaybeMessage<::tensorflow::AllocationRecord>(Arena*);
78 template<> ::tensorflow::AllocatorMemoryUsed* Arena::CreateMaybeMessage<::tensorflow::AllocatorMemoryUsed>(Arena*);
79 template<> ::tensorflow::DeviceStepStats* Arena::CreateMaybeMessage<::tensorflow::DeviceStepStats>(Arena*);
80 template<> ::tensorflow::DeviceStepStats_ThreadNamesEntry_DoNotUse* Arena::CreateMaybeMessage<::tensorflow::DeviceStepStats_ThreadNamesEntry_DoNotUse>(Arena*);
81 template<> ::tensorflow::MemoryStats* Arena::CreateMaybeMessage<::tensorflow::MemoryStats>(Arena*);
82 template<> ::tensorflow::NodeExecStats* Arena::CreateMaybeMessage<::tensorflow::NodeExecStats>(Arena*);
83 template<> ::tensorflow::NodeOutput* Arena::CreateMaybeMessage<::tensorflow::NodeOutput>(Arena*);
84 template<> ::tensorflow::StepStats* Arena::CreateMaybeMessage<::tensorflow::StepStats>(Arena*);
85 PROTOBUF_NAMESPACE_CLOSE
86 namespace tensorflow {
87
88 // ===================================================================
89
90 class AllocationRecord final :
91 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.AllocationRecord) */ {
92 public:
AllocationRecord()93 inline AllocationRecord() : AllocationRecord(nullptr) {}
94 ~AllocationRecord() override;
95 explicit PROTOBUF_CONSTEXPR AllocationRecord(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
96
97 AllocationRecord(const AllocationRecord& from);
AllocationRecord(AllocationRecord && from)98 AllocationRecord(AllocationRecord&& from) noexcept
99 : AllocationRecord() {
100 *this = ::std::move(from);
101 }
102
103 inline AllocationRecord& operator=(const AllocationRecord& from) {
104 if (this == &from) return *this;
105 CopyFrom(from);
106 return *this;
107 }
108 inline AllocationRecord& operator=(AllocationRecord&& from) noexcept {
109 if (this == &from) return *this;
110 if (GetOwningArena() == from.GetOwningArena()
111 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
112 && GetOwningArena() != nullptr
113 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
114 ) {
115 InternalSwap(&from);
116 } else {
117 CopyFrom(from);
118 }
119 return *this;
120 }
121
default_instance()122 static const AllocationRecord& default_instance() {
123 return *internal_default_instance();
124 }
internal_default_instance()125 static inline const AllocationRecord* internal_default_instance() {
126 return reinterpret_cast<const AllocationRecord*>(
127 &_AllocationRecord_default_instance_);
128 }
129 static constexpr int kIndexInFileMessages =
130 0;
131
swap(AllocationRecord & a,AllocationRecord & b)132 friend void swap(AllocationRecord& a, AllocationRecord& b) {
133 a.Swap(&b);
134 }
Swap(AllocationRecord * other)135 inline void Swap(AllocationRecord* other) {
136 if (other == this) return;
137 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
138 if (GetOwningArena() != nullptr &&
139 GetOwningArena() == other->GetOwningArena()) {
140 #else // PROTOBUF_FORCE_COPY_IN_SWAP
141 if (GetOwningArena() == other->GetOwningArena()) {
142 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
143 InternalSwap(other);
144 } else {
145 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
146 }
147 }
148 void UnsafeArenaSwap(AllocationRecord* other) {
149 if (other == this) return;
150 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
151 InternalSwap(other);
152 }
153
154 // implements Message ----------------------------------------------
155
156 AllocationRecord* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
157 return CreateMaybeMessage<AllocationRecord>(arena);
158 }
159 AllocationRecord* New() const {
160 return New(nullptr);
161 }
162 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
163 void CopyFrom(const AllocationRecord& from);
164 void MergeFrom(const AllocationRecord& from);
165 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
166 bool IsInitialized() const final;
167
168 size_t ByteSizeLong() const final;
169 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
170 ::uint8_t* _InternalSerialize(
171 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
172 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
173
174 private:
175 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
176 void SharedDtor();
177 void SetCachedSize(int size) const;
178 void InternalSwap(AllocationRecord* other);
179
180 private:
181 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
182 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
183 return "tensorflow.AllocationRecord";
184 }
185 protected:
186 explicit AllocationRecord(::PROTOBUF_NAMESPACE_ID::Arena* arena,
187 bool is_message_owned = false);
188 public:
189
190 std::string GetTypeName() const final;
191
192 // nested types ----------------------------------------------------
193
194 // accessors -------------------------------------------------------
195
196 enum : int {
197 kAllocMicrosFieldNumber = 1,
198 kAllocBytesFieldNumber = 2,
199 };
200 // int64 alloc_micros = 1;
201 void clear_alloc_micros();
202 ::int64_t alloc_micros() const;
203 void set_alloc_micros(::int64_t value);
204 private:
205 ::int64_t _internal_alloc_micros() const;
206 void _internal_set_alloc_micros(::int64_t value);
207 public:
208
209 // int64 alloc_bytes = 2;
210 void clear_alloc_bytes();
211 ::int64_t alloc_bytes() const;
212 void set_alloc_bytes(::int64_t value);
213 private:
214 ::int64_t _internal_alloc_bytes() const;
215 void _internal_set_alloc_bytes(::int64_t value);
216 public:
217
218 // @@protoc_insertion_point(class_scope:tensorflow.AllocationRecord)
219 private:
220 class _Internal;
221
222 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
223 typedef void InternalArenaConstructable_;
224 typedef void DestructorSkippable_;
225 struct Impl_ {
226 ::int64_t alloc_micros_;
227 ::int64_t alloc_bytes_;
228 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
229 };
230 union { Impl_ _impl_; };
231 friend struct ::TableStruct_tensorflow_2fcore_2fframework_2fstep_5fstats_2eproto;
232 };
233 // -------------------------------------------------------------------
234
235 class AllocatorMemoryUsed final :
236 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.AllocatorMemoryUsed) */ {
237 public:
AllocatorMemoryUsed()238 inline AllocatorMemoryUsed() : AllocatorMemoryUsed(nullptr) {}
239 ~AllocatorMemoryUsed() override;
240 explicit PROTOBUF_CONSTEXPR AllocatorMemoryUsed(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
241
242 AllocatorMemoryUsed(const AllocatorMemoryUsed& from);
AllocatorMemoryUsed(AllocatorMemoryUsed && from)243 AllocatorMemoryUsed(AllocatorMemoryUsed&& from) noexcept
244 : AllocatorMemoryUsed() {
245 *this = ::std::move(from);
246 }
247
248 inline AllocatorMemoryUsed& operator=(const AllocatorMemoryUsed& from) {
249 if (this == &from) return *this;
250 CopyFrom(from);
251 return *this;
252 }
253 inline AllocatorMemoryUsed& operator=(AllocatorMemoryUsed&& from) noexcept {
254 if (this == &from) return *this;
255 if (GetOwningArena() == from.GetOwningArena()
256 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
257 && GetOwningArena() != nullptr
258 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
259 ) {
260 InternalSwap(&from);
261 } else {
262 CopyFrom(from);
263 }
264 return *this;
265 }
266
default_instance()267 static const AllocatorMemoryUsed& default_instance() {
268 return *internal_default_instance();
269 }
internal_default_instance()270 static inline const AllocatorMemoryUsed* internal_default_instance() {
271 return reinterpret_cast<const AllocatorMemoryUsed*>(
272 &_AllocatorMemoryUsed_default_instance_);
273 }
274 static constexpr int kIndexInFileMessages =
275 1;
276
swap(AllocatorMemoryUsed & a,AllocatorMemoryUsed & b)277 friend void swap(AllocatorMemoryUsed& a, AllocatorMemoryUsed& b) {
278 a.Swap(&b);
279 }
Swap(AllocatorMemoryUsed * other)280 inline void Swap(AllocatorMemoryUsed* other) {
281 if (other == this) return;
282 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
283 if (GetOwningArena() != nullptr &&
284 GetOwningArena() == other->GetOwningArena()) {
285 #else // PROTOBUF_FORCE_COPY_IN_SWAP
286 if (GetOwningArena() == other->GetOwningArena()) {
287 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
288 InternalSwap(other);
289 } else {
290 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
291 }
292 }
293 void UnsafeArenaSwap(AllocatorMemoryUsed* other) {
294 if (other == this) return;
295 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
296 InternalSwap(other);
297 }
298
299 // implements Message ----------------------------------------------
300
301 AllocatorMemoryUsed* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
302 return CreateMaybeMessage<AllocatorMemoryUsed>(arena);
303 }
304 AllocatorMemoryUsed* New() const {
305 return New(nullptr);
306 }
307 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
308 void CopyFrom(const AllocatorMemoryUsed& from);
309 void MergeFrom(const AllocatorMemoryUsed& from);
310 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
311 bool IsInitialized() const final;
312
313 size_t ByteSizeLong() const final;
314 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
315 ::uint8_t* _InternalSerialize(
316 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
317 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
318
319 private:
320 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
321 void SharedDtor();
322 void SetCachedSize(int size) const;
323 void InternalSwap(AllocatorMemoryUsed* other);
324
325 private:
326 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
327 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
328 return "tensorflow.AllocatorMemoryUsed";
329 }
330 protected:
331 explicit AllocatorMemoryUsed(::PROTOBUF_NAMESPACE_ID::Arena* arena,
332 bool is_message_owned = false);
333 public:
334
335 std::string GetTypeName() const final;
336
337 // nested types ----------------------------------------------------
338
339 // accessors -------------------------------------------------------
340
341 enum : int {
342 kAllocationRecordsFieldNumber = 6,
343 kAllocatorNameFieldNumber = 1,
344 kTotalBytesFieldNumber = 2,
345 kPeakBytesFieldNumber = 3,
346 kLiveBytesFieldNumber = 4,
347 kAllocatorBytesInUseFieldNumber = 5,
348 };
349 // repeated .tensorflow.AllocationRecord allocation_records = 6;
350 int allocation_records_size() const;
351 private:
352 int _internal_allocation_records_size() const;
353 public:
354 void clear_allocation_records();
355 ::tensorflow::AllocationRecord* mutable_allocation_records(int index);
356 ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::AllocationRecord >*
357 mutable_allocation_records();
358 private:
359 const ::tensorflow::AllocationRecord& _internal_allocation_records(int index) const;
360 ::tensorflow::AllocationRecord* _internal_add_allocation_records();
361 public:
362 const ::tensorflow::AllocationRecord& allocation_records(int index) const;
363 ::tensorflow::AllocationRecord* add_allocation_records();
364 const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::AllocationRecord >&
365 allocation_records() const;
366
367 // string allocator_name = 1;
368 void clear_allocator_name();
369 const std::string& allocator_name() const;
370 template <typename ArgT0 = const std::string&, typename... ArgT>
371 void set_allocator_name(ArgT0&& arg0, ArgT... args);
372 std::string* mutable_allocator_name();
373 PROTOBUF_NODISCARD std::string* release_allocator_name();
374 void set_allocated_allocator_name(std::string* allocator_name);
375 private:
376 const std::string& _internal_allocator_name() const;
377 inline PROTOBUF_ALWAYS_INLINE void _internal_set_allocator_name(const std::string& value);
378 std::string* _internal_mutable_allocator_name();
379 public:
380
381 // int64 total_bytes = 2;
382 void clear_total_bytes();
383 ::int64_t total_bytes() const;
384 void set_total_bytes(::int64_t value);
385 private:
386 ::int64_t _internal_total_bytes() const;
387 void _internal_set_total_bytes(::int64_t value);
388 public:
389
390 // int64 peak_bytes = 3;
391 void clear_peak_bytes();
392 ::int64_t peak_bytes() const;
393 void set_peak_bytes(::int64_t value);
394 private:
395 ::int64_t _internal_peak_bytes() const;
396 void _internal_set_peak_bytes(::int64_t value);
397 public:
398
399 // int64 live_bytes = 4;
400 void clear_live_bytes();
401 ::int64_t live_bytes() const;
402 void set_live_bytes(::int64_t value);
403 private:
404 ::int64_t _internal_live_bytes() const;
405 void _internal_set_live_bytes(::int64_t value);
406 public:
407
408 // int64 allocator_bytes_in_use = 5;
409 void clear_allocator_bytes_in_use();
410 ::int64_t allocator_bytes_in_use() const;
411 void set_allocator_bytes_in_use(::int64_t value);
412 private:
413 ::int64_t _internal_allocator_bytes_in_use() const;
414 void _internal_set_allocator_bytes_in_use(::int64_t value);
415 public:
416
417 // @@protoc_insertion_point(class_scope:tensorflow.AllocatorMemoryUsed)
418 private:
419 class _Internal;
420
421 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
422 typedef void InternalArenaConstructable_;
423 typedef void DestructorSkippable_;
424 struct Impl_ {
425 ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::AllocationRecord > allocation_records_;
426 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr allocator_name_;
427 ::int64_t total_bytes_;
428 ::int64_t peak_bytes_;
429 ::int64_t live_bytes_;
430 ::int64_t allocator_bytes_in_use_;
431 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
432 };
433 union { Impl_ _impl_; };
434 friend struct ::TableStruct_tensorflow_2fcore_2fframework_2fstep_5fstats_2eproto;
435 };
436 // -------------------------------------------------------------------
437
438 class NodeOutput final :
439 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.NodeOutput) */ {
440 public:
NodeOutput()441 inline NodeOutput() : NodeOutput(nullptr) {}
442 ~NodeOutput() override;
443 explicit PROTOBUF_CONSTEXPR NodeOutput(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
444
445 NodeOutput(const NodeOutput& from);
NodeOutput(NodeOutput && from)446 NodeOutput(NodeOutput&& from) noexcept
447 : NodeOutput() {
448 *this = ::std::move(from);
449 }
450
451 inline NodeOutput& operator=(const NodeOutput& from) {
452 if (this == &from) return *this;
453 CopyFrom(from);
454 return *this;
455 }
456 inline NodeOutput& operator=(NodeOutput&& from) noexcept {
457 if (this == &from) return *this;
458 if (GetOwningArena() == from.GetOwningArena()
459 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
460 && GetOwningArena() != nullptr
461 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
462 ) {
463 InternalSwap(&from);
464 } else {
465 CopyFrom(from);
466 }
467 return *this;
468 }
469
default_instance()470 static const NodeOutput& default_instance() {
471 return *internal_default_instance();
472 }
internal_default_instance()473 static inline const NodeOutput* internal_default_instance() {
474 return reinterpret_cast<const NodeOutput*>(
475 &_NodeOutput_default_instance_);
476 }
477 static constexpr int kIndexInFileMessages =
478 2;
479
swap(NodeOutput & a,NodeOutput & b)480 friend void swap(NodeOutput& a, NodeOutput& b) {
481 a.Swap(&b);
482 }
Swap(NodeOutput * other)483 inline void Swap(NodeOutput* other) {
484 if (other == this) return;
485 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
486 if (GetOwningArena() != nullptr &&
487 GetOwningArena() == other->GetOwningArena()) {
488 #else // PROTOBUF_FORCE_COPY_IN_SWAP
489 if (GetOwningArena() == other->GetOwningArena()) {
490 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
491 InternalSwap(other);
492 } else {
493 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
494 }
495 }
496 void UnsafeArenaSwap(NodeOutput* other) {
497 if (other == this) return;
498 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
499 InternalSwap(other);
500 }
501
502 // implements Message ----------------------------------------------
503
504 NodeOutput* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
505 return CreateMaybeMessage<NodeOutput>(arena);
506 }
507 NodeOutput* New() const {
508 return New(nullptr);
509 }
510 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
511 void CopyFrom(const NodeOutput& from);
512 void MergeFrom(const NodeOutput& from);
513 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
514 bool IsInitialized() const final;
515
516 size_t ByteSizeLong() const final;
517 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
518 ::uint8_t* _InternalSerialize(
519 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
520 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
521
522 private:
523 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
524 void SharedDtor();
525 void SetCachedSize(int size) const;
526 void InternalSwap(NodeOutput* other);
527
528 private:
529 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
530 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
531 return "tensorflow.NodeOutput";
532 }
533 protected:
534 explicit NodeOutput(::PROTOBUF_NAMESPACE_ID::Arena* arena,
535 bool is_message_owned = false);
536 public:
537
538 std::string GetTypeName() const final;
539
540 // nested types ----------------------------------------------------
541
542 // accessors -------------------------------------------------------
543
544 enum : int {
545 kTensorDescriptionFieldNumber = 3,
546 kSlotFieldNumber = 1,
547 };
548 // .tensorflow.TensorDescription tensor_description = 3;
549 bool has_tensor_description() const;
550 private:
551 bool _internal_has_tensor_description() const;
552 public:
553 void clear_tensor_description();
554 const ::tensorflow::TensorDescription& tensor_description() const;
555 PROTOBUF_NODISCARD ::tensorflow::TensorDescription* release_tensor_description();
556 ::tensorflow::TensorDescription* mutable_tensor_description();
557 void set_allocated_tensor_description(::tensorflow::TensorDescription* tensor_description);
558 private:
559 const ::tensorflow::TensorDescription& _internal_tensor_description() const;
560 ::tensorflow::TensorDescription* _internal_mutable_tensor_description();
561 public:
562 void unsafe_arena_set_allocated_tensor_description(
563 ::tensorflow::TensorDescription* tensor_description);
564 ::tensorflow::TensorDescription* unsafe_arena_release_tensor_description();
565
566 // int32 slot = 1;
567 void clear_slot();
568 ::int32_t slot() const;
569 void set_slot(::int32_t value);
570 private:
571 ::int32_t _internal_slot() const;
572 void _internal_set_slot(::int32_t value);
573 public:
574
575 // @@protoc_insertion_point(class_scope:tensorflow.NodeOutput)
576 private:
577 class _Internal;
578
579 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
580 typedef void InternalArenaConstructable_;
581 typedef void DestructorSkippable_;
582 struct Impl_ {
583 ::tensorflow::TensorDescription* tensor_description_;
584 ::int32_t slot_;
585 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
586 };
587 union { Impl_ _impl_; };
588 friend struct ::TableStruct_tensorflow_2fcore_2fframework_2fstep_5fstats_2eproto;
589 };
590 // -------------------------------------------------------------------
591
592 class MemoryStats final :
593 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.MemoryStats) */ {
594 public:
MemoryStats()595 inline MemoryStats() : MemoryStats(nullptr) {}
596 ~MemoryStats() override;
597 explicit PROTOBUF_CONSTEXPR MemoryStats(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
598
599 MemoryStats(const MemoryStats& from);
MemoryStats(MemoryStats && from)600 MemoryStats(MemoryStats&& from) noexcept
601 : MemoryStats() {
602 *this = ::std::move(from);
603 }
604
605 inline MemoryStats& operator=(const MemoryStats& from) {
606 if (this == &from) return *this;
607 CopyFrom(from);
608 return *this;
609 }
610 inline MemoryStats& operator=(MemoryStats&& from) noexcept {
611 if (this == &from) return *this;
612 if (GetOwningArena() == from.GetOwningArena()
613 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
614 && GetOwningArena() != nullptr
615 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
616 ) {
617 InternalSwap(&from);
618 } else {
619 CopyFrom(from);
620 }
621 return *this;
622 }
623
default_instance()624 static const MemoryStats& default_instance() {
625 return *internal_default_instance();
626 }
internal_default_instance()627 static inline const MemoryStats* internal_default_instance() {
628 return reinterpret_cast<const MemoryStats*>(
629 &_MemoryStats_default_instance_);
630 }
631 static constexpr int kIndexInFileMessages =
632 3;
633
swap(MemoryStats & a,MemoryStats & b)634 friend void swap(MemoryStats& a, MemoryStats& b) {
635 a.Swap(&b);
636 }
Swap(MemoryStats * other)637 inline void Swap(MemoryStats* other) {
638 if (other == this) return;
639 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
640 if (GetOwningArena() != nullptr &&
641 GetOwningArena() == other->GetOwningArena()) {
642 #else // PROTOBUF_FORCE_COPY_IN_SWAP
643 if (GetOwningArena() == other->GetOwningArena()) {
644 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
645 InternalSwap(other);
646 } else {
647 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
648 }
649 }
650 void UnsafeArenaSwap(MemoryStats* other) {
651 if (other == this) return;
652 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
653 InternalSwap(other);
654 }
655
656 // implements Message ----------------------------------------------
657
658 MemoryStats* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
659 return CreateMaybeMessage<MemoryStats>(arena);
660 }
661 MemoryStats* New() const {
662 return New(nullptr);
663 }
664 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
665 void CopyFrom(const MemoryStats& from);
666 void MergeFrom(const MemoryStats& from);
667 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
668 bool IsInitialized() const final;
669
670 size_t ByteSizeLong() const final;
671 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
672 ::uint8_t* _InternalSerialize(
673 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
674 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
675
676 private:
677 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
678 void SharedDtor();
679 void SetCachedSize(int size) const;
680 void InternalSwap(MemoryStats* other);
681
682 private:
683 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
684 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
685 return "tensorflow.MemoryStats";
686 }
687 protected:
688 explicit MemoryStats(::PROTOBUF_NAMESPACE_ID::Arena* arena,
689 bool is_message_owned = false);
690 public:
691
692 std::string GetTypeName() const final;
693
694 // nested types ----------------------------------------------------
695
696 // accessors -------------------------------------------------------
697
698 enum : int {
699 kPersistentTensorAllocIdsFieldNumber = 5,
700 kDevicePersistentTensorAllocIdsFieldNumber = 6,
701 kTempMemorySizeFieldNumber = 1,
702 kDeviceTempMemorySizeFieldNumber = 2,
703 kPersistentMemorySizeFieldNumber = 3,
704 kDevicePersistentMemorySizeFieldNumber = 4,
705 };
706 // repeated int64 persistent_tensor_alloc_ids = 5;
707 int persistent_tensor_alloc_ids_size() const;
708 private:
709 int _internal_persistent_tensor_alloc_ids_size() const;
710 public:
711 void clear_persistent_tensor_alloc_ids();
712 private:
713 ::int64_t _internal_persistent_tensor_alloc_ids(int index) const;
714 const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
715 _internal_persistent_tensor_alloc_ids() const;
716 void _internal_add_persistent_tensor_alloc_ids(::int64_t value);
717 ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
718 _internal_mutable_persistent_tensor_alloc_ids();
719 public:
720 ::int64_t persistent_tensor_alloc_ids(int index) const;
721 void set_persistent_tensor_alloc_ids(int index, ::int64_t value);
722 void add_persistent_tensor_alloc_ids(::int64_t value);
723 const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
724 persistent_tensor_alloc_ids() const;
725 ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
726 mutable_persistent_tensor_alloc_ids();
727
728 // repeated int64 device_persistent_tensor_alloc_ids = 6 [deprecated = true];
729 PROTOBUF_DEPRECATED int device_persistent_tensor_alloc_ids_size() const;
730 private:
731 int _internal_device_persistent_tensor_alloc_ids_size() const;
732 public:
733 PROTOBUF_DEPRECATED void clear_device_persistent_tensor_alloc_ids();
734 private:
735 ::int64_t _internal_device_persistent_tensor_alloc_ids(int index) const;
736 const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
737 _internal_device_persistent_tensor_alloc_ids() const;
738 void _internal_add_device_persistent_tensor_alloc_ids(::int64_t value);
739 ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
740 _internal_mutable_device_persistent_tensor_alloc_ids();
741 public:
742 PROTOBUF_DEPRECATED ::int64_t device_persistent_tensor_alloc_ids(int index) const;
743 PROTOBUF_DEPRECATED void set_device_persistent_tensor_alloc_ids(int index, ::int64_t value);
744 PROTOBUF_DEPRECATED void add_device_persistent_tensor_alloc_ids(::int64_t value);
745 PROTOBUF_DEPRECATED const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
746 device_persistent_tensor_alloc_ids() const;
747 PROTOBUF_DEPRECATED ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
748 mutable_device_persistent_tensor_alloc_ids();
749
750 // int64 temp_memory_size = 1;
751 void clear_temp_memory_size();
752 ::int64_t temp_memory_size() const;
753 void set_temp_memory_size(::int64_t value);
754 private:
755 ::int64_t _internal_temp_memory_size() const;
756 void _internal_set_temp_memory_size(::int64_t value);
757 public:
758
759 // int64 device_temp_memory_size = 2 [deprecated = true];
760 PROTOBUF_DEPRECATED void clear_device_temp_memory_size();
761 PROTOBUF_DEPRECATED ::int64_t device_temp_memory_size() const;
762 PROTOBUF_DEPRECATED void set_device_temp_memory_size(::int64_t value);
763 private:
764 ::int64_t _internal_device_temp_memory_size() const;
765 void _internal_set_device_temp_memory_size(::int64_t value);
766 public:
767
768 // int64 persistent_memory_size = 3;
769 void clear_persistent_memory_size();
770 ::int64_t persistent_memory_size() const;
771 void set_persistent_memory_size(::int64_t value);
772 private:
773 ::int64_t _internal_persistent_memory_size() const;
774 void _internal_set_persistent_memory_size(::int64_t value);
775 public:
776
777 // int64 device_persistent_memory_size = 4 [deprecated = true];
778 PROTOBUF_DEPRECATED void clear_device_persistent_memory_size();
779 PROTOBUF_DEPRECATED ::int64_t device_persistent_memory_size() const;
780 PROTOBUF_DEPRECATED void set_device_persistent_memory_size(::int64_t value);
781 private:
782 ::int64_t _internal_device_persistent_memory_size() const;
783 void _internal_set_device_persistent_memory_size(::int64_t value);
784 public:
785
786 // @@protoc_insertion_point(class_scope:tensorflow.MemoryStats)
787 private:
788 class _Internal;
789
790 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
791 typedef void InternalArenaConstructable_;
792 typedef void DestructorSkippable_;
793 struct Impl_ {
794 ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t > persistent_tensor_alloc_ids_;
795 mutable std::atomic<int> _persistent_tensor_alloc_ids_cached_byte_size_;
796 ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t > device_persistent_tensor_alloc_ids_;
797 mutable std::atomic<int> _device_persistent_tensor_alloc_ids_cached_byte_size_;
798 ::int64_t temp_memory_size_;
799 ::int64_t device_temp_memory_size_;
800 ::int64_t persistent_memory_size_;
801 ::int64_t device_persistent_memory_size_;
802 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
803 };
804 union { Impl_ _impl_; };
805 friend struct ::TableStruct_tensorflow_2fcore_2fframework_2fstep_5fstats_2eproto;
806 };
807 // -------------------------------------------------------------------
808
// Message class for tensorflow.NodeExecStats: per-node execution statistics
// (start/end timings in micros and nanos, memory usage, outputs, referenced
// tensors).  Generated by protoc for the lite runtime: derives from
// MessageLite, so there is no descriptor/reflection support.
// NOTE(review): generated file -- regenerate from step_stats.proto rather
// than hand-editing; declarations must stay in sync with the .pb.cc.
class NodeExecStats final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.NodeExecStats) */ {
 public:
  inline NodeExecStats() : NodeExecStats(nullptr) {}
  ~NodeExecStats() override;
  explicit PROTOBUF_CONSTEXPR NodeExecStats(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  NodeExecStats(const NodeExecStats& from);
  NodeExecStats(NodeExecStats&& from) noexcept
    : NodeExecStats() {
    *this = ::std::move(from);
  }

  inline NodeExecStats& operator=(const NodeExecStats& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline NodeExecStats& operator=(NodeExecStats&& from) noexcept {
    if (this == &from) return *this;
    // Move is only a cheap swap when both objects share an owning arena;
    // otherwise it degrades to a deep copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const NodeExecStats& default_instance() {
    return *internal_default_instance();
  }
  static inline const NodeExecStats* internal_default_instance() {
    return reinterpret_cast<const NodeExecStats*>(
               &_NodeExecStats_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    4;

  friend void swap(NodeExecStats& a, NodeExecStats& b) {
    a.Swap(&b);
  }
  inline void Swap(NodeExecStats* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
  #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Precondition (DCHECKed): both messages are on the same arena.
  void UnsafeArenaSwap(NodeExecStats* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  NodeExecStats* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<NodeExecStats>(arena);
  }
  NodeExecStats* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
  void CopyFrom(const NodeExecStats& from);
  void MergeFrom(const NodeExecStats& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(NodeExecStats* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.NodeExecStats";
  }
  protected:
  explicit NodeExecStats(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  // Wire-format field numbers from step_stats.proto.
  enum : int {
    kMemoryFieldNumber = 6,
    kOutputFieldNumber = 7,
    kReferencedTensorFieldNumber = 11,
    kNodeNameFieldNumber = 1,
    kTimelineLabelFieldNumber = 8,
    kMemoryStatsFieldNumber = 12,
    kAllStartMicrosFieldNumber = 2,
    kOpStartRelMicrosFieldNumber = 3,
    kOpEndRelMicrosFieldNumber = 4,
    kAllEndRelMicrosFieldNumber = 5,
    kScheduledMicrosFieldNumber = 9,
    kAllStartNanosFieldNumber = 13,
    kOpStartRelNanosFieldNumber = 14,
    kOpEndRelNanosFieldNumber = 15,
    kAllEndRelNanosFieldNumber = 16,
    kScheduledNanosFieldNumber = 17,
    kThreadIdFieldNumber = 10,
  };
  // repeated .tensorflow.AllocatorMemoryUsed memory = 6;
  int memory_size() const;
  private:
  int _internal_memory_size() const;
  public:
  void clear_memory();
  ::tensorflow::AllocatorMemoryUsed* mutable_memory(int index);
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::AllocatorMemoryUsed >*
      mutable_memory();
  private:
  const ::tensorflow::AllocatorMemoryUsed& _internal_memory(int index) const;
  ::tensorflow::AllocatorMemoryUsed* _internal_add_memory();
  public:
  const ::tensorflow::AllocatorMemoryUsed& memory(int index) const;
  ::tensorflow::AllocatorMemoryUsed* add_memory();
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::AllocatorMemoryUsed >&
      memory() const;

  // repeated .tensorflow.NodeOutput output = 7;
  int output_size() const;
  private:
  int _internal_output_size() const;
  public:
  void clear_output();
  ::tensorflow::NodeOutput* mutable_output(int index);
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::NodeOutput >*
      mutable_output();
  private:
  const ::tensorflow::NodeOutput& _internal_output(int index) const;
  ::tensorflow::NodeOutput* _internal_add_output();
  public:
  const ::tensorflow::NodeOutput& output(int index) const;
  ::tensorflow::NodeOutput* add_output();
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::NodeOutput >&
      output() const;

  // repeated .tensorflow.AllocationDescription referenced_tensor = 11;
  int referenced_tensor_size() const;
  private:
  int _internal_referenced_tensor_size() const;
  public:
  void clear_referenced_tensor();
  ::tensorflow::AllocationDescription* mutable_referenced_tensor(int index);
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::AllocationDescription >*
      mutable_referenced_tensor();
  private:
  const ::tensorflow::AllocationDescription& _internal_referenced_tensor(int index) const;
  ::tensorflow::AllocationDescription* _internal_add_referenced_tensor();
  public:
  const ::tensorflow::AllocationDescription& referenced_tensor(int index) const;
  ::tensorflow::AllocationDescription* add_referenced_tensor();
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::AllocationDescription >&
      referenced_tensor() const;

  // string node_name = 1;
  void clear_node_name();
  const std::string& node_name() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_node_name(ArgT0&& arg0, ArgT... args);
  std::string* mutable_node_name();
  PROTOBUF_NODISCARD std::string* release_node_name();
  void set_allocated_node_name(std::string* node_name);
  private:
  const std::string& _internal_node_name() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_node_name(const std::string& value);
  std::string* _internal_mutable_node_name();
  public:

  // string timeline_label = 8;
  void clear_timeline_label();
  const std::string& timeline_label() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_timeline_label(ArgT0&& arg0, ArgT... args);
  std::string* mutable_timeline_label();
  PROTOBUF_NODISCARD std::string* release_timeline_label();
  void set_allocated_timeline_label(std::string* timeline_label);
  private:
  const std::string& _internal_timeline_label() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_timeline_label(const std::string& value);
  std::string* _internal_mutable_timeline_label();
  public:

  // .tensorflow.MemoryStats memory_stats = 12;
  bool has_memory_stats() const;
  private:
  bool _internal_has_memory_stats() const;
  public:
  void clear_memory_stats();
  const ::tensorflow::MemoryStats& memory_stats() const;
  PROTOBUF_NODISCARD ::tensorflow::MemoryStats* release_memory_stats();
  ::tensorflow::MemoryStats* mutable_memory_stats();
  void set_allocated_memory_stats(::tensorflow::MemoryStats* memory_stats);
  private:
  const ::tensorflow::MemoryStats& _internal_memory_stats() const;
  ::tensorflow::MemoryStats* _internal_mutable_memory_stats();
  public:
  // Arena-unsafe variants: skip the ownership transfer logic of the
  // set_allocated/release pair; caller is responsible for lifetimes.
  void unsafe_arena_set_allocated_memory_stats(
      ::tensorflow::MemoryStats* memory_stats);
  ::tensorflow::MemoryStats* unsafe_arena_release_memory_stats();

  // int64 all_start_micros = 2;
  void clear_all_start_micros();
  ::int64_t all_start_micros() const;
  void set_all_start_micros(::int64_t value);
  private:
  ::int64_t _internal_all_start_micros() const;
  void _internal_set_all_start_micros(::int64_t value);
  public:

  // int64 op_start_rel_micros = 3;
  void clear_op_start_rel_micros();
  ::int64_t op_start_rel_micros() const;
  void set_op_start_rel_micros(::int64_t value);
  private:
  ::int64_t _internal_op_start_rel_micros() const;
  void _internal_set_op_start_rel_micros(::int64_t value);
  public:

  // int64 op_end_rel_micros = 4;
  void clear_op_end_rel_micros();
  ::int64_t op_end_rel_micros() const;
  void set_op_end_rel_micros(::int64_t value);
  private:
  ::int64_t _internal_op_end_rel_micros() const;
  void _internal_set_op_end_rel_micros(::int64_t value);
  public:

  // int64 all_end_rel_micros = 5;
  void clear_all_end_rel_micros();
  ::int64_t all_end_rel_micros() const;
  void set_all_end_rel_micros(::int64_t value);
  private:
  ::int64_t _internal_all_end_rel_micros() const;
  void _internal_set_all_end_rel_micros(::int64_t value);
  public:

  // int64 scheduled_micros = 9;
  void clear_scheduled_micros();
  ::int64_t scheduled_micros() const;
  void set_scheduled_micros(::int64_t value);
  private:
  ::int64_t _internal_scheduled_micros() const;
  void _internal_set_scheduled_micros(::int64_t value);
  public:

  // int64 all_start_nanos = 13;
  void clear_all_start_nanos();
  ::int64_t all_start_nanos() const;
  void set_all_start_nanos(::int64_t value);
  private:
  ::int64_t _internal_all_start_nanos() const;
  void _internal_set_all_start_nanos(::int64_t value);
  public:

  // int64 op_start_rel_nanos = 14;
  void clear_op_start_rel_nanos();
  ::int64_t op_start_rel_nanos() const;
  void set_op_start_rel_nanos(::int64_t value);
  private:
  ::int64_t _internal_op_start_rel_nanos() const;
  void _internal_set_op_start_rel_nanos(::int64_t value);
  public:

  // int64 op_end_rel_nanos = 15;
  void clear_op_end_rel_nanos();
  ::int64_t op_end_rel_nanos() const;
  void set_op_end_rel_nanos(::int64_t value);
  private:
  ::int64_t _internal_op_end_rel_nanos() const;
  void _internal_set_op_end_rel_nanos(::int64_t value);
  public:

  // int64 all_end_rel_nanos = 16;
  void clear_all_end_rel_nanos();
  ::int64_t all_end_rel_nanos() const;
  void set_all_end_rel_nanos(::int64_t value);
  private:
  ::int64_t _internal_all_end_rel_nanos() const;
  void _internal_set_all_end_rel_nanos(::int64_t value);
  public:

  // int64 scheduled_nanos = 17;
  void clear_scheduled_nanos();
  ::int64_t scheduled_nanos() const;
  void set_scheduled_nanos(::int64_t value);
  private:
  ::int64_t _internal_scheduled_nanos() const;
  void _internal_set_scheduled_nanos(::int64_t value);
  public:

  // uint32 thread_id = 10;
  void clear_thread_id();
  ::uint32_t thread_id() const;
  void set_thread_id(::uint32_t value);
  private:
  ::uint32_t _internal_thread_id() const;
  void _internal_set_thread_id(::uint32_t value);
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.NodeExecStats)
  private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // All field storage; wrapped in a union below so construction/destruction
  // is driven explicitly by SharedCtor/SharedDtor (arena support).
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::AllocatorMemoryUsed > memory_;
    ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::NodeOutput > output_;
    ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::AllocationDescription > referenced_tensor_;
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr node_name_;
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr timeline_label_;
    ::tensorflow::MemoryStats* memory_stats_;
    ::int64_t all_start_micros_;
    ::int64_t op_start_rel_micros_;
    ::int64_t op_end_rel_micros_;
    ::int64_t all_end_rel_micros_;
    ::int64_t scheduled_micros_;
    ::int64_t all_start_nanos_;
    ::int64_t op_start_rel_nanos_;
    ::int64_t op_end_rel_nanos_;
    ::int64_t all_end_rel_nanos_;
    ::int64_t scheduled_nanos_;
    ::uint32_t thread_id_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fframework_2fstep_5fstats_2eproto;
};
1163 // -------------------------------------------------------------------
1164
// Internal map-entry type backing DeviceStepStats' `map<uint32, string>
// thread_names = 3;` field.  Not part of the public API (hence DoNotUse);
// only the generated code and MapFieldLite machinery reference it.
class DeviceStepStats_ThreadNamesEntry_DoNotUse : public ::PROTOBUF_NAMESPACE_ID::internal::MapEntryLite<DeviceStepStats_ThreadNamesEntry_DoNotUse,
    ::uint32_t, std::string,
    ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32,
    ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_STRING> {
public:
  typedef ::PROTOBUF_NAMESPACE_ID::internal::MapEntryLite<DeviceStepStats_ThreadNamesEntry_DoNotUse,
    ::uint32_t, std::string,
    ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32,
    ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_STRING> SuperType;
  DeviceStepStats_ThreadNamesEntry_DoNotUse();
  explicit PROTOBUF_CONSTEXPR DeviceStepStats_ThreadNamesEntry_DoNotUse(
      ::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
  explicit DeviceStepStats_ThreadNamesEntry_DoNotUse(::PROTOBUF_NAMESPACE_ID::Arena* arena);
  void MergeFrom(const DeviceStepStats_ThreadNamesEntry_DoNotUse& other);
  static const DeviceStepStats_ThreadNamesEntry_DoNotUse* internal_default_instance() { return reinterpret_cast<const DeviceStepStats_ThreadNamesEntry_DoNotUse*>(&_DeviceStepStats_ThreadNamesEntry_DoNotUse_default_instance_); }
  // uint32 keys need no validation; string values are checked for UTF-8 on
  // parse, per proto3 string semantics.
  static bool ValidateKey(void*) { return true; }
  static bool ValidateValue(std::string* s) {
    return ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String(s->data(), static_cast<int>(s->size()), ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::PARSE, "tensorflow.DeviceStepStats.ThreadNamesEntry.value");
  }
  friend struct ::TableStruct_tensorflow_2fcore_2fframework_2fstep_5fstats_2eproto;
};
1186
1187 // -------------------------------------------------------------------
1188
// Message class for tensorflow.DeviceStepStats: the per-device slice of a
// step's statistics (device name, node stats, thread-id -> thread-name map).
// Generated by protoc for the lite runtime (MessageLite base).
// NOTE(review): generated file -- regenerate from step_stats.proto rather
// than hand-editing; declarations must stay in sync with the .pb.cc.
class DeviceStepStats final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.DeviceStepStats) */ {
 public:
  inline DeviceStepStats() : DeviceStepStats(nullptr) {}
  ~DeviceStepStats() override;
  explicit PROTOBUF_CONSTEXPR DeviceStepStats(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  DeviceStepStats(const DeviceStepStats& from);
  DeviceStepStats(DeviceStepStats&& from) noexcept
    : DeviceStepStats() {
    *this = ::std::move(from);
  }

  inline DeviceStepStats& operator=(const DeviceStepStats& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline DeviceStepStats& operator=(DeviceStepStats&& from) noexcept {
    if (this == &from) return *this;
    // Move is only a cheap swap when both objects share an owning arena;
    // otherwise it degrades to a deep copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const DeviceStepStats& default_instance() {
    return *internal_default_instance();
  }
  static inline const DeviceStepStats* internal_default_instance() {
    return reinterpret_cast<const DeviceStepStats*>(
               &_DeviceStepStats_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    6;

  friend void swap(DeviceStepStats& a, DeviceStepStats& b) {
    a.Swap(&b);
  }
  inline void Swap(DeviceStepStats* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
  #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Precondition (DCHECKed): both messages are on the same arena.
  void UnsafeArenaSwap(DeviceStepStats* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  DeviceStepStats* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<DeviceStepStats>(arena);
  }
  DeviceStepStats* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
  void CopyFrom(const DeviceStepStats& from);
  void MergeFrom(const DeviceStepStats& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(DeviceStepStats* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.DeviceStepStats";
  }
  protected:
  explicit DeviceStepStats(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------


  // accessors -------------------------------------------------------

  // Wire-format field numbers from step_stats.proto.
  enum : int {
    kNodeStatsFieldNumber = 2,
    kThreadNamesFieldNumber = 3,
    kDeviceFieldNumber = 1,
  };
  // repeated .tensorflow.NodeExecStats node_stats = 2;
  int node_stats_size() const;
  private:
  int _internal_node_stats_size() const;
  public:
  void clear_node_stats();
  ::tensorflow::NodeExecStats* mutable_node_stats(int index);
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::NodeExecStats >*
      mutable_node_stats();
  private:
  const ::tensorflow::NodeExecStats& _internal_node_stats(int index) const;
  ::tensorflow::NodeExecStats* _internal_add_node_stats();
  public:
  const ::tensorflow::NodeExecStats& node_stats(int index) const;
  ::tensorflow::NodeExecStats* add_node_stats();
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::NodeExecStats >&
      node_stats() const;

  // map<uint32, string> thread_names = 3;
  int thread_names_size() const;
  private:
  int _internal_thread_names_size() const;
  public:
  void clear_thread_names();
  private:
  const ::PROTOBUF_NAMESPACE_ID::Map< ::uint32_t, std::string >&
      _internal_thread_names() const;
  ::PROTOBUF_NAMESPACE_ID::Map< ::uint32_t, std::string >*
      _internal_mutable_thread_names();
  public:
  const ::PROTOBUF_NAMESPACE_ID::Map< ::uint32_t, std::string >&
      thread_names() const;
  ::PROTOBUF_NAMESPACE_ID::Map< ::uint32_t, std::string >*
      mutable_thread_names();

  // string device = 1;
  void clear_device();
  const std::string& device() const;
  template <typename ArgT0 = const std::string&, typename... ArgT>
  void set_device(ArgT0&& arg0, ArgT... args);
  std::string* mutable_device();
  PROTOBUF_NODISCARD std::string* release_device();
  void set_allocated_device(std::string* device);
  private:
  const std::string& _internal_device() const;
  inline PROTOBUF_ALWAYS_INLINE void _internal_set_device(const std::string& value);
  std::string* _internal_mutable_device();
  public:

  // @@protoc_insertion_point(class_scope:tensorflow.DeviceStepStats)
  private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // All field storage; wrapped in a union below so construction/destruction
  // is driven explicitly by SharedCtor/SharedDtor (arena support).
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::NodeExecStats > node_stats_;
    ::PROTOBUF_NAMESPACE_ID::internal::MapFieldLite<
        DeviceStepStats_ThreadNamesEntry_DoNotUse,
        ::uint32_t, std::string,
        ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_UINT32,
        ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_STRING> thread_names_;
    ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr device_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fframework_2fstep_5fstats_2eproto;
};
1370 // -------------------------------------------------------------------
1371
// Message class for tensorflow.StepStats: the top-level container, a
// repeated DeviceStepStats (one entry per device for a step).  Generated by
// protoc for the lite runtime (MessageLite base).
// NOTE(review): generated file -- regenerate from step_stats.proto rather
// than hand-editing; declarations must stay in sync with the .pb.cc.
class StepStats final :
    public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.StepStats) */ {
 public:
  inline StepStats() : StepStats(nullptr) {}
  ~StepStats() override;
  explicit PROTOBUF_CONSTEXPR StepStats(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);

  StepStats(const StepStats& from);
  StepStats(StepStats&& from) noexcept
    : StepStats() {
    *this = ::std::move(from);
  }

  inline StepStats& operator=(const StepStats& from) {
    if (this == &from) return *this;
    CopyFrom(from);
    return *this;
  }
  inline StepStats& operator=(StepStats&& from) noexcept {
    if (this == &from) return *this;
    // Move is only a cheap swap when both objects share an owning arena;
    // otherwise it degrades to a deep copy.
    if (GetOwningArena() == from.GetOwningArena()
  #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
        && GetOwningArena() != nullptr
  #endif  // !PROTOBUF_FORCE_COPY_IN_MOVE
    ) {
      InternalSwap(&from);
    } else {
      CopyFrom(from);
    }
    return *this;
  }

  static const StepStats& default_instance() {
    return *internal_default_instance();
  }
  static inline const StepStats* internal_default_instance() {
    return reinterpret_cast<const StepStats*>(
               &_StepStats_default_instance_);
  }
  static constexpr int kIndexInFileMessages =
    7;

  friend void swap(StepStats& a, StepStats& b) {
    a.Swap(&b);
  }
  inline void Swap(StepStats* other) {
    if (other == this) return;
  #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() != nullptr &&
        GetOwningArena() == other->GetOwningArena()) {
  #else  // PROTOBUF_FORCE_COPY_IN_SWAP
    if (GetOwningArena() == other->GetOwningArena()) {
  #endif  // !PROTOBUF_FORCE_COPY_IN_SWAP
      InternalSwap(other);
    } else {
      ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
    }
  }
  // Precondition (DCHECKed): both messages are on the same arena.
  void UnsafeArenaSwap(StepStats* other) {
    if (other == this) return;
    GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
    InternalSwap(other);
  }

  // implements Message ----------------------------------------------

  StepStats* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
    return CreateMaybeMessage<StepStats>(arena);
  }
  StepStats* New() const {
    return New(nullptr);
  }
  void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
  void CopyFrom(const StepStats& from);
  void MergeFrom(const StepStats& from);
  PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
  bool IsInitialized() const final;

  size_t ByteSizeLong() const final;
  const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
  ::uint8_t* _InternalSerialize(
      ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
  int GetCachedSize() const final { return _impl_._cached_size_.Get(); }

  private:
  void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
  void SharedDtor();
  void SetCachedSize(int size) const;
  void InternalSwap(StepStats* other);

  private:
  friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
  static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
    return "tensorflow.StepStats";
  }
  protected:
  explicit StepStats(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                       bool is_message_owned = false);
  public:

  std::string GetTypeName() const final;

  // nested types ----------------------------------------------------

  // accessors -------------------------------------------------------

  // Wire-format field numbers from step_stats.proto.
  enum : int {
    kDevStatsFieldNumber = 1,
  };
  // repeated .tensorflow.DeviceStepStats dev_stats = 1;
  int dev_stats_size() const;
  private:
  int _internal_dev_stats_size() const;
  public:
  void clear_dev_stats();
  ::tensorflow::DeviceStepStats* mutable_dev_stats(int index);
  ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::DeviceStepStats >*
      mutable_dev_stats();
  private:
  const ::tensorflow::DeviceStepStats& _internal_dev_stats(int index) const;
  ::tensorflow::DeviceStepStats* _internal_add_dev_stats();
  public:
  const ::tensorflow::DeviceStepStats& dev_stats(int index) const;
  ::tensorflow::DeviceStepStats* add_dev_stats();
  const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::DeviceStepStats >&
      dev_stats() const;

  // @@protoc_insertion_point(class_scope:tensorflow.StepStats)
  private:
  class _Internal;

  template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
  typedef void InternalArenaConstructable_;
  typedef void DestructorSkippable_;
  // All field storage; wrapped in a union below so construction/destruction
  // is driven explicitly by SharedCtor/SharedDtor (arena support).
  struct Impl_ {
    ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::DeviceStepStats > dev_stats_;
    mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
  };
  union { Impl_ _impl_; };
  friend struct ::TableStruct_tensorflow_2fcore_2fframework_2fstep_5fstats_2eproto;
};
1513 // ===================================================================
1514
1515
1516 // ===================================================================
1517
1518 #ifdef __GNUC__
1519 #pragma GCC diagnostic push
1520 #pragma GCC diagnostic ignored "-Wstrict-aliasing"
1521 #endif // __GNUC__
1522 // AllocationRecord
1523
// int64 alloc_micros = 1;
// Generated scalar accessors: public get/set/clear delegate to _internal_*
// variants, which read/write Impl_ storage directly.
inline void AllocationRecord::clear_alloc_micros() {
  _impl_.alloc_micros_ = ::int64_t{0};
}
inline ::int64_t AllocationRecord::_internal_alloc_micros() const {
  return _impl_.alloc_micros_;
}
inline ::int64_t AllocationRecord::alloc_micros() const {
  // @@protoc_insertion_point(field_get:tensorflow.AllocationRecord.alloc_micros)
  return _internal_alloc_micros();
}
inline void AllocationRecord::_internal_set_alloc_micros(::int64_t value) {
  
  _impl_.alloc_micros_ = value;
}
inline void AllocationRecord::set_alloc_micros(::int64_t value) {
  _internal_set_alloc_micros(value);
  // @@protoc_insertion_point(field_set:tensorflow.AllocationRecord.alloc_micros)
}
1543
// int64 alloc_bytes = 2;
// Generated scalar accessors: public get/set/clear delegate to _internal_*
// variants, which read/write Impl_ storage directly.
inline void AllocationRecord::clear_alloc_bytes() {
  _impl_.alloc_bytes_ = ::int64_t{0};
}
inline ::int64_t AllocationRecord::_internal_alloc_bytes() const {
  return _impl_.alloc_bytes_;
}
inline ::int64_t AllocationRecord::alloc_bytes() const {
  // @@protoc_insertion_point(field_get:tensorflow.AllocationRecord.alloc_bytes)
  return _internal_alloc_bytes();
}
inline void AllocationRecord::_internal_set_alloc_bytes(::int64_t value) {
  
  _impl_.alloc_bytes_ = value;
}
inline void AllocationRecord::set_alloc_bytes(::int64_t value) {
  _internal_set_alloc_bytes(value);
  // @@protoc_insertion_point(field_set:tensorflow.AllocationRecord.alloc_bytes)
}
1563
1564 // -------------------------------------------------------------------
1565
1566 // AllocatorMemoryUsed
1567
// string allocator_name = 1;
// Generated string accessors backed by ArenaStringPtr; all mutations go
// through GetArenaForAllocation() so storage follows the message's arena.
inline void AllocatorMemoryUsed::clear_allocator_name() {
  _impl_.allocator_name_.ClearToEmpty();
}
inline const std::string& AllocatorMemoryUsed::allocator_name() const {
  // @@protoc_insertion_point(field_get:tensorflow.AllocatorMemoryUsed.allocator_name)
  return _internal_allocator_name();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void AllocatorMemoryUsed::set_allocator_name(ArgT0&& arg0, ArgT... args) {
  
  _impl_.allocator_name_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.AllocatorMemoryUsed.allocator_name)
}
inline std::string* AllocatorMemoryUsed::mutable_allocator_name() {
  std::string* _s = _internal_mutable_allocator_name();
  // @@protoc_insertion_point(field_mutable:tensorflow.AllocatorMemoryUsed.allocator_name)
  return _s;
}
inline const std::string& AllocatorMemoryUsed::_internal_allocator_name() const {
  return _impl_.allocator_name_.Get();
}
inline void AllocatorMemoryUsed::_internal_set_allocator_name(const std::string& value) {
  
  _impl_.allocator_name_.Set(value, GetArenaForAllocation());
}
inline std::string* AllocatorMemoryUsed::_internal_mutable_allocator_name() {
  
  return _impl_.allocator_name_.Mutable(GetArenaForAllocation());
}
// Transfers ownership of the string to the caller.
inline std::string* AllocatorMemoryUsed::release_allocator_name() {
  // @@protoc_insertion_point(field_release:tensorflow.AllocatorMemoryUsed.allocator_name)
  return _impl_.allocator_name_.Release();
}
// Takes ownership of `allocator_name` (may be nullptr to clear).
inline void AllocatorMemoryUsed::set_allocated_allocator_name(std::string* allocator_name) {
  _impl_.allocator_name_.SetAllocated(allocator_name, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.allocator_name_.IsDefault()) {
    _impl_.allocator_name_.Set("", GetArenaForAllocation());
  }
#endif  // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.AllocatorMemoryUsed.allocator_name)
}
1612
1613 // int64 total_bytes = 2;
clear_total_bytes()1614 inline void AllocatorMemoryUsed::clear_total_bytes() {
1615 _impl_.total_bytes_ = ::int64_t{0};
1616 }
_internal_total_bytes()1617 inline ::int64_t AllocatorMemoryUsed::_internal_total_bytes() const {
1618 return _impl_.total_bytes_;
1619 }
total_bytes()1620 inline ::int64_t AllocatorMemoryUsed::total_bytes() const {
1621 // @@protoc_insertion_point(field_get:tensorflow.AllocatorMemoryUsed.total_bytes)
1622 return _internal_total_bytes();
1623 }
_internal_set_total_bytes(::int64_t value)1624 inline void AllocatorMemoryUsed::_internal_set_total_bytes(::int64_t value) {
1625
1626 _impl_.total_bytes_ = value;
1627 }
set_total_bytes(::int64_t value)1628 inline void AllocatorMemoryUsed::set_total_bytes(::int64_t value) {
1629 _internal_set_total_bytes(value);
1630 // @@protoc_insertion_point(field_set:tensorflow.AllocatorMemoryUsed.total_bytes)
1631 }
1632
1633 // int64 peak_bytes = 3;
clear_peak_bytes()1634 inline void AllocatorMemoryUsed::clear_peak_bytes() {
1635 _impl_.peak_bytes_ = ::int64_t{0};
1636 }
_internal_peak_bytes()1637 inline ::int64_t AllocatorMemoryUsed::_internal_peak_bytes() const {
1638 return _impl_.peak_bytes_;
1639 }
peak_bytes()1640 inline ::int64_t AllocatorMemoryUsed::peak_bytes() const {
1641 // @@protoc_insertion_point(field_get:tensorflow.AllocatorMemoryUsed.peak_bytes)
1642 return _internal_peak_bytes();
1643 }
_internal_set_peak_bytes(::int64_t value)1644 inline void AllocatorMemoryUsed::_internal_set_peak_bytes(::int64_t value) {
1645
1646 _impl_.peak_bytes_ = value;
1647 }
set_peak_bytes(::int64_t value)1648 inline void AllocatorMemoryUsed::set_peak_bytes(::int64_t value) {
1649 _internal_set_peak_bytes(value);
1650 // @@protoc_insertion_point(field_set:tensorflow.AllocatorMemoryUsed.peak_bytes)
1651 }
1652
1653 // int64 live_bytes = 4;
clear_live_bytes()1654 inline void AllocatorMemoryUsed::clear_live_bytes() {
1655 _impl_.live_bytes_ = ::int64_t{0};
1656 }
_internal_live_bytes()1657 inline ::int64_t AllocatorMemoryUsed::_internal_live_bytes() const {
1658 return _impl_.live_bytes_;
1659 }
live_bytes()1660 inline ::int64_t AllocatorMemoryUsed::live_bytes() const {
1661 // @@protoc_insertion_point(field_get:tensorflow.AllocatorMemoryUsed.live_bytes)
1662 return _internal_live_bytes();
1663 }
_internal_set_live_bytes(::int64_t value)1664 inline void AllocatorMemoryUsed::_internal_set_live_bytes(::int64_t value) {
1665
1666 _impl_.live_bytes_ = value;
1667 }
set_live_bytes(::int64_t value)1668 inline void AllocatorMemoryUsed::set_live_bytes(::int64_t value) {
1669 _internal_set_live_bytes(value);
1670 // @@protoc_insertion_point(field_set:tensorflow.AllocatorMemoryUsed.live_bytes)
1671 }
1672
1673 // repeated .tensorflow.AllocationRecord allocation_records = 6;
_internal_allocation_records_size()1674 inline int AllocatorMemoryUsed::_internal_allocation_records_size() const {
1675 return _impl_.allocation_records_.size();
1676 }
allocation_records_size()1677 inline int AllocatorMemoryUsed::allocation_records_size() const {
1678 return _internal_allocation_records_size();
1679 }
clear_allocation_records()1680 inline void AllocatorMemoryUsed::clear_allocation_records() {
1681 _impl_.allocation_records_.Clear();
1682 }
mutable_allocation_records(int index)1683 inline ::tensorflow::AllocationRecord* AllocatorMemoryUsed::mutable_allocation_records(int index) {
1684 // @@protoc_insertion_point(field_mutable:tensorflow.AllocatorMemoryUsed.allocation_records)
1685 return _impl_.allocation_records_.Mutable(index);
1686 }
1687 inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::AllocationRecord >*
mutable_allocation_records()1688 AllocatorMemoryUsed::mutable_allocation_records() {
1689 // @@protoc_insertion_point(field_mutable_list:tensorflow.AllocatorMemoryUsed.allocation_records)
1690 return &_impl_.allocation_records_;
1691 }
_internal_allocation_records(int index)1692 inline const ::tensorflow::AllocationRecord& AllocatorMemoryUsed::_internal_allocation_records(int index) const {
1693 return _impl_.allocation_records_.Get(index);
1694 }
allocation_records(int index)1695 inline const ::tensorflow::AllocationRecord& AllocatorMemoryUsed::allocation_records(int index) const {
1696 // @@protoc_insertion_point(field_get:tensorflow.AllocatorMemoryUsed.allocation_records)
1697 return _internal_allocation_records(index);
1698 }
_internal_add_allocation_records()1699 inline ::tensorflow::AllocationRecord* AllocatorMemoryUsed::_internal_add_allocation_records() {
1700 return _impl_.allocation_records_.Add();
1701 }
add_allocation_records()1702 inline ::tensorflow::AllocationRecord* AllocatorMemoryUsed::add_allocation_records() {
1703 ::tensorflow::AllocationRecord* _add = _internal_add_allocation_records();
1704 // @@protoc_insertion_point(field_add:tensorflow.AllocatorMemoryUsed.allocation_records)
1705 return _add;
1706 }
1707 inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::AllocationRecord >&
allocation_records()1708 AllocatorMemoryUsed::allocation_records() const {
1709 // @@protoc_insertion_point(field_list:tensorflow.AllocatorMemoryUsed.allocation_records)
1710 return _impl_.allocation_records_;
1711 }
1712
1713 // int64 allocator_bytes_in_use = 5;
clear_allocator_bytes_in_use()1714 inline void AllocatorMemoryUsed::clear_allocator_bytes_in_use() {
1715 _impl_.allocator_bytes_in_use_ = ::int64_t{0};
1716 }
_internal_allocator_bytes_in_use()1717 inline ::int64_t AllocatorMemoryUsed::_internal_allocator_bytes_in_use() const {
1718 return _impl_.allocator_bytes_in_use_;
1719 }
allocator_bytes_in_use()1720 inline ::int64_t AllocatorMemoryUsed::allocator_bytes_in_use() const {
1721 // @@protoc_insertion_point(field_get:tensorflow.AllocatorMemoryUsed.allocator_bytes_in_use)
1722 return _internal_allocator_bytes_in_use();
1723 }
_internal_set_allocator_bytes_in_use(::int64_t value)1724 inline void AllocatorMemoryUsed::_internal_set_allocator_bytes_in_use(::int64_t value) {
1725
1726 _impl_.allocator_bytes_in_use_ = value;
1727 }
set_allocator_bytes_in_use(::int64_t value)1728 inline void AllocatorMemoryUsed::set_allocator_bytes_in_use(::int64_t value) {
1729 _internal_set_allocator_bytes_in_use(value);
1730 // @@protoc_insertion_point(field_set:tensorflow.AllocatorMemoryUsed.allocator_bytes_in_use)
1731 }
1732
1733 // -------------------------------------------------------------------
1734
1735 // NodeOutput
1736
1737 // int32 slot = 1;
clear_slot()1738 inline void NodeOutput::clear_slot() {
1739 _impl_.slot_ = 0;
1740 }
_internal_slot()1741 inline ::int32_t NodeOutput::_internal_slot() const {
1742 return _impl_.slot_;
1743 }
slot()1744 inline ::int32_t NodeOutput::slot() const {
1745 // @@protoc_insertion_point(field_get:tensorflow.NodeOutput.slot)
1746 return _internal_slot();
1747 }
_internal_set_slot(::int32_t value)1748 inline void NodeOutput::_internal_set_slot(::int32_t value) {
1749
1750 _impl_.slot_ = value;
1751 }
set_slot(::int32_t value)1752 inline void NodeOutput::set_slot(::int32_t value) {
1753 _internal_set_slot(value);
1754 // @@protoc_insertion_point(field_set:tensorflow.NodeOutput.slot)
1755 }
1756
1757 // .tensorflow.TensorDescription tensor_description = 3;
_internal_has_tensor_description()1758 inline bool NodeOutput::_internal_has_tensor_description() const {
1759 return this != internal_default_instance() && _impl_.tensor_description_ != nullptr;
1760 }
has_tensor_description()1761 inline bool NodeOutput::has_tensor_description() const {
1762 return _internal_has_tensor_description();
1763 }
_internal_tensor_description()1764 inline const ::tensorflow::TensorDescription& NodeOutput::_internal_tensor_description() const {
1765 const ::tensorflow::TensorDescription* p = _impl_.tensor_description_;
1766 return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::TensorDescription&>(
1767 ::tensorflow::_TensorDescription_default_instance_);
1768 }
tensor_description()1769 inline const ::tensorflow::TensorDescription& NodeOutput::tensor_description() const {
1770 // @@protoc_insertion_point(field_get:tensorflow.NodeOutput.tensor_description)
1771 return _internal_tensor_description();
1772 }
unsafe_arena_set_allocated_tensor_description(::tensorflow::TensorDescription * tensor_description)1773 inline void NodeOutput::unsafe_arena_set_allocated_tensor_description(
1774 ::tensorflow::TensorDescription* tensor_description) {
1775 if (GetArenaForAllocation() == nullptr) {
1776 delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.tensor_description_);
1777 }
1778 _impl_.tensor_description_ = tensor_description;
1779 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.NodeOutput.tensor_description)
1780 }
release_tensor_description()1781 inline ::tensorflow::TensorDescription* NodeOutput::release_tensor_description() {
1782
1783 ::tensorflow::TensorDescription* temp = _impl_.tensor_description_;
1784 _impl_.tensor_description_ = nullptr;
1785 #ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
1786 auto* old = reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
1787 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
1788 if (GetArenaForAllocation() == nullptr) { delete old; }
1789 #else // PROTOBUF_FORCE_COPY_IN_RELEASE
1790 if (GetArenaForAllocation() != nullptr) {
1791 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
1792 }
1793 #endif // !PROTOBUF_FORCE_COPY_IN_RELEASE
1794 return temp;
1795 }
unsafe_arena_release_tensor_description()1796 inline ::tensorflow::TensorDescription* NodeOutput::unsafe_arena_release_tensor_description() {
1797 // @@protoc_insertion_point(field_release:tensorflow.NodeOutput.tensor_description)
1798
1799 ::tensorflow::TensorDescription* temp = _impl_.tensor_description_;
1800 _impl_.tensor_description_ = nullptr;
1801 return temp;
1802 }
_internal_mutable_tensor_description()1803 inline ::tensorflow::TensorDescription* NodeOutput::_internal_mutable_tensor_description() {
1804
1805 if (_impl_.tensor_description_ == nullptr) {
1806 auto* p = CreateMaybeMessage<::tensorflow::TensorDescription>(GetArenaForAllocation());
1807 _impl_.tensor_description_ = p;
1808 }
1809 return _impl_.tensor_description_;
1810 }
mutable_tensor_description()1811 inline ::tensorflow::TensorDescription* NodeOutput::mutable_tensor_description() {
1812 ::tensorflow::TensorDescription* _msg = _internal_mutable_tensor_description();
1813 // @@protoc_insertion_point(field_mutable:tensorflow.NodeOutput.tensor_description)
1814 return _msg;
1815 }
set_allocated_tensor_description(::tensorflow::TensorDescription * tensor_description)1816 inline void NodeOutput::set_allocated_tensor_description(::tensorflow::TensorDescription* tensor_description) {
1817 ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
1818 if (message_arena == nullptr) {
1819 delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.tensor_description_);
1820 }
1821 if (tensor_description) {
1822 ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
1823 ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(
1824 reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(tensor_description));
1825 if (message_arena != submessage_arena) {
1826 tensor_description = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
1827 message_arena, tensor_description, submessage_arena);
1828 }
1829
1830 } else {
1831
1832 }
1833 _impl_.tensor_description_ = tensor_description;
1834 // @@protoc_insertion_point(field_set_allocated:tensorflow.NodeOutput.tensor_description)
1835 }
1836
1837 // -------------------------------------------------------------------
1838
1839 // MemoryStats
1840
1841 // int64 temp_memory_size = 1;
clear_temp_memory_size()1842 inline void MemoryStats::clear_temp_memory_size() {
1843 _impl_.temp_memory_size_ = ::int64_t{0};
1844 }
_internal_temp_memory_size()1845 inline ::int64_t MemoryStats::_internal_temp_memory_size() const {
1846 return _impl_.temp_memory_size_;
1847 }
temp_memory_size()1848 inline ::int64_t MemoryStats::temp_memory_size() const {
1849 // @@protoc_insertion_point(field_get:tensorflow.MemoryStats.temp_memory_size)
1850 return _internal_temp_memory_size();
1851 }
_internal_set_temp_memory_size(::int64_t value)1852 inline void MemoryStats::_internal_set_temp_memory_size(::int64_t value) {
1853
1854 _impl_.temp_memory_size_ = value;
1855 }
set_temp_memory_size(::int64_t value)1856 inline void MemoryStats::set_temp_memory_size(::int64_t value) {
1857 _internal_set_temp_memory_size(value);
1858 // @@protoc_insertion_point(field_set:tensorflow.MemoryStats.temp_memory_size)
1859 }
1860
1861 // int64 persistent_memory_size = 3;
clear_persistent_memory_size()1862 inline void MemoryStats::clear_persistent_memory_size() {
1863 _impl_.persistent_memory_size_ = ::int64_t{0};
1864 }
_internal_persistent_memory_size()1865 inline ::int64_t MemoryStats::_internal_persistent_memory_size() const {
1866 return _impl_.persistent_memory_size_;
1867 }
persistent_memory_size()1868 inline ::int64_t MemoryStats::persistent_memory_size() const {
1869 // @@protoc_insertion_point(field_get:tensorflow.MemoryStats.persistent_memory_size)
1870 return _internal_persistent_memory_size();
1871 }
_internal_set_persistent_memory_size(::int64_t value)1872 inline void MemoryStats::_internal_set_persistent_memory_size(::int64_t value) {
1873
1874 _impl_.persistent_memory_size_ = value;
1875 }
set_persistent_memory_size(::int64_t value)1876 inline void MemoryStats::set_persistent_memory_size(::int64_t value) {
1877 _internal_set_persistent_memory_size(value);
1878 // @@protoc_insertion_point(field_set:tensorflow.MemoryStats.persistent_memory_size)
1879 }
1880
1881 // repeated int64 persistent_tensor_alloc_ids = 5;
_internal_persistent_tensor_alloc_ids_size()1882 inline int MemoryStats::_internal_persistent_tensor_alloc_ids_size() const {
1883 return _impl_.persistent_tensor_alloc_ids_.size();
1884 }
persistent_tensor_alloc_ids_size()1885 inline int MemoryStats::persistent_tensor_alloc_ids_size() const {
1886 return _internal_persistent_tensor_alloc_ids_size();
1887 }
clear_persistent_tensor_alloc_ids()1888 inline void MemoryStats::clear_persistent_tensor_alloc_ids() {
1889 _impl_.persistent_tensor_alloc_ids_.Clear();
1890 }
_internal_persistent_tensor_alloc_ids(int index)1891 inline ::int64_t MemoryStats::_internal_persistent_tensor_alloc_ids(int index) const {
1892 return _impl_.persistent_tensor_alloc_ids_.Get(index);
1893 }
persistent_tensor_alloc_ids(int index)1894 inline ::int64_t MemoryStats::persistent_tensor_alloc_ids(int index) const {
1895 // @@protoc_insertion_point(field_get:tensorflow.MemoryStats.persistent_tensor_alloc_ids)
1896 return _internal_persistent_tensor_alloc_ids(index);
1897 }
set_persistent_tensor_alloc_ids(int index,::int64_t value)1898 inline void MemoryStats::set_persistent_tensor_alloc_ids(int index, ::int64_t value) {
1899 _impl_.persistent_tensor_alloc_ids_.Set(index, value);
1900 // @@protoc_insertion_point(field_set:tensorflow.MemoryStats.persistent_tensor_alloc_ids)
1901 }
_internal_add_persistent_tensor_alloc_ids(::int64_t value)1902 inline void MemoryStats::_internal_add_persistent_tensor_alloc_ids(::int64_t value) {
1903 _impl_.persistent_tensor_alloc_ids_.Add(value);
1904 }
add_persistent_tensor_alloc_ids(::int64_t value)1905 inline void MemoryStats::add_persistent_tensor_alloc_ids(::int64_t value) {
1906 _internal_add_persistent_tensor_alloc_ids(value);
1907 // @@protoc_insertion_point(field_add:tensorflow.MemoryStats.persistent_tensor_alloc_ids)
1908 }
1909 inline const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
_internal_persistent_tensor_alloc_ids()1910 MemoryStats::_internal_persistent_tensor_alloc_ids() const {
1911 return _impl_.persistent_tensor_alloc_ids_;
1912 }
1913 inline const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
persistent_tensor_alloc_ids()1914 MemoryStats::persistent_tensor_alloc_ids() const {
1915 // @@protoc_insertion_point(field_list:tensorflow.MemoryStats.persistent_tensor_alloc_ids)
1916 return _internal_persistent_tensor_alloc_ids();
1917 }
1918 inline ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
_internal_mutable_persistent_tensor_alloc_ids()1919 MemoryStats::_internal_mutable_persistent_tensor_alloc_ids() {
1920 return &_impl_.persistent_tensor_alloc_ids_;
1921 }
1922 inline ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
mutable_persistent_tensor_alloc_ids()1923 MemoryStats::mutable_persistent_tensor_alloc_ids() {
1924 // @@protoc_insertion_point(field_mutable_list:tensorflow.MemoryStats.persistent_tensor_alloc_ids)
1925 return _internal_mutable_persistent_tensor_alloc_ids();
1926 }
1927
1928 // int64 device_temp_memory_size = 2 [deprecated = true];
clear_device_temp_memory_size()1929 inline void MemoryStats::clear_device_temp_memory_size() {
1930 _impl_.device_temp_memory_size_ = ::int64_t{0};
1931 }
_internal_device_temp_memory_size()1932 inline ::int64_t MemoryStats::_internal_device_temp_memory_size() const {
1933 return _impl_.device_temp_memory_size_;
1934 }
device_temp_memory_size()1935 inline ::int64_t MemoryStats::device_temp_memory_size() const {
1936 // @@protoc_insertion_point(field_get:tensorflow.MemoryStats.device_temp_memory_size)
1937 return _internal_device_temp_memory_size();
1938 }
_internal_set_device_temp_memory_size(::int64_t value)1939 inline void MemoryStats::_internal_set_device_temp_memory_size(::int64_t value) {
1940
1941 _impl_.device_temp_memory_size_ = value;
1942 }
set_device_temp_memory_size(::int64_t value)1943 inline void MemoryStats::set_device_temp_memory_size(::int64_t value) {
1944 _internal_set_device_temp_memory_size(value);
1945 // @@protoc_insertion_point(field_set:tensorflow.MemoryStats.device_temp_memory_size)
1946 }
1947
1948 // int64 device_persistent_memory_size = 4 [deprecated = true];
clear_device_persistent_memory_size()1949 inline void MemoryStats::clear_device_persistent_memory_size() {
1950 _impl_.device_persistent_memory_size_ = ::int64_t{0};
1951 }
_internal_device_persistent_memory_size()1952 inline ::int64_t MemoryStats::_internal_device_persistent_memory_size() const {
1953 return _impl_.device_persistent_memory_size_;
1954 }
device_persistent_memory_size()1955 inline ::int64_t MemoryStats::device_persistent_memory_size() const {
1956 // @@protoc_insertion_point(field_get:tensorflow.MemoryStats.device_persistent_memory_size)
1957 return _internal_device_persistent_memory_size();
1958 }
_internal_set_device_persistent_memory_size(::int64_t value)1959 inline void MemoryStats::_internal_set_device_persistent_memory_size(::int64_t value) {
1960
1961 _impl_.device_persistent_memory_size_ = value;
1962 }
set_device_persistent_memory_size(::int64_t value)1963 inline void MemoryStats::set_device_persistent_memory_size(::int64_t value) {
1964 _internal_set_device_persistent_memory_size(value);
1965 // @@protoc_insertion_point(field_set:tensorflow.MemoryStats.device_persistent_memory_size)
1966 }
1967
1968 // repeated int64 device_persistent_tensor_alloc_ids = 6 [deprecated = true];
_internal_device_persistent_tensor_alloc_ids_size()1969 inline int MemoryStats::_internal_device_persistent_tensor_alloc_ids_size() const {
1970 return _impl_.device_persistent_tensor_alloc_ids_.size();
1971 }
device_persistent_tensor_alloc_ids_size()1972 inline int MemoryStats::device_persistent_tensor_alloc_ids_size() const {
1973 return _internal_device_persistent_tensor_alloc_ids_size();
1974 }
clear_device_persistent_tensor_alloc_ids()1975 inline void MemoryStats::clear_device_persistent_tensor_alloc_ids() {
1976 _impl_.device_persistent_tensor_alloc_ids_.Clear();
1977 }
_internal_device_persistent_tensor_alloc_ids(int index)1978 inline ::int64_t MemoryStats::_internal_device_persistent_tensor_alloc_ids(int index) const {
1979 return _impl_.device_persistent_tensor_alloc_ids_.Get(index);
1980 }
device_persistent_tensor_alloc_ids(int index)1981 inline ::int64_t MemoryStats::device_persistent_tensor_alloc_ids(int index) const {
1982 // @@protoc_insertion_point(field_get:tensorflow.MemoryStats.device_persistent_tensor_alloc_ids)
1983 return _internal_device_persistent_tensor_alloc_ids(index);
1984 }
set_device_persistent_tensor_alloc_ids(int index,::int64_t value)1985 inline void MemoryStats::set_device_persistent_tensor_alloc_ids(int index, ::int64_t value) {
1986 _impl_.device_persistent_tensor_alloc_ids_.Set(index, value);
1987 // @@protoc_insertion_point(field_set:tensorflow.MemoryStats.device_persistent_tensor_alloc_ids)
1988 }
_internal_add_device_persistent_tensor_alloc_ids(::int64_t value)1989 inline void MemoryStats::_internal_add_device_persistent_tensor_alloc_ids(::int64_t value) {
1990 _impl_.device_persistent_tensor_alloc_ids_.Add(value);
1991 }
add_device_persistent_tensor_alloc_ids(::int64_t value)1992 inline void MemoryStats::add_device_persistent_tensor_alloc_ids(::int64_t value) {
1993 _internal_add_device_persistent_tensor_alloc_ids(value);
1994 // @@protoc_insertion_point(field_add:tensorflow.MemoryStats.device_persistent_tensor_alloc_ids)
1995 }
1996 inline const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
_internal_device_persistent_tensor_alloc_ids()1997 MemoryStats::_internal_device_persistent_tensor_alloc_ids() const {
1998 return _impl_.device_persistent_tensor_alloc_ids_;
1999 }
2000 inline const ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >&
device_persistent_tensor_alloc_ids()2001 MemoryStats::device_persistent_tensor_alloc_ids() const {
2002 // @@protoc_insertion_point(field_list:tensorflow.MemoryStats.device_persistent_tensor_alloc_ids)
2003 return _internal_device_persistent_tensor_alloc_ids();
2004 }
2005 inline ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
_internal_mutable_device_persistent_tensor_alloc_ids()2006 MemoryStats::_internal_mutable_device_persistent_tensor_alloc_ids() {
2007 return &_impl_.device_persistent_tensor_alloc_ids_;
2008 }
2009 inline ::PROTOBUF_NAMESPACE_ID::RepeatedField< ::int64_t >*
mutable_device_persistent_tensor_alloc_ids()2010 MemoryStats::mutable_device_persistent_tensor_alloc_ids() {
2011 // @@protoc_insertion_point(field_mutable_list:tensorflow.MemoryStats.device_persistent_tensor_alloc_ids)
2012 return _internal_mutable_device_persistent_tensor_alloc_ids();
2013 }
2014
2015 // -------------------------------------------------------------------
2016
2017 // NodeExecStats
2018
2019 // string node_name = 1;
clear_node_name()2020 inline void NodeExecStats::clear_node_name() {
2021 _impl_.node_name_.ClearToEmpty();
2022 }
node_name()2023 inline const std::string& NodeExecStats::node_name() const {
2024 // @@protoc_insertion_point(field_get:tensorflow.NodeExecStats.node_name)
2025 return _internal_node_name();
2026 }
2027 template <typename ArgT0, typename... ArgT>
2028 inline PROTOBUF_ALWAYS_INLINE
set_node_name(ArgT0 && arg0,ArgT...args)2029 void NodeExecStats::set_node_name(ArgT0&& arg0, ArgT... args) {
2030
2031 _impl_.node_name_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
2032 // @@protoc_insertion_point(field_set:tensorflow.NodeExecStats.node_name)
2033 }
mutable_node_name()2034 inline std::string* NodeExecStats::mutable_node_name() {
2035 std::string* _s = _internal_mutable_node_name();
2036 // @@protoc_insertion_point(field_mutable:tensorflow.NodeExecStats.node_name)
2037 return _s;
2038 }
_internal_node_name()2039 inline const std::string& NodeExecStats::_internal_node_name() const {
2040 return _impl_.node_name_.Get();
2041 }
_internal_set_node_name(const std::string & value)2042 inline void NodeExecStats::_internal_set_node_name(const std::string& value) {
2043
2044 _impl_.node_name_.Set(value, GetArenaForAllocation());
2045 }
_internal_mutable_node_name()2046 inline std::string* NodeExecStats::_internal_mutable_node_name() {
2047
2048 return _impl_.node_name_.Mutable(GetArenaForAllocation());
2049 }
release_node_name()2050 inline std::string* NodeExecStats::release_node_name() {
2051 // @@protoc_insertion_point(field_release:tensorflow.NodeExecStats.node_name)
2052 return _impl_.node_name_.Release();
2053 }
set_allocated_node_name(std::string * node_name)2054 inline void NodeExecStats::set_allocated_node_name(std::string* node_name) {
2055 _impl_.node_name_.SetAllocated(node_name, GetArenaForAllocation());
2056 #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
2057 if (_impl_.node_name_.IsDefault()) {
2058 _impl_.node_name_.Set("", GetArenaForAllocation());
2059 }
2060 #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
2061 // @@protoc_insertion_point(field_set_allocated:tensorflow.NodeExecStats.node_name)
2062 }
2063
2064 // int64 all_start_micros = 2;
clear_all_start_micros()2065 inline void NodeExecStats::clear_all_start_micros() {
2066 _impl_.all_start_micros_ = ::int64_t{0};
2067 }
_internal_all_start_micros()2068 inline ::int64_t NodeExecStats::_internal_all_start_micros() const {
2069 return _impl_.all_start_micros_;
2070 }
all_start_micros()2071 inline ::int64_t NodeExecStats::all_start_micros() const {
2072 // @@protoc_insertion_point(field_get:tensorflow.NodeExecStats.all_start_micros)
2073 return _internal_all_start_micros();
2074 }
_internal_set_all_start_micros(::int64_t value)2075 inline void NodeExecStats::_internal_set_all_start_micros(::int64_t value) {
2076
2077 _impl_.all_start_micros_ = value;
2078 }
set_all_start_micros(::int64_t value)2079 inline void NodeExecStats::set_all_start_micros(::int64_t value) {
2080 _internal_set_all_start_micros(value);
2081 // @@protoc_insertion_point(field_set:tensorflow.NodeExecStats.all_start_micros)
2082 }
2083
2084 // int64 op_start_rel_micros = 3;
clear_op_start_rel_micros()2085 inline void NodeExecStats::clear_op_start_rel_micros() {
2086 _impl_.op_start_rel_micros_ = ::int64_t{0};
2087 }
_internal_op_start_rel_micros()2088 inline ::int64_t NodeExecStats::_internal_op_start_rel_micros() const {
2089 return _impl_.op_start_rel_micros_;
2090 }
op_start_rel_micros()2091 inline ::int64_t NodeExecStats::op_start_rel_micros() const {
2092 // @@protoc_insertion_point(field_get:tensorflow.NodeExecStats.op_start_rel_micros)
2093 return _internal_op_start_rel_micros();
2094 }
_internal_set_op_start_rel_micros(::int64_t value)2095 inline void NodeExecStats::_internal_set_op_start_rel_micros(::int64_t value) {
2096
2097 _impl_.op_start_rel_micros_ = value;
2098 }
set_op_start_rel_micros(::int64_t value)2099 inline void NodeExecStats::set_op_start_rel_micros(::int64_t value) {
2100 _internal_set_op_start_rel_micros(value);
2101 // @@protoc_insertion_point(field_set:tensorflow.NodeExecStats.op_start_rel_micros)
2102 }
2103
2104 // int64 op_end_rel_micros = 4;
clear_op_end_rel_micros()2105 inline void NodeExecStats::clear_op_end_rel_micros() {
2106 _impl_.op_end_rel_micros_ = ::int64_t{0};
2107 }
_internal_op_end_rel_micros()2108 inline ::int64_t NodeExecStats::_internal_op_end_rel_micros() const {
2109 return _impl_.op_end_rel_micros_;
2110 }
op_end_rel_micros()2111 inline ::int64_t NodeExecStats::op_end_rel_micros() const {
2112 // @@protoc_insertion_point(field_get:tensorflow.NodeExecStats.op_end_rel_micros)
2113 return _internal_op_end_rel_micros();
2114 }
_internal_set_op_end_rel_micros(::int64_t value)2115 inline void NodeExecStats::_internal_set_op_end_rel_micros(::int64_t value) {
2116
2117 _impl_.op_end_rel_micros_ = value;
2118 }
set_op_end_rel_micros(::int64_t value)2119 inline void NodeExecStats::set_op_end_rel_micros(::int64_t value) {
2120 _internal_set_op_end_rel_micros(value);
2121 // @@protoc_insertion_point(field_set:tensorflow.NodeExecStats.op_end_rel_micros)
2122 }
2123
2124 // int64 all_end_rel_micros = 5;
clear_all_end_rel_micros()2125 inline void NodeExecStats::clear_all_end_rel_micros() {
2126 _impl_.all_end_rel_micros_ = ::int64_t{0};
2127 }
_internal_all_end_rel_micros()2128 inline ::int64_t NodeExecStats::_internal_all_end_rel_micros() const {
2129 return _impl_.all_end_rel_micros_;
2130 }
all_end_rel_micros()2131 inline ::int64_t NodeExecStats::all_end_rel_micros() const {
2132 // @@protoc_insertion_point(field_get:tensorflow.NodeExecStats.all_end_rel_micros)
2133 return _internal_all_end_rel_micros();
2134 }
_internal_set_all_end_rel_micros(::int64_t value)2135 inline void NodeExecStats::_internal_set_all_end_rel_micros(::int64_t value) {
2136
2137 _impl_.all_end_rel_micros_ = value;
2138 }
set_all_end_rel_micros(::int64_t value)2139 inline void NodeExecStats::set_all_end_rel_micros(::int64_t value) {
2140 _internal_set_all_end_rel_micros(value);
2141 // @@protoc_insertion_point(field_set:tensorflow.NodeExecStats.all_end_rel_micros)
2142 }
2143
2144 // repeated .tensorflow.AllocatorMemoryUsed memory = 6;
_internal_memory_size()2145 inline int NodeExecStats::_internal_memory_size() const {
2146 return _impl_.memory_.size();
2147 }
memory_size()2148 inline int NodeExecStats::memory_size() const {
2149 return _internal_memory_size();
2150 }
clear_memory()2151 inline void NodeExecStats::clear_memory() {
2152 _impl_.memory_.Clear();
2153 }
mutable_memory(int index)2154 inline ::tensorflow::AllocatorMemoryUsed* NodeExecStats::mutable_memory(int index) {
2155 // @@protoc_insertion_point(field_mutable:tensorflow.NodeExecStats.memory)
2156 return _impl_.memory_.Mutable(index);
2157 }
2158 inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::AllocatorMemoryUsed >*
mutable_memory()2159 NodeExecStats::mutable_memory() {
2160 // @@protoc_insertion_point(field_mutable_list:tensorflow.NodeExecStats.memory)
2161 return &_impl_.memory_;
2162 }
_internal_memory(int index)2163 inline const ::tensorflow::AllocatorMemoryUsed& NodeExecStats::_internal_memory(int index) const {
2164 return _impl_.memory_.Get(index);
2165 }
memory(int index)2166 inline const ::tensorflow::AllocatorMemoryUsed& NodeExecStats::memory(int index) const {
2167 // @@protoc_insertion_point(field_get:tensorflow.NodeExecStats.memory)
2168 return _internal_memory(index);
2169 }
_internal_add_memory()2170 inline ::tensorflow::AllocatorMemoryUsed* NodeExecStats::_internal_add_memory() {
2171 return _impl_.memory_.Add();
2172 }
add_memory()2173 inline ::tensorflow::AllocatorMemoryUsed* NodeExecStats::add_memory() {
2174 ::tensorflow::AllocatorMemoryUsed* _add = _internal_add_memory();
2175 // @@protoc_insertion_point(field_add:tensorflow.NodeExecStats.memory)
2176 return _add;
2177 }
2178 inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::AllocatorMemoryUsed >&
memory()2179 NodeExecStats::memory() const {
2180 // @@protoc_insertion_point(field_list:tensorflow.NodeExecStats.memory)
2181 return _impl_.memory_;
2182 }
2183
2184 // repeated .tensorflow.NodeOutput output = 7;
_internal_output_size()2185 inline int NodeExecStats::_internal_output_size() const {
2186 return _impl_.output_.size();
2187 }
output_size()2188 inline int NodeExecStats::output_size() const {
2189 return _internal_output_size();
2190 }
clear_output()2191 inline void NodeExecStats::clear_output() {
2192 _impl_.output_.Clear();
2193 }
mutable_output(int index)2194 inline ::tensorflow::NodeOutput* NodeExecStats::mutable_output(int index) {
2195 // @@protoc_insertion_point(field_mutable:tensorflow.NodeExecStats.output)
2196 return _impl_.output_.Mutable(index);
2197 }
2198 inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::NodeOutput >*
mutable_output()2199 NodeExecStats::mutable_output() {
2200 // @@protoc_insertion_point(field_mutable_list:tensorflow.NodeExecStats.output)
2201 return &_impl_.output_;
2202 }
_internal_output(int index)2203 inline const ::tensorflow::NodeOutput& NodeExecStats::_internal_output(int index) const {
2204 return _impl_.output_.Get(index);
2205 }
output(int index)2206 inline const ::tensorflow::NodeOutput& NodeExecStats::output(int index) const {
2207 // @@protoc_insertion_point(field_get:tensorflow.NodeExecStats.output)
2208 return _internal_output(index);
2209 }
_internal_add_output()2210 inline ::tensorflow::NodeOutput* NodeExecStats::_internal_add_output() {
2211 return _impl_.output_.Add();
2212 }
add_output()2213 inline ::tensorflow::NodeOutput* NodeExecStats::add_output() {
2214 ::tensorflow::NodeOutput* _add = _internal_add_output();
2215 // @@protoc_insertion_point(field_add:tensorflow.NodeExecStats.output)
2216 return _add;
2217 }
2218 inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::NodeOutput >&
output()2219 NodeExecStats::output() const {
2220 // @@protoc_insertion_point(field_list:tensorflow.NodeExecStats.output)
2221 return _impl_.output_;
2222 }
2223
2224 // string timeline_label = 8;
clear_timeline_label()2225 inline void NodeExecStats::clear_timeline_label() {
2226 _impl_.timeline_label_.ClearToEmpty();
2227 }
timeline_label()2228 inline const std::string& NodeExecStats::timeline_label() const {
2229 // @@protoc_insertion_point(field_get:tensorflow.NodeExecStats.timeline_label)
2230 return _internal_timeline_label();
2231 }
2232 template <typename ArgT0, typename... ArgT>
2233 inline PROTOBUF_ALWAYS_INLINE
set_timeline_label(ArgT0 && arg0,ArgT...args)2234 void NodeExecStats::set_timeline_label(ArgT0&& arg0, ArgT... args) {
2235
2236 _impl_.timeline_label_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
2237 // @@protoc_insertion_point(field_set:tensorflow.NodeExecStats.timeline_label)
2238 }
mutable_timeline_label()2239 inline std::string* NodeExecStats::mutable_timeline_label() {
2240 std::string* _s = _internal_mutable_timeline_label();
2241 // @@protoc_insertion_point(field_mutable:tensorflow.NodeExecStats.timeline_label)
2242 return _s;
2243 }
_internal_timeline_label()2244 inline const std::string& NodeExecStats::_internal_timeline_label() const {
2245 return _impl_.timeline_label_.Get();
2246 }
_internal_set_timeline_label(const std::string & value)2247 inline void NodeExecStats::_internal_set_timeline_label(const std::string& value) {
2248
2249 _impl_.timeline_label_.Set(value, GetArenaForAllocation());
2250 }
_internal_mutable_timeline_label()2251 inline std::string* NodeExecStats::_internal_mutable_timeline_label() {
2252
2253 return _impl_.timeline_label_.Mutable(GetArenaForAllocation());
2254 }
release_timeline_label()2255 inline std::string* NodeExecStats::release_timeline_label() {
2256 // @@protoc_insertion_point(field_release:tensorflow.NodeExecStats.timeline_label)
2257 return _impl_.timeline_label_.Release();
2258 }
set_allocated_timeline_label(std::string * timeline_label)2259 inline void NodeExecStats::set_allocated_timeline_label(std::string* timeline_label) {
2260 _impl_.timeline_label_.SetAllocated(timeline_label, GetArenaForAllocation());
2261 #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
2262 if (_impl_.timeline_label_.IsDefault()) {
2263 _impl_.timeline_label_.Set("", GetArenaForAllocation());
2264 }
2265 #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
2266 // @@protoc_insertion_point(field_set_allocated:tensorflow.NodeExecStats.timeline_label)
2267 }
2268
2269 // int64 scheduled_micros = 9;
clear_scheduled_micros()2270 inline void NodeExecStats::clear_scheduled_micros() {
2271 _impl_.scheduled_micros_ = ::int64_t{0};
2272 }
_internal_scheduled_micros()2273 inline ::int64_t NodeExecStats::_internal_scheduled_micros() const {
2274 return _impl_.scheduled_micros_;
2275 }
scheduled_micros()2276 inline ::int64_t NodeExecStats::scheduled_micros() const {
2277 // @@protoc_insertion_point(field_get:tensorflow.NodeExecStats.scheduled_micros)
2278 return _internal_scheduled_micros();
2279 }
_internal_set_scheduled_micros(::int64_t value)2280 inline void NodeExecStats::_internal_set_scheduled_micros(::int64_t value) {
2281
2282 _impl_.scheduled_micros_ = value;
2283 }
set_scheduled_micros(::int64_t value)2284 inline void NodeExecStats::set_scheduled_micros(::int64_t value) {
2285 _internal_set_scheduled_micros(value);
2286 // @@protoc_insertion_point(field_set:tensorflow.NodeExecStats.scheduled_micros)
2287 }
2288
2289 // uint32 thread_id = 10;
clear_thread_id()2290 inline void NodeExecStats::clear_thread_id() {
2291 _impl_.thread_id_ = 0u;
2292 }
_internal_thread_id()2293 inline ::uint32_t NodeExecStats::_internal_thread_id() const {
2294 return _impl_.thread_id_;
2295 }
thread_id()2296 inline ::uint32_t NodeExecStats::thread_id() const {
2297 // @@protoc_insertion_point(field_get:tensorflow.NodeExecStats.thread_id)
2298 return _internal_thread_id();
2299 }
_internal_set_thread_id(::uint32_t value)2300 inline void NodeExecStats::_internal_set_thread_id(::uint32_t value) {
2301
2302 _impl_.thread_id_ = value;
2303 }
set_thread_id(::uint32_t value)2304 inline void NodeExecStats::set_thread_id(::uint32_t value) {
2305 _internal_set_thread_id(value);
2306 // @@protoc_insertion_point(field_set:tensorflow.NodeExecStats.thread_id)
2307 }
2308
2309 // repeated .tensorflow.AllocationDescription referenced_tensor = 11;
_internal_referenced_tensor_size()2310 inline int NodeExecStats::_internal_referenced_tensor_size() const {
2311 return _impl_.referenced_tensor_.size();
2312 }
referenced_tensor_size()2313 inline int NodeExecStats::referenced_tensor_size() const {
2314 return _internal_referenced_tensor_size();
2315 }
mutable_referenced_tensor(int index)2316 inline ::tensorflow::AllocationDescription* NodeExecStats::mutable_referenced_tensor(int index) {
2317 // @@protoc_insertion_point(field_mutable:tensorflow.NodeExecStats.referenced_tensor)
2318 return _impl_.referenced_tensor_.Mutable(index);
2319 }
2320 inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::AllocationDescription >*
mutable_referenced_tensor()2321 NodeExecStats::mutable_referenced_tensor() {
2322 // @@protoc_insertion_point(field_mutable_list:tensorflow.NodeExecStats.referenced_tensor)
2323 return &_impl_.referenced_tensor_;
2324 }
_internal_referenced_tensor(int index)2325 inline const ::tensorflow::AllocationDescription& NodeExecStats::_internal_referenced_tensor(int index) const {
2326 return _impl_.referenced_tensor_.Get(index);
2327 }
referenced_tensor(int index)2328 inline const ::tensorflow::AllocationDescription& NodeExecStats::referenced_tensor(int index) const {
2329 // @@protoc_insertion_point(field_get:tensorflow.NodeExecStats.referenced_tensor)
2330 return _internal_referenced_tensor(index);
2331 }
_internal_add_referenced_tensor()2332 inline ::tensorflow::AllocationDescription* NodeExecStats::_internal_add_referenced_tensor() {
2333 return _impl_.referenced_tensor_.Add();
2334 }
add_referenced_tensor()2335 inline ::tensorflow::AllocationDescription* NodeExecStats::add_referenced_tensor() {
2336 ::tensorflow::AllocationDescription* _add = _internal_add_referenced_tensor();
2337 // @@protoc_insertion_point(field_add:tensorflow.NodeExecStats.referenced_tensor)
2338 return _add;
2339 }
2340 inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::AllocationDescription >&
referenced_tensor()2341 NodeExecStats::referenced_tensor() const {
2342 // @@protoc_insertion_point(field_list:tensorflow.NodeExecStats.referenced_tensor)
2343 return _impl_.referenced_tensor_;
2344 }
2345
2346 // .tensorflow.MemoryStats memory_stats = 12;
_internal_has_memory_stats()2347 inline bool NodeExecStats::_internal_has_memory_stats() const {
2348 return this != internal_default_instance() && _impl_.memory_stats_ != nullptr;
2349 }
has_memory_stats()2350 inline bool NodeExecStats::has_memory_stats() const {
2351 return _internal_has_memory_stats();
2352 }
clear_memory_stats()2353 inline void NodeExecStats::clear_memory_stats() {
2354 if (GetArenaForAllocation() == nullptr && _impl_.memory_stats_ != nullptr) {
2355 delete _impl_.memory_stats_;
2356 }
2357 _impl_.memory_stats_ = nullptr;
2358 }
_internal_memory_stats()2359 inline const ::tensorflow::MemoryStats& NodeExecStats::_internal_memory_stats() const {
2360 const ::tensorflow::MemoryStats* p = _impl_.memory_stats_;
2361 return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::MemoryStats&>(
2362 ::tensorflow::_MemoryStats_default_instance_);
2363 }
memory_stats()2364 inline const ::tensorflow::MemoryStats& NodeExecStats::memory_stats() const {
2365 // @@protoc_insertion_point(field_get:tensorflow.NodeExecStats.memory_stats)
2366 return _internal_memory_stats();
2367 }
unsafe_arena_set_allocated_memory_stats(::tensorflow::MemoryStats * memory_stats)2368 inline void NodeExecStats::unsafe_arena_set_allocated_memory_stats(
2369 ::tensorflow::MemoryStats* memory_stats) {
2370 if (GetArenaForAllocation() == nullptr) {
2371 delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.memory_stats_);
2372 }
2373 _impl_.memory_stats_ = memory_stats;
2374 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.NodeExecStats.memory_stats)
2375 }
release_memory_stats()2376 inline ::tensorflow::MemoryStats* NodeExecStats::release_memory_stats() {
2377
2378 ::tensorflow::MemoryStats* temp = _impl_.memory_stats_;
2379 _impl_.memory_stats_ = nullptr;
2380 #ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
2381 auto* old = reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
2382 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
2383 if (GetArenaForAllocation() == nullptr) { delete old; }
2384 #else // PROTOBUF_FORCE_COPY_IN_RELEASE
2385 if (GetArenaForAllocation() != nullptr) {
2386 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
2387 }
2388 #endif // !PROTOBUF_FORCE_COPY_IN_RELEASE
2389 return temp;
2390 }
unsafe_arena_release_memory_stats()2391 inline ::tensorflow::MemoryStats* NodeExecStats::unsafe_arena_release_memory_stats() {
2392 // @@protoc_insertion_point(field_release:tensorflow.NodeExecStats.memory_stats)
2393
2394 ::tensorflow::MemoryStats* temp = _impl_.memory_stats_;
2395 _impl_.memory_stats_ = nullptr;
2396 return temp;
2397 }
_internal_mutable_memory_stats()2398 inline ::tensorflow::MemoryStats* NodeExecStats::_internal_mutable_memory_stats() {
2399
2400 if (_impl_.memory_stats_ == nullptr) {
2401 auto* p = CreateMaybeMessage<::tensorflow::MemoryStats>(GetArenaForAllocation());
2402 _impl_.memory_stats_ = p;
2403 }
2404 return _impl_.memory_stats_;
2405 }
mutable_memory_stats()2406 inline ::tensorflow::MemoryStats* NodeExecStats::mutable_memory_stats() {
2407 ::tensorflow::MemoryStats* _msg = _internal_mutable_memory_stats();
2408 // @@protoc_insertion_point(field_mutable:tensorflow.NodeExecStats.memory_stats)
2409 return _msg;
2410 }
set_allocated_memory_stats(::tensorflow::MemoryStats * memory_stats)2411 inline void NodeExecStats::set_allocated_memory_stats(::tensorflow::MemoryStats* memory_stats) {
2412 ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
2413 if (message_arena == nullptr) {
2414 delete _impl_.memory_stats_;
2415 }
2416 if (memory_stats) {
2417 ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
2418 ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(memory_stats);
2419 if (message_arena != submessage_arena) {
2420 memory_stats = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
2421 message_arena, memory_stats, submessage_arena);
2422 }
2423
2424 } else {
2425
2426 }
2427 _impl_.memory_stats_ = memory_stats;
2428 // @@protoc_insertion_point(field_set_allocated:tensorflow.NodeExecStats.memory_stats)
2429 }
2430
2431 // int64 all_start_nanos = 13;
clear_all_start_nanos()2432 inline void NodeExecStats::clear_all_start_nanos() {
2433 _impl_.all_start_nanos_ = ::int64_t{0};
2434 }
_internal_all_start_nanos()2435 inline ::int64_t NodeExecStats::_internal_all_start_nanos() const {
2436 return _impl_.all_start_nanos_;
2437 }
all_start_nanos()2438 inline ::int64_t NodeExecStats::all_start_nanos() const {
2439 // @@protoc_insertion_point(field_get:tensorflow.NodeExecStats.all_start_nanos)
2440 return _internal_all_start_nanos();
2441 }
_internal_set_all_start_nanos(::int64_t value)2442 inline void NodeExecStats::_internal_set_all_start_nanos(::int64_t value) {
2443
2444 _impl_.all_start_nanos_ = value;
2445 }
set_all_start_nanos(::int64_t value)2446 inline void NodeExecStats::set_all_start_nanos(::int64_t value) {
2447 _internal_set_all_start_nanos(value);
2448 // @@protoc_insertion_point(field_set:tensorflow.NodeExecStats.all_start_nanos)
2449 }
2450
2451 // int64 op_start_rel_nanos = 14;
clear_op_start_rel_nanos()2452 inline void NodeExecStats::clear_op_start_rel_nanos() {
2453 _impl_.op_start_rel_nanos_ = ::int64_t{0};
2454 }
_internal_op_start_rel_nanos()2455 inline ::int64_t NodeExecStats::_internal_op_start_rel_nanos() const {
2456 return _impl_.op_start_rel_nanos_;
2457 }
op_start_rel_nanos()2458 inline ::int64_t NodeExecStats::op_start_rel_nanos() const {
2459 // @@protoc_insertion_point(field_get:tensorflow.NodeExecStats.op_start_rel_nanos)
2460 return _internal_op_start_rel_nanos();
2461 }
_internal_set_op_start_rel_nanos(::int64_t value)2462 inline void NodeExecStats::_internal_set_op_start_rel_nanos(::int64_t value) {
2463
2464 _impl_.op_start_rel_nanos_ = value;
2465 }
set_op_start_rel_nanos(::int64_t value)2466 inline void NodeExecStats::set_op_start_rel_nanos(::int64_t value) {
2467 _internal_set_op_start_rel_nanos(value);
2468 // @@protoc_insertion_point(field_set:tensorflow.NodeExecStats.op_start_rel_nanos)
2469 }
2470
2471 // int64 op_end_rel_nanos = 15;
clear_op_end_rel_nanos()2472 inline void NodeExecStats::clear_op_end_rel_nanos() {
2473 _impl_.op_end_rel_nanos_ = ::int64_t{0};
2474 }
_internal_op_end_rel_nanos()2475 inline ::int64_t NodeExecStats::_internal_op_end_rel_nanos() const {
2476 return _impl_.op_end_rel_nanos_;
2477 }
op_end_rel_nanos()2478 inline ::int64_t NodeExecStats::op_end_rel_nanos() const {
2479 // @@protoc_insertion_point(field_get:tensorflow.NodeExecStats.op_end_rel_nanos)
2480 return _internal_op_end_rel_nanos();
2481 }
_internal_set_op_end_rel_nanos(::int64_t value)2482 inline void NodeExecStats::_internal_set_op_end_rel_nanos(::int64_t value) {
2483
2484 _impl_.op_end_rel_nanos_ = value;
2485 }
set_op_end_rel_nanos(::int64_t value)2486 inline void NodeExecStats::set_op_end_rel_nanos(::int64_t value) {
2487 _internal_set_op_end_rel_nanos(value);
2488 // @@protoc_insertion_point(field_set:tensorflow.NodeExecStats.op_end_rel_nanos)
2489 }
2490
2491 // int64 all_end_rel_nanos = 16;
clear_all_end_rel_nanos()2492 inline void NodeExecStats::clear_all_end_rel_nanos() {
2493 _impl_.all_end_rel_nanos_ = ::int64_t{0};
2494 }
_internal_all_end_rel_nanos()2495 inline ::int64_t NodeExecStats::_internal_all_end_rel_nanos() const {
2496 return _impl_.all_end_rel_nanos_;
2497 }
all_end_rel_nanos()2498 inline ::int64_t NodeExecStats::all_end_rel_nanos() const {
2499 // @@protoc_insertion_point(field_get:tensorflow.NodeExecStats.all_end_rel_nanos)
2500 return _internal_all_end_rel_nanos();
2501 }
_internal_set_all_end_rel_nanos(::int64_t value)2502 inline void NodeExecStats::_internal_set_all_end_rel_nanos(::int64_t value) {
2503
2504 _impl_.all_end_rel_nanos_ = value;
2505 }
set_all_end_rel_nanos(::int64_t value)2506 inline void NodeExecStats::set_all_end_rel_nanos(::int64_t value) {
2507 _internal_set_all_end_rel_nanos(value);
2508 // @@protoc_insertion_point(field_set:tensorflow.NodeExecStats.all_end_rel_nanos)
2509 }
2510
2511 // int64 scheduled_nanos = 17;
clear_scheduled_nanos()2512 inline void NodeExecStats::clear_scheduled_nanos() {
2513 _impl_.scheduled_nanos_ = ::int64_t{0};
2514 }
_internal_scheduled_nanos()2515 inline ::int64_t NodeExecStats::_internal_scheduled_nanos() const {
2516 return _impl_.scheduled_nanos_;
2517 }
scheduled_nanos()2518 inline ::int64_t NodeExecStats::scheduled_nanos() const {
2519 // @@protoc_insertion_point(field_get:tensorflow.NodeExecStats.scheduled_nanos)
2520 return _internal_scheduled_nanos();
2521 }
_internal_set_scheduled_nanos(::int64_t value)2522 inline void NodeExecStats::_internal_set_scheduled_nanos(::int64_t value) {
2523
2524 _impl_.scheduled_nanos_ = value;
2525 }
set_scheduled_nanos(::int64_t value)2526 inline void NodeExecStats::set_scheduled_nanos(::int64_t value) {
2527 _internal_set_scheduled_nanos(value);
2528 // @@protoc_insertion_point(field_set:tensorflow.NodeExecStats.scheduled_nanos)
2529 }
2530
// -------------------------------------------------------------------

// -------------------------------------------------------------------

// DeviceStepStats

2537 // string device = 1;
clear_device()2538 inline void DeviceStepStats::clear_device() {
2539 _impl_.device_.ClearToEmpty();
2540 }
device()2541 inline const std::string& DeviceStepStats::device() const {
2542 // @@protoc_insertion_point(field_get:tensorflow.DeviceStepStats.device)
2543 return _internal_device();
2544 }
2545 template <typename ArgT0, typename... ArgT>
2546 inline PROTOBUF_ALWAYS_INLINE
set_device(ArgT0 && arg0,ArgT...args)2547 void DeviceStepStats::set_device(ArgT0&& arg0, ArgT... args) {
2548
2549 _impl_.device_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
2550 // @@protoc_insertion_point(field_set:tensorflow.DeviceStepStats.device)
2551 }
mutable_device()2552 inline std::string* DeviceStepStats::mutable_device() {
2553 std::string* _s = _internal_mutable_device();
2554 // @@protoc_insertion_point(field_mutable:tensorflow.DeviceStepStats.device)
2555 return _s;
2556 }
_internal_device()2557 inline const std::string& DeviceStepStats::_internal_device() const {
2558 return _impl_.device_.Get();
2559 }
_internal_set_device(const std::string & value)2560 inline void DeviceStepStats::_internal_set_device(const std::string& value) {
2561
2562 _impl_.device_.Set(value, GetArenaForAllocation());
2563 }
_internal_mutable_device()2564 inline std::string* DeviceStepStats::_internal_mutable_device() {
2565
2566 return _impl_.device_.Mutable(GetArenaForAllocation());
2567 }
release_device()2568 inline std::string* DeviceStepStats::release_device() {
2569 // @@protoc_insertion_point(field_release:tensorflow.DeviceStepStats.device)
2570 return _impl_.device_.Release();
2571 }
set_allocated_device(std::string * device)2572 inline void DeviceStepStats::set_allocated_device(std::string* device) {
2573 _impl_.device_.SetAllocated(device, GetArenaForAllocation());
2574 #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
2575 if (_impl_.device_.IsDefault()) {
2576 _impl_.device_.Set("", GetArenaForAllocation());
2577 }
2578 #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
2579 // @@protoc_insertion_point(field_set_allocated:tensorflow.DeviceStepStats.device)
2580 }
2581
2582 // repeated .tensorflow.NodeExecStats node_stats = 2;
_internal_node_stats_size()2583 inline int DeviceStepStats::_internal_node_stats_size() const {
2584 return _impl_.node_stats_.size();
2585 }
node_stats_size()2586 inline int DeviceStepStats::node_stats_size() const {
2587 return _internal_node_stats_size();
2588 }
clear_node_stats()2589 inline void DeviceStepStats::clear_node_stats() {
2590 _impl_.node_stats_.Clear();
2591 }
mutable_node_stats(int index)2592 inline ::tensorflow::NodeExecStats* DeviceStepStats::mutable_node_stats(int index) {
2593 // @@protoc_insertion_point(field_mutable:tensorflow.DeviceStepStats.node_stats)
2594 return _impl_.node_stats_.Mutable(index);
2595 }
2596 inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::NodeExecStats >*
mutable_node_stats()2597 DeviceStepStats::mutable_node_stats() {
2598 // @@protoc_insertion_point(field_mutable_list:tensorflow.DeviceStepStats.node_stats)
2599 return &_impl_.node_stats_;
2600 }
_internal_node_stats(int index)2601 inline const ::tensorflow::NodeExecStats& DeviceStepStats::_internal_node_stats(int index) const {
2602 return _impl_.node_stats_.Get(index);
2603 }
node_stats(int index)2604 inline const ::tensorflow::NodeExecStats& DeviceStepStats::node_stats(int index) const {
2605 // @@protoc_insertion_point(field_get:tensorflow.DeviceStepStats.node_stats)
2606 return _internal_node_stats(index);
2607 }
_internal_add_node_stats()2608 inline ::tensorflow::NodeExecStats* DeviceStepStats::_internal_add_node_stats() {
2609 return _impl_.node_stats_.Add();
2610 }
add_node_stats()2611 inline ::tensorflow::NodeExecStats* DeviceStepStats::add_node_stats() {
2612 ::tensorflow::NodeExecStats* _add = _internal_add_node_stats();
2613 // @@protoc_insertion_point(field_add:tensorflow.DeviceStepStats.node_stats)
2614 return _add;
2615 }
2616 inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::NodeExecStats >&
node_stats()2617 DeviceStepStats::node_stats() const {
2618 // @@protoc_insertion_point(field_list:tensorflow.DeviceStepStats.node_stats)
2619 return _impl_.node_stats_;
2620 }
2621
2622 // map<uint32, string> thread_names = 3;
_internal_thread_names_size()2623 inline int DeviceStepStats::_internal_thread_names_size() const {
2624 return _impl_.thread_names_.size();
2625 }
thread_names_size()2626 inline int DeviceStepStats::thread_names_size() const {
2627 return _internal_thread_names_size();
2628 }
clear_thread_names()2629 inline void DeviceStepStats::clear_thread_names() {
2630 _impl_.thread_names_.Clear();
2631 }
2632 inline const ::PROTOBUF_NAMESPACE_ID::Map< ::uint32_t, std::string >&
_internal_thread_names()2633 DeviceStepStats::_internal_thread_names() const {
2634 return _impl_.thread_names_.GetMap();
2635 }
2636 inline const ::PROTOBUF_NAMESPACE_ID::Map< ::uint32_t, std::string >&
thread_names()2637 DeviceStepStats::thread_names() const {
2638 // @@protoc_insertion_point(field_map:tensorflow.DeviceStepStats.thread_names)
2639 return _internal_thread_names();
2640 }
2641 inline ::PROTOBUF_NAMESPACE_ID::Map< ::uint32_t, std::string >*
_internal_mutable_thread_names()2642 DeviceStepStats::_internal_mutable_thread_names() {
2643 return _impl_.thread_names_.MutableMap();
2644 }
2645 inline ::PROTOBUF_NAMESPACE_ID::Map< ::uint32_t, std::string >*
mutable_thread_names()2646 DeviceStepStats::mutable_thread_names() {
2647 // @@protoc_insertion_point(field_mutable_map:tensorflow.DeviceStepStats.thread_names)
2648 return _internal_mutable_thread_names();
2649 }
2650
// -------------------------------------------------------------------

// StepStats

2655 // repeated .tensorflow.DeviceStepStats dev_stats = 1;
_internal_dev_stats_size()2656 inline int StepStats::_internal_dev_stats_size() const {
2657 return _impl_.dev_stats_.size();
2658 }
dev_stats_size()2659 inline int StepStats::dev_stats_size() const {
2660 return _internal_dev_stats_size();
2661 }
clear_dev_stats()2662 inline void StepStats::clear_dev_stats() {
2663 _impl_.dev_stats_.Clear();
2664 }
mutable_dev_stats(int index)2665 inline ::tensorflow::DeviceStepStats* StepStats::mutable_dev_stats(int index) {
2666 // @@protoc_insertion_point(field_mutable:tensorflow.StepStats.dev_stats)
2667 return _impl_.dev_stats_.Mutable(index);
2668 }
2669 inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::DeviceStepStats >*
mutable_dev_stats()2670 StepStats::mutable_dev_stats() {
2671 // @@protoc_insertion_point(field_mutable_list:tensorflow.StepStats.dev_stats)
2672 return &_impl_.dev_stats_;
2673 }
_internal_dev_stats(int index)2674 inline const ::tensorflow::DeviceStepStats& StepStats::_internal_dev_stats(int index) const {
2675 return _impl_.dev_stats_.Get(index);
2676 }
dev_stats(int index)2677 inline const ::tensorflow::DeviceStepStats& StepStats::dev_stats(int index) const {
2678 // @@protoc_insertion_point(field_get:tensorflow.StepStats.dev_stats)
2679 return _internal_dev_stats(index);
2680 }
_internal_add_dev_stats()2681 inline ::tensorflow::DeviceStepStats* StepStats::_internal_add_dev_stats() {
2682 return _impl_.dev_stats_.Add();
2683 }
add_dev_stats()2684 inline ::tensorflow::DeviceStepStats* StepStats::add_dev_stats() {
2685 ::tensorflow::DeviceStepStats* _add = _internal_add_dev_stats();
2686 // @@protoc_insertion_point(field_add:tensorflow.StepStats.dev_stats)
2687 return _add;
2688 }
2689 inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::DeviceStepStats >&
dev_stats()2690 StepStats::dev_stats() const {
2691 // @@protoc_insertion_point(field_list:tensorflow.StepStats.dev_stats)
2692 return _impl_.dev_stats_;
2693 }
2694
#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif  // __GNUC__
// -------------------------------------------------------------------

// -------------------------------------------------------------------

// -------------------------------------------------------------------

// -------------------------------------------------------------------

// -------------------------------------------------------------------

// -------------------------------------------------------------------

// -------------------------------------------------------------------


// @@protoc_insertion_point(namespace_scope)

}  // namespace tensorflow

// @@protoc_insertion_point(global_scope)

#include <google/protobuf/port_undef.inc>
#endif  // GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fframework_2fstep_5fstats_2eproto
