1 // Generated by the protocol buffer compiler. DO NOT EDIT!
2 // source: tensorflow/core/protobuf/bfc_memory_map.proto
3
4 #include "tensorflow/core/protobuf/bfc_memory_map.pb.h"
5
6 #include <algorithm>
7 #include <cstdint>
8
9 #include <google/protobuf/io/coded_stream.h>
10 #include <google/protobuf/extension_set.h>
11 #include <google/protobuf/wire_format_lite.h>
12 #include <google/protobuf/io/zero_copy_stream_impl_lite.h>
13 // @@protoc_insertion_point(includes)
14 #include <google/protobuf/port_def.inc>
15
16 PROTOBUF_PRAGMA_INIT_SEG
17
18 namespace _pb = ::PROTOBUF_NAMESPACE_ID;
19 namespace _pbi = _pb::internal;
20
21 namespace tensorflow {
22 PROTOBUF_CONSTEXPR MemAllocatorStats::MemAllocatorStats(
23 ::_pbi::ConstantInitialized): _impl_{
24 /*decltype(_impl_.num_allocs_)*/::int64_t{0}
25 , /*decltype(_impl_.bytes_in_use_)*/::int64_t{0}
26 , /*decltype(_impl_.peak_bytes_in_use_)*/::int64_t{0}
27 , /*decltype(_impl_.largest_alloc_size_)*/::int64_t{0}
28 , /*decltype(_impl_.fragmentation_metric_)*/0
29 , /*decltype(_impl_._cached_size_)*/{}} {}
30 struct MemAllocatorStatsDefaultTypeInternal {
31 PROTOBUF_CONSTEXPR MemAllocatorStatsDefaultTypeInternal()
32 : _instance(::_pbi::ConstantInitialized{}) {}
33 ~MemAllocatorStatsDefaultTypeInternal() {}
34 union { // NOLINT(misc-non-private-member-variables-in-classes)
35 MemAllocatorStats _instance;
36 };
37 };
38 PROTOBUF_ATTRIBUTE_NO_DESTROY PROTOBUF_CONSTINIT PROTOBUF_ATTRIBUTE_INIT_PRIORITY1 MemAllocatorStatsDefaultTypeInternal _MemAllocatorStats_default_instance_;
39 PROTOBUF_CONSTEXPR MemChunk::MemChunk(
40 ::_pbi::ConstantInitialized): _impl_{
41 /*decltype(_impl_.op_name_)*/{&::_pbi::fixed_address_empty_string, ::_pbi::ConstantInitialized{}}
42 , /*decltype(_impl_.address_)*/::uint64_t{0u}
43 , /*decltype(_impl_.size_)*/::int64_t{0}
44 , /*decltype(_impl_.requested_size_)*/::int64_t{0}
45 , /*decltype(_impl_.freed_at_count_)*/::uint64_t{0u}
46 , /*decltype(_impl_.bin_)*/0
47 , /*decltype(_impl_.in_use_)*/false
48 , /*decltype(_impl_.action_count_)*/::uint64_t{0u}
49 , /*decltype(_impl_.step_id_)*/::uint64_t{0u}
50 , /*decltype(_impl_._cached_size_)*/{}} {}
51 struct MemChunkDefaultTypeInternal {
52 PROTOBUF_CONSTEXPR MemChunkDefaultTypeInternal()
53 : _instance(::_pbi::ConstantInitialized{}) {}
54 ~MemChunkDefaultTypeInternal() {}
55 union { // NOLINT(misc-non-private-member-variables-in-classes)
56 MemChunk _instance;
57 };
58 };
59 PROTOBUF_ATTRIBUTE_NO_DESTROY PROTOBUF_CONSTINIT PROTOBUF_ATTRIBUTE_INIT_PRIORITY1 MemChunkDefaultTypeInternal _MemChunk_default_instance_;
60 PROTOBUF_CONSTEXPR BinSummary::BinSummary(
61 ::_pbi::ConstantInitialized): _impl_{
62 /*decltype(_impl_.total_bytes_in_use_)*/::int64_t{0}
63 , /*decltype(_impl_.total_bytes_in_bin_)*/::int64_t{0}
64 , /*decltype(_impl_.total_chunks_in_use_)*/::int64_t{0}
65 , /*decltype(_impl_.total_chunks_in_bin_)*/::int64_t{0}
66 , /*decltype(_impl_.bin_)*/0
67 , /*decltype(_impl_._cached_size_)*/{}} {}
68 struct BinSummaryDefaultTypeInternal {
69 PROTOBUF_CONSTEXPR BinSummaryDefaultTypeInternal()
70 : _instance(::_pbi::ConstantInitialized{}) {}
71 ~BinSummaryDefaultTypeInternal() {}
72 union { // NOLINT(misc-non-private-member-variables-in-classes)
73 BinSummary _instance;
74 };
75 };
76 PROTOBUF_ATTRIBUTE_NO_DESTROY PROTOBUF_CONSTINIT PROTOBUF_ATTRIBUTE_INIT_PRIORITY1 BinSummaryDefaultTypeInternal _BinSummary_default_instance_;
77 PROTOBUF_CONSTEXPR SnapShot::SnapShot(
78 ::_pbi::ConstantInitialized): _impl_{
79 /*decltype(_impl_.action_count_)*/::uint64_t{0u}
80 , /*decltype(_impl_.size_)*/::int64_t{0}
81 , /*decltype(_impl_._cached_size_)*/{}} {}
82 struct SnapShotDefaultTypeInternal {
83 PROTOBUF_CONSTEXPR SnapShotDefaultTypeInternal()
84 : _instance(::_pbi::ConstantInitialized{}) {}
85 ~SnapShotDefaultTypeInternal() {}
86 union { // NOLINT(misc-non-private-member-variables-in-classes)
87 SnapShot _instance;
88 };
89 };
90 PROTOBUF_ATTRIBUTE_NO_DESTROY PROTOBUF_CONSTINIT PROTOBUF_ATTRIBUTE_INIT_PRIORITY1 SnapShotDefaultTypeInternal _SnapShot_default_instance_;
91 PROTOBUF_CONSTEXPR MemoryDump::MemoryDump(
92 ::_pbi::ConstantInitialized): _impl_{
93 /*decltype(_impl_.bin_summary_)*/{}
94 , /*decltype(_impl_.chunk_)*/{}
95 , /*decltype(_impl_.snap_shot_)*/{}
96 , /*decltype(_impl_.allocator_name_)*/{&::_pbi::fixed_address_empty_string, ::_pbi::ConstantInitialized{}}
97 , /*decltype(_impl_.stats_)*/nullptr
98 , /*decltype(_impl_._cached_size_)*/{}} {}
99 struct MemoryDumpDefaultTypeInternal {
100 PROTOBUF_CONSTEXPR MemoryDumpDefaultTypeInternal()
101 : _instance(::_pbi::ConstantInitialized{}) {}
102 ~MemoryDumpDefaultTypeInternal() {}
103 union { // NOLINT(misc-non-private-member-variables-in-classes)
104 MemoryDump _instance;
105 };
106 };
107 PROTOBUF_ATTRIBUTE_NO_DESTROY PROTOBUF_CONSTINIT PROTOBUF_ATTRIBUTE_INIT_PRIORITY1 MemoryDumpDefaultTypeInternal _MemoryDump_default_instance_;
108 } // namespace tensorflow
109 namespace tensorflow {
110
111 // ===================================================================
112
113 class MemAllocatorStats::_Internal {
114 public:
115 };
116
117 MemAllocatorStats::MemAllocatorStats(::PROTOBUF_NAMESPACE_ID::Arena* arena,
118 bool is_message_owned)
119 : ::PROTOBUF_NAMESPACE_ID::MessageLite(arena, is_message_owned) {
120 SharedCtor(arena, is_message_owned);
121 // @@protoc_insertion_point(arena_constructor:tensorflow.MemAllocatorStats)
122 }
123 MemAllocatorStats::MemAllocatorStats(const MemAllocatorStats& from)
124 : ::PROTOBUF_NAMESPACE_ID::MessageLite() {
125 MemAllocatorStats* const _this = this; (void)_this;
126 new (&_impl_) Impl_{
127 decltype(_impl_.num_allocs_){}
128 , decltype(_impl_.bytes_in_use_){}
129 , decltype(_impl_.peak_bytes_in_use_){}
130 , decltype(_impl_.largest_alloc_size_){}
131 , decltype(_impl_.fragmentation_metric_){}
132 , /*decltype(_impl_._cached_size_)*/{}};
133
134 _internal_metadata_.MergeFrom<std::string>(from._internal_metadata_);
135 ::memcpy(&_impl_.num_allocs_, &from._impl_.num_allocs_,
136 static_cast<size_t>(reinterpret_cast<char*>(&_impl_.fragmentation_metric_) -
137 reinterpret_cast<char*>(&_impl_.num_allocs_)) + sizeof(_impl_.fragmentation_metric_));
138 // @@protoc_insertion_point(copy_constructor:tensorflow.MemAllocatorStats)
139 }
140
141 inline void MemAllocatorStats::SharedCtor(
142 ::_pb::Arena* arena, bool is_message_owned) {
143 (void)arena;
144 (void)is_message_owned;
145 new (&_impl_) Impl_{
146 decltype(_impl_.num_allocs_){::int64_t{0}}
147 , decltype(_impl_.bytes_in_use_){::int64_t{0}}
148 , decltype(_impl_.peak_bytes_in_use_){::int64_t{0}}
149 , decltype(_impl_.largest_alloc_size_){::int64_t{0}}
150 , decltype(_impl_.fragmentation_metric_){0}
151 , /*decltype(_impl_._cached_size_)*/{}
152 };
153 }
154
155 MemAllocatorStats::~MemAllocatorStats() {
156 // @@protoc_insertion_point(destructor:tensorflow.MemAllocatorStats)
157 if (auto *arena = _internal_metadata_.DeleteReturnArena<std::string>()) {
158 (void)arena;
159 return;
160 }
161 SharedDtor();
162 }
163
164 inline void MemAllocatorStats::SharedDtor() {
165 GOOGLE_DCHECK(GetArenaForAllocation() == nullptr);
166 }
167
168 void MemAllocatorStats::SetCachedSize(int size) const {
169 _impl_._cached_size_.Set(size);
170 }
171
172 void MemAllocatorStats::Clear() {
173 // @@protoc_insertion_point(message_clear_start:tensorflow.MemAllocatorStats)
174 ::uint32_t cached_has_bits = 0;
175 // Prevent compiler warnings about cached_has_bits being unused
176 (void) cached_has_bits;
177
178 ::memset(&_impl_.num_allocs_, 0, static_cast<size_t>(
179 reinterpret_cast<char*>(&_impl_.fragmentation_metric_) -
180 reinterpret_cast<char*>(&_impl_.num_allocs_)) + sizeof(_impl_.fragmentation_metric_));
181 _internal_metadata_.Clear<std::string>();
182 }
183
184 const char* MemAllocatorStats::_InternalParse(const char* ptr, ::_pbi::ParseContext* ctx) {
185 #define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure
186 while (!ctx->Done(&ptr)) {
187 ::uint32_t tag;
188 ptr = ::_pbi::ReadTag(ptr, &tag);
189 switch (tag >> 3) {
190 // int64 num_allocs = 1;
191 case 1:
192 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 8)) {
193 _impl_.num_allocs_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
194 CHK_(ptr);
195 } else {
196 goto handle_unusual;
197 }
198 continue;
199 // int64 bytes_in_use = 2;
200 case 2:
201 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 16)) {
202 _impl_.bytes_in_use_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
203 CHK_(ptr);
204 } else {
205 goto handle_unusual;
206 }
207 continue;
208 // int64 peak_bytes_in_use = 3;
209 case 3:
210 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 24)) {
211 _impl_.peak_bytes_in_use_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
212 CHK_(ptr);
213 } else {
214 goto handle_unusual;
215 }
216 continue;
217 // int64 largest_alloc_size = 4;
218 case 4:
219 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 32)) {
220 _impl_.largest_alloc_size_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
221 CHK_(ptr);
222 } else {
223 goto handle_unusual;
224 }
225 continue;
226 // float fragmentation_metric = 5;
227 case 5:
228 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 45)) {
229 _impl_.fragmentation_metric_ = ::PROTOBUF_NAMESPACE_ID::internal::UnalignedLoad<float>(ptr);
230 ptr += sizeof(float);
231 } else {
232 goto handle_unusual;
233 }
234 continue;
235 default:
236 goto handle_unusual;
237 } // switch
238 handle_unusual:
239 if ((tag == 0) || ((tag & 7) == 4)) {
240 CHK_(ptr);
241 ctx->SetLastTag(tag);
242 goto message_done;
243 }
244 ptr = UnknownFieldParse(
245 tag,
246 _internal_metadata_.mutable_unknown_fields<std::string>(),
247 ptr, ctx);
248 CHK_(ptr != nullptr);
249 } // while
250 message_done:
251 return ptr;
252 failure:
253 ptr = nullptr;
254 goto message_done;
255 #undef CHK_
256 }
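// Note on the tag constants checked above (standard protobuf wire format): a
// tag byte encodes (field_number << 3) | wire_type, where wire_type 0 is a
// varint and wire_type 5 is a 32-bit scalar. So field 1 (int64 num_allocs)
// matches tag 8 = (1 << 3) | 0, and field 5 (float fragmentation_metric)
// matches tag 45 = (5 << 3) | 5.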
257
258 ::uint8_t* MemAllocatorStats::_InternalSerialize(
259 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const {
260 // @@protoc_insertion_point(serialize_to_array_start:tensorflow.MemAllocatorStats)
261 ::uint32_t cached_has_bits = 0;
262 (void) cached_has_bits;
263
264 // int64 num_allocs = 1;
265 if (this->_internal_num_allocs() != 0) {
266 target = stream->EnsureSpace(target);
267 target = ::_pbi::WireFormatLite::WriteInt64ToArray(1, this->_internal_num_allocs(), target);
268 }
269
270 // int64 bytes_in_use = 2;
271 if (this->_internal_bytes_in_use() != 0) {
272 target = stream->EnsureSpace(target);
273 target = ::_pbi::WireFormatLite::WriteInt64ToArray(2, this->_internal_bytes_in_use(), target);
274 }
275
276 // int64 peak_bytes_in_use = 3;
277 if (this->_internal_peak_bytes_in_use() != 0) {
278 target = stream->EnsureSpace(target);
279 target = ::_pbi::WireFormatLite::WriteInt64ToArray(3, this->_internal_peak_bytes_in_use(), target);
280 }
281
282 // int64 largest_alloc_size = 4;
283 if (this->_internal_largest_alloc_size() != 0) {
284 target = stream->EnsureSpace(target);
285 target = ::_pbi::WireFormatLite::WriteInt64ToArray(4, this->_internal_largest_alloc_size(), target);
286 }
287
288 // float fragmentation_metric = 5;
289 static_assert(sizeof(::uint32_t) == sizeof(float), "Code assumes uint32_t and float are the same size.");
290 float tmp_fragmentation_metric = this->_internal_fragmentation_metric();
291 ::uint32_t raw_fragmentation_metric;
292 memcpy(&raw_fragmentation_metric, &tmp_fragmentation_metric, sizeof(tmp_fragmentation_metric));
293 if (raw_fragmentation_metric != 0) {
294 target = stream->EnsureSpace(target);
295 target = ::_pbi::WireFormatLite::WriteFloatToArray(5, this->_internal_fragmentation_metric(), target);
296 }
297
298 if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
299 target = stream->WriteRaw(_internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).data(),
300 static_cast<int>(_internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).size()), target);
301 }
302 // @@protoc_insertion_point(serialize_to_array_end:tensorflow.MemAllocatorStats)
303 return target;
304 }
305
306 size_t MemAllocatorStats::ByteSizeLong() const {
307 // @@protoc_insertion_point(message_byte_size_start:tensorflow.MemAllocatorStats)
308 size_t total_size = 0;
309
310 ::uint32_t cached_has_bits = 0;
311 // Prevent compiler warnings about cached_has_bits being unused
312 (void) cached_has_bits;
313
314 // int64 num_allocs = 1;
315 if (this->_internal_num_allocs() != 0) {
316 total_size += ::_pbi::WireFormatLite::Int64SizePlusOne(this->_internal_num_allocs());
317 }
318
319 // int64 bytes_in_use = 2;
320 if (this->_internal_bytes_in_use() != 0) {
321 total_size += ::_pbi::WireFormatLite::Int64SizePlusOne(this->_internal_bytes_in_use());
322 }
323
324 // int64 peak_bytes_in_use = 3;
325 if (this->_internal_peak_bytes_in_use() != 0) {
326 total_size += ::_pbi::WireFormatLite::Int64SizePlusOne(this->_internal_peak_bytes_in_use());
327 }
328
329 // int64 largest_alloc_size = 4;
330 if (this->_internal_largest_alloc_size() != 0) {
331 total_size += ::_pbi::WireFormatLite::Int64SizePlusOne(this->_internal_largest_alloc_size());
332 }
333
334 // float fragmentation_metric = 5;
335 static_assert(sizeof(::uint32_t) == sizeof(float), "Code assumes uint32_t and float are the same size.");
336 float tmp_fragmentation_metric = this->_internal_fragmentation_metric();
337 ::uint32_t raw_fragmentation_metric;
338 memcpy(&raw_fragmentation_metric, &tmp_fragmentation_metric, sizeof(tmp_fragmentation_metric));
339 if (raw_fragmentation_metric != 0) {
340 total_size += 1 + 4;
341 }
342
343 if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
344 total_size += _internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).size();
345 }
346 int cached_size = ::_pbi::ToCachedSize(total_size);
347 SetCachedSize(cached_size);
348 return total_size;
349 }
350
351 void MemAllocatorStats::CheckTypeAndMergeFrom(
352 const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) {
353 MergeFrom(*::_pbi::DownCast<const MemAllocatorStats*>(
354 &from));
355 }
356
357 void MemAllocatorStats::MergeFrom(const MemAllocatorStats& from) {
358 MemAllocatorStats* const _this = this;
359 // @@protoc_insertion_point(class_specific_merge_from_start:tensorflow.MemAllocatorStats)
360 GOOGLE_DCHECK_NE(&from, _this);
361 ::uint32_t cached_has_bits = 0;
362 (void) cached_has_bits;
363
364 if (from._internal_num_allocs() != 0) {
365 _this->_internal_set_num_allocs(from._internal_num_allocs());
366 }
367 if (from._internal_bytes_in_use() != 0) {
368 _this->_internal_set_bytes_in_use(from._internal_bytes_in_use());
369 }
370 if (from._internal_peak_bytes_in_use() != 0) {
371 _this->_internal_set_peak_bytes_in_use(from._internal_peak_bytes_in_use());
372 }
373 if (from._internal_largest_alloc_size() != 0) {
374 _this->_internal_set_largest_alloc_size(from._internal_largest_alloc_size());
375 }
376 static_assert(sizeof(::uint32_t) == sizeof(float), "Code assumes uint32_t and float are the same size.");
377 float tmp_fragmentation_metric = from._internal_fragmentation_metric();
378 ::uint32_t raw_fragmentation_metric;
379 memcpy(&raw_fragmentation_metric, &tmp_fragmentation_metric, sizeof(tmp_fragmentation_metric));
380 if (raw_fragmentation_metric != 0) {
381 _this->_internal_set_fragmentation_metric(from._internal_fragmentation_metric());
382 }
383 _this->_internal_metadata_.MergeFrom<std::string>(from._internal_metadata_);
384 }
385
386 void MemAllocatorStats::CopyFrom(const MemAllocatorStats& from) {
387 // @@protoc_insertion_point(class_specific_copy_from_start:tensorflow.MemAllocatorStats)
388 if (&from == this) return;
389 Clear();
390 MergeFrom(from);
391 }
392
393 bool MemAllocatorStats::IsInitialized() const {
394 return true;
395 }
396
397 void MemAllocatorStats::InternalSwap(MemAllocatorStats* other) {
398 using std::swap;
399 _internal_metadata_.InternalSwap(&other->_internal_metadata_);
400 ::PROTOBUF_NAMESPACE_ID::internal::memswap<
401 PROTOBUF_FIELD_OFFSET(MemAllocatorStats, _impl_.fragmentation_metric_)
402 + sizeof(MemAllocatorStats::_impl_.fragmentation_metric_) // NOLINT
403 - PROTOBUF_FIELD_OFFSET(MemAllocatorStats, _impl_.num_allocs_)>(
404 reinterpret_cast<char*>(&_impl_.num_allocs_),
405 reinterpret_cast<char*>(&other->_impl_.num_allocs_));
406 }
407
408 std::string MemAllocatorStats::GetTypeName() const {
409 return "tensorflow.MemAllocatorStats";
410 }
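// Illustrative usage sketch (not part of the protoc output; assumes the
// standard generated accessors declared in bfc_memory_map.pb.h for the fields
// above):
//
//   tensorflow::MemAllocatorStats stats;
//   stats.set_num_allocs(42);
//   stats.set_bytes_in_use(1 << 20);
//   stats.set_fragmentation_metric(0.25f);
//   std::string wire;
//   stats.SerializeToString(&wire);      // drives _InternalSerialize() above
//   tensorflow::MemAllocatorStats parsed;
//   parsed.ParseFromString(wire);        // drives _InternalParse() above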
411
412
413 // ===================================================================
414
415 class MemChunk::_Internal {
416 public:
417 };
418
419 MemChunk::MemChunk(::PROTOBUF_NAMESPACE_ID::Arena* arena,
420 bool is_message_owned)
421 : ::PROTOBUF_NAMESPACE_ID::MessageLite(arena, is_message_owned) {
422 SharedCtor(arena, is_message_owned);
423 // @@protoc_insertion_point(arena_constructor:tensorflow.MemChunk)
424 }
425 MemChunk::MemChunk(const MemChunk& from)
426 : ::PROTOBUF_NAMESPACE_ID::MessageLite() {
427 MemChunk* const _this = this; (void)_this;
428 new (&_impl_) Impl_{
429 decltype(_impl_.op_name_){}
430 , decltype(_impl_.address_){}
431 , decltype(_impl_.size_){}
432 , decltype(_impl_.requested_size_){}
433 , decltype(_impl_.freed_at_count_){}
434 , decltype(_impl_.bin_){}
435 , decltype(_impl_.in_use_){}
436 , decltype(_impl_.action_count_){}
437 , decltype(_impl_.step_id_){}
438 , /*decltype(_impl_._cached_size_)*/{}};
439
440 _internal_metadata_.MergeFrom<std::string>(from._internal_metadata_);
441 _impl_.op_name_.InitDefault();
442 #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
443 _impl_.op_name_.Set("", GetArenaForAllocation());
444 #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
445 if (!from._internal_op_name().empty()) {
446 _this->_impl_.op_name_.Set(from._internal_op_name(),
447 _this->GetArenaForAllocation());
448 }
449 ::memcpy(&_impl_.address_, &from._impl_.address_,
450 static_cast<size_t>(reinterpret_cast<char*>(&_impl_.step_id_) -
451 reinterpret_cast<char*>(&_impl_.address_)) + sizeof(_impl_.step_id_));
452 // @@protoc_insertion_point(copy_constructor:tensorflow.MemChunk)
453 }
454
455 inline void MemChunk::SharedCtor(
456 ::_pb::Arena* arena, bool is_message_owned) {
457 (void)arena;
458 (void)is_message_owned;
459 new (&_impl_) Impl_{
460 decltype(_impl_.op_name_){}
461 , decltype(_impl_.address_){::uint64_t{0u}}
462 , decltype(_impl_.size_){::int64_t{0}}
463 , decltype(_impl_.requested_size_){::int64_t{0}}
464 , decltype(_impl_.freed_at_count_){::uint64_t{0u}}
465 , decltype(_impl_.bin_){0}
466 , decltype(_impl_.in_use_){false}
467 , decltype(_impl_.action_count_){::uint64_t{0u}}
468 , decltype(_impl_.step_id_){::uint64_t{0u}}
469 , /*decltype(_impl_._cached_size_)*/{}
470 };
471 _impl_.op_name_.InitDefault();
472 #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
473 _impl_.op_name_.Set("", GetArenaForAllocation());
474 #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
475 }
476
477 MemChunk::~MemChunk() {
478 // @@protoc_insertion_point(destructor:tensorflow.MemChunk)
479 if (auto *arena = _internal_metadata_.DeleteReturnArena<std::string>()) {
480 (void)arena;
481 return;
482 }
483 SharedDtor();
484 }
485
486 inline void MemChunk::SharedDtor() {
487 GOOGLE_DCHECK(GetArenaForAllocation() == nullptr);
488 _impl_.op_name_.Destroy();
489 }
490
491 void MemChunk::SetCachedSize(int size) const {
492 _impl_._cached_size_.Set(size);
493 }
494
495 void MemChunk::Clear() {
496 // @@protoc_insertion_point(message_clear_start:tensorflow.MemChunk)
497 ::uint32_t cached_has_bits = 0;
498 // Prevent compiler warnings about cached_has_bits being unused
499 (void) cached_has_bits;
500
501 _impl_.op_name_.ClearToEmpty();
502 ::memset(&_impl_.address_, 0, static_cast<size_t>(
503 reinterpret_cast<char*>(&_impl_.step_id_) -
504 reinterpret_cast<char*>(&_impl_.address_)) + sizeof(_impl_.step_id_));
505 _internal_metadata_.Clear<std::string>();
506 }
507
508 const char* MemChunk::_InternalParse(const char* ptr, ::_pbi::ParseContext* ctx) {
509 #define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure
510 while (!ctx->Done(&ptr)) {
511 ::uint32_t tag;
512 ptr = ::_pbi::ReadTag(ptr, &tag);
513 switch (tag >> 3) {
514 // uint64 address = 1;
515 case 1:
516 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 8)) {
517 _impl_.address_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
518 CHK_(ptr);
519 } else {
520 goto handle_unusual;
521 }
522 continue;
523 // int64 size = 2;
524 case 2:
525 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 16)) {
526 _impl_.size_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
527 CHK_(ptr);
528 } else {
529 goto handle_unusual;
530 }
531 continue;
532 // int64 requested_size = 3;
533 case 3:
534 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 24)) {
535 _impl_.requested_size_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
536 CHK_(ptr);
537 } else {
538 goto handle_unusual;
539 }
540 continue;
541 // int32 bin = 4;
542 case 4:
543 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 32)) {
544 _impl_.bin_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint32(&ptr);
545 CHK_(ptr);
546 } else {
547 goto handle_unusual;
548 }
549 continue;
550 // string op_name = 5;
551 case 5:
552 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 42)) {
553 auto str = _internal_mutable_op_name();
554 ptr = ::_pbi::InlineGreedyStringParser(str, ptr, ctx);
555 CHK_(ptr);
556 CHK_(::_pbi::VerifyUTF8(str, nullptr));
557 } else {
558 goto handle_unusual;
559 }
560 continue;
561 // uint64 freed_at_count = 6;
562 case 6:
563 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 48)) {
564 _impl_.freed_at_count_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
565 CHK_(ptr);
566 } else {
567 goto handle_unusual;
568 }
569 continue;
570 // uint64 action_count = 7;
571 case 7:
572 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 56)) {
573 _impl_.action_count_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
574 CHK_(ptr);
575 } else {
576 goto handle_unusual;
577 }
578 continue;
579 // bool in_use = 8;
580 case 8:
581 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 64)) {
582 _impl_.in_use_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
583 CHK_(ptr);
584 } else {
585 goto handle_unusual;
586 }
587 continue;
588 // uint64 step_id = 9;
589 case 9:
590 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 72)) {
591 _impl_.step_id_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
592 CHK_(ptr);
593 } else {
594 goto handle_unusual;
595 }
596 continue;
597 default:
598 goto handle_unusual;
599 } // switch
600 handle_unusual:
601 if ((tag == 0) || ((tag & 7) == 4)) {
602 CHK_(ptr);
603 ctx->SetLastTag(tag);
604 goto message_done;
605 }
606 ptr = UnknownFieldParse(
607 tag,
608 _internal_metadata_.mutable_unknown_fields<std::string>(),
609 ptr, ctx);
610 CHK_(ptr != nullptr);
611 } // while
612 message_done:
613 return ptr;
614 failure:
615 ptr = nullptr;
616 goto message_done;
617 #undef CHK_
618 }
619
620 ::uint8_t* MemChunk::_InternalSerialize(
621 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const {
622 // @@protoc_insertion_point(serialize_to_array_start:tensorflow.MemChunk)
623 ::uint32_t cached_has_bits = 0;
624 (void) cached_has_bits;
625
626 // uint64 address = 1;
627 if (this->_internal_address() != 0) {
628 target = stream->EnsureSpace(target);
629 target = ::_pbi::WireFormatLite::WriteUInt64ToArray(1, this->_internal_address(), target);
630 }
631
632 // int64 size = 2;
633 if (this->_internal_size() != 0) {
634 target = stream->EnsureSpace(target);
635 target = ::_pbi::WireFormatLite::WriteInt64ToArray(2, this->_internal_size(), target);
636 }
637
638 // int64 requested_size = 3;
639 if (this->_internal_requested_size() != 0) {
640 target = stream->EnsureSpace(target);
641 target = ::_pbi::WireFormatLite::WriteInt64ToArray(3, this->_internal_requested_size(), target);
642 }
643
644 // int32 bin = 4;
645 if (this->_internal_bin() != 0) {
646 target = stream->EnsureSpace(target);
647 target = ::_pbi::WireFormatLite::WriteInt32ToArray(4, this->_internal_bin(), target);
648 }
649
650 // string op_name = 5;
651 if (!this->_internal_op_name().empty()) {
652 ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String(
653 this->_internal_op_name().data(), static_cast<int>(this->_internal_op_name().length()),
654 ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SERIALIZE,
655 "tensorflow.MemChunk.op_name");
656 target = stream->WriteStringMaybeAliased(
657 5, this->_internal_op_name(), target);
658 }
659
660 // uint64 freed_at_count = 6;
661 if (this->_internal_freed_at_count() != 0) {
662 target = stream->EnsureSpace(target);
663 target = ::_pbi::WireFormatLite::WriteUInt64ToArray(6, this->_internal_freed_at_count(), target);
664 }
665
666 // uint64 action_count = 7;
667 if (this->_internal_action_count() != 0) {
668 target = stream->EnsureSpace(target);
669 target = ::_pbi::WireFormatLite::WriteUInt64ToArray(7, this->_internal_action_count(), target);
670 }
671
672 // bool in_use = 8;
673 if (this->_internal_in_use() != 0) {
674 target = stream->EnsureSpace(target);
675 target = ::_pbi::WireFormatLite::WriteBoolToArray(8, this->_internal_in_use(), target);
676 }
677
678 // uint64 step_id = 9;
679 if (this->_internal_step_id() != 0) {
680 target = stream->EnsureSpace(target);
681 target = ::_pbi::WireFormatLite::WriteUInt64ToArray(9, this->_internal_step_id(), target);
682 }
683
684 if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
685 target = stream->WriteRaw(_internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).data(),
686 static_cast<int>(_internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).size()), target);
687 }
688 // @@protoc_insertion_point(serialize_to_array_end:tensorflow.MemChunk)
689 return target;
690 }
691
692 size_t MemChunk::ByteSizeLong() const {
693 // @@protoc_insertion_point(message_byte_size_start:tensorflow.MemChunk)
694 size_t total_size = 0;
695
696 ::uint32_t cached_has_bits = 0;
697 // Prevent compiler warnings about cached_has_bits being unused
698 (void) cached_has_bits;
699
700 // string op_name = 5;
701 if (!this->_internal_op_name().empty()) {
702 total_size += 1 +
703 ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::StringSize(
704 this->_internal_op_name());
705 }
706
707 // uint64 address = 1;
708 if (this->_internal_address() != 0) {
709 total_size += ::_pbi::WireFormatLite::UInt64SizePlusOne(this->_internal_address());
710 }
711
712 // int64 size = 2;
713 if (this->_internal_size() != 0) {
714 total_size += ::_pbi::WireFormatLite::Int64SizePlusOne(this->_internal_size());
715 }
716
717 // int64 requested_size = 3;
718 if (this->_internal_requested_size() != 0) {
719 total_size += ::_pbi::WireFormatLite::Int64SizePlusOne(this->_internal_requested_size());
720 }
721
722 // uint64 freed_at_count = 6;
723 if (this->_internal_freed_at_count() != 0) {
724 total_size += ::_pbi::WireFormatLite::UInt64SizePlusOne(this->_internal_freed_at_count());
725 }
726
727 // int32 bin = 4;
728 if (this->_internal_bin() != 0) {
729 total_size += ::_pbi::WireFormatLite::Int32SizePlusOne(this->_internal_bin());
730 }
731
732 // bool in_use = 8;
733 if (this->_internal_in_use() != 0) {
734 total_size += 1 + 1;
735 }
736
737 // uint64 action_count = 7;
738 if (this->_internal_action_count() != 0) {
739 total_size += ::_pbi::WireFormatLite::UInt64SizePlusOne(this->_internal_action_count());
740 }
741
742 // uint64 step_id = 9;
743 if (this->_internal_step_id() != 0) {
744 total_size += ::_pbi::WireFormatLite::UInt64SizePlusOne(this->_internal_step_id());
745 }
746
747 if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
748 total_size += _internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).size();
749 }
750 int cached_size = ::_pbi::ToCachedSize(total_size);
751 SetCachedSize(cached_size);
752 return total_size;
753 }
754
755 void MemChunk::CheckTypeAndMergeFrom(
756 const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) {
757 MergeFrom(*::_pbi::DownCast<const MemChunk*>(
758 &from));
759 }
760
761 void MemChunk::MergeFrom(const MemChunk& from) {
762 MemChunk* const _this = this;
763 // @@protoc_insertion_point(class_specific_merge_from_start:tensorflow.MemChunk)
764 GOOGLE_DCHECK_NE(&from, _this);
765 ::uint32_t cached_has_bits = 0;
766 (void) cached_has_bits;
767
768 if (!from._internal_op_name().empty()) {
769 _this->_internal_set_op_name(from._internal_op_name());
770 }
771 if (from._internal_address() != 0) {
772 _this->_internal_set_address(from._internal_address());
773 }
774 if (from._internal_size() != 0) {
775 _this->_internal_set_size(from._internal_size());
776 }
777 if (from._internal_requested_size() != 0) {
778 _this->_internal_set_requested_size(from._internal_requested_size());
779 }
780 if (from._internal_freed_at_count() != 0) {
781 _this->_internal_set_freed_at_count(from._internal_freed_at_count());
782 }
783 if (from._internal_bin() != 0) {
784 _this->_internal_set_bin(from._internal_bin());
785 }
786 if (from._internal_in_use() != 0) {
787 _this->_internal_set_in_use(from._internal_in_use());
788 }
789 if (from._internal_action_count() != 0) {
790 _this->_internal_set_action_count(from._internal_action_count());
791 }
792 if (from._internal_step_id() != 0) {
793 _this->_internal_set_step_id(from._internal_step_id());
794 }
795 _this->_internal_metadata_.MergeFrom<std::string>(from._internal_metadata_);
796 }
797
798 void MemChunk::CopyFrom(const MemChunk& from) {
799 // @@protoc_insertion_point(class_specific_copy_from_start:tensorflow.MemChunk)
800 if (&from == this) return;
801 Clear();
802 MergeFrom(from);
803 }
804
805 bool MemChunk::IsInitialized() const {
806 return true;
807 }
808
809 void MemChunk::InternalSwap(MemChunk* other) {
810 using std::swap;
811 auto* lhs_arena = GetArenaForAllocation();
812 auto* rhs_arena = other->GetArenaForAllocation();
813 _internal_metadata_.InternalSwap(&other->_internal_metadata_);
814 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::InternalSwap(
815 &_impl_.op_name_, lhs_arena,
816 &other->_impl_.op_name_, rhs_arena
817 );
818 ::PROTOBUF_NAMESPACE_ID::internal::memswap<
819 PROTOBUF_FIELD_OFFSET(MemChunk, _impl_.step_id_)
820 + sizeof(MemChunk::_impl_.step_id_) // NOLINT
821 - PROTOBUF_FIELD_OFFSET(MemChunk, _impl_.address_)>(
822 reinterpret_cast<char*>(&_impl_.address_),
823 reinterpret_cast<char*>(&other->_impl_.address_));
824 }
825
826 std::string MemChunk::GetTypeName() const {
827 return "tensorflow.MemChunk";
828 }
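// Sketch of the merge semantics implemented above (assumes the standard
// generated accessors): MergeFrom() copies only fields whose source value is
// non-zero / non-empty, so default values never overwrite data already set on
// the destination.
//
//   tensorflow::MemChunk a, b;
//   a.set_op_name("MatMul");
//   a.set_size(256);
//   b.set_in_use(true);      // b.size() is left at its default of 0
//   a.MergeFrom(b);          // a.size() stays 256; a.in_use() becomes true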
829
830
831 // ===================================================================
832
833 class BinSummary::_Internal {
834 public:
835 };
836
837 BinSummary::BinSummary(::PROTOBUF_NAMESPACE_ID::Arena* arena,
838 bool is_message_owned)
839 : ::PROTOBUF_NAMESPACE_ID::MessageLite(arena, is_message_owned) {
840 SharedCtor(arena, is_message_owned);
841 // @@protoc_insertion_point(arena_constructor:tensorflow.BinSummary)
842 }
843 BinSummary::BinSummary(const BinSummary& from)
844 : ::PROTOBUF_NAMESPACE_ID::MessageLite() {
845 BinSummary* const _this = this; (void)_this;
846 new (&_impl_) Impl_{
847 decltype(_impl_.total_bytes_in_use_){}
848 , decltype(_impl_.total_bytes_in_bin_){}
849 , decltype(_impl_.total_chunks_in_use_){}
850 , decltype(_impl_.total_chunks_in_bin_){}
851 , decltype(_impl_.bin_){}
852 , /*decltype(_impl_._cached_size_)*/{}};
853
854 _internal_metadata_.MergeFrom<std::string>(from._internal_metadata_);
855 ::memcpy(&_impl_.total_bytes_in_use_, &from._impl_.total_bytes_in_use_,
856 static_cast<size_t>(reinterpret_cast<char*>(&_impl_.bin_) -
857 reinterpret_cast<char*>(&_impl_.total_bytes_in_use_)) + sizeof(_impl_.bin_));
858 // @@protoc_insertion_point(copy_constructor:tensorflow.BinSummary)
859 }
860
861 inline void BinSummary::SharedCtor(
862 ::_pb::Arena* arena, bool is_message_owned) {
863 (void)arena;
864 (void)is_message_owned;
865 new (&_impl_) Impl_{
866 decltype(_impl_.total_bytes_in_use_){::int64_t{0}}
867 , decltype(_impl_.total_bytes_in_bin_){::int64_t{0}}
868 , decltype(_impl_.total_chunks_in_use_){::int64_t{0}}
869 , decltype(_impl_.total_chunks_in_bin_){::int64_t{0}}
870 , decltype(_impl_.bin_){0}
871 , /*decltype(_impl_._cached_size_)*/{}
872 };
873 }
874
875 BinSummary::~BinSummary() {
876 // @@protoc_insertion_point(destructor:tensorflow.BinSummary)
877 if (auto *arena = _internal_metadata_.DeleteReturnArena<std::string>()) {
878 (void)arena;
879 return;
880 }
881 SharedDtor();
882 }
883
884 inline void BinSummary::SharedDtor() {
885 GOOGLE_DCHECK(GetArenaForAllocation() == nullptr);
886 }
887
888 void BinSummary::SetCachedSize(int size) const {
889 _impl_._cached_size_.Set(size);
890 }
891
892 void BinSummary::Clear() {
893 // @@protoc_insertion_point(message_clear_start:tensorflow.BinSummary)
894 ::uint32_t cached_has_bits = 0;
895 // Prevent compiler warnings about cached_has_bits being unused
896 (void) cached_has_bits;
897
898 ::memset(&_impl_.total_bytes_in_use_, 0, static_cast<size_t>(
899 reinterpret_cast<char*>(&_impl_.bin_) -
900 reinterpret_cast<char*>(&_impl_.total_bytes_in_use_)) + sizeof(_impl_.bin_));
901 _internal_metadata_.Clear<std::string>();
902 }
903
904 const char* BinSummary::_InternalParse(const char* ptr, ::_pbi::ParseContext* ctx) {
905 #define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure
906 while (!ctx->Done(&ptr)) {
907 ::uint32_t tag;
908 ptr = ::_pbi::ReadTag(ptr, &tag);
909 switch (tag >> 3) {
910 // int32 bin = 1;
911 case 1:
912 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 8)) {
913 _impl_.bin_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint32(&ptr);
914 CHK_(ptr);
915 } else {
916 goto handle_unusual;
917 }
918 continue;
919 // int64 total_bytes_in_use = 2;
920 case 2:
921 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 16)) {
922 _impl_.total_bytes_in_use_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
923 CHK_(ptr);
924 } else {
925 goto handle_unusual;
926 }
927 continue;
928 // int64 total_bytes_in_bin = 3;
929 case 3:
930 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 24)) {
931 _impl_.total_bytes_in_bin_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
932 CHK_(ptr);
933 } else {
934 goto handle_unusual;
935 }
936 continue;
937 // int64 total_chunks_in_use = 4;
938 case 4:
939 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 32)) {
940 _impl_.total_chunks_in_use_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
941 CHK_(ptr);
942 } else {
943 goto handle_unusual;
944 }
945 continue;
946 // int64 total_chunks_in_bin = 5;
947 case 5:
948 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 40)) {
949 _impl_.total_chunks_in_bin_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
950 CHK_(ptr);
951 } else {
952 goto handle_unusual;
953 }
954 continue;
955 default:
956 goto handle_unusual;
957 } // switch
958 handle_unusual:
959 if ((tag == 0) || ((tag & 7) == 4)) {
960 CHK_(ptr);
961 ctx->SetLastTag(tag);
962 goto message_done;
963 }
964 ptr = UnknownFieldParse(
965 tag,
966 _internal_metadata_.mutable_unknown_fields<std::string>(),
967 ptr, ctx);
968 CHK_(ptr != nullptr);
969 } // while
970 message_done:
971 return ptr;
972 failure:
973 ptr = nullptr;
974 goto message_done;
975 #undef CHK_
976 }
977
978 ::uint8_t* BinSummary::_InternalSerialize(
979 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const {
980 // @@protoc_insertion_point(serialize_to_array_start:tensorflow.BinSummary)
981 ::uint32_t cached_has_bits = 0;
982 (void) cached_has_bits;
983
984 // int32 bin = 1;
985 if (this->_internal_bin() != 0) {
986 target = stream->EnsureSpace(target);
987 target = ::_pbi::WireFormatLite::WriteInt32ToArray(1, this->_internal_bin(), target);
988 }
989
990 // int64 total_bytes_in_use = 2;
991 if (this->_internal_total_bytes_in_use() != 0) {
992 target = stream->EnsureSpace(target);
993 target = ::_pbi::WireFormatLite::WriteInt64ToArray(2, this->_internal_total_bytes_in_use(), target);
994 }
995
996 // int64 total_bytes_in_bin = 3;
997 if (this->_internal_total_bytes_in_bin() != 0) {
998 target = stream->EnsureSpace(target);
999 target = ::_pbi::WireFormatLite::WriteInt64ToArray(3, this->_internal_total_bytes_in_bin(), target);
1000 }
1001
1002 // int64 total_chunks_in_use = 4;
1003 if (this->_internal_total_chunks_in_use() != 0) {
1004 target = stream->EnsureSpace(target);
1005 target = ::_pbi::WireFormatLite::WriteInt64ToArray(4, this->_internal_total_chunks_in_use(), target);
1006 }
1007
1008 // int64 total_chunks_in_bin = 5;
1009 if (this->_internal_total_chunks_in_bin() != 0) {
1010 target = stream->EnsureSpace(target);
1011 target = ::_pbi::WireFormatLite::WriteInt64ToArray(5, this->_internal_total_chunks_in_bin(), target);
1012 }
1013
1014 if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
1015 target = stream->WriteRaw(_internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).data(),
1016 static_cast<int>(_internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).size()), target);
1017 }
1018 // @@protoc_insertion_point(serialize_to_array_end:tensorflow.BinSummary)
1019 return target;
1020 }
1021
1022 size_t BinSummary::ByteSizeLong() const {
1023 // @@protoc_insertion_point(message_byte_size_start:tensorflow.BinSummary)
1024 size_t total_size = 0;
1025
1026 ::uint32_t cached_has_bits = 0;
1027 // Prevent compiler warnings about cached_has_bits being unused
1028 (void) cached_has_bits;
1029
1030 // int64 total_bytes_in_use = 2;
1031 if (this->_internal_total_bytes_in_use() != 0) {
1032 total_size += ::_pbi::WireFormatLite::Int64SizePlusOne(this->_internal_total_bytes_in_use());
1033 }
1034
1035 // int64 total_bytes_in_bin = 3;
1036 if (this->_internal_total_bytes_in_bin() != 0) {
1037 total_size += ::_pbi::WireFormatLite::Int64SizePlusOne(this->_internal_total_bytes_in_bin());
1038 }
1039
1040 // int64 total_chunks_in_use = 4;
1041 if (this->_internal_total_chunks_in_use() != 0) {
1042 total_size += ::_pbi::WireFormatLite::Int64SizePlusOne(this->_internal_total_chunks_in_use());
1043 }
1044
1045 // int64 total_chunks_in_bin = 5;
1046 if (this->_internal_total_chunks_in_bin() != 0) {
1047 total_size += ::_pbi::WireFormatLite::Int64SizePlusOne(this->_internal_total_chunks_in_bin());
1048 }
1049
1050 // int32 bin = 1;
1051 if (this->_internal_bin() != 0) {
1052 total_size += ::_pbi::WireFormatLite::Int32SizePlusOne(this->_internal_bin());
1053 }
1054
1055 if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
1056 total_size += _internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).size();
1057 }
1058 int cached_size = ::_pbi::ToCachedSize(total_size);
1059 SetCachedSize(cached_size);
1060 return total_size;
1061 }
1062
1063 void BinSummary::CheckTypeAndMergeFrom(
1064 const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) {
1065 MergeFrom(*::_pbi::DownCast<const BinSummary*>(
1066 &from));
1067 }
1068
1069 void BinSummary::MergeFrom(const BinSummary& from) {
1070 BinSummary* const _this = this;
1071 // @@protoc_insertion_point(class_specific_merge_from_start:tensorflow.BinSummary)
1072 GOOGLE_DCHECK_NE(&from, _this);
1073 ::uint32_t cached_has_bits = 0;
1074 (void) cached_has_bits;
1075
1076 if (from._internal_total_bytes_in_use() != 0) {
1077 _this->_internal_set_total_bytes_in_use(from._internal_total_bytes_in_use());
1078 }
1079 if (from._internal_total_bytes_in_bin() != 0) {
1080 _this->_internal_set_total_bytes_in_bin(from._internal_total_bytes_in_bin());
1081 }
1082 if (from._internal_total_chunks_in_use() != 0) {
1083 _this->_internal_set_total_chunks_in_use(from._internal_total_chunks_in_use());
1084 }
1085 if (from._internal_total_chunks_in_bin() != 0) {
1086 _this->_internal_set_total_chunks_in_bin(from._internal_total_chunks_in_bin());
1087 }
1088 if (from._internal_bin() != 0) {
1089 _this->_internal_set_bin(from._internal_bin());
1090 }
1091 _this->_internal_metadata_.MergeFrom<std::string>(from._internal_metadata_);
1092 }
1093
1094 void BinSummary::CopyFrom(const BinSummary& from) {
1095 // @@protoc_insertion_point(class_specific_copy_from_start:tensorflow.BinSummary)
1096 if (&from == this) return;
1097 Clear();
1098 MergeFrom(from);
1099 }
1100
1101 bool BinSummary::IsInitialized() const {
1102 return true;
1103 }
1104
1105 void BinSummary::InternalSwap(BinSummary* other) {
1106 using std::swap;
1107 _internal_metadata_.InternalSwap(&other->_internal_metadata_);
1108 ::PROTOBUF_NAMESPACE_ID::internal::memswap<
1109 PROTOBUF_FIELD_OFFSET(BinSummary, _impl_.bin_)
1110 + sizeof(BinSummary::_impl_.bin_) // NOLINT
1111 - PROTOBUF_FIELD_OFFSET(BinSummary, _impl_.total_bytes_in_use_)>(
1112 reinterpret_cast<char*>(&_impl_.total_bytes_in_use_),
1113 reinterpret_cast<char*>(&other->_impl_.total_bytes_in_use_));
1114 }
1115
1116 std::string BinSummary::GetTypeName() const {
1117 return "tensorflow.BinSummary";
1118 }
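// Note: the copy constructor, Clear() and InternalSwap() above treat the
// contiguous scalar members (total_bytes_in_use_ through bin_) as a single
// byte range and memcpy/memset/memswap it in one shot, using
// PROTOBUF_FIELD_OFFSET to compute the span. This relies on those members
// being laid out adjacently inside Impl_, which the generated struct
// guarantees.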
1119
1120
1121 // ===================================================================
1122
1123 class SnapShot::_Internal {
1124 public:
1125 };
1126
1127 SnapShot::SnapShot(::PROTOBUF_NAMESPACE_ID::Arena* arena,
1128 bool is_message_owned)
1129 : ::PROTOBUF_NAMESPACE_ID::MessageLite(arena, is_message_owned) {
1130 SharedCtor(arena, is_message_owned);
1131 // @@protoc_insertion_point(arena_constructor:tensorflow.SnapShot)
1132 }
1133 SnapShot::SnapShot(const SnapShot& from)
1134 : ::PROTOBUF_NAMESPACE_ID::MessageLite() {
1135 SnapShot* const _this = this; (void)_this;
1136 new (&_impl_) Impl_{
1137 decltype(_impl_.action_count_){}
1138 , decltype(_impl_.size_){}
1139 , /*decltype(_impl_._cached_size_)*/{}};
1140
1141 _internal_metadata_.MergeFrom<std::string>(from._internal_metadata_);
1142 ::memcpy(&_impl_.action_count_, &from._impl_.action_count_,
1143 static_cast<size_t>(reinterpret_cast<char*>(&_impl_.size_) -
1144 reinterpret_cast<char*>(&_impl_.action_count_)) + sizeof(_impl_.size_));
1145 // @@protoc_insertion_point(copy_constructor:tensorflow.SnapShot)
1146 }
1147
1148 inline void SnapShot::SharedCtor(
1149 ::_pb::Arena* arena, bool is_message_owned) {
1150 (void)arena;
1151 (void)is_message_owned;
1152 new (&_impl_) Impl_{
1153 decltype(_impl_.action_count_){::uint64_t{0u}}
1154 , decltype(_impl_.size_){::int64_t{0}}
1155 , /*decltype(_impl_._cached_size_)*/{}
1156 };
1157 }
1158
1159 SnapShot::~SnapShot() {
1160 // @@protoc_insertion_point(destructor:tensorflow.SnapShot)
1161 if (auto *arena = _internal_metadata_.DeleteReturnArena<std::string>()) {
1162 (void)arena;
1163 return;
1164 }
1165 SharedDtor();
1166 }
1167
1168 inline void SnapShot::SharedDtor() {
1169 GOOGLE_DCHECK(GetArenaForAllocation() == nullptr);
1170 }
1171
1172 void SnapShot::SetCachedSize(int size) const {
1173 _impl_._cached_size_.Set(size);
1174 }
1175
1176 void SnapShot::Clear() {
1177 // @@protoc_insertion_point(message_clear_start:tensorflow.SnapShot)
1178 ::uint32_t cached_has_bits = 0;
1179 // Prevent compiler warnings about cached_has_bits being unused
1180 (void) cached_has_bits;
1181
1182 ::memset(&_impl_.action_count_, 0, static_cast<size_t>(
1183 reinterpret_cast<char*>(&_impl_.size_) -
1184 reinterpret_cast<char*>(&_impl_.action_count_)) + sizeof(_impl_.size_));
1185 _internal_metadata_.Clear<std::string>();
1186 }
1187
1188 const char* SnapShot::_InternalParse(const char* ptr, ::_pbi::ParseContext* ctx) {
1189 #define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure
1190 while (!ctx->Done(&ptr)) {
1191 ::uint32_t tag;
1192 ptr = ::_pbi::ReadTag(ptr, &tag);
1193 switch (tag >> 3) {
1194 // uint64 action_count = 1;
1195 case 1:
1196 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 8)) {
1197 _impl_.action_count_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
1198 CHK_(ptr);
1199 } else {
1200 goto handle_unusual;
1201 }
1202 continue;
1203 // int64 size = 2;
1204 case 2:
1205 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 16)) {
1206 _impl_.size_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
1207 CHK_(ptr);
1208 } else {
1209 goto handle_unusual;
1210 }
1211 continue;
1212 default:
1213 goto handle_unusual;
1214 } // switch
1215 handle_unusual:
1216 if ((tag == 0) || ((tag & 7) == 4)) {
1217 CHK_(ptr);
1218 ctx->SetLastTag(tag);
1219 goto message_done;
1220 }
1221 ptr = UnknownFieldParse(
1222 tag,
1223 _internal_metadata_.mutable_unknown_fields<std::string>(),
1224 ptr, ctx);
1225 CHK_(ptr != nullptr);
1226 } // while
1227 message_done:
1228 return ptr;
1229 failure:
1230 ptr = nullptr;
1231 goto message_done;
1232 #undef CHK_
1233 }
1234
1235 ::uint8_t* SnapShot::_InternalSerialize(
1236 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const {
1237 // @@protoc_insertion_point(serialize_to_array_start:tensorflow.SnapShot)
1238 ::uint32_t cached_has_bits = 0;
1239 (void) cached_has_bits;
1240
1241 // uint64 action_count = 1;
1242 if (this->_internal_action_count() != 0) {
1243 target = stream->EnsureSpace(target);
1244 target = ::_pbi::WireFormatLite::WriteUInt64ToArray(1, this->_internal_action_count(), target);
1245 }
1246
1247 // int64 size = 2;
1248 if (this->_internal_size() != 0) {
1249 target = stream->EnsureSpace(target);
1250 target = ::_pbi::WireFormatLite::WriteInt64ToArray(2, this->_internal_size(), target);
1251 }
1252
1253 if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
1254 target = stream->WriteRaw(_internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).data(),
1255 static_cast<int>(_internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).size()), target);
1256 }
1257 // @@protoc_insertion_point(serialize_to_array_end:tensorflow.SnapShot)
1258 return target;
1259 }
1260
1261 size_t SnapShot::ByteSizeLong() const {
1262 // @@protoc_insertion_point(message_byte_size_start:tensorflow.SnapShot)
1263 size_t total_size = 0;
1264
1265 ::uint32_t cached_has_bits = 0;
1266 // Prevent compiler warnings about cached_has_bits being unused
1267 (void) cached_has_bits;
1268
1269 // uint64 action_count = 1;
1270 if (this->_internal_action_count() != 0) {
1271 total_size += ::_pbi::WireFormatLite::UInt64SizePlusOne(this->_internal_action_count());
1272 }
1273
1274 // int64 size = 2;
1275 if (this->_internal_size() != 0) {
1276 total_size += ::_pbi::WireFormatLite::Int64SizePlusOne(this->_internal_size());
1277 }
1278
1279 if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
1280 total_size += _internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).size();
1281 }
1282 int cached_size = ::_pbi::ToCachedSize(total_size);
1283 SetCachedSize(cached_size);
1284 return total_size;
1285 }
1286
1287 void SnapShot::CheckTypeAndMergeFrom(
1288 const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) {
1289 MergeFrom(*::_pbi::DownCast<const SnapShot*>(
1290 &from));
1291 }
1292
1293 void SnapShot::MergeFrom(const SnapShot& from) {
1294 SnapShot* const _this = this;
1295 // @@protoc_insertion_point(class_specific_merge_from_start:tensorflow.SnapShot)
1296 GOOGLE_DCHECK_NE(&from, _this);
1297 ::uint32_t cached_has_bits = 0;
1298 (void) cached_has_bits;
1299
1300 if (from._internal_action_count() != 0) {
1301 _this->_internal_set_action_count(from._internal_action_count());
1302 }
1303 if (from._internal_size() != 0) {
1304 _this->_internal_set_size(from._internal_size());
1305 }
1306 _this->_internal_metadata_.MergeFrom<std::string>(from._internal_metadata_);
1307 }
1308
1309 void SnapShot::CopyFrom(const SnapShot& from) {
1310 // @@protoc_insertion_point(class_specific_copy_from_start:tensorflow.SnapShot)
1311 if (&from == this) return;
1312 Clear();
1313 MergeFrom(from);
1314 }
1315
1316 bool SnapShot::IsInitialized() const {
1317 return true;
1318 }
1319
1320 void SnapShot::InternalSwap(SnapShot* other) {
1321 using std::swap;
1322 _internal_metadata_.InternalSwap(&other->_internal_metadata_);
1323 ::PROTOBUF_NAMESPACE_ID::internal::memswap<
1324 PROTOBUF_FIELD_OFFSET(SnapShot, _impl_.size_)
1325 + sizeof(SnapShot::_impl_.size_) // NOLINT
1326 - PROTOBUF_FIELD_OFFSET(SnapShot, _impl_.action_count_)>(
1327 reinterpret_cast<char*>(&_impl_.action_count_),
1328 reinterpret_cast<char*>(&other->_impl_.action_count_));
1329 }
1330
1331 std::string SnapShot::GetTypeName() const {
1332 return "tensorflow.SnapShot";
1333 }
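// Note: these classes are generated against the lite runtime (MessageLite base
// class, string-backed unknown fields, hard-coded GetTypeName()), so there is
// no descriptor or reflection support. This is typically the result of the
// .proto file being built with option optimize_for = LITE_RUNTIME.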
1334
1335
1336 // ===================================================================
1337
1338 class MemoryDump::_Internal {
1339 public:
1340 static const ::tensorflow::MemAllocatorStats& stats(const MemoryDump* msg);
1341 };
1342
1343 const ::tensorflow::MemAllocatorStats&
1344 MemoryDump::_Internal::stats(const MemoryDump* msg) {
1345 return *msg->_impl_.stats_;
1346 }
1347 MemoryDump::MemoryDump(::PROTOBUF_NAMESPACE_ID::Arena* arena,
1348 bool is_message_owned)
1349 : ::PROTOBUF_NAMESPACE_ID::MessageLite(arena, is_message_owned) {
1350 SharedCtor(arena, is_message_owned);
1351 // @@protoc_insertion_point(arena_constructor:tensorflow.MemoryDump)
1352 }
1353 MemoryDump::MemoryDump(const MemoryDump& from)
1354 : ::PROTOBUF_NAMESPACE_ID::MessageLite() {
1355 MemoryDump* const _this = this; (void)_this;
1356 new (&_impl_) Impl_{
1357 decltype(_impl_.bin_summary_){from._impl_.bin_summary_}
1358 , decltype(_impl_.chunk_){from._impl_.chunk_}
1359 , decltype(_impl_.snap_shot_){from._impl_.snap_shot_}
1360 , decltype(_impl_.allocator_name_){}
1361 , decltype(_impl_.stats_){nullptr}
1362 , /*decltype(_impl_._cached_size_)*/{}};
1363
1364 _internal_metadata_.MergeFrom<std::string>(from._internal_metadata_);
1365 _impl_.allocator_name_.InitDefault();
1366 #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
1367 _impl_.allocator_name_.Set("", GetArenaForAllocation());
1368 #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
1369 if (!from._internal_allocator_name().empty()) {
1370 _this->_impl_.allocator_name_.Set(from._internal_allocator_name(),
1371 _this->GetArenaForAllocation());
1372 }
1373 if (from._internal_has_stats()) {
1374 _this->_impl_.stats_ = new ::tensorflow::MemAllocatorStats(*from._impl_.stats_);
1375 }
1376 // @@protoc_insertion_point(copy_constructor:tensorflow.MemoryDump)
1377 }
1378
1379 inline void MemoryDump::SharedCtor(
1380 ::_pb::Arena* arena, bool is_message_owned) {
1381 (void)arena;
1382 (void)is_message_owned;
1383 new (&_impl_) Impl_{
1384 decltype(_impl_.bin_summary_){arena}
1385 , decltype(_impl_.chunk_){arena}
1386 , decltype(_impl_.snap_shot_){arena}
1387 , decltype(_impl_.allocator_name_){}
1388 , decltype(_impl_.stats_){nullptr}
1389 , /*decltype(_impl_._cached_size_)*/{}
1390 };
1391 _impl_.allocator_name_.InitDefault();
1392 #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
1393 _impl_.allocator_name_.Set("", GetArenaForAllocation());
1394 #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
1395 }
1396
1397 MemoryDump::~MemoryDump() {
1398 // @@protoc_insertion_point(destructor:tensorflow.MemoryDump)
1399 if (auto *arena = _internal_metadata_.DeleteReturnArena<std::string>()) {
1400 (void)arena;
1401 return;
1402 }
1403 SharedDtor();
1404 }
1405
1406 inline void MemoryDump::SharedDtor() {
1407 GOOGLE_DCHECK(GetArenaForAllocation() == nullptr);
1408 _impl_.bin_summary_.~RepeatedPtrField();
1409 _impl_.chunk_.~RepeatedPtrField();
1410 _impl_.snap_shot_.~RepeatedPtrField();
1411 _impl_.allocator_name_.Destroy();
1412 if (this != internal_default_instance()) delete _impl_.stats_;
1413 }
1414
1415 void MemoryDump::SetCachedSize(int size) const {
1416 _impl_._cached_size_.Set(size);
1417 }
1418
1419 void MemoryDump::Clear() {
1420 // @@protoc_insertion_point(message_clear_start:tensorflow.MemoryDump)
1421 ::uint32_t cached_has_bits = 0;
1422 // Prevent compiler warnings about cached_has_bits being unused
1423 (void) cached_has_bits;
1424
1425 _impl_.bin_summary_.Clear();
1426 _impl_.chunk_.Clear();
1427 _impl_.snap_shot_.Clear();
1428 _impl_.allocator_name_.ClearToEmpty();
1429 if (GetArenaForAllocation() == nullptr && _impl_.stats_ != nullptr) {
1430 delete _impl_.stats_;
1431 }
1432 _impl_.stats_ = nullptr;
1433 _internal_metadata_.Clear<std::string>();
1434 }
1435
1436 const char* MemoryDump::_InternalParse(const char* ptr, ::_pbi::ParseContext* ctx) {
1437 #define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure
1438 while (!ctx->Done(&ptr)) {
1439 ::uint32_t tag;
1440 ptr = ::_pbi::ReadTag(ptr, &tag);
1441 switch (tag >> 3) {
1442 // string allocator_name = 1;
1443 case 1:
1444 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 10)) {
1445 auto str = _internal_mutable_allocator_name();
1446 ptr = ::_pbi::InlineGreedyStringParser(str, ptr, ctx);
1447 CHK_(ptr);
1448 CHK_(::_pbi::VerifyUTF8(str, nullptr));
1449 } else {
1450 goto handle_unusual;
1451 }
1452 continue;
1453 // repeated .tensorflow.BinSummary bin_summary = 2;
1454 case 2:
1455 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 18)) {
1456 ptr -= 1;
1457 do {
1458 ptr += 1;
1459 ptr = ctx->ParseMessage(_internal_add_bin_summary(), ptr);
1460 CHK_(ptr);
1461 if (!ctx->DataAvailable(ptr)) break;
1462 } while (::PROTOBUF_NAMESPACE_ID::internal::ExpectTag<18>(ptr));
1463 } else {
1464 goto handle_unusual;
1465 }
1466 continue;
1467 // repeated .tensorflow.MemChunk chunk = 3;
1468 case 3:
1469 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 26)) {
1470 ptr -= 1;
1471 do {
1472 ptr += 1;
1473 ptr = ctx->ParseMessage(_internal_add_chunk(), ptr);
1474 CHK_(ptr);
1475 if (!ctx->DataAvailable(ptr)) break;
1476 } while (::PROTOBUF_NAMESPACE_ID::internal::ExpectTag<26>(ptr));
1477 } else {
1478 goto handle_unusual;
1479 }
1480 continue;
1481 // repeated .tensorflow.SnapShot snap_shot = 4;
1482 case 4:
1483 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 34)) {
1484 ptr -= 1;
1485 do {
1486 ptr += 1;
1487 ptr = ctx->ParseMessage(_internal_add_snap_shot(), ptr);
1488 CHK_(ptr);
1489 if (!ctx->DataAvailable(ptr)) break;
1490 } while (::PROTOBUF_NAMESPACE_ID::internal::ExpectTag<34>(ptr));
1491 } else {
1492 goto handle_unusual;
1493 }
1494 continue;
1495 // .tensorflow.MemAllocatorStats stats = 5;
1496 case 5:
1497 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 42)) {
1498 ptr = ctx->ParseMessage(_internal_mutable_stats(), ptr);
1499 CHK_(ptr);
1500 } else {
1501 goto handle_unusual;
1502 }
1503 continue;
1504 default:
1505 goto handle_unusual;
1506 } // switch
1507 handle_unusual:
1508 if ((tag == 0) || ((tag & 7) == 4)) {
1509 CHK_(ptr);
1510 ctx->SetLastTag(tag);
1511 goto message_done;
1512 }
1513 ptr = UnknownFieldParse(
1514 tag,
1515 _internal_metadata_.mutable_unknown_fields<std::string>(),
1516 ptr, ctx);
1517 CHK_(ptr != nullptr);
1518 } // while
1519 message_done:
1520 return ptr;
1521 failure:
1522 ptr = nullptr;
1523 goto message_done;
1524 #undef CHK_
1525 }
1526
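// Serialization follows proto3 semantics: the string field is skipped when
// empty, checked for valid UTF-8, and written with WriteStringMaybeAliased;
// each submessage is written using the size cached by a preceding
// ByteSizeLong() pass, which the public Serialize* entry points run before
// calling this function. Unknown fields captured during parsing are appended
// verbatim at the end.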
::uint8_t* MemoryDump::_InternalSerialize(
    ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const {
  // @@protoc_insertion_point(serialize_to_array_start:tensorflow.MemoryDump)
  ::uint32_t cached_has_bits = 0;
  (void) cached_has_bits;

  // string allocator_name = 1;
  if (!this->_internal_allocator_name().empty()) {
    ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String(
        this->_internal_allocator_name().data(), static_cast<int>(this->_internal_allocator_name().length()),
        ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SERIALIZE,
        "tensorflow.MemoryDump.allocator_name");
    target = stream->WriteStringMaybeAliased(
        1, this->_internal_allocator_name(), target);
  }

  // repeated .tensorflow.BinSummary bin_summary = 2;
  for (unsigned i = 0,
      n = static_cast<unsigned>(this->_internal_bin_summary_size()); i < n; i++) {
    const auto& repfield = this->_internal_bin_summary(i);
    target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::
        InternalWriteMessage(2, repfield, repfield.GetCachedSize(), target, stream);
  }

  // repeated .tensorflow.MemChunk chunk = 3;
  for (unsigned i = 0,
      n = static_cast<unsigned>(this->_internal_chunk_size()); i < n; i++) {
    const auto& repfield = this->_internal_chunk(i);
    target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::
        InternalWriteMessage(3, repfield, repfield.GetCachedSize(), target, stream);
  }

  // repeated .tensorflow.SnapShot snap_shot = 4;
  for (unsigned i = 0,
      n = static_cast<unsigned>(this->_internal_snap_shot_size()); i < n; i++) {
    const auto& repfield = this->_internal_snap_shot(i);
    target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::
        InternalWriteMessage(4, repfield, repfield.GetCachedSize(), target, stream);
  }

  // .tensorflow.MemAllocatorStats stats = 5;
  if (this->_internal_has_stats()) {
    target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::
      InternalWriteMessage(5, _Internal::stats(this),
        _Internal::stats(this).GetCachedSize(), target, stream);
  }

  if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
    target = stream->WriteRaw(_internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).data(),
        static_cast<int>(_internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).size()), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:tensorflow.MemoryDump)
  return target;
}

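// ByteSizeLong() computes the serialized size. Each `1UL *` / `1 +` term is
// the one-byte tag that a field numbered 15 or lower costs per occurrence;
// WireFormatLite::MessageSize and StringSize already include the payload's
// length prefix. The total is also stored through SetCachedSize(), and the
// per-element sizes cached along the way are what _InternalSerialize() reads
// back via GetCachedSize().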
size_t MemoryDump::ByteSizeLong() const {
// @@protoc_insertion_point(message_byte_size_start:tensorflow.MemoryDump)
  size_t total_size = 0;

  ::uint32_t cached_has_bits = 0;
  // Prevent compiler warnings about cached_has_bits being unused
  (void) cached_has_bits;

  // repeated .tensorflow.BinSummary bin_summary = 2;
  total_size += 1UL * this->_internal_bin_summary_size();
  for (const auto& msg : this->_impl_.bin_summary_) {
    total_size +=
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize(msg);
  }

  // repeated .tensorflow.MemChunk chunk = 3;
  total_size += 1UL * this->_internal_chunk_size();
  for (const auto& msg : this->_impl_.chunk_) {
    total_size +=
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize(msg);
  }

  // repeated .tensorflow.SnapShot snap_shot = 4;
  total_size += 1UL * this->_internal_snap_shot_size();
  for (const auto& msg : this->_impl_.snap_shot_) {
    total_size +=
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize(msg);
  }

  // string allocator_name = 1;
  if (!this->_internal_allocator_name().empty()) {
    total_size += 1 +
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::StringSize(
        this->_internal_allocator_name());
  }

  // .tensorflow.MemAllocatorStats stats = 5;
  if (this->_internal_has_stats()) {
    total_size += 1 +
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize(
        *_impl_.stats_);
  }

  if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
    total_size += _internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).size();
  }
  int cached_size = ::_pbi::ToCachedSize(total_size);
  SetCachedSize(cached_size);
  return total_size;
}

void MemoryDump::CheckTypeAndMergeFrom(
    const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) {
  MergeFrom(*::_pbi::DownCast<const MemoryDump*>(
      &from));
}

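// MergeFrom implements protobuf merge semantics: repeated fields are appended
// element by element, a non-empty source string overwrites the destination,
// and the stats submessage is merged recursively, being created on demand via
// _internal_mutable_stats() if the destination has none yet.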
void MemoryDump::MergeFrom(const MemoryDump& from) {
  MemoryDump* const _this = this;
  // @@protoc_insertion_point(class_specific_merge_from_start:tensorflow.MemoryDump)
  GOOGLE_DCHECK_NE(&from, _this);
  ::uint32_t cached_has_bits = 0;
  (void) cached_has_bits;

  _this->_impl_.bin_summary_.MergeFrom(from._impl_.bin_summary_);
  _this->_impl_.chunk_.MergeFrom(from._impl_.chunk_);
  _this->_impl_.snap_shot_.MergeFrom(from._impl_.snap_shot_);
  if (!from._internal_allocator_name().empty()) {
    _this->_internal_set_allocator_name(from._internal_allocator_name());
  }
  if (from._internal_has_stats()) {
    _this->_internal_mutable_stats()->::tensorflow::MemAllocatorStats::MergeFrom(
        from._internal_stats());
  }
  _this->_internal_metadata_.MergeFrom<std::string>(from._internal_metadata_);
}

void MemoryDump::CopyFrom(const MemoryDump& from) {
// @@protoc_insertion_point(class_specific_copy_from_start:tensorflow.MemoryDump)
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}

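// proto3 messages have no required fields, so the lite runtime's
// IsInitialized() check is trivially true for MemoryDump.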
bool MemoryDump::IsInitialized() const {
  return true;
}

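// InternalSwap exchanges the internal state of two messages: the metadata and
// repeated fields swap their internal pointers, and the ArenaStringPtr swap is
// passed both arenas so the string storage is handled correctly when the
// messages are arena-allocated.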
void MemoryDump::InternalSwap(MemoryDump* other) {
  using std::swap;
  auto* lhs_arena = GetArenaForAllocation();
  auto* rhs_arena = other->GetArenaForAllocation();
  _internal_metadata_.InternalSwap(&other->_internal_metadata_);
  _impl_.bin_summary_.InternalSwap(&other->_impl_.bin_summary_);
  _impl_.chunk_.InternalSwap(&other->_impl_.chunk_);
  _impl_.snap_shot_.InternalSwap(&other->_impl_.snap_shot_);
  ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::InternalSwap(
      &_impl_.allocator_name_, lhs_arena,
      &other->_impl_.allocator_name_, rhs_arena
  );
  swap(_impl_.stats_, other->_impl_.stats_);
}

std::string MemoryDump::GetTypeName() const {
  return "tensorflow.MemoryDump";
}


// @@protoc_insertion_point(namespace_scope)
}  // namespace tensorflow
PROTOBUF_NAMESPACE_OPEN
template<> PROTOBUF_NOINLINE ::tensorflow::MemAllocatorStats*
Arena::CreateMaybeMessage< ::tensorflow::MemAllocatorStats >(Arena* arena) {
  return Arena::CreateMessageInternal< ::tensorflow::MemAllocatorStats >(arena);
}
template<> PROTOBUF_NOINLINE ::tensorflow::MemChunk*
Arena::CreateMaybeMessage< ::tensorflow::MemChunk >(Arena* arena) {
  return Arena::CreateMessageInternal< ::tensorflow::MemChunk >(arena);
}
template<> PROTOBUF_NOINLINE ::tensorflow::BinSummary*
Arena::CreateMaybeMessage< ::tensorflow::BinSummary >(Arena* arena) {
  return Arena::CreateMessageInternal< ::tensorflow::BinSummary >(arena);
}
template<> PROTOBUF_NOINLINE ::tensorflow::SnapShot*
Arena::CreateMaybeMessage< ::tensorflow::SnapShot >(Arena* arena) {
  return Arena::CreateMessageInternal< ::tensorflow::SnapShot >(arena);
}
template<> PROTOBUF_NOINLINE ::tensorflow::MemoryDump*
Arena::CreateMaybeMessage< ::tensorflow::MemoryDump >(Arena* arena) {
  return Arena::CreateMessageInternal< ::tensorflow::MemoryDump >(arena);
}
PROTOBUF_NAMESPACE_CLOSE
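
// Usage sketch (illustrative only, not part of the generated code): the
// CreateMaybeMessage<> specializations above are what let these messages be
// allocated on an arena, e.g.
//
//   ::google::protobuf::Arena arena;
//   auto* dump =
//       ::google::protobuf::Arena::CreateMessage<::tensorflow::MemoryDump>(&arena);
//   dump->set_allocator_name("GPU_0_bfc");  // example value only
//   std::string wire;
//   dump->SerializeToString(&wire);  // provided by the MessageLite base class
//
// The arena owns `dump`, so it must not be deleted manually.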

// @@protoc_insertion_point(global_scope)
#include <google/protobuf/port_undef.inc>