1 // Generated by the protocol buffer compiler. DO NOT EDIT!
2 // source: tensorflow/core/protobuf/struct.proto
3
4 #ifndef GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fprotobuf_2fstruct_2eproto
5 #define GOOGLE_PROTOBUF_INCLUDED_tensorflow_2fcore_2fprotobuf_2fstruct_2eproto
6
7 #include <cstdint>
8 #include <limits>
9 #include <string>
10
11 #include <google/protobuf/port_def.inc>
12 #if PROTOBUF_VERSION < 3021000
13 #error This file was generated by a newer version of protoc which is
14 #error incompatible with your Protocol Buffer headers. Please update
15 #error your headers.
16 #endif
17 #if 3021012 < PROTOBUF_MIN_PROTOC_VERSION
18 #error This file was generated by an older version of protoc which is
19 #error incompatible with your Protocol Buffer headers. Please
20 #error regenerate this file with a newer version of protoc.
21 #endif
22
23 #include <google/protobuf/port_undef.inc>
24 #include <google/protobuf/io/coded_stream.h>
25 #include <google/protobuf/arena.h>
26 #include <google/protobuf/arenastring.h>
27 #include <google/protobuf/generated_message_util.h>
28 #include <google/protobuf/metadata_lite.h>
29 #include <google/protobuf/message_lite.h>
30 #include <google/protobuf/repeated_field.h> // IWYU pragma: export
31 #include <google/protobuf/extension_set.h> // IWYU pragma: export
32 #include <google/protobuf/map.h> // IWYU pragma: export
33 #include <google/protobuf/map_entry_lite.h>
34 #include <google/protobuf/map_field_lite.h>
35 #include <google/protobuf/generated_enum_util.h>
36 #include "tensorflow/core/framework/tensor.pb.h"
37 #include "tensorflow/core/framework/tensor_shape.pb.h"
38 #include "tensorflow/core/framework/types.pb.h"
39 // @@protoc_insertion_point(includes)
40 #include <google/protobuf/port_def.inc>
41 #define PROTOBUF_INTERNAL_EXPORT_tensorflow_2fcore_2fprotobuf_2fstruct_2eproto
42 PROTOBUF_NAMESPACE_OPEN
43 namespace internal {
44 class AnyMetadata;
45 } // namespace internal
46 PROTOBUF_NAMESPACE_CLOSE
47
48 // Internal implementation detail -- do not use these members.
49 struct TableStruct_tensorflow_2fcore_2fprotobuf_2fstruct_2eproto {
50 static const ::uint32_t offsets[];
51 };
52 namespace tensorflow {
53 class BoundedTensorSpecProto;
54 struct BoundedTensorSpecProtoDefaultTypeInternal;
55 extern BoundedTensorSpecProtoDefaultTypeInternal _BoundedTensorSpecProto_default_instance_;
56 class DictValue;
57 struct DictValueDefaultTypeInternal;
58 extern DictValueDefaultTypeInternal _DictValue_default_instance_;
59 class DictValue_FieldsEntry_DoNotUse;
60 struct DictValue_FieldsEntry_DoNotUseDefaultTypeInternal;
61 extern DictValue_FieldsEntry_DoNotUseDefaultTypeInternal _DictValue_FieldsEntry_DoNotUse_default_instance_;
62 class ListValue;
63 struct ListValueDefaultTypeInternal;
64 extern ListValueDefaultTypeInternal _ListValue_default_instance_;
65 class NamedTupleValue;
66 struct NamedTupleValueDefaultTypeInternal;
67 extern NamedTupleValueDefaultTypeInternal _NamedTupleValue_default_instance_;
68 class NoneValue;
69 struct NoneValueDefaultTypeInternal;
70 extern NoneValueDefaultTypeInternal _NoneValue_default_instance_;
71 class PairValue;
72 struct PairValueDefaultTypeInternal;
73 extern PairValueDefaultTypeInternal _PairValue_default_instance_;
74 class StructuredValue;
75 struct StructuredValueDefaultTypeInternal;
76 extern StructuredValueDefaultTypeInternal _StructuredValue_default_instance_;
77 class TensorSpecProto;
78 struct TensorSpecProtoDefaultTypeInternal;
79 extern TensorSpecProtoDefaultTypeInternal _TensorSpecProto_default_instance_;
80 class TupleValue;
81 struct TupleValueDefaultTypeInternal;
82 extern TupleValueDefaultTypeInternal _TupleValue_default_instance_;
83 class TypeSpecProto;
84 struct TypeSpecProtoDefaultTypeInternal;
85 extern TypeSpecProtoDefaultTypeInternal _TypeSpecProto_default_instance_;
86 } // namespace tensorflow
87 PROTOBUF_NAMESPACE_OPEN
88 template<> ::tensorflow::BoundedTensorSpecProto* Arena::CreateMaybeMessage<::tensorflow::BoundedTensorSpecProto>(Arena*);
89 template<> ::tensorflow::DictValue* Arena::CreateMaybeMessage<::tensorflow::DictValue>(Arena*);
90 template<> ::tensorflow::DictValue_FieldsEntry_DoNotUse* Arena::CreateMaybeMessage<::tensorflow::DictValue_FieldsEntry_DoNotUse>(Arena*);
91 template<> ::tensorflow::ListValue* Arena::CreateMaybeMessage<::tensorflow::ListValue>(Arena*);
92 template<> ::tensorflow::NamedTupleValue* Arena::CreateMaybeMessage<::tensorflow::NamedTupleValue>(Arena*);
93 template<> ::tensorflow::NoneValue* Arena::CreateMaybeMessage<::tensorflow::NoneValue>(Arena*);
94 template<> ::tensorflow::PairValue* Arena::CreateMaybeMessage<::tensorflow::PairValue>(Arena*);
95 template<> ::tensorflow::StructuredValue* Arena::CreateMaybeMessage<::tensorflow::StructuredValue>(Arena*);
96 template<> ::tensorflow::TensorSpecProto* Arena::CreateMaybeMessage<::tensorflow::TensorSpecProto>(Arena*);
97 template<> ::tensorflow::TupleValue* Arena::CreateMaybeMessage<::tensorflow::TupleValue>(Arena*);
98 template<> ::tensorflow::TypeSpecProto* Arena::CreateMaybeMessage<::tensorflow::TypeSpecProto>(Arena*);
99 PROTOBUF_NAMESPACE_CLOSE
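// Usage sketch (illustrative only, not part of the generated declarations in
// this header). Messages from this file can be allocated on a protobuf Arena
// through the public ::google::protobuf::Arena::CreateMessage API, which in
// turn uses the CreateMaybeMessage specializations declared above:
//
//   ::google::protobuf::Arena arena;
//   tensorflow::StructuredValue* value =
//       ::google::protobuf::Arena::CreateMessage<tensorflow::StructuredValue>(&arena);
//   value->set_bool_value(true);  // owned by `arena`; no manual delete needed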
100 namespace tensorflow {
101
102 enum TypeSpecProto_TypeSpecClass : int {
103 TypeSpecProto_TypeSpecClass_UNKNOWN = 0,
104 TypeSpecProto_TypeSpecClass_SPARSE_TENSOR_SPEC = 1,
105 TypeSpecProto_TypeSpecClass_INDEXED_SLICES_SPEC = 2,
106 TypeSpecProto_TypeSpecClass_RAGGED_TENSOR_SPEC = 3,
107 TypeSpecProto_TypeSpecClass_TENSOR_ARRAY_SPEC = 4,
108 TypeSpecProto_TypeSpecClass_DATA_DATASET_SPEC = 5,
109 TypeSpecProto_TypeSpecClass_DATA_ITERATOR_SPEC = 6,
110 TypeSpecProto_TypeSpecClass_OPTIONAL_SPEC = 7,
111 TypeSpecProto_TypeSpecClass_PER_REPLICA_SPEC = 8,
112 TypeSpecProto_TypeSpecClass_VARIABLE_SPEC = 9,
113 TypeSpecProto_TypeSpecClass_ROW_PARTITION_SPEC = 10,
114 TypeSpecProto_TypeSpecClass_REGISTERED_TYPE_SPEC = 12,
115 TypeSpecProto_TypeSpecClass_EXTENSION_TYPE_SPEC = 13,
116 TypeSpecProto_TypeSpecClass_TypeSpecProto_TypeSpecClass_INT_MIN_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::int32_t>::min(),
117 TypeSpecProto_TypeSpecClass_TypeSpecProto_TypeSpecClass_INT_MAX_SENTINEL_DO_NOT_USE_ = std::numeric_limits<::int32_t>::max()
118 };
119 bool TypeSpecProto_TypeSpecClass_IsValid(int value);
120 constexpr TypeSpecProto_TypeSpecClass TypeSpecProto_TypeSpecClass_TypeSpecClass_MIN = TypeSpecProto_TypeSpecClass_UNKNOWN;
121 constexpr TypeSpecProto_TypeSpecClass TypeSpecProto_TypeSpecClass_TypeSpecClass_MAX = TypeSpecProto_TypeSpecClass_EXTENSION_TYPE_SPEC;
122 constexpr int TypeSpecProto_TypeSpecClass_TypeSpecClass_ARRAYSIZE = TypeSpecProto_TypeSpecClass_TypeSpecClass_MAX + 1;
123
124 const std::string& TypeSpecProto_TypeSpecClass_Name(TypeSpecProto_TypeSpecClass value);
125 template<typename T>
126 inline const std::string& TypeSpecProto_TypeSpecClass_Name(T enum_t_value) {
127 static_assert(::std::is_same<T, TypeSpecProto_TypeSpecClass>::value ||
128 ::std::is_integral<T>::value,
129 "Incorrect type passed to function TypeSpecProto_TypeSpecClass_Name.");
130 return TypeSpecProto_TypeSpecClass_Name(static_cast<TypeSpecProto_TypeSpecClass>(enum_t_value));
131 }
132 bool TypeSpecProto_TypeSpecClass_Parse(
133 ::PROTOBUF_NAMESPACE_ID::ConstStringParam name, TypeSpecProto_TypeSpecClass* value);
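// Usage sketch (illustrative only, in user code outside this header):
// converting a TypeSpecClass value to its name and back with the helpers
// declared above.
//
//   tensorflow::TypeSpecProto_TypeSpecClass cls =
//       tensorflow::TypeSpecProto_TypeSpecClass_VARIABLE_SPEC;
//   const std::string& name = tensorflow::TypeSpecProto_TypeSpecClass_Name(cls);
//   tensorflow::TypeSpecProto_TypeSpecClass parsed;
//   bool ok = tensorflow::TypeSpecProto_TypeSpecClass_Parse(name, &parsed);
//   // ok == true and parsed == cls for any valid enum value.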
134 // ===================================================================
135
136 class StructuredValue final :
137 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.StructuredValue) */ {
138 public:
139 inline StructuredValue() : StructuredValue(nullptr) {}
140 ~StructuredValue() override;
141 explicit PROTOBUF_CONSTEXPR StructuredValue(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
142
143 StructuredValue(const StructuredValue& from);
144 StructuredValue(StructuredValue&& from) noexcept
145 : StructuredValue() {
146 *this = ::std::move(from);
147 }
148
149 inline StructuredValue& operator=(const StructuredValue& from) {
150 if (this == &from) return *this;
151 CopyFrom(from);
152 return *this;
153 }
154 inline StructuredValue& operator=(StructuredValue&& from) noexcept {
155 if (this == &from) return *this;
156 if (GetOwningArena() == from.GetOwningArena()
157 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
158 && GetOwningArena() != nullptr
159 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
160 ) {
161 InternalSwap(&from);
162 } else {
163 CopyFrom(from);
164 }
165 return *this;
166 }
167
168 static const StructuredValue& default_instance() {
169 return *internal_default_instance();
170 }
171 enum KindCase {
172 kNoneValue = 1,
173 kFloat64Value = 11,
174 kInt64Value = 12,
175 kStringValue = 13,
176 kBoolValue = 14,
177 kTensorShapeValue = 31,
178 kTensorDtypeValue = 32,
179 kTensorSpecValue = 33,
180 kTypeSpecValue = 34,
181 kBoundedTensorSpecValue = 35,
182 kListValue = 51,
183 kTupleValue = 52,
184 kDictValue = 53,
185 kNamedTupleValue = 54,
186 KIND_NOT_SET = 0,
187 };
188
189 static inline const StructuredValue* internal_default_instance() {
190 return reinterpret_cast<const StructuredValue*>(
191 &_StructuredValue_default_instance_);
192 }
193 static constexpr int kIndexInFileMessages =
194 0;
195
196 friend void swap(StructuredValue& a, StructuredValue& b) {
197 a.Swap(&b);
198 }
199 inline void Swap(StructuredValue* other) {
200 if (other == this) return;
201 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
202 if (GetOwningArena() != nullptr &&
203 GetOwningArena() == other->GetOwningArena()) {
204 #else // PROTOBUF_FORCE_COPY_IN_SWAP
205 if (GetOwningArena() == other->GetOwningArena()) {
206 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
207 InternalSwap(other);
208 } else {
209 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
210 }
211 }
212 void UnsafeArenaSwap(StructuredValue* other) {
213 if (other == this) return;
214 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
215 InternalSwap(other);
216 }
217
218 // implements Message ----------------------------------------------
219
220 StructuredValue* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
221 return CreateMaybeMessage<StructuredValue>(arena);
222 }
223 StructuredValue* New() const {
224 return New(nullptr);
225 }
226 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
227 void CopyFrom(const StructuredValue& from);
228 void MergeFrom(const StructuredValue& from);
229 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
230 bool IsInitialized() const final;
231
232 size_t ByteSizeLong() const final;
233 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
234 ::uint8_t* _InternalSerialize(
235 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
236 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
237
238 private:
239 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
240 void SharedDtor();
241 void SetCachedSize(int size) const;
242 void InternalSwap(StructuredValue* other);
243
244 private:
245 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
246 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
247 return "tensorflow.StructuredValue";
248 }
249 protected:
250 explicit StructuredValue(::PROTOBUF_NAMESPACE_ID::Arena* arena,
251 bool is_message_owned = false);
252 public:
253
254 std::string GetTypeName() const final;
255
256 // nested types ----------------------------------------------------
257
258 // accessors -------------------------------------------------------
259
260 enum : int {
261 kNoneValueFieldNumber = 1,
262 kFloat64ValueFieldNumber = 11,
263 kInt64ValueFieldNumber = 12,
264 kStringValueFieldNumber = 13,
265 kBoolValueFieldNumber = 14,
266 kTensorShapeValueFieldNumber = 31,
267 kTensorDtypeValueFieldNumber = 32,
268 kTensorSpecValueFieldNumber = 33,
269 kTypeSpecValueFieldNumber = 34,
270 kBoundedTensorSpecValueFieldNumber = 35,
271 kListValueFieldNumber = 51,
272 kTupleValueFieldNumber = 52,
273 kDictValueFieldNumber = 53,
274 kNamedTupleValueFieldNumber = 54,
275 };
276 // .tensorflow.NoneValue none_value = 1;
277 bool has_none_value() const;
278 private:
279 bool _internal_has_none_value() const;
280 public:
281 void clear_none_value();
282 const ::tensorflow::NoneValue& none_value() const;
283 PROTOBUF_NODISCARD ::tensorflow::NoneValue* release_none_value();
284 ::tensorflow::NoneValue* mutable_none_value();
285 void set_allocated_none_value(::tensorflow::NoneValue* none_value);
286 private:
287 const ::tensorflow::NoneValue& _internal_none_value() const;
288 ::tensorflow::NoneValue* _internal_mutable_none_value();
289 public:
290 void unsafe_arena_set_allocated_none_value(
291 ::tensorflow::NoneValue* none_value);
292 ::tensorflow::NoneValue* unsafe_arena_release_none_value();
293
294 // double float64_value = 11;
295 bool has_float64_value() const;
296 private:
297 bool _internal_has_float64_value() const;
298 public:
299 void clear_float64_value();
300 double float64_value() const;
301 void set_float64_value(double value);
302 private:
303 double _internal_float64_value() const;
304 void _internal_set_float64_value(double value);
305 public:
306
307 // sint64 int64_value = 12;
308 bool has_int64_value() const;
309 private:
310 bool _internal_has_int64_value() const;
311 public:
312 void clear_int64_value();
313 ::int64_t int64_value() const;
314 void set_int64_value(::int64_t value);
315 private:
316 ::int64_t _internal_int64_value() const;
317 void _internal_set_int64_value(::int64_t value);
318 public:
319
320 // string string_value = 13;
321 bool has_string_value() const;
322 private:
323 bool _internal_has_string_value() const;
324 public:
325 void clear_string_value();
326 const std::string& string_value() const;
327 template <typename ArgT0 = const std::string&, typename... ArgT>
328 void set_string_value(ArgT0&& arg0, ArgT... args);
329 std::string* mutable_string_value();
330 PROTOBUF_NODISCARD std::string* release_string_value();
331 void set_allocated_string_value(std::string* string_value);
332 private:
333 const std::string& _internal_string_value() const;
334 inline PROTOBUF_ALWAYS_INLINE void _internal_set_string_value(const std::string& value);
335 std::string* _internal_mutable_string_value();
336 public:
337
338 // bool bool_value = 14;
339 bool has_bool_value() const;
340 private:
341 bool _internal_has_bool_value() const;
342 public:
343 void clear_bool_value();
344 bool bool_value() const;
345 void set_bool_value(bool value);
346 private:
347 bool _internal_bool_value() const;
348 void _internal_set_bool_value(bool value);
349 public:
350
351 // .tensorflow.TensorShapeProto tensor_shape_value = 31;
352 bool has_tensor_shape_value() const;
353 private:
354 bool _internal_has_tensor_shape_value() const;
355 public:
356 void clear_tensor_shape_value();
357 const ::tensorflow::TensorShapeProto& tensor_shape_value() const;
358 PROTOBUF_NODISCARD ::tensorflow::TensorShapeProto* release_tensor_shape_value();
359 ::tensorflow::TensorShapeProto* mutable_tensor_shape_value();
360 void set_allocated_tensor_shape_value(::tensorflow::TensorShapeProto* tensor_shape_value);
361 private:
362 const ::tensorflow::TensorShapeProto& _internal_tensor_shape_value() const;
363 ::tensorflow::TensorShapeProto* _internal_mutable_tensor_shape_value();
364 public:
365 void unsafe_arena_set_allocated_tensor_shape_value(
366 ::tensorflow::TensorShapeProto* tensor_shape_value);
367 ::tensorflow::TensorShapeProto* unsafe_arena_release_tensor_shape_value();
368
369 // .tensorflow.DataType tensor_dtype_value = 32;
370 bool has_tensor_dtype_value() const;
371 private:
372 bool _internal_has_tensor_dtype_value() const;
373 public:
374 void clear_tensor_dtype_value();
375 ::tensorflow::DataType tensor_dtype_value() const;
376 void set_tensor_dtype_value(::tensorflow::DataType value);
377 private:
378 ::tensorflow::DataType _internal_tensor_dtype_value() const;
379 void _internal_set_tensor_dtype_value(::tensorflow::DataType value);
380 public:
381
382 // .tensorflow.TensorSpecProto tensor_spec_value = 33;
383 bool has_tensor_spec_value() const;
384 private:
385 bool _internal_has_tensor_spec_value() const;
386 public:
387 void clear_tensor_spec_value();
388 const ::tensorflow::TensorSpecProto& tensor_spec_value() const;
389 PROTOBUF_NODISCARD ::tensorflow::TensorSpecProto* release_tensor_spec_value();
390 ::tensorflow::TensorSpecProto* mutable_tensor_spec_value();
391 void set_allocated_tensor_spec_value(::tensorflow::TensorSpecProto* tensor_spec_value);
392 private:
393 const ::tensorflow::TensorSpecProto& _internal_tensor_spec_value() const;
394 ::tensorflow::TensorSpecProto* _internal_mutable_tensor_spec_value();
395 public:
396 void unsafe_arena_set_allocated_tensor_spec_value(
397 ::tensorflow::TensorSpecProto* tensor_spec_value);
398 ::tensorflow::TensorSpecProto* unsafe_arena_release_tensor_spec_value();
399
400 // .tensorflow.TypeSpecProto type_spec_value = 34;
401 bool has_type_spec_value() const;
402 private:
403 bool _internal_has_type_spec_value() const;
404 public:
405 void clear_type_spec_value();
406 const ::tensorflow::TypeSpecProto& type_spec_value() const;
407 PROTOBUF_NODISCARD ::tensorflow::TypeSpecProto* release_type_spec_value();
408 ::tensorflow::TypeSpecProto* mutable_type_spec_value();
409 void set_allocated_type_spec_value(::tensorflow::TypeSpecProto* type_spec_value);
410 private:
411 const ::tensorflow::TypeSpecProto& _internal_type_spec_value() const;
412 ::tensorflow::TypeSpecProto* _internal_mutable_type_spec_value();
413 public:
414 void unsafe_arena_set_allocated_type_spec_value(
415 ::tensorflow::TypeSpecProto* type_spec_value);
416 ::tensorflow::TypeSpecProto* unsafe_arena_release_type_spec_value();
417
418 // .tensorflow.BoundedTensorSpecProto bounded_tensor_spec_value = 35;
419 bool has_bounded_tensor_spec_value() const;
420 private:
421 bool _internal_has_bounded_tensor_spec_value() const;
422 public:
423 void clear_bounded_tensor_spec_value();
424 const ::tensorflow::BoundedTensorSpecProto& bounded_tensor_spec_value() const;
425 PROTOBUF_NODISCARD ::tensorflow::BoundedTensorSpecProto* release_bounded_tensor_spec_value();
426 ::tensorflow::BoundedTensorSpecProto* mutable_bounded_tensor_spec_value();
427 void set_allocated_bounded_tensor_spec_value(::tensorflow::BoundedTensorSpecProto* bounded_tensor_spec_value);
428 private:
429 const ::tensorflow::BoundedTensorSpecProto& _internal_bounded_tensor_spec_value() const;
430 ::tensorflow::BoundedTensorSpecProto* _internal_mutable_bounded_tensor_spec_value();
431 public:
432 void unsafe_arena_set_allocated_bounded_tensor_spec_value(
433 ::tensorflow::BoundedTensorSpecProto* bounded_tensor_spec_value);
434 ::tensorflow::BoundedTensorSpecProto* unsafe_arena_release_bounded_tensor_spec_value();
435
436 // .tensorflow.ListValue list_value = 51;
437 bool has_list_value() const;
438 private:
439 bool _internal_has_list_value() const;
440 public:
441 void clear_list_value();
442 const ::tensorflow::ListValue& list_value() const;
443 PROTOBUF_NODISCARD ::tensorflow::ListValue* release_list_value();
444 ::tensorflow::ListValue* mutable_list_value();
445 void set_allocated_list_value(::tensorflow::ListValue* list_value);
446 private:
447 const ::tensorflow::ListValue& _internal_list_value() const;
448 ::tensorflow::ListValue* _internal_mutable_list_value();
449 public:
450 void unsafe_arena_set_allocated_list_value(
451 ::tensorflow::ListValue* list_value);
452 ::tensorflow::ListValue* unsafe_arena_release_list_value();
453
454 // .tensorflow.TupleValue tuple_value = 52;
455 bool has_tuple_value() const;
456 private:
457 bool _internal_has_tuple_value() const;
458 public:
459 void clear_tuple_value();
460 const ::tensorflow::TupleValue& tuple_value() const;
461 PROTOBUF_NODISCARD ::tensorflow::TupleValue* release_tuple_value();
462 ::tensorflow::TupleValue* mutable_tuple_value();
463 void set_allocated_tuple_value(::tensorflow::TupleValue* tuple_value);
464 private:
465 const ::tensorflow::TupleValue& _internal_tuple_value() const;
466 ::tensorflow::TupleValue* _internal_mutable_tuple_value();
467 public:
468 void unsafe_arena_set_allocated_tuple_value(
469 ::tensorflow::TupleValue* tuple_value);
470 ::tensorflow::TupleValue* unsafe_arena_release_tuple_value();
471
472 // .tensorflow.DictValue dict_value = 53;
473 bool has_dict_value() const;
474 private:
475 bool _internal_has_dict_value() const;
476 public:
477 void clear_dict_value();
478 const ::tensorflow::DictValue& dict_value() const;
479 PROTOBUF_NODISCARD ::tensorflow::DictValue* release_dict_value();
480 ::tensorflow::DictValue* mutable_dict_value();
481 void set_allocated_dict_value(::tensorflow::DictValue* dict_value);
482 private:
483 const ::tensorflow::DictValue& _internal_dict_value() const;
484 ::tensorflow::DictValue* _internal_mutable_dict_value();
485 public:
486 void unsafe_arena_set_allocated_dict_value(
487 ::tensorflow::DictValue* dict_value);
488 ::tensorflow::DictValue* unsafe_arena_release_dict_value();
489
490 // .tensorflow.NamedTupleValue named_tuple_value = 54;
491 bool has_named_tuple_value() const;
492 private:
493 bool _internal_has_named_tuple_value() const;
494 public:
495 void clear_named_tuple_value();
496 const ::tensorflow::NamedTupleValue& named_tuple_value() const;
497 PROTOBUF_NODISCARD ::tensorflow::NamedTupleValue* release_named_tuple_value();
498 ::tensorflow::NamedTupleValue* mutable_named_tuple_value();
499 void set_allocated_named_tuple_value(::tensorflow::NamedTupleValue* named_tuple_value);
500 private:
501 const ::tensorflow::NamedTupleValue& _internal_named_tuple_value() const;
502 ::tensorflow::NamedTupleValue* _internal_mutable_named_tuple_value();
503 public:
504 void unsafe_arena_set_allocated_named_tuple_value(
505 ::tensorflow::NamedTupleValue* named_tuple_value);
506 ::tensorflow::NamedTupleValue* unsafe_arena_release_named_tuple_value();
507
508 void clear_kind();
509 KindCase kind_case() const;
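// Usage sketch (illustrative only): the `kind` oneof holds at most one of the
// fields above; kind_case() reports which one is currently set.
//
//   tensorflow::StructuredValue v;
//   v.set_int64_value(42);
//   switch (v.kind_case()) {
//     case tensorflow::StructuredValue::kInt64Value:
//       // v.int64_value() == 42; setting any other kind field clears this one.
//       break;
//     case tensorflow::StructuredValue::KIND_NOT_SET:
//     default:
//       break;
//   }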
510 // @@protoc_insertion_point(class_scope:tensorflow.StructuredValue)
511 private:
512 class _Internal;
513 void set_has_none_value();
514 void set_has_float64_value();
515 void set_has_int64_value();
516 void set_has_string_value();
517 void set_has_bool_value();
518 void set_has_tensor_shape_value();
519 void set_has_tensor_dtype_value();
520 void set_has_tensor_spec_value();
521 void set_has_type_spec_value();
522 void set_has_bounded_tensor_spec_value();
523 void set_has_list_value();
524 void set_has_tuple_value();
525 void set_has_dict_value();
526 void set_has_named_tuple_value();
527
528 inline bool has_kind() const;
529 inline void clear_has_kind();
530
531 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
532 typedef void InternalArenaConstructable_;
533 typedef void DestructorSkippable_;
534 struct Impl_ {
535 union KindUnion {
536 constexpr KindUnion() : _constinit_{} {}
537 ::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized _constinit_;
538 ::tensorflow::NoneValue* none_value_;
539 double float64_value_;
540 ::int64_t int64_value_;
541 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr string_value_;
542 bool bool_value_;
543 ::tensorflow::TensorShapeProto* tensor_shape_value_;
544 int tensor_dtype_value_;
545 ::tensorflow::TensorSpecProto* tensor_spec_value_;
546 ::tensorflow::TypeSpecProto* type_spec_value_;
547 ::tensorflow::BoundedTensorSpecProto* bounded_tensor_spec_value_;
548 ::tensorflow::ListValue* list_value_;
549 ::tensorflow::TupleValue* tuple_value_;
550 ::tensorflow::DictValue* dict_value_;
551 ::tensorflow::NamedTupleValue* named_tuple_value_;
552 } kind_;
553 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
554 ::uint32_t _oneof_case_[1];
555
556 };
557 union { Impl_ _impl_; };
558 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fstruct_2eproto;
559 };
560 // -------------------------------------------------------------------
561
562 class NoneValue final :
563 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.NoneValue) */ {
564 public:
565 inline NoneValue() : NoneValue(nullptr) {}
566 ~NoneValue() override;
567 explicit PROTOBUF_CONSTEXPR NoneValue(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
568
569 NoneValue(const NoneValue& from);
570 NoneValue(NoneValue&& from) noexcept
571 : NoneValue() {
572 *this = ::std::move(from);
573 }
574
575 inline NoneValue& operator=(const NoneValue& from) {
576 if (this == &from) return *this;
577 CopyFrom(from);
578 return *this;
579 }
580 inline NoneValue& operator=(NoneValue&& from) noexcept {
581 if (this == &from) return *this;
582 if (GetOwningArena() == from.GetOwningArena()
583 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
584 && GetOwningArena() != nullptr
585 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
586 ) {
587 InternalSwap(&from);
588 } else {
589 CopyFrom(from);
590 }
591 return *this;
592 }
593
594 static const NoneValue& default_instance() {
595 return *internal_default_instance();
596 }
597 static inline const NoneValue* internal_default_instance() {
598 return reinterpret_cast<const NoneValue*>(
599 &_NoneValue_default_instance_);
600 }
601 static constexpr int kIndexInFileMessages =
602 1;
603
604 friend void swap(NoneValue& a, NoneValue& b) {
605 a.Swap(&b);
606 }
607 inline void Swap(NoneValue* other) {
608 if (other == this) return;
609 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
610 if (GetOwningArena() != nullptr &&
611 GetOwningArena() == other->GetOwningArena()) {
612 #else // PROTOBUF_FORCE_COPY_IN_SWAP
613 if (GetOwningArena() == other->GetOwningArena()) {
614 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
615 InternalSwap(other);
616 } else {
617 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
618 }
619 }
620 void UnsafeArenaSwap(NoneValue* other) {
621 if (other == this) return;
622 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
623 InternalSwap(other);
624 }
625
626 // implements Message ----------------------------------------------
627
628 NoneValue* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
629 return CreateMaybeMessage<NoneValue>(arena);
630 }
631 NoneValue* New() const {
632 return New(nullptr);
633 }
634 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
635 void CopyFrom(const NoneValue& from);
636 void MergeFrom(const NoneValue& from);
637 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
638 bool IsInitialized() const final;
639
640 size_t ByteSizeLong() const final;
641 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
642 ::uint8_t* _InternalSerialize(
643 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
644 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
645
646 private:
647 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
648 void SharedDtor();
649 void SetCachedSize(int size) const;
650 void InternalSwap(NoneValue* other);
651
652 private:
653 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
654 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
655 return "tensorflow.NoneValue";
656 }
657 protected:
658 explicit NoneValue(::PROTOBUF_NAMESPACE_ID::Arena* arena,
659 bool is_message_owned = false);
660 public:
661
662 std::string GetTypeName() const final;
663
664 // nested types ----------------------------------------------------
665
666 // accessors -------------------------------------------------------
667
668 // @@protoc_insertion_point(class_scope:tensorflow.NoneValue)
669 private:
670 class _Internal;
671
672 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
673 typedef void InternalArenaConstructable_;
674 typedef void DestructorSkippable_;
675 struct Impl_ {
676 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
677 };
678 union { Impl_ _impl_; };
679 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fstruct_2eproto;
680 };
681 // -------------------------------------------------------------------
682
683 class ListValue final :
684 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.ListValue) */ {
685 public:
686 inline ListValue() : ListValue(nullptr) {}
687 ~ListValue() override;
688 explicit PROTOBUF_CONSTEXPR ListValue(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
689
690 ListValue(const ListValue& from);
691 ListValue(ListValue&& from) noexcept
692 : ListValue() {
693 *this = ::std::move(from);
694 }
695
696 inline ListValue& operator=(const ListValue& from) {
697 if (this == &from) return *this;
698 CopyFrom(from);
699 return *this;
700 }
701 inline ListValue& operator=(ListValue&& from) noexcept {
702 if (this == &from) return *this;
703 if (GetOwningArena() == from.GetOwningArena()
704 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
705 && GetOwningArena() != nullptr
706 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
707 ) {
708 InternalSwap(&from);
709 } else {
710 CopyFrom(from);
711 }
712 return *this;
713 }
714
715 static const ListValue& default_instance() {
716 return *internal_default_instance();
717 }
718 static inline const ListValue* internal_default_instance() {
719 return reinterpret_cast<const ListValue*>(
720 &_ListValue_default_instance_);
721 }
722 static constexpr int kIndexInFileMessages =
723 2;
724
725 friend void swap(ListValue& a, ListValue& b) {
726 a.Swap(&b);
727 }
728 inline void Swap(ListValue* other) {
729 if (other == this) return;
730 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
731 if (GetOwningArena() != nullptr &&
732 GetOwningArena() == other->GetOwningArena()) {
733 #else // PROTOBUF_FORCE_COPY_IN_SWAP
734 if (GetOwningArena() == other->GetOwningArena()) {
735 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
736 InternalSwap(other);
737 } else {
738 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
739 }
740 }
741 void UnsafeArenaSwap(ListValue* other) {
742 if (other == this) return;
743 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
744 InternalSwap(other);
745 }
746
747 // implements Message ----------------------------------------------
748
749 ListValue* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
750 return CreateMaybeMessage<ListValue>(arena);
751 }
752 ListValue* New() const {
753 return New(nullptr);
754 }
755 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
756 void CopyFrom(const ListValue& from);
757 void MergeFrom(const ListValue& from);
758 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
759 bool IsInitialized() const final;
760
761 size_t ByteSizeLong() const final;
762 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
763 ::uint8_t* _InternalSerialize(
764 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
765 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
766
767 private:
768 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
769 void SharedDtor();
770 void SetCachedSize(int size) const;
771 void InternalSwap(ListValue* other);
772
773 private:
774 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
775 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
776 return "tensorflow.ListValue";
777 }
778 protected:
779 explicit ListValue(::PROTOBUF_NAMESPACE_ID::Arena* arena,
780 bool is_message_owned = false);
781 public:
782
783 std::string GetTypeName() const final;
784
785 // nested types ----------------------------------------------------
786
787 // accessors -------------------------------------------------------
788
789 enum : int {
790 kValuesFieldNumber = 1,
791 };
792 // repeated .tensorflow.StructuredValue values = 1;
793 int values_size() const;
794 private:
795 int _internal_values_size() const;
796 public:
797 void clear_values();
798 ::tensorflow::StructuredValue* mutable_values(int index);
799 ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::StructuredValue >*
800 mutable_values();
801 private:
802 const ::tensorflow::StructuredValue& _internal_values(int index) const;
803 ::tensorflow::StructuredValue* _internal_add_values();
804 public:
805 const ::tensorflow::StructuredValue& values(int index) const;
806 ::tensorflow::StructuredValue* add_values();
807 const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::StructuredValue >&
808 values() const;
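// Usage sketch (illustrative only): appending to and iterating the repeated
// `values` field declared above.
//
//   tensorflow::ListValue list;
//   list.add_values()->set_string_value("hello");
//   list.add_values()->set_float64_value(3.14);
//   for (const tensorflow::StructuredValue& v : list.values()) {
//     // dispatch on v.kind_case()
//   }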
809
810 // @@protoc_insertion_point(class_scope:tensorflow.ListValue)
811 private:
812 class _Internal;
813
814 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
815 typedef void InternalArenaConstructable_;
816 typedef void DestructorSkippable_;
817 struct Impl_ {
818 ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::StructuredValue > values_;
819 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
820 };
821 union { Impl_ _impl_; };
822 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fstruct_2eproto;
823 };
824 // -------------------------------------------------------------------
825
826 class TupleValue final :
827 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.TupleValue) */ {
828 public:
829 inline TupleValue() : TupleValue(nullptr) {}
830 ~TupleValue() override;
831 explicit PROTOBUF_CONSTEXPR TupleValue(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
832
833 TupleValue(const TupleValue& from);
834 TupleValue(TupleValue&& from) noexcept
835 : TupleValue() {
836 *this = ::std::move(from);
837 }
838
839 inline TupleValue& operator=(const TupleValue& from) {
840 if (this == &from) return *this;
841 CopyFrom(from);
842 return *this;
843 }
844 inline TupleValue& operator=(TupleValue&& from) noexcept {
845 if (this == &from) return *this;
846 if (GetOwningArena() == from.GetOwningArena()
847 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
848 && GetOwningArena() != nullptr
849 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
850 ) {
851 InternalSwap(&from);
852 } else {
853 CopyFrom(from);
854 }
855 return *this;
856 }
857
858 static const TupleValue& default_instance() {
859 return *internal_default_instance();
860 }
861 static inline const TupleValue* internal_default_instance() {
862 return reinterpret_cast<const TupleValue*>(
863 &_TupleValue_default_instance_);
864 }
865 static constexpr int kIndexInFileMessages =
866 3;
867
868 friend void swap(TupleValue& a, TupleValue& b) {
869 a.Swap(&b);
870 }
871 inline void Swap(TupleValue* other) {
872 if (other == this) return;
873 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
874 if (GetOwningArena() != nullptr &&
875 GetOwningArena() == other->GetOwningArena()) {
876 #else // PROTOBUF_FORCE_COPY_IN_SWAP
877 if (GetOwningArena() == other->GetOwningArena()) {
878 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
879 InternalSwap(other);
880 } else {
881 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
882 }
883 }
884 void UnsafeArenaSwap(TupleValue* other) {
885 if (other == this) return;
886 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
887 InternalSwap(other);
888 }
889
890 // implements Message ----------------------------------------------
891
892 TupleValue* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
893 return CreateMaybeMessage<TupleValue>(arena);
894 }
895 TupleValue* New() const {
896 return New(nullptr);
897 }
898 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
899 void CopyFrom(const TupleValue& from);
900 void MergeFrom(const TupleValue& from);
901 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
902 bool IsInitialized() const final;
903
904 size_t ByteSizeLong() const final;
905 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
906 ::uint8_t* _InternalSerialize(
907 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
908 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
909
910 private:
911 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
912 void SharedDtor();
913 void SetCachedSize(int size) const;
914 void InternalSwap(TupleValue* other);
915
916 private:
917 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
918 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
919 return "tensorflow.TupleValue";
920 }
921 protected:
922 explicit TupleValue(::PROTOBUF_NAMESPACE_ID::Arena* arena,
923 bool is_message_owned = false);
924 public:
925
926 std::string GetTypeName() const final;
927
928 // nested types ----------------------------------------------------
929
930 // accessors -------------------------------------------------------
931
932 enum : int {
933 kValuesFieldNumber = 1,
934 };
935 // repeated .tensorflow.StructuredValue values = 1;
936 int values_size() const;
937 private:
938 int _internal_values_size() const;
939 public:
940 void clear_values();
941 ::tensorflow::StructuredValue* mutable_values(int index);
942 ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::StructuredValue >*
943 mutable_values();
944 private:
945 const ::tensorflow::StructuredValue& _internal_values(int index) const;
946 ::tensorflow::StructuredValue* _internal_add_values();
947 public:
948 const ::tensorflow::StructuredValue& values(int index) const;
949 ::tensorflow::StructuredValue* add_values();
950 const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::StructuredValue >&
951 values() const;
952
953 // @@protoc_insertion_point(class_scope:tensorflow.TupleValue)
954 private:
955 class _Internal;
956
957 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
958 typedef void InternalArenaConstructable_;
959 typedef void DestructorSkippable_;
960 struct Impl_ {
961 ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::StructuredValue > values_;
962 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
963 };
964 union { Impl_ _impl_; };
965 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fstruct_2eproto;
966 };
967 // -------------------------------------------------------------------
968
969 class DictValue_FieldsEntry_DoNotUse : public ::PROTOBUF_NAMESPACE_ID::internal::MapEntryLite<DictValue_FieldsEntry_DoNotUse,
970 std::string, ::tensorflow::StructuredValue,
971 ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_STRING,
972 ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_MESSAGE> {
973 public:
974 typedef ::PROTOBUF_NAMESPACE_ID::internal::MapEntryLite<DictValue_FieldsEntry_DoNotUse,
975 std::string, ::tensorflow::StructuredValue,
976 ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_STRING,
977 ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_MESSAGE> SuperType;
978 DictValue_FieldsEntry_DoNotUse();
979 explicit PROTOBUF_CONSTEXPR DictValue_FieldsEntry_DoNotUse(
980 ::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
981 explicit DictValue_FieldsEntry_DoNotUse(::PROTOBUF_NAMESPACE_ID::Arena* arena);
982 void MergeFrom(const DictValue_FieldsEntry_DoNotUse& other);
983 static const DictValue_FieldsEntry_DoNotUse* internal_default_instance() { return reinterpret_cast<const DictValue_FieldsEntry_DoNotUse*>(&_DictValue_FieldsEntry_DoNotUse_default_instance_); }
984 static bool ValidateKey(std::string* s) {
985 return ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String(s->data(), static_cast<int>(s->size()), ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::PARSE, "tensorflow.DictValue.FieldsEntry.key");
986 }
987 static bool ValidateValue(void*) { return true; }
988 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fstruct_2eproto;
989 };
990
991 // -------------------------------------------------------------------
992
993 class DictValue final :
994 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.DictValue) */ {
995 public:
996 inline DictValue() : DictValue(nullptr) {}
997 ~DictValue() override;
998 explicit PROTOBUF_CONSTEXPR DictValue(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
999
1000 DictValue(const DictValue& from);
1001 DictValue(DictValue&& from) noexcept
1002 : DictValue() {
1003 *this = ::std::move(from);
1004 }
1005
1006 inline DictValue& operator=(const DictValue& from) {
1007 if (this == &from) return *this;
1008 CopyFrom(from);
1009 return *this;
1010 }
1011 inline DictValue& operator=(DictValue&& from) noexcept {
1012 if (this == &from) return *this;
1013 if (GetOwningArena() == from.GetOwningArena()
1014 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
1015 && GetOwningArena() != nullptr
1016 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
1017 ) {
1018 InternalSwap(&from);
1019 } else {
1020 CopyFrom(from);
1021 }
1022 return *this;
1023 }
1024
1025 static const DictValue& default_instance() {
1026 return *internal_default_instance();
1027 }
1028 static inline const DictValue* internal_default_instance() {
1029 return reinterpret_cast<const DictValue*>(
1030 &_DictValue_default_instance_);
1031 }
1032 static constexpr int kIndexInFileMessages =
1033 5;
1034
1035 friend void swap(DictValue& a, DictValue& b) {
1036 a.Swap(&b);
1037 }
1038 inline void Swap(DictValue* other) {
1039 if (other == this) return;
1040 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
1041 if (GetOwningArena() != nullptr &&
1042 GetOwningArena() == other->GetOwningArena()) {
1043 #else // PROTOBUF_FORCE_COPY_IN_SWAP
1044 if (GetOwningArena() == other->GetOwningArena()) {
1045 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
1046 InternalSwap(other);
1047 } else {
1048 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
1049 }
1050 }
1051 void UnsafeArenaSwap(DictValue* other) {
1052 if (other == this) return;
1053 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
1054 InternalSwap(other);
1055 }
1056
1057 // implements Message ----------------------------------------------
1058
1059 DictValue* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
1060 return CreateMaybeMessage<DictValue>(arena);
1061 }
1062 DictValue* New() const {
1063 return New(nullptr);
1064 }
1065 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
1066 void CopyFrom(const DictValue& from);
1067 void MergeFrom(const DictValue& from);
1068 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
1069 bool IsInitialized() const final;
1070
1071 size_t ByteSizeLong() const final;
1072 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
1073 ::uint8_t* _InternalSerialize(
1074 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
1075 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
1076
1077 private:
1078 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
1079 void SharedDtor();
1080 void SetCachedSize(int size) const;
1081 void InternalSwap(DictValue* other);
1082
1083 private:
1084 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
1085 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
1086 return "tensorflow.DictValue";
1087 }
1088 protected:
1089 explicit DictValue(::PROTOBUF_NAMESPACE_ID::Arena* arena,
1090 bool is_message_owned = false);
1091 public:
1092
1093 std::string GetTypeName() const final;
1094
1095 // nested types ----------------------------------------------------
1096
1097
1098 // accessors -------------------------------------------------------
1099
1100 enum : int {
1101 kFieldsFieldNumber = 1,
1102 };
1103 // map<string, .tensorflow.StructuredValue> fields = 1;
1104 int fields_size() const;
1105 private:
1106 int _internal_fields_size() const;
1107 public:
1108 void clear_fields();
1109 private:
1110 const ::PROTOBUF_NAMESPACE_ID::Map< std::string, ::tensorflow::StructuredValue >&
1111 _internal_fields() const;
1112 ::PROTOBUF_NAMESPACE_ID::Map< std::string, ::tensorflow::StructuredValue >*
1113 _internal_mutable_fields();
1114 public:
1115 const ::PROTOBUF_NAMESPACE_ID::Map< std::string, ::tensorflow::StructuredValue >&
1116 fields() const;
1117 ::PROTOBUF_NAMESPACE_ID::Map< std::string, ::tensorflow::StructuredValue >*
1118 mutable_fields();
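// Usage sketch (illustrative only): writing to and reading from the map
// `fields` declared above.
//
//   tensorflow::DictValue dict;
//   (*dict.mutable_fields())["answer"].set_int64_value(42);
//   auto it = dict.fields().find("answer");
//   if (it != dict.fields().end()) {
//     // it->second.int64_value() == 42
//   }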
1119
1120 // @@protoc_insertion_point(class_scope:tensorflow.DictValue)
1121 private:
1122 class _Internal;
1123
1124 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
1125 typedef void InternalArenaConstructable_;
1126 typedef void DestructorSkippable_;
1127 struct Impl_ {
1128 ::PROTOBUF_NAMESPACE_ID::internal::MapFieldLite<
1129 DictValue_FieldsEntry_DoNotUse,
1130 std::string, ::tensorflow::StructuredValue,
1131 ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_STRING,
1132 ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::TYPE_MESSAGE> fields_;
1133 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
1134 };
1135 union { Impl_ _impl_; };
1136 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fstruct_2eproto;
1137 };
1138 // -------------------------------------------------------------------
1139
1140 class PairValue final :
1141 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.PairValue) */ {
1142 public:
1143 inline PairValue() : PairValue(nullptr) {}
1144 ~PairValue() override;
1145 explicit PROTOBUF_CONSTEXPR PairValue(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
1146
1147 PairValue(const PairValue& from);
1148 PairValue(PairValue&& from) noexcept
1149 : PairValue() {
1150 *this = ::std::move(from);
1151 }
1152
1153 inline PairValue& operator=(const PairValue& from) {
1154 if (this == &from) return *this;
1155 CopyFrom(from);
1156 return *this;
1157 }
1158 inline PairValue& operator=(PairValue&& from) noexcept {
1159 if (this == &from) return *this;
1160 if (GetOwningArena() == from.GetOwningArena()
1161 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
1162 && GetOwningArena() != nullptr
1163 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
1164 ) {
1165 InternalSwap(&from);
1166 } else {
1167 CopyFrom(from);
1168 }
1169 return *this;
1170 }
1171
1172 static const PairValue& default_instance() {
1173 return *internal_default_instance();
1174 }
1175 static inline const PairValue* internal_default_instance() {
1176 return reinterpret_cast<const PairValue*>(
1177 &_PairValue_default_instance_);
1178 }
1179 static constexpr int kIndexInFileMessages =
1180 6;
1181
1182 friend void swap(PairValue& a, PairValue& b) {
1183 a.Swap(&b);
1184 }
1185 inline void Swap(PairValue* other) {
1186 if (other == this) return;
1187 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
1188 if (GetOwningArena() != nullptr &&
1189 GetOwningArena() == other->GetOwningArena()) {
1190 #else // PROTOBUF_FORCE_COPY_IN_SWAP
1191 if (GetOwningArena() == other->GetOwningArena()) {
1192 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
1193 InternalSwap(other);
1194 } else {
1195 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
1196 }
1197 }
1198 void UnsafeArenaSwap(PairValue* other) {
1199 if (other == this) return;
1200 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
1201 InternalSwap(other);
1202 }
1203
1204 // implements Message ----------------------------------------------
1205
1206 PairValue* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
1207 return CreateMaybeMessage<PairValue>(arena);
1208 }
1209 PairValue* New() const {
1210 return New(nullptr);
1211 }
1212 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
1213 void CopyFrom(const PairValue& from);
1214 void MergeFrom(const PairValue& from);
1215 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
1216 bool IsInitialized() const final;
1217
1218 size_t ByteSizeLong() const final;
1219 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
1220 ::uint8_t* _InternalSerialize(
1221 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
1222 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
1223
1224 private:
1225 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
1226 void SharedDtor();
1227 void SetCachedSize(int size) const;
1228 void InternalSwap(PairValue* other);
1229
1230 private:
1231 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
1232 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
1233 return "tensorflow.PairValue";
1234 }
1235 protected:
1236 explicit PairValue(::PROTOBUF_NAMESPACE_ID::Arena* arena,
1237 bool is_message_owned = false);
1238 public:
1239
1240 std::string GetTypeName() const final;
1241
1242 // nested types ----------------------------------------------------
1243
1244 // accessors -------------------------------------------------------
1245
1246 enum : int {
1247 kKeyFieldNumber = 1,
1248 kValueFieldNumber = 2,
1249 };
1250 // string key = 1;
1251 void clear_key();
1252 const std::string& key() const;
1253 template <typename ArgT0 = const std::string&, typename... ArgT>
1254 void set_key(ArgT0&& arg0, ArgT... args);
1255 std::string* mutable_key();
1256 PROTOBUF_NODISCARD std::string* release_key();
1257 void set_allocated_key(std::string* key);
1258 private:
1259 const std::string& _internal_key() const;
1260 inline PROTOBUF_ALWAYS_INLINE void _internal_set_key(const std::string& value);
1261 std::string* _internal_mutable_key();
1262 public:
1263
1264 // .tensorflow.StructuredValue value = 2;
1265 bool has_value() const;
1266 private:
1267 bool _internal_has_value() const;
1268 public:
1269 void clear_value();
1270 const ::tensorflow::StructuredValue& value() const;
1271 PROTOBUF_NODISCARD ::tensorflow::StructuredValue* release_value();
1272 ::tensorflow::StructuredValue* mutable_value();
1273 void set_allocated_value(::tensorflow::StructuredValue* value);
1274 private:
1275 const ::tensorflow::StructuredValue& _internal_value() const;
1276 ::tensorflow::StructuredValue* _internal_mutable_value();
1277 public:
1278 void unsafe_arena_set_allocated_value(
1279 ::tensorflow::StructuredValue* value);
1280 ::tensorflow::StructuredValue* unsafe_arena_release_value();
1281
1282 // @@protoc_insertion_point(class_scope:tensorflow.PairValue)
1283 private:
1284 class _Internal;
1285
1286 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
1287 typedef void InternalArenaConstructable_;
1288 typedef void DestructorSkippable_;
1289 struct Impl_ {
1290 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr key_;
1291 ::tensorflow::StructuredValue* value_;
1292 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
1293 };
1294 union { Impl_ _impl_; };
1295 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fstruct_2eproto;
1296 };
1297 // -------------------------------------------------------------------
1298
1299 class NamedTupleValue final :
1300 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.NamedTupleValue) */ {
1301 public:
1302 inline NamedTupleValue() : NamedTupleValue(nullptr) {}
1303 ~NamedTupleValue() override;
1304 explicit PROTOBUF_CONSTEXPR NamedTupleValue(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
1305
1306 NamedTupleValue(const NamedTupleValue& from);
1307 NamedTupleValue(NamedTupleValue&& from) noexcept
1308 : NamedTupleValue() {
1309 *this = ::std::move(from);
1310 }
1311
1312 inline NamedTupleValue& operator=(const NamedTupleValue& from) {
1313 if (this == &from) return *this;
1314 CopyFrom(from);
1315 return *this;
1316 }
1317 inline NamedTupleValue& operator=(NamedTupleValue&& from) noexcept {
1318 if (this == &from) return *this;
1319 if (GetOwningArena() == from.GetOwningArena()
1320 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
1321 && GetOwningArena() != nullptr
1322 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
1323 ) {
1324 InternalSwap(&from);
1325 } else {
1326 CopyFrom(from);
1327 }
1328 return *this;
1329 }
1330
1331 static const NamedTupleValue& default_instance() {
1332 return *internal_default_instance();
1333 }
1334 static inline const NamedTupleValue* internal_default_instance() {
1335 return reinterpret_cast<const NamedTupleValue*>(
1336 &_NamedTupleValue_default_instance_);
1337 }
1338 static constexpr int kIndexInFileMessages =
1339 7;
1340
1341 friend void swap(NamedTupleValue& a, NamedTupleValue& b) {
1342 a.Swap(&b);
1343 }
1344 inline void Swap(NamedTupleValue* other) {
1345 if (other == this) return;
1346 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
1347 if (GetOwningArena() != nullptr &&
1348 GetOwningArena() == other->GetOwningArena()) {
1349 #else // PROTOBUF_FORCE_COPY_IN_SWAP
1350 if (GetOwningArena() == other->GetOwningArena()) {
1351 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
1352 InternalSwap(other);
1353 } else {
1354 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
1355 }
1356 }
1357 void UnsafeArenaSwap(NamedTupleValue* other) {
1358 if (other == this) return;
1359 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
1360 InternalSwap(other);
1361 }
1362
1363 // implements Message ----------------------------------------------
1364
1365 NamedTupleValue* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
1366 return CreateMaybeMessage<NamedTupleValue>(arena);
1367 }
1368 NamedTupleValue* New() const {
1369 return New(nullptr);
1370 }
1371 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
1372 void CopyFrom(const NamedTupleValue& from);
1373 void MergeFrom(const NamedTupleValue& from);
1374 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
1375 bool IsInitialized() const final;
1376
1377 size_t ByteSizeLong() const final;
1378 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
1379 ::uint8_t* _InternalSerialize(
1380 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
1381 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
1382
1383 private:
1384 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
1385 void SharedDtor();
1386 void SetCachedSize(int size) const;
1387 void InternalSwap(NamedTupleValue* other);
1388
1389 private:
1390 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
1391 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
1392 return "tensorflow.NamedTupleValue";
1393 }
1394 protected:
1395 explicit NamedTupleValue(::PROTOBUF_NAMESPACE_ID::Arena* arena,
1396 bool is_message_owned = false);
1397 public:
1398
1399 std::string GetTypeName() const final;
1400
1401 // nested types ----------------------------------------------------
1402
1403 // accessors -------------------------------------------------------
1404
1405 enum : int {
1406 kValuesFieldNumber = 2,
1407 kNameFieldNumber = 1,
1408 };
1409 // repeated .tensorflow.PairValue values = 2;
1410 int values_size() const;
1411 private:
1412 int _internal_values_size() const;
1413 public:
1414 void clear_values();
1415 ::tensorflow::PairValue* mutable_values(int index);
1416 ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::PairValue >*
1417 mutable_values();
1418 private:
1419 const ::tensorflow::PairValue& _internal_values(int index) const;
1420 ::tensorflow::PairValue* _internal_add_values();
1421 public:
1422 const ::tensorflow::PairValue& values(int index) const;
1423 ::tensorflow::PairValue* add_values();
1424 const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::PairValue >&
1425 values() const;
1426
1427 // string name = 1;
1428 void clear_name();
1429 const std::string& name() const;
1430 template <typename ArgT0 = const std::string&, typename... ArgT>
1431 void set_name(ArgT0&& arg0, ArgT... args);
1432 std::string* mutable_name();
1433 PROTOBUF_NODISCARD std::string* release_name();
1434 void set_allocated_name(std::string* name);
1435 private:
1436 const std::string& _internal_name() const;
1437 inline PROTOBUF_ALWAYS_INLINE void _internal_set_name(const std::string& value);
1438 std::string* _internal_mutable_name();
1439 public:
1440
1441 // @@protoc_insertion_point(class_scope:tensorflow.NamedTupleValue)
1442 private:
1443 class _Internal;
1444
1445 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
1446 typedef void InternalArenaConstructable_;
1447 typedef void DestructorSkippable_;
1448 struct Impl_ {
1449 ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::PairValue > values_;
1450 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr name_;
1451 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
1452 };
1453 union { Impl_ _impl_; };
1454 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fstruct_2eproto;
1455 };
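// Editor's note (not generated by protoc): a minimal, hedged usage sketch for
// the NamedTupleValue accessors declared above. Variable names are
// hypothetical, and PairValue's set_key()/mutable_value() are assumed from the
// PairValue message declared earlier in this header.
//
//   tensorflow::NamedTupleValue nt;
//   nt.set_name("Point");
//   tensorflow::PairValue* field = nt.add_values();
//   field->set_key("x");
//   field->mutable_value()->set_float64_value(1.0);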
1456 // -------------------------------------------------------------------
1457
1458 class TensorSpecProto final :
1459 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.TensorSpecProto) */ {
1460 public:
1461   inline TensorSpecProto() : TensorSpecProto(nullptr) {}
1462 ~TensorSpecProto() override;
1463 explicit PROTOBUF_CONSTEXPR TensorSpecProto(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
1464
1465 TensorSpecProto(const TensorSpecProto& from);
1466   TensorSpecProto(TensorSpecProto&& from) noexcept
1467 : TensorSpecProto() {
1468 *this = ::std::move(from);
1469 }
1470
1471 inline TensorSpecProto& operator=(const TensorSpecProto& from) {
1472 if (this == &from) return *this;
1473 CopyFrom(from);
1474 return *this;
1475 }
1476 inline TensorSpecProto& operator=(TensorSpecProto&& from) noexcept {
1477 if (this == &from) return *this;
1478 if (GetOwningArena() == from.GetOwningArena()
1479 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
1480 && GetOwningArena() != nullptr
1481 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
1482 ) {
1483 InternalSwap(&from);
1484 } else {
1485 CopyFrom(from);
1486 }
1487 return *this;
1488 }
1489
1490   static const TensorSpecProto& default_instance() {
1491 return *internal_default_instance();
1492 }
1493   static inline const TensorSpecProto* internal_default_instance() {
1494 return reinterpret_cast<const TensorSpecProto*>(
1495 &_TensorSpecProto_default_instance_);
1496 }
1497 static constexpr int kIndexInFileMessages =
1498 8;
1499
1500   friend void swap(TensorSpecProto& a, TensorSpecProto& b) {
1501 a.Swap(&b);
1502 }
1503   inline void Swap(TensorSpecProto* other) {
1504 if (other == this) return;
1505 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
1506 if (GetOwningArena() != nullptr &&
1507 GetOwningArena() == other->GetOwningArena()) {
1508 #else // PROTOBUF_FORCE_COPY_IN_SWAP
1509 if (GetOwningArena() == other->GetOwningArena()) {
1510 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
1511 InternalSwap(other);
1512 } else {
1513 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
1514 }
1515 }
1516 void UnsafeArenaSwap(TensorSpecProto* other) {
1517 if (other == this) return;
1518 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
1519 InternalSwap(other);
1520 }
1521
1522 // implements Message ----------------------------------------------
1523
1524 TensorSpecProto* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
1525 return CreateMaybeMessage<TensorSpecProto>(arena);
1526 }
1527 TensorSpecProto* New() const {
1528 return New(nullptr);
1529 }
1530 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
1531 void CopyFrom(const TensorSpecProto& from);
1532 void MergeFrom(const TensorSpecProto& from);
1533 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
1534 bool IsInitialized() const final;
1535
1536 size_t ByteSizeLong() const final;
1537 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
1538 ::uint8_t* _InternalSerialize(
1539 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
1540 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
1541
1542 private:
1543 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
1544 void SharedDtor();
1545 void SetCachedSize(int size) const;
1546 void InternalSwap(TensorSpecProto* other);
1547
1548 private:
1549 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
1550 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
1551 return "tensorflow.TensorSpecProto";
1552 }
1553 protected:
1554 explicit TensorSpecProto(::PROTOBUF_NAMESPACE_ID::Arena* arena,
1555 bool is_message_owned = false);
1556 public:
1557
1558 std::string GetTypeName() const final;
1559
1560 // nested types ----------------------------------------------------
1561
1562 // accessors -------------------------------------------------------
1563
1564 enum : int {
1565 kNameFieldNumber = 1,
1566 kShapeFieldNumber = 2,
1567 kDtypeFieldNumber = 3,
1568 };
1569 // string name = 1;
1570 void clear_name();
1571 const std::string& name() const;
1572 template <typename ArgT0 = const std::string&, typename... ArgT>
1573 void set_name(ArgT0&& arg0, ArgT... args);
1574 std::string* mutable_name();
1575 PROTOBUF_NODISCARD std::string* release_name();
1576 void set_allocated_name(std::string* name);
1577 private:
1578 const std::string& _internal_name() const;
1579 inline PROTOBUF_ALWAYS_INLINE void _internal_set_name(const std::string& value);
1580 std::string* _internal_mutable_name();
1581 public:
1582
1583 // .tensorflow.TensorShapeProto shape = 2;
1584 bool has_shape() const;
1585 private:
1586 bool _internal_has_shape() const;
1587 public:
1588 void clear_shape();
1589 const ::tensorflow::TensorShapeProto& shape() const;
1590 PROTOBUF_NODISCARD ::tensorflow::TensorShapeProto* release_shape();
1591 ::tensorflow::TensorShapeProto* mutable_shape();
1592 void set_allocated_shape(::tensorflow::TensorShapeProto* shape);
1593 private:
1594 const ::tensorflow::TensorShapeProto& _internal_shape() const;
1595 ::tensorflow::TensorShapeProto* _internal_mutable_shape();
1596 public:
1597 void unsafe_arena_set_allocated_shape(
1598 ::tensorflow::TensorShapeProto* shape);
1599 ::tensorflow::TensorShapeProto* unsafe_arena_release_shape();
1600
1601 // .tensorflow.DataType dtype = 3;
1602 void clear_dtype();
1603 ::tensorflow::DataType dtype() const;
1604 void set_dtype(::tensorflow::DataType value);
1605 private:
1606 ::tensorflow::DataType _internal_dtype() const;
1607 void _internal_set_dtype(::tensorflow::DataType value);
1608 public:
1609
1610 // @@protoc_insertion_point(class_scope:tensorflow.TensorSpecProto)
1611 private:
1612 class _Internal;
1613
1614 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
1615 typedef void InternalArenaConstructable_;
1616 typedef void DestructorSkippable_;
1617 struct Impl_ {
1618 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr name_;
1619 ::tensorflow::TensorShapeProto* shape_;
1620 int dtype_;
1621 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
1622 };
1623 union { Impl_ _impl_; };
1624 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fstruct_2eproto;
1625 };
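// Editor's note (not generated by protoc): a brief, hedged sketch of the
// TensorSpecProto accessors declared above. TensorShapeProto::add_dim()/
// Dim::set_size() and the DT_FLOAT enumerator are assumed from
// tensor_shape.pb.h / types.pb.h, which this header includes.
//
//   tensorflow::TensorSpecProto spec;
//   spec.set_name("input");
//   spec.mutable_shape()->add_dim()->set_size(3);  // shape: [3]
//   spec.set_dtype(tensorflow::DT_FLOAT);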
1626 // -------------------------------------------------------------------
1627
1628 class BoundedTensorSpecProto final :
1629 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.BoundedTensorSpecProto) */ {
1630 public:
1631   inline BoundedTensorSpecProto() : BoundedTensorSpecProto(nullptr) {}
1632 ~BoundedTensorSpecProto() override;
1633 explicit PROTOBUF_CONSTEXPR BoundedTensorSpecProto(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
1634
1635 BoundedTensorSpecProto(const BoundedTensorSpecProto& from);
1636   BoundedTensorSpecProto(BoundedTensorSpecProto&& from) noexcept
1637 : BoundedTensorSpecProto() {
1638 *this = ::std::move(from);
1639 }
1640
1641 inline BoundedTensorSpecProto& operator=(const BoundedTensorSpecProto& from) {
1642 if (this == &from) return *this;
1643 CopyFrom(from);
1644 return *this;
1645 }
1646 inline BoundedTensorSpecProto& operator=(BoundedTensorSpecProto&& from) noexcept {
1647 if (this == &from) return *this;
1648 if (GetOwningArena() == from.GetOwningArena()
1649 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
1650 && GetOwningArena() != nullptr
1651 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
1652 ) {
1653 InternalSwap(&from);
1654 } else {
1655 CopyFrom(from);
1656 }
1657 return *this;
1658 }
1659
1660   static const BoundedTensorSpecProto& default_instance() {
1661 return *internal_default_instance();
1662 }
1663   static inline const BoundedTensorSpecProto* internal_default_instance() {
1664 return reinterpret_cast<const BoundedTensorSpecProto*>(
1665 &_BoundedTensorSpecProto_default_instance_);
1666 }
1667 static constexpr int kIndexInFileMessages =
1668 9;
1669
1670   friend void swap(BoundedTensorSpecProto& a, BoundedTensorSpecProto& b) {
1671 a.Swap(&b);
1672 }
1673   inline void Swap(BoundedTensorSpecProto* other) {
1674 if (other == this) return;
1675 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
1676 if (GetOwningArena() != nullptr &&
1677 GetOwningArena() == other->GetOwningArena()) {
1678 #else // PROTOBUF_FORCE_COPY_IN_SWAP
1679 if (GetOwningArena() == other->GetOwningArena()) {
1680 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
1681 InternalSwap(other);
1682 } else {
1683 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
1684 }
1685 }
1686 void UnsafeArenaSwap(BoundedTensorSpecProto* other) {
1687 if (other == this) return;
1688 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
1689 InternalSwap(other);
1690 }
1691
1692 // implements Message ----------------------------------------------
1693
1694 BoundedTensorSpecProto* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
1695 return CreateMaybeMessage<BoundedTensorSpecProto>(arena);
1696 }
1697 BoundedTensorSpecProto* New() const {
1698 return New(nullptr);
1699 }
1700 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
1701 void CopyFrom(const BoundedTensorSpecProto& from);
1702 void MergeFrom(const BoundedTensorSpecProto& from);
1703 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
1704 bool IsInitialized() const final;
1705
1706 size_t ByteSizeLong() const final;
1707 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
1708 ::uint8_t* _InternalSerialize(
1709 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
1710 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
1711
1712 private:
1713 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
1714 void SharedDtor();
1715 void SetCachedSize(int size) const;
1716 void InternalSwap(BoundedTensorSpecProto* other);
1717
1718 private:
1719 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
1720 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
1721 return "tensorflow.BoundedTensorSpecProto";
1722 }
1723 protected:
1724 explicit BoundedTensorSpecProto(::PROTOBUF_NAMESPACE_ID::Arena* arena,
1725 bool is_message_owned = false);
1726 public:
1727
1728 std::string GetTypeName() const final;
1729
1730 // nested types ----------------------------------------------------
1731
1732 // accessors -------------------------------------------------------
1733
1734 enum : int {
1735 kNameFieldNumber = 1,
1736 kShapeFieldNumber = 2,
1737 kMinimumFieldNumber = 4,
1738 kMaximumFieldNumber = 5,
1739 kDtypeFieldNumber = 3,
1740 };
1741 // string name = 1;
1742 void clear_name();
1743 const std::string& name() const;
1744 template <typename ArgT0 = const std::string&, typename... ArgT>
1745 void set_name(ArgT0&& arg0, ArgT... args);
1746 std::string* mutable_name();
1747 PROTOBUF_NODISCARD std::string* release_name();
1748 void set_allocated_name(std::string* name);
1749 private:
1750 const std::string& _internal_name() const;
1751 inline PROTOBUF_ALWAYS_INLINE void _internal_set_name(const std::string& value);
1752 std::string* _internal_mutable_name();
1753 public:
1754
1755 // .tensorflow.TensorShapeProto shape = 2;
1756 bool has_shape() const;
1757 private:
1758 bool _internal_has_shape() const;
1759 public:
1760 void clear_shape();
1761 const ::tensorflow::TensorShapeProto& shape() const;
1762 PROTOBUF_NODISCARD ::tensorflow::TensorShapeProto* release_shape();
1763 ::tensorflow::TensorShapeProto* mutable_shape();
1764 void set_allocated_shape(::tensorflow::TensorShapeProto* shape);
1765 private:
1766 const ::tensorflow::TensorShapeProto& _internal_shape() const;
1767 ::tensorflow::TensorShapeProto* _internal_mutable_shape();
1768 public:
1769 void unsafe_arena_set_allocated_shape(
1770 ::tensorflow::TensorShapeProto* shape);
1771 ::tensorflow::TensorShapeProto* unsafe_arena_release_shape();
1772
1773 // .tensorflow.TensorProto minimum = 4;
1774 bool has_minimum() const;
1775 private:
1776 bool _internal_has_minimum() const;
1777 public:
1778 void clear_minimum();
1779 const ::tensorflow::TensorProto& minimum() const;
1780 PROTOBUF_NODISCARD ::tensorflow::TensorProto* release_minimum();
1781 ::tensorflow::TensorProto* mutable_minimum();
1782 void set_allocated_minimum(::tensorflow::TensorProto* minimum);
1783 private:
1784 const ::tensorflow::TensorProto& _internal_minimum() const;
1785 ::tensorflow::TensorProto* _internal_mutable_minimum();
1786 public:
1787 void unsafe_arena_set_allocated_minimum(
1788 ::tensorflow::TensorProto* minimum);
1789 ::tensorflow::TensorProto* unsafe_arena_release_minimum();
1790
1791 // .tensorflow.TensorProto maximum = 5;
1792 bool has_maximum() const;
1793 private:
1794 bool _internal_has_maximum() const;
1795 public:
1796 void clear_maximum();
1797 const ::tensorflow::TensorProto& maximum() const;
1798 PROTOBUF_NODISCARD ::tensorflow::TensorProto* release_maximum();
1799 ::tensorflow::TensorProto* mutable_maximum();
1800 void set_allocated_maximum(::tensorflow::TensorProto* maximum);
1801 private:
1802 const ::tensorflow::TensorProto& _internal_maximum() const;
1803 ::tensorflow::TensorProto* _internal_mutable_maximum();
1804 public:
1805 void unsafe_arena_set_allocated_maximum(
1806 ::tensorflow::TensorProto* maximum);
1807 ::tensorflow::TensorProto* unsafe_arena_release_maximum();
1808
1809 // .tensorflow.DataType dtype = 3;
1810 void clear_dtype();
1811 ::tensorflow::DataType dtype() const;
1812 void set_dtype(::tensorflow::DataType value);
1813 private:
1814 ::tensorflow::DataType _internal_dtype() const;
1815 void _internal_set_dtype(::tensorflow::DataType value);
1816 public:
1817
1818 // @@protoc_insertion_point(class_scope:tensorflow.BoundedTensorSpecProto)
1819 private:
1820 class _Internal;
1821
1822 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
1823 typedef void InternalArenaConstructable_;
1824 typedef void DestructorSkippable_;
1825 struct Impl_ {
1826 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr name_;
1827 ::tensorflow::TensorShapeProto* shape_;
1828 ::tensorflow::TensorProto* minimum_;
1829 ::tensorflow::TensorProto* maximum_;
1830 int dtype_;
1831 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
1832 };
1833 union { Impl_ _impl_; };
1834 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fstruct_2eproto;
1835 };
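// Editor's note (not generated by protoc): BoundedTensorSpecProto adds
// minimum/maximum TensorProto bounds to the plain tensor spec. A hedged
// sketch using the accessors declared above; add_float_val() is assumed from
// the TensorProto message in tensor.pb.h.
//
//   tensorflow::BoundedTensorSpecProto bounded;
//   bounded.set_name("action");
//   bounded.set_dtype(tensorflow::DT_FLOAT);
//   bounded.mutable_minimum()->add_float_val(-1.0f);
//   bounded.mutable_maximum()->add_float_val(1.0f);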
1836 // -------------------------------------------------------------------
1837
1838 class TypeSpecProto final :
1839 public ::PROTOBUF_NAMESPACE_ID::MessageLite /* @@protoc_insertion_point(class_definition:tensorflow.TypeSpecProto) */ {
1840 public:
1841   inline TypeSpecProto() : TypeSpecProto(nullptr) {}
1842 ~TypeSpecProto() override;
1843 explicit PROTOBUF_CONSTEXPR TypeSpecProto(::PROTOBUF_NAMESPACE_ID::internal::ConstantInitialized);
1844
1845 TypeSpecProto(const TypeSpecProto& from);
1846   TypeSpecProto(TypeSpecProto&& from) noexcept
1847 : TypeSpecProto() {
1848 *this = ::std::move(from);
1849 }
1850
1851 inline TypeSpecProto& operator=(const TypeSpecProto& from) {
1852 if (this == &from) return *this;
1853 CopyFrom(from);
1854 return *this;
1855 }
1856 inline TypeSpecProto& operator=(TypeSpecProto&& from) noexcept {
1857 if (this == &from) return *this;
1858 if (GetOwningArena() == from.GetOwningArena()
1859 #ifdef PROTOBUF_FORCE_COPY_IN_MOVE
1860 && GetOwningArena() != nullptr
1861 #endif // !PROTOBUF_FORCE_COPY_IN_MOVE
1862 ) {
1863 InternalSwap(&from);
1864 } else {
1865 CopyFrom(from);
1866 }
1867 return *this;
1868 }
1869
1870   static const TypeSpecProto& default_instance() {
1871 return *internal_default_instance();
1872 }
1873   static inline const TypeSpecProto* internal_default_instance() {
1874 return reinterpret_cast<const TypeSpecProto*>(
1875 &_TypeSpecProto_default_instance_);
1876 }
1877 static constexpr int kIndexInFileMessages =
1878 10;
1879
1880   friend void swap(TypeSpecProto& a, TypeSpecProto& b) {
1881 a.Swap(&b);
1882 }
1883   inline void Swap(TypeSpecProto* other) {
1884 if (other == this) return;
1885 #ifdef PROTOBUF_FORCE_COPY_IN_SWAP
1886 if (GetOwningArena() != nullptr &&
1887 GetOwningArena() == other->GetOwningArena()) {
1888 #else // PROTOBUF_FORCE_COPY_IN_SWAP
1889 if (GetOwningArena() == other->GetOwningArena()) {
1890 #endif // !PROTOBUF_FORCE_COPY_IN_SWAP
1891 InternalSwap(other);
1892 } else {
1893 ::PROTOBUF_NAMESPACE_ID::internal::GenericSwap(this, other);
1894 }
1895 }
1896 void UnsafeArenaSwap(TypeSpecProto* other) {
1897 if (other == this) return;
1898 GOOGLE_DCHECK(GetOwningArena() == other->GetOwningArena());
1899 InternalSwap(other);
1900 }
1901
1902 // implements Message ----------------------------------------------
1903
1904 TypeSpecProto* New(::PROTOBUF_NAMESPACE_ID::Arena* arena) const final {
1905 return CreateMaybeMessage<TypeSpecProto>(arena);
1906 }
1907 TypeSpecProto* New() const {
1908 return New(nullptr);
1909 }
1910 void CheckTypeAndMergeFrom(const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) final;
1911 void CopyFrom(const TypeSpecProto& from);
1912 void MergeFrom(const TypeSpecProto& from);
1913 PROTOBUF_ATTRIBUTE_REINITIALIZES void Clear() final;
1914 bool IsInitialized() const final;
1915
1916 size_t ByteSizeLong() const final;
1917 const char* _InternalParse(const char* ptr, ::PROTOBUF_NAMESPACE_ID::internal::ParseContext* ctx) final;
1918 ::uint8_t* _InternalSerialize(
1919 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const final;
1920 int GetCachedSize() const final { return _impl_._cached_size_.Get(); }
1921
1922 private:
1923 void SharedCtor(::PROTOBUF_NAMESPACE_ID::Arena* arena, bool is_message_owned);
1924 void SharedDtor();
1925 void SetCachedSize(int size) const;
1926 void InternalSwap(TypeSpecProto* other);
1927
1928 private:
1929 friend class ::PROTOBUF_NAMESPACE_ID::internal::AnyMetadata;
1930 static ::PROTOBUF_NAMESPACE_ID::StringPiece FullMessageName() {
1931 return "tensorflow.TypeSpecProto";
1932 }
1933 protected:
1934 explicit TypeSpecProto(::PROTOBUF_NAMESPACE_ID::Arena* arena,
1935 bool is_message_owned = false);
1936 public:
1937
1938 std::string GetTypeName() const final;
1939
1940 // nested types ----------------------------------------------------
1941
1942 typedef TypeSpecProto_TypeSpecClass TypeSpecClass;
1943 static constexpr TypeSpecClass UNKNOWN =
1944 TypeSpecProto_TypeSpecClass_UNKNOWN;
1945 static constexpr TypeSpecClass SPARSE_TENSOR_SPEC =
1946 TypeSpecProto_TypeSpecClass_SPARSE_TENSOR_SPEC;
1947 static constexpr TypeSpecClass INDEXED_SLICES_SPEC =
1948 TypeSpecProto_TypeSpecClass_INDEXED_SLICES_SPEC;
1949 static constexpr TypeSpecClass RAGGED_TENSOR_SPEC =
1950 TypeSpecProto_TypeSpecClass_RAGGED_TENSOR_SPEC;
1951 static constexpr TypeSpecClass TENSOR_ARRAY_SPEC =
1952 TypeSpecProto_TypeSpecClass_TENSOR_ARRAY_SPEC;
1953 static constexpr TypeSpecClass DATA_DATASET_SPEC =
1954 TypeSpecProto_TypeSpecClass_DATA_DATASET_SPEC;
1955 static constexpr TypeSpecClass DATA_ITERATOR_SPEC =
1956 TypeSpecProto_TypeSpecClass_DATA_ITERATOR_SPEC;
1957 static constexpr TypeSpecClass OPTIONAL_SPEC =
1958 TypeSpecProto_TypeSpecClass_OPTIONAL_SPEC;
1959 static constexpr TypeSpecClass PER_REPLICA_SPEC =
1960 TypeSpecProto_TypeSpecClass_PER_REPLICA_SPEC;
1961 static constexpr TypeSpecClass VARIABLE_SPEC =
1962 TypeSpecProto_TypeSpecClass_VARIABLE_SPEC;
1963 static constexpr TypeSpecClass ROW_PARTITION_SPEC =
1964 TypeSpecProto_TypeSpecClass_ROW_PARTITION_SPEC;
1965 static constexpr TypeSpecClass REGISTERED_TYPE_SPEC =
1966 TypeSpecProto_TypeSpecClass_REGISTERED_TYPE_SPEC;
1967 static constexpr TypeSpecClass EXTENSION_TYPE_SPEC =
1968 TypeSpecProto_TypeSpecClass_EXTENSION_TYPE_SPEC;
1969 static inline bool TypeSpecClass_IsValid(int value) {
1970 return TypeSpecProto_TypeSpecClass_IsValid(value);
1971 }
1972 static constexpr TypeSpecClass TypeSpecClass_MIN =
1973 TypeSpecProto_TypeSpecClass_TypeSpecClass_MIN;
1974 static constexpr TypeSpecClass TypeSpecClass_MAX =
1975 TypeSpecProto_TypeSpecClass_TypeSpecClass_MAX;
1976 static constexpr int TypeSpecClass_ARRAYSIZE =
1977 TypeSpecProto_TypeSpecClass_TypeSpecClass_ARRAYSIZE;
1978 template<typename T>
1979 static inline const std::string& TypeSpecClass_Name(T enum_t_value) {
1980 static_assert(::std::is_same<T, TypeSpecClass>::value ||
1981 ::std::is_integral<T>::value,
1982 "Incorrect type passed to function TypeSpecClass_Name.");
1983 return TypeSpecProto_TypeSpecClass_Name(enum_t_value);
1984 }
1985 static inline bool TypeSpecClass_Parse(::PROTOBUF_NAMESPACE_ID::ConstStringParam name,
1986 TypeSpecClass* value) {
1987 return TypeSpecProto_TypeSpecClass_Parse(name, value);
1988 }
1989
1990 // accessors -------------------------------------------------------
1991
1992 enum : int {
1993 kTypeSpecClassNameFieldNumber = 3,
1994 kTypeStateFieldNumber = 2,
1995 kTypeSpecClassFieldNumber = 1,
1996 kNumFlatComponentsFieldNumber = 4,
1997 };
1998 // string type_spec_class_name = 3;
1999 void clear_type_spec_class_name();
2000 const std::string& type_spec_class_name() const;
2001 template <typename ArgT0 = const std::string&, typename... ArgT>
2002 void set_type_spec_class_name(ArgT0&& arg0, ArgT... args);
2003 std::string* mutable_type_spec_class_name();
2004 PROTOBUF_NODISCARD std::string* release_type_spec_class_name();
2005 void set_allocated_type_spec_class_name(std::string* type_spec_class_name);
2006 private:
2007 const std::string& _internal_type_spec_class_name() const;
2008 inline PROTOBUF_ALWAYS_INLINE void _internal_set_type_spec_class_name(const std::string& value);
2009 std::string* _internal_mutable_type_spec_class_name();
2010 public:
2011
2012 // .tensorflow.StructuredValue type_state = 2;
2013 bool has_type_state() const;
2014 private:
2015 bool _internal_has_type_state() const;
2016 public:
2017 void clear_type_state();
2018 const ::tensorflow::StructuredValue& type_state() const;
2019 PROTOBUF_NODISCARD ::tensorflow::StructuredValue* release_type_state();
2020 ::tensorflow::StructuredValue* mutable_type_state();
2021 void set_allocated_type_state(::tensorflow::StructuredValue* type_state);
2022 private:
2023 const ::tensorflow::StructuredValue& _internal_type_state() const;
2024 ::tensorflow::StructuredValue* _internal_mutable_type_state();
2025 public:
2026 void unsafe_arena_set_allocated_type_state(
2027 ::tensorflow::StructuredValue* type_state);
2028 ::tensorflow::StructuredValue* unsafe_arena_release_type_state();
2029
2030 // .tensorflow.TypeSpecProto.TypeSpecClass type_spec_class = 1;
2031 void clear_type_spec_class();
2032 ::tensorflow::TypeSpecProto_TypeSpecClass type_spec_class() const;
2033 void set_type_spec_class(::tensorflow::TypeSpecProto_TypeSpecClass value);
2034 private:
2035 ::tensorflow::TypeSpecProto_TypeSpecClass _internal_type_spec_class() const;
2036 void _internal_set_type_spec_class(::tensorflow::TypeSpecProto_TypeSpecClass value);
2037 public:
2038
2039 // int32 num_flat_components = 4;
2040 void clear_num_flat_components();
2041 ::int32_t num_flat_components() const;
2042 void set_num_flat_components(::int32_t value);
2043 private:
2044 ::int32_t _internal_num_flat_components() const;
2045 void _internal_set_num_flat_components(::int32_t value);
2046 public:
2047
2048 // @@protoc_insertion_point(class_scope:tensorflow.TypeSpecProto)
2049 private:
2050 class _Internal;
2051
2052 template <typename T> friend class ::PROTOBUF_NAMESPACE_ID::Arena::InternalHelper;
2053 typedef void InternalArenaConstructable_;
2054 typedef void DestructorSkippable_;
2055 struct Impl_ {
2056 ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr type_spec_class_name_;
2057 ::tensorflow::StructuredValue* type_state_;
2058 int type_spec_class_;
2059 ::int32_t num_flat_components_;
2060 mutable ::PROTOBUF_NAMESPACE_ID::internal::CachedSize _cached_size_;
2061 };
2062 union { Impl_ _impl_; };
2063 friend struct ::TableStruct_tensorflow_2fcore_2fprotobuf_2fstruct_2eproto;
2064 };
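// Editor's note (not generated by protoc): TypeSpecProto records which
// TypeSpecClass a serialized tf.TypeSpec belongs to, its type_state, and the
// number of flat components. A minimal sketch using the accessors above; the
// string used for type_state is purely illustrative.
//
//   tensorflow::TypeSpecProto ts;
//   ts.set_type_spec_class(tensorflow::TypeSpecProto::RAGGED_TENSOR_SPEC);
//   ts.mutable_type_state()->set_string_value("serialized spec state");
//   ts.set_num_flat_components(2);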
2065 // ===================================================================
2066
2067
2068 // ===================================================================
2069
2070 #ifdef __GNUC__
2071 #pragma GCC diagnostic push
2072 #pragma GCC diagnostic ignored "-Wstrict-aliasing"
2073 #endif // __GNUC__
2074 // StructuredValue
2075
2076 // .tensorflow.NoneValue none_value = 1;
2077 inline bool StructuredValue::_internal_has_none_value() const {
2078 return kind_case() == kNoneValue;
2079 }
2080 inline bool StructuredValue::has_none_value() const {
2081 return _internal_has_none_value();
2082 }
2083 inline void StructuredValue::set_has_none_value() {
2084 _impl_._oneof_case_[0] = kNoneValue;
2085 }
2086 inline void StructuredValue::clear_none_value() {
2087 if (_internal_has_none_value()) {
2088 if (GetArenaForAllocation() == nullptr) {
2089 delete _impl_.kind_.none_value_;
2090 }
2091 clear_has_kind();
2092 }
2093 }
2094 inline ::tensorflow::NoneValue* StructuredValue::release_none_value() {
2095 // @@protoc_insertion_point(field_release:tensorflow.StructuredValue.none_value)
2096 if (_internal_has_none_value()) {
2097 clear_has_kind();
2098 ::tensorflow::NoneValue* temp = _impl_.kind_.none_value_;
2099 if (GetArenaForAllocation() != nullptr) {
2100 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
2101 }
2102 _impl_.kind_.none_value_ = nullptr;
2103 return temp;
2104 } else {
2105 return nullptr;
2106 }
2107 }
2108 inline const ::tensorflow::NoneValue& StructuredValue::_internal_none_value() const {
2109 return _internal_has_none_value()
2110 ? *_impl_.kind_.none_value_
2111 : reinterpret_cast< ::tensorflow::NoneValue&>(::tensorflow::_NoneValue_default_instance_);
2112 }
2113 inline const ::tensorflow::NoneValue& StructuredValue::none_value() const {
2114 // @@protoc_insertion_point(field_get:tensorflow.StructuredValue.none_value)
2115 return _internal_none_value();
2116 }
2117 inline ::tensorflow::NoneValue* StructuredValue::unsafe_arena_release_none_value() {
2118 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.StructuredValue.none_value)
2119 if (_internal_has_none_value()) {
2120 clear_has_kind();
2121 ::tensorflow::NoneValue* temp = _impl_.kind_.none_value_;
2122 _impl_.kind_.none_value_ = nullptr;
2123 return temp;
2124 } else {
2125 return nullptr;
2126 }
2127 }
2128 inline void StructuredValue::unsafe_arena_set_allocated_none_value(::tensorflow::NoneValue* none_value) {
2129 clear_kind();
2130 if (none_value) {
2131 set_has_none_value();
2132 _impl_.kind_.none_value_ = none_value;
2133 }
2134 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.StructuredValue.none_value)
2135 }
2136 inline ::tensorflow::NoneValue* StructuredValue::_internal_mutable_none_value() {
2137 if (!_internal_has_none_value()) {
2138 clear_kind();
2139 set_has_none_value();
2140 _impl_.kind_.none_value_ = CreateMaybeMessage< ::tensorflow::NoneValue >(GetArenaForAllocation());
2141 }
2142 return _impl_.kind_.none_value_;
2143 }
2144 inline ::tensorflow::NoneValue* StructuredValue::mutable_none_value() {
2145 ::tensorflow::NoneValue* _msg = _internal_mutable_none_value();
2146 // @@protoc_insertion_point(field_mutable:tensorflow.StructuredValue.none_value)
2147 return _msg;
2148 }
2149
2150 // double float64_value = 11;
2151 inline bool StructuredValue::_internal_has_float64_value() const {
2152 return kind_case() == kFloat64Value;
2153 }
2154 inline bool StructuredValue::has_float64_value() const {
2155 return _internal_has_float64_value();
2156 }
2157 inline void StructuredValue::set_has_float64_value() {
2158 _impl_._oneof_case_[0] = kFloat64Value;
2159 }
2160 inline void StructuredValue::clear_float64_value() {
2161 if (_internal_has_float64_value()) {
2162 _impl_.kind_.float64_value_ = 0;
2163 clear_has_kind();
2164 }
2165 }
2166 inline double StructuredValue::_internal_float64_value() const {
2167 if (_internal_has_float64_value()) {
2168 return _impl_.kind_.float64_value_;
2169 }
2170 return 0;
2171 }
2172 inline void StructuredValue::_internal_set_float64_value(double value) {
2173 if (!_internal_has_float64_value()) {
2174 clear_kind();
2175 set_has_float64_value();
2176 }
2177 _impl_.kind_.float64_value_ = value;
2178 }
2179 inline double StructuredValue::float64_value() const {
2180 // @@protoc_insertion_point(field_get:tensorflow.StructuredValue.float64_value)
2181 return _internal_float64_value();
2182 }
2183 inline void StructuredValue::set_float64_value(double value) {
2184 _internal_set_float64_value(value);
2185 // @@protoc_insertion_point(field_set:tensorflow.StructuredValue.float64_value)
2186 }
2187
2188 // sint64 int64_value = 12;
2189 inline bool StructuredValue::_internal_has_int64_value() const {
2190 return kind_case() == kInt64Value;
2191 }
2192 inline bool StructuredValue::has_int64_value() const {
2193 return _internal_has_int64_value();
2194 }
2195 inline void StructuredValue::set_has_int64_value() {
2196 _impl_._oneof_case_[0] = kInt64Value;
2197 }
2198 inline void StructuredValue::clear_int64_value() {
2199 if (_internal_has_int64_value()) {
2200 _impl_.kind_.int64_value_ = ::int64_t{0};
2201 clear_has_kind();
2202 }
2203 }
2204 inline ::int64_t StructuredValue::_internal_int64_value() const {
2205 if (_internal_has_int64_value()) {
2206 return _impl_.kind_.int64_value_;
2207 }
2208 return ::int64_t{0};
2209 }
2210 inline void StructuredValue::_internal_set_int64_value(::int64_t value) {
2211 if (!_internal_has_int64_value()) {
2212 clear_kind();
2213 set_has_int64_value();
2214 }
2215 _impl_.kind_.int64_value_ = value;
2216 }
2217 inline ::int64_t StructuredValue::int64_value() const {
2218 // @@protoc_insertion_point(field_get:tensorflow.StructuredValue.int64_value)
2219 return _internal_int64_value();
2220 }
2221 inline void StructuredValue::set_int64_value(::int64_t value) {
2222 _internal_set_int64_value(value);
2223 // @@protoc_insertion_point(field_set:tensorflow.StructuredValue.int64_value)
2224 }
2225
2226 // string string_value = 13;
2227 inline bool StructuredValue::_internal_has_string_value() const {
2228 return kind_case() == kStringValue;
2229 }
2230 inline bool StructuredValue::has_string_value() const {
2231 return _internal_has_string_value();
2232 }
2233 inline void StructuredValue::set_has_string_value() {
2234 _impl_._oneof_case_[0] = kStringValue;
2235 }
2236 inline void StructuredValue::clear_string_value() {
2237 if (_internal_has_string_value()) {
2238 _impl_.kind_.string_value_.Destroy();
2239 clear_has_kind();
2240 }
2241 }
2242 inline const std::string& StructuredValue::string_value() const {
2243 // @@protoc_insertion_point(field_get:tensorflow.StructuredValue.string_value)
2244 return _internal_string_value();
2245 }
2246 template <typename ArgT0, typename... ArgT>
2247 inline void StructuredValue::set_string_value(ArgT0&& arg0, ArgT... args) {
2248 if (!_internal_has_string_value()) {
2249 clear_kind();
2250 set_has_string_value();
2251 _impl_.kind_.string_value_.InitDefault();
2252 }
2253 _impl_.kind_.string_value_.Set( static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
2254 // @@protoc_insertion_point(field_set:tensorflow.StructuredValue.string_value)
2255 }
2256 inline std::string* StructuredValue::mutable_string_value() {
2257 std::string* _s = _internal_mutable_string_value();
2258 // @@protoc_insertion_point(field_mutable:tensorflow.StructuredValue.string_value)
2259 return _s;
2260 }
2261 inline const std::string& StructuredValue::_internal_string_value() const {
2262 if (_internal_has_string_value()) {
2263 return _impl_.kind_.string_value_.Get();
2264 }
2265 return ::PROTOBUF_NAMESPACE_ID::internal::GetEmptyStringAlreadyInited();
2266 }
2267 inline void StructuredValue::_internal_set_string_value(const std::string& value) {
2268 if (!_internal_has_string_value()) {
2269 clear_kind();
2270 set_has_string_value();
2271 _impl_.kind_.string_value_.InitDefault();
2272 }
2273 _impl_.kind_.string_value_.Set(value, GetArenaForAllocation());
2274 }
2275 inline std::string* StructuredValue::_internal_mutable_string_value() {
2276 if (!_internal_has_string_value()) {
2277 clear_kind();
2278 set_has_string_value();
2279 _impl_.kind_.string_value_.InitDefault();
2280 }
2281 return _impl_.kind_.string_value_.Mutable( GetArenaForAllocation());
2282 }
2283 inline std::string* StructuredValue::release_string_value() {
2284 // @@protoc_insertion_point(field_release:tensorflow.StructuredValue.string_value)
2285 if (_internal_has_string_value()) {
2286 clear_has_kind();
2287 return _impl_.kind_.string_value_.Release();
2288 } else {
2289 return nullptr;
2290 }
2291 }
2292 inline void StructuredValue::set_allocated_string_value(std::string* string_value) {
2293 if (has_kind()) {
2294 clear_kind();
2295 }
2296 if (string_value != nullptr) {
2297 set_has_string_value();
2298 _impl_.kind_.string_value_.InitAllocated(string_value, GetArenaForAllocation());
2299 }
2300 // @@protoc_insertion_point(field_set_allocated:tensorflow.StructuredValue.string_value)
2301 }
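// Editor's note (not generated by protoc): StructuredValue keeps exactly one
// member of its `kind` oneof. As the setters above show, assigning a new
// member first calls clear_kind(), so only the most recent value survives:
//
//   tensorflow::StructuredValue v;
//   v.set_int64_value(7);          // kind_case() == kInt64Value
//   v.set_string_value("seven");   // now kind_case() == kStringValue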
2302
2303 // bool bool_value = 14;
2304 inline bool StructuredValue::_internal_has_bool_value() const {
2305 return kind_case() == kBoolValue;
2306 }
2307 inline bool StructuredValue::has_bool_value() const {
2308 return _internal_has_bool_value();
2309 }
2310 inline void StructuredValue::set_has_bool_value() {
2311 _impl_._oneof_case_[0] = kBoolValue;
2312 }
2313 inline void StructuredValue::clear_bool_value() {
2314 if (_internal_has_bool_value()) {
2315 _impl_.kind_.bool_value_ = false;
2316 clear_has_kind();
2317 }
2318 }
2319 inline bool StructuredValue::_internal_bool_value() const {
2320 if (_internal_has_bool_value()) {
2321 return _impl_.kind_.bool_value_;
2322 }
2323 return false;
2324 }
2325 inline void StructuredValue::_internal_set_bool_value(bool value) {
2326 if (!_internal_has_bool_value()) {
2327 clear_kind();
2328 set_has_bool_value();
2329 }
2330 _impl_.kind_.bool_value_ = value;
2331 }
2332 inline bool StructuredValue::bool_value() const {
2333 // @@protoc_insertion_point(field_get:tensorflow.StructuredValue.bool_value)
2334 return _internal_bool_value();
2335 }
2336 inline void StructuredValue::set_bool_value(bool value) {
2337 _internal_set_bool_value(value);
2338 // @@protoc_insertion_point(field_set:tensorflow.StructuredValue.bool_value)
2339 }
2340
2341 // .tensorflow.TensorShapeProto tensor_shape_value = 31;
2342 inline bool StructuredValue::_internal_has_tensor_shape_value() const {
2343 return kind_case() == kTensorShapeValue;
2344 }
2345 inline bool StructuredValue::has_tensor_shape_value() const {
2346 return _internal_has_tensor_shape_value();
2347 }
2348 inline void StructuredValue::set_has_tensor_shape_value() {
2349 _impl_._oneof_case_[0] = kTensorShapeValue;
2350 }
2351 inline ::tensorflow::TensorShapeProto* StructuredValue::release_tensor_shape_value() {
2352 // @@protoc_insertion_point(field_release:tensorflow.StructuredValue.tensor_shape_value)
2353 if (_internal_has_tensor_shape_value()) {
2354 clear_has_kind();
2355 ::tensorflow::TensorShapeProto* temp = _impl_.kind_.tensor_shape_value_;
2356 if (GetArenaForAllocation() != nullptr) {
2357 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
2358 }
2359 _impl_.kind_.tensor_shape_value_ = nullptr;
2360 return temp;
2361 } else {
2362 return nullptr;
2363 }
2364 }
2365 inline const ::tensorflow::TensorShapeProto& StructuredValue::_internal_tensor_shape_value() const {
2366 return _internal_has_tensor_shape_value()
2367 ? *_impl_.kind_.tensor_shape_value_
2368 : reinterpret_cast< ::tensorflow::TensorShapeProto&>(::tensorflow::_TensorShapeProto_default_instance_);
2369 }
2370 inline const ::tensorflow::TensorShapeProto& StructuredValue::tensor_shape_value() const {
2371 // @@protoc_insertion_point(field_get:tensorflow.StructuredValue.tensor_shape_value)
2372 return _internal_tensor_shape_value();
2373 }
2374 inline ::tensorflow::TensorShapeProto* StructuredValue::unsafe_arena_release_tensor_shape_value() {
2375 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.StructuredValue.tensor_shape_value)
2376 if (_internal_has_tensor_shape_value()) {
2377 clear_has_kind();
2378 ::tensorflow::TensorShapeProto* temp = _impl_.kind_.tensor_shape_value_;
2379 _impl_.kind_.tensor_shape_value_ = nullptr;
2380 return temp;
2381 } else {
2382 return nullptr;
2383 }
2384 }
2385 inline void StructuredValue::unsafe_arena_set_allocated_tensor_shape_value(::tensorflow::TensorShapeProto* tensor_shape_value) {
2386 clear_kind();
2387 if (tensor_shape_value) {
2388 set_has_tensor_shape_value();
2389 _impl_.kind_.tensor_shape_value_ = tensor_shape_value;
2390 }
2391 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.StructuredValue.tensor_shape_value)
2392 }
2393 inline ::tensorflow::TensorShapeProto* StructuredValue::_internal_mutable_tensor_shape_value() {
2394 if (!_internal_has_tensor_shape_value()) {
2395 clear_kind();
2396 set_has_tensor_shape_value();
2397 _impl_.kind_.tensor_shape_value_ = CreateMaybeMessage< ::tensorflow::TensorShapeProto >(GetArenaForAllocation());
2398 }
2399 return _impl_.kind_.tensor_shape_value_;
2400 }
2401 inline ::tensorflow::TensorShapeProto* StructuredValue::mutable_tensor_shape_value() {
2402 ::tensorflow::TensorShapeProto* _msg = _internal_mutable_tensor_shape_value();
2403 // @@protoc_insertion_point(field_mutable:tensorflow.StructuredValue.tensor_shape_value)
2404 return _msg;
2405 }
2406
2407 // .tensorflow.DataType tensor_dtype_value = 32;
2408 inline bool StructuredValue::_internal_has_tensor_dtype_value() const {
2409 return kind_case() == kTensorDtypeValue;
2410 }
2411 inline bool StructuredValue::has_tensor_dtype_value() const {
2412 return _internal_has_tensor_dtype_value();
2413 }
2414 inline void StructuredValue::set_has_tensor_dtype_value() {
2415 _impl_._oneof_case_[0] = kTensorDtypeValue;
2416 }
2417 inline void StructuredValue::clear_tensor_dtype_value() {
2418 if (_internal_has_tensor_dtype_value()) {
2419 _impl_.kind_.tensor_dtype_value_ = 0;
2420 clear_has_kind();
2421 }
2422 }
2423 inline ::tensorflow::DataType StructuredValue::_internal_tensor_dtype_value() const {
2424 if (_internal_has_tensor_dtype_value()) {
2425 return static_cast< ::tensorflow::DataType >(_impl_.kind_.tensor_dtype_value_);
2426 }
2427 return static_cast< ::tensorflow::DataType >(0);
2428 }
2429 inline ::tensorflow::DataType StructuredValue::tensor_dtype_value() const {
2430 // @@protoc_insertion_point(field_get:tensorflow.StructuredValue.tensor_dtype_value)
2431 return _internal_tensor_dtype_value();
2432 }
2433 inline void StructuredValue::_internal_set_tensor_dtype_value(::tensorflow::DataType value) {
2434 if (!_internal_has_tensor_dtype_value()) {
2435 clear_kind();
2436 set_has_tensor_dtype_value();
2437 }
2438 _impl_.kind_.tensor_dtype_value_ = value;
2439 }
2440 inline void StructuredValue::set_tensor_dtype_value(::tensorflow::DataType value) {
2441 _internal_set_tensor_dtype_value(value);
2442 // @@protoc_insertion_point(field_set:tensorflow.StructuredValue.tensor_dtype_value)
2443 }
2444
2445 // .tensorflow.TensorSpecProto tensor_spec_value = 33;
2446 inline bool StructuredValue::_internal_has_tensor_spec_value() const {
2447 return kind_case() == kTensorSpecValue;
2448 }
2449 inline bool StructuredValue::has_tensor_spec_value() const {
2450 return _internal_has_tensor_spec_value();
2451 }
2452 inline void StructuredValue::set_has_tensor_spec_value() {
2453 _impl_._oneof_case_[0] = kTensorSpecValue;
2454 }
2455 inline void StructuredValue::clear_tensor_spec_value() {
2456 if (_internal_has_tensor_spec_value()) {
2457 if (GetArenaForAllocation() == nullptr) {
2458 delete _impl_.kind_.tensor_spec_value_;
2459 }
2460 clear_has_kind();
2461 }
2462 }
2463 inline ::tensorflow::TensorSpecProto* StructuredValue::release_tensor_spec_value() {
2464 // @@protoc_insertion_point(field_release:tensorflow.StructuredValue.tensor_spec_value)
2465 if (_internal_has_tensor_spec_value()) {
2466 clear_has_kind();
2467 ::tensorflow::TensorSpecProto* temp = _impl_.kind_.tensor_spec_value_;
2468 if (GetArenaForAllocation() != nullptr) {
2469 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
2470 }
2471 _impl_.kind_.tensor_spec_value_ = nullptr;
2472 return temp;
2473 } else {
2474 return nullptr;
2475 }
2476 }
2477 inline const ::tensorflow::TensorSpecProto& StructuredValue::_internal_tensor_spec_value() const {
2478 return _internal_has_tensor_spec_value()
2479 ? *_impl_.kind_.tensor_spec_value_
2480 : reinterpret_cast< ::tensorflow::TensorSpecProto&>(::tensorflow::_TensorSpecProto_default_instance_);
2481 }
2482 inline const ::tensorflow::TensorSpecProto& StructuredValue::tensor_spec_value() const {
2483 // @@protoc_insertion_point(field_get:tensorflow.StructuredValue.tensor_spec_value)
2484 return _internal_tensor_spec_value();
2485 }
2486 inline ::tensorflow::TensorSpecProto* StructuredValue::unsafe_arena_release_tensor_spec_value() {
2487 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.StructuredValue.tensor_spec_value)
2488 if (_internal_has_tensor_spec_value()) {
2489 clear_has_kind();
2490 ::tensorflow::TensorSpecProto* temp = _impl_.kind_.tensor_spec_value_;
2491 _impl_.kind_.tensor_spec_value_ = nullptr;
2492 return temp;
2493 } else {
2494 return nullptr;
2495 }
2496 }
2497 inline void StructuredValue::unsafe_arena_set_allocated_tensor_spec_value(::tensorflow::TensorSpecProto* tensor_spec_value) {
2498 clear_kind();
2499 if (tensor_spec_value) {
2500 set_has_tensor_spec_value();
2501 _impl_.kind_.tensor_spec_value_ = tensor_spec_value;
2502 }
2503 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.StructuredValue.tensor_spec_value)
2504 }
2505 inline ::tensorflow::TensorSpecProto* StructuredValue::_internal_mutable_tensor_spec_value() {
2506 if (!_internal_has_tensor_spec_value()) {
2507 clear_kind();
2508 set_has_tensor_spec_value();
2509 _impl_.kind_.tensor_spec_value_ = CreateMaybeMessage< ::tensorflow::TensorSpecProto >(GetArenaForAllocation());
2510 }
2511 return _impl_.kind_.tensor_spec_value_;
2512 }
2513 inline ::tensorflow::TensorSpecProto* StructuredValue::mutable_tensor_spec_value() {
2514 ::tensorflow::TensorSpecProto* _msg = _internal_mutable_tensor_spec_value();
2515 // @@protoc_insertion_point(field_mutable:tensorflow.StructuredValue.tensor_spec_value)
2516 return _msg;
2517 }
2518
2519 // .tensorflow.TypeSpecProto type_spec_value = 34;
2520 inline bool StructuredValue::_internal_has_type_spec_value() const {
2521 return kind_case() == kTypeSpecValue;
2522 }
2523 inline bool StructuredValue::has_type_spec_value() const {
2524 return _internal_has_type_spec_value();
2525 }
2526 inline void StructuredValue::set_has_type_spec_value() {
2527 _impl_._oneof_case_[0] = kTypeSpecValue;
2528 }
2529 inline void StructuredValue::clear_type_spec_value() {
2530 if (_internal_has_type_spec_value()) {
2531 if (GetArenaForAllocation() == nullptr) {
2532 delete _impl_.kind_.type_spec_value_;
2533 }
2534 clear_has_kind();
2535 }
2536 }
2537 inline ::tensorflow::TypeSpecProto* StructuredValue::release_type_spec_value() {
2538 // @@protoc_insertion_point(field_release:tensorflow.StructuredValue.type_spec_value)
2539 if (_internal_has_type_spec_value()) {
2540 clear_has_kind();
2541 ::tensorflow::TypeSpecProto* temp = _impl_.kind_.type_spec_value_;
2542 if (GetArenaForAllocation() != nullptr) {
2543 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
2544 }
2545 _impl_.kind_.type_spec_value_ = nullptr;
2546 return temp;
2547 } else {
2548 return nullptr;
2549 }
2550 }
2551 inline const ::tensorflow::TypeSpecProto& StructuredValue::_internal_type_spec_value() const {
2552 return _internal_has_type_spec_value()
2553 ? *_impl_.kind_.type_spec_value_
2554 : reinterpret_cast< ::tensorflow::TypeSpecProto&>(::tensorflow::_TypeSpecProto_default_instance_);
2555 }
2556 inline const ::tensorflow::TypeSpecProto& StructuredValue::type_spec_value() const {
2557 // @@protoc_insertion_point(field_get:tensorflow.StructuredValue.type_spec_value)
2558 return _internal_type_spec_value();
2559 }
2560 inline ::tensorflow::TypeSpecProto* StructuredValue::unsafe_arena_release_type_spec_value() {
2561 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.StructuredValue.type_spec_value)
2562 if (_internal_has_type_spec_value()) {
2563 clear_has_kind();
2564 ::tensorflow::TypeSpecProto* temp = _impl_.kind_.type_spec_value_;
2565 _impl_.kind_.type_spec_value_ = nullptr;
2566 return temp;
2567 } else {
2568 return nullptr;
2569 }
2570 }
2571 inline void StructuredValue::unsafe_arena_set_allocated_type_spec_value(::tensorflow::TypeSpecProto* type_spec_value) {
2572 clear_kind();
2573 if (type_spec_value) {
2574 set_has_type_spec_value();
2575 _impl_.kind_.type_spec_value_ = type_spec_value;
2576 }
2577 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.StructuredValue.type_spec_value)
2578 }
2579 inline ::tensorflow::TypeSpecProto* StructuredValue::_internal_mutable_type_spec_value() {
2580 if (!_internal_has_type_spec_value()) {
2581 clear_kind();
2582 set_has_type_spec_value();
2583 _impl_.kind_.type_spec_value_ = CreateMaybeMessage< ::tensorflow::TypeSpecProto >(GetArenaForAllocation());
2584 }
2585 return _impl_.kind_.type_spec_value_;
2586 }
2587 inline ::tensorflow::TypeSpecProto* StructuredValue::mutable_type_spec_value() {
2588 ::tensorflow::TypeSpecProto* _msg = _internal_mutable_type_spec_value();
2589 // @@protoc_insertion_point(field_mutable:tensorflow.StructuredValue.type_spec_value)
2590 return _msg;
2591 }
2592
2593 // .tensorflow.BoundedTensorSpecProto bounded_tensor_spec_value = 35;
2594 inline bool StructuredValue::_internal_has_bounded_tensor_spec_value() const {
2595 return kind_case() == kBoundedTensorSpecValue;
2596 }
2597 inline bool StructuredValue::has_bounded_tensor_spec_value() const {
2598 return _internal_has_bounded_tensor_spec_value();
2599 }
2600 inline void StructuredValue::set_has_bounded_tensor_spec_value() {
2601 _impl_._oneof_case_[0] = kBoundedTensorSpecValue;
2602 }
2603 inline void StructuredValue::clear_bounded_tensor_spec_value() {
2604 if (_internal_has_bounded_tensor_spec_value()) {
2605 if (GetArenaForAllocation() == nullptr) {
2606 delete _impl_.kind_.bounded_tensor_spec_value_;
2607 }
2608 clear_has_kind();
2609 }
2610 }
2611 inline ::tensorflow::BoundedTensorSpecProto* StructuredValue::release_bounded_tensor_spec_value() {
2612 // @@protoc_insertion_point(field_release:tensorflow.StructuredValue.bounded_tensor_spec_value)
2613 if (_internal_has_bounded_tensor_spec_value()) {
2614 clear_has_kind();
2615 ::tensorflow::BoundedTensorSpecProto* temp = _impl_.kind_.bounded_tensor_spec_value_;
2616 if (GetArenaForAllocation() != nullptr) {
2617 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
2618 }
2619 _impl_.kind_.bounded_tensor_spec_value_ = nullptr;
2620 return temp;
2621 } else {
2622 return nullptr;
2623 }
2624 }
2625 inline const ::tensorflow::BoundedTensorSpecProto& StructuredValue::_internal_bounded_tensor_spec_value() const {
2626 return _internal_has_bounded_tensor_spec_value()
2627 ? *_impl_.kind_.bounded_tensor_spec_value_
2628 : reinterpret_cast< ::tensorflow::BoundedTensorSpecProto&>(::tensorflow::_BoundedTensorSpecProto_default_instance_);
2629 }
2630 inline const ::tensorflow::BoundedTensorSpecProto& StructuredValue::bounded_tensor_spec_value() const {
2631 // @@protoc_insertion_point(field_get:tensorflow.StructuredValue.bounded_tensor_spec_value)
2632 return _internal_bounded_tensor_spec_value();
2633 }
2634 inline ::tensorflow::BoundedTensorSpecProto* StructuredValue::unsafe_arena_release_bounded_tensor_spec_value() {
2635 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.StructuredValue.bounded_tensor_spec_value)
2636 if (_internal_has_bounded_tensor_spec_value()) {
2637 clear_has_kind();
2638 ::tensorflow::BoundedTensorSpecProto* temp = _impl_.kind_.bounded_tensor_spec_value_;
2639 _impl_.kind_.bounded_tensor_spec_value_ = nullptr;
2640 return temp;
2641 } else {
2642 return nullptr;
2643 }
2644 }
2645 inline void StructuredValue::unsafe_arena_set_allocated_bounded_tensor_spec_value(::tensorflow::BoundedTensorSpecProto* bounded_tensor_spec_value) {
2646 clear_kind();
2647 if (bounded_tensor_spec_value) {
2648 set_has_bounded_tensor_spec_value();
2649 _impl_.kind_.bounded_tensor_spec_value_ = bounded_tensor_spec_value;
2650 }
2651 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.StructuredValue.bounded_tensor_spec_value)
2652 }
2653 inline ::tensorflow::BoundedTensorSpecProto* StructuredValue::_internal_mutable_bounded_tensor_spec_value() {
2654 if (!_internal_has_bounded_tensor_spec_value()) {
2655 clear_kind();
2656 set_has_bounded_tensor_spec_value();
2657 _impl_.kind_.bounded_tensor_spec_value_ = CreateMaybeMessage< ::tensorflow::BoundedTensorSpecProto >(GetArenaForAllocation());
2658 }
2659 return _impl_.kind_.bounded_tensor_spec_value_;
2660 }
2661 inline ::tensorflow::BoundedTensorSpecProto* StructuredValue::mutable_bounded_tensor_spec_value() {
2662 ::tensorflow::BoundedTensorSpecProto* _msg = _internal_mutable_bounded_tensor_spec_value();
2663 // @@protoc_insertion_point(field_mutable:tensorflow.StructuredValue.bounded_tensor_spec_value)
2664 return _msg;
2665 }
2666
2667 // .tensorflow.ListValue list_value = 51;
2668 inline bool StructuredValue::_internal_has_list_value() const {
2669 return kind_case() == kListValue;
2670 }
2671 inline bool StructuredValue::has_list_value() const {
2672 return _internal_has_list_value();
2673 }
2674 inline void StructuredValue::set_has_list_value() {
2675 _impl_._oneof_case_[0] = kListValue;
2676 }
2677 inline void StructuredValue::clear_list_value() {
2678 if (_internal_has_list_value()) {
2679 if (GetArenaForAllocation() == nullptr) {
2680 delete _impl_.kind_.list_value_;
2681 }
2682 clear_has_kind();
2683 }
2684 }
2685 inline ::tensorflow::ListValue* StructuredValue::release_list_value() {
2686 // @@protoc_insertion_point(field_release:tensorflow.StructuredValue.list_value)
2687 if (_internal_has_list_value()) {
2688 clear_has_kind();
2689 ::tensorflow::ListValue* temp = _impl_.kind_.list_value_;
2690 if (GetArenaForAllocation() != nullptr) {
2691 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
2692 }
2693 _impl_.kind_.list_value_ = nullptr;
2694 return temp;
2695 } else {
2696 return nullptr;
2697 }
2698 }
2699 inline const ::tensorflow::ListValue& StructuredValue::_internal_list_value() const {
2700 return _internal_has_list_value()
2701 ? *_impl_.kind_.list_value_
2702 : reinterpret_cast< ::tensorflow::ListValue&>(::tensorflow::_ListValue_default_instance_);
2703 }
2704 inline const ::tensorflow::ListValue& StructuredValue::list_value() const {
2705 // @@protoc_insertion_point(field_get:tensorflow.StructuredValue.list_value)
2706 return _internal_list_value();
2707 }
2708 inline ::tensorflow::ListValue* StructuredValue::unsafe_arena_release_list_value() {
2709 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.StructuredValue.list_value)
2710 if (_internal_has_list_value()) {
2711 clear_has_kind();
2712 ::tensorflow::ListValue* temp = _impl_.kind_.list_value_;
2713 _impl_.kind_.list_value_ = nullptr;
2714 return temp;
2715 } else {
2716 return nullptr;
2717 }
2718 }
2719 inline void StructuredValue::unsafe_arena_set_allocated_list_value(::tensorflow::ListValue* list_value) {
2720 clear_kind();
2721 if (list_value) {
2722 set_has_list_value();
2723 _impl_.kind_.list_value_ = list_value;
2724 }
2725 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.StructuredValue.list_value)
2726 }
2727 inline ::tensorflow::ListValue* StructuredValue::_internal_mutable_list_value() {
2728 if (!_internal_has_list_value()) {
2729 clear_kind();
2730 set_has_list_value();
2731 _impl_.kind_.list_value_ = CreateMaybeMessage< ::tensorflow::ListValue >(GetArenaForAllocation());
2732 }
2733 return _impl_.kind_.list_value_;
2734 }
mutable_list_value()2735 inline ::tensorflow::ListValue* StructuredValue::mutable_list_value() {
2736 ::tensorflow::ListValue* _msg = _internal_mutable_list_value();
2737 // @@protoc_insertion_point(field_mutable:tensorflow.StructuredValue.list_value)
2738 return _msg;
2739 }
2740
2741 // .tensorflow.TupleValue tuple_value = 52;
_internal_has_tuple_value()2742 inline bool StructuredValue::_internal_has_tuple_value() const {
2743 return kind_case() == kTupleValue;
2744 }
has_tuple_value()2745 inline bool StructuredValue::has_tuple_value() const {
2746 return _internal_has_tuple_value();
2747 }
set_has_tuple_value()2748 inline void StructuredValue::set_has_tuple_value() {
2749 _impl_._oneof_case_[0] = kTupleValue;
2750 }
clear_tuple_value()2751 inline void StructuredValue::clear_tuple_value() {
2752 if (_internal_has_tuple_value()) {
2753 if (GetArenaForAllocation() == nullptr) {
2754 delete _impl_.kind_.tuple_value_;
2755 }
2756 clear_has_kind();
2757 }
2758 }
release_tuple_value()2759 inline ::tensorflow::TupleValue* StructuredValue::release_tuple_value() {
2760 // @@protoc_insertion_point(field_release:tensorflow.StructuredValue.tuple_value)
2761 if (_internal_has_tuple_value()) {
2762 clear_has_kind();
2763 ::tensorflow::TupleValue* temp = _impl_.kind_.tuple_value_;
2764 if (GetArenaForAllocation() != nullptr) {
2765 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
2766 }
2767 _impl_.kind_.tuple_value_ = nullptr;
2768 return temp;
2769 } else {
2770 return nullptr;
2771 }
2772 }
_internal_tuple_value()2773 inline const ::tensorflow::TupleValue& StructuredValue::_internal_tuple_value() const {
2774 return _internal_has_tuple_value()
2775 ? *_impl_.kind_.tuple_value_
2776 : reinterpret_cast< ::tensorflow::TupleValue&>(::tensorflow::_TupleValue_default_instance_);
2777 }
tuple_value()2778 inline const ::tensorflow::TupleValue& StructuredValue::tuple_value() const {
2779 // @@protoc_insertion_point(field_get:tensorflow.StructuredValue.tuple_value)
2780 return _internal_tuple_value();
2781 }
unsafe_arena_release_tuple_value()2782 inline ::tensorflow::TupleValue* StructuredValue::unsafe_arena_release_tuple_value() {
2783 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.StructuredValue.tuple_value)
2784 if (_internal_has_tuple_value()) {
2785 clear_has_kind();
2786 ::tensorflow::TupleValue* temp = _impl_.kind_.tuple_value_;
2787 _impl_.kind_.tuple_value_ = nullptr;
2788 return temp;
2789 } else {
2790 return nullptr;
2791 }
2792 }
unsafe_arena_set_allocated_tuple_value(::tensorflow::TupleValue * tuple_value)2793 inline void StructuredValue::unsafe_arena_set_allocated_tuple_value(::tensorflow::TupleValue* tuple_value) {
2794 clear_kind();
2795 if (tuple_value) {
2796 set_has_tuple_value();
2797 _impl_.kind_.tuple_value_ = tuple_value;
2798 }
2799 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.StructuredValue.tuple_value)
2800 }
_internal_mutable_tuple_value()2801 inline ::tensorflow::TupleValue* StructuredValue::_internal_mutable_tuple_value() {
2802 if (!_internal_has_tuple_value()) {
2803 clear_kind();
2804 set_has_tuple_value();
2805 _impl_.kind_.tuple_value_ = CreateMaybeMessage< ::tensorflow::TupleValue >(GetArenaForAllocation());
2806 }
2807 return _impl_.kind_.tuple_value_;
2808 }
mutable_tuple_value()2809 inline ::tensorflow::TupleValue* StructuredValue::mutable_tuple_value() {
2810 ::tensorflow::TupleValue* _msg = _internal_mutable_tuple_value();
2811 // @@protoc_insertion_point(field_mutable:tensorflow.StructuredValue.tuple_value)
2812 return _msg;
2813 }
2814
2815 // .tensorflow.DictValue dict_value = 53;
_internal_has_dict_value()2816 inline bool StructuredValue::_internal_has_dict_value() const {
2817 return kind_case() == kDictValue;
2818 }
has_dict_value()2819 inline bool StructuredValue::has_dict_value() const {
2820 return _internal_has_dict_value();
2821 }
set_has_dict_value()2822 inline void StructuredValue::set_has_dict_value() {
2823 _impl_._oneof_case_[0] = kDictValue;
2824 }
clear_dict_value()2825 inline void StructuredValue::clear_dict_value() {
2826 if (_internal_has_dict_value()) {
2827 if (GetArenaForAllocation() == nullptr) {
2828 delete _impl_.kind_.dict_value_;
2829 }
2830 clear_has_kind();
2831 }
2832 }
release_dict_value()2833 inline ::tensorflow::DictValue* StructuredValue::release_dict_value() {
2834 // @@protoc_insertion_point(field_release:tensorflow.StructuredValue.dict_value)
2835 if (_internal_has_dict_value()) {
2836 clear_has_kind();
2837 ::tensorflow::DictValue* temp = _impl_.kind_.dict_value_;
2838 if (GetArenaForAllocation() != nullptr) {
2839 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
2840 }
2841 _impl_.kind_.dict_value_ = nullptr;
2842 return temp;
2843 } else {
2844 return nullptr;
2845 }
2846 }
_internal_dict_value()2847 inline const ::tensorflow::DictValue& StructuredValue::_internal_dict_value() const {
2848 return _internal_has_dict_value()
2849 ? *_impl_.kind_.dict_value_
2850 : reinterpret_cast< ::tensorflow::DictValue&>(::tensorflow::_DictValue_default_instance_);
2851 }
dict_value()2852 inline const ::tensorflow::DictValue& StructuredValue::dict_value() const {
2853 // @@protoc_insertion_point(field_get:tensorflow.StructuredValue.dict_value)
2854 return _internal_dict_value();
2855 }
unsafe_arena_release_dict_value()2856 inline ::tensorflow::DictValue* StructuredValue::unsafe_arena_release_dict_value() {
2857 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.StructuredValue.dict_value)
2858 if (_internal_has_dict_value()) {
2859 clear_has_kind();
2860 ::tensorflow::DictValue* temp = _impl_.kind_.dict_value_;
2861 _impl_.kind_.dict_value_ = nullptr;
2862 return temp;
2863 } else {
2864 return nullptr;
2865 }
2866 }
unsafe_arena_set_allocated_dict_value(::tensorflow::DictValue * dict_value)2867 inline void StructuredValue::unsafe_arena_set_allocated_dict_value(::tensorflow::DictValue* dict_value) {
2868 clear_kind();
2869 if (dict_value) {
2870 set_has_dict_value();
2871 _impl_.kind_.dict_value_ = dict_value;
2872 }
2873 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.StructuredValue.dict_value)
2874 }
_internal_mutable_dict_value()2875 inline ::tensorflow::DictValue* StructuredValue::_internal_mutable_dict_value() {
2876 if (!_internal_has_dict_value()) {
2877 clear_kind();
2878 set_has_dict_value();
2879 _impl_.kind_.dict_value_ = CreateMaybeMessage< ::tensorflow::DictValue >(GetArenaForAllocation());
2880 }
2881 return _impl_.kind_.dict_value_;
2882 }
mutable_dict_value()2883 inline ::tensorflow::DictValue* StructuredValue::mutable_dict_value() {
2884 ::tensorflow::DictValue* _msg = _internal_mutable_dict_value();
2885 // @@protoc_insertion_point(field_mutable:tensorflow.StructuredValue.dict_value)
2886 return _msg;
2887 }
2888
2889 // .tensorflow.NamedTupleValue named_tuple_value = 54;
_internal_has_named_tuple_value()2890 inline bool StructuredValue::_internal_has_named_tuple_value() const {
2891 return kind_case() == kNamedTupleValue;
2892 }
has_named_tuple_value()2893 inline bool StructuredValue::has_named_tuple_value() const {
2894 return _internal_has_named_tuple_value();
2895 }
set_has_named_tuple_value()2896 inline void StructuredValue::set_has_named_tuple_value() {
2897 _impl_._oneof_case_[0] = kNamedTupleValue;
2898 }
clear_named_tuple_value()2899 inline void StructuredValue::clear_named_tuple_value() {
2900 if (_internal_has_named_tuple_value()) {
2901 if (GetArenaForAllocation() == nullptr) {
2902 delete _impl_.kind_.named_tuple_value_;
2903 }
2904 clear_has_kind();
2905 }
2906 }
release_named_tuple_value()2907 inline ::tensorflow::NamedTupleValue* StructuredValue::release_named_tuple_value() {
2908 // @@protoc_insertion_point(field_release:tensorflow.StructuredValue.named_tuple_value)
2909 if (_internal_has_named_tuple_value()) {
2910 clear_has_kind();
2911 ::tensorflow::NamedTupleValue* temp = _impl_.kind_.named_tuple_value_;
2912 if (GetArenaForAllocation() != nullptr) {
2913 temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
2914 }
2915 _impl_.kind_.named_tuple_value_ = nullptr;
2916 return temp;
2917 } else {
2918 return nullptr;
2919 }
2920 }
_internal_named_tuple_value()2921 inline const ::tensorflow::NamedTupleValue& StructuredValue::_internal_named_tuple_value() const {
2922 return _internal_has_named_tuple_value()
2923 ? *_impl_.kind_.named_tuple_value_
2924 : reinterpret_cast< ::tensorflow::NamedTupleValue&>(::tensorflow::_NamedTupleValue_default_instance_);
2925 }
named_tuple_value()2926 inline const ::tensorflow::NamedTupleValue& StructuredValue::named_tuple_value() const {
2927 // @@protoc_insertion_point(field_get:tensorflow.StructuredValue.named_tuple_value)
2928 return _internal_named_tuple_value();
2929 }
unsafe_arena_release_named_tuple_value()2930 inline ::tensorflow::NamedTupleValue* StructuredValue::unsafe_arena_release_named_tuple_value() {
2931 // @@protoc_insertion_point(field_unsafe_arena_release:tensorflow.StructuredValue.named_tuple_value)
2932 if (_internal_has_named_tuple_value()) {
2933 clear_has_kind();
2934 ::tensorflow::NamedTupleValue* temp = _impl_.kind_.named_tuple_value_;
2935 _impl_.kind_.named_tuple_value_ = nullptr;
2936 return temp;
2937 } else {
2938 return nullptr;
2939 }
2940 }
unsafe_arena_set_allocated_named_tuple_value(::tensorflow::NamedTupleValue * named_tuple_value)2941 inline void StructuredValue::unsafe_arena_set_allocated_named_tuple_value(::tensorflow::NamedTupleValue* named_tuple_value) {
2942 clear_kind();
2943 if (named_tuple_value) {
2944 set_has_named_tuple_value();
2945 _impl_.kind_.named_tuple_value_ = named_tuple_value;
2946 }
2947 // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.StructuredValue.named_tuple_value)
2948 }
_internal_mutable_named_tuple_value()2949 inline ::tensorflow::NamedTupleValue* StructuredValue::_internal_mutable_named_tuple_value() {
2950 if (!_internal_has_named_tuple_value()) {
2951 clear_kind();
2952 set_has_named_tuple_value();
2953 _impl_.kind_.named_tuple_value_ = CreateMaybeMessage< ::tensorflow::NamedTupleValue >(GetArenaForAllocation());
2954 }
2955 return _impl_.kind_.named_tuple_value_;
2956 }
mutable_named_tuple_value()2957 inline ::tensorflow::NamedTupleValue* StructuredValue::mutable_named_tuple_value() {
2958 ::tensorflow::NamedTupleValue* _msg = _internal_mutable_named_tuple_value();
2959 // @@protoc_insertion_point(field_mutable:tensorflow.StructuredValue.named_tuple_value)
2960 return _msg;
2961 }
2962
has_kind()2963 inline bool StructuredValue::has_kind() const {
2964 return kind_case() != KIND_NOT_SET;
2965 }
clear_has_kind()2966 inline void StructuredValue::clear_has_kind() {
2967 _impl_._oneof_case_[0] = KIND_NOT_SET;
2968 }
kind_case()2969 inline StructuredValue::KindCase StructuredValue::kind_case() const {
2970 return StructuredValue::KindCase(_impl_._oneof_case_[0]);
2971 }
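// Example (editorial sketch, not emitted by protoc): selecting and inspecting
// the `kind` oneof through the accessors above; `sv` is a placeholder.
//
//   tensorflow::StructuredValue sv;
//   sv.mutable_list_value();            // switches the oneof to kListValue
//   if (sv.kind_case() == tensorflow::StructuredValue::kListValue) {
//     const tensorflow::ListValue& v = sv.list_value();  // const access, never allocates
//     (void)v;
//   }
//   sv.clear_kind();                    // back to KIND_NOT_SET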
// -------------------------------------------------------------------

// NoneValue

// -------------------------------------------------------------------

// ListValue

// repeated .tensorflow.StructuredValue values = 1;
inline int ListValue::_internal_values_size() const {
  return _impl_.values_.size();
}
inline int ListValue::values_size() const {
  return _internal_values_size();
}
inline void ListValue::clear_values() {
  _impl_.values_.Clear();
}
inline ::tensorflow::StructuredValue* ListValue::mutable_values(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.ListValue.values)
  return _impl_.values_.Mutable(index);
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::StructuredValue >*
ListValue::mutable_values() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.ListValue.values)
  return &_impl_.values_;
}
inline const ::tensorflow::StructuredValue& ListValue::_internal_values(int index) const {
  return _impl_.values_.Get(index);
}
inline const ::tensorflow::StructuredValue& ListValue::values(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.ListValue.values)
  return _internal_values(index);
}
inline ::tensorflow::StructuredValue* ListValue::_internal_add_values() {
  return _impl_.values_.Add();
}
inline ::tensorflow::StructuredValue* ListValue::add_values() {
  ::tensorflow::StructuredValue* _add = _internal_add_values();
  // @@protoc_insertion_point(field_add:tensorflow.ListValue.values)
  return _add;
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::StructuredValue >&
ListValue::values() const {
  // @@protoc_insertion_point(field_list:tensorflow.ListValue.values)
  return _impl_.values_;
}
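// Example (editorial sketch): typical use of the repeated `values` accessors
// above; add_values() appends and returns a mutable element.
//
//   tensorflow::ListValue list;
//   list.add_values()->mutable_dict_value();          // element 0 holds a DictValue
//   for (int i = 0; i < list.values_size(); ++i) {
//     const tensorflow::StructuredValue& v = list.values(i);
//     (void)v;
//   }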

// -------------------------------------------------------------------

// TupleValue

// repeated .tensorflow.StructuredValue values = 1;
inline int TupleValue::_internal_values_size() const {
  return _impl_.values_.size();
}
inline int TupleValue::values_size() const {
  return _internal_values_size();
}
inline void TupleValue::clear_values() {
  _impl_.values_.Clear();
}
inline ::tensorflow::StructuredValue* TupleValue::mutable_values(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.TupleValue.values)
  return _impl_.values_.Mutable(index);
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::StructuredValue >*
TupleValue::mutable_values() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.TupleValue.values)
  return &_impl_.values_;
}
inline const ::tensorflow::StructuredValue& TupleValue::_internal_values(int index) const {
  return _impl_.values_.Get(index);
}
inline const ::tensorflow::StructuredValue& TupleValue::values(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.TupleValue.values)
  return _internal_values(index);
}
inline ::tensorflow::StructuredValue* TupleValue::_internal_add_values() {
  return _impl_.values_.Add();
}
inline ::tensorflow::StructuredValue* TupleValue::add_values() {
  ::tensorflow::StructuredValue* _add = _internal_add_values();
  // @@protoc_insertion_point(field_add:tensorflow.TupleValue.values)
  return _add;
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::StructuredValue >&
TupleValue::values() const {
  // @@protoc_insertion_point(field_list:tensorflow.TupleValue.values)
  return _impl_.values_;
}

// -------------------------------------------------------------------

// -------------------------------------------------------------------

// DictValue

// map<string, .tensorflow.StructuredValue> fields = 1;
inline int DictValue::_internal_fields_size() const {
  return _impl_.fields_.size();
}
inline int DictValue::fields_size() const {
  return _internal_fields_size();
}
inline void DictValue::clear_fields() {
  _impl_.fields_.Clear();
}
inline const ::PROTOBUF_NAMESPACE_ID::Map< std::string, ::tensorflow::StructuredValue >&
DictValue::_internal_fields() const {
  return _impl_.fields_.GetMap();
}
inline const ::PROTOBUF_NAMESPACE_ID::Map< std::string, ::tensorflow::StructuredValue >&
DictValue::fields() const {
  // @@protoc_insertion_point(field_map:tensorflow.DictValue.fields)
  return _internal_fields();
}
inline ::PROTOBUF_NAMESPACE_ID::Map< std::string, ::tensorflow::StructuredValue >*
DictValue::_internal_mutable_fields() {
  return _impl_.fields_.MutableMap();
}
inline ::PROTOBUF_NAMESPACE_ID::Map< std::string, ::tensorflow::StructuredValue >*
DictValue::mutable_fields() {
  // @@protoc_insertion_point(field_mutable_map:tensorflow.DictValue.fields)
  return _internal_mutable_fields();
}
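// Example (editorial sketch): `fields` is exposed as a protobuf Map, so
// operator[] on mutable_fields() inserts a default StructuredValue if missing.
//
//   tensorflow::DictValue dict;
//   (*dict.mutable_fields())["items"].mutable_list_value();   // insert-or-get, then set the kind
//   auto it = dict.fields().find("items");
//   if (it != dict.fields().end()) { /* it->second is a StructuredValue */ }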

// -------------------------------------------------------------------

// PairValue

// string key = 1;
inline void PairValue::clear_key() {
  _impl_.key_.ClearToEmpty();
}
inline const std::string& PairValue::key() const {
  // @@protoc_insertion_point(field_get:tensorflow.PairValue.key)
  return _internal_key();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void PairValue::set_key(ArgT0&& arg0, ArgT... args) {

  _impl_.key_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.PairValue.key)
}
inline std::string* PairValue::mutable_key() {
  std::string* _s = _internal_mutable_key();
  // @@protoc_insertion_point(field_mutable:tensorflow.PairValue.key)
  return _s;
}
inline const std::string& PairValue::_internal_key() const {
  return _impl_.key_.Get();
}
inline void PairValue::_internal_set_key(const std::string& value) {

  _impl_.key_.Set(value, GetArenaForAllocation());
}
inline std::string* PairValue::_internal_mutable_key() {

  return _impl_.key_.Mutable(GetArenaForAllocation());
}
inline std::string* PairValue::release_key() {
  // @@protoc_insertion_point(field_release:tensorflow.PairValue.key)
  return _impl_.key_.Release();
}
inline void PairValue::set_allocated_key(std::string* key) {
  _impl_.key_.SetAllocated(key, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.key_.IsDefault()) {
    _impl_.key_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.PairValue.key)
}

// .tensorflow.StructuredValue value = 2;
inline bool PairValue::_internal_has_value() const {
  return this != internal_default_instance() && _impl_.value_ != nullptr;
}
inline bool PairValue::has_value() const {
  return _internal_has_value();
}
inline void PairValue::clear_value() {
  if (GetArenaForAllocation() == nullptr && _impl_.value_ != nullptr) {
    delete _impl_.value_;
  }
  _impl_.value_ = nullptr;
}
inline const ::tensorflow::StructuredValue& PairValue::_internal_value() const {
  const ::tensorflow::StructuredValue* p = _impl_.value_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::StructuredValue&>(
      ::tensorflow::_StructuredValue_default_instance_);
}
inline const ::tensorflow::StructuredValue& PairValue::value() const {
  // @@protoc_insertion_point(field_get:tensorflow.PairValue.value)
  return _internal_value();
}
inline void PairValue::unsafe_arena_set_allocated_value(
    ::tensorflow::StructuredValue* value) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.value_);
  }
  _impl_.value_ = value;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.PairValue.value)
}
inline ::tensorflow::StructuredValue* PairValue::release_value() {

  ::tensorflow::StructuredValue* temp = _impl_.value_;
  _impl_.value_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old = reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
inline ::tensorflow::StructuredValue* PairValue::unsafe_arena_release_value() {
  // @@protoc_insertion_point(field_release:tensorflow.PairValue.value)

  ::tensorflow::StructuredValue* temp = _impl_.value_;
  _impl_.value_ = nullptr;
  return temp;
}
inline ::tensorflow::StructuredValue* PairValue::_internal_mutable_value() {

  if (_impl_.value_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::StructuredValue>(GetArenaForAllocation());
    _impl_.value_ = p;
  }
  return _impl_.value_;
}
inline ::tensorflow::StructuredValue* PairValue::mutable_value() {
  ::tensorflow::StructuredValue* _msg = _internal_mutable_value();
  // @@protoc_insertion_point(field_mutable:tensorflow.PairValue.value)
  return _msg;
}
inline void PairValue::set_allocated_value(::tensorflow::StructuredValue* value) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete _impl_.value_;
  }
  if (value) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(value);
    if (message_arena != submessage_arena) {
      value = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, value, submessage_arena);
    }

  } else {

  }
  _impl_.value_ = value;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.PairValue.value)
}
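// Example (editorial sketch): filling a PairValue. mutable_value() lazily
// creates the submessage; set_allocated_value() instead takes ownership of a
// heap-allocated StructuredValue (with the cross-arena copy handled above).
//
//   tensorflow::PairValue pair;
//   pair.set_key("shape");
//   pair.mutable_value()->mutable_tuple_value();   // submessage owned by `pair`
//   bool ok = pair.has_value();                    // true once the submessage exists
//   (void)ok;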

// -------------------------------------------------------------------

// NamedTupleValue

// string name = 1;
inline void NamedTupleValue::clear_name() {
  _impl_.name_.ClearToEmpty();
}
inline const std::string& NamedTupleValue::name() const {
  // @@protoc_insertion_point(field_get:tensorflow.NamedTupleValue.name)
  return _internal_name();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void NamedTupleValue::set_name(ArgT0&& arg0, ArgT... args) {

  _impl_.name_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.NamedTupleValue.name)
}
inline std::string* NamedTupleValue::mutable_name() {
  std::string* _s = _internal_mutable_name();
  // @@protoc_insertion_point(field_mutable:tensorflow.NamedTupleValue.name)
  return _s;
}
inline const std::string& NamedTupleValue::_internal_name() const {
  return _impl_.name_.Get();
}
inline void NamedTupleValue::_internal_set_name(const std::string& value) {

  _impl_.name_.Set(value, GetArenaForAllocation());
}
inline std::string* NamedTupleValue::_internal_mutable_name() {

  return _impl_.name_.Mutable(GetArenaForAllocation());
}
inline std::string* NamedTupleValue::release_name() {
  // @@protoc_insertion_point(field_release:tensorflow.NamedTupleValue.name)
  return _impl_.name_.Release();
}
inline void NamedTupleValue::set_allocated_name(std::string* name) {
  _impl_.name_.SetAllocated(name, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.name_.IsDefault()) {
    _impl_.name_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.NamedTupleValue.name)
}

// repeated .tensorflow.PairValue values = 2;
inline int NamedTupleValue::_internal_values_size() const {
  return _impl_.values_.size();
}
inline int NamedTupleValue::values_size() const {
  return _internal_values_size();
}
inline void NamedTupleValue::clear_values() {
  _impl_.values_.Clear();
}
inline ::tensorflow::PairValue* NamedTupleValue::mutable_values(int index) {
  // @@protoc_insertion_point(field_mutable:tensorflow.NamedTupleValue.values)
  return _impl_.values_.Mutable(index);
}
inline ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::PairValue >*
NamedTupleValue::mutable_values() {
  // @@protoc_insertion_point(field_mutable_list:tensorflow.NamedTupleValue.values)
  return &_impl_.values_;
}
inline const ::tensorflow::PairValue& NamedTupleValue::_internal_values(int index) const {
  return _impl_.values_.Get(index);
}
inline const ::tensorflow::PairValue& NamedTupleValue::values(int index) const {
  // @@protoc_insertion_point(field_get:tensorflow.NamedTupleValue.values)
  return _internal_values(index);
}
inline ::tensorflow::PairValue* NamedTupleValue::_internal_add_values() {
  return _impl_.values_.Add();
}
inline ::tensorflow::PairValue* NamedTupleValue::add_values() {
  ::tensorflow::PairValue* _add = _internal_add_values();
  // @@protoc_insertion_point(field_add:tensorflow.NamedTupleValue.values)
  return _add;
}
inline const ::PROTOBUF_NAMESPACE_ID::RepeatedPtrField< ::tensorflow::PairValue >&
NamedTupleValue::values() const {
  // @@protoc_insertion_point(field_list:tensorflow.NamedTupleValue.values)
  return _impl_.values_;
}
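// Example (editorial sketch): building a NamedTupleValue from the `name` and
// repeated `values` accessors above.
//
//   tensorflow::NamedTupleValue nt;
//   nt.set_name("Point");
//   tensorflow::PairValue* x = nt.add_values();
//   x->set_key("x");
//   x->mutable_value()->mutable_dict_value();   // each element is a key/StructuredValue pair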

// -------------------------------------------------------------------

// TensorSpecProto

// string name = 1;
inline void TensorSpecProto::clear_name() {
  _impl_.name_.ClearToEmpty();
}
inline const std::string& TensorSpecProto::name() const {
  // @@protoc_insertion_point(field_get:tensorflow.TensorSpecProto.name)
  return _internal_name();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void TensorSpecProto::set_name(ArgT0&& arg0, ArgT... args) {

  _impl_.name_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.TensorSpecProto.name)
}
inline std::string* TensorSpecProto::mutable_name() {
  std::string* _s = _internal_mutable_name();
  // @@protoc_insertion_point(field_mutable:tensorflow.TensorSpecProto.name)
  return _s;
}
inline const std::string& TensorSpecProto::_internal_name() const {
  return _impl_.name_.Get();
}
inline void TensorSpecProto::_internal_set_name(const std::string& value) {

  _impl_.name_.Set(value, GetArenaForAllocation());
}
inline std::string* TensorSpecProto::_internal_mutable_name() {

  return _impl_.name_.Mutable(GetArenaForAllocation());
}
inline std::string* TensorSpecProto::release_name() {
  // @@protoc_insertion_point(field_release:tensorflow.TensorSpecProto.name)
  return _impl_.name_.Release();
}
inline void TensorSpecProto::set_allocated_name(std::string* name) {
  _impl_.name_.SetAllocated(name, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.name_.IsDefault()) {
    _impl_.name_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.TensorSpecProto.name)
}

// .tensorflow.TensorShapeProto shape = 2;
inline bool TensorSpecProto::_internal_has_shape() const {
  return this != internal_default_instance() && _impl_.shape_ != nullptr;
}
inline bool TensorSpecProto::has_shape() const {
  return _internal_has_shape();
}
inline const ::tensorflow::TensorShapeProto& TensorSpecProto::_internal_shape() const {
  const ::tensorflow::TensorShapeProto* p = _impl_.shape_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::TensorShapeProto&>(
      ::tensorflow::_TensorShapeProto_default_instance_);
}
inline const ::tensorflow::TensorShapeProto& TensorSpecProto::shape() const {
  // @@protoc_insertion_point(field_get:tensorflow.TensorSpecProto.shape)
  return _internal_shape();
}
inline void TensorSpecProto::unsafe_arena_set_allocated_shape(
    ::tensorflow::TensorShapeProto* shape) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.shape_);
  }
  _impl_.shape_ = shape;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.TensorSpecProto.shape)
}
inline ::tensorflow::TensorShapeProto* TensorSpecProto::release_shape() {

  ::tensorflow::TensorShapeProto* temp = _impl_.shape_;
  _impl_.shape_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old = reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
inline ::tensorflow::TensorShapeProto* TensorSpecProto::unsafe_arena_release_shape() {
  // @@protoc_insertion_point(field_release:tensorflow.TensorSpecProto.shape)

  ::tensorflow::TensorShapeProto* temp = _impl_.shape_;
  _impl_.shape_ = nullptr;
  return temp;
}
inline ::tensorflow::TensorShapeProto* TensorSpecProto::_internal_mutable_shape() {

  if (_impl_.shape_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::TensorShapeProto>(GetArenaForAllocation());
    _impl_.shape_ = p;
  }
  return _impl_.shape_;
}
inline ::tensorflow::TensorShapeProto* TensorSpecProto::mutable_shape() {
  ::tensorflow::TensorShapeProto* _msg = _internal_mutable_shape();
  // @@protoc_insertion_point(field_mutable:tensorflow.TensorSpecProto.shape)
  return _msg;
}
inline void TensorSpecProto::set_allocated_shape(::tensorflow::TensorShapeProto* shape) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.shape_);
  }
  if (shape) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(
            reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(shape));
    if (message_arena != submessage_arena) {
      shape = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, shape, submessage_arena);
    }

  } else {

  }
  _impl_.shape_ = shape;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.TensorSpecProto.shape)
}

// .tensorflow.DataType dtype = 3;
inline void TensorSpecProto::clear_dtype() {
  _impl_.dtype_ = 0;
}
inline ::tensorflow::DataType TensorSpecProto::_internal_dtype() const {
  return static_cast< ::tensorflow::DataType >(_impl_.dtype_);
}
inline ::tensorflow::DataType TensorSpecProto::dtype() const {
  // @@protoc_insertion_point(field_get:tensorflow.TensorSpecProto.dtype)
  return _internal_dtype();
}
inline void TensorSpecProto::_internal_set_dtype(::tensorflow::DataType value) {

  _impl_.dtype_ = value;
}
inline void TensorSpecProto::set_dtype(::tensorflow::DataType value) {
  _internal_set_dtype(value);
  // @@protoc_insertion_point(field_set:tensorflow.TensorSpecProto.dtype)
}
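// Example (editorial sketch): populating a TensorSpecProto. The TensorShapeProto
// and DataType members used below come from tensor_shape.proto / types.proto and
// are assumed here.
//
//   tensorflow::TensorSpecProto spec;
//   spec.set_name("input");
//   spec.mutable_shape()->add_dim()->set_size(-1);   // -1 marks an unknown dimension
//   spec.set_dtype(tensorflow::DT_FLOAT);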

// -------------------------------------------------------------------

// BoundedTensorSpecProto

// string name = 1;
inline void BoundedTensorSpecProto::clear_name() {
  _impl_.name_.ClearToEmpty();
}
inline const std::string& BoundedTensorSpecProto::name() const {
  // @@protoc_insertion_point(field_get:tensorflow.BoundedTensorSpecProto.name)
  return _internal_name();
}
template <typename ArgT0, typename... ArgT>
inline PROTOBUF_ALWAYS_INLINE
void BoundedTensorSpecProto::set_name(ArgT0&& arg0, ArgT... args) {

  _impl_.name_.Set(static_cast<ArgT0 &&>(arg0), args..., GetArenaForAllocation());
  // @@protoc_insertion_point(field_set:tensorflow.BoundedTensorSpecProto.name)
}
inline std::string* BoundedTensorSpecProto::mutable_name() {
  std::string* _s = _internal_mutable_name();
  // @@protoc_insertion_point(field_mutable:tensorflow.BoundedTensorSpecProto.name)
  return _s;
}
inline const std::string& BoundedTensorSpecProto::_internal_name() const {
  return _impl_.name_.Get();
}
inline void BoundedTensorSpecProto::_internal_set_name(const std::string& value) {

  _impl_.name_.Set(value, GetArenaForAllocation());
}
inline std::string* BoundedTensorSpecProto::_internal_mutable_name() {

  return _impl_.name_.Mutable(GetArenaForAllocation());
}
inline std::string* BoundedTensorSpecProto::release_name() {
  // @@protoc_insertion_point(field_release:tensorflow.BoundedTensorSpecProto.name)
  return _impl_.name_.Release();
}
inline void BoundedTensorSpecProto::set_allocated_name(std::string* name) {
  _impl_.name_.SetAllocated(name, GetArenaForAllocation());
#ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (_impl_.name_.IsDefault()) {
    _impl_.name_.Set("", GetArenaForAllocation());
  }
#endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  // @@protoc_insertion_point(field_set_allocated:tensorflow.BoundedTensorSpecProto.name)
}

// .tensorflow.TensorShapeProto shape = 2;
inline bool BoundedTensorSpecProto::_internal_has_shape() const {
  return this != internal_default_instance() && _impl_.shape_ != nullptr;
}
inline bool BoundedTensorSpecProto::has_shape() const {
  return _internal_has_shape();
}
inline const ::tensorflow::TensorShapeProto& BoundedTensorSpecProto::_internal_shape() const {
  const ::tensorflow::TensorShapeProto* p = _impl_.shape_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::TensorShapeProto&>(
      ::tensorflow::_TensorShapeProto_default_instance_);
}
inline const ::tensorflow::TensorShapeProto& BoundedTensorSpecProto::shape() const {
  // @@protoc_insertion_point(field_get:tensorflow.BoundedTensorSpecProto.shape)
  return _internal_shape();
}
inline void BoundedTensorSpecProto::unsafe_arena_set_allocated_shape(
    ::tensorflow::TensorShapeProto* shape) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.shape_);
  }
  _impl_.shape_ = shape;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.BoundedTensorSpecProto.shape)
}
inline ::tensorflow::TensorShapeProto* BoundedTensorSpecProto::release_shape() {

  ::tensorflow::TensorShapeProto* temp = _impl_.shape_;
  _impl_.shape_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old = reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
inline ::tensorflow::TensorShapeProto* BoundedTensorSpecProto::unsafe_arena_release_shape() {
  // @@protoc_insertion_point(field_release:tensorflow.BoundedTensorSpecProto.shape)

  ::tensorflow::TensorShapeProto* temp = _impl_.shape_;
  _impl_.shape_ = nullptr;
  return temp;
}
inline ::tensorflow::TensorShapeProto* BoundedTensorSpecProto::_internal_mutable_shape() {

  if (_impl_.shape_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::TensorShapeProto>(GetArenaForAllocation());
    _impl_.shape_ = p;
  }
  return _impl_.shape_;
}
inline ::tensorflow::TensorShapeProto* BoundedTensorSpecProto::mutable_shape() {
  ::tensorflow::TensorShapeProto* _msg = _internal_mutable_shape();
  // @@protoc_insertion_point(field_mutable:tensorflow.BoundedTensorSpecProto.shape)
  return _msg;
}
inline void BoundedTensorSpecProto::set_allocated_shape(::tensorflow::TensorShapeProto* shape) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.shape_);
  }
  if (shape) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(
            reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(shape));
    if (message_arena != submessage_arena) {
      shape = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, shape, submessage_arena);
    }

  } else {

  }
  _impl_.shape_ = shape;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.BoundedTensorSpecProto.shape)
}

// .tensorflow.DataType dtype = 3;
inline void BoundedTensorSpecProto::clear_dtype() {
  _impl_.dtype_ = 0;
}
inline ::tensorflow::DataType BoundedTensorSpecProto::_internal_dtype() const {
  return static_cast< ::tensorflow::DataType >(_impl_.dtype_);
}
inline ::tensorflow::DataType BoundedTensorSpecProto::dtype() const {
  // @@protoc_insertion_point(field_get:tensorflow.BoundedTensorSpecProto.dtype)
  return _internal_dtype();
}
inline void BoundedTensorSpecProto::_internal_set_dtype(::tensorflow::DataType value) {

  _impl_.dtype_ = value;
}
inline void BoundedTensorSpecProto::set_dtype(::tensorflow::DataType value) {
  _internal_set_dtype(value);
  // @@protoc_insertion_point(field_set:tensorflow.BoundedTensorSpecProto.dtype)
}

// .tensorflow.TensorProto minimum = 4;
inline bool BoundedTensorSpecProto::_internal_has_minimum() const {
  return this != internal_default_instance() && _impl_.minimum_ != nullptr;
}
inline bool BoundedTensorSpecProto::has_minimum() const {
  return _internal_has_minimum();
}
inline const ::tensorflow::TensorProto& BoundedTensorSpecProto::_internal_minimum() const {
  const ::tensorflow::TensorProto* p = _impl_.minimum_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::TensorProto&>(
      ::tensorflow::_TensorProto_default_instance_);
}
inline const ::tensorflow::TensorProto& BoundedTensorSpecProto::minimum() const {
  // @@protoc_insertion_point(field_get:tensorflow.BoundedTensorSpecProto.minimum)
  return _internal_minimum();
}
inline void BoundedTensorSpecProto::unsafe_arena_set_allocated_minimum(
    ::tensorflow::TensorProto* minimum) {
  if (GetArenaForAllocation() == nullptr) {
    delete reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.minimum_);
  }
  _impl_.minimum_ = minimum;
  // @@protoc_insertion_point(field_unsafe_arena_set_allocated:tensorflow.BoundedTensorSpecProto.minimum)
}
inline ::tensorflow::TensorProto* BoundedTensorSpecProto::release_minimum() {

  ::tensorflow::TensorProto* temp = _impl_.minimum_;
  _impl_.minimum_ = nullptr;
#ifdef PROTOBUF_FORCE_COPY_IN_RELEASE
  auto* old = reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(temp);
  temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  if (GetArenaForAllocation() == nullptr) { delete old; }
#else // PROTOBUF_FORCE_COPY_IN_RELEASE
  if (GetArenaForAllocation() != nullptr) {
    temp = ::PROTOBUF_NAMESPACE_ID::internal::DuplicateIfNonNull(temp);
  }
#endif // !PROTOBUF_FORCE_COPY_IN_RELEASE
  return temp;
}
inline ::tensorflow::TensorProto* BoundedTensorSpecProto::unsafe_arena_release_minimum() {
  // @@protoc_insertion_point(field_release:tensorflow.BoundedTensorSpecProto.minimum)

  ::tensorflow::TensorProto* temp = _impl_.minimum_;
  _impl_.minimum_ = nullptr;
  return temp;
}
inline ::tensorflow::TensorProto* BoundedTensorSpecProto::_internal_mutable_minimum() {

  if (_impl_.minimum_ == nullptr) {
    auto* p = CreateMaybeMessage<::tensorflow::TensorProto>(GetArenaForAllocation());
    _impl_.minimum_ = p;
  }
  return _impl_.minimum_;
}
inline ::tensorflow::TensorProto* BoundedTensorSpecProto::mutable_minimum() {
  ::tensorflow::TensorProto* _msg = _internal_mutable_minimum();
  // @@protoc_insertion_point(field_mutable:tensorflow.BoundedTensorSpecProto.minimum)
  return _msg;
}
inline void BoundedTensorSpecProto::set_allocated_minimum(::tensorflow::TensorProto* minimum) {
  ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
  if (message_arena == nullptr) {
    delete reinterpret_cast< ::PROTOBUF_NAMESPACE_ID::MessageLite*>(_impl_.minimum_);
  }
  if (minimum) {
    ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
        ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(
            reinterpret_cast<::PROTOBUF_NAMESPACE_ID::MessageLite*>(minimum));
    if (message_arena != submessage_arena) {
      minimum = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
          message_arena, minimum, submessage_arena);
    }

  } else {

  }
  _impl_.minimum_ = minimum;
  // @@protoc_insertion_point(field_set_allocated:tensorflow.BoundedTensorSpecProto.minimum)
}
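// Example (editorial sketch): set_allocated_minimum() above takes ownership of
// a heap-allocated TensorProto (copying across arenas when needed), while
// mutable_minimum() allocates lazily. TensorProto fields come from tensor.proto
// and are assumed here.
//
//   tensorflow::BoundedTensorSpecProto bounded;
//   bounded.set_dtype(tensorflow::DT_INT32);
//   auto* minimum = new tensorflow::TensorProto;
//   minimum->set_dtype(tensorflow::DT_INT32);
//   minimum->add_int_val(0);
//   bounded.set_allocated_minimum(minimum);   // `bounded` now owns `minimum`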

// .tensorflow.TensorProto maximum = 5;
inline bool BoundedTensorSpecProto::_internal_has_maximum() const {
  return this != internal_default_instance() && _impl_.maximum_ != nullptr;
}
inline bool BoundedTensorSpecProto::has_maximum() const {
  return _internal_has_maximum();
}
inline const ::tensorflow::TensorProto& BoundedTensorSpecProto::_internal_maximum() const {
  const ::tensorflow::TensorProto* p = _impl_.maximum_;
  return p != nullptr ? *p : reinterpret_cast<const ::tensorflow::TensorProto&>(
      ::tensorflow::_TensorProto_default_instance_);
}
inline const ::tensorflow::TensorProto& BoundedTensorSpecProto::maximum() const {