1 // Generated by the protocol buffer compiler. DO NOT EDIT!
2 // source: tensorflow/compiler/xla/stream_executor/dnn.proto
3
4 #include "tensorflow/compiler/xla/stream_executor/dnn.pb.h"
5
6 #include <algorithm>
7 #include <cstdint>
8
9 #include <google/protobuf/io/coded_stream.h>
10 #include <google/protobuf/extension_set.h>
11 #include <google/protobuf/wire_format_lite.h>
12 #include <google/protobuf/io/zero_copy_stream_impl_lite.h>
13 // @@protoc_insertion_point(includes)
14 #include <google/protobuf/port_def.inc>
15
16 PROTOBUF_PRAGMA_INIT_SEG
17
18 namespace _pb = ::PROTOBUF_NAMESPACE_ID;
19 namespace _pbi = _pb::internal;
20
21 namespace stream_executor {
22 namespace dnn {
23 PROTOBUF_CONSTEXPR TensorDescriptorProto::TensorDescriptorProto(
24 ::_pbi::ConstantInitialized): _impl_{
25 /*decltype(_impl_.dimensions_)*/{}
26 , /*decltype(_impl_._dimensions_cached_byte_size_)*/{0}
27 , /*decltype(_impl_.data_type_)*/0
28 , /*decltype(_impl_.layout_oneof_)*/{}
29 , /*decltype(_impl_._cached_size_)*/{}
30 , /*decltype(_impl_._oneof_case_)*/{}} {}
31 struct TensorDescriptorProtoDefaultTypeInternal {
32 PROTOBUF_CONSTEXPR TensorDescriptorProtoDefaultTypeInternal()
33 : _instance(::_pbi::ConstantInitialized{}) {}
34 ~TensorDescriptorProtoDefaultTypeInternal() {}
35 union { // NOLINT(misc-non-private-member-variables-in-classes)
36 TensorDescriptorProto _instance;
37 };
38 };
39 PROTOBUF_ATTRIBUTE_NO_DESTROY PROTOBUF_CONSTINIT PROTOBUF_ATTRIBUTE_INIT_PRIORITY1 TensorDescriptorProtoDefaultTypeInternal _TensorDescriptorProto_default_instance_;
40 PROTOBUF_CONSTEXPR AlgorithmProto_TuningKnobsEntry_DoNotUse::AlgorithmProto_TuningKnobsEntry_DoNotUse(
41 ::_pbi::ConstantInitialized) {}
42 struct AlgorithmProto_TuningKnobsEntry_DoNotUseDefaultTypeInternal {
43 PROTOBUF_CONSTEXPR AlgorithmProto_TuningKnobsEntry_DoNotUseDefaultTypeInternal()
44 : _instance(::_pbi::ConstantInitialized{}) {}
45 ~AlgorithmProto_TuningKnobsEntry_DoNotUseDefaultTypeInternal() {}
46 union { // NOLINT(misc-non-private-member-variables-in-classes)
47 AlgorithmProto_TuningKnobsEntry_DoNotUse _instance;
48 };
49 };
50 PROTOBUF_ATTRIBUTE_NO_DESTROY PROTOBUF_CONSTINIT PROTOBUF_ATTRIBUTE_INIT_PRIORITY1 AlgorithmProto_TuningKnobsEntry_DoNotUseDefaultTypeInternal _AlgorithmProto_TuningKnobsEntry_DoNotUse_default_instance_;
51 PROTOBUF_CONSTEXPR AlgorithmProto::AlgorithmProto(
52 ::_pbi::ConstantInitialized): _impl_{
53 /*decltype(_impl_.tuning_knobs_)*/{}
54 , /*decltype(_impl_.workspace_size_)*/nullptr
55 , /*decltype(_impl_.algo_id_)*/::int64_t{0}
56 , /*decltype(_impl_.math_type_)*/0
57 , /*decltype(_impl_.is_cudnn_frontend_)*/false
58 , /*decltype(_impl_._cached_size_)*/{}} {}
59 struct AlgorithmProtoDefaultTypeInternal {
60 PROTOBUF_CONSTEXPR AlgorithmProtoDefaultTypeInternal()
61 : _instance(::_pbi::ConstantInitialized{}) {}
62 ~AlgorithmProtoDefaultTypeInternal() {}
63 union { // NOLINT(misc-non-private-member-variables-in-classes)
64 AlgorithmProto _instance;
65 };
66 };
67 PROTOBUF_ATTRIBUTE_NO_DESTROY PROTOBUF_CONSTINIT PROTOBUF_ATTRIBUTE_INIT_PRIORITY1 AlgorithmProtoDefaultTypeInternal _AlgorithmProto_default_instance_;
68 PROTOBUF_CONSTEXPR AlgorithmConfigProto::AlgorithmConfigProto(
69 ::_pbi::ConstantInitialized): _impl_{
70 /*decltype(_impl_.optional_algorithm_)*/{}
71 , /*decltype(_impl_.optional_algorithm_no_scratch_)*/{}
72 , /*decltype(_impl_.optional_scratch_size_)*/{}
73 , /*decltype(_impl_._cached_size_)*/{}
74 , /*decltype(_impl_._oneof_case_)*/{}} {}
75 struct AlgorithmConfigProtoDefaultTypeInternal {
76 PROTOBUF_CONSTEXPR AlgorithmConfigProtoDefaultTypeInternal()
77 : _instance(::_pbi::ConstantInitialized{}) {}
78 ~AlgorithmConfigProtoDefaultTypeInternal() {}
79 union { // NOLINT(misc-non-private-member-variables-in-classes)
80 AlgorithmConfigProto _instance;
81 };
82 };
83 PROTOBUF_ATTRIBUTE_NO_DESTROY PROTOBUF_CONSTINIT PROTOBUF_ATTRIBUTE_INIT_PRIORITY1 AlgorithmConfigProtoDefaultTypeInternal _AlgorithmConfigProto_default_instance_;
84 PROTOBUF_CONSTEXPR ConvolutionDescriptorProto::ConvolutionDescriptorProto(
85 ::_pbi::ConstantInitialized): _impl_{
86 /*decltype(_impl_.paddings_)*/{}
87 , /*decltype(_impl_._paddings_cached_byte_size_)*/{0}
88 , /*decltype(_impl_.strides_)*/{}
89 , /*decltype(_impl_._strides_cached_byte_size_)*/{0}
90 , /*decltype(_impl_.dilations_)*/{}
91 , /*decltype(_impl_._dilations_cached_byte_size_)*/{0}
92 , /*decltype(_impl_.name_)*/{&::_pbi::fixed_address_empty_string, ::_pbi::ConstantInitialized{}}
93 , /*decltype(_impl_.compute_mode_)*/0
94 , /*decltype(_impl_.group_count_)*/0
95 , /*decltype(_impl_.convolution_mode_)*/0
96 , /*decltype(_impl_._cached_size_)*/{}} {}
97 struct ConvolutionDescriptorProtoDefaultTypeInternal {
98 PROTOBUF_CONSTEXPR ConvolutionDescriptorProtoDefaultTypeInternal()
99 : _instance(::_pbi::ConstantInitialized{}) {}
100 ~ConvolutionDescriptorProtoDefaultTypeInternal() {}
101 union { // NOLINT(misc-non-private-member-variables-in-classes)
102 ConvolutionDescriptorProto _instance;
103 };
104 };
105 PROTOBUF_ATTRIBUTE_NO_DESTROY PROTOBUF_CONSTINIT PROTOBUF_ATTRIBUTE_INIT_PRIORITY1 ConvolutionDescriptorProtoDefaultTypeInternal _ConvolutionDescriptorProto_default_instance_;
106 } // namespace dnn
107 } // namespace stream_executor
108 namespace stream_executor {
109 namespace dnn {
110 bool AlgorithmProto_MathType_IsValid(int value) {
111 switch (value) {
112 case 0:
113 case 1:
114 return true;
115 default:
116 return false;
117 }
118 }
119
120 static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed<std::string> AlgorithmProto_MathType_strings[2] = {};
121
122 static const char AlgorithmProto_MathType_names[] =
123 "DEFAULT_MATH"
124 "TENSOR_OP_MATH";
125
126 static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry AlgorithmProto_MathType_entries[] = {
127 { {AlgorithmProto_MathType_names + 0, 12}, 0 },
128 { {AlgorithmProto_MathType_names + 12, 14}, 1 },
129 };
130
131 static const int AlgorithmProto_MathType_entries_by_number[] = {
132 0, // 0 -> DEFAULT_MATH
133 1, // 1 -> TENSOR_OP_MATH
134 };
135
136 const std::string& AlgorithmProto_MathType_Name(
137 AlgorithmProto_MathType value) {
138 static const bool dummy =
139 ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings(
140 AlgorithmProto_MathType_entries,
141 AlgorithmProto_MathType_entries_by_number,
142 2, AlgorithmProto_MathType_strings);
143 (void) dummy;
144 int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName(
145 AlgorithmProto_MathType_entries,
146 AlgorithmProto_MathType_entries_by_number,
147 2, value);
148 return idx == -1 ? ::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() :
149 AlgorithmProto_MathType_strings[idx].get();
150 }
151 bool AlgorithmProto_MathType_Parse(
152 ::PROTOBUF_NAMESPACE_ID::ConstStringParam name, AlgorithmProto_MathType* value) {
153 int int_value;
154 bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue(
155 AlgorithmProto_MathType_entries, 2, name, &int_value);
156 if (success) {
157 *value = static_cast<AlgorithmProto_MathType>(int_value);
158 }
159 return success;
160 }
161 #if (__cplusplus < 201703) && (!defined(_MSC_VER) || (_MSC_VER >= 1900 && _MSC_VER < 1912))
162 constexpr AlgorithmProto_MathType AlgorithmProto::DEFAULT_MATH;
163 constexpr AlgorithmProto_MathType AlgorithmProto::TENSOR_OP_MATH;
164 constexpr AlgorithmProto_MathType AlgorithmProto::MathType_MIN;
165 constexpr AlgorithmProto_MathType AlgorithmProto::MathType_MAX;
166 constexpr int AlgorithmProto::MathType_ARRAYSIZE;
167 #endif // (__cplusplus < 201703) && (!defined(_MSC_VER) || (_MSC_VER >= 1900 && _MSC_VER < 1912))
168 bool DataType_IsValid(int value) {
169 switch (value) {
170 case 0:
171 case 1:
172 case 2:
173 case 3:
174 case 4:
175 case 5:
176 case 6:
177 case 7:
178 return true;
179 default:
180 return false;
181 }
182 }
183
184 static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed<std::string> DataType_strings[8] = {};
185
186 static const char DataType_names[] =
187 "kBF16"
188 "kComplexDouble"
189 "kComplexFloat"
190 "kDouble"
191 "kFloat"
192 "kHalf"
193 "kInt32"
194 "kInt8";
195
196 static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry DataType_entries[] = {
197 { {DataType_names + 0, 5}, 7 },
198 { {DataType_names + 5, 14}, 6 },
199 { {DataType_names + 19, 13}, 5 },
200 { {DataType_names + 32, 7}, 1 },
201 { {DataType_names + 39, 6}, 0 },
202 { {DataType_names + 45, 5}, 2 },
203 { {DataType_names + 50, 6}, 4 },
204 { {DataType_names + 56, 5}, 3 },
205 };
206
207 static const int DataType_entries_by_number[] = {
208 4, // 0 -> kFloat
209 3, // 1 -> kDouble
210 5, // 2 -> kHalf
211 7, // 3 -> kInt8
212 6, // 4 -> kInt32
213 2, // 5 -> kComplexFloat
214 1, // 6 -> kComplexDouble
215 0, // 7 -> kBF16
216 };
217
218 const std::string& DataType_Name(
219 DataType value) {
220 static const bool dummy =
221 ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings(
222 DataType_entries,
223 DataType_entries_by_number,
224 8, DataType_strings);
225 (void) dummy;
226 int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName(
227 DataType_entries,
228 DataType_entries_by_number,
229 8, value);
230 return idx == -1 ? ::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() :
231 DataType_strings[idx].get();
232 }
233 bool DataType_Parse(
234 ::PROTOBUF_NAMESPACE_ID::ConstStringParam name, DataType* value) {
235 int int_value;
236 bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue(
237 DataType_entries, 8, name, &int_value);
238 if (success) {
239 *value = static_cast<DataType>(int_value);
240 }
241 return success;
242 }
243 bool DataLayout_IsValid(int value) {
244 switch (value) {
245 case 0:
246 case 1:
247 case 2:
248 case 3:
249 case 4:
250 case 5:
251 return true;
252 default:
253 return false;
254 }
255 }
256
257 static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed<std::string> DataLayout_strings[6] = {};
258
259 static const char DataLayout_names[] =
260 "kBatchDepthYX"
261 "kBatchDepthYX32"
262 "kBatchDepthYX4"
263 "kBatchYXDepth"
264 "kYXBatchDepth"
265 "kYXDepthBatch";
266
267 static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry DataLayout_entries[] = {
268 { {DataLayout_names + 0, 13}, 3 },
269 { {DataLayout_names + 13, 15}, 5 },
270 { {DataLayout_names + 28, 14}, 4 },
271 { {DataLayout_names + 42, 13}, 2 },
272 { {DataLayout_names + 55, 13}, 1 },
273 { {DataLayout_names + 68, 13}, 0 },
274 };
275
276 static const int DataLayout_entries_by_number[] = {
277 5, // 0 -> kYXDepthBatch
278 4, // 1 -> kYXBatchDepth
279 3, // 2 -> kBatchYXDepth
280 0, // 3 -> kBatchDepthYX
281 2, // 4 -> kBatchDepthYX4
282 1, // 5 -> kBatchDepthYX32
283 };
284
285 const std::string& DataLayout_Name(
286 DataLayout value) {
287 static const bool dummy =
288 ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings(
289 DataLayout_entries,
290 DataLayout_entries_by_number,
291 6, DataLayout_strings);
292 (void) dummy;
293 int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName(
294 DataLayout_entries,
295 DataLayout_entries_by_number,
296 6, value);
297 return idx == -1 ? ::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() :
298 DataLayout_strings[idx].get();
299 }
300 bool DataLayout_Parse(
301 ::PROTOBUF_NAMESPACE_ID::ConstStringParam name, DataLayout* value) {
302 int int_value;
303 bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue(
304 DataLayout_entries, 6, name, &int_value);
305 if (success) {
306 *value = static_cast<DataLayout>(int_value);
307 }
308 return success;
309 }
310 bool FilterLayout_IsValid(int value) {
311 switch (value) {
312 case 0:
313 case 1:
314 case 2:
315 case 3:
316 case 4:
317 case 5:
318 return true;
319 default:
320 return false;
321 }
322 }
323
324 static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed<std::string> FilterLayout_strings[6] = {};
325
326 static const char FilterLayout_names[] =
327 "kInputYXOutput"
328 "kOutputInputYX"
329 "kOutputInputYX32"
330 "kOutputInputYX4"
331 "kOutputYXInput"
332 "kYXInputOutput";
333
334 static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry FilterLayout_entries[] = {
335 { {FilterLayout_names + 0, 14}, 3 },
336 { {FilterLayout_names + 14, 14}, 0 },
337 { {FilterLayout_names + 28, 16}, 5 },
338 { {FilterLayout_names + 44, 15}, 2 },
339 { {FilterLayout_names + 59, 14}, 1 },
340 { {FilterLayout_names + 73, 14}, 4 },
341 };
342
343 static const int FilterLayout_entries_by_number[] = {
344 1, // 0 -> kOutputInputYX
345 4, // 1 -> kOutputYXInput
346 3, // 2 -> kOutputInputYX4
347 0, // 3 -> kInputYXOutput
348 5, // 4 -> kYXInputOutput
349 2, // 5 -> kOutputInputYX32
350 };
351
352 const std::string& FilterLayout_Name(
353 FilterLayout value) {
354 static const bool dummy =
355 ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings(
356 FilterLayout_entries,
357 FilterLayout_entries_by_number,
358 6, FilterLayout_strings);
359 (void) dummy;
360 int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName(
361 FilterLayout_entries,
362 FilterLayout_entries_by_number,
363 6, value);
364 return idx == -1 ? ::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() :
365 FilterLayout_strings[idx].get();
366 }
367 bool FilterLayout_Parse(
368 ::PROTOBUF_NAMESPACE_ID::ConstStringParam name, FilterLayout* value) {
369 int int_value;
370 bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue(
371 FilterLayout_entries, 6, name, &int_value);
372 if (success) {
373 *value = static_cast<FilterLayout>(int_value);
374 }
375 return success;
376 }
377 bool ActivationMode_IsValid(int value) {
378 switch (value) {
379 case 0:
380 case 1:
381 case 2:
382 case 3:
383 case 4:
384 case 5:
385 case 6:
386 case 7:
387 case 8:
388 return true;
389 default:
390 return false;
391 }
392 }
393
394 static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed<std::string> ActivationMode_strings[9] = {};
395
396 static const char ActivationMode_names[] =
397 "kBandPass"
398 "kElu"
399 "kLeakyRelu"
400 "kNone"
401 "kRelu"
402 "kRelu6"
403 "kReluX"
404 "kSigmoid"
405 "kTanh";
406
407 static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry ActivationMode_entries[] = {
408 { {ActivationMode_names + 0, 9}, 6 },
409 { {ActivationMode_names + 9, 4}, 7 },
410 { {ActivationMode_names + 13, 10}, 8 },
411 { {ActivationMode_names + 23, 5}, 0 },
412 { {ActivationMode_names + 28, 5}, 2 },
413 { {ActivationMode_names + 33, 6}, 3 },
414 { {ActivationMode_names + 39, 6}, 4 },
415 { {ActivationMode_names + 45, 8}, 1 },
416 { {ActivationMode_names + 53, 5}, 5 },
417 };
418
419 static const int ActivationMode_entries_by_number[] = {
420 3, // 0 -> kNone
421 7, // 1 -> kSigmoid
422 4, // 2 -> kRelu
423 5, // 3 -> kRelu6
424 6, // 4 -> kReluX
425 8, // 5 -> kTanh
426 0, // 6 -> kBandPass
427 1, // 7 -> kElu
428 2, // 8 -> kLeakyRelu
429 };
430
431 const std::string& ActivationMode_Name(
432 ActivationMode value) {
433 static const bool dummy =
434 ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings(
435 ActivationMode_entries,
436 ActivationMode_entries_by_number,
437 9, ActivationMode_strings);
438 (void) dummy;
439 int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName(
440 ActivationMode_entries,
441 ActivationMode_entries_by_number,
442 9, value);
443 return idx == -1 ? ::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() :
444 ActivationMode_strings[idx].get();
445 }
446 bool ActivationMode_Parse(
447 ::PROTOBUF_NAMESPACE_ID::ConstStringParam name, ActivationMode* value) {
448 int int_value;
449 bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue(
450 ActivationMode_entries, 9, name, &int_value);
451 if (success) {
452 *value = static_cast<ActivationMode>(int_value);
453 }
454 return success;
455 }
456 bool ConvolutionMode_IsValid(int value) {
457 switch (value) {
458 case 0:
459 case 1:
460 return true;
461 default:
462 return false;
463 }
464 }
465
466 static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed<std::string> ConvolutionMode_strings[2] = {};
467
468 static const char ConvolutionMode_names[] =
469 "CONVOLUTION"
470 "CROSS_CORRELATION";
471
472 static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry ConvolutionMode_entries[] = {
473 { {ConvolutionMode_names + 0, 11}, 1 },
474 { {ConvolutionMode_names + 11, 17}, 0 },
475 };
476
477 static const int ConvolutionMode_entries_by_number[] = {
478 1, // 0 -> CROSS_CORRELATION
479 0, // 1 -> CONVOLUTION
480 };
481
482 const std::string& ConvolutionMode_Name(
483 ConvolutionMode value) {
484 static const bool dummy =
485 ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings(
486 ConvolutionMode_entries,
487 ConvolutionMode_entries_by_number,
488 2, ConvolutionMode_strings);
489 (void) dummy;
490 int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName(
491 ConvolutionMode_entries,
492 ConvolutionMode_entries_by_number,
493 2, value);
494 return idx == -1 ? ::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() :
495 ConvolutionMode_strings[idx].get();
496 }
497 bool ConvolutionMode_Parse(
498 ::PROTOBUF_NAMESPACE_ID::ConstStringParam name, ConvolutionMode* value) {
499 int int_value;
500 bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue(
501 ConvolutionMode_entries, 2, name, &int_value);
502 if (success) {
503 *value = static_cast<ConvolutionMode>(int_value);
504 }
505 return success;
506 }
507 bool ConvolutionKind_IsValid(int value) {
508 switch (value) {
509 case 0:
510 case 1:
511 case 2:
512 case 3:
513 case 4:
514 return true;
515 default:
516 return false;
517 }
518 }
519
520 static ::PROTOBUF_NAMESPACE_ID::internal::ExplicitlyConstructed<std::string> ConvolutionKind_strings[5] = {};
521
522 static const char ConvolutionKind_names[] =
523 "BACKWARD_DATA"
524 "BACKWARD_FILTER"
525 "FORWARD"
526 "FORWARD_BIAS_ACTIVATION"
527 "INVALID";
528
529 static const ::PROTOBUF_NAMESPACE_ID::internal::EnumEntry ConvolutionKind_entries[] = {
530 { {ConvolutionKind_names + 0, 13}, 3 },
531 { {ConvolutionKind_names + 13, 15}, 2 },
532 { {ConvolutionKind_names + 28, 7}, 1 },
533 { {ConvolutionKind_names + 35, 23}, 4 },
534 { {ConvolutionKind_names + 58, 7}, 0 },
535 };
536
537 static const int ConvolutionKind_entries_by_number[] = {
538 4, // 0 -> INVALID
539 2, // 1 -> FORWARD
540 1, // 2 -> BACKWARD_FILTER
541 0, // 3 -> BACKWARD_DATA
542 3, // 4 -> FORWARD_BIAS_ACTIVATION
543 };
544
545 const std::string& ConvolutionKind_Name(
546 ConvolutionKind value) {
547 static const bool dummy =
548 ::PROTOBUF_NAMESPACE_ID::internal::InitializeEnumStrings(
549 ConvolutionKind_entries,
550 ConvolutionKind_entries_by_number,
551 5, ConvolutionKind_strings);
552 (void) dummy;
553 int idx = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumName(
554 ConvolutionKind_entries,
555 ConvolutionKind_entries_by_number,
556 5, value);
557 return idx == -1 ? ::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString() :
558 ConvolutionKind_strings[idx].get();
559 }
560 bool ConvolutionKind_Parse(
561 ::PROTOBUF_NAMESPACE_ID::ConstStringParam name, ConvolutionKind* value) {
562 int int_value;
563 bool success = ::PROTOBUF_NAMESPACE_ID::internal::LookUpEnumValue(
564 ConvolutionKind_entries, 5, name, &int_value);
565 if (success) {
566 *value = static_cast<ConvolutionKind>(int_value);
567 }
568 return success;
569 }
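//
// [Editorial note, not generated code] The enum helpers above are the
// lite-runtime equivalents of the usual protobuf enum API. A minimal usage
// sketch, assuming the declarations in dnn.pb.h:
//
//   const std::string& n = stream_executor::dnn::DataType_Name(
//       stream_executor::dnn::kHalf);                          // "kHalf"
//   stream_executor::dnn::DataType parsed;
//   bool parsed_ok = stream_executor::dnn::DataType_Parse("kHalf", &parsed);
//   bool valid = stream_executor::dnn::DataLayout_IsValid(3);  // kBatchDepthYX
//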
570
571 // ===================================================================
572
573 class TensorDescriptorProto::_Internal {
574 public:
575 };
576
577 TensorDescriptorProto::TensorDescriptorProto(::PROTOBUF_NAMESPACE_ID::Arena* arena,
578 bool is_message_owned)
579 : ::PROTOBUF_NAMESPACE_ID::MessageLite(arena, is_message_owned) {
580 SharedCtor(arena, is_message_owned);
581 // @@protoc_insertion_point(arena_constructor:stream_executor.dnn.TensorDescriptorProto)
582 }
583 TensorDescriptorProto::TensorDescriptorProto(const TensorDescriptorProto& from)
584 : ::PROTOBUF_NAMESPACE_ID::MessageLite() {
585 TensorDescriptorProto* const _this = this; (void)_this;
586 new (&_impl_) Impl_{
587 decltype(_impl_.dimensions_){from._impl_.dimensions_}
588 , /*decltype(_impl_._dimensions_cached_byte_size_)*/{0}
589 , decltype(_impl_.data_type_){}
590 , decltype(_impl_.layout_oneof_){}
591 , /*decltype(_impl_._cached_size_)*/{}
592 , /*decltype(_impl_._oneof_case_)*/{}};
593
594 _internal_metadata_.MergeFrom<std::string>(from._internal_metadata_);
595 _this->_impl_.data_type_ = from._impl_.data_type_;
596 clear_has_layout_oneof();
597 switch (from.layout_oneof_case()) {
598 case kDataLayout: {
599 _this->_internal_set_data_layout(from._internal_data_layout());
600 break;
601 }
602 case kFilterLayout: {
603 _this->_internal_set_filter_layout(from._internal_filter_layout());
604 break;
605 }
606 case LAYOUT_ONEOF_NOT_SET: {
607 break;
608 }
609 }
610 // @@protoc_insertion_point(copy_constructor:stream_executor.dnn.TensorDescriptorProto)
611 }
612
613 inline void TensorDescriptorProto::SharedCtor(
614 ::_pb::Arena* arena, bool is_message_owned) {
615 (void)arena;
616 (void)is_message_owned;
617 new (&_impl_) Impl_{
618 decltype(_impl_.dimensions_){arena}
619 , /*decltype(_impl_._dimensions_cached_byte_size_)*/{0}
620 , decltype(_impl_.data_type_){0}
621 , decltype(_impl_.layout_oneof_){}
622 , /*decltype(_impl_._cached_size_)*/{}
623 , /*decltype(_impl_._oneof_case_)*/{}
624 };
625 clear_has_layout_oneof();
626 }
627
628 TensorDescriptorProto::~TensorDescriptorProto() {
629 // @@protoc_insertion_point(destructor:stream_executor.dnn.TensorDescriptorProto)
630 if (auto *arena = _internal_metadata_.DeleteReturnArena<std::string>()) {
631 (void)arena;
632 return;
633 }
634 SharedDtor();
635 }
636
637 inline void TensorDescriptorProto::SharedDtor() {
638 GOOGLE_DCHECK(GetArenaForAllocation() == nullptr);
639 _impl_.dimensions_.~RepeatedField();
640 if (has_layout_oneof()) {
641 clear_layout_oneof();
642 }
643 }
644
645 void TensorDescriptorProto::SetCachedSize(int size) const {
646 _impl_._cached_size_.Set(size);
647 }
648
649 void TensorDescriptorProto::clear_layout_oneof() {
650 // @@protoc_insertion_point(one_of_clear_start:stream_executor.dnn.TensorDescriptorProto)
651 switch (layout_oneof_case()) {
652 case kDataLayout: {
653 // No need to clear
654 break;
655 }
656 case kFilterLayout: {
657 // No need to clear
658 break;
659 }
660 case LAYOUT_ONEOF_NOT_SET: {
661 break;
662 }
663 }
664 _impl_._oneof_case_[0] = LAYOUT_ONEOF_NOT_SET;
665 }
666
667
668 void TensorDescriptorProto::Clear() {
669 // @@protoc_insertion_point(message_clear_start:stream_executor.dnn.TensorDescriptorProto)
670 ::uint32_t cached_has_bits = 0;
671 // Prevent compiler warnings about cached_has_bits being unused
672 (void) cached_has_bits;
673
674 _impl_.dimensions_.Clear();
675 _impl_.data_type_ = 0;
676 clear_layout_oneof();
677 _internal_metadata_.Clear<std::string>();
678 }
679
680 const char* TensorDescriptorProto::_InternalParse(const char* ptr, ::_pbi::ParseContext* ctx) {
681 #define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure
682 while (!ctx->Done(&ptr)) {
683 ::uint32_t tag;
684 ptr = ::_pbi::ReadTag(ptr, &tag);
685 switch (tag >> 3) {
686 // repeated int64 dimensions = 1;
687 case 1:
688 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 10)) {
689 ptr = ::PROTOBUF_NAMESPACE_ID::internal::PackedInt64Parser(_internal_mutable_dimensions(), ptr, ctx);
690 CHK_(ptr);
691 } else if (static_cast<::uint8_t>(tag) == 8) {
692 _internal_add_dimensions(::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr));
693 CHK_(ptr);
694 } else {
695 goto handle_unusual;
696 }
697 continue;
698 // .stream_executor.dnn.DataType data_type = 2;
699 case 2:
700 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 16)) {
701 ::uint64_t val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
702 CHK_(ptr);
703 _internal_set_data_type(static_cast<::stream_executor::dnn::DataType>(val));
704 } else {
705 goto handle_unusual;
706 }
707 continue;
708 // .stream_executor.dnn.DataLayout data_layout = 3;
709 case 3:
710 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 24)) {
711 ::uint64_t val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
712 CHK_(ptr);
713 _internal_set_data_layout(static_cast<::stream_executor::dnn::DataLayout>(val));
714 } else {
715 goto handle_unusual;
716 }
717 continue;
718 // .stream_executor.dnn.FilterLayout filter_layout = 4;
719 case 4:
720 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 32)) {
721 ::uint64_t val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
722 CHK_(ptr);
723 _internal_set_filter_layout(static_cast<::stream_executor::dnn::FilterLayout>(val));
724 } else {
725 goto handle_unusual;
726 }
727 continue;
728 default:
729 goto handle_unusual;
730 } // switch
731 handle_unusual:
732 if ((tag == 0) || ((tag & 7) == 4)) {
733 CHK_(ptr);
734 ctx->SetLastTag(tag);
735 goto message_done;
736 }
737 ptr = UnknownFieldParse(
738 tag,
739 _internal_metadata_.mutable_unknown_fields<std::string>(),
740 ptr, ctx);
741 CHK_(ptr != nullptr);
742 } // while
743 message_done:
744 return ptr;
745 failure:
746 ptr = nullptr;
747 goto message_done;
748 #undef CHK_
749 }
750
751 ::uint8_t* TensorDescriptorProto::_InternalSerialize(
752 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const {
753 // @@protoc_insertion_point(serialize_to_array_start:stream_executor.dnn.TensorDescriptorProto)
754 ::uint32_t cached_has_bits = 0;
755 (void) cached_has_bits;
756
757 // repeated int64 dimensions = 1;
758 {
759 int byte_size = _impl_._dimensions_cached_byte_size_.load(std::memory_order_relaxed);
760 if (byte_size > 0) {
761 target = stream->WriteInt64Packed(
762 1, _internal_dimensions(), byte_size, target);
763 }
764 }
765
766 // .stream_executor.dnn.DataType data_type = 2;
767 if (this->_internal_data_type() != 0) {
768 target = stream->EnsureSpace(target);
769 target = ::_pbi::WireFormatLite::WriteEnumToArray(
770 2, this->_internal_data_type(), target);
771 }
772
773 // .stream_executor.dnn.DataLayout data_layout = 3;
774 if (_internal_has_data_layout()) {
775 target = stream->EnsureSpace(target);
776 target = ::_pbi::WireFormatLite::WriteEnumToArray(
777 3, this->_internal_data_layout(), target);
778 }
779
780 // .stream_executor.dnn.FilterLayout filter_layout = 4;
781 if (_internal_has_filter_layout()) {
782 target = stream->EnsureSpace(target);
783 target = ::_pbi::WireFormatLite::WriteEnumToArray(
784 4, this->_internal_filter_layout(), target);
785 }
786
787 if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
788 target = stream->WriteRaw(_internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).data(),
789 static_cast<int>(_internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).size()), target);
790 }
791 // @@protoc_insertion_point(serialize_to_array_end:stream_executor.dnn.TensorDescriptorProto)
792 return target;
793 }
794
795 size_t TensorDescriptorProto::ByteSizeLong() const {
796 // @@protoc_insertion_point(message_byte_size_start:stream_executor.dnn.TensorDescriptorProto)
797 size_t total_size = 0;
798
799 ::uint32_t cached_has_bits = 0;
800 // Prevent compiler warnings about cached_has_bits being unused
801 (void) cached_has_bits;
802
803 // repeated int64 dimensions = 1;
804 {
805 size_t data_size = ::_pbi::WireFormatLite::
806 Int64Size(this->_impl_.dimensions_);
807 if (data_size > 0) {
808 total_size += 1 +
809 ::_pbi::WireFormatLite::Int32Size(static_cast<::int32_t>(data_size));
810 }
811 int cached_size = ::_pbi::ToCachedSize(data_size);
812 _impl_._dimensions_cached_byte_size_.store(cached_size,
813 std::memory_order_relaxed);
814 total_size += data_size;
815 }
816
817 // .stream_executor.dnn.DataType data_type = 2;
818 if (this->_internal_data_type() != 0) {
819 total_size += 1 +
820 ::_pbi::WireFormatLite::EnumSize(this->_internal_data_type());
821 }
822
823 switch (layout_oneof_case()) {
824 // .stream_executor.dnn.DataLayout data_layout = 3;
825 case kDataLayout: {
826 total_size += 1 +
827 ::_pbi::WireFormatLite::EnumSize(this->_internal_data_layout());
828 break;
829 }
830 // .stream_executor.dnn.FilterLayout filter_layout = 4;
831 case kFilterLayout: {
832 total_size += 1 +
833 ::_pbi::WireFormatLite::EnumSize(this->_internal_filter_layout());
834 break;
835 }
836 case LAYOUT_ONEOF_NOT_SET: {
837 break;
838 }
839 }
840 if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
841 total_size += _internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).size();
842 }
843 int cached_size = ::_pbi::ToCachedSize(total_size);
844 SetCachedSize(cached_size);
845 return total_size;
846 }
847
848 void TensorDescriptorProto::CheckTypeAndMergeFrom(
849 const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) {
850 MergeFrom(*::_pbi::DownCast<const TensorDescriptorProto*>(
851 &from));
852 }
853
854 void TensorDescriptorProto::MergeFrom(const TensorDescriptorProto& from) {
855 TensorDescriptorProto* const _this = this;
856 // @@protoc_insertion_point(class_specific_merge_from_start:stream_executor.dnn.TensorDescriptorProto)
857 GOOGLE_DCHECK_NE(&from, _this);
858 ::uint32_t cached_has_bits = 0;
859 (void) cached_has_bits;
860
861 _this->_impl_.dimensions_.MergeFrom(from._impl_.dimensions_);
862 if (from._internal_data_type() != 0) {
863 _this->_internal_set_data_type(from._internal_data_type());
864 }
865 switch (from.layout_oneof_case()) {
866 case kDataLayout: {
867 _this->_internal_set_data_layout(from._internal_data_layout());
868 break;
869 }
870 case kFilterLayout: {
871 _this->_internal_set_filter_layout(from._internal_filter_layout());
872 break;
873 }
874 case LAYOUT_ONEOF_NOT_SET: {
875 break;
876 }
877 }
878 _this->_internal_metadata_.MergeFrom<std::string>(from._internal_metadata_);
879 }
880
881 void TensorDescriptorProto::CopyFrom(const TensorDescriptorProto& from) {
882 // @@protoc_insertion_point(class_specific_copy_from_start:stream_executor.dnn.TensorDescriptorProto)
883 if (&from == this) return;
884 Clear();
885 MergeFrom(from);
886 }
887
888 bool TensorDescriptorProto::IsInitialized() const {
889 return true;
890 }
891
892 void TensorDescriptorProto::InternalSwap(TensorDescriptorProto* other) {
893 using std::swap;
894 _internal_metadata_.InternalSwap(&other->_internal_metadata_);
895 _impl_.dimensions_.InternalSwap(&other->_impl_.dimensions_);
896 swap(_impl_.data_type_, other->_impl_.data_type_);
897 swap(_impl_.layout_oneof_, other->_impl_.layout_oneof_);
898 swap(_impl_._oneof_case_[0], other->_impl_._oneof_case_[0]);
899 }
900
901 std::string TensorDescriptorProto::GetTypeName() const {
902 return "stream_executor.dnn.TensorDescriptorProto";
903 }
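//
// [Editorial note, not generated code] A minimal round-trip sketch for the
// message implemented above, assuming the usual generated accessors declared
// in dnn.pb.h (add_dimensions, set_data_type, set_data_layout) and the
// MessageLite wire API:
//
//   stream_executor::dnn::TensorDescriptorProto desc;
//   desc.add_dimensions(1);
//   desc.add_dimensions(224);
//   desc.set_data_type(stream_executor::dnn::kFloat);
//   desc.set_data_layout(stream_executor::dnn::kBatchDepthYX);  // layout_oneof
//   std::string wire = desc.SerializeAsString();
//   stream_executor::dnn::TensorDescriptorProto copy;
//   bool ok = copy.ParseFromString(wire);
//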
904
905
906 // ===================================================================
907
908 AlgorithmProto_TuningKnobsEntry_DoNotUse::AlgorithmProto_TuningKnobsEntry_DoNotUse() {}
909 AlgorithmProto_TuningKnobsEntry_DoNotUse::AlgorithmProto_TuningKnobsEntry_DoNotUse(::PROTOBUF_NAMESPACE_ID::Arena* arena)
910 : SuperType(arena) {}
911 void AlgorithmProto_TuningKnobsEntry_DoNotUse::MergeFrom(const AlgorithmProto_TuningKnobsEntry_DoNotUse& other) {
912 MergeFromInternal(other);
913 }
914
915 // ===================================================================
916
917 class AlgorithmProto::_Internal {
918 public:
919 static const ::PROTOBUF_NAMESPACE_ID::UInt64Value& workspace_size(const AlgorithmProto* msg);
920 };
921
922 const ::PROTOBUF_NAMESPACE_ID::UInt64Value&
923 AlgorithmProto::_Internal::workspace_size(const AlgorithmProto* msg) {
924 return *msg->_impl_.workspace_size_;
925 }
926 void AlgorithmProto::clear_workspace_size() {
927 if (GetArenaForAllocation() == nullptr && _impl_.workspace_size_ != nullptr) {
928 delete _impl_.workspace_size_;
929 }
930 _impl_.workspace_size_ = nullptr;
931 }
932 AlgorithmProto::AlgorithmProto(::PROTOBUF_NAMESPACE_ID::Arena* arena,
933 bool is_message_owned)
934 : ::PROTOBUF_NAMESPACE_ID::MessageLite(arena, is_message_owned) {
935 SharedCtor(arena, is_message_owned);
936 // @@protoc_insertion_point(arena_constructor:stream_executor.dnn.AlgorithmProto)
937 }
938 AlgorithmProto::AlgorithmProto(const AlgorithmProto& from)
939 : ::PROTOBUF_NAMESPACE_ID::MessageLite() {
940 AlgorithmProto* const _this = this; (void)_this;
941 new (&_impl_) Impl_{
942 /*decltype(_impl_.tuning_knobs_)*/{}
943 , decltype(_impl_.workspace_size_){nullptr}
944 , decltype(_impl_.algo_id_){}
945 , decltype(_impl_.math_type_){}
946 , decltype(_impl_.is_cudnn_frontend_){}
947 , /*decltype(_impl_._cached_size_)*/{}};
948
949 _internal_metadata_.MergeFrom<std::string>(from._internal_metadata_);
950 _this->_impl_.tuning_knobs_.MergeFrom(from._impl_.tuning_knobs_);
951 if (from._internal_has_workspace_size()) {
952 _this->_impl_.workspace_size_ = new ::PROTOBUF_NAMESPACE_ID::UInt64Value(*from._impl_.workspace_size_);
953 }
954 ::memcpy(&_impl_.algo_id_, &from._impl_.algo_id_,
955 static_cast<size_t>(reinterpret_cast<char*>(&_impl_.is_cudnn_frontend_) -
956 reinterpret_cast<char*>(&_impl_.algo_id_)) + sizeof(_impl_.is_cudnn_frontend_));
957 // @@protoc_insertion_point(copy_constructor:stream_executor.dnn.AlgorithmProto)
958 }
959
960 inline void AlgorithmProto::SharedCtor(
961 ::_pb::Arena* arena, bool is_message_owned) {
962 (void)arena;
963 (void)is_message_owned;
964 new (&_impl_) Impl_{
965 /*decltype(_impl_.tuning_knobs_)*/{::_pbi::ArenaInitialized(), arena}
966 , decltype(_impl_.workspace_size_){nullptr}
967 , decltype(_impl_.algo_id_){::int64_t{0}}
968 , decltype(_impl_.math_type_){0}
969 , decltype(_impl_.is_cudnn_frontend_){false}
970 , /*decltype(_impl_._cached_size_)*/{}
971 };
972 }
973
974 AlgorithmProto::~AlgorithmProto() {
975 // @@protoc_insertion_point(destructor:stream_executor.dnn.AlgorithmProto)
976 if (auto *arena = _internal_metadata_.DeleteReturnArena<std::string>()) {
977 (void)arena;
978 return;
979 }
980 SharedDtor();
981 }
982
983 inline void AlgorithmProto::SharedDtor() {
984 GOOGLE_DCHECK(GetArenaForAllocation() == nullptr);
985 _impl_.tuning_knobs_.Destruct();
986 _impl_.tuning_knobs_.~MapFieldLite();
987 if (this != internal_default_instance()) delete _impl_.workspace_size_;
988 }
989
990 void AlgorithmProto::SetCachedSize(int size) const {
991 _impl_._cached_size_.Set(size);
992 }
993
994 void AlgorithmProto::Clear() {
995 // @@protoc_insertion_point(message_clear_start:stream_executor.dnn.AlgorithmProto)
996 ::uint32_t cached_has_bits = 0;
997 // Prevent compiler warnings about cached_has_bits being unused
998 (void) cached_has_bits;
999
1000 _impl_.tuning_knobs_.Clear();
1001 if (GetArenaForAllocation() == nullptr && _impl_.workspace_size_ != nullptr) {
1002 delete _impl_.workspace_size_;
1003 }
1004 _impl_.workspace_size_ = nullptr;
1005 ::memset(&_impl_.algo_id_, 0, static_cast<size_t>(
1006 reinterpret_cast<char*>(&_impl_.is_cudnn_frontend_) -
1007 reinterpret_cast<char*>(&_impl_.algo_id_)) + sizeof(_impl_.is_cudnn_frontend_));
1008 _internal_metadata_.Clear<std::string>();
1009 }
1010
1011 const char* AlgorithmProto::_InternalParse(const char* ptr, ::_pbi::ParseContext* ctx) {
1012 #define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure
1013 while (!ctx->Done(&ptr)) {
1014 ::uint32_t tag;
1015 ptr = ::_pbi::ReadTag(ptr, &tag);
1016 switch (tag >> 3) {
1017 // int64 algo_id = 1;
1018 case 1:
1019 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 8)) {
1020 _impl_.algo_id_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
1021 CHK_(ptr);
1022 } else {
1023 goto handle_unusual;
1024 }
1025 continue;
1026 // .stream_executor.dnn.AlgorithmProto.MathType math_type = 2;
1027 case 2:
1028 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 16)) {
1029 ::uint64_t val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
1030 CHK_(ptr);
1031 _internal_set_math_type(static_cast<::stream_executor::dnn::AlgorithmProto_MathType>(val));
1032 } else {
1033 goto handle_unusual;
1034 }
1035 continue;
1036 // map<int64, int64> tuning_knobs = 4;
1037 case 4:
1038 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 34)) {
1039 ptr -= 1;
1040 do {
1041 ptr += 1;
1042 ptr = ctx->ParseMessage(&_impl_.tuning_knobs_, ptr);
1043 CHK_(ptr);
1044 if (!ctx->DataAvailable(ptr)) break;
1045 } while (::PROTOBUF_NAMESPACE_ID::internal::ExpectTag<34>(ptr));
1046 } else {
1047 goto handle_unusual;
1048 }
1049 continue;
1050 // bool is_cudnn_frontend = 5;
1051 case 5:
1052 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 40)) {
1053 _impl_.is_cudnn_frontend_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
1054 CHK_(ptr);
1055 } else {
1056 goto handle_unusual;
1057 }
1058 continue;
1059 // .google.protobuf.UInt64Value workspace_size = 6;
1060 case 6:
1061 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 50)) {
1062 ptr = ctx->ParseMessage(_internal_mutable_workspace_size(), ptr);
1063 CHK_(ptr);
1064 } else {
1065 goto handle_unusual;
1066 }
1067 continue;
1068 default:
1069 goto handle_unusual;
1070 } // switch
1071 handle_unusual:
1072 if ((tag == 0) || ((tag & 7) == 4)) {
1073 CHK_(ptr);
1074 ctx->SetLastTag(tag);
1075 goto message_done;
1076 }
1077 ptr = UnknownFieldParse(
1078 tag,
1079 _internal_metadata_.mutable_unknown_fields<std::string>(),
1080 ptr, ctx);
1081 CHK_(ptr != nullptr);
1082 } // while
1083 message_done:
1084 return ptr;
1085 failure:
1086 ptr = nullptr;
1087 goto message_done;
1088 #undef CHK_
1089 }
1090
1091 ::uint8_t* AlgorithmProto::_InternalSerialize(
1092 ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const {
1093 // @@protoc_insertion_point(serialize_to_array_start:stream_executor.dnn.AlgorithmProto)
1094 ::uint32_t cached_has_bits = 0;
1095 (void) cached_has_bits;
1096
1097 // int64 algo_id = 1;
1098 if (this->_internal_algo_id() != 0) {
1099 target = stream->EnsureSpace(target);
1100 target = ::_pbi::WireFormatLite::WriteInt64ToArray(1, this->_internal_algo_id(), target);
1101 }
1102
1103 // .stream_executor.dnn.AlgorithmProto.MathType math_type = 2;
1104 if (this->_internal_math_type() != 0) {
1105 target = stream->EnsureSpace(target);
1106 target = ::_pbi::WireFormatLite::WriteEnumToArray(
1107 2, this->_internal_math_type(), target);
1108 }
1109
1110 // map<int64, int64> tuning_knobs = 4;
1111 if (!this->_internal_tuning_knobs().empty()) {
1112 using MapType = ::_pb::Map<::int64_t, ::int64_t>;
1113 using WireHelper = AlgorithmProto_TuningKnobsEntry_DoNotUse::Funcs;
1114 const auto& map_field = this->_internal_tuning_knobs();
1115
1116 if (stream->IsSerializationDeterministic() && map_field.size() > 1) {
1117 for (const auto& entry : ::_pbi::MapSorterFlat<MapType>(map_field)) {
1118 target = WireHelper::InternalSerialize(4, entry.first, entry.second, target, stream);
1119 }
1120 } else {
1121 for (const auto& entry : map_field) {
1122 target = WireHelper::InternalSerialize(4, entry.first, entry.second, target, stream);
1123 }
1124 }
1125 }
1126
1127 // bool is_cudnn_frontend = 5;
1128 if (this->_internal_is_cudnn_frontend() != 0) {
1129 target = stream->EnsureSpace(target);
1130 target = ::_pbi::WireFormatLite::WriteBoolToArray(5, this->_internal_is_cudnn_frontend(), target);
1131 }
1132
1133 // .google.protobuf.UInt64Value workspace_size = 6;
1134 if (this->_internal_has_workspace_size()) {
1135 target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::
1136 InternalWriteMessage(6, _Internal::workspace_size(this),
1137 _Internal::workspace_size(this).GetCachedSize(), target, stream);
1138 }
1139
1140 if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
1141 target = stream->WriteRaw(_internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).data(),
1142 static_cast<int>(_internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).size()), target);
1143 }
1144 // @@protoc_insertion_point(serialize_to_array_end:stream_executor.dnn.AlgorithmProto)
1145 return target;
1146 }
1147
1148 size_t AlgorithmProto::ByteSizeLong() const {
1149 // @@protoc_insertion_point(message_byte_size_start:stream_executor.dnn.AlgorithmProto)
1150 size_t total_size = 0;
1151
1152 ::uint32_t cached_has_bits = 0;
1153 // Prevent compiler warnings about cached_has_bits being unused
1154 (void) cached_has_bits;
1155
1156 // map<int64, int64> tuning_knobs = 4;
1157 total_size += 1 *
1158 ::PROTOBUF_NAMESPACE_ID::internal::FromIntSize(this->_internal_tuning_knobs_size());
1159 for (::PROTOBUF_NAMESPACE_ID::Map< ::int64_t, ::int64_t >::const_iterator
1160 it = this->_internal_tuning_knobs().begin();
1161 it != this->_internal_tuning_knobs().end(); ++it) {
1162 total_size += AlgorithmProto_TuningKnobsEntry_DoNotUse::Funcs::ByteSizeLong(it->first, it->second);
1163 }
1164
1165 // .google.protobuf.UInt64Value workspace_size = 6;
1166 if (this->_internal_has_workspace_size()) {
1167 total_size += 1 +
1168 ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize(
1169 *_impl_.workspace_size_);
1170 }
1171
1172 // int64 algo_id = 1;
1173 if (this->_internal_algo_id() != 0) {
1174 total_size += ::_pbi::WireFormatLite::Int64SizePlusOne(this->_internal_algo_id());
1175 }
1176
1177 // .stream_executor.dnn.AlgorithmProto.MathType math_type = 2;
1178 if (this->_internal_math_type() != 0) {
1179 total_size += 1 +
1180 ::_pbi::WireFormatLite::EnumSize(this->_internal_math_type());
1181 }
1182
1183 // bool is_cudnn_frontend = 5;
1184 if (this->_internal_is_cudnn_frontend() != 0) {
1185 total_size += 1 + 1;
1186 }
1187
1188 if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
1189 total_size += _internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).size();
1190 }
1191 int cached_size = ::_pbi::ToCachedSize(total_size);
1192 SetCachedSize(cached_size);
1193 return total_size;
1194 }
1195
1196 void AlgorithmProto::CheckTypeAndMergeFrom(
1197 const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) {
1198 MergeFrom(*::_pbi::DownCast<const AlgorithmProto*>(
1199 &from));
1200 }
1201
1202 void AlgorithmProto::MergeFrom(const AlgorithmProto& from) {
1203 AlgorithmProto* const _this = this;
1204 // @@protoc_insertion_point(class_specific_merge_from_start:stream_executor.dnn.AlgorithmProto)
1205 GOOGLE_DCHECK_NE(&from, _this);
1206 ::uint32_t cached_has_bits = 0;
1207 (void) cached_has_bits;
1208
1209 _this->_impl_.tuning_knobs_.MergeFrom(from._impl_.tuning_knobs_);
1210 if (from._internal_has_workspace_size()) {
1211 _this->_internal_mutable_workspace_size()->::PROTOBUF_NAMESPACE_ID::UInt64Value::MergeFrom(
1212 from._internal_workspace_size());
1213 }
1214 if (from._internal_algo_id() != 0) {
1215 _this->_internal_set_algo_id(from._internal_algo_id());
1216 }
1217 if (from._internal_math_type() != 0) {
1218 _this->_internal_set_math_type(from._internal_math_type());
1219 }
1220 if (from._internal_is_cudnn_frontend() != 0) {
1221 _this->_internal_set_is_cudnn_frontend(from._internal_is_cudnn_frontend());
1222 }
1223 _this->_internal_metadata_.MergeFrom<std::string>(from._internal_metadata_);
1224 }
1225
1226 void AlgorithmProto::CopyFrom(const AlgorithmProto& from) {
1227 // @@protoc_insertion_point(class_specific_copy_from_start:stream_executor.dnn.AlgorithmProto)
1228 if (&from == this) return;
1229 Clear();
1230 MergeFrom(from);
1231 }
1232
1233 bool AlgorithmProto::IsInitialized() const {
1234 return true;
1235 }
1236
1237 void AlgorithmProto::InternalSwap(AlgorithmProto* other) {
1238 using std::swap;
1239 _internal_metadata_.InternalSwap(&other->_internal_metadata_);
1240 _impl_.tuning_knobs_.InternalSwap(&other->_impl_.tuning_knobs_);
1241 ::PROTOBUF_NAMESPACE_ID::internal::memswap<
1242 PROTOBUF_FIELD_OFFSET(AlgorithmProto, _impl_.is_cudnn_frontend_)
1243 + sizeof(AlgorithmProto::_impl_.is_cudnn_frontend_) // NOLINT
1244 - PROTOBUF_FIELD_OFFSET(AlgorithmProto, _impl_.workspace_size_)>(
1245 reinterpret_cast<char*>(&_impl_.workspace_size_),
1246 reinterpret_cast<char*>(&other->_impl_.workspace_size_));
1247 }
1248
1249 std::string AlgorithmProto::GetTypeName() const {
1250 return "stream_executor.dnn.AlgorithmProto";
1251 }
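//
// [Editorial note, not generated code] A sketch of populating AlgorithmProto,
// including the tuning_knobs map and the google.protobuf.UInt64Value
// workspace_size wrapper handled above; accessor names assume the usual
// generated API declared in dnn.pb.h:
//
//   stream_executor::dnn::AlgorithmProto algo;
//   algo.set_algo_id(7);
//   algo.set_math_type(stream_executor::dnn::AlgorithmProto::TENSOR_OP_MATH);
//   (*algo.mutable_tuning_knobs())[0] = 2;          // knob id -> knob value
//   algo.mutable_workspace_size()->set_value(1 << 20);
//   algo.set_is_cudnn_frontend(true);
//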
1252
1253
1254 // ===================================================================
1255
1256 class AlgorithmConfigProto::_Internal {
1257 public:
1258 static const ::stream_executor::dnn::AlgorithmProto& algorithm(const AlgorithmConfigProto* msg);
1259 static const ::stream_executor::dnn::AlgorithmProto& algorithm_no_scratch(const AlgorithmConfigProto* msg);
1260 };
1261
1262 const ::stream_executor::dnn::AlgorithmProto&
1263 AlgorithmConfigProto::_Internal::algorithm(const AlgorithmConfigProto* msg) {
1264 return *msg->_impl_.optional_algorithm_.algorithm_;
1265 }
1266 const ::stream_executor::dnn::AlgorithmProto&
1267 AlgorithmConfigProto::_Internal::algorithm_no_scratch(const AlgorithmConfigProto* msg) {
1268 return *msg->_impl_.optional_algorithm_no_scratch_.algorithm_no_scratch_;
1269 }
1270 void AlgorithmConfigProto::set_allocated_algorithm(::stream_executor::dnn::AlgorithmProto* algorithm) {
1271 ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
1272 clear_optional_algorithm();
1273 if (algorithm) {
1274 ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
1275 ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(algorithm);
1276 if (message_arena != submessage_arena) {
1277 algorithm = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
1278 message_arena, algorithm, submessage_arena);
1279 }
1280 set_has_algorithm();
1281 _impl_.optional_algorithm_.algorithm_ = algorithm;
1282 }
1283 // @@protoc_insertion_point(field_set_allocated:stream_executor.dnn.AlgorithmConfigProto.algorithm)
1284 }
1285 void AlgorithmConfigProto::set_allocated_algorithm_no_scratch(::stream_executor::dnn::AlgorithmProto* algorithm_no_scratch) {
1286 ::PROTOBUF_NAMESPACE_ID::Arena* message_arena = GetArenaForAllocation();
1287 clear_optional_algorithm_no_scratch();
1288 if (algorithm_no_scratch) {
1289 ::PROTOBUF_NAMESPACE_ID::Arena* submessage_arena =
1290 ::PROTOBUF_NAMESPACE_ID::Arena::InternalGetOwningArena(algorithm_no_scratch);
1291 if (message_arena != submessage_arena) {
1292 algorithm_no_scratch = ::PROTOBUF_NAMESPACE_ID::internal::GetOwnedMessage(
1293 message_arena, algorithm_no_scratch, submessage_arena);
1294 }
1295 set_has_algorithm_no_scratch();
1296 _impl_.optional_algorithm_no_scratch_.algorithm_no_scratch_ = algorithm_no_scratch;
1297 }
1298 // @@protoc_insertion_point(field_set_allocated:stream_executor.dnn.AlgorithmConfigProto.algorithm_no_scratch)
1299 }
1300 AlgorithmConfigProto::AlgorithmConfigProto(::PROTOBUF_NAMESPACE_ID::Arena* arena,
1301 bool is_message_owned)
1302 : ::PROTOBUF_NAMESPACE_ID::MessageLite(arena, is_message_owned) {
1303 SharedCtor(arena, is_message_owned);
1304 // @@protoc_insertion_point(arena_constructor:stream_executor.dnn.AlgorithmConfigProto)
1305 }
1306 AlgorithmConfigProto::AlgorithmConfigProto(const AlgorithmConfigProto& from)
1307 : ::PROTOBUF_NAMESPACE_ID::MessageLite() {
1308 AlgorithmConfigProto* const _this = this; (void)_this;
1309 new (&_impl_) Impl_{
1310 decltype(_impl_.optional_algorithm_){}
1311 , decltype(_impl_.optional_algorithm_no_scratch_){}
1312 , decltype(_impl_.optional_scratch_size_){}
1313 , /*decltype(_impl_._cached_size_)*/{}
1314 , /*decltype(_impl_._oneof_case_)*/{}};
1315
1316 _internal_metadata_.MergeFrom<std::string>(from._internal_metadata_);
1317 clear_has_optional_algorithm();
1318 switch (from.optional_algorithm_case()) {
1319 case kAlgorithm: {
1320 _this->_internal_mutable_algorithm()->::stream_executor::dnn::AlgorithmProto::MergeFrom(
1321 from._internal_algorithm());
1322 break;
1323 }
1324 case OPTIONAL_ALGORITHM_NOT_SET: {
1325 break;
1326 }
1327 }
1328 clear_has_optional_algorithm_no_scratch();
1329 switch (from.optional_algorithm_no_scratch_case()) {
1330 case kAlgorithmNoScratch: {
1331 _this->_internal_mutable_algorithm_no_scratch()->::stream_executor::dnn::AlgorithmProto::MergeFrom(
1332 from._internal_algorithm_no_scratch());
1333 break;
1334 }
1335 case OPTIONAL_ALGORITHM_NO_SCRATCH_NOT_SET: {
1336 break;
1337 }
1338 }
1339 clear_has_optional_scratch_size();
1340 switch (from.optional_scratch_size_case()) {
1341 case kScratchSize: {
1342 _this->_internal_set_scratch_size(from._internal_scratch_size());
1343 break;
1344 }
1345 case OPTIONAL_SCRATCH_SIZE_NOT_SET: {
1346 break;
1347 }
1348 }
1349 // @@protoc_insertion_point(copy_constructor:stream_executor.dnn.AlgorithmConfigProto)
1350 }
1351
1352 inline void AlgorithmConfigProto::SharedCtor(
1353 ::_pb::Arena* arena, bool is_message_owned) {
1354 (void)arena;
1355 (void)is_message_owned;
1356 new (&_impl_) Impl_{
1357 decltype(_impl_.optional_algorithm_){}
1358 , decltype(_impl_.optional_algorithm_no_scratch_){}
1359 , decltype(_impl_.optional_scratch_size_){}
1360 , /*decltype(_impl_._cached_size_)*/{}
1361 , /*decltype(_impl_._oneof_case_)*/{}
1362 };
1363 clear_has_optional_algorithm();
1364 clear_has_optional_algorithm_no_scratch();
1365 clear_has_optional_scratch_size();
1366 }
1367
1368 AlgorithmConfigProto::~AlgorithmConfigProto() {
1369 // @@protoc_insertion_point(destructor:stream_executor.dnn.AlgorithmConfigProto)
1370 if (auto *arena = _internal_metadata_.DeleteReturnArena<std::string>()) {
1371 (void)arena;
1372 return;
1373 }
1374 SharedDtor();
1375 }
1376
1377 inline void AlgorithmConfigProto::SharedDtor() {
1378 GOOGLE_DCHECK(GetArenaForAllocation() == nullptr);
1379 if (has_optional_algorithm()) {
1380 clear_optional_algorithm();
1381 }
1382 if (has_optional_algorithm_no_scratch()) {
1383 clear_optional_algorithm_no_scratch();
1384 }
1385 if (has_optional_scratch_size()) {
1386 clear_optional_scratch_size();
1387 }
1388 }
1389
1390 void AlgorithmConfigProto::SetCachedSize(int size) const {
1391 _impl_._cached_size_.Set(size);
1392 }
1393
1394 void AlgorithmConfigProto::clear_optional_algorithm() {
1395 // @@protoc_insertion_point(one_of_clear_start:stream_executor.dnn.AlgorithmConfigProto)
1396 switch (optional_algorithm_case()) {
1397 case kAlgorithm: {
1398 if (GetArenaForAllocation() == nullptr) {
1399 delete _impl_.optional_algorithm_.algorithm_;
1400 }
1401 break;
1402 }
1403 case OPTIONAL_ALGORITHM_NOT_SET: {
1404 break;
1405 }
1406 }
1407 _impl_._oneof_case_[0] = OPTIONAL_ALGORITHM_NOT_SET;
1408 }
1409
1410 void AlgorithmConfigProto::clear_optional_algorithm_no_scratch() {
1411 // @@protoc_insertion_point(one_of_clear_start:stream_executor.dnn.AlgorithmConfigProto)
1412 switch (optional_algorithm_no_scratch_case()) {
1413 case kAlgorithmNoScratch: {
1414 if (GetArenaForAllocation() == nullptr) {
1415 delete _impl_.optional_algorithm_no_scratch_.algorithm_no_scratch_;
1416 }
1417 break;
1418 }
1419 case OPTIONAL_ALGORITHM_NO_SCRATCH_NOT_SET: {
1420 break;
1421 }
1422 }
1423 _impl_._oneof_case_[1] = OPTIONAL_ALGORITHM_NO_SCRATCH_NOT_SET;
1424 }
1425
1426 void AlgorithmConfigProto::clear_optional_scratch_size() {
1427 // @@protoc_insertion_point(one_of_clear_start:stream_executor.dnn.AlgorithmConfigProto)
1428 switch (optional_scratch_size_case()) {
1429 case kScratchSize: {
1430 // No need to clear
1431 break;
1432 }
1433 case OPTIONAL_SCRATCH_SIZE_NOT_SET: {
1434 break;
1435 }
1436 }
1437 _impl_._oneof_case_[2] = OPTIONAL_SCRATCH_SIZE_NOT_SET;
1438 }
1439
1440
1441 void AlgorithmConfigProto::Clear() {
1442 // @@protoc_insertion_point(message_clear_start:stream_executor.dnn.AlgorithmConfigProto)
1443 ::uint32_t cached_has_bits = 0;
1444 // Prevent compiler warnings about cached_has_bits being unused
1445 (void) cached_has_bits;
1446
1447 clear_optional_algorithm();
1448 clear_optional_algorithm_no_scratch();
1449 clear_optional_scratch_size();
1450 _internal_metadata_.Clear<std::string>();
1451 }
1452
1453 const char* AlgorithmConfigProto::_InternalParse(const char* ptr, ::_pbi::ParseContext* ctx) {
1454 #define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure
1455 while (!ctx->Done(&ptr)) {
1456 ::uint32_t tag;
1457 ptr = ::_pbi::ReadTag(ptr, &tag);
1458 switch (tag >> 3) {
1459 // .stream_executor.dnn.AlgorithmProto algorithm = 1;
1460 case 1:
1461 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 10)) {
1462 ptr = ctx->ParseMessage(_internal_mutable_algorithm(), ptr);
1463 CHK_(ptr);
1464 } else {
1465 goto handle_unusual;
1466 }
1467 continue;
1468 // .stream_executor.dnn.AlgorithmProto algorithm_no_scratch = 2;
1469 case 2:
1470 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 18)) {
1471 ptr = ctx->ParseMessage(_internal_mutable_algorithm_no_scratch(), ptr);
1472 CHK_(ptr);
1473 } else {
1474 goto handle_unusual;
1475 }
1476 continue;
1477 // int64 scratch_size = 3;
1478 case 3:
1479 if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 24)) {
1480 _internal_set_scratch_size(::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr));
1481 CHK_(ptr);
1482 } else {
1483 goto handle_unusual;
1484 }
1485 continue;
1486 default:
1487 goto handle_unusual;
1488 } // switch
1489 handle_unusual:
1490 if ((tag == 0) || ((tag & 7) == 4)) {
1491 CHK_(ptr);
1492 ctx->SetLastTag(tag);
1493 goto message_done;
1494 }
1495 ptr = UnknownFieldParse(
1496 tag,
1497 _internal_metadata_.mutable_unknown_fields<std::string>(),
1498 ptr, ctx);
1499 CHK_(ptr != nullptr);
1500 } // while
1501 message_done:
1502 return ptr;
1503 failure:
1504 ptr = nullptr;
1505 goto message_done;
1506 #undef CHK_
1507 }
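// Editorial note (not emitted by protoc): the byte values compared against
// `tag` above follow the protobuf wire format, where each tag is
// (field_number << 3) | wire_type. For example, field 1 with wire type 2
// (length-delimited message) gives (1 << 3) | 2 == 10, and field 3 with wire
// type 0 (varint) gives (3 << 3) | 0 == 24, matching the constants checked
// in the cases above.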

::uint8_t* AlgorithmConfigProto::_InternalSerialize(
    ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const {
  // @@protoc_insertion_point(serialize_to_array_start:stream_executor.dnn.AlgorithmConfigProto)
  ::uint32_t cached_has_bits = 0;
  (void) cached_has_bits;

  // .stream_executor.dnn.AlgorithmProto algorithm = 1;
  if (_internal_has_algorithm()) {
    target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::
      InternalWriteMessage(1, _Internal::algorithm(this),
        _Internal::algorithm(this).GetCachedSize(), target, stream);
  }

  // .stream_executor.dnn.AlgorithmProto algorithm_no_scratch = 2;
  if (_internal_has_algorithm_no_scratch()) {
    target = ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::
      InternalWriteMessage(2, _Internal::algorithm_no_scratch(this),
        _Internal::algorithm_no_scratch(this).GetCachedSize(), target, stream);
  }

  // int64 scratch_size = 3;
  if (_internal_has_scratch_size()) {
    target = stream->EnsureSpace(target);
    target = ::_pbi::WireFormatLite::WriteInt64ToArray(3, this->_internal_scratch_size(), target);
  }

  if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
    target = stream->WriteRaw(_internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).data(),
        static_cast<int>(_internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).size()), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:stream_executor.dnn.AlgorithmConfigProto)
  return target;
}

size_t AlgorithmConfigProto::ByteSizeLong() const {
// @@protoc_insertion_point(message_byte_size_start:stream_executor.dnn.AlgorithmConfigProto)
  size_t total_size = 0;

  switch (optional_algorithm_case()) {
    // .stream_executor.dnn.AlgorithmProto algorithm = 1;
    case kAlgorithm: {
      total_size += 1 +
        ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize(
          *_impl_.optional_algorithm_.algorithm_);
      break;
    }
    case OPTIONAL_ALGORITHM_NOT_SET: {
      break;
    }
  }
  switch (optional_algorithm_no_scratch_case()) {
    // .stream_executor.dnn.AlgorithmProto algorithm_no_scratch = 2;
    case kAlgorithmNoScratch: {
      total_size += 1 +
        ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::MessageSize(
          *_impl_.optional_algorithm_no_scratch_.algorithm_no_scratch_);
      break;
    }
    case OPTIONAL_ALGORITHM_NO_SCRATCH_NOT_SET: {
      break;
    }
  }
  switch (optional_scratch_size_case()) {
    // int64 scratch_size = 3;
    case kScratchSize: {
      total_size += ::_pbi::WireFormatLite::Int64SizePlusOne(this->_internal_scratch_size());
      break;
    }
    case OPTIONAL_SCRATCH_SIZE_NOT_SET: {
      break;
    }
  }
  if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
    total_size += _internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).size();
  }
  int cached_size = ::_pbi::ToCachedSize(total_size);
  SetCachedSize(cached_size);
  return total_size;
}

void AlgorithmConfigProto::CheckTypeAndMergeFrom(
    const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) {
  MergeFrom(*::_pbi::DownCast<const AlgorithmConfigProto*>(
      &from));
}

void AlgorithmConfigProto::MergeFrom(const AlgorithmConfigProto& from) {
  AlgorithmConfigProto* const _this = this;
  // @@protoc_insertion_point(class_specific_merge_from_start:stream_executor.dnn.AlgorithmConfigProto)
  GOOGLE_DCHECK_NE(&from, _this);
  ::uint32_t cached_has_bits = 0;
  (void) cached_has_bits;

  switch (from.optional_algorithm_case()) {
    case kAlgorithm: {
      _this->_internal_mutable_algorithm()->::stream_executor::dnn::AlgorithmProto::MergeFrom(
          from._internal_algorithm());
      break;
    }
    case OPTIONAL_ALGORITHM_NOT_SET: {
      break;
    }
  }
  switch (from.optional_algorithm_no_scratch_case()) {
    case kAlgorithmNoScratch: {
      _this->_internal_mutable_algorithm_no_scratch()->::stream_executor::dnn::AlgorithmProto::MergeFrom(
          from._internal_algorithm_no_scratch());
      break;
    }
    case OPTIONAL_ALGORITHM_NO_SCRATCH_NOT_SET: {
      break;
    }
  }
  switch (from.optional_scratch_size_case()) {
    case kScratchSize: {
      _this->_internal_set_scratch_size(from._internal_scratch_size());
      break;
    }
    case OPTIONAL_SCRATCH_SIZE_NOT_SET: {
      break;
    }
  }
  _this->_internal_metadata_.MergeFrom<std::string>(from._internal_metadata_);
}

void AlgorithmConfigProto::CopyFrom(const AlgorithmConfigProto& from) {
// @@protoc_insertion_point(class_specific_copy_from_start:stream_executor.dnn.AlgorithmConfigProto)
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}

bool AlgorithmConfigProto::IsInitialized() const {
  return true;
}

void AlgorithmConfigProto::InternalSwap(AlgorithmConfigProto* other) {
  using std::swap;
  _internal_metadata_.InternalSwap(&other->_internal_metadata_);
  swap(_impl_.optional_algorithm_, other->_impl_.optional_algorithm_);
  swap(_impl_.optional_algorithm_no_scratch_, other->_impl_.optional_algorithm_no_scratch_);
  swap(_impl_.optional_scratch_size_, other->_impl_.optional_scratch_size_);
  swap(_impl_._oneof_case_[0], other->_impl_._oneof_case_[0]);
  swap(_impl_._oneof_case_[1], other->_impl_._oneof_case_[1]);
  swap(_impl_._oneof_case_[2], other->_impl_._oneof_case_[2]);
}

std::string AlgorithmConfigProto::GetTypeName() const {
  return "stream_executor.dnn.AlgorithmConfigProto";
}
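// Example usage (editorial sketch, not generated by protoc; the message and
// field names come from dnn.proto, but the surrounding code is illustrative
// only and relies on the standard generated/MessageLite API):
//
//   stream_executor::dnn::AlgorithmConfigProto config;
//   config.mutable_algorithm()->set_algo_id(7);   // selects the `algorithm` oneof case
//   config.set_scratch_size(1 << 20);             // selects the `scratch_size` oneof case
//   std::string bytes;
//   config.SerializeToString(&bytes);             // drives _InternalSerialize above
//
//   stream_executor::dnn::AlgorithmConfigProto parsed;
//   parsed.ParseFromString(bytes);                // drives _InternalParse above
//   if (parsed.optional_algorithm_case() ==
//       stream_executor::dnn::AlgorithmConfigProto::kAlgorithm) {
//     int64_t id = parsed.algorithm().algo_id();
//     (void)id;
//   }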


// ===================================================================

class ConvolutionDescriptorProto::_Internal {
 public:
};

ConvolutionDescriptorProto::ConvolutionDescriptorProto(::PROTOBUF_NAMESPACE_ID::Arena* arena,
                         bool is_message_owned)
  : ::PROTOBUF_NAMESPACE_ID::MessageLite(arena, is_message_owned) {
  SharedCtor(arena, is_message_owned);
  // @@protoc_insertion_point(arena_constructor:stream_executor.dnn.ConvolutionDescriptorProto)
}
ConvolutionDescriptorProto::ConvolutionDescriptorProto(const ConvolutionDescriptorProto& from)
  : ::PROTOBUF_NAMESPACE_ID::MessageLite() {
  ConvolutionDescriptorProto* const _this = this; (void)_this;
  new (&_impl_) Impl_{
      decltype(_impl_.paddings_){from._impl_.paddings_}
    , /*decltype(_impl_._paddings_cached_byte_size_)*/{0}
    , decltype(_impl_.strides_){from._impl_.strides_}
    , /*decltype(_impl_._strides_cached_byte_size_)*/{0}
    , decltype(_impl_.dilations_){from._impl_.dilations_}
    , /*decltype(_impl_._dilations_cached_byte_size_)*/{0}
    , decltype(_impl_.name_){}
    , decltype(_impl_.compute_mode_){}
    , decltype(_impl_.group_count_){}
    , decltype(_impl_.convolution_mode_){}
    , /*decltype(_impl_._cached_size_)*/{}};

  _internal_metadata_.MergeFrom<std::string>(from._internal_metadata_);
  _impl_.name_.InitDefault();
  #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
    _impl_.name_.Set("", GetArenaForAllocation());
  #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
  if (!from._internal_name().empty()) {
    _this->_impl_.name_.Set(from._internal_name(),
      _this->GetArenaForAllocation());
  }
  ::memcpy(&_impl_.compute_mode_, &from._impl_.compute_mode_,
    static_cast<size_t>(reinterpret_cast<char*>(&_impl_.convolution_mode_) -
    reinterpret_cast<char*>(&_impl_.compute_mode_)) + sizeof(_impl_.convolution_mode_));
  // @@protoc_insertion_point(copy_constructor:stream_executor.dnn.ConvolutionDescriptorProto)
}

inline void ConvolutionDescriptorProto::SharedCtor(
    ::_pb::Arena* arena, bool is_message_owned) {
  (void)arena;
  (void)is_message_owned;
  new (&_impl_) Impl_{
      decltype(_impl_.paddings_){arena}
    , /*decltype(_impl_._paddings_cached_byte_size_)*/{0}
    , decltype(_impl_.strides_){arena}
    , /*decltype(_impl_._strides_cached_byte_size_)*/{0}
    , decltype(_impl_.dilations_){arena}
    , /*decltype(_impl_._dilations_cached_byte_size_)*/{0}
    , decltype(_impl_.name_){}
    , decltype(_impl_.compute_mode_){0}
    , decltype(_impl_.group_count_){0}
    , decltype(_impl_.convolution_mode_){0}
    , /*decltype(_impl_._cached_size_)*/{}
  };
  _impl_.name_.InitDefault();
  #ifdef PROTOBUF_FORCE_COPY_DEFAULT_STRING
    _impl_.name_.Set("", GetArenaForAllocation());
  #endif // PROTOBUF_FORCE_COPY_DEFAULT_STRING
}

ConvolutionDescriptorProto::~ConvolutionDescriptorProto() {
  // @@protoc_insertion_point(destructor:stream_executor.dnn.ConvolutionDescriptorProto)
  if (auto *arena = _internal_metadata_.DeleteReturnArena<std::string>()) {
    (void)arena;
    return;
  }
  SharedDtor();
}

inline void ConvolutionDescriptorProto::SharedDtor() {
  GOOGLE_DCHECK(GetArenaForAllocation() == nullptr);
  _impl_.paddings_.~RepeatedField();
  _impl_.strides_.~RepeatedField();
  _impl_.dilations_.~RepeatedField();
  _impl_.name_.Destroy();
}

void ConvolutionDescriptorProto::SetCachedSize(int size) const {
  _impl_._cached_size_.Set(size);
}

void ConvolutionDescriptorProto::Clear() {
// @@protoc_insertion_point(message_clear_start:stream_executor.dnn.ConvolutionDescriptorProto)
  ::uint32_t cached_has_bits = 0;
  // Prevent compiler warnings about cached_has_bits being unused
  (void) cached_has_bits;

  _impl_.paddings_.Clear();
  _impl_.strides_.Clear();
  _impl_.dilations_.Clear();
  _impl_.name_.ClearToEmpty();
  ::memset(&_impl_.compute_mode_, 0, static_cast<size_t>(
      reinterpret_cast<char*>(&_impl_.convolution_mode_) -
      reinterpret_cast<char*>(&_impl_.compute_mode_)) + sizeof(_impl_.convolution_mode_));
  _internal_metadata_.Clear<std::string>();
}

const char* ConvolutionDescriptorProto::_InternalParse(const char* ptr, ::_pbi::ParseContext* ctx) {
#define CHK_(x) if (PROTOBUF_PREDICT_FALSE(!(x))) goto failure
  while (!ctx->Done(&ptr)) {
    ::uint32_t tag;
    ptr = ::_pbi::ReadTag(ptr, &tag);
    switch (tag >> 3) {
      // repeated int64 paddings = 1;
      case 1:
        if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 10)) {
          ptr = ::PROTOBUF_NAMESPACE_ID::internal::PackedInt64Parser(_internal_mutable_paddings(), ptr, ctx);
          CHK_(ptr);
        } else if (static_cast<::uint8_t>(tag) == 8) {
          _internal_add_paddings(::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr));
          CHK_(ptr);
        } else {
          goto handle_unusual;
        }
        continue;
      // repeated int64 strides = 2;
      case 2:
        if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 18)) {
          ptr = ::PROTOBUF_NAMESPACE_ID::internal::PackedInt64Parser(_internal_mutable_strides(), ptr, ctx);
          CHK_(ptr);
        } else if (static_cast<::uint8_t>(tag) == 16) {
          _internal_add_strides(::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr));
          CHK_(ptr);
        } else {
          goto handle_unusual;
        }
        continue;
      // repeated int64 dilations = 3;
      case 3:
        if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 26)) {
          ptr = ::PROTOBUF_NAMESPACE_ID::internal::PackedInt64Parser(_internal_mutable_dilations(), ptr, ctx);
          CHK_(ptr);
        } else if (static_cast<::uint8_t>(tag) == 24) {
          _internal_add_dilations(::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr));
          CHK_(ptr);
        } else {
          goto handle_unusual;
        }
        continue;
      // .stream_executor.dnn.DataType compute_mode = 4;
      case 4:
        if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 32)) {
          ::uint64_t val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
          CHK_(ptr);
          _internal_set_compute_mode(static_cast<::stream_executor::dnn::DataType>(val));
        } else {
          goto handle_unusual;
        }
        continue;
      // int32 group_count = 5;
      case 5:
        if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 40)) {
          _impl_.group_count_ = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint32(&ptr);
          CHK_(ptr);
        } else {
          goto handle_unusual;
        }
        continue;
      // .stream_executor.dnn.ConvolutionMode convolution_mode = 6;
      case 6:
        if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 48)) {
          ::uint64_t val = ::PROTOBUF_NAMESPACE_ID::internal::ReadVarint64(&ptr);
          CHK_(ptr);
          _internal_set_convolution_mode(static_cast<::stream_executor::dnn::ConvolutionMode>(val));
        } else {
          goto handle_unusual;
        }
        continue;
      // string name = 7;
      case 7:
        if (PROTOBUF_PREDICT_TRUE(static_cast<::uint8_t>(tag) == 58)) {
          auto str = _internal_mutable_name();
          ptr = ::_pbi::InlineGreedyStringParser(str, ptr, ctx);
          CHK_(ptr);
          CHK_(::_pbi::VerifyUTF8(str, nullptr));
        } else {
          goto handle_unusual;
        }
        continue;
      default:
        goto handle_unusual;
    }  // switch
  handle_unusual:
    if ((tag == 0) || ((tag & 7) == 4)) {
      CHK_(ptr);
      ctx->SetLastTag(tag);
      goto message_done;
    }
    ptr = UnknownFieldParse(
        tag,
        _internal_metadata_.mutable_unknown_fields<std::string>(),
        ptr, ctx);
    CHK_(ptr != nullptr);
  }  // while
message_done:
  return ptr;
failure:
  ptr = nullptr;
  goto message_done;
#undef CHK_
}

::uint8_t* ConvolutionDescriptorProto::_InternalSerialize(
    ::uint8_t* target, ::PROTOBUF_NAMESPACE_ID::io::EpsCopyOutputStream* stream) const {
  // @@protoc_insertion_point(serialize_to_array_start:stream_executor.dnn.ConvolutionDescriptorProto)
  ::uint32_t cached_has_bits = 0;
  (void) cached_has_bits;

  // repeated int64 paddings = 1;
  {
    int byte_size = _impl_._paddings_cached_byte_size_.load(std::memory_order_relaxed);
    if (byte_size > 0) {
      target = stream->WriteInt64Packed(
          1, _internal_paddings(), byte_size, target);
    }
  }

  // repeated int64 strides = 2;
  {
    int byte_size = _impl_._strides_cached_byte_size_.load(std::memory_order_relaxed);
    if (byte_size > 0) {
      target = stream->WriteInt64Packed(
          2, _internal_strides(), byte_size, target);
    }
  }

  // repeated int64 dilations = 3;
  {
    int byte_size = _impl_._dilations_cached_byte_size_.load(std::memory_order_relaxed);
    if (byte_size > 0) {
      target = stream->WriteInt64Packed(
          3, _internal_dilations(), byte_size, target);
    }
  }

  // .stream_executor.dnn.DataType compute_mode = 4;
  if (this->_internal_compute_mode() != 0) {
    target = stream->EnsureSpace(target);
    target = ::_pbi::WireFormatLite::WriteEnumToArray(
        4, this->_internal_compute_mode(), target);
  }

  // int32 group_count = 5;
  if (this->_internal_group_count() != 0) {
    target = stream->EnsureSpace(target);
    target = ::_pbi::WireFormatLite::WriteInt32ToArray(5, this->_internal_group_count(), target);
  }

  // .stream_executor.dnn.ConvolutionMode convolution_mode = 6;
  if (this->_internal_convolution_mode() != 0) {
    target = stream->EnsureSpace(target);
    target = ::_pbi::WireFormatLite::WriteEnumToArray(
        6, this->_internal_convolution_mode(), target);
  }

  // string name = 7;
  if (!this->_internal_name().empty()) {
    ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::VerifyUtf8String(
        this->_internal_name().data(), static_cast<int>(this->_internal_name().length()),
        ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::SERIALIZE,
        "stream_executor.dnn.ConvolutionDescriptorProto.name");
    target = stream->WriteStringMaybeAliased(
        7, this->_internal_name(), target);
  }

  if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
    target = stream->WriteRaw(_internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).data(),
        static_cast<int>(_internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).size()), target);
  }
  // @@protoc_insertion_point(serialize_to_array_end:stream_executor.dnn.ConvolutionDescriptorProto)
  return target;
}

size_t ConvolutionDescriptorProto::ByteSizeLong() const {
// @@protoc_insertion_point(message_byte_size_start:stream_executor.dnn.ConvolutionDescriptorProto)
  size_t total_size = 0;

  ::uint32_t cached_has_bits = 0;
  // Prevent compiler warnings about cached_has_bits being unused
  (void) cached_has_bits;

  // repeated int64 paddings = 1;
  {
    size_t data_size = ::_pbi::WireFormatLite::
      Int64Size(this->_impl_.paddings_);
    if (data_size > 0) {
      total_size += 1 +
        ::_pbi::WireFormatLite::Int32Size(static_cast<::int32_t>(data_size));
    }
    int cached_size = ::_pbi::ToCachedSize(data_size);
    _impl_._paddings_cached_byte_size_.store(cached_size,
                                    std::memory_order_relaxed);
    total_size += data_size;
  }

  // repeated int64 strides = 2;
  {
    size_t data_size = ::_pbi::WireFormatLite::
      Int64Size(this->_impl_.strides_);
    if (data_size > 0) {
      total_size += 1 +
        ::_pbi::WireFormatLite::Int32Size(static_cast<::int32_t>(data_size));
    }
    int cached_size = ::_pbi::ToCachedSize(data_size);
    _impl_._strides_cached_byte_size_.store(cached_size,
                                    std::memory_order_relaxed);
    total_size += data_size;
  }

  // repeated int64 dilations = 3;
  {
    size_t data_size = ::_pbi::WireFormatLite::
      Int64Size(this->_impl_.dilations_);
    if (data_size > 0) {
      total_size += 1 +
        ::_pbi::WireFormatLite::Int32Size(static_cast<::int32_t>(data_size));
    }
    int cached_size = ::_pbi::ToCachedSize(data_size);
    _impl_._dilations_cached_byte_size_.store(cached_size,
                                    std::memory_order_relaxed);
    total_size += data_size;
  }

  // string name = 7;
  if (!this->_internal_name().empty()) {
    total_size += 1 +
      ::PROTOBUF_NAMESPACE_ID::internal::WireFormatLite::StringSize(
        this->_internal_name());
  }

  // .stream_executor.dnn.DataType compute_mode = 4;
  if (this->_internal_compute_mode() != 0) {
    total_size += 1 +
      ::_pbi::WireFormatLite::EnumSize(this->_internal_compute_mode());
  }

  // int32 group_count = 5;
  if (this->_internal_group_count() != 0) {
    total_size += ::_pbi::WireFormatLite::Int32SizePlusOne(this->_internal_group_count());
  }

  // .stream_executor.dnn.ConvolutionMode convolution_mode = 6;
  if (this->_internal_convolution_mode() != 0) {
    total_size += 1 +
      ::_pbi::WireFormatLite::EnumSize(this->_internal_convolution_mode());
  }

  if (PROTOBUF_PREDICT_FALSE(_internal_metadata_.have_unknown_fields())) {
    total_size += _internal_metadata_.unknown_fields<std::string>(::PROTOBUF_NAMESPACE_ID::internal::GetEmptyString).size();
  }
  int cached_size = ::_pbi::ToCachedSize(total_size);
  SetCachedSize(cached_size);
  return total_size;
}
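// Editorial note (not emitted by protoc): ByteSizeLong() above computes the
// payload size of each packed repeated int64 field and stores it in the
// corresponding atomic `_*_cached_byte_size_` member. _InternalSerialize()
// then reads those cached values when calling WriteInt64Packed(), which is
// why the library always computes the byte size of a message before
// serializing it.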

void ConvolutionDescriptorProto::CheckTypeAndMergeFrom(
    const ::PROTOBUF_NAMESPACE_ID::MessageLite& from) {
  MergeFrom(*::_pbi::DownCast<const ConvolutionDescriptorProto*>(
      &from));
}

void ConvolutionDescriptorProto::MergeFrom(const ConvolutionDescriptorProto& from) {
  ConvolutionDescriptorProto* const _this = this;
  // @@protoc_insertion_point(class_specific_merge_from_start:stream_executor.dnn.ConvolutionDescriptorProto)
  GOOGLE_DCHECK_NE(&from, _this);
  ::uint32_t cached_has_bits = 0;
  (void) cached_has_bits;

  _this->_impl_.paddings_.MergeFrom(from._impl_.paddings_);
  _this->_impl_.strides_.MergeFrom(from._impl_.strides_);
  _this->_impl_.dilations_.MergeFrom(from._impl_.dilations_);
  if (!from._internal_name().empty()) {
    _this->_internal_set_name(from._internal_name());
  }
  if (from._internal_compute_mode() != 0) {
    _this->_internal_set_compute_mode(from._internal_compute_mode());
  }
  if (from._internal_group_count() != 0) {
    _this->_internal_set_group_count(from._internal_group_count());
  }
  if (from._internal_convolution_mode() != 0) {
    _this->_internal_set_convolution_mode(from._internal_convolution_mode());
  }
  _this->_internal_metadata_.MergeFrom<std::string>(from._internal_metadata_);
}

void ConvolutionDescriptorProto::CopyFrom(const ConvolutionDescriptorProto& from) {
// @@protoc_insertion_point(class_specific_copy_from_start:stream_executor.dnn.ConvolutionDescriptorProto)
  if (&from == this) return;
  Clear();
  MergeFrom(from);
}

bool ConvolutionDescriptorProto::IsInitialized() const {
  return true;
}

void ConvolutionDescriptorProto::InternalSwap(ConvolutionDescriptorProto* other) {
  using std::swap;
  auto* lhs_arena = GetArenaForAllocation();
  auto* rhs_arena = other->GetArenaForAllocation();
  _internal_metadata_.InternalSwap(&other->_internal_metadata_);
  _impl_.paddings_.InternalSwap(&other->_impl_.paddings_);
  _impl_.strides_.InternalSwap(&other->_impl_.strides_);
  _impl_.dilations_.InternalSwap(&other->_impl_.dilations_);
  ::PROTOBUF_NAMESPACE_ID::internal::ArenaStringPtr::InternalSwap(
      &_impl_.name_, lhs_arena,
      &other->_impl_.name_, rhs_arena
  );
  ::PROTOBUF_NAMESPACE_ID::internal::memswap<
      PROTOBUF_FIELD_OFFSET(ConvolutionDescriptorProto, _impl_.convolution_mode_)
      + sizeof(ConvolutionDescriptorProto::_impl_.convolution_mode_)  // NOLINT
      - PROTOBUF_FIELD_OFFSET(ConvolutionDescriptorProto, _impl_.compute_mode_)>(
          reinterpret_cast<char*>(&_impl_.compute_mode_),
          reinterpret_cast<char*>(&other->_impl_.compute_mode_));
}

std::string ConvolutionDescriptorProto::GetTypeName() const {
  return "stream_executor.dnn.ConvolutionDescriptorProto";
}
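// Example usage (editorial sketch, not generated by protoc; the message and
// field names come from dnn.proto, but the snippet itself is illustrative):
//
//   stream_executor::dnn::ConvolutionDescriptorProto conv;
//   conv.add_paddings(1);  conv.add_paddings(1);    // repeated int64 paddings
//   conv.add_strides(2);   conv.add_strides(2);     // repeated int64 strides
//   conv.add_dilations(1); conv.add_dilations(1);   // repeated int64 dilations
//   conv.set_group_count(1);
//   conv.set_name("conv0");
//   std::string wire = conv.SerializeAsString();    // repeated fields use packed encoding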


// @@protoc_insertion_point(namespace_scope)
}  // namespace dnn
}  // namespace stream_executor
PROTOBUF_NAMESPACE_OPEN
template<> PROTOBUF_NOINLINE ::stream_executor::dnn::TensorDescriptorProto*
Arena::CreateMaybeMessage< ::stream_executor::dnn::TensorDescriptorProto >(Arena* arena) {
  return Arena::CreateMessageInternal< ::stream_executor::dnn::TensorDescriptorProto >(arena);
}
template<> PROTOBUF_NOINLINE ::stream_executor::dnn::AlgorithmProto_TuningKnobsEntry_DoNotUse*
Arena::CreateMaybeMessage< ::stream_executor::dnn::AlgorithmProto_TuningKnobsEntry_DoNotUse >(Arena* arena) {
  return Arena::CreateMessageInternal< ::stream_executor::dnn::AlgorithmProto_TuningKnobsEntry_DoNotUse >(arena);
}
template<> PROTOBUF_NOINLINE ::stream_executor::dnn::AlgorithmProto*
Arena::CreateMaybeMessage< ::stream_executor::dnn::AlgorithmProto >(Arena* arena) {
  return Arena::CreateMessageInternal< ::stream_executor::dnn::AlgorithmProto >(arena);
}
template<> PROTOBUF_NOINLINE ::stream_executor::dnn::AlgorithmConfigProto*
Arena::CreateMaybeMessage< ::stream_executor::dnn::AlgorithmConfigProto >(Arena* arena) {
  return Arena::CreateMessageInternal< ::stream_executor::dnn::AlgorithmConfigProto >(arena);
}
template<> PROTOBUF_NOINLINE ::stream_executor::dnn::ConvolutionDescriptorProto*
Arena::CreateMaybeMessage< ::stream_executor::dnn::ConvolutionDescriptorProto >(Arena* arena) {
  return Arena::CreateMessageInternal< ::stream_executor::dnn::ConvolutionDescriptorProto >(arena);
}
PROTOBUF_NAMESPACE_CLOSE
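// Example usage (editorial sketch; Arena::CreateMessage is part of the public
// protobuf API, while the CreateMaybeMessage specializations above are
// internal glue used by the runtime):
//
//   google::protobuf::Arena arena;
//   auto* cfg = google::protobuf::Arena::CreateMessage<
//       stream_executor::dnn::AlgorithmConfigProto>(&arena);
//   cfg->set_scratch_size(4096);   // arena-owned; freed when `arena` is destroyed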

// @@protoc_insertion_point(global_scope)
#include <google/protobuf/port_undef.inc>