//
// Copyright © 2017 Arm Ltd and Contributors. All rights reserved.
// SPDX-License-Identifier: MIT
//

#include <armnn/Deprecated.hpp>
#include <armnn/Exceptions.hpp>
#include <armnn/Types.hpp>

#include <backendsCommon/LayerSupportBase.hpp>

#include <armnn/utility/IgnoreUnused.hpp>
#include <armnn/utility/PolymorphicDowncast.hpp>

#include <sstream>

namespace
{

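// Shared fallback used by every check in this file that a backend has not overridden: when the
// caller asked for a reason, it records the calling function, file and line, and it always reports
// the layer as unsupported.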
bool DefaultLayerSupport(const char* func,
                         const char* file,
                         unsigned int line,
                         armnn::Optional<std::string&> reasonIfUnsupported)
{
    // NOTE: We only need to return the reason if the optional parameter is not empty
    if (reasonIfUnsupported)
    {
        std::stringstream message;
        message << func << " is not implemented [" << file << ":" << line << "]";

        reasonIfUnsupported.value() = message.str();
    }

    return false;
}

} // anonymous namespace

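// A minimal sketch (assuming a hypothetical backend called Sample) of how this base class is
// typically used: derive from LayerSupportBase and override only the checks the backend actually
// implements, letting everything else fall through to the "not implemented" default above.
//
//     class SampleLayerSupport : public armnn::LayerSupportBase
//     {
//     public:
//         bool IsAdditionSupported(const armnn::TensorInfo& input0,
//                                  const armnn::TensorInfo& input1,
//                                  const armnn::TensorInfo& output,
//                                  armnn::Optional<std::string&> reasonIfUnsupported) const override
//         {
//             armnn::IgnoreUnused(input0, input1, output, reasonIfUnsupported);
//             return true; // the Sample backend can execute Addition
//         }
//     };
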
namespace armnn
{

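// Unified entry point for layer support queries. The base implementation only knows how to answer
// for MemCopy, MemImport and StandIn; every other layer type is reported as not implemented.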
bool LayerSupportBase::IsLayerSupported(const LayerType& type,
                                        const std::vector<TensorInfo>& infos,
                                        const BaseDescriptor& descriptor,
                                        const Optional<LstmInputParamsInfo>&,
                                        const Optional<QuantizedLstmInputParamsInfo>&,
                                        Optional<std::string&> reasonIfUnsupported) const
{
    switch(type)
    {
        case LayerType::MemCopy:
            return IsMemCopySupported(infos[0], infos[1], reasonIfUnsupported);
        case LayerType::MemImport:
            return IsMemImportSupported(infos[0], infos[1], reasonIfUnsupported);
        case LayerType::StandIn:
        {
            auto desc = *(PolymorphicDowncast<const StandInDescriptor*>(&descriptor));

            if (infos.size() != (desc.m_NumInputs + desc.m_NumOutputs))
            {
                throw InvalidArgumentException("Number of StandIn layer TensorInfos does not equal "
                                               "the combined number of input and output slots assigned "
                                               "to the StandIn descriptor");
            }

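            // The infos vector holds the m_NumInputs input TensorInfos followed by the output
            // TensorInfos, so split it according to the counts carried by the descriptor.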
            std::vector<const TensorInfo*> inputInfos;
            for (uint32_t i = 0; i < desc.m_NumInputs; i++)
            {
                inputInfos.push_back(&infos[i]);
            }
            std::vector<const TensorInfo*> outputInfos;
            for (uint32_t i = desc.m_NumInputs; i < infos.size(); i++)
            {
                outputInfos.push_back(&infos[i]);
            }

            return IsStandInSupported(inputInfos,
                                      outputInfos,
                                      desc,
                                      reasonIfUnsupported);
        }
        default:
            return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
    }
}

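// Each of the per-layer checks below is a default stub: it reports "not implemented" together with
// the function, file and line, and is meant to be overridden by backend-specific LayerSupport
// classes for the layers that backend can actually execute.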
bool LayerSupportBase::IsActivationSupported(const TensorInfo&, // input
                                             const TensorInfo&, // output
                                             const ActivationDescriptor&, // descriptor
                                             Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsAdditionSupported(const TensorInfo&, // input0
                                           const TensorInfo&, // input1
                                           const TensorInfo&, // output
                                           Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsArgMinMaxSupported(const armnn::TensorInfo&, // input
                                            const armnn::TensorInfo&, // output
                                            const armnn::ArgMinMaxDescriptor&, // descriptor
                                            armnn::Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsBatchNormalizationSupported(const TensorInfo&, // input
                                                     const TensorInfo&, // output
                                                     const TensorInfo&, // mean
                                                     const TensorInfo&, // var
                                                     const TensorInfo&, // beta
                                                     const TensorInfo&, // gamma
                                                     const BatchNormalizationDescriptor&, // descriptor
                                                     Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsBatchToSpaceNdSupported(const TensorInfo&, // input
                                                 const TensorInfo&, // output
                                                 const BatchToSpaceNdDescriptor&, // descriptor
                                                 Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsCastSupported(const TensorInfo&, // input
                                       const TensorInfo&, // output
                                       Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsChannelShuffleSupported(const TensorInfo&, // input
                                                 const TensorInfo&, // output
                                                 const ChannelShuffleDescriptor&, // descriptor
                                                 Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsComparisonSupported(const TensorInfo&, // input0
                                             const TensorInfo&, // input1
                                             const TensorInfo&, // output
                                             const ComparisonDescriptor&, // descriptor
                                             Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsConcatSupported(const std::vector<const TensorInfo*>, // inputs
                                         const TensorInfo&, // output
                                         const OriginsDescriptor&, // descriptor
                                         Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsConstantSupported(const TensorInfo&, // output
                                           Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsConvertFp16ToFp32Supported(const TensorInfo&, // input
                                                    const TensorInfo&, // output
                                                    Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsConvertFp32ToFp16Supported(const TensorInfo&, // input
                                                    const TensorInfo&, // output
                                                    Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsConvolution2dSupported(const TensorInfo&, // input
                                                const TensorInfo&, // output
                                                const Convolution2dDescriptor&, // descriptor
                                                const TensorInfo&, // weights
                                                const Optional<TensorInfo>&, // biases
                                                Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsConvolution3dSupported(const TensorInfo&, // input
                                                const TensorInfo&, // output
                                                const Convolution3dDescriptor&, // descriptor
                                                const TensorInfo&, // weights
                                                const Optional<TensorInfo>&, // biases
                                                Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsDebugSupported(const TensorInfo&, // input
                                        const TensorInfo&, // output
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsDepthToSpaceSupported(const TensorInfo&, // input
                                               const TensorInfo&, // output
                                               const DepthToSpaceDescriptor&, // descriptor
                                               Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsDepthwiseConvolutionSupported(const TensorInfo&, // input
                                                       const TensorInfo&, // output
                                                       const DepthwiseConvolution2dDescriptor&, // descriptor
                                                       const TensorInfo&, // weights
                                                       const Optional<TensorInfo>&, // biases
                                                       Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsDequantizeSupported(const TensorInfo&, // input
                                             const TensorInfo&, // output
                                             Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsDetectionPostProcessSupported(const TensorInfo&, // boxEncodings
                                                       const TensorInfo&, // scores
                                                       const TensorInfo&, // anchors
                                                       const TensorInfo&, // detectionBoxes
                                                       const TensorInfo&, // detectionClasses
                                                       const TensorInfo&, // detectionScores
                                                       const TensorInfo&, // numDetections
                                                       const DetectionPostProcessDescriptor&, // descriptor
                                                       Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsDilatedDepthwiseConvolutionSupported(const TensorInfo&, // input
                                                              const TensorInfo&, // output
                                                              const DepthwiseConvolution2dDescriptor&, // descriptor
                                                              const TensorInfo&, // weights
                                                              const Optional<TensorInfo>&, // biases
                                                              Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsDivisionSupported(const TensorInfo&, // input0
                                           const TensorInfo&, // input1
                                           const TensorInfo&, // output
                                           Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsElementwiseUnarySupported(const TensorInfo&, // input
                                                   const TensorInfo&, // output
                                                   const ElementwiseUnaryDescriptor&, // descriptor
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsFakeQuantizationSupported(const TensorInfo&, // input
                                                   const FakeQuantizationDescriptor&, // descriptor
                                                   Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsFillSupported(const TensorInfo&, // input
                                       const TensorInfo&, // output
                                       const FillDescriptor&, // descriptor
                                       Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsFloorSupported(const TensorInfo&, // input
                                        const TensorInfo&, // output
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsFullyConnectedSupported(const TensorInfo&, // input
                                                 const TensorInfo&, // output
                                                 const TensorInfo&, // weights
                                                 const TensorInfo&, // biases
                                                 const FullyConnectedDescriptor&, // descriptor
                                                 Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsGatherSupported(const armnn::TensorInfo&, // input0
                                         const armnn::TensorInfo&, // input1
                                         const armnn::TensorInfo&, // output
                                         const GatherDescriptor&, // descriptor
                                         armnn::Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsInputSupported(const TensorInfo&, // input
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsInstanceNormalizationSupported(const TensorInfo&, // input
                                                        const TensorInfo&, // output
                                                        const InstanceNormalizationDescriptor&, // descriptor
                                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsL2NormalizationSupported(const TensorInfo&, // input
                                                  const TensorInfo&, // output
                                                  const L2NormalizationDescriptor&, // descriptor
                                                  Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsLogicalBinarySupported(const TensorInfo&, // input0
                                                const TensorInfo&, // input1
                                                const TensorInfo&, // output
                                                const LogicalBinaryDescriptor&, // descriptor
                                                Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsLogicalUnarySupported(const TensorInfo&, // input
                                               const TensorInfo&, // output
                                               const ElementwiseUnaryDescriptor&, // descriptor
                                               Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsLogSoftmaxSupported(const TensorInfo&, // input
                                             const TensorInfo&, // output
                                             const LogSoftmaxDescriptor&, // descriptor
                                             Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsLstmSupported(const TensorInfo&, // input
                                       const TensorInfo&, // outputStateIn
                                       const TensorInfo&, // cellStateIn
                                       const TensorInfo&, // scratchBuffer
                                       const TensorInfo&, // outputStateOut
                                       const TensorInfo&, // cellStateOut
                                       const TensorInfo&, // output
                                       const LstmDescriptor&, // descriptor
                                       const LstmInputParamsInfo&, // paramsInfo
                                       Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsMaximumSupported(const TensorInfo&, // input0
                                          const TensorInfo&, // input1
                                          const TensorInfo&, // output
                                          Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsMeanSupported(const TensorInfo&, // input
                                       const TensorInfo&, // output
                                       const MeanDescriptor&, // descriptor
                                       Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

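// Unlike the stubs above, memory copy support is reported as true by default; a backend that cannot
// handle MemCopy workloads would need to override this.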
bool LayerSupportBase::IsMemCopySupported(const armnn::TensorInfo&, // input
                                          const armnn::TensorInfo&, // output
                                          armnn::Optional<std::string&>) const // reasonIfUnsupported
{
    return true;
}

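// MemImport is likewise reported as supported by default.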
bool LayerSupportBase::IsMemImportSupported(const armnn::TensorInfo&, // input
                                            const armnn::TensorInfo&, // output
                                            armnn::Optional<std::string&>) const // reasonIfUnsupported
{
    return true;
}

bool LayerSupportBase::IsMergeSupported(const TensorInfo&, // input0
                                        const TensorInfo&, // input1
                                        const TensorInfo&, // output
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsMinimumSupported(const TensorInfo&, // input0
                                          const TensorInfo&, // input1
                                          const TensorInfo&, // output
                                          Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsMultiplicationSupported(const TensorInfo&, // input0
                                                 const TensorInfo&, // input1
                                                 const TensorInfo&, // output
                                                 Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsNormalizationSupported(const TensorInfo&, // input
                                                const TensorInfo&, // output
                                                const NormalizationDescriptor&, // descriptor
                                                Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsOutputSupported(const TensorInfo&, // output
                                         Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsPadSupported(const TensorInfo&, // input
                                      const TensorInfo&, // output
                                      const PadDescriptor&, // descriptor
                                      Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsPermuteSupported(const TensorInfo&, // input
                                          const TensorInfo&, // output
                                          const PermuteDescriptor&, // descriptor
                                          Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsPooling2dSupported(const TensorInfo&, // input
                                            const TensorInfo&, // output
                                            const Pooling2dDescriptor&, // descriptor
                                            Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsPooling3dSupported(const TensorInfo&, // input
                                            const TensorInfo&, // output
                                            const Pooling3dDescriptor&, // descriptor
                                            Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsPreCompiledSupported(const TensorInfo&, // input
                                              const PreCompiledDescriptor&, // descriptor
                                              Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsPreluSupported(const TensorInfo&, // input
                                        const TensorInfo&, // alpha
                                        const TensorInfo&, // output
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsQuantizeSupported(const armnn::TensorInfo&, // input
                                           const armnn::TensorInfo&, // output
                                           armnn::Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsQLstmSupported(const TensorInfo&, // input
                                        const TensorInfo&, // previousOutputIn
                                        const TensorInfo&, // previousCellStateIn
                                        const TensorInfo&, // outputStateOut
                                        const TensorInfo&, // cellStateOut
                                        const TensorInfo&, // output
                                        const QLstmDescriptor&, // descriptor
                                        const LstmInputParamsInfo&, // paramsInfo
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsQuantizedLstmSupported(const TensorInfo&, // input
                                                const TensorInfo&, // previousCellStateIn
                                                const TensorInfo&, // previousOutputIn
                                                const TensorInfo&, // cellStateOut
                                                const TensorInfo&, // output
                                                const QuantizedLstmInputParamsInfo&, // paramsInfo
                                                Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsRankSupported(const TensorInfo&, // input
                                       const TensorInfo&, // output
                                       Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsReduceSupported(const TensorInfo&, // input
                                         const TensorInfo&, // output
                                         const ReduceDescriptor&, // descriptor
                                         Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsReshapeSupported(const TensorInfo&, // input
                                          const TensorInfo&, // output
                                          const ReshapeDescriptor&, // descriptor
                                          Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsResizeSupported(const TensorInfo&, // input
                                         const TensorInfo&, // output
                                         const ResizeDescriptor&, // descriptor
                                         Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsShapeSupported(const TensorInfo&, // input
                                        const TensorInfo&, // output
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsSliceSupported(const TensorInfo&, // input
                                        const TensorInfo&, // output
                                        const SliceDescriptor&, // descriptor
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsSoftmaxSupported(const TensorInfo&, // input
                                          const TensorInfo&, // output
                                          const SoftmaxDescriptor&, // descriptor
                                          Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsSpaceToBatchNdSupported(const TensorInfo&, // input
                                                 const TensorInfo&, // output
                                                 const SpaceToBatchNdDescriptor&, // descriptor
                                                 Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsSpaceToDepthSupported(const TensorInfo&, // input
                                               const TensorInfo&, // output
                                               const SpaceToDepthDescriptor&, // descriptor
                                               Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsSplitterSupported(const TensorInfo&, // input
                                           const std::vector<std::reference_wrapper<TensorInfo>>&, // outputs
                                           const ViewsDescriptor&, // descriptor
                                           Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsStackSupported(const std::vector<const TensorInfo*>&, // inputs
                                        const TensorInfo&, // output
                                        const StackDescriptor&, // descriptor
                                        Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

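// StandIn layers are placeholders that no backend can execute directly, so a fixed reason is
// reported here instead of the generic "not implemented" message.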
bool LayerSupportBase::IsStandInSupported(const std::vector<const TensorInfo*>&, // inputs
                                          const std::vector<const TensorInfo*>&, // outputs
                                          const StandInDescriptor&, // descriptor
                                          Optional<std::string&> reasonIfUnsupported) const
{
    if (reasonIfUnsupported)
    {
        std::stringstream message;
        message << "StandIn layer is not executable via backends";

        reasonIfUnsupported.value() = message.str();
    }

    return false;
}

bool LayerSupportBase::IsStridedSliceSupported(const TensorInfo&, // input
                                               const TensorInfo&, // output
                                               const StridedSliceDescriptor&, // descriptor
                                               Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsSubtractionSupported(const TensorInfo&, // input0
                                              const TensorInfo&, // input1
                                              const TensorInfo&, // output
                                              Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsSwitchSupported(const TensorInfo&, // input0
                                         const TensorInfo&, // input1
                                         const TensorInfo&, // output0
                                         const TensorInfo&, // output1
                                         Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsTransposeConvolution2dSupported(const TensorInfo&, // input
                                                         const TensorInfo&, // output
                                                         const TransposeConvolution2dDescriptor&, // descriptor
                                                         const TensorInfo&, // weights
                                                         const Optional<TensorInfo>&, // biases
                                                         Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsTransposeSupported(const TensorInfo&, // input
                                            const TensorInfo&, // output
                                            const TransposeDescriptor&, // descriptor
                                            Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

bool LayerSupportBase::IsUnidirectionalSequenceLstmSupported(const TensorInfo&, // input
                                                             const TensorInfo&, // outputStateIn
                                                             const TensorInfo&, // cellStateIn
                                                             const TensorInfo&, // outputStateOut
                                                             const TensorInfo&, // cellStateOut
                                                             const TensorInfo&, // output
                                                             const LstmDescriptor&, // descriptor
                                                             const LstmInputParamsInfo&, // paramsInfo
                                                             Optional<std::string&> reasonIfUnsupported) const
{
    return DefaultLayerSupport(__func__, __FILE__, __LINE__, reasonIfUnsupported);
}

} // namespace armnn