1 //
2 // Copyright © 2017 Arm Ltd. All rights reserved.
3 // SPDX-License-Identifier: MIT
4 //
5 #pragma once
6
7 namespace armnn
8 {
9 namespace
10 {
11
12 // Make a workload of the specified WorkloadType.
13 template<typename WorkloadType>
14 struct MakeWorkloadForType
15 {
16 template<typename QueueDescriptorType, typename... Args>
Funcarmnn::__anon1c45edbd0111::MakeWorkloadForType17 static std::unique_ptr<WorkloadType> Func(const QueueDescriptorType& descriptor,
18 const WorkloadInfo& info,
19 Args&&... args)
20 {
21 return std::make_unique<WorkloadType>(descriptor, info, std::forward<Args>(args)...);
22 }
23 };
24
25 // Specialization for void workload type used for unsupported workloads.
26 template<>
27 struct MakeWorkloadForType<NullWorkload>
28 {
29 template<typename QueueDescriptorType, typename... Args>
Funcarmnn::__anon1c45edbd0111::MakeWorkloadForType30 static std::unique_ptr<NullWorkload> Func(const QueueDescriptorType& descriptor,
31 const WorkloadInfo& info,
32 Args&&... args)
33 {
34 IgnoreUnused(descriptor);
35 IgnoreUnused(info);
36 IgnoreUnused(args...);
37 return nullptr;
38 }
39 };
40
41 // Makes a workload for one the specified types based on the data type requirements of the tensorinfo.
42 // Specify type void as the WorkloadType for unsupported DataType/WorkloadType combos.
43 template <typename Float16Workload, typename Float32Workload, typename Uint8Workload, typename Int32Workload,
44 typename BooleanWorkload, typename Int8Workload, typename QueueDescriptorType, typename... Args>
MakeWorkloadHelper(const QueueDescriptorType & descriptor,const WorkloadInfo & info,Args &&...args)45 std::unique_ptr<IWorkload> MakeWorkloadHelper(const QueueDescriptorType& descriptor,
46 const WorkloadInfo& info,
47 Args&&... args)
48 {
49 const DataType dataType = !info.m_InputTensorInfos.empty() ?
50 info.m_InputTensorInfos[0].GetDataType()
51 : info.m_OutputTensorInfos[0].GetDataType();
52
53 switch (dataType)
54 {
55
56 case DataType::Float16:
57 return MakeWorkloadForType<Float16Workload>::Func(descriptor, info, std::forward<Args>(args)...);
58 case DataType::Float32:
59 return MakeWorkloadForType<Float32Workload>::Func(descriptor, info, std::forward<Args>(args)...);
60 case DataType::QAsymmU8:
61 return MakeWorkloadForType<Uint8Workload>::Func(descriptor, info, std::forward<Args>(args)...);
62 case DataType::QSymmS8:
63 case DataType::QAsymmS8:
64 return MakeWorkloadForType<Int8Workload>::Func(descriptor, info, std::forward<Args>(args)...);
65 case DataType::Signed32:
66 return MakeWorkloadForType<Int32Workload>::Func(descriptor, info, std::forward<Args>(args)...);
67 case DataType::Boolean:
68 return MakeWorkloadForType<BooleanWorkload>::Func(descriptor, info, std::forward<Args>(args)...);
69 case DataType::BFloat16:
70 case DataType::QSymmS16:
71 return nullptr;
72 default:
73 ARMNN_ASSERT_MSG(false, "Unknown DataType.");
74 return nullptr;
75 }
76 }
77
78 // Makes a workload for one the specified types based on the data type requirements of the tensorinfo.
79 // Calling this method is the equivalent of calling the five typed MakeWorkload method with <FloatWorkload,
80 // FloatWorkload, Uint8Workload, NullWorkload, NullWorkload, NullWorkload>.
81 // Specify type void as the WorkloadType for unsupported DataType/WorkloadType combos.
82 template <typename FloatWorkload, typename Uint8Workload, typename QueueDescriptorType, typename... Args>
MakeWorkloadHelper(const QueueDescriptorType & descriptor,const WorkloadInfo & info,Args &&...args)83 std::unique_ptr<IWorkload> MakeWorkloadHelper(const QueueDescriptorType& descriptor,
84 const WorkloadInfo& info,
85 Args&&... args)
86 {
87 return MakeWorkloadHelper<FloatWorkload, FloatWorkload, Uint8Workload, NullWorkload, NullWorkload, NullWorkload>(
88 descriptor,
89 info,
90 std::forward<Args>(args)...);
91 }
92
} // anonymous namespace
} // namespace armnn