1 //
2 // Copyright 2021 The ANGLE Project Authors. All rights reserved.
3 // Use of this source code is governed by a BSD-style license that can be
4 // found in the LICENSE file.
5 //
6 // validationCL.cpp: Validation functions for generic CL entry point parameters
7 // based on the OpenCL Specification V3.0.7, see https://www.khronos.org/registry/OpenCL/
8 // Each used CL error code is preceded by a citation of the relevant rule in the spec.
9
10 #include "libANGLE/cl_utils.h"
11 #include "libANGLE/validationCL_autogen.h"
12
13 #include "common/string_utils.h"
14
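// ANGLE_VALIDATE_VERSION(version, major, minor) returns CL_INVALID_VALUE from the enclosing
// validation function when 'version' is older than OpenCL <major>.<minor>. The ##u token pasting
// turns the major/minor literals into unsigned integers for CL_MAKE_VERSION, e.g.
// ANGLE_VALIDATE_VERSION(version, 2, 1) rejects queries that require at least OpenCL 2.1.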
15 #define ANGLE_VALIDATE_VERSION(version, major, minor) \
16 do \
17 { \
18 if (version < CL_MAKE_VERSION(major##u, minor##u, 0u)) \
19 { \
20 return CL_INVALID_VALUE; \
21 } \
22 } while (0)
23
24 #define ANGLE_VALIDATE_EXTENSION(extension) \
25 do \
26 { \
27 if (!extension) \
28 { \
29 return CL_INVALID_VALUE; \
30 } \
31 } while (0)
32
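// ANGLE_VALIDATE(expression) evaluates 'expression' exactly once and returns its error code from
// the enclosing validation function if it is anything other than CL_SUCCESS.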
33 #define ANGLE_VALIDATE(expression) \
34 do \
35 { \
36 const cl_int _errorCode = expression; \
37 if (_errorCode != CL_SUCCESS) \
38 { \
39 return _errorCode; \
40 } \
41 } while (0)
42
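// Accepts a query when either the version is at least <major>.<minor> or the named extension is
// supported. This covers queries that existed as extensions before being promoted to core, e.g.
// CL_DEVICE_DOUBLE_FP_CONFIG (see DeviceInfo::DoubleFpConfig below).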
43 #define ANGLE_VALIDATE_VERSION_OR_EXTENSION(version, major, minor, extension) \
44 do \
45 { \
46 if (version < CL_MAKE_VERSION(major##u, minor##u, 0u)) \
47 { \
48 ANGLE_VALIDATE_EXTENSION(extension); \
49 } \
50 } while (0)
51
52 namespace cl
53 {
54
55 namespace
56 {
57
58 cl_int ValidateContextProperties(const cl_context_properties *properties, const Platform *&platform)
59 {
60 platform = nullptr;
61 bool hasUserSync = false;
62 if (properties != nullptr)
63 {
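// The properties list is a zero-terminated sequence of <property, value> pairs: the switch
// consumes the property name and each case handler consumes the associated value.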
64 while (*properties != 0)
65 {
66 switch (*properties++)
67 {
68 case CL_CONTEXT_PLATFORM:
69 {
70 // CL_INVALID_PROPERTY if the same property name is specified more than once.
71 if (platform != nullptr)
72 {
73 return CL_INVALID_PROPERTY;
74 }
75 cl_platform_id nativePlatform = reinterpret_cast<cl_platform_id>(*properties++);
76 // CL_INVALID_PLATFORM if platform value specified in properties
77 // is not a valid platform.
78 if (!Platform::IsValid(nativePlatform))
79 {
80 return CL_INVALID_PLATFORM;
81 }
82 platform = &nativePlatform->cast<Platform>();
83 break;
84 }
85 case CL_CONTEXT_INTEROP_USER_SYNC:
86 {
87 // CL_INVALID_PROPERTY if the value specified for a supported property name
88 // is not valid, or if the same property name is specified more than once.
89 if ((*properties != CL_FALSE && *properties != CL_TRUE) || hasUserSync)
90 {
91 return CL_INVALID_PROPERTY;
92 }
93 ++properties;
94 hasUserSync = true;
95 break;
96 }
97 default:
98 {
99 // CL_INVALID_PROPERTY if context property name in properties
100 // is not a supported property name.
101 return CL_INVALID_PROPERTY;
102 }
103 }
104 }
105 }
106 return CL_SUCCESS;
107 }
108
109 bool ValidateMemoryFlags(MemFlags flags, const Platform &platform)
110 {
111 // CL_MEM_READ_WRITE, CL_MEM_WRITE_ONLY, and CL_MEM_READ_ONLY are mutually exclusive.
112 MemFlags allowedFlags(CL_MEM_READ_WRITE | CL_MEM_WRITE_ONLY | CL_MEM_READ_ONLY);
113 if (!flags.areMutuallyExclusive(CL_MEM_READ_WRITE, CL_MEM_WRITE_ONLY, CL_MEM_READ_ONLY))
114 {
115 return false;
116 }
117 // CL_MEM_USE_HOST_PTR is mutually exclusive with CL_MEM_ALLOC_HOST_PTR and CL_MEM_COPY_HOST_PTR.
118 allowedFlags.set(CL_MEM_USE_HOST_PTR | CL_MEM_ALLOC_HOST_PTR | CL_MEM_COPY_HOST_PTR);
119 if (!flags.areMutuallyExclusive(CL_MEM_USE_HOST_PTR,
120 CL_MEM_ALLOC_HOST_PTR | CL_MEM_COPY_HOST_PTR))
121 {
122 return false;
123 }
124 if (platform.isVersionOrNewer(1u, 2u))
125 {
126 // CL_MEM_HOST_WRITE_ONLY, CL_MEM_HOST_READ_ONLY,
127 // and CL_MEM_HOST_NO_ACCESS are mutually exclusive.
128 allowedFlags.set(CL_MEM_HOST_WRITE_ONLY | CL_MEM_HOST_READ_ONLY | CL_MEM_HOST_NO_ACCESS);
129 if (!flags.areMutuallyExclusive(CL_MEM_HOST_WRITE_ONLY, CL_MEM_HOST_READ_ONLY,
130 CL_MEM_HOST_NO_ACCESS))
131 {
132 return false;
133 }
134 }
135 if (platform.isVersionOrNewer(2u, 0u))
136 {
137 allowedFlags.set(CL_MEM_KERNEL_READ_AND_WRITE);
138 }
139 if (flags.hasOtherBitsThan(allowedFlags))
140 {
141 return false;
142 }
143 return true;
144 }
145
146 bool ValidateMapFlags(MapFlags flags, const Platform &platform)
147 {
148 MemFlags allowedFlags(CL_MAP_READ | CL_MAP_WRITE);
149 if (platform.isVersionOrNewer(1u, 2u))
150 {
151 // CL_MAP_WRITE_INVALIDATE_REGION is mutually exclusive with CL_MAP_READ and CL_MAP_WRITE.
152 allowedFlags.set(CL_MAP_WRITE_INVALIDATE_REGION);
153 if (!flags.areMutuallyExclusive(CL_MAP_WRITE_INVALIDATE_REGION, CL_MAP_READ | CL_MAP_WRITE))
154 {
155 return false;
156 }
157 }
158 if (flags.hasOtherBitsThan(allowedFlags))
159 {
160 return false;
161 }
162 return true;
163 }
164
165 bool ValidateMemoryProperties(const cl_mem_properties *properties)
166 {
167 if (properties != nullptr)
168 {
169 // OpenCL 3.0 does not define any optional properties.
170 // This function is reserved for extensions and future use.
171 if (*properties != 0)
172 {
173 return false;
174 }
175 }
176 return true;
177 }
178
179 cl_int ValidateCommandQueueAndEventWaitList(cl_command_queue commandQueue,
180 bool validateImageSupport,
181 cl_uint numEvents,
182 const cl_event *events)
183 {
184 // CL_INVALID_COMMAND_QUEUE if command_queue is not a valid host command-queue.
185 if (!CommandQueue::IsValid(commandQueue))
186 {
187 return CL_INVALID_COMMAND_QUEUE;
188 }
189 const CommandQueue &queue = commandQueue->cast<CommandQueue>();
190 if (!queue.isOnHost())
191 {
192 return CL_INVALID_COMMAND_QUEUE;
193 }
194
195 if (validateImageSupport)
196 {
197 // CL_INVALID_OPERATION if the device associated with command_queue does not support images.
198 if (queue.getDevice().getInfo().imageSupport == CL_FALSE)
199 {
200 return CL_INVALID_OPERATION;
201 }
202 }
203
204 // CL_INVALID_EVENT_WAIT_LIST if event_wait_list is NULL and num_events_in_wait_list > 0,
205 // or event_wait_list is not NULL and num_events_in_wait_list is 0, ...
206 if ((events == nullptr) != (numEvents == 0u))
207 {
208 return CL_INVALID_EVENT_WAIT_LIST;
209 }
210 while (numEvents-- != 0u)
211 {
212 // or if event objects in event_wait_list are not valid events.
213 if (!Event::IsValid(*events))
214 {
215 return CL_INVALID_EVENT_WAIT_LIST;
216 }
217
218 // CL_INVALID_CONTEXT if the context associated with command_queue
219 // and events in event_wait_list are not the same.
220 if (&queue.getContext() != &(*events++)->cast<Event>().getContext())
221 {
222 return CL_INVALID_CONTEXT;
223 }
224 }
225
226 return CL_SUCCESS;
227 }
228
229 cl_int ValidateEnqueueBuffer(const CommandQueue &queue,
230 cl_mem buffer,
231 bool hostRead,
232 bool hostWrite)
233 {
234 // CL_INVALID_MEM_OBJECT if buffer is not a valid buffer object.
235 if (!Buffer::IsValid(buffer))
236 {
237 return CL_INVALID_MEM_OBJECT;
238 }
239 const Buffer &buf = buffer->cast<Buffer>();
240
241 // CL_INVALID_CONTEXT if the context associated with command_queue and buffer are not the same.
242 if (&queue.getContext() != &buf.getContext())
243 {
244 return CL_INVALID_CONTEXT;
245 }
246
247 // CL_MISALIGNED_SUB_BUFFER_OFFSET if buffer is a sub-buffer object and offset specified
248 // when the sub-buffer object is created is not aligned to CL_DEVICE_MEM_BASE_ADDR_ALIGN
249 // value (which is in bits!) for device associated with queue.
250 if (buf.isSubBuffer() &&
251 (buf.getOffset() % (queue.getDevice().getInfo().memBaseAddrAlign / 8u)) != 0u)
252 {
253 return CL_MISALIGNED_SUB_BUFFER_OFFSET;
254 }
255
256 // CL_INVALID_OPERATION if a read function is called on buffer which
257 // has been created with CL_MEM_HOST_WRITE_ONLY or CL_MEM_HOST_NO_ACCESS.
258 if (hostRead && buf.getFlags().intersects(CL_MEM_HOST_WRITE_ONLY | CL_MEM_HOST_NO_ACCESS))
259 {
260 return CL_INVALID_OPERATION;
261 }
262
263 // CL_INVALID_OPERATION if a write function is called on buffer which
264 // has been created with CL_MEM_HOST_READ_ONLY or CL_MEM_HOST_NO_ACCESS.
265 if (hostWrite && buf.getFlags().intersects(CL_MEM_HOST_READ_ONLY | CL_MEM_HOST_NO_ACCESS))
266 {
267 return CL_INVALID_OPERATION;
268 }
269
270 return CL_SUCCESS;
271 }
272
273 cl_int ValidateBufferRect(const Buffer &buffer,
274 const size_t *origin,
275 const size_t *region,
276 size_t rowPitch,
277 size_t slicePitch)
278 {
279 // CL_INVALID_VALUE if origin or region is NULL.
280 if (origin == nullptr || region == nullptr)
281 {
282 return CL_INVALID_VALUE;
283 }
284
285 // CL_INVALID_VALUE if any region array element is 0.
286 if (region[0] == 0u || region[1] == 0u || region[2] == 0u)
287 {
288 return CL_INVALID_VALUE;
289 }
290
291 // CL_INVALID_VALUE if row_pitch is not 0 and is less than region[0].
292 if (rowPitch == 0u)
293 {
294 rowPitch = region[0];
295 }
296 else if (rowPitch < region[0])
297 {
298 return CL_INVALID_VALUE;
299 }
300
301 // CL_INVALID_VALUE if slice_pitch is not 0 and is less than
302 // region[1] x row_pitch and not a multiple of row_pitch.
303 if (slicePitch == 0u)
304 {
305 slicePitch = region[1] * rowPitch;
306 }
307 else if (slicePitch < region[1] * rowPitch || (slicePitch % rowPitch) != 0u)
308 {
309 return CL_INVALID_VALUE;
310 }
311
312 // CL_INVALID_VALUE if the region being read or written specified
313 // by (origin, region, row_pitch, slice_pitch) is out of bounds.
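// The region start is the flat byte offset origin[2] * slice_pitch + origin[1] * row_pitch +
// origin[0], and the region spans (region[2] - 1) full slices plus (region[1] - 1) full rows
// plus region[0] bytes of the final row.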
314 if (!buffer.isRegionValid(
315 origin[2] * slicePitch + origin[1] * rowPitch + origin[0],
316 (region[2] - 1u) * slicePitch + (region[1] - 1u) * rowPitch + region[0]))
317 {
318 return CL_INVALID_VALUE;
319 }
320
321 return CL_SUCCESS;
322 }
323
324 cl_int ValidateHostRect(const size_t *hostOrigin,
325 const size_t *region,
326 size_t hostRowPitch,
327 size_t hostSlicePitch,
328 const void *ptr)
329 {
330 // CL_INVALID_VALUE if host_origin or region is NULL.
331 if (hostOrigin == nullptr || region == nullptr)
332 {
333 return CL_INVALID_VALUE;
334 }
335
336 // CL_INVALID_VALUE if any region array element is 0.
337 if (region[0] == 0u || region[1] == 0u || region[2] == 0u)
338 {
339 return CL_INVALID_VALUE;
340 }
341
342 // CL_INVALID_VALUE if host_row_pitch is not 0 and is less than region[0].
343 if (hostRowPitch == 0u)
344 {
345 hostRowPitch = region[0];
346 }
347 else if (hostRowPitch < region[0])
348 {
349 return CL_INVALID_VALUE;
350 }
351
352 // CL_INVALID_VALUE if host_slice_pitch is not 0 and is less than
353 // region[1] x host_row_pitch and not a multiple of host_row_pitch.
354 if (hostSlicePitch != 0u &&
355 (hostSlicePitch < region[1] * hostRowPitch || (hostSlicePitch % hostRowPitch) != 0u))
356 {
357 return CL_INVALID_VALUE;
358 }
359
360 // CL_INVALID_VALUE if ptr is NULL.
361 if (ptr == nullptr)
362 {
363 return CL_INVALID_VALUE;
364 }
365
366 return CL_SUCCESS;
367 }
368
369 cl_int ValidateEnqueueImage(const CommandQueue &queue, cl_mem image, bool hostRead, bool hostWrite)
370 {
371 // CL_INVALID_MEM_OBJECT if image is not a valid image object.
372 if (!Image::IsValid(image))
373 {
374 return CL_INVALID_MEM_OBJECT;
375 }
376 const Image &img = image->cast<Image>();
377
378 // CL_INVALID_CONTEXT if the context associated with command_queue and image are not the same.
379 if (&queue.getContext() != &img.getContext())
380 {
381 return CL_INVALID_CONTEXT;
382 }
383
384 // CL_INVALID_OPERATION if a read function is called on image which
385 // has been created with CL_MEM_HOST_WRITE_ONLY or CL_MEM_HOST_NO_ACCESS.
386 if (hostRead && img.getFlags().intersects(CL_MEM_HOST_WRITE_ONLY | CL_MEM_HOST_NO_ACCESS))
387 {
388 return CL_INVALID_OPERATION;
389 }
390
391 // CL_INVALID_OPERATION if a write function is called on image which
392 // has been created with CL_MEM_HOST_READ_ONLY or CL_MEM_HOST_NO_ACCESS.
393 if (hostWrite && img.getFlags().intersects(CL_MEM_HOST_READ_ONLY | CL_MEM_HOST_NO_ACCESS))
394 {
395 return CL_INVALID_OPERATION;
396 }
397
398 return CL_SUCCESS;
399 }
400
401 cl_int ValidateImageForDevice(const Image &image,
402 const Device &device,
403 const size_t *origin,
404 const size_t *region)
405 {
406 // CL_INVALID_VALUE if origin or region is NULL.
407 if (origin == nullptr || region == nullptr)
408 {
409 return CL_INVALID_VALUE;
410 }
411
412 // CL_INVALID_VALUE if values in origin and region do not follow rules
413 // described in the argument description for origin and region.
414 // The values in region cannot be 0.
415 if (region[0] == 0u || region[1] == 0u || region[2] == 0u)
416 {
417 return CL_INVALID_VALUE;
418 }
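// Dimensions that do not apply to the image type must pass an origin of 0 and a region of 1.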
419 switch (image.getType())
420 {
421 // If image is a 1D image or 1D image buffer object,
422 // origin[1] and origin[2] must be 0 and region[1] and region[2] must be 1.
423 case MemObjectType::Image1D:
424 case MemObjectType::Image1D_Buffer:
425 if (origin[1] != 0u || origin[2] != 0u || region[1] != 1u || region[2] != 1u)
426 {
427 return CL_INVALID_VALUE;
428 }
429 break;
430 // If image is a 2D image object or a 1D image array object,
431 // origin[2] must be 0 and region[2] must be 1.
432 case MemObjectType::Image2D:
433 case MemObjectType::Image1D_Array:
434 if (origin[2] != 0u || region[2] != 1u)
435 {
436 return CL_INVALID_VALUE;
437 }
438 break;
439 case MemObjectType::Image3D:
440 case MemObjectType::Image2D_Array:
441 break;
442 default:
443 ASSERT(false);
444 return CL_INVALID_IMAGE_DESCRIPTOR;
445 }
446
447 // CL_INVALID_VALUE if the region being read or written
448 // specified by origin and region is out of bounds.
449
450 if (!image.isRegionValid(cl::MemOffsets{origin[0], origin[1], origin[2]},
451 cl::Coordinate{region[0], region[1], region[2]}))
452 {
453 return CL_INVALID_VALUE;
454 }
455
456 // CL_INVALID_IMAGE_SIZE if image dimensions (image width, height, specified or computed
457 // row and/or slice pitch) for image are not supported by device associated with queue.
458 if (!device.supportsImageDimensions(image.getDescriptor()))
459 {
460 return CL_INVALID_IMAGE_SIZE;
461 }
462
463 return CL_SUCCESS;
464 }
465
466 cl_int ValidateHostRegionForImage(const Image &image,
467 const size_t region[3],
468 size_t rowPitch,
469 size_t slicePitch,
470 const void *ptr)
471 {
472 // CL_INVALID_VALUE if row_pitch is not 0 and is less than the element size in bytes x width.
473 if (rowPitch == 0u)
474 {
475 rowPitch = image.getElementSize() * region[0];
476 }
477 else if (rowPitch < image.getElementSize() * region[0])
478 {
479 return CL_INVALID_VALUE;
480 }
481 if (slicePitch != 0u)
482 {
483 // TODO(jplate) Follow up with https://github.com/KhronosGroup/OpenCL-Docs/issues/624
484 // This error is missing in the OpenCL spec.
485 // slice_pitch must be 0 if image is a 1D or 2D image.
486 if (image.getType() == MemObjectType::Image1D ||
487 image.getType() == MemObjectType::Image1D_Buffer ||
488 image.getType() == MemObjectType::Image2D)
489 {
490 return CL_INVALID_VALUE;
491 }
492 else if (slicePitch < rowPitch)
493 {
494 return CL_INVALID_VALUE;
495 }
496 // CL_INVALID_VALUE if slice_pitch is not 0 and is less than row_pitch x height.
497 else if (((image.getType() == MemObjectType::Image2D_Array) ||
498 (image.getType() == MemObjectType::Image3D)) &&
499 (slicePitch < rowPitch * region[1]))
500 {
501 return CL_INVALID_VALUE;
502 }
503 }
504
505 // CL_INVALID_VALUE if ptr is NULL.
506 if (ptr == nullptr)
507 {
508 return CL_INVALID_VALUE;
509 }
510
511 return CL_SUCCESS;
512 }
513
514 } // namespace
515
516 // CL 1.0
517 cl_int ValidateGetPlatformIDs(cl_uint num_entries,
518 const cl_platform_id *platforms,
519 const cl_uint *num_platforms)
520 {
521 // CL_INVALID_VALUE if num_entries is equal to zero and platforms is not NULL
522 // or if both num_platforms and platforms are NULL.
523 if ((num_entries == 0u && platforms != nullptr) ||
524 (platforms == nullptr && num_platforms == nullptr))
525 {
526 return CL_INVALID_VALUE;
527 }
528
529 return CL_SUCCESS;
530 }
531
532 cl_int ValidateGetPlatformInfo(cl_platform_id platform,
533 PlatformInfo param_name,
534 size_t param_value_size,
535 const void *param_value,
536 const size_t *param_value_size_ret)
537 {
538 // CL_INVALID_PLATFORM if platform is not a valid platform.
539 if (!Platform::IsValidOrDefault(platform))
540 {
541 return CL_INVALID_PLATFORM;
542 }
543
544 // CL_INVALID_VALUE if param_name is not one of the supported values.
545 const cl_version version = platform->cast<Platform>().getVersion();
546 switch (param_name)
547 {
548 case PlatformInfo::HostTimerResolution:
549 ANGLE_VALIDATE_VERSION(version, 2, 1);
550 break;
551 case PlatformInfo::NumericVersion:
552 case PlatformInfo::ExtensionsWithVersion:
553 ANGLE_VALIDATE_VERSION(version, 3, 0);
554 break;
555 case PlatformInfo::InvalidEnum:
556 return CL_INVALID_VALUE;
557 default:
558 // All remaining possible values for param_name are valid for all versions.
559 break;
560 }
561
562 return CL_SUCCESS;
563 }
564
565 cl_int ValidateGetDeviceIDs(cl_platform_id platform,
566 DeviceType device_type,
567 cl_uint num_entries,
568 const cl_device_id *devices,
569 const cl_uint *num_devices)
570 {
571 // CL_INVALID_PLATFORM if platform is not a valid platform.
572 if (!Platform::IsValidOrDefault(platform))
573 {
574 return CL_INVALID_PLATFORM;
575 }
576
577 // CL_INVALID_DEVICE_TYPE if device_type is not a valid value.
578 if (!Device::IsValidType(device_type))
579 {
580 return CL_INVALID_DEVICE_TYPE;
581 }
582
583 // CL_INVALID_VALUE if num_entries is equal to zero and devices is not NULL
584 // or if both num_devices and devices are NULL.
585 if ((num_entries == 0u && devices != nullptr) || (num_devices == nullptr && devices == nullptr))
586 {
587 return CL_INVALID_VALUE;
588 }
589
590 return CL_SUCCESS;
591 }
592
593 cl_int ValidateGetDeviceInfo(cl_device_id device,
594 DeviceInfo param_name,
595 size_t param_value_size,
596 const void *param_value,
597 const size_t *param_value_size_ret)
598 {
599 // CL_INVALID_DEVICE if device is not a valid device.
600 if (!Device::IsValid(device))
601 {
602 return CL_INVALID_DEVICE;
603 }
604 const Device &dev = device->cast<Device>();
605
606 // CL_INVALID_VALUE if param_name is not one of the supported values
607 // or if param_name is a value that is available as an extension
608 // and the corresponding extension is not supported by the device.
609 const cl_version version = dev.getVersion();
610 const rx::CLDeviceImpl::Info &info = dev.getInfo();
611 // Enums ordered within their version block as they appear in the OpenCL spec V3.0.7, table 5
612 switch (param_name)
613 {
614 case DeviceInfo::PreferredVectorWidthHalf:
615 case DeviceInfo::NativeVectorWidthChar:
616 case DeviceInfo::NativeVectorWidthShort:
617 case DeviceInfo::NativeVectorWidthInt:
618 case DeviceInfo::NativeVectorWidthLong:
619 case DeviceInfo::NativeVectorWidthFloat:
620 case DeviceInfo::NativeVectorWidthDouble:
621 case DeviceInfo::NativeVectorWidthHalf:
622 case DeviceInfo::HostUnifiedMemory:
623 case DeviceInfo::OpenCL_C_Version:
624 ANGLE_VALIDATE_VERSION(version, 1, 1);
625 break;
626
627 case DeviceInfo::ImageMaxBufferSize:
628 case DeviceInfo::ImageMaxArraySize:
629 case DeviceInfo::LinkerAvailable:
630 case DeviceInfo::BuiltInKernels:
631 case DeviceInfo::PrintfBufferSize:
632 case DeviceInfo::PreferredInteropUserSync:
633 case DeviceInfo::ParentDevice:
634 case DeviceInfo::PartitionMaxSubDevices:
635 case DeviceInfo::PartitionProperties:
636 case DeviceInfo::PartitionAffinityDomain:
637 case DeviceInfo::PartitionType:
638 case DeviceInfo::ReferenceCount:
639 ANGLE_VALIDATE_VERSION(version, 1, 2);
640 break;
641
642 case DeviceInfo::MaxReadWriteImageArgs:
643 case DeviceInfo::ImagePitchAlignment:
644 case DeviceInfo::ImageBaseAddressAlignment:
645 case DeviceInfo::MaxPipeArgs:
646 case DeviceInfo::PipeMaxActiveReservations:
647 case DeviceInfo::PipeMaxPacketSize:
648 case DeviceInfo::MaxGlobalVariableSize:
649 case DeviceInfo::GlobalVariablePreferredTotalSize:
650 case DeviceInfo::QueueOnDeviceProperties:
651 case DeviceInfo::QueueOnDevicePreferredSize:
652 case DeviceInfo::QueueOnDeviceMaxSize:
653 case DeviceInfo::MaxOnDeviceQueues:
654 case DeviceInfo::MaxOnDeviceEvents:
655 case DeviceInfo::SVM_Capabilities:
656 case DeviceInfo::PreferredPlatformAtomicAlignment:
657 case DeviceInfo::PreferredGlobalAtomicAlignment:
658 case DeviceInfo::PreferredLocalAtomicAlignment:
659 ANGLE_VALIDATE_VERSION(version, 2, 0);
660 break;
661
662 case DeviceInfo::IL_Version:
663 case DeviceInfo::MaxNumSubGroups:
664 case DeviceInfo::SubGroupIndependentForwardProgress:
665 ANGLE_VALIDATE_VERSION(version, 2, 1);
666 break;
667
668 case DeviceInfo::ILsWithVersion:
669 case DeviceInfo::BuiltInKernelsWithVersion:
670 case DeviceInfo::NumericVersion:
671 case DeviceInfo::OpenCL_C_AllVersions:
672 case DeviceInfo::OpenCL_C_Features:
673 case DeviceInfo::ExtensionsWithVersion:
674 case DeviceInfo::AtomicMemoryCapabilities:
675 case DeviceInfo::AtomicFenceCapabilities:
676 case DeviceInfo::NonUniformWorkGroupSupport:
677 case DeviceInfo::WorkGroupCollectiveFunctionsSupport:
678 case DeviceInfo::GenericAddressSpaceSupport:
679 case DeviceInfo::DeviceEnqueueCapabilities:
680 case DeviceInfo::PipeSupport:
681 case DeviceInfo::PreferredWorkGroupSizeMultiple:
682 case DeviceInfo::LatestConformanceVersionPassed:
683 ANGLE_VALIDATE_VERSION(version, 3, 0);
684 break;
685
686 case DeviceInfo::DoubleFpConfig:
687 // This extension became a core query from OpenCL 1.2 onward.
688 // Only need to validate for OpenCL versions less than 1.2 here.
689 ANGLE_VALIDATE_VERSION_OR_EXTENSION(version, 1, 2, info.khrFP64);
690 break;
691
692 case DeviceInfo::InvalidEnum:
693 return CL_INVALID_VALUE;
694 default:
695 // All remaining possible values for param_name are valid for all versions.
696 break;
697 }
698
699 return CL_SUCCESS;
700 }
701
702 cl_int ValidateCreateContext(const cl_context_properties *properties,
703 cl_uint num_devices,
704 const cl_device_id *devices,
705 void(CL_CALLBACK *pfn_notify)(const char *errinfo,
706 const void *private_info,
707 size_t cb,
708 void *user_data),
709 const void *user_data)
710 {
711 // CL_INVALID_VALUE if devices is NULL or if num_devices is equal to zero
712 // or if pfn_notify is NULL but user_data is not NULL.
713 if (devices == nullptr || num_devices == 0u || (pfn_notify == nullptr && user_data != nullptr))
714 {
715 return CL_INVALID_VALUE;
716 }
717
718 // CL_INVALID_DEVICE if any device in devices is not a valid device.
719 for (cl_uint i = 0; i < num_devices; ++i)
720 {
721 if (!Device::IsValid(devices[i]))
722 {
723 return CL_INVALID_DEVICE;
724 }
725 }
726
727 // Because ANGLE can have one or more platforms here (e.g. passthrough, Vulkan, etc.), if a
728 // context platform is not explicitly specified in the properties, the spec says to default to an
729 // implementation-defined platform. In ANGLE's case, we can derive the platform from the device
730 // object.
731 const Platform *platform = nullptr;
732 ANGLE_VALIDATE(ValidateContextProperties(properties, platform));
733 if (platform == nullptr)
734 {
735 // Just use/pick the first device's platform object here
736 platform = &(devices[0])->cast<Device>().getPlatform();
737 }
738
739 // Ensure that each device in device list is derived from the same platform object
740 for (cl_uint i = 0; i < num_devices; ++i)
741 {
742 if (platform != &(devices[i])->cast<Device>().getPlatform())
743 {
744 return CL_INVALID_PLATFORM;
745 }
746 }
747
748 return CL_SUCCESS;
749 }
750
751 cl_int ValidateCreateContextFromType(const cl_context_properties *properties,
752 DeviceType device_type,
753 void(CL_CALLBACK *pfn_notify)(const char *errinfo,
754 const void *private_info,
755 size_t cb,
756 void *user_data),
757 const void *user_data)
758 {
759 // CL_INVALID_DEVICE_TYPE if device_type is not a valid value.
760 if (!Device::IsValidType(device_type))
761 {
762 return CL_INVALID_DEVICE_TYPE;
763 }
764
765 const Platform *platform = nullptr;
766 ANGLE_VALIDATE(ValidateContextProperties(properties, platform));
767 if (platform == nullptr)
768 {
769 platform = Platform::GetDefault();
770 if (platform == nullptr)
771 {
772 return CL_INVALID_PLATFORM;
773 }
774 }
775
776 if (!platform->hasDeviceType(device_type))
777 {
778 return CL_DEVICE_NOT_FOUND;
779 }
780
781 // CL_INVALID_VALUE if pfn_notify is NULL but user_data is not NULL.
782 if (pfn_notify == nullptr && user_data != nullptr)
783 {
784 return CL_INVALID_VALUE;
785 }
786
787 return CL_SUCCESS;
788 }
789
790 cl_int ValidateRetainContext(cl_context context)
791 {
792 // CL_INVALID_CONTEXT if context is not a valid OpenCL context.
793 return Context::IsValid(context) ? CL_SUCCESS : CL_INVALID_CONTEXT;
794 }
795
796 cl_int ValidateReleaseContext(cl_context context)
797 {
798 // CL_INVALID_CONTEXT if context is not a valid OpenCL context.
799 return Context::IsValid(context) ? CL_SUCCESS : CL_INVALID_CONTEXT;
800 }
801
802 cl_int ValidateGetContextInfo(cl_context context,
803 ContextInfo param_name,
804 size_t param_value_size,
805 const void *param_value,
806 const size_t *param_value_size_ret)
807 {
808 // CL_INVALID_CONTEXT if context is not a valid context.
809 if (!Context::IsValid(context))
810 {
811 return CL_INVALID_CONTEXT;
812 }
813
814 // CL_INVALID_VALUE if param_name is not one of the supported values.
815 if (param_name == ContextInfo::InvalidEnum)
816 {
817 return CL_INVALID_VALUE;
818 }
819
820 return CL_SUCCESS;
821 }
822
823 cl_int ValidateRetainCommandQueue(cl_command_queue command_queue)
824 {
825 // CL_INVALID_COMMAND_QUEUE if command_queue is not a valid command-queue.
826 return CommandQueue::IsValid(command_queue) ? CL_SUCCESS : CL_INVALID_COMMAND_QUEUE;
827 }
828
829 cl_int ValidateReleaseCommandQueue(cl_command_queue command_queue)
830 {
831 // CL_INVALID_COMMAND_QUEUE if command_queue is not a valid command-queue.
832 return CommandQueue::IsValid(command_queue) ? CL_SUCCESS : CL_INVALID_COMMAND_QUEUE;
833 }
834
835 cl_int ValidateGetCommandQueueInfo(cl_command_queue command_queue,
836 CommandQueueInfo param_name,
837 size_t param_value_size,
838 const void *param_value,
839 const size_t *param_value_size_ret)
840 {
841 // CL_INVALID_COMMAND_QUEUE if command_queue is not a valid command-queue ...
842 if (!CommandQueue::IsValid(command_queue))
843 {
844 return CL_INVALID_COMMAND_QUEUE;
845 }
846 const CommandQueue &queue = command_queue->cast<CommandQueue>();
847 // or if command_queue is not a valid command-queue for param_name.
848 if (param_name == CommandQueueInfo::Size && queue.isOnDevice() &&
849 !queue.getDevice().hasDeviceEnqueueCaps())
850 {
851 return CL_INVALID_COMMAND_QUEUE;
852 }
853
854 // CL_INVALID_VALUE if param_name is not one of the supported values.
855 const cl_version version = queue.getDevice().getVersion();
856 switch (param_name)
857 {
858 case CommandQueueInfo::Size:
859 ANGLE_VALIDATE_VERSION(version, 2, 0);
860 break;
861 case CommandQueueInfo::DeviceDefault:
862 ANGLE_VALIDATE_VERSION(version, 2, 1);
863 break;
864 case CommandQueueInfo::PropertiesArray:
865 ANGLE_VALIDATE_VERSION(version, 3, 0);
866 break;
867 case CommandQueueInfo::InvalidEnum:
868 return CL_INVALID_VALUE;
869 default:
870 // All remaining possible values for param_name are valid for all versions.
871 break;
872 }
873
874 return CL_SUCCESS;
875 }
876
877 cl_int ValidateCreateBuffer(cl_context context, MemFlags flags, size_t size, const void *host_ptr)
878 {
879 // CL_INVALID_CONTEXT if context is not a valid context.
880 if (!Context::IsValid(context))
881 {
882 return CL_INVALID_CONTEXT;
883 }
884 const Context &ctx = context->cast<Context>();
885
886 // CL_INVALID_VALUE if values specified in flags are not valid
887 // as defined in the Memory Flags table.
888 if (!ValidateMemoryFlags(flags, ctx.getPlatform()))
889 {
890 return CL_INVALID_VALUE;
891 }
892
893 // CL_INVALID_BUFFER_SIZE if size is 0 ...
894 if (size == 0u)
895 {
896 return CL_INVALID_BUFFER_SIZE;
897 }
898 for (const DevicePtr &device : ctx.getDevices())
899 {
900 // or if size is greater than CL_DEVICE_MAX_MEM_ALLOC_SIZE for all devices in context.
901 if (size > device->getInfo().maxMemAllocSize)
902 {
903 return CL_INVALID_BUFFER_SIZE;
904 }
905 }
906
907 // CL_INVALID_HOST_PTR
908 // if host_ptr is NULL and CL_MEM_USE_HOST_PTR or CL_MEM_COPY_HOST_PTR are set in flags or
909 // if host_ptr is not NULL but CL_MEM_COPY_HOST_PTR or CL_MEM_USE_HOST_PTR are not set in flags.
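// I.e. host_ptr must be non-NULL exactly when at least one of the two host-pointer flags is set.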
910 if ((host_ptr != nullptr) != flags.intersects(CL_MEM_USE_HOST_PTR | CL_MEM_COPY_HOST_PTR))
911 {
912 return CL_INVALID_HOST_PTR;
913 }
914
915 return CL_SUCCESS;
916 }
917
918 cl_int ValidateRetainMemObject(cl_mem memobj)
919 {
920 // CL_INVALID_MEM_OBJECT if memobj is not a valid memory object.
921 return Memory::IsValid(memobj) ? CL_SUCCESS : CL_INVALID_MEM_OBJECT;
922 }
923
924 cl_int ValidateReleaseMemObject(cl_mem memobj)
925 {
926 // CL_INVALID_MEM_OBJECT if memobj is not a valid memory object.
927 return Memory::IsValid(memobj) ? CL_SUCCESS : CL_INVALID_MEM_OBJECT;
928 }
929
930 cl_int ValidateGetSupportedImageFormats(cl_context context,
931 MemFlags flags,
932 MemObjectType image_type,
933 cl_uint num_entries,
934 const cl_image_format *image_formats,
935 const cl_uint *num_image_formats)
936 {
937 // CL_INVALID_CONTEXT if context is not a valid context.
938 if (!Context::IsValid(context))
939 {
940 return CL_INVALID_CONTEXT;
941 }
942 const Context &ctx = context->cast<Context>();
943
944 // CL_INVALID_VALUE if flags or image_type are not valid,
945 if (!ValidateMemoryFlags(flags, ctx.getPlatform()) || !Image::IsTypeValid(image_type))
946 {
947 return CL_INVALID_VALUE;
948 }
949 // or if num_entries is 0 and image_formats is not NULL.
950 if (num_entries == 0u && image_formats != nullptr)
951 {
952 return CL_INVALID_VALUE;
953 }
954
955 return CL_SUCCESS;
956 }
957
958 cl_int ValidateGetMemObjectInfo(cl_mem memobj,
959 MemInfo param_name,
960 size_t param_value_size,
961 const void *param_value,
962 const size_t *param_value_size_ret)
963 {
964 // CL_INVALID_MEM_OBJECT if memobj is not a valid memory object.
965 if (!Memory::IsValid(memobj))
966 {
967 return CL_INVALID_MEM_OBJECT;
968 }
969
970 // CL_INVALID_VALUE if param_name is not valid.
971 const cl_version version = memobj->cast<Memory>().getContext().getPlatform().getVersion();
972 switch (param_name)
973 {
974 case MemInfo::AssociatedMemObject:
975 case MemInfo::Offset:
976 ANGLE_VALIDATE_VERSION(version, 1, 1);
977 break;
978 case MemInfo::UsesSVM_Pointer:
979 ANGLE_VALIDATE_VERSION(version, 2, 0);
980 break;
981 case MemInfo::Properties:
982 ANGLE_VALIDATE_VERSION(version, 3, 0);
983 break;
984 case MemInfo::InvalidEnum:
985 return CL_INVALID_VALUE;
986 default:
987 // All remaining possible values for param_name are valid for all versions.
988 break;
989 }
990
991 return CL_SUCCESS;
992 }
993
994 cl_int ValidateGetImageInfo(cl_mem image,
995 ImageInfo param_name,
996 size_t param_value_size,
997 const void *param_value,
998 const size_t *param_value_size_ret)
999 {
1000 // CL_INVALID_MEM_OBJECT if image is not a valid image object.
1001 if (!Image::IsValid(image))
1002 {
1003 return CL_INVALID_MEM_OBJECT;
1004 }
1005
1006 // CL_INVALID_VALUE if param_name is not valid.
1007 const cl_version version = image->cast<Image>().getContext().getPlatform().getVersion();
1008 switch (param_name)
1009 {
1010 case ImageInfo::ArraySize:
1011 case ImageInfo::Buffer:
1012 case ImageInfo::NumMipLevels:
1013 case ImageInfo::NumSamples:
1014 ANGLE_VALIDATE_VERSION(version, 1, 2);
1015 break;
1016 case ImageInfo::InvalidEnum:
1017 return CL_INVALID_VALUE;
1018 default:
1019 // All remaining possible values for param_name are valid for all versions.
1020 break;
1021 }
1022
1023 return CL_SUCCESS;
1024 }
1025
1026 cl_int ValidateRetainSampler(cl_sampler sampler)
1027 {
1028 // CL_INVALID_SAMPLER if sampler is not a valid sampler object.
1029 return Sampler::IsValid(sampler) ? CL_SUCCESS : CL_INVALID_SAMPLER;
1030 }
1031
1032 cl_int ValidateReleaseSampler(cl_sampler sampler)
1033 {
1034 // CL_INVALID_SAMPLER if sampler is not a valid sampler object.
1035 return Sampler::IsValid(sampler) ? CL_SUCCESS : CL_INVALID_SAMPLER;
1036 }
1037
1038 cl_int ValidateGetSamplerInfo(cl_sampler sampler,
1039 SamplerInfo param_name,
1040 size_t param_value_size,
1041 const void *param_value,
1042 const size_t *param_value_size_ret)
1043 {
1044 // CL_INVALID_SAMPLER if sampler is not a valid sampler object.
1045 if (!Sampler::IsValid(sampler))
1046 {
1047 return CL_INVALID_SAMPLER;
1048 }
1049
1050 // CL_INVALID_VALUE if param_name is not valid.
1051 const cl_version version = sampler->cast<Sampler>().getContext().getPlatform().getVersion();
1052 switch (param_name)
1053 {
1054 case SamplerInfo::Properties:
1055 ANGLE_VALIDATE_VERSION(version, 3, 0);
1056 break;
1057 case SamplerInfo::InvalidEnum:
1058 return CL_INVALID_VALUE;
1059 default:
1060 // All remaining possible values for param_name are valid for all versions.
1061 break;
1062 }
1063
1064 return CL_SUCCESS;
1065 }
1066
1067 cl_int ValidateCreateProgramWithSource(cl_context context,
1068 cl_uint count,
1069 const char **strings,
1070 const size_t *lengths)
1071 {
1072 // CL_INVALID_CONTEXT if context is not a valid context.
1073 if (!Context::IsValid(context))
1074 {
1075 return CL_INVALID_CONTEXT;
1076 }
1077
1078 // CL_INVALID_VALUE if count is zero or if strings or any entry in strings is NULL.
1079 if (count == 0u || strings == nullptr)
1080 {
1081 return CL_INVALID_VALUE;
1082 }
1083 while (count-- != 0u)
1084 {
1085 if (*strings++ == nullptr)
1086 {
1087 return CL_INVALID_VALUE;
1088 }
1089 }
1090
1091 return CL_SUCCESS;
1092 }
1093
1094 cl_int ValidateCreateProgramWithBinary(cl_context context,
1095 cl_uint num_devices,
1096 const cl_device_id *device_list,
1097 const size_t *lengths,
1098 const unsigned char **binaries,
1099 const cl_int *binary_status)
1100 {
1101 // CL_INVALID_CONTEXT if context is not a valid context.
1102 if (!Context::IsValid(context))
1103 {
1104 return CL_INVALID_CONTEXT;
1105 }
1106 const Context &ctx = context->cast<Context>();
1107
1108 // CL_INVALID_VALUE if device_list is NULL or num_devices is zero.
1109 // CL_INVALID_VALUE if lengths or binaries is NULL.
1110 if (device_list == nullptr || num_devices == 0u || lengths == nullptr || binaries == nullptr)
1111 {
1112 return CL_INVALID_VALUE;
1113 }
1114 while (num_devices-- != 0u)
1115 {
1116 // CL_INVALID_DEVICE if any device in device_list
1117 // is not in the list of devices associated with context.
1118 if (!ctx.hasDevice(*device_list++))
1119 {
1120 return CL_INVALID_DEVICE;
1121 }
1122
1123 // CL_INVALID_VALUE if any entry in lengths[i] is zero or binaries[i] is NULL.
1124 if (*lengths++ == 0u || *binaries++ == nullptr)
1125 {
1126 return CL_INVALID_VALUE;
1127 }
1128 }
1129
1130 return CL_SUCCESS;
1131 }
1132
1133 cl_int ValidateRetainProgram(cl_program program)
1134 {
1135 // CL_INVALID_PROGRAM if program is not a valid program object.
1136 return Program::IsValid(program) ? CL_SUCCESS : CL_INVALID_PROGRAM;
1137 }
1138
1139 cl_int ValidateReleaseProgram(cl_program program)
1140 {
1141 // CL_INVALID_PROGRAM if program is not a valid program object.
1142 return Program::IsValid(program) ? CL_SUCCESS : CL_INVALID_PROGRAM;
1143 }
1144
1145 cl_int ValidateBuildProgram(cl_program program,
1146 cl_uint num_devices,
1147 const cl_device_id *device_list,
1148 const char *options,
1149 void(CL_CALLBACK *pfn_notify)(cl_program program, void *user_data),
1150 const void *user_data)
1151 {
1152 // CL_INVALID_PROGRAM if program is not a valid program object.
1153 if (!Program::IsValid(program))
1154 {
1155 return CL_INVALID_PROGRAM;
1156 }
1157 const Program &prog = program->cast<Program>();
1158
1159 // CL_INVALID_VALUE if device_list is NULL and num_devices is greater than zero,
1160 // or if device_list is not NULL and num_devices is zero.
1161 if ((device_list != nullptr) != (num_devices != 0u))
1162 {
1163 return CL_INVALID_VALUE;
1164 }
1165
1166 // CL_INVALID_DEVICE if any device in device_list
1167 // is not in the list of devices associated with program.
1168 while (num_devices-- != 0u)
1169 {
1170 if (!prog.hasDevice(*device_list++))
1171 {
1172 return CL_INVALID_DEVICE;
1173 }
1174 }
1175
1176 // CL_INVALID_VALUE if pfn_notify is NULL but user_data is not NULL.
1177 if (pfn_notify == nullptr && user_data != nullptr)
1178 {
1179 return CL_INVALID_VALUE;
1180 }
1181
1182 // CL_INVALID_OPERATION if the build of a program executable for any of the devices listed
1183 // in device_list by a previous call to clBuildProgram for program has not completed.
1184 if (prog.isBuilding())
1185 {
1186 return CL_INVALID_OPERATION;
1187 }
1188
1189 // CL_INVALID_OPERATION if there are kernel objects attached to program.
1190 if (prog.hasAttachedKernels())
1191 {
1192 return CL_INVALID_OPERATION;
1193 }
1194
1195 // If program was created with clCreateProgramWithBinary and device does not have a valid
1196 // program binary loaded
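// Query each device's binary size and binary type; a device that reports a binary type other
// than CL_PROGRAM_BINARY_TYPE_NONE must also have binary bytes available.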
1197 std::vector<size_t> binSizes(prog.getDevices().size());
1198 std::vector<std::vector<unsigned char *>> bins{prog.getDevices().size()};
1199 if (IsError(prog.getInfo(ProgramInfo::BinarySizes, binSizes.size() * sizeof(size_t),
1200 binSizes.data(), nullptr)))
1201 {
1202 return CL_INVALID_PROGRAM;
1203 }
1204 for (size_t i = 0; i < prog.getDevices().size(); ++i)
1205 {
1206 cl_program_binary_type binType;
1207 bins.at(i).resize(binSizes[i]);
1208
1209 if (IsError(prog.getInfo(ProgramInfo::Binaries, sizeof(unsigned char *) * bins.size(),
1210 bins.data(), nullptr)))
1211 {
1212 return CL_INVALID_VALUE;
1213 }
1214 if (IsError(prog.getBuildInfo(prog.getDevices()[i]->getNative(),
1215 ProgramBuildInfo::BinaryType, sizeof(cl_program_binary_type),
1216 &binType, nullptr)))
1217 {
1218 return CL_INVALID_VALUE;
1219 }
1220 if ((binType != CL_PROGRAM_BINARY_TYPE_NONE) && bins[i].empty())
1221 {
1222 return CL_INVALID_BINARY;
1223 }
1224 }
1225
1226 return CL_SUCCESS;
1227 }
1228
1229 cl_int ValidateGetProgramInfo(cl_program program,
1230 ProgramInfo param_name,
1231 size_t param_value_size,
1232 const void *param_value,
1233 const size_t *param_value_size_ret)
1234 {
1235 // CL_INVALID_PROGRAM if program is not a valid program object.
1236 if (!Program::IsValid(program))
1237 {
1238 return CL_INVALID_PROGRAM;
1239 }
1240 const Program &prog = program->cast<Program>();
1241
1242 // CL_INVALID_VALUE if param_name is not valid.
1243 const cl_version version = prog.getContext().getPlatform().getVersion();
1244 switch (param_name)
1245 {
1246 case ProgramInfo::NumKernels:
1247 case ProgramInfo::KernelNames:
1248 ANGLE_VALIDATE_VERSION(version, 1, 2);
1249 break;
1250 case ProgramInfo::IL:
1251 ANGLE_VALIDATE_VERSION(version, 2, 1);
1252 break;
1253 case ProgramInfo::ScopeGlobalCtorsPresent:
1254 case ProgramInfo::ScopeGlobalDtorsPresent:
1255 ANGLE_VALIDATE_VERSION(version, 2, 2);
1256 break;
1257 case ProgramInfo::InvalidEnum:
1258 return CL_INVALID_VALUE;
1259 default:
1260 // All remaining possible values for param_name are valid for all versions.
1261 break;
1262 }
1263
1264 // CL_INVALID_VALUE if size in bytes specified by param_value_size is < size of return type
1265 // as described in the Program Object Queries table and param_value is not NULL.
1266 if (param_value != nullptr)
1267 {
1268 size_t valueSizeRet = 0;
1269 if (IsError(prog.getInfo(param_name, 0, nullptr, &valueSizeRet)) ||
1270 param_value_size < valueSizeRet)
1271 {
1272 return CL_INVALID_VALUE;
1273 }
1274 }
1275
1276 return CL_SUCCESS;
1277 }
1278
1279 cl_int ValidateGetProgramBuildInfo(cl_program program,
1280 cl_device_id device,
1281 ProgramBuildInfo param_name,
1282 size_t param_value_size,
1283 const void *param_value,
1284 const size_t *param_value_size_ret)
1285 {
1286 // CL_INVALID_PROGRAM if program is not a valid program object.
1287 if (!Program::IsValid(program))
1288 {
1289 return CL_INVALID_PROGRAM;
1290 }
1291 const Program &prog = program->cast<Program>();
1292
1293 // CL_INVALID_DEVICE if device is not in the list of devices associated with program.
1294 if (!prog.hasDevice(device))
1295 {
1296 return CL_INVALID_DEVICE;
1297 }
1298
1299 // CL_INVALID_VALUE if param_name is not valid.
1300 const cl_version version = prog.getContext().getPlatform().getVersion();
1301 switch (param_name)
1302 {
1303 case ProgramBuildInfo::BinaryType:
1304 ANGLE_VALIDATE_VERSION(version, 1, 2);
1305 break;
1306 case ProgramBuildInfo::GlobalVariableTotalSize:
1307 ANGLE_VALIDATE_VERSION(version, 2, 0);
1308 break;
1309 case ProgramBuildInfo::InvalidEnum:
1310 return CL_INVALID_VALUE;
1311 default:
1312 // All remaining possible values for param_name are valid for all versions.
1313 break;
1314 }
1315
1316 // CL_INVALID_VALUE if size in bytes specified by param_value_size is < size of return type
1317 // as described in the Program Object Queries table and param_value is not NULL.
1318 if (param_value != nullptr)
1319 {
1320 size_t valueSizeRet = 0;
1321 if (IsError(prog.getBuildInfo(device, param_name, 0, nullptr, &valueSizeRet)) ||
1322 param_value_size < valueSizeRet)
1323 {
1324 return CL_INVALID_VALUE;
1325 }
1326 }
1327
1328 return CL_SUCCESS;
1329 }
1330
1331 cl_int ValidateCreateKernel(cl_program program, const char *kernel_name)
1332 {
1333 // CL_INVALID_PROGRAM if program is not a valid program object.
1334 if (!Program::IsValid(program))
1335 {
1336 return CL_INVALID_PROGRAM;
1337 }
1338 cl::Program &prog = program->cast<cl::Program>();
1339
1340 // CL_INVALID_VALUE if kernel_name is NULL.
1341 if (kernel_name == nullptr)
1342 {
1343 return CL_INVALID_VALUE;
1344 }
1345
1346 // CL_INVALID_PROGRAM_EXECUTABLE if there is no successfully built executable for program.
1347 std::vector<cl_device_id> associatedDevices;
1348 size_t associatedDeviceCount = 0;
1349 bool isAnyDeviceProgramBuilt = false;
1350 if (IsError(prog.getInfo(ProgramInfo::Devices, 0, nullptr, &associatedDeviceCount)))
1351 {
1352 return CL_INVALID_PROGRAM;
1353 }
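// The size reported by getInfo is in bytes, so convert it to a number of devices.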
1354 associatedDevices.resize(associatedDeviceCount / sizeof(cl_device_id));
1355 if (IsError(prog.getInfo(ProgramInfo::Devices, associatedDeviceCount, associatedDevices.data(),
1356 nullptr)))
1357 {
1358 return CL_INVALID_PROGRAM;
1359 }
1360 for (const cl_device_id &device : associatedDevices)
1361 {
1362 cl_build_status status = CL_BUILD_NONE;
1363 if (IsError(prog.getBuildInfo(device, ProgramBuildInfo::Status, sizeof(cl_build_status),
1364 &status, nullptr)))
1365 {
1366 return CL_INVALID_PROGRAM;
1367 }
1368
1369 if (status == CL_BUILD_SUCCESS)
1370 {
1371 isAnyDeviceProgramBuilt = true;
1372 break;
1373 }
1374 }
1375 if (!isAnyDeviceProgramBuilt)
1376 {
1377 return CL_INVALID_PROGRAM_EXECUTABLE;
1378 }
1379
1380 // CL_INVALID_KERNEL_NAME if kernel_name is not found in program.
1381 std::string kernelNames;
1382 size_t kernelNamesSize = 0;
1383 if (IsError(prog.getInfo(ProgramInfo::KernelNames, 0, nullptr, &kernelNamesSize)))
1384 {
1385 return CL_INVALID_PROGRAM;
1386 }
1387 kernelNames.resize(kernelNamesSize);
1388 if (IsError(
1389 prog.getInfo(ProgramInfo::KernelNames, kernelNamesSize, kernelNames.data(), nullptr)))
1390 {
1391 return CL_INVALID_PROGRAM;
1392 }
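// CL_PROGRAM_KERNEL_NAMES is a single semicolon-separated string, so split it before searching
// for kernel_name.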
1393 std::vector<std::string> tokenizedKernelNames =
1394 angle::SplitString(kernelNames.c_str(), ";", angle::WhitespaceHandling::TRIM_WHITESPACE,
1395 angle::SplitResult::SPLIT_WANT_NONEMPTY);
1396 if (std::find(tokenizedKernelNames.begin(), tokenizedKernelNames.end(), kernel_name) ==
1397 tokenizedKernelNames.end())
1398 {
1399 return CL_INVALID_KERNEL_NAME;
1400 }
1401
1402 return CL_SUCCESS;
1403 }
1404
1405 cl_int ValidateCreateKernelsInProgram(cl_program program,
1406 cl_uint num_kernels,
1407 const cl_kernel *kernels,
1408 const cl_uint *num_kernels_ret)
1409 {
1410 // CL_INVALID_PROGRAM if program is not a valid program object.
1411 if (!Program::IsValid(program))
1412 {
1413 return CL_INVALID_PROGRAM;
1414 }
1415
1416 // CL_INVALID_VALUE if kernels is not NULL and num_kernels is less than the number of kernels in
1417 // program.
1418 size_t kernelCount = 0;
1419 cl::Program &prog = program->cast<cl::Program>();
1420 if (IsError(prog.getInfo(ProgramInfo::NumKernels, sizeof(size_t), &kernelCount, nullptr)))
1421 {
1422 return CL_INVALID_PROGRAM;
1423 }
1424 if (kernels != nullptr && num_kernels < kernelCount)
1425 {
1426 return CL_INVALID_VALUE;
1427 }
1428
1429 return CL_SUCCESS;
1430 }
1431
1432 cl_int ValidateRetainKernel(cl_kernel kernel)
1433 {
1434 // CL_INVALID_KERNEL if kernel is not a valid kernel object.
1435 return Kernel::IsValid(kernel) ? CL_SUCCESS : CL_INVALID_KERNEL;
1436 }
1437
1438 cl_int ValidateReleaseKernel(cl_kernel kernel)
1439 {
1440 // CL_INVALID_KERNEL if kernel is not a valid kernel object.
1441 return Kernel::IsValid(kernel) ? CL_SUCCESS : CL_INVALID_KERNEL;
1442 }
1443
1444 cl_int ValidateSetKernelArg(cl_kernel kernel,
1445 cl_uint arg_index,
1446 size_t arg_size,
1447 const void *arg_value)
1448 {
1449 // CL_INVALID_KERNEL if kernel is not a valid kernel object.
1450 if (!Kernel::IsValid(kernel))
1451 {
1452 return CL_INVALID_KERNEL;
1453 }
1454 const Kernel &krnl = kernel->cast<Kernel>();
1455
1456 // CL_INVALID_ARG_INDEX if arg_index is not a valid argument index.
1457 if (arg_index >= krnl.getInfo().args.size())
1458 {
1459 return CL_INVALID_ARG_INDEX;
1460 }
1461
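// Memory objects, samplers and device queues are passed by API handle, so the object-specific
// checks below only apply when arg_value holds a handle-sized value.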
1462 if (arg_size == sizeof(cl_mem) && arg_value != nullptr)
1463 {
1464 const std::string &typeName = krnl.getInfo().args[arg_index].typeName;
1465
1466 // CL_INVALID_MEM_OBJECT for an argument declared to be a memory object
1467 // when the specified arg_value is not a valid memory object.
1468 if (typeName == "image1d_t")
1469 {
1470 const cl_mem image = *static_cast<const cl_mem *>(arg_value);
1471 if (!Image::IsValid(image) || image->cast<Image>().getType() != MemObjectType::Image1D)
1472 {
1473 return CL_INVALID_MEM_OBJECT;
1474 }
1475 }
1476 else if (typeName == "image2d_t")
1477 {
1478 const cl_mem image = *static_cast<const cl_mem *>(arg_value);
1479 if (!Image::IsValid(image) || image->cast<Image>().getType() != MemObjectType::Image2D)
1480 {
1481 return CL_INVALID_MEM_OBJECT;
1482 }
1483 }
1484 else if (typeName == "image3d_t")
1485 {
1486 const cl_mem image = *static_cast<const cl_mem *>(arg_value);
1487 if (!Image::IsValid(image) || image->cast<Image>().getType() != MemObjectType::Image3D)
1488 {
1489 return CL_INVALID_MEM_OBJECT;
1490 }
1491 }
1492 else if (typeName == "image1d_array_t")
1493 {
1494 const cl_mem image = *static_cast<const cl_mem *>(arg_value);
1495 if (!Image::IsValid(image) ||
1496 image->cast<Image>().getType() != MemObjectType::Image1D_Array)
1497 {
1498 return CL_INVALID_MEM_OBJECT;
1499 }
1500 }
1501 else if (typeName == "image2d_array_t")
1502 {
1503 const cl_mem image = *static_cast<const cl_mem *>(arg_value);
1504 if (!Image::IsValid(image) ||
1505 image->cast<Image>().getType() != MemObjectType::Image2D_Array)
1506 {
1507 return CL_INVALID_MEM_OBJECT;
1508 }
1509 }
1510 else if (typeName == "image1d_buffer_t")
1511 {
1512 const cl_mem image = *static_cast<const cl_mem *>(arg_value);
1513 if (!Image::IsValid(image) ||
1514 image->cast<Image>().getType() != MemObjectType::Image1D_Buffer)
1515 {
1516 return CL_INVALID_MEM_OBJECT;
1517 }
1518 }
1519 // CL_INVALID_SAMPLER for an argument declared to be of type sampler_t
1520 // when the specified arg_value is not a valid sampler object.
1521 else if (typeName == "sampler_t")
1522 {
1523 static_assert(sizeof(cl_mem) == sizeof(cl_sampler), "api object size check failed");
1524 if (!Sampler::IsValid(*static_cast<const cl_sampler *>(arg_value)))
1525 {
1526 return CL_INVALID_SAMPLER;
1527 }
1528 }
1529 // CL_INVALID_DEVICE_QUEUE for an argument declared to be of type queue_t
1530 // when the specified arg_value is not a valid device queue object.
1531 else if (typeName == "queue_t")
1532 {
1533 static_assert(sizeof(cl_mem) == sizeof(cl_command_queue),
1534 "api object size check failed");
1535 const cl_command_queue queue = *static_cast<const cl_command_queue *>(arg_value);
1536 if (!CommandQueue::IsValid(queue) || !queue->cast<CommandQueue>().isOnDevice())
1537 {
1538 return CL_INVALID_DEVICE_QUEUE;
1539 }
1540 }
1541 }
1542
1543 return CL_SUCCESS;
1544 }
1545
1546 cl_int ValidateGetKernelInfo(cl_kernel kernel,
1547 KernelInfo param_name,
1548 size_t param_value_size,
1549 const void *param_value,
1550 const size_t *param_value_size_ret)
1551 {
1552 // CL_INVALID_KERNEL if kernel is not a valid kernel object.
1553 if (!Kernel::IsValid(kernel))
1554 {
1555 return CL_INVALID_KERNEL;
1556 }
1557
1558 // CL_INVALID_VALUE if param_name is not valid.
1559 const cl_version version =
1560 kernel->cast<Kernel>().getProgram().getContext().getPlatform().getVersion();
1561 switch (param_name)
1562 {
1563 case KernelInfo::Attributes:
1564 ANGLE_VALIDATE_VERSION(version, 1, 2);
1565 break;
1566 case KernelInfo::InvalidEnum:
1567 return CL_INVALID_VALUE;
1568 default:
1569 // All remaining possible values for param_name are valid for all versions.
1570 break;
1571 }
1572
1573 return CL_SUCCESS;
1574 }
1575
1576 cl_int ValidateGetKernelWorkGroupInfo(cl_kernel kernel,
1577 cl_device_id device,
1578 KernelWorkGroupInfo param_name,
1579 size_t param_value_size,
1580 const void *param_value,
1581 const size_t *param_value_size_ret)
1582 {
1583 // CL_INVALID_KERNEL if kernel is not a valid kernel object.
1584 if (!Kernel::IsValid(kernel))
1585 {
1586 return CL_INVALID_KERNEL;
1587 }
1588 const Kernel &krnl = kernel->cast<Kernel>();
1589
1590 const Device *dev = nullptr;
1591 if (device != nullptr)
1592 {
1593 // CL_INVALID_DEVICE if device is not in the list of devices associated with kernel ...
1594 if (krnl.getProgram().getContext().hasDevice(device))
1595 {
1596 dev = &device->cast<Device>();
1597 }
1598 else
1599 {
1600 return CL_INVALID_DEVICE;
1601 }
1602 }
1603 else
1604 {
1605 // or if device is NULL but there is more than one device associated with kernel.
1606 if (krnl.getProgram().getContext().getDevices().size() == 1u)
1607 {
1608 dev = krnl.getProgram().getContext().getDevices().front().get();
1609 }
1610 else
1611 {
1612 return CL_INVALID_DEVICE;
1613 }
1614 }
1615
1616 // CL_INVALID_VALUE if param_name is not valid.
1617 const cl_version version = krnl.getProgram().getContext().getPlatform().getInfo().version;
1618 switch (param_name)
1619 {
1620 case KernelWorkGroupInfo::GlobalWorkSize:
1621 ANGLE_VALIDATE_VERSION(version, 1, 2);
1622 // CL_INVALID_VALUE if param_name is CL_KERNEL_GLOBAL_WORK_SIZE and
1623 // device is not a custom device and kernel is not a built-in kernel.
1624 if (!dev->supportsBuiltInKernel(krnl.getInfo().functionName))
1625 {
1626 return CL_INVALID_VALUE;
1627 }
1628 break;
1629 case KernelWorkGroupInfo::InvalidEnum:
1630 return CL_INVALID_VALUE;
1631 default:
1632 // All remaining possible values for param_name are valid for all versions.
1633 break;
1634 }
1635
1636 return CL_SUCCESS;
1637 }
1638
1639 cl_int ValidateWaitForEvents(cl_uint num_events, const cl_event *event_list)
1640 {
1641 // CL_INVALID_VALUE if num_events is zero or event_list is NULL.
1642 if (num_events == 0u || event_list == nullptr)
1643 {
1644 return CL_INVALID_VALUE;
1645 }
1646
1647 const Context *context = nullptr;
1648 while (num_events-- != 0u)
1649 {
1650 // CL_INVALID_EVENT if event objects specified in event_list are not valid event objects.
1651 if (!Event::IsValid(*event_list))
1652 {
1653 return CL_INVALID_EVENT;
1654 }
1655
1656 // CL_INVALID_CONTEXT if events specified in event_list do not belong to the same context.
1657 const Context *eventContext = &(*event_list++)->cast<Event>().getContext();
1658 if (context == nullptr)
1659 {
1660 context = eventContext;
1661 }
1662 else if (context != eventContext)
1663 {
1664 return CL_INVALID_CONTEXT;
1665 }
1666 }
1667
1668 return CL_SUCCESS;
1669 }
1670
1671 cl_int ValidateGetEventInfo(cl_event event,
1672 EventInfo param_name,
1673 size_t param_value_size,
1674 const void *param_value,
1675 const size_t *param_value_size_ret)
1676 {
1677     // CL_INVALID_EVENT if event is not a valid event object.
1678 if (!Event::IsValid(event))
1679 {
1680 return CL_INVALID_EVENT;
1681 }
1682
1683 // CL_INVALID_VALUE if param_name is not valid.
1684 const cl_version version = event->cast<Event>().getContext().getPlatform().getVersion();
1685 switch (param_name)
1686 {
1687 case EventInfo::Context:
1688 ANGLE_VALIDATE_VERSION(version, 1, 1);
1689 break;
1690 case EventInfo::InvalidEnum:
1691 return CL_INVALID_VALUE;
1692 default:
1693 // All remaining possible values for param_name are valid for all versions.
1694 break;
1695 }
1696
1697 return CL_SUCCESS;
1698 }
1699
1700 cl_int ValidateRetainEvent(cl_event event)
1701 {
1702 // CL_INVALID_EVENT if event is not a valid event object.
1703 return Event::IsValid(event) ? CL_SUCCESS : CL_INVALID_EVENT;
1704 }
1705
1706 cl_int ValidateReleaseEvent(cl_event event)
1707 {
1708 // CL_INVALID_EVENT if event is not a valid event object.
1709 return Event::IsValid(event) ? CL_SUCCESS : CL_INVALID_EVENT;
1710 }
1711
1712 cl_int ValidateGetEventProfilingInfo(cl_event event,
1713 ProfilingInfo param_name,
1714 size_t param_value_size,
1715 const void *param_value,
1716 const size_t *param_value_size_ret)
1717 {
1718     // CL_INVALID_EVENT if event is not a valid event object.
1719 if (!Event::IsValid(event))
1720 {
1721 return CL_INVALID_EVENT;
1722 }
1723 const Event &evt = event->cast<Event>();
1724
1725 // CL_PROFILING_INFO_NOT_AVAILABLE if event is a user event object,
1726 if (evt.getCommandType() == CL_COMMAND_USER)
1727 {
1728 return CL_PROFILING_INFO_NOT_AVAILABLE;
1729 }
1730 // or if the CL_QUEUE_PROFILING_ENABLE flag is not set for the command-queue.
1731 if (evt.getCommandQueue()->getProperties().excludes(CL_QUEUE_PROFILING_ENABLE))
1732 {
1733 return CL_PROFILING_INFO_NOT_AVAILABLE;
1734 }
1735
1736 // CL_INVALID_VALUE if param_name is not valid.
1737 const cl_version version = evt.getContext().getPlatform().getVersion();
1738 switch (param_name)
1739 {
1740 case ProfilingInfo::CommandComplete:
1741 ANGLE_VALIDATE_VERSION(version, 2, 0);
1742 break;
1743 case ProfilingInfo::InvalidEnum:
1744 return CL_INVALID_VALUE;
1745 default:
1746 // All remaining possible values for param_name are valid for all versions.
1747 break;
1748 }
1749
1750 return CL_SUCCESS;
1751 }
1752
1753 cl_int ValidateFlush(cl_command_queue command_queue)
1754 {
1755 // CL_INVALID_COMMAND_QUEUE if command_queue is not a valid host command-queue.
1756 if (!CommandQueue::IsValid(command_queue) || !command_queue->cast<CommandQueue>().isOnHost())
1757 {
1758 return CL_INVALID_COMMAND_QUEUE;
1759 }
1760 return CL_SUCCESS;
1761 }
1762
1763 cl_int ValidateFinish(cl_command_queue command_queue)
1764 {
1765 // CL_INVALID_COMMAND_QUEUE if command_queue is not a valid host command-queue.
1766 if (!CommandQueue::IsValid(command_queue) || !command_queue->cast<CommandQueue>().isOnHost())
1767 {
1768 return CL_INVALID_COMMAND_QUEUE;
1769 }
1770 return CL_SUCCESS;
1771 }
1772
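// Illustrative call that passes the checks below (all names are hypothetical):
//   clEnqueueReadBuffer(queue, buffer, CL_TRUE, 0, size, hostPtr, 0, nullptr, nullptr);
// A NULL ptr or an (offset, size) range that runs past the end of the buffer is rejected
// with CL_INVALID_VALUE.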
1773 cl_int ValidateEnqueueReadBuffer(cl_command_queue command_queue,
1774 cl_mem buffer,
1775 cl_bool blocking_read,
1776 size_t offset,
1777 size_t size,
1778 const void *ptr,
1779 cl_uint num_events_in_wait_list,
1780 const cl_event *event_wait_list,
1781 const cl_event *event)
1782 {
1783 ANGLE_VALIDATE(ValidateCommandQueueAndEventWaitList(command_queue, false,
1784 num_events_in_wait_list, event_wait_list));
1785 ANGLE_VALIDATE(ValidateEnqueueBuffer(command_queue->cast<CommandQueue>(), buffer, true, false));
1786
1787 // CL_INVALID_VALUE if the region being read or written specified
1788 // by (offset, size) is out of bounds or if ptr is a NULL value.
1789 if (!buffer->cast<Buffer>().isRegionValid(offset, size) || ptr == nullptr)
1790 {
1791 return CL_INVALID_VALUE;
1792 }
1793
1794 return CL_SUCCESS;
1795 }
1796
1797 cl_int ValidateEnqueueWriteBuffer(cl_command_queue command_queue,
1798 cl_mem buffer,
1799 cl_bool blocking_write,
1800 size_t offset,
1801 size_t size,
1802 const void *ptr,
1803 cl_uint num_events_in_wait_list,
1804 const cl_event *event_wait_list,
1805 const cl_event *event)
1806 {
1807 ANGLE_VALIDATE(ValidateCommandQueueAndEventWaitList(command_queue, false,
1808 num_events_in_wait_list, event_wait_list));
1809 ANGLE_VALIDATE(ValidateEnqueueBuffer(command_queue->cast<CommandQueue>(), buffer, false, true));
1810
1811 // CL_INVALID_VALUE if the region being read or written specified
1812 // by (offset, size) is out of bounds or if ptr is a NULL value.
1813 if (!buffer->cast<Buffer>().isRegionValid(offset, size) || ptr == nullptr)
1814 {
1815 return CL_INVALID_VALUE;
1816 }
1817
1818 return CL_SUCCESS;
1819 }
1820
1821 cl_int ValidateEnqueueCopyBuffer(cl_command_queue command_queue,
1822 cl_mem src_buffer,
1823 cl_mem dst_buffer,
1824 size_t src_offset,
1825 size_t dst_offset,
1826 size_t size,
1827 cl_uint num_events_in_wait_list,
1828 const cl_event *event_wait_list,
1829 const cl_event *event)
1830 {
1831 ANGLE_VALIDATE(ValidateCommandQueueAndEventWaitList(command_queue, false,
1832 num_events_in_wait_list, event_wait_list));
1833 const CommandQueue &queue = command_queue->cast<CommandQueue>();
1834
1835 ANGLE_VALIDATE(ValidateEnqueueBuffer(queue, src_buffer, false, false));
1836 const Buffer &src = src_buffer->cast<Buffer>();
1837
1838 ANGLE_VALIDATE(ValidateEnqueueBuffer(queue, dst_buffer, false, false));
1839 const Buffer &dst = dst_buffer->cast<Buffer>();
1840
1841 // CL_INVALID_VALUE if src_offset, dst_offset, size, src_offset + size or dst_offset + size
1842 // require accessing elements outside the src_buffer and dst_buffer buffer objects respectively.
1843 if (!src.isRegionValid(src_offset, size) || !dst.isRegionValid(dst_offset, size))
1844 {
1845 return CL_INVALID_VALUE;
1846 }
1847
1848 // CL_MEM_COPY_OVERLAP if src_buffer and dst_buffer are the same buffer or sub-buffer object
1849 // and the source and destination regions overlap or if src_buffer and dst_buffer are
1850 // different sub-buffers of the same associated buffer object and they overlap.
1851 if ((src.isSubBuffer() ? src.getParent().get() : &src) ==
1852 (dst.isSubBuffer() ? dst.getParent().get() : &dst))
1853 {
1854 // Only sub-buffers have offsets larger than zero
1855 src_offset += src.getOffset();
1856 dst_offset += dst.getOffset();
1857
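        // Example (illustrative): two sub-buffers of the same parent created at origins 0 and
        // 64, copied with size = 128, touch the parent ranges [0, 128) and [64, 192), which
        // overlap. OverlapRegions() therefore has to compare parent-relative offsets rather
        // than sub-buffer-relative offsets.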
1858 if (OverlapRegions(src_offset, dst_offset, size))
1859 {
1860 return CL_MEM_COPY_OVERLAP;
1861 }
1862 }
1863
1864 return CL_SUCCESS;
1865 }
1866
1867 cl_int ValidateEnqueueReadImage(cl_command_queue command_queue,
1868 cl_mem image,
1869 cl_bool blocking_read,
1870 const size_t *origin,
1871 const size_t *region,
1872 size_t row_pitch,
1873 size_t slice_pitch,
1874 const void *ptr,
1875 cl_uint num_events_in_wait_list,
1876 const cl_event *event_wait_list,
1877 const cl_event *event)
1878 {
1879 ANGLE_VALIDATE(ValidateCommandQueueAndEventWaitList(command_queue, true,
1880 num_events_in_wait_list, event_wait_list));
1881 const CommandQueue &queue = command_queue->cast<CommandQueue>();
1882
1883 ANGLE_VALIDATE(ValidateEnqueueImage(queue, image, true, false));
1884 const Image &img = image->cast<Image>();
1885
1886 ANGLE_VALIDATE(ValidateImageForDevice(img, queue.getDevice(), origin, region));
1887 ANGLE_VALIDATE(ValidateHostRegionForImage(img, region, row_pitch, slice_pitch, ptr));
1888
1889 return CL_SUCCESS;
1890 }
1891
1892 cl_int ValidateEnqueueWriteImage(cl_command_queue command_queue,
1893 cl_mem image,
1894 cl_bool blocking_write,
1895 const size_t *origin,
1896 const size_t *region,
1897 size_t input_row_pitch,
1898 size_t input_slice_pitch,
1899 const void *ptr,
1900 cl_uint num_events_in_wait_list,
1901 const cl_event *event_wait_list,
1902 const cl_event *event)
1903 {
1904 ANGLE_VALIDATE(ValidateCommandQueueAndEventWaitList(command_queue, true,
1905 num_events_in_wait_list, event_wait_list));
1906 const CommandQueue &queue = command_queue->cast<CommandQueue>();
1907
1908 ANGLE_VALIDATE(ValidateEnqueueImage(queue, image, false, true));
1909 const Image &img = image->cast<Image>();
1910
1911 ANGLE_VALIDATE(ValidateImageForDevice(img, queue.getDevice(), origin, region));
1912 ANGLE_VALIDATE(
1913 ValidateHostRegionForImage(img, region, input_row_pitch, input_slice_pitch, ptr));
1914
1915 return CL_SUCCESS;
1916 }
1917
1918 cl_int ValidateEnqueueCopyImage(cl_command_queue command_queue,
1919 cl_mem src_image,
1920 cl_mem dst_image,
1921 const size_t *src_origin,
1922 const size_t *dst_origin,
1923 const size_t *region,
1924 cl_uint num_events_in_wait_list,
1925 const cl_event *event_wait_list,
1926 const cl_event *event)
1927 {
1928 ANGLE_VALIDATE(ValidateCommandQueueAndEventWaitList(command_queue, true,
1929 num_events_in_wait_list, event_wait_list));
1930 const CommandQueue &queue = command_queue->cast<CommandQueue>();
1931
1932 ANGLE_VALIDATE(ValidateEnqueueImage(queue, src_image, false, false));
1933 const Image &src = src_image->cast<Image>();
1934
1935 ANGLE_VALIDATE(ValidateEnqueueImage(queue, dst_image, false, false));
1936 const Image &dst = dst_image->cast<Image>();
1937
1938 // CL_IMAGE_FORMAT_MISMATCH if src_image and dst_image do not use the same image format.
1939 if (src.getFormat().image_channel_order != dst.getFormat().image_channel_order ||
1940 src.getFormat().image_channel_data_type != dst.getFormat().image_channel_data_type)
1941 {
1942 return CL_IMAGE_FORMAT_MISMATCH;
1943 }
1944
1945 ANGLE_VALIDATE(ValidateImageForDevice(src, queue.getDevice(), src_origin, region));
1946 ANGLE_VALIDATE(ValidateImageForDevice(dst, queue.getDevice(), dst_origin, region));
1947
1948 // CL_MEM_COPY_OVERLAP if src_image and dst_image are the same image object
1949 // and the source and destination regions overlap.
1950 if (&src == &dst)
1951 {
1952 const MemObjectType type = src.getType();
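        // The regions only overlap if they overlap in every dimension the image type actually
        // uses, hence the nested checks below: 1D images (and 1D image buffers) stop after the
        // first dimension, 2D images and 1D arrays after the second, and the remaining types
        // after the third.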
1953 // Check overlap in first dimension
1954 if (OverlapRegions(src_origin[0], dst_origin[0], region[0]))
1955 {
1956 if (type == MemObjectType::Image1D || type == MemObjectType::Image1D_Buffer)
1957 {
1958 return CL_MEM_COPY_OVERLAP;
1959 }
1960
1961 // Check overlap in second dimension
1962 if (OverlapRegions(src_origin[1], dst_origin[1], region[1]))
1963 {
1964 if (type == MemObjectType::Image2D || type == MemObjectType::Image1D_Array)
1965 {
1966 return CL_MEM_COPY_OVERLAP;
1967 }
1968
1969 // Check overlap in third dimension
1970 if (OverlapRegions(src_origin[2], dst_origin[2], region[2]))
1971 {
1972 return CL_MEM_COPY_OVERLAP;
1973 }
1974 }
1975 }
1976 }
1977
1978 return CL_SUCCESS;
1979 }
1980
1981 cl_int ValidateEnqueueCopyImageToBuffer(cl_command_queue command_queue,
1982 cl_mem src_image,
1983 cl_mem dst_buffer,
1984 const size_t *src_origin,
1985 const size_t *region,
1986 size_t dst_offset,
1987 cl_uint num_events_in_wait_list,
1988 const cl_event *event_wait_list,
1989 const cl_event *event)
1990 {
1991 ANGLE_VALIDATE(ValidateCommandQueueAndEventWaitList(command_queue, true,
1992 num_events_in_wait_list, event_wait_list));
1993 const CommandQueue &queue = command_queue->cast<CommandQueue>();
1994
1995 ANGLE_VALIDATE(ValidateEnqueueImage(queue, src_image, false, false));
1996 const Image &src = src_image->cast<Image>();
1997
1998 ANGLE_VALIDATE(ValidateEnqueueBuffer(queue, dst_buffer, false, false));
1999 const Buffer &dst = dst_buffer->cast<Buffer>();
2000
2001 // CL_INVALID_MEM_OBJECT if src_image is a 1D image buffer object created from dst_buffer.
2002 if (src.getType() == MemObjectType::Image1D_Buffer && src.getParent() == &dst)
2003 {
2004 return CL_INVALID_MEM_OBJECT;
2005 }
2006
2007 ANGLE_VALIDATE(ValidateImageForDevice(src, queue.getDevice(), src_origin, region));
2008
2009 // CL_INVALID_VALUE if the region specified by dst_offset and dst_offset + dst_cb
2010     // refers to a region outside dst_buffer.
2011 const MemObjectType type = src.getType();
2012 size_t dst_cb = src.getElementSize() * region[0];
2013 if (type != MemObjectType::Image1D && type != MemObjectType::Image1D_Buffer)
2014 {
2015 dst_cb *= region[1];
2016 if (type != MemObjectType::Image2D && type != MemObjectType::Image1D_Array)
2017 {
2018 dst_cb *= region[2];
2019 }
2020 }
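    // Example (illustrative): copying a {16, 8, 1} region from a 2D image with 4-byte texels
    // requires dst_cb = 4 * 16 * 8 = 512 bytes starting at dst_offset.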
2021 if (!dst.isRegionValid(dst_offset, dst_cb))
2022 {
2023 return CL_INVALID_VALUE;
2024 }
2025
2026 return CL_SUCCESS;
2027 }
2028
2029 cl_int ValidateEnqueueCopyBufferToImage(cl_command_queue command_queue,
2030 cl_mem src_buffer,
2031 cl_mem dst_image,
2032 size_t src_offset,
2033 const size_t *dst_origin,
2034 const size_t *region,
2035 cl_uint num_events_in_wait_list,
2036 const cl_event *event_wait_list,
2037 const cl_event *event)
2038 {
2039 ANGLE_VALIDATE(ValidateCommandQueueAndEventWaitList(command_queue, true,
2040 num_events_in_wait_list, event_wait_list));
2041 const CommandQueue &queue = command_queue->cast<CommandQueue>();
2042
2043 ANGLE_VALIDATE(ValidateEnqueueBuffer(queue, src_buffer, false, false));
2044 const Buffer &src = src_buffer->cast<Buffer>();
2045
2046 ANGLE_VALIDATE(ValidateEnqueueImage(queue, dst_image, false, false));
2047 const Image &dst = dst_image->cast<Image>();
2048
2049 // CL_INVALID_MEM_OBJECT if dst_image is a 1D image buffer object created from src_buffer.
2050 if (dst.getType() == MemObjectType::Image1D_Buffer && dst.getParent() == &src)
2051 {
2052 return CL_INVALID_MEM_OBJECT;
2053 }
2054
2055 ANGLE_VALIDATE(ValidateImageForDevice(dst, queue.getDevice(), dst_origin, region));
2056
2057 // CL_INVALID_VALUE if the region specified by src_offset and src_offset + src_cb
2058     // refers to a region outside src_buffer.
2059 const MemObjectType type = dst.getType();
2060 size_t src_cb = dst.getElementSize() * region[0];
2061 if (type != MemObjectType::Image1D && type != MemObjectType::Image1D_Buffer)
2062 {
2063 src_cb *= region[1];
2064 if (type != MemObjectType::Image2D && type != MemObjectType::Image1D_Array)
2065 {
2066 src_cb *= region[2];
2067 }
2068 }
2069 if (!src.isRegionValid(src_offset, src_cb))
2070 {
2071 return CL_INVALID_VALUE;
2072 }
2073
2074 return CL_SUCCESS;
2075 }
2076
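// Illustrative rejections handled below: mapping a buffer created with CL_MEM_HOST_READ_ONLY
// using CL_MAP_WRITE (or CL_MAP_WRITE_INVALIDATE_REGION) fails in ValidateEnqueueBuffer, while
// a zero-sized or out-of-bounds (offset, size) range fails with CL_INVALID_VALUE.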
2077 cl_int ValidateEnqueueMapBuffer(cl_command_queue command_queue,
2078 cl_mem buffer,
2079 cl_bool blocking_map,
2080 MapFlags map_flags,
2081 size_t offset,
2082 size_t size,
2083 cl_uint num_events_in_wait_list,
2084 const cl_event *event_wait_list,
2085 const cl_event *event)
2086 {
2087 ANGLE_VALIDATE(ValidateCommandQueueAndEventWaitList(command_queue, false,
2088 num_events_in_wait_list, event_wait_list));
2089 const CommandQueue &queue = command_queue->cast<CommandQueue>();
2090
2091 // CL_INVALID_OPERATION if buffer has been created with CL_MEM_HOST_WRITE_ONLY or
2092 // CL_MEM_HOST_NO_ACCESS and CL_MAP_READ is set in map_flags
2093 // or if buffer has been created with CL_MEM_HOST_READ_ONLY or CL_MEM_HOST_NO_ACCESS
2094 // and CL_MAP_WRITE or CL_MAP_WRITE_INVALIDATE_REGION is set in map_flags.
2095 ANGLE_VALIDATE(
2096 ValidateEnqueueBuffer(queue, buffer, map_flags.intersects(CL_MAP_READ),
2097 map_flags.intersects(CL_MAP_WRITE | CL_MAP_WRITE_INVALIDATE_REGION)));
2098
2099 // CL_INVALID_VALUE if region being mapped given by (offset, size) is out of bounds
2100 // or if size is 0 or if values specified in map_flags are not valid.
2101 if (!buffer->cast<Buffer>().isRegionValid(offset, size) || size == 0u ||
2102 !ValidateMapFlags(map_flags, queue.getContext().getPlatform()))
2103 {
2104 return CL_INVALID_VALUE;
2105 }
2106
2107 return CL_SUCCESS;
2108 }
2109
2110 cl_int ValidateEnqueueMapImage(cl_command_queue command_queue,
2111 cl_mem image,
2112 cl_bool blocking_map,
2113 MapFlags map_flags,
2114 const size_t *origin,
2115 const size_t *region,
2116 const size_t *image_row_pitch,
2117 const size_t *image_slice_pitch,
2118 cl_uint num_events_in_wait_list,
2119 const cl_event *event_wait_list,
2120 const cl_event *event)
2121 {
2122 ANGLE_VALIDATE(ValidateCommandQueueAndEventWaitList(command_queue, true,
2123 num_events_in_wait_list, event_wait_list));
2124 const CommandQueue &queue = command_queue->cast<CommandQueue>();
2125
2126 // CL_INVALID_OPERATION if image has been created with CL_MEM_HOST_WRITE_ONLY or
2127 // CL_MEM_HOST_NO_ACCESS and CL_MAP_READ is set in map_flags
2128 // or if image has been created with CL_MEM_HOST_READ_ONLY or CL_MEM_HOST_NO_ACCESS
2129 // and CL_MAP_WRITE or CL_MAP_WRITE_INVALIDATE_REGION is set in map_flags.
2130 ANGLE_VALIDATE(
2131 ValidateEnqueueImage(queue, image, map_flags.intersects(CL_MAP_READ),
2132 map_flags.intersects(CL_MAP_WRITE | CL_MAP_WRITE_INVALIDATE_REGION)));
2133 const Image &img = image->cast<Image>();
2134
2135 ANGLE_VALIDATE(ValidateImageForDevice(img, queue.getDevice(), origin, region));
2136
2137 // CL_INVALID_VALUE if values specified in map_flags are not valid.
2138 if (!ValidateMapFlags(map_flags, queue.getContext().getPlatform()))
2139 {
2140 return CL_INVALID_VALUE;
2141 }
2142
2143 // CL_INVALID_VALUE if image_row_pitch is NULL.
2144 if (image_row_pitch == nullptr)
2145 {
2146 return CL_INVALID_VALUE;
2147 }
2148
2149 // CL_INVALID_VALUE if image is a 3D image, 1D or 2D image array object
2150 // and image_slice_pitch is NULL.
2151 if ((img.getType() == MemObjectType::Image3D || img.getType() == MemObjectType::Image1D_Array ||
2152 img.getType() == MemObjectType::Image2D_Array) &&
2153 image_slice_pitch == nullptr)
2154 {
2155 return CL_INVALID_VALUE;
2156 }
2157
2158 return CL_SUCCESS;
2159 }
2160
2161 cl_int ValidateEnqueueUnmapMemObject(cl_command_queue command_queue,
2162 cl_mem memobj,
2163 const void *mapped_ptr,
2164 cl_uint num_events_in_wait_list,
2165 const cl_event *event_wait_list,
2166 const cl_event *event)
2167 {
2168 ANGLE_VALIDATE(ValidateCommandQueueAndEventWaitList(command_queue, false,
2169 num_events_in_wait_list, event_wait_list));
2170 const CommandQueue &queue = command_queue->cast<CommandQueue>();
2171
2172 // CL_INVALID_MEM_OBJECT if memobj is not a valid memory object or is a pipe object.
2173 if (!Memory::IsValid(memobj))
2174 {
2175 return CL_INVALID_MEM_OBJECT;
2176 }
2177 const Memory &memory = memobj->cast<Memory>();
2178 if (memory.getType() == MemObjectType::Pipe)
2179 {
2180 return CL_INVALID_MEM_OBJECT;
2181 }
2182
2183 // CL_INVALID_CONTEXT if context associated with command_queue and memobj are not the same.
2184 if (&queue.getContext() != &memory.getContext())
2185 {
2186 return CL_INVALID_CONTEXT;
2187 }
2188
2189 return CL_SUCCESS;
2190 }
2191
2192 cl_int ValidateEnqueueNDRangeKernel(cl_command_queue command_queue,
2193 cl_kernel kernel,
2194 cl_uint work_dim,
2195 const size_t *global_work_offset,
2196 const size_t *global_work_size,
2197 const size_t *local_work_size,
2198 cl_uint num_events_in_wait_list,
2199 const cl_event *event_wait_list,
2200 const cl_event *event)
2201 {
2202 ANGLE_VALIDATE(ValidateCommandQueueAndEventWaitList(command_queue, false,
2203 num_events_in_wait_list, event_wait_list));
2204 const CommandQueue &queue = command_queue->cast<CommandQueue>();
2205 const Device &device = queue.getDevice();
2206
2207 // CL_INVALID_KERNEL if kernel is not a valid kernel object.
2208 if (!Kernel::IsValid(kernel))
2209 {
2210 return CL_INVALID_KERNEL;
2211 }
2212 const Kernel &krnl = kernel->cast<Kernel>();
2213
2214 // CL_INVALID_CONTEXT if context associated with command_queue and kernel are not the same.
2215 if (&queue.getContext() != &krnl.getProgram().getContext())
2216 {
2217 return CL_INVALID_CONTEXT;
2218 }
2219
2220 // CL_INVALID_WORK_DIMENSION if work_dim is not a valid value.
2221 if (work_dim == 0u || work_dim > device.getInfo().maxWorkItemSizes.size())
2222 {
2223 return CL_INVALID_WORK_DIMENSION;
2224 }
2225
2226 // CL_INVALID_GLOBAL_OFFSET if global_work_offset is non-NULL before version 1.1.
2227 if (!queue.getContext().getPlatform().isVersionOrNewer(1u, 1u) && global_work_offset != nullptr)
2228 {
2229 return CL_INVALID_GLOBAL_OFFSET;
2230 }
2231
2232 // CL_INVALID_KERNEL_ARGS if all the kernel arguments have not been set for the kernel
2233 if (!krnl.areAllArgsSet())
2234 {
2235 return CL_INVALID_KERNEL_ARGS;
2236 }
2237
2238 size_t compileWorkGroupSize[3] = {0, 0, 0};
2239 if (IsError(krnl.getWorkGroupInfo(const_cast<cl_device_id>(device.getNative()),
2240 KernelWorkGroupInfo::CompileWorkGroupSize,
2241 sizeof(compileWorkGroupSize), compileWorkGroupSize, nullptr)))
2242 {
2243 return CL_INVALID_VALUE;
2244 }
2245 if (local_work_size != nullptr)
2246 {
2247 // CL_INVALID_WORK_GROUP_SIZE when non-uniform work-groups are not supported, the size of
2248 // each work-group must be uniform. If local_work_size is specified, the values specified in
2249 // global_work_size[0],...,global_work_size[work_dim - 1] must be evenly divisible by
2250 // the corresponding values specified in local_work_size[0],...,
2251 // local_work_size[work_dim-1].
2252 if (!device.supportsNonUniformWorkGroups())
2253 {
2254 for (cl_uint i = 0; i < work_dim; ++i)
2255 {
2256                 if (local_work_size[i] == 0 || global_work_size[i] % local_work_size[i] != 0)
2257 {
2258 return CL_INVALID_WORK_GROUP_SIZE;
2259 }
2260 }
2261 }
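        // Example (illustrative): on a device without non-uniform work-group support,
        // global_work_size = {100, 64} with local_work_size = {32, 8} is rejected above
        // because 100 % 32 != 0.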
2262
2263 for (cl_uint i = 0; i < work_dim; ++i)
2264 {
2265             // CL_INVALID_WORK_GROUP_SIZE if any value in local_work_size[0], ...,
2266             // local_work_size[work_dim - 1] is 0; a zero local size can never evenly
2267             // divide the corresponding global work size.
2270 if (local_work_size[i] == 0)
2271 {
2272 return CL_INVALID_WORK_GROUP_SIZE;
2273 }
2274
2275 // CL_INVALID_WORK_GROUP_SIZE if local_work_size is specified and does not match the
2276 // required work-group size for kernel in the program source.
2277 if (compileWorkGroupSize[i] != 0 && local_work_size[i] != compileWorkGroupSize[i])
2278 {
2279 return CL_INVALID_WORK_GROUP_SIZE;
2280 }
2281 }
2282 }
2283
2284 // CL_INVALID_GLOBAL_WORK_SIZE if global_work_size is NULL or if any of the values
2285 // specified in global_work_size[0] ... global_work_size[work_dim - 1] are 0.
2286 // Returning this error code under these circumstances is deprecated by version 2.1.
2287 if (!queue.getContext().getPlatform().isVersionOrNewer(2u, 1u))
2288 {
2289 if (global_work_size == nullptr)
2290 {
2291 return CL_INVALID_GLOBAL_WORK_SIZE;
2292 }
2293 for (cl_uint dim = 0u; dim < work_dim; ++dim)
2294 {
2295 if (global_work_size[dim] == 0u)
2296 {
2297 return CL_INVALID_GLOBAL_WORK_SIZE;
2298 }
2299 }
2300 }
2301
2302 if (local_work_size != nullptr)
2303 {
2304 size_t numWorkItems = 1u; // Initialize with neutral element for multiplication
2305
2306 // CL_INVALID_WORK_ITEM_SIZE if the number of work-items specified
2307 // in any of local_work_size[0] ... local_work_size[work_dim - 1]
2308 // is greater than the corresponding values specified by
2309 // CL_DEVICE_MAX_WORK_ITEM_SIZES[0] ... CL_DEVICE_MAX_WORK_ITEM_SIZES[work_dim - 1].
2310 for (cl_uint dim = 0u; dim < work_dim; ++dim)
2311 {
2312 if (local_work_size[dim] > device.getInfo().maxWorkItemSizes[dim])
2313 {
2314 return CL_INVALID_WORK_ITEM_SIZE;
2315 }
2316 numWorkItems *= local_work_size[dim];
2317 }
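        // Example (illustrative): local_work_size = {8, 8, 4} gives numWorkItems = 256, which
        // must not exceed the CL_KERNEL_WORK_GROUP_SIZE reported for this kernel and device,
        // as checked below.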
2318
2319 // CL_INVALID_WORK_GROUP_SIZE if local_work_size is specified
2320 // and the total number of work-items in the work-group computed as
2321 // local_work_size[0] x ... local_work_size[work_dim - 1] is greater than the value
2322 // specified by CL_KERNEL_WORK_GROUP_SIZE in the Kernel Object Device Queries table.
2323 if (numWorkItems > krnl.getInfo().workGroups[queue.getDeviceIndex()].workGroupSize)
2324 {
2325 return CL_INVALID_WORK_GROUP_SIZE;
2326 }
2327 }
2328
2329 return CL_SUCCESS;
2330 }
2331
2332 cl_int ValidateEnqueueNativeKernel(cl_command_queue command_queue,
2333 void(CL_CALLBACK *user_func)(void *),
2334 const void *args,
2335 size_t cb_args,
2336 cl_uint num_mem_objects,
2337 const cl_mem *mem_list,
2338 const void **args_mem_loc,
2339 cl_uint num_events_in_wait_list,
2340 const cl_event *event_wait_list,
2341 const cl_event *event)
2342 {
2343 ANGLE_VALIDATE(ValidateCommandQueueAndEventWaitList(command_queue, false,
2344 num_events_in_wait_list, event_wait_list));
2345 const CommandQueue &queue = command_queue->cast<CommandQueue>();
2346
2347 // CL_INVALID_OPERATION if the device associated with command_queue
2348 // cannot execute the native kernel.
2349 if (queue.getDevice().getInfo().execCapabilities.excludes(CL_EXEC_NATIVE_KERNEL))
2350 {
2351 return CL_INVALID_OPERATION;
2352 }
2353
2354 // CL_INVALID_VALUE if user_func is NULL.
2355 if (user_func == nullptr)
2356 {
2357 return CL_INVALID_VALUE;
2358 }
2359
2360 if (args == nullptr)
2361 {
2362 // CL_INVALID_VALUE if args is a NULL value and cb_args > 0 or num_mem_objects > 0.
2363 if (cb_args > 0u || num_mem_objects > 0u)
2364 {
2365 return CL_INVALID_VALUE;
2366 }
2367 }
2368 else
2369 {
2370 // CL_INVALID_VALUE if args is not NULL and cb_args is 0.
2371 if (cb_args == 0u)
2372 {
2373 return CL_INVALID_VALUE;
2374 }
2375 }
2376
2377 if (num_mem_objects == 0u)
2378 {
2379 // CL_INVALID_VALUE if num_mem_objects = 0 and mem_list or args_mem_loc are not NULL.
2380 if (mem_list != nullptr || args_mem_loc != nullptr)
2381 {
2382 return CL_INVALID_VALUE;
2383 }
2384 }
2385 else
2386 {
2387 // CL_INVALID_VALUE if num_mem_objects > 0 and mem_list or args_mem_loc are NULL.
2388 if (mem_list == nullptr || args_mem_loc == nullptr)
2389 {
2390 return CL_INVALID_VALUE;
2391 }
2392
2393 // CL_INVALID_MEM_OBJECT if one or more memory objects
2394 // specified in mem_list are not valid or are not buffer objects.
2395 while (num_mem_objects-- != 0u)
2396 {
2397 if (!Buffer::IsValid(*mem_list++))
2398 {
2399 return CL_INVALID_MEM_OBJECT;
2400 }
2401 }
2402 }
2403
2404 return CL_SUCCESS;
2405 }
2406
2407 cl_int ValidateSetCommandQueueProperty(cl_command_queue command_queue,
2408 CommandQueueProperties properties,
2409 cl_bool enable,
2410 const cl_command_queue_properties *old_properties)
2411 {
2412 // CL_INVALID_COMMAND_QUEUE if command_queue is not a valid command-queue.
2413 if (!CommandQueue::IsValid(command_queue))
2414 {
2415 return CL_INVALID_COMMAND_QUEUE;
2416 }
2417
2418 // CL_INVALID_VALUE if values specified in properties are not valid.
2419 if (properties.hasOtherBitsThan(CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE |
2420 CL_QUEUE_PROFILING_ENABLE))
2421 {
2422 return CL_INVALID_VALUE;
2423 }
2424
2425 return CL_SUCCESS;
2426 }
2427
2428 cl_int ValidateCreateImage2D(cl_context context,
2429 MemFlags flags,
2430 const cl_image_format *image_format,
2431 size_t image_width,
2432 size_t image_height,
2433 size_t image_row_pitch,
2434 const void *host_ptr)
2435 {
2436 const cl_image_desc desc = {CL_MEM_OBJECT_IMAGE2D, image_width, image_height, 0u, 0u,
2437 image_row_pitch, 0u, 0u, 0u, {nullptr}};
2438 return ValidateCreateImage(context, flags, image_format, &desc, host_ptr);
2439 }
2440
2441 cl_int ValidateCreateImage3D(cl_context context,
2442 MemFlags flags,
2443 const cl_image_format *image_format,
2444 size_t image_width,
2445 size_t image_height,
2446 size_t image_depth,
2447 size_t image_row_pitch,
2448 size_t image_slice_pitch,
2449 const void *host_ptr)
2450 {
2451 const cl_image_desc desc = {
2452 CL_MEM_OBJECT_IMAGE3D, image_width, image_height, image_depth, 0u,
2453 image_row_pitch, image_slice_pitch, 0u, 0u, {nullptr}};
2454 return ValidateCreateImage(context, flags, image_format, &desc, host_ptr);
2455 }
2456
2457 cl_int ValidateEnqueueMarker(cl_command_queue command_queue, const cl_event *event)
2458 {
2459 // CL_INVALID_COMMAND_QUEUE if command_queue is not a valid host command-queue.
2460 if (!CommandQueue::IsValid(command_queue) || !command_queue->cast<CommandQueue>().isOnHost())
2461 {
2462 return CL_INVALID_COMMAND_QUEUE;
2463 }
2464
2465 // CL_INVALID_VALUE if event is NULL.
2466 if (event == nullptr)
2467 {
2468 return CL_INVALID_VALUE;
2469 }
2470
2471 return CL_SUCCESS;
2472 }
2473
2474 cl_int ValidateEnqueueWaitForEvents(cl_command_queue command_queue,
2475 cl_uint num_events,
2476 const cl_event *event_list)
2477 {
2478 // CL_INVALID_COMMAND_QUEUE if command_queue is not a valid host command-queue.
2479 if (!CommandQueue::IsValid(command_queue))
2480 {
2481 return CL_INVALID_COMMAND_QUEUE;
2482 }
2483 const CommandQueue &queue = command_queue->cast<CommandQueue>();
2484 if (!queue.isOnHost())
2485 {
2486 return CL_INVALID_COMMAND_QUEUE;
2487 }
2488
2489 // CL_INVALID_VALUE if num_events is 0 or event_list is NULL.
2490 if (num_events == 0u || event_list == nullptr)
2491 {
2492 return CL_INVALID_VALUE;
2493 }
2494
2495 while (num_events-- != 0u)
2496 {
2497 // The documentation for invalid events is missing.
2498 if (!Event::IsValid(*event_list))
2499 {
2500 return CL_INVALID_VALUE;
2501 }
2502
2503 // CL_INVALID_CONTEXT if context associated with command_queue
2504 // and events in event_list are not the same.
2505 if (&queue.getContext() != &(*event_list++)->cast<Event>().getContext())
2506 {
2507 return CL_INVALID_CONTEXT;
2508 }
2509 }
2510
2511 return CL_SUCCESS;
2512 }
2513
2514 cl_int ValidateEnqueueBarrier(cl_command_queue command_queue)
2515 {
2516 // CL_INVALID_COMMAND_QUEUE if command_queue is not a valid host command-queue.
2517 if (!CommandQueue::IsValid(command_queue) || !command_queue->cast<CommandQueue>().isOnHost())
2518 {
2519 return CL_INVALID_COMMAND_QUEUE;
2520 }
2521 return CL_SUCCESS;
2522 }
2523
2524 cl_int ValidateUnloadCompiler()
2525 {
2526 return CL_SUCCESS;
2527 }
2528
2529 cl_int ValidateGetExtensionFunctionAddress(const char *func_name)
2530 {
2531 return func_name != nullptr && *func_name != '\0' ? CL_SUCCESS : CL_INVALID_VALUE;
2532 }
2533
2534 cl_int ValidateCreateCommandQueue(cl_context context,
2535 cl_device_id device,
2536 CommandQueueProperties properties)
2537 {
2538 // CL_INVALID_CONTEXT if context is not a valid context.
2539 if (!Context::IsValid(context))
2540 {
2541 return CL_INVALID_CONTEXT;
2542 }
2543
2544 // CL_INVALID_DEVICE if device is not a valid device or is not associated with context.
2545 if (!context->cast<Context>().hasDevice(device))
2546 {
2547 return CL_INVALID_DEVICE;
2548 }
2549
2550 // CL_INVALID_VALUE if values specified in properties are not valid.
2551 if (properties.hasOtherBitsThan(CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE |
2552 CL_QUEUE_PROFILING_ENABLE))
2553 {
2554 return CL_INVALID_VALUE;
2555 }
2556
2557 return CL_SUCCESS;
2558 }
2559
2560 cl_int ValidateCreateSampler(cl_context context,
2561 cl_bool normalized_coords,
2562 AddressingMode addressing_mode,
2563 FilterMode filter_mode)
2564 {
2565 // CL_INVALID_CONTEXT if context is not a valid context.
2566 if (!Context::IsValid(context))
2567 {
2568 return CL_INVALID_CONTEXT;
2569 }
2570
2571 // CL_INVALID_VALUE if addressing_mode, filter_mode, normalized_coords
2572     // or a combination of these arguments are not valid.
2573 if ((normalized_coords != CL_FALSE && normalized_coords != CL_TRUE) ||
2574 addressing_mode == AddressingMode::InvalidEnum || filter_mode == FilterMode::InvalidEnum)
2575 {
2576 return CL_INVALID_VALUE;
2577 }
2578
2579 // CL_INVALID_OPERATION if images are not supported by any device associated with context.
2580 if (!context->cast<Context>().supportsImages())
2581 {
2582 return CL_INVALID_OPERATION;
2583 }
2584
2585 return CL_SUCCESS;
2586 }
2587
2588 cl_int ValidateEnqueueTask(cl_command_queue command_queue,
2589 cl_kernel kernel,
2590 cl_uint num_events_in_wait_list,
2591 const cl_event *event_wait_list,
2592 const cl_event *event)
2593 {
2594 ANGLE_VALIDATE(ValidateCommandQueueAndEventWaitList(command_queue, false,
2595 num_events_in_wait_list, event_wait_list));
2596
2597 // CL_INVALID_KERNEL if kernel is not a valid kernel object.
2598 if (!Kernel::IsValid(kernel))
2599 {
2600 return CL_INVALID_KERNEL;
2601 }
2602
2603 // CL_INVALID_CONTEXT if context associated with command_queue and kernel are not the same.
2604 if (&command_queue->cast<CommandQueue>().getContext() !=
2605 &kernel->cast<Kernel>().getProgram().getContext())
2606 {
2607 return CL_INVALID_CONTEXT;
2608 }
2609
2610 return CL_SUCCESS;
2611 }
2612
2613 // CL 1.1
2614 cl_int ValidateCreateSubBuffer(cl_mem buffer,
2615 MemFlags flags,
2616 cl_buffer_create_type buffer_create_type,
2617 const void *buffer_create_info)
2618 {
2619 // CL_INVALID_MEM_OBJECT if buffer is not a valid buffer object or is a sub-buffer object.
2620 if (!Buffer::IsValid(buffer))
2621 {
2622 return CL_INVALID_MEM_OBJECT;
2623 }
2624 const Buffer &buf = buffer->cast<Buffer>();
2625 if (buf.isSubBuffer() || !buf.getContext().getPlatform().isVersionOrNewer(1u, 1u))
2626 {
2627 return CL_INVALID_MEM_OBJECT;
2628 }
2629
2630 if (!ValidateMemoryFlags(flags, buf.getContext().getPlatform()))
2631 {
2632 return CL_INVALID_VALUE;
2633 }
2634
2635 const MemFlags bufFlags = buf.getFlags();
2636 // CL_INVALID_VALUE if buffer was created with CL_MEM_WRITE_ONLY
2637 // and flags specifies CL_MEM_READ_WRITE or CL_MEM_READ_ONLY,
2638 if ((bufFlags.intersects(CL_MEM_WRITE_ONLY) &&
2639 flags.intersects(CL_MEM_READ_WRITE | CL_MEM_READ_ONLY)) ||
2640 // or if buffer was created with CL_MEM_READ_ONLY
2641 // and flags specifies CL_MEM_READ_WRITE or CL_MEM_WRITE_ONLY,
2642 (bufFlags.intersects(CL_MEM_READ_ONLY) &&
2643 flags.intersects(CL_MEM_READ_WRITE | CL_MEM_WRITE_ONLY)) ||
2644 // or if flags specifies CL_MEM_USE_HOST_PTR, CL_MEM_ALLOC_HOST_PTR or CL_MEM_COPY_HOST_PTR.
2645 flags.intersects(CL_MEM_USE_HOST_PTR | CL_MEM_ALLOC_HOST_PTR | CL_MEM_COPY_HOST_PTR))
2646 {
2647 return CL_INVALID_VALUE;
2648 }
2649
2650 // CL_INVALID_VALUE if buffer was created with CL_MEM_HOST_WRITE_ONLY
2651 // and flags specify CL_MEM_HOST_READ_ONLY,
2652 if ((bufFlags.intersects(CL_MEM_HOST_WRITE_ONLY) && flags.intersects(CL_MEM_HOST_READ_ONLY)) ||
2653 // or if buffer was created with CL_MEM_HOST_READ_ONLY
2654 // and flags specify CL_MEM_HOST_WRITE_ONLY,
2655 (bufFlags.intersects(CL_MEM_HOST_READ_ONLY) && flags.intersects(CL_MEM_HOST_WRITE_ONLY)) ||
2656 // or if buffer was created with CL_MEM_HOST_NO_ACCESS
2657 // and flags specify CL_MEM_HOST_READ_ONLY or CL_MEM_HOST_WRITE_ONLY.
2658 (bufFlags.intersects(CL_MEM_HOST_NO_ACCESS) &&
2659 flags.intersects(CL_MEM_HOST_READ_ONLY | CL_MEM_HOST_WRITE_ONLY)))
2660 {
2661 return CL_INVALID_VALUE;
2662 }
2663
2664 // CL_INVALID_VALUE if the value specified in buffer_create_type is not valid.
2665 if (buffer_create_type != CL_BUFFER_CREATE_TYPE_REGION)
2666 {
2667 return CL_INVALID_VALUE;
2668 }
2669
2670 // CL_INVALID_VALUE if value(s) specified in buffer_create_info
2671 // (for a given buffer_create_type) is not valid or if buffer_create_info is NULL.
2672 // CL_INVALID_VALUE if the region specified by the cl_buffer_region structure
2673 // passed in buffer_create_info is out of bounds in buffer.
2674 const cl_buffer_region *region = static_cast<const cl_buffer_region *>(buffer_create_info);
2675 if (region == nullptr || !buf.isRegionValid(*region))
2676 {
2677 return CL_INVALID_VALUE;
2678 }
2679
2680 // CL_INVALID_BUFFER_SIZE if the size field of the cl_buffer_region structure
2681 // passed in buffer_create_info is 0.
2682 if (region->size == 0u)
2683 {
2684 return CL_INVALID_BUFFER_SIZE;
2685 }
2686
2687 // CL_MISALIGNED_SUB_BUFFER_OFFSET when the sub-buffer object is created with an offset that is
2688 // not aligned to CL_DEVICE_MEM_BASE_ADDR_ALIGN value (which is in bits!) for devices associated
2689 // with the context.
2690 const Memory &memory = buffer->cast<Memory>();
2691 for (const DevicePtr &device : memory.getContext().getDevices())
2692 {
2693 if (region->origin % (device->getInfo().memBaseAddrAlign / CHAR_BIT) != 0)
2694 {
2695 return CL_MISALIGNED_SUB_BUFFER_OFFSET;
2696 }
2697 }
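    // Example (illustrative): a device reporting CL_DEVICE_MEM_BASE_ADDR_ALIGN = 1024 (bits)
    // requires region->origin to be a multiple of 1024 / CHAR_BIT = 128 bytes.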
2698
2699 return CL_SUCCESS;
2700 }
2701
2702 cl_int ValidateSetMemObjectDestructorCallback(cl_mem memobj,
2703 void(CL_CALLBACK *pfn_notify)(cl_mem memobj,
2704 void *user_data),
2705 const void *user_data)
2706 {
2707 // CL_INVALID_MEM_OBJECT if memobj is not a valid memory object.
2708 if (!Memory::IsValid(memobj))
2709 {
2710 return CL_INVALID_MEM_OBJECT;
2711 }
2712
2713 // CL_INVALID_VALUE if pfn_notify is NULL.
2714 if (pfn_notify == nullptr)
2715 {
2716 return CL_INVALID_VALUE;
2717 }
2718
2719 return CL_SUCCESS;
2720 }
2721
2722 cl_int ValidateCreateUserEvent(cl_context context)
2723 {
2724 // CL_INVALID_CONTEXT if context is not a valid context.
2725 return Context::IsValidAndVersionOrNewer(context, 1u, 1u) ? CL_SUCCESS : CL_INVALID_CONTEXT;
2726 }
2727
2728 cl_int ValidateSetUserEventStatus(cl_event event, cl_int execution_status)
2729 {
2730 // CL_INVALID_EVENT if event is not a valid user event object.
2731 if (!Event::IsValid(event))
2732 {
2733 return CL_INVALID_EVENT;
2734 }
2735 const Event &evt = event->cast<Event>();
2736 if (!evt.getContext().getPlatform().isVersionOrNewer(1u, 1u) ||
2737 evt.getCommandType() != CL_COMMAND_USER)
2738 {
2739 return CL_INVALID_EVENT;
2740 }
2741
2742 // CL_INVALID_VALUE if the execution_status is not CL_COMPLETE or a negative integer value.
2743 if (execution_status != CL_COMPLETE && execution_status >= 0)
2744 {
2745 return CL_INVALID_VALUE;
2746 }
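    // Illustrative: CL_COMPLETE and negative application-defined error codes (for example -1)
    // are accepted above; CL_RUNNING, CL_SUBMITTED, and any other positive value are rejected.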
2747
2748 // CL_INVALID_OPERATION if the execution_status for event has already been changed
2749 // by a previous call to clSetUserEventStatus.
2750 if (evt.wasStatusChanged())
2751 {
2752 return CL_INVALID_OPERATION;
2753 }
2754
2755 return CL_SUCCESS;
2756 }
2757
2758 cl_int ValidateSetEventCallback(cl_event event,
2759 cl_int command_exec_callback_type,
2760 void(CL_CALLBACK *pfn_notify)(cl_event event,
2761 cl_int event_command_status,
2762 void *user_data),
2763 const void *user_data)
2764 {
2765 // CL_INVALID_EVENT if event is not a valid event object.
2766 if (!Event::IsValid(event) ||
2767 !event->cast<Event>().getContext().getPlatform().isVersionOrNewer(1u, 1u))
2768 {
2769 return CL_INVALID_EVENT;
2770 }
2771
2772 // CL_INVALID_VALUE if pfn_event_notify is NULL
2773 // or if command_exec_callback_type is not CL_SUBMITTED, CL_RUNNING, or CL_COMPLETE.
2774 if (pfn_notify == nullptr ||
2775 (command_exec_callback_type != CL_SUBMITTED && command_exec_callback_type != CL_RUNNING &&
2776 command_exec_callback_type != CL_COMPLETE))
2777 {
2778 return CL_INVALID_VALUE;
2779 }
2780
2781 return CL_SUCCESS;
2782 }
2783
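// Illustrative note for the rectangular transfers below: the first component of buffer_origin,
// host_origin, and region is expressed in bytes, and a row or slice pitch of 0 lets the runtime
// derive a tightly packed pitch; ValidateBufferRect and ValidateHostRect enforce the resulting
// bounds.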
2784 cl_int ValidateEnqueueReadBufferRect(cl_command_queue command_queue,
2785 cl_mem buffer,
2786 cl_bool blocking_read,
2787 const size_t *buffer_origin,
2788 const size_t *host_origin,
2789 const size_t *region,
2790 size_t buffer_row_pitch,
2791 size_t buffer_slice_pitch,
2792 size_t host_row_pitch,
2793 size_t host_slice_pitch,
2794 const void *ptr,
2795 cl_uint num_events_in_wait_list,
2796 const cl_event *event_wait_list,
2797 const cl_event *event)
2798 {
2799 ANGLE_VALIDATE(ValidateCommandQueueAndEventWaitList(command_queue, false,
2800 num_events_in_wait_list, event_wait_list));
2801 const CommandQueue &queue = command_queue->cast<CommandQueue>();
2802 if (!queue.getContext().getPlatform().isVersionOrNewer(1u, 1u))
2803 {
2804 return CL_INVALID_COMMAND_QUEUE;
2805 }
2806
2807 ANGLE_VALIDATE(ValidateEnqueueBuffer(queue, buffer, true, false));
2808 ANGLE_VALIDATE(ValidateBufferRect(buffer->cast<Buffer>(), buffer_origin, region,
2809 buffer_row_pitch, buffer_slice_pitch));
2810 ANGLE_VALIDATE(ValidateHostRect(host_origin, region, host_row_pitch, host_slice_pitch, ptr));
2811
2812 return CL_SUCCESS;
2813 }
2814
2815 cl_int ValidateEnqueueWriteBufferRect(cl_command_queue command_queue,
2816 cl_mem buffer,
2817 cl_bool blocking_write,
2818 const size_t *buffer_origin,
2819 const size_t *host_origin,
2820 const size_t *region,
2821 size_t buffer_row_pitch,
2822 size_t buffer_slice_pitch,
2823 size_t host_row_pitch,
2824 size_t host_slice_pitch,
2825 const void *ptr,
2826 cl_uint num_events_in_wait_list,
2827 const cl_event *event_wait_list,
2828 const cl_event *event)
2829 {
2830 ANGLE_VALIDATE(ValidateCommandQueueAndEventWaitList(command_queue, false,
2831 num_events_in_wait_list, event_wait_list));
2832 const CommandQueue &queue = command_queue->cast<CommandQueue>();
2833 if (!queue.getContext().getPlatform().isVersionOrNewer(1u, 1u))
2834 {
2835 return CL_INVALID_COMMAND_QUEUE;
2836 }
2837
2838 ANGLE_VALIDATE(ValidateEnqueueBuffer(queue, buffer, false, true));
2839 ANGLE_VALIDATE(ValidateBufferRect(buffer->cast<Buffer>(), buffer_origin, region,
2840 buffer_row_pitch, buffer_slice_pitch));
2841 ANGLE_VALIDATE(ValidateHostRect(host_origin, region, host_row_pitch, host_slice_pitch, ptr));
2842
2843 return CL_SUCCESS;
2844 }
2845
2846 cl_int ValidateEnqueueCopyBufferRect(cl_command_queue command_queue,
2847 cl_mem src_buffer,
2848 cl_mem dst_buffer,
2849 const size_t *src_origin,
2850 const size_t *dst_origin,
2851 const size_t *region,
2852 size_t src_row_pitch,
2853 size_t src_slice_pitch,
2854 size_t dst_row_pitch,
2855 size_t dst_slice_pitch,
2856 cl_uint num_events_in_wait_list,
2857 const cl_event *event_wait_list,
2858 const cl_event *event)
2859 {
2860 ANGLE_VALIDATE(ValidateCommandQueueAndEventWaitList(command_queue, false,
2861 num_events_in_wait_list, event_wait_list));
2862 const CommandQueue &queue = command_queue->cast<CommandQueue>();
2863 if (!queue.getContext().getPlatform().isVersionOrNewer(1u, 1u))
2864 {
2865 return CL_INVALID_COMMAND_QUEUE;
2866 }
2867
2868 ANGLE_VALIDATE(ValidateEnqueueBuffer(queue, src_buffer, false, false));
2869 const Buffer &src = src_buffer->cast<Buffer>();
2870
2871 ANGLE_VALIDATE(ValidateEnqueueBuffer(queue, dst_buffer, false, false));
2872 const Buffer &dst = dst_buffer->cast<Buffer>();
2873
2874 ANGLE_VALIDATE(ValidateBufferRect(src, src_origin, region, src_row_pitch, src_slice_pitch));
2875 ANGLE_VALIDATE(ValidateBufferRect(dst, dst_origin, region, dst_row_pitch, dst_slice_pitch));
2876
2877 // CL_INVALID_VALUE if src_buffer and dst_buffer are the same buffer object and src_slice_pitch
2878 // is not equal to dst_slice_pitch or src_row_pitch is not equal to dst_row_pitch.
2879 if (&src == &dst && (src_slice_pitch != dst_slice_pitch || src_row_pitch != dst_row_pitch))
2880 {
2881 return CL_INVALID_VALUE;
2882 }
2883
2884 return CL_SUCCESS;
2885 }
2886
2887 // CL 1.2
2888 cl_int ValidateCreateSubDevices(cl_device_id in_device,
2889 const cl_device_partition_property *properties,
2890 cl_uint num_devices,
2891 const cl_device_id *out_devices,
2892 const cl_uint *num_devices_ret)
2893 {
2894 // CL_INVALID_DEVICE if in_device is not a valid device.
2895 if (!Device::IsValid(in_device))
2896 {
2897 return CL_INVALID_DEVICE;
2898 }
2899 const Device &device = in_device->cast<Device>();
2900 if (!device.isVersionOrNewer(1u, 2u))
2901 {
2902 return CL_INVALID_DEVICE;
2903 }
2904
2905 // CL_INVALID_VALUE if values specified in properties are not valid
2906 // or if values specified in properties are valid but not supported by the device
2907 const std::vector<cl_device_partition_property> &devProps =
2908 device.getInfo().partitionProperties;
2909 if (properties == nullptr ||
2910 std::find(devProps.cbegin(), devProps.cend(), *properties) == devProps.cend())
2911 {
2912 return CL_INVALID_VALUE;
2913 }
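    // Example (illustrative): properties = {CL_DEVICE_PARTITION_EQUALLY, 4, 0} is accepted
    // only if the device lists CL_DEVICE_PARTITION_EQUALLY in its partitionProperties.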
2914
2915 return CL_SUCCESS;
2916 }
2917
2918 cl_int ValidateRetainDevice(cl_device_id device)
2919 {
2920 // CL_INVALID_DEVICE if device is not a valid device.
2921 if (!Device::IsValid(device) || !device->cast<Device>().isVersionOrNewer(1u, 2u))
2922 {
2923 return CL_INVALID_DEVICE;
2924 }
2925 return CL_SUCCESS;
2926 }
2927
2928 cl_int ValidateReleaseDevice(cl_device_id device)
2929 {
2930 // CL_INVALID_DEVICE if device is not a valid device.
2931 if (!Device::IsValid(device) || !device->cast<Device>().isVersionOrNewer(1u, 2u))
2932 {
2933 return CL_INVALID_DEVICE;
2934 }
2935 return CL_SUCCESS;
2936 }
2937
2938 cl_int ValidateCreateImage(cl_context context,
2939 MemFlags flags,
2940 const cl_image_format *image_format,
2941 const cl_image_desc *image_desc,
2942 const void *host_ptr)
2943 {
2944 // CL_INVALID_CONTEXT if context is not a valid context.
2945 if (!Context::IsValidAndVersionOrNewer(context, 1u, 2u))
2946 {
2947 return CL_INVALID_CONTEXT;
2948 }
2949 const Context &ctx = context->cast<Context>();
2950
2951 // CL_INVALID_VALUE if values specified in flags are not valid.
2952 if (!ValidateMemoryFlags(flags, ctx.getPlatform()))
2953 {
2954 return CL_INVALID_VALUE;
2955 }
2956
2957 // CL_INVALID_IMAGE_FORMAT_DESCRIPTOR if values specified in image_format are not valid
2958 // or if image_format is NULL.
2959 if (!IsValidImageFormat(image_format, ctx.getPlatform().getInfo()))
2960 {
2961 return CL_INVALID_IMAGE_FORMAT_DESCRIPTOR;
2962 }
2963
2964 // CL_INVALID_IMAGE_DESCRIPTOR if image_desc is NULL.
2965 if (image_desc == nullptr)
2966 {
2967 return CL_INVALID_IMAGE_DESCRIPTOR;
2968 }
2969
2970 const size_t elemSize = GetElementSize(*image_format);
2971 if (elemSize == 0u)
2972 {
2973 ASSERT(false);
2974 ERR() << "Failed to calculate image element size";
2975 return CL_INVALID_IMAGE_FORMAT_DESCRIPTOR;
2976 }
2977 const size_t rowPitch = image_desc->image_row_pitch != 0u ? image_desc->image_row_pitch
2978 : image_desc->image_width * elemSize;
2979 const size_t imageHeight =
2980 image_desc->image_type == CL_MEM_OBJECT_IMAGE1D_ARRAY ? 1u : image_desc->image_height;
2981 const size_t sliceSize = imageHeight * rowPitch;
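    // Example (illustrative): a 640x480 CL_RGBA / CL_UNORM_INT8 image has elemSize = 4, so the
    // default rowPitch is 640 * 4 = 2560 bytes and sliceSize is 480 * 2560 = 1228800 bytes.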
2982
2983 // CL_INVALID_IMAGE_DESCRIPTOR if values specified in image_desc are not valid.
2984 switch (FromCLenum<MemObjectType>(image_desc->image_type))
2985 {
2986 case MemObjectType::Image1D:
2987 if (image_desc->image_width == 0u)
2988 {
2989 return CL_INVALID_IMAGE_DESCRIPTOR;
2990 }
2991 break;
2992 case MemObjectType::Image2D:
2993 if (image_desc->image_width == 0u || image_desc->image_height == 0u)
2994 {
2995 return CL_INVALID_IMAGE_DESCRIPTOR;
2996 }
2997 break;
2998 case MemObjectType::Image3D:
2999 if (image_desc->image_width == 0u || image_desc->image_height == 0u ||
3000 image_desc->image_depth == 0u)
3001 {
3002 return CL_INVALID_IMAGE_DESCRIPTOR;
3003 }
3004 break;
3005 case MemObjectType::Image1D_Array:
3006 if (image_desc->image_width == 0u || image_desc->image_array_size == 0u)
3007 {
3008 return CL_INVALID_IMAGE_DESCRIPTOR;
3009 }
3010 break;
3011 case MemObjectType::Image2D_Array:
3012 if (image_desc->image_width == 0u || image_desc->image_height == 0u ||
3013 image_desc->image_array_size == 0u)
3014 {
3015 return CL_INVALID_IMAGE_DESCRIPTOR;
3016 }
3017 break;
3018 case MemObjectType::Image1D_Buffer:
3019 if (image_desc->image_width == 0u)
3020 {
3021 return CL_INVALID_IMAGE_DESCRIPTOR;
3022 }
3023 break;
3024 default:
3025 return CL_INVALID_IMAGE_DESCRIPTOR;
3026 }
3027 if (image_desc->image_row_pitch != 0u)
3028 {
3029 // image_row_pitch must be 0 if host_ptr is NULL.
3030 if (host_ptr == nullptr)
3031 {
3032 return CL_INVALID_IMAGE_DESCRIPTOR;
3033 }
3034 // image_row_pitch can be either 0
3035 // or >= image_width * size of element in bytes if host_ptr is not NULL.
3036 if (image_desc->image_row_pitch < image_desc->image_width * elemSize)
3037 {
3038 return CL_INVALID_IMAGE_DESCRIPTOR;
3039 }
3040 // If image_row_pitch is not 0, it must be a multiple of the image element size in bytes.
3041 if ((image_desc->image_row_pitch % elemSize) != 0u)
3042 {
3043 return CL_INVALID_IMAGE_DESCRIPTOR;
3044 }
3045 }
3046 if (image_desc->image_slice_pitch != 0u)
3047 {
3048 // image_slice_pitch must be 0 if host_ptr is NULL.
3049 if (host_ptr == nullptr)
3050 {
3051 return CL_INVALID_IMAGE_DESCRIPTOR;
3052 }
3053 // If host_ptr is not NULL, image_slice_pitch can be either 0
3054 // or >= image_row_pitch * image_height for a 2D image array or 3D image
3055 // and can be either 0 or >= image_row_pitch for a 1D image array.
3056 if (image_desc->image_slice_pitch < sliceSize)
3057 {
3058 return CL_INVALID_IMAGE_DESCRIPTOR;
3059 }
3060 // If image_slice_pitch is not 0, it must be a multiple of the image_row_pitch.
3061 if ((image_desc->image_slice_pitch % rowPitch) != 0u)
3062 {
3063 return CL_INVALID_IMAGE_DESCRIPTOR;
3064 }
3065 }
3066 // num_mip_levels and num_samples must be 0.
3067 if (image_desc->num_mip_levels != 0u || image_desc->num_samples != 0u)
3068 {
3069 return CL_INVALID_IMAGE_DESCRIPTOR;
3070 }
3071 // buffer can be a buffer memory object if image_type is CL_MEM_OBJECT_IMAGE1D_BUFFER or
3072 // CL_MEM_OBJECT_IMAGE2D. buffer can be an image object if image_type is CL_MEM_OBJECT_IMAGE2D.
3073 // Otherwise it must be NULL.
3074 if (image_desc->buffer != nullptr &&
3075 (!Buffer::IsValid(image_desc->buffer) ||
3076 (image_desc->image_type != CL_MEM_OBJECT_IMAGE1D_BUFFER &&
3077 image_desc->image_type != CL_MEM_OBJECT_IMAGE2D)) &&
3078 (!Image::IsValid(image_desc->buffer) || image_desc->image_type != CL_MEM_OBJECT_IMAGE2D))
3079 {
3080 return CL_INVALID_IMAGE_DESCRIPTOR;
3081 }
3082
3083 // CL_INVALID_OPERATION if there are no devices in context that support images.
3084 if (!ctx.supportsImages())
3085 {
3086 return CL_INVALID_OPERATION;
3087 }
3088
3089 // CL_INVALID_IMAGE_SIZE if image dimensions specified in image_desc exceed the maximum
3090 // image dimensions described in the Device Queries table for all devices in context.
3091 const DevicePtrs &devices = ctx.getDevices();
3092 if (std::find_if(devices.cbegin(), devices.cend(), [&](const DevicePtr &ptr) {
3093 return ptr->supportsNativeImageDimensions(*image_desc);
3094 }) == devices.cend())
3095 {
3096 return CL_INVALID_IMAGE_SIZE;
3097 }
3098
3099 // CL_INVALID_HOST_PTR
3100 // if host_ptr is NULL and CL_MEM_USE_HOST_PTR or CL_MEM_COPY_HOST_PTR are set in flags or
3101 // if host_ptr is not NULL but CL_MEM_COPY_HOST_PTR or CL_MEM_USE_HOST_PTR are not set in flags.
3102 if ((host_ptr != nullptr) != flags.intersects(CL_MEM_USE_HOST_PTR | CL_MEM_COPY_HOST_PTR))
3103 {
3104 return CL_INVALID_HOST_PTR;
3105 }
3106
3107 return CL_SUCCESS;
3108 }
3109
3110 cl_int ValidateCreateProgramWithBuiltInKernels(cl_context context,
3111 cl_uint num_devices,
3112 const cl_device_id *device_list,
3113 const char *kernel_names)
3114 {
3115 // CL_INVALID_CONTEXT if context is not a valid context.
3116 if (!Context::IsValidAndVersionOrNewer(context, 1u, 2u))
3117 {
3118 return CL_INVALID_CONTEXT;
3119 }
3120 const Context &ctx = context->cast<Context>();
3121
3122 // CL_INVALID_VALUE if device_list is NULL or num_devices is zero or if kernel_names is NULL.
3123 if (device_list == nullptr || num_devices == 0u || kernel_names == nullptr)
3124 {
3125 return CL_INVALID_VALUE;
3126 }
3127
3128 // CL_INVALID_DEVICE if any device in device_list
3129 // is not in the list of devices associated with context.
3130 for (size_t index = 0u; index < num_devices; ++index)
3131 {
3132 if (!ctx.hasDevice(device_list[index]))
3133 {
3134 return CL_INVALID_DEVICE;
3135 }
3136 }
3137
3138 // CL_INVALID_VALUE if kernel_names contains a kernel name
3139 // that is not supported by any of the devices in device_list.
3140 const char *start = kernel_names;
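// kernel_names is a semicolon-separated list; extract each name between separators and check
// that the context exposes it as a built-in kernel.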
3141 do
3142 {
3143 const char *end = start;
3144 while (*end != '\0' && *end != ';')
3145 {
3146 ++end;
3147 }
3148 const size_t length = end - start;
3149 if (length != 0u && !ctx.supportsBuiltInKernel(std::string(start, length)))
3150 {
3151 return CL_INVALID_VALUE;
3152 }
3153 start = end;
3154 } while (*start++ != '\0');
3155
3156 return CL_SUCCESS;
3157 }
3158
3159 cl_int ValidateCompileProgram(cl_program program,
3160 cl_uint num_devices,
3161 const cl_device_id *device_list,
3162 const char *options,
3163 cl_uint num_input_headers,
3164 const cl_program *input_headers,
3165 const char **header_include_names,
3166 void(CL_CALLBACK *pfn_notify)(cl_program program, void *user_data),
3167 const void *user_data)
3168 {
3169 // CL_INVALID_PROGRAM if program is not a valid program object.
3170 if (!Program::IsValid(program))
3171 {
3172 return CL_INVALID_PROGRAM;
3173 }
3174 const Program &prog = program->cast<Program>();
3175 if (!prog.getContext().getPlatform().isVersionOrNewer(1u, 2u))
3176 {
3177 return CL_INVALID_PROGRAM;
3178 }
3179
3180 // CL_INVALID_OPERATION if program has no OpenCL C source available, i.e. it was not created with clCreateProgramWithSource.
3181 if (prog.getSource().empty())
3182 {
3183 ERR() << "No OpenCL C source available from program object (" << &prog << ")!";
3184 return CL_INVALID_OPERATION;
3185 }
3186
3187 // CL_INVALID_VALUE if device_list is NULL and num_devices is greater than zero,
3188 // or if device_list is not NULL and num_devices is zero.
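// The boolean inequality rejects exactly the two mismatched combinations in a single test.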
3189 if ((device_list != nullptr) != (num_devices != 0u))
3190 {
3191 return CL_INVALID_VALUE;
3192 }
3193
3194 // CL_INVALID_DEVICE if any device in device_list
3195 // is not in the list of devices associated with program.
3196 while (num_devices-- != 0u)
3197 {
3198 if (!prog.hasDevice(*device_list++))
3199 {
3200 return CL_INVALID_DEVICE;
3201 }
3202 }
3203
3204 // CL_INVALID_VALUE if num_input_headers is zero and header_include_names
3205 // or input_headers are not NULL
3206 // or if num_input_headers is not zero and header_include_names or input_headers are NULL.
3207 if ((num_input_headers != 0u) != (header_include_names != nullptr) ||
3208 (num_input_headers != 0u) != (input_headers != nullptr))
3209 {
3210 return CL_INVALID_VALUE;
3211 }
3212
3213 // CL_INVALID_VALUE if pfn_notify is NULL but user_data is not NULL.
3214 if (pfn_notify == nullptr && user_data != nullptr)
3215 {
3216 return CL_INVALID_VALUE;
3217 }
3218
3219 // CL_INVALID_OPERATION if the build of a program executable for any of the devices listed
3220 // in device_list by a previous call to clBuildProgram for program has not completed.
3221 if (prog.isBuilding())
3222 {
3223 return CL_INVALID_OPERATION;
3224 }
3225
3226 // CL_INVALID_OPERATION if there are kernel objects attached to program.
3227 if (prog.hasAttachedKernels())
3228 {
3229 return CL_INVALID_OPERATION;
3230 }
3231
3232 return CL_SUCCESS;
3233 }
3234
3235 cl_int ValidateLinkProgram(cl_context context,
3236 cl_uint num_devices,
3237 const cl_device_id *device_list,
3238 const char *options,
3239 cl_uint num_input_programs,
3240 const cl_program *input_programs,
3241 void(CL_CALLBACK *pfn_notify)(cl_program program, void *user_data),
3242 const void *user_data)
3243 {
3244 // CL_INVALID_CONTEXT if context is not a valid context.
3245 if (!Context::IsValidAndVersionOrNewer(context, 1u, 2u))
3246 {
3247 return CL_INVALID_CONTEXT;
3248 }
3249 const Context &ctx = context->cast<Context>();
3250
3251 // CL_INVALID_OPERATION if the compilation or build of a program executable for any of the
3252 // devices listed in device_list by a previous call to clCompileProgram or clBuildProgram for
3253 // program has not completed.
3254 for (size_t i = 0; i < num_devices; ++i)
3255 {
3256 Device &device = device_list[i]->cast<Device>();
3257 for (size_t j = 0; j < num_input_programs; ++j)
3258 {
3259 cl_build_status buildStatus = CL_BUILD_NONE;
3260 Program &program = input_programs[j]->cast<Program>();
3261 if (IsError(program.getBuildInfo(device.getNative(), ProgramBuildInfo::Status,
3262 sizeof(cl_build_status), &buildStatus, nullptr)))
3263 {
3264 return CL_INVALID_PROGRAM;
3265 }
3266
3267 if (buildStatus != CL_BUILD_SUCCESS)
3268 {
3269 return CL_INVALID_OPERATION;
3270 }
3271 }
3272 }
3273
3274 // CL_INVALID_OPERATION if the rules for devices containing compiled binaries or libraries as
3275 // described in input_programs argument below are not followed.
3276 //
3277 // All programs specified by input_programs contain a compiled binary or library for the device.
3278 // In this case, a link is performed to generate a program executable for this device.
3279 //
3280 // None of the programs contain a compiled binary or library for that device. In this case, no
3281 // link is performed and there will be no program executable generated for this device.
3282 //
3283 // All other cases will return a CL_INVALID_OPERATION error.
3284 BitField libraryOrObject(CL_PROGRAM_BINARY_TYPE_LIBRARY |
3285 CL_PROGRAM_BINARY_TYPE_COMPILED_OBJECT);
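// For each device, remember whether any input program has already supplied a compiled object or
// library; encountering a program without one afterwards violates the all-or-none rule above.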
3286 for (size_t i = 0; i < num_devices; ++i)
3287 {
3288 bool foundAnyLibraryOrObject = false;
3289 Device &device = device_list[i]->cast<Device>();
3290 for (size_t j = 0; j < num_input_programs; ++j)
3291 {
3292 cl_program_binary_type binaryType = CL_PROGRAM_BINARY_TYPE_NONE;
3293 Program &program = input_programs[j]->cast<Program>();
3294 if (IsError(program.getBuildInfo(device.getNative(), ProgramBuildInfo::BinaryType,
3295 sizeof(cl_program_binary_type), &binaryType, nullptr)))
3296 {
3297 return CL_INVALID_PROGRAM;
3298 }
3299
3300 if (libraryOrObject.excludes(binaryType))
3301 {
3302 if (foundAnyLibraryOrObject)
3303 {
3304 return CL_INVALID_OPERATION;
3305 }
3306 }
3307 else
3308 {
3309 foundAnyLibraryOrObject = true;
3310 }
3311 }
3312 }
3313
3314 // CL_INVALID_VALUE if device_list is NULL and num_devices is greater than zero,
3315 // or if device_list is not NULL and num_devices is zero.
3316 if ((device_list != nullptr) != (num_devices != 0u))
3317 {
3318 return CL_INVALID_VALUE;
3319 }
3320
3321 // CL_INVALID_DEVICE if any device in device_list
3322 // is not in the list of devices associated with context.
3323 while (num_devices-- != 0u)
3324 {
3325 if (!ctx.hasDevice(*device_list++))
3326 {
3327 return CL_INVALID_DEVICE;
3328 }
3329 }
3330
3331 // CL_INVALID_VALUE if num_input_programs is zero or input_programs is NULL.
3332 if (num_input_programs == 0u || input_programs == nullptr)
3333 {
3334 return CL_INVALID_VALUE;
3335 }
3336
3337 // CL_INVALID_PROGRAM if programs specified in input_programs are not valid program objects.
3338 while (num_input_programs-- != 0u)
3339 {
3340 if (!Program::IsValid(*input_programs++))
3341 {
3342 return CL_INVALID_PROGRAM;
3343 }
3344 }
3345
3346 // CL_INVALID_VALUE if pfn_notify is NULL but user_data is not NULL.
3347 if (pfn_notify == nullptr && user_data != nullptr)
3348 {
3349 return CL_INVALID_VALUE;
3350 }
3351
3352 return CL_SUCCESS;
3353 }
3354
3355 cl_int ValidateUnloadPlatformCompiler(cl_platform_id platform)
3356 {
3357 // CL_INVALID_PLATFORM if platform is not a valid platform.
3358 if (!Platform::IsValid(platform) || !platform->cast<Platform>().isVersionOrNewer(1u, 2u))
3359 {
3360 return CL_INVALID_PLATFORM;
3361 }
3362 return CL_SUCCESS;
3363 }
3364
3365 cl_int ValidateGetKernelArgInfo(cl_kernel kernel,
3366 cl_uint arg_index,
3367 KernelArgInfo param_name,
3368 size_t param_value_size,
3369 const void *param_value,
3370 const size_t *param_value_size_ret)
3371 {
3372 // CL_INVALID_KERNEL if kernel is not a valid kernel object.
3373 if (!Kernel::IsValid(kernel))
3374 {
3375 return CL_INVALID_KERNEL;
3376 }
3377 const Kernel &krnl = kernel->cast<Kernel>();
3378 if (!krnl.getProgram().getContext().getPlatform().isVersionOrNewer(1u, 2u))
3379 {
3380 return CL_INVALID_KERNEL;
3381 }
3382
3383 // CL_INVALID_ARG_INDEX if arg_index is not a valid argument index.
3384 if (arg_index >= krnl.getInfo().args.size())
3385 {
3386 return CL_INVALID_ARG_INDEX;
3387 }
3388
3389 // CL_KERNEL_ARG_INFO_NOT_AVAILABLE if the argument information is not available for kernel.
3390 if (!krnl.getInfo().args[arg_index].isAvailable())
3391 {
3392 return CL_KERNEL_ARG_INFO_NOT_AVAILABLE;
3393 }
3394
3395 // CL_INVALID_VALUE if param_name is not valid.
3396 if (param_name == KernelArgInfo::InvalidEnum)
3397 {
3398 return CL_INVALID_VALUE;
3399 }
3400
3401 return CL_SUCCESS;
3402 }
3403
3404 cl_int ValidateEnqueueFillBuffer(cl_command_queue command_queue,
3405 cl_mem buffer,
3406 const void *pattern,
3407 size_t pattern_size,
3408 size_t offset,
3409 size_t size,
3410 cl_uint num_events_in_wait_list,
3411 const cl_event *event_wait_list,
3412 const cl_event *event)
3413 {
3414 ANGLE_VALIDATE(ValidateCommandQueueAndEventWaitList(command_queue, false,
3415 num_events_in_wait_list, event_wait_list));
3416 const CommandQueue &queue = command_queue->cast<CommandQueue>();
3417 if (!queue.getContext().getPlatform().isVersionOrNewer(1u, 2u))
3418 {
3419 return CL_INVALID_COMMAND_QUEUE;
3420 }
3421
3422 ANGLE_VALIDATE(ValidateEnqueueBuffer(queue, buffer, false, false));
3423
3424 // CL_INVALID_VALUE if offset or offset + size require accessing
3425 // elements outside the buffer object.
3426 if (!buffer->cast<Buffer>().isRegionValid(offset, size))
3427 {
3428 return CL_INVALID_VALUE;
3429 }
3430
3431 // CL_INVALID_VALUE if pattern is NULL or if pattern_size is 0 or
3432 // if pattern_size is not one of { 1, 2, 4, 8, 16, 32, 64, 128 }.
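// Together with the upper bound of 128, the bitwise test accepts only powers of two:
// a non-zero value is a power of two exactly when (pattern_size & (pattern_size - 1)) == 0.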
3433 if (pattern == nullptr || pattern_size == 0u || pattern_size > 128u ||
3434 (pattern_size & (pattern_size - 1u)) != 0u)
3435 {
3436 return CL_INVALID_VALUE;
3437 }
3438
3439 // CL_INVALID_VALUE if offset and size are not a multiple of pattern_size.
3440 if ((offset % pattern_size) != 0u || (size % pattern_size) != 0u)
3441 {
3442 return CL_INVALID_VALUE;
3443 }
3444
3445 return CL_SUCCESS;
3446 }
3447
3448 cl_int ValidateEnqueueFillImage(cl_command_queue command_queue,
3449 cl_mem image,
3450 const void *fill_color,
3451 const size_t *origin,
3452 const size_t *region,
3453 cl_uint num_events_in_wait_list,
3454 const cl_event *event_wait_list,
3455 const cl_event *event)
3456 {
3457 ANGLE_VALIDATE(ValidateCommandQueueAndEventWaitList(command_queue, true,
3458 num_events_in_wait_list, event_wait_list));
3459 const CommandQueue &queue = command_queue->cast<CommandQueue>();
3460 if (!queue.getContext().getPlatform().isVersionOrNewer(1u, 2u))
3461 {
3462 return CL_INVALID_COMMAND_QUEUE;
3463 }
3464
3465 ANGLE_VALIDATE(ValidateEnqueueImage(queue, image, false, false));
3466 const Image &img = image->cast<Image>();
3467
3468 ANGLE_VALIDATE(ValidateImageForDevice(img, queue.getDevice(), origin, region));
3469
3470 // CL_INVALID_VALUE if fill_color is NULL.
3471 if (fill_color == nullptr)
3472 {
3473 return CL_INVALID_VALUE;
3474 }
3475
3476 return CL_SUCCESS;
3477 }
3478
3479 cl_int ValidateEnqueueMigrateMemObjects(cl_command_queue command_queue,
3480 cl_uint num_mem_objects,
3481 const cl_mem *mem_objects,
3482 MemMigrationFlags flags,
3483 cl_uint num_events_in_wait_list,
3484 const cl_event *event_wait_list,
3485 const cl_event *event)
3486 {
3487 ANGLE_VALIDATE(ValidateCommandQueueAndEventWaitList(command_queue, false,
3488 num_events_in_wait_list, event_wait_list));
3489 const CommandQueue &queue = command_queue->cast<CommandQueue>();
3490 if (!queue.getContext().getPlatform().isVersionOrNewer(1u, 2u))
3491 {
3492 return CL_INVALID_COMMAND_QUEUE;
3493 }
3494
3495 // CL_INVALID_VALUE if num_mem_objects is zero or if mem_objects is NULL.
3496 if (num_mem_objects == 0u || mem_objects == nullptr)
3497 {
3498 return CL_INVALID_VALUE;
3499 }
3500
3501 while (num_mem_objects-- != 0u)
3502 {
3503 // CL_INVALID_MEM_OBJECT if any of the memory objects
3504 // in mem_objects is not a valid memory object.
3505 if (!Memory::IsValid(*mem_objects))
3506 {
3507 return CL_INVALID_MEM_OBJECT;
3508 }
3509
3510 // CL_INVALID_CONTEXT if the context associated with command_queue
3511 // and memory objects in mem_objects are not the same.
3512 if (&queue.getContext() != &(*mem_objects++)->cast<Memory>().getContext())
3513 {
3514 return CL_INVALID_CONTEXT;
3515 }
3516 }
3517
3518 // CL_INVALID_VALUE if flags is not 0 or is not any of the values described in the table.
3519 const MemMigrationFlags allowedFlags(CL_MIGRATE_MEM_OBJECT_HOST |
3520 CL_MIGRATE_MEM_OBJECT_CONTENT_UNDEFINED);
3521 if (flags.hasOtherBitsThan(allowedFlags))
3522 {
3523 return CL_INVALID_VALUE;
3524 }
3525
3526 return CL_SUCCESS;
3527 }
3528
3529 cl_int ValidateEnqueueMarkerWithWaitList(cl_command_queue command_queue,
3530 cl_uint num_events_in_wait_list,
3531 const cl_event *event_wait_list,
3532 const cl_event *event)
3533 {
3534 ANGLE_VALIDATE(ValidateCommandQueueAndEventWaitList(command_queue, false,
3535 num_events_in_wait_list, event_wait_list));
3536 if (!command_queue->cast<CommandQueue>().getContext().getPlatform().isVersionOrNewer(1u, 2u))
3537 {
3538 return CL_INVALID_COMMAND_QUEUE;
3539 }
3540 return CL_SUCCESS;
3541 }
3542
3543 cl_int ValidateEnqueueBarrierWithWaitList(cl_command_queue command_queue,
3544 cl_uint num_events_in_wait_list,
3545 const cl_event *event_wait_list,
3546 const cl_event *event)
3547 {
3548 ANGLE_VALIDATE(ValidateCommandQueueAndEventWaitList(command_queue, false,
3549 num_events_in_wait_list, event_wait_list));
3550 if (!command_queue->cast<CommandQueue>().getContext().getPlatform().isVersionOrNewer(1u, 2u))
3551 {
3552 return CL_INVALID_COMMAND_QUEUE;
3553 }
3554 return CL_SUCCESS;
3555 }
3556
3557 cl_int ValidateGetExtensionFunctionAddressForPlatform(cl_platform_id platform,
3558 const char *func_name)
3559 {
3560 if (!Platform::IsValid(platform) || func_name == nullptr || *func_name == '\0')
3561 {
3562 return CL_INVALID_VALUE;
3563 }
3564 return CL_SUCCESS;
3565 }
3566
3567 // CL 2.0
3568 cl_int ValidateCreateCommandQueueWithProperties(cl_context context,
3569 cl_device_id device,
3570 const cl_queue_properties *properties)
3571 {
3572 // CL_INVALID_CONTEXT if context is not a valid context.
3573 if (!Context::IsValidAndVersionOrNewer(context, 2u, 0u))
3574 {
3575 return CL_INVALID_CONTEXT;
3576 }
3577
3578 // CL_INVALID_DEVICE if device is not a valid device or is not associated with context.
3579 if (!context->cast<Context>().hasDevice(device) ||
3580 !device->cast<Device>().isVersionOrNewer(2u, 0u))
3581 {
3582 return CL_INVALID_DEVICE;
3583 }
3584
3585 // CL_INVALID_VALUE if values specified in properties are not valid.
3586 if (properties != nullptr)
3587 {
3588 bool isQueueOnDevice = false;
3589 bool hasQueueSize = false;
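// properties is a zero-terminated list of alternating property names and values;
// each case below consumes the value that follows its name.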
3590 while (*properties != 0)
3591 {
3592 switch (*properties++)
3593 {
3594 case CL_QUEUE_PROPERTIES:
3595 {
3596 const CommandQueueProperties props(*properties++);
3597 const CommandQueueProperties validProps(
3598 CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_PROFILING_ENABLE |
3599 CL_QUEUE_ON_DEVICE | CL_QUEUE_ON_DEVICE_DEFAULT);
3600 if (props.hasOtherBitsThan(validProps) ||
3601 // If CL_QUEUE_ON_DEVICE is set, CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE
3602 // must also be set.
3603 (props.intersects(CL_QUEUE_ON_DEVICE) &&
3604 !props.intersects(CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE)) ||
3605 // CL_QUEUE_ON_DEVICE_DEFAULT can only be used with CL_QUEUE_ON_DEVICE.
3606 (props.intersects(CL_QUEUE_ON_DEVICE_DEFAULT) &&
3607 !props.intersects(CL_QUEUE_ON_DEVICE)))
3608 {
3609 return CL_INVALID_VALUE;
3610 }
3611 isQueueOnDevice = props.intersects(CL_QUEUE_ON_DEVICE);
3612 break;
3613 }
3614 case CL_QUEUE_SIZE:
3615 {
3616 // CL_QUEUE_SIZE must be a value <= CL_DEVICE_QUEUE_ON_DEVICE_MAX_SIZE.
3617 if (*properties++ > device->cast<Device>().getInfo().queueOnDeviceMaxSize)
3618 {
3619 return CL_INVALID_VALUE;
3620 }
3621 hasQueueSize = true;
3622 break;
3623 }
3624 default:
3625 return CL_INVALID_VALUE;
3626 }
3627 }
3628
3629 // CL_QUEUE_SIZE can only be specified if CL_QUEUE_ON_DEVICE is set in CL_QUEUE_PROPERTIES.
3630 if (hasQueueSize && !isQueueOnDevice)
3631 {
3632 return CL_INVALID_VALUE;
3633 }
3634 }
3635
3636 return CL_SUCCESS;
3637 }
3638
3639 cl_int ValidateCreatePipe(cl_context context,
3640 MemFlags flags,
3641 cl_uint pipe_packet_size,
3642 cl_uint pipe_max_packets,
3643 const cl_pipe_properties *properties)
3644 {
3645 return CL_SUCCESS;
3646 }
3647
3648 cl_int ValidateGetPipeInfo(cl_mem pipe,
3649 PipeInfo param_name,
3650 size_t param_value_size,
3651 const void *param_value,
3652 const size_t *param_value_size_ret)
3653 {
3654 return CL_SUCCESS;
3655 }
3656
3657 cl_int ValidateSVMAlloc(cl_context context, SVM_MemFlags flags, size_t size, cl_uint alignment)
3658 {
3659 return CL_SUCCESS;
3660 }
3661
3662 cl_int ValidateSVMFree(cl_context context, const void *svm_pointer)
3663 {
3664 return CL_SUCCESS;
3665 }
3666
3667 cl_int ValidateCreateSamplerWithProperties(cl_context context,
3668 const cl_sampler_properties *sampler_properties)
3669 {
3670 // CL_INVALID_CONTEXT if context is not a valid context.
3671 if (!Context::IsValidAndVersionOrNewer(context, 2u, 0u))
3672 {
3673 return CL_INVALID_CONTEXT;
3674 }
3675
3676 // CL_INVALID_VALUE if the property name in sampler_properties is not a supported property name,
3677 // if the value specified for a supported property name is not valid,
3678 // or if the same property name is specified more than once.
3679 if (sampler_properties != nullptr)
3680 {
3681 bool hasNormalizedCoords = false;
3682 bool hasAddressingMode = false;
3683 bool hasFilterMode = false;
3684 const cl_sampler_properties *propIt = sampler_properties;
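// sampler_properties is a zero-terminated list of alternating property names and values;
// each case advances propIt past the value it validates.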
3685 while (*propIt != 0)
3686 {
3687 switch (*propIt++)
3688 {
3689 case CL_SAMPLER_NORMALIZED_COORDS:
3690 if (hasNormalizedCoords || (*propIt != CL_FALSE && *propIt != CL_TRUE))
3691 {
3692 return CL_INVALID_VALUE;
3693 }
3694 hasNormalizedCoords = true;
3695 ++propIt;
3696 break;
3697 case CL_SAMPLER_ADDRESSING_MODE:
3698 if (hasAddressingMode || FromCLenum<AddressingMode>(static_cast<CLenum>(
3699 *propIt++)) == AddressingMode::InvalidEnum)
3700 {
3701 return CL_INVALID_VALUE;
3702 }
3703 hasAddressingMode = true;
3704 break;
3705 case CL_SAMPLER_FILTER_MODE:
3706 if (hasFilterMode || FromCLenum<FilterMode>(static_cast<CLenum>(*propIt++)) ==
3707 FilterMode::InvalidEnum)
3708 {
3709 return CL_INVALID_VALUE;
3710 }
3711 hasFilterMode = true;
3712 break;
3713 default:
3714 return CL_INVALID_VALUE;
3715 }
3716 }
3717 }
3718
3719 // CL_INVALID_OPERATION if images are not supported by any device associated with context.
3720 if (!context->cast<Context>().supportsImages())
3721 {
3722 return CL_INVALID_OPERATION;
3723 }
3724
3725 return CL_SUCCESS;
3726 }
3727
3728 cl_int ValidateSetKernelArgSVMPointer(cl_kernel kernel, cl_uint arg_index, const void *arg_value)
3729 {
3730 return CL_SUCCESS;
3731 }
3732
3733 cl_int ValidateSetKernelExecInfo(cl_kernel kernel,
3734 KernelExecInfo param_name,
3735 size_t param_value_size,
3736 const void *param_value)
3737 {
3738 return CL_SUCCESS;
3739 }
3740
3741 cl_int ValidateEnqueueSVMFree(cl_command_queue command_queue,
3742 cl_uint num_svm_pointers,
3743 void *const svm_pointers[],
3744 void(CL_CALLBACK *pfn_free_func)(cl_command_queue queue,
3745 cl_uint num_svm_pointers,
3746 void *svm_pointers[],
3747 void *user_data),
3748 const void *user_data,
3749 cl_uint num_events_in_wait_list,
3750 const cl_event *event_wait_list,
3751 const cl_event *event)
3752 {
3753 return CL_SUCCESS;
3754 }
3755
3756 cl_int ValidateEnqueueSVMMemcpy(cl_command_queue command_queue,
3757 cl_bool blocking_copy,
3758 const void *dst_ptr,
3759 const void *src_ptr,
3760 size_t size,
3761 cl_uint num_events_in_wait_list,
3762 const cl_event *event_wait_list,
3763 const cl_event *event)
3764 {
3765 return CL_SUCCESS;
3766 }
3767
3768 cl_int ValidateEnqueueSVMMemFill(cl_command_queue command_queue,
3769 const void *svm_ptr,
3770 const void *pattern,
3771 size_t pattern_size,
3772 size_t size,
3773 cl_uint num_events_in_wait_list,
3774 const cl_event *event_wait_list,
3775 const cl_event *event)
3776 {
3777 return CL_SUCCESS;
3778 }
3779
3780 cl_int ValidateEnqueueSVMMap(cl_command_queue command_queue,
3781 cl_bool blocking_map,
3782 MapFlags flags,
3783 const void *svm_ptr,
3784 size_t size,
3785 cl_uint num_events_in_wait_list,
3786 const cl_event *event_wait_list,
3787 const cl_event *event)
3788 {
3789 return CL_SUCCESS;
3790 }
3791
3792 cl_int ValidateEnqueueSVMUnmap(cl_command_queue command_queue,
3793 const void *svm_ptr,
3794 cl_uint num_events_in_wait_list,
3795 const cl_event *event_wait_list,
3796 const cl_event *event)
3797 {
3798 return CL_SUCCESS;
3799 }
3800
3801 // CL 2.1
3802 cl_int ValidateSetDefaultDeviceCommandQueue(cl_context context,
3803 cl_device_id device,
3804 cl_command_queue command_queue)
3805 {
3806 return CL_SUCCESS;
3807 }
3808
3809 cl_int ValidateGetDeviceAndHostTimer(cl_device_id device,
3810 const cl_ulong *device_timestamp,
3811 const cl_ulong *host_timestamp)
3812 {
3813 return CL_SUCCESS;
3814 }
3815
3816 cl_int ValidateGetHostTimer(cl_device_id device, const cl_ulong *host_timestamp)
3817 {
3818 return CL_SUCCESS;
3819 }
3820
3821 cl_int ValidateCreateProgramWithIL(cl_context context, const void *il, size_t length)
3822 {
3823 // CL_INVALID_CONTEXT if context is not a valid context.
3824 if (!Context::IsValidAndVersionOrNewer(context, 2u, 1u))
3825 {
3826 return CL_INVALID_CONTEXT;
3827 }
3828 const Context &ctx = context->cast<Context>();
3829
3830 // CL_INVALID_OPERATION if no devices in context support intermediate language programs.
3831 if (!ctx.supportsIL())
3832 {
3833 return CL_INVALID_OPERATION;
3834 }
3835
3836 // CL_INVALID_VALUE if il is NULL or if length is zero.
3837 if (il == nullptr || length == 0u)
3838 {
3839 return CL_INVALID_VALUE;
3840 }
3841
3842 return CL_SUCCESS;
3843 }
3844
3845 cl_int ValidateCloneKernel(cl_kernel source_kernel)
3846 {
3847 if (!Kernel::IsValid(source_kernel))
3848 {
3849 return CL_INVALID_KERNEL;
3850 }
3851 return CL_SUCCESS;
3852 }
3853
3854 cl_int ValidateGetKernelSubGroupInfo(cl_kernel kernel,
3855 cl_device_id device,
3856 KernelSubGroupInfo param_name,
3857 size_t input_value_size,
3858 const void *input_value,
3859 size_t param_value_size,
3860 const void *param_value,
3861 const size_t *param_value_size_ret)
3862 {
3863 return CL_SUCCESS;
3864 }
3865
3866 cl_int ValidateEnqueueSVMMigrateMem(cl_command_queue command_queue,
3867 cl_uint num_svm_pointers,
3868 const void **svm_pointers,
3869 const size_t *sizes,
3870 MemMigrationFlags flags,
3871 cl_uint num_events_in_wait_list,
3872 const cl_event *event_wait_list,
3873 const cl_event *event)
3874 {
3875 return CL_SUCCESS;
3876 }
3877
3878 // CL 2.2
3879 cl_int ValidateSetProgramReleaseCallback(cl_program program,
3880 void(CL_CALLBACK *pfn_notify)(cl_program program,
3881 void *user_data),
3882 const void *user_data)
3883 {
3884 return CL_SUCCESS;
3885 }
3886
3887 cl_int ValidateSetProgramSpecializationConstant(cl_program program,
3888 cl_uint spec_id,
3889 size_t spec_size,
3890 const void *spec_value)
3891 {
3892 return CL_SUCCESS;
3893 }
3894
3895 // CL 3.0
3896 cl_int ValidateSetContextDestructorCallback(cl_context context,
3897 void(CL_CALLBACK *pfn_notify)(cl_context context,
3898 void *user_data),
3899 const void *user_data)
3900 {
3901 return CL_SUCCESS;
3902 }
3903
3904 cl_int ValidateCreateBufferWithProperties(cl_context context,
3905 const cl_mem_properties *properties,
3906 MemFlags flags,
3907 size_t size,
3908 const void *host_ptr)
3909 {
3910 ANGLE_VALIDATE(ValidateCreateBuffer(context, flags, size, host_ptr));
3911
3912 // CL_INVALID_CONTEXT if context is not a valid context.
3913 if (!context->cast<Context>().getPlatform().isVersionOrNewer(3u, 0u))
3914 {
3915 return CL_INVALID_CONTEXT;
3916 }
3917
3918 // CL_INVALID_PROPERTY if a property name in properties is not a supported property name,
3919 // if the value specified for a supported property name is not valid,
3920 // or if the same property name is specified more than once.
3921 if (!ValidateMemoryProperties(properties))
3922 {
3923 return CL_INVALID_PROPERTY;
3924 }
3925
3926 return CL_SUCCESS;
3927 }
3928
3929 cl_int ValidateCreateImageWithProperties(cl_context context,
3930 const cl_mem_properties *properties,
3931 MemFlags flags,
3932 const cl_image_format *image_format,
3933 const cl_image_desc *image_desc,
3934 const void *host_ptr)
3935 {
3936 ANGLE_VALIDATE(ValidateCreateImage(context, flags, image_format, image_desc, host_ptr));
3937
3938 // CL_INVALID_CONTEXT if context is not a valid context.
3939 if (!context->cast<Context>().getPlatform().isVersionOrNewer(3u, 0u))
3940 {
3941 return CL_INVALID_CONTEXT;
3942 }
3943
3944 // CL_INVALID_PROPERTY if a property name in properties is not a supported property name,
3945 // if the value specified for a supported property name is not valid,
3946 // or if the same property name is specified more than once.
3947 if (!ValidateMemoryProperties(properties))
3948 {
3949 return CL_INVALID_PROPERTY;
3950 }
3951
3952 return CL_SUCCESS;
3953 }
3954
3955 // cl_khr_icd
3956 cl_int ValidateIcdGetPlatformIDsKHR(cl_uint num_entries,
3957 const cl_platform_id *platforms,
3958 const cl_uint *num_platforms)
3959 {
3960 if ((num_entries == 0u && platforms != nullptr) ||
3961 (platforms == nullptr && num_platforms == nullptr))
3962 {
3963 return CL_INVALID_VALUE;
3964 }
3965 return CL_SUCCESS;
3966 }
3967
3968 } // namespace cl
3969