OpenCL C++ Bindings
cl2.hpp
1 /*******************************************************************************
2  * Copyright (c) 2008-2016 The Khronos Group Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and/or associated documentation files (the
6  * "Materials"), to deal in the Materials without restriction, including
7  * without limitation the rights to use, copy, modify, merge, publish,
8  * distribute, sublicense, and/or sell copies of the Materials, and to
9  * permit persons to whom the Materials are furnished to do so, subject to
10  * the following conditions:
11  *
12  * The above copyright notice and this permission notice shall be included
13  * in all copies or substantial portions of the Materials.
14  *
15  * MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS
16  * KHRONOS STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS
17  * SPECIFICATIONS AND HEADER INFORMATION ARE LOCATED AT
18  * https://www.khronos.org/registry/
19  *
20  * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
21  * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
22  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
23  * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
24  * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
25  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
26  * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
27  ******************************************************************************/
28 
393 #ifndef CL_HPP_
394 #define CL_HPP_
395 
396 /* Handle deprecated preprocessor definitions. In each case, we only check for
397  * the old name if the new name is not defined, so that user code can define
398  * both and hence work with either version of the bindings.
399  */
400 #if !defined(CL_HPP_USE_DX_INTEROP) && defined(USE_DX_INTEROP)
401 # pragma message("cl2.hpp: USE_DX_INTEROP is deprecated. Define CL_HPP_USE_DX_INTEROP instead")
402 # define CL_HPP_USE_DX_INTEROP
403 #endif
404 #if !defined(CL_HPP_USE_CL_DEVICE_FISSION) && defined(USE_CL_DEVICE_FISSION)
405 # pragma message("cl2.hpp: USE_CL_DEVICE_FISSION is deprecated. Define CL_HPP_USE_CL_DEVICE_FISSION instead")
406 # define CL_HPP_USE_CL_DEVICE_FISSION
407 #endif
408 #if !defined(CL_HPP_ENABLE_EXCEPTIONS) && defined(__CL_ENABLE_EXCEPTIONS)
409 # pragma message("cl2.hpp: __CL_ENABLE_EXCEPTIONS is deprecated. Define CL_HPP_ENABLE_EXCEPTIONS instead")
410 # define CL_HPP_ENABLE_EXCEPTIONS
411 #endif
412 #if !defined(CL_HPP_NO_STD_VECTOR) && defined(__NO_STD_VECTOR)
413 # pragma message("cl2.hpp: __NO_STD_VECTOR is deprecated. Define CL_HPP_NO_STD_VECTOR instead")
414 # define CL_HPP_NO_STD_VECTOR
415 #endif
416 #if !defined(CL_HPP_NO_STD_STRING) && defined(__NO_STD_STRING)
417 # pragma message("cl2.hpp: __NO_STD_STRING is deprecated. Define CL_HPP_NO_STD_STRING instead")
418 # define CL_HPP_NO_STD_STRING
419 #endif
420 #if defined(VECTOR_CLASS)
421 # pragma message("cl2.hpp: VECTOR_CLASS is deprecated. Alias cl::vector instead")
422 #endif
423 #if defined(STRING_CLASS)
424 # pragma message("cl2.hpp: STRING_CLASS is deprecated. Alias cl::string instead.")
425 #endif
426 #if !defined(CL_HPP_USER_OVERRIDE_ERROR_STRINGS) && defined(__CL_USER_OVERRIDE_ERROR_STRINGS)
427 # pragma message("cl2.hpp: __CL_USER_OVERRIDE_ERROR_STRINGS is deprecated. Define CL_HPP_USER_OVERRIDE_ERROR_STRINGS instead")
428 # define CL_HPP_USER_OVERRIDE_ERROR_STRINGS
429 #endif
430 
431 /* Warn about features that are no longer supported
432  */
433 #if defined(__USE_DEV_VECTOR)
434 # pragma message("cl2.hpp: __USE_DEV_VECTOR is no longer supported. Expect compilation errors")
435 #endif
436 #if defined(__USE_DEV_STRING)
437 # pragma message("cl2.hpp: __USE_DEV_STRING is no longer supported. Expect compilation errors")
438 #endif
439 
440 /* Detect which version to target */
441 #if !defined(CL_HPP_TARGET_OPENCL_VERSION)
442 # pragma message("cl2.hpp: CL_HPP_TARGET_OPENCL_VERSION is not defined. It will default to 200 (OpenCL 2.0)")
443 # define CL_HPP_TARGET_OPENCL_VERSION 200
444 #endif
445 #if CL_HPP_TARGET_OPENCL_VERSION != 100 && CL_HPP_TARGET_OPENCL_VERSION != 110 && CL_HPP_TARGET_OPENCL_VERSION != 120 && CL_HPP_TARGET_OPENCL_VERSION != 200
446 # pragma message("cl2.hpp: CL_HPP_TARGET_OPENCL_VERSION is not a valid value (100, 110, 120 or 200). It will be set to 200")
447 # undef CL_HPP_TARGET_OPENCL_VERSION
448 # define CL_HPP_TARGET_OPENCL_VERSION 200
449 #endif
450 
451 #if !defined(CL_HPP_MINIMUM_OPENCL_VERSION)
452 # define CL_HPP_MINIMUM_OPENCL_VERSION 200
453 #endif
454 #if CL_HPP_MINIMUM_OPENCL_VERSION != 100 && CL_HPP_MINIMUM_OPENCL_VERSION != 110 && CL_HPP_MINIMUM_OPENCL_VERSION != 120 && CL_HPP_MINIMUM_OPENCL_VERSION != 200
455 # pragma message("cl2.hpp: CL_HPP_MINIMUM_OPENCL_VERSION is not a valid value (100, 110, 120 or 200). It will be set to 100")
456 # undef CL_HPP_MINIMUM_OPENCL_VERSION
457 # define CL_HPP_MINIMUM_OPENCL_VERSION 100
458 #endif
459 #if CL_HPP_MINIMUM_OPENCL_VERSION > CL_HPP_TARGET_OPENCL_VERSION
460 # error "CL_HPP_MINIMUM_OPENCL_VERSION must not be greater than CL_HPP_TARGET_OPENCL_VERSION"
461 #endif
462 
463 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 100 && !defined(CL_USE_DEPRECATED_OPENCL_1_0_APIS)
464 # define CL_USE_DEPRECATED_OPENCL_1_0_APIS
465 #endif
466 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 110 && !defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
467 # define CL_USE_DEPRECATED_OPENCL_1_1_APIS
468 #endif
469 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 120 && !defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
470 # define CL_USE_DEPRECATED_OPENCL_1_2_APIS
471 #endif
472 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 200 && !defined(CL_USE_DEPRECATED_OPENCL_2_0_APIS)
473 # define CL_USE_DEPRECATED_OPENCL_2_0_APIS
474 #endif
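A minimal configuration sketch (user code, not part of the header): the version macros above are meant to be defined before cl2.hpp is included; the values here are illustrative only.

    // In the user's translation unit, before the include:
    #define CL_HPP_ENABLE_EXCEPTIONS
    #define CL_HPP_MINIMUM_OPENCL_VERSION 110   // accept OpenCL 1.1 platforms at run time
    #define CL_HPP_TARGET_OPENCL_VERSION 120    // compile against the OpenCL 1.2 API
    #include <CL/cl2.hpp>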
475 
476 #ifdef _WIN32
477 
478 #include <malloc.h>
479 
480 #if defined(CL_HPP_USE_DX_INTEROP)
481 #include <CL/cl_d3d10.h>
482 #include <CL/cl_dx9_media_sharing.h>
483 #endif
484 #endif // _WIN32
485 
486 #if defined(_MSC_VER)
487 #include <intrin.h>
488 #endif // _MSC_VER
489 
490  // Check for a valid C++ version
491 
492 // Both checks are needed here because __cplusplus is not updated to its
493 // C++11 value by Visual Studio
494 #if (!defined(_MSC_VER) && __cplusplus < 201103L) || (defined(_MSC_VER) && _MSC_VER < 1700)
495 #error Visual Studio 2013 or another C++11-supporting compiler required
496 #endif
497 
498 //
499 #if defined(CL_HPP_USE_CL_DEVICE_FISSION) || defined(CL_HPP_USE_CL_SUB_GROUPS_KHR)
500 #include <CL/cl_ext.h>
501 #endif
502 
503 #if defined(__APPLE__) || defined(__MACOSX)
504 #include <OpenCL/opencl.h>
505 #else
506 #include <CL/opencl.h>
507 #endif // !__APPLE__
508 
509 #if (__cplusplus >= 201103L)
510 #define CL_HPP_NOEXCEPT_ noexcept
511 #else
512 #define CL_HPP_NOEXCEPT_
513 #endif
514 
515 #if defined(_MSC_VER)
516 # define CL_HPP_DEFINE_STATIC_MEMBER_ __declspec(selectany)
517 #else
518 # define CL_HPP_DEFINE_STATIC_MEMBER_ __attribute__((weak))
519 #endif // !_MSC_VER
520 
521 // Define deprecated prefixes and suffixes to ensure compilation
522 // in case they are not pre-defined
523 #if !defined(CL_EXT_PREFIX__VERSION_1_1_DEPRECATED)
524 #define CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
525 #endif // #if !defined(CL_EXT_PREFIX__VERSION_1_1_DEPRECATED)
526 #if !defined(CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED)
527 #define CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
528 #endif // #if !defined(CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED)
529 
530 #if !defined(CL_EXT_PREFIX__VERSION_1_2_DEPRECATED)
531 #define CL_EXT_PREFIX__VERSION_1_2_DEPRECATED
532 #endif // #if !defined(CL_EXT_PREFIX__VERSION_1_2_DEPRECATED)
533 #if !defined(CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED)
534 #define CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED
535 #endif // #if !defined(CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED)
536 
537 #if !defined(CL_CALLBACK)
538 #define CL_CALLBACK
539 #endif //CL_CALLBACK
540 
541 #include <utility>
542 #include <limits>
543 #include <iterator>
544 #include <mutex>
545 #include <cstring>
546 #include <functional>
547 
548 
549 // Define a size_type to represent a correctly resolved size_t
550 #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
551 namespace cl {
552  using size_type = ::size_t;
553 } // namespace cl
554 #else // #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
555 namespace cl {
556  using size_type = size_t;
557 } // namespace cl
558 #endif // #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
559 
560 
561 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
562 #include <exception>
563 #endif // #if defined(CL_HPP_ENABLE_EXCEPTIONS)
564 
565 #if !defined(CL_HPP_NO_STD_VECTOR)
566 #include <vector>
567 namespace cl {
568  template < class T, class Alloc = std::allocator<T> >
569  using vector = std::vector<T, Alloc>;
570 } // namespace cl
571 #endif // #if !defined(CL_HPP_NO_STD_VECTOR)
572 
573 #if !defined(CL_HPP_NO_STD_STRING)
574 #include <string>
575 namespace cl {
576  using string = std::string;
577 } // namespace cl
578 #endif // #if !defined(CL_HPP_NO_STD_STRING)
579 
580 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
581 
582 #if !defined(CL_HPP_NO_STD_UNIQUE_PTR)
583 #include <memory>
584 namespace cl {
585  // Alias std::unique_ptr as cl::pointer for internal use,
586  // so that users can substitute their own smart pointer type
587  template<class T, class D>
588  using pointer = std::unique_ptr<T, D>;
589 } // namespace cl
590 #endif
591 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
592 #if !defined(CL_HPP_NO_STD_ARRAY)
593 #include <array>
594 namespace cl {
595  template < class T, size_type N >
596  using array = std::array<T, N>;
597 } // namespace cl
598 #endif // #if !defined(CL_HPP_NO_STD_ARRAY)
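The CL_HPP_NO_STD_* opt-outs above leave the corresponding cl:: alias undefined, so a user who disables a standard type is expected to supply an equivalent alias before including the header. A sketch under that assumption, where my::string is a hypothetical std::string-compatible class:

    // User code, before #include <CL/cl2.hpp>:
    #define CL_HPP_NO_STD_STRING        // cl2.hpp will not alias std::string
    #include "my_string.h"              // hypothetical replacement string class
    namespace cl {
        using string = my::string;      // must provide the std::string operations cl2.hpp relies on
    }
    #include <CL/cl2.hpp>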
599 
600 // Define the old size_t interface class to allow backward-compatible
601 // use of code written against the original cl.hpp
602 #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
603 namespace cl {
604  namespace compatibility {
609  template <int N>
610  class size_t
611  {
612  private:
613  size_type data_[N];
614 
615  public:
617  size_t()
618  {
619  for (int i = 0; i < N; ++i) {
620  data_[i] = 0;
621  }
622  }
623 
624  size_t(const array<size_type, N> &rhs)
625  {
626  for (int i = 0; i < N; ++i) {
627  data_[i] = rhs[i];
628  }
629  }
630 
631  size_type& operator[](int index)
632  {
633  return data_[index];
634  }
635 
636  const size_type& operator[](int index) const
637  {
638  return data_[index];
639  }
640 
642  operator size_type* () { return data_; }
643 
645  operator const size_type* () const { return data_; }
646 
647  operator array<size_type, N>() const
648  {
649  array<size_type, N> ret;
650 
651  for (int i = 0; i < N; ++i) {
652  ret[i] = data_[i];
653  }
654  return ret;
655  }
656  };
657  } // namespace compatibility
658 
659  template<int N>
660  using size_t = compatibility::size_t<N>;
661 } // namespace cl
662 #endif // #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
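With CL_HPP_ENABLE_SIZE_T_COMPATIBILITY defined, code written against the old cl.hpp size_t<N> wrapper keeps compiling, because the class above converts to and from the new array form. A small sketch:

    cl::size_t<3> region;                          // old cl.hpp style
    region[0] = 64; region[1] = 64; region[2] = 1;
    cl::array<cl::size_type, 3> modern = region;   // implicit conversion to the new representation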
663 
664 // Helper alias to avoid confusing the macros
665 namespace cl {
666  namespace detail {
667  using size_t_array = array<size_type, 3>;
668  } // namespace detail
669 } // namespace cl
670 
671 
677 namespace cl {
678  class Memory;
679 
680 #define CL_HPP_INIT_CL_EXT_FCN_PTR_(name) \
681  if (!pfn_##name) { \
682  pfn_##name = (PFN_##name) \
683  clGetExtensionFunctionAddress(#name); \
684  if (!pfn_##name) { \
685  } \
686  }
687 
688 #define CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, name) \
689  if (!pfn_##name) { \
690  pfn_##name = (PFN_##name) \
691  clGetExtensionFunctionAddressForPlatform(platform, #name); \
692  if (!pfn_##name) { \
693  } \
694  }
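These macros lazily resolve an extension entry point and cache it in a pfn_<name> pointer. A sketch of what CL_HPP_INIT_CL_EXT_FCN_PTR_ expands to, using clIcdGetPlatformIDsKHR as an illustrative extension and assuming a PFN_clIcdGetPlatformIDsKHR typedef and a pfn_clIcdGetPlatformIDsKHR variable are in scope:

    if (!pfn_clIcdGetPlatformIDsKHR) {
        pfn_clIcdGetPlatformIDsKHR = (PFN_clIcdGetPlatformIDsKHR)
            clGetExtensionFunctionAddress("clIcdGetPlatformIDsKHR");
        if (!pfn_clIcdGetPlatformIDsKHR) {
            // lookup failed; callers must check the pointer before using it
        }
    }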
695 
696  class Program;
697  class Device;
698  class Context;
699  class CommandQueue;
700  class DeviceCommandQueue;
701  class Memory;
702  class Buffer;
703  class Pipe;
704 
705 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
706 
710  class Error : public std::exception
711  {
712  private:
713  cl_int err_;
714  const char * errStr_;
715  public:
725  Error(cl_int err, const char * errStr = NULL) : err_(err), errStr_(errStr)
726  {}
727 
728  ~Error() throw() {}
729 
734  virtual const char * what() const throw ()
735  {
736  if (errStr_ == NULL) {
737  return "empty";
738  }
739  else {
740  return errStr_;
741  }
742  }
743 
748  cl_int err(void) const { return err_; }
749  };
750 #define CL_HPP_ERR_STR_(x) #x
751 #else
752 #define CL_HPP_ERR_STR_(x) NULL
753 #endif // CL_HPP_ENABLE_EXCEPTIONS
754 
755 
756 namespace detail
757 {
758 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
759 static inline cl_int errHandler (
760  cl_int err,
761  const char * errStr = NULL)
762 {
763  if (err != CL_SUCCESS) {
764  throw Error(err, errStr);
765  }
766  return err;
767 }
768 #else
769 static inline cl_int errHandler (cl_int err, const char * errStr = NULL)
770 {
771  (void) errStr; // suppress unused variable warning
772  return err;
773 }
774 #endif // CL_HPP_ENABLE_EXCEPTIONS
775 }
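Seen from application code, errHandler means the same call either throws or returns an error code, depending on CL_HPP_ENABLE_EXCEPTIONS. A usage sketch with exceptions enabled:

    #include <iostream>
    // CL_HPP_ENABLE_EXCEPTIONS defined before including cl2.hpp
    try {
        cl::Context context(CL_DEVICE_TYPE_GPU);     // throws cl::Error on failure
    } catch (const cl::Error &e) {
        std::cerr << e.what() << " (" << e.err() << ")" << std::endl;
    }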
776 
777 
778 
780 #if !defined(CL_HPP_USER_OVERRIDE_ERROR_STRINGS)
781 #define __GET_DEVICE_INFO_ERR CL_HPP_ERR_STR_(clGetDeviceInfo)
782 #define __GET_PLATFORM_INFO_ERR CL_HPP_ERR_STR_(clGetPlatformInfo)
783 #define __GET_DEVICE_IDS_ERR CL_HPP_ERR_STR_(clGetDeviceIDs)
784 #define __GET_PLATFORM_IDS_ERR CL_HPP_ERR_STR_(clGetPlatformIDs)
785 #define __GET_CONTEXT_INFO_ERR CL_HPP_ERR_STR_(clGetContextInfo)
786 #define __GET_EVENT_INFO_ERR CL_HPP_ERR_STR_(clGetEventInfo)
787 #define __GET_EVENT_PROFILE_INFO_ERR CL_HPP_ERR_STR_(clGetEventProfilingInfo)
788 #define __GET_MEM_OBJECT_INFO_ERR CL_HPP_ERR_STR_(clGetMemObjectInfo)
789 #define __GET_IMAGE_INFO_ERR CL_HPP_ERR_STR_(clGetImageInfo)
790 #define __GET_SAMPLER_INFO_ERR CL_HPP_ERR_STR_(clGetSamplerInfo)
791 #define __GET_KERNEL_INFO_ERR CL_HPP_ERR_STR_(clGetKernelInfo)
792 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
793 #define __GET_KERNEL_ARG_INFO_ERR CL_HPP_ERR_STR_(clGetKernelArgInfo)
794 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
795 #define __GET_KERNEL_WORK_GROUP_INFO_ERR CL_HPP_ERR_STR_(clGetKernelWorkGroupInfo)
796 #define __GET_PROGRAM_INFO_ERR CL_HPP_ERR_STR_(clGetProgramInfo)
797 #define __GET_PROGRAM_BUILD_INFO_ERR CL_HPP_ERR_STR_(clGetProgramBuildInfo)
798 #define __GET_COMMAND_QUEUE_INFO_ERR CL_HPP_ERR_STR_(clGetCommandQueueInfo)
799 
800 #define __CREATE_CONTEXT_ERR CL_HPP_ERR_STR_(clCreateContext)
801 #define __CREATE_CONTEXT_FROM_TYPE_ERR CL_HPP_ERR_STR_(clCreateContextFromType)
802 #define __GET_SUPPORTED_IMAGE_FORMATS_ERR CL_HPP_ERR_STR_(clGetSupportedImageFormats)
803 
804 #define __CREATE_BUFFER_ERR CL_HPP_ERR_STR_(clCreateBuffer)
805 #define __COPY_ERR CL_HPP_ERR_STR_(cl::copy)
806 #define __CREATE_SUBBUFFER_ERR CL_HPP_ERR_STR_(clCreateSubBuffer)
807 #define __CREATE_GL_BUFFER_ERR CL_HPP_ERR_STR_(clCreateFromGLBuffer)
808 #define __CREATE_GL_RENDER_BUFFER_ERR CL_HPP_ERR_STR_(clCreateFromGLRenderbuffer)
809 #define __GET_GL_OBJECT_INFO_ERR CL_HPP_ERR_STR_(clGetGLObjectInfo)
810 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
811 #define __CREATE_IMAGE_ERR CL_HPP_ERR_STR_(clCreateImage)
812 #define __CREATE_GL_TEXTURE_ERR CL_HPP_ERR_STR_(clCreateFromGLTexture)
813 #define __IMAGE_DIMENSION_ERR CL_HPP_ERR_STR_(Incorrect image dimensions)
814 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
815 #define __SET_MEM_OBJECT_DESTRUCTOR_CALLBACK_ERR CL_HPP_ERR_STR_(clSetMemObjectDestructorCallback)
816 
817 #define __CREATE_USER_EVENT_ERR CL_HPP_ERR_STR_(clCreateUserEvent)
818 #define __SET_USER_EVENT_STATUS_ERR CL_HPP_ERR_STR_(clSetUserEventStatus)
819 #define __SET_EVENT_CALLBACK_ERR CL_HPP_ERR_STR_(clSetEventCallback)
820 #define __WAIT_FOR_EVENTS_ERR CL_HPP_ERR_STR_(clWaitForEvents)
821 
822 #define __CREATE_KERNEL_ERR CL_HPP_ERR_STR_(clCreateKernel)
823 #define __SET_KERNEL_ARGS_ERR CL_HPP_ERR_STR_(clSetKernelArg)
824 #define __CREATE_PROGRAM_WITH_SOURCE_ERR CL_HPP_ERR_STR_(clCreateProgramWithSource)
825 #define __CREATE_PROGRAM_WITH_BINARY_ERR CL_HPP_ERR_STR_(clCreateProgramWithBinary)
826 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
827 #define __CREATE_PROGRAM_WITH_BUILT_IN_KERNELS_ERR CL_HPP_ERR_STR_(clCreateProgramWithBuiltInKernels)
828 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
829 #define __BUILD_PROGRAM_ERR CL_HPP_ERR_STR_(clBuildProgram)
830 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
831 #define __COMPILE_PROGRAM_ERR CL_HPP_ERR_STR_(clCompileProgram)
832 #define __LINK_PROGRAM_ERR CL_HPP_ERR_STR_(clLinkProgram)
833 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
834 #define __CREATE_KERNELS_IN_PROGRAM_ERR CL_HPP_ERR_STR_(clCreateKernelsInProgram)
835 
836 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
837 #define __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR CL_HPP_ERR_STR_(clCreateCommandQueueWithProperties)
838 #define __CREATE_SAMPLER_WITH_PROPERTIES_ERR CL_HPP_ERR_STR_(clCreateSamplerWithProperties)
839 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
840 #define __SET_COMMAND_QUEUE_PROPERTY_ERR CL_HPP_ERR_STR_(clSetCommandQueueProperty)
841 #define __ENQUEUE_READ_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueReadBuffer)
842 #define __ENQUEUE_READ_BUFFER_RECT_ERR CL_HPP_ERR_STR_(clEnqueueReadBufferRect)
843 #define __ENQUEUE_WRITE_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueWriteBuffer)
844 #define __ENQUEUE_WRITE_BUFFER_RECT_ERR CL_HPP_ERR_STR_(clEnqueueWriteBufferRect)
845 #define __ENQEUE_COPY_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueCopyBuffer)
846 #define __ENQEUE_COPY_BUFFER_RECT_ERR CL_HPP_ERR_STR_(clEnqueueCopyBufferRect)
847 #define __ENQUEUE_FILL_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueFillBuffer)
848 #define __ENQUEUE_READ_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueReadImage)
849 #define __ENQUEUE_WRITE_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueWriteImage)
850 #define __ENQUEUE_COPY_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueCopyImage)
851 #define __ENQUEUE_FILL_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueFillImage)
852 #define __ENQUEUE_COPY_IMAGE_TO_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueCopyImageToBuffer)
853 #define __ENQUEUE_COPY_BUFFER_TO_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueCopyBufferToImage)
854 #define __ENQUEUE_MAP_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueMapBuffer)
855 #define __ENQUEUE_MAP_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueMapImage)
856 #define __ENQUEUE_UNMAP_MEM_OBJECT_ERR CL_HPP_ERR_STR_(clEnqueueUnmapMemObject)
857 #define __ENQUEUE_NDRANGE_KERNEL_ERR CL_HPP_ERR_STR_(clEnqueueNDRangeKernel)
858 #define __ENQUEUE_NATIVE_KERNEL CL_HPP_ERR_STR_(clEnqueueNativeKernel)
859 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
860 #define __ENQUEUE_MIGRATE_MEM_OBJECTS_ERR CL_HPP_ERR_STR_(clEnqueueMigrateMemObjects)
861 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
862 
863 #define __ENQUEUE_ACQUIRE_GL_ERR CL_HPP_ERR_STR_(clEnqueueAcquireGLObjects)
864 #define __ENQUEUE_RELEASE_GL_ERR CL_HPP_ERR_STR_(clEnqueueReleaseGLObjects)
865 
866 #define __CREATE_PIPE_ERR CL_HPP_ERR_STR_(clCreatePipe)
867 #define __GET_PIPE_INFO_ERR CL_HPP_ERR_STR_(clGetPipeInfo)
868 
869 
870 #define __RETAIN_ERR CL_HPP_ERR_STR_(Retain Object)
871 #define __RELEASE_ERR CL_HPP_ERR_STR_(Release Object)
872 #define __FLUSH_ERR CL_HPP_ERR_STR_(clFlush)
873 #define __FINISH_ERR CL_HPP_ERR_STR_(clFinish)
874 #define __VECTOR_CAPACITY_ERR CL_HPP_ERR_STR_(Vector capacity error)
875 
879 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
880 #define __CREATE_SUB_DEVICES_ERR CL_HPP_ERR_STR_(clCreateSubDevices)
881 #else
882 #define __CREATE_SUB_DEVICES_ERR CL_HPP_ERR_STR_(clCreateSubDevicesEXT)
883 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
884 
888 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
889 #define __ENQUEUE_MARKER_ERR CL_HPP_ERR_STR_(clEnqueueMarker)
890 #define __ENQUEUE_WAIT_FOR_EVENTS_ERR CL_HPP_ERR_STR_(clEnqueueWaitForEvents)
891 #define __ENQUEUE_BARRIER_ERR CL_HPP_ERR_STR_(clEnqueueBarrier)
892 #define __UNLOAD_COMPILER_ERR CL_HPP_ERR_STR_(clUnloadCompiler)
893 #define __CREATE_GL_TEXTURE_2D_ERR CL_HPP_ERR_STR_(clCreateFromGLTexture2D)
894 #define __CREATE_GL_TEXTURE_3D_ERR CL_HPP_ERR_STR_(clCreateFromGLTexture3D)
895 #define __CREATE_IMAGE2D_ERR CL_HPP_ERR_STR_(clCreateImage2D)
896 #define __CREATE_IMAGE3D_ERR CL_HPP_ERR_STR_(clCreateImage3D)
897 #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
898 
902 #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
903 #define __CREATE_COMMAND_QUEUE_ERR CL_HPP_ERR_STR_(clCreateCommandQueue)
904 #define __ENQUEUE_TASK_ERR CL_HPP_ERR_STR_(clEnqueueTask)
905 #define __CREATE_SAMPLER_ERR CL_HPP_ERR_STR_(clCreateSampler)
906 #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
907 
911 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
912 #define __ENQUEUE_MARKER_WAIT_LIST_ERR CL_HPP_ERR_STR_(clEnqueueMarkerWithWaitList)
913 #define __ENQUEUE_BARRIER_WAIT_LIST_ERR CL_HPP_ERR_STR_(clEnqueueBarrierWithWaitList)
914 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
915 
916 #endif // CL_HPP_USER_OVERRIDE_ERROR_STRINGS
917 
919 
920 namespace detail {
921 
922 // Generic getInfoHelper. The final parameter is used to guide overload
923 // resolution: call sites pass an int, and this generic version declares the
924 // parameter as long, so any specialization that declares it as int is an
925 // exact match and is preferred wherever it applies.
926 template<typename Functor, typename T>
927 inline cl_int getInfoHelper(Functor f, cl_uint name, T* param, long)
928 {
929  return f(name, sizeof(T), param, NULL);
930 }
931 
932 // Specialized for getInfo<CL_PROGRAM_BINARIES>
933 // Assumes that the output vector was correctly resized on the way in
934 template <typename Func>
935 inline cl_int getInfoHelper(Func f, cl_uint name, vector<vector<unsigned char>>* param, int)
936 {
937  if (name != CL_PROGRAM_BINARIES) {
938  return CL_INVALID_VALUE;
939  }
940  if (param) {
941  // Create array of pointers, calculate total size and pass pointer array in
942  size_type numBinaries = param->size();
943  vector<unsigned char*> binariesPointers(numBinaries);
944 
945  for (size_type i = 0; i < numBinaries; ++i)
946  {
947  binariesPointers[i] = (*param)[i].data();
948  }
949 
950  cl_int err = f(name, numBinaries * sizeof(unsigned char*), binariesPointers.data(), NULL);
951 
952  if (err != CL_SUCCESS) {
953  return err;
954  }
955  }
956 
957 
958  return CL_SUCCESS;
959 }
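As the comment above says, this overload does not allocate: the caller must have sized the outer and inner vectors already, typically from CL_PROGRAM_BINARY_SIZES. A sketch of that contract, assuming program is a built cl::Program:

    cl::vector<cl::size_type> sizes = program.getInfo<CL_PROGRAM_BINARY_SIZES>();  // one size per device
    cl::vector<cl::vector<unsigned char>> binaries(sizes.size());
    for (cl::size_type i = 0; i < sizes.size(); ++i) {
        binaries[i].resize(sizes[i]);                               // pre-size each per-device buffer
    }
    cl_int err = program.getInfo(CL_PROGRAM_BINARIES, &binaries);   // fills the pre-sized buffers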
960 
961 // Specialized getInfoHelper for vector params
962 template <typename Func, typename T>
963 inline cl_int getInfoHelper(Func f, cl_uint name, vector<T>* param, long)
964 {
965  size_type required;
966  cl_int err = f(name, 0, NULL, &required);
967  if (err != CL_SUCCESS) {
968  return err;
969  }
970  const size_type elements = required / sizeof(T);
971 
972  // Temporary to avoid changing param on an error
973  vector<T> localData(elements);
974  err = f(name, required, localData.data(), NULL);
975  if (err != CL_SUCCESS) {
976  return err;
977  }
978  if (param) {
979  *param = std::move(localData);
980  }
981 
982  return CL_SUCCESS;
983 }
984 
985 /* Specialization for reference-counted types. This depends on the
986  * existence of Wrapper<T>::cl_type, and none of the other types having the
987  * cl_type member. Note that simply specifying the parameter as Wrapper<T>
988  * does not work, because when using a derived type (e.g. Context) the generic
989  * template will provide a better match.
990  */
991 template <typename Func, typename T>
992 inline cl_int getInfoHelper(
993  Func f, cl_uint name, vector<T>* param, int, typename T::cl_type = 0)
994 {
995  size_type required;
996  cl_int err = f(name, 0, NULL, &required);
997  if (err != CL_SUCCESS) {
998  return err;
999  }
1000 
1001  const size_type elements = required / sizeof(typename T::cl_type);
1002 
1003  vector<typename T::cl_type> value(elements);
1004  err = f(name, required, value.data(), NULL);
1005  if (err != CL_SUCCESS) {
1006  return err;
1007  }
1008 
1009  if (param) {
1010  // Assign to convert CL type to T for each element
1011  param->resize(elements);
1012 
1013  // Assign to param, constructing with retain behaviour
1014  // to correctly capture each underlying CL object
1015  for (size_type i = 0; i < elements; i++) {
1016  (*param)[i] = T(value[i], true);
1017  }
1018  }
1019  return CL_SUCCESS;
1020 }
1021 
1022 // Specialized getInfoHelper for string params
1023 template <typename Func>
1024 inline cl_int getInfoHelper(Func f, cl_uint name, string* param, long)
1025 {
1026  size_type required;
1027  cl_int err = f(name, 0, NULL, &required);
1028  if (err != CL_SUCCESS) {
1029  return err;
1030  }
1031 
1032  // std::string::data() returns a const pointer (before C++17), so read into
1033  // a char vector first and then copy the result into the string
1034  if (required > 0) {
1035  vector<char> value(required);
1036  err = f(name, required, value.data(), NULL);
1037  if (err != CL_SUCCESS) {
1038  return err;
1039  }
1040  if (param) {
1041  param->assign(begin(value), prev(end(value)));
1042  }
1043  }
1044  else if (param) {
1045  param->assign("");
1046  }
1047  return CL_SUCCESS;
1048 }
1049 
1050 // Specialized getInfoHelper for array<size_type, N> params (the old size_t<N> queries)
1051 template <typename Func, size_type N>
1052 inline cl_int getInfoHelper(Func f, cl_uint name, array<size_type, N>* param, long)
1053 {
1054  size_type required;
1055  cl_int err = f(name, 0, NULL, &required);
1056  if (err != CL_SUCCESS) {
1057  return err;
1058  }
1059 
1060  size_type elements = required / sizeof(size_type);
1061  vector<size_type> value(elements, 0);
1062 
1063  err = f(name, required, value.data(), NULL);
1064  if (err != CL_SUCCESS) {
1065  return err;
1066  }
1067 
1068  // Bound the copy with N to prevent overruns
1069  // if passed N > than the amount copied
1070  if (elements > N) {
1071  elements = N;
1072  }
1073  for (size_type i = 0; i < elements; ++i) {
1074  (*param)[i] = value[i];
1075  }
1076 
1077  return CL_SUCCESS;
1078 }
1079 
1080 template<typename T> struct ReferenceHandler;
1081 
1082 /* Specialization for reference-counted types. This depends on the
1083  * existence of Wrapper<T>::cl_type, and none of the other types having the
1084  * cl_type member. Note that simply specifying the parameter as Wrapper<T>
1085  * does not work, because when using a derived type (e.g. Context) the generic
1086  * template will provide a better match.
1087  */
1088 template<typename Func, typename T>
1089 inline cl_int getInfoHelper(Func f, cl_uint name, T* param, int, typename T::cl_type = 0)
1090 {
1091  typename T::cl_type value;
1092  cl_int err = f(name, sizeof(value), &value, NULL);
1093  if (err != CL_SUCCESS) {
1094  return err;
1095  }
1096  *param = value;
1097  if (value != NULL)
1098  {
1099  err = param->retain();
1100  if (err != CL_SUCCESS) {
1101  return err;
1102  }
1103  }
1104  return CL_SUCCESS;
1105 }
1106 
1107 #define CL_HPP_PARAM_NAME_INFO_1_0_(F) \
1108  F(cl_platform_info, CL_PLATFORM_PROFILE, string) \
1109  F(cl_platform_info, CL_PLATFORM_VERSION, string) \
1110  F(cl_platform_info, CL_PLATFORM_NAME, string) \
1111  F(cl_platform_info, CL_PLATFORM_VENDOR, string) \
1112  F(cl_platform_info, CL_PLATFORM_EXTENSIONS, string) \
1113  \
1114  F(cl_device_info, CL_DEVICE_TYPE, cl_device_type) \
1115  F(cl_device_info, CL_DEVICE_VENDOR_ID, cl_uint) \
1116  F(cl_device_info, CL_DEVICE_MAX_COMPUTE_UNITS, cl_uint) \
1117  F(cl_device_info, CL_DEVICE_MAX_WORK_ITEM_DIMENSIONS, cl_uint) \
1118  F(cl_device_info, CL_DEVICE_MAX_WORK_GROUP_SIZE, size_type) \
1119  F(cl_device_info, CL_DEVICE_MAX_WORK_ITEM_SIZES, cl::vector<size_type>) \
1120  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_CHAR, cl_uint) \
1121  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_SHORT, cl_uint) \
1122  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_INT, cl_uint) \
1123  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_LONG, cl_uint) \
1124  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_FLOAT, cl_uint) \
1125  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_DOUBLE, cl_uint) \
1126  F(cl_device_info, CL_DEVICE_MAX_CLOCK_FREQUENCY, cl_uint) \
1127  F(cl_device_info, CL_DEVICE_ADDRESS_BITS, cl_uint) \
1128  F(cl_device_info, CL_DEVICE_MAX_READ_IMAGE_ARGS, cl_uint) \
1129  F(cl_device_info, CL_DEVICE_MAX_WRITE_IMAGE_ARGS, cl_uint) \
1130  F(cl_device_info, CL_DEVICE_MAX_MEM_ALLOC_SIZE, cl_ulong) \
1131  F(cl_device_info, CL_DEVICE_IMAGE2D_MAX_WIDTH, size_type) \
1132  F(cl_device_info, CL_DEVICE_IMAGE2D_MAX_HEIGHT, size_type) \
1133  F(cl_device_info, CL_DEVICE_IMAGE3D_MAX_WIDTH, size_type) \
1134  F(cl_device_info, CL_DEVICE_IMAGE3D_MAX_HEIGHT, size_type) \
1135  F(cl_device_info, CL_DEVICE_IMAGE3D_MAX_DEPTH, size_type) \
1136  F(cl_device_info, CL_DEVICE_IMAGE_SUPPORT, cl_bool) \
1137  F(cl_device_info, CL_DEVICE_MAX_PARAMETER_SIZE, size_type) \
1138  F(cl_device_info, CL_DEVICE_MAX_SAMPLERS, cl_uint) \
1139  F(cl_device_info, CL_DEVICE_MEM_BASE_ADDR_ALIGN, cl_uint) \
1140  F(cl_device_info, CL_DEVICE_MIN_DATA_TYPE_ALIGN_SIZE, cl_uint) \
1141  F(cl_device_info, CL_DEVICE_SINGLE_FP_CONFIG, cl_device_fp_config) \
1142  F(cl_device_info, CL_DEVICE_GLOBAL_MEM_CACHE_TYPE, cl_device_mem_cache_type) \
1143  F(cl_device_info, CL_DEVICE_GLOBAL_MEM_CACHELINE_SIZE, cl_uint)\
1144  F(cl_device_info, CL_DEVICE_GLOBAL_MEM_CACHE_SIZE, cl_ulong) \
1145  F(cl_device_info, CL_DEVICE_GLOBAL_MEM_SIZE, cl_ulong) \
1146  F(cl_device_info, CL_DEVICE_MAX_CONSTANT_BUFFER_SIZE, cl_ulong) \
1147  F(cl_device_info, CL_DEVICE_MAX_CONSTANT_ARGS, cl_uint) \
1148  F(cl_device_info, CL_DEVICE_LOCAL_MEM_TYPE, cl_device_local_mem_type) \
1149  F(cl_device_info, CL_DEVICE_LOCAL_MEM_SIZE, cl_ulong) \
1150  F(cl_device_info, CL_DEVICE_ERROR_CORRECTION_SUPPORT, cl_bool) \
1151  F(cl_device_info, CL_DEVICE_PROFILING_TIMER_RESOLUTION, size_type) \
1152  F(cl_device_info, CL_DEVICE_ENDIAN_LITTLE, cl_bool) \
1153  F(cl_device_info, CL_DEVICE_AVAILABLE, cl_bool) \
1154  F(cl_device_info, CL_DEVICE_COMPILER_AVAILABLE, cl_bool) \
1155  F(cl_device_info, CL_DEVICE_EXECUTION_CAPABILITIES, cl_device_exec_capabilities) \
1156  F(cl_device_info, CL_DEVICE_PLATFORM, cl_platform_id) \
1157  F(cl_device_info, CL_DEVICE_NAME, string) \
1158  F(cl_device_info, CL_DEVICE_VENDOR, string) \
1159  F(cl_device_info, CL_DRIVER_VERSION, string) \
1160  F(cl_device_info, CL_DEVICE_PROFILE, string) \
1161  F(cl_device_info, CL_DEVICE_VERSION, string) \
1162  F(cl_device_info, CL_DEVICE_EXTENSIONS, string) \
1163  \
1164  F(cl_context_info, CL_CONTEXT_REFERENCE_COUNT, cl_uint) \
1165  F(cl_context_info, CL_CONTEXT_DEVICES, cl::vector<Device>) \
1166  F(cl_context_info, CL_CONTEXT_PROPERTIES, cl::vector<cl_context_properties>) \
1167  \
1168  F(cl_event_info, CL_EVENT_COMMAND_QUEUE, cl::CommandQueue) \
1169  F(cl_event_info, CL_EVENT_COMMAND_TYPE, cl_command_type) \
1170  F(cl_event_info, CL_EVENT_REFERENCE_COUNT, cl_uint) \
1171  F(cl_event_info, CL_EVENT_COMMAND_EXECUTION_STATUS, cl_int) \
1172  \
1173  F(cl_profiling_info, CL_PROFILING_COMMAND_QUEUED, cl_ulong) \
1174  F(cl_profiling_info, CL_PROFILING_COMMAND_SUBMIT, cl_ulong) \
1175  F(cl_profiling_info, CL_PROFILING_COMMAND_START, cl_ulong) \
1176  F(cl_profiling_info, CL_PROFILING_COMMAND_END, cl_ulong) \
1177  \
1178  F(cl_mem_info, CL_MEM_TYPE, cl_mem_object_type) \
1179  F(cl_mem_info, CL_MEM_FLAGS, cl_mem_flags) \
1180  F(cl_mem_info, CL_MEM_SIZE, size_type) \
1181  F(cl_mem_info, CL_MEM_HOST_PTR, void*) \
1182  F(cl_mem_info, CL_MEM_MAP_COUNT, cl_uint) \
1183  F(cl_mem_info, CL_MEM_REFERENCE_COUNT, cl_uint) \
1184  F(cl_mem_info, CL_MEM_CONTEXT, cl::Context) \
1185  \
1186  F(cl_image_info, CL_IMAGE_FORMAT, cl_image_format) \
1187  F(cl_image_info, CL_IMAGE_ELEMENT_SIZE, size_type) \
1188  F(cl_image_info, CL_IMAGE_ROW_PITCH, size_type) \
1189  F(cl_image_info, CL_IMAGE_SLICE_PITCH, size_type) \
1190  F(cl_image_info, CL_IMAGE_WIDTH, size_type) \
1191  F(cl_image_info, CL_IMAGE_HEIGHT, size_type) \
1192  F(cl_image_info, CL_IMAGE_DEPTH, size_type) \
1193  \
1194  F(cl_sampler_info, CL_SAMPLER_REFERENCE_COUNT, cl_uint) \
1195  F(cl_sampler_info, CL_SAMPLER_CONTEXT, cl::Context) \
1196  F(cl_sampler_info, CL_SAMPLER_NORMALIZED_COORDS, cl_bool) \
1197  F(cl_sampler_info, CL_SAMPLER_ADDRESSING_MODE, cl_addressing_mode) \
1198  F(cl_sampler_info, CL_SAMPLER_FILTER_MODE, cl_filter_mode) \
1199  \
1200  F(cl_program_info, CL_PROGRAM_REFERENCE_COUNT, cl_uint) \
1201  F(cl_program_info, CL_PROGRAM_CONTEXT, cl::Context) \
1202  F(cl_program_info, CL_PROGRAM_NUM_DEVICES, cl_uint) \
1203  F(cl_program_info, CL_PROGRAM_DEVICES, cl::vector<Device>) \
1204  F(cl_program_info, CL_PROGRAM_SOURCE, string) \
1205  F(cl_program_info, CL_PROGRAM_BINARY_SIZES, cl::vector<size_type>) \
1206  F(cl_program_info, CL_PROGRAM_BINARIES, cl::vector<cl::vector<unsigned char>>) \
1207  \
1208  F(cl_program_build_info, CL_PROGRAM_BUILD_STATUS, cl_build_status) \
1209  F(cl_program_build_info, CL_PROGRAM_BUILD_OPTIONS, string) \
1210  F(cl_program_build_info, CL_PROGRAM_BUILD_LOG, string) \
1211  \
1212  F(cl_kernel_info, CL_KERNEL_FUNCTION_NAME, string) \
1213  F(cl_kernel_info, CL_KERNEL_NUM_ARGS, cl_uint) \
1214  F(cl_kernel_info, CL_KERNEL_REFERENCE_COUNT, cl_uint) \
1215  F(cl_kernel_info, CL_KERNEL_CONTEXT, cl::Context) \
1216  F(cl_kernel_info, CL_KERNEL_PROGRAM, cl::Program) \
1217  \
1218  F(cl_kernel_work_group_info, CL_KERNEL_WORK_GROUP_SIZE, size_type) \
1219  F(cl_kernel_work_group_info, CL_KERNEL_COMPILE_WORK_GROUP_SIZE, cl::detail::size_t_array) \
1220  F(cl_kernel_work_group_info, CL_KERNEL_LOCAL_MEM_SIZE, cl_ulong) \
1221  \
1222  F(cl_command_queue_info, CL_QUEUE_CONTEXT, cl::Context) \
1223  F(cl_command_queue_info, CL_QUEUE_DEVICE, cl::Device) \
1224  F(cl_command_queue_info, CL_QUEUE_REFERENCE_COUNT, cl_uint) \
1225  F(cl_command_queue_info, CL_QUEUE_PROPERTIES, cl_command_queue_properties)
1226 
1227 
1228 #define CL_HPP_PARAM_NAME_INFO_1_1_(F) \
1229  F(cl_context_info, CL_CONTEXT_NUM_DEVICES, cl_uint)\
1230  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_HALF, cl_uint) \
1231  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_CHAR, cl_uint) \
1232  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_SHORT, cl_uint) \
1233  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_INT, cl_uint) \
1234  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_LONG, cl_uint) \
1235  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_FLOAT, cl_uint) \
1236  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_DOUBLE, cl_uint) \
1237  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_HALF, cl_uint) \
1238  F(cl_device_info, CL_DEVICE_DOUBLE_FP_CONFIG, cl_device_fp_config) \
1239  F(cl_device_info, CL_DEVICE_HALF_FP_CONFIG, cl_device_fp_config) \
1240  F(cl_device_info, CL_DEVICE_OPENCL_C_VERSION, string) \
1241  \
1242  F(cl_mem_info, CL_MEM_ASSOCIATED_MEMOBJECT, cl::Memory) \
1243  F(cl_mem_info, CL_MEM_OFFSET, size_type) \
1244  \
1245  F(cl_kernel_work_group_info, CL_KERNEL_PREFERRED_WORK_GROUP_SIZE_MULTIPLE, size_type) \
1246  F(cl_kernel_work_group_info, CL_KERNEL_PRIVATE_MEM_SIZE, cl_ulong) \
1247  \
1248  F(cl_event_info, CL_EVENT_CONTEXT, cl::Context)
1249 
1250 #define CL_HPP_PARAM_NAME_INFO_1_2_(F) \
1251  F(cl_program_info, CL_PROGRAM_NUM_KERNELS, size_type) \
1252  F(cl_program_info, CL_PROGRAM_KERNEL_NAMES, string) \
1253  \
1254  F(cl_program_build_info, CL_PROGRAM_BINARY_TYPE, cl_program_binary_type) \
1255  \
1256  F(cl_kernel_info, CL_KERNEL_ATTRIBUTES, string) \
1257  \
1258  F(cl_kernel_arg_info, CL_KERNEL_ARG_ADDRESS_QUALIFIER, cl_kernel_arg_address_qualifier) \
1259  F(cl_kernel_arg_info, CL_KERNEL_ARG_ACCESS_QUALIFIER, cl_kernel_arg_access_qualifier) \
1260  F(cl_kernel_arg_info, CL_KERNEL_ARG_TYPE_NAME, string) \
1261  F(cl_kernel_arg_info, CL_KERNEL_ARG_NAME, string) \
1262  F(cl_kernel_arg_info, CL_KERNEL_ARG_TYPE_QUALIFIER, cl_kernel_arg_type_qualifier) \
1263  \
1264  F(cl_device_info, CL_DEVICE_PARENT_DEVICE, cl::Device) \
1265  F(cl_device_info, CL_DEVICE_PARTITION_PROPERTIES, cl::vector<cl_device_partition_property>) \
1266  F(cl_device_info, CL_DEVICE_PARTITION_TYPE, cl::vector<cl_device_partition_property>) \
1267  F(cl_device_info, CL_DEVICE_REFERENCE_COUNT, cl_uint) \
1268  F(cl_device_info, CL_DEVICE_PREFERRED_INTEROP_USER_SYNC, size_type) \
1269  F(cl_device_info, CL_DEVICE_PARTITION_AFFINITY_DOMAIN, cl_device_affinity_domain) \
1270  F(cl_device_info, CL_DEVICE_BUILT_IN_KERNELS, string) \
1271  \
1272  F(cl_image_info, CL_IMAGE_ARRAY_SIZE, size_type) \
1273  F(cl_image_info, CL_IMAGE_NUM_MIP_LEVELS, cl_uint) \
1274  F(cl_image_info, CL_IMAGE_NUM_SAMPLES, cl_uint)
1275 
1276 #define CL_HPP_PARAM_NAME_INFO_2_0_(F) \
1277  F(cl_device_info, CL_DEVICE_QUEUE_ON_HOST_PROPERTIES, cl_command_queue_properties) \
1278  F(cl_device_info, CL_DEVICE_QUEUE_ON_DEVICE_PROPERTIES, cl_command_queue_properties) \
1279  F(cl_device_info, CL_DEVICE_QUEUE_ON_DEVICE_PREFERRED_SIZE, cl_uint) \
1280  F(cl_device_info, CL_DEVICE_QUEUE_ON_DEVICE_MAX_SIZE, cl_uint) \
1281  F(cl_device_info, CL_DEVICE_MAX_ON_DEVICE_QUEUES, cl_uint) \
1282  F(cl_device_info, CL_DEVICE_MAX_ON_DEVICE_EVENTS, cl_uint) \
1283  F(cl_device_info, CL_DEVICE_MAX_PIPE_ARGS, cl_uint) \
1284  F(cl_device_info, CL_DEVICE_PIPE_MAX_ACTIVE_RESERVATIONS, cl_uint) \
1285  F(cl_device_info, CL_DEVICE_PIPE_MAX_PACKET_SIZE, cl_uint) \
1286  F(cl_device_info, CL_DEVICE_SVM_CAPABILITIES, cl_device_svm_capabilities) \
1287  F(cl_device_info, CL_DEVICE_PREFERRED_PLATFORM_ATOMIC_ALIGNMENT, cl_uint) \
1288  F(cl_device_info, CL_DEVICE_PREFERRED_GLOBAL_ATOMIC_ALIGNMENT, cl_uint) \
1289  F(cl_device_info, CL_DEVICE_PREFERRED_LOCAL_ATOMIC_ALIGNMENT, cl_uint) \
1290  F(cl_command_queue_info, CL_QUEUE_SIZE, cl_uint) \
1291  F(cl_mem_info, CL_MEM_USES_SVM_POINTER, cl_bool) \
1292  F(cl_program_build_info, CL_PROGRAM_BUILD_GLOBAL_VARIABLE_TOTAL_SIZE, size_type) \
1293  F(cl_pipe_info, CL_PIPE_PACKET_SIZE, cl_uint) \
1294  F(cl_pipe_info, CL_PIPE_MAX_PACKETS, cl_uint)
1295 
1296 #define CL_HPP_PARAM_NAME_DEVICE_FISSION_(F) \
1297  F(cl_device_info, CL_DEVICE_PARENT_DEVICE_EXT, cl_device_id) \
1298  F(cl_device_info, CL_DEVICE_PARTITION_TYPES_EXT, cl::vector<cl_device_partition_property_ext>) \
1299  F(cl_device_info, CL_DEVICE_AFFINITY_DOMAINS_EXT, cl::vector<cl_device_partition_property_ext>) \
1300  F(cl_device_info, CL_DEVICE_REFERENCE_COUNT_EXT , cl_uint) \
1301  F(cl_device_info, CL_DEVICE_PARTITION_STYLE_EXT, cl::vector<cl_device_partition_property_ext>)
1302 
1303 template <typename enum_type, cl_int Name>
1304 struct param_traits {};
1305 
1306 #define CL_HPP_DECLARE_PARAM_TRAITS_(token, param_name, T) \
1307 struct token; \
1308 template<> \
1309 struct param_traits<detail:: token,param_name> \
1310 { \
1311  enum { value = param_name }; \
1312  typedef T param_type; \
1313 };
1314 
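Each F(enum_type, param_name, T) entry in the tables below is fed through this macro, producing a param_traits specialization that ties a query token to its result type. Roughly, CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_NAME, string) expands to:

    struct cl_device_info;                                   // tag type used only for lookup
    template<>
    struct param_traits<detail::cl_device_info, CL_DEVICE_NAME>
    {
        enum { value = CL_DEVICE_NAME };
        typedef string param_type;                           // so getInfo<CL_DEVICE_NAME>() yields cl::string
    };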
1315 CL_HPP_PARAM_NAME_INFO_1_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1316 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
1317 CL_HPP_PARAM_NAME_INFO_1_1_(CL_HPP_DECLARE_PARAM_TRAITS_)
1318 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
1319 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
1320 CL_HPP_PARAM_NAME_INFO_1_2_(CL_HPP_DECLARE_PARAM_TRAITS_)
1321 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
1322 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
1323 CL_HPP_PARAM_NAME_INFO_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1324 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
1325 
1326 
1327 // Flags deprecated in OpenCL 2.0
1328 #define CL_HPP_PARAM_NAME_INFO_1_0_DEPRECATED_IN_2_0_(F) \
1329  F(cl_device_info, CL_DEVICE_QUEUE_PROPERTIES, cl_command_queue_properties)
1330 
1331 #define CL_HPP_PARAM_NAME_INFO_1_1_DEPRECATED_IN_2_0_(F) \
1332  F(cl_device_info, CL_DEVICE_HOST_UNIFIED_MEMORY, cl_bool)
1333 
1334 #define CL_HPP_PARAM_NAME_INFO_1_2_DEPRECATED_IN_2_0_(F) \
1335  F(cl_image_info, CL_IMAGE_BUFFER, cl::Buffer)
1336 
1337 // Include deprecated query flags based on versions
1338 // Only include deprecated 1.0 flags if 2.0 not active as there is an enum clash
1339 #if CL_HPP_TARGET_OPENCL_VERSION > 100 && CL_HPP_MINIMUM_OPENCL_VERSION < 200 && CL_HPP_TARGET_OPENCL_VERSION < 200
1340 CL_HPP_PARAM_NAME_INFO_1_0_DEPRECATED_IN_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1341 #endif // CL_HPP_TARGET_OPENCL_VERSION > 100 && CL_HPP_MINIMUM_OPENCL_VERSION < 200 && CL_HPP_TARGET_OPENCL_VERSION < 200
1342 #if CL_HPP_TARGET_OPENCL_VERSION > 110 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
1343 CL_HPP_PARAM_NAME_INFO_1_1_DEPRECATED_IN_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1344 #endif // CL_HPP_TARGET_OPENCL_VERSION > 110 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
1345 #if CL_HPP_TARGET_OPENCL_VERSION > 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
1346 CL_HPP_PARAM_NAME_INFO_1_2_DEPRECATED_IN_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1347 #endif // CL_HPP_TARGET_OPENCL_VERSION > 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
1348 
1349 #if defined(CL_HPP_USE_CL_DEVICE_FISSION)
1350 CL_HPP_PARAM_NAME_DEVICE_FISSION_(CL_HPP_DECLARE_PARAM_TRAITS_);
1351 #endif // CL_HPP_USE_CL_DEVICE_FISSION
1352 
1353 #ifdef CL_PLATFORM_ICD_SUFFIX_KHR
1354 CL_HPP_DECLARE_PARAM_TRAITS_(cl_platform_info, CL_PLATFORM_ICD_SUFFIX_KHR, string)
1355 #endif
1356 
1357 #ifdef CL_DEVICE_PROFILING_TIMER_OFFSET_AMD
1358 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_PROFILING_TIMER_OFFSET_AMD, cl_ulong)
1359 #endif
1360 
1361 #ifdef CL_DEVICE_GLOBAL_FREE_MEMORY_AMD
1362 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_FREE_MEMORY_AMD, vector<size_type>)
1363 #endif
1364 #ifdef CL_DEVICE_SIMD_PER_COMPUTE_UNIT_AMD
1365 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_SIMD_PER_COMPUTE_UNIT_AMD, cl_uint)
1366 #endif
1367 #ifdef CL_DEVICE_SIMD_WIDTH_AMD
1368 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_SIMD_WIDTH_AMD, cl_uint)
1369 #endif
1370 #ifdef CL_DEVICE_SIMD_INSTRUCTION_WIDTH_AMD
1371 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_SIMD_INSTRUCTION_WIDTH_AMD, cl_uint)
1372 #endif
1373 #ifdef CL_DEVICE_WAVEFRONT_WIDTH_AMD
1374 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_WAVEFRONT_WIDTH_AMD, cl_uint)
1375 #endif
1376 #ifdef CL_DEVICE_GLOBAL_MEM_CHANNELS_AMD
1377 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_MEM_CHANNELS_AMD, cl_uint)
1378 #endif
1379 #ifdef CL_DEVICE_GLOBAL_MEM_CHANNEL_BANKS_AMD
1380 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_MEM_CHANNEL_BANKS_AMD, cl_uint)
1381 #endif
1382 #ifdef CL_DEVICE_GLOBAL_MEM_CHANNEL_BANK_WIDTH_AMD
1383 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_MEM_CHANNEL_BANK_WIDTH_AMD, cl_uint)
1384 #endif
1385 #ifdef CL_DEVICE_LOCAL_MEM_SIZE_PER_COMPUTE_UNIT_AMD
1386 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_LOCAL_MEM_SIZE_PER_COMPUTE_UNIT_AMD, cl_uint)
1387 #endif
1388 #ifdef CL_DEVICE_LOCAL_MEM_BANKS_AMD
1389 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_LOCAL_MEM_BANKS_AMD, cl_uint)
1390 #endif
1391 
1392 #ifdef CL_DEVICE_COMPUTE_CAPABILITY_MAJOR_NV
1393 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_COMPUTE_CAPABILITY_MAJOR_NV, cl_uint)
1394 #endif
1395 #ifdef CL_DEVICE_COMPUTE_CAPABILITY_MINOR_NV
1396 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_COMPUTE_CAPABILITY_MINOR_NV, cl_uint)
1397 #endif
1398 #ifdef CL_DEVICE_REGISTERS_PER_BLOCK_NV
1399 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_REGISTERS_PER_BLOCK_NV, cl_uint)
1400 #endif
1401 #ifdef CL_DEVICE_WARP_SIZE_NV
1402 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_WARP_SIZE_NV, cl_uint)
1403 #endif
1404 #ifdef CL_DEVICE_GPU_OVERLAP_NV
1405 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GPU_OVERLAP_NV, cl_bool)
1406 #endif
1407 #ifdef CL_DEVICE_KERNEL_EXEC_TIMEOUT_NV
1408 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_KERNEL_EXEC_TIMEOUT_NV, cl_bool)
1409 #endif
1410 #ifdef CL_DEVICE_INTEGRATED_MEMORY_NV
1411 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_INTEGRATED_MEMORY_NV, cl_bool)
1412 #endif
1413 
1414 // Convenience functions
1415 
1416 template <typename Func, typename T>
1417 inline cl_int
1418 getInfo(Func f, cl_uint name, T* param)
1419 {
1420  return getInfoHelper(f, name, param, 0);
1421 }
1422 
1423 template <typename Func, typename Arg0>
1424 struct GetInfoFunctor0
1425 {
1426  Func f_; const Arg0& arg0_;
1427  cl_int operator ()(
1428  cl_uint param, size_type size, void* value, size_type* size_ret)
1429  { return f_(arg0_, param, size, value, size_ret); }
1430 };
1431 
1432 template <typename Func, typename Arg0, typename Arg1>
1433 struct GetInfoFunctor1
1434 {
1435  Func f_; const Arg0& arg0_; const Arg1& arg1_;
1436  cl_int operator ()(
1437  cl_uint param, size_type size, void* value, size_type* size_ret)
1438  { return f_(arg0_, arg1_, param, size, value, size_ret); }
1439 };
1440 
1441 template <typename Func, typename Arg0, typename T>
1442 inline cl_int
1443 getInfo(Func f, const Arg0& arg0, cl_uint name, T* param)
1444 {
1445  GetInfoFunctor0<Func, Arg0> f0 = { f, arg0 };
1446  return getInfoHelper(f0, name, param, 0);
1447 }
1448 
1449 template <typename Func, typename Arg0, typename Arg1, typename T>
1450 inline cl_int
1451 getInfo(Func f, const Arg0& arg0, const Arg1& arg1, cl_uint name, T* param)
1452 {
1453  GetInfoFunctor1<Func, Arg0, Arg1> f0 = { f, arg0, arg1 };
1454  return getInfoHelper(f0, name, param, 0);
1455 }
1456 
1457 
1458 template<typename T>
1459 struct ReferenceHandler
1460 { };
1461 
1462 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
1463 
1466 template <>
1467 struct ReferenceHandler<cl_device_id>
1468 {
1478  static cl_int retain(cl_device_id device)
1479  { return ::clRetainDevice(device); }
1489  static cl_int release(cl_device_id device)
1490  { return ::clReleaseDevice(device); }
1491 };
1492 #else // CL_HPP_TARGET_OPENCL_VERSION >= 120
1493 
1496 template <>
1497 struct ReferenceHandler<cl_device_id>
1498 {
1499  // cl_device_id does not have retain().
1500  static cl_int retain(cl_device_id)
1501  { return CL_SUCCESS; }
1502  // cl_device_id does not have release().
1503  static cl_int release(cl_device_id)
1504  { return CL_SUCCESS; }
1505 };
1506 #endif // ! (CL_HPP_TARGET_OPENCL_VERSION >= 120)
1507 
1508 template <>
1509 struct ReferenceHandler<cl_platform_id>
1510 {
1511  // cl_platform_id does not have retain().
1512  static cl_int retain(cl_platform_id)
1513  { return CL_SUCCESS; }
1514  // cl_platform_id does not have release().
1515  static cl_int release(cl_platform_id)
1516  { return CL_SUCCESS; }
1517 };
1518 
1519 template <>
1520 struct ReferenceHandler<cl_context>
1521 {
1522  static cl_int retain(cl_context context)
1523  { return ::clRetainContext(context); }
1524  static cl_int release(cl_context context)
1525  { return ::clReleaseContext(context); }
1526 };
1527 
1528 template <>
1529 struct ReferenceHandler<cl_command_queue>
1530 {
1531  static cl_int retain(cl_command_queue queue)
1532  { return ::clRetainCommandQueue(queue); }
1533  static cl_int release(cl_command_queue queue)
1534  { return ::clReleaseCommandQueue(queue); }
1535 };
1536 
1537 template <>
1538 struct ReferenceHandler<cl_mem>
1539 {
1540  static cl_int retain(cl_mem memory)
1541  { return ::clRetainMemObject(memory); }
1542  static cl_int release(cl_mem memory)
1543  { return ::clReleaseMemObject(memory); }
1544 };
1545 
1546 template <>
1547 struct ReferenceHandler<cl_sampler>
1548 {
1549  static cl_int retain(cl_sampler sampler)
1550  { return ::clRetainSampler(sampler); }
1551  static cl_int release(cl_sampler sampler)
1552  { return ::clReleaseSampler(sampler); }
1553 };
1554 
1555 template <>
1556 struct ReferenceHandler<cl_program>
1557 {
1558  static cl_int retain(cl_program program)
1559  { return ::clRetainProgram(program); }
1560  static cl_int release(cl_program program)
1561  { return ::clReleaseProgram(program); }
1562 };
1563 
1564 template <>
1565 struct ReferenceHandler<cl_kernel>
1566 {
1567  static cl_int retain(cl_kernel kernel)
1568  { return ::clRetainKernel(kernel); }
1569  static cl_int release(cl_kernel kernel)
1570  { return ::clReleaseKernel(kernel); }
1571 };
1572 
1573 template <>
1574 struct ReferenceHandler<cl_event>
1575 {
1576  static cl_int retain(cl_event event)
1577  { return ::clRetainEvent(event); }
1578  static cl_int release(cl_event event)
1579  { return ::clReleaseEvent(event); }
1580 };
1581 
1582 
1583 #if CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
1584 // Extracts version number with major in the upper 16 bits, minor in the lower 16
1585 static cl_uint getVersion(const vector<char> &versionInfo)
1586 {
1587  int highVersion = 0;
1588  int lowVersion = 0;
1589  int index = 7;
1590  while(versionInfo[index] != '.' ) {
1591  highVersion *= 10;
1592  highVersion += versionInfo[index]-'0';
1593  ++index;
1594  }
1595  ++index;
1596  while(versionInfo[index] != ' ' && versionInfo[index] != '\0') {
1597  lowVersion *= 10;
1598  lowVersion += versionInfo[index]-'0';
1599  ++index;
1600  }
1601  return (highVersion << 16) | lowVersion;
1602 }
1603 
1604 static cl_uint getPlatformVersion(cl_platform_id platform)
1605 {
1606  size_type size = 0;
1607  clGetPlatformInfo(platform, CL_PLATFORM_VERSION, 0, NULL, &size);
1608 
1609  vector<char> versionInfo(size);
1610  clGetPlatformInfo(platform, CL_PLATFORM_VERSION, size, versionInfo.data(), &size);
1611  return getVersion(versionInfo);
1612 }
1613 
1614 static cl_uint getDevicePlatformVersion(cl_device_id device)
1615 {
1616  cl_platform_id platform;
1617  clGetDeviceInfo(device, CL_DEVICE_PLATFORM, sizeof(platform), &platform, NULL);
1618  return getPlatformVersion(platform);
1619 }
1620 
1621 static cl_uint getContextPlatformVersion(cl_context context)
1622 {
1623  // The platform cannot be queried directly, so we first have to grab a
1624  // device and obtain its platform
1625  size_type size = 0;
1626  clGetContextInfo(context, CL_CONTEXT_DEVICES, 0, NULL, &size);
1627  if (size == 0)
1628  return 0;
1629  vector<cl_device_id> devices(size/sizeof(cl_device_id));
1630  clGetContextInfo(context, CL_CONTEXT_DEVICES, size, devices.data(), NULL);
1631  return getDevicePlatformVersion(devices[0]);
1632 }
1633 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
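A worked example of the packing used by getVersion above, which assumes the mandated "OpenCL <major>.<minor> <platform-specific>" form of CL_PLATFORM_VERSION (index 7 is the first digit of the major version):

    // versionInfo = "OpenCL 1.2 <vendor specific>"
    // highVersion = 1, lowVersion = 2
    // result      = (1 << 16) | 2 = 0x00010002
    // so a later test such as version > ((1 << 16) + 1) reads as "newer than OpenCL 1.1"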
1634 
1635 template <typename T>
1636 class Wrapper
1637 {
1638 public:
1639  typedef T cl_type;
1640 
1641 protected:
1642  cl_type object_;
1643 
1644 public:
1645  Wrapper() : object_(NULL) { }
1646 
1647  Wrapper(const cl_type &obj, bool retainObject) : object_(obj)
1648  {
1649  if (retainObject) {
1650  detail::errHandler(retain(), __RETAIN_ERR);
1651  }
1652  }
1653 
1654  ~Wrapper()
1655  {
1656  if (object_ != NULL) { release(); }
1657  }
1658 
1659  Wrapper(const Wrapper<cl_type>& rhs)
1660  {
1661  object_ = rhs.object_;
1662  detail::errHandler(retain(), __RETAIN_ERR);
1663  }
1664 
1665  Wrapper(Wrapper<cl_type>&& rhs) CL_HPP_NOEXCEPT_
1666  {
1667  object_ = rhs.object_;
1668  rhs.object_ = NULL;
1669  }
1670 
1671  Wrapper<cl_type>& operator = (const Wrapper<cl_type>& rhs)
1672  {
1673  if (this != &rhs) {
1674  detail::errHandler(release(), __RELEASE_ERR);
1675  object_ = rhs.object_;
1676  detail::errHandler(retain(), __RETAIN_ERR);
1677  }
1678  return *this;
1679  }
1680 
1681  Wrapper<cl_type>& operator = (Wrapper<cl_type>&& rhs)
1682  {
1683  if (this != &rhs) {
1684  detail::errHandler(release(), __RELEASE_ERR);
1685  object_ = rhs.object_;
1686  rhs.object_ = NULL;
1687  }
1688  return *this;
1689  }
1690 
1691  Wrapper<cl_type>& operator = (const cl_type &rhs)
1692  {
1693  detail::errHandler(release(), __RELEASE_ERR);
1694  object_ = rhs;
1695  return *this;
1696  }
1697 
1698  const cl_type& operator ()() const { return object_; }
1699 
1700  cl_type& operator ()() { return object_; }
1701 
1702  const cl_type get() const { return object_; }
1703 
1704  cl_type get() { return object_; }
1705 
1706 
1707 protected:
1708  template<typename Func, typename U>
1709  friend inline cl_int getInfoHelper(Func, cl_uint, U*, int, typename U::cl_type);
1710 
1711  cl_int retain() const
1712  {
1713  if (object_ != nullptr) {
1714  return ReferenceHandler<cl_type>::retain(object_);
1715  }
1716  else {
1717  return CL_SUCCESS;
1718  }
1719  }
1720 
1721  cl_int release() const
1722  {
1723  if (object_ != nullptr) {
1724  return ReferenceHandler<cl_type>::release(object_);
1725  }
1726  else {
1727  return CL_SUCCESS;
1728  }
1729  }
1730 };
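Wrapper<T> is the RAII core shared by every cl:: object class: copies retain, moves steal the handle, and the destructor releases, each via the matching ReferenceHandler. A sketch of the ownership modes, using cl::Buffer (which ultimately derives from this wrapper) and assuming ctx is an existing cl::Context:

    cl_int err = CL_SUCCESS;
    cl_mem raw = ::clCreateBuffer(ctx(), CL_MEM_READ_WRITE, 64, NULL, &err);
    cl::Buffer buf(raw, false);   // adopt: the wrapper takes over the reference returned by clCreateBuffer
    cl::Buffer copy = buf;        // copy construction retains (clRetainMemObject)
    // both wrappers release on destruction; the cl_mem is freed with the last reference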
1731 
1732 template <>
1733 class Wrapper<cl_device_id>
1734 {
1735 public:
1736  typedef cl_device_id cl_type;
1737 
1738 protected:
1739  cl_type object_;
1740  bool referenceCountable_;
1741 
1742  static bool isReferenceCountable(cl_device_id device)
1743  {
1744  bool retVal = false;
1745 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
1746 #if CL_HPP_MINIMUM_OPENCL_VERSION < 120
1747  if (device != NULL) {
1748  int version = getDevicePlatformVersion(device);
1749  if(version > ((1 << 16) + 1)) {
1750  retVal = true;
1751  }
1752  }
1753 #else // CL_HPP_MINIMUM_OPENCL_VERSION < 120
1754  retVal = true;
1755 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 120
1756 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
1757  return retVal;
1758  }
1759 
1760 public:
1761  Wrapper() : object_(NULL), referenceCountable_(false)
1762  {
1763  }
1764 
1765  Wrapper(const cl_type &obj, bool retainObject) :
1766  object_(obj),
1767  referenceCountable_(false)
1768  {
1769  referenceCountable_ = isReferenceCountable(obj);
1770 
1771  if (retainObject) {
1772  detail::errHandler(retain(), __RETAIN_ERR);
1773  }
1774  }
1775 
1776  ~Wrapper()
1777  {
1778  release();
1779  }
1780 
1781  Wrapper(const Wrapper<cl_type>& rhs)
1782  {
1783  object_ = rhs.object_;
1784  referenceCountable_ = isReferenceCountable(object_);
1785  detail::errHandler(retain(), __RETAIN_ERR);
1786  }
1787 
1788  Wrapper(Wrapper<cl_type>&& rhs) CL_HPP_NOEXCEPT_
1789  {
1790  object_ = rhs.object_;
1791  referenceCountable_ = rhs.referenceCountable_;
1792  rhs.object_ = NULL;
1793  rhs.referenceCountable_ = false;
1794  }
1795 
1796  Wrapper<cl_type>& operator = (const Wrapper<cl_type>& rhs)
1797  {
1798  if (this != &rhs) {
1799  detail::errHandler(release(), __RELEASE_ERR);
1800  object_ = rhs.object_;
1801  referenceCountable_ = rhs.referenceCountable_;
1802  detail::errHandler(retain(), __RETAIN_ERR);
1803  }
1804  return *this;
1805  }
1806 
1807  Wrapper<cl_type>& operator = (Wrapper<cl_type>&& rhs)
1808  {
1809  if (this != &rhs) {
1810  detail::errHandler(release(), __RELEASE_ERR);
1811  object_ = rhs.object_;
1812  referenceCountable_ = rhs.referenceCountable_;
1813  rhs.object_ = NULL;
1814  rhs.referenceCountable_ = false;
1815  }
1816  return *this;
1817  }
1818 
1819  Wrapper<cl_type>& operator = (const cl_type &rhs)
1820  {
1821  detail::errHandler(release(), __RELEASE_ERR);
1822  object_ = rhs;
1823  referenceCountable_ = isReferenceCountable(object_);
1824  return *this;
1825  }
1826 
1827  const cl_type& operator ()() const { return object_; }
1828 
1829  cl_type& operator ()() { return object_; }
1830 
1831  cl_type get() const { return object_; }
1832 
1833 protected:
1834  template<typename Func, typename U>
1835  friend inline cl_int getInfoHelper(Func, cl_uint, U*, int, typename U::cl_type);
1836 
1837  template<typename Func, typename U>
1838  friend inline cl_int getInfoHelper(Func, cl_uint, vector<U>*, int, typename U::cl_type);
1839 
1840  cl_int retain() const
1841  {
1842  if( object_ != nullptr && referenceCountable_ ) {
1843  return ReferenceHandler<cl_type>::retain(object_);
1844  }
1845  else {
1846  return CL_SUCCESS;
1847  }
1848  }
1849 
1850  cl_int release() const
1851  {
1852  if (object_ != nullptr && referenceCountable_) {
1853  return ReferenceHandler<cl_type>::release(object_);
1854  }
1855  else {
1856  return CL_SUCCESS;
1857  }
1858  }
1859 };
1860 
1861 template <typename T>
1862 inline bool operator==(const Wrapper<T> &lhs, const Wrapper<T> &rhs)
1863 {
1864  return lhs() == rhs();
1865 }
1866 
1867 template <typename T>
1868 inline bool operator!=(const Wrapper<T> &lhs, const Wrapper<T> &rhs)
1869 {
1870  return !operator==(lhs, rhs);
1871 }
1872 
1873 } // namespace detail
1875 
1876 
1877 using BuildLogType = vector<std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, CL_PROGRAM_BUILD_LOG>::param_type>>;
1878 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
1879 
1882 class BuildError : public Error
1883 {
1884 private:
1885  BuildLogType buildLogs;
1886 public:
1887  BuildError(cl_int err, const char * errStr, const BuildLogType &vec) : Error(err, errStr), buildLogs(vec)
1888  {
1889  }
1890 
1891  BuildLogType getBuildLog() const
1892  {
1893  return buildLogs;
1894  }
1895 };
1896 namespace detail {
1897  static inline cl_int buildErrHandler(
1898  cl_int err,
1899  const char * errStr,
1900  const BuildLogType &buildLogs)
1901  {
1902  if (err != CL_SUCCESS) {
1903  throw BuildError(err, errStr, buildLogs);
1904  }
1905  return err;
1906  }
1907 } // namespace detail
1908 
1909 #else
1910 namespace detail {
1911  static inline cl_int buildErrHandler(
1912  cl_int err,
1913  const char * errStr,
1914  const BuildLogType &buildLogs)
1915  {
1916  (void)buildLogs; // suppress unused variable warning
1917  (void)errStr;
1918  return err;
1919  }
1920 } // namespace detail
1921 #endif // #if defined(CL_HPP_ENABLE_EXCEPTIONS)
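With exceptions enabled, build failures surface as cl::BuildError carrying the per-device build logs. A catch sketch, assuming kernelSource is a std::string holding OpenCL C source:

    #include <iostream>
    try {
        cl::Program program(kernelSource, true);                  // build immediately
    } catch (const cl::BuildError &e) {
        for (const auto &entry : e.getBuildLog()) {               // entry is a (Device, log string) pair
            std::cerr << "Build log for " << entry.first.getInfo<CL_DEVICE_NAME>() << ":\n"
                      << entry.second << std::endl;
        }
    }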
1922 
1923 
1929 struct ImageFormat : public cl_image_format
1930 {
1933 
1935  ImageFormat(cl_channel_order order, cl_channel_type type)
1936  {
1937  image_channel_order = order;
1938  image_channel_data_type = type;
1939  }
1940 
1942  ImageFormat& operator = (const ImageFormat& rhs)
1943  {
1944  if (this != &rhs) {
1945  this->image_channel_data_type = rhs.image_channel_data_type;
1946  this->image_channel_order = rhs.image_channel_order;
1947  }
1948  return *this;
1949  }
1950 };
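 /* Usage sketch: ImageFormat is layout-compatible with cl_image_format, so it
  * can be passed wherever the C struct is expected.
  *
  *   cl::ImageFormat rgbaFloat(CL_RGBA, CL_FLOAT);  // channel order + data type
  */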
1951 
1959 class Device : public detail::Wrapper<cl_device_id>
1960 {
1961 private:
1962  static std::once_flag default_initialized_;
1963  static Device default_;
1964  static cl_int default_error_;
1965 
1971  static void makeDefault();
1972 
1978  static void makeDefaultProvided(const Device &p) {
1979  default_ = p;
1980  }
1981 
1982 public:
1983 #ifdef CL_HPP_UNIT_TEST_ENABLE
1984 
1990  static void unitTestClearDefault() {
1991  default_ = Device();
1992  }
1993 #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
1994 
1996  Device() : detail::Wrapper<cl_type>() { }
1997 
2002  explicit Device(const cl_device_id &device, bool retainObject = false) :
2003  detail::Wrapper<cl_type>(device, retainObject) { }
2004 
2009  static Device getDefault(
2010  cl_int *errResult = NULL)
2011  {
2012  std::call_once(default_initialized_, makeDefault);
2013  detail::errHandler(default_error_);
2014  if (errResult != NULL) {
2015  *errResult = default_error_;
2016  }
2017  return default_;
2018  }
2019 
2027  static Device setDefault(const Device &default_device)
2028  {
2029  std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_device));
2030  detail::errHandler(default_error_);
2031  return default_;
2032  }
2033 
2038  Device& operator = (const cl_device_id& rhs)
2039  {
2040  detail::Wrapper<cl_type>::operator=(rhs);
2041  return *this;
2042  }
2043 
2047  Device(const Device& dev) : detail::Wrapper<cl_type>(dev) {}
2048 
2052  Device& operator = (const Device &dev)
2053  {
2054  detail::Wrapper<cl_type>::operator=(dev);
2055  return *this;
2056  }
2057 
2061  Device(Device&& dev) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(dev)) {}
2062 
2066  Device& operator = (Device &&dev)
2067  {
2068  detail::Wrapper<cl_type>::operator=(std::move(dev));
2069  return *this;
2070  }
2071 
2073  template <typename T>
2074  cl_int getInfo(cl_device_info name, T* param) const
2075  {
2076  return detail::errHandler(
2077  detail::getInfo(&::clGetDeviceInfo, object_, name, param),
2078  __GET_DEVICE_INFO_ERR);
2079  }
2080 
2082  template <cl_int name> typename
2083  detail::param_traits<detail::cl_device_info, name>::param_type
2084  getInfo(cl_int* err = NULL) const
2085  {
2086  typename detail::param_traits<
2087  detail::cl_device_info, name>::param_type param;
2088  cl_int result = getInfo(name, &param);
2089  if (err != NULL) {
2090  *err = result;
2091  }
2092  return param;
2093  }
2094 
2098 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
2099  cl_int createSubDevices(
2101  const cl_device_partition_property * properties,
2102  vector<Device>* devices)
2103  {
2104  cl_uint n = 0;
2105  cl_int err = clCreateSubDevices(object_, properties, 0, NULL, &n);
2106  if (err != CL_SUCCESS) {
2107  return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
2108  }
2109 
2110  vector<cl_device_id> ids(n);
2111  err = clCreateSubDevices(object_, properties, n, ids.data(), NULL);
2112  if (err != CL_SUCCESS) {
2113  return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
2114  }
2115 
2116  // Cannot trivially assign because we need to capture intermediates
2117  // with safe construction
2118  if (devices) {
2119  devices->resize(ids.size());
2120 
2121  // Assign to param, constructing with retain behaviour
2122  // to correctly capture each underlying CL object
2123  for (size_type i = 0; i < ids.size(); i++) {
2124  // We do not need to retain because this device is being created
2125  // by the runtime
2126  (*devices)[i] = Device(ids[i], false);
2127  }
2128  }
2129 
2130  return CL_SUCCESS;
2131  }
2132 #elif defined(CL_HPP_USE_CL_DEVICE_FISSION)
2133 
2137  cl_int createSubDevices(
2138  const cl_device_partition_property_ext * properties,
2139  vector<Device>* devices)
2140  {
2141  typedef CL_API_ENTRY cl_int
2142  ( CL_API_CALL * PFN_clCreateSubDevicesEXT)(
2143  cl_device_id /*in_device*/,
2144  const cl_device_partition_property_ext * /* properties */,
2145  cl_uint /*num_entries*/,
2146  cl_device_id * /*out_devices*/,
2147  cl_uint * /*num_devices*/ ) CL_EXT_SUFFIX__VERSION_1_1;
2148 
2149  static PFN_clCreateSubDevicesEXT pfn_clCreateSubDevicesEXT = NULL;
2150  CL_HPP_INIT_CL_EXT_FCN_PTR_(clCreateSubDevicesEXT);
2151 
2152  cl_uint n = 0;
2153  cl_int err = pfn_clCreateSubDevicesEXT(object_, properties, 0, NULL, &n);
2154  if (err != CL_SUCCESS) {
2155  return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
2156  }
2157 
2158  vector<cl_device_id> ids(n);
2159  err = pfn_clCreateSubDevicesEXT(object_, properties, n, ids.data(), NULL);
2160  if (err != CL_SUCCESS) {
2161  return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
2162  }
2163  // Cannot trivially assign because we need to capture intermediates
2164  // with safe construction
2165  if (devices) {
2166  devices->resize(ids.size());
2167 
2168  // Assign to param, constructing with retain behaviour
2169  // to correctly capture each underlying CL object
2170  for (size_type i = 0; i < ids.size(); i++) {
2171  // We do not need to retain because this device is being created
2172  // by the runtime
2173  (*devices)[i] = Device(ids[i], false);
2174  }
2175  }
2176  return CL_SUCCESS;
2177  }
2178 #endif // defined(CL_HPP_USE_CL_DEVICE_FISSION)
2179 };
2180 
2181 CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag Device::default_initialized_;
2182 CL_HPP_DEFINE_STATIC_MEMBER_ Device Device::default_;
2183 CL_HPP_DEFINE_STATIC_MEMBER_ cl_int Device::default_error_ = CL_SUCCESS;
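 /* Usage sketch: the default device is created lazily on first use; getInfo<>
  * deduces its return type from the parameter token.
  *
  *   cl_int err;
  *   cl::Device dev = cl::Device::getDefault(&err);
  *   if (err == CL_SUCCESS) {
  *       auto name  = dev.getInfo<CL_DEVICE_NAME>();              // cl::string
  *       auto units = dev.getInfo<CL_DEVICE_MAX_COMPUTE_UNITS>(); // cl_uint
  *   }
  */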
2184 
2192 class Platform : public detail::Wrapper<cl_platform_id>
2193 {
2194 private:
2195  static std::once_flag default_initialized_;
2196  static Platform default_;
2197  static cl_int default_error_;
2198 
2204  static void makeDefault() {
2205  /* Throwing an exception from a call_once invocation does not do
2206  * what we wish, so we catch it and save the error.
2207  */
2208 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2209  try
2210 #endif
2211  {
2212  // If a default wasn't provided, generate one
2213  // Otherwise use the one that was set
2214  cl_uint n = 0;
2215 
2216  cl_int err = ::clGetPlatformIDs(0, NULL, &n);
2217  if (err != CL_SUCCESS) {
2218  default_error_ = err;
2219  return;
2220  }
2221  if (n == 0) {
2222  default_error_ = CL_INVALID_PLATFORM;
2223  return;
2224  }
2225 
2226  vector<cl_platform_id> ids(n);
2227  err = ::clGetPlatformIDs(n, ids.data(), NULL);
2228  if (err != CL_SUCCESS) {
2229  default_error_ = err;
2230  return;
2231  }
2232 
2233  default_ = Platform(ids[0]);
2234  }
2235 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2236  catch (cl::Error &e) {
2237  default_error_ = e.err();
2238  }
2239 #endif
2240  }
2241 
2247  static void makeDefaultProvided(const Platform &p) {
2248  default_ = p;
2249  }
2250 
2251 public:
2252 #ifdef CL_HPP_UNIT_TEST_ENABLE
2253 
2259  static void unitTestClearDefault() {
2260  default_ = Platform();
2261  }
2262 #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
2263 
2265  Platform() : detail::Wrapper<cl_type>() { }
2266 
2274  explicit Platform(const cl_platform_id &platform, bool retainObject = false) :
2275  detail::Wrapper<cl_type>(platform, retainObject) { }
2276 
2281  Platform& operator = (const cl_platform_id& rhs)
2282  {
2283  detail::Wrapper<cl_type>::operator=(rhs);
2284  return *this;
2285  }
2286 
2287  static Platform getDefault(
2288  cl_int *errResult = NULL)
2289  {
2290  std::call_once(default_initialized_, makeDefault);
2291  detail::errHandler(default_error_);
2292  if (errResult != NULL) {
2293  *errResult = default_error_;
2294  }
2295  return default_;
2296  }
2297 
2305  static Platform setDefault(const Platform &default_platform)
2306  {
2307  std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_platform));
2308  detail::errHandler(default_error_);
2309  return default_;
2310  }
2311 
2313  cl_int getInfo(cl_platform_info name, string* param) const
2314  {
2315  return detail::errHandler(
2316  detail::getInfo(&::clGetPlatformInfo, object_, name, param),
2317  __GET_PLATFORM_INFO_ERR);
2318  }
2319 
2321  template <cl_int name> typename
2322  detail::param_traits<detail::cl_platform_info, name>::param_type
2323  getInfo(cl_int* err = NULL) const
2324  {
2325  typename detail::param_traits<
2326  detail::cl_platform_info, name>::param_type param;
2327  cl_int result = getInfo(name, &param);
2328  if (err != NULL) {
2329  *err = result;
2330  }
2331  return param;
2332  }
2333 
2338  cl_int getDevices(
2339  cl_device_type type,
2340  vector<Device>* devices) const
2341  {
2342  cl_uint n = 0;
2343  if( devices == NULL ) {
2344  return detail::errHandler(CL_INVALID_ARG_VALUE, __GET_DEVICE_IDS_ERR);
2345  }
2346  cl_int err = ::clGetDeviceIDs(object_, type, 0, NULL, &n);
2347  if (err != CL_SUCCESS) {
2348  return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
2349  }
2350 
2351  vector<cl_device_id> ids(n);
2352  err = ::clGetDeviceIDs(object_, type, n, ids.data(), NULL);
2353  if (err != CL_SUCCESS) {
2354  return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
2355  }
2356 
2357  // Cannot trivially assign because we need to capture intermediates
2358  // with safe construction
2359  // We must retain things we obtain from the API to avoid releasing
2360  // API-owned objects.
2361  if (devices) {
2362  devices->resize(ids.size());
2363 
2364  // Assign to param, constructing with retain behaviour
2365  // to correctly capture each underlying CL object
2366  for (size_type i = 0; i < ids.size(); i++) {
2367  (*devices)[i] = Device(ids[i], true);
2368  }
2369  }
2370  return CL_SUCCESS;
2371  }
2372 
2373 #if defined(CL_HPP_USE_DX_INTEROP)
2374 
2397  cl_int getDevices(
2398  cl_d3d10_device_source_khr d3d_device_source,
2399  void * d3d_object,
2400  cl_d3d10_device_set_khr d3d_device_set,
2401  vector<Device>* devices) const
2402  {
2403  typedef CL_API_ENTRY cl_int (CL_API_CALL *PFN_clGetDeviceIDsFromD3D10KHR)(
2404  cl_platform_id platform,
2405  cl_d3d10_device_source_khr d3d_device_source,
2406  void * d3d_object,
2407  cl_d3d10_device_set_khr d3d_device_set,
2408  cl_uint num_entries,
2409  cl_device_id * devices,
2410  cl_uint* num_devices);
2411 
2412  if( devices == NULL ) {
2413  return detail::errHandler(CL_INVALID_ARG_VALUE, __GET_DEVICE_IDS_ERR);
2414  }
2415 
2416  static PFN_clGetDeviceIDsFromD3D10KHR pfn_clGetDeviceIDsFromD3D10KHR = NULL;
2417  CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(object_, clGetDeviceIDsFromD3D10KHR);
2418 
2419  cl_uint n = 0;
2420  cl_int err = pfn_clGetDeviceIDsFromD3D10KHR(
2421  object_,
2422  d3d_device_source,
2423  d3d_object,
2424  d3d_device_set,
2425  0,
2426  NULL,
2427  &n);
2428  if (err != CL_SUCCESS) {
2429  return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
2430  }
2431 
2432  vector<cl_device_id> ids(n);
2433  err = pfn_clGetDeviceIDsFromD3D10KHR(
2434  object_,
2435  d3d_device_source,
2436  d3d_object,
2437  d3d_device_set,
2438  n,
2439  ids.data(),
2440  NULL);
2441  if (err != CL_SUCCESS) {
2442  return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
2443  }
2444 
2445  // Cannot trivially assign because we need to capture intermediates
2446  // with safe construction
2447  // We must retain things we obtain from the API to avoid releasing
2448  // API-owned objects.
2449  if (devices) {
2450  devices->resize(ids.size());
2451 
2452  // Assign to param, constructing with retain behaviour
2453  // to correctly capture each underlying CL object
2454  for (size_type i = 0; i < ids.size(); i++) {
2455  (*devices)[i] = Device(ids[i], true);
2456  }
2457  }
2458  return CL_SUCCESS;
2459  }
2460 #endif
2461 
2466  static cl_int get(
2467  vector<Platform>* platforms)
2468  {
2469  cl_uint n = 0;
2470 
2471  if( platforms == NULL ) {
2472  return detail::errHandler(CL_INVALID_ARG_VALUE, __GET_PLATFORM_IDS_ERR);
2473  }
2474 
2475  cl_int err = ::clGetPlatformIDs(0, NULL, &n);
2476  if (err != CL_SUCCESS) {
2477  return detail::errHandler(err, __GET_PLATFORM_IDS_ERR);
2478  }
2479 
2480  vector<cl_platform_id> ids(n);
2481  err = ::clGetPlatformIDs(n, ids.data(), NULL);
2482  if (err != CL_SUCCESS) {
2483  return detail::errHandler(err, __GET_PLATFORM_IDS_ERR);
2484  }
2485 
2486  if (platforms) {
2487  platforms->resize(ids.size());
2488 
2489  // Platforms don't reference count
2490  for (size_type i = 0; i < ids.size(); i++) {
2491  (*platforms)[i] = Platform(ids[i]);
2492  }
2493  }
2494  return CL_SUCCESS;
2495  }
2496 
2501  static cl_int get(
2502  Platform * platform)
2503  {
2504  cl_int err;
2505  Platform default_platform = Platform::getDefault(&err);
2506  if (platform) {
2507  *platform = default_platform;
2508  }
2509  return err;
2510  }
2511 
2520  static Platform get(
2521  cl_int * errResult = NULL)
2522  {
2523  cl_int err;
2524  Platform default_platform = Platform::getDefault(&err);
2525  if (errResult) {
2526  *errResult = err;
2527  }
2528  return default_platform;
2529  }
2530 
2531 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
2532  cl_int
2534  unloadCompiler()
2535  {
2536  return ::clUnloadPlatformCompiler(object_);
2537  }
2538 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
2539 }; // class Platform
2540 
2541 CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag Platform::default_initialized_;
2542 CL_HPP_DEFINE_STATIC_MEMBER_ Platform Platform::default_;
2543 CL_HPP_DEFINE_STATIC_MEMBER_ cl_int Platform::default_error_ = CL_SUCCESS;
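 /* Usage sketch: enumerate all platforms, then query each one for GPU devices;
  * errors are returned as cl_int codes (or thrown when CL_HPP_ENABLE_EXCEPTIONS
  * is defined).
  *
  *   std::vector<cl::Platform> platforms;
  *   cl::Platform::get(&platforms);
  *   for (auto &p : platforms) {
  *       std::vector<cl::Device> gpus;
  *       if (p.getDevices(CL_DEVICE_TYPE_GPU, &gpus) == CL_SUCCESS && !gpus.empty()) {
  *           // use gpus[0] ...
  *       }
  *   }
  */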
2544 
2545 
2549 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
2550 
2554 inline CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_int
2555 UnloadCompiler() CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED;
2556 inline cl_int
2557  UnloadCompiler()
2558  {
2559  return ::clUnloadCompiler();
2560 }
2561 #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
2562 
2571 class Context
2572  : public detail::Wrapper<cl_context>
2573 {
2574 private:
2575  static std::once_flag default_initialized_;
2576  static Context default_;
2577  static cl_int default_error_;
2578 
2584  static void makeDefault() {
2585  /* Throwing an exception from a call_once invocation does not do
2586  * what we wish, so we catch it and save the error.
2587  */
2588 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2589  try
2590 #endif
2591  {
2592 #if !defined(__APPLE__) && !defined(__MACOS)
2593  const Platform &p = Platform::getDefault();
2594  cl_platform_id defaultPlatform = p();
2595  cl_context_properties properties[3] = {
2596  CL_CONTEXT_PLATFORM, (cl_context_properties)defaultPlatform, 0
2597  };
2598 #else // #if !defined(__APPLE__) && !defined(__MACOS)
2599  cl_context_properties *properties = nullptr;
2600 #endif // #if !defined(__APPLE__) && !defined(__MACOS)
2601 
2602  default_ = Context(
2603  CL_DEVICE_TYPE_DEFAULT,
2604  properties,
2605  NULL,
2606  NULL,
2607  &default_error_);
2608  }
2609 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2610  catch (cl::Error &e) {
2611  default_error_ = e.err();
2612  }
2613 #endif
2614  }
2615 
2616 
2622  static void makeDefaultProvided(const Context &c) {
2623  default_ = c;
2624  }
2625 
2626 public:
2627 #ifdef CL_HPP_UNIT_TEST_ENABLE
2628 
2634  static void unitTestClearDefault() {
2635  default_ = Context();
2636  }
2637 #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
2638 
2643  Context(
2644  const vector<Device>& devices,
2645  cl_context_properties* properties = NULL,
2646  void (CL_CALLBACK * notifyFptr)(
2647  const char *,
2648  const void *,
2649  size_type,
2650  void *) = NULL,
2651  void* data = NULL,
2652  cl_int* err = NULL)
2653  {
2654  cl_int error;
2655 
2656  size_type numDevices = devices.size();
2657  vector<cl_device_id> deviceIDs(numDevices);
2658 
2659  for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
2660  deviceIDs[deviceIndex] = (devices[deviceIndex])();
2661  }
2662 
2663  object_ = ::clCreateContext(
2664  properties, (cl_uint) numDevices,
2665  deviceIDs.data(),
2666  notifyFptr, data, &error);
2667 
2668  detail::errHandler(error, __CREATE_CONTEXT_ERR);
2669  if (err != NULL) {
2670  *err = error;
2671  }
2672  }
2673 
2674  Context(
2675  const Device& device,
2676  cl_context_properties* properties = NULL,
2677  void (CL_CALLBACK * notifyFptr)(
2678  const char *,
2679  const void *,
2680  size_type,
2681  void *) = NULL,
2682  void* data = NULL,
2683  cl_int* err = NULL)
2684  {
2685  cl_int error;
2686 
2687  cl_device_id deviceID = device();
2688 
2689  object_ = ::clCreateContext(
2690  properties, 1,
2691  &deviceID,
2692  notifyFptr, data, &error);
2693 
2694  detail::errHandler(error, __CREATE_CONTEXT_ERR);
2695  if (err != NULL) {
2696  *err = error;
2697  }
2698  }
2699 
2704  Context(
2705  cl_device_type type,
2706  cl_context_properties* properties = NULL,
2707  void (CL_CALLBACK * notifyFptr)(
2708  const char *,
2709  const void *,
2710  size_type,
2711  void *) = NULL,
2712  void* data = NULL,
2713  cl_int* err = NULL)
2714  {
2715  cl_int error;
2716 
2717 #if !defined(__APPLE__) && !defined(__MACOS)
2718  cl_context_properties prop[4] = {CL_CONTEXT_PLATFORM, 0, 0, 0 };
2719 
2720  if (properties == NULL) {
2721  // Get a valid platform ID as we cannot send in a blank one
2722  vector<Platform> platforms;
2723  error = Platform::get(&platforms);
2724  if (error != CL_SUCCESS) {
2725  detail::errHandler(error, __CREATE_CONTEXT_FROM_TYPE_ERR);
2726  if (err != NULL) {
2727  *err = error;
2728  }
2729  return;
2730  }
2731 
2732  // Check the platforms we found for a device of our specified type
2733  cl_context_properties platform_id = 0;
2734  for (unsigned int i = 0; i < platforms.size(); i++) {
2735 
2736  vector<Device> devices;
2737 
2738 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2739  try {
2740 #endif
2741 
2742  error = platforms[i].getDevices(type, &devices);
2743 
2744 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2745  } catch (Error &) {}
2746  // Catch here (when exceptions are enabled) so we don't abort if the first platform has no devices of the requested type
2747  // We do error checking next anyway, and can throw there if needed
2748 #endif
2749 
2750  // Only squash CL_SUCCESS and CL_DEVICE_NOT_FOUND
2751  if (error != CL_SUCCESS && error != CL_DEVICE_NOT_FOUND) {
2752  detail::errHandler(error, __CREATE_CONTEXT_FROM_TYPE_ERR);
2753  if (err != NULL) {
2754  *err = error;
2755  }
2756  }
2757 
2758  if (devices.size() > 0) {
2759  platform_id = (cl_context_properties)platforms[i]();
2760  break;
2761  }
2762  }
2763 
2764  if (platform_id == 0) {
2765  detail::errHandler(CL_DEVICE_NOT_FOUND, __CREATE_CONTEXT_FROM_TYPE_ERR);
2766  if (err != NULL) {
2767  *err = CL_DEVICE_NOT_FOUND;
2768  }
2769  return;
2770  }
2771 
2772  prop[1] = platform_id;
2773  properties = &prop[0];
2774  }
2775 #endif
2776  object_ = ::clCreateContextFromType(
2777  properties, type, notifyFptr, data, &error);
2778 
2779  detail::errHandler(error, __CREATE_CONTEXT_FROM_TYPE_ERR);
2780  if (err != NULL) {
2781  *err = error;
2782  }
2783  }
2784 
2788  Context(const Context& ctx) : detail::Wrapper<cl_type>(ctx) {}
2789 
2793  Context& operator = (const Context &ctx)
2794  {
2795  detail::Wrapper<cl_type>::operator=(ctx);
2796  return *this;
2797  }
2798 
2802  Context(Context&& ctx) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(ctx)) {}
2803 
2807  Context& operator = (Context &&ctx)
2808  {
2809  detail::Wrapper<cl_type>::operator=(std::move(ctx));
2810  return *this;
2811  }
2812 
2813 
2818  static Context getDefault(cl_int * err = NULL)
2819  {
2820  std::call_once(default_initialized_, makeDefault);
2821  detail::errHandler(default_error_);
2822  if (err != NULL) {
2823  *err = default_error_;
2824  }
2825  return default_;
2826  }
2827 
2835  static Context setDefault(const Context &default_context)
2836  {
2837  std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_context));
2838  detail::errHandler(default_error_);
2839  return default_;
2840  }
2841 
2843  Context() : detail::Wrapper<cl_type>() { }
2844 
2850  explicit Context(const cl_context& context, bool retainObject = false) :
2851  detail::Wrapper<cl_type>(context, retainObject) { }
2852 
2858  Context& operator = (const cl_context& rhs)
2859  {
2860  detail::Wrapper<cl_type>::operator=(rhs);
2861  return *this;
2862  }
2863 
2865  template <typename T>
2866  cl_int getInfo(cl_context_info name, T* param) const
2867  {
2868  return detail::errHandler(
2869  detail::getInfo(&::clGetContextInfo, object_, name, param),
2870  __GET_CONTEXT_INFO_ERR);
2871  }
2872 
2874  template <cl_int name> typename
2875  detail::param_traits<detail::cl_context_info, name>::param_type
2876  getInfo(cl_int* err = NULL) const
2877  {
2878  typename detail::param_traits<
2879  detail::cl_context_info, name>::param_type param;
2880  cl_int result = getInfo(name, &param);
2881  if (err != NULL) {
2882  *err = result;
2883  }
2884  return param;
2885  }
2886 
2891  cl_int getSupportedImageFormats(
2892  cl_mem_flags flags,
2893  cl_mem_object_type type,
2894  vector<ImageFormat>* formats) const
2895  {
2896  cl_uint numEntries;
2897 
2898  if (!formats) {
2899  return CL_SUCCESS;
2900  }
2901 
2902  cl_int err = ::clGetSupportedImageFormats(
2903  object_,
2904  flags,
2905  type,
2906  0,
2907  NULL,
2908  &numEntries);
2909  if (err != CL_SUCCESS) {
2910  return detail::errHandler(err, __GET_SUPPORTED_IMAGE_FORMATS_ERR);
2911  }
2912 
2913  if (numEntries > 0) {
2914  vector<ImageFormat> value(numEntries);
2915  err = ::clGetSupportedImageFormats(
2916  object_,
2917  flags,
2918  type,
2919  numEntries,
2920  (cl_image_format*)value.data(),
2921  NULL);
2922  if (err != CL_SUCCESS) {
2923  return detail::errHandler(err, __GET_SUPPORTED_IMAGE_FORMATS_ERR);
2924  }
2925 
2926  formats->assign(begin(value), end(value));
2927  }
2928  else {
2929  // If no values are being returned, ensure an empty vector comes back
2930  formats->clear();
2931  }
2932 
2933  return CL_SUCCESS;
2934  }
2935 };
2936 
2937 inline void Device::makeDefault()
2938 {
2939  /* Throwing an exception from a call_once invocation does not do
2940  * what we wish, so we catch it and save the error.
2941  */
2942 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2943  try
2944 #endif
2945  {
2946  cl_int error = 0;
2947 
2948  Context context = Context::getDefault(&error);
2949  detail::errHandler(error, __CREATE_CONTEXT_ERR);
2950 
2951  if (error != CL_SUCCESS) {
2952  default_error_ = error;
2953  }
2954  else {
2955  default_ = context.getInfo<CL_CONTEXT_DEVICES>()[0];
2956  default_error_ = CL_SUCCESS;
2957  }
2958  }
2959 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2960  catch (cl::Error &e) {
2961  default_error_ = e.err();
2962  }
2963 #endif
2964 }
2965 
2966 CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag Context::default_initialized_;
2967 CL_HPP_DEFINE_STATIC_MEMBER_ Context Context::default_;
2968 CL_HPP_DEFINE_STATIC_MEMBER_ cl_int Context::default_error_ = CL_SUCCESS;
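 /* Usage sketch: build a context over all GPU devices of the first platform
  * that has any, then ask which 2D image formats it supports for read-only use.
  *
  *   cl_int err;
  *   cl::Context ctx(CL_DEVICE_TYPE_GPU, nullptr, nullptr, nullptr, &err);
  *   std::vector<cl::ImageFormat> formats;
  *   ctx.getSupportedImageFormats(CL_MEM_READ_ONLY, CL_MEM_OBJECT_IMAGE2D, &formats);
  */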
2969 
2978 class Event : public detail::Wrapper<cl_event>
2979 {
2980 public:
2982  Event() : detail::Wrapper<cl_type>() { }
2983 
2992  explicit Event(const cl_event& event, bool retainObject = false) :
2993  detail::Wrapper<cl_type>(event, retainObject) { }
2994 
3000  Event& operator = (const cl_event& rhs)
3001  {
3002  detail::Wrapper<cl_type>::operator=(rhs);
3003  return *this;
3004  }
3005 
3007  template <typename T>
3008  cl_int getInfo(cl_event_info name, T* param) const
3009  {
3010  return detail::errHandler(
3011  detail::getInfo(&::clGetEventInfo, object_, name, param),
3012  __GET_EVENT_INFO_ERR);
3013  }
3014 
3016  template <cl_int name> typename
3017  detail::param_traits<detail::cl_event_info, name>::param_type
3018  getInfo(cl_int* err = NULL) const
3019  {
3020  typename detail::param_traits<
3021  detail::cl_event_info, name>::param_type param;
3022  cl_int result = getInfo(name, &param);
3023  if (err != NULL) {
3024  *err = result;
3025  }
3026  return param;
3027  }
3028 
3030  template <typename T>
3031  cl_int getProfilingInfo(cl_profiling_info name, T* param) const
3032  {
3033  return detail::errHandler(detail::getInfo(
3034  &::clGetEventProfilingInfo, object_, name, param),
3035  __GET_EVENT_PROFILE_INFO_ERR);
3036  }
3037 
3039  template <cl_int name> typename
3040  detail::param_traits<detail::cl_profiling_info, name>::param_type
3041  getProfilingInfo(cl_int* err = NULL) const
3042  {
3043  typename detail::param_traits<
3044  detail::cl_profiling_info, name>::param_type param;
3045  cl_int result = getProfilingInfo(name, &param);
3046  if (err != NULL) {
3047  *err = result;
3048  }
3049  return param;
3050  }
3051 
3056  cl_int wait() const
3057  {
3058  return detail::errHandler(
3059  ::clWaitForEvents(1, &object_),
3060  __WAIT_FOR_EVENTS_ERR);
3061  }
3062 
3063 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
3064 
3068  cl_int setCallback(
3069  cl_int type,
3070  void (CL_CALLBACK * pfn_notify)(cl_event, cl_int, void *),
3071  void * user_data = NULL)
3072  {
3073  return detail::errHandler(
3074  ::clSetEventCallback(
3075  object_,
3076  type,
3077  pfn_notify,
3078  user_data),
3079  __SET_EVENT_CALLBACK_ERR);
3080  }
3081 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
3082 
3087  static cl_int
3088  waitForEvents(const vector<Event>& events)
3089  {
3090  return detail::errHandler(
3091  ::clWaitForEvents(
3092  (cl_uint) events.size(), (events.size() > 0) ? (cl_event*)&events.front() : NULL),
3093  __WAIT_FOR_EVENTS_ERR);
3094  }
3095 };
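 /* Usage sketch (assuming "ev" came from an enqueue call on a queue created
  * with CL_QUEUE_PROFILING_ENABLE):
  *
  *   ev.wait();                                              // block until complete
  *   cl_ulong start = ev.getProfilingInfo<CL_PROFILING_COMMAND_START>();
  *   cl_ulong end   = ev.getProfilingInfo<CL_PROFILING_COMMAND_END>();
  *   double ms = (end - start) * 1e-6;                       // profiling ticks are nanoseconds
  */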
3096 
3097 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
3098 
3102 class UserEvent : public Event
3103 {
3104 public:
3109  UserEvent(
3110  const Context& context,
3111  cl_int * err = NULL)
3112  {
3113  cl_int error;
3114  object_ = ::clCreateUserEvent(
3115  context(),
3116  &error);
3117 
3118  detail::errHandler(error, __CREATE_USER_EVENT_ERR);
3119  if (err != NULL) {
3120  *err = error;
3121  }
3122  }
3123 
3125  UserEvent() : Event() { }
3126 
3131  cl_int setStatus(cl_int status)
3132  {
3133  return detail::errHandler(
3134  ::clSetUserEventStatus(object_,status),
3135  __SET_USER_EVENT_STATUS_ERR);
3136  }
3137 };
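 /* Usage sketch (assuming an existing cl::Context "ctx"): a user event can hold
  * back enqueued commands until the host releases it.
  *
  *   cl::UserEvent gate(ctx);
  *   // ... enqueue commands that include "gate" in their wait lists ...
  *   gate.setStatus(CL_COMPLETE);   // allow the gated commands to run
  */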
3138 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
3139 
3144 inline static cl_int
3145 WaitForEvents(const vector<Event>& events)
3146 {
3147  return detail::errHandler(
3148  ::clWaitForEvents(
3149  (cl_uint) events.size(), (events.size() > 0) ? (cl_event*)&events.front() : NULL),
3150  __WAIT_FOR_EVENTS_ERR);
3151 }
3152 
3161 class Memory : public detail::Wrapper<cl_mem>
3162 {
3163 public:
3165  Memory() : detail::Wrapper<cl_type>() { }
3166 
3178  explicit Memory(const cl_mem& memory, bool retainObject) :
3179  detail::Wrapper<cl_type>(memory, retainObject) { }
3180 
3186  Memory& operator = (const cl_mem& rhs)
3187  {
3188  detail::Wrapper<cl_type>::operator=(rhs);
3189  return *this;
3190  }
3191 
3195  Memory(const Memory& mem) : detail::Wrapper<cl_type>(mem) {}
3196 
3200  Memory& operator = (const Memory &mem)
3201  {
3202  detail::Wrapper<cl_type>::operator=(mem);
3203  return *this;
3204  }
3205 
3209  Memory(Memory&& mem) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(mem)) {}
3210 
3214  Memory& operator = (Memory &&mem)
3215  {
3216  detail::Wrapper<cl_type>::operator=(std::move(mem));
3217  return *this;
3218  }
3219 
3220 
3222  template <typename T>
3223  cl_int getInfo(cl_mem_info name, T* param) const
3224  {
3225  return detail::errHandler(
3226  detail::getInfo(&::clGetMemObjectInfo, object_, name, param),
3227  __GET_MEM_OBJECT_INFO_ERR);
3228  }
3229 
3231  template <cl_int name> typename
3232  detail::param_traits<detail::cl_mem_info, name>::param_type
3233  getInfo(cl_int* err = NULL) const
3234  {
3235  typename detail::param_traits<
3236  detail::cl_mem_info, name>::param_type param;
3237  cl_int result = getInfo(name, &param);
3238  if (err != NULL) {
3239  *err = result;
3240  }
3241  return param;
3242  }
3243 
3244 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
3245 
3258  cl_int setDestructorCallback(
3259  void (CL_CALLBACK * pfn_notify)(cl_mem, void *),
3260  void * user_data = NULL)
3261  {
3262  return detail::errHandler(
3263  ::clSetMemObjectDestructorCallback(
3264  object_,
3265  pfn_notify,
3266  user_data),
3267  __SET_MEM_OBJECT_DESTRUCTOR_CALLBACK_ERR);
3268  }
3269 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
3270 
3271 };
3272 
3273 // Pre-declare copy functions
3274 class Buffer;
3275 template< typename IteratorType >
3276 cl_int copy( IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer );
3277 template< typename IteratorType >
3278 cl_int copy( const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator );
3279 template< typename IteratorType >
3280 cl_int copy( const CommandQueue &queue, IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer );
3281 template< typename IteratorType >
3282 cl_int copy( const CommandQueue &queue, const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator );
3283 
3284 
3285 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
3286 namespace detail
3287 {
3288  class SVMTraitNull
3289  {
3290  public:
3291  static cl_svm_mem_flags getSVMMemFlags()
3292  {
3293  return 0;
3294  }
3295  };
3296 } // namespace detail
3297 
3298 template<class Trait = detail::SVMTraitNull>
3299  class SVMTraitReadWrite
3300  {
3301 public:
3302  static cl_svm_mem_flags getSVMMemFlags()
3303  {
3304  return CL_MEM_READ_WRITE |
3305  Trait::getSVMMemFlags();
3306  }
3307 };
3308 
3309 template<class Trait = detail::SVMTraitNull>
3310  class SVMTraitReadOnly
3311  {
3312 public:
3313  static cl_svm_mem_flags getSVMMemFlags()
3314  {
3315  return CL_MEM_READ_ONLY |
3316  Trait::getSVMMemFlags();
3317  }
3318 };
3319 
3320 template<class Trait = detail::SVMTraitNull>
3321  class SVMTraitWriteOnly
3322  {
3323 public:
3324  static cl_svm_mem_flags getSVMMemFlags()
3325  {
3326  return CL_MEM_WRITE_ONLY |
3327  Trait::getSVMMemFlags();
3328  }
3329 };
3330 
3331 template<class Trait = SVMTraitReadWrite<>>
3332  class SVMTraitCoarse
3333  {
3334 public:
3335  static cl_svm_mem_flags getSVMMemFlags()
3336  {
3337  return Trait::getSVMMemFlags();
3338  }
3339 };
3340 
3341 template<class Trait = SVMTraitReadWrite<>>
3342  class SVMTraitFine
3343  {
3344 public:
3345  static cl_svm_mem_flags getSVMMemFlags()
3346  {
3347  return CL_MEM_SVM_FINE_GRAIN_BUFFER |
3348  Trait::getSVMMemFlags();
3349  }
3350 };
3351 
3352 template<class Trait = SVMTraitReadWrite<>>
3353  class SVMTraitAtomic
3354  {
3355 public:
3356  static cl_svm_mem_flags getSVMMemFlags()
3357  {
3358  return
3359  CL_MEM_SVM_FINE_GRAIN_BUFFER |
3360  CL_MEM_SVM_ATOMICS |
3361  Trait::getSVMMemFlags();
3362  }
3363 };
3364 
3365 // Pre-declare SVM map function
3366 template<typename T>
3367 inline cl_int enqueueMapSVM(
3368  T* ptr,
3369  cl_bool blocking,
3370  cl_map_flags flags,
3371  size_type size,
3372  const vector<Event>* events = NULL,
3373  Event* event = NULL);
3374 
3386 template<typename T, class SVMTrait>
3387  class SVMAllocator {
3388  private:
3389  Context context_;
3390 
3391 public:
3392  typedef T value_type;
3393  typedef value_type* pointer;
3394  typedef const value_type* const_pointer;
3395  typedef value_type& reference;
3396  typedef const value_type& const_reference;
3397  typedef std::size_t size_type;
3398  typedef std::ptrdiff_t difference_type;
3399 
3400  template<typename U>
3401  struct rebind
3402  {
3403  typedef SVMAllocator<U, SVMTrait> other;
3404  };
3405 
3406  template<typename U, typename V>
3407  friend class SVMAllocator;
3408 
3409  SVMAllocator() :
3410  context_(Context::getDefault())
3411  {
3412  }
3413 
3414  explicit SVMAllocator(cl::Context context) :
3415  context_(context)
3416  {
3417  }
3418 
3419 
3420  SVMAllocator(const SVMAllocator &other) :
3421  context_(other.context_)
3422  {
3423  }
3424 
3425  template<typename U>
3426  SVMAllocator(const SVMAllocator<U, SVMTrait> &other) :
3427  context_(other.context_)
3428  {
3429  }
3430 
3431  ~SVMAllocator()
3432  {
3433  }
3434 
3435  pointer address(reference r) CL_HPP_NOEXCEPT_
3436  {
3437  return std::addressof(r);
3438  }
3439 
3440  const_pointer address(const_reference r) CL_HPP_NOEXCEPT_
3441  {
3442  return std::addressof(r);
3443  }
3444 
3451  pointer allocate(
3452  size_type size,
3453  typename cl::SVMAllocator<void, SVMTrait>::const_pointer = 0)
3454  {
3455  // Allocate memory with default alignment matching the size of the type
3456  void* voidPointer =
3457  clSVMAlloc(
3458  context_(),
3459  SVMTrait::getSVMMemFlags(),
3460  size*sizeof(T),
3461  0);
3462  pointer retValue = reinterpret_cast<pointer>(
3463  voidPointer);
3464 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
3465  if (!retValue) {
3466  std::bad_alloc excep;
3467  throw excep;
3468  }
3469 #endif // #if defined(CL_HPP_ENABLE_EXCEPTIONS)
3470 
3471  // If allocation was coarse-grained then map it
3472  if (!(SVMTrait::getSVMMemFlags() & CL_MEM_SVM_FINE_GRAIN_BUFFER)) {
3473  cl_int err = enqueueMapSVM(retValue, CL_TRUE, CL_MAP_READ | CL_MAP_WRITE, size*sizeof(T));
3474  if (err != CL_SUCCESS) {
3475  std::bad_alloc excep;
3476  throw excep;
3477  }
3478  }
3479 
3480  // If exceptions disabled, return null pointer from allocator
3481  return retValue;
3482  }
3483 
3484  void deallocate(pointer p, size_type)
3485  {
3486  clSVMFree(context_(), p);
3487  }
3488 
3493  size_type max_size() const CL_HPP_NOEXCEPT_
3494  {
3495  size_type maxSize = std::numeric_limits<size_type>::max() / sizeof(T);
3496 
3497  for (const Device &d : context_.getInfo<CL_CONTEXT_DEVICES>()) {
3498  maxSize = std::min(
3499  maxSize,
3500  static_cast<size_type>(d.getInfo<CL_DEVICE_MAX_MEM_ALLOC_SIZE>()));
3501  }
3502 
3503  return maxSize;
3504  }
3505 
3506  template< class U, class... Args >
3507  void construct(U* p, Args&&... args)
3508  {
3509  new(p)T(std::forward<Args>(args)...);
3510  }
3511 
3512  template< class U >
3513  void destroy(U* p)
3514  {
3515  p->~U();
3516  }
3517 
3521  inline bool operator==(SVMAllocator const& rhs)
3522  {
3523  return (context_==rhs.context_);
3524  }
3525 
3526  inline bool operator!=(SVMAllocator const& a)
3527  {
3528  return !operator==(a);
3529  }
3530 }; // class SVMAllocator
3531 
3532 
3533 template<class SVMTrait>
3534 class SVMAllocator<void, SVMTrait> {
3535 public:
3536  typedef void value_type;
3537  typedef value_type* pointer;
3538  typedef const value_type* const_pointer;
3539 
3540  template<typename U>
3541  struct rebind
3542  {
3543  typedef SVMAllocator<U, SVMTrait> other;
3544  };
3545 
3546  template<typename U, typename V>
3547  friend class SVMAllocator;
3548 };
3549 
3550 #if !defined(CL_HPP_NO_STD_UNIQUE_PTR)
3551 namespace detail
3552 {
3553  template<class Alloc>
3554  class Deleter {
3555  private:
3556  Alloc alloc_;
3557  size_type copies_;
3558 
3559  public:
3560  typedef typename std::allocator_traits<Alloc>::pointer pointer;
3561 
3562  Deleter(const Alloc &alloc, size_type copies) : alloc_{ alloc }, copies_{ copies }
3563  {
3564  }
3565 
3566  void operator()(pointer ptr) const {
3567  Alloc tmpAlloc{ alloc_ };
3568  std::allocator_traits<Alloc>::destroy(tmpAlloc, std::addressof(*ptr));
3569  std::allocator_traits<Alloc>::deallocate(tmpAlloc, ptr, copies_);
3570  }
3571  };
3572 } // namespace detail
3573 
3580 template <class T, class Alloc, class... Args>
3581 cl::pointer<T, detail::Deleter<Alloc>> allocate_pointer(const Alloc &alloc_, Args&&... args)
3582 {
3583  Alloc alloc(alloc_);
3584  static const size_type copies = 1;
3585 
3586  // Allocate and construct in separate steps so that a failed
3587  // construction can be cleaned up without invoking the deleter
3588 
3589  T* tmp = std::allocator_traits<Alloc>::allocate(alloc, copies);
3590  if (!tmp) {
3591  std::bad_alloc excep;
3592  throw excep;
3593  }
3594  try {
3595  std::allocator_traits<Alloc>::construct(
3596  alloc,
3597  std::addressof(*tmp),
3598  std::forward<Args>(args)...);
3599 
3600  return cl::pointer<T, detail::Deleter<Alloc>>(tmp, detail::Deleter<Alloc>{alloc, copies});
3601  }
3602  catch (std::bad_alloc &)
3603  {
3604  std::allocator_traits<Alloc>::deallocate(alloc, tmp, copies);
3605  throw;
3606  }
3607 }
3608 
3609 template< class T, class SVMTrait, class... Args >
3610 cl::pointer<T, detail::Deleter<SVMAllocator<T, SVMTrait>>> allocate_svm(Args... args)
3611 {
3612  SVMAllocator<T, SVMTrait> alloc;
3613  return cl::allocate_pointer<T>(alloc, args...);
3614 }
3615 
3616 template< class T, class SVMTrait, class... Args >
3617 cl::pointer<T, detail::Deleter<SVMAllocator<T, SVMTrait>>> allocate_svm(const cl::Context &c, Args... args)
3618 {
3619  SVMAllocator<T, SVMTrait> alloc(c);
3620  return cl::allocate_pointer<T>(alloc, args...);
3621 }
3622 #endif // #if !defined(CL_HPP_NO_STD_UNIQUE_PTR)
3623 
3627 template < class T >
3628 using coarse_svm_vector = vector<T, cl::SVMAllocator<int, cl::SVMTraitCoarse<>>>;
3629 
3633 template < class T >
3634 using fine_svm_vector = vector<T, cl::SVMAllocator<int, cl::SVMTraitFine<>>>;
3635 
3639 template < class T >
3640 using atomic_svm_vector = vector<T, cl::SVMAllocator<int, cl::SVMTraitAtomic<>>>;
3641 
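 /* Usage sketch (OpenCL 2.0 contexts only): SVM-backed containers and smart
  * pointers allocated against the default context.
  *
  *   cl::coarse_svm_vector<int> data(1024, 0);                 // coarse-grained SVM vector
  *   auto p = cl::allocate_svm<int, cl::SVMTraitCoarse<>>(42); // cl::pointer to a single int
  *   // Coarse-grained SVM must be mapped/unmapped (cl::enqueueMapSVM /
  *   // cl::enqueueUnmapSVM) around host access; fine-grained SVM does not.
  */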
3642 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
3643 
3644 
3651 class Buffer : public Memory
3652 {
3653 public:
3654 
3662  Buffer(
3663  const Context& context,
3664  cl_mem_flags flags,
3665  size_type size,
3666  void* host_ptr = NULL,
3667  cl_int* err = NULL)
3668  {
3669  cl_int error;
3670  object_ = ::clCreateBuffer(context(), flags, size, host_ptr, &error);
3671 
3672  detail::errHandler(error, __CREATE_BUFFER_ERR);
3673  if (err != NULL) {
3674  *err = error;
3675  }
3676  }
3677 
3687  Buffer(
3688  cl_mem_flags flags,
3689  size_type size,
3690  void* host_ptr = NULL,
3691  cl_int* err = NULL)
3692  {
3693  cl_int error;
3694 
3695  Context context = Context::getDefault(err);
3696 
3697  object_ = ::clCreateBuffer(context(), flags, size, host_ptr, &error);
3698 
3699  detail::errHandler(error, __CREATE_BUFFER_ERR);
3700  if (err != NULL) {
3701  *err = error;
3702  }
3703  }
3704 
3710  template< typename IteratorType >
3711  Buffer(
3712  IteratorType startIterator,
3713  IteratorType endIterator,
3714  bool readOnly,
3715  bool useHostPtr = false,
3716  cl_int* err = NULL)
3717  {
3718  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
3719  cl_int error;
3720 
3721  cl_mem_flags flags = 0;
3722  if( readOnly ) {
3723  flags |= CL_MEM_READ_ONLY;
3724  }
3725  else {
3726  flags |= CL_MEM_READ_WRITE;
3727  }
3728  if( useHostPtr ) {
3729  flags |= CL_MEM_USE_HOST_PTR;
3730  }
3731 
3732  size_type size = sizeof(DataType)*(endIterator - startIterator);
3733 
3734  Context context = Context::getDefault(err);
3735 
3736  if( useHostPtr ) {
3737  object_ = ::clCreateBuffer(context(), flags, size, static_cast<DataType*>(&*startIterator), &error);
3738  } else {
3739  object_ = ::clCreateBuffer(context(), flags, size, 0, &error);
3740  }
3741 
3742  detail::errHandler(error, __CREATE_BUFFER_ERR);
3743  if (err != NULL) {
3744  *err = error;
3745  }
3746 
3747  if( !useHostPtr ) {
3748  error = cl::copy(startIterator, endIterator, *this);
3749  detail::errHandler(error, __CREATE_BUFFER_ERR);
3750  if (err != NULL) {
3751  *err = error;
3752  }
3753  }
3754  }
3755 
3761  template< typename IteratorType >
3762  Buffer(const Context &context, IteratorType startIterator, IteratorType endIterator,
3763  bool readOnly, bool useHostPtr = false, cl_int* err = NULL);
3764 
3769  template< typename IteratorType >
3770  Buffer(const CommandQueue &queue, IteratorType startIterator, IteratorType endIterator,
3771  bool readOnly, bool useHostPtr = false, cl_int* err = NULL);
3772 
3774  Buffer() : Memory() { }
3775 
3783  explicit Buffer(const cl_mem& buffer, bool retainObject = false) :
3784  Memory(buffer, retainObject) { }
3785 
3790  Buffer& operator = (const cl_mem& rhs)
3791  {
3792  Memory::operator=(rhs);
3793  return *this;
3794  }
3795 
3799  Buffer(const Buffer& buf) : Memory(buf) {}
3800 
3804  Buffer& operator = (const Buffer &buf)
3805  {
3806  Memory::operator=(buf);
3807  return *this;
3808  }
3809 
3813  Buffer(Buffer&& buf) CL_HPP_NOEXCEPT_ : Memory(std::move(buf)) {}
3814 
3818  Buffer& operator = (Buffer &&buf)
3819  {
3820  Memory::operator=(std::move(buf));
3821  return *this;
3822  }
3823 
3824 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
3825 
3829  Buffer createSubBuffer(
3830  cl_mem_flags flags,
3831  cl_buffer_create_type buffer_create_type,
3832  const void * buffer_create_info,
3833  cl_int * err = NULL)
3834  {
3835  Buffer result;
3836  cl_int error;
3837  result.object_ = ::clCreateSubBuffer(
3838  object_,
3839  flags,
3840  buffer_create_type,
3841  buffer_create_info,
3842  &error);
3843 
3844  detail::errHandler(error, __CREATE_SUBBUFFER_ERR);
3845  if (err != NULL) {
3846  *err = error;
3847  }
3848 
3849  return result;
3850  }
3851 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
3852 };
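 /* Usage sketch: the iterator constructor sizes the buffer from a host range
  * and, unless useHostPtr is true, copies the data in with cl::copy on the
  * default command queue.
  *
  *   std::vector<float> host(256, 1.0f);
  *   cl_int err;
  *   cl::Buffer buf(host.begin(), host.end(),
  *                  true,   // readOnly  -> CL_MEM_READ_ONLY
  *                  false,  // useHostPtr
  *                  &err);
  */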
3853 
3854 #if defined (CL_HPP_USE_DX_INTEROP)
3855 
3863 class BufferD3D10 : public Buffer
3864 {
3865 public:
3866 
3867 
3873  BufferD3D10(
3874  const Context& context,
3875  cl_mem_flags flags,
3876  ID3D10Buffer* bufobj,
3877  cl_int * err = NULL)
3878  {
3879  typedef CL_API_ENTRY cl_mem (CL_API_CALL *PFN_clCreateFromD3D10BufferKHR)(
3880  cl_context context, cl_mem_flags flags, ID3D10Buffer* buffer,
3881  cl_int* errcode_ret);
3882  PFN_clCreateFromD3D10BufferKHR pfn_clCreateFromD3D10BufferKHR = nullptr;
3883 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
3884  vector<cl_context_properties> props = context.getInfo<CL_CONTEXT_PROPERTIES>();
3885  cl_platform_id platform = nullptr;
3886  for( size_type i = 0; i < props.size(); ++i ) {
3887  if( props[i] == CL_CONTEXT_PLATFORM ) {
3888  platform = (cl_platform_id)props[i+1];
3889  }
3890  }
3891  CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, clCreateFromD3D10BufferKHR);
3892 #elif CL_HPP_TARGET_OPENCL_VERSION >= 110
3893  CL_HPP_INIT_CL_EXT_FCN_PTR_(clCreateFromD3D10BufferKHR);
3894 #endif
3895 
3896  cl_int error;
3897  object_ = pfn_clCreateFromD3D10BufferKHR(
3898  context(),
3899  flags,
3900  bufobj,
3901  &error);
3902 
3903  detail::errHandler(error, __CREATE_GL_BUFFER_ERR);
3904  if (err != NULL) {
3905  *err = error;
3906  }
3907  }
3908 
3910  BufferD3D10() : Buffer() { }
3911 
3919  explicit BufferD3D10(const cl_mem& buffer, bool retainObject = false) :
3920  Buffer(buffer, retainObject) { }
3921 
3926  BufferD3D10& operator = (const cl_mem& rhs)
3927  {
3928  Buffer::operator=(rhs);
3929  return *this;
3930  }
3931 
3935  BufferD3D10(const BufferD3D10& buf) :
3936  Buffer(buf) {}
3937 
3941  BufferD3D10& operator = (const BufferD3D10 &buf)
3942  {
3943  Buffer::operator=(buf);
3944  return *this;
3945  }
3946 
3950  BufferD3D10(BufferD3D10&& buf) CL_HPP_NOEXCEPT_ : Buffer(std::move(buf)) {}
3951 
3955  BufferD3D10& operator = (BufferD3D10 &&buf)
3956  {
3957  Buffer::operator=(std::move(buf));
3958  return *this;
3959  }
3960 };
3961 #endif
3962 
3971 class BufferGL : public Buffer
3972 {
3973 public:
3979  BufferGL(
3980  const Context& context,
3981  cl_mem_flags flags,
3982  cl_GLuint bufobj,
3983  cl_int * err = NULL)
3984  {
3985  cl_int error;
3986  object_ = ::clCreateFromGLBuffer(
3987  context(),
3988  flags,
3989  bufobj,
3990  &error);
3991 
3992  detail::errHandler(error, __CREATE_GL_BUFFER_ERR);
3993  if (err != NULL) {
3994  *err = error;
3995  }
3996  }
3997 
3999  BufferGL() : Buffer() { }
4000 
4008  explicit BufferGL(const cl_mem& buffer, bool retainObject = false) :
4009  Buffer(buffer, retainObject) { }
4010 
4015  BufferGL& operator = (const cl_mem& rhs)
4016  {
4017  Buffer::operator=(rhs);
4018  return *this;
4019  }
4020 
4024  BufferGL(const BufferGL& buf) : Buffer(buf) {}
4025 
4029  BufferGL& operator = (const BufferGL &buf)
4030  {
4031  Buffer::operator=(buf);
4032  return *this;
4033  }
4034 
4038  BufferGL(BufferGL&& buf) CL_HPP_NOEXCEPT_ : Buffer(std::move(buf)) {}
4039 
4043  BufferGL& operator = (BufferGL &&buf)
4044  {
4045  Buffer::operator=(std::move(buf));
4046  return *this;
4047  }
4048 
4050  cl_int getObjectInfo(
4051  cl_gl_object_type *type,
4052  cl_GLuint * gl_object_name)
4053  {
4054  return detail::errHandler(
4055  ::clGetGLObjectInfo(object_,type,gl_object_name),
4056  __GET_GL_OBJECT_INFO_ERR);
4057  }
4058 };
4059 
4068 class BufferRenderGL : public Buffer
4069 {
4070 public:
4076  BufferRenderGL(
4077  const Context& context,
4078  cl_mem_flags flags,
4079  cl_GLuint bufobj,
4080  cl_int * err = NULL)
4081  {
4082  cl_int error;
4083  object_ = ::clCreateFromGLRenderbuffer(
4084  context(),
4085  flags,
4086  bufobj,
4087  &error);
4088 
4089  detail::errHandler(error, __CREATE_GL_RENDER_BUFFER_ERR);
4090  if (err != NULL) {
4091  *err = error;
4092  }
4093  }
4094 
4096  BufferRenderGL() : Buffer() { }
4097 
4105  explicit BufferRenderGL(const cl_mem& buffer, bool retainObject = false) :
4106  Buffer(buffer, retainObject) { }
4107 
4112  BufferRenderGL& operator = (const cl_mem& rhs)
4113  {
4114  Buffer::operator=(rhs);
4115  return *this;
4116  }
4117 
4121  BufferRenderGL(const BufferRenderGL& buf) : Buffer(buf) {}
4122 
4126  BufferRenderGL& operator = (const BufferRenderGL &buf)
4127  {
4128  Buffer::operator=(buf);
4129  return *this;
4130  }
4131 
4135  BufferRenderGL(BufferRenderGL&& buf) CL_HPP_NOEXCEPT_ : Buffer(std::move(buf)) {}
4136 
4140  BufferRenderGL& operator = (BufferRenderGL &&buf)
4141  {
4142  Buffer::operator=(std::move(buf));
4143  return *this;
4144  }
4145 
4147  cl_int getObjectInfo(
4148  cl_gl_object_type *type,
4149  cl_GLuint * gl_object_name)
4150  {
4151  return detail::errHandler(
4152  ::clGetGLObjectInfo(object_,type,gl_object_name),
4153  __GET_GL_OBJECT_INFO_ERR);
4154  }
4155 };
4156 
4163 class Image : public Memory
4164 {
4165 protected:
4167  Image() : Memory() { }
4168 
4176  explicit Image(const cl_mem& image, bool retainObject = false) :
4177  Memory(image, retainObject) { }
4178 
4183  Image& operator = (const cl_mem& rhs)
4184  {
4185  Memory::operator=(rhs);
4186  return *this;
4187  }
4188 
4192  Image(const Image& img) : Memory(img) {}
4193 
4197  Image& operator = (const Image &img)
4198  {
4199  Memory::operator=(img);
4200  return *this;
4201  }
4202 
4206  Image(Image&& img) CL_HPP_NOEXCEPT_ : Memory(std::move(img)) {}
4207 
4211  Image& operator = (Image &&img)
4212  {
4213  Memory::operator=(std::move(img));
4214  return *this;
4215  }
4216 
4217 
4218 public:
4220  template <typename T>
4221  cl_int getImageInfo(cl_image_info name, T* param) const
4222  {
4223  return detail::errHandler(
4224  detail::getInfo(&::clGetImageInfo, object_, name, param),
4225  __GET_IMAGE_INFO_ERR);
4226  }
4227 
4229  template <cl_int name> typename
4230  detail::param_traits<detail::cl_image_info, name>::param_type
4231  getImageInfo(cl_int* err = NULL) const
4232  {
4233  typename detail::param_traits<
4234  detail::cl_image_info, name>::param_type param;
4235  cl_int result = getImageInfo(name, &param);
4236  if (err != NULL) {
4237  *err = result;
4238  }
4239  return param;
4240  }
4241 };
4242 
4243 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4244 
4250 class Image1D : public Image
4251 {
4252 public:
4257  Image1D(
4258  const Context& context,
4259  cl_mem_flags flags,
4260  ImageFormat format,
4261  size_type width,
4262  void* host_ptr = NULL,
4263  cl_int* err = NULL)
4264  {
4265  cl_int error;
4266  cl_image_desc desc =
4267  {
4268  CL_MEM_OBJECT_IMAGE1D,
4269  width,
4270  0, 0, 0, 0, 0, 0, 0, 0
4271  };
4272  object_ = ::clCreateImage(
4273  context(),
4274  flags,
4275  &format,
4276  &desc,
4277  host_ptr,
4278  &error);
4279 
4280  detail::errHandler(error, __CREATE_IMAGE_ERR);
4281  if (err != NULL) {
4282  *err = error;
4283  }
4284  }
4285 
4287  Image1D() { }
4288 
4296  explicit Image1D(const cl_mem& image1D, bool retainObject = false) :
4297  Image(image1D, retainObject) { }
4298 
4303  Image1D& operator = (const cl_mem& rhs)
4304  {
4305  Image::operator=(rhs);
4306  return *this;
4307  }
4308 
4312  Image1D(const Image1D& img) : Image(img) {}
4313 
4317  Image1D& operator = (const Image1D &img)
4318  {
4319  Image::operator=(img);
4320  return *this;
4321  }
4322 
4326  Image1D(Image1D&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4327 
4331  Image1D& operator = (Image1D &&img)
4332  {
4333  Image::operator=(std::move(img));
4334  return *this;
4335  }
4336 
4337 };
4338 
4342 class Image1DBuffer : public Image
4343 {
4344 public:
4345  Image1DBuffer(
4346  const Context& context,
4347  cl_mem_flags flags,
4348  ImageFormat format,
4349  size_type width,
4350  const Buffer &buffer,
4351  cl_int* err = NULL)
4352  {
4353  cl_int error;
4354  cl_image_desc desc =
4355  {
4356  CL_MEM_OBJECT_IMAGE1D_BUFFER,
4357  width,
4358  0, 0, 0, 0, 0, 0, 0,
4359  buffer()
4360  };
4361  object_ = ::clCreateImage(
4362  context(),
4363  flags,
4364  &format,
4365  &desc,
4366  NULL,
4367  &error);
4368 
4369  detail::errHandler(error, __CREATE_IMAGE_ERR);
4370  if (err != NULL) {
4371  *err = error;
4372  }
4373  }
4374 
4375  Image1DBuffer() { }
4376 
4384  explicit Image1DBuffer(const cl_mem& image1D, bool retainObject = false) :
4385  Image(image1D, retainObject) { }
4386 
4387  Image1DBuffer& operator = (const cl_mem& rhs)
4388  {
4389  Image::operator=(rhs);
4390  return *this;
4391  }
4392 
4396  Image1DBuffer(const Image1DBuffer& img) : Image(img) {}
4397 
4401  Image1DBuffer& operator = (const Image1DBuffer &img)
4402  {
4403  Image::operator=(img);
4404  return *this;
4405  }
4406 
4410  Image1DBuffer(Image1DBuffer&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4411 
4415  Image1DBuffer& operator = (Image1DBuffer &&img)
4416  {
4417  Image::operator=(std::move(img));
4418  return *this;
4419  }
4420 
4421 };
4422 
4426 class Image1DArray : public Image
4427 {
4428 public:
4429  Image1DArray(
4430  const Context& context,
4431  cl_mem_flags flags,
4432  ImageFormat format,
4433  size_type arraySize,
4434  size_type width,
4435  size_type rowPitch,
4436  void* host_ptr = NULL,
4437  cl_int* err = NULL)
4438  {
4439  cl_int error;
4440  cl_image_desc desc =
4441  {
4442  CL_MEM_OBJECT_IMAGE1D_ARRAY,
4443  width,
4444  0, 0, // height, depth (unused)
4445  arraySize,
4446  rowPitch,
4447  0, 0, 0, 0
4448  };
4449  object_ = ::clCreateImage(
4450  context(),
4451  flags,
4452  &format,
4453  &desc,
4454  host_ptr,
4455  &error);
4456 
4457  detail::errHandler(error, __CREATE_IMAGE_ERR);
4458  if (err != NULL) {
4459  *err = error;
4460  }
4461  }
4462 
4463  Image1DArray() { }
4464 
4472  explicit Image1DArray(const cl_mem& imageArray, bool retainObject = false) :
4473  Image(imageArray, retainObject) { }
4474 
4475 
4476  Image1DArray& operator = (const cl_mem& rhs)
4477  {
4478  Image::operator=(rhs);
4479  return *this;
4480  }
4481 
4485  Image1DArray(const Image1DArray& img) : Image(img) {}
4486 
4490  Image1DArray& operator = (const Image1DArray &img)
4491  {
4492  Image::operator=(img);
4493  return *this;
4494  }
4495 
4499  Image1DArray(Image1DArray&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4500 
4504  Image1DArray& operator = (Image1DArray &&img)
4505  {
4506  Image::operator=(std::move(img));
4507  return *this;
4508  }
4509 
4510 };
4511 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4512 
4513 
4520 class Image2D : public Image
4521 {
4522 public:
4527  Image2D(
4528  const Context& context,
4529  cl_mem_flags flags,
4530  ImageFormat format,
4531  size_type width,
4532  size_type height,
4533  size_type row_pitch = 0,
4534  void* host_ptr = NULL,
4535  cl_int* err = NULL)
4536  {
4537  cl_int error;
4538  bool useCreateImage;
4539 
4540 #if CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
4541  // Run-time decision based on the actual platform
4542  {
4543  cl_uint version = detail::getContextPlatformVersion(context());
4544  useCreateImage = (version >= 0x10002); // OpenCL 1.2 or above
4545  }
4546 #elif CL_HPP_TARGET_OPENCL_VERSION >= 120
4547  useCreateImage = true;
4548 #else
4549  useCreateImage = false;
4550 #endif
4551 
4552 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4553  if (useCreateImage)
4554  {
4555  cl_image_desc desc =
4556  {
4557  CL_MEM_OBJECT_IMAGE2D,
4558  width,
4559  height,
4560  0, 0, // depth, array size (unused)
4561  row_pitch,
4562  0, 0, 0, 0
4563  };
4564  object_ = ::clCreateImage(
4565  context(),
4566  flags,
4567  &format,
4568  &desc,
4569  host_ptr,
4570  &error);
4571 
4572  detail::errHandler(error, __CREATE_IMAGE_ERR);
4573  if (err != NULL) {
4574  *err = error;
4575  }
4576  }
4577 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
4578 #if CL_HPP_MINIMUM_OPENCL_VERSION < 120
4579  if (!useCreateImage)
4580  {
4581  object_ = ::clCreateImage2D(
4582  context(), flags,&format, width, height, row_pitch, host_ptr, &error);
4583 
4584  detail::errHandler(error, __CREATE_IMAGE2D_ERR);
4585  if (err != NULL) {
4586  *err = error;
4587  }
4588  }
4589 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 120
4590  }
4591 
4592 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 || defined(CL_HPP_USE_CL_IMAGE2D_FROM_BUFFER_KHR)
4593 
4598  Image2D(
4599  const Context& context,
4600  ImageFormat format,
4601  const Buffer &sourceBuffer,
4602  size_type width,
4603  size_type height,
4604  size_type row_pitch = 0,
4605  cl_int* err = nullptr)
4606  {
4607  cl_int error;
4608 
4609  cl_image_desc desc =
4610  {
4611  CL_MEM_OBJECT_IMAGE2D,
4612  width,
4613  height,
4614  0, 0, // depth, array size (unused)
4615  row_pitch,
4616  0, 0, 0,
4617  // Use buffer as input to image
4618  sourceBuffer()
4619  };
4620  object_ = ::clCreateImage(
4621  context(),
4622  0, // flags inherited from buffer
4623  &format,
4624  &desc,
4625  nullptr,
4626  &error);
4627 
4628  detail::errHandler(error, __CREATE_IMAGE_ERR);
4629  if (err != nullptr) {
4630  *err = error;
4631  }
4632  }
4633 #endif //#if CL_HPP_TARGET_OPENCL_VERSION >= 200 || defined(CL_HPP_USE_CL_IMAGE2D_FROM_BUFFER_KHR)
4634 
4635 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
4636 
4648  Image2D(
4649  const Context& context,
4650  cl_channel_order order,
4651  const Image &sourceImage,
4652  cl_int* err = nullptr)
4653  {
4654  cl_int error;
4655 
4656  // Descriptor fields have to match source image
4657  size_type sourceWidth =
4658  sourceImage.getImageInfo<CL_IMAGE_WIDTH>();
4659  size_type sourceHeight =
4660  sourceImage.getImageInfo<CL_IMAGE_HEIGHT>();
4661  size_type sourceRowPitch =
4662  sourceImage.getImageInfo<CL_IMAGE_ROW_PITCH>();
4663  cl_uint sourceNumMIPLevels =
4664  sourceImage.getImageInfo<CL_IMAGE_NUM_MIP_LEVELS>();
4665  cl_uint sourceNumSamples =
4666  sourceImage.getImageInfo<CL_IMAGE_NUM_SAMPLES>();
4667  cl_image_format sourceFormat =
4668  sourceImage.getImageInfo<CL_IMAGE_FORMAT>();
4669 
4670  // Update only the channel order.
4671  // Channel format inherited from source.
4672  sourceFormat.image_channel_order = order;
4673  cl_image_desc desc =
4674  {
4675  CL_MEM_OBJECT_IMAGE2D,
4676  sourceWidth,
4677  sourceHeight,
4678  0, 0, // depth (unused), array size (unused)
4679  sourceRowPitch,
4680  0, // slice pitch (unused)
4681  sourceNumMIPLevels,
4682  sourceNumSamples,
4683  // Use buffer as input to image
4684  sourceImage()
4685  };
4686  object_ = ::clCreateImage(
4687  context(),
4688  0, // flags should be inherited from mem_object
4689  &sourceFormat,
4690  &desc,
4691  nullptr,
4692  &error);
4693 
4694  detail::errHandler(error, __CREATE_IMAGE_ERR);
4695  if (err != nullptr) {
4696  *err = error;
4697  }
4698  }
4699 #endif //#if CL_HPP_TARGET_OPENCL_VERSION >= 200
4700 
4702  Image2D() { }
4703 
4711  explicit Image2D(const cl_mem& image2D, bool retainObject = false) :
4712  Image(image2D, retainObject) { }
4713 
4718  Image2D& operator = (const cl_mem& rhs)
4719  {
4720  Image::operator=(rhs);
4721  return *this;
4722  }
4723 
4727  Image2D(const Image2D& img) : Image(img) {}
4728 
4732  Image2D& operator = (const Image2D &img)
4733  {
4734  Image::operator=(img);
4735  return *this;
4736  }
4737 
4741  Image2D(Image2D&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4742 
4746  Image2D& operator = (Image2D &&img)
4747  {
4748  Image::operator=(std::move(img));
4749  return *this;
4750  }
4751 
4752 };
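 /* Usage sketch (assuming an existing cl::Context "ctx"): create a 512x512
  * RGBA float image; row_pitch and host_ptr are optional.
  *
  *   cl::ImageFormat fmt(CL_RGBA, CL_FLOAT);
  *   cl_int err;
  *   cl::Image2D img(ctx, CL_MEM_READ_WRITE, fmt, 512, 512, 0, nullptr, &err);
  */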
4753 
4754 
4755 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
4756 
4765 class CL_EXT_PREFIX__VERSION_1_1_DEPRECATED Image2DGL : public Image2D
4766 {
4767 public:
4773  Image2DGL(
4774  const Context& context,
4775  cl_mem_flags flags,
4776  cl_GLenum target,
4777  cl_GLint miplevel,
4778  cl_GLuint texobj,
4779  cl_int * err = NULL)
4780  {
4781  cl_int error;
4782  object_ = ::clCreateFromGLTexture2D(
4783  context(),
4784  flags,
4785  target,
4786  miplevel,
4787  texobj,
4788  &error);
4789 
4790  detail::errHandler(error, __CREATE_GL_TEXTURE_2D_ERR);
4791  if (err != NULL) {
4792  *err = error;
4793  }
4794 
4795  }
4796 
4798  Image2DGL() : Image2D() { }
4799 
4807  explicit Image2DGL(const cl_mem& image, bool retainObject = false) :
4808  Image2D(image, retainObject) { }
4809 
4814  Image2DGL& operator = (const cl_mem& rhs)
4815  {
4816  Image2D::operator=(rhs);
4817  return *this;
4818  }
4819 
4823  Image2DGL(const Image2DGL& img) : Image2D(img) {}
4824 
4828  Image2DGL& operator = (const Image2DGL &img)
4829  {
4830  Image2D::operator=(img);
4831  return *this;
4832  }
4833 
4837  Image2DGL(Image2DGL&& img) CL_HPP_NOEXCEPT_ : Image2D(std::move(img)) {}
4838 
4842  Image2DGL& operator = (Image2DGL &&img)
4843  {
4844  Image2D::operator=(std::move(img));
4845  return *this;
4846  }
4847 
4848 } CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED;
4849 #endif // CL_USE_DEPRECATED_OPENCL_1_1_APIS
4850 
4851 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4852 
4855 class Image2DArray : public Image
4856 {
4857 public:
4858  Image2DArray(
4859  const Context& context,
4860  cl_mem_flags flags,
4861  ImageFormat format,
4862  size_type arraySize,
4863  size_type width,
4864  size_type height,
4865  size_type rowPitch,
4866  size_type slicePitch,
4867  void* host_ptr = NULL,
4868  cl_int* err = NULL)
4869  {
4870  cl_int error;
4871  cl_image_desc desc =
4872  {
4873  CL_MEM_OBJECT_IMAGE2D_ARRAY,
4874  width,
4875  height,
4876  0, // depth (unused)
4877  arraySize,
4878  rowPitch,
4879  slicePitch,
4880  0, 0, 0
4881  };
4882  object_ = ::clCreateImage(
4883  context(),
4884  flags,
4885  &format,
4886  &desc,
4887  host_ptr,
4888  &error);
4889 
4890  detail::errHandler(error, __CREATE_IMAGE_ERR);
4891  if (err != NULL) {
4892  *err = error;
4893  }
4894  }
4895 
4896  Image2DArray() { }
4897 
4905  explicit Image2DArray(const cl_mem& imageArray, bool retainObject = false) : Image(imageArray, retainObject) { }
4906 
4907  Image2DArray& operator = (const cl_mem& rhs)
4908  {
4909  Image::operator=(rhs);
4910  return *this;
4911  }
4912 
4916  Image2DArray(const Image2DArray& img) : Image(img) {}
4917 
4921  Image2DArray& operator = (const Image2DArray &img)
4922  {
4923  Image::operator=(img);
4924  return *this;
4925  }
4926 
4930  Image2DArray(Image2DArray&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4931 
4935  Image2DArray& operator = (Image2DArray &&img)
4936  {
4937  Image::operator=(std::move(img));
4938  return *this;
4939  }
4940 };
4941 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 120
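/*! \brief Illustrative usage sketch for Image2DArray, assuming an existing
 *  cl::Context named "context" and an OpenCL 1.2 platform; the sizes are arbitrary.
 * \code
 *     cl::ImageFormat format(CL_RGBA, CL_UNORM_INT8);
 *     cl_int err;
 *     cl::Image2DArray slices(
 *         context, CL_MEM_READ_ONLY, format,
 *         4,          // arraySize: four 2D slices
 *         512, 512,   // width, height of each slice
 *         0, 0,       // rowPitch, slicePitch: let the runtime choose
 *         nullptr,    // no host pointer
 *         &err);
 * \endcode
 */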
4942 
4949 class Image3D : public Image
4950 {
4951 public:
4956  Image3D(
4957  const Context& context,
4958  cl_mem_flags flags,
4959  ImageFormat format,
4960  size_type width,
4961  size_type height,
4962  size_type depth,
4963  size_type row_pitch = 0,
4964  size_type slice_pitch = 0,
4965  void* host_ptr = NULL,
4966  cl_int* err = NULL)
4967  {
4968  cl_int error;
4969  bool useCreateImage;
4970 
4971 #if CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
4972  // Run-time decision based on the actual platform
4973  {
4974  cl_uint version = detail::getContextPlatformVersion(context());
4975  useCreateImage = (version >= 0x10002); // OpenCL 1.2 or above
4976  }
4977 #elif CL_HPP_TARGET_OPENCL_VERSION >= 120
4978  useCreateImage = true;
4979 #else
4980  useCreateImage = false;
4981 #endif
4982 
4983 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4984  if (useCreateImage)
4985  {
4986  cl_image_desc desc =
4987  {
4988  CL_MEM_OBJECT_IMAGE3D,
4989  width,
4990  height,
4991  depth,
4992  0, // array size (unused)
4993  row_pitch,
4994  slice_pitch,
4995  0, 0, 0
4996  };
4997  object_ = ::clCreateImage(
4998  context(),
4999  flags,
5000  &format,
5001  &desc,
5002  host_ptr,
5003  &error);
5004 
5005  detail::errHandler(error, __CREATE_IMAGE_ERR);
5006  if (err != NULL) {
5007  *err = error;
5008  }
5009  }
5010 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
5011 #if CL_HPP_MINIMUM_OPENCL_VERSION < 120
5012  if (!useCreateImage)
5013  {
5014  object_ = ::clCreateImage3D(
5015  context(), flags, &format, width, height, depth, row_pitch,
5016  slice_pitch, host_ptr, &error);
5017 
5018  detail::errHandler(error, __CREATE_IMAGE3D_ERR);
5019  if (err != NULL) {
5020  *err = error;
5021  }
5022  }
5023 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 120
5024  }
5025 
5027  Image3D() : Image() { }
5028 
5036  explicit Image3D(const cl_mem& image3D, bool retainObject = false) :
5037  Image(image3D, retainObject) { }
5038 
5043  Image3D& operator = (const cl_mem& rhs)
5044  {
5045  Image::operator=(rhs);
5046  return *this;
5047  }
5048 
5052  Image3D(const Image3D& img) : Image(img) {}
5053 
5057  Image3D& operator = (const Image3D &img)
5058  {
5059  Image::operator=(img);
5060  return *this;
5061  }
5062 
5066  Image3D(Image3D&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
5067 
5071  Image3D& operator = (Image3D &&img)
5072  {
5073  Image::operator=(std::move(img));
5074  return *this;
5075  }
5076 };
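/*! \brief Illustrative usage sketch for Image3D, assuming an existing cl::Context
 *  named "context"; the volume dimensions are arbitrary.
 * \code
 *     cl::ImageFormat format(CL_R, CL_FLOAT);
 *     cl_int err;
 *     cl::Image3D volume(
 *         context, CL_MEM_READ_WRITE, format,
 *         128, 128, 64,   // width, height, depth
 *         0, 0,           // row_pitch, slice_pitch: let the runtime choose
 *         nullptr, &err);
 * \endcode
 */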
5077 
5078 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
5079 
5087 class Image3DGL : public Image3D
5088 {
5089 public:
5095  Image3DGL(
5096  const Context& context,
5097  cl_mem_flags flags,
5098  cl_GLenum target,
5099  cl_GLint miplevel,
5100  cl_GLuint texobj,
5101  cl_int * err = NULL)
5102  {
5103  cl_int error;
5104  object_ = ::clCreateFromGLTexture3D(
5105  context(),
5106  flags,
5107  target,
5108  miplevel,
5109  texobj,
5110  &error);
5111 
5112  detail::errHandler(error, __CREATE_GL_TEXTURE_3D_ERR);
5113  if (err != NULL) {
5114  *err = error;
5115  }
5116  }
5117 
5119  Image3DGL() : Image3D() { }
5120 
5128  explicit Image3DGL(const cl_mem& image, bool retainObject = false) :
5129  Image3D(image, retainObject) { }
5130 
5135  Image3DGL& operator = (const cl_mem& rhs)
5136  {
5137  Image3D::operator=(rhs);
5138  return *this;
5139  }
5140 
5144  Image3DGL(const Image3DGL& img) : Image3D(img) {}
5145 
5149  Image3DGL& operator = (const Image3DGL &img)
5150  {
5151  Image3D::operator=(img);
5152  return *this;
5153  }
5154 
5158  Image3DGL(Image3DGL&& img) CL_HPP_NOEXCEPT_ : Image3D(std::move(img)) {}
5159 
5163  Image3DGL& operator = (Image3DGL &&img)
5164  {
5165  Image3D::operator=(std::move(img));
5166  return *this;
5167  }
5168 };
5169 #endif // CL_USE_DEPRECATED_OPENCL_1_1_APIS
5170 
5171 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
5172 
5178 class ImageGL : public Image
5179 {
5180 public:
5181  ImageGL(
5182  const Context& context,
5183  cl_mem_flags flags,
5184  cl_GLenum target,
5185  cl_GLint miplevel,
5186  cl_GLuint texobj,
5187  cl_int * err = NULL)
5188  {
5189  cl_int error;
5190  object_ = ::clCreateFromGLTexture(
5191  context(),
5192  flags,
5193  target,
5194  miplevel,
5195  texobj,
5196  &error);
5197 
5198  detail::errHandler(error, __CREATE_GL_TEXTURE_ERR);
5199  if (err != NULL) {
5200  *err = error;
5201  }
5202  }
5203 
5204  ImageGL() : Image() { }
5205 
5213  explicit ImageGL(const cl_mem& image, bool retainObject = false) :
5214  Image(image, retainObject) { }
5215 
5216  ImageGL& operator = (const cl_mem& rhs)
5217  {
5218  Image::operator=(rhs);
5219  return *this;
5220  }
5221 
5225  ImageGL(const ImageGL& img) : Image(img) {}
5226 
5230  ImageGL& operator = (const ImageGL &img)
5231  {
5232  Image::operator=(img);
5233  return *this;
5234  }
5235 
5239  ImageGL(ImageGL&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
5240 
5244  ImageGL& operator = (ImageGL &&img)
5245  {
5246  Image::operator=(std::move(img));
5247  return *this;
5248  }
5249 };
5250 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
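/*! \brief Illustrative usage sketch for ImageGL, assuming a context created with
 *  CL/GL sharing, the OpenGL headers included, and an existing GL texture object
 *  "glTexture" bound to GL_TEXTURE_2D (both names are assumptions).
 * \code
 *     cl_int err;
 *     cl::ImageGL shared(
 *         context, CL_MEM_READ_ONLY,
 *         GL_TEXTURE_2D,  // target
 *         0,              // miplevel
 *         glTexture,      // GL texture object
 *         &err);
 * \endcode
 */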
5251 
5252 
5253 
5254 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5255 
5261 class Pipe : public Memory
5262 {
5263 public:
5264 
5274  Pipe(
5275  const Context& context,
5276  cl_uint packet_size,
5277  cl_uint max_packets,
5278  cl_int* err = NULL)
5279  {
5280  cl_int error;
5281 
5282  cl_mem_flags flags = CL_MEM_READ_WRITE | CL_MEM_HOST_NO_ACCESS;
5283  object_ = ::clCreatePipe(context(), flags, packet_size, max_packets, nullptr, &error);
5284 
5285  detail::errHandler(error, __CREATE_PIPE_ERR);
5286  if (err != NULL) {
5287  *err = error;
5288  }
5289  }
5290 
5299  Pipe(
5300  cl_uint packet_size,
5301  cl_uint max_packets,
5302  cl_int* err = NULL)
5303  {
5304  cl_int error;
5305 
5306  Context context = Context::getDefault(err);
5307 
5308  cl_mem_flags flags = CL_MEM_READ_WRITE | CL_MEM_HOST_NO_ACCESS;
5309  object_ = ::clCreatePipe(context(), flags, packet_size, max_packets, nullptr, &error);
5310 
5311  detail::errHandler(error, __CREATE_PIPE_ERR);
5312  if (err != NULL) {
5313  *err = error;
5314  }
5315  }
5316 
5318  Pipe() : Memory() { }
5319 
5327  explicit Pipe(const cl_mem& pipe, bool retainObject = false) :
5328  Memory(pipe, retainObject) { }
5329 
5334  Pipe& operator = (const cl_mem& rhs)
5335  {
5336  Memory::operator=(rhs);
5337  return *this;
5338  }
5339 
5343  Pipe(const Pipe& pipe) : Memory(pipe) {}
5344 
5348  Pipe& operator = (const Pipe &pipe)
5349  {
5350  Memory::operator=(pipe);
5351  return *this;
5352  }
5353 
5357  Pipe(Pipe&& pipe) CL_HPP_NOEXCEPT_ : Memory(std::move(pipe)) {}
5358 
5362  Pipe& operator = (Pipe &&pipe)
5363  {
5364  Memory::operator=(std::move(pipe));
5365  return *this;
5366  }
5367 
5369  template <typename T>
5370  cl_int getInfo(cl_pipe_info name, T* param) const
5371  {
5372  return detail::errHandler(
5373  detail::getInfo(&::clGetPipeInfo, object_, name, param),
5374  __GET_PIPE_INFO_ERR);
5375  }
5376 
5378  template <cl_int name> typename
5379  detail::param_traits<detail::cl_pipe_info, name>::param_type
5380  getInfo(cl_int* err = NULL) const
5381  {
5382  typename detail::param_traits<
5383  detail::cl_pipe_info, name>::param_type param;
5384  cl_int result = getInfo(name, &param);
5385  if (err != NULL) {
5386  *err = result;
5387  }
5388  return param;
5389  }
5390 }; // class Pipe
5391 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
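/*! \brief Illustrative usage sketch for Pipe, assuming CL_HPP_TARGET_OPENCL_VERSION >= 200
 *  and a default context/device that support pipes; the packet type float is arbitrary.
 * \code
 *     cl_int err;
 *     cl::Pipe pipe(sizeof(float),  // packet_size in bytes
 *                   1024,           // max_packets
 *                   &err);
 *     cl_uint maxPackets = pipe.getInfo<CL_PIPE_MAX_PACKETS>();
 * \endcode
 */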
5392 
5393 
5402 class Sampler : public detail::Wrapper<cl_sampler>
5403 {
5404 public:
5406  Sampler() { }
5407 
5412  Sampler(
5413  const Context& context,
5414  cl_bool normalized_coords,
5415  cl_addressing_mode addressing_mode,
5416  cl_filter_mode filter_mode,
5417  cl_int* err = NULL)
5418  {
5419  cl_int error;
5420 
5421 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5422  cl_sampler_properties sampler_properties[] = {
5423  CL_SAMPLER_NORMALIZED_COORDS, normalized_coords,
5424  CL_SAMPLER_ADDRESSING_MODE, addressing_mode,
5425  CL_SAMPLER_FILTER_MODE, filter_mode,
5426  0 };
5427  object_ = ::clCreateSamplerWithProperties(
5428  context(),
5429  sampler_properties,
5430  &error);
5431 
5432  detail::errHandler(error, __CREATE_SAMPLER_WITH_PROPERTIES_ERR);
5433  if (err != NULL) {
5434  *err = error;
5435  }
5436 #else
5437  object_ = ::clCreateSampler(
5438  context(),
5439  normalized_coords,
5440  addressing_mode,
5441  filter_mode,
5442  &error);
5443 
5444  detail::errHandler(error, __CREATE_SAMPLER_ERR);
5445  if (err != NULL) {
5446  *err = error;
5447  }
5448 #endif
5449  }
5450 
5459  explicit Sampler(const cl_sampler& sampler, bool retainObject = false) :
5460  detail::Wrapper<cl_type>(sampler, retainObject) { }
5461 
5467  Sampler& operator = (const cl_sampler& rhs)
5468  {
5469  detail::Wrapper<cl_type>::operator=(rhs);
5470  return *this;
5471  }
5472 
5476  Sampler(const Sampler& sam) : detail::Wrapper<cl_type>(sam) {}
5477 
5481  Sampler& operator = (const Sampler &sam)
5482  {
5483  detail::Wrapper<cl_type>::operator=(sam);
5484  return *this;
5485  }
5486 
5490  Sampler(Sampler&& sam) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(sam)) {}
5491 
5495  Sampler& operator = (Sampler &&sam)
5496  {
5497  detail::Wrapper<cl_type>::operator=(std::move(sam));
5498  return *this;
5499  }
5500 
5502  template <typename T>
5503  cl_int getInfo(cl_sampler_info name, T* param) const
5504  {
5505  return detail::errHandler(
5506  detail::getInfo(&::clGetSamplerInfo, object_, name, param),
5507  __GET_SAMPLER_INFO_ERR);
5508  }
5509 
5511  template <cl_int name> typename
5512  detail::param_traits<detail::cl_sampler_info, name>::param_type
5513  getInfo(cl_int* err = NULL) const
5514  {
5515  typename detail::param_traits<
5516  detail::cl_sampler_info, name>::param_type param;
5517  cl_int result = getInfo(name, &param);
5518  if (err != NULL) {
5519  *err = result;
5520  }
5521  return param;
5522  }
5523 };
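/*! \brief Illustrative usage sketch for Sampler, assuming an existing cl::Context
 *  named "context".
 * \code
 *     cl_int err;
 *     cl::Sampler sampler(
 *         context,
 *         CL_TRUE,                   // normalized coordinates
 *         CL_ADDRESS_CLAMP_TO_EDGE,
 *         CL_FILTER_LINEAR,
 *         &err);
 * \endcode
 */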
5524 
5525 class Program;
5526 class CommandQueue;
5527 class DeviceCommandQueue;
5528 class Kernel;
5529 
5531 class NDRange
5532 {
5533 private:
5534  size_type sizes_[3];
5535  cl_uint dimensions_;
5536 
5537 public:
5539  NDRange()
5540  : dimensions_(0)
5541  {
5542  sizes_[0] = 0;
5543  sizes_[1] = 0;
5544  sizes_[2] = 0;
5545  }
5546 
5548  NDRange(size_type size0)
5549  : dimensions_(1)
5550  {
5551  sizes_[0] = size0;
5552  sizes_[1] = 1;
5553  sizes_[2] = 1;
5554  }
5555 
5557  NDRange(size_type size0, size_type size1)
5558  : dimensions_(2)
5559  {
5560  sizes_[0] = size0;
5561  sizes_[1] = size1;
5562  sizes_[2] = 1;
5563  }
5564 
5566  NDRange(size_type size0, size_type size1, size_type size2)
5567  : dimensions_(3)
5568  {
5569  sizes_[0] = size0;
5570  sizes_[1] = size1;
5571  sizes_[2] = size2;
5572  }
5573 
5578  operator const size_type*() const {
5579  return sizes_;
5580  }
5581 
5583  size_type dimensions() const
5584  {
5585  return dimensions_;
5586  }
5587 
5589  // Returns the size of the range in bytes, based on the runtime number of dimensions
5590  size_type size() const
5591  {
5592  return dimensions_*sizeof(size_type);
5593  }
5594 
5595  size_type* get()
5596  {
5597  return sizes_;
5598  }
5599 
5600  const size_type* get() const
5601  {
5602  return sizes_;
5603  }
5604 };
5605 
5607 static const NDRange NullRange;
5608 
5610 struct LocalSpaceArg
5611 {
5612  size_type size_;
5613 };
5614 
5615 namespace detail {
5616 
5617 template <typename T, class Enable = void>
5618 struct KernelArgumentHandler;
5619 
5620 // Enable for objects that are not subclasses of memory
5621 // Pointers, constants etc
5622 template <typename T>
5623 struct KernelArgumentHandler<T, typename std::enable_if<!std::is_base_of<cl::Memory, T>::value>::type>
5624 {
5625  static size_type size(const T&) { return sizeof(T); }
5626  static const T* ptr(const T& value) { return &value; }
5627 };
5628 
5629 // Enable for subclasses of memory where we want to get a reference to the cl_mem out
5630 // and pass that in for safety
5631 template <typename T>
5632 struct KernelArgumentHandler<T, typename std::enable_if<std::is_base_of<cl::Memory, T>::value>::type>
5633 {
5634  static size_type size(const T&) { return sizeof(cl_mem); }
5635  static const cl_mem* ptr(const T& value) { return &(value()); }
5636 };
5637 
5638 // Specialization for DeviceCommandQueue defined later
5639 
5640 template <>
5641 struct KernelArgumentHandler<LocalSpaceArg>
5642 {
5643  static size_type size(const LocalSpaceArg& value) { return value.size_; }
5644  static const void* ptr(const LocalSpaceArg&) { return NULL; }
5645 };
5646 
5647 }
5649 
5653 inline LocalSpaceArg
5654 Local(size_type size)
5655 {
5656  LocalSpaceArg ret = { size };
5657  return ret;
5658 }
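/*! \brief Illustrative sketch of how NDRange, NullRange and Local() are typically
 *  combined when launching a kernel; "queue" and "kernel" are assumed to exist.
 * \code
 *     cl::NDRange global(1024, 768);  // 2D global work size
 *     cl::NDRange local(16, 16);      // 2D work-group size
 *     kernel.setArg(2, cl::Local(16 * 16 * sizeof(float)));  // local memory argument
 *     queue.enqueueNDRangeKernel(kernel, cl::NullRange, global, local);
 * \endcode
 */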
5659 
5668 class Kernel : public detail::Wrapper<cl_kernel>
5669 {
5670 public:
5671  inline Kernel(const Program& program, const char* name, cl_int* err = NULL);
5672 
5674  Kernel() { }
5675 
5684  explicit Kernel(const cl_kernel& kernel, bool retainObject = false) :
5685  detail::Wrapper<cl_type>(kernel, retainObject) { }
5686 
5692  Kernel& operator = (const cl_kernel& rhs)
5693  {
5694  detail::Wrapper<cl_type>::operator=(rhs);
5695  return *this;
5696  }
5697 
5701  Kernel(const Kernel& kernel) : detail::Wrapper<cl_type>(kernel) {}
5702 
5706  Kernel& operator = (const Kernel &kernel)
5707  {
5708  detail::Wrapper<cl_type>::operator=(kernel);
5709  return *this;
5710  }
5711 
5715  Kernel(Kernel&& kernel) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(kernel)) {}
5716 
5720  Kernel& operator = (Kernel &&kernel)
5721  {
5722  detail::Wrapper<cl_type>::operator=(std::move(kernel));
5723  return *this;
5724  }
5725 
5726  template <typename T>
5727  cl_int getInfo(cl_kernel_info name, T* param) const
5728  {
5729  return detail::errHandler(
5730  detail::getInfo(&::clGetKernelInfo, object_, name, param),
5731  __GET_KERNEL_INFO_ERR);
5732  }
5733 
5734  template <cl_int name> typename
5735  detail::param_traits<detail::cl_kernel_info, name>::param_type
5736  getInfo(cl_int* err = NULL) const
5737  {
5738  typename detail::param_traits<
5739  detail::cl_kernel_info, name>::param_type param;
5740  cl_int result = getInfo(name, &param);
5741  if (err != NULL) {
5742  *err = result;
5743  }
5744  return param;
5745  }
5746 
5747 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
5748  template <typename T>
5749  cl_int getArgInfo(cl_uint argIndex, cl_kernel_arg_info name, T* param) const
5750  {
5751  return detail::errHandler(
5752  detail::getInfo(&::clGetKernelArgInfo, object_, argIndex, name, param),
5753  __GET_KERNEL_ARG_INFO_ERR);
5754  }
5755 
5756  template <cl_int name> typename
5757  detail::param_traits<detail::cl_kernel_arg_info, name>::param_type
5758  getArgInfo(cl_uint argIndex, cl_int* err = NULL) const
5759  {
5760  typename detail::param_traits<
5761  detail::cl_kernel_arg_info, name>::param_type param;
5762  cl_int result = getArgInfo(argIndex, name, &param);
5763  if (err != NULL) {
5764  *err = result;
5765  }
5766  return param;
5767  }
5768 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
5769 
5770  template <typename T>
5771  cl_int getWorkGroupInfo(
5772  const Device& device, cl_kernel_work_group_info name, T* param) const
5773  {
5774  return detail::errHandler(
5775  detail::getInfo(
5776  &::clGetKernelWorkGroupInfo, object_, device(), name, param),
5777  __GET_KERNEL_WORK_GROUP_INFO_ERR);
5778  }
5779 
5780  template <cl_int name> typename
5781  detail::param_traits<detail::cl_kernel_work_group_info, name>::param_type
5782  getWorkGroupInfo(const Device& device, cl_int* err = NULL) const
5783  {
5784  typename detail::param_traits<
5785  detail::cl_kernel_work_group_info, name>::param_type param;
5786  cl_int result = getWorkGroupInfo(device, name, &param);
5787  if (err != NULL) {
5788  *err = result;
5789  }
5790  return param;
5791  }
5792 
5793 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5794 #if defined(CL_HPP_USE_CL_SUB_GROUPS_KHR)
5795  cl_int getSubGroupInfo(const cl::Device &dev, cl_kernel_sub_group_info name, const cl::NDRange &range, size_type* param) const
5796  {
5797  typedef clGetKernelSubGroupInfoKHR_fn PFN_clGetKernelSubGroupInfoKHR;
5798  static PFN_clGetKernelSubGroupInfoKHR pfn_clGetKernelSubGroupInfoKHR = NULL;
5799  CL_HPP_INIT_CL_EXT_FCN_PTR_(clGetKernelSubGroupInfoKHR);
5800 
5801  return detail::errHandler(
5802  pfn_clGetKernelSubGroupInfoKHR(object_, dev(), name, range.size(), range.get(), sizeof(size_type), param, nullptr),
5803  __GET_KERNEL_ARG_INFO_ERR);
5804  }
5805 
5806  template <cl_int name>
5807  size_type getSubGroupInfo(const cl::Device &dev, const cl::NDRange &range, cl_int* err = NULL) const
5808  {
5809  size_type param;
5810  cl_int result = getSubGroupInfo(dev, name, range, &param);
5811  if (err != NULL) {
5812  *err = result;
5813  }
5814  return param;
5815  }
5816 #endif // #if defined(CL_HPP_USE_CL_SUB_GROUPS_KHR)
5817 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5818 
5819 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5820 
5822  template<typename T, class D>
5823  cl_int setArg(cl_uint index, const cl::pointer<T, D> &argPtr)
5824  {
5825  return detail::errHandler(
5826  ::clSetKernelArgSVMPointer(object_, index, argPtr.get()),
5827  __SET_KERNEL_ARGS_ERR);
5828  }
5829 
5832  template<typename T, class Alloc>
5833  cl_int setArg(cl_uint index, const cl::vector<T, Alloc> &argPtr)
5834  {
5835  return detail::errHandler(
5836  ::clSetKernelArgSVMPointer(object_, index, argPtr.data()),
5837  __SET_KERNEL_ARGS_ERR);
5838  }
5839 
5842  template<typename T>
5843  typename std::enable_if<std::is_pointer<T>::value, cl_int>::type
5844  setArg(cl_uint index, const T argPtr)
5845  {
5846  return detail::errHandler(
5847  ::clSetKernelArgSVMPointer(object_, index, argPtr),
5848  __SET_KERNEL_ARGS_ERR);
5849  }
5850 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5851 
5854  template <typename T>
5855  typename std::enable_if<!std::is_pointer<T>::value, cl_int>::type
5856  setArg(cl_uint index, const T &value)
5857  {
5858  return detail::errHandler(
5859  ::clSetKernelArg(
5860  object_,
5861  index,
5862  detail::KernelArgumentHandler<T>::size(value),
5863  detail::KernelArgumentHandler<T>::ptr(value)),
5864  __SET_KERNEL_ARGS_ERR);
5865  }
5866 
5867  cl_int setArg(cl_uint index, size_type size, const void* argPtr)
5868  {
5869  return detail::errHandler(
5870  ::clSetKernelArg(object_, index, size, argPtr),
5871  __SET_KERNEL_ARGS_ERR);
5872  }
5873 
5874 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5875 
5879  cl_int setSVMPointers(const vector<void*> &pointerList)
5880  {
5881  return detail::errHandler(
5882  ::clSetKernelExecInfo(
5883  object_,
5884  CL_KERNEL_EXEC_INFO_SVM_PTRS,
5885  sizeof(void*)*pointerList.size(),
5886  pointerList.data()));
5887  }
5888 
5893  template<int ArrayLength>
5894  cl_int setSVMPointers(const std::array<void*, ArrayLength> &pointerList)
5895  {
5896  return detail::errHandler(
5897  ::clSetKernelExecInfo(
5898  object_,
5899  CL_KERNEL_EXEC_INFO_SVM_PTRS,
5900  sizeof(void*)*pointerList.size(),
5901  pointerList.data()));
5902  }
5903 
5915  cl_int enableFineGrainedSystemSVM(bool svmEnabled)
5916  {
5917  cl_bool svmEnabled_ = svmEnabled ? CL_TRUE : CL_FALSE;
5918  return detail::errHandler(
5919  ::clSetKernelExecInfo(
5920  object_,
5921  CL_KERNEL_EXEC_INFO_SVM_FINE_GRAIN_SYSTEM,
5922  sizeof(cl_bool),
5923  &svmEnabled_
5924  )
5925  );
5926  }
5927 
5928  template<int index, int ArrayLength, class D, typename T0, typename T1, typename... Ts>
5929  void setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, const pointer<T0, D> &t0, const pointer<T1, D> &t1, Ts & ... ts)
5930  {
5931  pointerList[index] = static_cast<void*>(t0.get());
5932  setSVMPointersHelper<index + 1, ArrayLength>(pointerList, t1, ts...);
5933  }
5934 
5935  template<int index, int ArrayLength, typename T0, typename T1, typename... Ts>
5936  typename std::enable_if<std::is_pointer<T0>::value, void>::type
5937  setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, T0 t0, T1 t1, Ts... ts)
5938  {
5939  pointerList[index] = static_cast<void*>(t0);
5940  setSVMPointersHelper<index + 1, ArrayLength>(pointerList, t1, ts...);
5941  }
5942 
5943  template<int index, int ArrayLength, typename T0, class D>
5944  void setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, const pointer<T0, D> &t0)
5945  {
5946  pointerList[index] = static_cast<void*>(t0.get());
5947  }
5948 
5949 
5950  template<int index, int ArrayLength, typename T0>
5951  typename std::enable_if<std::is_pointer<T0>::value, void>::type
5952  setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, T0 t0)
5953  {
5954  pointerList[index] = static_cast<void*>(t0);
5955  }
5956 
5957  template<typename T0, typename... Ts>
5958  cl_int setSVMPointers(const T0 &t0, Ts & ... ts)
5959  {
5960  std::array<void*, 1 + sizeof...(Ts)> pointerList;
5961 
5962  setSVMPointersHelper<0, 1 + sizeof...(Ts)>(pointerList, t0, ts...);
5963  return detail::errHandler(
5964  ::clSetKernelExecInfo(
5965  object_,
5966  CL_KERNEL_EXEC_INFO_SVM_PTRS,
5967  sizeof(void*)*(1 + sizeof...(Ts)),
5968  pointerList.data()));
5969  }
5970 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5971 };
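/*! \brief Illustrative usage sketch for Kernel argument setting; "program", "buffer"
 *  and the kernel name "scale" are assumptions.
 * \code
 *     cl_int err;
 *     cl::Kernel kernel(program, "scale", &err);
 *     kernel.setArg(0, buffer);          // cl::Buffer argument, passed as cl_mem
 *     kernel.setArg(1, 2.0f);            // plain value argument
 *     kernel.setArg(2, cl::Local(256));  // 256 bytes of local memory
 * \endcode
 */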
5972 
5976 class Program : public detail::Wrapper<cl_program>
5977 {
5978 public:
5979 #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
5980  typedef vector<vector<unsigned char>> Binaries;
5981  typedef vector<string> Sources;
5982 #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
5983  typedef vector<std::pair<const void*, size_type> > Binaries;
5984  typedef vector<std::pair<const char*, size_type> > Sources;
5985 #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
5986 
5987  Program(
5988  const string& source,
5989  bool build = false,
5990  cl_int* err = NULL)
5991  {
5992  cl_int error;
5993 
5994  const char * strings = source.c_str();
5995  const size_type length = source.size();
5996 
5997  Context context = Context::getDefault(err);
5998 
5999  object_ = ::clCreateProgramWithSource(
6000  context(), (cl_uint)1, &strings, &length, &error);
6001 
6002  detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
6003 
6004  if (error == CL_SUCCESS && build) {
6005 
6006  error = ::clBuildProgram(
6007  object_,
6008  0,
6009  NULL,
6010 #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6011  "-cl-std=CL2.0",
6012 #else
6013  "",
6014 #endif // #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6015  NULL,
6016  NULL);
6017 
6018  detail::buildErrHandler(error, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6019  }
6020 
6021  if (err != NULL) {
6022  *err = error;
6023  }
6024  }
6025 
6026  Program(
6027  const Context& context,
6028  const string& source,
6029  bool build = false,
6030  cl_int* err = NULL)
6031  {
6032  cl_int error;
6033 
6034  const char * strings = source.c_str();
6035  const size_type length = source.size();
6036 
6037  object_ = ::clCreateProgramWithSource(
6038  context(), (cl_uint)1, &strings, &length, &error);
6039 
6040  detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
6041 
6042  if (error == CL_SUCCESS && build) {
6043  error = ::clBuildProgram(
6044  object_,
6045  0,
6046  NULL,
6047 #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6048  "-cl-std=CL2.0",
6049 #else
6050  "",
6051 #endif // #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6052  NULL,
6053  NULL);
6054 
6055  detail::buildErrHandler(error, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6056  }
6057 
6058  if (err != NULL) {
6059  *err = error;
6060  }
6061  }
6062 
6067  Program(
6068  const Sources& sources,
6069  cl_int* err = NULL)
6070  {
6071  cl_int error;
6072  Context context = Context::getDefault(err);
6073 
6074  const size_type n = (size_type)sources.size();
6075 
6076  vector<size_type> lengths(n);
6077  vector<const char*> strings(n);
6078 
6079  for (size_type i = 0; i < n; ++i) {
6080 #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6081  strings[i] = sources[(int)i].data();
6082  lengths[i] = sources[(int)i].length();
6083 #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6084  strings[i] = sources[(int)i].first;
6085  lengths[i] = sources[(int)i].second;
6086 #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6087  }
6088 
6089  object_ = ::clCreateProgramWithSource(
6090  context(), (cl_uint)n, strings.data(), lengths.data(), &error);
6091 
6092  detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
6093  if (err != NULL) {
6094  *err = error;
6095  }
6096  }
6097 
6102  Program(
6103  const Context& context,
6104  const Sources& sources,
6105  cl_int* err = NULL)
6106  {
6107  cl_int error;
6108 
6109  const size_type n = (size_type)sources.size();
6110 
6111  vector<size_type> lengths(n);
6112  vector<const char*> strings(n);
6113 
6114  for (size_type i = 0; i < n; ++i) {
6115 #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6116  strings[i] = sources[(int)i].data();
6117  lengths[i] = sources[(int)i].length();
6118 #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6119  strings[i] = sources[(int)i].first;
6120  lengths[i] = sources[(int)i].second;
6121 #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6122  }
6123 
6124  object_ = ::clCreateProgramWithSource(
6125  context(), (cl_uint)n, strings.data(), lengths.data(), &error);
6126 
6127  detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
6128  if (err != NULL) {
6129  *err = error;
6130  }
6131  }
6132 
6152  Program(
6153  const Context& context,
6154  const vector<Device>& devices,
6155  const Binaries& binaries,
6156  vector<cl_int>* binaryStatus = NULL,
6157  cl_int* err = NULL)
6158  {
6159  cl_int error;
6160 
6161  const size_type numDevices = devices.size();
6162 
6163  // Catch size mismatch early and return
6164  if(binaries.size() != numDevices) {
6165  error = CL_INVALID_VALUE;
6166  detail::errHandler(error, __CREATE_PROGRAM_WITH_BINARY_ERR);
6167  if (err != NULL) {
6168  *err = error;
6169  }
6170  return;
6171  }
6172 
6173 
6174  vector<size_type> lengths(numDevices);
6175  vector<const unsigned char*> images(numDevices);
6176 #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6177  for (size_type i = 0; i < numDevices; ++i) {
6178  images[i] = binaries[i].data();
6179  lengths[i] = binaries[(int)i].size();
6180  }
6181 #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6182  for (size_type i = 0; i < numDevices; ++i) {
6183  images[i] = (const unsigned char*)binaries[i].first;
6184  lengths[i] = binaries[(int)i].second;
6185  }
6186 #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6187 
6188  vector<cl_device_id> deviceIDs(numDevices);
6189  for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
6190  deviceIDs[deviceIndex] = (devices[deviceIndex])();
6191  }
6192 
6193  if(binaryStatus) {
6194  binaryStatus->resize(numDevices);
6195  }
6196 
6197  object_ = ::clCreateProgramWithBinary(
6198  context(), (cl_uint) devices.size(),
6199  deviceIDs.data(),
6200  lengths.data(), images.data(), (binaryStatus != NULL && numDevices > 0)
6201  ? &binaryStatus->front()
6202  : NULL, &error);
6203 
6204  detail::errHandler(error, __CREATE_PROGRAM_WITH_BINARY_ERR);
6205  if (err != NULL) {
6206  *err = error;
6207  }
6208  }
6209 
6210 
6211 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
6212 
6216  Program(
6217  const Context& context,
6218  const vector<Device>& devices,
6219  const string& kernelNames,
6220  cl_int* err = NULL)
6221  {
6222  cl_int error;
6223 
6224 
6225  size_type numDevices = devices.size();
6226  vector<cl_device_id> deviceIDs(numDevices);
6227  for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
6228  deviceIDs[deviceIndex] = (devices[deviceIndex])();
6229  }
6230 
6231  object_ = ::clCreateProgramWithBuiltInKernels(
6232  context(),
6233  (cl_uint) devices.size(),
6234  deviceIDs.data(),
6235  kernelNames.c_str(),
6236  &error);
6237 
6238  detail::errHandler(error, __CREATE_PROGRAM_WITH_BUILT_IN_KERNELS_ERR);
6239  if (err != NULL) {
6240  *err = error;
6241  }
6242  }
6243 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
6244 
6245  Program() { }
6246 
6247 
6254  explicit Program(const cl_program& program, bool retainObject = false) :
6255  detail::Wrapper<cl_type>(program, retainObject) { }
6256 
6257  Program& operator = (const cl_program& rhs)
6258  {
6259  detail::Wrapper<cl_type>::operator=(rhs);
6260  return *this;
6261  }
6262 
6266  Program(const Program& program) : detail::Wrapper<cl_type>(program) {}
6267 
6271  Program& operator = (const Program &program)
6272  {
6273  detail::Wrapper<cl_type>::operator=(program);
6274  return *this;
6275  }
6276 
6280  Program(Program&& program) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(program)) {}
6281 
6285  Program& operator = (Program &&program)
6286  {
6287  detail::Wrapper<cl_type>::operator=(std::move(program));
6288  return *this;
6289  }
6290 
6291  cl_int build(
6292  const vector<Device>& devices,
6293  const char* options = NULL,
6294  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6295  void* data = NULL) const
6296  {
6297  size_type numDevices = devices.size();
6298  vector<cl_device_id> deviceIDs(numDevices);
6299 
6300  for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
6301  deviceIDs[deviceIndex] = (devices[deviceIndex])();
6302  }
6303 
6304  cl_int buildError = ::clBuildProgram(
6305  object_,
6306  (cl_uint)
6307  devices.size(),
6308  deviceIDs.data(),
6309  options,
6310  notifyFptr,
6311  data);
6312 
6313  return detail::buildErrHandler(buildError, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6314  }
6315 
6316  cl_int build(
6317  const char* options = NULL,
6318  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6319  void* data = NULL) const
6320  {
6321  cl_int buildError = ::clBuildProgram(
6322  object_,
6323  0,
6324  NULL,
6325  options,
6326  notifyFptr,
6327  data);
6328 
6329 
6330  return detail::buildErrHandler(buildError, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6331  }
6332 
6333 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
6334  cl_int compile(
6335  const char* options = NULL,
6336  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6337  void* data = NULL) const
6338  {
6339  cl_int error = ::clCompileProgram(
6340  object_,
6341  0,
6342  NULL,
6343  options,
6344  0,
6345  NULL,
6346  NULL,
6347  notifyFptr,
6348  data);
6349  return detail::buildErrHandler(error, __COMPILE_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6350  }
6351 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
6352 
6353  template <typename T>
6354  cl_int getInfo(cl_program_info name, T* param) const
6355  {
6356  return detail::errHandler(
6357  detail::getInfo(&::clGetProgramInfo, object_, name, param),
6358  __GET_PROGRAM_INFO_ERR);
6359  }
6360 
6361  template <cl_int name> typename
6362  detail::param_traits<detail::cl_program_info, name>::param_type
6363  getInfo(cl_int* err = NULL) const
6364  {
6365  typename detail::param_traits<
6366  detail::cl_program_info, name>::param_type param;
6367  cl_int result = getInfo(name, &param);
6368  if (err != NULL) {
6369  *err = result;
6370  }
6371  return param;
6372  }
6373 
6374  template <typename T>
6375  cl_int getBuildInfo(
6376  const Device& device, cl_program_build_info name, T* param) const
6377  {
6378  return detail::errHandler(
6379  detail::getInfo(
6380  &::clGetProgramBuildInfo, object_, device(), name, param),
6381  __GET_PROGRAM_BUILD_INFO_ERR);
6382  }
6383 
6384  template <cl_int name> typename
6385  detail::param_traits<detail::cl_program_build_info, name>::param_type
6386  getBuildInfo(const Device& device, cl_int* err = NULL) const
6387  {
6388  typename detail::param_traits<
6389  detail::cl_program_build_info, name>::param_type param;
6390  cl_int result = getBuildInfo(device, name, &param);
6391  if (err != NULL) {
6392  *err = result;
6393  }
6394  return param;
6395  }
6396 
6402  template <cl_int name>
6403  vector<std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, name>::param_type>>
6404  getBuildInfo(cl_int *err = NULL) const
6405  {
6406  cl_int result = CL_SUCCESS;
6407 
6408  auto devs = getInfo<CL_PROGRAM_DEVICES>(&result);
6409  vector<std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, name>::param_type>>
6410  devInfo;
6411 
6412  // If there was an initial error from getInfo return the error
6413  if (result != CL_SUCCESS) {
6414  if (err != NULL) {
6415  *err = result;
6416  }
6417  return devInfo;
6418  }
6419 
6420  for (const cl::Device &d : devs) {
6421  typename detail::param_traits<
6422  detail::cl_program_build_info, name>::param_type param;
6423  result = getBuildInfo(d, name, &param);
6424  devInfo.push_back(
6425  std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, name>::param_type>
6426  (d, param));
6427  if (result != CL_SUCCESS) {
6428  // On error, leave the loop and return the error code
6429  break;
6430  }
6431  }
6432  if (err != NULL) {
6433  *err = result;
6434  }
6435  if (result != CL_SUCCESS) {
6436  devInfo.clear();
6437  }
6438  return devInfo;
6439  }
6440 
6441  cl_int createKernels(vector<Kernel>* kernels)
6442  {
6443  cl_uint numKernels;
6444  cl_int err = ::clCreateKernelsInProgram(object_, 0, NULL, &numKernels);
6445  if (err != CL_SUCCESS) {
6446  return detail::errHandler(err, __CREATE_KERNELS_IN_PROGRAM_ERR);
6447  }
6448 
6449  vector<cl_kernel> value(numKernels);
6450 
6451  err = ::clCreateKernelsInProgram(
6452  object_, numKernels, value.data(), NULL);
6453  if (err != CL_SUCCESS) {
6454  return detail::errHandler(err, __CREATE_KERNELS_IN_PROGRAM_ERR);
6455  }
6456 
6457  if (kernels) {
6458  kernels->resize(value.size());
6459 
6460  // Assign to param, constructing with retain behaviour
6461  // to correctly capture each underlying CL object
6462  for (size_type i = 0; i < value.size(); i++) {
6463  // We do not need to retain because this kernel is being created
6464  // by the runtime
6465  (*kernels)[i] = Kernel(value[i], false);
6466  }
6467  }
6468  return CL_SUCCESS;
6469  }
6470 };
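/*! \brief Illustrative usage sketch for Program: build from source against the
 *  default context; the kernel source string is an arbitrary example.
 * \code
 *     cl::string src = "kernel void noop() { }";
 *     cl_int err;
 *     cl::Program program(src, false, &err);   // construct without building
 *     err = program.build("-cl-std=CL1.2");    // build with explicit options
 *     cl::vector<cl::Kernel> kernels;
 *     program.createKernels(&kernels);
 * \endcode
 */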
6471 
6472 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
6473 inline Program linkProgram(
6474  Program input1,
6475  Program input2,
6476  const char* options = NULL,
6477  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6478  void* data = NULL,
6479  cl_int* err = NULL)
6480 {
6481  cl_int error_local = CL_SUCCESS;
6482 
6483  cl_program programs[2] = { input1(), input2() };
6484 
6485  Context ctx = input1.getInfo<CL_PROGRAM_CONTEXT>(&error_local);
6486  if(error_local!=CL_SUCCESS) {
6487  detail::errHandler(error_local, __LINK_PROGRAM_ERR);
6488  }
6489 
6490  cl_program prog = ::clLinkProgram(
6491  ctx(),
6492  0,
6493  NULL,
6494  options,
6495  2,
6496  programs,
6497  notifyFptr,
6498  data,
6499  &error_local);
6500 
6501  detail::errHandler(error_local,__COMPILE_PROGRAM_ERR);
6502  if (err != NULL) {
6503  *err = error_local;
6504  }
6505 
6506  return Program(prog);
6507 }
6508 
6509 inline Program linkProgram(
6510  vector<Program> inputPrograms,
6511  const char* options = NULL,
6512  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6513  void* data = NULL,
6514  cl_int* err = NULL)
6515 {
6516  cl_int error_local = CL_SUCCESS;
6517 
6518  vector<cl_program> programs(inputPrograms.size());
6519 
6520  for (unsigned int i = 0; i < inputPrograms.size(); i++) {
6521  programs[i] = inputPrograms[i]();
6522  }
6523 
6524  Context ctx;
6525  if(inputPrograms.size() > 0) {
6526  ctx = inputPrograms[0].getInfo<CL_PROGRAM_CONTEXT>(&error_local);
6527  if(error_local!=CL_SUCCESS) {
6528  detail::errHandler(error_local, __LINK_PROGRAM_ERR);
6529  }
6530  }
6531  cl_program prog = ::clLinkProgram(
6532  ctx(),
6533  0,
6534  NULL,
6535  options,
6536  (cl_uint)inputPrograms.size(),
6537  programs.data(),
6538  notifyFptr,
6539  data,
6540  &error_local);
6541 
6542  detail::errHandler(error_local,__COMPILE_PROGRAM_ERR);
6543  if (err != NULL) {
6544  *err = error_local;
6545  }
6546 
6547  return Program(prog, false);
6548 }
6549 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
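/*! \brief Illustrative usage sketch for linkProgram, assuming "progA" and "progB"
 *  have already been compiled (e.g. via Program::compile()).
 * \code
 *     cl_int err;
 *     cl::Program linked =
 *         cl::linkProgram(progA, progB, nullptr, nullptr, nullptr, &err);
 * \endcode
 */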
6550 
6551 // Template specialization for CL_PROGRAM_BINARIES
6552 template <>
6553 inline cl_int cl::Program::getInfo(cl_program_info name, vector<vector<unsigned char>>* param) const
6554 {
6555  if (name != CL_PROGRAM_BINARIES) {
6556  return CL_INVALID_VALUE;
6557  }
6558  if (param) {
6559  // Resize the parameter array appropriately for each allocation
6560  // and pass down to the helper
6561 
6562  vector<size_type> sizes = getInfo<CL_PROGRAM_BINARY_SIZES>();
6563  size_type numBinaries = sizes.size();
6564 
6565  // Resize the parameter array and constituent arrays
6566  param->resize(numBinaries);
6567  for (size_type i = 0; i < numBinaries; ++i) {
6568  (*param)[i].resize(sizes[i]);
6569  }
6570 
6571  return detail::errHandler(
6572  detail::getInfo(&::clGetProgramInfo, object_, name, param),
6573  __GET_PROGRAM_INFO_ERR);
6574  }
6575 
6576  return CL_SUCCESS;
6577 }
6578 
6579 template<>
6580 inline vector<vector<unsigned char>> cl::Program::getInfo<CL_PROGRAM_BINARIES>(cl_int* err) const
6581 {
6582  vector<vector<unsigned char>> binariesVectors;
6583 
6584  cl_int result = getInfo(CL_PROGRAM_BINARIES, &binariesVectors);
6585  if (err != NULL) {
6586  *err = result;
6587  }
6588  return binariesVectors;
6589 }
6590 
6591 inline Kernel::Kernel(const Program& program, const char* name, cl_int* err)
6592 {
6593  cl_int error;
6594 
6595  object_ = ::clCreateKernel(program(), name, &error);
6596  detail::errHandler(error, __CREATE_KERNEL_ERR);
6597 
6598  if (err != NULL) {
6599  *err = error;
6600  }
6601 
6602 }
6603 
6604 enum class QueueProperties : cl_command_queue_properties
6605 {
6606  None = 0,
6607  Profiling = CL_QUEUE_PROFILING_ENABLE,
6608  OutOfOrder = CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE,
6609 };
6610 
6611 inline QueueProperties operator|(QueueProperties lhs, QueueProperties rhs)
6612 {
6613  return static_cast<QueueProperties>(static_cast<cl_command_queue_properties>(lhs) | static_cast<cl_command_queue_properties>(rhs));
6614 }
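/*! \brief Illustrative sketch of combining QueueProperties flags; the queue is
 *  created on the default context and device via the CommandQueue constructors below.
 * \code
 *     cl::CommandQueue queue(
 *         cl::QueueProperties::Profiling | cl::QueueProperties::OutOfOrder);
 * \endcode
 */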
6615 
6619 class CommandQueue : public detail::Wrapper<cl_command_queue>
6620 {
6621 private:
6622  static std::once_flag default_initialized_;
6623  static CommandQueue default_;
6624  static cl_int default_error_;
6625 
6631  static void makeDefault()
6632  {
6633  /* We don't want to throw an error from this function, so we have to
6634  * catch and set the error flag.
6635  */
6636 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
6637  try
6638 #endif
6639  {
6640  int error;
6641  Context context = Context::getDefault(&error);
6642 
6643  if (error != CL_SUCCESS) {
6644  default_error_ = error;
6645  }
6646  else {
6647  Device device = Device::getDefault();
6648  default_ = CommandQueue(context, device, 0, &default_error_);
6649  }
6650  }
6651 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
6652  catch (cl::Error &e) {
6653  default_error_ = e.err();
6654  }
6655 #endif
6656  }
6657 
6663  static void makeDefaultProvided(const CommandQueue &c) {
6664  default_ = c;
6665  }
6666 
6667 public:
6668 #ifdef CL_HPP_UNIT_TEST_ENABLE
6669 
6675  static void unitTestClearDefault() {
6676  default_ = CommandQueue();
6677  }
6678 #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
6679 
6680 
6685  CommandQueue(
6686  cl_command_queue_properties properties,
6687  cl_int* err = NULL)
6688  {
6689  cl_int error;
6690 
6691  Context context = Context::getDefault(&error);
6692  detail::errHandler(error, __CREATE_CONTEXT_ERR);
6693 
6694  if (error != CL_SUCCESS) {
6695  if (err != NULL) {
6696  *err = error;
6697  }
6698  }
6699  else {
6700  Device device = context.getInfo<CL_CONTEXT_DEVICES>()[0];
6701  bool useWithProperties;
6702 
6703 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
6704  // Run-time decision based on the actual platform
6705  {
6706  cl_uint version = detail::getContextPlatformVersion(context());
6707  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
6708  }
6709 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
6710  useWithProperties = true;
6711 #else
6712  useWithProperties = false;
6713 #endif
6714 
6715 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
6716  if (useWithProperties) {
6717  cl_queue_properties queue_properties[] = {
6718  CL_QUEUE_PROPERTIES, properties, 0 };
6719  if ((properties & CL_QUEUE_ON_DEVICE) == 0) {
6720  object_ = ::clCreateCommandQueueWithProperties(
6721  context(), device(), queue_properties, &error);
6722  }
6723  else {
6724  error = CL_INVALID_QUEUE_PROPERTIES;
6725  }
6726 
6727  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
6728  if (err != NULL) {
6729  *err = error;
6730  }
6731  }
6732 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
6733 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
6734  if (!useWithProperties) {
6735  object_ = ::clCreateCommandQueue(
6736  context(), device(), properties, &error);
6737 
6738  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
6739  if (err != NULL) {
6740  *err = error;
6741  }
6742  }
6743 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
6744  }
6745  }
6746 
6751  CommandQueue(
6752  QueueProperties properties,
6753  cl_int* err = NULL)
6754  {
6755  cl_int error;
6756 
6757  Context context = Context::getDefault(&error);
6758  detail::errHandler(error, __CREATE_CONTEXT_ERR);
6759 
6760  if (error != CL_SUCCESS) {
6761  if (err != NULL) {
6762  *err = error;
6763  }
6764  }
6765  else {
6766  Device device = context.getInfo<CL_CONTEXT_DEVICES>()[0];
6767  bool useWithProperties;
6768 
6769 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
6770  // Run-time decision based on the actual platform
6771  {
6772  cl_uint version = detail::getContextPlatformVersion(context());
6773  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
6774  }
6775 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
6776  useWithProperties = true;
6777 #else
6778  useWithProperties = false;
6779 #endif
6780 
6781 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
6782  if (useWithProperties) {
6783  cl_queue_properties queue_properties[] = {
6784  CL_QUEUE_PROPERTIES, static_cast<cl_queue_properties>(properties), 0 };
6785 
6786  object_ = ::clCreateCommandQueueWithProperties(
6787  context(), device(), queue_properties, &error);
6788 
6789  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
6790  if (err != NULL) {
6791  *err = error;
6792  }
6793  }
6794 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
6795 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
6796  if (!useWithProperties) {
6797  object_ = ::clCreateCommandQueue(
6798  context(), device(), static_cast<cl_command_queue_properties>(properties), &error);
6799 
6800  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
6801  if (err != NULL) {
6802  *err = error;
6803  }
6804  }
6805 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
6806 
6807  }
6808  }
6809 
6814  explicit CommandQueue(
6815  const Context& context,
6816  cl_command_queue_properties properties = 0,
6817  cl_int* err = NULL)
6818  {
6819  cl_int error;
6820  bool useWithProperties;
6821  vector<cl::Device> devices;
6822  error = context.getInfo(CL_CONTEXT_DEVICES, &devices);
6823 
6824  detail::errHandler(error, __CREATE_CONTEXT_ERR);
6825 
6826  if (error != CL_SUCCESS)
6827  {
6828  if (err != NULL) {
6829  *err = error;
6830  }
6831  return;
6832  }
6833 
6834 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
6835  // Run-time decision based on the actual platform
6836  {
6837  cl_uint version = detail::getContextPlatformVersion(context());
6838  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
6839  }
6840 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
6841  useWithProperties = true;
6842 #else
6843  useWithProperties = false;
6844 #endif
6845 
6846 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
6847  if (useWithProperties) {
6848  cl_queue_properties queue_properties[] = {
6849  CL_QUEUE_PROPERTIES, properties, 0 };
6850  if ((properties & CL_QUEUE_ON_DEVICE) == 0) {
6851  object_ = ::clCreateCommandQueueWithProperties(
6852  context(), devices[0](), queue_properties, &error);
6853  }
6854  else {
6855  error = CL_INVALID_QUEUE_PROPERTIES;
6856  }
6857 
6858  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
6859  if (err != NULL) {
6860  *err = error;
6861  }
6862  }
6863 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
6864 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
6865  if (!useWithProperties) {
6866  object_ = ::clCreateCommandQueue(
6867  context(), devices[0](), properties, &error);
6868 
6869  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
6870  if (err != NULL) {
6871  *err = error;
6872  }
6873  }
6874 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
6875  }
6876 
6881  explicit CommandQueue(
6882  const Context& context,
6883  QueueProperties properties,
6884  cl_int* err = NULL)
6885  {
6886  cl_int error;
6887  bool useWithProperties;
6888  vector<cl::Device> devices;
6889  error = context.getInfo(CL_CONTEXT_DEVICES, &devices);
6890 
6891  detail::errHandler(error, __CREATE_CONTEXT_ERR);
6892 
6893  if (error != CL_SUCCESS)
6894  {
6895  if (err != NULL) {
6896  *err = error;
6897  }
6898  return;
6899  }
6900 
6901 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
6902  // Run-time decision based on the actual platform
6903  {
6904  cl_uint version = detail::getContextPlatformVersion(context());
6905  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
6906  }
6907 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
6908  useWithProperties = true;
6909 #else
6910  useWithProperties = false;
6911 #endif
6912 
6913 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
6914  if (useWithProperties) {
6915  cl_queue_properties queue_properties[] = {
6916  CL_QUEUE_PROPERTIES, static_cast<cl_queue_properties>(properties), 0 };
6917  object_ = ::clCreateCommandQueueWithProperties(
6918  context(), devices[0](), queue_properties, &error);
6919 
6920  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
6921  if (err != NULL) {
6922  *err = error;
6923  }
6924  }
6925 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
6926 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
6927  if (!useWithProperties) {
6928  object_ = ::clCreateCommandQueue(
6929  context(), devices[0](), static_cast<cl_command_queue_properties>(properties), &error);
6930 
6931  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
6932  if (err != NULL) {
6933  *err = error;
6934  }
6935  }
6936 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
6937  }
6938 
6943  CommandQueue(
6944  const Context& context,
6945  const Device& device,
6946  cl_command_queue_properties properties = 0,
6947  cl_int* err = NULL)
6948  {
6949  cl_int error;
6950  bool useWithProperties;
6951 
6952 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
6953  // Run-time decision based on the actual platform
6954  {
6955  cl_uint version = detail::getContextPlatformVersion(context());
6956  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
6957  }
6958 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
6959  useWithProperties = true;
6960 #else
6961  useWithProperties = false;
6962 #endif
6963 
6964 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
6965  if (useWithProperties) {
6966  cl_queue_properties queue_properties[] = {
6967  CL_QUEUE_PROPERTIES, properties, 0 };
6968  object_ = ::clCreateCommandQueueWithProperties(
6969  context(), device(), queue_properties, &error);
6970 
6971  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
6972  if (err != NULL) {
6973  *err = error;
6974  }
6975  }
6976 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
6977 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
6978  if (!useWithProperties) {
6979  object_ = ::clCreateCommandQueue(
6980  context(), device(), properties, &error);
6981 
6982  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
6983  if (err != NULL) {
6984  *err = error;
6985  }
6986  }
6987 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
6988  }
6989 
6994  CommandQueue(
6995  const Context& context,
6996  const Device& device,
6997  QueueProperties properties,
6998  cl_int* err = NULL)
6999  {
7000  cl_int error;
7001  bool useWithProperties;
7002 
7003 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
7004  // Run-time decision based on the actual platform
7005  {
7006  cl_uint version = detail::getContextPlatformVersion(context());
7007  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
7008  }
7009 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
7010  useWithProperties = true;
7011 #else
7012  useWithProperties = false;
7013 #endif
7014 
7015 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7016  if (useWithProperties) {
7017  cl_queue_properties queue_properties[] = {
7018  CL_QUEUE_PROPERTIES, static_cast<cl_queue_properties>(properties), 0 };
7019  object_ = ::clCreateCommandQueueWithProperties(
7020  context(), device(), queue_properties, &error);
7021 
7022  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
7023  if (err != NULL) {
7024  *err = error;
7025  }
7026  }
7027 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
7028 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
7029  if (!useWithProperties) {
7030  object_ = ::clCreateCommandQueue(
7031  context(), device(), static_cast<cl_command_queue_properties>(properties), &error);
7032 
7033  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
7034  if (err != NULL) {
7035  *err = error;
7036  }
7037  }
7038 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
7039  }
7040 
7041  static CommandQueue getDefault(cl_int * err = NULL)
7042  {
7043  std::call_once(default_initialized_, makeDefault);
7044 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7045  detail::errHandler(default_error_, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
7046 #else // CL_HPP_TARGET_OPENCL_VERSION >= 200
7047  detail::errHandler(default_error_, __CREATE_COMMAND_QUEUE_ERR);
7048 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
7049  if (err != NULL) {
7050  *err = default_error_;
7051  }
7052  return default_;
7053  }
7054 
7062  static CommandQueue setDefault(const CommandQueue &default_queue)
7063  {
7064  std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_queue));
7065  detail::errHandler(default_error_);
7066  return default_;
7067  }
7068 
7069  CommandQueue() { }
7070 
7071 
7078  explicit CommandQueue(const cl_command_queue& commandQueue, bool retainObject = false) :
7079  detail::Wrapper<cl_type>(commandQueue, retainObject) { }
7080 
7081  CommandQueue& operator = (const cl_command_queue& rhs)
7082  {
7083  detail::Wrapper<cl_type>::operator=(rhs);
7084  return *this;
7085  }
7086 
7090  CommandQueue(const CommandQueue& queue) : detail::Wrapper<cl_type>(queue) {}
7091 
7095  CommandQueue& operator = (const CommandQueue &queue)
7096  {
7097  detail::Wrapper<cl_type>::operator=(queue);
7098  return *this;
7099  }
7100 
7104  CommandQueue(CommandQueue&& queue) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(queue)) {}
7105 
7109  CommandQueue& operator = (CommandQueue &&queue)
7110  {
7111  detail::Wrapper<cl_type>::operator=(std::move(queue));
7112  return *this;
7113  }
7114 
7115  template <typename T>
7116  cl_int getInfo(cl_command_queue_info name, T* param) const
7117  {
7118  return detail::errHandler(
7119  detail::getInfo(
7120  &::clGetCommandQueueInfo, object_, name, param),
7121  __GET_COMMAND_QUEUE_INFO_ERR);
7122  }
7123 
7124  template <cl_int name> typename
7125  detail::param_traits<detail::cl_command_queue_info, name>::param_type
7126  getInfo(cl_int* err = NULL) const
7127  {
7128  typename detail::param_traits<
7129  detail::cl_command_queue_info, name>::param_type param;
7130  cl_int result = getInfo(name, &param);
7131  if (err != NULL) {
7132  *err = result;
7133  }
7134  return param;
7135  }
7136 
7137  cl_int enqueueReadBuffer(
7138  const Buffer& buffer,
7139  cl_bool blocking,
7140  size_type offset,
7141  size_type size,
7142  void* ptr,
7143  const vector<Event>* events = NULL,
7144  Event* event = NULL) const
7145  {
7146  cl_event tmp;
7147  cl_int err = detail::errHandler(
7148  ::clEnqueueReadBuffer(
7149  object_, buffer(), blocking, offset, size,
7150  ptr,
7151  (events != NULL) ? (cl_uint) events->size() : 0,
7152  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7153  (event != NULL) ? &tmp : NULL),
7154  __ENQUEUE_READ_BUFFER_ERR);
7155 
7156  if (event != NULL && err == CL_SUCCESS)
7157  *event = tmp;
7158 
7159  return err;
7160  }
7161 
7162  cl_int enqueueWriteBuffer(
7163  const Buffer& buffer,
7164  cl_bool blocking,
7165  size_type offset,
7166  size_type size,
7167  const void* ptr,
7168  const vector<Event>* events = NULL,
7169  Event* event = NULL) const
7170  {
7171  cl_event tmp;
7172  cl_int err = detail::errHandler(
7173  ::clEnqueueWriteBuffer(
7174  object_, buffer(), blocking, offset, size,
7175  ptr,
7176  (events != NULL) ? (cl_uint) events->size() : 0,
7177  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7178  (event != NULL) ? &tmp : NULL),
7179  __ENQUEUE_WRITE_BUFFER_ERR);
7180 
7181  if (event != NULL && err == CL_SUCCESS)
7182  *event = tmp;
7183 
7184  return err;
7185  }
7186 
7187  cl_int enqueueCopyBuffer(
7188  const Buffer& src,
7189  const Buffer& dst,
7190  size_type src_offset,
7191  size_type dst_offset,
7192  size_type size,
7193  const vector<Event>* events = NULL,
7194  Event* event = NULL) const
7195  {
7196  cl_event tmp;
7197  cl_int err = detail::errHandler(
7198  ::clEnqueueCopyBuffer(
7199  object_, src(), dst(), src_offset, dst_offset, size,
7200  (events != NULL) ? (cl_uint) events->size() : 0,
7201  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7202  (event != NULL) ? &tmp : NULL),
7203  __ENQEUE_COPY_BUFFER_ERR);
7204 
7205  if (event != NULL && err == CL_SUCCESS)
7206  *event = tmp;
7207 
7208  return err;
7209  }
7210 
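/*! \brief Illustrative sketch of a blocking write followed by a blocking read;
 *  "context" and "queue" are assumed to exist and the data size is arbitrary.
 * \code
 *     std::vector<float> hostData(1024, 1.0f);
 *     cl::Buffer buffer(context, CL_MEM_READ_WRITE, hostData.size() * sizeof(float));
 *     queue.enqueueWriteBuffer(buffer, CL_TRUE, 0,
 *                              hostData.size() * sizeof(float), hostData.data());
 *     queue.enqueueReadBuffer(buffer, CL_TRUE, 0,
 *                             hostData.size() * sizeof(float), hostData.data());
 * \endcode
 */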
7211  cl_int enqueueReadBufferRect(
7212  const Buffer& buffer,
7213  cl_bool blocking,
7214  const array<size_type, 3>& buffer_offset,
7215  const array<size_type, 3>& host_offset,
7216  const array<size_type, 3>& region,
7217  size_type buffer_row_pitch,
7218  size_type buffer_slice_pitch,
7219  size_type host_row_pitch,
7220  size_type host_slice_pitch,
7221  void *ptr,
7222  const vector<Event>* events = NULL,
7223  Event* event = NULL) const
7224  {
7225  cl_event tmp;
7226  cl_int err = detail::errHandler(
7227  ::clEnqueueReadBufferRect(
7228  object_,
7229  buffer(),
7230  blocking,
7231  buffer_offset.data(),
7232  host_offset.data(),
7233  region.data(),
7234  buffer_row_pitch,
7235  buffer_slice_pitch,
7236  host_row_pitch,
7237  host_slice_pitch,
7238  ptr,
7239  (events != NULL) ? (cl_uint) events->size() : 0,
7240  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7241  (event != NULL) ? &tmp : NULL),
7242  __ENQUEUE_READ_BUFFER_RECT_ERR);
7243 
7244  if (event != NULL && err == CL_SUCCESS)
7245  *event = tmp;
7246 
7247  return err;
7248  }
7249 
7250  cl_int enqueueWriteBufferRect(
7251  const Buffer& buffer,
7252  cl_bool blocking,
7253  const array<size_type, 3>& buffer_offset,
7254  const array<size_type, 3>& host_offset,
7255  const array<size_type, 3>& region,
7256  size_type buffer_row_pitch,
7257  size_type buffer_slice_pitch,
7258  size_type host_row_pitch,
7259  size_type host_slice_pitch,
7260  const void *ptr,
7261  const vector<Event>* events = NULL,
7262  Event* event = NULL) const
7263  {
7264  cl_event tmp;
7265  cl_int err = detail::errHandler(
7266  ::clEnqueueWriteBufferRect(
7267  object_,
7268  buffer(),
7269  blocking,
7270  buffer_offset.data(),
7271  host_offset.data(),
7272  region.data(),
7273  buffer_row_pitch,
7274  buffer_slice_pitch,
7275  host_row_pitch,
7276  host_slice_pitch,
7277  ptr,
7278  (events != NULL) ? (cl_uint) events->size() : 0,
7279  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7280  (event != NULL) ? &tmp : NULL),
7281  __ENQUEUE_WRITE_BUFFER_RECT_ERR);
7282 
7283  if (event != NULL && err == CL_SUCCESS)
7284  *event = tmp;
7285 
7286  return err;
7287  }
7288 
7289  cl_int enqueueCopyBufferRect(
7290  const Buffer& src,
7291  const Buffer& dst,
7292  const array<size_type, 3>& src_origin,
7293  const array<size_type, 3>& dst_origin,
7294  const array<size_type, 3>& region,
7295  size_type src_row_pitch,
7296  size_type src_slice_pitch,
7297  size_type dst_row_pitch,
7298  size_type dst_slice_pitch,
7299  const vector<Event>* events = NULL,
7300  Event* event = NULL) const
7301  {
7302  cl_event tmp;
7303  cl_int err = detail::errHandler(
7304  ::clEnqueueCopyBufferRect(
7305  object_,
7306  src(),
7307  dst(),
7308  src_origin.data(),
7309  dst_origin.data(),
7310  region.data(),
7311  src_row_pitch,
7312  src_slice_pitch,
7313  dst_row_pitch,
7314  dst_slice_pitch,
7315  (events != NULL) ? (cl_uint) events->size() : 0,
7316  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7317  (event != NULL) ? &tmp : NULL),
7318  __ENQEUE_COPY_BUFFER_RECT_ERR);
7319 
7320  if (event != NULL && err == CL_SUCCESS)
7321  *event = tmp;
7322 
7323  return err;
7324  }
7325 
7326 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
7327 
7338  template<typename PatternType>
7339  cl_int enqueueFillBuffer(
7340  const Buffer& buffer,
7341  PatternType pattern,
7342  size_type offset,
7343  size_type size,
7344  const vector<Event>* events = NULL,
7345  Event* event = NULL) const
7346  {
7347  cl_event tmp;
7348  cl_int err = detail::errHandler(
7349  ::clEnqueueFillBuffer(
7350  object_,
7351  buffer(),
7352  static_cast<void*>(&pattern),
7353  sizeof(PatternType),
7354  offset,
7355  size,
7356  (events != NULL) ? (cl_uint) events->size() : 0,
7357  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7358  (event != NULL) ? &tmp : NULL),
7359  __ENQUEUE_FILL_BUFFER_ERR);
7360 
7361  if (event != NULL && err == CL_SUCCESS)
7362  *event = tmp;
7363 
7364  return err;
7365  }
7366 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
7367 
7368  cl_int enqueueReadImage(
7369  const Image& image,
7370  cl_bool blocking,
7371  const array<size_type, 3>& origin,
7372  const array<size_type, 3>& region,
7373  size_type row_pitch,
7374  size_type slice_pitch,
7375  void* ptr,
7376  const vector<Event>* events = NULL,
7377  Event* event = NULL) const
7378  {
7379  cl_event tmp;
7380  cl_int err = detail::errHandler(
7381  ::clEnqueueReadImage(
7382  object_,
7383  image(),
7384  blocking,
7385  origin.data(),
7386  region.data(),
7387  row_pitch,
7388  slice_pitch,
7389  ptr,
7390  (events != NULL) ? (cl_uint) events->size() : 0,
7391  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7392  (event != NULL) ? &tmp : NULL),
7393  __ENQUEUE_READ_IMAGE_ERR);
7394 
7395  if (event != NULL && err == CL_SUCCESS)
7396  *event = tmp;
7397 
7398  return err;
7399  }
7400 
7401  cl_int enqueueWriteImage(
7402  const Image& image,
7403  cl_bool blocking,
7404  const array<size_type, 3>& origin,
7405  const array<size_type, 3>& region,
7406  size_type row_pitch,
7407  size_type slice_pitch,
7408  const void* ptr,
7409  const vector<Event>* events = NULL,
7410  Event* event = NULL) const
7411  {
7412  cl_event tmp;
7413  cl_int err = detail::errHandler(
7414  ::clEnqueueWriteImage(
7415  object_,
7416  image(),
7417  blocking,
7418  origin.data(),
7419  region.data(),
7420  row_pitch,
7421  slice_pitch,
7422  ptr,
7423  (events != NULL) ? (cl_uint) events->size() : 0,
7424  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7425  (event != NULL) ? &tmp : NULL),
7426  __ENQUEUE_WRITE_IMAGE_ERR);
7427 
7428  if (event != NULL && err == CL_SUCCESS)
7429  *event = tmp;
7430 
7431  return err;
7432  }
7433 
7434  cl_int enqueueCopyImage(
7435  const Image& src,
7436  const Image& dst,
7437  const array<size_type, 3>& src_origin,
7438  const array<size_type, 3>& dst_origin,
7439  const array<size_type, 3>& region,
7440  const vector<Event>* events = NULL,
7441  Event* event = NULL) const
7442  {
7443  cl_event tmp;
7444  cl_int err = detail::errHandler(
7445  ::clEnqueueCopyImage(
7446  object_,
7447  src(),
7448  dst(),
7449  src_origin.data(),
7450  dst_origin.data(),
7451  region.data(),
7452  (events != NULL) ? (cl_uint) events->size() : 0,
7453  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7454  (event != NULL) ? &tmp : NULL),
7455  __ENQUEUE_COPY_IMAGE_ERR);
7456 
7457  if (event != NULL && err == CL_SUCCESS)
7458  *event = tmp;
7459 
7460  return err;
7461  }
7462 
7463 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
7464 
7471  cl_int enqueueFillImage(
7472  const Image& image,
7473  cl_float4 fillColor,
7474  const array<size_type, 3>& origin,
7475  const array<size_type, 3>& region,
7476  const vector<Event>* events = NULL,
7477  Event* event = NULL) const
7478  {
7479  cl_event tmp;
7480  cl_int err = detail::errHandler(
7481  ::clEnqueueFillImage(
7482  object_,
7483  image(),
7484  static_cast<void*>(&fillColor),
7485  origin.data(),
7486  region.data(),
7487  (events != NULL) ? (cl_uint) events->size() : 0,
7488  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7489  (event != NULL) ? &tmp : NULL),
7490  __ENQUEUE_FILL_IMAGE_ERR);
7491 
7492  if (event != NULL && err == CL_SUCCESS)
7493  *event = tmp;
7494 
7495  return err;
7496  }
7497 
7505  cl_int enqueueFillImage(
7506  const Image& image,
7507  cl_int4 fillColor,
7508  const array<size_type, 3>& origin,
7509  const array<size_type, 3>& region,
7510  const vector<Event>* events = NULL,
7511  Event* event = NULL) const
7512  {
7513  cl_event tmp;
7514  cl_int err = detail::errHandler(
7515  ::clEnqueueFillImage(
7516  object_,
7517  image(),
7518  static_cast<void*>(&fillColor),
7519  origin.data(),
7520  region.data(),
7521  (events != NULL) ? (cl_uint) events->size() : 0,
7522  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7523  (event != NULL) ? &tmp : NULL),
7524  __ENQUEUE_FILL_IMAGE_ERR);
7525 
7526  if (event != NULL && err == CL_SUCCESS)
7527  *event = tmp;
7528 
7529  return err;
7530  }
7531 
7539  cl_int enqueueFillImage(
7540  const Image& image,
7541  cl_uint4 fillColor,
7542  const array<size_type, 3>& origin,
7543  const array<size_type, 3>& region,
7544  const vector<Event>* events = NULL,
7545  Event* event = NULL) const
7546  {
7547  cl_event tmp;
7548  cl_int err = detail::errHandler(
7549  ::clEnqueueFillImage(
7550  object_,
7551  image(),
7552  static_cast<void*>(&fillColor),
7553  origin.data(),
7554  region.data(),
7555  (events != NULL) ? (cl_uint) events->size() : 0,
7556  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7557  (event != NULL) ? &tmp : NULL),
7558  __ENQUEUE_FILL_IMAGE_ERR);
7559 
7560  if (event != NULL && err == CL_SUCCESS)
7561  *event = tmp;
7562 
7563  return err;
7564  }
7565 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
7566 
7567  cl_int enqueueCopyImageToBuffer(
7568  const Image& src,
7569  const Buffer& dst,
7570  const array<size_type, 3>& src_origin,
7571  const array<size_type, 3>& region,
7572  size_type dst_offset,
7573  const vector<Event>* events = NULL,
7574  Event* event = NULL) const
7575  {
7576  cl_event tmp;
7577  cl_int err = detail::errHandler(
7578  ::clEnqueueCopyImageToBuffer(
7579  object_,
7580  src(),
7581  dst(),
7582  src_origin.data(),
7583  region.data(),
7584  dst_offset,
7585  (events != NULL) ? (cl_uint) events->size() : 0,
7586  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7587  (event != NULL) ? &tmp : NULL),
7588  __ENQUEUE_COPY_IMAGE_TO_BUFFER_ERR);
7589 
7590  if (event != NULL && err == CL_SUCCESS)
7591  *event = tmp;
7592 
7593  return err;
7594  }
7595 
7596  cl_int enqueueCopyBufferToImage(
7597  const Buffer& src,
7598  const Image& dst,
7599  size_type src_offset,
7600  const array<size_type, 3>& dst_origin,
7601  const array<size_type, 3>& region,
7602  const vector<Event>* events = NULL,
7603  Event* event = NULL) const
7604  {
7605  cl_event tmp;
7606  cl_int err = detail::errHandler(
7607  ::clEnqueueCopyBufferToImage(
7608  object_,
7609  src(),
7610  dst(),
7611  src_offset,
7612  dst_origin.data(),
7613  region.data(),
7614  (events != NULL) ? (cl_uint) events->size() : 0,
7615  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7616  (event != NULL) ? &tmp : NULL),
7617  __ENQUEUE_COPY_BUFFER_TO_IMAGE_ERR);
7618 
7619  if (event != NULL && err == CL_SUCCESS)
7620  *event = tmp;
7621 
7622  return err;
7623  }
7624 
7625  void* enqueueMapBuffer(
7626  const Buffer& buffer,
7627  cl_bool blocking,
7628  cl_map_flags flags,
7629  size_type offset,
7630  size_type size,
7631  const vector<Event>* events = NULL,
7632  Event* event = NULL,
7633  cl_int* err = NULL) const
7634  {
7635  cl_event tmp;
7636  cl_int error;
7637  void * result = ::clEnqueueMapBuffer(
7638  object_, buffer(), blocking, flags, offset, size,
7639  (events != NULL) ? (cl_uint) events->size() : 0,
7640  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7641  (event != NULL) ? &tmp : NULL,
7642  &error);
7643 
7644  detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
7645  if (err != NULL) {
7646  *err = error;
7647  }
7648  if (event != NULL && error == CL_SUCCESS)
7649  *event = tmp;
7650 
7651  return result;
7652  }
7653 
7654  void* enqueueMapImage(
7655  const Image& buffer,
7656  cl_bool blocking,
7657  cl_map_flags flags,
7658  const array<size_type, 3>& origin,
7659  const array<size_type, 3>& region,
7660  size_type * row_pitch,
7661  size_type * slice_pitch,
7662  const vector<Event>* events = NULL,
7663  Event* event = NULL,
7664  cl_int* err = NULL) const
7665  {
7666  cl_event tmp;
7667  cl_int error;
7668  void * result = ::clEnqueueMapImage(
7669  object_, buffer(), blocking, flags,
7670  origin.data(),
7671  region.data(),
7672  row_pitch, slice_pitch,
7673  (events != NULL) ? (cl_uint) events->size() : 0,
7674  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7675  (event != NULL) ? &tmp : NULL,
7676  &error);
7677 
7678  detail::errHandler(error, __ENQUEUE_MAP_IMAGE_ERR);
7679  if (err != NULL) {
7680  *err = error;
7681  }
7682  if (event != NULL && error == CL_SUCCESS)
7683  *event = tmp;
7684  return result;
7685  }
7686 
7687 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7688 
7692  template<typename T>
7693  cl_int enqueueMapSVM(
7694  T* ptr,
7695  cl_bool blocking,
7696  cl_map_flags flags,
7697  size_type size,
7698  const vector<Event>* events = NULL,
7699  Event* event = NULL) const
7700  {
7701  cl_event tmp;
7702  cl_int err = detail::errHandler(::clEnqueueSVMMap(
7703  object_, blocking, flags, static_cast<void*>(ptr), size,
7704  (events != NULL) ? (cl_uint)events->size() : 0,
7705  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
7706  (event != NULL) ? &tmp : NULL),
7707  __ENQUEUE_MAP_BUFFER_ERR);
7708 
7709  if (event != NULL && err == CL_SUCCESS)
7710  *event = tmp;
7711 
7712  return err;
7713  }
7714 
7715 
7720  template<typename T, class D>
7721  cl_int enqueueMapSVM(
7722  cl::pointer<T, D> &ptr,
7723  cl_bool blocking,
7724  cl_map_flags flags,
7725  size_type size,
7726  const vector<Event>* events = NULL,
7727  Event* event = NULL) const
7728  {
7729  cl_event tmp;
7730  cl_int err = detail::errHandler(::clEnqueueSVMMap(
7731  object_, blocking, flags, static_cast<void*>(ptr.get()), size,
7732  (events != NULL) ? (cl_uint)events->size() : 0,
7733  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
7734  (event != NULL) ? &tmp : NULL),
7735  __ENQUEUE_MAP_BUFFER_ERR);
7736 
7737  if (event != NULL && err == CL_SUCCESS)
7738  *event = tmp;
7739 
7740  return err;
7741  }
7742 
7747  template<typename T, class Alloc>
7748  cl_int enqueueMapSVM(
7749  cl::vector<T, Alloc> &container,
7750  cl_bool blocking,
7751  cl_map_flags flags,
7752  const vector<Event>* events = NULL,
7753  Event* event = NULL) const
7754  {
7755  cl_event tmp;
7756  cl_int err = detail::errHandler(::clEnqueueSVMMap(
7757  object_, blocking, flags, static_cast<void*>(container.data()), container.size(),
7758  (events != NULL) ? (cl_uint)events->size() : 0,
7759  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
7760  (event != NULL) ? &tmp : NULL),
7761  __ENQUEUE_MAP_BUFFER_ERR);
7762 
7763  if (event != NULL && err == CL_SUCCESS)
7764  *event = tmp;
7765 
7766  return err;
7767  }
7768 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7769 
7770  cl_int enqueueUnmapMemObject(
7771  const Memory& memory,
7772  void* mapped_ptr,
7773  const vector<Event>* events = NULL,
7774  Event* event = NULL) const
7775  {
7776  cl_event tmp;
7777  cl_int err = detail::errHandler(
7778  ::clEnqueueUnmapMemObject(
7779  object_, memory(), mapped_ptr,
7780  (events != NULL) ? (cl_uint) events->size() : 0,
7781  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7782  (event != NULL) ? &tmp : NULL),
7783  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
7784 
7785  if (event != NULL && err == CL_SUCCESS)
7786  *event = tmp;
7787 
7788  return err;
7789  }
7790 
7791 
7792 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7793 
7797  template<typename T>
7798  cl_int enqueueUnmapSVM(
7799  T* ptr,
7800  const vector<Event>* events = NULL,
7801  Event* event = NULL) const
7802  {
7803  cl_event tmp;
7804  cl_int err = detail::errHandler(
7805  ::clEnqueueSVMUnmap(
7806  object_, static_cast<void*>(ptr),
7807  (events != NULL) ? (cl_uint)events->size() : 0,
7808  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
7809  (event != NULL) ? &tmp : NULL),
7810  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
7811 
7812  if (event != NULL && err == CL_SUCCESS)
7813  *event = tmp;
7814 
7815  return err;
7816  }
7817 
7822  template<typename T, class D>
7823  cl_int enqueueUnmapSVM(
7824  cl::pointer<T, D> &ptr,
7825  const vector<Event>* events = NULL,
7826  Event* event = NULL) const
7827  {
7828  cl_event tmp;
7829  cl_int err = detail::errHandler(
7830  ::clEnqueueSVMUnmap(
7831  object_, static_cast<void*>(ptr.get()),
7832  (events != NULL) ? (cl_uint)events->size() : 0,
7833  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
7834  (event != NULL) ? &tmp : NULL),
7835  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
7836 
7837  if (event != NULL && err == CL_SUCCESS)
7838  *event = tmp;
7839 
7840  return err;
7841  }
7842 
7847  template<typename T, class Alloc>
7848  cl_int enqueueUnmapSVM(
7849  cl::vector<T, Alloc> &container,
7850  const vector<Event>* events = NULL,
7851  Event* event = NULL) const
7852  {
7853  cl_event tmp;
7854  cl_int err = detail::errHandler(
7855  ::clEnqueueSVMUnmap(
7856  object_, static_cast<void*>(container.data()),
7857  (events != NULL) ? (cl_uint)events->size() : 0,
7858  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
7859  (event != NULL) ? &tmp : NULL),
7860  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
7861 
7862  if (event != NULL && err == CL_SUCCESS)
7863  *event = tmp;
7864 
7865  return err;
7866  }
7867 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7868 
7869 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
7870 
7881  cl_int enqueueMarkerWithWaitList(
7882  const vector<Event> *events = 0,
7883  Event *event = 0) const
7884  {
7885  cl_event tmp;
7886  cl_int err = detail::errHandler(
7887  ::clEnqueueMarkerWithWaitList(
7888  object_,
7889  (events != NULL) ? (cl_uint) events->size() : 0,
7890  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7891  (event != NULL) ? &tmp : NULL),
7892  __ENQUEUE_MARKER_WAIT_LIST_ERR);
7893 
7894  if (event != NULL && err == CL_SUCCESS)
7895  *event = tmp;
7896 
7897  return err;
7898  }
7899 
7911  cl_int enqueueBarrierWithWaitList(
7912  const vector<Event> *events = 0,
7913  Event *event = 0) const
7914  {
7915  cl_event tmp;
7916  cl_int err = detail::errHandler(
7917  ::clEnqueueBarrierWithWaitList(
7918  object_,
7919  (events != NULL) ? (cl_uint) events->size() : 0,
7920  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7921  (event != NULL) ? &tmp : NULL),
7922  __ENQUEUE_BARRIER_WAIT_LIST_ERR);
7923 
7924  if (event != NULL && err == CL_SUCCESS)
7925  *event = tmp;
7926 
7927  return err;
7928  }
7929 
7934  cl_int enqueueMigrateMemObjects(
7935  const vector<Memory> &memObjects,
7936  cl_mem_migration_flags flags,
7937  const vector<Event>* events = NULL,
7938  Event* event = NULL
7939  ) const
7940  {
7941  cl_event tmp;
7942 
7943  vector<cl_mem> localMemObjects(memObjects.size());
7944 
7945  for( int i = 0; i < (int)memObjects.size(); ++i ) {
7946  localMemObjects[i] = memObjects[i]();
7947  }
7948 
7949 
7950  cl_int err = detail::errHandler(
7951  ::clEnqueueMigrateMemObjects(
7952  object_,
7953  (cl_uint)memObjects.size(),
7954  localMemObjects.data(),
7955  flags,
7956  (events != NULL) ? (cl_uint) events->size() : 0,
7957  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7958  (event != NULL) ? &tmp : NULL),
7959  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
7960 
7961  if (event != NULL && err == CL_SUCCESS)
7962  *event = tmp;
7963 
7964  return err;
7965  }
7966 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
7967 
7968  cl_int enqueueNDRangeKernel(
7969  const Kernel& kernel,
7970  const NDRange& offset,
7971  const NDRange& global,
7972  const NDRange& local = NullRange,
7973  const vector<Event>* events = NULL,
7974  Event* event = NULL) const
7975  {
7976  cl_event tmp;
7977  cl_int err = detail::errHandler(
7978  ::clEnqueueNDRangeKernel(
7979  object_, kernel(), (cl_uint) global.dimensions(),
7980  offset.dimensions() != 0 ? (const size_type*) offset : NULL,
7981  (const size_type*) global,
7982  local.dimensions() != 0 ? (const size_type*) local : NULL,
7983  (events != NULL) ? (cl_uint) events->size() : 0,
7984  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7985  (event != NULL) ? &tmp : NULL),
7986  __ENQUEUE_NDRANGE_KERNEL_ERR);
7987 
7988  if (event != NULL && err == CL_SUCCESS)
7989  *event = tmp;
7990 
7991  return err;
7992  }
7993 
7994 #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
7995  CL_EXT_PREFIX__VERSION_1_2_DEPRECATED cl_int enqueueTask(
7996  const Kernel& kernel,
7997  const vector<Event>* events = NULL,
7998  Event* event = NULL) const CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED
7999  {
8000  cl_event tmp;
8001  cl_int err = detail::errHandler(
8002  ::clEnqueueTask(
8003  object_, kernel(),
8004  (events != NULL) ? (cl_uint) events->size() : 0,
8005  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8006  (event != NULL) ? &tmp : NULL),
8007  __ENQUEUE_TASK_ERR);
8008 
8009  if (event != NULL && err == CL_SUCCESS)
8010  *event = tmp;
8011 
8012  return err;
8013  }
8014 #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
8015 
8016  cl_int enqueueNativeKernel(
8017  void (CL_CALLBACK *userFptr)(void *),
8018  std::pair<void*, size_type> args,
8019  const vector<Memory>* mem_objects = NULL,
8020  const vector<const void*>* mem_locs = NULL,
8021  const vector<Event>* events = NULL,
8022  Event* event = NULL) const
8023  {
8024  size_type elements = 0;
8025  if (mem_objects != NULL) {
8026  elements = mem_objects->size();
8027  }
8028  vector<cl_mem> mems(elements);
8029  for (unsigned int i = 0; i < elements; i++) {
8030  mems[i] = ((*mem_objects)[i])();
8031  }
8032 
8033  cl_event tmp;
8034  cl_int err = detail::errHandler(
8035  ::clEnqueueNativeKernel(
8036  object_, userFptr, args.first, args.second,
8037  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8038  mems.data(),
8039  (mem_locs != NULL && mem_locs->size() > 0) ? (const void **) &mem_locs->front() : NULL,
8040  (events != NULL) ? (cl_uint) events->size() : 0,
8041  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8042  (event != NULL) ? &tmp : NULL),
8043  __ENQUEUE_NATIVE_KERNEL);
8044 
8045  if (event != NULL && err == CL_SUCCESS)
8046  *event = tmp;
8047 
8048  return err;
8049  }
8050 
8054 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
8055  CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
8056  cl_int enqueueMarker(Event* event = NULL) const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
8057  {
8058  cl_event tmp;
8059  cl_int err = detail::errHandler(
8060  ::clEnqueueMarker(
8061  object_,
8062  (event != NULL) ? &tmp : NULL),
8063  __ENQUEUE_MARKER_ERR);
8064 
8065  if (event != NULL && err == CL_SUCCESS)
8066  *event = tmp;
8067 
8068  return err;
8069  }
8070 
8071  CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
8072  cl_int enqueueWaitForEvents(const vector<Event>& events) const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
8073  {
8074  return detail::errHandler(
8075  ::clEnqueueWaitForEvents(
8076  object_,
8077  (cl_uint) events.size(),
8078  events.size() > 0 ? (const cl_event*) &events.front() : NULL),
8079  __ENQUEUE_WAIT_FOR_EVENTS_ERR);
8080  }
8081 #endif // defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
8082 
8083  cl_int enqueueAcquireGLObjects(
8084  const vector<Memory>* mem_objects = NULL,
8085  const vector<Event>* events = NULL,
8086  Event* event = NULL) const
8087  {
8088  cl_event tmp;
8089  cl_int err = detail::errHandler(
8090  ::clEnqueueAcquireGLObjects(
8091  object_,
8092  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8093  (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
8094  (events != NULL) ? (cl_uint) events->size() : 0,
8095  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8096  (event != NULL) ? &tmp : NULL),
8097  __ENQUEUE_ACQUIRE_GL_ERR);
8098 
8099  if (event != NULL && err == CL_SUCCESS)
8100  *event = tmp;
8101 
8102  return err;
8103  }
8104 
8105  cl_int enqueueReleaseGLObjects(
8106  const vector<Memory>* mem_objects = NULL,
8107  const vector<Event>* events = NULL,
8108  Event* event = NULL) const
8109  {
8110  cl_event tmp;
8111  cl_int err = detail::errHandler(
8112  ::clEnqueueReleaseGLObjects(
8113  object_,
8114  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8115  (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
8116  (events != NULL) ? (cl_uint) events->size() : 0,
8117  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8118  (event != NULL) ? &tmp : NULL),
8119  __ENQUEUE_RELEASE_GL_ERR);
8120 
8121  if (event != NULL && err == CL_SUCCESS)
8122  *event = tmp;
8123 
8124  return err;
8125  }
8126 
8127 #if defined (CL_HPP_USE_DX_INTEROP)
8128 typedef CL_API_ENTRY cl_int (CL_API_CALL *PFN_clEnqueueAcquireD3D10ObjectsKHR)(
8129  cl_command_queue command_queue, cl_uint num_objects,
8130  const cl_mem* mem_objects, cl_uint num_events_in_wait_list,
8131  const cl_event* event_wait_list, cl_event* event);
8132 typedef CL_API_ENTRY cl_int (CL_API_CALL *PFN_clEnqueueReleaseD3D10ObjectsKHR)(
8133  cl_command_queue command_queue, cl_uint num_objects,
8134  const cl_mem* mem_objects, cl_uint num_events_in_wait_list,
8135  const cl_event* event_wait_list, cl_event* event);
8136 
8137  cl_int enqueueAcquireD3D10Objects(
8138  const vector<Memory>* mem_objects = NULL,
8139  const vector<Event>* events = NULL,
8140  Event* event = NULL) const
8141  {
8142  static PFN_clEnqueueAcquireD3D10ObjectsKHR pfn_clEnqueueAcquireD3D10ObjectsKHR = NULL;
8143 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
8144  cl_context context = getInfo<CL_QUEUE_CONTEXT>();
8145  cl::Device device(getInfo<CL_QUEUE_DEVICE>());
8146  cl_platform_id platform = device.getInfo<CL_DEVICE_PLATFORM>();
8147  CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, clEnqueueAcquireD3D10ObjectsKHR);
8148 #endif
8149 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
8150  CL_HPP_INIT_CL_EXT_FCN_PTR_(clEnqueueAcquireD3D10ObjectsKHR);
8151 #endif
8152 
8153  cl_event tmp;
8154  cl_int err = detail::errHandler(
8155  pfn_clEnqueueAcquireD3D10ObjectsKHR(
8156  object_,
8157  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8158  (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
8159  (events != NULL) ? (cl_uint) events->size() : 0,
8160  (events != NULL) ? (cl_event*) &events->front() : NULL,
8161  (event != NULL) ? &tmp : NULL),
8162  __ENQUEUE_ACQUIRE_GL_ERR);
8163 
8164  if (event != NULL && err == CL_SUCCESS)
8165  *event = tmp;
8166 
8167  return err;
8168  }
8169 
8170  cl_int enqueueReleaseD3D10Objects(
8171  const vector<Memory>* mem_objects = NULL,
8172  const vector<Event>* events = NULL,
8173  Event* event = NULL) const
8174  {
8175  static PFN_clEnqueueReleaseD3D10ObjectsKHR pfn_clEnqueueReleaseD3D10ObjectsKHR = NULL;
8176 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
8177  cl_context context = getInfo<CL_QUEUE_CONTEXT>();
8178  cl::Device device(getInfo<CL_QUEUE_DEVICE>());
8179  cl_platform_id platform = device.getInfo<CL_DEVICE_PLATFORM>();
8180  CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, clEnqueueReleaseD3D10ObjectsKHR);
8181 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
8182 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
8183  CL_HPP_INIT_CL_EXT_FCN_PTR_(clEnqueueReleaseD3D10ObjectsKHR);
8184 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
8185 
8186  cl_event tmp;
8187  cl_int err = detail::errHandler(
8188  pfn_clEnqueueReleaseD3D10ObjectsKHR(
8189  object_,
8190  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8191  (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
8192  (events != NULL) ? (cl_uint) events->size() : 0,
8193  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8194  (event != NULL) ? &tmp : NULL),
8195  __ENQUEUE_RELEASE_GL_ERR);
8196 
8197  if (event != NULL && err == CL_SUCCESS)
8198  *event = tmp;
8199 
8200  return err;
8201  }
8202 #endif
8203 
8207 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
8208  CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
8209  cl_int enqueueBarrier() const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
8210  {
8211  return detail::errHandler(
8212  ::clEnqueueBarrier(object_),
8213  __ENQUEUE_BARRIER_ERR);
8214  }
8215 #endif // CL_USE_DEPRECATED_OPENCL_1_1_APIS
8216 
8217  cl_int flush() const
8218  {
8219  return detail::errHandler(::clFlush(object_), __FLUSH_ERR);
8220  }
8221 
8222  cl_int finish() const
8223  {
8224  return detail::errHandler(::clFinish(object_), __FINISH_ERR);
8225  }
8226 }; // CommandQueue
8227 
8228 CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag CommandQueue::default_initialized_;
8229 CL_HPP_DEFINE_STATIC_MEMBER_ CommandQueue CommandQueue::default_;
8230 CL_HPP_DEFINE_STATIC_MEMBER_ cl_int CommandQueue::default_error_ = CL_SUCCESS;
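// ---------------------------------------------------------------------------
// Editor's usage sketch (not part of cl2.hpp): minimal host-side use of the
// CommandQueue wrapper defined above. Assumes CL_HPP_TARGET_OPENCL_VERSION is
// set, <CL/cl2.hpp> is included, a default platform/device is available, and
// that "vecaddKernel" is an already-built cl::Kernel (hypothetical name).
//
//   cl::CommandQueue queue = cl::CommandQueue::getDefault();
//   std::vector<float> host(1024, 1.0f);
//   cl::Buffer a(CL_MEM_READ_WRITE, host.size() * sizeof(float));
//   queue.enqueueWriteBuffer(a, CL_TRUE, 0, host.size() * sizeof(float), host.data());
//   queue.enqueueNDRangeKernel(vecaddKernel, cl::NullRange, cl::NDRange(1024), cl::NullRange);
//   queue.enqueueReadBuffer(a, CL_TRUE, 0, host.size() * sizeof(float), host.data());
//   queue.finish();
// ---------------------------------------------------------------------------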
8231 
8232 
8233 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8234 enum class DeviceQueueProperties : cl_command_queue_properties
8235 {
8236  None = 0,
8237  Profiling = CL_QUEUE_PROFILING_ENABLE,
8238 };
8239 
8240 inline DeviceQueueProperties operator|(DeviceQueueProperties lhs, DeviceQueueProperties rhs)
8241 {
8242  return static_cast<DeviceQueueProperties>(static_cast<cl_command_queue_properties>(lhs) | static_cast<cl_command_queue_properties>(rhs));
8243 }
8244 
8248 class DeviceCommandQueue : public detail::Wrapper<cl_command_queue>
8249 {
8250 public:
8251 
8255  DeviceCommandQueue() { }
8256 
8260  DeviceCommandQueue(DeviceQueueProperties properties, cl_int* err = NULL)
8261  {
8262  cl_int error;
8263  cl::Context context = cl::Context::getDefault();
8264  cl::Device device = cl::Device::getDefault();
8265 
8266  cl_command_queue_properties mergedProperties =
8267  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | static_cast<cl_command_queue_properties>(properties);
8268 
8269  cl_queue_properties queue_properties[] = {
8270  CL_QUEUE_PROPERTIES, mergedProperties, 0 };
8271  object_ = ::clCreateCommandQueueWithProperties(
8272  context(), device(), queue_properties, &error);
8273 
8274  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8275  if (err != NULL) {
8276  *err = error;
8277  }
8278  }
8279 
8283  DeviceCommandQueue(
8284  const Context& context,
8285  const Device& device,
8286  DeviceQueueProperties properties = DeviceQueueProperties::None,
8287  cl_int* err = NULL)
8288  {
8289  cl_int error;
8290 
8291  cl_command_queue_properties mergedProperties =
8292  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | static_cast<cl_command_queue_properties>(properties);
8293  cl_queue_properties queue_properties[] = {
8294  CL_QUEUE_PROPERTIES, mergedProperties, 0 };
8295  object_ = ::clCreateCommandQueueWithProperties(
8296  context(), device(), queue_properties, &error);
8297 
8298  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8299  if (err != NULL) {
8300  *err = error;
8301  }
8302  }
8303 
8307  DeviceCommandQueue(
8308  const Context& context,
8309  const Device& device,
8310  cl_uint queueSize,
8311  DeviceQueueProperties properties = DeviceQueueProperties::None,
8312  cl_int* err = NULL)
8313  {
8314  cl_int error;
8315 
8316  cl_command_queue_properties mergedProperties =
8317  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | static_cast<cl_command_queue_properties>(properties);
8318  cl_queue_properties queue_properties[] = {
8319  CL_QUEUE_PROPERTIES, mergedProperties,
8320  CL_QUEUE_SIZE, queueSize,
8321  0 };
8322  object_ = ::clCreateCommandQueueWithProperties(
8323  context(), device(), queue_properties, &error);
8324 
8325  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8326  if (err != NULL) {
8327  *err = error;
8328  }
8329  }
8330 
8337  explicit DeviceCommandQueue(const cl_command_queue& commandQueue, bool retainObject = false) :
8338  detail::Wrapper<cl_type>(commandQueue, retainObject) { }
8339 
8340  DeviceCommandQueue& operator = (const cl_command_queue& rhs)
8341  {
8342  detail::Wrapper<cl_type>::operator=(rhs);
8343  return *this;
8344  }
8345 
8349  DeviceCommandQueue(const DeviceCommandQueue& queue) : detail::Wrapper<cl_type>(queue) {}
8350 
8354  DeviceCommandQueue& operator = (const DeviceCommandQueue &queue)
8355  {
8356  detail::Wrapper<cl_type>::operator=(queue);
8357  return *this;
8358  }
8359 
8363  DeviceCommandQueue(DeviceCommandQueue&& queue) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(queue)) {}
8364 
8368  DeviceCommandQueue& operator = (DeviceCommandQueue &&queue)
8369  {
8370  detail::Wrapper<cl_type>::operator=(std::move(queue));
8371  return *this;
8372  }
8373 
8374  template <typename T>
8375  cl_int getInfo(cl_command_queue_info name, T* param) const
8376  {
8377  return detail::errHandler(
8378  detail::getInfo(
8379  &::clGetCommandQueueInfo, object_, name, param),
8380  __GET_COMMAND_QUEUE_INFO_ERR);
8381  }
8382 
8383  template <cl_int name> typename
8384  detail::param_traits<detail::cl_command_queue_info, name>::param_type
8385  getInfo(cl_int* err = NULL) const
8386  {
8387  typename detail::param_traits<
8388  detail::cl_command_queue_info, name>::param_type param;
8389  cl_int result = getInfo(name, &param);
8390  if (err != NULL) {
8391  *err = result;
8392  }
8393  return param;
8394  }
8395 
8402  static DeviceCommandQueue makeDefault(
8403  cl_int *err = nullptr)
8404  {
8405  cl_int error;
8406  cl::Context context = cl::Context::getDefault();
8407  cl::Device device = cl::Device::getDefault();
8408 
8409  cl_command_queue_properties properties =
8410  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | CL_QUEUE_ON_DEVICE_DEFAULT;
8411  cl_queue_properties queue_properties[] = {
8412  CL_QUEUE_PROPERTIES, properties,
8413  0 };
8414  DeviceCommandQueue deviceQueue(
8415  ::clCreateCommandQueueWithProperties(
8416  context(), device(), queue_properties, &error));
8417 
8418  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8419  if (err != NULL) {
8420  *err = error;
8421  }
8422 
8423  return deviceQueue;
8424  }
8425 
8432  static DeviceCommandQueue makeDefault(
8433  const Context &context, const Device &device, cl_int *err = nullptr)
8434  {
8435  cl_int error;
8436 
8437  cl_command_queue_properties properties =
8438  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | CL_QUEUE_ON_DEVICE_DEFAULT;
8439  cl_queue_properties queue_properties[] = {
8440  CL_QUEUE_PROPERTIES, properties,
8441  0 };
8442  DeviceCommandQueue deviceQueue(
8443  ::clCreateCommandQueueWithProperties(
8444  context(), device(), queue_properties, &error));
8445 
8446  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8447  if (err != NULL) {
8448  *err = error;
8449  }
8450 
8451  return deviceQueue;
8452  }
8453 
8460  static DeviceCommandQueue makeDefault(
8461  const Context &context, const Device &device, cl_uint queueSize, cl_int *err = nullptr)
8462  {
8463  cl_int error;
8464 
8465  cl_command_queue_properties properties =
8466  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | CL_QUEUE_ON_DEVICE_DEFAULT;
8467  cl_queue_properties queue_properties[] = {
8468  CL_QUEUE_PROPERTIES, properties,
8469  CL_QUEUE_SIZE, queueSize,
8470  0 };
8471  DeviceCommandQueue deviceQueue(
8472  ::clCreateCommandQueueWithProperties(
8473  context(), device(), queue_properties, &error));
8474 
8475  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8476  if (err != NULL) {
8477  *err = error;
8478  }
8479 
8480  return deviceQueue;
8481  }
8482 }; // DeviceCommandQueue
8483 
8484 namespace detail
8485 {
8486  // Specialization for device command queue
8487  template <>
8489  {
8490  static size_type size(const cl::DeviceCommandQueue&) { return sizeof(cl_command_queue); }
8491  static const cl_command_queue* ptr(const cl::DeviceCommandQueue& value) { return &(value()); }
8492  };
8493 } // namespace detail
8494 
8495 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
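// ---------------------------------------------------------------------------
// Editor's usage sketch (not part of cl2.hpp): creating an on-device queue
// with DeviceCommandQueue::makeDefault as declared above. Requires an
// OpenCL 2.0-capable device; error handling is abbreviated.
//
//   cl_int err;
//   cl::DeviceCommandQueue deviceQueue = cl::DeviceCommandQueue::makeDefault(
//       cl::Context::getDefault(), cl::Device::getDefault(), &err);
// ---------------------------------------------------------------------------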
8496 
8497 
8498 template< typename IteratorType >
8499 Buffer::Buffer(
8500  const Context &context,
8501  IteratorType startIterator,
8502  IteratorType endIterator,
8503  bool readOnly,
8504  bool useHostPtr,
8505  cl_int* err)
8506 {
8507  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
8508  cl_int error;
8509 
8510  cl_mem_flags flags = 0;
8511  if( readOnly ) {
8512  flags |= CL_MEM_READ_ONLY;
8513  }
8514  else {
8515  flags |= CL_MEM_READ_WRITE;
8516  }
8517  if( useHostPtr ) {
8518  flags |= CL_MEM_USE_HOST_PTR;
8519  }
8520 
8521  size_type size = sizeof(DataType)*(endIterator - startIterator);
8522 
8523  if( useHostPtr ) {
8524  object_ = ::clCreateBuffer(context(), flags, size, static_cast<DataType*>(&*startIterator), &error);
8525  } else {
8526  object_ = ::clCreateBuffer(context(), flags, size, 0, &error);
8527  }
8528 
8529  detail::errHandler(error, __CREATE_BUFFER_ERR);
8530  if (err != NULL) {
8531  *err = error;
8532  }
8533 
8534  if( !useHostPtr ) {
8535  CommandQueue queue(context, 0, &error);
8536  detail::errHandler(error, __CREATE_BUFFER_ERR);
8537  if (err != NULL) {
8538  *err = error;
8539  }
8540 
8541  error = cl::copy(queue, startIterator, endIterator, *this);
8542  detail::errHandler(error, __CREATE_BUFFER_ERR);
8543  if (err != NULL) {
8544  *err = error;
8545  }
8546  }
8547 }
8548 
8549 template< typename IteratorType >
8550 Buffer::Buffer(
8551  const CommandQueue &queue,
8552  IteratorType startIterator,
8553  IteratorType endIterator,
8554  bool readOnly,
8555  bool useHostPtr,
8556  cl_int* err)
8557 {
8558  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
8559  cl_int error;
8560 
8561  cl_mem_flags flags = 0;
8562  if (readOnly) {
8563  flags |= CL_MEM_READ_ONLY;
8564  }
8565  else {
8566  flags |= CL_MEM_READ_WRITE;
8567  }
8568  if (useHostPtr) {
8569  flags |= CL_MEM_USE_HOST_PTR;
8570  }
8571 
8572  size_type size = sizeof(DataType)*(endIterator - startIterator);
8573 
8574  Context context = queue.getInfo<CL_QUEUE_CONTEXT>();
8575 
8576  if (useHostPtr) {
8577  object_ = ::clCreateBuffer(context(), flags, size, static_cast<DataType*>(&*startIterator), &error);
8578  }
8579  else {
8580  object_ = ::clCreateBuffer(context(), flags, size, 0, &error);
8581  }
8582 
8583  detail::errHandler(error, __CREATE_BUFFER_ERR);
8584  if (err != NULL) {
8585  *err = error;
8586  }
8587 
8588  if (!useHostPtr) {
8589  error = cl::copy(queue, startIterator, endIterator, *this);
8590  detail::errHandler(error, __CREATE_BUFFER_ERR);
8591  if (err != NULL) {
8592  *err = error;
8593  }
8594  }
8595 }
8596 
8597 inline cl_int enqueueReadBuffer(
8598  const Buffer& buffer,
8599  cl_bool blocking,
8600  size_type offset,
8601  size_type size,
8602  void* ptr,
8603  const vector<Event>* events = NULL,
8604  Event* event = NULL)
8605 {
8606  cl_int error;
8607  CommandQueue queue = CommandQueue::getDefault(&error);
8608 
8609  if (error != CL_SUCCESS) {
8610  return error;
8611  }
8612 
8613  return queue.enqueueReadBuffer(buffer, blocking, offset, size, ptr, events, event);
8614 }
8615 
8616 inline cl_int enqueueWriteBuffer(
8617  const Buffer& buffer,
8618  cl_bool blocking,
8619  size_type offset,
8620  size_type size,
8621  const void* ptr,
8622  const vector<Event>* events = NULL,
8623  Event* event = NULL)
8624 {
8625  cl_int error;
8626  CommandQueue queue = CommandQueue::getDefault(&error);
8627 
8628  if (error != CL_SUCCESS) {
8629  return error;
8630  }
8631 
8632  return queue.enqueueWriteBuffer(buffer, blocking, offset, size, ptr, events, event);
8633 }
8634 
8635 inline void* enqueueMapBuffer(
8636  const Buffer& buffer,
8637  cl_bool blocking,
8638  cl_map_flags flags,
8639  size_type offset,
8640  size_type size,
8641  const vector<Event>* events = NULL,
8642  Event* event = NULL,
8643  cl_int* err = NULL)
8644 {
8645  cl_int error;
8646  CommandQueue queue = CommandQueue::getDefault(&error);
8647  detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
8648  if (err != NULL) {
8649  *err = error;
8650  }
8651 
8652  void * result = ::clEnqueueMapBuffer(
8653  queue(), buffer(), blocking, flags, offset, size,
8654  (events != NULL) ? (cl_uint) events->size() : 0,
8655  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8656  (cl_event*) event,
8657  &error);
8658 
8659  detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
8660  if (err != NULL) {
8661  *err = error;
8662  }
8663  return result;
8664 }
8665 
8666 
8667 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8668 
8673 template<typename T>
8674 inline cl_int enqueueMapSVM(
8675  T* ptr,
8676  cl_bool blocking,
8677  cl_map_flags flags,
8678  size_type size,
8679  const vector<Event>* events,
8680  Event* event)
8681 {
8682  cl_int error;
8683  CommandQueue queue = CommandQueue::getDefault(&error);
8684  if (error != CL_SUCCESS) {
8685  return detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
8686  }
8687 
8688  return queue.enqueueMapSVM(
8689  ptr, blocking, flags, size, events, event);
8690 }
8691 
8697 template<typename T, class D>
8698 inline cl_int enqueueMapSVM(
8699  cl::pointer<T, D> ptr,
8700  cl_bool blocking,
8701  cl_map_flags flags,
8702  size_type size,
8703  const vector<Event>* events = NULL,
8704  Event* event = NULL)
8705 {
8706  cl_int error;
8707  CommandQueue queue = CommandQueue::getDefault(&error);
8708  if (error != CL_SUCCESS) {
8709  return detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
8710  }
8711 
8712  return queue.enqueueMapSVM(
8713  ptr, blocking, flags, size, events, event);
8714 }
8715 
8721 template<typename T, class Alloc>
8722 inline cl_int enqueueMapSVM(
8723  cl::vector<T, Alloc> container,
8724  cl_bool blocking,
8725  cl_map_flags flags,
8726  const vector<Event>* events = NULL,
8727  Event* event = NULL)
8728 {
8729  cl_int error;
8730  CommandQueue queue = CommandQueue::getDefault(&error);
8731  if (error != CL_SUCCESS) {
8732  return detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
8733  }
8734 
8735  return queue.enqueueMapSVM(
8736  container, blocking, flags, events, event);
8737 }
8738 
8739 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8740 
8741 inline cl_int enqueueUnmapMemObject(
8742  const Memory& memory,
8743  void* mapped_ptr,
8744  const vector<Event>* events = NULL,
8745  Event* event = NULL)
8746 {
8747  cl_int error;
8748  CommandQueue queue = CommandQueue::getDefault(&error);
8749  detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
8750  if (error != CL_SUCCESS) {
8751  return error;
8752  }
8753 
8754  cl_event tmp;
8755  cl_int err = detail::errHandler(
8756  ::clEnqueueUnmapMemObject(
8757  queue(), memory(), mapped_ptr,
8758  (events != NULL) ? (cl_uint)events->size() : 0,
8759  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
8760  (event != NULL) ? &tmp : NULL),
8761  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8762 
8763  if (event != NULL && err == CL_SUCCESS)
8764  *event = tmp;
8765 
8766  return err;
8767 }
8768 
8769 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8770 
8775 template<typename T>
8776 inline cl_int enqueueUnmapSVM(
8777  T* ptr,
8778  const vector<Event>* events = NULL,
8779  Event* event = NULL)
8780 {
8781  cl_int error;
8782  CommandQueue queue = CommandQueue::getDefault(&error);
8783  if (error != CL_SUCCESS) {
8784  return detail::errHandler(error, __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8785  }
8786 
8787  return detail::errHandler(queue.enqueueUnmapSVM(ptr, events, event),
8788  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8789 
8790 }
8791 
8797 template<typename T, class D>
8798 inline cl_int enqueueUnmapSVM(
8799  cl::pointer<T, D> &ptr,
8800  const vector<Event>* events = NULL,
8801  Event* event = NULL)
8802 {
8803  cl_int error;
8804  CommandQueue queue = CommandQueue::getDefault(&error);
8805  if (error != CL_SUCCESS) {
8806  return detail::errHandler(error, __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8807  }
8808 
8809  return detail::errHandler(queue.enqueueUnmapSVM(ptr, events, event),
8810  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8811 }
8812 
8818 template<typename T, class Alloc>
8819 inline cl_int enqueueUnmapSVM(
8820  cl::vector<T, Alloc> &container,
8821  const vector<Event>* events = NULL,
8822  Event* event = NULL)
8823 {
8824  cl_int error;
8825  CommandQueue queue = CommandQueue::getDefault(&error);
8826  if (error != CL_SUCCESS) {
8827  return detail::errHandler(error, __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8828  }
8829 
8830  return detail::errHandler(queue.enqueueUnmapSVM(container, events, event),
8831  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8832 }
8833 
8834 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8835 
8836 inline cl_int enqueueCopyBuffer(
8837  const Buffer& src,
8838  const Buffer& dst,
8839  size_type src_offset,
8840  size_type dst_offset,
8841  size_type size,
8842  const vector<Event>* events = NULL,
8843  Event* event = NULL)
8844 {
8845  cl_int error;
8846  CommandQueue queue = CommandQueue::getDefault(&error);
8847 
8848  if (error != CL_SUCCESS) {
8849  return error;
8850  }
8851 
8852  return queue.enqueueCopyBuffer(src, dst, src_offset, dst_offset, size, events, event);
8853 }
8854 
8860 template< typename IteratorType >
8861 inline cl_int copy( IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer )
8862 {
8863  cl_int error;
8864  CommandQueue queue = CommandQueue::getDefault(&error);
8865  if (error != CL_SUCCESS)
8866  return error;
8867 
8868  return cl::copy(queue, startIterator, endIterator, buffer);
8869 }
8870 
8876 template< typename IteratorType >
8877 inline cl_int copy( const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator )
8878 {
8879  cl_int error;
8880  CommandQueue queue = CommandQueue::getDefault(&error);
8881  if (error != CL_SUCCESS)
8882  return error;
8883 
8884  return cl::copy(queue, buffer, startIterator, endIterator);
8885 }
8886 
8892 template< typename IteratorType >
8893 inline cl_int copy( const CommandQueue &queue, IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer )
8894 {
8895  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
8896  cl_int error;
8897 
8898  size_type length = endIterator-startIterator;
8899  size_type byteLength = length*sizeof(DataType);
8900 
8901  DataType *pointer =
8902  static_cast<DataType*>(queue.enqueueMapBuffer(buffer, CL_TRUE, CL_MAP_WRITE, 0, byteLength, 0, 0, &error));
8903  // if exceptions enabled, enqueueMapBuffer will throw
8904  if( error != CL_SUCCESS ) {
8905  return error;
8906  }
8907 #if defined(_MSC_VER)
8908  std::copy(
8909  startIterator,
8910  endIterator,
8911  stdext::checked_array_iterator<DataType*>(
8912  pointer, length));
8913 #else
8914  std::copy(startIterator, endIterator, pointer);
8915 #endif
8916  Event endEvent;
8917  error = queue.enqueueUnmapMemObject(buffer, pointer, 0, &endEvent);
8918  // if exceptions enabled, enqueueUnmapMemObject will throw
8919  if( error != CL_SUCCESS ) {
8920  return error;
8921  }
8922  endEvent.wait();
8923  return CL_SUCCESS;
8924 }
8925 
8931 template< typename IteratorType >
8932 inline cl_int copy( const CommandQueue &queue, const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator )
8933 {
8934  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
8935  cl_int error;
8936 
8937  size_type length = endIterator-startIterator;
8938  size_type byteLength = length*sizeof(DataType);
8939 
8940  DataType *pointer =
8941  static_cast<DataType*>(queue.enqueueMapBuffer(buffer, CL_TRUE, CL_MAP_READ, 0, byteLength, 0, 0, &error));
8942  // if exceptions enabled, enqueueMapBuffer will throw
8943  if( error != CL_SUCCESS ) {
8944  return error;
8945  }
8946  std::copy(pointer, pointer + length, startIterator);
8947  Event endEvent;
8948  error = queue.enqueueUnmapMemObject(buffer, pointer, 0, &endEvent);
8949  // if exceptions enabled, enqueueUnmapMemObject will throw
8950  if( error != CL_SUCCESS ) {
8951  return error;
8952  }
8953  endEvent.wait();
8954  return CL_SUCCESS;
8955 }
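// ---------------------------------------------------------------------------
// Editor's usage sketch (not part of cl2.hpp): round-tripping host data
// through a Buffer with the cl::copy helpers defined above. Both overloads
// map the buffer, use std::copy, and unmap on the default command queue;
// the element count is arbitrary.
//
//   std::vector<int> input(256, 42), output(256);
//   cl::Buffer buf(CL_MEM_READ_WRITE, input.size() * sizeof(int));
//   cl::copy(input.begin(), input.end(), buf);    // host -> device
//   cl::copy(buf, output.begin(), output.end());  // device -> host
// ---------------------------------------------------------------------------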
8956 
8957 
8958 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8959 
8962 template<typename T, class Alloc>
8963 inline cl_int mapSVM(cl::vector<T, Alloc> &container)
8964 {
8965  return enqueueMapSVM(container, CL_TRUE, CL_MAP_READ | CL_MAP_WRITE);
8966 }
8967 
8971 template<typename T, class Alloc>
8972 inline cl_int unmapSVM(cl::vector<T, Alloc> &container)
8973 {
8974  return enqueueUnmapSVM(container);
8975 }
8976 
8977 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
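// ---------------------------------------------------------------------------
// Editor's usage sketch (not part of cl2.hpp): coarse-grained SVM with the
// mapSVM/unmapSVM helpers above. Assumes an OpenCL 2.0 device and uses
// cl::SVMAllocator from earlier in this header; sizes are arbitrary.
//
//   using SvmAlloc = cl::SVMAllocator<int, cl::SVMTraitCoarse<>>;
//   std::vector<int, SvmAlloc> svmVec(1024, 0, SvmAlloc());
//   cl::mapSVM(svmVec);     // make the SVM region host-accessible
//   svmVec[0] = 7;
//   cl::unmapSVM(svmVec);   // hand it back to the device before kernel use
// ---------------------------------------------------------------------------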
8978 
8979 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
8980 inline cl_int enqueueReadBufferRect(
8981  const Buffer& buffer,
8982  cl_bool blocking,
8983  const array<size_type, 3>& buffer_offset,
8984  const array<size_type, 3>& host_offset,
8985  const array<size_type, 3>& region,
8986  size_type buffer_row_pitch,
8987  size_type buffer_slice_pitch,
8988  size_type host_row_pitch,
8989  size_type host_slice_pitch,
8990  void *ptr,
8991  const vector<Event>* events = NULL,
8992  Event* event = NULL)
8993 {
8994  cl_int error;
8995  CommandQueue queue = CommandQueue::getDefault(&error);
8996 
8997  if (error != CL_SUCCESS) {
8998  return error;
8999  }
9000 
9001  return queue.enqueueReadBufferRect(
9002  buffer,
9003  blocking,
9004  buffer_offset,
9005  host_offset,
9006  region,
9007  buffer_row_pitch,
9008  buffer_slice_pitch,
9009  host_row_pitch,
9010  host_slice_pitch,
9011  ptr,
9012  events,
9013  event);
9014 }
9015 
9016 inline cl_int enqueueWriteBufferRect(
9017  const Buffer& buffer,
9018  cl_bool blocking,
9019  const array<size_type, 3>& buffer_offset,
9020  const array<size_type, 3>& host_offset,
9021  const array<size_type, 3>& region,
9022  size_type buffer_row_pitch,
9023  size_type buffer_slice_pitch,
9024  size_type host_row_pitch,
9025  size_type host_slice_pitch,
9026  const void *ptr,
9027  const vector<Event>* events = NULL,
9028  Event* event = NULL)
9029 {
9030  cl_int error;
9031  CommandQueue queue = CommandQueue::getDefault(&error);
9032 
9033  if (error != CL_SUCCESS) {
9034  return error;
9035  }
9036 
9037  return queue.enqueueWriteBufferRect(
9038  buffer,
9039  blocking,
9040  buffer_offset,
9041  host_offset,
9042  region,
9043  buffer_row_pitch,
9044  buffer_slice_pitch,
9045  host_row_pitch,
9046  host_slice_pitch,
9047  ptr,
9048  events,
9049  event);
9050 }
9051 
9052 inline cl_int enqueueCopyBufferRect(
9053  const Buffer& src,
9054  const Buffer& dst,
9055  const array<size_type, 3>& src_origin,
9056  const array<size_type, 3>& dst_origin,
9057  const array<size_type, 3>& region,
9058  size_type src_row_pitch,
9059  size_type src_slice_pitch,
9060  size_type dst_row_pitch,
9061  size_type dst_slice_pitch,
9062  const vector<Event>* events = NULL,
9063  Event* event = NULL)
9064 {
9065  cl_int error;
9066  CommandQueue queue = CommandQueue::getDefault(&error);
9067 
9068  if (error != CL_SUCCESS) {
9069  return error;
9070  }
9071 
9072  return queue.enqueueCopyBufferRect(
9073  src,
9074  dst,
9075  src_origin,
9076  dst_origin,
9077  region,
9078  src_row_pitch,
9079  src_slice_pitch,
9080  dst_row_pitch,
9081  dst_slice_pitch,
9082  events,
9083  event);
9084 }
9085 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
9086 
9087 inline cl_int enqueueReadImage(
9088  const Image& image,
9089  cl_bool blocking,
9090  const array<size_type, 3>& origin,
9091  const array<size_type, 3>& region,
9092  size_type row_pitch,
9093  size_type slice_pitch,
9094  void* ptr,
9095  const vector<Event>* events = NULL,
9096  Event* event = NULL)
9097 {
9098  cl_int error;
9099  CommandQueue queue = CommandQueue::getDefault(&error);
9100 
9101  if (error != CL_SUCCESS) {
9102  return error;
9103  }
9104 
9105  return queue.enqueueReadImage(
9106  image,
9107  blocking,
9108  origin,
9109  region,
9110  row_pitch,
9111  slice_pitch,
9112  ptr,
9113  events,
9114  event);
9115 }
9116 
9117 inline cl_int enqueueWriteImage(
9118  const Image& image,
9119  cl_bool blocking,
9120  const array<size_type, 3>& origin,
9121  const array<size_type, 3>& region,
9122  size_type row_pitch,
9123  size_type slice_pitch,
9124  const void* ptr,
9125  const vector<Event>* events = NULL,
9126  Event* event = NULL)
9127 {
9128  cl_int error;
9129  CommandQueue queue = CommandQueue::getDefault(&error);
9130 
9131  if (error != CL_SUCCESS) {
9132  return error;
9133  }
9134 
9135  return queue.enqueueWriteImage(
9136  image,
9137  blocking,
9138  origin,
9139  region,
9140  row_pitch,
9141  slice_pitch,
9142  ptr,
9143  events,
9144  event);
9145 }
9146 
9147 inline cl_int enqueueCopyImage(
9148  const Image& src,
9149  const Image& dst,
9150  const array<size_type, 3>& src_origin,
9151  const array<size_type, 3>& dst_origin,
9152  const array<size_type, 3>& region,
9153  const vector<Event>* events = NULL,
9154  Event* event = NULL)
9155 {
9156  cl_int error;
9157  CommandQueue queue = CommandQueue::getDefault(&error);
9158 
9159  if (error != CL_SUCCESS) {
9160  return error;
9161  }
9162 
9163  return queue.enqueueCopyImage(
9164  src,
9165  dst,
9166  src_origin,
9167  dst_origin,
9168  region,
9169  events,
9170  event);
9171 }
9172 
9173 inline cl_int enqueueCopyImageToBuffer(
9174  const Image& src,
9175  const Buffer& dst,
9176  const array<size_type, 3>& src_origin,
9177  const array<size_type, 3>& region,
9178  size_type dst_offset,
9179  const vector<Event>* events = NULL,
9180  Event* event = NULL)
9181 {
9182  cl_int error;
9183  CommandQueue queue = CommandQueue::getDefault(&error);
9184 
9185  if (error != CL_SUCCESS) {
9186  return error;
9187  }
9188 
9189  return queue.enqueueCopyImageToBuffer(
9190  src,
9191  dst,
9192  src_origin,
9193  region,
9194  dst_offset,
9195  events,
9196  event);
9197 }
9198 
9199 inline cl_int enqueueCopyBufferToImage(
9200  const Buffer& src,
9201  const Image& dst,
9202  size_type src_offset,
9203  const array<size_type, 3>& dst_origin,
9204  const array<size_type, 3>& region,
9205  const vector<Event>* events = NULL,
9206  Event* event = NULL)
9207 {
9208  cl_int error;
9209  CommandQueue queue = CommandQueue::getDefault(&error);
9210 
9211  if (error != CL_SUCCESS) {
9212  return error;
9213  }
9214 
9215  return queue.enqueueCopyBufferToImage(
9216  src,
9217  dst,
9218  src_offset,
9219  dst_origin,
9220  region,
9221  events,
9222  event);
9223 }
9224 
9225 
9226 inline cl_int flush(void)
9227 {
9228  cl_int error;
9229  CommandQueue queue = CommandQueue::getDefault(&error);
9230 
9231  if (error != CL_SUCCESS) {
9232  return error;
9233  }
9234 
9235  return queue.flush();
9236 }
9237 
9238 inline cl_int finish(void)
9239 {
9240  cl_int error;
9241  CommandQueue queue = CommandQueue::getDefault(&error);
9242 
9243  if (error != CL_SUCCESS) {
9244  return error;
9245  }
9246 
9247 
9248  return queue.finish();
9249 }
9250 
9251 class EnqueueArgs
9252 {
9253 private:
9254  CommandQueue queue_;
9255  const NDRange offset_;
9256  const NDRange global_;
9257  const NDRange local_;
9258  vector<Event> events_;
9259 
9260  template<typename... Ts>
9261  friend class KernelFunctor;
9262 
9263 public:
9264  EnqueueArgs(NDRange global) :
9265  queue_(CommandQueue::getDefault()),
9266  offset_(NullRange),
9267  global_(global),
9268  local_(NullRange)
9269  {
9270 
9271  }
9272 
9273  EnqueueArgs(NDRange global, NDRange local) :
9274  queue_(CommandQueue::getDefault()),
9275  offset_(NullRange),
9276  global_(global),
9277  local_(local)
9278  {
9279 
9280  }
9281 
9282  EnqueueArgs(NDRange offset, NDRange global, NDRange local) :
9283  queue_(CommandQueue::getDefault()),
9284  offset_(offset),
9285  global_(global),
9286  local_(local)
9287  {
9288 
9289  }
9290 
9291  EnqueueArgs(Event e, NDRange global) :
9292  queue_(CommandQueue::getDefault()),
9293  offset_(NullRange),
9294  global_(global),
9295  local_(NullRange)
9296  {
9297  events_.push_back(e);
9298  }
9299 
9300  EnqueueArgs(Event e, NDRange global, NDRange local) :
9301  queue_(CommandQueue::getDefault()),
9302  offset_(NullRange),
9303  global_(global),
9304  local_(local)
9305  {
9306  events_.push_back(e);
9307  }
9308 
9309  EnqueueArgs(Event e, NDRange offset, NDRange global, NDRange local) :
9310  queue_(CommandQueue::getDefault()),
9311  offset_(offset),
9312  global_(global),
9313  local_(local)
9314  {
9315  events_.push_back(e);
9316  }
9317 
9318  EnqueueArgs(const vector<Event> &events, NDRange global) :
9319  queue_(CommandQueue::getDefault()),
9320  offset_(NullRange),
9321  global_(global),
9322  local_(NullRange),
9323  events_(events)
9324  {
9325 
9326  }
9327 
9328  EnqueueArgs(const vector<Event> &events, NDRange global, NDRange local) :
9329  queue_(CommandQueue::getDefault()),
9330  offset_(NullRange),
9331  global_(global),
9332  local_(local),
9333  events_(events)
9334  {
9335 
9336  }
9337 
9338  EnqueueArgs(const vector<Event> &events, NDRange offset, NDRange global, NDRange local) :
9339  queue_(CommandQueue::getDefault()),
9340  offset_(offset),
9341  global_(global),
9342  local_(local),
9343  events_(events)
9344  {
9345 
9346  }
9347 
9348  EnqueueArgs(CommandQueue &queue, NDRange global) :
9349  queue_(queue),
9350  offset_(NullRange),
9351  global_(global),
9352  local_(NullRange)
9353  {
9354 
9355  }
9356 
9357  EnqueueArgs(CommandQueue &queue, NDRange global, NDRange local) :
9358  queue_(queue),
9359  offset_(NullRange),
9360  global_(global),
9361  local_(local)
9362  {
9363 
9364  }
9365 
9366  EnqueueArgs(CommandQueue &queue, NDRange offset, NDRange global, NDRange local) :
9367  queue_(queue),
9368  offset_(offset),
9369  global_(global),
9370  local_(local)
9371  {
9372 
9373  }
9374 
9375  EnqueueArgs(CommandQueue &queue, Event e, NDRange global) :
9376  queue_(queue),
9377  offset_(NullRange),
9378  global_(global),
9379  local_(NullRange)
9380  {
9381  events_.push_back(e);
9382  }
9383 
9384  EnqueueArgs(CommandQueue &queue, Event e, NDRange global, NDRange local) :
9385  queue_(queue),
9386  offset_(NullRange),
9387  global_(global),
9388  local_(local)
9389  {
9390  events_.push_back(e);
9391  }
9392 
9393  EnqueueArgs(CommandQueue &queue, Event e, NDRange offset, NDRange global, NDRange local) :
9394  queue_(queue),
9395  offset_(offset),
9396  global_(global),
9397  local_(local)
9398  {
9399  events_.push_back(e);
9400  }
9401 
9402  EnqueueArgs(CommandQueue &queue, const vector<Event> &events, NDRange global) :
9403  queue_(queue),
9404  offset_(NullRange),
9405  global_(global),
9406  local_(NullRange),
9407  events_(events)
9408  {
9409 
9410  }
9411 
9412  EnqueueArgs(CommandQueue &queue, const vector<Event> &events, NDRange global, NDRange local) :
9413  queue_(queue),
9414  offset_(NullRange),
9415  global_(global),
9416  local_(local),
9417  events_(events)
9418  {
9419 
9420  }
9421 
9422  EnqueueArgs(CommandQueue &queue, const vector<Event> &events, NDRange offset, NDRange global, NDRange local) :
9423  queue_(queue),
9424  offset_(offset),
9425  global_(global),
9426  local_(local),
9427  events_(events)
9428  {
9429 
9430  }
9431 };
9432 
9433 
9434 //----------------------------------------------------------------------------------------------
9435 
9436 
9441 template<typename... Ts>
9442 class KernelFunctor
9443 {
9444 private:
9445  Kernel kernel_;
9446 
9447  template<int index, typename T0, typename... T1s>
9448  void setArgs(T0&& t0, T1s&&... t1s)
9449  {
9450  kernel_.setArg(index, t0);
9451  setArgs<index + 1, T1s...>(std::forward<T1s>(t1s)...);
9452  }
9453 
9454  template<int index, typename T0>
9455  void setArgs(T0&& t0)
9456  {
9457  kernel_.setArg(index, t0);
9458  }
9459 
9460  template<int index>
9461  void setArgs()
9462  {
9463  }
9464 
9465 
9466 public:
9467  KernelFunctor(Kernel kernel) : kernel_(kernel)
9468  {}
9469 
9470  KernelFunctor(
9471  const Program& program,
9472  const string name,
9473  cl_int * err = NULL) :
9474  kernel_(program, name.c_str(), err)
9475  {}
9476 
9477  //! \brief Return type of the functor
9478  typedef Event result_type;
9479 
9485  Event operator() (
9486  const EnqueueArgs& args,
9487  Ts... ts)
9488  {
9489  Event event;
9490  setArgs<0>(std::forward<Ts>(ts)...);
9491 
9492  args.queue_.enqueueNDRangeKernel(
9493  kernel_,
9494  args.offset_,
9495  args.global_,
9496  args.local_,
9497  &args.events_,
9498  &event);
9499 
9500  return event;
9501  }
9502 
9509  Event operator() (
9510  const EnqueueArgs& args,
9511  Ts... ts,
9512  cl_int &error)
9513  {
9514  Event event;
9515  setArgs<0>(std::forward<Ts>(ts)...);
9516 
9517  error = args.queue_.enqueueNDRangeKernel(
9518  kernel_,
9519  args.offset_,
9520  args.global_,
9521  args.local_,
9522  &args.events_,
9523  &event);
9524 
9525  return event;
9526  }
9527 
9528 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
9529  cl_int setSVMPointers(const vector<void*> &pointerList)
9530  {
9531  return kernel_.setSVMPointers(pointerList);
9532  }
9533 
9534  template<typename T0, typename... T1s>
9535  cl_int setSVMPointers(const T0 &t0, T1s &... ts)
9536  {
9537  return kernel_.setSVMPointers(t0, ts...);
9538  }
9539 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
9540 
9541  Kernel getKernel()
9542  {
9543  return kernel_;
9544  }
9545 };
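// ---------------------------------------------------------------------------
// Editor's usage sketch (not part of cl2.hpp): launching a kernel through
// KernelFunctor and EnqueueArgs as defined above. The program, the kernel
// name "vadd", and the buffers a, b, c are hypothetical.
//
//   cl::KernelFunctor<cl::Buffer, cl::Buffer, cl::Buffer> vadd(program, "vadd");
//   cl::Event e = vadd(cl::EnqueueArgs(cl::NDRange(1024)), a, b, c);
//   e.wait();
// ---------------------------------------------------------------------------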
9546 
9547 namespace compatibility {
9552  template<typename... Ts>
9553  struct make_kernel
9554  {
9555  typedef KernelFunctor<Ts...> FunctorType;
9556 
9557  FunctorType functor_;
9558 
9559  make_kernel(
9560  const Program& program,
9561  const string name,
9562  cl_int * err = NULL) :
9563  functor_(FunctorType(program, name, err))
9564  {}
9565 
9566  make_kernel(
9567  const Kernel kernel) :
9568  functor_(FunctorType(kernel))
9569  {}
9570 
9571  //! \brief Return type of the functor
9572  typedef Event result_type;
9573 
9574  //! \brief Function signature of kernel functor with no event dependency.
9575  typedef Event type_(
9576  const EnqueueArgs&,
9577  Ts...);
9578 
9579  Event operator()(
9580  const EnqueueArgs& enqueueArgs,
9581  Ts... args)
9582  {
9583  return functor_(
9584  enqueueArgs, args...);
9585  }
9586  };
9587 } // namespace compatibility
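compatibility::make_kernel preserves the cl.hpp spelling while forwarding to KernelFunctor. With the same hypothetical program, queue, and buffers as the sketch above:

    cl::compatibility::make_kernel<cl::Buffer, cl::Buffer, cl::Buffer, int> vaddCompat(program, "vadd");
    vaddCompat(cl::EnqueueArgs(queue, cl::NDRange(numElements)), aBuf, bBuf, cBuf, numElements);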
9588 
9589 
9590 //----------------------------------------------------------------------------------------------------------------------
9591 
9592 #undef CL_HPP_ERR_STR_
9593 #if !defined(CL_HPP_USER_OVERRIDE_ERROR_STRINGS)
9594 #undef __GET_DEVICE_INFO_ERR
9595 #undef __GET_PLATFORM_INFO_ERR
9596 #undef __GET_DEVICE_IDS_ERR
9597 #undef __GET_CONTEXT_INFO_ERR
9598 #undef __GET_EVENT_INFO_ERR
9599 #undef __GET_EVENT_PROFILE_INFO_ERR
9600 #undef __GET_MEM_OBJECT_INFO_ERR
9601 #undef __GET_IMAGE_INFO_ERR
9602 #undef __GET_SAMPLER_INFO_ERR
9603 #undef __GET_KERNEL_INFO_ERR
9604 #undef __GET_KERNEL_ARG_INFO_ERR
9605 #undef __GET_KERNEL_WORK_GROUP_INFO_ERR
9606 #undef __GET_PROGRAM_INFO_ERR
9607 #undef __GET_PROGRAM_BUILD_INFO_ERR
9608 #undef __GET_COMMAND_QUEUE_INFO_ERR
9609 
9610 #undef __CREATE_CONTEXT_ERR
9611 #undef __CREATE_CONTEXT_FROM_TYPE_ERR
9612 #undef __GET_SUPPORTED_IMAGE_FORMATS_ERR
9613 
9614 #undef __CREATE_BUFFER_ERR
9615 #undef __CREATE_SUBBUFFER_ERR
9616 #undef __CREATE_IMAGE2D_ERR
9617 #undef __CREATE_IMAGE3D_ERR
9618 #undef __CREATE_SAMPLER_ERR
9619 #undef __SET_MEM_OBJECT_DESTRUCTOR_CALLBACK_ERR
9620 
9621 #undef __CREATE_USER_EVENT_ERR
9622 #undef __SET_USER_EVENT_STATUS_ERR
9623 #undef __SET_EVENT_CALLBACK_ERR
9624 #undef __SET_PRINTF_CALLBACK_ERR
9625 
9626 #undef __WAIT_FOR_EVENTS_ERR
9627 
9628 #undef __CREATE_KERNEL_ERR
9629 #undef __SET_KERNEL_ARGS_ERR
9630 #undef __CREATE_PROGRAM_WITH_SOURCE_ERR
9631 #undef __CREATE_PROGRAM_WITH_BINARY_ERR
9632 #undef __CREATE_PROGRAM_WITH_BUILT_IN_KERNELS_ERR
9633 #undef __BUILD_PROGRAM_ERR
9634 #undef __CREATE_KERNELS_IN_PROGRAM_ERR
9635 
9636 #undef __CREATE_COMMAND_QUEUE_ERR
9637 #undef __SET_COMMAND_QUEUE_PROPERTY_ERR
9638 #undef __ENQUEUE_READ_BUFFER_ERR
9639 #undef __ENQUEUE_WRITE_BUFFER_ERR
9640 #undef __ENQUEUE_READ_BUFFER_RECT_ERR
9641 #undef __ENQUEUE_WRITE_BUFFER_RECT_ERR
9642 #undef __ENQEUE_COPY_BUFFER_ERR
9643 #undef __ENQEUE_COPY_BUFFER_RECT_ERR
9644 #undef __ENQUEUE_READ_IMAGE_ERR
9645 #undef __ENQUEUE_WRITE_IMAGE_ERR
9646 #undef __ENQUEUE_COPY_IMAGE_ERR
9647 #undef __ENQUEUE_COPY_IMAGE_TO_BUFFER_ERR
9648 #undef __ENQUEUE_COPY_BUFFER_TO_IMAGE_ERR
9649 #undef __ENQUEUE_MAP_BUFFER_ERR
9650 #undef __ENQUEUE_MAP_IMAGE_ERR
9651 #undef __ENQUEUE_UNMAP_MEM_OBJECT_ERR
9652 #undef __ENQUEUE_NDRANGE_KERNEL_ERR
9653 #undef __ENQUEUE_TASK_ERR
9654 #undef __ENQUEUE_NATIVE_KERNEL
9655 
9656 #undef __UNLOAD_COMPILER_ERR
9657 #undef __CREATE_SUB_DEVICES_ERR
9658 
9659 #undef __CREATE_PIPE_ERR
9660 #undef __GET_PIPE_INFO_ERR
9661 
9662 #endif //CL_HPP_USER_OVERRIDE_ERROR_STRINGS
9663 
9664 // Extensions
9665 #undef CL_HPP_INIT_CL_EXT_FCN_PTR_
9666 #undef CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_
9667 
9668 #if defined(CL_HPP_USE_CL_DEVICE_FISSION)
9669 #undef CL_HPP_PARAM_NAME_DEVICE_FISSION_
9670 #endif // CL_HPP_USE_CL_DEVICE_FISSION
9671 
9672 #undef CL_HPP_NOEXCEPT_
9673 #undef CL_HPP_DEFINE_STATIC_MEMBER_
9674 
9675 } // namespace cl
9676 
9677 #endif // CL_HPP_
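Before the symbol index that follows, a rough end-to-end sketch of how the pieces above fit together. It assumes an OpenCL 2.0 capable platform; the "square" kernel source and the buffer sizes are illustrative, and error handling is omitted:

    #define CL_HPP_TARGET_OPENCL_VERSION 200
    #include <CL/cl2.hpp>
    #include <numeric>
    #include <vector>

    int main() {
        std::vector<int> host(1024);
        std::iota(host.begin(), host.end(), 0);

        cl::CommandQueue queue = cl::CommandQueue::getDefault();

        // The iterator constructor allocates a device buffer and copies the host data in.
        cl::Buffer input(host.begin(), host.end(), /*readOnly=*/true);
        cl::Buffer output(CL_MEM_WRITE_ONLY, host.size() * sizeof(int));

        cl::Program program(
            "kernel void square(global const int* in, global int* out) {"
            "    size_t i = get_global_id(0); out[i] = in[i] * in[i];"
            "}",
            /*build=*/true);

        cl::KernelFunctor<cl::Buffer, cl::Buffer> square(program, "square");
        square(cl::EnqueueArgs(queue, cl::NDRange(host.size())), input, output);

        // cl::copy reads the result back into the host container.
        cl::copy(queue, output, host.begin(), host.end());
        return 0;
    }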
Memory()
Default constructor - initializes to NULL.
Definition: cl2.hpp:3165
Event type_(const EnqueueArgs &, Ts...)
Function signature of kernel functor with no event dependency.
Definition: cl2.hpp:9575
BufferRenderGL(const cl_mem &buffer, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4105
CommandQueue(CommandQueue &&queue) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:7104
Image interface for arrays of 2D images.
Definition: cl2.hpp:4855
Image1DBuffer(const cl_mem &image1D, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4384
Image interface for arrays of 1D images.
Definition: cl2.hpp:4426
Image2DGL()
Default constructor - initializes to NULL.
Definition: cl2.hpp:4798
cl_int getInfo(cl_context_info name, T *param) const
Wrapper for clGetContextInfo().
Definition: cl2.hpp:2866
Adds constructors and member functions for cl_image_format.
Definition: cl2.hpp:1929
Context(const cl_context &context, bool retainObject=false)
Constructor from cl_context - takes ownership.
Definition: cl2.hpp:2850
Image & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:4183
Image3D(const Image3D &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5052
static DeviceCommandQueue makeDefault(const Context &context, const Device &device, cl_uint queueSize, cl_int *err=nullptr)
Definition: cl2.hpp:8460
Image2DGL(const Image2DGL &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4823
std::enable_if< std::is_pointer< T >::value, cl_int >::type setArg(cl_uint index, const T argPtr)
setArg overload taking a pointer type
Definition: cl2.hpp:5844
Image1DBuffer(const Image1DBuffer &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4396
Image2D(const Context &context, cl_mem_flags flags, ImageFormat format, size_type width, size_type height, size_type row_pitch=0, void *host_ptr=NULL, cl_int *err=NULL)
Constructs a 2D Image in a specified context.
Definition: cl2.hpp:4527
Image1D(const Context &context, cl_mem_flags flags, ImageFormat format, size_type width, void *host_ptr=NULL, cl_int *err=NULL)
Constructs a 1D Image in a specified context.
Definition: cl2.hpp:4257
Image2D & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:4718
cl_int setSVMPointers(const vector< void *> &pointerList)
Definition: cl2.hpp:5879
The OpenCL C++ bindings are defined within this namespace.
Definition: cl2.hpp:555
CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_int enqueueBarrier() const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
Definition: cl2.hpp:8209
Image(Image &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4206
detail::param_traits< detail::cl_device_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetDeviceInfo() that returns by value.
Definition: cl2.hpp:2084
cl_int unmapSVM(cl::vector< T, Alloc > &container)
Definition: cl2.hpp:8972
Context(const Context &ctx)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:2788
Local address wrapper for use with Kernel::setArg.
Definition: cl2.hpp:5610
detail::param_traits< detail::cl_pipe_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetMemObjectInfo() that returns by value.
Definition: cl2.hpp:5380
cl_int enqueueMapSVM(cl::vector< T, Alloc > &container, cl_bool blocking, cl_map_flags flags, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7748
Class interface for GL 3D Image Memory objects.
Definition: cl2.hpp:5087
CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_int enqueueMarker(Event *event=NULL) const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
Definition: cl2.hpp:8056
CommandQueue(const Context &context, const Device &device, QueueProperties properties, cl_int *err=NULL)
Constructs a CommandQueue for a passed device and context. Will return a CL_INVALID_QUEUE_PROPERTIES ...
Definition: cl2.hpp:6994
CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_int UnloadCompiler() CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
Definition: cl2.hpp:2557
static DeviceCommandQueue makeDefault(const Context &context, const Device &device, cl_int *err=nullptr)
Definition: cl2.hpp:8432
cl_int getDevices(cl_device_type type, vector< Device > *devices) const
Gets a list of devices for this platform.
Definition: cl2.hpp:2338
Program(const Context &context, const vector< Device > &devices, const Binaries &binaries, vector< cl_int > *binaryStatus=NULL, cl_int *err=NULL)
Definition: cl2.hpp:6152
Buffer(IteratorType startIterator, IteratorType endIterator, bool readOnly, bool useHostPtr=false, cl_int *err=NULL)
Construct a Buffer from a host container via iterators. IteratorType must be random access...
Definition: cl2.hpp:3711
Class interface for cl_mem.
Definition: cl2.hpp:3161
Pipe(cl_uint packet_size, cl_uint max_packets, cl_int *err=NULL)
Constructs a Pipe in the default context.
Definition: cl2.hpp:5299
cl_int enqueueMigrateMemObjects(const vector< Memory > &memObjects, cl_mem_migration_flags flags, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7934
Device(const Device &dev)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:2047
BufferRenderGL(BufferRenderGL &&buf) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4135
Image2DArray(Image2DArray &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4930
detail::param_traits< detail::cl_image_info, name >::param_type getImageInfo(cl_int *err=NULL) const
Wrapper for clGetImageInfo() that returns by value.
Definition: cl2.hpp:4231
Memory(Memory &&mem) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:3209
cl_int enqueueMarkerWithWaitList(const vector< Event > *events=0, Event *event=0) const
Definition: cl2.hpp:7881
Image3D(Image3D &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5066
cl_int getSupportedImageFormats(cl_mem_flags flags, cl_mem_object_type type, vector< ImageFormat > *formats) const
Gets a list of supported image formats.
Definition: cl2.hpp:2891
BufferRenderGL()
Default constructor - initializes to NULL.
Definition: cl2.hpp:4096
Image(const Image &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4192
cl_int enqueueFillBuffer(const Buffer &buffer, PatternType pattern, size_type offset, size_type size, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7339
Image1DArray(const Image1DArray &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4485
Kernel(const Kernel &kernel)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5701
CommandQueue(const Context &context, cl_command_queue_properties properties=0, cl_int *err=NULL)
Constructs a CommandQueue for an implementation-defined device in the given context. Will return a CL...
Definition: cl2.hpp:6814
Buffer createSubBuffer(cl_mem_flags flags, cl_buffer_create_type buffer_create_type, const void *buffer_create_info, cl_int *err=NULL)
Creates a new buffer object from this.
Definition: cl2.hpp:3829
cl_int enqueueBarrierWithWaitList(const vector< Event > *events=0, Event *event=0) const
Definition: cl2.hpp:7911
Buffer(Buffer &&buf) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:3813
vector< T, cl::SVMAllocator< int, cl::SVMTraitCoarse<> >> coarse_svm_vector
Vector alias to simplify construction of coarse-grained SVM containers.
Definition: cl2.hpp:3628
static Device getDefault(cl_int *errResult=NULL)
Returns the first device on the default context.
Definition: cl2.hpp:2009
cl_int enqueueUnmapSVM(T *ptr, const vector< Event > *events=NULL, Event *event=NULL)
Definition: cl2.hpp:8776
Sampler(const Sampler &sam)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5476
cl_int setCallback(cl_int type, void(CL_CALLBACK *pfn_notify)(cl_event, cl_int, void *), void *user_data=NULL)
Registers a user callback function for a specific command execution status.
Definition: cl2.hpp:3068
size_type dimensions() const
Queries the number of dimensions in the range.
Definition: cl2.hpp:5583
Event()
Default constructor - initializes to NULL.
Definition: cl2.hpp:2982
ImageGL(const cl_mem &image, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:5213
Image3D(const Context &context, cl_mem_flags flags, ImageFormat format, size_type width, size_type height, size_type depth, size_type row_pitch=0, size_type slice_pitch=0, void *host_ptr=NULL, cl_int *err=NULL)
Constructs a 3D Image in a specified context.
Definition: cl2.hpp:4956
cl_int setArg(cl_uint index, const cl::vector< T, Alloc > &argPtr)
setArg overload taking a vector type.
Definition: cl2.hpp:5833
detail::param_traits< detail::cl_profiling_info, name >::param_type getProfilingInfo(cl_int *err=NULL) const
Wrapper for clGetEventProfilingInfo() that returns by value.
Definition: cl2.hpp:3041
Image1D(const Image1D &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4312
BufferRenderGL(const BufferRenderGL &buf)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4121
DeviceCommandQueue(DeviceQueueProperties properties, cl_int *err=NULL)
Definition: cl2.hpp:8260
Pipe(const Context &context, cl_uint packet_size, cl_uint max_packets, cl_int *err=NULL)
Constructs a Pipe in a specified context.
Definition: cl2.hpp:5274
Image3D()
Default constructor - initializes to NULL.
Definition: cl2.hpp:5027
CommandQueue(const Context &context, const Device &device, cl_command_queue_properties properties=0, cl_int *err=NULL)
Constructs a CommandQueue for a passed device and context. Will return a CL_INVALID_QUEUE_PROPERTIES ...
Definition: cl2.hpp:6943
Image2DGL(Image2DGL &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4837
cl_int getInfo(cl_sampler_info name, T *param) const
Wrapper for clGetSamplerInfo().
Definition: cl2.hpp:5503
Kernel()
Default constructor - initializes to NULL.
Definition: cl2.hpp:5674
Image2DGL(const Context &context, cl_mem_flags flags, cl_GLenum target, cl_GLint miplevel, cl_GLuint texobj, cl_int *err=NULL)
Constructs an Image2DGL in a specified context, from a given GL Texture.
Definition: cl2.hpp:4773
Sampler()
Default constructor - initializes to NULL.
Definition: cl2.hpp:5406
Image1D(Image1D &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4326
Program(Program &&program) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:6280
cl_int getObjectInfo(cl_gl_object_type *type, cl_GLuint *gl_object_name)
Wrapper for clGetGLObjectInfo().
Definition: cl2.hpp:4147
cl_int unloadCompiler()
Wrapper for clUnloadCompiler().
Definition: cl2.hpp:2534
std::enable_if<!std::is_pointer< T >::value, cl_int >::type setArg(cl_uint index, const T &value)
setArg overload taking a POD type
Definition: cl2.hpp:5856
size_type max_size() const CL_HPP_NOEXCEPT_
Definition: cl2.hpp:3493
Image1DArray(const cl_mem &imageArray, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4472
Class interface for cl_event.
Definition: cl2.hpp:2978
vector< T, cl::SVMAllocator< int, cl::SVMTraitFine<> >> fine_svm_vector
Vector alias to simplify construction of fine-grained SVM containers.
Definition: cl2.hpp:3634
Image2DGL(const cl_mem &image, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4807
NDRange(size_type size0)
Constructs one-dimensional range.
Definition: cl2.hpp:5548
Program(const Context &context, const Sources &sources, cl_int *err=NULL)
Definition: cl2.hpp:6102
vector< T, cl::SVMAllocator< int, cl::SVMTraitAtomic<> >> atomic_svm_vector
Vector alias to simplify construction of fine-grained SVM containers that support platform atomics...
Definition: cl2.hpp:3640
Buffer & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:3790
detail::param_traits< detail::cl_event_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetEventInfo() that returns by value.
Definition: cl2.hpp:3018
Class interface for Pipe Memory Objects.
Definition: cl2.hpp:5261
cl_int enqueueUnmapSVM(cl::pointer< T, D > &ptr, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7823
Image2D(const Image2D &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4727
Buffer()
Default constructor - initializes to NULL.
Definition: cl2.hpp:3774
DeviceCommandQueue(const Context &context, const Device &device, DeviceQueueProperties properties=DeviceQueueProperties::None, cl_int *err=NULL)
Definition: cl2.hpp:8283
cl_int setArg(cl_uint index, const cl::pointer< T, D > &argPtr)
setArg overload taking a shared_ptr type
Definition: cl2.hpp:5823
Image3DGL(const Image3DGL &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5144
cl_int copy(IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer)
Definition: cl2.hpp:8861
Context(const vector< Device > &devices, cl_context_properties *properties=NULL, void(CL_CALLBACK *notifyFptr)(const char *, const void *, size_type, void *)=NULL, void *data=NULL, cl_int *err=NULL)
Constructs a context including a list of specified devices.
Definition: cl2.hpp:2643
DeviceCommandQueue(const DeviceCommandQueue &queue)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:8349
Sampler(const cl_sampler &sampler, bool retainObject=false)
Constructor from cl_sampler - takes ownership.
Definition: cl2.hpp:5459
Image()
Default constructor - initializes to NULL.
Definition: cl2.hpp:4167
static cl_int release(cl_device_id device)
Definition: cl2.hpp:1489
cl_int getInfo(cl_device_info name, T *param) const
Wrapper for clGetDeviceInfo().
Definition: cl2.hpp:2074
Event(const cl_event &event, bool retainObject=false)
Constructor from cl_event - takes ownership.
Definition: cl2.hpp:2992
DeviceCommandQueue(const Context &context, const Device &device, cl_uint queueSize, DeviceQueueProperties properties=DeviceQueueProperties::None, cl_int *err=NULL)
Definition: cl2.hpp:8307
NDRange(size_type size0, size_type size1)
Constructs two-dimensional range.
Definition: cl2.hpp:5557
Image3DGL(const Context &context, cl_mem_flags flags, cl_GLenum target, cl_GLint miplevel, cl_GLuint texobj, cl_int *err=NULL)
Constructs an Image3DGL in a specified context, from a given GL Texture.
Definition: cl2.hpp:5095
Image2D()
Default constructor - initializes to NULL.
Definition: cl2.hpp:4702
Image3D(const cl_mem &image3D, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:5036
Image2D(Image2D &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4741
DeviceCommandQueue interface for device cl_command_queues.
Definition: cl2.hpp:8248
Image1DArray(Image1DArray &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4499
Pipe()
Default constructor - initializes to NULL.
Definition: cl2.hpp:5318
CommandQueue(const cl_command_queue &commandQueue, bool retainObject=false)
Constructor from cl_command_queue - takes ownership.
Definition: cl2.hpp:7078
Pipe(const Pipe &pipe)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5343
cl_int getInfo(cl_event_info name, T *param) const
Wrapper for clGetEventInfo().
Definition: cl2.hpp:3008
Image1D()
Default constructor - initializes to NULL.
Definition: cl2.hpp:4287
Buffer(const cl_mem &buffer, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:3783
Sampler(Sampler &&sam) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5490
static CommandQueue setDefault(const CommandQueue &default_queue)
Definition: cl2.hpp:7062
Program(const Context &context, const vector< Device > &devices, const string &kernelNames, cl_int *err=NULL)
Definition: cl2.hpp:6216
CommandQueue(const CommandQueue &queue)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:7090
cl_int enqueueMapSVM(T *ptr, cl_bool blocking, cl_map_flags flags, size_type size, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7693
Kernel(Kernel &&kernel) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5715
cl_int enqueueFillImage(const Image &image, cl_uint4 fillColor, const array< size_type, 3 > &origin, const array< size_type, 3 > &region, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7539
Image2D(const Context &context, ImageFormat format, const Buffer &sourceBuffer, size_type width, size_type height, size_type row_pitch=0, cl_int *err=nullptr)
Constructs a 2D Image from a buffer.
Definition: cl2.hpp:4598
Program(const Sources &sources, cl_int *err=NULL)
Definition: cl2.hpp:6067
Device(const cl_device_id &device, bool retainObject=false)
Constructor from cl_device_id.
Definition: cl2.hpp:2002
NDRange()
Default constructor - resulting range has zero dimensions.
Definition: cl2.hpp:5539
BufferGL(const BufferGL &buf)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4024
Image3D & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:5043
CommandQueue(const Context &context, QueueProperties properties, cl_int *err=NULL)
Constructs a CommandQueue for an implementation-defined device in the given context. Will return a CL...
Definition: cl2.hpp:6881
cl_int setDestructorCallback(void(CL_CALLBACK *pfn_notify)(cl_mem, void *), void *user_data=NULL)
Registers a callback function to be called when the memory object is no longer needed.
Definition: cl2.hpp:3258
CommandQueue(cl_command_queue_properties properties, cl_int *err=NULL)
Constructs a CommandQueue based on passed properties. Will return a CL_INVALID_QUEUE_PROPERTIES erro...
Definition: cl2.hpp:6685
Buffer(const Context &context, cl_mem_flags flags, size_type size, void *host_ptr=NULL, cl_int *err=NULL)
Constructs a Buffer in a specified context.
Definition: cl2.hpp:3662
cl_int wait() const
Blocks the calling thread until this event completes.
Definition: cl2.hpp:3056
ImageFormat()
Default constructor - performs no initialization.
Definition: cl2.hpp:1932
Event result_type
Return type of the functor.
Definition: cl2.hpp:9478
Class interface for cl_platform_id.
Definition: cl2.hpp:2192
cl_int setSVMPointers(const std::array< void *, ArrayLength > &pointerList)
Definition: cl2.hpp:5894
Image2DArray(const cl_mem &imageArray, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4905
Buffer(const Buffer &buf)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:3799
Image(const cl_mem &image, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4176
Memory(const Memory &mem)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:3195
Program(const Program &program)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:6266
cl_int getObjectInfo(cl_gl_object_type *type, cl_GLuint *gl_object_name)
Wrapper for clGetGLObjectInfo().
Definition: cl2.hpp:4050
static cl_int retain(cl_device_id device)
Definition: cl2.hpp:1478
Image3DGL()
Default constructor - initializes to NULL.
Definition: cl2.hpp:5119
Image2D(const Context &context, cl_channel_order order, const Image &sourceImage, cl_int *err=nullptr)
Constructs a 2D Image from an image.
Definition: cl2.hpp:4648
CommandQueue interface for cl_command_queue.
Definition: cl2.hpp:6619
Image2D(const cl_mem &image2D, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4711
cl_int enqueueUnmapSVM(cl::vector< T, Alloc > &container, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7848
Sampler(const Context &context, cl_bool normalized_coords, cl_addressing_mode addressing_mode, cl_filter_mode filter_mode, cl_int *err=NULL)
Constructs a Sampler in a specified context.
Definition: cl2.hpp:5412
Image1D(const cl_mem &image1D, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4296
pointer allocate(size_type size, typename cl::SVMAllocator< void, SVMTrait >::const_pointer=0)
Definition: cl2.hpp:3451
static DeviceCommandQueue makeDefault(cl_int *err=nullptr)
Definition: cl2.hpp:8402
UserEvent()
Default constructor - initializes to NULL.
Definition: cl2.hpp:3125
DeviceCommandQueue(const cl_command_queue &commandQueue, bool retainObject=false)
Constructor from cl_command_queue - takes ownership.
Definition: cl2.hpp:8337
cl_int enableFineGrainedSystemSVM(bool svmEnabled)
Enable fine-grained system SVM.
Definition: cl2.hpp:5915
ImageFormat(cl_channel_order order, cl_channel_type type)
Initializing constructor.
Definition: cl2.hpp:1935
Pipe(const cl_mem &pipe, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:5327
detail::param_traits< detail::cl_context_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetContextInfo() that returns by value.
Definition: cl2.hpp:2876
Pipe(Pipe &&pipe) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5357
cl_int mapSVM(cl::vector< T, Alloc > &container)
Definition: cl2.hpp:8963
cl::pointer< T, detail::Deleter< Alloc > > allocate_pointer(const Alloc &alloc_, Args &&... args)
Definition: cl2.hpp:3581
Class interface for user events (a subset of cl_event's).
Definition: cl2.hpp:3102
bool operator==(SVMAllocator const &rhs)
Definition: cl2.hpp:3521
cl_int enqueueFillImage(const Image &image, cl_int4 fillColor, const array< size_type, 3 > &origin, const array< size_type, 3 > &region, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7505
Program(const cl_program &program, bool retainObject=false)
Constructor from cl_program - takes ownership.
Definition: cl2.hpp:6254
cl_int enqueueUnmapSVM(T *ptr, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7798
Kernel(const cl_kernel &kernel, bool retainObject=false)
Constructor from cl_kernel - takes ownership.
Definition: cl2.hpp:5684
vector< std::pair< cl::Device, typename detail::param_traits< detail::cl_program_build_info, name >::param_type > > getBuildInfo(cl_int *err=NULL) const
Definition: cl2.hpp:6404
cl_int getProfilingInfo(cl_profiling_info name, T *param) const
Wrapper for clGetEventProfilingInfo().
Definition: cl2.hpp:3031
Class interface for cl_sampler.
Definition: cl2.hpp:5402
ImageGL(const ImageGL &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5225
LocalSpaceArg Local(size_type size)
Helper function for generating LocalSpaceArg objects.
Definition: cl2.hpp:5654
Event result_type
Return type of the functor.
Definition: cl2.hpp:9572
detail::param_traits< detail::cl_mem_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetMemObjectInfo() that returns by value.
Definition: cl2.hpp:3233
C++ base class for Image Memory objects.
Definition: cl2.hpp:4163
cl_int enqueueMapSVM(T *ptr, cl_bool blocking, cl_map_flags flags, size_type size, const vector< Event > *events=NULL, Event *event=NULL)
Definition: cl2.hpp:8674
Buffer(cl_mem_flags flags, size_type size, void *host_ptr=NULL, cl_int *err=NULL)
Constructs a Buffer in the default context.
Definition: cl2.hpp:3687
static Context setDefault(const Context &default_context)
Definition: cl2.hpp:2835
cl_int enqueueMapSVM(cl::pointer< T, D > &ptr, cl_bool blocking, cl_map_flags flags, size_type size, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7721
BufferGL(BufferGL &&buf) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4038
Class interface for Buffer Memory Objects.
Definition: cl2.hpp:3651
detail::param_traits< detail::cl_sampler_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetSamplerInfo() that returns by value.
Definition: cl2.hpp:5513
static Device setDefault(const Device &default_device)
Definition: cl2.hpp:2027
Image1DBuffer(Image1DBuffer &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4410
Class interface for specifying NDRange values.
Definition: cl2.hpp:5531
Class interface for 2D Image Memory objects.
Definition: cl2.hpp:4520
ImageGL(ImageGL &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5239
Class interface for 1D Image Memory objects.
Definition: cl2.hpp:4250
Class interface for cl_kernel.
Definition: cl2.hpp:5668
NDRange(size_type size0, size_type size1, size_type size2)
Constructs three-dimensional range.
Definition: cl2.hpp:5566
static cl_int waitForEvents(const vector< Event > &events)
Blocks the calling thread until every event specified is complete.
Definition: cl2.hpp:3088
Class interface for 3D Image Memory objects.
Definition: cl2.hpp:4949
Class interface for GL Render Buffer Memory Objects.
Definition: cl2.hpp:4068
general image interface for GL interop. We abstract the 2D and 3D GL images into a single instance he...
Definition: cl2.hpp:5178
cl_int setStatus(cl_int status)
Sets the execution status of a user event object.
Definition: cl2.hpp:3131
detail::param_traits< detail::cl_platform_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetPlatformInfo() that returns by value.
Definition: cl2.hpp:2323
Device()
Default constructor - initializes to NULL.
Definition: cl2.hpp:1996
Context()
Default constructor - initializes to NULL.
Definition: cl2.hpp:2843
Image2DArray(const Image2DArray &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4916
Class interface for cl_device_id.
Definition: cl2.hpp:1959
Context(Context &&ctx) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:2802
static Context getDefault(cl_int *err=NULL)
Returns a singleton context including all devices of CL_DEVICE_TYPE_DEFAULT.
Definition: cl2.hpp:2818
Context(cl_device_type type, cl_context_properties *properties=NULL, void(CL_CALLBACK *notifyFptr)(const char *, const void *, size_type, void *)=NULL, void *data=NULL, cl_int *err=NULL)
Constructs a context including all or a subset of devices of a specified type.
Definition: cl2.hpp:2704
Memory(const cl_mem &memory, bool retainObject)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:3178
Platform(const cl_platform_id &platform, bool retainObject=false)
Constructor from cl_platform_id.
Definition: cl2.hpp:2274
Device(Device &&dev) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:2061
BufferRenderGL(const Context &context, cl_mem_flags flags, cl_GLuint bufobj, cl_int *err=NULL)
Constructs a BufferRenderGL in a specified context, from a given GL Renderbuffer. ...
Definition: cl2.hpp:4076
cl_int getImageInfo(cl_image_info name, T *param) const
Wrapper for clGetImageInfo().
Definition: cl2.hpp:4221
BufferGL()
Default constructor - initializes to NULL.
Definition: cl2.hpp:3999
UserEvent(const Context &context, cl_int *err=NULL)
Constructs a user event on a given context.
Definition: cl2.hpp:3109
cl_int enqueueFillImage(const Image &image, cl_float4 fillColor, const array< size_type, 3 > &origin, const array< size_type, 3 > &region, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7471
CommandQueue(QueueProperties properties, cl_int *err=NULL)
Constructs a CommandQueue based on passed properties. Will return a CL_INVALID_QUEUE_PROPERTIES erro...
Definition: cl2.hpp:6751
cl_int getInfo(cl_pipe_info name, T *param) const
Wrapper for clGetMemObjectInfo().
Definition: cl2.hpp:5370
Image interface for 1D buffer images.
Definition: cl2.hpp:4342
Program interface that implements cl_program.
Definition: cl2.hpp:5976
Image3DGL(const cl_mem &image, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:5128
Image3DGL(Image3DGL &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5158
cl_int getInfo(cl_platform_info name, string *param) const
Wrapper for clGetPlatformInfo().
Definition: cl2.hpp:2313
Class interface for GL Buffer Memory Objects.
Definition: cl2.hpp:3971
DeviceCommandQueue(DeviceCommandQueue &&queue) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:8363
cl_int copy(const CommandQueue &queue, const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator)
Definition: cl2.hpp:8932
static Platform setDefault(const Platform &default_platform)
Definition: cl2.hpp:2305
Class interface for GL 2D Image Memory objects.
Definition: cl2.hpp:4765
Class interface for cl_context.
Definition: cl2.hpp:2571
BufferGL(const Context &context, cl_mem_flags flags, cl_GLuint bufobj, cl_int *err=NULL)
Constructs a BufferGL in a specified context, from a given GL buffer.
Definition: cl2.hpp:3979
Platform()
Default constructor - initializes to NULL.
Definition: cl2.hpp:2265
Memory & operator=(const cl_mem &rhs)
Assignment operator from cl_mem - takes ownership.
Definition: cl2.hpp:3186
size_type size() const
Returns the size of the object in bytes based on the runtime number of dimensions.
Definition: cl2.hpp:5590
BufferGL(const cl_mem &buffer, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4008
cl_int getInfo(cl_mem_info name, T *param) const
Wrapper for clGetMemObjectInfo().
Definition: cl2.hpp:3223
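As a closing sketch, the cl::Platform and cl::Device wrappers indexed above can be used to enumerate the installed OpenCL implementations; the function name is illustrative and at least one driver is assumed to be present:

    #include <iostream>

    void listOpenCLDevices() {
        cl::vector<cl::Platform> platforms;
        cl::Platform::get(&platforms);                  // all available platforms
        for (const cl::Platform &p : platforms) {
            cl::vector<cl::Device> devices;
            p.getDevices(CL_DEVICE_TYPE_ALL, &devices); // all devices on this platform
            for (const cl::Device &d : devices) {
                std::cout << p.getInfo<CL_PLATFORM_NAME>() << " : "
                          << d.getInfo<CL_DEVICE_NAME>() << '\n';
            }
        }
    }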