OpenCL C++ Bindings
cl2.hpp
1 //
2 // Copyright (c) 2008-2020 The Khronos Group Inc.
3 //
4 // Licensed under the Apache License, Version 2.0 (the "License");
5 // you may not use this file except in compliance with the License.
6 // You may obtain a copy of the License at
7 //
8 // http://www.apache.org/licenses/LICENSE-2.0
9 //
10 // Unless required by applicable law or agreed to in writing, software
11 // distributed under the License is distributed on an "AS IS" BASIS,
12 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 // See the License for the specific language governing permissions and
14 // limitations under the License.
15 //
16 
312 
325 
389 #ifndef CL_HPP_
390 #define CL_HPP_
391 
392 /* Handle deprecated preprocessor definitions. In each case, we only check for
393  * the old name if the new name is not defined, so that user code can define
394  * both and hence work with either version of the bindings.
395  */
396 #if !defined(CL_HPP_USE_DX_INTEROP) && defined(USE_DX_INTEROP)
397 # pragma message("cl2.hpp: USE_DX_INTEROP is deprecated. Define CL_HPP_USE_DX_INTEROP instead")
398 # define CL_HPP_USE_DX_INTEROP
399 #endif
400 #if !defined(CL_HPP_USE_CL_DEVICE_FISSION) && defined(USE_CL_DEVICE_FISSION)
401 # pragma message("cl2.hpp: USE_CL_DEVICE_FISSION is deprecated. Define CL_HPP_USE_CL_DEVICE_FISSION instead")
402 # define CL_HPP_USE_CL_DEVICE_FISSION
403 #endif
404 #if !defined(CL_HPP_ENABLE_EXCEPTIONS) && defined(__CL_ENABLE_EXCEPTIONS)
405 # pragma message("cl2.hpp: __CL_ENABLE_EXCEPTIONS is deprecated. Define CL_HPP_ENABLE_EXCEPTIONS instead")
406 # define CL_HPP_ENABLE_EXCEPTIONS
407 #endif
408 #if !defined(CL_HPP_NO_STD_VECTOR) && defined(__NO_STD_VECTOR)
409 # pragma message("cl2.hpp: __NO_STD_VECTOR is deprecated. Define CL_HPP_NO_STD_VECTOR instead")
410 # define CL_HPP_NO_STD_VECTOR
411 #endif
412 #if !defined(CL_HPP_NO_STD_STRING) && defined(__NO_STD_STRING)
413 # pragma message("cl2.hpp: __NO_STD_STRING is deprecated. Define CL_HPP_NO_STD_STRING instead")
414 # define CL_HPP_NO_STD_STRING
415 #endif
416 #if defined(VECTOR_CLASS)
417 # pragma message("cl2.hpp: VECTOR_CLASS is deprecated. Alias cl::vector instead")
418 #endif
419 #if defined(STRING_CLASS)
420 # pragma message("cl2.hpp: STRING_CLASS is deprecated. Alias cl::string instead.")
421 #endif
422 #if !defined(CL_HPP_USER_OVERRIDE_ERROR_STRINGS) && defined(__CL_USER_OVERRIDE_ERROR_STRINGS)
423 # pragma message("cl2.hpp: __CL_USER_OVERRIDE_ERROR_STRINGS is deprecated. Define CL_HPP_USER_OVERRIDE_ERROR_STRINGS instead")
424 # define CL_HPP_USER_OVERRIDE_ERROR_STRINGS
425 #endif
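// Usage sketch: legacy code that defines only an old-style macro keeps
// working, because the shim above maps it onto the new name (and emits a
// pragma message as a reminder). New code should prefer the CL_HPP_ prefix.
// The include path below assumes the usual Khronos header layout.
//
//     // Old style, still accepted:
//     #define __CL_ENABLE_EXCEPTIONS
//     #include <CL/cl2.hpp>
//
//     // New style, preferred:
//     #define CL_HPP_ENABLE_EXCEPTIONS
//     #include <CL/cl2.hpp>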
426 
427 /* Warn about features that are no longer supported
428  */
429 #if defined(__USE_DEV_VECTOR)
430 # pragma message("cl2.hpp: __USE_DEV_VECTOR is no longer supported. Expect compilation errors")
431 #endif
432 #if defined(__USE_DEV_STRING)
433 # pragma message("cl2.hpp: __USE_DEV_STRING is no longer supported. Expect compilation errors")
434 #endif
435 
436 /* Detect which version to target */
437 #if !defined(CL_HPP_TARGET_OPENCL_VERSION)
438 # pragma message("cl2.hpp: CL_HPP_TARGET_OPENCL_VERSION is not defined. It will default to 220 (OpenCL 2.2)")
439 # define CL_HPP_TARGET_OPENCL_VERSION 220
440 #endif
441 #if CL_HPP_TARGET_OPENCL_VERSION != 100 && \
442  CL_HPP_TARGET_OPENCL_VERSION != 110 && \
443  CL_HPP_TARGET_OPENCL_VERSION != 120 && \
444  CL_HPP_TARGET_OPENCL_VERSION != 200 && \
445  CL_HPP_TARGET_OPENCL_VERSION != 210 && \
446  CL_HPP_TARGET_OPENCL_VERSION != 220
447 # pragma message("cl2.hpp: CL_HPP_TARGET_OPENCL_VERSION is not a valid value (100, 110, 120, 200, 210 or 220). It will be set to 220")
448 # undef CL_HPP_TARGET_OPENCL_VERSION
449 # define CL_HPP_TARGET_OPENCL_VERSION 220
450 #endif
451 
452 /* Forward target OpenCL version to C headers if necessary */
453 #if defined(CL_TARGET_OPENCL_VERSION)
454 /* Warn if prior definition of CL_TARGET_OPENCL_VERSION is lower than
455  * requested C++ bindings version */
456 #if CL_TARGET_OPENCL_VERSION < CL_HPP_TARGET_OPENCL_VERSION
457 # pragma message("CL_TARGET_OPENCL_VERSION is already defined as is lower than CL_HPP_TARGET_OPENCL_VERSION")
458 #endif
459 #else
460 # define CL_TARGET_OPENCL_VERSION CL_HPP_TARGET_OPENCL_VERSION
461 #endif
462 
463 #if !defined(CL_HPP_MINIMUM_OPENCL_VERSION)
464 # define CL_HPP_MINIMUM_OPENCL_VERSION 200
465 #endif
466 #if CL_HPP_MINIMUM_OPENCL_VERSION != 100 && \
467  CL_HPP_MINIMUM_OPENCL_VERSION != 110 && \
468  CL_HPP_MINIMUM_OPENCL_VERSION != 120 && \
469  CL_HPP_MINIMUM_OPENCL_VERSION != 200 && \
470  CL_HPP_MINIMUM_OPENCL_VERSION != 210 && \
471  CL_HPP_MINIMUM_OPENCL_VERSION != 220
472 # pragma message("cl2.hpp: CL_HPP_MINIMUM_OPENCL_VERSION is not a valid value (100, 110, 120, 200, 210 or 220). It will be set to 100")
473 # undef CL_HPP_MINIMUM_OPENCL_VERSION
474 # define CL_HPP_MINIMUM_OPENCL_VERSION 100
475 #endif
476 #if CL_HPP_MINIMUM_OPENCL_VERSION > CL_HPP_TARGET_OPENCL_VERSION
477 # error "CL_HPP_MINIMUM_OPENCL_VERSION must not be greater than CL_HPP_TARGET_OPENCL_VERSION"
478 #endif
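// Usage sketch: select the OpenCL versions before including this header.
// The checks above clamp an invalid target to 220 and an invalid minimum to
// 100, and reject a minimum greater than the target. For example, to use
// OpenCL 2.0 features while still loading on 1.2 platforms:
//
//     #define CL_HPP_MINIMUM_OPENCL_VERSION 120
//     #define CL_HPP_TARGET_OPENCL_VERSION 200
//     #include <CL/cl2.hpp>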
479 
480 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 100 && !defined(CL_USE_DEPRECATED_OPENCL_1_0_APIS)
481 # define CL_USE_DEPRECATED_OPENCL_1_0_APIS
482 #endif
483 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 110 && !defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
484 # define CL_USE_DEPRECATED_OPENCL_1_1_APIS
485 #endif
486 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 120 && !defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
487 # define CL_USE_DEPRECATED_OPENCL_1_2_APIS
488 #endif
489 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 200 && !defined(CL_USE_DEPRECATED_OPENCL_2_0_APIS)
490 # define CL_USE_DEPRECATED_OPENCL_2_0_APIS
491 #endif
492 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 210 && !defined(CL_USE_DEPRECATED_OPENCL_2_1_APIS)
493 # define CL_USE_DEPRECATED_OPENCL_2_1_APIS
494 #endif
495 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 220 && !defined(CL_USE_DEPRECATED_OPENCL_2_2_APIS)
496 # define CL_USE_DEPRECATED_OPENCL_2_2_APIS
497 #endif
498 
499 #ifdef _WIN32
500 
501 #include <malloc.h>
502 
503 #if defined(CL_HPP_USE_DX_INTEROP)
504 #include <CL/cl_d3d10.h>
505 #include <CL/cl_dx9_media_sharing.h>
506 #endif
507 #endif // _WIN32
508 
509 #if defined(_MSC_VER)
510 #include <intrin.h>
511 #endif // _MSC_VER
512 
513  // Check for a valid C++ version
514 
515 // Both tests are needed here because MSVC does not update __cplusplus to
516 // reflect the supported language standard by default
517 #if (!defined(_MSC_VER) && __cplusplus < 201103L) || (defined(_MSC_VER) && _MSC_VER < 1700)
518 #error Visual Studio 2013 or another C++11-supporting compiler required
519 #endif
520 
521 //
522 #if defined(CL_HPP_USE_CL_DEVICE_FISSION) || defined(CL_HPP_USE_CL_SUB_GROUPS_KHR)
523 #include <CL/cl_ext.h>
524 #endif
525 
526 #if defined(__APPLE__) || defined(__MACOSX)
527 #include <OpenCL/opencl.h>
528 #else
529 #include <CL/opencl.h>
530 #endif // !__APPLE__
531 
532 #if (__cplusplus >= 201103L || _MSVC_LANG >= 201103L )
533 #define CL_HPP_NOEXCEPT_ noexcept
534 #else
535 #define CL_HPP_NOEXCEPT_
536 #endif
537 
538 #if __cplusplus >= 201703L
539 # define CL_HPP_DEFINE_STATIC_MEMBER_ inline
540 #elif defined(_MSC_VER)
541 # define CL_HPP_DEFINE_STATIC_MEMBER_ __declspec(selectany)
542 #elif defined(__MINGW32__)
543 # define CL_HPP_DEFINE_STATIC_MEMBER_ __attribute__((selectany))
544 #else
545 # define CL_HPP_DEFINE_STATIC_MEMBER_ __attribute__((weak))
546 #endif // !_MSC_VER
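// Usage sketch (SomeType::instance_ is a hypothetical static member, shown
// only to illustrate the macro): CL_HPP_DEFINE_STATIC_MEMBER_ lets this
// header-only library define static data members in every translation unit
// without violating the one-definition rule, via C++17 inline variables,
// selectany on MSVC/MinGW, or weak symbols elsewhere.
//
//     CL_HPP_DEFINE_STATIC_MEMBER_ SomeType* SomeType::instance_ = nullptr;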
547 
548 // Define deprecated prefixes and suffixes to ensure compilation
549 // in case they are not pre-defined
550 #if !defined(CL_EXT_PREFIX__VERSION_1_1_DEPRECATED)
551 #define CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
552 #endif // #if !defined(CL_EXT_PREFIX__VERSION_1_1_DEPRECATED)
553 #if !defined(CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED)
554 #define CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
555 #endif // #if !defined(CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED)
556 
557 #if !defined(CL_EXT_PREFIX__VERSION_1_2_DEPRECATED)
558 #define CL_EXT_PREFIX__VERSION_1_2_DEPRECATED
559 #endif // #if !defined(CL_EXT_PREFIX__VERSION_1_2_DEPRECATED)
560 #if !defined(CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED)
561 #define CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED
562 #endif // #if !defined(CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED)
563 
564 #if !defined(CL_CALLBACK)
565 #define CL_CALLBACK
566 #endif //CL_CALLBACK
567 
568 #include <utility>
569 #include <limits>
570 #include <iterator>
571 #include <mutex>
572 #include <cstring>
573 #include <functional>
574 
575 
576 // Define a size_type to represent a correctly resolved size_t
577 #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
578 namespace cl {
579  using size_type = ::size_t;
580 } // namespace cl
581 #else // #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
582 namespace cl {
583  using size_type = size_t;
584 } // namespace cl
585 #endif // #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
586 
587 
588 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
589 #include <exception>
590 #endif // #if defined(CL_HPP_ENABLE_EXCEPTIONS)
591 
592 #if !defined(CL_HPP_NO_STD_VECTOR)
593 #include <vector>
594 namespace cl {
595  template < class T, class Alloc = std::allocator<T> >
596  using vector = std::vector<T, Alloc>;
597 } // namespace cl
598 #endif // #if !defined(CL_HPP_NO_STD_VECTOR)
599 
600 #if !defined(CL_HPP_NO_STD_STRING)
601 #include <string>
602 namespace cl {
603  using string = std::string;
604 } // namespace cl
605 #endif // #if !defined(CL_HPP_NO_STD_STRING)
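// Usage sketch: when CL_HPP_NO_STD_VECTOR or CL_HPP_NO_STD_STRING is defined,
// the aliases above are skipped and user code must supply cl::vector and
// cl::string with compatible interfaces before including this header.
// MyVector and MyString below are hypothetical stand-ins for such types.
//
//     #define CL_HPP_NO_STD_VECTOR
//     #define CL_HPP_NO_STD_STRING
//     #include <memory> // for std::allocator
//     namespace cl {
//         template<class T, class Alloc = std::allocator<T>>
//         using vector = MyVector<T, Alloc>;
//         using string = MyString;
//     }
//     #include <CL/cl2.hpp>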
606 
607 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
608 
609 #if !defined(CL_HPP_NO_STD_UNIQUE_PTR)
610 #include <memory>
611 namespace cl {
612  // Alias unique_ptr (and allocate_pointer, defined later) for internal use
613  // so that users can replace them with their own implementations
614  template<class T, class D>
615  using pointer = std::unique_ptr<T, D>;
616 } // namespace cl
617 #endif
618 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
619 #if !defined(CL_HPP_NO_STD_ARRAY)
620 #include <array>
621 namespace cl {
622  template < class T, size_type N >
623  using array = std::array<T, N>;
624 } // namespace cl
625 #endif // #if !defined(CL_HPP_NO_STD_ARRAY)
626 
627 // Define size_type appropriately to allow backward-compatibility
628 // use of the old size_t interface class
629 #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
630 namespace cl {
631  namespace compatibility {
636  template <int N>
637  class size_t
638  {
639  private:
640  size_type data_[N];
641 
642  public:
644  size_t()
645  {
646  for (int i = 0; i < N; ++i) {
647  data_[i] = 0;
648  }
649  }
650 
651  size_t(const array<size_type, N> &rhs)
652  {
653  for (int i = 0; i < N; ++i) {
654  data_[i] = rhs[i];
655  }
656  }
657 
658  size_type& operator[](int index)
659  {
660  return data_[index];
661  }
662 
663  const size_type& operator[](int index) const
664  {
665  return data_[index];
666  }
667 
669  operator size_type* () { return data_; }
670 
672  operator const size_type* () const { return data_; }
673 
674  operator array<size_type, N>() const
675  {
676  array<size_type, N> ret;
677 
678  for (int i = 0; i < N; ++i) {
679  ret[i] = data_[i];
680  }
681  return ret;
682  }
683  };
684  } // namespace compatibility
685 
686  template<int N>
687  using size_t = compatibility::size_t<N>;
688 } // namespace cl
689 #endif // #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
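// Usage sketch: with CL_HPP_ENABLE_SIZE_T_COMPATIBILITY defined, code written
// against the older bindings' cl::size_t<N> interface keeps compiling. The
// compatibility class above converts to and from cl::array<size_type, N> and
// decays to a raw size_type pointer. width and height are assumed to be
// pre-existing size values.
//
//     cl::size_t<3> region;
//     region[0] = width; region[1] = height; region[2] = 1;
//     cl::array<cl::size_type, 3> asArray = region;  // implicit conversion
//     cl::size_type* raw = region;                   // pointer decay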
690 
691 // Helper alias so that the comma inside the array type does not confuse the parameter macros
692 namespace cl {
693  namespace detail {
694  using size_t_array = array<size_type, 3>;
695  } // namespace detail
696 } // namespace cl
697 
698 
704 namespace cl {
705  class Memory;
706 
707 #define CL_HPP_INIT_CL_EXT_FCN_PTR_(name) \
708  if (!pfn_##name) { \
709  pfn_##name = (PFN_##name) \
710  clGetExtensionFunctionAddress(#name); \
711  if (!pfn_##name) { \
712  } \
713  }
714 
715 #define CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, name) \
716  if (!pfn_##name) { \
717  pfn_##name = (PFN_##name) \
718  clGetExtensionFunctionAddressForPlatform(platform, #name); \
719  if (!pfn_##name) { \
720  } \
721  }
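// Sketch of the intended use of these helpers, assuming a function-pointer
// typedef such as PFN_clCreateSubDevicesEXT has been declared for the
// extension entry point elsewhere:
//
//     static PFN_clCreateSubDevicesEXT pfn_clCreateSubDevicesEXT = NULL;
//     CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, clCreateSubDevicesEXT);
//
// The lookup runs at most once per pointer; the platform-aware variant is
// preferred on OpenCL 1.2 and later, where clGetExtensionFunctionAddress is
// deprecated.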
722 
723  class Program;
724  class Device;
725  class Context;
726  class CommandQueue;
727  class DeviceCommandQueue;
728  class Memory;
729  class Buffer;
730  class Pipe;
731 
732 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
733 
737  class Error : public std::exception
738  {
739  private:
740  cl_int err_;
741  const char * errStr_;
742  public:
752  Error(cl_int err, const char * errStr = NULL) : err_(err), errStr_(errStr)
753  {}
754 
755  ~Error() throw() {}
756 
761  virtual const char * what() const throw ()
762  {
763  if (errStr_ == NULL) {
764  return "empty";
765  }
766  else {
767  return errStr_;
768  }
769  }
770 
775  cl_int err(void) const { return err_; }
776  };
777 #define CL_HPP_ERR_STR_(x) #x
778 #else
779 #define CL_HPP_ERR_STR_(x) NULL
780 #endif // CL_HPP_ENABLE_EXCEPTIONS
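// Usage sketch: with CL_HPP_ENABLE_EXCEPTIONS defined, API failures surface
// as cl::Error carrying the error code and the name of the failing call;
// without it, errors are reported as plain cl_int return values instead.
// cl::Context is defined later in this header and <iostream> is assumed.
//
//     try {
//         cl::Context context(CL_DEVICE_TYPE_GPU);
//     } catch (const cl::Error& e) {
//         std::cerr << e.what() << " failed with error " << e.err() << "\n";
//     }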
781 
782 
783 namespace detail
784 {
785 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
786 static inline cl_int errHandler (
787  cl_int err,
788  const char * errStr = NULL)
789 {
790  if (err != CL_SUCCESS) {
791  throw Error(err, errStr);
792  }
793  return err;
794 }
795 #else
796 static inline cl_int errHandler (cl_int err, const char * errStr = NULL)
797 {
798  (void) errStr; // suppress unused variable warning
799  return err;
800 }
801 #endif // CL_HPP_ENABLE_EXCEPTIONS
802 }
803 
804 
805 
807 #if !defined(CL_HPP_USER_OVERRIDE_ERROR_STRINGS)
808 #define __GET_DEVICE_INFO_ERR CL_HPP_ERR_STR_(clGetDeviceInfo)
809 #define __GET_PLATFORM_INFO_ERR CL_HPP_ERR_STR_(clGetPlatformInfo)
810 #define __GET_DEVICE_IDS_ERR CL_HPP_ERR_STR_(clGetDeviceIDs)
811 #define __GET_PLATFORM_IDS_ERR CL_HPP_ERR_STR_(clGetPlatformIDs)
812 #define __GET_CONTEXT_INFO_ERR CL_HPP_ERR_STR_(clGetContextInfo)
813 #define __GET_EVENT_INFO_ERR CL_HPP_ERR_STR_(clGetEventInfo)
814 #define __GET_EVENT_PROFILE_INFO_ERR CL_HPP_ERR_STR_(clGetEventProfilingInfo)
815 #define __GET_MEM_OBJECT_INFO_ERR CL_HPP_ERR_STR_(clGetMemObjectInfo)
816 #define __GET_IMAGE_INFO_ERR CL_HPP_ERR_STR_(clGetImageInfo)
817 #define __GET_SAMPLER_INFO_ERR CL_HPP_ERR_STR_(clGetSamplerInfo)
818 #define __GET_KERNEL_INFO_ERR CL_HPP_ERR_STR_(clGetKernelInfo)
819 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
820 #define __GET_KERNEL_ARG_INFO_ERR CL_HPP_ERR_STR_(clGetKernelArgInfo)
821 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
822 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
823 #define __GET_KERNEL_SUB_GROUP_INFO_ERR CL_HPP_ERR_STR_(clGetKernelSubGroupInfo)
824 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
825 #define __GET_KERNEL_WORK_GROUP_INFO_ERR CL_HPP_ERR_STR_(clGetKernelWorkGroupInfo)
826 #define __GET_PROGRAM_INFO_ERR CL_HPP_ERR_STR_(clGetProgramInfo)
827 #define __GET_PROGRAM_BUILD_INFO_ERR CL_HPP_ERR_STR_(clGetProgramBuildInfo)
828 #define __GET_COMMAND_QUEUE_INFO_ERR CL_HPP_ERR_STR_(clGetCommandQueueInfo)
829 
830 #define __CREATE_CONTEXT_ERR CL_HPP_ERR_STR_(clCreateContext)
831 #define __CREATE_CONTEXT_FROM_TYPE_ERR CL_HPP_ERR_STR_(clCreateContextFromType)
832 #define __GET_SUPPORTED_IMAGE_FORMATS_ERR CL_HPP_ERR_STR_(clGetSupportedImageFormats)
833 
834 #define __CREATE_BUFFER_ERR CL_HPP_ERR_STR_(clCreateBuffer)
835 #define __COPY_ERR CL_HPP_ERR_STR_(cl::copy)
836 #define __CREATE_SUBBUFFER_ERR CL_HPP_ERR_STR_(clCreateSubBuffer)
837 #define __CREATE_GL_BUFFER_ERR CL_HPP_ERR_STR_(clCreateFromGLBuffer)
838 #define __CREATE_GL_RENDER_BUFFER_ERR CL_HPP_ERR_STR_(clCreateFromGLBuffer)
839 #define __GET_GL_OBJECT_INFO_ERR CL_HPP_ERR_STR_(clGetGLObjectInfo)
840 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
841 #define __CREATE_IMAGE_ERR CL_HPP_ERR_STR_(clCreateImage)
842 #define __CREATE_GL_TEXTURE_ERR CL_HPP_ERR_STR_(clCreateFromGLTexture)
843 #define __IMAGE_DIMENSION_ERR CL_HPP_ERR_STR_(Incorrect image dimensions)
844 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
845 #define __SET_MEM_OBJECT_DESTRUCTOR_CALLBACK_ERR CL_HPP_ERR_STR_(clSetMemObjectDestructorCallback)
846 
847 #define __CREATE_USER_EVENT_ERR CL_HPP_ERR_STR_(clCreateUserEvent)
848 #define __SET_USER_EVENT_STATUS_ERR CL_HPP_ERR_STR_(clSetUserEventStatus)
849 #define __SET_EVENT_CALLBACK_ERR CL_HPP_ERR_STR_(clSetEventCallback)
850 #define __WAIT_FOR_EVENTS_ERR CL_HPP_ERR_STR_(clWaitForEvents)
851 
852 #define __CREATE_KERNEL_ERR CL_HPP_ERR_STR_(clCreateKernel)
853 #define __SET_KERNEL_ARGS_ERR CL_HPP_ERR_STR_(clSetKernelArg)
854 #define __CREATE_PROGRAM_WITH_SOURCE_ERR CL_HPP_ERR_STR_(clCreateProgramWithSource)
855 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
856 #define __CREATE_PROGRAM_WITH_IL_ERR CL_HPP_ERR_STR_(clCreateProgramWithIL)
857 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
858 #define __CREATE_PROGRAM_WITH_BINARY_ERR CL_HPP_ERR_STR_(clCreateProgramWithBinary)
859 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
860 #define __CREATE_PROGRAM_WITH_IL_ERR CL_HPP_ERR_STR_(clCreateProgramWithIL)
861 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 210
862 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
863 #define __CREATE_PROGRAM_WITH_BUILT_IN_KERNELS_ERR CL_HPP_ERR_STR_(clCreateProgramWithBuiltInKernels)
864 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
865 #define __BUILD_PROGRAM_ERR CL_HPP_ERR_STR_(clBuildProgram)
866 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
867 #define __COMPILE_PROGRAM_ERR CL_HPP_ERR_STR_(clCompileProgram)
868 #define __LINK_PROGRAM_ERR CL_HPP_ERR_STR_(clLinkProgram)
869 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
870 #define __CREATE_KERNELS_IN_PROGRAM_ERR CL_HPP_ERR_STR_(clCreateKernelsInProgram)
871 
872 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
873 #define __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR CL_HPP_ERR_STR_(clCreateCommandQueueWithProperties)
874 #define __CREATE_SAMPLER_WITH_PROPERTIES_ERR CL_HPP_ERR_STR_(clCreateSamplerWithProperties)
875 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
876 #define __SET_COMMAND_QUEUE_PROPERTY_ERR CL_HPP_ERR_STR_(clSetCommandQueueProperty)
877 #define __ENQUEUE_READ_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueReadBuffer)
878 #define __ENQUEUE_READ_BUFFER_RECT_ERR CL_HPP_ERR_STR_(clEnqueueReadBufferRect)
879 #define __ENQUEUE_WRITE_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueWriteBuffer)
880 #define __ENQUEUE_WRITE_BUFFER_RECT_ERR CL_HPP_ERR_STR_(clEnqueueWriteBufferRect)
881 #define __ENQEUE_COPY_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueCopyBuffer)
882 #define __ENQEUE_COPY_BUFFER_RECT_ERR CL_HPP_ERR_STR_(clEnqueueCopyBufferRect)
883 #define __ENQUEUE_FILL_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueFillBuffer)
884 #define __ENQUEUE_READ_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueReadImage)
885 #define __ENQUEUE_WRITE_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueWriteImage)
886 #define __ENQUEUE_COPY_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueCopyImage)
887 #define __ENQUEUE_FILL_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueFillImage)
888 #define __ENQUEUE_COPY_IMAGE_TO_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueCopyImageToBuffer)
889 #define __ENQUEUE_COPY_BUFFER_TO_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueCopyBufferToImage)
890 #define __ENQUEUE_MAP_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueMapBuffer)
891 #define __ENQUEUE_MAP_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueMapImage)
892 #define __ENQUEUE_UNMAP_MEM_OBJECT_ERR CL_HPP_ERR_STR_(clEnqueueUnmapMemObject)
893 #define __ENQUEUE_NDRANGE_KERNEL_ERR CL_HPP_ERR_STR_(clEnqueueNDRangeKernel)
894 #define __ENQUEUE_NATIVE_KERNEL CL_HPP_ERR_STR_(clEnqueueNativeKernel)
895 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
896 #define __ENQUEUE_MIGRATE_MEM_OBJECTS_ERR CL_HPP_ERR_STR_(clEnqueueMigrateMemObjects)
897 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
898 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
899 #define __ENQUEUE_MIGRATE_SVM_ERR CL_HPP_ERR_STR_(clEnqueueSVMMigrateMem)
900 #define __SET_DEFAULT_DEVICE_COMMAND_QUEUE_ERR CL_HPP_ERR_STR_(clSetDefaultDeviceCommandQueue)
901 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 210
902 
903 
904 #define __ENQUEUE_ACQUIRE_GL_ERR CL_HPP_ERR_STR_(clEnqueueAcquireGLObjects)
905 #define __ENQUEUE_RELEASE_GL_ERR CL_HPP_ERR_STR_(clEnqueueReleaseGLObjects)
906 
907 #define __CREATE_PIPE_ERR CL_HPP_ERR_STR_(clCreatePipe)
908 #define __GET_PIPE_INFO_ERR CL_HPP_ERR_STR_(clGetPipeInfo)
909 
910 
911 #define __RETAIN_ERR CL_HPP_ERR_STR_(Retain Object)
912 #define __RELEASE_ERR CL_HPP_ERR_STR_(Release Object)
913 #define __FLUSH_ERR CL_HPP_ERR_STR_(clFlush)
914 #define __FINISH_ERR CL_HPP_ERR_STR_(clFinish)
915 #define __VECTOR_CAPACITY_ERR CL_HPP_ERR_STR_(Vector capacity error)
916 
917 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
918 #define __GET_HOST_TIMER_ERR CL_HPP_ERR_STR_(clGetHostTimer)
919 #define __GET_DEVICE_AND_HOST_TIMER_ERR CL_HPP_ERR_STR_(clGetDeviceAndHostTimer)
920 #endif
921 #if CL_HPP_TARGET_OPENCL_VERSION >= 220
922 #define __SET_PROGRAM_RELEASE_CALLBACK_ERR CL_HPP_ERR_STR_(clSetProgramReleaseCallback)
923 #define __SET_PROGRAM_SPECIALIZATION_CONSTANT_ERR CL_HPP_ERR_STR_(clSetProgramSpecializationConstant)
924 #endif
925 
926 
930 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
931 #define __CREATE_SUB_DEVICES_ERR CL_HPP_ERR_STR_(clCreateSubDevices)
932 #else
933 #define __CREATE_SUB_DEVICES_ERR CL_HPP_ERR_STR_(clCreateSubDevicesEXT)
934 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
935 
939 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
940 #define __ENQUEUE_MARKER_ERR CL_HPP_ERR_STR_(clEnqueueMarker)
941 #define __ENQUEUE_WAIT_FOR_EVENTS_ERR CL_HPP_ERR_STR_(clEnqueueWaitForEvents)
942 #define __ENQUEUE_BARRIER_ERR CL_HPP_ERR_STR_(clEnqueueBarrier)
943 #define __UNLOAD_COMPILER_ERR CL_HPP_ERR_STR_(clUnloadCompiler)
944 #define __CREATE_GL_TEXTURE_2D_ERR CL_HPP_ERR_STR_(clCreateFromGLTexture2D)
945 #define __CREATE_GL_TEXTURE_3D_ERR CL_HPP_ERR_STR_(clCreateFromGLTexture3D)
946 #define __CREATE_IMAGE2D_ERR CL_HPP_ERR_STR_(clCreateImage2D)
947 #define __CREATE_IMAGE3D_ERR CL_HPP_ERR_STR_(clCreateImage3D)
948 #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
949 
953 #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
954 #define __CREATE_COMMAND_QUEUE_ERR CL_HPP_ERR_STR_(clCreateCommandQueue)
955 #define __ENQUEUE_TASK_ERR CL_HPP_ERR_STR_(clEnqueueTask)
956 #define __CREATE_SAMPLER_ERR CL_HPP_ERR_STR_(clCreateSampler)
957 #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
958 
962 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
963 #define __ENQUEUE_MARKER_WAIT_LIST_ERR CL_HPP_ERR_STR_(clEnqueueMarkerWithWaitList)
964 #define __ENQUEUE_BARRIER_WAIT_LIST_ERR CL_HPP_ERR_STR_(clEnqueueBarrierWithWaitList)
965 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
966 
967 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
968 #define __CLONE_KERNEL_ERR CL_HPP_ERR_STR_(clCloneKernel)
969 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 210
970 
971 #endif // CL_HPP_USER_OVERRIDE_ERROR_STRINGS
972 
974 
975 namespace detail {
976 
977 // Generic getInfoHelper. The final parameter is used to guide overload
978 // resolution: callers pass the int literal 0, so any specialization that
979 // declares this parameter as int is an exact match and is preferred over
980 // this generic overload, which declares it as long and needs a conversion.
981 template<typename Functor, typename T>
982 inline cl_int getInfoHelper(Functor f, cl_uint name, T* param, long)
983 {
984  return f(name, sizeof(T), param, NULL);
985 }
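// Minimal sketch of the tag-dispatch trick used throughout this block, with
// illustrative names only: callers always pass the int literal 0 as the last
// argument, so an overload whose tag parameter is int is an exact match and
// wins, while the generic overload (tag type long) is picked only when no
// better match exists.
//
//     template <typename T>
//     void query(T* out, long)           { /* generic path    */ }
//     void query(std::string* out, int)  { /* string-specific */ }
//
//     std::string s;
//     int i;
//     query(&s, 0);  // exact int tag -> string-specific overload
//     query(&i, 0);  // no int* overload with int tag -> generic overload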
986 
987 // Specialized for getInfo<CL_PROGRAM_BINARIES>
988 // Assumes that the output vector was correctly resized on the way in
989 template <typename Func>
990 inline cl_int getInfoHelper(Func f, cl_uint name, vector<vector<unsigned char>>* param, int)
991 {
992  if (name != CL_PROGRAM_BINARIES) {
993  return CL_INVALID_VALUE;
994  }
995  if (param) {
996  // Create array of pointers, calculate total size and pass pointer array in
997  size_type numBinaries = param->size();
998  vector<unsigned char*> binariesPointers(numBinaries);
999 
1000  for (size_type i = 0; i < numBinaries; ++i)
1001  {
1002  binariesPointers[i] = (*param)[i].data();
1003  }
1004 
1005  cl_int err = f(name, numBinaries * sizeof(unsigned char*), binariesPointers.data(), NULL);
1006 
1007  if (err != CL_SUCCESS) {
1008  return err;
1009  }
1010  }
1011 
1012 
1013  return CL_SUCCESS;
1014 }
1015 
1016 // Specialized getInfoHelper for vector params
1017 template <typename Func, typename T>
1018 inline cl_int getInfoHelper(Func f, cl_uint name, vector<T>* param, long)
1019 {
1020  size_type required;
1021  cl_int err = f(name, 0, NULL, &required);
1022  if (err != CL_SUCCESS) {
1023  return err;
1024  }
1025  const size_type elements = required / sizeof(T);
1026 
1027  // Temporary to avoid changing param on an error
1028  vector<T> localData(elements);
1029  err = f(name, required, localData.data(), NULL);
1030  if (err != CL_SUCCESS) {
1031  return err;
1032  }
1033  if (param) {
1034  *param = std::move(localData);
1035  }
1036 
1037  return CL_SUCCESS;
1038 }
1039 
1040 /* Specialization for vectors of reference-counted types. This depends on the
1041  * existence of Wrapper<T>::cl_type, and on none of the other types having a
1042  * cl_type member. Note that simply specifying the parameter as Wrapper<T>
1043  * does not work, because when using a derived type (e.g. Context) the generic
1044  * template would provide a better match.
1045  */
1046 template <typename Func, typename T>
1047 inline cl_int getInfoHelper(
1048  Func f, cl_uint name, vector<T>* param, int, typename T::cl_type = 0)
1049 {
1050  size_type required;
1051  cl_int err = f(name, 0, NULL, &required);
1052  if (err != CL_SUCCESS) {
1053  return err;
1054  }
1055 
1056  const size_type elements = required / sizeof(typename T::cl_type);
1057 
1058  vector<typename T::cl_type> value(elements);
1059  err = f(name, required, value.data(), NULL);
1060  if (err != CL_SUCCESS) {
1061  return err;
1062  }
1063 
1064  if (param) {
1065  // Assign to convert CL type to T for each element
1066  param->resize(elements);
1067 
1068  // Assign to param, constructing with retain behaviour
1069  // to correctly capture each underlying CL object
1070  for (size_type i = 0; i < elements; i++) {
1071  (*param)[i] = T(value[i], true);
1072  }
1073  }
1074  return CL_SUCCESS;
1075 }
1076 
1077 // Specialized getInfoHelper for string params
1078 template <typename Func>
1079 inline cl_int getInfoHelper(Func f, cl_uint name, string* param, long)
1080 {
1081  size_type required;
1082  cl_int err = f(name, 0, NULL, &required);
1083  if (err != CL_SUCCESS) {
1084  return err;
1085  }
1086 
1087  // std::string exposes only const access to its buffer via data(),
1088  // so read into a char vector and copy into the string afterwards
1089  if (required > 0) {
1090  vector<char> value(required);
1091  err = f(name, required, value.data(), NULL);
1092  if (err != CL_SUCCESS) {
1093  return err;
1094  }
1095  if (param) {
1096  param->assign(begin(value), prev(end(value)));
1097  }
1098  }
1099  else if (param) {
1100  param->assign("");
1101  }
1102  return CL_SUCCESS;
1103 }
1104 
1105 // Specialized getInfoHelper for cl::size_t / array<size_type, N> params
1106 template <typename Func, size_type N>
1107 inline cl_int getInfoHelper(Func f, cl_uint name, array<size_type, N>* param, long)
1108 {
1109  size_type required;
1110  cl_int err = f(name, 0, NULL, &required);
1111  if (err != CL_SUCCESS) {
1112  return err;
1113  }
1114 
1115  size_type elements = required / sizeof(size_type);
1116  vector<size_type> value(elements, 0);
1117 
1118  err = f(name, required, value.data(), NULL);
1119  if (err != CL_SUCCESS) {
1120  return err;
1121  }
1122 
1123  // Bound the copy with N to prevent overruns
1124  // in case the query returned more elements than the array can hold
1125  if (elements > N) {
1126  elements = N;
1127  }
1128  for (size_type i = 0; i < elements; ++i) {
1129  (*param)[i] = value[i];
1130  }
1131 
1132  return CL_SUCCESS;
1133 }
1134 
1135 template<typename T> struct ReferenceHandler;
1136 
1137 /* Specialization for reference-counted types. This depends on the
1138  * existence of Wrapper<T>::cl_type, and on none of the other types having a
1139  * cl_type member. Note that simply specifying the parameter as Wrapper<T>
1140  * does not work, because when using a derived type (e.g. Context) the generic
1141  * template would provide a better match.
1142  */
1143 template<typename Func, typename T>
1144 inline cl_int getInfoHelper(Func f, cl_uint name, T* param, int, typename T::cl_type = 0)
1145 {
1146  typename T::cl_type value;
1147  cl_int err = f(name, sizeof(value), &value, NULL);
1148  if (err != CL_SUCCESS) {
1149  return err;
1150  }
1151  *param = value;
1152  if (value != NULL)
1153  {
1154  err = param->retain();
1155  if (err != CL_SUCCESS) {
1156  return err;
1157  }
1158  }
1159  return CL_SUCCESS;
1160 }
1161 
1162 #define CL_HPP_PARAM_NAME_INFO_1_0_(F) \
1163  F(cl_platform_info, CL_PLATFORM_PROFILE, string) \
1164  F(cl_platform_info, CL_PLATFORM_VERSION, string) \
1165  F(cl_platform_info, CL_PLATFORM_NAME, string) \
1166  F(cl_platform_info, CL_PLATFORM_VENDOR, string) \
1167  F(cl_platform_info, CL_PLATFORM_EXTENSIONS, string) \
1168  \
1169  F(cl_device_info, CL_DEVICE_TYPE, cl_device_type) \
1170  F(cl_device_info, CL_DEVICE_VENDOR_ID, cl_uint) \
1171  F(cl_device_info, CL_DEVICE_MAX_COMPUTE_UNITS, cl_uint) \
1172  F(cl_device_info, CL_DEVICE_MAX_WORK_ITEM_DIMENSIONS, cl_uint) \
1173  F(cl_device_info, CL_DEVICE_MAX_WORK_GROUP_SIZE, size_type) \
1174  F(cl_device_info, CL_DEVICE_MAX_WORK_ITEM_SIZES, cl::vector<size_type>) \
1175  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_CHAR, cl_uint) \
1176  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_SHORT, cl_uint) \
1177  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_INT, cl_uint) \
1178  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_LONG, cl_uint) \
1179  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_FLOAT, cl_uint) \
1180  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_DOUBLE, cl_uint) \
1181  F(cl_device_info, CL_DEVICE_MAX_CLOCK_FREQUENCY, cl_uint) \
1182  F(cl_device_info, CL_DEVICE_ADDRESS_BITS, cl_uint) \
1183  F(cl_device_info, CL_DEVICE_MAX_READ_IMAGE_ARGS, cl_uint) \
1184  F(cl_device_info, CL_DEVICE_MAX_WRITE_IMAGE_ARGS, cl_uint) \
1185  F(cl_device_info, CL_DEVICE_MAX_MEM_ALLOC_SIZE, cl_ulong) \
1186  F(cl_device_info, CL_DEVICE_IMAGE2D_MAX_WIDTH, size_type) \
1187  F(cl_device_info, CL_DEVICE_IMAGE2D_MAX_HEIGHT, size_type) \
1188  F(cl_device_info, CL_DEVICE_IMAGE3D_MAX_WIDTH, size_type) \
1189  F(cl_device_info, CL_DEVICE_IMAGE3D_MAX_HEIGHT, size_type) \
1190  F(cl_device_info, CL_DEVICE_IMAGE3D_MAX_DEPTH, size_type) \
1191  F(cl_device_info, CL_DEVICE_IMAGE_SUPPORT, cl_bool) \
1192  F(cl_device_info, CL_DEVICE_MAX_PARAMETER_SIZE, size_type) \
1193  F(cl_device_info, CL_DEVICE_MAX_SAMPLERS, cl_uint) \
1194  F(cl_device_info, CL_DEVICE_MEM_BASE_ADDR_ALIGN, cl_uint) \
1195  F(cl_device_info, CL_DEVICE_MIN_DATA_TYPE_ALIGN_SIZE, cl_uint) \
1196  F(cl_device_info, CL_DEVICE_SINGLE_FP_CONFIG, cl_device_fp_config) \
1197  F(cl_device_info, CL_DEVICE_DOUBLE_FP_CONFIG, cl_device_fp_config) \
1198  F(cl_device_info, CL_DEVICE_HALF_FP_CONFIG, cl_device_fp_config) \
1199  F(cl_device_info, CL_DEVICE_GLOBAL_MEM_CACHE_TYPE, cl_device_mem_cache_type) \
1200  F(cl_device_info, CL_DEVICE_GLOBAL_MEM_CACHELINE_SIZE, cl_uint)\
1201  F(cl_device_info, CL_DEVICE_GLOBAL_MEM_CACHE_SIZE, cl_ulong) \
1202  F(cl_device_info, CL_DEVICE_GLOBAL_MEM_SIZE, cl_ulong) \
1203  F(cl_device_info, CL_DEVICE_MAX_CONSTANT_BUFFER_SIZE, cl_ulong) \
1204  F(cl_device_info, CL_DEVICE_MAX_CONSTANT_ARGS, cl_uint) \
1205  F(cl_device_info, CL_DEVICE_LOCAL_MEM_TYPE, cl_device_local_mem_type) \
1206  F(cl_device_info, CL_DEVICE_LOCAL_MEM_SIZE, cl_ulong) \
1207  F(cl_device_info, CL_DEVICE_ERROR_CORRECTION_SUPPORT, cl_bool) \
1208  F(cl_device_info, CL_DEVICE_PROFILING_TIMER_RESOLUTION, size_type) \
1209  F(cl_device_info, CL_DEVICE_ENDIAN_LITTLE, cl_bool) \
1210  F(cl_device_info, CL_DEVICE_AVAILABLE, cl_bool) \
1211  F(cl_device_info, CL_DEVICE_COMPILER_AVAILABLE, cl_bool) \
1212  F(cl_device_info, CL_DEVICE_EXECUTION_CAPABILITIES, cl_device_exec_capabilities) \
1213  F(cl_device_info, CL_DEVICE_PLATFORM, cl_platform_id) \
1214  F(cl_device_info, CL_DEVICE_NAME, string) \
1215  F(cl_device_info, CL_DEVICE_VENDOR, string) \
1216  F(cl_device_info, CL_DRIVER_VERSION, string) \
1217  F(cl_device_info, CL_DEVICE_PROFILE, string) \
1218  F(cl_device_info, CL_DEVICE_VERSION, string) \
1219  F(cl_device_info, CL_DEVICE_EXTENSIONS, string) \
1220  \
1221  F(cl_context_info, CL_CONTEXT_REFERENCE_COUNT, cl_uint) \
1222  F(cl_context_info, CL_CONTEXT_DEVICES, cl::vector<Device>) \
1223  F(cl_context_info, CL_CONTEXT_PROPERTIES, cl::vector<cl_context_properties>) \
1224  \
1225  F(cl_event_info, CL_EVENT_COMMAND_QUEUE, cl::CommandQueue) \
1226  F(cl_event_info, CL_EVENT_COMMAND_TYPE, cl_command_type) \
1227  F(cl_event_info, CL_EVENT_REFERENCE_COUNT, cl_uint) \
1228  F(cl_event_info, CL_EVENT_COMMAND_EXECUTION_STATUS, cl_int) \
1229  \
1230  F(cl_profiling_info, CL_PROFILING_COMMAND_QUEUED, cl_ulong) \
1231  F(cl_profiling_info, CL_PROFILING_COMMAND_SUBMIT, cl_ulong) \
1232  F(cl_profiling_info, CL_PROFILING_COMMAND_START, cl_ulong) \
1233  F(cl_profiling_info, CL_PROFILING_COMMAND_END, cl_ulong) \
1234  \
1235  F(cl_mem_info, CL_MEM_TYPE, cl_mem_object_type) \
1236  F(cl_mem_info, CL_MEM_FLAGS, cl_mem_flags) \
1237  F(cl_mem_info, CL_MEM_SIZE, size_type) \
1238  F(cl_mem_info, CL_MEM_HOST_PTR, void*) \
1239  F(cl_mem_info, CL_MEM_MAP_COUNT, cl_uint) \
1240  F(cl_mem_info, CL_MEM_REFERENCE_COUNT, cl_uint) \
1241  F(cl_mem_info, CL_MEM_CONTEXT, cl::Context) \
1242  \
1243  F(cl_image_info, CL_IMAGE_FORMAT, cl_image_format) \
1244  F(cl_image_info, CL_IMAGE_ELEMENT_SIZE, size_type) \
1245  F(cl_image_info, CL_IMAGE_ROW_PITCH, size_type) \
1246  F(cl_image_info, CL_IMAGE_SLICE_PITCH, size_type) \
1247  F(cl_image_info, CL_IMAGE_WIDTH, size_type) \
1248  F(cl_image_info, CL_IMAGE_HEIGHT, size_type) \
1249  F(cl_image_info, CL_IMAGE_DEPTH, size_type) \
1250  \
1251  F(cl_sampler_info, CL_SAMPLER_REFERENCE_COUNT, cl_uint) \
1252  F(cl_sampler_info, CL_SAMPLER_CONTEXT, cl::Context) \
1253  F(cl_sampler_info, CL_SAMPLER_NORMALIZED_COORDS, cl_bool) \
1254  F(cl_sampler_info, CL_SAMPLER_ADDRESSING_MODE, cl_addressing_mode) \
1255  F(cl_sampler_info, CL_SAMPLER_FILTER_MODE, cl_filter_mode) \
1256  \
1257  F(cl_program_info, CL_PROGRAM_REFERENCE_COUNT, cl_uint) \
1258  F(cl_program_info, CL_PROGRAM_CONTEXT, cl::Context) \
1259  F(cl_program_info, CL_PROGRAM_NUM_DEVICES, cl_uint) \
1260  F(cl_program_info, CL_PROGRAM_DEVICES, cl::vector<Device>) \
1261  F(cl_program_info, CL_PROGRAM_SOURCE, string) \
1262  F(cl_program_info, CL_PROGRAM_BINARY_SIZES, cl::vector<size_type>) \
1263  F(cl_program_info, CL_PROGRAM_BINARIES, cl::vector<cl::vector<unsigned char>>) \
1264  \
1265  F(cl_program_build_info, CL_PROGRAM_BUILD_STATUS, cl_build_status) \
1266  F(cl_program_build_info, CL_PROGRAM_BUILD_OPTIONS, string) \
1267  F(cl_program_build_info, CL_PROGRAM_BUILD_LOG, string) \
1268  \
1269  F(cl_kernel_info, CL_KERNEL_FUNCTION_NAME, string) \
1270  F(cl_kernel_info, CL_KERNEL_NUM_ARGS, cl_uint) \
1271  F(cl_kernel_info, CL_KERNEL_REFERENCE_COUNT, cl_uint) \
1272  F(cl_kernel_info, CL_KERNEL_CONTEXT, cl::Context) \
1273  F(cl_kernel_info, CL_KERNEL_PROGRAM, cl::Program) \
1274  \
1275  F(cl_kernel_work_group_info, CL_KERNEL_WORK_GROUP_SIZE, size_type) \
1276  F(cl_kernel_work_group_info, CL_KERNEL_COMPILE_WORK_GROUP_SIZE, cl::detail::size_t_array) \
1277  F(cl_kernel_work_group_info, CL_KERNEL_LOCAL_MEM_SIZE, cl_ulong) \
1278  \
1279  F(cl_command_queue_info, CL_QUEUE_CONTEXT, cl::Context) \
1280  F(cl_command_queue_info, CL_QUEUE_DEVICE, cl::Device) \
1281  F(cl_command_queue_info, CL_QUEUE_REFERENCE_COUNT, cl_uint) \
1282  F(cl_command_queue_info, CL_QUEUE_PROPERTIES, cl_command_queue_properties)
1283 
1284 
1285 #define CL_HPP_PARAM_NAME_INFO_1_1_(F) \
1286  F(cl_context_info, CL_CONTEXT_NUM_DEVICES, cl_uint)\
1287  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_HALF, cl_uint) \
1288  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_CHAR, cl_uint) \
1289  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_SHORT, cl_uint) \
1290  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_INT, cl_uint) \
1291  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_LONG, cl_uint) \
1292  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_FLOAT, cl_uint) \
1293  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_DOUBLE, cl_uint) \
1294  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_HALF, cl_uint) \
1295  F(cl_device_info, CL_DEVICE_OPENCL_C_VERSION, string) \
1296  \
1297  F(cl_mem_info, CL_MEM_ASSOCIATED_MEMOBJECT, cl::Memory) \
1298  F(cl_mem_info, CL_MEM_OFFSET, size_type) \
1299  \
1300  F(cl_kernel_work_group_info, CL_KERNEL_PREFERRED_WORK_GROUP_SIZE_MULTIPLE, size_type) \
1301  F(cl_kernel_work_group_info, CL_KERNEL_PRIVATE_MEM_SIZE, cl_ulong) \
1302  \
1303  F(cl_event_info, CL_EVENT_CONTEXT, cl::Context)
1304 
1305 #define CL_HPP_PARAM_NAME_INFO_1_2_(F) \
1306  F(cl_program_info, CL_PROGRAM_NUM_KERNELS, size_type) \
1307  F(cl_program_info, CL_PROGRAM_KERNEL_NAMES, string) \
1308  \
1309  F(cl_program_build_info, CL_PROGRAM_BINARY_TYPE, cl_program_binary_type) \
1310  \
1311  F(cl_kernel_info, CL_KERNEL_ATTRIBUTES, string) \
1312  \
1313  F(cl_kernel_arg_info, CL_KERNEL_ARG_ADDRESS_QUALIFIER, cl_kernel_arg_address_qualifier) \
1314  F(cl_kernel_arg_info, CL_KERNEL_ARG_ACCESS_QUALIFIER, cl_kernel_arg_access_qualifier) \
1315  F(cl_kernel_arg_info, CL_KERNEL_ARG_TYPE_NAME, string) \
1316  F(cl_kernel_arg_info, CL_KERNEL_ARG_NAME, string) \
1317  F(cl_kernel_arg_info, CL_KERNEL_ARG_TYPE_QUALIFIER, cl_kernel_arg_type_qualifier) \
1318  \
1319  F(cl_device_info, CL_DEVICE_PARENT_DEVICE, cl::Device) \
1320  F(cl_device_info, CL_DEVICE_PARTITION_PROPERTIES, cl::vector<cl_device_partition_property>) \
1321  F(cl_device_info, CL_DEVICE_PARTITION_TYPE, cl::vector<cl_device_partition_property>) \
1322  F(cl_device_info, CL_DEVICE_REFERENCE_COUNT, cl_uint) \
1323  F(cl_device_info, CL_DEVICE_PREFERRED_INTEROP_USER_SYNC, size_type) \
1324  F(cl_device_info, CL_DEVICE_PARTITION_AFFINITY_DOMAIN, cl_device_affinity_domain) \
1325  F(cl_device_info, CL_DEVICE_BUILT_IN_KERNELS, string) \
1326  \
1327  F(cl_image_info, CL_IMAGE_ARRAY_SIZE, size_type) \
1328  F(cl_image_info, CL_IMAGE_NUM_MIP_LEVELS, cl_uint) \
1329  F(cl_image_info, CL_IMAGE_NUM_SAMPLES, cl_uint)
1330 
1331 #define CL_HPP_PARAM_NAME_INFO_2_0_(F) \
1332  F(cl_device_info, CL_DEVICE_QUEUE_ON_HOST_PROPERTIES, cl_command_queue_properties) \
1333  F(cl_device_info, CL_DEVICE_QUEUE_ON_DEVICE_PROPERTIES, cl_command_queue_properties) \
1334  F(cl_device_info, CL_DEVICE_QUEUE_ON_DEVICE_PREFERRED_SIZE, cl_uint) \
1335  F(cl_device_info, CL_DEVICE_QUEUE_ON_DEVICE_MAX_SIZE, cl_uint) \
1336  F(cl_device_info, CL_DEVICE_MAX_ON_DEVICE_QUEUES, cl_uint) \
1337  F(cl_device_info, CL_DEVICE_MAX_ON_DEVICE_EVENTS, cl_uint) \
1338  F(cl_device_info, CL_DEVICE_MAX_PIPE_ARGS, cl_uint) \
1339  F(cl_device_info, CL_DEVICE_PIPE_MAX_ACTIVE_RESERVATIONS, cl_uint) \
1340  F(cl_device_info, CL_DEVICE_PIPE_MAX_PACKET_SIZE, cl_uint) \
1341  F(cl_device_info, CL_DEVICE_SVM_CAPABILITIES, cl_device_svm_capabilities) \
1342  F(cl_device_info, CL_DEVICE_PREFERRED_PLATFORM_ATOMIC_ALIGNMENT, cl_uint) \
1343  F(cl_device_info, CL_DEVICE_PREFERRED_GLOBAL_ATOMIC_ALIGNMENT, cl_uint) \
1344  F(cl_device_info, CL_DEVICE_PREFERRED_LOCAL_ATOMIC_ALIGNMENT, cl_uint) \
1345  F(cl_command_queue_info, CL_QUEUE_SIZE, cl_uint) \
1346  F(cl_mem_info, CL_MEM_USES_SVM_POINTER, cl_bool) \
1347  F(cl_program_build_info, CL_PROGRAM_BUILD_GLOBAL_VARIABLE_TOTAL_SIZE, size_type) \
1348  F(cl_pipe_info, CL_PIPE_PACKET_SIZE, cl_uint) \
1349  F(cl_pipe_info, CL_PIPE_MAX_PACKETS, cl_uint)
1350 
1351 #define CL_HPP_PARAM_NAME_INFO_SUBGROUP_KHR_(F) \
1352  F(cl_kernel_sub_group_info, CL_KERNEL_MAX_SUB_GROUP_SIZE_FOR_NDRANGE_KHR, size_type) \
1353  F(cl_kernel_sub_group_info, CL_KERNEL_SUB_GROUP_COUNT_FOR_NDRANGE_KHR, size_type)
1354 
1355 #define CL_HPP_PARAM_NAME_INFO_IL_KHR_(F) \
1356  F(cl_device_info, CL_DEVICE_IL_VERSION_KHR, string) \
1357  F(cl_program_info, CL_PROGRAM_IL_KHR, cl::vector<unsigned char>)
1358 
1359 #define CL_HPP_PARAM_NAME_INFO_2_1_(F) \
1360  F(cl_platform_info, CL_PLATFORM_HOST_TIMER_RESOLUTION, size_type) \
1361  F(cl_program_info, CL_PROGRAM_IL, cl::vector<unsigned char>) \
1362  F(cl_kernel_info, CL_KERNEL_MAX_NUM_SUB_GROUPS, size_type) \
1363  F(cl_kernel_info, CL_KERNEL_COMPILE_NUM_SUB_GROUPS, size_type) \
1364  F(cl_device_info, CL_DEVICE_MAX_NUM_SUB_GROUPS, cl_uint) \
1365  F(cl_device_info, CL_DEVICE_IL_VERSION, string) \
1366  F(cl_device_info, CL_DEVICE_SUB_GROUP_INDEPENDENT_FORWARD_PROGRESS, cl_bool) \
1367  F(cl_command_queue_info, CL_QUEUE_DEVICE_DEFAULT, cl::DeviceCommandQueue) \
1368  F(cl_kernel_sub_group_info, CL_KERNEL_MAX_SUB_GROUP_SIZE_FOR_NDRANGE, size_type) \
1369  F(cl_kernel_sub_group_info, CL_KERNEL_SUB_GROUP_COUNT_FOR_NDRANGE, size_type) \
1370  F(cl_kernel_sub_group_info, CL_KERNEL_LOCAL_SIZE_FOR_SUB_GROUP_COUNT, cl::detail::size_t_array)
1371 
1372 #define CL_HPP_PARAM_NAME_INFO_2_2_(F) \
1373  F(cl_program_info, CL_PROGRAM_SCOPE_GLOBAL_CTORS_PRESENT, cl_bool) \
1374  F(cl_program_info, CL_PROGRAM_SCOPE_GLOBAL_DTORS_PRESENT, cl_bool)
1375 
1376 #define CL_HPP_PARAM_NAME_DEVICE_FISSION_(F) \
1377  F(cl_device_info, CL_DEVICE_PARENT_DEVICE_EXT, cl_device_id) \
1378  F(cl_device_info, CL_DEVICE_PARTITION_TYPES_EXT, cl::vector<cl_device_partition_property_ext>) \
1379  F(cl_device_info, CL_DEVICE_AFFINITY_DOMAINS_EXT, cl::vector<cl_device_partition_property_ext>) \
1380  F(cl_device_info, CL_DEVICE_REFERENCE_COUNT_EXT , cl_uint) \
1381  F(cl_device_info, CL_DEVICE_PARTITION_STYLE_EXT, cl::vector<cl_device_partition_property_ext>)
1382 
1383 #define CL_HPP_PARAM_NAME_CL_KHR_EXTENDED_VERSIONING_(F) \
1384  F(cl_platform_info, CL_PLATFORM_NUMERIC_VERSION_KHR, cl_version_khr) \
1385  F(cl_platform_info, CL_PLATFORM_EXTENSIONS_WITH_VERSION_KHR, cl::vector<cl_name_version_khr>) \
1386  \
1387  F(cl_device_info, CL_DEVICE_NUMERIC_VERSION_KHR, cl_version_khr) \
1388  F(cl_device_info, CL_DEVICE_OPENCL_C_NUMERIC_VERSION_KHR, cl_version_khr) \
1389  F(cl_device_info, CL_DEVICE_EXTENSIONS_WITH_VERSION_KHR, cl::vector<cl_name_version_khr>) \
1390  F(cl_device_info, CL_DEVICE_ILS_WITH_VERSION_KHR, cl::vector<cl_name_version_khr>) \
1391  F(cl_device_info, CL_DEVICE_BUILT_IN_KERNELS_WITH_VERSION_KHR, cl::vector<cl_name_version_khr>)
1392 
1393 template <typename enum_type, cl_int Name>
1394 struct param_traits {};
1395 
1396 #define CL_HPP_DECLARE_PARAM_TRAITS_(token, param_name, T) \
1397 struct token; \
1398 template<> \
1399 struct param_traits<detail:: token,param_name> \
1400 { \
1401  enum { value = param_name }; \
1402  typedef T param_type; \
1403 };
1404 
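// Illustrative expansion of the macro above for the
// (cl_device_info, CL_DEVICE_NAME, string) row:
//
//     struct cl_device_info;
//     template<>
//     struct param_traits<detail::cl_device_info, CL_DEVICE_NAME>
//     {
//         enum { value = CL_DEVICE_NAME };
//         typedef string param_type;
//     };
//
// Higher-level getInfo<CL_DEVICE_NAME>() wrappers defined later in this
// header use param_traits to deduce both the query token and the C++ type of
// the result at compile time.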
1405 CL_HPP_PARAM_NAME_INFO_1_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1406 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
1407 CL_HPP_PARAM_NAME_INFO_1_1_(CL_HPP_DECLARE_PARAM_TRAITS_)
1408 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
1409 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
1410 CL_HPP_PARAM_NAME_INFO_1_2_(CL_HPP_DECLARE_PARAM_TRAITS_)
1411 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
1412 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
1413 CL_HPP_PARAM_NAME_INFO_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1414 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
1415 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
1416 CL_HPP_PARAM_NAME_INFO_2_1_(CL_HPP_DECLARE_PARAM_TRAITS_)
1417 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 210
1418 #if CL_HPP_TARGET_OPENCL_VERSION >= 220
1419 CL_HPP_PARAM_NAME_INFO_2_2_(CL_HPP_DECLARE_PARAM_TRAITS_)
1420 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 220
1421 
1422 #if defined(CL_HPP_USE_CL_SUB_GROUPS_KHR) && CL_HPP_TARGET_OPENCL_VERSION < 210
1423 CL_HPP_PARAM_NAME_INFO_SUBGROUP_KHR_(CL_HPP_DECLARE_PARAM_TRAITS_)
1424 #endif // #if defined(CL_HPP_USE_CL_SUB_GROUPS_KHR) && CL_HPP_TARGET_OPENCL_VERSION < 210
1425 
1426 #if defined(CL_HPP_USE_IL_KHR)
1427 CL_HPP_PARAM_NAME_INFO_IL_KHR_(CL_HPP_DECLARE_PARAM_TRAITS_)
1428 #endif // #if defined(CL_HPP_USE_IL_KHR)
1429 
1430 
1431 // Flags deprecated in OpenCL 2.0
1432 #define CL_HPP_PARAM_NAME_INFO_1_0_DEPRECATED_IN_2_0_(F) \
1433  F(cl_device_info, CL_DEVICE_QUEUE_PROPERTIES, cl_command_queue_properties)
1434 
1435 #define CL_HPP_PARAM_NAME_INFO_1_1_DEPRECATED_IN_2_0_(F) \
1436  F(cl_device_info, CL_DEVICE_HOST_UNIFIED_MEMORY, cl_bool)
1437 
1438 #define CL_HPP_PARAM_NAME_INFO_1_2_DEPRECATED_IN_2_0_(F) \
1439  F(cl_image_info, CL_IMAGE_BUFFER, cl::Buffer)
1440 
1441 // Include deprecated query flags based on versions
1442 // Only include deprecated 1.0 flags if 2.0 is not active, because of an enum clash
1443 #if CL_HPP_TARGET_OPENCL_VERSION > 100 && CL_HPP_MINIMUM_OPENCL_VERSION < 200 && CL_HPP_TARGET_OPENCL_VERSION < 200
1444 CL_HPP_PARAM_NAME_INFO_1_0_DEPRECATED_IN_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1445 #endif // CL_HPP_TARGET_OPENCL_VERSION > 100 && CL_HPP_MINIMUM_OPENCL_VERSION < 200 && CL_HPP_TARGET_OPENCL_VERSION < 200
1446 #if CL_HPP_TARGET_OPENCL_VERSION > 110 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
1447 CL_HPP_PARAM_NAME_INFO_1_1_DEPRECATED_IN_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1448 #endif // CL_HPP_TARGET_OPENCL_VERSION > 110 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
1449 #if CL_HPP_TARGET_OPENCL_VERSION > 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
1450 CL_HPP_PARAM_NAME_INFO_1_2_DEPRECATED_IN_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1451 #endif // CL_HPP_TARGET_OPENCL_VERSION > 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
1452 
1453 #if defined(CL_HPP_USE_CL_DEVICE_FISSION)
1454 CL_HPP_PARAM_NAME_DEVICE_FISSION_(CL_HPP_DECLARE_PARAM_TRAITS_);
1455 #endif // CL_HPP_USE_CL_DEVICE_FISSION
1456 
1457 #if defined(cl_khr_extended_versioning)
1458 CL_HPP_PARAM_NAME_CL_KHR_EXTENDED_VERSIONING_(CL_HPP_DECLARE_PARAM_TRAITS_);
1459 #endif // cl_khr_extended_versioning
1460 
1461 #ifdef CL_PLATFORM_ICD_SUFFIX_KHR
1462 CL_HPP_DECLARE_PARAM_TRAITS_(cl_platform_info, CL_PLATFORM_ICD_SUFFIX_KHR, string)
1463 #endif
1464 
1465 #ifdef CL_DEVICE_PROFILING_TIMER_OFFSET_AMD
1466 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_PROFILING_TIMER_OFFSET_AMD, cl_ulong)
1467 #endif
1468 
1469 #ifdef CL_DEVICE_GLOBAL_FREE_MEMORY_AMD
1470 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_FREE_MEMORY_AMD, vector<size_type>)
1471 #endif
1472 #ifdef CL_DEVICE_SIMD_PER_COMPUTE_UNIT_AMD
1473 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_SIMD_PER_COMPUTE_UNIT_AMD, cl_uint)
1474 #endif
1475 #ifdef CL_DEVICE_SIMD_WIDTH_AMD
1476 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_SIMD_WIDTH_AMD, cl_uint)
1477 #endif
1478 #ifdef CL_DEVICE_SIMD_INSTRUCTION_WIDTH_AMD
1479 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_SIMD_INSTRUCTION_WIDTH_AMD, cl_uint)
1480 #endif
1481 #ifdef CL_DEVICE_WAVEFRONT_WIDTH_AMD
1482 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_WAVEFRONT_WIDTH_AMD, cl_uint)
1483 #endif
1484 #ifdef CL_DEVICE_GLOBAL_MEM_CHANNELS_AMD
1485 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_MEM_CHANNELS_AMD, cl_uint)
1486 #endif
1487 #ifdef CL_DEVICE_GLOBAL_MEM_CHANNEL_BANKS_AMD
1488 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_MEM_CHANNEL_BANKS_AMD, cl_uint)
1489 #endif
1490 #ifdef CL_DEVICE_GLOBAL_MEM_CHANNEL_BANK_WIDTH_AMD
1491 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_MEM_CHANNEL_BANK_WIDTH_AMD, cl_uint)
1492 #endif
1493 #ifdef CL_DEVICE_LOCAL_MEM_SIZE_PER_COMPUTE_UNIT_AMD
1494 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_LOCAL_MEM_SIZE_PER_COMPUTE_UNIT_AMD, cl_uint)
1495 #endif
1496 #ifdef CL_DEVICE_LOCAL_MEM_BANKS_AMD
1497 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_LOCAL_MEM_BANKS_AMD, cl_uint)
1498 #endif
1499 
1500 #ifdef CL_DEVICE_COMPUTE_UNITS_BITFIELD_ARM
1501 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_COMPUTE_UNITS_BITFIELD_ARM, cl_ulong)
1502 #endif
1503 #ifdef CL_DEVICE_JOB_SLOTS_ARM
1504 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_JOB_SLOTS_ARM, cl_uint)
1505 #endif
1506 
1507 #ifdef CL_DEVICE_COMPUTE_CAPABILITY_MAJOR_NV
1508 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_COMPUTE_CAPABILITY_MAJOR_NV, cl_uint)
1509 #endif
1510 #ifdef CL_DEVICE_COMPUTE_CAPABILITY_MINOR_NV
1511 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_COMPUTE_CAPABILITY_MINOR_NV, cl_uint)
1512 #endif
1513 #ifdef CL_DEVICE_REGISTERS_PER_BLOCK_NV
1514 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_REGISTERS_PER_BLOCK_NV, cl_uint)
1515 #endif
1516 #ifdef CL_DEVICE_WARP_SIZE_NV
1517 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_WARP_SIZE_NV, cl_uint)
1518 #endif
1519 #ifdef CL_DEVICE_GPU_OVERLAP_NV
1520 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GPU_OVERLAP_NV, cl_bool)
1521 #endif
1522 #ifdef CL_DEVICE_KERNEL_EXEC_TIMEOUT_NV
1523 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_KERNEL_EXEC_TIMEOUT_NV, cl_bool)
1524 #endif
1525 #ifdef CL_DEVICE_INTEGRATED_MEMORY_NV
1526 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_INTEGRATED_MEMORY_NV, cl_bool)
1527 #endif
1528 
1529 // Convenience functions
1530 
1531 template <typename Func, typename T>
1532 inline cl_int
1533 getInfo(Func f, cl_uint name, T* param)
1534 {
1535  return getInfoHelper(f, name, param, 0);
1536 }
1537 
1538 template <typename Func, typename Arg0>
1539 struct GetInfoFunctor0
1540 {
1541  Func f_; const Arg0& arg0_;
1542  cl_int operator ()(
1543  cl_uint param, size_type size, void* value, size_type* size_ret)
1544  { return f_(arg0_, param, size, value, size_ret); }
1545 };
1546 
1547 template <typename Func, typename Arg0, typename Arg1>
1548 struct GetInfoFunctor1
1549 {
1550  Func f_; const Arg0& arg0_; const Arg1& arg1_;
1551  cl_int operator ()(
1552  cl_uint param, size_type size, void* value, size_type* size_ret)
1553  { return f_(arg0_, arg1_, param, size, value, size_ret); }
1554 };
1555 
1556 template <typename Func, typename Arg0, typename T>
1557 inline cl_int
1558 getInfo(Func f, const Arg0& arg0, cl_uint name, T* param)
1559 {
1560  GetInfoFunctor0<Func, Arg0> f0 = { f, arg0 };
1561  return getInfoHelper(f0, name, param, 0);
1562 }
1563 
1564 template <typename Func, typename Arg0, typename Arg1, typename T>
1565 inline cl_int
1566 getInfo(Func f, const Arg0& arg0, const Arg1& arg1, cl_uint name, T* param)
1567 {
1568  GetInfoFunctor1<Func, Arg0, Arg1> f0 = { f, arg0, arg1 };
1569  return getInfoHelper(f0, name, param, 0);
1570 }
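// Minimal sketch of how the functor wrappers adapt a C API query that takes a
// leading object handle, such as clGetDeviceInfo, so that the shared
// getInfoHelper machinery only sees the (name, size, value, size_ret) tail.
// dev is assumed to be a valid cl_device_id obtained elsewhere.
//
//     cl::string deviceName;
//     cl_int err = cl::detail::getInfo(&::clGetDeviceInfo, dev,
//                                      CL_DEVICE_NAME, &deviceName);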
1571 
1572 
1573 template<typename T>
1574 struct ReferenceHandler
1575 { };
1576 
1577 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
1578 
1581 template <>
1582 struct ReferenceHandler<cl_device_id>
1583 {
1593  static cl_int retain(cl_device_id device)
1594  { return ::clRetainDevice(device); }
1604  static cl_int release(cl_device_id device)
1605  { return ::clReleaseDevice(device); }
1606 };
1607 #else // CL_HPP_TARGET_OPENCL_VERSION >= 120
1608 
1611 template <>
1612 struct ReferenceHandler<cl_device_id>
1613 {
1614  // cl_device_id does not have retain().
1615  static cl_int retain(cl_device_id)
1616  { return CL_SUCCESS; }
1617  // cl_device_id does not have release().
1618  static cl_int release(cl_device_id)
1619  { return CL_SUCCESS; }
1620 };
1621 #endif // ! (CL_HPP_TARGET_OPENCL_VERSION >= 120)
1622 
1623 template <>
1624 struct ReferenceHandler<cl_platform_id>
1625 {
1626  // cl_platform_id does not have retain().
1627  static cl_int retain(cl_platform_id)
1628  { return CL_SUCCESS; }
1629  // cl_platform_id does not have release().
1630  static cl_int release(cl_platform_id)
1631  { return CL_SUCCESS; }
1632 };
1633 
1634 template <>
1635 struct ReferenceHandler<cl_context>
1636 {
1637  static cl_int retain(cl_context context)
1638  { return ::clRetainContext(context); }
1639  static cl_int release(cl_context context)
1640  { return ::clReleaseContext(context); }
1641 };
1642 
1643 template <>
1644 struct ReferenceHandler<cl_command_queue>
1645 {
1646  static cl_int retain(cl_command_queue queue)
1647  { return ::clRetainCommandQueue(queue); }
1648  static cl_int release(cl_command_queue queue)
1649  { return ::clReleaseCommandQueue(queue); }
1650 };
1651 
1652 template <>
1653 struct ReferenceHandler<cl_mem>
1654 {
1655  static cl_int retain(cl_mem memory)
1656  { return ::clRetainMemObject(memory); }
1657  static cl_int release(cl_mem memory)
1658  { return ::clReleaseMemObject(memory); }
1659 };
1660 
1661 template <>
1662 struct ReferenceHandler<cl_sampler>
1663 {
1664  static cl_int retain(cl_sampler sampler)
1665  { return ::clRetainSampler(sampler); }
1666  static cl_int release(cl_sampler sampler)
1667  { return ::clReleaseSampler(sampler); }
1668 };
1669 
1670 template <>
1671 struct ReferenceHandler<cl_program>
1672 {
1673  static cl_int retain(cl_program program)
1674  { return ::clRetainProgram(program); }
1675  static cl_int release(cl_program program)
1676  { return ::clReleaseProgram(program); }
1677 };
1678 
1679 template <>
1680 struct ReferenceHandler<cl_kernel>
1681 {
1682  static cl_int retain(cl_kernel kernel)
1683  { return ::clRetainKernel(kernel); }
1684  static cl_int release(cl_kernel kernel)
1685  { return ::clReleaseKernel(kernel); }
1686 };
1687 
1688 template <>
1689 struct ReferenceHandler<cl_event>
1690 {
1691  static cl_int retain(cl_event event)
1692  { return ::clRetainEvent(event); }
1693  static cl_int release(cl_event event)
1694  { return ::clReleaseEvent(event); }
1695 };
1696 
1697 
1698 #if CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
1699 // Extracts version number with major in the upper 16 bits, minor in the lower 16
1700 static cl_uint getVersion(const vector<char> &versionInfo)
1701 {
1702  int highVersion = 0;
1703  int lowVersion = 0;
1704  int index = 7;
1705  while(versionInfo[index] != '.' ) {
1706  highVersion *= 10;
1707  highVersion += versionInfo[index]-'0';
1708  ++index;
1709  }
1710  ++index;
1711  while(versionInfo[index] != ' ' && versionInfo[index] != '\0') {
1712  lowVersion *= 10;
1713  lowVersion += versionInfo[index]-'0';
1714  ++index;
1715  }
1716  return (highVersion << 16) | lowVersion;
1717 }
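// Worked example: CL_PLATFORM_VERSION strings have the form
// "OpenCL <major>.<minor> <platform-specific information>", so parsing starts
// at index 7, just past "OpenCL ". For "OpenCL 1.2 <vendor info>" this returns
// (1 << 16) | 2: major 1 in the upper 16 bits, minor 2 in the lower 16.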
1718 
1719 static cl_uint getPlatformVersion(cl_platform_id platform)
1720 {
1721  size_type size = 0;
1722  clGetPlatformInfo(platform, CL_PLATFORM_VERSION, 0, NULL, &size);
1723 
1724  vector<char> versionInfo(size);
1725  clGetPlatformInfo(platform, CL_PLATFORM_VERSION, size, versionInfo.data(), &size);
1726  return getVersion(versionInfo);
1727 }
1728 
1729 static cl_uint getDevicePlatformVersion(cl_device_id device)
1730 {
1731  cl_platform_id platform;
1732  clGetDeviceInfo(device, CL_DEVICE_PLATFORM, sizeof(platform), &platform, NULL);
1733  return getPlatformVersion(platform);
1734 }
1735 
1736 static cl_uint getContextPlatformVersion(cl_context context)
1737 {
1738  // The platform cannot be queried directly, so we first have to grab a
1739  // device from the context and obtain its platform
1740  size_type size = 0;
1741  clGetContextInfo(context, CL_CONTEXT_DEVICES, 0, NULL, &size);
1742  if (size == 0)
1743  return 0;
1744  vector<cl_device_id> devices(size/sizeof(cl_device_id));
1745  clGetContextInfo(context, CL_CONTEXT_DEVICES, size, devices.data(), NULL);
1746  return getDevicePlatformVersion(devices[0]);
1747 }
1748 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
1749 
1750 template <typename T>
1751 class Wrapper
1752 {
1753 public:
1754  typedef T cl_type;
1755 
1756 protected:
1757  cl_type object_;
1758 
1759 public:
1760  Wrapper() : object_(NULL) { }
1761 
1762  Wrapper(const cl_type &obj, bool retainObject) : object_(obj)
1763  {
1764  if (retainObject) {
1765  detail::errHandler(retain(), __RETAIN_ERR);
1766  }
1767  }
1768 
1769  ~Wrapper()
1770  {
1771  if (object_ != NULL) { release(); }
1772  }
1773 
1774  Wrapper(const Wrapper<cl_type>& rhs)
1775  {
1776  object_ = rhs.object_;
1777  detail::errHandler(retain(), __RETAIN_ERR);
1778  }
1779 
1780  Wrapper(Wrapper<cl_type>&& rhs) CL_HPP_NOEXCEPT_
1781  {
1782  object_ = rhs.object_;
1783  rhs.object_ = NULL;
1784  }
1785 
1786  Wrapper<cl_type>& operator = (const Wrapper<cl_type>& rhs)
1787  {
1788  if (this != &rhs) {
1789  detail::errHandler(release(), __RELEASE_ERR);
1790  object_ = rhs.object_;
1791  detail::errHandler(retain(), __RETAIN_ERR);
1792  }
1793  return *this;
1794  }
1795 
1796  Wrapper<cl_type>& operator = (Wrapper<cl_type>&& rhs)
1797  {
1798  if (this != &rhs) {
1799  detail::errHandler(release(), __RELEASE_ERR);
1800  object_ = rhs.object_;
1801  rhs.object_ = NULL;
1802  }
1803  return *this;
1804  }
1805 
1806  Wrapper<cl_type>& operator = (const cl_type &rhs)
1807  {
1808  detail::errHandler(release(), __RELEASE_ERR);
1809  object_ = rhs;
1810  return *this;
1811  }
1812 
1813  const cl_type& operator ()() const { return object_; }
1814 
1815  cl_type& operator ()() { return object_; }
1816 
1817  cl_type get() const { return object_; }
1818 
1819 protected:
1820  template<typename Func, typename U>
1821  friend inline cl_int getInfoHelper(Func, cl_uint, U*, int, typename U::cl_type);
1822 
1823  cl_int retain() const
1824  {
1825  if (object_ != nullptr) {
1826  return ReferenceHandler<cl_type>::retain(object_);
1827  }
1828  else {
1829  return CL_SUCCESS;
1830  }
1831  }
1832 
1833  cl_int release() const
1834  {
1835  if (object_ != nullptr) {
1836  return ReferenceHandler<cl_type>::release(object_);
1837  }
1838  else {
1839  return CL_SUCCESS;
1840  }
1841  }
1842 };
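// Sketch of the ownership rules this wrapper gives every reference-counted
// binding type defined later (Context, CommandQueue, Memory, ...): copying
// retains, moving transfers the handle without touching the reference count,
// and destruction releases. Assuming an existing cl::Context ctx:
//
//     cl::Context a = ctx;           // clRetainContext on the underlying handle
//     cl::Context b = std::move(a);  // handle moves; a no longer owns it
//     // b releases the handle when it goes out of scope
//
// The (object, retainObject) constructor retains only when retainObject is
// true, so callers choose whether the wrapper adds its own reference or
// adopts the one it is handed.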
1843 
1844 template <>
1845 class Wrapper<cl_device_id>
1846 {
1847 public:
1848  typedef cl_device_id cl_type;
1849 
1850 protected:
1851  cl_type object_;
1852  bool referenceCountable_;
1853 
1854  static bool isReferenceCountable(cl_device_id device)
1855  {
1856  bool retVal = false;
1857 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
1858 #if CL_HPP_MINIMUM_OPENCL_VERSION < 120
1859  if (device != NULL) {
1860  int version = getDevicePlatformVersion(device);
1861  if(version > ((1 << 16) + 1)) {
1862  retVal = true;
1863  }
1864  }
1865 #else // CL_HPP_MINIMUM_OPENCL_VERSION < 120
1866  retVal = true;
1867 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 120
1868 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
1869  return retVal;
1870  }
1871 
1872 public:
1873  Wrapper() : object_(NULL), referenceCountable_(false)
1874  {
1875  }
1876 
1877  Wrapper(const cl_type &obj, bool retainObject) :
1878  object_(obj),
1879  referenceCountable_(false)
1880  {
1881  referenceCountable_ = isReferenceCountable(obj);
1882 
1883  if (retainObject) {
1884  detail::errHandler(retain(), __RETAIN_ERR);
1885  }
1886  }
1887 
1888  ~Wrapper()
1889  {
1890  release();
1891  }
1892 
1893  Wrapper(const Wrapper<cl_type>& rhs)
1894  {
1895  object_ = rhs.object_;
1896  referenceCountable_ = isReferenceCountable(object_);
1897  detail::errHandler(retain(), __RETAIN_ERR);
1898  }
1899 
1900  Wrapper(Wrapper<cl_type>&& rhs) CL_HPP_NOEXCEPT_
1901  {
1902  object_ = rhs.object_;
1903  referenceCountable_ = rhs.referenceCountable_;
1904  rhs.object_ = NULL;
1905  rhs.referenceCountable_ = false;
1906  }
1907 
1908  Wrapper<cl_type>& operator = (const Wrapper<cl_type>& rhs)
1909  {
1910  if (this != &rhs) {
1911  detail::errHandler(release(), __RELEASE_ERR);
1912  object_ = rhs.object_;
1913  referenceCountable_ = rhs.referenceCountable_;
1914  detail::errHandler(retain(), __RETAIN_ERR);
1915  }
1916  return *this;
1917  }
1918 
1919  Wrapper<cl_type>& operator = (Wrapper<cl_type>&& rhs)
1920  {
1921  if (this != &rhs) {
1922  detail::errHandler(release(), __RELEASE_ERR);
1923  object_ = rhs.object_;
1924  referenceCountable_ = rhs.referenceCountable_;
1925  rhs.object_ = NULL;
1926  rhs.referenceCountable_ = false;
1927  }
1928  return *this;
1929  }
1930 
1931  Wrapper<cl_type>& operator = (const cl_type &rhs)
1932  {
1933  detail::errHandler(release(), __RELEASE_ERR);
1934  object_ = rhs;
1935  referenceCountable_ = isReferenceCountable(object_);
1936  return *this;
1937  }
1938 
1939  const cl_type& operator ()() const { return object_; }
1940 
1941  cl_type& operator ()() { return object_; }
1942 
1943  cl_type get() const { return object_; }
1944 
1945 protected:
1946  template<typename Func, typename U>
1947  friend inline cl_int getInfoHelper(Func, cl_uint, U*, int, typename U::cl_type);
1948 
1949  template<typename Func, typename U>
1950  friend inline cl_int getInfoHelper(Func, cl_uint, vector<U>*, int, typename U::cl_type);
1951 
1952  cl_int retain() const
1953  {
1954  if( object_ != nullptr && referenceCountable_ ) {
1955  return ReferenceHandler<cl_type>::retain(object_);
1956  }
1957  else {
1958  return CL_SUCCESS;
1959  }
1960  }
1961 
1962  cl_int release() const
1963  {
1964  if (object_ != nullptr && referenceCountable_) {
1965  return ReferenceHandler<cl_type>::release(object_);
1966  }
1967  else {
1968  return CL_SUCCESS;
1969  }
1970  }
1971 };
1972 
1973 template <typename T>
1974 inline bool operator==(const Wrapper<T> &lhs, const Wrapper<T> &rhs)
1975 {
1976  return lhs() == rhs();
1977 }
1978 
1979 template <typename T>
1980 inline bool operator!=(const Wrapper<T> &lhs, const Wrapper<T> &rhs)
1981 {
1982  return !operator==(lhs, rhs);
1983 }
1984 
1985 } // namespace detail
1987 
1988 
1989 using BuildLogType = vector<std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, CL_PROGRAM_BUILD_LOG>::param_type>>;
1990 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
1991 
1994 class BuildError : public Error
1995 {
1996 private:
1997  BuildLogType buildLogs;
1998 public:
1999  BuildError(cl_int err, const char * errStr, const BuildLogType &vec) : Error(err, errStr), buildLogs(vec)
2000  {
2001  }
2002 
2003  BuildLogType getBuildLog() const
2004  {
2005  return buildLogs;
2006  }
2007 };
2008 namespace detail {
2009  static inline cl_int buildErrHandler(
2010  cl_int err,
2011  const char * errStr,
2012  const BuildLogType &buildLogs)
2013  {
2014  if (err != CL_SUCCESS) {
2015  throw BuildError(err, errStr, buildLogs);
2016  }
2017  return err;
2018  }
2019 } // namespace detail
2020 
2021 #else
2022 namespace detail {
2023  static inline cl_int buildErrHandler(
2024  cl_int err,
2025  const char * errStr,
2026  const BuildLogType &buildLogs)
2027  {
2028  (void)buildLogs; // suppress unused variable warning
2029  (void)errStr;
2030  return err;
2031  }
2032 } // namespace detail
2033 #endif // #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2034 
2035 
2041 struct ImageFormat : public cl_image_format
2042 {
2044  ImageFormat(){}
2045 
2047  ImageFormat(cl_channel_order order, cl_channel_type type)
2048  {
2049  image_channel_order = order;
2050  image_channel_data_type = type;
2051  }
2052 
2054  ImageFormat& operator = (const ImageFormat& rhs)
2055  {
2056  if (this != &rhs) {
2057  this->image_channel_data_type = rhs.image_channel_data_type;
2058  this->image_channel_order = rhs.image_channel_order;
2059  }
2060  return *this;
2061  }
2062 };
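// Illustrative usage sketch (editor's addition, not part of the original
// header): an 8-bit-per-channel RGBA format built from standard OpenCL enums.
#if 0 // example only
cl::ImageFormat rgba8(CL_RGBA, CL_UNORM_INT8);
#endif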
2063 
2071 class Device : public detail::Wrapper<cl_device_id>
2072 {
2073 private:
2074  static std::once_flag default_initialized_;
2075  static Device default_;
2076  static cl_int default_error_;
2077 
2083  static void makeDefault();
2084 
2090  static void makeDefaultProvided(const Device &p) {
2091  default_ = p;
2092  }
2093 
2094 public:
2095 #ifdef CL_HPP_UNIT_TEST_ENABLE
2096 
2102  static void unitTestClearDefault() {
2103  default_ = Device();
2104  }
2105 #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
2106 
2108  Device() : detail::Wrapper<cl_type>() { }
2109 
2114  explicit Device(const cl_device_id &device, bool retainObject = false) :
2115  detail::Wrapper<cl_type>(device, retainObject) { }
2116 
2121  static Device getDefault(
2122  cl_int *errResult = NULL)
2123  {
2124  std::call_once(default_initialized_, makeDefault);
2125  detail::errHandler(default_error_);
2126  if (errResult != NULL) {
2127  *errResult = default_error_;
2128  }
2129  return default_;
2130  }
2131 
2139  static Device setDefault(const Device &default_device)
2140  {
2141  std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_device));
2142  detail::errHandler(default_error_);
2143  return default_;
2144  }
2145 
2150  Device& operator = (const cl_device_id& rhs)
2151  {
2152  detail::Wrapper<cl_type>::operator=(rhs);
2153  return *this;
2154  }
2155 
2159  Device(const Device& dev) : detail::Wrapper<cl_type>(dev) {}
2160 
2164  Device& operator = (const Device &dev)
2165  {
2166  detail::Wrapper<cl_type>::operator=(dev);
2167  return *this;
2168  }
2169 
2173  Device(Device&& dev) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(dev)) {}
2174 
2178  Device& operator = (Device &&dev)
2179  {
2180  detail::Wrapper<cl_type>::operator=(std::move(dev));
2181  return *this;
2182  }
2183 
2185  template <typename T>
2186  cl_int getInfo(cl_device_info name, T* param) const
2187  {
2188  return detail::errHandler(
2189  detail::getInfo(&::clGetDeviceInfo, object_, name, param),
2190  __GET_DEVICE_INFO_ERR);
2191  }
2192 
2194  template <cl_device_info name> typename
2195  detail::param_traits<detail::cl_device_info, name>::param_type
2196  getInfo(cl_int* err = NULL) const
2197  {
2198  typename detail::param_traits<
2199  detail::cl_device_info, name>::param_type param;
2200  cl_int result = getInfo(name, &param);
2201  if (err != NULL) {
2202  *err = result;
2203  }
2204  return param;
2205  }
2206 
2207 
2208 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
2209 
2215  cl_ulong getHostTimer(cl_int *error = nullptr)
2216  {
2217  cl_ulong retVal = 0;
2218  cl_int err =
2219  clGetHostTimer(this->get(), &retVal);
2220  detail::errHandler(
2221  err,
2222  __GET_HOST_TIMER_ERR);
2223  if (error) {
2224  *error = err;
2225  }
2226  return retVal;
2227  }
2228 
2239  std::pair<cl_ulong, cl_ulong> getDeviceAndHostTimer(cl_int *error = nullptr)
2240  {
2241  std::pair<cl_ulong, cl_ulong> retVal;
2242  cl_int err =
2243  clGetDeviceAndHostTimer(this->get(), &(retVal.first), &(retVal.second));
2244  detail::errHandler(
2245  err,
2246  __GET_DEVICE_AND_HOST_TIMER_ERR);
2247  if (error) {
2248  *error = err;
2249  }
2250  return retVal;
2251  }
2252 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
2253 
2257 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
2258  cl_int createSubDevices(
2260  const cl_device_partition_property * properties,
2261  vector<Device>* devices)
2262  {
2263  cl_uint n = 0;
2264  cl_int err = clCreateSubDevices(object_, properties, 0, NULL, &n);
2265  if (err != CL_SUCCESS) {
2266  return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
2267  }
2268 
2269  vector<cl_device_id> ids(n);
2270  err = clCreateSubDevices(object_, properties, n, ids.data(), NULL);
2271  if (err != CL_SUCCESS) {
2272  return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
2273  }
2274 
2275  // Cannot trivially assign because we need to capture intermediates
2276  // with safe construction
2277  if (devices) {
2278  devices->resize(ids.size());
2279 
2280  // Assign to the output vector, constructing each wrapper so that it
2281  // correctly captures the underlying CL object
2282  for (size_type i = 0; i < ids.size(); i++) {
2283  // We do not need to retain because this device is being created
2284  // by the runtime
2285  (*devices)[i] = Device(ids[i], false);
2286  }
2287  }
2288 
2289  return CL_SUCCESS;
2290  }
2291 #elif defined(CL_HPP_USE_CL_DEVICE_FISSION)
2292 
2296  cl_int createSubDevices(
2297  const cl_device_partition_property_ext * properties,
2298  vector<Device>* devices)
2299  {
2300  typedef CL_API_ENTRY cl_int
2301  ( CL_API_CALL * PFN_clCreateSubDevicesEXT)(
2302  cl_device_id /*in_device*/,
2303  const cl_device_partition_property_ext * /* properties */,
2304  cl_uint /*num_entries*/,
2305  cl_device_id * /*out_devices*/,
2306  cl_uint * /*num_devices*/ ) CL_EXT_SUFFIX__VERSION_1_1;
2307 
2308  static PFN_clCreateSubDevicesEXT pfn_clCreateSubDevicesEXT = NULL;
2309  CL_HPP_INIT_CL_EXT_FCN_PTR_(clCreateSubDevicesEXT);
2310 
2311  cl_uint n = 0;
2312  cl_int err = pfn_clCreateSubDevicesEXT(object_, properties, 0, NULL, &n);
2313  if (err != CL_SUCCESS) {
2314  return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
2315  }
2316 
2317  vector<cl_device_id> ids(n);
2318  err = pfn_clCreateSubDevicesEXT(object_, properties, n, ids.data(), NULL);
2319  if (err != CL_SUCCESS) {
2320  return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
2321  }
2322  // Cannot trivially assign because we need to capture intermediates
2323  // with safe construction
2324  if (devices) {
2325  devices->resize(ids.size());
2326 
2327  // Assign to the output vector, constructing each wrapper so that it
2328  // correctly captures the underlying CL object
2329  for (size_type i = 0; i < ids.size(); i++) {
2330  // We do not need to retain because this device is being created
2331  // by the runtime
2332  (*devices)[i] = Device(ids[i], false);
2333  }
2334  }
2335  return CL_SUCCESS;
2336  }
2337 #endif // defined(CL_HPP_USE_CL_DEVICE_FISSION)
2338 };
2339 
2340 CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag Device::default_initialized_;
2341 CL_HPP_DEFINE_STATIC_MEMBER_ Device Device::default_;
2342 CL_HPP_DEFINE_STATIC_MEMBER_ cl_int Device::default_error_ = CL_SUCCESS;
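// Illustrative usage sketch (editor's addition, not part of the original
// header): querying the default device, which is taken from the default
// context created elsewhere in these bindings. Assumes a working OpenCL
// platform is installed; error handling is reduced to a single status code.
#if 0 // example only
cl_int err = CL_SUCCESS;
cl::Device dev = cl::Device::getDefault(&err);
cl::string name = dev.getInfo<CL_DEVICE_NAME>();             // human-readable device name
cl_uint units   = dev.getInfo<CL_DEVICE_MAX_COMPUTE_UNITS>(); // compute unit count
#endif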
2343 
2351 class Platform : public detail::Wrapper<cl_platform_id>
2352 {
2353 private:
2354  static std::once_flag default_initialized_;
2355  static Platform default_;
2356  static cl_int default_error_;
2357 
2363  static void makeDefault() {
2364  /* Throwing an exception from a call_once invocation does not do
2365  * what we wish, so we catch it and save the error.
2366  */
2367 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2368  try
2369 #endif
2370  {
2371  // If default wasn't passed, generate one
2372  // Otherwise set it
2373  cl_uint n = 0;
2374 
2375  cl_int err = ::clGetPlatformIDs(0, NULL, &n);
2376  if (err != CL_SUCCESS) {
2377  default_error_ = err;
2378  return;
2379  }
2380  if (n == 0) {
2381  default_error_ = CL_INVALID_PLATFORM;
2382  return;
2383  }
2384 
2385  vector<cl_platform_id> ids(n);
2386  err = ::clGetPlatformIDs(n, ids.data(), NULL);
2387  if (err != CL_SUCCESS) {
2388  default_error_ = err;
2389  return;
2390  }
2391 
2392  default_ = Platform(ids[0]);
2393  }
2394 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2395  catch (cl::Error &e) {
2396  default_error_ = e.err();
2397  }
2398 #endif
2399  }
2400 
2406  static void makeDefaultProvided(const Platform &p) {
2407  default_ = p;
2408  }
2409 
2410 public:
2411 #ifdef CL_HPP_UNIT_TEST_ENABLE
2412 
2418  static void unitTestClearDefault() {
2419  default_ = Platform();
2420  }
2421 #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
2422 
2424  Platform() : detail::Wrapper<cl_type>() { }
2425 
2433  explicit Platform(const cl_platform_id &platform, bool retainObject = false) :
2434  detail::Wrapper<cl_type>(platform, retainObject) { }
2435 
2440  Platform& operator = (const cl_platform_id& rhs)
2441  {
2442  detail::Wrapper<cl_type>::operator=(rhs);
2443  return *this;
2444  }
2445 
2446  static Platform getDefault(
2447  cl_int *errResult = NULL)
2448  {
2449  std::call_once(default_initialized_, makeDefault);
2450  detail::errHandler(default_error_);
2451  if (errResult != NULL) {
2452  *errResult = default_error_;
2453  }
2454  return default_;
2455  }
2456 
2464  static Platform setDefault(const Platform &default_platform)
2465  {
2466  std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_platform));
2467  detail::errHandler(default_error_);
2468  return default_;
2469  }
2470 
2472  template <typename T>
2473  cl_int getInfo(cl_platform_info name, T* param) const
2474  {
2475  return detail::errHandler(
2476  detail::getInfo(&::clGetPlatformInfo, object_, name, param),
2477  __GET_PLATFORM_INFO_ERR);
2478  }
2479 
2481  template <cl_platform_info name> typename
2482  detail::param_traits<detail::cl_platform_info, name>::param_type
2483  getInfo(cl_int* err = NULL) const
2484  {
2485  typename detail::param_traits<
2486  detail::cl_platform_info, name>::param_type param;
2487  cl_int result = getInfo(name, &param);
2488  if (err != NULL) {
2489  *err = result;
2490  }
2491  return param;
2492  }
2493 
2498  cl_int getDevices(
2499  cl_device_type type,
2500  vector<Device>* devices) const
2501  {
2502  cl_uint n = 0;
2503  if( devices == NULL ) {
2504  return detail::errHandler(CL_INVALID_ARG_VALUE, __GET_DEVICE_IDS_ERR);
2505  }
2506  cl_int err = ::clGetDeviceIDs(object_, type, 0, NULL, &n);
2507  if (err != CL_SUCCESS && err != CL_DEVICE_NOT_FOUND) {
2508  return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
2509  }
2510 
2511  vector<cl_device_id> ids(n);
2512  if (n>0) {
2513  err = ::clGetDeviceIDs(object_, type, n, ids.data(), NULL);
2514  if (err != CL_SUCCESS) {
2515  return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
2516  }
2517  }
2518 
2519  // Cannot trivially assign because we need to capture intermediates
2520  // with safe construction
2521  // We must retain things we obtain from the API to avoid releasing
2522  // API-owned objects.
2523  if (devices) {
2524  devices->resize(ids.size());
2525 
2526  // Assign to param, constructing with retain behaviour
2527  // to correctly capture each underlying CL object
2528  for (size_type i = 0; i < ids.size(); i++) {
2529  (*devices)[i] = Device(ids[i], true);
2530  }
2531  }
2532  return CL_SUCCESS;
2533  }
2534 
2535 #if defined(CL_HPP_USE_DX_INTEROP)
2536 
2559  cl_int getDevices(
2560  cl_d3d10_device_source_khr d3d_device_source,
2561  void * d3d_object,
2562  cl_d3d10_device_set_khr d3d_device_set,
2563  vector<Device>* devices) const
2564  {
2565  typedef CL_API_ENTRY cl_int (CL_API_CALL *PFN_clGetDeviceIDsFromD3D10KHR)(
2566  cl_platform_id platform,
2567  cl_d3d10_device_source_khr d3d_device_source,
2568  void * d3d_object,
2569  cl_d3d10_device_set_khr d3d_device_set,
2570  cl_uint num_entries,
2571  cl_device_id * devices,
2572  cl_uint* num_devices);
2573 
2574  if( devices == NULL ) {
2575  return detail::errHandler(CL_INVALID_ARG_VALUE, __GET_DEVICE_IDS_ERR);
2576  }
2577 
2578  static PFN_clGetDeviceIDsFromD3D10KHR pfn_clGetDeviceIDsFromD3D10KHR = NULL;
2579  CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(object_, clGetDeviceIDsFromD3D10KHR);
2580 
2581  cl_uint n = 0;
2582  cl_int err = pfn_clGetDeviceIDsFromD3D10KHR(
2583  object_,
2584  d3d_device_source,
2585  d3d_object,
2586  d3d_device_set,
2587  0,
2588  NULL,
2589  &n);
2590  if (err != CL_SUCCESS) {
2591  return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
2592  }
2593 
2594  vector<cl_device_id> ids(n);
2595  err = pfn_clGetDeviceIDsFromD3D10KHR(
2596  object_,
2597  d3d_device_source,
2598  d3d_object,
2599  d3d_device_set,
2600  n,
2601  ids.data(),
2602  NULL);
2603  if (err != CL_SUCCESS) {
2604  return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
2605  }
2606 
2607  // Cannot trivially assign because we need to capture intermediates
2608  // with safe construction
2609  // We must retain things we obtain from the API to avoid releasing
2610  // API-owned objects.
2611  if (devices) {
2612  devices->resize(ids.size());
2613 
2614  // Assign to param, constructing with retain behaviour
2615  // to correctly capture each underlying CL object
2616  for (size_type i = 0; i < ids.size(); i++) {
2617  (*devices)[i] = Device(ids[i], true);
2618  }
2619  }
2620  return CL_SUCCESS;
2621  }
2622 #endif
2623 
2628  static cl_int get(
2629  vector<Platform>* platforms)
2630  {
2631  cl_uint n = 0;
2632 
2633  if( platforms == NULL ) {
2634  return detail::errHandler(CL_INVALID_ARG_VALUE, __GET_PLATFORM_IDS_ERR);
2635  }
2636 
2637  cl_int err = ::clGetPlatformIDs(0, NULL, &n);
2638  if (err != CL_SUCCESS) {
2639  return detail::errHandler(err, __GET_PLATFORM_IDS_ERR);
2640  }
2641 
2642  vector<cl_platform_id> ids(n);
2643  err = ::clGetPlatformIDs(n, ids.data(), NULL);
2644  if (err != CL_SUCCESS) {
2645  return detail::errHandler(err, __GET_PLATFORM_IDS_ERR);
2646  }
2647 
2648  if (platforms) {
2649  platforms->resize(ids.size());
2650 
2651  // Platforms don't reference count
2652  for (size_type i = 0; i < ids.size(); i++) {
2653  (*platforms)[i] = Platform(ids[i]);
2654  }
2655  }
2656  return CL_SUCCESS;
2657  }
2658 
2663  static cl_int get(
2664  Platform * platform)
2665  {
2666  cl_int err;
2667  Platform default_platform = Platform::getDefault(&err);
2668  if (platform) {
2669  *platform = default_platform;
2670  }
2671  return err;
2672  }
2673 
2682  static Platform get(
2683  cl_int * errResult = NULL)
2684  {
2685  cl_int err;
2686  Platform default_platform = Platform::getDefault(&err);
2687  if (errResult) {
2688  *errResult = err;
2689  }
2690  return default_platform;
2691  }
2692 
2693 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
2694  cl_int
2696  unloadCompiler()
2697  {
2698  return ::clUnloadPlatformCompiler(object_);
2699  }
2700 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
2701 }; // class Platform
2702 
2703 CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag Platform::default_initialized_;
2704 CL_HPP_DEFINE_STATIC_MEMBER_ Platform Platform::default_;
2705 CL_HPP_DEFINE_STATIC_MEMBER_ cl_int Platform::default_error_ = CL_SUCCESS;
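// Illustrative usage sketch (editor's addition, not part of the original
// header): enumerating platforms and asking the first one for its GPU devices.
// Assumes at least one platform is installed; error handling is abbreviated.
#if 0 // example only
cl::vector<cl::Platform> platforms;
cl::Platform::get(&platforms);
cl::string vendor = platforms[0].getInfo<CL_PLATFORM_VENDOR>();
cl::vector<cl::Device> gpus;
platforms[0].getDevices(CL_DEVICE_TYPE_GPU, &gpus); // returns CL_SUCCESS even if none are found
#endif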
2706 
2707 
2711 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
2712 
2716 inline CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_int
2717 UnloadCompiler() CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED;
2718 inline cl_int
2719  UnloadCompiler()
2720  {
2721  return ::clUnloadCompiler();
2722 }
2723 #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
2724 
2733 class Context
2734  : public detail::Wrapper<cl_context>
2735 {
2736 private:
2737  static std::once_flag default_initialized_;
2738  static Context default_;
2739  static cl_int default_error_;
2740 
2746  static void makeDefault() {
2747  /* Throwing an exception from a call_once invocation does not do
2748  * what we wish, so we catch it and save the error.
2749  */
2750 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2751  try
2752 #endif
2753  {
2754 #if !defined(__APPLE__) && !defined(__MACOS)
2755  const Platform &p = Platform::getDefault();
2756  cl_platform_id defaultPlatform = p();
2757  cl_context_properties properties[3] = {
2758  CL_CONTEXT_PLATFORM, (cl_context_properties)defaultPlatform, 0
2759  };
2760 #else // #if !defined(__APPLE__) && !defined(__MACOS)
2761  cl_context_properties *properties = nullptr;
2762 #endif // #if !defined(__APPLE__) && !defined(__MACOS)
2763 
2764  default_ = Context(
2765  CL_DEVICE_TYPE_DEFAULT,
2766  properties,
2767  NULL,
2768  NULL,
2769  &default_error_);
2770  }
2771 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2772  catch (cl::Error &e) {
2773  default_error_ = e.err();
2774  }
2775 #endif
2776  }
2777 
2778 
2784  static void makeDefaultProvided(const Context &c) {
2785  default_ = c;
2786  }
2787 
2788 public:
2789 #ifdef CL_HPP_UNIT_TEST_ENABLE
2790 
2796  static void unitTestClearDefault() {
2797  default_ = Context();
2798  }
2799 #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
2800 
2805  Context(
2806  const vector<Device>& devices,
2807  cl_context_properties* properties = NULL,
2808  void (CL_CALLBACK * notifyFptr)(
2809  const char *,
2810  const void *,
2811  size_type,
2812  void *) = NULL,
2813  void* data = NULL,
2814  cl_int* err = NULL)
2815  {
2816  cl_int error;
2817 
2818  size_type numDevices = devices.size();
2819  vector<cl_device_id> deviceIDs(numDevices);
2820 
2821  for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
2822  deviceIDs[deviceIndex] = (devices[deviceIndex])();
2823  }
2824 
2825  object_ = ::clCreateContext(
2826  properties, (cl_uint) numDevices,
2827  deviceIDs.data(),
2828  notifyFptr, data, &error);
2829 
2830  detail::errHandler(error, __CREATE_CONTEXT_ERR);
2831  if (err != NULL) {
2832  *err = error;
2833  }
2834  }
2835 
2836  Context(
2837  const Device& device,
2838  cl_context_properties* properties = NULL,
2839  void (CL_CALLBACK * notifyFptr)(
2840  const char *,
2841  const void *,
2842  size_type,
2843  void *) = NULL,
2844  void* data = NULL,
2845  cl_int* err = NULL)
2846  {
2847  cl_int error;
2848 
2849  cl_device_id deviceID = device();
2850 
2851  object_ = ::clCreateContext(
2852  properties, 1,
2853  &deviceID,
2854  notifyFptr, data, &error);
2855 
2856  detail::errHandler(error, __CREATE_CONTEXT_ERR);
2857  if (err != NULL) {
2858  *err = error;
2859  }
2860  }
2861 
2866  Context(
2867  cl_device_type type,
2868  cl_context_properties* properties = NULL,
2869  void (CL_CALLBACK * notifyFptr)(
2870  const char *,
2871  const void *,
2872  size_type,
2873  void *) = NULL,
2874  void* data = NULL,
2875  cl_int* err = NULL)
2876  {
2877  cl_int error;
2878 
2879 #if !defined(__APPLE__) && !defined(__MACOS)
2880  cl_context_properties prop[4] = {CL_CONTEXT_PLATFORM, 0, 0, 0 };
2881 
2882  if (properties == NULL) {
2883  // Get a valid platform ID as we cannot send in a blank one
2884  vector<Platform> platforms;
2885  error = Platform::get(&platforms);
2886  if (error != CL_SUCCESS) {
2887  detail::errHandler(error, __CREATE_CONTEXT_FROM_TYPE_ERR);
2888  if (err != NULL) {
2889  *err = error;
2890  }
2891  return;
2892  }
2893 
2894  // Check the platforms we found for a device of our specified type
2895  cl_context_properties platform_id = 0;
2896  for (unsigned int i = 0; i < platforms.size(); i++) {
2897 
2898  vector<Device> devices;
2899 
2900 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2901  try {
2902 #endif
2903 
2904  error = platforms[i].getDevices(type, &devices);
2905 
2906 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2907  } catch (cl::Error& e) {
2908  error = e.err();
2909  }
2910  // Catch here when exceptions are enabled: we don't want to bail out if the first platform has no devices of this type
2911  // We do error checking next anyway, and can throw there if needed
2912 #endif
2913 
2914  // Only squash CL_SUCCESS and CL_DEVICE_NOT_FOUND
2915  if (error != CL_SUCCESS && error != CL_DEVICE_NOT_FOUND) {
2916  detail::errHandler(error, __CREATE_CONTEXT_FROM_TYPE_ERR);
2917  if (err != NULL) {
2918  *err = error;
2919  }
2920  }
2921 
2922  if (devices.size() > 0) {
2923  platform_id = (cl_context_properties)platforms[i]();
2924  break;
2925  }
2926  }
2927 
2928  if (platform_id == 0) {
2929  detail::errHandler(CL_DEVICE_NOT_FOUND, __CREATE_CONTEXT_FROM_TYPE_ERR);
2930  if (err != NULL) {
2931  *err = CL_DEVICE_NOT_FOUND;
2932  }
2933  return;
2934  }
2935 
2936  prop[1] = platform_id;
2937  properties = &prop[0];
2938  }
2939 #endif
2940  object_ = ::clCreateContextFromType(
2941  properties, type, notifyFptr, data, &error);
2942 
2943  detail::errHandler(error, __CREATE_CONTEXT_FROM_TYPE_ERR);
2944  if (err != NULL) {
2945  *err = error;
2946  }
2947  }
2948 
2952  Context(const Context& ctx) : detail::Wrapper<cl_type>(ctx) {}
2953 
2957  Context& operator = (const Context& ctx)
2958  {
2959  detail::Wrapper<cl_type>::operator=(ctx);
2960  return *this;
2961  }
2962 
2966  Context(Context&& ctx) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(ctx)) {}
2967 
2971  Context& operator = (Context &&ctx)
2972  {
2973  detail::Wrapper<cl_type>::operator=(std::move(ctx));
2974  return *this;
2975  }
2976 
2977 
2982  static Context getDefault(cl_int * err = NULL)
2983  {
2984  std::call_once(default_initialized_, makeDefault);
2985  detail::errHandler(default_error_);
2986  if (err != NULL) {
2987  *err = default_error_;
2988  }
2989  return default_;
2990  }
2991 
2999  static Context setDefault(const Context &default_context)
3000  {
3001  std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_context));
3002  detail::errHandler(default_error_);
3003  return default_;
3004  }
3005 
3007  Context() : detail::Wrapper<cl_type>() { }
3008 
3014  explicit Context(const cl_context& context, bool retainObject = false) :
3015  detail::Wrapper<cl_type>(context, retainObject) { }
3016 
3022  Context& operator = (const cl_context& rhs)
3023  {
3024  detail::Wrapper<cl_type>::operator=(rhs);
3025  return *this;
3026  }
3027 
3029  template <typename T>
3030  cl_int getInfo(cl_context_info name, T* param) const
3031  {
3032  return detail::errHandler(
3033  detail::getInfo(&::clGetContextInfo, object_, name, param),
3034  __GET_CONTEXT_INFO_ERR);
3035  }
3036 
3038  template <cl_context_info name> typename
3039  detail::param_traits<detail::cl_context_info, name>::param_type
3040  getInfo(cl_int* err = NULL) const
3041  {
3042  typename detail::param_traits<
3043  detail::cl_context_info, name>::param_type param;
3044  cl_int result = getInfo(name, &param);
3045  if (err != NULL) {
3046  *err = result;
3047  }
3048  return param;
3049  }
3050 
3055  cl_int getSupportedImageFormats(
3056  cl_mem_flags flags,
3057  cl_mem_object_type type,
3058  vector<ImageFormat>* formats) const
3059  {
3060  cl_uint numEntries;
3061 
3062  if (!formats) {
3063  return CL_SUCCESS;
3064  }
3065 
3066  cl_int err = ::clGetSupportedImageFormats(
3067  object_,
3068  flags,
3069  type,
3070  0,
3071  NULL,
3072  &numEntries);
3073  if (err != CL_SUCCESS) {
3074  return detail::errHandler(err, __GET_SUPPORTED_IMAGE_FORMATS_ERR);
3075  }
3076 
3077  if (numEntries > 0) {
3078  vector<ImageFormat> value(numEntries);
3079  err = ::clGetSupportedImageFormats(
3080  object_,
3081  flags,
3082  type,
3083  numEntries,
3084  (cl_image_format*)value.data(),
3085  NULL);
3086  if (err != CL_SUCCESS) {
3087  return detail::errHandler(err, __GET_SUPPORTED_IMAGE_FORMATS_ERR);
3088  }
3089 
3090  formats->assign(begin(value), end(value));
3091  }
3092  else {
3093  // If no values are being returned, ensure an empty vector comes back
3094  formats->clear();
3095  }
3096 
3097  return CL_SUCCESS;
3098  }
3099 };
3100 
3101 inline void Device::makeDefault()
3102 {
3103  /* Throwing an exception from a call_once invocation does not do
3104  * what we wish, so we catch it and save the error.
3105  */
3106 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
3107  try
3108 #endif
3109  {
3110  cl_int error = 0;
3111 
3112  Context context = Context::getDefault(&error);
3113  detail::errHandler(error, __CREATE_CONTEXT_ERR);
3114 
3115  if (error != CL_SUCCESS) {
3116  default_error_ = error;
3117  }
3118  else {
3119  default_ = context.getInfo<CL_CONTEXT_DEVICES>()[0];
3120  default_error_ = CL_SUCCESS;
3121  }
3122  }
3123 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
3124  catch (cl::Error &e) {
3125  default_error_ = e.err();
3126  }
3127 #endif
3128 }
3129 
3130 CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag Context::default_initialized_;
3131 CL_HPP_DEFINE_STATIC_MEMBER_ Context Context::default_;
3132 CL_HPP_DEFINE_STATIC_MEMBER_ cl_int Context::default_error_ = CL_SUCCESS;
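// Illustrative usage sketch (editor's addition, not part of the original
// header): building a context over all GPU devices of the first platform that
// has one, then listing the devices it contains. Assumes such a platform exists.
#if 0 // example only
cl_int err = CL_SUCCESS;
cl::Context ctx(CL_DEVICE_TYPE_GPU, nullptr, nullptr, nullptr, &err);
cl::vector<cl::Device> devs = ctx.getInfo<CL_CONTEXT_DEVICES>();
#endif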
3133 
3142 class Event : public detail::Wrapper<cl_event>
3143 {
3144 public:
3146  Event() : detail::Wrapper<cl_type>() { }
3147 
3156  explicit Event(const cl_event& event, bool retainObject = false) :
3157  detail::Wrapper<cl_type>(event, retainObject) { }
3158 
3164  Event& operator = (const cl_event& rhs)
3165  {
3166  detail::Wrapper<cl_type>::operator=(rhs);
3167  return *this;
3168  }
3169 
3171  template <typename T>
3172  cl_int getInfo(cl_event_info name, T* param) const
3173  {
3174  return detail::errHandler(
3175  detail::getInfo(&::clGetEventInfo, object_, name, param),
3176  __GET_EVENT_INFO_ERR);
3177  }
3178 
3180  template <cl_event_info name> typename
3181  detail::param_traits<detail::cl_event_info, name>::param_type
3182  getInfo(cl_int* err = NULL) const
3183  {
3184  typename detail::param_traits<
3185  detail::cl_event_info, name>::param_type param;
3186  cl_int result = getInfo(name, &param);
3187  if (err != NULL) {
3188  *err = result;
3189  }
3190  return param;
3191  }
3192 
3194  template <typename T>
3195  cl_int getProfilingInfo(cl_profiling_info name, T* param) const
3196  {
3197  return detail::errHandler(detail::getInfo(
3198  &::clGetEventProfilingInfo, object_, name, param),
3199  __GET_EVENT_PROFILE_INFO_ERR);
3200  }
3201 
3203  template <cl_profiling_info name> typename
3204  detail::param_traits<detail::cl_profiling_info, name>::param_type
3205  getProfilingInfo(cl_int* err = NULL) const
3206  {
3207  typename detail::param_traits<
3208  detail::cl_profiling_info, name>::param_type param;
3209  cl_int result = getProfilingInfo(name, &param);
3210  if (err != NULL) {
3211  *err = result;
3212  }
3213  return param;
3214  }
3215 
3220  cl_int wait() const
3221  {
3222  return detail::errHandler(
3223  ::clWaitForEvents(1, &object_),
3224  __WAIT_FOR_EVENTS_ERR);
3225  }
3226 
3227 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
3228 
3232  cl_int setCallback(
3233  cl_int type,
3234  void (CL_CALLBACK * pfn_notify)(cl_event, cl_int, void *),
3235  void * user_data = NULL)
3236  {
3237  return detail::errHandler(
3238  ::clSetEventCallback(
3239  object_,
3240  type,
3241  pfn_notify,
3242  user_data),
3243  __SET_EVENT_CALLBACK_ERR);
3244  }
3245 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
3246 
3251  static cl_int
3252  waitForEvents(const vector<Event>& events)
3253  {
3254  return detail::errHandler(
3255  ::clWaitForEvents(
3256  (cl_uint) events.size(), (events.size() > 0) ? (cl_event*)&events.front() : NULL),
3257  __WAIT_FOR_EVENTS_ERR);
3258  }
3259 };
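// Illustrative usage sketch (editor's addition, not part of the original
// header): waiting on an event and reading its timestamps. The event would
// normally come from an enqueue call, and profiling data is only available if
// the owning queue was created with CL_QUEUE_PROFILING_ENABLE.
#if 0 // example only
cl::Event ev; // filled in by an enqueue call elsewhere
ev.wait();    // block until the associated command completes
cl_ulong start = ev.getProfilingInfo<CL_PROFILING_COMMAND_START>();
cl_ulong end   = ev.getProfilingInfo<CL_PROFILING_COMMAND_END>();
#endif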
3260 
3261 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
3262 
3266 class UserEvent : public Event
3267 {
3268 public:
3273  UserEvent(
3274  const Context& context,
3275  cl_int * err = NULL)
3276  {
3277  cl_int error;
3278  object_ = ::clCreateUserEvent(
3279  context(),
3280  &error);
3281 
3282  detail::errHandler(error, __CREATE_USER_EVENT_ERR);
3283  if (err != NULL) {
3284  *err = error;
3285  }
3286  }
3287 
3289  UserEvent() : Event() { }
3290 
3295  cl_int setStatus(cl_int status)
3296  {
3297  return detail::errHandler(
3298  ::clSetUserEventStatus(object_,status),
3299  __SET_USER_EVENT_STATUS_ERR);
3300  }
3301 };
3302 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
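// Illustrative usage sketch (editor's addition, not part of the original
// header): a user event used as a gate that host code releases once its own
// preparation work is done.
#if 0 // example only
cl::UserEvent gate(cl::Context::getDefault());
// ... enqueue device work whose wait list contains `gate`, then release it:
gate.setStatus(CL_COMPLETE);
#endif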
3303 
3308 inline static cl_int
3309 WaitForEvents(const vector<Event>& events)
3310 {
3311  return detail::errHandler(
3312  ::clWaitForEvents(
3313  (cl_uint) events.size(), (events.size() > 0) ? (cl_event*)&events.front() : NULL),
3314  __WAIT_FOR_EVENTS_ERR);
3315 }
3316 
3325 class Memory : public detail::Wrapper<cl_mem>
3326 {
3327 public:
3329  Memory() : detail::Wrapper<cl_type>() { }
3330 
3342  explicit Memory(const cl_mem& memory, bool retainObject) :
3343  detail::Wrapper<cl_type>(memory, retainObject) { }
3344 
3350  Memory& operator = (const cl_mem& rhs)
3351  {
3352  detail::Wrapper<cl_type>::operator=(rhs);
3353  return *this;
3354  }
3355 
3359  Memory(const Memory& mem) : detail::Wrapper<cl_type>(mem) {}
3360 
3364  Memory& operator = (const Memory &mem)
3365  {
3366  detail::Wrapper<cl_type>::operator=(mem);
3367  return *this;
3368  }
3369 
3373  Memory(Memory&& mem) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(mem)) {}
3374 
3378  Memory& operator = (Memory &&mem)
3379  {
3380  detail::Wrapper<cl_type>::operator=(std::move(mem));
3381  return *this;
3382  }
3383 
3384 
3386  template <typename T>
3387  cl_int getInfo(cl_mem_info name, T* param) const
3388  {
3389  return detail::errHandler(
3390  detail::getInfo(&::clGetMemObjectInfo, object_, name, param),
3391  __GET_MEM_OBJECT_INFO_ERR);
3392  }
3393 
3395  template <cl_mem_info name> typename
3396  detail::param_traits<detail::cl_mem_info, name>::param_type
3397  getInfo(cl_int* err = NULL) const
3398  {
3399  typename detail::param_traits<
3400  detail::cl_mem_info, name>::param_type param;
3401  cl_int result = getInfo(name, &param);
3402  if (err != NULL) {
3403  *err = result;
3404  }
3405  return param;
3406  }
3407 
3408 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
3409 
3422  cl_int setDestructorCallback(
3423  void (CL_CALLBACK * pfn_notify)(cl_mem, void *),
3424  void * user_data = NULL)
3425  {
3426  return detail::errHandler(
3427  ::clSetMemObjectDestructorCallback(
3428  object_,
3429  pfn_notify,
3430  user_data),
3431  __SET_MEM_OBJECT_DESTRUCTOR_CALLBACK_ERR);
3432  }
3433 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
3434 
3435 };
3436 
3437 // Pre-declare copy functions
3438 class Buffer;
3439 template< typename IteratorType >
3440 cl_int copy( IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer );
3441 template< typename IteratorType >
3442 cl_int copy( const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator );
3443 template< typename IteratorType >
3444 cl_int copy( const CommandQueue &queue, IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer );
3445 template< typename IteratorType >
3446 cl_int copy( const CommandQueue &queue, const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator );
3447 
3448 
3449 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
3450 namespace detail
3451 {
3452  class SVMTraitNull
3453  {
3454  public:
3455  static cl_svm_mem_flags getSVMMemFlags()
3456  {
3457  return 0;
3458  }
3459  };
3460 } // namespace detail
3461 
3462 template<class Trait = detail::SVMTraitNull>
3463  class SVMTraitReadWrite
3464  {
3465 public:
3466  static cl_svm_mem_flags getSVMMemFlags()
3467  {
3468  return CL_MEM_READ_WRITE |
3469  Trait::getSVMMemFlags();
3470  }
3471 };
3472 
3473 template<class Trait = detail::SVMTraitNull>
3474  class SVMTraitReadOnly
3475  {
3476 public:
3477  static cl_svm_mem_flags getSVMMemFlags()
3478  {
3479  return CL_MEM_READ_ONLY |
3480  Trait::getSVMMemFlags();
3481  }
3482 };
3483 
3484 template<class Trait = detail::SVMTraitNull>
3485  class SVMTraitWriteOnly
3486  {
3487 public:
3488  static cl_svm_mem_flags getSVMMemFlags()
3489  {
3490  return CL_MEM_WRITE_ONLY |
3491  Trait::getSVMMemFlags();
3492  }
3493 };
3494 
3495 template<class Trait = SVMTraitReadWrite<>>
3496  class SVMTraitCoarse
3497  {
3498 public:
3499  static cl_svm_mem_flags getSVMMemFlags()
3500  {
3501  return Trait::getSVMMemFlags();
3502  }
3503 };
3504 
3505 template<class Trait = SVMTraitReadWrite<>>
3506  class SVMTraitFine
3507  {
3508 public:
3509  static cl_svm_mem_flags getSVMMemFlags()
3510  {
3511  return CL_MEM_SVM_FINE_GRAIN_BUFFER |
3512  Trait::getSVMMemFlags();
3513  }
3514 };
3515 
3516 template<class Trait = SVMTraitReadWrite<>>
3517  class SVMTraitAtomic
3518  {
3519 public:
3520  static cl_svm_mem_flags getSVMMemFlags()
3521  {
3522  return
3523  CL_MEM_SVM_FINE_GRAIN_BUFFER |
3524  CL_MEM_SVM_ATOMICS |
3525  Trait::getSVMMemFlags();
3526  }
3527 };
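// Illustrative note (editor's addition, not part of the original header): the
// traits compose by OR-ing their flags, so nesting one trait inside another
// combines the corresponding cl_svm_mem_flags bits.
#if 0 // example only
cl_svm_mem_flags f = cl::SVMTraitFine<cl::SVMTraitReadOnly<>>::getSVMMemFlags();
// f == (CL_MEM_SVM_FINE_GRAIN_BUFFER | CL_MEM_READ_ONLY)
#endif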
3528 
3529 // Pre-declare SVM map function
3530 template<typename T>
3531 inline cl_int enqueueMapSVM(
3532  T* ptr,
3533  cl_bool blocking,
3534  cl_map_flags flags,
3535  size_type size,
3536  const vector<Event>* events = NULL,
3537  Event* event = NULL);
3538 
3550 template<typename T, class SVMTrait>
3551 class SVMAllocator {
3552 private:
3553  Context context_;
3554 
3555 public:
3556  typedef T value_type;
3557  typedef value_type* pointer;
3558  typedef const value_type* const_pointer;
3559  typedef value_type& reference;
3560  typedef const value_type& const_reference;
3561  typedef std::size_t size_type;
3562  typedef std::ptrdiff_t difference_type;
3563 
3564  template<typename U>
3565  struct rebind
3566  {
3567  typedef SVMAllocator<U, SVMTrait> other;
3568  };
3569 
3570  template<typename U, typename V>
3571  friend class SVMAllocator;
3572 
3573  SVMAllocator() :
3574  context_(Context::getDefault())
3575  {
3576  }
3577 
3578  explicit SVMAllocator(cl::Context context) :
3579  context_(context)
3580  {
3581  }
3582 
3583 
3584  SVMAllocator(const SVMAllocator &other) :
3585  context_(other.context_)
3586  {
3587  }
3588 
3589  template<typename U>
3590  SVMAllocator(const SVMAllocator<U, SVMTrait> &other) :
3591  context_(other.context_)
3592  {
3593  }
3594 
3595  ~SVMAllocator()
3596  {
3597  }
3598 
3599  pointer address(reference r) CL_HPP_NOEXCEPT_
3600  {
3601  return std::addressof(r);
3602  }
3603 
3604  const_pointer address(const_reference r) CL_HPP_NOEXCEPT_
3605  {
3606  return std::addressof(r);
3607  }
3608 
3615  pointer allocate(
3616  size_type size,
3617  typename cl::SVMAllocator<void, SVMTrait>::const_pointer = 0)
3618  {
3619  // Allocate memory with default alignment matching the size of the type
3620  void* voidPointer =
3621  clSVMAlloc(
3622  context_(),
3623  SVMTrait::getSVMMemFlags(),
3624  size*sizeof(T),
3625  0);
3626  pointer retValue = reinterpret_cast<pointer>(
3627  voidPointer);
3628 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
3629  if (!retValue) {
3630  std::bad_alloc excep;
3631  throw excep;
3632  }
3633 #endif // #if defined(CL_HPP_ENABLE_EXCEPTIONS)
3634 
3635  // If allocation was coarse-grained then map it
3636  if (!(SVMTrait::getSVMMemFlags() & CL_MEM_SVM_FINE_GRAIN_BUFFER)) {
3637  cl_int err = enqueueMapSVM(retValue, CL_TRUE, CL_MAP_READ | CL_MAP_WRITE, size*sizeof(T));
3638  if (err != CL_SUCCESS) {
3639  std::bad_alloc excep;
3640  throw excep;
3641  }
3642  }
3643 
3644  // If exceptions disabled, return null pointer from allocator
3645  return retValue;
3646  }
3647 
3648  void deallocate(pointer p, size_type)
3649  {
3650  clSVMFree(context_(), p);
3651  }
3652 
3657  size_type max_size() const CL_HPP_NOEXCEPT_
3658  {
3659  size_type maxSize = std::numeric_limits<size_type>::max() / sizeof(T);
3660 
3661  for (const Device &d : context_.getInfo<CL_CONTEXT_DEVICES>()) {
3662  maxSize = std::min(
3663  maxSize,
3664  static_cast<size_type>(d.getInfo<CL_DEVICE_MAX_MEM_ALLOC_SIZE>()));
3665  }
3666 
3667  return maxSize;
3668  }
3669 
3670  template< class U, class... Args >
3671  void construct(U* p, Args&&... args)
3672  {
3673  new(p)T(args...);
3674  }
3675 
3676  template< class U >
3677  void destroy(U* p)
3678  {
3679  p->~U();
3680  }
3681 
3685  inline bool operator==(SVMAllocator const& rhs)
3686  {
3687  return (context_==rhs.context_);
3688  }
3689 
3690  inline bool operator!=(SVMAllocator const& a)
3691  {
3692  return !operator==(a);
3693  }
3694 }; // class SVMAllocator
3695 
3696 
3697 template<class SVMTrait>
3698 class SVMAllocator<void, SVMTrait> {
3699 public:
3700  typedef void value_type;
3701  typedef value_type* pointer;
3702  typedef const value_type* const_pointer;
3703 
3704  template<typename U>
3705  struct rebind
3706  {
3707  typedef SVMAllocator<U, SVMTrait> other;
3708  };
3709 
3710  template<typename U, typename V>
3711  friend class SVMAllocator;
3712 };
3713 
3714 #if !defined(CL_HPP_NO_STD_UNIQUE_PTR)
3715 namespace detail
3716 {
3717  template<class Alloc>
3718  class Deleter {
3719  private:
3720  Alloc alloc_;
3721  size_type copies_;
3722 
3723  public:
3724  typedef typename std::allocator_traits<Alloc>::pointer pointer;
3725 
3726  Deleter(const Alloc &alloc, size_type copies) : alloc_{ alloc }, copies_{ copies }
3727  {
3728  }
3729 
3730  void operator()(pointer ptr) const {
3731  Alloc tmpAlloc{ alloc_ };
3732  std::allocator_traits<Alloc>::destroy(tmpAlloc, std::addressof(*ptr));
3733  std::allocator_traits<Alloc>::deallocate(tmpAlloc, ptr, copies_);
3734  }
3735  };
3736 } // namespace detail
3737 
3744 template <class T, class Alloc, class... Args>
3745 cl::pointer<T, detail::Deleter<Alloc>> allocate_pointer(const Alloc &alloc_, Args&&... args)
3746 {
3747  Alloc alloc(alloc_);
3748  static const size_type copies = 1;
3749 
3750  // Ensure that creation of the management block and the
3751  // object are dealt with separately such that we only provide a deleter
3752 
3753  T* tmp = std::allocator_traits<Alloc>::allocate(alloc, copies);
3754  if (!tmp) {
3755  std::bad_alloc excep;
3756  throw excep;
3757  }
3758  try {
3759  std::allocator_traits<Alloc>::construct(
3760  alloc,
3761  std::addressof(*tmp),
3762  std::forward<Args>(args)...);
3763 
3764  return cl::pointer<T, detail::Deleter<Alloc>>(tmp, detail::Deleter<Alloc>{alloc, copies});
3765  }
3766  catch (std::bad_alloc& b)
3767  {
3768  std::allocator_traits<Alloc>::deallocate(alloc, tmp, copies);
3769  throw;
3770  }
3771 }
3772 
3773 template< class T, class SVMTrait, class... Args >
3774 cl::pointer<T, detail::Deleter<SVMAllocator<T, SVMTrait>>> allocate_svm(Args... args)
3775 {
3776  SVMAllocator<T, SVMTrait> alloc;
3777  return cl::allocate_pointer<T>(alloc, args...);
3778 }
3779 
3780 template< class T, class SVMTrait, class... Args >
3781 cl::pointer<T, detail::Deleter<SVMAllocator<T, SVMTrait>>> allocate_svm(const cl::Context &c, Args... args)
3782 {
3783  SVMAllocator<T, SVMTrait> alloc(c);
3784  return cl::allocate_pointer<T>(alloc, args...);
3785 }
3786 #endif // #if !defined(CL_HPP_NO_STD_UNIQUE_PTR)
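// Illustrative usage sketch (editor's addition, not part of the original
// header): allocating a single int in coarse-grained SVM on the default
// context. The returned cl::pointer's deleter destroys the object and frees
// the SVM allocation; per SVMAllocator::allocate above, coarse allocations are
// already mapped for host access when this call returns.
#if 0 // example only
auto p = cl::allocate_svm<int, cl::SVMTraitCoarse<>>(42);
*p = 7; // host-side access to the coarse-grained SVM allocation
#endif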
3787 
3791 template < class T >
3792 using coarse_svm_vector = vector<T, cl::SVMAllocator<int, cl::SVMTraitCoarse<>>>;
3793 
3797 template < class T >
3798 using fine_svm_vector = vector<T, cl::SVMAllocator<int, cl::SVMTraitFine<>>>;
3799 
3803 template < class T >
3804 using atomic_svm_vector = vector<T, cl::SVMAllocator<int, cl::SVMTraitAtomic<>>>;
3805 
3806 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
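// Illustrative usage sketch (editor's addition, not part of the original
// header): a std::vector whose storage lives in coarse-grained SVM on the
// default context. Requires a device with OpenCL 2.0 SVM support at runtime.
#if 0 // example only
cl::coarse_svm_vector<cl_int> values(1024, 0);
#endif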
3807 
3808 
3815 class Buffer : public Memory
3816 {
3817 public:
3818 
3826  Buffer(
3827  const Context& context,
3828  cl_mem_flags flags,
3829  size_type size,
3830  void* host_ptr = NULL,
3831  cl_int* err = NULL)
3832  {
3833  cl_int error;
3834  object_ = ::clCreateBuffer(context(), flags, size, host_ptr, &error);
3835 
3836  detail::errHandler(error, __CREATE_BUFFER_ERR);
3837  if (err != NULL) {
3838  *err = error;
3839  }
3840  }
3841 
3851  Buffer(
3852  cl_mem_flags flags,
3853  size_type size,
3854  void* host_ptr = NULL,
3855  cl_int* err = NULL)
3856  {
3857  cl_int error;
3858 
3859  Context context = Context::getDefault(err);
3860 
3861  object_ = ::clCreateBuffer(context(), flags, size, host_ptr, &error);
3862 
3863  detail::errHandler(error, __CREATE_BUFFER_ERR);
3864  if (err != NULL) {
3865  *err = error;
3866  }
3867  }
3868 
3874  template< typename IteratorType >
3875  Buffer(
3876  IteratorType startIterator,
3877  IteratorType endIterator,
3878  bool readOnly,
3879  bool useHostPtr = false,
3880  cl_int* err = NULL)
3881  {
3882  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
3883  cl_int error;
3884 
3885  cl_mem_flags flags = 0;
3886  if( readOnly ) {
3887  flags |= CL_MEM_READ_ONLY;
3888  }
3889  else {
3890  flags |= CL_MEM_READ_WRITE;
3891  }
3892  if( useHostPtr ) {
3893  flags |= CL_MEM_USE_HOST_PTR;
3894  }
3895 
3896  size_type size = sizeof(DataType)*(endIterator - startIterator);
3897 
3898  Context context = Context::getDefault(err);
3899 
3900  if( useHostPtr ) {
3901  object_ = ::clCreateBuffer(context(), flags, size, static_cast<DataType*>(&*startIterator), &error);
3902  } else {
3903  object_ = ::clCreateBuffer(context(), flags, size, 0, &error);
3904  }
3905 
3906  detail::errHandler(error, __CREATE_BUFFER_ERR);
3907  if (err != NULL) {
3908  *err = error;
3909  }
3910 
3911  if( !useHostPtr ) {
3912  error = cl::copy(startIterator, endIterator, *this);
3913  detail::errHandler(error, __CREATE_BUFFER_ERR);
3914  if (err != NULL) {
3915  *err = error;
3916  }
3917  }
3918  }
3919 
3925  template< typename IteratorType >
3926  Buffer(const Context &context, IteratorType startIterator, IteratorType endIterator,
3927  bool readOnly, bool useHostPtr = false, cl_int* err = NULL);
3928 
3933  template< typename IteratorType >
3934  Buffer(const CommandQueue &queue, IteratorType startIterator, IteratorType endIterator,
3935  bool readOnly, bool useHostPtr = false, cl_int* err = NULL);
3936 
3938  Buffer() : Memory() { }
3939 
3947  explicit Buffer(const cl_mem& buffer, bool retainObject = false) :
3948  Memory(buffer, retainObject) { }
3949 
3954  Buffer& operator = (const cl_mem& rhs)
3955  {
3956  Memory::operator=(rhs);
3957  return *this;
3958  }
3959 
3963  Buffer(const Buffer& buf) : Memory(buf) {}
3964 
3968  Buffer& operator = (const Buffer &buf)
3969  {
3970  Memory::operator=(buf);
3971  return *this;
3972  }
3973 
3977  Buffer(Buffer&& buf) CL_HPP_NOEXCEPT_ : Memory(std::move(buf)) {}
3978 
3982  Buffer& operator = (Buffer &&buf)
3983  {
3984  Memory::operator=(std::move(buf));
3985  return *this;
3986  }
3987 
3988 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
3989 
3993  Buffer createSubBuffer(
3994  cl_mem_flags flags,
3995  cl_buffer_create_type buffer_create_type,
3996  const void * buffer_create_info,
3997  cl_int * err = NULL)
3998  {
3999  Buffer result;
4000  cl_int error;
4001  result.object_ = ::clCreateSubBuffer(
4002  object_,
4003  flags,
4004  buffer_create_type,
4005  buffer_create_info,
4006  &error);
4007 
4008  detail::errHandler(error, __CREATE_SUBBUFFER_ERR);
4009  if (err != NULL) {
4010  *err = error;
4011  }
4012 
4013  return result;
4014  }
4015 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
4016 };
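// Illustrative usage sketch (editor's addition, not part of the original
// header): a plain device buffer plus a buffer initialised from host data via
// the iterator constructor (which copies through the default command queue).
// Assumes a usable default context/queue; error handling is abbreviated.
#if 0 // example only
cl_int err = CL_SUCCESS;
cl::Buffer scratch(CL_MEM_READ_WRITE, 1024 * 1024, nullptr, &err); // 1 MiB, default context
std::vector<float> host(256, 1.0f);
cl::Buffer input(host.begin(), host.end(), /*readOnly=*/true, /*useHostPtr=*/false, &err);
#endif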
4017 
4018 #if defined (CL_HPP_USE_DX_INTEROP)
4019 
4027 class BufferD3D10 : public Buffer
4028 {
4029 public:
4030 
4031 
4037  BufferD3D10(
4038  const Context& context,
4039  cl_mem_flags flags,
4040  ID3D10Buffer* bufobj,
4041  cl_int * err = NULL) : pfn_clCreateFromD3D10BufferKHR(nullptr)
4042  {
4043  typedef CL_API_ENTRY cl_mem (CL_API_CALL *PFN_clCreateFromD3D10BufferKHR)(
4044  cl_context context, cl_mem_flags flags, ID3D10Buffer* buffer,
4045  cl_int* errcode_ret);
4046  PFN_clCreateFromD3D10BufferKHR pfn_clCreateFromD3D10BufferKHR;
4047 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4048  vector<cl_context_properties> props = context.getInfo<CL_CONTEXT_PROPERTIES>();
4049  cl_platform_id platform = nullptr;
4050  for( size_type i = 0; i < props.size(); ++i ) {
4051  if( props[i] == CL_CONTEXT_PLATFORM ) {
4052  platform = (cl_platform_id)props[i+1];
4053  }
4054  }
4055  CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, clCreateFromD3D10BufferKHR);
4056 #elif CL_HPP_TARGET_OPENCL_VERSION >= 110
4057  CL_HPP_INIT_CL_EXT_FCN_PTR_(clCreateFromD3D10BufferKHR);
4058 #endif
4059 
4060  cl_int error;
4061  object_ = pfn_clCreateFromD3D10BufferKHR(
4062  context(),
4063  flags,
4064  bufobj,
4065  &error);
4066 
4067  detail::errHandler(error, __CREATE_GL_BUFFER_ERR);
4068  if (err != NULL) {
4069  *err = error;
4070  }
4071  }
4072 
4074  BufferD3D10() : Buffer() { }
4075 
4083  explicit BufferD3D10(const cl_mem& buffer, bool retainObject = false) :
4084  Buffer(buffer, retainObject) { }
4085 
4090  BufferD3D10& operator = (const cl_mem& rhs)
4091  {
4092  Buffer::operator=(rhs);
4093  return *this;
4094  }
4095 
4099  BufferD3D10(const BufferD3D10& buf) :
4100  Buffer(buf) {}
4101 
4105  BufferD3D10& operator = (const BufferD3D10 &buf)
4106  {
4107  Buffer::operator=(buf);
4108  return *this;
4109  }
4110 
4114  BufferD3D10(BufferD3D10&& buf) CL_HPP_NOEXCEPT_ : Buffer(std::move(buf)) {}
4115 
4119  BufferD3D10& operator = (BufferD3D10 &&buf)
4120  {
4121  Buffer::operator=(std::move(buf));
4122  return *this;
4123  }
4124 };
4125 #endif
4126 
4135 class BufferGL : public Buffer
4136 {
4137 public:
4143  BufferGL(
4144  const Context& context,
4145  cl_mem_flags flags,
4146  cl_GLuint bufobj,
4147  cl_int * err = NULL)
4148  {
4149  cl_int error;
4150  object_ = ::clCreateFromGLBuffer(
4151  context(),
4152  flags,
4153  bufobj,
4154  &error);
4155 
4156  detail::errHandler(error, __CREATE_GL_BUFFER_ERR);
4157  if (err != NULL) {
4158  *err = error;
4159  }
4160  }
4161 
4163  BufferGL() : Buffer() { }
4164 
4172  explicit BufferGL(const cl_mem& buffer, bool retainObject = false) :
4173  Buffer(buffer, retainObject) { }
4174 
4179  BufferGL& operator = (const cl_mem& rhs)
4180  {
4181  Buffer::operator=(rhs);
4182  return *this;
4183  }
4184 
4188  BufferGL(const BufferGL& buf) : Buffer(buf) {}
4189 
4193  BufferGL& operator = (const BufferGL &buf)
4194  {
4195  Buffer::operator=(buf);
4196  return *this;
4197  }
4198 
4202  BufferGL(BufferGL&& buf) CL_HPP_NOEXCEPT_ : Buffer(std::move(buf)) {}
4203 
4207  BufferGL& operator = (BufferGL &&buf)
4208  {
4209  Buffer::operator=(std::move(buf));
4210  return *this;
4211  }
4212 
4214  cl_int getObjectInfo(
4215  cl_gl_object_type *type,
4216  cl_GLuint * gl_object_name)
4217  {
4218  return detail::errHandler(
4219  ::clGetGLObjectInfo(object_,type,gl_object_name),
4220  __GET_GL_OBJECT_INFO_ERR);
4221  }
4222 };
4223 
4232 class BufferRenderGL : public Buffer
4233 {
4234 public:
4240  BufferRenderGL(
4241  const Context& context,
4242  cl_mem_flags flags,
4243  cl_GLuint bufobj,
4244  cl_int * err = NULL)
4245  {
4246  cl_int error;
4247  object_ = ::clCreateFromGLRenderbuffer(
4248  context(),
4249  flags,
4250  bufobj,
4251  &error);
4252 
4253  detail::errHandler(error, __CREATE_GL_RENDER_BUFFER_ERR);
4254  if (err != NULL) {
4255  *err = error;
4256  }
4257  }
4258 
4260  BufferRenderGL() : Buffer() { }
4261 
4269  explicit BufferRenderGL(const cl_mem& buffer, bool retainObject = false) :
4270  Buffer(buffer, retainObject) { }
4271 
4276  BufferRenderGL& operator = (const cl_mem& rhs)
4277  {
4278  Buffer::operator=(rhs);
4279  return *this;
4280  }
4281 
4285  BufferRenderGL(const BufferRenderGL& buf) : Buffer(buf) {}
4286 
4290  BufferRenderGL& operator = (const BufferRenderGL &buf)
4291  {
4292  Buffer::operator=(buf);
4293  return *this;
4294  }
4295 
4299  BufferRenderGL(BufferRenderGL&& buf) CL_HPP_NOEXCEPT_ : Buffer(std::move(buf)) {}
4300 
4304  BufferRenderGL& operator = (BufferRenderGL &&buf)
4305  {
4306  Buffer::operator=(std::move(buf));
4307  return *this;
4308  }
4309 
4311  cl_int getObjectInfo(
4312  cl_gl_object_type *type,
4313  cl_GLuint * gl_object_name)
4314  {
4315  return detail::errHandler(
4316  ::clGetGLObjectInfo(object_,type,gl_object_name),
4317  __GET_GL_OBJECT_INFO_ERR);
4318  }
4319 };
4320 
4327 class Image : public Memory
4328 {
4329 protected:
4331  Image() : Memory() { }
4332 
4340  explicit Image(const cl_mem& image, bool retainObject = false) :
4341  Memory(image, retainObject) { }
4342 
4347  Image& operator = (const cl_mem& rhs)
4348  {
4349  Memory::operator=(rhs);
4350  return *this;
4351  }
4352 
4356  Image(const Image& img) : Memory(img) {}
4357 
4361  Image& operator = (const Image &img)
4362  {
4363  Memory::operator=(img);
4364  return *this;
4365  }
4366 
4370  Image(Image&& img) CL_HPP_NOEXCEPT_ : Memory(std::move(img)) {}
4371 
4375  Image& operator = (Image &&img)
4376  {
4377  Memory::operator=(std::move(img));
4378  return *this;
4379  }
4380 
4381 
4382 public:
4384  template <typename T>
4385  cl_int getImageInfo(cl_image_info name, T* param) const
4386  {
4387  return detail::errHandler(
4388  detail::getInfo(&::clGetImageInfo, object_, name, param),
4389  __GET_IMAGE_INFO_ERR);
4390  }
4391 
4393  template <cl_image_info name> typename
4394  detail::param_traits<detail::cl_image_info, name>::param_type
4395  getImageInfo(cl_int* err = NULL) const
4396  {
4397  typename detail::param_traits<
4398  detail::cl_image_info, name>::param_type param;
4399  cl_int result = getImageInfo(name, &param);
4400  if (err != NULL) {
4401  *err = result;
4402  }
4403  return param;
4404  }
4405 };
4406 
4407 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4408 
4414 class Image1D : public Image
4415 {
4416 public:
4421  Image1D(
4422  const Context& context,
4423  cl_mem_flags flags,
4424  ImageFormat format,
4425  size_type width,
4426  void* host_ptr = NULL,
4427  cl_int* err = NULL)
4428  {
4429  cl_int error;
4430  cl_image_desc desc =
4431  {
4432  CL_MEM_OBJECT_IMAGE1D,
4433  width,
4434  0, 0, 0, 0, 0, 0, 0, 0
4435  };
4436  object_ = ::clCreateImage(
4437  context(),
4438  flags,
4439  &format,
4440  &desc,
4441  host_ptr,
4442  &error);
4443 
4444  detail::errHandler(error, __CREATE_IMAGE_ERR);
4445  if (err != NULL) {
4446  *err = error;
4447  }
4448  }
4449 
4451  Image1D() { }
4452 
4460  explicit Image1D(const cl_mem& image1D, bool retainObject = false) :
4461  Image(image1D, retainObject) { }
4462 
4467  Image1D& operator = (const cl_mem& rhs)
4468  {
4469  Image::operator=(rhs);
4470  return *this;
4471  }
4472 
4476  Image1D(const Image1D& img) : Image(img) {}
4477 
4481  Image1D& operator = (const Image1D &img)
4482  {
4483  Image::operator=(img);
4484  return *this;
4485  }
4486 
4490  Image1D(Image1D&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4491 
4495  Image1D& operator = (Image1D &&img)
4496  {
4497  Image::operator=(std::move(img));
4498  return *this;
4499  }
4500 
4501 };
4502 
4506 class Image1DBuffer : public Image
4507 {
4508 public:
4509  Image1DBuffer(
4510  const Context& context,
4511  cl_mem_flags flags,
4512  ImageFormat format,
4513  size_type width,
4514  const Buffer &buffer,
4515  cl_int* err = NULL)
4516  {
4517  cl_int error;
4518  cl_image_desc desc =
4519  {
4520  CL_MEM_OBJECT_IMAGE1D_BUFFER,
4521  width,
4522  0, 0, 0, 0, 0, 0, 0,
4523  buffer()
4524  };
4525  object_ = ::clCreateImage(
4526  context(),
4527  flags,
4528  &format,
4529  &desc,
4530  NULL,
4531  &error);
4532 
4533  detail::errHandler(error, __CREATE_IMAGE_ERR);
4534  if (err != NULL) {
4535  *err = error;
4536  }
4537  }
4538 
4539  Image1DBuffer() { }
4540 
4548  explicit Image1DBuffer(const cl_mem& image1D, bool retainObject = false) :
4549  Image(image1D, retainObject) { }
4550 
4551  Image1DBuffer& operator = (const cl_mem& rhs)
4552  {
4553  Image::operator=(rhs);
4554  return *this;
4555  }
4556 
4560  Image1DBuffer(const Image1DBuffer& img) : Image(img) {}
4561 
4565  Image1DBuffer& operator = (const Image1DBuffer &img)
4566  {
4567  Image::operator=(img);
4568  return *this;
4569  }
4570 
4574  Image1DBuffer(Image1DBuffer&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4575 
4579  Image1DBuffer& operator = (Image1DBuffer &&img)
4580  {
4581  Image::operator=(std::move(img));
4582  return *this;
4583  }
4584 
4585 };
4586 
4590 class Image1DArray : public Image
4591 {
4592 public:
4593  Image1DArray(
4594  const Context& context,
4595  cl_mem_flags flags,
4596  ImageFormat format,
4597  size_type arraySize,
4598  size_type width,
4599  size_type rowPitch,
4600  void* host_ptr = NULL,
4601  cl_int* err = NULL)
4602  {
4603  cl_int error;
4604  cl_image_desc desc =
4605  {
4606  CL_MEM_OBJECT_IMAGE1D_ARRAY,
4607  width,
4608  0, 0, // height, depth (unused)
4609  arraySize,
4610  rowPitch,
4611  0, 0, 0, 0
4612  };
4613  object_ = ::clCreateImage(
4614  context(),
4615  flags,
4616  &format,
4617  &desc,
4618  host_ptr,
4619  &error);
4620 
4621  detail::errHandler(error, __CREATE_IMAGE_ERR);
4622  if (err != NULL) {
4623  *err = error;
4624  }
4625  }
4626 
4627  Image1DArray() { }
4628 
4636  explicit Image1DArray(const cl_mem& imageArray, bool retainObject = false) :
4637  Image(imageArray, retainObject) { }
4638 
4639 
4640  Image1DArray& operator = (const cl_mem& rhs)
4641  {
4642  Image::operator=(rhs);
4643  return *this;
4644  }
4645 
4649  Image1DArray(const Image1DArray& img) : Image(img) {}
4650 
4654  Image1DArray& operator = (const Image1DArray &img)
4655  {
4656  Image::operator=(img);
4657  return *this;
4658  }
4659 
4663  Image1DArray(Image1DArray&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4664 
4668  Image1DArray& operator = (Image1DArray &&img)
4669  {
4670  Image::operator=(std::move(img));
4671  return *this;
4672  }
4673 
4674 };
4675 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4676 
4677 
4684 class Image2D : public Image
4685 {
4686 public:
4691  Image2D(
4692  const Context& context,
4693  cl_mem_flags flags,
4694  ImageFormat format,
4695  size_type width,
4696  size_type height,
4697  size_type row_pitch = 0,
4698  void* host_ptr = NULL,
4699  cl_int* err = NULL)
4700  {
4701  cl_int error;
4702  bool useCreateImage;
4703 
4704 #if CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
4705  // Run-time decision based on the actual platform
4706  {
4707  cl_uint version = detail::getContextPlatformVersion(context());
4708  useCreateImage = (version >= 0x10002); // OpenCL 1.2 or above
4709  }
4710 #elif CL_HPP_TARGET_OPENCL_VERSION >= 120
4711  useCreateImage = true;
4712 #else
4713  useCreateImage = false;
4714 #endif
4715 
4716 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4717  if (useCreateImage)
4718  {
4719  cl_image_desc desc =
4720  {
4721  CL_MEM_OBJECT_IMAGE2D,
4722  width,
4723  height,
4724  0, 0, // depth, array size (unused)
4725  row_pitch,
4726  0, 0, 0, 0
4727  };
4728  object_ = ::clCreateImage(
4729  context(),
4730  flags,
4731  &format,
4732  &desc,
4733  host_ptr,
4734  &error);
4735 
4736  detail::errHandler(error, __CREATE_IMAGE_ERR);
4737  if (err != NULL) {
4738  *err = error;
4739  }
4740  }
4741 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
4742 #if CL_HPP_MINIMUM_OPENCL_VERSION < 120
4743  if (!useCreateImage)
4744  {
4745  object_ = ::clCreateImage2D(
4746  context(), flags,&format, width, height, row_pitch, host_ptr, &error);
4747 
4748  detail::errHandler(error, __CREATE_IMAGE2D_ERR);
4749  if (err != NULL) {
4750  *err = error;
4751  }
4752  }
4753 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 120
4754  }
4755 
4756 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 || defined(CL_HPP_USE_CL_IMAGE2D_FROM_BUFFER_KHR)
4757 
4762  Image2D(
4763  const Context& context,
4764  ImageFormat format,
4765  const Buffer &sourceBuffer,
4766  size_type width,
4767  size_type height,
4768  size_type row_pitch = 0,
4769  cl_int* err = nullptr)
4770  {
4771  cl_int error;
4772 
4773  cl_image_desc desc =
4774  {
4775  CL_MEM_OBJECT_IMAGE2D,
4776  width,
4777  height,
4778  0, 0, // depth, array size (unused)
4779  row_pitch,
4780  0, 0, 0,
4781  // Use buffer as input to image
4782  sourceBuffer()
4783  };
4784  object_ = ::clCreateImage(
4785  context(),
4786  0, // flags inherited from buffer
4787  &format,
4788  &desc,
4789  nullptr,
4790  &error);
4791 
4792  detail::errHandler(error, __CREATE_IMAGE_ERR);
4793  if (err != nullptr) {
4794  *err = error;
4795  }
4796  }
4797 #endif //#if CL_HPP_TARGET_OPENCL_VERSION >= 200 || defined(CL_HPP_USE_CL_IMAGE2D_FROM_BUFFER_KHR)
4798 
4799 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
4800 
4812  Image2D(
4813  const Context& context,
4814  cl_channel_order order,
4815  const Image &sourceImage,
4816  cl_int* err = nullptr)
4817  {
4818  cl_int error;
4819 
4820  // Descriptor fields have to match source image
4821  size_type sourceWidth =
4822  sourceImage.getImageInfo<CL_IMAGE_WIDTH>();
4823  size_type sourceHeight =
4824  sourceImage.getImageInfo<CL_IMAGE_HEIGHT>();
4825  size_type sourceRowPitch =
4826  sourceImage.getImageInfo<CL_IMAGE_ROW_PITCH>();
4827  cl_uint sourceNumMIPLevels =
4828  sourceImage.getImageInfo<CL_IMAGE_NUM_MIP_LEVELS>();
4829  cl_uint sourceNumSamples =
4830  sourceImage.getImageInfo<CL_IMAGE_NUM_SAMPLES>();
4831  cl_image_format sourceFormat =
4832  sourceImage.getImageInfo<CL_IMAGE_FORMAT>();
4833 
4834  // Update only the channel order.
4835  // Channel format inherited from source.
4836  sourceFormat.image_channel_order = order;
4837  cl_image_desc desc =
4838  {
4839  CL_MEM_OBJECT_IMAGE2D,
4840  sourceWidth,
4841  sourceHeight,
4842  0, 0, // depth (unused), array size (unused)
4843  sourceRowPitch,
4844  0, // slice pitch (unused)
4845  sourceNumMIPLevels,
4846  sourceNumSamples,
4847  // Use buffer as input to image
4848  sourceImage()
4849  };
4850  object_ = ::clCreateImage(
4851  context(),
4852  0, // flags should be inherited from mem_object
4853  &sourceFormat,
4854  &desc,
4855  nullptr,
4856  &error);
4857 
4858  detail::errHandler(error, __CREATE_IMAGE_ERR);
4859  if (err != nullptr) {
4860  *err = error;
4861  }
4862  }
4863 #endif //#if CL_HPP_TARGET_OPENCL_VERSION >= 200
4864 
4866  Image2D() { }
4867 
4875  explicit Image2D(const cl_mem& image2D, bool retainObject = false) :
4876  Image(image2D, retainObject) { }
4877 
4882  Image2D& operator = (const cl_mem& rhs)
4883  {
4884  Image::operator=(rhs);
4885  return *this;
4886  }
4887 
4891  Image2D(const Image2D& img) : Image(img) {}
4892 
4896  Image2D& operator = (const Image2D &img)
4897  {
4898  Image::operator=(img);
4899  return *this;
4900  }
4901 
4905  Image2D(Image2D&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4906 
4910  Image2D& operator = (Image2D &&img) CL_HPP_NOEXCEPT_
4911  {
4912  Image::operator=(std::move(img));
4913  return *this;
4914  }
4915 
4916 };
4917 
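// --- Illustrative usage sketch (not part of the original header) ---
// Creating a 2D RGBA image initialised from host memory; the 640x480 size and
// the hostPixels pointer (assumed to reference 640*480*4 bytes) are assumptions
// made for this example.
inline cl::Image2D exampleCreateImage2D(const cl::Context &ctx, void *hostPixels, cl_int *err = NULL)
{
    return cl::Image2D(
        ctx,
        CL_MEM_READ_ONLY | CL_MEM_COPY_HOST_PTR,
        cl::ImageFormat(CL_RGBA, CL_UNORM_INT8),
        640, 480,    // width, height in pixels
        0,           // row_pitch: 0 means tightly packed
        hostPixels,
        err);
}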
4918 
4919 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
4920 
4929 class CL_EXT_PREFIX__VERSION_1_1_DEPRECATED Image2DGL : public Image2D
4930 {
4931 public:
4937  Image2DGL(
4938  const Context& context,
4939  cl_mem_flags flags,
4940  cl_GLenum target,
4941  cl_GLint miplevel,
4942  cl_GLuint texobj,
4943  cl_int * err = NULL)
4944  {
4945  cl_int error;
4946  object_ = ::clCreateFromGLTexture2D(
4947  context(),
4948  flags,
4949  target,
4950  miplevel,
4951  texobj,
4952  &error);
4953 
4954  detail::errHandler(error, __CREATE_GL_TEXTURE_2D_ERR);
4955  if (err != NULL) {
4956  *err = error;
4957  }
4958 
4959  }
4960 
4962  Image2DGL() : Image2D() { }
4963 
4971  explicit Image2DGL(const cl_mem& image, bool retainObject = false) :
4972  Image2D(image, retainObject) { }
4973 
4978  Image2DGL& operator = (const cl_mem& rhs)
4979  {
4980  Image2D::operator=(rhs);
4981  return *this;
4982  }
4983 
4987  Image2DGL(const Image2DGL& img) : Image2D(img) {}
4988 
4992  Image2DGL& operator = (const Image2DGL &img)
4993  {
4994  Image2D::operator=(img);
4995  return *this;
4996  }
4997 
5001  Image2DGL(Image2DGL&& img) CL_HPP_NOEXCEPT_ : Image2D(std::move(img)) {}
5002 
5006  Image2DGL& operator = (Image2DGL &&img)
5007  {
5008  Image2D::operator=(std::move(img));
5009  return *this;
5010  }
5011 
5012 } CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED;
5013 #endif // CL_USE_DEPRECATED_OPENCL_1_1_APIS
5014 
5015 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
5016 
5019 class Image2DArray : public Image
5020 {
5021 public:
5022  Image2DArray(
5023  const Context& context,
5024  cl_mem_flags flags,
5025  ImageFormat format,
5026  size_type arraySize,
5027  size_type width,
5028  size_type height,
5029  size_type rowPitch,
5030  size_type slicePitch,
5031  void* host_ptr = NULL,
5032  cl_int* err = NULL)
5033  {
5034  cl_int error;
5035  cl_image_desc desc =
5036  {
5037  CL_MEM_OBJECT_IMAGE2D_ARRAY,
5038  width,
5039  height,
5040  0, // depth (unused)
5041  arraySize,
5042  rowPitch,
5043  slicePitch,
5044  0, 0, 0
5045  };
5046  object_ = ::clCreateImage(
5047  context(),
5048  flags,
5049  &format,
5050  &desc,
5051  host_ptr,
5052  &error);
5053 
5054  detail::errHandler(error, __CREATE_IMAGE_ERR);
5055  if (err != NULL) {
5056  *err = error;
5057  }
5058  }
5059 
5060  Image2DArray() { }
5061 
5069  explicit Image2DArray(const cl_mem& imageArray, bool retainObject = false) : Image(imageArray, retainObject) { }
5070 
5071  Image2DArray& operator = (const cl_mem& rhs)
5072  {
5073  Image::operator=(rhs);
5074  return *this;
5075  }
5076 
5080  Image2DArray(const Image2DArray& img) : Image(img) {}
5081 
5085  Image2DArray& operator = (const Image2DArray &img)
5086  {
5087  Image::operator=(img);
5088  return *this;
5089  }
5090 
5094  Image2DArray(Image2DArray&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
5095 
5099  Image2DArray& operator = (Image2DArray &&img)
5100  {
5101  Image::operator=(std::move(img));
5102  return *this;
5103  }
5104 };
5105 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 120
5106 
5113 class Image3D : public Image
5114 {
5115 public:
5120  Image3D(
5121  const Context& context,
5122  cl_mem_flags flags,
5123  ImageFormat format,
5124  size_type width,
5125  size_type height,
5126  size_type depth,
5127  size_type row_pitch = 0,
5128  size_type slice_pitch = 0,
5129  void* host_ptr = NULL,
5130  cl_int* err = NULL)
5131  {
5132  cl_int error;
5133  bool useCreateImage;
5134 
5135 #if CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
5136  // Run-time decision based on the actual platform
5137  {
5138  cl_uint version = detail::getContextPlatformVersion(context());
5139  useCreateImage = (version >= 0x10002); // OpenCL 1.2 or above
5140  }
5141 #elif CL_HPP_TARGET_OPENCL_VERSION >= 120
5142  useCreateImage = true;
5143 #else
5144  useCreateImage = false;
5145 #endif
5146 
5147 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
5148  if (useCreateImage)
5149  {
5150  cl_image_desc desc =
5151  {
5152  CL_MEM_OBJECT_IMAGE3D,
5153  width,
5154  height,
5155  depth,
5156  0, // array size (unused)
5157  row_pitch,
5158  slice_pitch,
5159  0, 0, 0
5160  };
5161  object_ = ::clCreateImage(
5162  context(),
5163  flags,
5164  &format,
5165  &desc,
5166  host_ptr,
5167  &error);
5168 
5169  detail::errHandler(error, __CREATE_IMAGE_ERR);
5170  if (err != NULL) {
5171  *err = error;
5172  }
5173  }
5174 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
5175 #if CL_HPP_MINIMUM_OPENCL_VERSION < 120
5176  if (!useCreateImage)
5177  {
5178  object_ = ::clCreateImage3D(
5179  context(), flags, &format, width, height, depth, row_pitch,
5180  slice_pitch, host_ptr, &error);
5181 
5182  detail::errHandler(error, __CREATE_IMAGE3D_ERR);
5183  if (err != NULL) {
5184  *err = error;
5185  }
5186  }
5187 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 120
5188  }
5189 
5191  Image3D() : Image() { }
5192 
5200  explicit Image3D(const cl_mem& image3D, bool retainObject = false) :
5201  Image(image3D, retainObject) { }
5202 
5207  Image3D& operator = (const cl_mem& rhs)
5208  {
5209  Image::operator=(rhs);
5210  return *this;
5211  }
5212 
5216  Image3D(const Image3D& img) : Image(img) {}
5217 
5221  Image3D& operator = (const Image3D &img)
5222  {
5223  Image::operator=(img);
5224  return *this;
5225  }
5226 
5230  Image3D(Image3D&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
5231 
5235  Image3D& operator = (Image3D &&img) CL_HPP_NOEXCEPT_
5236  {
5237  Image::operator=(std::move(img));
5238  return *this;
5239  }
5240 };
5241 
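// --- Illustrative usage sketch (not part of the original header) ---
// A 64x64x64 single-channel float volume copied from host memory; the sizes and
// the hostVoxels pointer are assumptions for this example, and both pitches are
// left at 0 so the runtime treats the data as packed.
inline cl::Image3D exampleCreateVolume(const cl::Context &ctx, void *hostVoxels, cl_int *err = NULL)
{
    return cl::Image3D(
        ctx,
        CL_MEM_READ_ONLY | CL_MEM_COPY_HOST_PTR,
        cl::ImageFormat(CL_R, CL_FLOAT),
        64, 64, 64,  // width, height, depth
        0, 0,        // row_pitch, slice_pitch
        hostVoxels,
        err);
}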
5242 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
5243 
5251 class Image3DGL : public Image3D
5252 {
5253 public:
5259  Image3DGL(
5260  const Context& context,
5261  cl_mem_flags flags,
5262  cl_GLenum target,
5263  cl_GLint miplevel,
5264  cl_GLuint texobj,
5265  cl_int * err = NULL)
5266  {
5267  cl_int error;
5268  object_ = ::clCreateFromGLTexture3D(
5269  context(),
5270  flags,
5271  target,
5272  miplevel,
5273  texobj,
5274  &error);
5275 
5276  detail::errHandler(error, __CREATE_GL_TEXTURE_3D_ERR);
5277  if (err != NULL) {
5278  *err = error;
5279  }
5280  }
5281 
5283  Image3DGL() : Image3D() { }
5284 
5292  explicit Image3DGL(const cl_mem& image, bool retainObject = false) :
5293  Image3D(image, retainObject) { }
5294 
5299  Image3DGL& operator = (const cl_mem& rhs)
5300  {
5301  Image3D::operator=(rhs);
5302  return *this;
5303  }
5304 
5308  Image3DGL(const Image3DGL& img) : Image3D(img) {}
5309 
5313  Image3DGL& operator = (const Image3DGL &img)
5314  {
5315  Image3D::operator=(img);
5316  return *this;
5317  }
5318 
5322  Image3DGL(Image3DGL&& img) CL_HPP_NOEXCEPT_ : Image3D(std::move(img)) {}
5323 
5327  Image3DGL& operator = (Image3DGL &&img) CL_HPP_NOEXCEPT_
5328  {
5329  Image3D::operator=(std::move(img));
5330  return *this;
5331  }
5332 };
5333 #endif // CL_USE_DEPRECATED_OPENCL_1_1_APIS
5334 
5335 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
5336 
5342 class ImageGL : public Image
5343 {
5344 public:
5345  ImageGL(
5346  const Context& context,
5347  cl_mem_flags flags,
5348  cl_GLenum target,
5349  cl_GLint miplevel,
5350  cl_GLuint texobj,
5351  cl_int * err = NULL)
5352  {
5353  cl_int error;
5354  object_ = ::clCreateFromGLTexture(
5355  context(),
5356  flags,
5357  target,
5358  miplevel,
5359  texobj,
5360  &error);
5361 
5362  detail::errHandler(error, __CREATE_GL_TEXTURE_ERR);
5363  if (err != NULL) {
5364  *err = error;
5365  }
5366  }
5367 
5368  ImageGL() : Image() { }
5369 
5377  explicit ImageGL(const cl_mem& image, bool retainObject = false) :
5378  Image(image, retainObject) { }
5379 
5380  ImageGL& operator = (const cl_mem& rhs)
5381  {
5382  Image::operator=(rhs);
5383  return *this;
5384  }
5385 
5389  ImageGL(const ImageGL& img) : Image(img) {}
5390 
5394  ImageGL& operator = (const ImageGL &img)
5395  {
5396  Image::operator=(img);
5397  return *this;
5398  }
5399 
5403  ImageGL(ImageGL&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
5404 
5408  ImageGL& operator = (ImageGL &&img)
5409  {
5410  Image::operator=(std::move(img));
5411  return *this;
5412  }
5413 };
5414 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
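// --- Illustrative usage sketch (not part of the original header) ---
// Wrapping an existing OpenGL texture as an OpenCL image. This assumes the
// context was created with CL/GL sharing enabled and that target/glTexture
// name a complete GL texture (e.g. GL_TEXTURE_2D); both parameters are
// assumptions supplied by the caller for this example.
#if CL_HPP_TARGET_OPENCL_VERSION >= 120
inline cl::ImageGL exampleWrapGLTexture(
    const cl::Context &ctx, cl_GLenum target, cl_GLuint glTexture, cl_int *err = NULL)
{
    return cl::ImageGL(ctx, CL_MEM_READ_WRITE, target, 0 /* miplevel */, glTexture, err);
}
#endif // CL_HPP_TARGET_OPENCL_VERSION >= 120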
5415 
5416 
5417 
5418 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5419 
5425 class Pipe : public Memory
5426 {
5427 public:
5428 
5438  Pipe(
5439  const Context& context,
5440  cl_uint packet_size,
5441  cl_uint max_packets,
5442  cl_int* err = NULL)
5443  {
5444  cl_int error;
5445 
5446  cl_mem_flags flags = CL_MEM_READ_WRITE | CL_MEM_HOST_NO_ACCESS;
5447  object_ = ::clCreatePipe(context(), flags, packet_size, max_packets, nullptr, &error);
5448 
5449  detail::errHandler(error, __CREATE_PIPE_ERR);
5450  if (err != NULL) {
5451  *err = error;
5452  }
5453  }
5454 
5463  Pipe(
5464  cl_uint packet_size,
5465  cl_uint max_packets,
5466  cl_int* err = NULL)
5467  {
5468  cl_int error;
5469 
5470  Context context = Context::getDefault(err);
5471 
5472  cl_mem_flags flags = CL_MEM_READ_WRITE | CL_MEM_HOST_NO_ACCESS;
5473  object_ = ::clCreatePipe(context(), flags, packet_size, max_packets, nullptr, &error);
5474 
5475  detail::errHandler(error, __CREATE_PIPE_ERR);
5476  if (err != NULL) {
5477  *err = error;
5478  }
5479  }
5480 
5482  Pipe() : Memory() { }
5483 
5491  explicit Pipe(const cl_mem& pipe, bool retainObject = false) :
5492  Memory(pipe, retainObject) { }
5493 
5498  Pipe& operator = (const cl_mem& rhs)
5499  {
5500  Memory::operator=(rhs);
5501  return *this;
5502  }
5503 
5507  Pipe(const Pipe& pipe) : Memory(pipe) {}
5508 
5512  Pipe& operator = (const Pipe &pipe)
5513  {
5514  Memory::operator=(pipe);
5515  return *this;
5516  }
5517 
5521  Pipe(Pipe&& pipe) CL_HPP_NOEXCEPT_ : Memory(std::move(pipe)) {}
5522 
5526  Pipe& operator = (Pipe &&pipe) CL_HPP_NOEXCEPT_
5527  {
5528  Memory::operator=(std::move(pipe));
5529  return *this;
5530  }
5531 
5533  template <typename T>
5534  cl_int getInfo(cl_pipe_info name, T* param) const
5535  {
5536  return detail::errHandler(
5537  detail::getInfo(&::clGetPipeInfo, object_, name, param),
5538  __GET_PIPE_INFO_ERR);
5539  }
5540 
5542  template <cl_pipe_info name> typename
5543  detail::param_traits<detail::cl_pipe_info, name>::param_type
5544  getInfo(cl_int* err = NULL) const
5545  {
5546  typename detail::param_traits<
5547  detail::cl_pipe_info, name>::param_type param;
5548  cl_int result = getInfo(name, &param);
5549  if (err != NULL) {
5550  *err = result;
5551  }
5552  return param;
5553  }
5554 }; // class Pipe
5555 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
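// --- Illustrative usage sketch (not part of the original header) ---
// Creating a pipe of 1024 packets of 16 bytes each and querying its packet size
// back; the sizes and the context argument are assumptions for this example.
#if CL_HPP_TARGET_OPENCL_VERSION >= 200
inline cl::Pipe exampleCreatePipe(const cl::Context &ctx, cl_int *err = NULL)
{
    cl::Pipe pipe(ctx, 16 /* packet_size */, 1024 /* max_packets */, err);
    cl_uint packetSize = pipe.getInfo<CL_PIPE_PACKET_SIZE>(err);
    (void)packetSize; // would normally be checked against the expected size
    return pipe;
}
#endif // CL_HPP_TARGET_OPENCL_VERSION >= 200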
5556 
5557 
5566 class Sampler : public detail::Wrapper<cl_sampler>
5567 {
5568 public:
5570  Sampler() { }
5571 
5576  Sampler(
5577  const Context& context,
5578  cl_bool normalized_coords,
5579  cl_addressing_mode addressing_mode,
5580  cl_filter_mode filter_mode,
5581  cl_int* err = NULL)
5582  {
5583  cl_int error;
5584 
5585 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5586  cl_sampler_properties sampler_properties[] = {
5587  CL_SAMPLER_NORMALIZED_COORDS, normalized_coords,
5588  CL_SAMPLER_ADDRESSING_MODE, addressing_mode,
5589  CL_SAMPLER_FILTER_MODE, filter_mode,
5590  0 };
5591  object_ = ::clCreateSamplerWithProperties(
5592  context(),
5593  sampler_properties,
5594  &error);
5595 
5596  detail::errHandler(error, __CREATE_SAMPLER_WITH_PROPERTIES_ERR);
5597  if (err != NULL) {
5598  *err = error;
5599  }
5600 #else
5601  object_ = ::clCreateSampler(
5602  context(),
5603  normalized_coords,
5604  addressing_mode,
5605  filter_mode,
5606  &error);
5607 
5608  detail::errHandler(error, __CREATE_SAMPLER_ERR);
5609  if (err != NULL) {
5610  *err = error;
5611  }
5612 #endif
5613  }
5614 
5623  explicit Sampler(const cl_sampler& sampler, bool retainObject = false) :
5624  detail::Wrapper<cl_type>(sampler, retainObject) { }
5625 
5631  Sampler& operator = (const cl_sampler& rhs)
5632  {
5633  detail::Wrapper<cl_type>::operator=(rhs);
5634  return *this;
5635  }
5636 
5640  Sampler(const Sampler& sam) : detail::Wrapper<cl_type>(sam) {}
5641 
5645  Sampler& operator = (const Sampler &sam)
5646  {
5647  detail::Wrapper<cl_type>::operator=(sam);
5648  return *this;
5649  }
5650 
5654  Sampler(Sampler&& sam) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(sam)) {}
5655 
5659  Sampler& operator = (Sampler &&sam) CL_HPP_NOEXCEPT_
5660  {
5661  detail::Wrapper<cl_type>::operator=(std::move(sam));
5662  return *this;
5663  }
5664 
5666  template <typename T>
5667  cl_int getInfo(cl_sampler_info name, T* param) const
5668  {
5669  return detail::errHandler(
5670  detail::getInfo(&::clGetSamplerInfo, object_, name, param),
5671  __GET_SAMPLER_INFO_ERR);
5672  }
5673 
5675  template <cl_sampler_info name> typename
5676  detail::param_traits<detail::cl_sampler_info, name>::param_type
5677  getInfo(cl_int* err = NULL) const
5678  {
5679  typename detail::param_traits<
5680  detail::cl_sampler_info, name>::param_type param;
5681  cl_int result = getInfo(name, &param);
5682  if (err != NULL) {
5683  *err = result;
5684  }
5685  return param;
5686  }
5687 };
5688 
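// --- Illustrative usage sketch (not part of the original header) ---
// A sampler using unnormalized coordinates, clamp-to-edge addressing and
// nearest filtering; the particular mode constants are assumptions for this example.
inline cl::Sampler exampleCreateSampler(const cl::Context &ctx, cl_int *err = NULL)
{
    return cl::Sampler(ctx, CL_FALSE, CL_ADDRESS_CLAMP_TO_EDGE, CL_FILTER_NEAREST, err);
}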
5689 class Program;
5690 class CommandQueue;
5691 class DeviceCommandQueue;
5692 class Kernel;
5693 
5695 class NDRange
5696 {
5697 private:
5698  size_type sizes_[3];
5699  cl_uint dimensions_;
5700 
5701 public:
5703  NDRange()
5704  : dimensions_(0)
5705  {
5706  sizes_[0] = 0;
5707  sizes_[1] = 0;
5708  sizes_[2] = 0;
5709  }
5710 
5712  NDRange(size_type size0)
5713  : dimensions_(1)
5714  {
5715  sizes_[0] = size0;
5716  sizes_[1] = 1;
5717  sizes_[2] = 1;
5718  }
5719 
5721  NDRange(size_type size0, size_type size1)
5722  : dimensions_(2)
5723  {
5724  sizes_[0] = size0;
5725  sizes_[1] = size1;
5726  sizes_[2] = 1;
5727  }
5728 
5730  NDRange(size_type size0, size_type size1, size_type size2)
5731  : dimensions_(3)
5732  {
5733  sizes_[0] = size0;
5734  sizes_[1] = size1;
5735  sizes_[2] = size2;
5736  }
5737 
5742  operator const size_type*() const {
5743  return sizes_;
5744  }
5745 
5747  size_type dimensions() const
5748  {
5749  return dimensions_;
5750  }
5751 
5753  // Returns the size of the object in bytes, based on the runtime number of dimensions
5754  size_type size() const
5755  {
5756  return dimensions_*sizeof(size_type);
5757  }
5758 
5759  size_type* get()
5760  {
5761  return sizes_;
5762  }
5763 
5764  const size_type* get() const
5765  {
5766  return sizes_;
5767  }
5768 };
5769 
5771 static const NDRange NullRange;
5772 
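// --- Illustrative usage sketch (not part of the original header) ---
// NDRange describes a 1-, 2- or 3-dimensional work size; NullRange stands in
// for "unspecified" (for example, to let the runtime pick a local size). The
// 1024x768 global size is an assumption for this example.
inline cl::NDRange exampleGlobalRange()
{
    return cl::NDRange(1024, 768); // 2D global work size
}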
5774  struct LocalSpaceArg
5775  {
5776  size_type size_;
5777 };
5778 
5779 namespace detail {
5780 
5781 template <typename T, class Enable = void>
5782  struct KernelArgumentHandler;
5783 
5784 // Enable for objects that are not subclasses of memory
5785 // Pointers, constants etc
5786 template <typename T>
5787 struct KernelArgumentHandler<T, typename std::enable_if<!std::is_base_of<cl::Memory, T>::value>::type>
5788 {
5789  static size_type size(const T&) { return sizeof(T); }
5790  static const T* ptr(const T& value) { return &value; }
5791 };
5792 
5793 // Enable for subclasses of memory where we want to get a reference to the cl_mem out
5794 // and pass that in for safety
5795 template <typename T>
5796 struct KernelArgumentHandler<T, typename std::enable_if<std::is_base_of<cl::Memory, T>::value>::type>
5797 {
5798  static size_type size(const T&) { return sizeof(cl_mem); }
5799  static const cl_mem* ptr(const T& value) { return &(value()); }
5800 };
5801 
5802 // Specialization for DeviceCommandQueue defined later
5803 
5804 template <>
5805  struct KernelArgumentHandler<LocalSpaceArg, void>
5806  {
5807  static size_type size(const LocalSpaceArg& value) { return value.size_; }
5808  static const void* ptr(const LocalSpaceArg&) { return NULL; }
5809 };
5810 
5811 }
5813 
5817 inline LocalSpaceArg
5818 Local(size_type size)
5819 {
5820  LocalSpaceArg ret = { size };
5821  return ret;
5822 }
5823 
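// --- Illustrative usage sketch (not part of the original header) ---
// Reserving 256 floats of local (scratch) memory; the returned LocalSpaceArg is
// later passed to Kernel::setArg for a __local kernel parameter. The 256-float
// size is an assumption for this example.
inline cl::LocalSpaceArg exampleLocalScratch()
{
    return cl::Local(256 * sizeof(float));
}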
5832 class Kernel : public detail::Wrapper<cl_kernel>
5833 {
5834 public:
5835  inline Kernel(const Program& program, const char* name, cl_int* err = NULL);
5836 
5838  Kernel() { }
5839 
5848  explicit Kernel(const cl_kernel& kernel, bool retainObject = false) :
5849  detail::Wrapper<cl_type>(kernel, retainObject) { }
5850 
5856  Kernel& operator = (const cl_kernel& rhs)
5857  {
5858  detail::Wrapper<cl_type>::operator=(rhs);
5859  return *this;
5860  }
5861 
5865  Kernel(const Kernel& kernel) : detail::Wrapper<cl_type>(kernel) {}
5866 
5870  Kernel& operator = (const Kernel &kernel)
5871  {
5872  detail::Wrapper<cl_type>::operator=(kernel);
5873  return *this;
5874  }
5875 
5879  Kernel(Kernel&& kernel) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(kernel)) {}
5880 
5884  Kernel& operator = (Kernel &&kernel) CL_HPP_NOEXCEPT_
5885  {
5886  detail::Wrapper<cl_type>::operator=(std::move(kernel));
5887  return *this;
5888  }
5889 
5890  template <typename T>
5891  cl_int getInfo(cl_kernel_info name, T* param) const
5892  {
5893  return detail::errHandler(
5894  detail::getInfo(&::clGetKernelInfo, object_, name, param),
5895  __GET_KERNEL_INFO_ERR);
5896  }
5897 
5898  template <cl_kernel_info name> typename
5899  detail::param_traits<detail::cl_kernel_info, name>::param_type
5900  getInfo(cl_int* err = NULL) const
5901  {
5902  typename detail::param_traits<
5903  detail::cl_kernel_info, name>::param_type param;
5904  cl_int result = getInfo(name, &param);
5905  if (err != NULL) {
5906  *err = result;
5907  }
5908  return param;
5909  }
5910 
5911 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
5912  template <typename T>
5913  cl_int getArgInfo(cl_uint argIndex, cl_kernel_arg_info name, T* param) const
5914  {
5915  return detail::errHandler(
5916  detail::getInfo(&::clGetKernelArgInfo, object_, argIndex, name, param),
5917  __GET_KERNEL_ARG_INFO_ERR);
5918  }
5919 
5920  template <cl_kernel_arg_info name> typename
5921  detail::param_traits<detail::cl_kernel_arg_info, name>::param_type
5922  getArgInfo(cl_uint argIndex, cl_int* err = NULL) const
5923  {
5924  typename detail::param_traits<
5925  detail::cl_kernel_arg_info, name>::param_type param;
5926  cl_int result = getArgInfo(argIndex, name, &param);
5927  if (err != NULL) {
5928  *err = result;
5929  }
5930  return param;
5931  }
5932 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
5933 
5934  template <typename T>
5935  cl_int getWorkGroupInfo(
5936  const Device& device, cl_kernel_work_group_info name, T* param) const
5937  {
5938  return detail::errHandler(
5939  detail::getInfo(
5940  &::clGetKernelWorkGroupInfo, object_, device(), name, param),
5941  __GET_KERNEL_WORK_GROUP_INFO_ERR);
5942  }
5943 
5944  template <cl_kernel_work_group_info name> typename
5945  detail::param_traits<detail::cl_kernel_work_group_info, name>::param_type
5946  getWorkGroupInfo(const Device& device, cl_int* err = NULL) const
5947  {
5948  typename detail::param_traits<
5949  detail::cl_kernel_work_group_info, name>::param_type param;
5950  cl_int result = getWorkGroupInfo(device, name, &param);
5951  if (err != NULL) {
5952  *err = result;
5953  }
5954  return param;
5955  }
5956 
5957 #if (CL_HPP_TARGET_OPENCL_VERSION >= 200 && defined(CL_HPP_USE_CL_SUB_GROUPS_KHR)) || CL_HPP_TARGET_OPENCL_VERSION >= 210
5958  cl_int getSubGroupInfo(const cl::Device &dev, cl_kernel_sub_group_info name, const cl::NDRange &range, size_type* param) const
5959  {
5960 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
5961 
5962  return detail::errHandler(
5963  clGetKernelSubGroupInfo(object_, dev(), name, range.size(), range.get(), sizeof(size_type), param, nullptr),
5964  __GET_KERNEL_SUB_GROUP_INFO_ERR);
5965 
5966 #else // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
5967 
5968  typedef clGetKernelSubGroupInfoKHR_fn PFN_clGetKernelSubGroupInfoKHR;
5969  static PFN_clGetKernelSubGroupInfoKHR pfn_clGetKernelSubGroupInfoKHR = NULL;
5970  CL_HPP_INIT_CL_EXT_FCN_PTR_(clGetKernelSubGroupInfoKHR);
5971 
5972  return detail::errHandler(
5973  pfn_clGetKernelSubGroupInfoKHR(object_, dev(), name, range.size(), range.get(), sizeof(size_type), param, nullptr),
5974  __GET_KERNEL_SUB_GROUP_INFO_ERR);
5975 
5976 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
5977  }
5978 
5979  template <cl_kernel_sub_group_info name>
5980  size_type getSubGroupInfo(const cl::Device &dev, const cl::NDRange &range, cl_int* err = NULL) const
5981  {
5982  size_type param;
5983  cl_int result = getSubGroupInfo(dev, name, range, &param);
5984  if (err != NULL) {
5985  *err = result;
5986  }
5987  return param;
5988  }
5989 #endif // #if (CL_HPP_TARGET_OPENCL_VERSION >= 200 && defined(CL_HPP_USE_CL_SUB_GROUPS_KHR)) || CL_HPP_TARGET_OPENCL_VERSION >= 210
5990 
5991 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5992 
5994  template<typename T, class D>
5995  cl_int setArg(cl_uint index, const cl::pointer<T, D> &argPtr)
5996  {
5997  return detail::errHandler(
5998  ::clSetKernelArgSVMPointer(object_, index, argPtr.get()),
5999  __SET_KERNEL_ARGS_ERR);
6000  }
6001 
6004  template<typename T, class Alloc>
6005  cl_int setArg(cl_uint index, const cl::vector<T, Alloc> &argPtr)
6006  {
6007  return detail::errHandler(
6008  ::clSetKernelArgSVMPointer(object_, index, argPtr.data()),
6009  __SET_KERNEL_ARGS_ERR);
6010  }
6011 
6014  template<typename T>
6015  typename std::enable_if<std::is_pointer<T>::value, cl_int>::type
6016  setArg(cl_uint index, const T argPtr)
6017  {
6018  return detail::errHandler(
6019  ::clSetKernelArgSVMPointer(object_, index, argPtr),
6020  __SET_KERNEL_ARGS_ERR);
6021  }
6022 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
6023 
6026  template <typename T>
6027  typename std::enable_if<!std::is_pointer<T>::value, cl_int>::type
6028  setArg(cl_uint index, const T &value)
6029  {
6030  return detail::errHandler(
6031  ::clSetKernelArg(
6032  object_,
6033  index,
6034  detail::KernelArgumentHandler<T>::size(value),
6035  detail::KernelArgumentHandler<T>::ptr(value)),
6036  __SET_KERNEL_ARGS_ERR);
6037  }
6038 
6039  cl_int setArg(cl_uint index, size_type size, const void* argPtr)
6040  {
6041  return detail::errHandler(
6042  ::clSetKernelArg(object_, index, size, argPtr),
6043  __SET_KERNEL_ARGS_ERR);
6044  }
6045 
6046 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
6047 
6051  cl_int setSVMPointers(const vector<void*> &pointerList)
6052  {
6053  return detail::errHandler(
6054  ::clSetKernelExecInfo(
6055  object_,
6056  CL_KERNEL_EXEC_INFO_SVM_PTRS,
6057  sizeof(void*)*pointerList.size(),
6058  pointerList.data()));
6059  }
6060 
6065  template<int ArrayLength>
6066  cl_int setSVMPointers(const std::array<void*, ArrayLength> &pointerList)
6067  {
6068  return detail::errHandler(
6069  ::clSetKernelExecInfo(
6070  object_,
6071  CL_KERNEL_EXEC_INFO_SVM_PTRS,
6072  sizeof(void*)*pointerList.size(),
6073  pointerList.data()));
6074  }
6075 
6087  cl_int enableFineGrainedSystemSVM(bool svmEnabled)
6088  {
6089  cl_bool svmEnabled_ = svmEnabled ? CL_TRUE : CL_FALSE;
6090  return detail::errHandler(
6091  ::clSetKernelExecInfo(
6092  object_,
6093  CL_KERNEL_EXEC_INFO_SVM_FINE_GRAIN_SYSTEM,
6094  sizeof(cl_bool),
6095  &svmEnabled_
6096  )
6097  );
6098  }
6099 
6100  template<int index, int ArrayLength, class D, typename T0, typename T1, typename... Ts>
6101  void setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, const pointer<T0, D> &t0, const pointer<T1, D> &t1, Ts & ... ts)
6102  {
6103  pointerList[index] = static_cast<void*>(t0.get());
6104  setSVMPointersHelper<index + 1, ArrayLength>(pointerList, t1, ts...);
6105  }
6106 
6107  template<int index, int ArrayLength, typename T0, typename T1, typename... Ts>
6108  typename std::enable_if<std::is_pointer<T0>::value, void>::type
6109  setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, T0 t0, T1 t1, Ts... ts)
6110  {
6111  pointerList[index] = static_cast<void*>(t0);
6112  setSVMPointersHelper<index + 1, ArrayLength>(pointerList, t1, ts...);
6113  }
6114 
6115  template<int index, int ArrayLength, typename T0, class D>
6116  void setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, const pointer<T0, D> &t0)
6117  {
6118  pointerList[index] = static_cast<void*>(t0.get());
6119  }
6120 
6121 
6122  template<int index, int ArrayLength, typename T0>
6123  typename std::enable_if<std::is_pointer<T0>::value, void>::type
6124  setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, T0 t0)
6125  {
6126  pointerList[index] = static_cast<void*>(t0);
6127  }
6128 
6129  template<typename T0, typename... Ts>
6130  cl_int setSVMPointers(const T0 &t0, Ts & ... ts)
6131  {
6132  std::array<void*, 1 + sizeof...(Ts)> pointerList;
6133 
6134  setSVMPointersHelper<0, 1 + sizeof...(Ts)>(pointerList, t0, ts...);
6135  return detail::errHandler(
6136  ::clSetKernelExecInfo(
6137  object_,
6138  CL_KERNEL_EXEC_INFO_SVM_PTRS,
6139  sizeof(void*)*(1 + sizeof...(Ts)),
6140  pointerList.data()));
6141  }
6142 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
6143 
6144 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6145 
6150  Kernel clone()
6151  {
6152  cl_int error;
6153  Kernel retValue(clCloneKernel(this->get(), &error));
6154 
6155  detail::errHandler(error, __CLONE_KERNEL_ERR);
6156  return retValue;
6157  }
6158 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6159 };
6160 
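// --- Illustrative usage sketch (not part of the original header) ---
// Setting a buffer, a scalar and a local-memory argument on a kernel. The
// argument order assumes a hypothetical kernel signature of the form
// kernel void f(global const int*, int, local float*).
inline void exampleSetKernelArgs(cl::Kernel &kernel, const cl::Buffer &input, cl_int n)
{
    kernel.setArg(0, input);                          // cl_mem extracted via KernelArgumentHandler
    kernel.setArg(1, n);                              // scalar copied by value
    kernel.setArg(2, cl::Local(256 * sizeof(float))); // local (scratch) memory, size only
}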
6164 class Program : public detail::Wrapper<cl_program>
6165 {
6166 public:
6167 #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6168  typedef vector<vector<unsigned char>> Binaries;
6169  typedef vector<string> Sources;
6170 #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6171  typedef vector<std::pair<const void*, size_type> > Binaries;
6172  typedef vector<std::pair<const char*, size_type> > Sources;
6173 #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6174 
6175  Program(
6176  const string& source,
6177  bool build = false,
6178  cl_int* err = NULL)
6179  {
6180  cl_int error;
6181 
6182  const char * strings = source.c_str();
6183  const size_type length = source.size();
6184 
6185  Context context = Context::getDefault(err);
6186 
6187  object_ = ::clCreateProgramWithSource(
6188  context(), (cl_uint)1, &strings, &length, &error);
6189 
6190  detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
6191 
6192  if (error == CL_SUCCESS && build) {
6193 
6194  error = ::clBuildProgram(
6195  object_,
6196  0,
6197  NULL,
6198 #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6199  "-cl-std=CL2.0",
6200 #else
6201  "",
6202 #endif // #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6203  NULL,
6204  NULL);
6205 
6206  detail::buildErrHandler(error, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6207  }
6208 
6209  if (err != NULL) {
6210  *err = error;
6211  }
6212  }
6213 
6214  Program(
6215  const Context& context,
6216  const string& source,
6217  bool build = false,
6218  cl_int* err = NULL)
6219  {
6220  cl_int error;
6221 
6222  const char * strings = source.c_str();
6223  const size_type length = source.size();
6224 
6225  object_ = ::clCreateProgramWithSource(
6226  context(), (cl_uint)1, &strings, &length, &error);
6227 
6228  detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
6229 
6230  if (error == CL_SUCCESS && build) {
6231  error = ::clBuildProgram(
6232  object_,
6233  0,
6234  NULL,
6235 #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6236  "-cl-std=CL2.0",
6237 #else
6238  "",
6239 #endif // #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6240  NULL,
6241  NULL);
6242 
6243  detail::buildErrHandler(error, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6244  }
6245 
6246  if (err != NULL) {
6247  *err = error;
6248  }
6249  }
6250 
6255  Program(
6256  const Sources& sources,
6257  cl_int* err = NULL)
6258  {
6259  cl_int error;
6260  Context context = Context::getDefault(err);
6261 
6262  const size_type n = (size_type)sources.size();
6263 
6264  vector<size_type> lengths(n);
6265  vector<const char*> strings(n);
6266 
6267  for (size_type i = 0; i < n; ++i) {
6268 #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6269  strings[i] = sources[(int)i].data();
6270  lengths[i] = sources[(int)i].length();
6271 #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6272  strings[i] = sources[(int)i].first;
6273  lengths[i] = sources[(int)i].second;
6274 #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6275  }
6276 
6277  object_ = ::clCreateProgramWithSource(
6278  context(), (cl_uint)n, strings.data(), lengths.data(), &error);
6279 
6280  detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
6281  if (err != NULL) {
6282  *err = error;
6283  }
6284  }
6285 
6290  Program(
6291  const Context& context,
6292  const Sources& sources,
6293  cl_int* err = NULL)
6294  {
6295  cl_int error;
6296 
6297  const size_type n = (size_type)sources.size();
6298 
6299  vector<size_type> lengths(n);
6300  vector<const char*> strings(n);
6301 
6302  for (size_type i = 0; i < n; ++i) {
6303 #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6304  strings[i] = sources[(int)i].data();
6305  lengths[i] = sources[(int)i].length();
6306 #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6307  strings[i] = sources[(int)i].first;
6308  lengths[i] = sources[(int)i].second;
6309 #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6310  }
6311 
6312  object_ = ::clCreateProgramWithSource(
6313  context(), (cl_uint)n, strings.data(), lengths.data(), &error);
6314 
6315  detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
6316  if (err != NULL) {
6317  *err = error;
6318  }
6319  }
6320 
6321 
6322 #if CL_HPP_TARGET_OPENCL_VERSION >= 210 || (CL_HPP_TARGET_OPENCL_VERSION==200 && defined(CL_HPP_USE_IL_KHR))
6323 
6327  Program(
6328  const vector<char>& IL,
6329  bool build = false,
6330  cl_int* err = NULL)
6331  {
6332  cl_int error;
6333 
6334  Context context = Context::getDefault(err);
6335 
6336 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6337 
6338  object_ = ::clCreateProgramWithIL(
6339  context(), static_cast<const void*>(IL.data()), IL.size(), &error);
6340 
6341 #else // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6342 
6343  typedef clCreateProgramWithILKHR_fn PFN_clCreateProgramWithILKHR;
6344  static PFN_clCreateProgramWithILKHR pfn_clCreateProgramWithILKHR = NULL;
6345  CL_HPP_INIT_CL_EXT_FCN_PTR_(clCreateProgramWithILKHR);
6346 
6347  // Assign the created program directly; `error` is checked below.
6348  object_ = pfn_clCreateProgramWithILKHR(
6349  context(), static_cast<const void*>(IL.data()), IL.size(), &error);
6350 
6351 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6352 
6353  detail::errHandler(error, __CREATE_PROGRAM_WITH_IL_ERR);
6354 
6355  if (error == CL_SUCCESS && build) {
6356 
6357  error = ::clBuildProgram(
6358  object_,
6359  0,
6360  NULL,
6361 #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6362  "-cl-std=CL2.0",
6363 #else
6364  "",
6365 #endif // #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6366  NULL,
6367  NULL);
6368 
6369  detail::buildErrHandler(error, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6370  }
6371 
6372  if (err != NULL) {
6373  *err = error;
6374  }
6375  }
6376 
6382  Program(
6383  const Context& context,
6384  const vector<char>& IL,
6385  bool build = false,
6386  cl_int* err = NULL)
6387  {
6388  cl_int error;
6389 
6390 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6391 
6392  object_ = ::clCreateProgramWithIL(
6393  context(), static_cast<const void*>(IL.data()), IL.size(), &error);
6394 
6395 #else // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6396 
6397  typedef clCreateProgramWithILKHR_fn PFN_clCreateProgramWithILKHR;
6398  static PFN_clCreateProgramWithILKHR pfn_clCreateProgramWithILKHR = NULL;
6399  CL_HPP_INIT_CL_EXT_FCN_PTR_(clCreateProgramWithILKHR);
6400 
6401  // Assign the created program directly; `error` is checked below.
6402  object_ = pfn_clCreateProgramWithILKHR(
6403  context(), static_cast<const void*>(IL.data()), IL.size(), &error);
6404 
6405 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6406 
6407  detail::errHandler(error, __CREATE_PROGRAM_WITH_IL_ERR);
6408 
6409  if (error == CL_SUCCESS && build) {
6410  error = ::clBuildProgram(
6411  object_,
6412  0,
6413  NULL,
6414 #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6415  "-cl-std=CL2.0",
6416 #else
6417  "",
6418 #endif // #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6419  NULL,
6420  NULL);
6421 
6422  detail::buildErrHandler(error, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6423  }
6424 
6425  if (err != NULL) {
6426  *err = error;
6427  }
6428  }
6429 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210 || (CL_HPP_TARGET_OPENCL_VERSION==200 && defined(CL_HPP_USE_IL_KHR))
6430 
6450  Program(
6451  const Context& context,
6452  const vector<Device>& devices,
6453  const Binaries& binaries,
6454  vector<cl_int>* binaryStatus = NULL,
6455  cl_int* err = NULL)
6456  {
6457  cl_int error;
6458 
6459  const size_type numDevices = devices.size();
6460 
6461  // Catch size mismatch early and return
6462  if(binaries.size() != numDevices) {
6463  error = CL_INVALID_VALUE;
6464  detail::errHandler(error, __CREATE_PROGRAM_WITH_BINARY_ERR);
6465  if (err != NULL) {
6466  *err = error;
6467  }
6468  return;
6469  }
6470 
6471 
6472  vector<size_type> lengths(numDevices);
6473  vector<const unsigned char*> images(numDevices);
6474 #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6475  for (size_type i = 0; i < numDevices; ++i) {
6476  images[i] = binaries[i].data();
6477  lengths[i] = binaries[(int)i].size();
6478  }
6479 #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6480  for (size_type i = 0; i < numDevices; ++i) {
6481  images[i] = (const unsigned char*)binaries[i].first;
6482  lengths[i] = binaries[(int)i].second;
6483  }
6484 #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6485 
6486  vector<cl_device_id> deviceIDs(numDevices);
6487  for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
6488  deviceIDs[deviceIndex] = (devices[deviceIndex])();
6489  }
6490 
6491  if(binaryStatus) {
6492  binaryStatus->resize(numDevices);
6493  }
6494 
6495  object_ = ::clCreateProgramWithBinary(
6496  context(), (cl_uint) devices.size(),
6497  deviceIDs.data(),
6498  lengths.data(), images.data(), (binaryStatus != NULL && numDevices > 0)
6499  ? &binaryStatus->front()
6500  : NULL, &error);
6501 
6502  detail::errHandler(error, __CREATE_PROGRAM_WITH_BINARY_ERR);
6503  if (err != NULL) {
6504  *err = error;
6505  }
6506  }
6507 
6508 
6509 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
6510 
6514  Program(
6515  const Context& context,
6516  const vector<Device>& devices,
6517  const string& kernelNames,
6518  cl_int* err = NULL)
6519  {
6520  cl_int error;
6521 
6522 
6523  size_type numDevices = devices.size();
6524  vector<cl_device_id> deviceIDs(numDevices);
6525  for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
6526  deviceIDs[deviceIndex] = (devices[deviceIndex])();
6527  }
6528 
6529  object_ = ::clCreateProgramWithBuiltInKernels(
6530  context(),
6531  (cl_uint) devices.size(),
6532  deviceIDs.data(),
6533  kernelNames.c_str(),
6534  &error);
6535 
6536  detail::errHandler(error, __CREATE_PROGRAM_WITH_BUILT_IN_KERNELS_ERR);
6537  if (err != NULL) {
6538  *err = error;
6539  }
6540  }
6541 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
6542 
6543  Program() { }
6544 
6545 
6552  explicit Program(const cl_program& program, bool retainObject = false) :
6553  detail::Wrapper<cl_type>(program, retainObject) { }
6554 
6555  Program& operator = (const cl_program& rhs)
6556  {
6557  detail::Wrapper<cl_type>::operator=(rhs);
6558  return *this;
6559  }
6560 
6564  Program(const Program& program) : detail::Wrapper<cl_type>(program) {}
6565 
6569  Program& operator = (const Program &program)
6570  {
6571  detail::Wrapper<cl_type>::operator=(program);
6572  return *this;
6573  }
6574 
6578  Program(Program&& program) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(program)) {}
6579 
6583  Program& operator = (Program &&program)
6584  {
6585  detail::Wrapper<cl_type>::operator=(std::move(program));
6586  return *this;
6587  }
6588 
6589  cl_int build(
6590  const vector<Device>& devices,
6591  const char* options = NULL,
6592  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6593  void* data = NULL) const
6594  {
6595  size_type numDevices = devices.size();
6596  vector<cl_device_id> deviceIDs(numDevices);
6597 
6598  for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
6599  deviceIDs[deviceIndex] = (devices[deviceIndex])();
6600  }
6601 
6602  cl_int buildError = ::clBuildProgram(
6603  object_,
6604  (cl_uint)
6605  devices.size(),
6606  deviceIDs.data(),
6607  options,
6608  notifyFptr,
6609  data);
6610 
6611  return detail::buildErrHandler(buildError, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6612  }
6613 
6614  cl_int build(
6615  const char* options = NULL,
6616  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6617  void* data = NULL) const
6618  {
6619  cl_int buildError = ::clBuildProgram(
6620  object_,
6621  0,
6622  NULL,
6623  options,
6624  notifyFptr,
6625  data);
6626 
6627 
6628  return detail::buildErrHandler(buildError, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6629  }
6630 
6631 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
6632  cl_int compile(
6633  const char* options = NULL,
6634  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6635  void* data = NULL) const
6636  {
6637  cl_int error = ::clCompileProgram(
6638  object_,
6639  0,
6640  NULL,
6641  options,
6642  0,
6643  NULL,
6644  NULL,
6645  notifyFptr,
6646  data);
6647  return detail::buildErrHandler(error, __COMPILE_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6648  }
6649 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
6650 
6651  template <typename T>
6652  cl_int getInfo(cl_program_info name, T* param) const
6653  {
6654  return detail::errHandler(
6655  detail::getInfo(&::clGetProgramInfo, object_, name, param),
6656  __GET_PROGRAM_INFO_ERR);
6657  }
6658 
6659  template <cl_program_info name> typename
6660  detail::param_traits<detail::cl_program_info, name>::param_type
6661  getInfo(cl_int* err = NULL) const
6662  {
6663  typename detail::param_traits<
6664  detail::cl_program_info, name>::param_type param;
6665  cl_int result = getInfo(name, &param);
6666  if (err != NULL) {
6667  *err = result;
6668  }
6669  return param;
6670  }
6671 
6672  template <typename T>
6673  cl_int getBuildInfo(
6674  const Device& device, cl_program_build_info name, T* param) const
6675  {
6676  return detail::errHandler(
6677  detail::getInfo(
6678  &::clGetProgramBuildInfo, object_, device(), name, param),
6679  __GET_PROGRAM_BUILD_INFO_ERR);
6680  }
6681 
6682  template <cl_program_build_info name> typename
6683  detail::param_traits<detail::cl_program_build_info, name>::param_type
6684  getBuildInfo(const Device& device, cl_int* err = NULL) const
6685  {
6686  typename detail::param_traits<
6687  detail::cl_program_build_info, name>::param_type param;
6688  cl_int result = getBuildInfo(device, name, &param);
6689  if (err != NULL) {
6690  *err = result;
6691  }
6692  return param;
6693  }
6694 
6700  template <cl_program_build_info name>
6701  vector<std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, name>::param_type>>
6702  getBuildInfo(cl_int *err = NULL) const
6703  {
6704  cl_int result = CL_SUCCESS;
6705 
6706  auto devs = getInfo<CL_PROGRAM_DEVICES>(&result);
6707  vector<std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, name>::param_type>>
6708  devInfo;
6709 
6710  // If there was an initial error from getInfo return the error
6711  if (result != CL_SUCCESS) {
6712  if (err != NULL) {
6713  *err = result;
6714  }
6715  return devInfo;
6716  }
6717 
6718  for (const cl::Device &d : devs) {
6719  typename detail::param_traits<
6720  detail::cl_program_build_info, name>::param_type param;
6721  result = getBuildInfo(d, name, &param);
6722  devInfo.push_back(
6723  std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, name>::param_type>
6724  (d, param));
6725  if (result != CL_SUCCESS) {
6726  // On error, leave the loop and return the error code
6727  break;
6728  }
6729  }
6730  if (err != NULL) {
6731  *err = result;
6732  }
6733  if (result != CL_SUCCESS) {
6734  devInfo.clear();
6735  }
6736  return devInfo;
6737  }
6738 
6739  cl_int createKernels(vector<Kernel>* kernels)
6740  {
6741  cl_uint numKernels;
6742  cl_int err = ::clCreateKernelsInProgram(object_, 0, NULL, &numKernels);
6743  if (err != CL_SUCCESS) {
6744  return detail::errHandler(err, __CREATE_KERNELS_IN_PROGRAM_ERR);
6745  }
6746 
6747  vector<cl_kernel> value(numKernels);
6748 
6749  err = ::clCreateKernelsInProgram(
6750  object_, numKernels, value.data(), NULL);
6751  if (err != CL_SUCCESS) {
6752  return detail::errHandler(err, __CREATE_KERNELS_IN_PROGRAM_ERR);
6753  }
6754 
6755  if (kernels) {
6756  kernels->resize(value.size());
6757 
6758  // Assign to param, constructing with retain behaviour
6759  // to correctly capture each underlying CL object
6760  for (size_type i = 0; i < value.size(); i++) {
6761  // We do not need to retain because this kernel is being created
6762  // by the runtime
6763  (*kernels)[i] = Kernel(value[i], false);
6764  }
6765  }
6766  return CL_SUCCESS;
6767  }
6768 
6769 #if CL_HPP_TARGET_OPENCL_VERSION >= 220
6770 
6780  cl_int setReleaseCallback(
6781  void (CL_CALLBACK * pfn_notify)(cl_program program, void * user_data),
6782  void * user_data = NULL)
6783  {
6784  return detail::errHandler(
6785  ::clSetProgramReleaseCallback(
6786  object_,
6787  pfn_notify,
6788  user_data),
6789  __SET_PROGRAM_RELEASE_CALLBACK_ERR);
6790  }
6791 
6796  template <typename T>
6797  typename std::enable_if<!std::is_pointer<T>::value, cl_int>::type
6798  setSpecializationConstant(cl_uint index, const T &value)
6799  {
6800  return detail::errHandler(
6801  ::clSetProgramSpecializationConstant(
6802  object_,
6803  index,
6804  sizeof(value),
6805  &value),
6806  __SET_PROGRAM_SPECIALIZATION_CONSTANT_ERR);
6807  }
6808 
6813  cl_int setSpecializationConstant(cl_uint index, size_type size, const void* value)
6814  {
6815  return detail::errHandler(
6816  ::clSetProgramSpecializationConstant(
6817  object_,
6818  index,
6819  size,
6820  value),
6821  __SET_PROGRAM_SPECIALIZATION_CONSTANT_ERR);
6822  }
6823 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 220
6824 };
6825 
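// --- Illustrative usage sketch (not part of the original header) ---
// Building a trivial program from source in the default context and creating a
// kernel from it; the source string and the kernel name "copy" are assumptions
// made for this example.
inline cl::Kernel exampleBuildCopyKernel(cl_int *err = NULL)
{
    static const char kernelSource[] =
        "kernel void copy(global const int *in, global int *out)"
        "{ out[get_global_id(0)] = in[get_global_id(0)]; }";
    cl::Program program(cl::string(kernelSource), /*build=*/true, err);
    return cl::Kernel(program, "copy", err);
}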
6826 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
6827 inline Program linkProgram(
6828  Program input1,
6829  Program input2,
6830  const char* options = NULL,
6831  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6832  void* data = NULL,
6833  cl_int* err = NULL)
6834 {
6835  cl_int error_local = CL_SUCCESS;
6836 
6837  cl_program programs[2] = { input1(), input2() };
6838 
6839  Context ctx = input1.getInfo<CL_PROGRAM_CONTEXT>(&error_local);
6840  if(error_local!=CL_SUCCESS) {
6841  detail::errHandler(error_local, __LINK_PROGRAM_ERR);
6842  }
6843 
6844  cl_program prog = ::clLinkProgram(
6845  ctx(),
6846  0,
6847  NULL,
6848  options,
6849  2,
6850  programs,
6851  notifyFptr,
6852  data,
6853  &error_local);
6854 
6855  detail::errHandler(error_local,__COMPILE_PROGRAM_ERR);
6856  if (err != NULL) {
6857  *err = error_local;
6858  }
6859 
6860  return Program(prog);
6861 }
6862 
6863 inline Program linkProgram(
6864  vector<Program> inputPrograms,
6865  const char* options = NULL,
6866  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6867  void* data = NULL,
6868  cl_int* err = NULL)
6869 {
6870  cl_int error_local = CL_SUCCESS;
6871 
6872  vector<cl_program> programs(inputPrograms.size());
6873 
6874  for (unsigned int i = 0; i < inputPrograms.size(); i++) {
6875  programs[i] = inputPrograms[i]();
6876  }
6877 
6878  Context ctx;
6879  if(inputPrograms.size() > 0) {
6880  ctx = inputPrograms[0].getInfo<CL_PROGRAM_CONTEXT>(&error_local);
6881  if(error_local!=CL_SUCCESS) {
6882  detail::errHandler(error_local, __LINK_PROGRAM_ERR);
6883  }
6884  }
6885  cl_program prog = ::clLinkProgram(
6886  ctx(),
6887  0,
6888  NULL,
6889  options,
6890  (cl_uint)inputPrograms.size(),
6891  programs.data(),
6892  notifyFptr,
6893  data,
6894  &error_local);
6895 
6896  detail::errHandler(error_local,__COMPILE_PROGRAM_ERR);
6897  if (err != NULL) {
6898  *err = error_local;
6899  }
6900 
6901  return Program(prog, false);
6902 }
6903 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
6904 
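// --- Illustrative usage sketch (not part of the original header) ---
// Linking two programs into one; the inputs are assumed to have been compiled
// already (for example with Program::compile()) and default link options are used.
#if CL_HPP_TARGET_OPENCL_VERSION >= 120
inline cl::Program exampleLinkPrograms(cl::Program &partA, cl::Program &partB, cl_int *err = NULL)
{
    return cl::linkProgram(partA, partB, NULL /* options */, NULL /* callback */, NULL /* data */, err);
}
#endif // CL_HPP_TARGET_OPENCL_VERSION >= 120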
6905 // Template specialization for CL_PROGRAM_BINARIES
6906 template <>
6907 inline cl_int cl::Program::getInfo(cl_program_info name, vector<vector<unsigned char>>* param) const
6908 {
6909  if (name != CL_PROGRAM_BINARIES) {
6910  return CL_INVALID_VALUE;
6911  }
6912  if (param) {
6913  // Resize the parameter array appropriately for each allocation
6914  // and pass down to the helper
6915 
6916  vector<size_type> sizes = getInfo<CL_PROGRAM_BINARY_SIZES>();
6917  size_type numBinaries = sizes.size();
6918 
6919  // Resize the parameter array and constituent arrays
6920  param->resize(numBinaries);
6921  for (size_type i = 0; i < numBinaries; ++i) {
6922  (*param)[i].resize(sizes[i]);
6923  }
6924 
6925  return detail::errHandler(
6926  detail::getInfo(&::clGetProgramInfo, object_, name, param),
6927  __GET_PROGRAM_INFO_ERR);
6928  }
6929 
6930  return CL_SUCCESS;
6931 }
6932 
6933 template<>
6934 inline vector<vector<unsigned char>> cl::Program::getInfo<CL_PROGRAM_BINARIES>(cl_int* err) const
6935 {
6936  vector<vector<unsigned char>> binariesVectors;
6937 
6938  cl_int result = getInfo(CL_PROGRAM_BINARIES, &binariesVectors);
6939  if (err != NULL) {
6940  *err = result;
6941  }
6942  return binariesVectors;
6943 }
6944 
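// --- Illustrative usage sketch (not part of the original header) ---
// Retrieving the per-device binaries of a built program through the
// specialization above; each inner vector holds one device's binary.
inline cl::vector<cl::vector<unsigned char>> exampleGetProgramBinaries(
    const cl::Program &program, cl_int *err = NULL)
{
    return program.getInfo<CL_PROGRAM_BINARIES>(err);
}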
6945 #if CL_HPP_TARGET_OPENCL_VERSION >= 220
6946 // Template specialization for clSetProgramSpecializationConstant
6947 template <>
6948 inline cl_int cl::Program::setSpecializationConstant(cl_uint index, const bool &value)
6949 {
6950  cl_uchar ucValue = value ? CL_UCHAR_MAX : 0;
6951  return detail::errHandler(
6952  ::clSetProgramSpecializationConstant(
6953  object_,
6954  index,
6955  sizeof(ucValue),
6956  &ucValue),
6957  __SET_PROGRAM_SPECIALIZATION_CONSTANT_ERR);
6958 }
6959 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 220
6960 
6961 inline Kernel::Kernel(const Program& program, const char* name, cl_int* err)
6962 {
6963  cl_int error;
6964 
6965  object_ = ::clCreateKernel(program(), name, &error);
6966  detail::errHandler(error, __CREATE_KERNEL_ERR);
6967 
6968  if (err != NULL) {
6969  *err = error;
6970  }
6971 
6972 }
6973 
6974 enum class QueueProperties : cl_command_queue_properties
6975 {
6976  None = 0,
6977  Profiling = CL_QUEUE_PROFILING_ENABLE,
6978  OutOfOrder = CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE,
6979 };
6980 
6981 inline QueueProperties operator|(QueueProperties lhs, QueueProperties rhs)
6982 {
6983  return static_cast<QueueProperties>(static_cast<cl_command_queue_properties>(lhs) | static_cast<cl_command_queue_properties>(rhs));
6984 }
6985 
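// --- Illustrative usage sketch (not part of the original header) ---
// Combining queue properties with the overloaded operator| before passing them
// to one of the CommandQueue constructors declared below.
inline cl::QueueProperties exampleProfilingOutOfOrderProperties()
{
    return cl::QueueProperties::Profiling | cl::QueueProperties::OutOfOrder;
}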
6989 class CommandQueue : public detail::Wrapper<cl_command_queue>
6990 {
6991 private:
6992  static std::once_flag default_initialized_;
6993  static CommandQueue default_;
6994  static cl_int default_error_;
6995 
7001  static void makeDefault()
7002  {
7003  /* We don't want to throw an error from this function, so we have to
7004  * catch and set the error flag.
7005  */
7006 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
7007  try
7008 #endif
7009  {
7010  int error;
7011  Context context = Context::getDefault(&error);
7012 
7013  if (error != CL_SUCCESS) {
7014  default_error_ = error;
7015  }
7016  else {
7017  Device device = Device::getDefault();
7018  default_ = CommandQueue(context, device, 0, &default_error_);
7019  }
7020  }
7021 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
7022  catch (cl::Error &e) {
7023  default_error_ = e.err();
7024  }
7025 #endif
7026  }
7027 
7033  static void makeDefaultProvided(const CommandQueue &c) {
7034  default_ = c;
7035  }
7036 
7037 public:
7038 #ifdef CL_HPP_UNIT_TEST_ENABLE
7039 
7045  static void unitTestClearDefault() {
7046  default_ = CommandQueue();
7047  }
7048 #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
7049 
7050 
7055  CommandQueue(
7056  cl_command_queue_properties properties,
7057  cl_int* err = NULL)
7058  {
7059  cl_int error;
7060 
7061  Context context = Context::getDefault(&error);
7062  detail::errHandler(error, __CREATE_CONTEXT_ERR);
7063 
7064  if (error != CL_SUCCESS) {
7065  if (err != NULL) {
7066  *err = error;
7067  }
7068  }
7069  else {
7070  Device device = context.getInfo<CL_CONTEXT_DEVICES>()[0];
7071  bool useWithProperties;
7072 
7073 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
7074  // Run-time decision based on the actual platform
7075  {
7076  cl_uint version = detail::getContextPlatformVersion(context());
7077  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
7078  }
7079 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
7080  useWithProperties = true;
7081 #else
7082  useWithProperties = false;
7083 #endif
7084 
7085 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7086  if (useWithProperties) {
7087  cl_queue_properties queue_properties[] = {
7088  CL_QUEUE_PROPERTIES, properties, 0 };
7089  if ((properties & CL_QUEUE_ON_DEVICE) == 0) {
7090  object_ = ::clCreateCommandQueueWithProperties(
7091  context(), device(), queue_properties, &error);
7092  }
7093  else {
7094  error = CL_INVALID_QUEUE_PROPERTIES;
7095  }
7096 
7097  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
7098  if (err != NULL) {
7099  *err = error;
7100  }
7101  }
7102 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
7103 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
7104  if (!useWithProperties) {
7105  object_ = ::clCreateCommandQueue(
7106  context(), device(), properties, &error);
7107 
7108  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
7109  if (err != NULL) {
7110  *err = error;
7111  }
7112  }
7113 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
7114  }
7115  }
7116 
7121  CommandQueue(
7122  QueueProperties properties,
7123  cl_int* err = NULL)
7124  {
7125  cl_int error;
7126 
7127  Context context = Context::getDefault(&error);
7128  detail::errHandler(error, __CREATE_CONTEXT_ERR);
7129 
7130  if (error != CL_SUCCESS) {
7131  if (err != NULL) {
7132  *err = error;
7133  }
7134  }
7135  else {
7136  Device device = context.getInfo<CL_CONTEXT_DEVICES>()[0];
7137  bool useWithProperties;
7138 
7139 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
7140  // Run-time decision based on the actual platform
7141  {
7142  cl_uint version = detail::getContextPlatformVersion(context());
7143  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
7144  }
7145 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
7146  useWithProperties = true;
7147 #else
7148  useWithProperties = false;
7149 #endif
7150 
7151 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7152  if (useWithProperties) {
7153  cl_queue_properties queue_properties[] = {
7154  CL_QUEUE_PROPERTIES, static_cast<cl_queue_properties>(properties), 0 };
7155 
7156  object_ = ::clCreateCommandQueueWithProperties(
7157  context(), device(), queue_properties, &error);
7158 
7159  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
7160  if (err != NULL) {
7161  *err = error;
7162  }
7163  }
7164 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
7165 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
7166  if (!useWithProperties) {
7167  object_ = ::clCreateCommandQueue(
7168  context(), device(), static_cast<cl_command_queue_properties>(properties), &error);
7169 
7170  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
7171  if (err != NULL) {
7172  *err = error;
7173  }
7174  }
7175 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
7176 
7177  }
7178  }
7179 
7184  explicit CommandQueue(
7185  const Context& context,
7186  cl_command_queue_properties properties = 0,
7187  cl_int* err = NULL)
7188  {
7189  cl_int error;
7190  bool useWithProperties;
7191  vector<cl::Device> devices;
7192  error = context.getInfo(CL_CONTEXT_DEVICES, &devices);
7193 
7194  detail::errHandler(error, __CREATE_CONTEXT_ERR);
7195 
7196  if (error != CL_SUCCESS)
7197  {
7198  if (err != NULL) {
7199  *err = error;
7200  }
7201  return;
7202  }
7203 
7204 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
7205  // Run-time decision based on the actual platform
7206  {
7207  cl_uint version = detail::getContextPlatformVersion(context());
7208  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
7209  }
7210 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
7211  useWithProperties = true;
7212 #else
7213  useWithProperties = false;
7214 #endif
7215 
7216 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7217  if (useWithProperties) {
7218  cl_queue_properties queue_properties[] = {
7219  CL_QUEUE_PROPERTIES, properties, 0 };
7220  if ((properties & CL_QUEUE_ON_DEVICE) == 0) {
7221  object_ = ::clCreateCommandQueueWithProperties(
7222  context(), devices[0](), queue_properties, &error);
7223  }
7224  else {
7225  error = CL_INVALID_QUEUE_PROPERTIES;
7226  }
7227 
7228  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
7229  if (err != NULL) {
7230  *err = error;
7231  }
7232  }
7233 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
7234 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
7235  if (!useWithProperties) {
7236  object_ = ::clCreateCommandQueue(
7237  context(), devices[0](), properties, &error);
7238 
7239  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
7240  if (err != NULL) {
7241  *err = error;
7242  }
7243  }
7244 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
7245  }
7246 
7251  explicit CommandQueue(
7252  const Context& context,
7253  QueueProperties properties,
7254  cl_int* err = NULL)
7255  {
7256  cl_int error;
7257  bool useWithProperties;
7258  vector<cl::Device> devices;
7259  error = context.getInfo(CL_CONTEXT_DEVICES, &devices);
7260 
7261  detail::errHandler(error, __CREATE_CONTEXT_ERR);
7262 
7263  if (error != CL_SUCCESS)
7264  {
7265  if (err != NULL) {
7266  *err = error;
7267  }
7268  return;
7269  }
7270 
7271 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
7272  // Run-time decision based on the actual platform
7273  {
7274  cl_uint version = detail::getContextPlatformVersion(context());
7275  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
7276  }
7277 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
7278  useWithProperties = true;
7279 #else
7280  useWithProperties = false;
7281 #endif
7282 
7283 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7284  if (useWithProperties) {
7285  cl_queue_properties queue_properties[] = {
7286  CL_QUEUE_PROPERTIES, static_cast<cl_queue_properties>(properties), 0 };
7287  object_ = ::clCreateCommandQueueWithProperties(
7288  context(), devices[0](), queue_properties, &error);
7289 
7290  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
7291  if (err != NULL) {
7292  *err = error;
7293  }
7294  }
7295 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
7296 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
7297  if (!useWithProperties) {
7298  object_ = ::clCreateCommandQueue(
7299  context(), devices[0](), static_cast<cl_command_queue_properties>(properties), &error);
7300 
7301  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
7302  if (err != NULL) {
7303  *err = error;
7304  }
7305  }
7306 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
7307  }
7308 
7313  CommandQueue(
7314  const Context& context,
7315  const Device& device,
7316  cl_command_queue_properties properties = 0,
7317  cl_int* err = NULL)
7318  {
7319  cl_int error;
7320  bool useWithProperties;
7321 
7322 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
7323  // Run-time decision based on the actual platform
7324  {
7325  cl_uint version = detail::getContextPlatformVersion(context());
7326  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
7327  }
7328 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
7329  useWithProperties = true;
7330 #else
7331  useWithProperties = false;
7332 #endif
7333 
7334 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7335  if (useWithProperties) {
7336  cl_queue_properties queue_properties[] = {
7337  CL_QUEUE_PROPERTIES, properties, 0 };
7338  object_ = ::clCreateCommandQueueWithProperties(
7339  context(), device(), queue_properties, &error);
7340 
7341  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
7342  if (err != NULL) {
7343  *err = error;
7344  }
7345  }
7346 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
7347 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
7348  if (!useWithProperties) {
7349  object_ = ::clCreateCommandQueue(
7350  context(), device(), properties, &error);
7351 
7352  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
7353  if (err != NULL) {
7354  *err = error;
7355  }
7356  }
7357 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
7358  }
7359 
7364  CommandQueue(
7365  const Context& context,
7366  const Device& device,
7367  QueueProperties properties,
7368  cl_int* err = NULL)
7369  {
7370  cl_int error;
7371  bool useWithProperties;
7372 
7373 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
7374  // Run-time decision based on the actual platform
7375  {
7376  cl_uint version = detail::getContextPlatformVersion(context());
7377  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
7378  }
7379 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
7380  useWithProperties = true;
7381 #else
7382  useWithProperties = false;
7383 #endif
7384 
7385 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7386  if (useWithProperties) {
7387  cl_queue_properties queue_properties[] = {
7388  CL_QUEUE_PROPERTIES, static_cast<cl_queue_properties>(properties), 0 };
7389  object_ = ::clCreateCommandQueueWithProperties(
7390  context(), device(), queue_properties, &error);
7391 
7392  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
7393  if (err != NULL) {
7394  *err = error;
7395  }
7396  }
7397 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
7398 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
7399  if (!useWithProperties) {
7400  object_ = ::clCreateCommandQueue(
7401  context(), device(), static_cast<cl_command_queue_properties>(properties), &error);
7402 
7403  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
7404  if (err != NULL) {
7405  *err = error;
7406  }
7407  }
7408 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
7409  }
7410 
7411  static CommandQueue getDefault(cl_int * err = NULL)
7412  {
7413  std::call_once(default_initialized_, makeDefault);
7414 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7415  detail::errHandler(default_error_, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
7416 #else // CL_HPP_TARGET_OPENCL_VERSION >= 200
7417  detail::errHandler(default_error_, __CREATE_COMMAND_QUEUE_ERR);
7418 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
7419  if (err != NULL) {
7420  *err = default_error_;
7421  }
7422  return default_;
7423  }
7424 
7432  static CommandQueue setDefault(const CommandQueue &default_queue)
7433  {
7434  std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_queue));
7435  detail::errHandler(default_error_);
7436  return default_;
7437  }
7438 
7439  CommandQueue() { }
7440 
7441 
7448  explicit CommandQueue(const cl_command_queue& commandQueue, bool retainObject = false) :
7449  detail::Wrapper<cl_type>(commandQueue, retainObject) { }
7450 
7451  CommandQueue& operator = (const cl_command_queue& rhs)
7452  {
7453  detail::Wrapper<cl_type>::operator=(rhs);
7454  return *this;
7455  }
7456 
7460  CommandQueue(const CommandQueue& queue) : detail::Wrapper<cl_type>(queue) {}
7461 
7465  CommandQueue& operator = (const CommandQueue &queue)
7466  {
7467  detail::Wrapper<cl_type>::operator=(queue);
7468  return *this;
7469  }
7470 
7474  CommandQueue(CommandQueue&& queue) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(queue)) {}
7475 
7479  CommandQueue& operator = (CommandQueue &&queue)
7480  {
7481  detail::Wrapper<cl_type>::operator=(std::move(queue));
7482  return *this;
7483  }
7484 
7485  template <typename T>
7486  cl_int getInfo(cl_command_queue_info name, T* param) const
7487  {
7488  return detail::errHandler(
7489  detail::getInfo(
7490  &::clGetCommandQueueInfo, object_, name, param),
7491  __GET_COMMAND_QUEUE_INFO_ERR);
7492  }
7493 
7494  template <cl_command_queue_info name> typename
7495  detail::param_traits<detail::cl_command_queue_info, name>::param_type
7496  getInfo(cl_int* err = NULL) const
7497  {
7498  typename detail::param_traits<
7499  detail::cl_command_queue_info, name>::param_type param;
7500  cl_int result = getInfo(name, &param);
7501  if (err != NULL) {
7502  *err = result;
7503  }
7504  return param;
7505  }
7506 
7507  cl_int enqueueReadBuffer(
7508  const Buffer& buffer,
7509  cl_bool blocking,
7510  size_type offset,
7511  size_type size,
7512  void* ptr,
7513  const vector<Event>* events = NULL,
7514  Event* event = NULL) const
7515  {
7516  cl_event tmp;
7517  cl_int err = detail::errHandler(
7518  ::clEnqueueReadBuffer(
7519  object_, buffer(), blocking, offset, size,
7520  ptr,
7521  (events != NULL) ? (cl_uint) events->size() : 0,
7522  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7523  (event != NULL) ? &tmp : NULL),
7524  __ENQUEUE_READ_BUFFER_ERR);
7525 
7526  if (event != NULL && err == CL_SUCCESS)
7527  *event = tmp;
7528 
7529  return err;
7530  }
7531 
7532  cl_int enqueueWriteBuffer(
7533  const Buffer& buffer,
7534  cl_bool blocking,
7535  size_type offset,
7536  size_type size,
7537  const void* ptr,
7538  const vector<Event>* events = NULL,
7539  Event* event = NULL) const
7540  {
7541  cl_event tmp;
7542  cl_int err = detail::errHandler(
7543  ::clEnqueueWriteBuffer(
7544  object_, buffer(), blocking, offset, size,
7545  ptr,
7546  (events != NULL) ? (cl_uint) events->size() : 0,
7547  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7548  (event != NULL) ? &tmp : NULL),
7549  __ENQUEUE_WRITE_BUFFER_ERR);
7550 
7551  if (event != NULL && err == CL_SUCCESS)
7552  *event = tmp;
7553 
7554  return err;
7555  }
7556 
7557  cl_int enqueueCopyBuffer(
7558  const Buffer& src,
7559  const Buffer& dst,
7560  size_type src_offset,
7561  size_type dst_offset,
7562  size_type size,
7563  const vector<Event>* events = NULL,
7564  Event* event = NULL) const
7565  {
7566  cl_event tmp;
7567  cl_int err = detail::errHandler(
7568  ::clEnqueueCopyBuffer(
7569  object_, src(), dst(), src_offset, dst_offset, size,
7570  (events != NULL) ? (cl_uint) events->size() : 0,
7571  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7572  (event != NULL) ? &tmp : NULL),
7573  __ENQEUE_COPY_BUFFER_ERR);
7574 
7575  if (event != NULL && err == CL_SUCCESS)
7576  *event = tmp;
7577 
7578  return err;
7579  }
7580 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
7581  cl_int enqueueReadBufferRect(
7582  const Buffer& buffer,
7583  cl_bool blocking,
7584  const array<size_type, 3>& buffer_offset,
7585  const array<size_type, 3>& host_offset,
7586  const array<size_type, 3>& region,
7587  size_type buffer_row_pitch,
7588  size_type buffer_slice_pitch,
7589  size_type host_row_pitch,
7590  size_type host_slice_pitch,
7591  void *ptr,
7592  const vector<Event>* events = NULL,
7593  Event* event = NULL) const
7594  {
7595  cl_event tmp;
7596  cl_int err = detail::errHandler(
7597  ::clEnqueueReadBufferRect(
7598  object_,
7599  buffer(),
7600  blocking,
7601  buffer_offset.data(),
7602  host_offset.data(),
7603  region.data(),
7604  buffer_row_pitch,
7605  buffer_slice_pitch,
7606  host_row_pitch,
7607  host_slice_pitch,
7608  ptr,
7609  (events != NULL) ? (cl_uint) events->size() : 0,
7610  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7611  (event != NULL) ? &tmp : NULL),
7612  __ENQUEUE_READ_BUFFER_RECT_ERR);
7613 
7614  if (event != NULL && err == CL_SUCCESS)
7615  *event = tmp;
7616 
7617  return err;
7618  }
7619 
7620  cl_int enqueueWriteBufferRect(
7621  const Buffer& buffer,
7622  cl_bool blocking,
7623  const array<size_type, 3>& buffer_offset,
7624  const array<size_type, 3>& host_offset,
7625  const array<size_type, 3>& region,
7626  size_type buffer_row_pitch,
7627  size_type buffer_slice_pitch,
7628  size_type host_row_pitch,
7629  size_type host_slice_pitch,
7630  const void *ptr,
7631  const vector<Event>* events = NULL,
7632  Event* event = NULL) const
7633  {
7634  cl_event tmp;
7635  cl_int err = detail::errHandler(
7636  ::clEnqueueWriteBufferRect(
7637  object_,
7638  buffer(),
7639  blocking,
7640  buffer_offset.data(),
7641  host_offset.data(),
7642  region.data(),
7643  buffer_row_pitch,
7644  buffer_slice_pitch,
7645  host_row_pitch,
7646  host_slice_pitch,
7647  ptr,
7648  (events != NULL) ? (cl_uint) events->size() : 0,
7649  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7650  (event != NULL) ? &tmp : NULL),
7651  __ENQUEUE_WRITE_BUFFER_RECT_ERR);
7652 
7653  if (event != NULL && err == CL_SUCCESS)
7654  *event = tmp;
7655 
7656  return err;
7657  }
7658 
7659  cl_int enqueueCopyBufferRect(
7660  const Buffer& src,
7661  const Buffer& dst,
7662  const array<size_type, 3>& src_origin,
7663  const array<size_type, 3>& dst_origin,
7664  const array<size_type, 3>& region,
7665  size_type src_row_pitch,
7666  size_type src_slice_pitch,
7667  size_type dst_row_pitch,
7668  size_type dst_slice_pitch,
7669  const vector<Event>* events = NULL,
7670  Event* event = NULL) const
7671  {
7672  cl_event tmp;
7673  cl_int err = detail::errHandler(
7674  ::clEnqueueCopyBufferRect(
7675  object_,
7676  src(),
7677  dst(),
7678  src_origin.data(),
7679  dst_origin.data(),
7680  region.data(),
7681  src_row_pitch,
7682  src_slice_pitch,
7683  dst_row_pitch,
7684  dst_slice_pitch,
7685  (events != NULL) ? (cl_uint) events->size() : 0,
7686  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7687  (event != NULL) ? &tmp : NULL),
7688  __ENQEUE_COPY_BUFFER_RECT_ERR);
7689 
7690  if (event != NULL && err == CL_SUCCESS)
7691  *event = tmp;
7692 
7693  return err;
7694  }
7695 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
7696 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
7697 
7708  template<typename PatternType>
7709  cl_int enqueueFillBuffer(
7710  const Buffer& buffer,
7711  PatternType pattern,
7712  size_type offset,
7713  size_type size,
7714  const vector<Event>* events = NULL,
7715  Event* event = NULL) const
7716  {
7717  cl_event tmp;
7718  cl_int err = detail::errHandler(
7719  ::clEnqueueFillBuffer(
7720  object_,
7721  buffer(),
7722  static_cast<void*>(&pattern),
7723  sizeof(PatternType),
7724  offset,
7725  size,
7726  (events != NULL) ? (cl_uint) events->size() : 0,
7727  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7728  (event != NULL) ? &tmp : NULL),
7729  __ENQUEUE_FILL_BUFFER_ERR);
7730 
7731  if (event != NULL && err == CL_SUCCESS)
7732  *event = tmp;
7733 
7734  return err;
7735  }
7736 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
7737 
7738  cl_int enqueueReadImage(
7739  const Image& image,
7740  cl_bool blocking,
7741  const array<size_type, 3>& origin,
7742  const array<size_type, 3>& region,
7743  size_type row_pitch,
7744  size_type slice_pitch,
7745  void* ptr,
7746  const vector<Event>* events = NULL,
7747  Event* event = NULL) const
7748  {
7749  cl_event tmp;
7750  cl_int err = detail::errHandler(
7751  ::clEnqueueReadImage(
7752  object_,
7753  image(),
7754  blocking,
7755  origin.data(),
7756  region.data(),
7757  row_pitch,
7758  slice_pitch,
7759  ptr,
7760  (events != NULL) ? (cl_uint) events->size() : 0,
7761  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7762  (event != NULL) ? &tmp : NULL),
7763  __ENQUEUE_READ_IMAGE_ERR);
7764 
7765  if (event != NULL && err == CL_SUCCESS)
7766  *event = tmp;
7767 
7768  return err;
7769  }
7770 
7771  cl_int enqueueWriteImage(
7772  const Image& image,
7773  cl_bool blocking,
7774  const array<size_type, 3>& origin,
7775  const array<size_type, 3>& region,
7776  size_type row_pitch,
7777  size_type slice_pitch,
7778  const void* ptr,
7779  const vector<Event>* events = NULL,
7780  Event* event = NULL) const
7781  {
7782  cl_event tmp;
7783  cl_int err = detail::errHandler(
7784  ::clEnqueueWriteImage(
7785  object_,
7786  image(),
7787  blocking,
7788  origin.data(),
7789  region.data(),
7790  row_pitch,
7791  slice_pitch,
7792  ptr,
7793  (events != NULL) ? (cl_uint) events->size() : 0,
7794  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7795  (event != NULL) ? &tmp : NULL),
7796  __ENQUEUE_WRITE_IMAGE_ERR);
7797 
7798  if (event != NULL && err == CL_SUCCESS)
7799  *event = tmp;
7800 
7801  return err;
7802  }
7803 
7804  cl_int enqueueCopyImage(
7805  const Image& src,
7806  const Image& dst,
7807  const array<size_type, 3>& src_origin,
7808  const array<size_type, 3>& dst_origin,
7809  const array<size_type, 3>& region,
7810  const vector<Event>* events = NULL,
7811  Event* event = NULL) const
7812  {
7813  cl_event tmp;
7814  cl_int err = detail::errHandler(
7815  ::clEnqueueCopyImage(
7816  object_,
7817  src(),
7818  dst(),
7819  src_origin.data(),
7820  dst_origin.data(),
7821  region.data(),
7822  (events != NULL) ? (cl_uint) events->size() : 0,
7823  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7824  (event != NULL) ? &tmp : NULL),
7825  __ENQUEUE_COPY_IMAGE_ERR);
7826 
7827  if (event != NULL && err == CL_SUCCESS)
7828  *event = tmp;
7829 
7830  return err;
7831  }
7832 
7833 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
7834 
7841  cl_int enqueueFillImage(
7842  const Image& image,
7843  cl_float4 fillColor,
7844  const array<size_type, 3>& origin,
7845  const array<size_type, 3>& region,
7846  const vector<Event>* events = NULL,
7847  Event* event = NULL) const
7848  {
7849  cl_event tmp;
7850  cl_int err = detail::errHandler(
7851  ::clEnqueueFillImage(
7852  object_,
7853  image(),
7854  static_cast<void*>(&fillColor),
7855  origin.data(),
7856  region.data(),
7857  (events != NULL) ? (cl_uint) events->size() : 0,
7858  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7859  (event != NULL) ? &tmp : NULL),
7860  __ENQUEUE_FILL_IMAGE_ERR);
7861 
7862  if (event != NULL && err == CL_SUCCESS)
7863  *event = tmp;
7864 
7865  return err;
7866  }
7867 
7875  cl_int enqueueFillImage(
7876  const Image& image,
7877  cl_int4 fillColor,
7878  const array<size_type, 3>& origin,
7879  const array<size_type, 3>& region,
7880  const vector<Event>* events = NULL,
7881  Event* event = NULL) const
7882  {
7883  cl_event tmp;
7884  cl_int err = detail::errHandler(
7885  ::clEnqueueFillImage(
7886  object_,
7887  image(),
7888  static_cast<void*>(&fillColor),
7889  origin.data(),
7890  region.data(),
7891  (events != NULL) ? (cl_uint) events->size() : 0,
7892  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7893  (event != NULL) ? &tmp : NULL),
7894  __ENQUEUE_FILL_IMAGE_ERR);
7895 
7896  if (event != NULL && err == CL_SUCCESS)
7897  *event = tmp;
7898 
7899  return err;
7900  }
7901 
7909  cl_int enqueueFillImage(
7910  const Image& image,
7911  cl_uint4 fillColor,
7912  const array<size_type, 3>& origin,
7913  const array<size_type, 3>& region,
7914  const vector<Event>* events = NULL,
7915  Event* event = NULL) const
7916  {
7917  cl_event tmp;
7918  cl_int err = detail::errHandler(
7919  ::clEnqueueFillImage(
7920  object_,
7921  image(),
7922  static_cast<void*>(&fillColor),
7923  origin.data(),
7924  region.data(),
7925  (events != NULL) ? (cl_uint) events->size() : 0,
7926  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7927  (event != NULL) ? &tmp : NULL),
7928  __ENQUEUE_FILL_IMAGE_ERR);
7929 
7930  if (event != NULL && err == CL_SUCCESS)
7931  *event = tmp;
7932 
7933  return err;
7934  }
7935 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
7936 
7937  cl_int enqueueCopyImageToBuffer(
7938  const Image& src,
7939  const Buffer& dst,
7940  const array<size_type, 3>& src_origin,
7941  const array<size_type, 3>& region,
7942  size_type dst_offset,
7943  const vector<Event>* events = NULL,
7944  Event* event = NULL) const
7945  {
7946  cl_event tmp;
7947  cl_int err = detail::errHandler(
7948  ::clEnqueueCopyImageToBuffer(
7949  object_,
7950  src(),
7951  dst(),
7952  src_origin.data(),
7953  region.data(),
7954  dst_offset,
7955  (events != NULL) ? (cl_uint) events->size() : 0,
7956  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7957  (event != NULL) ? &tmp : NULL),
7958  __ENQUEUE_COPY_IMAGE_TO_BUFFER_ERR);
7959 
7960  if (event != NULL && err == CL_SUCCESS)
7961  *event = tmp;
7962 
7963  return err;
7964  }
7965 
7966  cl_int enqueueCopyBufferToImage(
7967  const Buffer& src,
7968  const Image& dst,
7969  size_type src_offset,
7970  const array<size_type, 3>& dst_origin,
7971  const array<size_type, 3>& region,
7972  const vector<Event>* events = NULL,
7973  Event* event = NULL) const
7974  {
7975  cl_event tmp;
7976  cl_int err = detail::errHandler(
7977  ::clEnqueueCopyBufferToImage(
7978  object_,
7979  src(),
7980  dst(),
7981  src_offset,
7982  dst_origin.data(),
7983  region.data(),
7984  (events != NULL) ? (cl_uint) events->size() : 0,
7985  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7986  (event != NULL) ? &tmp : NULL),
7987  __ENQUEUE_COPY_BUFFER_TO_IMAGE_ERR);
7988 
7989  if (event != NULL && err == CL_SUCCESS)
7990  *event = tmp;
7991 
7992  return err;
7993  }
7994 
7995  void* enqueueMapBuffer(
7996  const Buffer& buffer,
7997  cl_bool blocking,
7998  cl_map_flags flags,
7999  size_type offset,
8000  size_type size,
8001  const vector<Event>* events = NULL,
8002  Event* event = NULL,
8003  cl_int* err = NULL) const
8004  {
8005  cl_event tmp;
8006  cl_int error;
8007  void * result = ::clEnqueueMapBuffer(
8008  object_, buffer(), blocking, flags, offset, size,
8009  (events != NULL) ? (cl_uint) events->size() : 0,
8010  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8011  (event != NULL) ? &tmp : NULL,
8012  &error);
8013 
8014  detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
8015  if (err != NULL) {
8016  *err = error;
8017  }
8018  if (event != NULL && error == CL_SUCCESS)
8019  *event = tmp;
8020 
8021  return result;
8022  }
8023 
8024  void* enqueueMapImage(
8025  const Image& buffer,
8026  cl_bool blocking,
8027  cl_map_flags flags,
8028  const array<size_type, 3>& origin,
8029  const array<size_type, 3>& region,
8030  size_type * row_pitch,
8031  size_type * slice_pitch,
8032  const vector<Event>* events = NULL,
8033  Event* event = NULL,
8034  cl_int* err = NULL) const
8035  {
8036  cl_event tmp;
8037  cl_int error;
8038  void * result = ::clEnqueueMapImage(
8039  object_, buffer(), blocking, flags,
8040  origin.data(),
8041  region.data(),
8042  row_pitch, slice_pitch,
8043  (events != NULL) ? (cl_uint) events->size() : 0,
8044  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8045  (event != NULL) ? &tmp : NULL,
8046  &error);
8047 
8048  detail::errHandler(error, __ENQUEUE_MAP_IMAGE_ERR);
8049  if (err != NULL) {
8050  *err = error;
8051  }
8052  if (event != NULL && error == CL_SUCCESS)
8053  *event = tmp;
8054  return result;
8055  }
8056 
8057 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8058 
8062  template<typename T>
8063  cl_int enqueueMapSVM(
8064  T* ptr,
8065  cl_bool blocking,
8066  cl_map_flags flags,
8067  size_type size,
8068  const vector<Event>* events = NULL,
8069  Event* event = NULL) const
8070  {
8071  cl_event tmp;
8072  cl_int err = detail::errHandler(::clEnqueueSVMMap(
8073  object_, blocking, flags, static_cast<void*>(ptr), size,
8074  (events != NULL) ? (cl_uint)events->size() : 0,
8075  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
8076  (event != NULL) ? &tmp : NULL),
8077  __ENQUEUE_MAP_BUFFER_ERR);
8078 
8079  if (event != NULL && err == CL_SUCCESS)
8080  *event = tmp;
8081 
8082  return err;
8083  }
8084 
8085 
8090  template<typename T, class D>
8091  cl_int enqueueMapSVM(
8092  cl::pointer<T, D> &ptr,
8093  cl_bool blocking,
8094  cl_map_flags flags,
8095  size_type size,
8096  const vector<Event>* events = NULL,
8097  Event* event = NULL) const
8098  {
8099  cl_event tmp;
8100  cl_int err = detail::errHandler(::clEnqueueSVMMap(
8101  object_, blocking, flags, static_cast<void*>(ptr.get()), size,
8102  (events != NULL) ? (cl_uint)events->size() : 0,
8103  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
8104  (event != NULL) ? &tmp : NULL),
8105  __ENQUEUE_MAP_BUFFER_ERR);
8106 
8107  if (event != NULL && err == CL_SUCCESS)
8108  *event = tmp;
8109 
8110  return err;
8111  }
8112 
8117  template<typename T, class Alloc>
8118  cl_int enqueueMapSVM(
8119  cl::vector<T, Alloc> &container,
8120  cl_bool blocking,
8121  cl_map_flags flags,
8122  const vector<Event>* events = NULL,
8123  Event* event = NULL) const
8124  {
8125  cl_event tmp;
8126  cl_int err = detail::errHandler(::clEnqueueSVMMap(
8127  object_, blocking, flags, static_cast<void*>(container.data()), container.size(),
8128  (events != NULL) ? (cl_uint)events->size() : 0,
8129  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
8130  (event != NULL) ? &tmp : NULL),
8131  __ENQUEUE_MAP_BUFFER_ERR);
8132 
8133  if (event != NULL && err == CL_SUCCESS)
8134  *event = tmp;
8135 
8136  return err;
8137  }
8138 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8139 
8140  cl_int enqueueUnmapMemObject(
8141  const Memory& memory,
8142  void* mapped_ptr,
8143  const vector<Event>* events = NULL,
8144  Event* event = NULL) const
8145  {
8146  cl_event tmp;
8147  cl_int err = detail::errHandler(
8148  ::clEnqueueUnmapMemObject(
8149  object_, memory(), mapped_ptr,
8150  (events != NULL) ? (cl_uint) events->size() : 0,
8151  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8152  (event != NULL) ? &tmp : NULL),
8153  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8154 
8155  if (event != NULL && err == CL_SUCCESS)
8156  *event = tmp;
8157 
8158  return err;
8159  }
8160 
8161 
8162 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8163 
8167  template<typename T>
8168  cl_int enqueueUnmapSVM(
8169  T* ptr,
8170  const vector<Event>* events = NULL,
8171  Event* event = NULL) const
8172  {
8173  cl_event tmp;
8174  cl_int err = detail::errHandler(
8175  ::clEnqueueSVMUnmap(
8176  object_, static_cast<void*>(ptr),
8177  (events != NULL) ? (cl_uint)events->size() : 0,
8178  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
8179  (event != NULL) ? &tmp : NULL),
8180  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8181 
8182  if (event != NULL && err == CL_SUCCESS)
8183  *event = tmp;
8184 
8185  return err;
8186  }
8187 
8192  template<typename T, class D>
8193  cl_int enqueueUnmapSVM(
8194  cl::pointer<T, D> &ptr,
8195  const vector<Event>* events = NULL,
8196  Event* event = NULL) const
8197  {
8198  cl_event tmp;
8199  cl_int err = detail::errHandler(
8200  ::clEnqueueSVMUnmap(
8201  object_, static_cast<void*>(ptr.get()),
8202  (events != NULL) ? (cl_uint)events->size() : 0,
8203  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
8204  (event != NULL) ? &tmp : NULL),
8205  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8206 
8207  if (event != NULL && err == CL_SUCCESS)
8208  *event = tmp;
8209 
8210  return err;
8211  }
8212 
8217  template<typename T, class Alloc>
8218  cl_int enqueueUnmapSVM(
8219  cl::vector<T, Alloc> &container,
8220  const vector<Event>* events = NULL,
8221  Event* event = NULL) const
8222  {
8223  cl_event tmp;
8224  cl_int err = detail::errHandler(
8225  ::clEnqueueSVMUnmap(
8226  object_, static_cast<void*>(container.data()),
8227  (events != NULL) ? (cl_uint)events->size() : 0,
8228  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
8229  (event != NULL) ? &tmp : NULL),
8230  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8231 
8232  if (event != NULL && err == CL_SUCCESS)
8233  *event = tmp;
8234 
8235  return err;
8236  }
8237 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8238 
8239 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
8240 
8251  cl_int enqueueMarkerWithWaitList(
8252  const vector<Event> *events = 0,
8253  Event *event = 0) const
8254  {
8255  cl_event tmp;
8256  cl_int err = detail::errHandler(
8257  ::clEnqueueMarkerWithWaitList(
8258  object_,
8259  (events != NULL) ? (cl_uint) events->size() : 0,
8260  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8261  (event != NULL) ? &tmp : NULL),
8262  __ENQUEUE_MARKER_WAIT_LIST_ERR);
8263 
8264  if (event != NULL && err == CL_SUCCESS)
8265  *event = tmp;
8266 
8267  return err;
8268  }
8269 
8281  cl_int enqueueBarrierWithWaitList(
8282  const vector<Event> *events = 0,
8283  Event *event = 0) const
8284  {
8285  cl_event tmp;
8286  cl_int err = detail::errHandler(
8287  ::clEnqueueBarrierWithWaitList(
8288  object_,
8289  (events != NULL) ? (cl_uint) events->size() : 0,
8290  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8291  (event != NULL) ? &tmp : NULL),
8292  __ENQUEUE_BARRIER_WAIT_LIST_ERR);
8293 
8294  if (event != NULL && err == CL_SUCCESS)
8295  *event = tmp;
8296 
8297  return err;
8298  }
8299 
8304  cl_int enqueueMigrateMemObjects(
8305  const vector<Memory> &memObjects,
8306  cl_mem_migration_flags flags,
8307  const vector<Event>* events = NULL,
8308  Event* event = NULL
8309  ) const
8310  {
8311  cl_event tmp;
8312 
8313  vector<cl_mem> localMemObjects(memObjects.size());
8314 
8315  for( int i = 0; i < (int)memObjects.size(); ++i ) {
8316  localMemObjects[i] = memObjects[i]();
8317  }
8318 
8319  cl_int err = detail::errHandler(
8320  ::clEnqueueMigrateMemObjects(
8321  object_,
8322  (cl_uint)memObjects.size(),
8323  localMemObjects.data(),
8324  flags,
8325  (events != NULL) ? (cl_uint) events->size() : 0,
8326  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8327  (event != NULL) ? &tmp : NULL),
8328  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8329 
8330  if (event != NULL && err == CL_SUCCESS)
8331  *event = tmp;
8332 
8333  return err;
8334  }
8335 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
8336 
8337 
8338 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
8339 
8344  template<typename T>
8345  cl_int enqueueMigrateSVM(
8346  const cl::vector<T*> &svmRawPointers,
8347  const cl::vector<size_type> &sizes,
8348  cl_mem_migration_flags flags = 0,
8349  const vector<Event>* events = NULL,
8350  Event* event = NULL) const
8351  {
8352  cl_event tmp;
8353  cl_int err = detail::errHandler(::clEnqueueSVMMigrateMem(
8354  object_,
8355  svmRawPointers.size(), static_cast<void**>(svmRawPointers.data()),
8356  sizes.data(), // a size of 0 migrates the whole allocation containing the corresponding pointer
8357  flags,
8358  (events != NULL) ? (cl_uint)events->size() : 0,
8359  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
8360  (event != NULL) ? &tmp : NULL),
8361  __ENQUEUE_MIGRATE_SVM_ERR);
8362 
8363  if (event != NULL && err == CL_SUCCESS)
8364  *event = tmp;
8365 
8366  return err;
8367  }
8368 
8373  template<typename T>
8374  cl_int enqueueMigrateSVM(
8375  const cl::vector<T*> &svmRawPointers,
8376  cl_mem_migration_flags flags = 0,
8377  const vector<Event>* events = NULL,
8378  Event* event = NULL) const
8379  {
8380  return enqueueMigrateSVM(svmRawPointers, cl::vector<size_type>(svmRawPointers.size()), flags, events, event);
8381  }
8382 
8383 
8389  template<typename T, class D>
8390  cl_int enqueueMigrateSVM(
8391  const cl::vector<cl::pointer<T, D>> &svmPointers,
8392  const cl::vector<size_type> &sizes,
8393  cl_mem_migration_flags flags = 0,
8394  const vector<Event>* events = NULL,
8395  Event* event = NULL) const
8396  {
8397  cl::vector<void*> svmRawPointers;
8398  svmRawPointers.reserve(svmPointers.size());
8399  for (const auto &p : svmPointers) {
8400  svmRawPointers.push_back(static_cast<void*>(p.get()));
8401  }
8402 
8403  return enqueueMigrateSVM(svmRawPointers, sizes, flags, events, event);
8404  }
8405 
8406 
8411  template<typename T, class D>
8412  cl_int enqueueMigrateSVM(
8413  const cl::vector<cl::pointer<T, D>> &svmPointers,
8414  cl_mem_migration_flags flags = 0,
8415  const vector<Event>* events = NULL,
8416  Event* event = NULL) const
8417  {
8418  return enqueueMigrateSVM(svmPointers, cl::vector<size_type>(svmPointers.size()), flags, events, event);
8419  }
8420 
8426  template<typename T, class Alloc>
8427  cl_int enqueueMigrateSVM(
8428  const cl::vector<cl::vector<T, Alloc>> &svmContainers,
8429  const cl::vector<size_type> &sizes,
8430  cl_mem_migration_flags flags = 0,
8431  const vector<Event>* events = NULL,
8432  Event* event = NULL) const
8433  {
8434  cl::vector<void*> svmRawPointers;
8435  svmRawPointers.reserve(svmContainers.size());
8436  for (auto &p : svmContainers) {
8437  svmRawPointers.push_back(const_cast<void*>(static_cast<const void*>(p.data())));
8438  }
8439 
8440  return enqueueMigrateSVM(svmRawPointers, sizes, flags, events, event);
8441  }
8442 
8447  template<typename T, class Alloc>
8448  cl_int enqueueMigrateSVM(
8449  const cl::vector<cl::vector<T, Alloc>> &svmContainers,
8450  cl_mem_migration_flags flags = 0,
8451  const vector<Event>* events = NULL,
8452  Event* event = NULL) const
8453  {
8454  return enqueueMigrateSVM(svmContainers, cl::vector<size_type>(svmContainers.size()), flags, events, event);
8455  }
8456 
8457 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
8458 
8459  cl_int enqueueNDRangeKernel(
8460  const Kernel& kernel,
8461  const NDRange& offset,
8462  const NDRange& global,
8463  const NDRange& local = NullRange,
8464  const vector<Event>* events = NULL,
8465  Event* event = NULL) const
8466  {
8467  cl_event tmp;
8468  cl_int err = detail::errHandler(
8469  ::clEnqueueNDRangeKernel(
8470  object_, kernel(), (cl_uint) global.dimensions(),
8471  offset.dimensions() != 0 ? (const size_type*) offset : NULL,
8472  (const size_type*) global,
8473  local.dimensions() != 0 ? (const size_type*) local : NULL,
8474  (events != NULL) ? (cl_uint) events->size() : 0,
8475  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8476  (event != NULL) ? &tmp : NULL),
8477  __ENQUEUE_NDRANGE_KERNEL_ERR);
8478 
8479  if (event != NULL && err == CL_SUCCESS)
8480  *event = tmp;
8481 
8482  return err;
8483  }
8484 
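 // Editor's sketch (not part of the upstream header): typical use of the
 // enqueueNDRangeKernel member above. The program, buffers, kernel name
 // "vector_add" and the work size of 1024 are illustrative assumptions only.
 //
 //   cl::Kernel vectorAdd(program, "vector_add");   // program assumed built
 //   vectorAdd.setArg(0, srcBuffer);
 //   vectorAdd.setArg(1, dstBuffer);
 //   cl::Event done;
 //   queue.enqueueNDRangeKernel(
 //       vectorAdd, cl::NullRange, cl::NDRange(1024), cl::NullRange,
 //       NULL, &done);
 //   done.wait();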
8485 #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
8486  CL_EXT_PREFIX__VERSION_1_2_DEPRECATED cl_int enqueueTask(
8487  const Kernel& kernel,
8488  const vector<Event>* events = NULL,
8489  Event* event = NULL) const CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED
8490  {
8491  cl_event tmp;
8492  cl_int err = detail::errHandler(
8493  ::clEnqueueTask(
8494  object_, kernel(),
8495  (events != NULL) ? (cl_uint) events->size() : 0,
8496  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8497  (event != NULL) ? &tmp : NULL),
8498  __ENQUEUE_TASK_ERR);
8499 
8500  if (event != NULL && err == CL_SUCCESS)
8501  *event = tmp;
8502 
8503  return err;
8504  }
8505 #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
8506 
8507  cl_int enqueueNativeKernel(
8508  void (CL_CALLBACK *userFptr)(void *),
8509  std::pair<void*, size_type> args,
8510  const vector<Memory>* mem_objects = NULL,
8511  const vector<const void*>* mem_locs = NULL,
8512  const vector<Event>* events = NULL,
8513  Event* event = NULL) const
8514  {
8515  size_type elements = 0;
8516  if (mem_objects != NULL) {
8517  elements = mem_objects->size();
8518  }
8519  vector<cl_mem> mems(elements);
8520  for (unsigned int i = 0; i < elements; i++) {
8521  mems[i] = ((*mem_objects)[i])();
8522  }
8523 
8524  cl_event tmp;
8525  cl_int err = detail::errHandler(
8526  ::clEnqueueNativeKernel(
8527  object_, userFptr, args.first, args.second,
8528  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8529  mems.data(),
8530  (mem_locs != NULL && mem_locs->size() > 0) ? (const void **) &mem_locs->front() : NULL,
8531  (events != NULL) ? (cl_uint) events->size() : 0,
8532  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8533  (event != NULL) ? &tmp : NULL),
8534  __ENQUEUE_NATIVE_KERNEL);
8535 
8536  if (event != NULL && err == CL_SUCCESS)
8537  *event = tmp;
8538 
8539  return err;
8540  }
8541 
8545 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
8546  CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
8547  cl_int enqueueMarker(Event* event = NULL) const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
8548  {
8549  cl_event tmp;
8550  cl_int err = detail::errHandler(
8551  ::clEnqueueMarker(
8552  object_,
8553  (event != NULL) ? &tmp : NULL),
8554  __ENQUEUE_MARKER_ERR);
8555 
8556  if (event != NULL && err == CL_SUCCESS)
8557  *event = tmp;
8558 
8559  return err;
8560  }
8561 
8562  CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
8563  cl_int enqueueWaitForEvents(const vector<Event>& events) const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
8564  {
8565  return detail::errHandler(
8566  ::clEnqueueWaitForEvents(
8567  object_,
8568  (cl_uint) events.size(),
8569  events.size() > 0 ? (const cl_event*) &events.front() : NULL),
8570  __ENQUEUE_WAIT_FOR_EVENTS_ERR);
8571  }
8572 #endif // defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
8573 
8574  cl_int enqueueAcquireGLObjects(
8575  const vector<Memory>* mem_objects = NULL,
8576  const vector<Event>* events = NULL,
8577  Event* event = NULL) const
8578  {
8579  cl_event tmp;
8580  cl_int err = detail::errHandler(
8581  ::clEnqueueAcquireGLObjects(
8582  object_,
8583  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8584  (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
8585  (events != NULL) ? (cl_uint) events->size() : 0,
8586  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8587  (event != NULL) ? &tmp : NULL),
8588  __ENQUEUE_ACQUIRE_GL_ERR);
8589 
8590  if (event != NULL && err == CL_SUCCESS)
8591  *event = tmp;
8592 
8593  return err;
8594  }
8595 
8596  cl_int enqueueReleaseGLObjects(
8597  const vector<Memory>* mem_objects = NULL,
8598  const vector<Event>* events = NULL,
8599  Event* event = NULL) const
8600  {
8601  cl_event tmp;
8602  cl_int err = detail::errHandler(
8603  ::clEnqueueReleaseGLObjects(
8604  object_,
8605  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8606  (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
8607  (events != NULL) ? (cl_uint) events->size() : 0,
8608  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8609  (event != NULL) ? &tmp : NULL),
8610  __ENQUEUE_RELEASE_GL_ERR);
8611 
8612  if (event != NULL && err == CL_SUCCESS)
8613  *event = tmp;
8614 
8615  return err;
8616  }
8617 
8618 #if defined (CL_HPP_USE_DX_INTEROP)
8619 typedef CL_API_ENTRY cl_int (CL_API_CALL *PFN_clEnqueueAcquireD3D10ObjectsKHR)(
8620  cl_command_queue command_queue, cl_uint num_objects,
8621  const cl_mem* mem_objects, cl_uint num_events_in_wait_list,
8622  const cl_event* event_wait_list, cl_event* event);
8623 typedef CL_API_ENTRY cl_int (CL_API_CALL *PFN_clEnqueueReleaseD3D10ObjectsKHR)(
8624  cl_command_queue command_queue, cl_uint num_objects,
8625  const cl_mem* mem_objects, cl_uint num_events_in_wait_list,
8626  const cl_event* event_wait_list, cl_event* event);
8627 
8628  cl_int enqueueAcquireD3D10Objects(
8629  const vector<Memory>* mem_objects = NULL,
8630  const vector<Event>* events = NULL,
8631  Event* event = NULL) const
8632  {
8633  static PFN_clEnqueueAcquireD3D10ObjectsKHR pfn_clEnqueueAcquireD3D10ObjectsKHR = NULL;
8634 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
8635  cl_context context = getInfo<CL_QUEUE_CONTEXT>();
8636  cl::Device device(getInfo<CL_QUEUE_DEVICE>());
8637  cl_platform_id platform = device.getInfo<CL_DEVICE_PLATFORM>();
8638  CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, clEnqueueAcquireD3D10ObjectsKHR);
8639 #endif
8640 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
8641  CL_HPP_INIT_CL_EXT_FCN_PTR_(clEnqueueAcquireD3D10ObjectsKHR);
8642 #endif
8643 
8644  cl_event tmp;
8645  cl_int err = detail::errHandler(
8646  pfn_clEnqueueAcquireD3D10ObjectsKHR(
8647  object_,
8648  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8649  (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
8650  (events != NULL) ? (cl_uint) events->size() : 0,
8651  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8652  (event != NULL) ? &tmp : NULL),
8653  __ENQUEUE_ACQUIRE_GL_ERR);
8654 
8655  if (event != NULL && err == CL_SUCCESS)
8656  *event = tmp;
8657 
8658  return err;
8659  }
8660 
8661  cl_int enqueueReleaseD3D10Objects(
8662  const vector<Memory>* mem_objects = NULL,
8663  const vector<Event>* events = NULL,
8664  Event* event = NULL) const
8665  {
8666  static PFN_clEnqueueReleaseD3D10ObjectsKHR pfn_clEnqueueReleaseD3D10ObjectsKHR = NULL;
8667 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
8668  cl_context context = getInfo<CL_QUEUE_CONTEXT>();
8669  cl::Device device(getInfo<CL_QUEUE_DEVICE>());
8670  cl_platform_id platform = device.getInfo<CL_DEVICE_PLATFORM>();
8671  CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, clEnqueueReleaseD3D10ObjectsKHR);
8672 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
8673 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
8674  CL_HPP_INIT_CL_EXT_FCN_PTR_(clEnqueueReleaseD3D10ObjectsKHR);
8675 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
8676 
8677  cl_event tmp;
8678  cl_int err = detail::errHandler(
8679  pfn_clEnqueueReleaseD3D10ObjectsKHR(
8680  object_,
8681  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8682  (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
8683  (events != NULL) ? (cl_uint) events->size() : 0,
8684  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8685  (event != NULL) ? &tmp : NULL),
8686  __ENQUEUE_RELEASE_GL_ERR);
8687 
8688  if (event != NULL && err == CL_SUCCESS)
8689  *event = tmp;
8690 
8691  return err;
8692  }
8693 #endif
8694 
8698 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
8699  CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
8700  cl_int enqueueBarrier() const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
8701  {
8702  return detail::errHandler(
8703  ::clEnqueueBarrier(object_),
8704  __ENQUEUE_BARRIER_ERR);
8705  }
8706 #endif // CL_USE_DEPRECATED_OPENCL_1_1_APIS
8707 
8708  cl_int flush() const
8709  {
8710  return detail::errHandler(::clFlush(object_), __FLUSH_ERR);
8711  }
8712 
8713  cl_int finish() const
8714  {
8715  return detail::errHandler(::clFinish(object_), __FINISH_ERR);
8716  }
8717 }; // CommandQueue
8718 
8719 CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag CommandQueue::default_initialized_;
8720 CL_HPP_DEFINE_STATIC_MEMBER_ CommandQueue CommandQueue::default_;
8721 CL_HPP_DEFINE_STATIC_MEMBER_ cl_int CommandQueue::default_error_ = CL_SUCCESS;
8722 
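// Editor's sketch (not part of the upstream header): minimal host-side use of
// cl::CommandQueue on the default platform. N, the buffer, and the profiling
// flag are illustrative assumptions; <vector> is assumed to be included.
//
//   const size_t N = 1024;
//   cl::Context context = cl::Context::getDefault();
//   cl::Device device = cl::Device::getDefault();
//   cl::CommandQueue queue(context, device, CL_QUEUE_PROFILING_ENABLE);
//
//   std::vector<float> host(N, 1.0f);
//   cl::Buffer buf(context, CL_MEM_READ_WRITE, N * sizeof(float));
//   queue.enqueueWriteBuffer(buf, CL_TRUE, 0, N * sizeof(float), host.data());
//   // ... enqueue kernels that operate on buf ...
//   queue.enqueueReadBuffer(buf, CL_TRUE, 0, N * sizeof(float), host.data());
//   queue.finish();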
8723 
8724 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8725 enum class DeviceQueueProperties : cl_command_queue_properties
8726 {
8727  None = 0,
8728  Profiling = CL_QUEUE_PROFILING_ENABLE,
8729 };
8730 
8731 inline DeviceQueueProperties operator|(DeviceQueueProperties lhs, DeviceQueueProperties rhs)
8732 {
8733  return static_cast<DeviceQueueProperties>(static_cast<cl_command_queue_properties>(lhs) | static_cast<cl_command_queue_properties>(rhs));
8734 }
8735 
8739 class DeviceCommandQueue : public detail::Wrapper<cl_command_queue>
8740 {
8741 public:
8742 
8746  DeviceCommandQueue() { }
8747 
8751  DeviceCommandQueue(DeviceQueueProperties properties, cl_int* err = NULL)
8752  {
8753  cl_int error;
8754  cl::Context context = cl::Context::getDefault(err);
8755  cl::Device device = cl::Device::getDefault();
8756 
8757  cl_command_queue_properties mergedProperties =
8758  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | static_cast<cl_command_queue_properties>(properties);
8759 
8760  cl_queue_properties queue_properties[] = {
8761  CL_QUEUE_PROPERTIES, mergedProperties, 0 };
8762  object_ = ::clCreateCommandQueueWithProperties(
8763  context(), device(), queue_properties, &error);
8764 
8765  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8766  if (err != NULL) {
8767  *err = error;
8768  }
8769  }
8770 
8774  DeviceCommandQueue(
8775  const Context& context,
8776  const Device& device,
8777  DeviceQueueProperties properties = DeviceQueueProperties::None,
8778  cl_int* err = NULL)
8779  {
8780  cl_int error;
8781 
8782  cl_command_queue_properties mergedProperties =
8783  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | static_cast<cl_command_queue_properties>(properties);
8784  cl_queue_properties queue_properties[] = {
8785  CL_QUEUE_PROPERTIES, mergedProperties, 0 };
8786  object_ = ::clCreateCommandQueueWithProperties(
8787  context(), device(), queue_properties, &error);
8788 
8789  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8790  if (err != NULL) {
8791  *err = error;
8792  }
8793  }
8794 
8798  DeviceCommandQueue(
8799  const Context& context,
8800  const Device& device,
8801  cl_uint queueSize,
8802  DeviceQueueProperties properties = DeviceQueueProperties::None,
8803  cl_int* err = NULL)
8804  {
8805  cl_int error;
8806 
8807  cl_command_queue_properties mergedProperties =
8808  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | static_cast<cl_command_queue_properties>(properties);
8809  cl_queue_properties queue_properties[] = {
8810  CL_QUEUE_PROPERTIES, mergedProperties,
8811  CL_QUEUE_SIZE, queueSize,
8812  0 };
8813  object_ = ::clCreateCommandQueueWithProperties(
8814  context(), device(), queue_properties, &error);
8815 
8816  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8817  if (err != NULL) {
8818  *err = error;
8819  }
8820  }
8821 
8828  explicit DeviceCommandQueue(const cl_command_queue& commandQueue, bool retainObject = false) :
8829  detail::Wrapper<cl_type>(commandQueue, retainObject) { }
8830 
8831  DeviceCommandQueue& operator = (const cl_command_queue& rhs)
8832  {
8833  detail::Wrapper<cl_type>::operator=(rhs);
8834  return *this;
8835  }
8836 
8840  DeviceCommandQueue(const DeviceCommandQueue& queue) : detail::Wrapper<cl_type>(queue) {}
8841 
8845  DeviceCommandQueue& operator = (const DeviceCommandQueue &queue)
8846  {
8847  detail::Wrapper<cl_type>::operator=(queue);
8848  return *this;
8849  }
8850 
8854  DeviceCommandQueue(DeviceCommandQueue&& queue) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(queue)) {}
8855 
8859  DeviceCommandQueue& operator = (DeviceCommandQueue &&queue)
8860  {
8861  detail::Wrapper<cl_type>::operator=(std::move(queue));
8862  return *this;
8863  }
8864 
8865  template <typename T>
8866  cl_int getInfo(cl_command_queue_info name, T* param) const
8867  {
8868  return detail::errHandler(
8869  detail::getInfo(
8870  &::clGetCommandQueueInfo, object_, name, param),
8871  __GET_COMMAND_QUEUE_INFO_ERR);
8872  }
8873 
8874  template <cl_command_queue_info name> typename
8875  detail::param_traits<detail::cl_command_queue_info, name>::param_type
8876  getInfo(cl_int* err = NULL) const
8877  {
8878  typename detail::param_traits<
8879  detail::cl_command_queue_info, name>::param_type param;
8880  cl_int result = getInfo(name, &param);
8881  if (err != NULL) {
8882  *err = result;
8883  }
8884  return param;
8885  }
8886 
8893  static DeviceCommandQueue makeDefault(
8894  cl_int *err = nullptr)
8895  {
8896  cl_int error;
8897  cl::Context context = cl::Context::getDefault(err);
8898  cl::Device device = cl::Device::getDefault();
8899 
8900  cl_command_queue_properties properties =
8901  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | CL_QUEUE_ON_DEVICE_DEFAULT;
8902  cl_queue_properties queue_properties[] = {
8903  CL_QUEUE_PROPERTIES, properties,
8904  0 };
8905  DeviceCommandQueue deviceQueue(
8906  ::clCreateCommandQueueWithProperties(
8907  context(), device(), queue_properties, &error));
8908 
8909  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8910  if (err != NULL) {
8911  *err = error;
8912  }
8913 
8914  return deviceQueue;
8915  }
8916 
8923  static DeviceCommandQueue makeDefault(
8924  const Context &context, const Device &device, cl_int *err = nullptr)
8925  {
8926  cl_int error;
8927 
8928  cl_command_queue_properties properties =
8929  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | CL_QUEUE_ON_DEVICE_DEFAULT;
8930  cl_queue_properties queue_properties[] = {
8931  CL_QUEUE_PROPERTIES, properties,
8932  0 };
8933  DeviceCommandQueue deviceQueue(
8934  ::clCreateCommandQueueWithProperties(
8935  context(), device(), queue_properties, &error));
8936 
8937  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8938  if (err != NULL) {
8939  *err = error;
8940  }
8941 
8942  return deviceQueue;
8943  }
8944 
8951  static DeviceCommandQueue makeDefault(
8952  const Context &context, const Device &device, cl_uint queueSize, cl_int *err = nullptr)
8953  {
8954  cl_int error;
8955 
8956  cl_command_queue_properties properties =
8957  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | CL_QUEUE_ON_DEVICE_DEFAULT;
8958  cl_queue_properties queue_properties[] = {
8959  CL_QUEUE_PROPERTIES, properties,
8960  CL_QUEUE_SIZE, queueSize,
8961  0 };
8962  DeviceCommandQueue deviceQueue(
8963  ::clCreateCommandQueueWithProperties(
8964  context(), device(), queue_properties, &error));
8965 
8966  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8967  if (err != NULL) {
8968  *err = error;
8969  }
8970 
8971  return deviceQueue;
8972  }
8973 
8974 
8975 
8976 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
8977 
8983  static DeviceCommandQueue updateDefault(const Context &context, const Device &device, const DeviceCommandQueue &default_queue, cl_int *err = nullptr)
8984  {
8985  cl_int error;
8986  error = clSetDefaultDeviceCommandQueue(context.get(), device.get(), default_queue.get());
8987 
8988  detail::errHandler(error, __SET_DEFAULT_DEVICE_COMMAND_QUEUE_ERR);
8989  if (err != NULL) {
8990  *err = error;
8991  }
8992  return default_queue;
8993  }
8994 
8998  static DeviceCommandQueue getDefault(const CommandQueue &queue, cl_int * err = NULL)
8999  {
9000  return queue.getInfo<CL_QUEUE_DEVICE_DEFAULT>(err);
9001  }
9002 
9003 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
9004 }; // DeviceCommandQueue
9005 
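// Editor's sketch (not part of the upstream header): creating the default
// on-device queue used for device-side enqueue (OpenCL 2.0+). The context,
// device, and 16 KiB queue size are illustrative assumptions.
//
//   cl::Context context = cl::Context::getDefault();
//   cl::Device device = cl::Device::getDefault();
//   cl::DeviceCommandQueue deviceQueue =
//       cl::DeviceCommandQueue::makeDefault(context, device, 16 * 1024);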
9006 namespace detail
9007 {
9008  // Specialization for device command queue
9009  template <>
9010  struct KernelArgumentHandler<cl::DeviceCommandQueue, void>
9011  {
9012  static size_type size(const cl::DeviceCommandQueue&) { return sizeof(cl_command_queue); }
9013  static const cl_command_queue* ptr(const cl::DeviceCommandQueue& value) { return &(value()); }
9014  };
9015 } // namespace detail
9016 
9017 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
9018 
9019 
9020 template< typename IteratorType >
9021  Buffer::Buffer(
9022  const Context &context,
9023  IteratorType startIterator,
9024  IteratorType endIterator,
9025  bool readOnly,
9026  bool useHostPtr,
9027  cl_int* err)
9028 {
9029  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
9030  cl_int error;
9031 
9032  cl_mem_flags flags = 0;
9033  if( readOnly ) {
9034  flags |= CL_MEM_READ_ONLY;
9035  }
9036  else {
9037  flags |= CL_MEM_READ_WRITE;
9038  }
9039  if( useHostPtr ) {
9040  flags |= CL_MEM_USE_HOST_PTR;
9041  }
9042 
9043  size_type size = sizeof(DataType)*(endIterator - startIterator);
9044 
9045  if( useHostPtr ) {
9046  object_ = ::clCreateBuffer(context(), flags, size, static_cast<DataType*>(&*startIterator), &error);
9047  } else {
9048  object_ = ::clCreateBuffer(context(), flags, size, 0, &error);
9049  }
9050 
9051  detail::errHandler(error, __CREATE_BUFFER_ERR);
9052  if (err != NULL) {
9053  *err = error;
9054  }
9055 
9056  if( !useHostPtr ) {
9057  CommandQueue queue(context, 0, &error);
9058  detail::errHandler(error, __CREATE_BUFFER_ERR);
9059  if (err != NULL) {
9060  *err = error;
9061  }
9062 
9063  error = cl::copy(queue, startIterator, endIterator, *this);
9064  detail::errHandler(error, __CREATE_BUFFER_ERR);
9065  if (err != NULL) {
9066  *err = error;
9067  }
9068  }
9069 }
9070 
9071 template< typename IteratorType >
9072  Buffer::Buffer(
9073  const CommandQueue &queue,
9074  IteratorType startIterator,
9075  IteratorType endIterator,
9076  bool readOnly,
9077  bool useHostPtr,
9078  cl_int* err)
9079 {
9080  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
9081  cl_int error;
9082 
9083  cl_mem_flags flags = 0;
9084  if (readOnly) {
9085  flags |= CL_MEM_READ_ONLY;
9086  }
9087  else {
9088  flags |= CL_MEM_READ_WRITE;
9089  }
9090  if (useHostPtr) {
9091  flags |= CL_MEM_USE_HOST_PTR;
9092  }
9093 
9094  size_type size = sizeof(DataType)*(endIterator - startIterator);
9095 
9096  Context context = queue.getInfo<CL_QUEUE_CONTEXT>();
9097 
9098  if (useHostPtr) {
9099  object_ = ::clCreateBuffer(context(), flags, size, static_cast<DataType*>(&*startIterator), &error);
9100  }
9101  else {
9102  object_ = ::clCreateBuffer(context(), flags, size, 0, &error);
9103  }
9104 
9105  detail::errHandler(error, __CREATE_BUFFER_ERR);
9106  if (err != NULL) {
9107  *err = error;
9108  }
9109 
9110  if (!useHostPtr) {
9111  error = cl::copy(queue, startIterator, endIterator, *this);
9112  detail::errHandler(error, __CREATE_BUFFER_ERR);
9113  if (err != NULL) {
9114  *err = error;
9115  }
9116  }
9117 }
9118 
9119 inline cl_int enqueueReadBuffer(
9120  const Buffer& buffer,
9121  cl_bool blocking,
9122  size_type offset,
9123  size_type size,
9124  void* ptr,
9125  const vector<Event>* events = NULL,
9126  Event* event = NULL)
9127 {
9128  cl_int error;
9129  CommandQueue queue = CommandQueue::getDefault(&error);
9130 
9131  if (error != CL_SUCCESS) {
9132  return error;
9133  }
9134 
9135  return queue.enqueueReadBuffer(buffer, blocking, offset, size, ptr, events, event);
9136 }
9137 
9138 inline cl_int enqueueWriteBuffer(
9139  const Buffer& buffer,
9140  cl_bool blocking,
9141  size_type offset,
9142  size_type size,
9143  const void* ptr,
9144  const vector<Event>* events = NULL,
9145  Event* event = NULL)
9146 {
9147  cl_int error;
9148  CommandQueue queue = CommandQueue::getDefault(&error);
9149 
9150  if (error != CL_SUCCESS) {
9151  return error;
9152  }
9153 
9154  return queue.enqueueWriteBuffer(buffer, blocking, offset, size, ptr, events, event);
9155 }
9156 
9157 inline void* enqueueMapBuffer(
9158  const Buffer& buffer,
9159  cl_bool blocking,
9160  cl_map_flags flags,
9161  size_type offset,
9162  size_type size,
9163  const vector<Event>* events = NULL,
9164  Event* event = NULL,
9165  cl_int* err = NULL)
9166 {
9167  cl_int error;
9168  CommandQueue queue = CommandQueue::getDefault(&error);
9169  detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
9170  if (err != NULL) {
9171  *err = error;
9172  }
9173 
9174  void * result = ::clEnqueueMapBuffer(
9175  queue(), buffer(), blocking, flags, offset, size,
9176  (events != NULL) ? (cl_uint) events->size() : 0,
9177  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
9178  (cl_event*) event,
9179  &error);
9180 
9181  detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
9182  if (err != NULL) {
9183  *err = error;
9184  }
9185  return result;
9186 }
9187 
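// Editor's sketch (not part of the upstream header): pairing the global
// enqueueMapBuffer above with enqueueUnmapMemObject, both on the default
// command queue. "buf" and "bytes" are assumed to exist.
//
//   cl_int err;
//   void* mapped = cl::enqueueMapBuffer(buf, CL_TRUE, CL_MAP_WRITE, 0, bytes,
//                                       NULL, NULL, &err);
//   if (err == CL_SUCCESS) {
//       // ... fill the mapped region on the host ...
//       cl::enqueueUnmapMemObject(buf, mapped);
//       cl::CommandQueue::getDefault().finish();
//   }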
9188 
9189 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
9190 
9195 template<typename T>
9196 inline cl_int enqueueMapSVM(
9197  T* ptr,
9198  cl_bool blocking,
9199  cl_map_flags flags,
9200  size_type size,
9201  const vector<Event>* events,
9202  Event* event)
9203 {
9204  cl_int error;
9205  CommandQueue queue = CommandQueue::getDefault(&error);
9206  if (error != CL_SUCCESS) {
9207  return detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
9208  }
9209 
9210  return queue.enqueueMapSVM(
9211  ptr, blocking, flags, size, events, event);
9212 }
9213 
9219 template<typename T, class D>
9220 inline cl_int enqueueMapSVM(
9221  cl::pointer<T, D> ptr,
9222  cl_bool blocking,
9223  cl_map_flags flags,
9224  size_type size,
9225  const vector<Event>* events = NULL,
9226  Event* event = NULL)
9227 {
9228  cl_int error;
9229  CommandQueue queue = CommandQueue::getDefault(&error);
9230  if (error != CL_SUCCESS) {
9231  return detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
9232  }
9233 
9234  return queue.enqueueMapSVM(
9235  ptr, blocking, flags, size, events, event);
9236 }
9237 
9243 template<typename T, class Alloc>
9244 inline cl_int enqueueMapSVM(
9245  cl::vector<T, Alloc> container,
9246  cl_bool blocking,
9247  cl_map_flags flags,
9248  const vector<Event>* events = NULL,
9249  Event* event = NULL)
9250 {
9251  cl_int error;
9252  CommandQueue queue = CommandQueue::getDefault(&error);
9253  if (error != CL_SUCCESS) {
9254  return detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
9255  }
9256 
9257  return queue.enqueueMapSVM(
9258  container, blocking, flags, events, event);
9259 }
9260 
9261 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
9262 
9263 inline cl_int enqueueUnmapMemObject(
9264  const Memory& memory,
9265  void* mapped_ptr,
9266  const vector<Event>* events = NULL,
9267  Event* event = NULL)
9268 {
9269  cl_int error;
9270  CommandQueue queue = CommandQueue::getDefault(&error);
9271  detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
9272  if (error != CL_SUCCESS) {
9273  return error;
9274  }
9275 
9276  cl_event tmp;
9277  cl_int err = detail::errHandler(
9278  ::clEnqueueUnmapMemObject(
9279  queue(), memory(), mapped_ptr,
9280  (events != NULL) ? (cl_uint)events->size() : 0,
9281  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
9282  (event != NULL) ? &tmp : NULL),
9283  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
9284 
9285  if (event != NULL && err == CL_SUCCESS)
9286  *event = tmp;
9287 
9288  return err;
9289 }
9290 
9291 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
9292 
9297 template<typename T>
9298 inline cl_int enqueueUnmapSVM(
9299  T* ptr,
9300  const vector<Event>* events = NULL,
9301  Event* event = NULL)
9302 {
9303  cl_int error;
9304  CommandQueue queue = CommandQueue::getDefault(&error);
9305  if (error != CL_SUCCESS) {
9306  return detail::errHandler(error, __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
9307  }
9308 
9309  return detail::errHandler(queue.enqueueUnmapSVM(ptr, events, event),
9310  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
9311 
9312 }
9313 
9319 template<typename T, class D>
9320 inline cl_int enqueueUnmapSVM(
9321  cl::pointer<T, D> &ptr,
9322  const vector<Event>* events = NULL,
9323  Event* event = NULL)
9324 {
9325  cl_int error;
9326  CommandQueue queue = CommandQueue::getDefault(&error);
9327  if (error != CL_SUCCESS) {
9328  return detail::errHandler(error, __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
9329  }
9330 
9331  return detail::errHandler(queue.enqueueUnmapSVM(ptr, events, event),
9332  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
9333 }
9334 
9340 template<typename T, class Alloc>
9341 inline cl_int enqueueUnmapSVM(
9342  cl::vector<T, Alloc> &container,
9343  const vector<Event>* events = NULL,
9344  Event* event = NULL)
9345 {
9346  cl_int error;
9347  CommandQueue queue = CommandQueue::getDefault(&error);
9348  if (error != CL_SUCCESS) {
9349  return detail::errHandler(error, __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
9350  }
9351 
9352  return detail::errHandler(queue.enqueueUnmapSVM(container, events, event),
9353  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
9354 }
9355 
9356 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
9357 
9358 inline cl_int enqueueCopyBuffer(
9359  const Buffer& src,
9360  const Buffer& dst,
9361  size_type src_offset,
9362  size_type dst_offset,
9363  size_type size,
9364  const vector<Event>* events = NULL,
9365  Event* event = NULL)
9366 {
9367  cl_int error;
9368  CommandQueue queue = CommandQueue::getDefault(&error);
9369 
9370  if (error != CL_SUCCESS) {
9371  return error;
9372  }
9373 
9374  return queue.enqueueCopyBuffer(src, dst, src_offset, dst_offset, size, events, event);
9375 }
9376 
9382 template< typename IteratorType >
9383 inline cl_int copy( IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer )
9384 {
9385  cl_int error;
9386  CommandQueue queue = CommandQueue::getDefault(&error);
9387  if (error != CL_SUCCESS)
9388  return error;
9389 
9390  return cl::copy(queue, startIterator, endIterator, buffer);
9391 }
9392 
9398 template< typename IteratorType >
9399 inline cl_int copy( const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator )
9400 {
9401  cl_int error;
9402  CommandQueue queue = CommandQueue::getDefault(&error);
9403  if (error != CL_SUCCESS)
9404  return error;
9405 
9406  return cl::copy(queue, buffer, startIterator, endIterator);
9407 }
9408 
9414 template< typename IteratorType >
9415 inline cl_int copy( const CommandQueue &queue, IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer )
9416 {
9417  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
9418  cl_int error;
9419 
9420  size_type length = endIterator-startIterator;
9421  size_type byteLength = length*sizeof(DataType);
9422 
9423  DataType *pointer =
9424  static_cast<DataType*>(queue.enqueueMapBuffer(buffer, CL_TRUE, CL_MAP_WRITE, 0, byteLength, 0, 0, &error));
9425  // if exceptions enabled, enqueueMapBuffer will throw
9426  if( error != CL_SUCCESS ) {
9427  return error;
9428  }
9429 #if defined(_MSC_VER)
9430  std::copy(
9431  startIterator,
9432  endIterator,
9433  stdext::checked_array_iterator<DataType*>(
9434  pointer, length));
9435 #else
9436  std::copy(startIterator, endIterator, pointer);
9437 #endif
9438  Event endEvent;
9439  error = queue.enqueueUnmapMemObject(buffer, pointer, 0, &endEvent);
9440  // if exceptions enabled, enqueueUnmapMemObject will throw
9441  if( error != CL_SUCCESS ) {
9442  return error;
9443  }
9444  endEvent.wait();
9445  return CL_SUCCESS;
9446 }
9447 
9453 template< typename IteratorType >
9454 inline cl_int copy( const CommandQueue &queue, const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator )
9455 {
9456  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
9457  cl_int error;
9458 
9459  size_type length = endIterator-startIterator;
9460  size_type byteLength = length*sizeof(DataType);
9461 
9462  DataType *pointer =
9463  static_cast<DataType*>(queue.enqueueMapBuffer(buffer, CL_TRUE, CL_MAP_READ, 0, byteLength, 0, 0, &error));
9464  // if exceptions enabled, enqueueMapBuffer will throw
9465  if( error != CL_SUCCESS ) {
9466  return error;
9467  }
9468  std::copy(pointer, pointer + length, startIterator);
9469  Event endEvent;
9470  error = queue.enqueueUnmapMemObject(buffer, pointer, 0, &endEvent);
9471  // if exceptions enabled, enqueueUnmapMemObject will throw
9472  if( error != CL_SUCCESS ) {
9473  return error;
9474  }
9475  endEvent.wait();
9476  return CL_SUCCESS;
9477 }
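// Illustrative usage sketch for the iterator-based cl::copy helpers above; `host` and
// `dev` are placeholder names and a default context/queue is assumed. The helpers map
// the buffer, run std::copy, and unmap, waiting on the unmap event, so both calls
// below complete before returning.
//
//   std::vector<float> host(256, 1.0f);
//   cl::Buffer dev(CL_MEM_READ_WRITE, host.size() * sizeof(float));
//   cl::copy(host.begin(), host.end(), dev);   // host -> device
//   cl::copy(dev, host.begin(), host.end());   // device -> host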
9478 
9479 
9480 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
9481 
9484 template<typename T, class Alloc>
9485 inline cl_int mapSVM(cl::vector<T, Alloc> &container)
9486 {
9487  return enqueueMapSVM(container, CL_TRUE, CL_MAP_READ | CL_MAP_WRITE);
9488 }
9489 
9493 template<typename T, class Alloc>
9494 inline cl_int unmapSVM(cl::vector<T, Alloc> &container)
9495 {
9496  return enqueueUnmapSVM(container);
9497 }
9498 
9499 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
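// Illustrative usage sketch for the SVM convenience wrappers above (requires
// CL_HPP_TARGET_OPENCL_VERSION >= 200, a device with coarse-grained SVM, and a
// default context/queue; `v` is a placeholder name): map a coarse-grained SVM
// container for host access, modify it, then unmap it before passing it to a kernel.
//
//   cl::coarse_svm_vector<int> v(1024);
//   cl::mapSVM(v);                        // blocking map with CL_MAP_READ | CL_MAP_WRITE
//   std::fill(v.begin(), v.end(), 42);
//   cl::unmapSVM(v);                      // hand the region back to the device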
9500 
9501 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
9502 inline cl_int enqueueReadBufferRect(
9503  const Buffer& buffer,
9504  cl_bool blocking,
9505  const array<size_type, 3>& buffer_offset,
9506  const array<size_type, 3>& host_offset,
9507  const array<size_type, 3>& region,
9508  size_type buffer_row_pitch,
9509  size_type buffer_slice_pitch,
9510  size_type host_row_pitch,
9511  size_type host_slice_pitch,
9512  void *ptr,
9513  const vector<Event>* events = NULL,
9514  Event* event = NULL)
9515 {
9516  cl_int error;
9517  CommandQueue queue = CommandQueue::getDefault(&error);
9518 
9519  if (error != CL_SUCCESS) {
9520  return error;
9521  }
9522 
9523  return queue.enqueueReadBufferRect(
9524  buffer,
9525  blocking,
9526  buffer_offset,
9527  host_offset,
9528  region,
9529  buffer_row_pitch,
9530  buffer_slice_pitch,
9531  host_row_pitch,
9532  host_slice_pitch,
9533  ptr,
9534  events,
9535  event);
9536 }
9537 
9538 inline cl_int enqueueWriteBufferRect(
9539  const Buffer& buffer,
9540  cl_bool blocking,
9541  const array<size_type, 3>& buffer_offset,
9542  const array<size_type, 3>& host_offset,
9543  const array<size_type, 3>& region,
9544  size_type buffer_row_pitch,
9545  size_type buffer_slice_pitch,
9546  size_type host_row_pitch,
9547  size_type host_slice_pitch,
9548  const void *ptr,
9549  const vector<Event>* events = NULL,
9550  Event* event = NULL)
9551 {
9552  cl_int error;
9553  CommandQueue queue = CommandQueue::getDefault(&error);
9554 
9555  if (error != CL_SUCCESS) {
9556  return error;
9557  }
9558 
9559  return queue.enqueueWriteBufferRect(
9560  buffer,
9561  blocking,
9562  buffer_offset,
9563  host_offset,
9564  region,
9565  buffer_row_pitch,
9566  buffer_slice_pitch,
9567  host_row_pitch,
9568  host_slice_pitch,
9569  ptr,
9570  events,
9571  event);
9572 }
9573 
9574 inline cl_int enqueueCopyBufferRect(
9575  const Buffer& src,
9576  const Buffer& dst,
9577  const array<size_type, 3>& src_origin,
9578  const array<size_type, 3>& dst_origin,
9579  const array<size_type, 3>& region,
9580  size_type src_row_pitch,
9581  size_type src_slice_pitch,
9582  size_type dst_row_pitch,
9583  size_type dst_slice_pitch,
9584  const vector<Event>* events = NULL,
9585  Event* event = NULL)
9586 {
9587  cl_int error;
9588  CommandQueue queue = CommandQueue::getDefault(&error);
9589 
9590  if (error != CL_SUCCESS) {
9591  return error;
9592  }
9593 
9594  return queue.enqueueCopyBufferRect(
9595  src,
9596  dst,
9597  src_origin,
9598  dst_origin,
9599  region,
9600  src_row_pitch,
9601  src_slice_pitch,
9602  dst_row_pitch,
9603  dst_slice_pitch,
9604  events,
9605  event);
9606 }
9607 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
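// Illustrative usage sketch for the rectangular copy helpers above; `devBuf` is a
// placeholder and a default queue is assumed. As with clEnqueueReadBufferRect, the
// x components of offsets, regions and pitches are in bytes, while y/z count rows
// and slices. The example reads a 16x16-byte tile out of a buffer with 64-byte rows.
//
//   std::vector<unsigned char> tile(16 * 16);
//   cl::enqueueReadBufferRect(devBuf, CL_TRUE,
//       {{0, 0, 0}},        // buffer_offset
//       {{0, 0, 0}},        // host_offset
//       {{16, 16, 1}},      // region: 16 bytes x 16 rows x 1 slice
//       64, 0,              // buffer row/slice pitch
//       16, 0,              // host row/slice pitch
//       tile.data());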
9608 
9609 inline cl_int enqueueReadImage(
9610  const Image& image,
9611  cl_bool blocking,
9612  const array<size_type, 3>& origin,
9613  const array<size_type, 3>& region,
9614  size_type row_pitch,
9615  size_type slice_pitch,
9616  void* ptr,
9617  const vector<Event>* events = NULL,
9618  Event* event = NULL)
9619 {
9620  cl_int error;
9621  CommandQueue queue = CommandQueue::getDefault(&error);
9622 
9623  if (error != CL_SUCCESS) {
9624  return error;
9625  }
9626 
9627  return queue.enqueueReadImage(
9628  image,
9629  blocking,
9630  origin,
9631  region,
9632  row_pitch,
9633  slice_pitch,
9634  ptr,
9635  events,
9636  event);
9637 }
9638 
9639 inline cl_int enqueueWriteImage(
9640  const Image& image,
9641  cl_bool blocking,
9642  const array<size_type, 3>& origin,
9643  const array<size_type, 3>& region,
9644  size_type row_pitch,
9645  size_type slice_pitch,
9646  const void* ptr,
9647  const vector<Event>* events = NULL,
9648  Event* event = NULL)
9649 {
9650  cl_int error;
9651  CommandQueue queue = CommandQueue::getDefault(&error);
9652 
9653  if (error != CL_SUCCESS) {
9654  return error;
9655  }
9656 
9657  return queue.enqueueWriteImage(
9658  image,
9659  blocking,
9660  origin,
9661  region,
9662  row_pitch,
9663  slice_pitch,
9664  ptr,
9665  events,
9666  event);
9667 }
9668 
9669 inline cl_int enqueueCopyImage(
9670  const Image& src,
9671  const Image& dst,
9672  const array<size_type, 3>& src_origin,
9673  const array<size_type, 3>& dst_origin,
9674  const array<size_type, 3>& region,
9675  const vector<Event>* events = NULL,
9676  Event* event = NULL)
9677 {
9678  cl_int error;
9679  CommandQueue queue = CommandQueue::getDefault(&error);
9680 
9681  if (error != CL_SUCCESS) {
9682  return error;
9683  }
9684 
9685  return queue.enqueueCopyImage(
9686  src,
9687  dst,
9688  src_origin,
9689  dst_origin,
9690  region,
9691  events,
9692  event);
9693 }
9694 
9695 inline cl_int enqueueCopyImageToBuffer(
9696  const Image& src,
9697  const Buffer& dst,
9698  const array<size_type, 3>& src_origin,
9699  const array<size_type, 3>& region,
9700  size_type dst_offset,
9701  const vector<Event>* events = NULL,
9702  Event* event = NULL)
9703 {
9704  cl_int error;
9705  CommandQueue queue = CommandQueue::getDefault(&error);
9706 
9707  if (error != CL_SUCCESS) {
9708  return error;
9709  }
9710 
9711  return queue.enqueueCopyImageToBuffer(
9712  src,
9713  dst,
9714  src_origin,
9715  region,
9716  dst_offset,
9717  events,
9718  event);
9719 }
9720 
9721 inline cl_int enqueueCopyBufferToImage(
9722  const Buffer& src,
9723  const Image& dst,
9724  size_type src_offset,
9725  const array<size_type, 3>& dst_origin,
9726  const array<size_type, 3>& region,
9727  const vector<Event>* events = NULL,
9728  Event* event = NULL)
9729 {
9730  cl_int error;
9731  CommandQueue queue = CommandQueue::getDefault(&error);
9732 
9733  if (error != CL_SUCCESS) {
9734  return error;
9735  }
9736 
9737  return queue.enqueueCopyBufferToImage(
9738  src,
9739  dst,
9740  src_offset,
9741  dst_origin,
9742  region,
9743  events,
9744  event);
9745 }
9746 
9747 
9748 inline cl_int flush(void)
9749 {
9750  cl_int error;
9751  CommandQueue queue = CommandQueue::getDefault(&error);
9752 
9753  if (error != CL_SUCCESS) {
9754  return error;
9755  }
9756 
9757  return queue.flush();
9758 }
9759 
9760 inline cl_int finish(void)
9761 {
9762  cl_int error;
9763  CommandQueue queue = CommandQueue::getDefault(&error);
9764 
9765  if (error != CL_SUCCESS) {
9766  return error;
9767  }
9768 
9769 
9770  return queue.finish();
9771 }
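// Illustrative usage sketch: cl::flush() submits the commands queued on the default
// queue without waiting, while cl::finish() blocks until they have all completed.
//
//   cl::flush();    // issue pending commands to the device
//   cl::finish();   // wait for the default queue to drain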
9772 
9773 class EnqueueArgs
9774 {
9775 private:
9776  CommandQueue queue_;
9777  const NDRange offset_;
9778  const NDRange global_;
9779  const NDRange local_;
9780  vector<Event> events_;
9781 
9782  template<typename... Ts>
9783  friend class KernelFunctor;
9784 
9785 public:
9786  EnqueueArgs(NDRange global) :
9787  queue_(CommandQueue::getDefault()),
9788  offset_(NullRange),
9789  global_(global),
9790  local_(NullRange)
9791  {
9792 
9793  }
9794 
9795  EnqueueArgs(NDRange global, NDRange local) :
9796  queue_(CommandQueue::getDefault()),
9797  offset_(NullRange),
9798  global_(global),
9799  local_(local)
9800  {
9801 
9802  }
9803 
9804  EnqueueArgs(NDRange offset, NDRange global, NDRange local) :
9805  queue_(CommandQueue::getDefault()),
9806  offset_(offset),
9807  global_(global),
9808  local_(local)
9809  {
9810 
9811  }
9812 
9813  EnqueueArgs(Event e, NDRange global) :
9814  queue_(CommandQueue::getDefault()),
9815  offset_(NullRange),
9816  global_(global),
9817  local_(NullRange)
9818  {
9819  events_.push_back(e);
9820  }
9821 
9822  EnqueueArgs(Event e, NDRange global, NDRange local) :
9823  queue_(CommandQueue::getDefault()),
9824  offset_(NullRange),
9825  global_(global),
9826  local_(local)
9827  {
9828  events_.push_back(e);
9829  }
9830 
9831  EnqueueArgs(Event e, NDRange offset, NDRange global, NDRange local) :
9832  queue_(CommandQueue::getDefault()),
9833  offset_(offset),
9834  global_(global),
9835  local_(local)
9836  {
9837  events_.push_back(e);
9838  }
9839 
9840  EnqueueArgs(const vector<Event> &events, NDRange global) :
9841  queue_(CommandQueue::getDefault()),
9842  offset_(NullRange),
9843  global_(global),
9844  local_(NullRange),
9845  events_(events)
9846  {
9847 
9848  }
9849 
9850  EnqueueArgs(const vector<Event> &events, NDRange global, NDRange local) :
9851  queue_(CommandQueue::getDefault()),
9852  offset_(NullRange),
9853  global_(global),
9854  local_(local),
9855  events_(events)
9856  {
9857 
9858  }
9859 
9860  EnqueueArgs(const vector<Event> &events, NDRange offset, NDRange global, NDRange local) :
9861  queue_(CommandQueue::getDefault()),
9862  offset_(offset),
9863  global_(global),
9864  local_(local),
9865  events_(events)
9866  {
9867 
9868  }
9869 
9870  EnqueueArgs(CommandQueue &queue, NDRange global) :
9871  queue_(queue),
9872  offset_(NullRange),
9873  global_(global),
9874  local_(NullRange)
9875  {
9876 
9877  }
9878 
9879  EnqueueArgs(CommandQueue &queue, NDRange global, NDRange local) :
9880  queue_(queue),
9881  offset_(NullRange),
9882  global_(global),
9883  local_(local)
9884  {
9885 
9886  }
9887 
9888  EnqueueArgs(CommandQueue &queue, NDRange offset, NDRange global, NDRange local) :
9889  queue_(queue),
9890  offset_(offset),
9891  global_(global),
9892  local_(local)
9893  {
9894 
9895  }
9896 
9897  EnqueueArgs(CommandQueue &queue, Event e, NDRange global) :
9898  queue_(queue),
9899  offset_(NullRange),
9900  global_(global),
9901  local_(NullRange)
9902  {
9903  events_.push_back(e);
9904  }
9905 
9906  EnqueueArgs(CommandQueue &queue, Event e, NDRange global, NDRange local) :
9907  queue_(queue),
9908  offset_(NullRange),
9909  global_(global),
9910  local_(local)
9911  {
9912  events_.push_back(e);
9913  }
9914 
9915  EnqueueArgs(CommandQueue &queue, Event e, NDRange offset, NDRange global, NDRange local) :
9916  queue_(queue),
9917  offset_(offset),
9918  global_(global),
9919  local_(local)
9920  {
9921  events_.push_back(e);
9922  }
9923 
9924  EnqueueArgs(CommandQueue &queue, const vector<Event> &events, NDRange global) :
9925  queue_(queue),
9926  offset_(NullRange),
9927  global_(global),
9928  local_(NullRange),
9929  events_(events)
9930  {
9931 
9932  }
9933 
9934  EnqueueArgs(CommandQueue &queue, const vector<Event> &events, NDRange global, NDRange local) :
9935  queue_(queue),
9936  offset_(NullRange),
9937  global_(global),
9938  local_(local),
9939  events_(events)
9940  {
9941 
9942  }
9943 
9944  EnqueueArgs(CommandQueue &queue, const vector<Event> &events, NDRange offset, NDRange global, NDRange local) :
9945  queue_(queue),
9946  offset_(offset),
9947  global_(global),
9948  local_(local),
9949  events_(events)
9950  {
9951 
9952  }
9953 };
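// Illustrative usage sketch; `queue` and `waitList` are placeholder names. When no
// queue is given, EnqueueArgs uses CommandQueue::getDefault(); when no local size is
// given, it defaults to NullRange and the implementation chooses the work-group size.
//
//   cl::EnqueueArgs a1(cl::NDRange(1024));                                   // global only
//   cl::EnqueueArgs a2(cl::NDRange(1024), cl::NDRange(64));                  // global + local
//   cl::EnqueueArgs a3(queue, cl::NDRange(256), cl::NDRange(1024), cl::NDRange(64)); // with offset
//   cl::EnqueueArgs a4(waitList, cl::NDRange(1024));                         // wait on events first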
9954 
9955 
9956 //----------------------------------------------------------------------------------------------
9957 
9958 
9963 template<typename... Ts>
9964 class KernelFunctor
9965 {
9966 private:
9967  Kernel kernel_;
9968 
9969  template<int index, typename T0, typename... T1s>
9970  void setArgs(T0&& t0, T1s&&... t1s)
9971  {
9972  kernel_.setArg(index, t0);
9973  setArgs<index + 1, T1s...>(std::forward<T1s>(t1s)...);
9974  }
9975 
9976  template<int index, typename T0>
9977  void setArgs(T0&& t0)
9978  {
9979  kernel_.setArg(index, t0);
9980  }
9981 
9982  template<int index>
9983  void setArgs()
9984  {
9985  }
9986 
9987 
9988 public:
9989  KernelFunctor(Kernel kernel) : kernel_(kernel)
9990  {}
9991 
9992  KernelFunctor(
9993  const Program& program,
9994  const string name,
9995  cl_int * err = NULL) :
9996  kernel_(program, name.c_str(), err)
9997  {}
9998 
10000 typedef Event result_type;
10001 
10007 Event operator()(
10008 const EnqueueArgs& args,
10009  Ts... ts)
10010  {
10011  Event event;
10012  setArgs<0>(std::forward<Ts>(ts)...);
10013 
10014  args.queue_.enqueueNDRangeKernel(
10015  kernel_,
10016  args.offset_,
10017  args.global_,
10018  args.local_,
10019  &args.events_,
10020  &event);
10021 
10022  return event;
10023  }
10024 
10031 Event operator()(
10032 const EnqueueArgs& args,
10033  Ts... ts,
10034  cl_int &error)
10035  {
10036  Event event;
10037  setArgs<0>(std::forward<Ts>(ts)...);
10038 
10039  error = args.queue_.enqueueNDRangeKernel(
10040  kernel_,
10041  args.offset_,
10042  args.global_,
10043  args.local_,
10044  &args.events_,
10045  &event);
10046 
10047  return event;
10048  }
10049 
10050 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
10051  cl_int setSVMPointers(const vector<void*> &pointerList)
10052  {
10053  return kernel_.setSVMPointers(pointerList);
10054  }
10055 
10056  template<typename T0, typename... T1s>
10057  cl_int setSVMPointers(const T0 &t0, T1s &... ts)
10058  {
10059  return kernel_.setSVMPointers(t0, ts...);
10060  }
10061 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
10062 
10063  Kernel getKernel()
10064  {
10065  return kernel_;
10066  }
10067 };
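// Illustrative usage sketch; `program`, `a`, `b`, `c` and `n` are placeholders and the
// kernel name "vadd" is assumed. The template parameters must match the kernel's
// argument types; the trailing cl_int& overload reports the enqueue status.
//
//   auto vadd = cl::KernelFunctor<cl::Buffer, cl::Buffer, cl::Buffer, int>(program, "vadd");
//   cl_int err;
//   cl::Event done = vadd(cl::EnqueueArgs(cl::NDRange(n)), a, b, c, static_cast<int>(n), err);
//   done.wait();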
10068 
10069 namespace compatibility {
10074  template<typename... Ts>
10075 struct make_kernel
10076 {
10077  typedef KernelFunctor<Ts...> FunctorType;
10078 
10079  FunctorType functor_;
10080 
10081  make_kernel(
10082  const Program& program,
10083  const string name,
10084  cl_int * err = NULL) :
10085  functor_(FunctorType(program, name, err))
10086  {}
10087 
10088  make_kernel(
10089  const Kernel kernel) :
10090  functor_(FunctorType(kernel))
10091  {}
10092 
10094 typedef Event result_type;
10095 
10097  typedef Event type_(
10098  const EnqueueArgs&,
10099  Ts...);
10100 
10101  Event operator()(
10102  const EnqueueArgs& enqueueArgs,
10103  Ts... args)
10104  {
10105  return functor_(
10106  enqueueArgs, args...);
10107  }
10108  };
10109 } // namespace compatibility
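// Illustrative usage sketch with placeholder names: legacy cl.hpp code that used
// cl::make_kernel can switch to cl::compatibility::make_kernel, which simply forwards
// to KernelFunctor; new code should prefer KernelFunctor directly.
//
//   cl::compatibility::make_kernel<cl::Buffer, int> scale(program, "scale");
//   scale(cl::EnqueueArgs(cl::NDRange(n)), data, 2);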
10110 
10111 
10112 //----------------------------------------------------------------------------------------------------------------------
10113 
10114 #undef CL_HPP_ERR_STR_
10115 #if !defined(CL_HPP_USER_OVERRIDE_ERROR_STRINGS)
10116 #undef __GET_DEVICE_INFO_ERR
10117 #undef __GET_PLATFORM_INFO_ERR
10118 #undef __GET_DEVICE_IDS_ERR
10119 #undef __GET_PLATFORM_IDS_ERR
10120 #undef __GET_CONTEXT_INFO_ERR
10121 #undef __GET_EVENT_INFO_ERR
10122 #undef __GET_EVENT_PROFILE_INFO_ERR
10123 #undef __GET_MEM_OBJECT_INFO_ERR
10124 #undef __GET_IMAGE_INFO_ERR
10125 #undef __GET_SAMPLER_INFO_ERR
10126 #undef __GET_KERNEL_INFO_ERR
10127 #undef __GET_KERNEL_ARG_INFO_ERR
10128 #undef __GET_KERNEL_SUB_GROUP_INFO_ERR
10129 #undef __GET_KERNEL_WORK_GROUP_INFO_ERR
10130 #undef __GET_PROGRAM_INFO_ERR
10131 #undef __GET_PROGRAM_BUILD_INFO_ERR
10132 #undef __GET_COMMAND_QUEUE_INFO_ERR
10133 #undef __CREATE_CONTEXT_ERR
10134 #undef __CREATE_CONTEXT_FROM_TYPE_ERR
10135 #undef __GET_SUPPORTED_IMAGE_FORMATS_ERR
10136 #undef __CREATE_BUFFER_ERR
10137 #undef __COPY_ERR
10138 #undef __CREATE_SUBBUFFER_ERR
10139 #undef __CREATE_GL_BUFFER_ERR
10140 #undef __CREATE_GL_RENDER_BUFFER_ERR
10141 #undef __GET_GL_OBJECT_INFO_ERR
10142 #undef __CREATE_IMAGE_ERR
10143 #undef __CREATE_GL_TEXTURE_ERR
10144 #undef __IMAGE_DIMENSION_ERR
10145 #undef __SET_MEM_OBJECT_DESTRUCTOR_CALLBACK_ERR
10146 #undef __CREATE_USER_EVENT_ERR
10147 #undef __SET_USER_EVENT_STATUS_ERR
10148 #undef __SET_EVENT_CALLBACK_ERR
10149 #undef __WAIT_FOR_EVENTS_ERR
10150 #undef __CREATE_KERNEL_ERR
10151 #undef __SET_KERNEL_ARGS_ERR
10152 #undef __CREATE_PROGRAM_WITH_SOURCE_ERR
10153 #undef __CREATE_PROGRAM_WITH_IL_ERR
10154 #undef __CREATE_PROGRAM_WITH_BINARY_ERR
10155 #undef __CREATE_PROGRAM_WITH_IL_ERR
10156 #undef __CREATE_PROGRAM_WITH_BUILT_IN_KERNELS_ERR
10157 #undef __BUILD_PROGRAM_ERR
10158 #undef __COMPILE_PROGRAM_ERR
10159 #undef __LINK_PROGRAM_ERR
10160 #undef __CREATE_KERNELS_IN_PROGRAM_ERR
10161 #undef __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR
10162 #undef __CREATE_SAMPLER_WITH_PROPERTIES_ERR
10163 #undef __SET_COMMAND_QUEUE_PROPERTY_ERR
10164 #undef __ENQUEUE_READ_BUFFER_ERR
10165 #undef __ENQUEUE_READ_BUFFER_RECT_ERR
10166 #undef __ENQUEUE_WRITE_BUFFER_ERR
10167 #undef __ENQUEUE_WRITE_BUFFER_RECT_ERR
10168 #undef __ENQEUE_COPY_BUFFER_ERR
10169 #undef __ENQEUE_COPY_BUFFER_RECT_ERR
10170 #undef __ENQUEUE_FILL_BUFFER_ERR
10171 #undef __ENQUEUE_READ_IMAGE_ERR
10172 #undef __ENQUEUE_WRITE_IMAGE_ERR
10173 #undef __ENQUEUE_COPY_IMAGE_ERR
10174 #undef __ENQUEUE_FILL_IMAGE_ERR
10175 #undef __ENQUEUE_COPY_IMAGE_TO_BUFFER_ERR
10176 #undef __ENQUEUE_COPY_BUFFER_TO_IMAGE_ERR
10177 #undef __ENQUEUE_MAP_BUFFER_ERR
10178 #undef __ENQUEUE_MAP_IMAGE_ERR
10179 #undef __ENQUEUE_UNMAP_MEM_OBJECT_ERR
10180 #undef __ENQUEUE_NDRANGE_KERNEL_ERR
10181 #undef __ENQUEUE_NATIVE_KERNEL
10182 #undef __ENQUEUE_MIGRATE_MEM_OBJECTS_ERR
10183 #undef __ENQUEUE_MIGRATE_SVM_ERR
10184 #undef __ENQUEUE_ACQUIRE_GL_ERR
10185 #undef __ENQUEUE_RELEASE_GL_ERR
10186 #undef __CREATE_PIPE_ERR
10187 #undef __GET_PIPE_INFO_ERR
10188 #undef __RETAIN_ERR
10189 #undef __RELEASE_ERR
10190 #undef __FLUSH_ERR
10191 #undef __FINISH_ERR
10192 #undef __VECTOR_CAPACITY_ERR
10193 #undef __CREATE_SUB_DEVICES_ERR
10194 #undef __CREATE_SUB_DEVICES_ERR
10195 #undef __ENQUEUE_MARKER_ERR
10196 #undef __ENQUEUE_WAIT_FOR_EVENTS_ERR
10197 #undef __ENQUEUE_BARRIER_ERR
10198 #undef __UNLOAD_COMPILER_ERR
10199 #undef __CREATE_GL_TEXTURE_2D_ERR
10200 #undef __CREATE_GL_TEXTURE_3D_ERR
10201 #undef __CREATE_IMAGE2D_ERR
10202 #undef __CREATE_IMAGE3D_ERR
10203 #undef __CREATE_COMMAND_QUEUE_ERR
10204 #undef __ENQUEUE_TASK_ERR
10205 #undef __CREATE_SAMPLER_ERR
10206 #undef __ENQUEUE_MARKER_WAIT_LIST_ERR
10207 #undef __ENQUEUE_BARRIER_WAIT_LIST_ERR
10208 #undef __CLONE_KERNEL_ERR
10209 #undef __GET_HOST_TIMER_ERR
10210 #undef __GET_DEVICE_AND_HOST_TIMER_ERR
10211 
10212 #endif //CL_HPP_USER_OVERRIDE_ERROR_STRINGS
10213 
10214 // Extensions
10215 #undef CL_HPP_INIT_CL_EXT_FCN_PTR_
10216 #undef CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_
10217 
10218 #if defined(CL_HPP_USE_CL_DEVICE_FISSION)
10219 #undef CL_HPP_PARAM_NAME_DEVICE_FISSION_
10220 #endif // CL_HPP_USE_CL_DEVICE_FISSION
10221 
10222 #undef CL_HPP_NOEXCEPT_
10223 #undef CL_HPP_DEFINE_STATIC_MEMBER_
10224 
10225 } // namespace cl
10226 
10227 #endif // CL_HPP_
cl::compatibility::make_kernel::result_type
Event result_type
Return type of the functor.
Definition: cl2.hpp:10094
cl::SVMTraitCoarse
Definition: cl2.hpp:3496
cl::Image2D::Image2D
Image2D(const Context &context, ImageFormat format, const Buffer &sourceBuffer, size_type width, size_type height, size_type row_pitch=0, cl_int *err=nullptr)
Constructs a 2D Image from a buffer.
Definition: cl2.hpp:4762
cl::Image3DGL::Image3DGL
Image3DGL(const cl_mem &image, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:5292
cl::copy
cl_int copy(IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer)
Definition: cl2.hpp:9383
cl::Image1DArray::Image1DArray
Image1DArray(const Image1DArray &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4649
cl::CommandQueue::enqueueMigrateSVM
cl_int enqueueMigrateSVM(const cl::vector< cl::pointer< T, D >> &svmPointers, const cl::vector< size_type > &sizes, cl_mem_migration_flags flags=0, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8390
cl::Device::getDeviceAndHostTimer
std::pair< cl_ulong, cl_ulong > getDeviceAndHostTimer(cl_int *error=nullptr)
Definition: cl2.hpp:2239
cl::DeviceCommandQueue::DeviceCommandQueue
DeviceCommandQueue(const cl_command_queue &commandQueue, bool retainObject=false)
Constructor from cl_command_queue - takes ownership.
Definition: cl2.hpp:8828
cl::Pipe::getInfo
cl_int getInfo(cl_pipe_info name, T *param) const
Wrapper for clGetMemObjectInfo().
Definition: cl2.hpp:5534
cl::CommandQueue::CommandQueue
CommandQueue(const CommandQueue &queue)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:7460
cl::Device::getInfo
detail::param_traits< detail::cl_device_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetDeviceInfo() that returns by value.
Definition: cl2.hpp:2196
cl::Event::getProfilingInfo
detail::param_traits< detail::cl_profiling_info, name >::param_type getProfilingInfo(cl_int *err=NULL) const
Wrapper for clGetEventProfilingInfo() that returns by value.
Definition: cl2.hpp:3205
cl::SVMAllocator::rebind
Definition: cl2.hpp:3565
cl::Program
Program interface that implements cl_program.
Definition: cl2.hpp:6164
cl::Pipe
Class interface for Pipe Memory Objects.
Definition: cl2.hpp:5425
cl::Sampler
Class interface for cl_sampler.
Definition: cl2.hpp:5566
cl::NDRange::size
size_type size() const
Returns the size of the object in bytes based on the.
Definition: cl2.hpp:5754
cl::BufferGL::BufferGL
BufferGL(const Context &context, cl_mem_flags flags, cl_GLuint bufobj, cl_int *err=NULL)
Constructs a BufferGL in a specified context, from a given GL buffer.
Definition: cl2.hpp:4143
cl::BufferRenderGL::BufferRenderGL
BufferRenderGL(const Context &context, cl_mem_flags flags, cl_GLuint bufobj, cl_int *err=NULL)
Constructs a BufferRenderGL in a specified context, from a given GL Renderbuffer.
Definition: cl2.hpp:4240
cl::Image2D::Image2D
Image2D(Image2D &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4905
cl::Event::setCallback
cl_int setCallback(cl_int type, void(CL_CALLBACK *pfn_notify)(cl_event, cl_int, void *), void *user_data=NULL)
Registers a user callback function for a specific command execution status.
Definition: cl2.hpp:3232
cl::Image1D::Image1D
Image1D(Image1D &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4490
cl::Platform::unloadCompiler
cl_int unloadCompiler()
Wrapper for clUnloadCompiler().
Definition: cl2.hpp:2696
cl::Kernel::setArg
std::enable_if< std::is_pointer< T >::value, cl_int >::type setArg(cl_uint index, const T argPtr)
setArg overload taking a pointer type
Definition: cl2.hpp:6016
cl::Context::getDefault
static Context getDefault(cl_int *err=NULL)
Returns a singleton context including all devices of CL_DEVICE_TYPE_DEFAULT.
Definition: cl2.hpp:2982
cl::Program::Program
Program(const cl_program &program, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:6552
cl::detail::ReferenceHandler< cl_device_id >::retain
static cl_int retain(cl_device_id device)
Definition: cl2.hpp:1593
cl::Memory::Memory
Memory(const cl_mem &memory, bool retainObject)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:3342
cl::Image1D::Image1D
Image1D(const Image1D &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4476
cl::Image2DGL::Image2DGL
Image2DGL(Image2DGL &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5001
cl::CommandQueue::enqueueFillImage
cl_int enqueueFillImage(const Image &image, cl_uint4 fillColor, const array< size_type, 3 > &origin, const array< size_type, 3 > &region, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7909
cl::CommandQueue::enqueueFillImage
cl_int enqueueFillImage(const Image &image, cl_int4 fillColor, const array< size_type, 3 > &origin, const array< size_type, 3 > &region, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7875
cl::Buffer::Buffer
Buffer(const Buffer &buf)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:3963
cl::Platform::Platform
Platform()
Default constructor - initializes to NULL.
Definition: cl2.hpp:2424
cl::DeviceCommandQueue::getDefault
static DeviceCommandQueue getDefault(const CommandQueue &queue, cl_int *err=NULL)
Definition: cl2.hpp:8998
cl::CommandQueue::enqueueMigrateSVM
cl_int enqueueMigrateSVM(const cl::vector< cl::vector< T, Alloc >> &svmContainers, cl_mem_migration_flags flags=0, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8448
cl::Image2D::Image2D
Image2D()
Default constructor - initializes to NULL.
Definition: cl2.hpp:4866
cl::SVMTraitAtomic
Definition: cl2.hpp:3517
cl::Image3D::Image3D
Image3D()
Default constructor - initializes to NULL.
Definition: cl2.hpp:5191
cl::Context::Context
Context(Context &&ctx) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:2966
cl::UserEvent
Class interface for user events (a subset of cl_event's).
Definition: cl2.hpp:3266
cl::Platform::get
static cl_int get(vector< Platform > *platforms)
Gets a list of available platforms.
Definition: cl2.hpp:2628
cl::Sampler::Sampler
Sampler()
Default constructor - initializes to NULL.
Definition: cl2.hpp:5570
cl::Event::Event
Event(const cl_event &event, bool retainObject=false)
Constructor from cl_event - takes ownership.
Definition: cl2.hpp:3156
cl::Image3D::Image3D
Image3D(Image3D &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5230
cl::DeviceCommandQueue::DeviceCommandQueue
DeviceCommandQueue(const Context &context, const Device &device, DeviceQueueProperties properties=DeviceQueueProperties::None, cl_int *err=NULL)
Definition: cl2.hpp:8774
cl::fine_svm_vector
vector< T, cl::SVMAllocator< int, cl::SVMTraitFine<> >> fine_svm_vector
Vector alias to simplify contruction of fine-grained SVM containers.
Definition: cl2.hpp:3798
cl::Image3DGL::operator=
Image3DGL & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:5299
cl::Buffer
Class interface for Buffer Memory Objects.
Definition: cl2.hpp:3815
cl::Pipe::Pipe
Pipe(cl_uint packet_size, cl_uint max_packets, cl_int *err=NULL)
Constructs a Pipe in a the default context.
Definition: cl2.hpp:5463
cl::CommandQueue::CommandQueue
CommandQueue(const cl_command_queue &commandQueue, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:7448
cl::Pipe::getInfo
detail::param_traits< detail::cl_pipe_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetMemObjectInfo() that returns by value.
Definition: cl2.hpp:5544
cl::Image3DGL::Image3DGL
Image3DGL(const Image3DGL &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5308
cl::Program::Program
Program(const Context &context, const vector< Device > &devices, const string &kernelNames, cl_int *err=NULL)
Definition: cl2.hpp:6514
cl::Image3D::operator=
Image3D & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:5207
cl::Image2D::operator=
Image2D & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:4882
cl::DeviceCommandQueue::DeviceCommandQueue
DeviceCommandQueue(const DeviceCommandQueue &queue)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:8840
cl::Kernel::clone
Kernel clone()
Definition: cl2.hpp:6150
cl::Image1DBuffer
Image interface for 1D buffer images.
Definition: cl2.hpp:4506
cl::BufferRenderGL::BufferRenderGL
BufferRenderGL(const BufferRenderGL &buf)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4285
cl::NDRange::dimensions
size_type dimensions() const
Queries the number of dimensions in the range.
Definition: cl2.hpp:5747
cl::DeviceCommandQueue::makeDefault
static DeviceCommandQueue makeDefault(cl_int *err=nullptr)
Definition: cl2.hpp:8893
cl::Program::Program
Program(const Sources &sources, cl_int *err=NULL)
Definition: cl2.hpp:6255
cl::Device::Device
Device(Device &&dev) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:2173
cl::Sampler::Sampler
Sampler(const Sampler &sam)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5640
cl::CommandQueue::enqueueMigrateSVM
cl_int enqueueMigrateSVM(const cl::vector< cl::pointer< T, D >> &svmPointers, cl_mem_migration_flags flags=0, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8412
cl::enqueueUnmapSVM
cl_int enqueueUnmapSVM(T *ptr, const vector< Event > *events=NULL, Event *event=NULL)
Definition: cl2.hpp:9298
cl::Kernel::setSVMPointers
cl_int setSVMPointers(const vector< void * > &pointerList)
Definition: cl2.hpp:6051
cl::LocalSpaceArg
Local address wrapper for use with Kernel::setArg.
Definition: cl2.hpp:5774
cl::Memory::Memory
Memory(const Memory &mem)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:3359
cl::Context
Class interface for cl_context.
Definition: cl2.hpp:2733
cl::mapSVM
cl_int mapSVM(cl::vector< T, Alloc > &container)
Definition: cl2.hpp:9485
cl::Image::Image
Image(const Image &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4356
cl::Program::Program
Program(Program &&program) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:6578
cl::SVMTraitFine
Definition: cl2.hpp:3506
cl::Buffer::Buffer
Buffer(Buffer &&buf) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:3977
cl::Pipe::operator=
Pipe & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:5498
cl::Buffer::Buffer
Buffer(const Context &context, cl_mem_flags flags, size_type size, void *host_ptr=NULL, cl_int *err=NULL)
Constructs a Buffer in a specified context.
Definition: cl2.hpp:3826
cl::ImageFormat
Adds constructors and member functions for cl_image_format.
Definition: cl2.hpp:2041
cl::Image::Image
Image(Image &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4370
cl::Program::Program
Program(const Context &context, const vector< char > &IL, bool build=false, cl_int *err=NULL)
Definition: cl2.hpp:6382
cl::Context::getInfo
detail::param_traits< detail::cl_context_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetContextInfo() that returns by value.
Definition: cl2.hpp:3040
cl::Buffer::Buffer
Buffer()
Default constructor - initializes to NULL.
Definition: cl2.hpp:3938
cl::Image1D::Image1D
Image1D(const cl_mem &image1D, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4460
cl::CommandQueue::enqueueBarrierWithWaitList
cl_int enqueueBarrierWithWaitList(const vector< Event > *events=0, Event *event=0) const
Definition: cl2.hpp:8281
cl::Image2DGL::Image2DGL
Image2DGL(const Context &context, cl_mem_flags flags, cl_GLenum target, cl_GLint miplevel, cl_GLuint texobj, cl_int *err=NULL)
Constructs an Image2DGL in a specified context, from a given GL Texture.
Definition: cl2.hpp:4937
cl::Memory
Class interface for cl_mem.
Definition: cl2.hpp:3325
cl::Program::setSpecializationConstant
cl_int setSpecializationConstant(cl_uint index, size_type size, const void *value)
Sets a SPIR-V specialization constant.
Definition: cl2.hpp:6813
cl::Sampler::Sampler
Sampler(const cl_sampler &sampler, bool retainObject=false)
Constructor from cl_sampler - takes ownership.
Definition: cl2.hpp:5623
cl::Image2DGL::Image2DGL
Image2DGL(const Image2DGL &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4987
cl::ImageGL::ImageGL
ImageGL(const cl_mem &image, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:5377
cl::Device::Device
Device(const Device &dev)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:2159
cl::Image2DArray::Image2DArray
Image2DArray(const cl_mem &imageArray, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:5069
cl::Event::waitForEvents
static cl_int waitForEvents(const vector< Event > &events)
Blocks the calling thread until every event specified is complete.
Definition: cl2.hpp:3252
cl::NDRange::NDRange
NDRange(size_type size0, size_type size1)
Constructs two-dimensional range.
Definition: cl2.hpp:5721
cl::CommandQueue::CommandQueue
CommandQueue(const Context &context, cl_command_queue_properties properties=0, cl_int *err=NULL)
Constructs a CommandQueue for an implementation defined device in the given context Will return an CL...
Definition: cl2.hpp:7184
cl::NDRange::NDRange
NDRange()
Default constructor - resulting range has zero dimensions.
Definition: cl2.hpp:5703
cl::CommandQueue::enqueueFillBuffer
cl_int enqueueFillBuffer(const Buffer &buffer, PatternType pattern, size_type offset, size_type size, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7709
cl::Buffer::Buffer
Buffer(const cl_mem &buffer, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:3947
cl::DeviceCommandQueue
DeviceCommandQueue interface for device cl_command_queues.
Definition: cl2.hpp:8739
cl::Image2D::Image2D
Image2D(const Context &context, cl_mem_flags flags, ImageFormat format, size_type width, size_type height, size_type row_pitch=0, void *host_ptr=NULL, cl_int *err=NULL)
Constructs a 2D Image in a specified context.
Definition: cl2.hpp:4691
cl::SVMTraitReadOnly
Definition: cl2.hpp:3474
cl::Image2DArray::Image2DArray
Image2DArray(const Image2DArray &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5080
cl::Pipe::Pipe
Pipe()
Default constructor - initializes to NULL.
Definition: cl2.hpp:5482
cl::NDRange::NDRange
NDRange(size_type size0)
Constructs one-dimensional range.
Definition: cl2.hpp:5712
cl::Buffer::Buffer
Buffer(cl_mem_flags flags, size_type size, void *host_ptr=NULL, cl_int *err=NULL)
Constructs a Buffer in the default context.
Definition: cl2.hpp:3851
cl::Image2DGL::Image2DGL
Image2DGL(const cl_mem &image, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4971
cl::UnloadCompiler
CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_int UnloadCompiler() CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
Definition: cl2.hpp:2719
cl::Platform::getInfo
detail::param_traits< detail::cl_platform_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetPlatformInfo() that returns by value.
Definition: cl2.hpp:2483
cl::Sampler::getInfo
cl_int getInfo(cl_sampler_info name, T *param) const
Wrapper for clGetSamplerInfo().
Definition: cl2.hpp:5667
cl::Kernel::Kernel
Kernel()
Default constructor - initializes to NULL.
Definition: cl2.hpp:5838
cl::Memory::operator=
Memory & operator=(const cl_mem &rhs)
Assignment operator from cl_mem - takes ownership.
Definition: cl2.hpp:3350
cl::Memory::setDestructorCallback
cl_int setDestructorCallback(void(CL_CALLBACK *pfn_notify)(cl_mem, void *), void *user_data=NULL)
Registers a callback function to be called when the memory object is no longer needed.
Definition: cl2.hpp:3422
cl::CommandQueue::enqueueMigrateSVM
cl_int enqueueMigrateSVM(const cl::vector< cl::vector< T, Alloc >> &svmContainers, const cl::vector< size_type > &sizes, cl_mem_migration_flags flags=0, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8427
cl::Device::operator=
Device & operator=(const cl_device_id &rhs)
Assignment operator from cl_device_id.
Definition: cl2.hpp:2150
cl::Image3DGL::Image3DGL
Image3DGL(const Context &context, cl_mem_flags flags, cl_GLenum target, cl_GLint miplevel, cl_GLuint texobj, cl_int *err=NULL)
Constructs an Image3DGL in a specified context, from a given GL Texture.
Definition: cl2.hpp:5259
cl::Event::getProfilingInfo
cl_int getProfilingInfo(cl_profiling_info name, T *param) const
Wrapper for clGetEventProfilingInfo().
Definition: cl2.hpp:3195
cl::BufferRenderGL::BufferRenderGL
BufferRenderGL()
Default constructor - initializes to NULL.
Definition: cl2.hpp:4260
cl::Kernel::setArg
std::enable_if<!std::is_pointer< T >::value, cl_int >::type setArg(cl_uint index, const T &value)
setArg overload taking a POD type
Definition: cl2.hpp:6028
cl::Sampler::operator=
Sampler & operator=(const cl_sampler &rhs)
Assignment operator from cl_sampler - takes ownership.
Definition: cl2.hpp:5631
cl::Context::Context
Context(cl_device_type type, cl_context_properties *properties=NULL, void(CL_CALLBACK *notifyFptr)(const char *, const void *, size_type, void *)=NULL, void *data=NULL, cl_int *err=NULL)
Constructs a context including all or a subset of devices of a specified type.
Definition: cl2.hpp:2866
cl::Sampler::Sampler
Sampler(const Context &context, cl_bool normalized_coords, cl_addressing_mode addressing_mode, cl_filter_mode filter_mode, cl_int *err=NULL)
Constructs a Sampler in a specified context.
Definition: cl2.hpp:5576
cl::SVMAllocator
Definition: cl2.hpp:3551
cl::Program::setReleaseCallback
cl_int setReleaseCallback(void(CL_CALLBACK *pfn_notify)(cl_program program, void *user_data), void *user_data=NULL)
Registers a callback function to be called when destructors for program scope global variables are co...
Definition: cl2.hpp:6780
cl::ImageFormat::ImageFormat
ImageFormat(cl_channel_order order, cl_channel_type type)
Initializing constructor.
Definition: cl2.hpp:2047
cl::Image::getImageInfo
cl_int getImageInfo(cl_image_info name, T *param) const
Wrapper for clGetImageInfo().
Definition: cl2.hpp:4385
cl::CommandQueue::CommandQueue
CommandQueue(const Context &context, const Device &device, cl_command_queue_properties properties=0, cl_int *err=NULL)
Constructs a CommandQueue for a passed device and context Will return an CL_INVALID_QUEUE_PROPERTIES ...
Definition: cl2.hpp:7313
cl::Platform::get
static Platform get(cl_int *errResult=NULL)
Gets the first available platform, returning it by value.
Definition: cl2.hpp:2682
cl::Kernel
Class interface for cl_kernel.
Definition: cl2.hpp:5832
cl::CommandQueue::enqueueMapSVM
cl_int enqueueMapSVM(cl::pointer< T, D > &ptr, cl_bool blocking, cl_map_flags flags, size_type size, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8091
cl::SVMAllocator::operator==
bool operator==(SVMAllocator const &rhs)
Definition: cl2.hpp:3685
cl::Program::Program
Program(const Context &context, const vector< Device > &devices, const Binaries &binaries, vector< cl_int > *binaryStatus=NULL, cl_int *err=NULL)
Definition: cl2.hpp:6450
cl::CommandQueue::enqueueMarker
CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_int enqueueMarker(Event *event=NULL) const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
Definition: cl2.hpp:8547
cl::BufferRenderGL::getObjectInfo
cl_int getObjectInfo(cl_gl_object_type *type, cl_GLuint *gl_object_name)
Wrapper for clGetGLObjectInfo().
Definition: cl2.hpp:4311
cl::Kernel::enableFineGrainedSystemSVM
cl_int enableFineGrainedSystemSVM(bool svmEnabled)
Enable fine-grained system SVM.
Definition: cl2.hpp:6087
cl::Image1DBuffer::Image1DBuffer
Image1DBuffer(const cl_mem &image1D, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4548
cl::Pipe::Pipe
Pipe(const Context &context, cl_uint packet_size, cl_uint max_packets, cl_int *err=NULL)
Constructs a Pipe in a specified context.
Definition: cl2.hpp:5438
cl::Context::operator=
Context & operator=(const Context &ctx)
Copy assignment to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:2957
cl::ImageFormat::ImageFormat
ImageFormat()
Default constructor - performs no initialization.
Definition: cl2.hpp:2044
cl::CommandQueue::enqueueUnmapSVM
cl_int enqueueUnmapSVM(T *ptr, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8168
cl::Kernel::setArg
cl_int setArg(cl_uint index, const cl::pointer< T, D > &argPtr)
setArg overload taking a shared_ptr type
Definition: cl2.hpp:5995
cl::Platform::operator=
Platform & operator=(const cl_platform_id &rhs)
Assignment operator from cl_platform_id.
Definition: cl2.hpp:2440
cl::BufferGL::getObjectInfo
cl_int getObjectInfo(cl_gl_object_type *type, cl_GLuint *gl_object_name)
Wrapper for clGetGLObjectInfo().
Definition: cl2.hpp:4214
cl::Event::wait
cl_int wait() const
Blocks the calling thread until this event completes.
Definition: cl2.hpp:3220
cl::Sampler::Sampler
Sampler(Sampler &&sam) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5654
cl::Context::getSupportedImageFormats
cl_int getSupportedImageFormats(cl_mem_flags flags, cl_mem_object_type type, vector< ImageFormat > *formats) const
Gets a list of supported image formats.
Definition: cl2.hpp:3055
cl::BufferGL::BufferGL
BufferGL(const BufferGL &buf)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4188
cl::Kernel::setArg
cl_int setArg(cl_uint index, const cl::vector< T, Alloc > &argPtr)
setArg overload taking a vector type.
Definition: cl2.hpp:6005
cl::Memory::Memory
Memory()
Default constructor - initializes to NULL.
Definition: cl2.hpp:3329
cl::compatibility::make_kernel::type_
Event type_(const EnqueueArgs &, Ts...)
Function signature of kernel functor with no event dependency.
Definition: cl2.hpp:10097
cl::Image
C++ base class for Image Memory objects.
Definition: cl2.hpp:4327
cl::atomic_svm_vector
vector< T, cl::SVMAllocator< int, cl::SVMTraitAtomic<> >> atomic_svm_vector
Vector alias to simplify contruction of fine-grained SVM containers that support platform atomics.
Definition: cl2.hpp:3804
cl::Context::Context
Context(const vector< Device > &devices, cl_context_properties *properties=NULL, void(CL_CALLBACK *notifyFptr)(const char *, const void *, size_type, void *)=NULL, void *data=NULL, cl_int *err=NULL)
Constructs a context including a list of specified devices.
Definition: cl2.hpp:2805
cl::Pipe::Pipe
Pipe(Pipe &&pipe) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5521
cl
The OpenCL C++ bindings are defined within this namespace.
Definition: cl2.hpp:582
cl::BufferRenderGL
Class interface for GL Render Buffer Memory Objects.
Definition: cl2.hpp:4232
cl::SVMTraitReadWrite
Definition: cl2.hpp:3463
cl::Device
Class interface for cl_device_id.
Definition: cl2.hpp:2071
cl::Device::Device
Device()
Default constructor - initializes to NULL.
Definition: cl2.hpp:2108
cl::ImageFormat::operator=
ImageFormat & operator=(const ImageFormat &rhs)
Assignment operator.
Definition: cl2.hpp:2054
cl::KernelFunctor::result_type
Event result_type
Return type of the functor.
Definition: cl2.hpp:10000
cl::ImageGL
general image interface for GL interop. We abstract the 2D and 3D GL images into a single instance he...
Definition: cl2.hpp:5342
cl::CommandQueue::CommandQueue
CommandQueue(const Context &context, QueueProperties properties, cl_int *err=NULL)
Constructs a CommandQueue for an implementation defined device in the given context Will return an CL...
Definition: cl2.hpp:7251
cl::Event::operator=
Event & operator=(const cl_event &rhs)
Assignment operator from cl_event - takes ownership.
Definition: cl2.hpp:3164
cl::Image1DBuffer::Image1DBuffer
Image1DBuffer(Image1DBuffer &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4574
cl::Platform::setDefault
static Platform setDefault(const Platform &default_platform)
Definition: cl2.hpp:2464
cl::Image3D::Image3D
Image3D(const Image3D &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5216
cl::Image::Image
Image()
Default constructor - initializes to NULL.
Definition: cl2.hpp:4331
cl::Program::Program
Program(const vector< char > &IL, bool build=false, cl_int *err=NULL)
Definition: cl2.hpp:6327
cl::Image3D
Class interface for 3D Image Memory objects.
Definition: cl2.hpp:5113
cl::Platform::Platform
Platform(const cl_platform_id &platform, bool retainObject=false)
Constructor from cl_platform_id.
Definition: cl2.hpp:2433
cl::Platform::get
static cl_int get(Platform *platform)
Gets the first available platform.
Definition: cl2.hpp:2663
cl::Kernel::operator=
Kernel & operator=(const cl_kernel &rhs)
Assignment operator from cl_kernel - takes ownership.
Definition: cl2.hpp:5856
cl::BufferRenderGL::BufferRenderGL
BufferRenderGL(BufferRenderGL &&buf) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4299
cl::Image::Image
Image(const cl_mem &image, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4340
cl::Platform::getDevices
cl_int getDevices(cl_device_type type, vector< Device > *devices) const
Gets a list of devices for this platform.
Definition: cl2.hpp:2498
cl::CommandQueue::enqueueMigrateSVM
cl_int enqueueMigrateSVM(const cl::vector< T * > &svmRawPointers, cl_mem_migration_flags flags=0, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8374
cl::Pipe::Pipe
Pipe(const Pipe &pipe)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5507
cl::KernelFunctor
Definition: cl2.hpp:9964
cl::unmapSVM
cl_int unmapSVM(cl::vector< T, Alloc > &container)
Definition: cl2.hpp:9494
cl::Context::Context
Context(const cl_context &context, bool retainObject=false)
Constructor from cl_context - takes ownership.
Definition: cl2.hpp:3014
cl::Image2D
Class interface for 2D Image Memory objects.
Definition: cl2.hpp:4684
cl::Image1D
Class interface for 1D Image Memory objects.
Definition: cl2.hpp:4414
cl::DeviceCommandQueue::makeDefault
static DeviceCommandQueue makeDefault(const Context &context, const Device &device, cl_int *err=nullptr)
Definition: cl2.hpp:8923
cl::Image3DGL::Image3DGL
Image3DGL(Image3DGL &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5322
cl::Platform::getInfo
cl_int getInfo(cl_platform_info name, T *param) const
Wrapper for clGetPlatformInfo().
Definition: cl2.hpp:2473
cl::CommandQueue::enqueueUnmapSVM
cl_int enqueueUnmapSVM(cl::pointer< T, D > &ptr, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8193
cl::detail::GetInfoFunctor1
Definition: cl2.hpp:1548
cl::Image2DGL::Image2DGL
Image2DGL()
Default constructor - initializes to NULL.
Definition: cl2.hpp:4962
cl::Device::setDefault
static Device setDefault(const Device &default_device)
Definition: cl2.hpp:2139
cl::Memory::Memory
Memory(Memory &&mem) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:3373
cl::detail::Deleter
Definition: cl2.hpp:3718
cl::detail::ReferenceHandler
Definition: cl2.hpp:1135
cl::CommandQueue::CommandQueue
CommandQueue(CommandQueue &&queue) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:7474
cl::Context::Context
Context(const Context &ctx)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:2952
cl::CommandQueue::setDefault
static CommandQueue setDefault(const CommandQueue &default_queue)
Definition: cl2.hpp:7432
cl::KernelFunctor::operator()
Event operator()(const EnqueueArgs &args, Ts... ts)
Definition: cl2.hpp:10007
cl::SVMTraitWriteOnly
Definition: cl2.hpp:3485
cl::Image1DArray::Image1DArray
Image1DArray(Image1DArray &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4663
cl::Buffer::Buffer
Buffer(IteratorType startIterator, IteratorType endIterator, bool readOnly, bool useHostPtr=false, cl_int *err=NULL)
Construct a Buffer from a host container via iterators. IteratorType must be random access....
Definition: cl2.hpp:3875
cl::BufferGL::BufferGL
BufferGL(BufferGL &&buf) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4202
cl::Device::getDefault
static Device getDefault(cl_int *errResult=NULL)
Returns the first device on the default context.
Definition: cl2.hpp:2121
cl::CommandQueue::enqueueUnmapSVM
cl_int enqueueUnmapSVM(cl::vector< T, Alloc > &container, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8218
cl::Sampler::getInfo
detail::param_traits< detail::cl_sampler_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetSamplerInfo() that returns by value.
Definition: cl2.hpp:5677
cl::Image2DArray::Image2DArray
Image2DArray(Image2DArray &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5094
cl::Event::Event
Event()
Default constructor - initializes to NULL.
Definition: cl2.hpp:3146
cl::Kernel::setSVMPointers
cl_int setSVMPointers(const std::array< void *, ArrayLength > &pointerList)
Definition: cl2.hpp:6066
cl::Image1DArray::Image1DArray
Image1DArray(const cl_mem &imageArray, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4636
cl::Platform
Class interface for cl_platform_id.
Definition: cl2.hpp:2351
cl::Image3D::Image3D
Image3D(const cl_mem &image3D, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:5200
cl::detail::SVMTraitNull
Definition: cl2.hpp:3452
cl::CommandQueue::enqueueMigrateMemObjects
cl_int enqueueMigrateMemObjects(const vector< Memory > &memObjects, cl_mem_migration_flags flags, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8304
cl::Image3DGL
Class interface for GL 3D Image Memory objects.
Definition: cl2.hpp:5251
cl::enqueueMapSVM
cl_int enqueueMapSVM(T *ptr, cl_bool blocking, cl_map_flags flags, size_type size, const vector< Event > *events=NULL, Event *event=NULL)
Definition: cl2.hpp:9196
cl::DeviceCommandQueue::makeDefault
static DeviceCommandQueue makeDefault(const Context &context, const Device &device, cl_uint queueSize, cl_int *err=nullptr)
Definition: cl2.hpp:8951
cl::CommandQueue::enqueueMapSVM
cl_int enqueueMapSVM(cl::vector< T, Alloc > &container, cl_bool blocking, cl_map_flags flags, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8118
cl::DeviceCommandQueue::DeviceCommandQueue
DeviceCommandQueue(DeviceQueueProperties properties, cl_int *err=NULL)
Definition: cl2.hpp:8751
cl::Image3DGL::Image3DGL
Image3DGL()
Default constructor - initializes to NULL.
Definition: cl2.hpp:5283
cl::Context::getInfo
cl_int getInfo(cl_context_info name, T *param) const
Wrapper for clGetContextInfo().
Definition: cl2.hpp:3030
cl::coarse_svm_vector
vector< T, cl::SVMAllocator< int, cl::SVMTraitCoarse<> >> coarse_svm_vector
Vector alias to simplify contruction of coarse-grained SVM containers.
Definition: cl2.hpp:3792
cl::CommandQueue::enqueueMigrateSVM
cl_int enqueueMigrateSVM(const cl::vector< T * > &svmRawPointers, const cl::vector< size_type > &sizes, cl_mem_migration_flags flags=0, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8345
cl::ImageGL::ImageGL
ImageGL(const ImageGL &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5389
cl::Event
Class interface for cl_event.
Definition: cl2.hpp:3142
cl::EnqueueArgs
Definition: cl2.hpp:9773
cl::Kernel::Kernel
Kernel(const cl_kernel &kernel, bool retainObject=false)
Constructor from cl_kernel - takes ownership.
Definition: cl2.hpp:5848
cl::Local
LocalSpaceArg Local(size_type size)
Helper function for generating LocalSpaceArg objects.
Definition: cl2.hpp:5818
cl::Kernel::Kernel
Kernel(Kernel &&kernel) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5879
cl::Pipe::Pipe
Pipe(const cl_mem &pipe, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:5491
cl::CommandQueue::CommandQueue
CommandQueue(const Context &context, const Device &device, QueueProperties properties, cl_int *err=NULL)
Constructs a CommandQueue for a passed device and context Will return an CL_INVALID_QUEUE_PROPERTIES ...
Definition: cl2.hpp:7364
cl::Image::getImageInfo
detail::param_traits< detail::cl_image_info, name >::param_type getImageInfo(cl_int *err=NULL) const
Wrapper for clGetImageInfo() that returns by value.
Definition: cl2.hpp:4395
cl::CommandQueue::enqueueMapSVM
cl_int enqueueMapSVM(T *ptr, cl_bool blocking, cl_map_flags flags, size_type size, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8063
cl::Program::getBuildInfo
vector< std::pair< cl::Device, typename detail::param_traits< detail::cl_program_build_info, name >::param_type > > getBuildInfo(cl_int *err=NULL) const
Definition: cl2.hpp:6702
cl::SVMAllocator::allocate
pointer allocate(size_type size, typename cl::SVMAllocator< void, SVMTrait >::const_pointer=0)
Definition: cl2.hpp:3615
cl::detail::param_traits
Definition: cl2.hpp:1394
cl::UserEvent::UserEvent
UserEvent()
Default constructor - initializes to NULL.
Definition: cl2.hpp:3289
cl::Memory::getInfo
cl_int getInfo(cl_mem_info name, T *param) const
Wrapper for clGetMemObjectInfo().
Definition: cl2.hpp:3387
cl::DeviceCommandQueue::DeviceCommandQueue
DeviceCommandQueue(DeviceCommandQueue &&queue) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:8854
cl::BufferGL::BufferGL
BufferGL(const cl_mem &buffer, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4172
cl::Program::setSpecializationConstant
std::enable_if<!std::is_pointer< T >::value, cl_int >::type setSpecializationConstant(cl_uint index, const T &value)
Sets a SPIR-V specialization constant.
Definition: cl2.hpp:6798
cl::DeviceCommandQueue::DeviceCommandQueue
DeviceCommandQueue()
Definition: cl2.hpp:8746
cl::NDRange::NDRange
NDRange(size_type size0, size_type size1, size_type size2)
Constructs three-dimensional range.
Definition: cl2.hpp:5730
cl::detail::Wrapper
Definition: cl2.hpp:1751
cl::BufferGL
Class interface for GL Buffer Memory Objects.
Definition: cl2.hpp:4135
cl::Event::getInfo
cl_int getInfo(cl_event_info name, T *param) const
Wrapper for clGetEventInfo().
Definition: cl2.hpp:3172
cl::detail::GetInfoFunctor0
Definition: cl2.hpp:1539
cl::Buffer::createSubBuffer
Buffer createSubBuffer(cl_mem_flags flags, cl_buffer_create_type buffer_create_type, const void *buffer_create_info, cl_int *err=NULL)
Creates a new buffer object from this.
Definition: cl2.hpp:3993
cl::Memory::getInfo
detail::param_traits< detail::cl_mem_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetMemObjectInfo() that returns by value.
Definition: cl2.hpp:3397
cl::CommandQueue::enqueueFillImage
cl_int enqueueFillImage(const Image &image, cl_float4 fillColor, const array< size_type, 3 > &origin, const array< size_type, 3 > &region, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7841
cl::CommandQueue::enqueueBarrier
CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_int enqueueBarrier() const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
Definition: cl2.hpp:8700
cl::Image2D::Image2D
Image2D(const cl_mem &image2D, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4875
cl::allocate_pointer
cl::pointer< T, detail::Deleter< Alloc > > allocate_pointer(const Alloc &alloc_, Args &&... args)
Definition: cl2.hpp:3745
cl::Device::Device
Device(const cl_device_id &device, bool retainObject=false)
Constructor from cl_device_id.
Definition: cl2.hpp:2114
cl::Image2DArray
Image interface for arrays of 2D images.
Definition: cl2.hpp:5019
cl::Image1DArray
Image interface for arrays of 1D images.
Definition: cl2.hpp:4590
cl::Kernel::Kernel
Kernel(const Kernel &kernel)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5865
cl::BufferRenderGL::BufferRenderGL
BufferRenderGL(const cl_mem &buffer, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4269
cl::DeviceCommandQueue::updateDefault
static DeviceCommandQueue updateDefault(const Context &context, const Device &device, const DeviceCommandQueue &default_queue, cl_int *err=nullptr)
Definition: cl2.hpp:8983
cl::NDRange
Class interface for specifying NDRange values.
Definition: cl2.hpp:5695
cl::UserEvent::UserEvent
UserEvent(const Context &context, cl_int *err=NULL)
Constructs a user event on a given context.
Definition: cl2.hpp:3273
cl::Device::createSubDevices
cl_int createSubDevices(const cl_device_partition_property *properties, vector< Device > *devices)
Wrapper for clCreateSubDevices().
Definition: cl2.hpp:2259
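
A sketch that partitions a device into equally sized sub-devices, assuming CL_HPP_TARGET_OPENCL_VERSION >= 120 and a device that supports CL_DEVICE_PARTITION_EQUALLY:

cl_uint units = device.getInfo<CL_DEVICE_MAX_COMPUTE_UNITS>();
cl_device_partition_property props[] = {
    CL_DEVICE_PARTITION_EQUALLY,
    static_cast<cl_device_partition_property>(units / 2),   // compute units per sub-device
    0
};
cl::vector<cl::Device> subDevices;
cl_int err = device.createSubDevices(props, &subDevices);   // fails if units / 2 == 0
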
cl::Event::getInfo
detail::param_traits< detail::cl_event_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetEventInfo() that returns by value.
Definition: cl2.hpp:3182
cl::ImageGL::ImageGL
ImageGL(ImageGL &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5403
cl::detail::ReferenceHandler< cl_device_id >::release
static cl_int release(cl_device_id device)
Definition: cl2.hpp:1604
cl::CommandQueue
CommandQueue interface for cl_command_queue.
Definition: cl2.hpp:6989
cl::Image::operator=
Image & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:4347
cl::compatibility::make_kernel
Definition: cl2.hpp:10075
cl::UserEvent::setStatus
cl_int setStatus(cl_int status)
Sets the execution status of a user event object.
Definition: cl2.hpp:3295
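
A sketch of gating device work on a host-side condition with a user event, assuming existing `context`, `queue`, and `kernel` objects:

cl::UserEvent gate(context);
cl::vector<cl::Event> waitList{gate};
queue.enqueueNDRangeKernel(kernel, cl::NullRange, cl::NDRange(1024), cl::NullRange, &waitList);
// ... host-side preparation the kernel depends on ...
gate.setStatus(CL_COMPLETE);   // the queued kernel is now free to run
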
cl::copy
cl_int copy(const CommandQueue &queue, const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator)
Definition: cl2.hpp:9454
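
A sketch that round-trips data through a device buffer with the copy() helpers (the overload above copies device to host; its counterpart takes the iterator range before the buffer), assuming existing `context` and `queue`:

cl::vector<float> host(1024, 1.0f);
cl::Buffer buf(context, CL_MEM_READ_WRITE, host.size() * sizeof(float));
cl::copy(queue, host.begin(), host.end(), buf);   // host -> device
cl::copy(queue, buf, host.begin(), host.end());   // device -> host
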
cl::Buffer::operator=
Buffer & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:3954
cl::BufferGL::operator=
BufferGL & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:4179
cl::BufferGL::BufferGL
BufferGL()
Default constructor - initializes to NULL.
Definition: cl2.hpp:4163
cl::CommandQueue::enqueueMarkerWithWaitList
cl_int enqueueMarkerWithWaitList(const vector< Event > *events=0, Event *event=0) const
Definition: cl2.hpp:8251
cl::CommandQueue::CommandQueue
CommandQueue(QueueProperties properties, cl_int *err=NULL)
Constructs a CommandQueue based on the passed properties. Will return a CL_INVALID_QUEUE_PROPERTIES error...
Definition: cl2.hpp:7121
cl::Image1D::operator=
Image1D & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:4467
cl::Image1D::Image1D
Image1D()
Default constructor - initializes to NULL.
Definition: cl2.hpp:4451
cl::BufferRenderGL::operator=
BufferRenderGL & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:4276
cl::CommandQueue::CommandQueue
CommandQueue(cl_command_queue_properties properties, cl_int *err=NULL)
Constructs a CommandQueue based on the passed properties. Will return a CL_INVALID_QUEUE_PROPERTIES error...
Definition: cl2.hpp:7055
cl::Context::Context
Context()
Default constructor - initializes to NULL.
Definition: cl2.hpp:3007
cl::Device::getInfo
cl_int getInfo(cl_device_info name, T *param) const
Wrapper for clGetDeviceInfo().
Definition: cl2.hpp:2186
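
For comparison, a sketch showing both the out-parameter overload above and the by-value template overload, assuming an existing `device`:

cl_ulong globalMemBytes = 0;
cl_int err = device.getInfo(CL_DEVICE_GLOBAL_MEM_SIZE, &globalMemBytes);   // out-parameter form
cl::string name = device.getInfo<CL_DEVICE_NAME>();                        // by-value form
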
cl::DeviceCommandQueue::DeviceCommandQueue
DeviceCommandQueue(const Context &context, const Device &device, cl_uint queueSize, DeviceQueueProperties properties=DeviceQueueProperties::None, cl_int *err=NULL)
Definition: cl2.hpp:8798
cl::Image1D::Image1D
Image1D(const Context &context, cl_mem_flags flags, ImageFormat format, size_type width, void *host_ptr=NULL, cl_int *err=NULL)
Constructs a 1D Image in a specified context.
Definition: cl2.hpp:4421
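
A sketch creating a read-only single-channel float lookup table, assuming an existing `context` and a host array `table` of 4096 floats:

cl::ImageFormat fmt(CL_R, CL_FLOAT);
cl::Image1D lut(context,
                CL_MEM_READ_ONLY | CL_MEM_COPY_HOST_PTR,
                fmt,
                4096,      // width in pixels
                table);    // host data copied at creation time
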
cl::Image2D::Image2D
Image2D(const Context &context, cl_channel_order order, const Image &sourceImage, cl_int *err=nullptr)
Constructs a 2D Image from an image.
Definition: cl2.hpp:4812
cl::Image3D::Image3D
Image3D(const Context &context, cl_mem_flags flags, ImageFormat format, size_type width, size_type height, size_type depth, size_type row_pitch=0, size_type slice_pitch=0, void *host_ptr=NULL, cl_int *err=NULL)
Constructs a 3D Image in a specified context.
Definition: cl2.hpp:5120
cl::Program::Program
Program(const Program &program)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:6564
cl::Image2DGL
Class interface for GL 2D Image Memory objects.
Definition: cl2.hpp:4929
cl::Image2D::Image2D
Image2D(const Image2D &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4891
cl::Device::getHostTimer
cl_ulong getHostTimer(cl_int *error=nullptr)
Definition: cl2.hpp:2215
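
A sketch timing a blocking call with the runtime's host clock (in nanoseconds), assuming CL_HPP_TARGET_OPENCL_VERSION >= 210, an OpenCL 2.1+ device, and existing `device` and `queue` objects:

cl_int err = CL_SUCCESS;
cl_ulong start = device.getHostTimer(&err);
queue.finish();                                           // wait for outstanding work
cl_ulong elapsedNs = device.getHostTimer(&err) - start;
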
cl::detail::KernelArgumentHandler
Definition: cl2.hpp:5782
cl::Program::Program
Program(const Context &context, const Sources &sources, cl_int *err=NULL)
Definition: cl2.hpp:6290
cl::Image1DBuffer::Image1DBuffer
Image1DBuffer(const Image1DBuffer &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4560
cl::SVMAllocator::max_size
size_type max_size() const CL_HPP_NOEXCEPT_
Definition: cl2.hpp:3657
cl::Context::setDefault
static Context setDefault(const Context &default_context)
Definition: cl2.hpp:2999
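
A sketch installing a user-created context as the process-wide default; because the default is created only once, the returned context should be compared with the one passed in:

cl::Context ctx(CL_DEVICE_TYPE_GPU);
cl::Context active = cl::Context::setDefault(ctx);
if (active() != ctx()) {
    // A default context already existed; continue with `active` instead.
}
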