OpenCL C++ Bindings
cl2.hpp
Go to the documentation of this file.
1 /*******************************************************************************
2  * Copyright (c) 2008-2016 The Khronos Group Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and/or associated documentation files (the
6  * "Materials"), to deal in the Materials without restriction, including
7  * without limitation the rights to use, copy, modify, merge, publish,
8  * distribute, sublicense, and/or sell copies of the Materials, and to
9  * permit persons to whom the Materials are furnished to do so, subject to
10  * the following conditions:
11  *
12  * The above copyright notice and this permission notice shall be included
13  * in all copies or substantial portions of the Materials.
14  *
15  * MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS
16  * KHRONOS STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS
17  * SPECIFICATIONS AND HEADER INFORMATION ARE LOCATED AT
18  * https://www.khronos.org/registry/
19  *
20  * THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
21  * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
22  * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
23  * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
24  * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
25  * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
26  * MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
27  ******************************************************************************/
28 
323 
336 
400 #ifndef CL_HPP_
401 #define CL_HPP_
402 
403 /* Handle deprecated preprocessor definitions. In each case, we only check for
404  * the old name if the new name is not defined, so that user code can define
405  * both and hence work with either version of the bindings.
406  */
407 #if !defined(CL_HPP_USE_DX_INTEROP) && defined(USE_DX_INTEROP)
408 # pragma message("cl2.hpp: USE_DX_INTEROP is deprecated. Define CL_HPP_USE_DX_INTEROP instead")
409 # define CL_HPP_USE_DX_INTEROP
410 #endif
411 #if !defined(CL_HPP_USE_CL_DEVICE_FISSION) && defined(USE_CL_DEVICE_FISSION)
412 # pragma message("cl2.hpp: USE_CL_DEVICE_FISSION is deprecated. Define CL_HPP_USE_CL_DEVICE_FISSION instead")
413 # define CL_HPP_USE_CL_DEVICE_FISSION
414 #endif
415 #if !defined(CL_HPP_ENABLE_EXCEPTIONS) && defined(__CL_ENABLE_EXCEPTIONS)
416 # pragma message("cl2.hpp: __CL_ENABLE_EXCEPTIONS is deprecated. Define CL_HPP_ENABLE_EXCEPTIONS instead")
417 # define CL_HPP_ENABLE_EXCEPTIONS
418 #endif
419 #if !defined(CL_HPP_NO_STD_VECTOR) && defined(__NO_STD_VECTOR)
420 # pragma message("cl2.hpp: __NO_STD_VECTOR is deprecated. Define CL_HPP_NO_STD_VECTOR instead")
421 # define CL_HPP_NO_STD_VECTOR
422 #endif
423 #if !defined(CL_HPP_NO_STD_STRING) && defined(__NO_STD_STRING)
424 # pragma message("cl2.hpp: __NO_STD_STRING is deprecated. Define CL_HPP_NO_STD_STRING instead")
425 # define CL_HPP_NO_STD_STRING
426 #endif
427 #if defined(VECTOR_CLASS)
428 # pragma message("cl2.hpp: VECTOR_CLASS is deprecated. Alias cl::vector instead")
429 #endif
430 #if defined(STRING_CLASS)
431 # pragma message("cl2.hpp: STRING_CLASS is deprecated. Alias cl::string instead.")
432 #endif
433 #if !defined(CL_HPP_USER_OVERRIDE_ERROR_STRINGS) && defined(__CL_USER_OVERRIDE_ERROR_STRINGS)
434 # pragma message("cl2.hpp: __CL_USER_OVERRIDE_ERROR_STRINGS is deprecated. Define CL_HPP_USER_OVERRIDE_ERROR_STRINGS instead")
435 # define CL_HPP_USER_OVERRIDE_ERROR_STRINGS
436 #endif
437 
438 /* Warn about features that are no longer supported
439  */
440 #if defined(__USE_DEV_VECTOR)
441 # pragma message("cl2.hpp: __USE_DEV_VECTOR is no longer supported. Expect compilation errors")
442 #endif
443 #if defined(__USE_DEV_STRING)
444 # pragma message("cl2.hpp: __USE_DEV_STRING is no longer supported. Expect compilation errors")
445 #endif
446 
447 /* Detect which version to target */
448 #if !defined(CL_HPP_TARGET_OPENCL_VERSION)
449 # pragma message("cl2.hpp: CL_HPP_TARGET_OPENCL_VERSION is not defined. It will default to 210 (OpenCL 2.1)")
450 # define CL_HPP_TARGET_OPENCL_VERSION 210
451 #endif
452 #if CL_HPP_TARGET_OPENCL_VERSION != 100 && CL_HPP_TARGET_OPENCL_VERSION != 110 && CL_HPP_TARGET_OPENCL_VERSION != 120 && CL_HPP_TARGET_OPENCL_VERSION != 200 && CL_HPP_TARGET_OPENCL_VERSION != 210
453 # pragma message("cl2.hpp: CL_HPP_TARGET_OPENCL_VERSION is not a valid value (100, 110, 120, 200 or 210). It will be set to 210")
454 # undef CL_HPP_TARGET_OPENCL_VERSION
455 # define CL_HPP_TARGET_OPENCL_VERSION 210
456 #endif
457 
458 /* Forward target OpenCL version to C headers if necessary */
459 #if defined(CL_TARGET_OPENCL_VERSION)
460 /* Warn if prior definition of CL_TARGET_OPENCL_VERSION is lower than
461  * requested C++ bindings version */
462 #if CL_TARGET_OPENCL_VERSION < CL_HPP_TARGET_OPENCL_VERSION
463 # pragma message("CL_TARGET_OPENCL_VERSION is already defined as is lower than CL_HPP_TARGET_OPENCL_VERSION")
464 #endif
465 #else
466 # define CL_TARGET_OPENCL_VERSION CL_HPP_TARGET_OPENCL_VERSION
467 #endif
468 
469 #if !defined(CL_HPP_MINIMUM_OPENCL_VERSION)
470 # define CL_HPP_MINIMUM_OPENCL_VERSION 200
471 #endif
472 #if CL_HPP_MINIMUM_OPENCL_VERSION != 100 && CL_HPP_MINIMUM_OPENCL_VERSION != 110 && CL_HPP_MINIMUM_OPENCL_VERSION != 120 && CL_HPP_MINIMUM_OPENCL_VERSION != 200 && CL_HPP_MINIMUM_OPENCL_VERSION != 210
473 # pragma message("cl2.hpp: CL_HPP_MINIMUM_OPENCL_VERSION is not a valid value (100, 110, 120, 200 or 210). It will be set to 100")
474 # undef CL_HPP_MINIMUM_OPENCL_VERSION
475 # define CL_HPP_MINIMUM_OPENCL_VERSION 100
476 #endif
477 #if CL_HPP_MINIMUM_OPENCL_VERSION > CL_HPP_TARGET_OPENCL_VERSION
478 # error "CL_HPP_MINIMUM_OPENCL_VERSION must not be greater than CL_HPP_TARGET_OPENCL_VERSION"
479 #endif
480 
481 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 100 && !defined(CL_USE_DEPRECATED_OPENCL_1_0_APIS)
482 # define CL_USE_DEPRECATED_OPENCL_1_0_APIS
483 #endif
484 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 110 && !defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
485 # define CL_USE_DEPRECATED_OPENCL_1_1_APIS
486 #endif
487 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 120 && !defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
488 # define CL_USE_DEPRECATED_OPENCL_1_2_APIS
489 #endif
490 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 200 && !defined(CL_USE_DEPRECATED_OPENCL_2_0_APIS)
491 # define CL_USE_DEPRECATED_OPENCL_2_0_APIS
492 #endif
493 #if CL_HPP_MINIMUM_OPENCL_VERSION <= 210 && !defined(CL_USE_DEPRECATED_OPENCL_2_1_APIS)
494 # define CL_USE_DEPRECATED_OPENCL_2_1_APIS
495 #endif
496 
497 #ifdef _WIN32
498 
499 #include <malloc.h>
500 
501 #if defined(CL_HPP_USE_DX_INTEROP)
502 #include <CL/cl_d3d10.h>
503 #include <CL/cl_dx9_media_sharing.h>
504 #endif
505 #endif // _WIN32
506 
507 #if defined(_MSC_VER)
508 #include <intrin.h>
509 #endif // _MSC_VER
510 
511  // Check for a valid C++ version
512 
513 // Need to do both tests here because for some reason __cplusplus is not
514 // updated in visual studio
515 #if (!defined(_MSC_VER) && __cplusplus < 201103L) || (defined(_MSC_VER) && _MSC_VER < 1700)
516 #error Visual studio 2013 or another C++11-supporting compiler required
517 #endif
518 
519 //
520 #if defined(CL_HPP_USE_CL_DEVICE_FISSION) || defined(CL_HPP_USE_CL_SUB_GROUPS_KHR)
521 #include <CL/cl_ext.h>
522 #endif
523 
524 #if defined(__APPLE__) || defined(__MACOSX)
525 #include <OpenCL/opencl.h>
526 #else
527 #include <CL/opencl.h>
528 #endif // !__APPLE__
529 
530 #if (__cplusplus >= 201103L)
531 #define CL_HPP_NOEXCEPT_ noexcept
532 #else
533 #define CL_HPP_NOEXCEPT_
534 #endif
535 
536 #if defined(_MSC_VER)
537 # define CL_HPP_DEFINE_STATIC_MEMBER_ __declspec(selectany)
538 #elif defined(__MINGW32__)
539 # define CL_HPP_DEFINE_STATIC_MEMBER_ __attribute__((selectany))
540 #else
541 # define CL_HPP_DEFINE_STATIC_MEMBER_ __attribute__((weak))
542 #endif // !_MSC_VER
543 
544 // Define deprecated prefixes and suffixes to ensure compilation
545 // in case they are not pre-defined
546 #if !defined(CL_EXT_PREFIX__VERSION_1_1_DEPRECATED)
547 #define CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
548 #endif // #if !defined(CL_EXT_PREFIX__VERSION_1_1_DEPRECATED)
549 #if !defined(CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED)
550 #define CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
551 #endif // #if !defined(CL_EXT_PREFIX__VERSION_1_1_DEPRECATED)
552 
553 #if !defined(CL_EXT_PREFIX__VERSION_1_2_DEPRECATED)
554 #define CL_EXT_PREFIX__VERSION_1_2_DEPRECATED
555 #endif // #if !defined(CL_EXT_PREFIX__VERSION_1_2_DEPRECATED)
556 #if !defined(CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED)
557 #define CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED
558 #endif // #if !defined(CL_EXT_PREFIX__VERSION_1_2_DEPRECATED)
559 
560 #if !defined(CL_CALLBACK)
561 #define CL_CALLBACK
562 #endif //CL_CALLBACK
563 
564 #include <utility>
565 #include <limits>
566 #include <iterator>
567 #include <mutex>
568 #include <cstring>
569 #include <functional>
570 
571 
572 // Define a size_type to represent a correctly resolved size_t
573 #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
574 namespace cl {
575  using size_type = ::size_t;
576 } // namespace cl
577 #else // #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
578 namespace cl {
579  using size_type = size_t;
580 } // namespace cl
581 #endif // #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
582 
583 
584 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
585 #include <exception>
586 #endif // #if defined(CL_HPP_ENABLE_EXCEPTIONS)
587 
588 #if !defined(CL_HPP_NO_STD_VECTOR)
589 #include <vector>
590 namespace cl {
591  template < class T, class Alloc = std::allocator<T> >
592  using vector = std::vector<T, Alloc>;
593 } // namespace cl
594 #endif // #if !defined(CL_HPP_NO_STD_VECTOR)
595 
596 #if !defined(CL_HPP_NO_STD_STRING)
597 #include <string>
598 namespace cl {
599  using string = std::string;
600 } // namespace cl
601 #endif // #if !defined(CL_HPP_NO_STD_STRING)
602 
603 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
604 
605 #if !defined(CL_HPP_NO_STD_UNIQUE_PTR)
606 #include <memory>
607 namespace cl {
608  // Replace unique_ptr and allocate_pointer for internal use
609  // to allow user to replace them
610  template<class T, class D>
611  using pointer = std::unique_ptr<T, D>;
612 } // namespace cl
613 #endif
614 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
615 #if !defined(CL_HPP_NO_STD_ARRAY)
616 #include <array>
617 namespace cl {
618  template < class T, size_type N >
619  using array = std::array<T, N>;
620 } // namespace cl
621 #endif // #if !defined(CL_HPP_NO_STD_ARRAY)
622 
623 // Define size_type appropriately to allow backward-compatibility
624 // use of the old size_t interface class
625 #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
626 namespace cl {
627  namespace compatibility {
632  template <int N>
633  class size_t
634  {
635  private:
636  size_type data_[N];
637 
638  public:
640  size_t()
641  {
642  for (int i = 0; i < N; ++i) {
643  data_[i] = 0;
644  }
645  }
646 
647  size_t(const array<size_type, N> &rhs)
648  {
649  for (int i = 0; i < N; ++i) {
650  data_[i] = rhs[i];
651  }
652  }
653 
654  size_type& operator[](int index)
655  {
656  return data_[index];
657  }
658 
659  const size_type& operator[](int index) const
660  {
661  return data_[index];
662  }
663 
665  operator size_type* () { return data_; }
666 
668  operator const size_type* () const { return data_; }
669 
670  operator array<size_type, N>() const
671  {
672  array<size_type, N> ret;
673 
674  for (int i = 0; i < N; ++i) {
675  ret[i] = data_[i];
676  }
677  return ret;
678  }
679  };
680  } // namespace compatibility
681 
682  template<int N>
683  using size_t = compatibility::size_t<N>;
684 } // namespace cl
685 #endif // #if defined(CL_HPP_ENABLE_SIZE_T_COMPATIBILITY)
686 
687 // Helper alias to avoid confusing the macros
688 namespace cl {
689  namespace detail {
690  using size_t_array = array<size_type, 3>;
691  } // namespace detail
692 } // namespace cl
693 
694 
700 namespace cl {
701  class Memory;
702 
703 #define CL_HPP_INIT_CL_EXT_FCN_PTR_(name) \
704  if (!pfn_##name) { \
705  pfn_##name = (PFN_##name) \
706  clGetExtensionFunctionAddress(#name); \
707  if (!pfn_##name) { \
708  } \
709  }
710 
711 #define CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, name) \
712  if (!pfn_##name) { \
713  pfn_##name = (PFN_##name) \
714  clGetExtensionFunctionAddressForPlatform(platform, #name); \
715  if (!pfn_##name) { \
716  } \
717  }
718 
719  class Program;
720  class Device;
721  class Context;
722  class CommandQueue;
723  class DeviceCommandQueue;
724  class Memory;
725  class Buffer;
726  class Pipe;
727 
728 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
729 
733  class Error : public std::exception
734  {
735  private:
736  cl_int err_;
737  const char * errStr_;
738  public:
748  Error(cl_int err, const char * errStr = NULL) : err_(err), errStr_(errStr)
749  {}
750 
751  ~Error() throw() {}
752 
757  virtual const char * what() const throw ()
758  {
759  if (errStr_ == NULL) {
760  return "empty";
761  }
762  else {
763  return errStr_;
764  }
765  }
766 
771  cl_int err(void) const { return err_; }
772  };
773 #define CL_HPP_ERR_STR_(x) #x
774 #else
775 #define CL_HPP_ERR_STR_(x) NULL
776 #endif // CL_HPP_ENABLE_EXCEPTIONS
777 
778 
779 namespace detail
780 {
781 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
782 static inline cl_int errHandler (
783  cl_int err,
784  const char * errStr = NULL)
785 {
786  if (err != CL_SUCCESS) {
787  throw Error(err, errStr);
788  }
789  return err;
790 }
791 #else
792 static inline cl_int errHandler (cl_int err, const char * errStr = NULL)
793 {
794  (void) errStr; // suppress unused variable warning
795  return err;
796 }
797 #endif // CL_HPP_ENABLE_EXCEPTIONS
798 }
799 
800 
801 
803 #if !defined(CL_HPP_USER_OVERRIDE_ERROR_STRINGS)
804 #define __GET_DEVICE_INFO_ERR CL_HPP_ERR_STR_(clGetDeviceInfo)
805 #define __GET_PLATFORM_INFO_ERR CL_HPP_ERR_STR_(clGetPlatformInfo)
806 #define __GET_DEVICE_IDS_ERR CL_HPP_ERR_STR_(clGetDeviceIDs)
807 #define __GET_PLATFORM_IDS_ERR CL_HPP_ERR_STR_(clGetPlatformIDs)
808 #define __GET_CONTEXT_INFO_ERR CL_HPP_ERR_STR_(clGetContextInfo)
809 #define __GET_EVENT_INFO_ERR CL_HPP_ERR_STR_(clGetEventInfo)
810 #define __GET_EVENT_PROFILE_INFO_ERR CL_HPP_ERR_STR_(clGetEventProfileInfo)
811 #define __GET_MEM_OBJECT_INFO_ERR CL_HPP_ERR_STR_(clGetMemObjectInfo)
812 #define __GET_IMAGE_INFO_ERR CL_HPP_ERR_STR_(clGetImageInfo)
813 #define __GET_SAMPLER_INFO_ERR CL_HPP_ERR_STR_(clGetSamplerInfo)
814 #define __GET_KERNEL_INFO_ERR CL_HPP_ERR_STR_(clGetKernelInfo)
815 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
816 #define __GET_KERNEL_ARG_INFO_ERR CL_HPP_ERR_STR_(clGetKernelArgInfo)
817 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
818 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
819 #define __GET_KERNEL_SUB_GROUP_INFO_ERR CL_HPP_ERR_STR_(clGetKernelSubGroupInfo)
820 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
821 #define __GET_KERNEL_WORK_GROUP_INFO_ERR CL_HPP_ERR_STR_(clGetKernelWorkGroupInfo)
822 #define __GET_PROGRAM_INFO_ERR CL_HPP_ERR_STR_(clGetProgramInfo)
823 #define __GET_PROGRAM_BUILD_INFO_ERR CL_HPP_ERR_STR_(clGetProgramBuildInfo)
824 #define __GET_COMMAND_QUEUE_INFO_ERR CL_HPP_ERR_STR_(clGetCommandQueueInfo)
825 
826 #define __CREATE_CONTEXT_ERR CL_HPP_ERR_STR_(clCreateContext)
827 #define __CREATE_CONTEXT_FROM_TYPE_ERR CL_HPP_ERR_STR_(clCreateContextFromType)
828 #define __GET_SUPPORTED_IMAGE_FORMATS_ERR CL_HPP_ERR_STR_(clGetSupportedImageFormats)
829 
830 #define __CREATE_BUFFER_ERR CL_HPP_ERR_STR_(clCreateBuffer)
831 #define __COPY_ERR CL_HPP_ERR_STR_(cl::copy)
832 #define __CREATE_SUBBUFFER_ERR CL_HPP_ERR_STR_(clCreateSubBuffer)
833 #define __CREATE_GL_BUFFER_ERR CL_HPP_ERR_STR_(clCreateFromGLBuffer)
834 #define __CREATE_GL_RENDER_BUFFER_ERR CL_HPP_ERR_STR_(clCreateFromGLBuffer)
835 #define __GET_GL_OBJECT_INFO_ERR CL_HPP_ERR_STR_(clGetGLObjectInfo)
836 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
837 #define __CREATE_IMAGE_ERR CL_HPP_ERR_STR_(clCreateImage)
838 #define __CREATE_GL_TEXTURE_ERR CL_HPP_ERR_STR_(clCreateFromGLTexture)
839 #define __IMAGE_DIMENSION_ERR CL_HPP_ERR_STR_(Incorrect image dimensions)
840 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
841 #define __SET_MEM_OBJECT_DESTRUCTOR_CALLBACK_ERR CL_HPP_ERR_STR_(clSetMemObjectDestructorCallback)
842 
843 #define __CREATE_USER_EVENT_ERR CL_HPP_ERR_STR_(clCreateUserEvent)
844 #define __SET_USER_EVENT_STATUS_ERR CL_HPP_ERR_STR_(clSetUserEventStatus)
845 #define __SET_EVENT_CALLBACK_ERR CL_HPP_ERR_STR_(clSetEventCallback)
846 #define __WAIT_FOR_EVENTS_ERR CL_HPP_ERR_STR_(clWaitForEvents)
847 
848 #define __CREATE_KERNEL_ERR CL_HPP_ERR_STR_(clCreateKernel)
849 #define __SET_KERNEL_ARGS_ERR CL_HPP_ERR_STR_(clSetKernelArg)
850 #define __CREATE_PROGRAM_WITH_SOURCE_ERR CL_HPP_ERR_STR_(clCreateProgramWithSource)
851 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
852 #define __CREATE_PROGRAM_WITH_IL_ERR CL_HPP_ERR_STR_(clCreateProgramWithIL)
853 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
854 #define __CREATE_PROGRAM_WITH_BINARY_ERR CL_HPP_ERR_STR_(clCreateProgramWithBinary)
855 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
856 #define __CREATE_PROGRAM_WITH_IL_ERR CL_HPP_ERR_STR_(clCreateProgramWithIL)
857 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 210
858 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
859 #define __CREATE_PROGRAM_WITH_BUILT_IN_KERNELS_ERR CL_HPP_ERR_STR_(clCreateProgramWithBuiltInKernels)
860 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
861 #define __BUILD_PROGRAM_ERR CL_HPP_ERR_STR_(clBuildProgram)
862 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
863 #define __COMPILE_PROGRAM_ERR CL_HPP_ERR_STR_(clCompileProgram)
864 #define __LINK_PROGRAM_ERR CL_HPP_ERR_STR_(clLinkProgram)
865 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
866 #define __CREATE_KERNELS_IN_PROGRAM_ERR CL_HPP_ERR_STR_(clCreateKernelsInProgram)
867 
868 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
869 #define __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR CL_HPP_ERR_STR_(clCreateCommandQueueWithProperties)
870 #define __CREATE_SAMPLER_WITH_PROPERTIES_ERR CL_HPP_ERR_STR_(clCreateSamplerWithProperties)
871 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
872 #define __SET_COMMAND_QUEUE_PROPERTY_ERR CL_HPP_ERR_STR_(clSetCommandQueueProperty)
873 #define __ENQUEUE_READ_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueReadBuffer)
874 #define __ENQUEUE_READ_BUFFER_RECT_ERR CL_HPP_ERR_STR_(clEnqueueReadBufferRect)
875 #define __ENQUEUE_WRITE_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueWriteBuffer)
876 #define __ENQUEUE_WRITE_BUFFER_RECT_ERR CL_HPP_ERR_STR_(clEnqueueWriteBufferRect)
877 #define __ENQEUE_COPY_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueCopyBuffer)
878 #define __ENQEUE_COPY_BUFFER_RECT_ERR CL_HPP_ERR_STR_(clEnqueueCopyBufferRect)
879 #define __ENQUEUE_FILL_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueFillBuffer)
880 #define __ENQUEUE_READ_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueReadImage)
881 #define __ENQUEUE_WRITE_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueWriteImage)
882 #define __ENQUEUE_COPY_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueCopyImage)
883 #define __ENQUEUE_FILL_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueFillImage)
884 #define __ENQUEUE_COPY_IMAGE_TO_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueCopyImageToBuffer)
885 #define __ENQUEUE_COPY_BUFFER_TO_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueCopyBufferToImage)
886 #define __ENQUEUE_MAP_BUFFER_ERR CL_HPP_ERR_STR_(clEnqueueMapBuffer)
887 #define __ENQUEUE_MAP_IMAGE_ERR CL_HPP_ERR_STR_(clEnqueueMapImage)
888 #define __ENQUEUE_UNMAP_MEM_OBJECT_ERR CL_HPP_ERR_STR_(clEnqueueUnMapMemObject)
889 #define __ENQUEUE_NDRANGE_KERNEL_ERR CL_HPP_ERR_STR_(clEnqueueNDRangeKernel)
890 #define __ENQUEUE_NATIVE_KERNEL CL_HPP_ERR_STR_(clEnqueueNativeKernel)
891 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
892 #define __ENQUEUE_MIGRATE_MEM_OBJECTS_ERR CL_HPP_ERR_STR_(clEnqueueMigrateMemObjects)
893 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
894 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
895 #define __ENQUEUE_MIGRATE_SVM_ERR CL_HPP_ERR_STR_(clEnqueueSVMMigrateMem)
896 #define __SET_DEFAULT_DEVICE_COMMAND_QUEUE_ERR CL_HPP_ERR_STR_(clSetDefaultDeviceCommandQueue)
897 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 210
898 
899 
900 #define __ENQUEUE_ACQUIRE_GL_ERR CL_HPP_ERR_STR_(clEnqueueAcquireGLObjects)
901 #define __ENQUEUE_RELEASE_GL_ERR CL_HPP_ERR_STR_(clEnqueueReleaseGLObjects)
902 
903 #define __CREATE_PIPE_ERR CL_HPP_ERR_STR_(clCreatePipe)
904 #define __GET_PIPE_INFO_ERR CL_HPP_ERR_STR_(clGetPipeInfo)
905 
906 
907 #define __RETAIN_ERR CL_HPP_ERR_STR_(Retain Object)
908 #define __RELEASE_ERR CL_HPP_ERR_STR_(Release Object)
909 #define __FLUSH_ERR CL_HPP_ERR_STR_(clFlush)
910 #define __FINISH_ERR CL_HPP_ERR_STR_(clFinish)
911 #define __VECTOR_CAPACITY_ERR CL_HPP_ERR_STR_(Vector capacity error)
912 
913 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
914 #define __GET_HOST_TIMER_ERR CL_HPP_ERR_STR_(clGetHostTimer)
915 #define __GET_DEVICE_AND_HOST_TIMER_ERR CL_HPP_ERR_STR_(clGetDeviceAndHostTimer)
916 #endif
917 
918 
922 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
923 #define __CREATE_SUB_DEVICES_ERR CL_HPP_ERR_STR_(clCreateSubDevices)
924 #else
925 #define __CREATE_SUB_DEVICES_ERR CL_HPP_ERR_STR_(clCreateSubDevicesEXT)
926 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
927 
931 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
932 #define __ENQUEUE_MARKER_ERR CL_HPP_ERR_STR_(clEnqueueMarker)
933 #define __ENQUEUE_WAIT_FOR_EVENTS_ERR CL_HPP_ERR_STR_(clEnqueueWaitForEvents)
934 #define __ENQUEUE_BARRIER_ERR CL_HPP_ERR_STR_(clEnqueueBarrier)
935 #define __UNLOAD_COMPILER_ERR CL_HPP_ERR_STR_(clUnloadCompiler)
936 #define __CREATE_GL_TEXTURE_2D_ERR CL_HPP_ERR_STR_(clCreateFromGLTexture2D)
937 #define __CREATE_GL_TEXTURE_3D_ERR CL_HPP_ERR_STR_(clCreateFromGLTexture3D)
938 #define __CREATE_IMAGE2D_ERR CL_HPP_ERR_STR_(clCreateImage2D)
939 #define __CREATE_IMAGE3D_ERR CL_HPP_ERR_STR_(clCreateImage3D)
940 #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
941 
945 #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
946 #define __CREATE_COMMAND_QUEUE_ERR CL_HPP_ERR_STR_(clCreateCommandQueue)
947 #define __ENQUEUE_TASK_ERR CL_HPP_ERR_STR_(clEnqueueTask)
948 #define __CREATE_SAMPLER_ERR CL_HPP_ERR_STR_(clCreateSampler)
949 #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
950 
954 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
955 #define __ENQUEUE_MARKER_WAIT_LIST_ERR CL_HPP_ERR_STR_(clEnqueueMarkerWithWaitList)
956 #define __ENQUEUE_BARRIER_WAIT_LIST_ERR CL_HPP_ERR_STR_(clEnqueueBarrierWithWaitList)
957 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
958 
959 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
960 #define __CLONE_KERNEL_ERR CL_HPP_ERR_STR_(clCloneKernel)
961 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 210
962 
963 #endif // CL_HPP_USER_OVERRIDE_ERROR_STRINGS
964 
966 
967 namespace detail {
968 
969 // Generic getInfoHelper. The final parameter is used to guide overload
970 // resolution: the actual parameter passed is an int, which makes this
971 // a worse conversion sequence than a specialization that declares the
972 // parameter as an int.
973 template<typename Functor, typename T>
974 inline cl_int getInfoHelper(Functor f, cl_uint name, T* param, long)
975 {
976  return f(name, sizeof(T), param, NULL);
977 }
978 
979 // Specialized for getInfo<CL_PROGRAM_BINARIES>
980 // Assumes that the output vector was correctly resized on the way in
981 template <typename Func>
982 inline cl_int getInfoHelper(Func f, cl_uint name, vector<vector<unsigned char>>* param, int)
983 {
984  if (name != CL_PROGRAM_BINARIES) {
985  return CL_INVALID_VALUE;
986  }
987  if (param) {
988  // Create array of pointers, calculate total size and pass pointer array in
989  size_type numBinaries = param->size();
990  vector<unsigned char*> binariesPointers(numBinaries);
991 
992  for (size_type i = 0; i < numBinaries; ++i)
993  {
994  binariesPointers[i] = (*param)[i].data();
995  }
996 
997  cl_int err = f(name, numBinaries * sizeof(unsigned char*), binariesPointers.data(), NULL);
998 
999  if (err != CL_SUCCESS) {
1000  return err;
1001  }
1002  }
1003 
1004 
1005  return CL_SUCCESS;
1006 }
1007 
1008 // Specialized getInfoHelper for vector params
1009 template <typename Func, typename T>
1010 inline cl_int getInfoHelper(Func f, cl_uint name, vector<T>* param, long)
1011 {
1012  size_type required;
1013  cl_int err = f(name, 0, NULL, &required);
1014  if (err != CL_SUCCESS) {
1015  return err;
1016  }
1017  const size_type elements = required / sizeof(T);
1018 
1019  // Temporary to avoid changing param on an error
1020  vector<T> localData(elements);
1021  err = f(name, required, localData.data(), NULL);
1022  if (err != CL_SUCCESS) {
1023  return err;
1024  }
1025  if (param) {
1026  *param = std::move(localData);
1027  }
1028 
1029  return CL_SUCCESS;
1030 }
1031 
1032 /* Specialization for reference-counted types. This depends on the
1033  * existence of Wrapper<T>::cl_type, and none of the other types having the
1034  * cl_type member. Note that simplify specifying the parameter as Wrapper<T>
1035  * does not work, because when using a derived type (e.g. Context) the generic
1036  * template will provide a better match.
1037  */
1038 template <typename Func, typename T>
1039 inline cl_int getInfoHelper(
1040  Func f, cl_uint name, vector<T>* param, int, typename T::cl_type = 0)
1041 {
1042  size_type required;
1043  cl_int err = f(name, 0, NULL, &required);
1044  if (err != CL_SUCCESS) {
1045  return err;
1046  }
1047 
1048  const size_type elements = required / sizeof(typename T::cl_type);
1049 
1050  vector<typename T::cl_type> value(elements);
1051  err = f(name, required, value.data(), NULL);
1052  if (err != CL_SUCCESS) {
1053  return err;
1054  }
1055 
1056  if (param) {
1057  // Assign to convert CL type to T for each element
1058  param->resize(elements);
1059 
1060  // Assign to param, constructing with retain behaviour
1061  // to correctly capture each underlying CL object
1062  for (size_type i = 0; i < elements; i++) {
1063  (*param)[i] = T(value[i], true);
1064  }
1065  }
1066  return CL_SUCCESS;
1067 }
1068 
1069 // Specialized GetInfoHelper for string params
1070 template <typename Func>
1071 inline cl_int getInfoHelper(Func f, cl_uint name, string* param, long)
1072 {
1073  size_type required;
1074  cl_int err = f(name, 0, NULL, &required);
1075  if (err != CL_SUCCESS) {
1076  return err;
1077  }
1078 
1079  // std::string has a constant data member
1080  // a char vector does not
1081  if (required > 0) {
1082  vector<char> value(required);
1083  err = f(name, required, value.data(), NULL);
1084  if (err != CL_SUCCESS) {
1085  return err;
1086  }
1087  if (param) {
1088  param->assign(begin(value), prev(end(value)));
1089  }
1090  }
1091  else if (param) {
1092  param->assign("");
1093  }
1094  return CL_SUCCESS;
1095 }
1096 
1097 // Specialized GetInfoHelper for clsize_t params
1098 template <typename Func, size_type N>
1099 inline cl_int getInfoHelper(Func f, cl_uint name, array<size_type, N>* param, long)
1100 {
1101  size_type required;
1102  cl_int err = f(name, 0, NULL, &required);
1103  if (err != CL_SUCCESS) {
1104  return err;
1105  }
1106 
1107  size_type elements = required / sizeof(size_type);
1108  vector<size_type> value(elements, 0);
1109 
1110  err = f(name, required, value.data(), NULL);
1111  if (err != CL_SUCCESS) {
1112  return err;
1113  }
1114 
1115  // Bound the copy with N to prevent overruns
1116  // if passed N > than the amount copied
1117  if (elements > N) {
1118  elements = N;
1119  }
1120  for (size_type i = 0; i < elements; ++i) {
1121  (*param)[i] = value[i];
1122  }
1123 
1124  return CL_SUCCESS;
1125 }
1126 
1127 template<typename T> struct ReferenceHandler;
1128 
1129 /* Specialization for reference-counted types. This depends on the
1130  * existence of Wrapper<T>::cl_type, and none of the other types having the
1131  * cl_type member. Note that simplify specifying the parameter as Wrapper<T>
1132  * does not work, because when using a derived type (e.g. Context) the generic
1133  * template will provide a better match.
1134  */
1135 template<typename Func, typename T>
1136 inline cl_int getInfoHelper(Func f, cl_uint name, T* param, int, typename T::cl_type = 0)
1137 {
1138  typename T::cl_type value;
1139  cl_int err = f(name, sizeof(value), &value, NULL);
1140  if (err != CL_SUCCESS) {
1141  return err;
1142  }
1143  *param = value;
1144  if (value != NULL)
1145  {
1146  err = param->retain();
1147  if (err != CL_SUCCESS) {
1148  return err;
1149  }
1150  }
1151  return CL_SUCCESS;
1152 }
1153 
1154 #define CL_HPP_PARAM_NAME_INFO_1_0_(F) \
1155  F(cl_platform_info, CL_PLATFORM_PROFILE, string) \
1156  F(cl_platform_info, CL_PLATFORM_VERSION, string) \
1157  F(cl_platform_info, CL_PLATFORM_NAME, string) \
1158  F(cl_platform_info, CL_PLATFORM_VENDOR, string) \
1159  F(cl_platform_info, CL_PLATFORM_EXTENSIONS, string) \
1160  \
1161  F(cl_device_info, CL_DEVICE_TYPE, cl_device_type) \
1162  F(cl_device_info, CL_DEVICE_VENDOR_ID, cl_uint) \
1163  F(cl_device_info, CL_DEVICE_MAX_COMPUTE_UNITS, cl_uint) \
1164  F(cl_device_info, CL_DEVICE_MAX_WORK_ITEM_DIMENSIONS, cl_uint) \
1165  F(cl_device_info, CL_DEVICE_MAX_WORK_GROUP_SIZE, size_type) \
1166  F(cl_device_info, CL_DEVICE_MAX_WORK_ITEM_SIZES, cl::vector<size_type>) \
1167  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_CHAR, cl_uint) \
1168  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_SHORT, cl_uint) \
1169  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_INT, cl_uint) \
1170  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_LONG, cl_uint) \
1171  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_FLOAT, cl_uint) \
1172  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_DOUBLE, cl_uint) \
1173  F(cl_device_info, CL_DEVICE_MAX_CLOCK_FREQUENCY, cl_uint) \
1174  F(cl_device_info, CL_DEVICE_ADDRESS_BITS, cl_uint) \
1175  F(cl_device_info, CL_DEVICE_MAX_READ_IMAGE_ARGS, cl_uint) \
1176  F(cl_device_info, CL_DEVICE_MAX_WRITE_IMAGE_ARGS, cl_uint) \
1177  F(cl_device_info, CL_DEVICE_MAX_MEM_ALLOC_SIZE, cl_ulong) \
1178  F(cl_device_info, CL_DEVICE_IMAGE2D_MAX_WIDTH, size_type) \
1179  F(cl_device_info, CL_DEVICE_IMAGE2D_MAX_HEIGHT, size_type) \
1180  F(cl_device_info, CL_DEVICE_IMAGE3D_MAX_WIDTH, size_type) \
1181  F(cl_device_info, CL_DEVICE_IMAGE3D_MAX_HEIGHT, size_type) \
1182  F(cl_device_info, CL_DEVICE_IMAGE3D_MAX_DEPTH, size_type) \
1183  F(cl_device_info, CL_DEVICE_IMAGE_SUPPORT, cl_bool) \
1184  F(cl_device_info, CL_DEVICE_MAX_PARAMETER_SIZE, size_type) \
1185  F(cl_device_info, CL_DEVICE_MAX_SAMPLERS, cl_uint) \
1186  F(cl_device_info, CL_DEVICE_MEM_BASE_ADDR_ALIGN, cl_uint) \
1187  F(cl_device_info, CL_DEVICE_MIN_DATA_TYPE_ALIGN_SIZE, cl_uint) \
1188  F(cl_device_info, CL_DEVICE_SINGLE_FP_CONFIG, cl_device_fp_config) \
1189  F(cl_device_info, CL_DEVICE_DOUBLE_FP_CONFIG, cl_device_fp_config) \
1190  F(cl_device_info, CL_DEVICE_HALF_FP_CONFIG, cl_device_fp_config) \
1191  F(cl_device_info, CL_DEVICE_GLOBAL_MEM_CACHE_TYPE, cl_device_mem_cache_type) \
1192  F(cl_device_info, CL_DEVICE_GLOBAL_MEM_CACHELINE_SIZE, cl_uint)\
1193  F(cl_device_info, CL_DEVICE_GLOBAL_MEM_CACHE_SIZE, cl_ulong) \
1194  F(cl_device_info, CL_DEVICE_GLOBAL_MEM_SIZE, cl_ulong) \
1195  F(cl_device_info, CL_DEVICE_MAX_CONSTANT_BUFFER_SIZE, cl_ulong) \
1196  F(cl_device_info, CL_DEVICE_MAX_CONSTANT_ARGS, cl_uint) \
1197  F(cl_device_info, CL_DEVICE_LOCAL_MEM_TYPE, cl_device_local_mem_type) \
1198  F(cl_device_info, CL_DEVICE_LOCAL_MEM_SIZE, cl_ulong) \
1199  F(cl_device_info, CL_DEVICE_ERROR_CORRECTION_SUPPORT, cl_bool) \
1200  F(cl_device_info, CL_DEVICE_PROFILING_TIMER_RESOLUTION, size_type) \
1201  F(cl_device_info, CL_DEVICE_ENDIAN_LITTLE, cl_bool) \
1202  F(cl_device_info, CL_DEVICE_AVAILABLE, cl_bool) \
1203  F(cl_device_info, CL_DEVICE_COMPILER_AVAILABLE, cl_bool) \
1204  F(cl_device_info, CL_DEVICE_EXECUTION_CAPABILITIES, cl_device_exec_capabilities) \
1205  F(cl_device_info, CL_DEVICE_PLATFORM, cl_platform_id) \
1206  F(cl_device_info, CL_DEVICE_NAME, string) \
1207  F(cl_device_info, CL_DEVICE_VENDOR, string) \
1208  F(cl_device_info, CL_DRIVER_VERSION, string) \
1209  F(cl_device_info, CL_DEVICE_PROFILE, string) \
1210  F(cl_device_info, CL_DEVICE_VERSION, string) \
1211  F(cl_device_info, CL_DEVICE_EXTENSIONS, string) \
1212  \
1213  F(cl_context_info, CL_CONTEXT_REFERENCE_COUNT, cl_uint) \
1214  F(cl_context_info, CL_CONTEXT_DEVICES, cl::vector<Device>) \
1215  F(cl_context_info, CL_CONTEXT_PROPERTIES, cl::vector<cl_context_properties>) \
1216  \
1217  F(cl_event_info, CL_EVENT_COMMAND_QUEUE, cl::CommandQueue) \
1218  F(cl_event_info, CL_EVENT_COMMAND_TYPE, cl_command_type) \
1219  F(cl_event_info, CL_EVENT_REFERENCE_COUNT, cl_uint) \
1220  F(cl_event_info, CL_EVENT_COMMAND_EXECUTION_STATUS, cl_int) \
1221  \
1222  F(cl_profiling_info, CL_PROFILING_COMMAND_QUEUED, cl_ulong) \
1223  F(cl_profiling_info, CL_PROFILING_COMMAND_SUBMIT, cl_ulong) \
1224  F(cl_profiling_info, CL_PROFILING_COMMAND_START, cl_ulong) \
1225  F(cl_profiling_info, CL_PROFILING_COMMAND_END, cl_ulong) \
1226  \
1227  F(cl_mem_info, CL_MEM_TYPE, cl_mem_object_type) \
1228  F(cl_mem_info, CL_MEM_FLAGS, cl_mem_flags) \
1229  F(cl_mem_info, CL_MEM_SIZE, size_type) \
1230  F(cl_mem_info, CL_MEM_HOST_PTR, void*) \
1231  F(cl_mem_info, CL_MEM_MAP_COUNT, cl_uint) \
1232  F(cl_mem_info, CL_MEM_REFERENCE_COUNT, cl_uint) \
1233  F(cl_mem_info, CL_MEM_CONTEXT, cl::Context) \
1234  \
1235  F(cl_image_info, CL_IMAGE_FORMAT, cl_image_format) \
1236  F(cl_image_info, CL_IMAGE_ELEMENT_SIZE, size_type) \
1237  F(cl_image_info, CL_IMAGE_ROW_PITCH, size_type) \
1238  F(cl_image_info, CL_IMAGE_SLICE_PITCH, size_type) \
1239  F(cl_image_info, CL_IMAGE_WIDTH, size_type) \
1240  F(cl_image_info, CL_IMAGE_HEIGHT, size_type) \
1241  F(cl_image_info, CL_IMAGE_DEPTH, size_type) \
1242  \
1243  F(cl_sampler_info, CL_SAMPLER_REFERENCE_COUNT, cl_uint) \
1244  F(cl_sampler_info, CL_SAMPLER_CONTEXT, cl::Context) \
1245  F(cl_sampler_info, CL_SAMPLER_NORMALIZED_COORDS, cl_bool) \
1246  F(cl_sampler_info, CL_SAMPLER_ADDRESSING_MODE, cl_addressing_mode) \
1247  F(cl_sampler_info, CL_SAMPLER_FILTER_MODE, cl_filter_mode) \
1248  \
1249  F(cl_program_info, CL_PROGRAM_REFERENCE_COUNT, cl_uint) \
1250  F(cl_program_info, CL_PROGRAM_CONTEXT, cl::Context) \
1251  F(cl_program_info, CL_PROGRAM_NUM_DEVICES, cl_uint) \
1252  F(cl_program_info, CL_PROGRAM_DEVICES, cl::vector<Device>) \
1253  F(cl_program_info, CL_PROGRAM_SOURCE, string) \
1254  F(cl_program_info, CL_PROGRAM_BINARY_SIZES, cl::vector<size_type>) \
1255  F(cl_program_info, CL_PROGRAM_BINARIES, cl::vector<cl::vector<unsigned char>>) \
1256  \
1257  F(cl_program_build_info, CL_PROGRAM_BUILD_STATUS, cl_build_status) \
1258  F(cl_program_build_info, CL_PROGRAM_BUILD_OPTIONS, string) \
1259  F(cl_program_build_info, CL_PROGRAM_BUILD_LOG, string) \
1260  \
1261  F(cl_kernel_info, CL_KERNEL_FUNCTION_NAME, string) \
1262  F(cl_kernel_info, CL_KERNEL_NUM_ARGS, cl_uint) \
1263  F(cl_kernel_info, CL_KERNEL_REFERENCE_COUNT, cl_uint) \
1264  F(cl_kernel_info, CL_KERNEL_CONTEXT, cl::Context) \
1265  F(cl_kernel_info, CL_KERNEL_PROGRAM, cl::Program) \
1266  \
1267  F(cl_kernel_work_group_info, CL_KERNEL_WORK_GROUP_SIZE, size_type) \
1268  F(cl_kernel_work_group_info, CL_KERNEL_COMPILE_WORK_GROUP_SIZE, cl::detail::size_t_array) \
1269  F(cl_kernel_work_group_info, CL_KERNEL_LOCAL_MEM_SIZE, cl_ulong) \
1270  \
1271  F(cl_command_queue_info, CL_QUEUE_CONTEXT, cl::Context) \
1272  F(cl_command_queue_info, CL_QUEUE_DEVICE, cl::Device) \
1273  F(cl_command_queue_info, CL_QUEUE_REFERENCE_COUNT, cl_uint) \
1274  F(cl_command_queue_info, CL_QUEUE_PROPERTIES, cl_command_queue_properties)
1275 
1276 
1277 #define CL_HPP_PARAM_NAME_INFO_1_1_(F) \
1278  F(cl_context_info, CL_CONTEXT_NUM_DEVICES, cl_uint)\
1279  F(cl_device_info, CL_DEVICE_PREFERRED_VECTOR_WIDTH_HALF, cl_uint) \
1280  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_CHAR, cl_uint) \
1281  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_SHORT, cl_uint) \
1282  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_INT, cl_uint) \
1283  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_LONG, cl_uint) \
1284  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_FLOAT, cl_uint) \
1285  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_DOUBLE, cl_uint) \
1286  F(cl_device_info, CL_DEVICE_NATIVE_VECTOR_WIDTH_HALF, cl_uint) \
1287  F(cl_device_info, CL_DEVICE_OPENCL_C_VERSION, string) \
1288  \
1289  F(cl_mem_info, CL_MEM_ASSOCIATED_MEMOBJECT, cl::Memory) \
1290  F(cl_mem_info, CL_MEM_OFFSET, size_type) \
1291  \
1292  F(cl_kernel_work_group_info, CL_KERNEL_PREFERRED_WORK_GROUP_SIZE_MULTIPLE, size_type) \
1293  F(cl_kernel_work_group_info, CL_KERNEL_PRIVATE_MEM_SIZE, cl_ulong) \
1294  \
1295  F(cl_event_info, CL_EVENT_CONTEXT, cl::Context)
1296 
1297 #define CL_HPP_PARAM_NAME_INFO_1_2_(F) \
1298  F(cl_program_info, CL_PROGRAM_NUM_KERNELS, size_type) \
1299  F(cl_program_info, CL_PROGRAM_KERNEL_NAMES, string) \
1300  \
1301  F(cl_program_build_info, CL_PROGRAM_BINARY_TYPE, cl_program_binary_type) \
1302  \
1303  F(cl_kernel_info, CL_KERNEL_ATTRIBUTES, string) \
1304  \
1305  F(cl_kernel_arg_info, CL_KERNEL_ARG_ADDRESS_QUALIFIER, cl_kernel_arg_address_qualifier) \
1306  F(cl_kernel_arg_info, CL_KERNEL_ARG_ACCESS_QUALIFIER, cl_kernel_arg_access_qualifier) \
1307  F(cl_kernel_arg_info, CL_KERNEL_ARG_TYPE_NAME, string) \
1308  F(cl_kernel_arg_info, CL_KERNEL_ARG_NAME, string) \
1309  F(cl_kernel_arg_info, CL_KERNEL_ARG_TYPE_QUALIFIER, cl_kernel_arg_type_qualifier) \
1310  \
1311  F(cl_device_info, CL_DEVICE_PARENT_DEVICE, cl::Device) \
1312  F(cl_device_info, CL_DEVICE_PARTITION_PROPERTIES, cl::vector<cl_device_partition_property>) \
1313  F(cl_device_info, CL_DEVICE_PARTITION_TYPE, cl::vector<cl_device_partition_property>) \
1314  F(cl_device_info, CL_DEVICE_REFERENCE_COUNT, cl_uint) \
1315  F(cl_device_info, CL_DEVICE_PREFERRED_INTEROP_USER_SYNC, size_type) \
1316  F(cl_device_info, CL_DEVICE_PARTITION_AFFINITY_DOMAIN, cl_device_affinity_domain) \
1317  F(cl_device_info, CL_DEVICE_BUILT_IN_KERNELS, string) \
1318  \
1319  F(cl_image_info, CL_IMAGE_ARRAY_SIZE, size_type) \
1320  F(cl_image_info, CL_IMAGE_NUM_MIP_LEVELS, cl_uint) \
1321  F(cl_image_info, CL_IMAGE_NUM_SAMPLES, cl_uint)
1322 
1323 #define CL_HPP_PARAM_NAME_INFO_2_0_(F) \
1324  F(cl_device_info, CL_DEVICE_QUEUE_ON_HOST_PROPERTIES, cl_command_queue_properties) \
1325  F(cl_device_info, CL_DEVICE_QUEUE_ON_DEVICE_PROPERTIES, cl_command_queue_properties) \
1326  F(cl_device_info, CL_DEVICE_QUEUE_ON_DEVICE_PREFERRED_SIZE, cl_uint) \
1327  F(cl_device_info, CL_DEVICE_QUEUE_ON_DEVICE_MAX_SIZE, cl_uint) \
1328  F(cl_device_info, CL_DEVICE_MAX_ON_DEVICE_QUEUES, cl_uint) \
1329  F(cl_device_info, CL_DEVICE_MAX_ON_DEVICE_EVENTS, cl_uint) \
1330  F(cl_device_info, CL_DEVICE_MAX_PIPE_ARGS, cl_uint) \
1331  F(cl_device_info, CL_DEVICE_PIPE_MAX_ACTIVE_RESERVATIONS, cl_uint) \
1332  F(cl_device_info, CL_DEVICE_PIPE_MAX_PACKET_SIZE, cl_uint) \
1333  F(cl_device_info, CL_DEVICE_SVM_CAPABILITIES, cl_device_svm_capabilities) \
1334  F(cl_device_info, CL_DEVICE_PREFERRED_PLATFORM_ATOMIC_ALIGNMENT, cl_uint) \
1335  F(cl_device_info, CL_DEVICE_PREFERRED_GLOBAL_ATOMIC_ALIGNMENT, cl_uint) \
1336  F(cl_device_info, CL_DEVICE_PREFERRED_LOCAL_ATOMIC_ALIGNMENT, cl_uint) \
1337  F(cl_command_queue_info, CL_QUEUE_SIZE, cl_uint) \
1338  F(cl_mem_info, CL_MEM_USES_SVM_POINTER, cl_bool) \
1339  F(cl_program_build_info, CL_PROGRAM_BUILD_GLOBAL_VARIABLE_TOTAL_SIZE, size_type) \
1340  F(cl_pipe_info, CL_PIPE_PACKET_SIZE, cl_uint) \
1341  F(cl_pipe_info, CL_PIPE_MAX_PACKETS, cl_uint)
1342 
1343 #define CL_HPP_PARAM_NAME_INFO_SUBGROUP_KHR_(F) \
1344  F(cl_kernel_sub_group_info, CL_KERNEL_MAX_SUB_GROUP_SIZE_FOR_NDRANGE_KHR, size_type) \
1345  F(cl_kernel_sub_group_info, CL_KERNEL_SUB_GROUP_COUNT_FOR_NDRANGE_KHR, size_type)
1346 
1347 #define CL_HPP_PARAM_NAME_INFO_IL_KHR_(F) \
1348  F(cl_device_info, CL_DEVICE_IL_VERSION_KHR, string) \
1349  F(cl_program_info, CL_PROGRAM_IL_KHR, cl::vector<unsigned char>)
1350 
1351 #define CL_HPP_PARAM_NAME_INFO_2_1_(F) \
1352  F(cl_platform_info, CL_PLATFORM_HOST_TIMER_RESOLUTION, size_type) \
1353  F(cl_program_info, CL_PROGRAM_IL, cl::vector<unsigned char>) \
1354  F(cl_kernel_info, CL_KERNEL_MAX_NUM_SUB_GROUPS, size_type) \
1355  F(cl_kernel_info, CL_KERNEL_COMPILE_NUM_SUB_GROUPS, size_type) \
1356  F(cl_device_info, CL_DEVICE_MAX_NUM_SUB_GROUPS, cl_uint) \
1357  F(cl_device_info, CL_DEVICE_IL_VERSION, string) \
1358  F(cl_device_info, CL_DEVICE_SUB_GROUP_INDEPENDENT_FORWARD_PROGRESS, cl_bool) \
1359  F(cl_command_queue_info, CL_QUEUE_DEVICE_DEFAULT, cl::DeviceCommandQueue) \
1360  F(cl_kernel_sub_group_info, CL_KERNEL_MAX_SUB_GROUP_SIZE_FOR_NDRANGE, size_type) \
1361  F(cl_kernel_sub_group_info, CL_KERNEL_SUB_GROUP_COUNT_FOR_NDRANGE, size_type) \
1362  F(cl_kernel_sub_group_info, CL_KERNEL_LOCAL_SIZE_FOR_SUB_GROUP_COUNT, cl::detail::size_t_array)
1363 
1364 #define CL_HPP_PARAM_NAME_DEVICE_FISSION_(F) \
1365  F(cl_device_info, CL_DEVICE_PARENT_DEVICE_EXT, cl_device_id) \
1366  F(cl_device_info, CL_DEVICE_PARTITION_TYPES_EXT, cl::vector<cl_device_partition_property_ext>) \
1367  F(cl_device_info, CL_DEVICE_AFFINITY_DOMAINS_EXT, cl::vector<cl_device_partition_property_ext>) \
1368  F(cl_device_info, CL_DEVICE_REFERENCE_COUNT_EXT , cl_uint) \
1369  F(cl_device_info, CL_DEVICE_PARTITION_STYLE_EXT, cl::vector<cl_device_partition_property_ext>)
1370 
1371 template <typename enum_type, cl_int Name>
1372 struct param_traits {};
1373 
1374 #define CL_HPP_DECLARE_PARAM_TRAITS_(token, param_name, T) \
1375 struct token; \
1376 template<> \
1377 struct param_traits<detail:: token,param_name> \
1378 { \
1379  enum { value = param_name }; \
1380  typedef T param_type; \
1381 };
1382 
1383 CL_HPP_PARAM_NAME_INFO_1_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1384 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
1385 CL_HPP_PARAM_NAME_INFO_1_1_(CL_HPP_DECLARE_PARAM_TRAITS_)
1386 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
1387 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
1388 CL_HPP_PARAM_NAME_INFO_1_2_(CL_HPP_DECLARE_PARAM_TRAITS_)
1389 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
1390 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
1391 CL_HPP_PARAM_NAME_INFO_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1392 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
1393 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
1394 CL_HPP_PARAM_NAME_INFO_2_1_(CL_HPP_DECLARE_PARAM_TRAITS_)
1395 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 210
1396 
1397 #if defined(CL_HPP_USE_CL_SUB_GROUPS_KHR) && CL_HPP_TARGET_OPENCL_VERSION < 210
1398 CL_HPP_PARAM_NAME_INFO_SUBGROUP_KHR_(CL_HPP_DECLARE_PARAM_TRAITS_)
1399 #endif // #if defined(CL_HPP_USE_CL_SUB_GROUPS_KHR) && CL_HPP_TARGET_OPENCL_VERSION < 210
1400 
1401 #if defined(CL_HPP_USE_IL_KHR)
1402 CL_HPP_PARAM_NAME_INFO_IL_KHR_(CL_HPP_DECLARE_PARAM_TRAITS_)
1403 #endif // #if defined(CL_HPP_USE_IL_KHR)
1404 
1405 
1406 // Flags deprecated in OpenCL 2.0
1407 #define CL_HPP_PARAM_NAME_INFO_1_0_DEPRECATED_IN_2_0_(F) \
1408  F(cl_device_info, CL_DEVICE_QUEUE_PROPERTIES, cl_command_queue_properties)
1409 
1410 #define CL_HPP_PARAM_NAME_INFO_1_1_DEPRECATED_IN_2_0_(F) \
1411  F(cl_device_info, CL_DEVICE_HOST_UNIFIED_MEMORY, cl_bool)
1412 
1413 #define CL_HPP_PARAM_NAME_INFO_1_2_DEPRECATED_IN_2_0_(F) \
1414  F(cl_image_info, CL_IMAGE_BUFFER, cl::Buffer)
1415 
1416 // Include deprecated query flags based on versions
1417 // Only include deprecated 1.0 flags if 2.0 not active as there is an enum clash
1418 #if CL_HPP_TARGET_OPENCL_VERSION > 100 && CL_HPP_MINIMUM_OPENCL_VERSION < 200 && CL_HPP_TARGET_OPENCL_VERSION < 200
1419 CL_HPP_PARAM_NAME_INFO_1_0_DEPRECATED_IN_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1420 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 110
1421 #if CL_HPP_TARGET_OPENCL_VERSION > 110 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
1422 CL_HPP_PARAM_NAME_INFO_1_1_DEPRECATED_IN_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1423 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 120
1424 #if CL_HPP_TARGET_OPENCL_VERSION > 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
1425 CL_HPP_PARAM_NAME_INFO_1_2_DEPRECATED_IN_2_0_(CL_HPP_DECLARE_PARAM_TRAITS_)
1426 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
1427 
1428 #if defined(CL_HPP_USE_CL_DEVICE_FISSION)
1429 CL_HPP_PARAM_NAME_DEVICE_FISSION_(CL_HPP_DECLARE_PARAM_TRAITS_);
1430 #endif // CL_HPP_USE_CL_DEVICE_FISSION
1431 
1432 #ifdef CL_PLATFORM_ICD_SUFFIX_KHR
1433 CL_HPP_DECLARE_PARAM_TRAITS_(cl_platform_info, CL_PLATFORM_ICD_SUFFIX_KHR, string)
1434 #endif
1435 
1436 #ifdef CL_DEVICE_PROFILING_TIMER_OFFSET_AMD
1437 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_PROFILING_TIMER_OFFSET_AMD, cl_ulong)
1438 #endif
1439 
1440 #ifdef CL_DEVICE_GLOBAL_FREE_MEMORY_AMD
1441 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_FREE_MEMORY_AMD, vector<size_type>)
1442 #endif
1443 #ifdef CL_DEVICE_SIMD_PER_COMPUTE_UNIT_AMD
1444 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_SIMD_PER_COMPUTE_UNIT_AMD, cl_uint)
1445 #endif
1446 #ifdef CL_DEVICE_SIMD_WIDTH_AMD
1447 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_SIMD_WIDTH_AMD, cl_uint)
1448 #endif
1449 #ifdef CL_DEVICE_SIMD_INSTRUCTION_WIDTH_AMD
1450 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_SIMD_INSTRUCTION_WIDTH_AMD, cl_uint)
1451 #endif
1452 #ifdef CL_DEVICE_WAVEFRONT_WIDTH_AMD
1453 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_WAVEFRONT_WIDTH_AMD, cl_uint)
1454 #endif
1455 #ifdef CL_DEVICE_GLOBAL_MEM_CHANNELS_AMD
1456 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_MEM_CHANNELS_AMD, cl_uint)
1457 #endif
1458 #ifdef CL_DEVICE_GLOBAL_MEM_CHANNEL_BANKS_AMD
1459 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_MEM_CHANNEL_BANKS_AMD, cl_uint)
1460 #endif
1461 #ifdef CL_DEVICE_GLOBAL_MEM_CHANNEL_BANK_WIDTH_AMD
1462 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GLOBAL_MEM_CHANNEL_BANK_WIDTH_AMD, cl_uint)
1463 #endif
1464 #ifdef CL_DEVICE_LOCAL_MEM_SIZE_PER_COMPUTE_UNIT_AMD
1465 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_LOCAL_MEM_SIZE_PER_COMPUTE_UNIT_AMD, cl_uint)
1466 #endif
1467 #ifdef CL_DEVICE_LOCAL_MEM_BANKS_AMD
1468 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_LOCAL_MEM_BANKS_AMD, cl_uint)
1469 #endif
1470 
1471 #ifdef CL_DEVICE_COMPUTE_CAPABILITY_MAJOR_NV
1472 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_COMPUTE_CAPABILITY_MAJOR_NV, cl_uint)
1473 #endif
1474 #ifdef CL_DEVICE_COMPUTE_CAPABILITY_MINOR_NV
1475 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_COMPUTE_CAPABILITY_MINOR_NV, cl_uint)
1476 #endif
1477 #ifdef CL_DEVICE_REGISTERS_PER_BLOCK_NV
1478 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_REGISTERS_PER_BLOCK_NV, cl_uint)
1479 #endif
1480 #ifdef CL_DEVICE_WARP_SIZE_NV
1481 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_WARP_SIZE_NV, cl_uint)
1482 #endif
1483 #ifdef CL_DEVICE_GPU_OVERLAP_NV
1484 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_GPU_OVERLAP_NV, cl_bool)
1485 #endif
1486 #ifdef CL_DEVICE_KERNEL_EXEC_TIMEOUT_NV
1487 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_KERNEL_EXEC_TIMEOUT_NV, cl_bool)
1488 #endif
1489 #ifdef CL_DEVICE_INTEGRATED_MEMORY_NV
1490 CL_HPP_DECLARE_PARAM_TRAITS_(cl_device_info, CL_DEVICE_INTEGRATED_MEMORY_NV, cl_bool)
1491 #endif
1492 
1493 // Convenience functions
1494 
1495 template <typename Func, typename T>
1496 inline cl_int
1497 getInfo(Func f, cl_uint name, T* param)
1498 {
1499  return getInfoHelper(f, name, param, 0);
1500 }
1501 
1502 template <typename Func, typename Arg0>
1504 {
1505  Func f_; const Arg0& arg0_;
1506  cl_int operator ()(
1507  cl_uint param, size_type size, void* value, size_type* size_ret)
1508  { return f_(arg0_, param, size, value, size_ret); }
1509 };
1510 
1511 template <typename Func, typename Arg0, typename Arg1>
1513 {
1514  Func f_; const Arg0& arg0_; const Arg1& arg1_;
1515  cl_int operator ()(
1516  cl_uint param, size_type size, void* value, size_type* size_ret)
1517  { return f_(arg0_, arg1_, param, size, value, size_ret); }
1518 };
1519 
1520 template <typename Func, typename Arg0, typename T>
1521 inline cl_int
1522 getInfo(Func f, const Arg0& arg0, cl_uint name, T* param)
1523 {
1524  GetInfoFunctor0<Func, Arg0> f0 = { f, arg0 };
1525  return getInfoHelper(f0, name, param, 0);
1526 }
1527 
1528 template <typename Func, typename Arg0, typename Arg1, typename T>
1529 inline cl_int
1530 getInfo(Func f, const Arg0& arg0, const Arg1& arg1, cl_uint name, T* param)
1531 {
1532  GetInfoFunctor1<Func, Arg0, Arg1> f0 = { f, arg0, arg1 };
1533  return getInfoHelper(f0, name, param, 0);
1534 }
1535 
1536 
1537 template<typename T>
1538 struct ReferenceHandler
1539 { };
1540 
1541 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
1542 
1545 template <>
1546 struct ReferenceHandler<cl_device_id>
1547 {
1557  static cl_int retain(cl_device_id device)
1558  { return ::clRetainDevice(device); }
1568  static cl_int release(cl_device_id device)
1569  { return ::clReleaseDevice(device); }
1570 };
1571 #else // CL_HPP_TARGET_OPENCL_VERSION >= 120
1572 
1575 template <>
1576 struct ReferenceHandler<cl_device_id>
1577 {
1578  // cl_device_id does not have retain().
1579  static cl_int retain(cl_device_id)
1580  { return CL_SUCCESS; }
1581  // cl_device_id does not have release().
1582  static cl_int release(cl_device_id)
1583  { return CL_SUCCESS; }
1584 };
1585 #endif // ! (CL_HPP_TARGET_OPENCL_VERSION >= 120)
1586 
1587 template <>
1588 struct ReferenceHandler<cl_platform_id>
1589 {
1590  // cl_platform_id does not have retain().
1591  static cl_int retain(cl_platform_id)
1592  { return CL_SUCCESS; }
1593  // cl_platform_id does not have release().
1594  static cl_int release(cl_platform_id)
1595  { return CL_SUCCESS; }
1596 };
1597 
1598 template <>
1599 struct ReferenceHandler<cl_context>
1600 {
1601  static cl_int retain(cl_context context)
1602  { return ::clRetainContext(context); }
1603  static cl_int release(cl_context context)
1604  { return ::clReleaseContext(context); }
1605 };
1606 
1607 template <>
1608 struct ReferenceHandler<cl_command_queue>
1609 {
1610  static cl_int retain(cl_command_queue queue)
1611  { return ::clRetainCommandQueue(queue); }
1612  static cl_int release(cl_command_queue queue)
1613  { return ::clReleaseCommandQueue(queue); }
1614 };
1615 
1616 template <>
1617 struct ReferenceHandler<cl_mem>
1618 {
1619  static cl_int retain(cl_mem memory)
1620  { return ::clRetainMemObject(memory); }
1621  static cl_int release(cl_mem memory)
1622  { return ::clReleaseMemObject(memory); }
1623 };
1624 
1625 template <>
1626 struct ReferenceHandler<cl_sampler>
1627 {
1628  static cl_int retain(cl_sampler sampler)
1629  { return ::clRetainSampler(sampler); }
1630  static cl_int release(cl_sampler sampler)
1631  { return ::clReleaseSampler(sampler); }
1632 };
1633 
1634 template <>
1635 struct ReferenceHandler<cl_program>
1636 {
1637  static cl_int retain(cl_program program)
1638  { return ::clRetainProgram(program); }
1639  static cl_int release(cl_program program)
1640  { return ::clReleaseProgram(program); }
1641 };
1642 
1643 template <>
1644 struct ReferenceHandler<cl_kernel>
1645 {
1646  static cl_int retain(cl_kernel kernel)
1647  { return ::clRetainKernel(kernel); }
1648  static cl_int release(cl_kernel kernel)
1649  { return ::clReleaseKernel(kernel); }
1650 };
1651 
1652 template <>
1653 struct ReferenceHandler<cl_event>
1654 {
1655  static cl_int retain(cl_event event)
1656  { return ::clRetainEvent(event); }
1657  static cl_int release(cl_event event)
1658  { return ::clReleaseEvent(event); }
1659 };
1660 
1661 
1662 #if CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
1663 // Extracts version number with major in the upper 16 bits, minor in the lower 16
1664 static cl_uint getVersion(const vector<char> &versionInfo)
1665 {
1666  int highVersion = 0;
1667  int lowVersion = 0;
1668  int index = 7;
1669  while(versionInfo[index] != '.' ) {
1670  highVersion *= 10;
1671  highVersion += versionInfo[index]-'0';
1672  ++index;
1673  }
1674  ++index;
1675  while(versionInfo[index] != ' ' && versionInfo[index] != '\0') {
1676  lowVersion *= 10;
1677  lowVersion += versionInfo[index]-'0';
1678  ++index;
1679  }
1680  return (highVersion << 16) | lowVersion;
1681 }
1682 
1683 static cl_uint getPlatformVersion(cl_platform_id platform)
1684 {
1685  size_type size = 0;
1686  clGetPlatformInfo(platform, CL_PLATFORM_VERSION, 0, NULL, &size);
1687 
1688  vector<char> versionInfo(size);
1689  clGetPlatformInfo(platform, CL_PLATFORM_VERSION, size, versionInfo.data(), &size);
1690  return getVersion(versionInfo);
1691 }
1692 
1693 static cl_uint getDevicePlatformVersion(cl_device_id device)
1694 {
1695  cl_platform_id platform;
1696  clGetDeviceInfo(device, CL_DEVICE_PLATFORM, sizeof(platform), &platform, NULL);
1697  return getPlatformVersion(platform);
1698 }
1699 
1700 static cl_uint getContextPlatformVersion(cl_context context)
1701 {
1702  // The platform cannot be queried directly, so we first have to grab a
1703  // device and obtain its context
1704  size_type size = 0;
1705  clGetContextInfo(context, CL_CONTEXT_DEVICES, 0, NULL, &size);
1706  if (size == 0)
1707  return 0;
1708  vector<cl_device_id> devices(size/sizeof(cl_device_id));
1709  clGetContextInfo(context, CL_CONTEXT_DEVICES, size, devices.data(), NULL);
1710  return getDevicePlatformVersion(devices[0]);
1711 }
1712 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
1713 
1714 template <typename T>
1715 class Wrapper
1716 {
1717 public:
1718  typedef T cl_type;
1719 
1720 protected:
1721  cl_type object_;
1722 
1723 public:
1724  Wrapper() : object_(NULL) { }
1725 
1726  Wrapper(const cl_type &obj, bool retainObject) : object_(obj)
1727  {
1728  if (retainObject) {
1729  detail::errHandler(retain(), __RETAIN_ERR);
1730  }
1731  }
1732 
1733  ~Wrapper()
1734  {
1735  if (object_ != NULL) { release(); }
1736  }
1737 
1738  Wrapper(const Wrapper<cl_type>& rhs)
1739  {
1740  object_ = rhs.object_;
1741  detail::errHandler(retain(), __RETAIN_ERR);
1742  }
1743 
1744  Wrapper(Wrapper<cl_type>&& rhs) CL_HPP_NOEXCEPT_
1745  {
1746  object_ = rhs.object_;
1747  rhs.object_ = NULL;
1748  }
1749 
1750  Wrapper<cl_type>& operator = (const Wrapper<cl_type>& rhs)
1751  {
1752  if (this != &rhs) {
1753  detail::errHandler(release(), __RELEASE_ERR);
1754  object_ = rhs.object_;
1755  detail::errHandler(retain(), __RETAIN_ERR);
1756  }
1757  return *this;
1758  }
1759 
1760  Wrapper<cl_type>& operator = (Wrapper<cl_type>&& rhs)
1761  {
1762  if (this != &rhs) {
1763  detail::errHandler(release(), __RELEASE_ERR);
1764  object_ = rhs.object_;
1765  rhs.object_ = NULL;
1766  }
1767  return *this;
1768  }
1769 
1770  Wrapper<cl_type>& operator = (const cl_type &rhs)
1771  {
1772  detail::errHandler(release(), __RELEASE_ERR);
1773  object_ = rhs;
1774  return *this;
1775  }
1776 
1777  const cl_type& operator ()() const { return object_; }
1778 
1779  cl_type& operator ()() { return object_; }
1780 
1781  const cl_type get() const { return object_; }
1782 
1783  cl_type get() { return object_; }
1784 
1785 
1786 protected:
1787  template<typename Func, typename U>
1788  friend inline cl_int getInfoHelper(Func, cl_uint, U*, int, typename U::cl_type);
1789 
1790  cl_int retain() const
1791  {
1792  if (object_ != nullptr) {
1793  return ReferenceHandler<cl_type>::retain(object_);
1794  }
1795  else {
1796  return CL_SUCCESS;
1797  }
1798  }
1799 
1800  cl_int release() const
1801  {
1802  if (object_ != nullptr) {
1803  return ReferenceHandler<cl_type>::release(object_);
1804  }
1805  else {
1806  return CL_SUCCESS;
1807  }
1808  }
1809 };
1810 
1811 template <>
1812 class Wrapper<cl_device_id>
1813 {
1814 public:
1815  typedef cl_device_id cl_type;
1816 
1817 protected:
1818  cl_type object_;
1819  bool referenceCountable_;
1820 
1821  static bool isReferenceCountable(cl_device_id device)
1822  {
1823  bool retVal = false;
1824 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
1825 #if CL_HPP_MINIMUM_OPENCL_VERSION < 120
1826  if (device != NULL) {
1827  int version = getDevicePlatformVersion(device);
1828  if(version > ((1 << 16) + 1)) {
1829  retVal = true;
1830  }
1831  }
1832 #else // CL_HPP_MINIMUM_OPENCL_VERSION < 120
1833  retVal = true;
1834 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 120
1835 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
1836  return retVal;
1837  }
1838 
1839 public:
1840  Wrapper() : object_(NULL), referenceCountable_(false)
1841  {
1842  }
1843 
1844  Wrapper(const cl_type &obj, bool retainObject) :
1845  object_(obj),
1846  referenceCountable_(false)
1847  {
1848  referenceCountable_ = isReferenceCountable(obj);
1849 
1850  if (retainObject) {
1851  detail::errHandler(retain(), __RETAIN_ERR);
1852  }
1853  }
1854 
1855  ~Wrapper()
1856  {
1857  release();
1858  }
1859 
1860  Wrapper(const Wrapper<cl_type>& rhs)
1861  {
1862  object_ = rhs.object_;
1863  referenceCountable_ = isReferenceCountable(object_);
1864  detail::errHandler(retain(), __RETAIN_ERR);
1865  }
1866 
1867  Wrapper(Wrapper<cl_type>&& rhs) CL_HPP_NOEXCEPT_
1868  {
1869  object_ = rhs.object_;
1870  referenceCountable_ = rhs.referenceCountable_;
1871  rhs.object_ = NULL;
1872  rhs.referenceCountable_ = false;
1873  }
1874 
1875  Wrapper<cl_type>& operator = (const Wrapper<cl_type>& rhs)
1876  {
1877  if (this != &rhs) {
1878  detail::errHandler(release(), __RELEASE_ERR);
1879  object_ = rhs.object_;
1880  referenceCountable_ = rhs.referenceCountable_;
1881  detail::errHandler(retain(), __RETAIN_ERR);
1882  }
1883  return *this;
1884  }
1885 
1886  Wrapper<cl_type>& operator = (Wrapper<cl_type>&& rhs)
1887  {
1888  if (this != &rhs) {
1889  detail::errHandler(release(), __RELEASE_ERR);
1890  object_ = rhs.object_;
1891  referenceCountable_ = rhs.referenceCountable_;
1892  rhs.object_ = NULL;
1893  rhs.referenceCountable_ = false;
1894  }
1895  return *this;
1896  }
1897 
1898  Wrapper<cl_type>& operator = (const cl_type &rhs)
1899  {
1900  detail::errHandler(release(), __RELEASE_ERR);
1901  object_ = rhs;
1902  referenceCountable_ = isReferenceCountable(object_);
1903  return *this;
1904  }
1905 
1906  const cl_type& operator ()() const { return object_; }
1907 
1908  cl_type& operator ()() { return object_; }
1909 
1910  cl_type get() const { return object_; }
1911 
1912 protected:
1913  template<typename Func, typename U>
1914  friend inline cl_int getInfoHelper(Func, cl_uint, U*, int, typename U::cl_type);
1915 
1916  template<typename Func, typename U>
1917  friend inline cl_int getInfoHelper(Func, cl_uint, vector<U>*, int, typename U::cl_type);
1918 
1919  cl_int retain() const
1920  {
1921  if( object_ != nullptr && referenceCountable_ ) {
1922  return ReferenceHandler<cl_type>::retain(object_);
1923  }
1924  else {
1925  return CL_SUCCESS;
1926  }
1927  }
1928 
1929  cl_int release() const
1930  {
1931  if (object_ != nullptr && referenceCountable_) {
1932  return ReferenceHandler<cl_type>::release(object_);
1933  }
1934  else {
1935  return CL_SUCCESS;
1936  }
1937  }
1938 };
1939 
1940 template <typename T>
1941 inline bool operator==(const Wrapper<T> &lhs, const Wrapper<T> &rhs)
1942 {
1943  return lhs() == rhs();
1944 }
1945 
1946 template <typename T>
1947 inline bool operator!=(const Wrapper<T> &lhs, const Wrapper<T> &rhs)
1948 {
1949  return !operator==(lhs, rhs);
1950 }
1951 
1952 } // namespace detail
1954 
1955 
1956 using BuildLogType = vector<std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, CL_PROGRAM_BUILD_LOG>::param_type>>;
1957 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
1958 
1961 class BuildError : public Error
1962 {
1963 private:
1964  BuildLogType buildLogs;
1965 public:
1966  BuildError(cl_int err, const char * errStr, const BuildLogType &vec) : Error(err, errStr), buildLogs(vec)
1967  {
1968  }
1969 
1970  BuildLogType getBuildLog() const
1971  {
1972  return buildLogs;
1973  }
1974 };
1975 namespace detail {
1976  static inline cl_int buildErrHandler(
1977  cl_int err,
1978  const char * errStr,
1979  const BuildLogType &buildLogs)
1980  {
1981  if (err != CL_SUCCESS) {
1982  throw BuildError(err, errStr, buildLogs);
1983  }
1984  return err;
1985  }
1986 } // namespace detail
1987 
1988 #else
1989 namespace detail {
1990  static inline cl_int buildErrHandler(
1991  cl_int err,
1992  const char * errStr,
1993  const BuildLogType &buildLogs)
1994  {
1995  (void)buildLogs; // suppress unused variable warning
1996  (void)errStr;
1997  return err;
1998  }
1999 } // namespace detail
2000 #endif // #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2001 
2002 
2008 struct ImageFormat : public cl_image_format
2009 {
2012 
2014  ImageFormat(cl_channel_order order, cl_channel_type type)
2015  {
2016  image_channel_order = order;
2017  image_channel_data_type = type;
2018  }
2019 
2021  ImageFormat& operator = (const ImageFormat& rhs)
2022  {
2023  if (this != &rhs) {
2024  this->image_channel_data_type = rhs.image_channel_data_type;
2025  this->image_channel_order = rhs.image_channel_order;
2026  }
2027  return *this;
2028  }
2029 };
2030 
2038 class Device : public detail::Wrapper<cl_device_id>
2039 {
2040 private:
2041  static std::once_flag default_initialized_;
2042  static Device default_;
2043  static cl_int default_error_;
2044 
2050  static void makeDefault();
2051 
2057  static void makeDefaultProvided(const Device &p) {
2058  default_ = p;
2059  }
2060 
2061 public:
2062 #ifdef CL_HPP_UNIT_TEST_ENABLE
2063 
2069  static void unitTestClearDefault() {
2070  default_ = Device();
2071  }
2072 #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
2073 
2075  Device() : detail::Wrapper<cl_type>() { }
2076 
2081  explicit Device(const cl_device_id &device, bool retainObject = false) :
2082  detail::Wrapper<cl_type>(device, retainObject) { }
2083 
2089  cl_int *errResult = NULL)
2090  {
2091  std::call_once(default_initialized_, makeDefault);
2092  detail::errHandler(default_error_);
2093  if (errResult != NULL) {
2094  *errResult = default_error_;
2095  }
2096  return default_;
2097  }
2098 
2106  static Device setDefault(const Device &default_device)
2107  {
2108  std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_device));
2109  detail::errHandler(default_error_);
2110  return default_;
2111  }
2112 
2117  Device& operator = (const cl_device_id& rhs)
2118  {
2120  return *this;
2121  }
2122 
2126  Device(const Device& dev) : detail::Wrapper<cl_type>(dev) {}
2127 
2131  Device& operator = (const Device &dev)
2132  {
2134  return *this;
2135  }
2136 
2140  Device(Device&& dev) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(dev)) {}
2141 
2145  Device& operator = (Device &&dev)
2146  {
2147  detail::Wrapper<cl_type>::operator=(std::move(dev));
2148  return *this;
2149  }
2150 
2152  template <typename T>
2153  cl_int getInfo(cl_device_info name, T* param) const
2154  {
2155  return detail::errHandler(
2156  detail::getInfo(&::clGetDeviceInfo, object_, name, param),
2157  __GET_DEVICE_INFO_ERR);
2158  }
2159 
2161  template <cl_int name> typename
2163  getInfo(cl_int* err = NULL) const
2164  {
2165  typename detail::param_traits<
2166  detail::cl_device_info, name>::param_type param;
2167  cl_int result = getInfo(name, &param);
2168  if (err != NULL) {
2169  *err = result;
2170  }
2171  return param;
2172  }
2173 
2174 
2175 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
2176 
2182  cl_ulong getHostTimer(cl_int *error = nullptr)
2183  {
2184  cl_ulong retVal = 0;
2185  cl_int err =
2186  clGetHostTimer(this->get(), &retVal);
2187  detail::errHandler(
2188  err,
2189  __GET_HOST_TIMER_ERR);
2190  if (error) {
2191  *error = err;
2192  }
2193  return retVal;
2194  }
2195 
2206  std::pair<cl_ulong, cl_ulong> getDeviceAndHostTimer(cl_int *error = nullptr)
2207  {
2208  std::pair<cl_ulong, cl_ulong> retVal;
2209  cl_int err =
2210  clGetDeviceAndHostTimer(this->get(), &(retVal.first), &(retVal.second));
2211  detail::errHandler(
2212  err,
2213  __GET_DEVICE_AND_HOST_TIMER_ERR);
2214  if (error) {
2215  *error = err;
2216  }
2217  return retVal;
2218  }
2219 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
2220 
2224 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
2225  cl_int createSubDevices(
2227  const cl_device_partition_property * properties,
2228  vector<Device>* devices)
2229  {
2230  cl_uint n = 0;
2231  cl_int err = clCreateSubDevices(object_, properties, 0, NULL, &n);
2232  if (err != CL_SUCCESS) {
2233  return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
2234  }
2235 
2236  vector<cl_device_id> ids(n);
2237  err = clCreateSubDevices(object_, properties, n, ids.data(), NULL);
2238  if (err != CL_SUCCESS) {
2239  return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
2240  }
2241 
2242  // Cannot trivially assign because we need to capture intermediates
2243  // with safe construction
2244  if (devices) {
2245  devices->resize(ids.size());
2246 
2247  // Assign to param, constructing with retain behaviour
2248  // to correctly capture each underlying CL object
2249  for (size_type i = 0; i < ids.size(); i++) {
2250  // We do not need to retain because this device is being created
2251  // by the runtime
2252  (*devices)[i] = Device(ids[i], false);
2253  }
2254  }
2255 
2256  return CL_SUCCESS;
2257  }
2258 #elif defined(CL_HPP_USE_CL_DEVICE_FISSION)
2259 
2263  cl_int createSubDevices(
2264  const cl_device_partition_property_ext * properties,
2265  vector<Device>* devices)
2266  {
2267  typedef CL_API_ENTRY cl_int
2268  ( CL_API_CALL * PFN_clCreateSubDevicesEXT)(
2269  cl_device_id /*in_device*/,
2270  const cl_device_partition_property_ext * /* properties */,
2271  cl_uint /*num_entries*/,
2272  cl_device_id * /*out_devices*/,
2273  cl_uint * /*num_devices*/ ) CL_EXT_SUFFIX__VERSION_1_1;
2274 
2275  static PFN_clCreateSubDevicesEXT pfn_clCreateSubDevicesEXT = NULL;
2276  CL_HPP_INIT_CL_EXT_FCN_PTR_(clCreateSubDevicesEXT);
2277 
2278  cl_uint n = 0;
2279  cl_int err = pfn_clCreateSubDevicesEXT(object_, properties, 0, NULL, &n);
2280  if (err != CL_SUCCESS) {
2281  return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
2282  }
2283 
2284  vector<cl_device_id> ids(n);
2285  err = pfn_clCreateSubDevicesEXT(object_, properties, n, ids.data(), NULL);
2286  if (err != CL_SUCCESS) {
2287  return detail::errHandler(err, __CREATE_SUB_DEVICES_ERR);
2288  }
2289  // Cannot trivially assign because we need to capture intermediates
2290  // with safe construction
2291  if (devices) {
2292  devices->resize(ids.size());
2293 
2294  // Assign to param, constructing with retain behaviour
2295  // to correctly capture each underlying CL object
2296  for (size_type i = 0; i < ids.size(); i++) {
2297  // We do not need to retain because this device is being created
2298  // by the runtime
2299  (*devices)[i] = Device(ids[i], false);
2300  }
2301  }
2302  return CL_SUCCESS;
2303  }
2304 #endif // defined(CL_HPP_USE_CL_DEVICE_FISSION)
2305 };
2306 
2307 CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag Device::default_initialized_;
2308 CL_HPP_DEFINE_STATIC_MEMBER_ Device Device::default_;
2309 CL_HPP_DEFINE_STATIC_MEMBER_ cl_int Device::default_error_ = CL_SUCCESS;
2310 
2318 class Platform : public detail::Wrapper<cl_platform_id>
2319 {
2320 private:
2321  static std::once_flag default_initialized_;
2322  static Platform default_;
2323  static cl_int default_error_;
2324 
2330  static void makeDefault() {
2331  /* Throwing an exception from a call_once invocation does not do
2332  * what we wish, so we catch it and save the error.
2333  */
2334 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2335  try
2336 #endif
2337  {
2338  // If default wasn't passed ,generate one
2339  // Otherwise set it
2340  cl_uint n = 0;
2341 
2342  cl_int err = ::clGetPlatformIDs(0, NULL, &n);
2343  if (err != CL_SUCCESS) {
2344  default_error_ = err;
2345  return;
2346  }
2347  if (n == 0) {
2348  default_error_ = CL_INVALID_PLATFORM;
2349  return;
2350  }
2351 
2352  vector<cl_platform_id> ids(n);
2353  err = ::clGetPlatformIDs(n, ids.data(), NULL);
2354  if (err != CL_SUCCESS) {
2355  default_error_ = err;
2356  return;
2357  }
2358 
2359  default_ = Platform(ids[0]);
2360  }
2361 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2362  catch (cl::Error &e) {
2363  default_error_ = e.err();
2364  }
2365 #endif
2366  }
2367 
2373  static void makeDefaultProvided(const Platform &p) {
2374  default_ = p;
2375  }
2376 
2377 public:
2378 #ifdef CL_HPP_UNIT_TEST_ENABLE
2379 
2385  static void unitTestClearDefault() {
2386  default_ = Platform();
2387  }
2388 #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
2389 
2391  Platform() : detail::Wrapper<cl_type>() { }
2392 
2400  explicit Platform(const cl_platform_id &platform, bool retainObject = false) :
2401  detail::Wrapper<cl_type>(platform, retainObject) { }
2402 
2407  Platform& operator = (const cl_platform_id& rhs)
2408  {
2410  return *this;
2411  }
2412 
2413  static Platform getDefault(
2414  cl_int *errResult = NULL)
2415  {
2416  std::call_once(default_initialized_, makeDefault);
2417  detail::errHandler(default_error_);
2418  if (errResult != NULL) {
2419  *errResult = default_error_;
2420  }
2421  return default_;
2422  }
2423 
2431  static Platform setDefault(const Platform &default_platform)
2432  {
2433  std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_platform));
2434  detail::errHandler(default_error_);
2435  return default_;
2436  }
2437 
2439  cl_int getInfo(cl_platform_info name, string* param) const
2440  {
2441  return detail::errHandler(
2442  detail::getInfo(&::clGetPlatformInfo, object_, name, param),
2443  __GET_PLATFORM_INFO_ERR);
2444  }
2445 
2447  template <cl_int name> typename
2449  getInfo(cl_int* err = NULL) const
2450  {
2451  typename detail::param_traits<
2452  detail::cl_platform_info, name>::param_type param;
2453  cl_int result = getInfo(name, &param);
2454  if (err != NULL) {
2455  *err = result;
2456  }
2457  return param;
2458  }
2459 
2464  cl_int getDevices(
2465  cl_device_type type,
2466  vector<Device>* devices) const
2467  {
2468  cl_uint n = 0;
2469  if( devices == NULL ) {
2470  return detail::errHandler(CL_INVALID_ARG_VALUE, __GET_DEVICE_IDS_ERR);
2471  }
2472  cl_int err = ::clGetDeviceIDs(object_, type, 0, NULL, &n);
2473  if (err != CL_SUCCESS) {
2474  return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
2475  }
2476 
2477  vector<cl_device_id> ids(n);
2478  err = ::clGetDeviceIDs(object_, type, n, ids.data(), NULL);
2479  if (err != CL_SUCCESS) {
2480  return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
2481  }
2482 
2483  // Cannot trivially assign because we need to capture intermediates
2484  // with safe construction
2485  // We must retain things we obtain from the API to avoid releasing
2486  // API-owned objects.
2487  if (devices) {
2488  devices->resize(ids.size());
2489 
2490  // Assign to param, constructing with retain behaviour
2491  // to correctly capture each underlying CL object
2492  for (size_type i = 0; i < ids.size(); i++) {
2493  (*devices)[i] = Device(ids[i], true);
2494  }
2495  }
2496  return CL_SUCCESS;
2497  }
2498 
2499 #if defined(CL_HPP_USE_DX_INTEROP)
2500 
2523  cl_int getDevices(
2524  cl_d3d10_device_source_khr d3d_device_source,
2525  void * d3d_object,
2526  cl_d3d10_device_set_khr d3d_device_set,
2527  vector<Device>* devices) const
2528  {
2529  typedef CL_API_ENTRY cl_int (CL_API_CALL *PFN_clGetDeviceIDsFromD3D10KHR)(
2530  cl_platform_id platform,
2531  cl_d3d10_device_source_khr d3d_device_source,
2532  void * d3d_object,
2533  cl_d3d10_device_set_khr d3d_device_set,
2534  cl_uint num_entries,
2535  cl_device_id * devices,
2536  cl_uint* num_devices);
2537 
2538  if( devices == NULL ) {
2539  return detail::errHandler(CL_INVALID_ARG_VALUE, __GET_DEVICE_IDS_ERR);
2540  }
2541 
2542  static PFN_clGetDeviceIDsFromD3D10KHR pfn_clGetDeviceIDsFromD3D10KHR = NULL;
2543  CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(object_, clGetDeviceIDsFromD3D10KHR);
2544 
2545  cl_uint n = 0;
2546  cl_int err = pfn_clGetDeviceIDsFromD3D10KHR(
2547  object_,
2548  d3d_device_source,
2549  d3d_object,
2550  d3d_device_set,
2551  0,
2552  NULL,
2553  &n);
2554  if (err != CL_SUCCESS) {
2555  return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
2556  }
2557 
2558  vector<cl_device_id> ids(n);
2559  err = pfn_clGetDeviceIDsFromD3D10KHR(
2560  object_,
2561  d3d_device_source,
2562  d3d_object,
2563  d3d_device_set,
2564  n,
2565  ids.data(),
2566  NULL);
2567  if (err != CL_SUCCESS) {
2568  return detail::errHandler(err, __GET_DEVICE_IDS_ERR);
2569  }
2570 
2571  // Cannot trivially assign because we need to capture intermediates
2572  // with safe construction
2573  // We must retain things we obtain from the API to avoid releasing
2574  // API-owned objects.
2575  if (devices) {
2576  devices->resize(ids.size());
2577 
2578  // Assign to param, constructing with retain behaviour
2579  // to correctly capture each underlying CL object
2580  for (size_type i = 0; i < ids.size(); i++) {
2581  (*devices)[i] = Device(ids[i], true);
2582  }
2583  }
2584  return CL_SUCCESS;
2585  }
2586 #endif
2587 
2592  static cl_int get(
2593  vector<Platform>* platforms)
2594  {
2595  cl_uint n = 0;
2596 
2597  if( platforms == NULL ) {
2598  return detail::errHandler(CL_INVALID_ARG_VALUE, __GET_PLATFORM_IDS_ERR);
2599  }
2600 
2601  cl_int err = ::clGetPlatformIDs(0, NULL, &n);
2602  if (err != CL_SUCCESS) {
2603  return detail::errHandler(err, __GET_PLATFORM_IDS_ERR);
2604  }
2605 
2606  vector<cl_platform_id> ids(n);
2607  err = ::clGetPlatformIDs(n, ids.data(), NULL);
2608  if (err != CL_SUCCESS) {
2609  return detail::errHandler(err, __GET_PLATFORM_IDS_ERR);
2610  }
2611 
2612  if (platforms) {
2613  platforms->resize(ids.size());
2614 
2615  // Platforms don't reference count
2616  for (size_type i = 0; i < ids.size(); i++) {
2617  (*platforms)[i] = Platform(ids[i]);
2618  }
2619  }
2620  return CL_SUCCESS;
2621  }
2622 
2627  static cl_int get(
2628  Platform * platform)
2629  {
2630  cl_int err;
2631  Platform default_platform = Platform::getDefault(&err);
2632  if (platform) {
2633  *platform = default_platform;
2634  }
2635  return err;
2636  }
2637 
2646  static Platform get(
2647  cl_int * errResult = NULL)
2648  {
2649  cl_int err;
2650  Platform default_platform = Platform::getDefault(&err);
2651  if (errResult) {
2652  *errResult = err;
2653  }
2654  return default_platform;
2655  }
2656 
2657 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
2658  cl_int
2661  {
2662  return ::clUnloadPlatformCompiler(object_);
2663  }
2664 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
2665 }; // class Platform
2666 
2667 CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag Platform::default_initialized_;
2668 CL_HPP_DEFINE_STATIC_MEMBER_ Platform Platform::default_;
2669 CL_HPP_DEFINE_STATIC_MEMBER_ cl_int Platform::default_error_ = CL_SUCCESS;
2670 
2671 
2675 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
2676 
2680 inline CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_int
2681 UnloadCompiler() CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED;
2682 inline cl_int
2684 {
2685  return ::clUnloadCompiler();
2686 }
2687 #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
2688 
2697 class Context
2698  : public detail::Wrapper<cl_context>
2699 {
2700 private:
2701  static std::once_flag default_initialized_;
2702  static Context default_;
2703  static cl_int default_error_;
2704 
2710  static void makeDefault() {
2711  /* Throwing an exception from a call_once invocation does not do
2712  * what we wish, so we catch it and save the error.
2713  */
2714 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2715  try
2716 #endif
2717  {
2718 #if !defined(__APPLE__) && !defined(__MACOS)
2719  const Platform &p = Platform::getDefault();
2720  cl_platform_id defaultPlatform = p();
2721  cl_context_properties properties[3] = {
2722  CL_CONTEXT_PLATFORM, (cl_context_properties)defaultPlatform, 0
2723  };
2724 #else // #if !defined(__APPLE__) && !defined(__MACOS)
2725  cl_context_properties *properties = nullptr;
2726 #endif // #if !defined(__APPLE__) && !defined(__MACOS)
2727 
2728  default_ = Context(
2729  CL_DEVICE_TYPE_DEFAULT,
2730  properties,
2731  NULL,
2732  NULL,
2733  &default_error_);
2734  }
2735 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2736  catch (cl::Error &e) {
2737  default_error_ = e.err();
2738  }
2739 #endif
2740  }
2741 
2742 
2748  static void makeDefaultProvided(const Context &c) {
2749  default_ = c;
2750  }
2751 
2752 public:
2753 #ifdef CL_HPP_UNIT_TEST_ENABLE
2754 
2760  static void unitTestClearDefault() {
2761  default_ = Context();
2762  }
2763 #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
2764 
2770  const vector<Device>& devices,
2771  cl_context_properties* properties = NULL,
2772  void (CL_CALLBACK * notifyFptr)(
2773  const char *,
2774  const void *,
2775  size_type,
2776  void *) = NULL,
2777  void* data = NULL,
2778  cl_int* err = NULL)
2779  {
2780  cl_int error;
2781 
2782  size_type numDevices = devices.size();
2783  vector<cl_device_id> deviceIDs(numDevices);
2784 
2785  for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
2786  deviceIDs[deviceIndex] = (devices[deviceIndex])();
2787  }
2788 
2789  object_ = ::clCreateContext(
2790  properties, (cl_uint) numDevices,
2791  deviceIDs.data(),
2792  notifyFptr, data, &error);
2793 
2794  detail::errHandler(error, __CREATE_CONTEXT_ERR);
2795  if (err != NULL) {
2796  *err = error;
2797  }
2798  }
2799 
2800  Context(
2801  const Device& device,
2802  cl_context_properties* properties = NULL,
2803  void (CL_CALLBACK * notifyFptr)(
2804  const char *,
2805  const void *,
2806  size_type,
2807  void *) = NULL,
2808  void* data = NULL,
2809  cl_int* err = NULL)
2810  {
2811  cl_int error;
2812 
2813  cl_device_id deviceID = device();
2814 
2815  object_ = ::clCreateContext(
2816  properties, 1,
2817  &deviceID,
2818  notifyFptr, data, &error);
2819 
2820  detail::errHandler(error, __CREATE_CONTEXT_ERR);
2821  if (err != NULL) {
2822  *err = error;
2823  }
2824  }
2825 
2831  cl_device_type type,
2832  cl_context_properties* properties = NULL,
2833  void (CL_CALLBACK * notifyFptr)(
2834  const char *,
2835  const void *,
2836  size_type,
2837  void *) = NULL,
2838  void* data = NULL,
2839  cl_int* err = NULL)
2840  {
2841  cl_int error;
2842 
2843 #if !defined(__APPLE__) && !defined(__MACOS)
2844  cl_context_properties prop[4] = {CL_CONTEXT_PLATFORM, 0, 0, 0 };
2845 
2846  if (properties == NULL) {
2847  // Get a valid platform ID as we cannot send in a blank one
2848  vector<Platform> platforms;
2849  error = Platform::get(&platforms);
2850  if (error != CL_SUCCESS) {
2851  detail::errHandler(error, __CREATE_CONTEXT_FROM_TYPE_ERR);
2852  if (err != NULL) {
2853  *err = error;
2854  }
2855  return;
2856  }
2857 
2858  // Check the platforms we found for a device of our specified type
2859  cl_context_properties platform_id = 0;
2860  for (unsigned int i = 0; i < platforms.size(); i++) {
2861 
2862  vector<Device> devices;
2863 
2864 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2865  try {
2866 #endif
2867 
2868  error = platforms[i].getDevices(type, &devices);
2869 
2870 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
2871  } catch (cl::Error& e) {
2872  error = e.err();
2873  }
2874  // Catch if exceptions are enabled as we don't want to exit if first platform has no devices of type
2875  // We do error checking next anyway, and can throw there if needed
2876 #endif
2877 
2878  // Only squash CL_SUCCESS and CL_DEVICE_NOT_FOUND
2879  if (error != CL_SUCCESS && error != CL_DEVICE_NOT_FOUND) {
2880  detail::errHandler(error, __CREATE_CONTEXT_FROM_TYPE_ERR);
2881  if (err != NULL) {
2882  *err = error;
2883  }
2884  }
2885 
2886  if (devices.size() > 0) {
2887  platform_id = (cl_context_properties)platforms[i]();
2888  break;
2889  }
2890  }
2891 
2892  if (platform_id == 0) {
2893  detail::errHandler(CL_DEVICE_NOT_FOUND, __CREATE_CONTEXT_FROM_TYPE_ERR);
2894  if (err != NULL) {
2895  *err = CL_DEVICE_NOT_FOUND;
2896  }
2897  return;
2898  }
2899 
2900  prop[1] = platform_id;
2901  properties = &prop[0];
2902  }
2903 #endif
2904  object_ = ::clCreateContextFromType(
2905  properties, type, notifyFptr, data, &error);
2906 
2907  detail::errHandler(error, __CREATE_CONTEXT_FROM_TYPE_ERR);
2908  if (err != NULL) {
2909  *err = error;
2910  }
2911  }
2912 
2916  Context(const Context& ctx) : detail::Wrapper<cl_type>(ctx) {}
2917 
2921  Context& operator = (const Context &ctx)
2922  {
2924  return *this;
2925  }
2926 
2930  Context(Context&& ctx) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(ctx)) {}
2931 
2935  Context& operator = (Context &&ctx)
2936  {
2937  detail::Wrapper<cl_type>::operator=(std::move(ctx));
2938  return *this;
2939  }
2940 
2941 
2946  static Context getDefault(cl_int * err = NULL)
2947  {
2948  std::call_once(default_initialized_, makeDefault);
2949  detail::errHandler(default_error_);
2950  if (err != NULL) {
2951  *err = default_error_;
2952  }
2953  return default_;
2954  }
2955 
2963  static Context setDefault(const Context &default_context)
2964  {
2965  std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_context));
2966  detail::errHandler(default_error_);
2967  return default_;
2968  }
2969 
2971  Context() : detail::Wrapper<cl_type>() { }
2972 
2978  explicit Context(const cl_context& context, bool retainObject = false) :
2979  detail::Wrapper<cl_type>(context, retainObject) { }
2980 
2986  Context& operator = (const cl_context& rhs)
2987  {
2989  return *this;
2990  }
2991 
2993  template <typename T>
2994  cl_int getInfo(cl_context_info name, T* param) const
2995  {
2996  return detail::errHandler(
2997  detail::getInfo(&::clGetContextInfo, object_, name, param),
2998  __GET_CONTEXT_INFO_ERR);
2999  }
3000 
3002  template <cl_int name> typename
3004  getInfo(cl_int* err = NULL) const
3005  {
3006  typename detail::param_traits<
3007  detail::cl_context_info, name>::param_type param;
3008  cl_int result = getInfo(name, &param);
3009  if (err != NULL) {
3010  *err = result;
3011  }
3012  return param;
3013  }
3014 
3020  cl_mem_flags flags,
3021  cl_mem_object_type type,
3022  vector<ImageFormat>* formats) const
3023  {
3024  cl_uint numEntries;
3025 
3026  if (!formats) {
3027  return CL_SUCCESS;
3028  }
3029 
3030  cl_int err = ::clGetSupportedImageFormats(
3031  object_,
3032  flags,
3033  type,
3034  0,
3035  NULL,
3036  &numEntries);
3037  if (err != CL_SUCCESS) {
3038  return detail::errHandler(err, __GET_SUPPORTED_IMAGE_FORMATS_ERR);
3039  }
3040 
3041  if (numEntries > 0) {
3042  vector<ImageFormat> value(numEntries);
3043  err = ::clGetSupportedImageFormats(
3044  object_,
3045  flags,
3046  type,
3047  numEntries,
3048  (cl_image_format*)value.data(),
3049  NULL);
3050  if (err != CL_SUCCESS) {
3051  return detail::errHandler(err, __GET_SUPPORTED_IMAGE_FORMATS_ERR);
3052  }
3053 
3054  formats->assign(begin(value), end(value));
3055  }
3056  else {
3057  // If no values are being returned, ensure an empty vector comes back
3058  formats->clear();
3059  }
3060 
3061  return CL_SUCCESS;
3062  }
3063 };
3064 
3065 inline void Device::makeDefault()
3066 {
3067  /* Throwing an exception from a call_once invocation does not do
3068  * what we wish, so we catch it and save the error.
3069  */
3070 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
3071  try
3072 #endif
3073  {
3074  cl_int error = 0;
3075 
3076  Context context = Context::getDefault(&error);
3077  detail::errHandler(error, __CREATE_CONTEXT_ERR);
3078 
3079  if (error != CL_SUCCESS) {
3080  default_error_ = error;
3081  }
3082  else {
3083  default_ = context.getInfo<CL_CONTEXT_DEVICES>()[0];
3084  default_error_ = CL_SUCCESS;
3085  }
3086  }
3087 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
3088  catch (cl::Error &e) {
3089  default_error_ = e.err();
3090  }
3091 #endif
3092 }
3093 
3094 CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag Context::default_initialized_;
3095 CL_HPP_DEFINE_STATIC_MEMBER_ Context Context::default_;
3096 CL_HPP_DEFINE_STATIC_MEMBER_ cl_int Context::default_error_ = CL_SUCCESS;
3097 
3106 class Event : public detail::Wrapper<cl_event>
3107 {
3108 public:
3110  Event() : detail::Wrapper<cl_type>() { }
3111 
3120  explicit Event(const cl_event& event, bool retainObject = false) :
3121  detail::Wrapper<cl_type>(event, retainObject) { }
3122 
3128  Event& operator = (const cl_event& rhs)
3129  {
3131  return *this;
3132  }
3133 
3135  template <typename T>
3136  cl_int getInfo(cl_event_info name, T* param) const
3137  {
3138  return detail::errHandler(
3139  detail::getInfo(&::clGetEventInfo, object_, name, param),
3140  __GET_EVENT_INFO_ERR);
3141  }
3142 
3144  template <cl_int name> typename
3146  getInfo(cl_int* err = NULL) const
3147  {
3148  typename detail::param_traits<
3149  detail::cl_event_info, name>::param_type param;
3150  cl_int result = getInfo(name, &param);
3151  if (err != NULL) {
3152  *err = result;
3153  }
3154  return param;
3155  }
3156 
3158  template <typename T>
3159  cl_int getProfilingInfo(cl_profiling_info name, T* param) const
3160  {
3161  return detail::errHandler(detail::getInfo(
3162  &::clGetEventProfilingInfo, object_, name, param),
3163  __GET_EVENT_PROFILE_INFO_ERR);
3164  }
3165 
3167  template <cl_int name> typename
3169  getProfilingInfo(cl_int* err = NULL) const
3170  {
3171  typename detail::param_traits<
3172  detail::cl_profiling_info, name>::param_type param;
3173  cl_int result = getProfilingInfo(name, &param);
3174  if (err != NULL) {
3175  *err = result;
3176  }
3177  return param;
3178  }
3179 
3184  cl_int wait() const
3185  {
3186  return detail::errHandler(
3187  ::clWaitForEvents(1, &object_),
3188  __WAIT_FOR_EVENTS_ERR);
3189  }
3190 
3191 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
3192 
3196  cl_int setCallback(
3197  cl_int type,
3198  void (CL_CALLBACK * pfn_notify)(cl_event, cl_int, void *),
3199  void * user_data = NULL)
3200  {
3201  return detail::errHandler(
3202  ::clSetEventCallback(
3203  object_,
3204  type,
3205  pfn_notify,
3206  user_data),
3207  __SET_EVENT_CALLBACK_ERR);
3208  }
3209 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
3210 
3215  static cl_int
3216  waitForEvents(const vector<Event>& events)
3217  {
3218  return detail::errHandler(
3219  ::clWaitForEvents(
3220  (cl_uint) events.size(), (events.size() > 0) ? (cl_event*)&events.front() : NULL),
3221  __WAIT_FOR_EVENTS_ERR);
3222  }
3223 };
3224 
3225 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
3226 
3230 class UserEvent : public Event
3231 {
3232 public:
3238  const Context& context,
3239  cl_int * err = NULL)
3240  {
3241  cl_int error;
3242  object_ = ::clCreateUserEvent(
3243  context(),
3244  &error);
3245 
3246  detail::errHandler(error, __CREATE_USER_EVENT_ERR);
3247  if (err != NULL) {
3248  *err = error;
3249  }
3250  }
3251 
3253  UserEvent() : Event() { }
3254 
3259  cl_int setStatus(cl_int status)
3260  {
3261  return detail::errHandler(
3262  ::clSetUserEventStatus(object_,status),
3263  __SET_USER_EVENT_STATUS_ERR);
3264  }
3265 };
3266 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
3267 
3272 inline static cl_int
3273 WaitForEvents(const vector<Event>& events)
3274 {
3275  return detail::errHandler(
3276  ::clWaitForEvents(
3277  (cl_uint) events.size(), (events.size() > 0) ? (cl_event*)&events.front() : NULL),
3278  __WAIT_FOR_EVENTS_ERR);
3279 }
3280 
3289 class Memory : public detail::Wrapper<cl_mem>
3290 {
3291 public:
3293  Memory() : detail::Wrapper<cl_type>() { }
3294 
3306  explicit Memory(const cl_mem& memory, bool retainObject) :
3307  detail::Wrapper<cl_type>(memory, retainObject) { }
3308 
3314  Memory& operator = (const cl_mem& rhs)
3315  {
3317  return *this;
3318  }
3319 
3323  Memory(const Memory& mem) : detail::Wrapper<cl_type>(mem) {}
3324 
3328  Memory& operator = (const Memory &mem)
3329  {
3331  return *this;
3332  }
3333 
3337  Memory(Memory&& mem) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(mem)) {}
3338 
3342  Memory& operator = (Memory &&mem)
3343  {
3344  detail::Wrapper<cl_type>::operator=(std::move(mem));
3345  return *this;
3346  }
3347 
3348 
3350  template <typename T>
3351  cl_int getInfo(cl_mem_info name, T* param) const
3352  {
3353  return detail::errHandler(
3354  detail::getInfo(&::clGetMemObjectInfo, object_, name, param),
3355  __GET_MEM_OBJECT_INFO_ERR);
3356  }
3357 
3359  template <cl_int name> typename
3361  getInfo(cl_int* err = NULL) const
3362  {
3363  typename detail::param_traits<
3364  detail::cl_mem_info, name>::param_type param;
3365  cl_int result = getInfo(name, &param);
3366  if (err != NULL) {
3367  *err = result;
3368  }
3369  return param;
3370  }
3371 
3372 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
3373 
3387  void (CL_CALLBACK * pfn_notify)(cl_mem, void *),
3388  void * user_data = NULL)
3389  {
3390  return detail::errHandler(
3391  ::clSetMemObjectDestructorCallback(
3392  object_,
3393  pfn_notify,
3394  user_data),
3395  __SET_MEM_OBJECT_DESTRUCTOR_CALLBACK_ERR);
3396  }
3397 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
3398 
3399 };
3400 
3401 // Pre-declare copy functions
3402 class Buffer;
3403 template< typename IteratorType >
3404 cl_int copy( IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer );
3405 template< typename IteratorType >
3406 cl_int copy( const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator );
3407 template< typename IteratorType >
3408 cl_int copy( const CommandQueue &queue, IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer );
3409 template< typename IteratorType >
3410 cl_int copy( const CommandQueue &queue, const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator );
3411 
3412 
3413 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
3414 namespace detail
3415 {
3417  {
3418  public:
3419  static cl_svm_mem_flags getSVMMemFlags()
3420  {
3421  return 0;
3422  }
3423  };
3424 } // namespace detail
3425 
3426 template<class Trait = detail::SVMTraitNull>
3428 {
3429 public:
3430  static cl_svm_mem_flags getSVMMemFlags()
3431  {
3432  return CL_MEM_READ_WRITE |
3433  Trait::getSVMMemFlags();
3434  }
3435 };
3436 
3437 template<class Trait = detail::SVMTraitNull>
3439 {
3440 public:
3441  static cl_svm_mem_flags getSVMMemFlags()
3442  {
3443  return CL_MEM_READ_ONLY |
3444  Trait::getSVMMemFlags();
3445  }
3446 };
3447 
3448 template<class Trait = detail::SVMTraitNull>
3450 {
3451 public:
3452  static cl_svm_mem_flags getSVMMemFlags()
3453  {
3454  return CL_MEM_WRITE_ONLY |
3455  Trait::getSVMMemFlags();
3456  }
3457 };
3458 
3459 template<class Trait = SVMTraitReadWrite<>>
3461 {
3462 public:
3463  static cl_svm_mem_flags getSVMMemFlags()
3464  {
3465  return Trait::getSVMMemFlags();
3466  }
3467 };
3468 
3469 template<class Trait = SVMTraitReadWrite<>>
3471 {
3472 public:
3473  static cl_svm_mem_flags getSVMMemFlags()
3474  {
3475  return CL_MEM_SVM_FINE_GRAIN_BUFFER |
3476  Trait::getSVMMemFlags();
3477  }
3478 };
3479 
3480 template<class Trait = SVMTraitReadWrite<>>
3482 {
3483 public:
3484  static cl_svm_mem_flags getSVMMemFlags()
3485  {
3486  return
3487  CL_MEM_SVM_FINE_GRAIN_BUFFER |
3488  CL_MEM_SVM_ATOMICS |
3489  Trait::getSVMMemFlags();
3490  }
3491 };
3492 
3493 // Pre-declare SVM map function
3494 template<typename T>
3495 inline cl_int enqueueMapSVM(
3496  T* ptr,
3497  cl_bool blocking,
3498  cl_map_flags flags,
3499  size_type size,
3500  const vector<Event>* events = NULL,
3501  Event* event = NULL);
3502 
3514 template<typename T, class SVMTrait>
3516 private:
3517  Context context_;
3518 
3519 public:
3520  typedef T value_type;
3521  typedef value_type* pointer;
3522  typedef const value_type* const_pointer;
3523  typedef value_type& reference;
3524  typedef const value_type& const_reference;
3525  typedef std::size_t size_type;
3526  typedef std::ptrdiff_t difference_type;
3527 
3528  template<typename U>
3529  struct rebind
3530  {
3532  };
3533 
3534  template<typename U, typename V>
3535  friend class SVMAllocator;
3536 
3537  SVMAllocator() :
3538  context_(Context::getDefault())
3539  {
3540  }
3541 
3542  explicit SVMAllocator(cl::Context context) :
3543  context_(context)
3544  {
3545  }
3546 
3547 
3548  SVMAllocator(const SVMAllocator &other) :
3549  context_(other.context_)
3550  {
3551  }
3552 
3553  template<typename U>
3554  SVMAllocator(const SVMAllocator<U, SVMTrait> &other) :
3555  context_(other.context_)
3556  {
3557  }
3558 
3559  ~SVMAllocator()
3560  {
3561  }
3562 
3563  pointer address(reference r) CL_HPP_NOEXCEPT_
3564  {
3565  return std::addressof(r);
3566  }
3567 
3568  const_pointer address(const_reference r) CL_HPP_NOEXCEPT_
3569  {
3570  return std::addressof(r);
3571  }
3572 
3579  pointer allocate(
3580  size_type size,
3582  {
3583  // Allocate memory with default alignment matching the size of the type
3584  void* voidPointer =
3585  clSVMAlloc(
3586  context_(),
3587  SVMTrait::getSVMMemFlags(),
3588  size*sizeof(T),
3589  0);
3590  pointer retValue = reinterpret_cast<pointer>(
3591  voidPointer);
3592 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
3593  if (!retValue) {
3594  std::bad_alloc excep;
3595  throw excep;
3596  }
3597 #endif // #if defined(CL_HPP_ENABLE_EXCEPTIONS)
3598 
3599  // If allocation was coarse-grained then map it
3600  if (!(SVMTrait::getSVMMemFlags() & CL_MEM_SVM_FINE_GRAIN_BUFFER)) {
3601  cl_int err = enqueueMapSVM(retValue, CL_TRUE, CL_MAP_READ | CL_MAP_WRITE, size*sizeof(T));
3602  if (err != CL_SUCCESS) {
3603  std::bad_alloc excep;
3604  throw excep;
3605  }
3606  }
3607 
3608  // If exceptions disabled, return null pointer from allocator
3609  return retValue;
3610  }
3611 
3612  void deallocate(pointer p, size_type)
3613  {
3614  clSVMFree(context_(), p);
3615  }
3616 
3621  size_type max_size() const CL_HPP_NOEXCEPT_
3622  {
3623  size_type maxSize = std::numeric_limits<size_type>::max() / sizeof(T);
3624 
3625  for (const Device &d : context_.getInfo<CL_CONTEXT_DEVICES>()) {
3626  maxSize = std::min(
3627  maxSize,
3628  static_cast<size_type>(d.getInfo<CL_DEVICE_MAX_MEM_ALLOC_SIZE>()));
3629  }
3630 
3631  return maxSize;
3632  }
3633 
3634  template< class U, class... Args >
3635  void construct(U* p, Args&&... args)
3636  {
3637  new(p)T(args...);
3638  }
3639 
3640  template< class U >
3641  void destroy(U* p)
3642  {
3643  p->~U();
3644  }
3645 
3649  inline bool operator==(SVMAllocator const& rhs)
3650  {
3651  return (context_==rhs.context_);
3652  }
3653 
3654  inline bool operator!=(SVMAllocator const& a)
3655  {
3656  return !operator==(a);
3657  }
3658 }; // class SVMAllocator return cl::pointer<T>(tmp, detail::Deleter<T, Alloc>{alloc, copies});
3659 
3660 
3661 template<class SVMTrait>
3662 class SVMAllocator<void, SVMTrait> {
3663 public:
3664  typedef void value_type;
3665  typedef value_type* pointer;
3666  typedef const value_type* const_pointer;
3667 
3668  template<typename U>
3669  struct rebind
3670  {
3672  };
3673 
3674  template<typename U, typename V>
3675  friend class SVMAllocator;
3676 };
3677 
3678 #if !defined(CL_HPP_NO_STD_UNIQUE_PTR)
3679 namespace detail
3680 {
3681  template<class Alloc>
3682  class Deleter {
3683  private:
3684  Alloc alloc_;
3685  size_type copies_;
3686 
3687  public:
3688  typedef typename std::allocator_traits<Alloc>::pointer pointer;
3689 
3690  Deleter(const Alloc &alloc, size_type copies) : alloc_{ alloc }, copies_{ copies }
3691  {
3692  }
3693 
3694  void operator()(pointer ptr) const {
3695  Alloc tmpAlloc{ alloc_ };
3696  std::allocator_traits<Alloc>::destroy(tmpAlloc, std::addressof(*ptr));
3697  std::allocator_traits<Alloc>::deallocate(tmpAlloc, ptr, copies_);
3698  }
3699  };
3700 } // namespace detail
3701 
3708 template <class T, class Alloc, class... Args>
3709 cl::pointer<T, detail::Deleter<Alloc>> allocate_pointer(const Alloc &alloc_, Args&&... args)
3710 {
3711  Alloc alloc(alloc_);
3712  static const size_type copies = 1;
3713 
3714  // Ensure that creation of the management block and the
3715  // object are dealt with separately such that we only provide a deleter
3716 
3717  T* tmp = std::allocator_traits<Alloc>::allocate(alloc, copies);
3718  if (!tmp) {
3719  std::bad_alloc excep;
3720  throw excep;
3721  }
3722  try {
3723  std::allocator_traits<Alloc>::construct(
3724  alloc,
3725  std::addressof(*tmp),
3726  std::forward<Args>(args)...);
3727 
3728  return cl::pointer<T, detail::Deleter<Alloc>>(tmp, detail::Deleter<Alloc>{alloc, copies});
3729  }
3730  catch (std::bad_alloc& b)
3731  {
3732  std::allocator_traits<Alloc>::deallocate(alloc, tmp, copies);
3733  throw;
3734  }
3735 }
3736 
3737 template< class T, class SVMTrait, class... Args >
3738 cl::pointer<T, detail::Deleter<SVMAllocator<T, SVMTrait>>> allocate_svm(Args... args)
3739 {
3741  return cl::allocate_pointer<T>(alloc, args...);
3742 }
3743 
3744 template< class T, class SVMTrait, class... Args >
3745 cl::pointer<T, detail::Deleter<SVMAllocator<T, SVMTrait>>> allocate_svm(const cl::Context &c, Args... args)
3746 {
3747  SVMAllocator<T, SVMTrait> alloc(c);
3748  return cl::allocate_pointer<T>(alloc, args...);
3749 }
3750 #endif // #if !defined(CL_HPP_NO_STD_UNIQUE_PTR)
3751 
3755 template < class T >
3756 using coarse_svm_vector = vector<T, cl::SVMAllocator<int, cl::SVMTraitCoarse<>>>;
3757 
3761 template < class T >
3762 using fine_svm_vector = vector<T, cl::SVMAllocator<int, cl::SVMTraitFine<>>>;
3763 
3767 template < class T >
3768 using atomic_svm_vector = vector<T, cl::SVMAllocator<int, cl::SVMTraitAtomic<>>>;
3769 
3770 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
3771 
3772 
3779 class Buffer : public Memory
3780 {
3781 public:
3782 
3791  const Context& context,
3792  cl_mem_flags flags,
3793  size_type size,
3794  void* host_ptr = NULL,
3795  cl_int* err = NULL)
3796  {
3797  cl_int error;
3798  object_ = ::clCreateBuffer(context(), flags, size, host_ptr, &error);
3799 
3800  detail::errHandler(error, __CREATE_BUFFER_ERR);
3801  if (err != NULL) {
3802  *err = error;
3803  }
3804  }
3805 
3816  cl_mem_flags flags,
3817  size_type size,
3818  void* host_ptr = NULL,
3819  cl_int* err = NULL)
3820  {
3821  cl_int error;
3822 
3823  Context context = Context::getDefault(err);
3824 
3825  object_ = ::clCreateBuffer(context(), flags, size, host_ptr, &error);
3826 
3827  detail::errHandler(error, __CREATE_BUFFER_ERR);
3828  if (err != NULL) {
3829  *err = error;
3830  }
3831  }
3832 
3838  template< typename IteratorType >
3840  IteratorType startIterator,
3841  IteratorType endIterator,
3842  bool readOnly,
3843  bool useHostPtr = false,
3844  cl_int* err = NULL)
3845  {
3846  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
3847  cl_int error;
3848 
3849  cl_mem_flags flags = 0;
3850  if( readOnly ) {
3851  flags |= CL_MEM_READ_ONLY;
3852  }
3853  else {
3854  flags |= CL_MEM_READ_WRITE;
3855  }
3856  if( useHostPtr ) {
3857  flags |= CL_MEM_USE_HOST_PTR;
3858  }
3859 
3860  size_type size = sizeof(DataType)*(endIterator - startIterator);
3861 
3862  Context context = Context::getDefault(err);
3863 
3864  if( useHostPtr ) {
3865  object_ = ::clCreateBuffer(context(), flags, size, static_cast<DataType*>(&*startIterator), &error);
3866  } else {
3867  object_ = ::clCreateBuffer(context(), flags, size, 0, &error);
3868  }
3869 
3870  detail::errHandler(error, __CREATE_BUFFER_ERR);
3871  if (err != NULL) {
3872  *err = error;
3873  }
3874 
3875  if( !useHostPtr ) {
3876  error = cl::copy(startIterator, endIterator, *this);
3877  detail::errHandler(error, __CREATE_BUFFER_ERR);
3878  if (err != NULL) {
3879  *err = error;
3880  }
3881  }
3882  }
3883 
3889  template< typename IteratorType >
3890  Buffer(const Context &context, IteratorType startIterator, IteratorType endIterator,
3891  bool readOnly, bool useHostPtr = false, cl_int* err = NULL);
3892 
3897  template< typename IteratorType >
3898  Buffer(const CommandQueue &queue, IteratorType startIterator, IteratorType endIterator,
3899  bool readOnly, bool useHostPtr = false, cl_int* err = NULL);
3900 
3902  Buffer() : Memory() { }
3903 
3911  explicit Buffer(const cl_mem& buffer, bool retainObject = false) :
3912  Memory(buffer, retainObject) { }
3913 
3918  Buffer& operator = (const cl_mem& rhs)
3919  {
3920  Memory::operator=(rhs);
3921  return *this;
3922  }
3923 
3927  Buffer(const Buffer& buf) : Memory(buf) {}
3928 
3932  Buffer& operator = (const Buffer &buf)
3933  {
3934  Memory::operator=(buf);
3935  return *this;
3936  }
3937 
3941  Buffer(Buffer&& buf) CL_HPP_NOEXCEPT_ : Memory(std::move(buf)) {}
3942 
3946  Buffer& operator = (Buffer &&buf)
3947  {
3948  Memory::operator=(std::move(buf));
3949  return *this;
3950  }
3951 
3952 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
3953 
3958  cl_mem_flags flags,
3959  cl_buffer_create_type buffer_create_type,
3960  const void * buffer_create_info,
3961  cl_int * err = NULL)
3962  {
3963  Buffer result;
3964  cl_int error;
3965  result.object_ = ::clCreateSubBuffer(
3966  object_,
3967  flags,
3968  buffer_create_type,
3969  buffer_create_info,
3970  &error);
3971 
3972  detail::errHandler(error, __CREATE_SUBBUFFER_ERR);
3973  if (err != NULL) {
3974  *err = error;
3975  }
3976 
3977  return result;
3978  }
3979 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
3980 };
3981 
3982 #if defined (CL_HPP_USE_DX_INTEROP)
3983 
3991 class BufferD3D10 : public Buffer
3992 {
3993 public:
3994 
3995 
4001  BufferD3D10(
4002  const Context& context,
4003  cl_mem_flags flags,
4004  ID3D10Buffer* bufobj,
4005  cl_int * err = NULL) : pfn_clCreateFromD3D10BufferKHR(nullptr)
4006  {
4007  typedef CL_API_ENTRY cl_mem (CL_API_CALL *PFN_clCreateFromD3D10BufferKHR)(
4008  cl_context context, cl_mem_flags flags, ID3D10Buffer* buffer,
4009  cl_int* errcode_ret);
4010  PFN_clCreateFromD3D10BufferKHR pfn_clCreateFromD3D10BufferKHR;
4011 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4012  vector<cl_context_properties> props = context.getInfo<CL_CONTEXT_PROPERTIES>();
4013  cl_platform platform = -1;
4014  for( int i = 0; i < props.size(); ++i ) {
4015  if( props[i] == CL_CONTEXT_PLATFORM ) {
4016  platform = props[i+1];
4017  }
4018  }
4019  CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, clCreateFromD3D10BufferKHR);
4020 #elif CL_HPP_TARGET_OPENCL_VERSION >= 110
4021  CL_HPP_INIT_CL_EXT_FCN_PTR_(clCreateFromD3D10BufferKHR);
4022 #endif
4023 
4024  cl_int error;
4025  object_ = pfn_clCreateFromD3D10BufferKHR(
4026  context(),
4027  flags,
4028  bufobj,
4029  &error);
4030 
4031  detail::errHandler(error, __CREATE_GL_BUFFER_ERR);
4032  if (err != NULL) {
4033  *err = error;
4034  }
4035  }
4036 
4038  BufferD3D10() : Buffer() { }
4039 
4047  explicit BufferD3D10(const cl_mem& buffer, bool retainObject = false) :
4048  Buffer(buffer, retainObject) { }
4049 
4054  BufferD3D10& operator = (const cl_mem& rhs)
4055  {
4056  Buffer::operator=(rhs);
4057  return *this;
4058  }
4059 
4063  BufferD3D10(const BufferD3D10& buf) :
4064  Buffer(buf) {}
4065 
4069  BufferD3D10& operator = (const BufferD3D10 &buf)
4070  {
4071  Buffer::operator=(buf);
4072  return *this;
4073  }
4074 
4078  BufferD3D10(BufferD3D10&& buf) CL_HPP_NOEXCEPT_ : Buffer(std::move(buf)) {}
4079 
4083  BufferD3D10& operator = (BufferD3D10 &&buf)
4084  {
4085  Buffer::operator=(std::move(buf));
4086  return *this;
4087  }
4088 };
4089 #endif
4090 
4099 class BufferGL : public Buffer
4100 {
4101 public:
4108  const Context& context,
4109  cl_mem_flags flags,
4110  cl_GLuint bufobj,
4111  cl_int * err = NULL)
4112  {
4113  cl_int error;
4114  object_ = ::clCreateFromGLBuffer(
4115  context(),
4116  flags,
4117  bufobj,
4118  &error);
4119 
4120  detail::errHandler(error, __CREATE_GL_BUFFER_ERR);
4121  if (err != NULL) {
4122  *err = error;
4123  }
4124  }
4125 
4127  BufferGL() : Buffer() { }
4128 
4136  explicit BufferGL(const cl_mem& buffer, bool retainObject = false) :
4137  Buffer(buffer, retainObject) { }
4138 
4143  BufferGL& operator = (const cl_mem& rhs)
4144  {
4145  Buffer::operator=(rhs);
4146  return *this;
4147  }
4148 
4152  BufferGL(const BufferGL& buf) : Buffer(buf) {}
4153 
4157  BufferGL& operator = (const BufferGL &buf)
4158  {
4159  Buffer::operator=(buf);
4160  return *this;
4161  }
4162 
4166  BufferGL(BufferGL&& buf) CL_HPP_NOEXCEPT_ : Buffer(std::move(buf)) {}
4167 
4171  BufferGL& operator = (BufferGL &&buf)
4172  {
4173  Buffer::operator=(std::move(buf));
4174  return *this;
4175  }
4176 
4179  cl_gl_object_type *type,
4180  cl_GLuint * gl_object_name)
4181  {
4182  return detail::errHandler(
4183  ::clGetGLObjectInfo(object_,type,gl_object_name),
4184  __GET_GL_OBJECT_INFO_ERR);
4185  }
4186 };
4187 
4196 class BufferRenderGL : public Buffer
4197 {
4198 public:
4205  const Context& context,
4206  cl_mem_flags flags,
4207  cl_GLuint bufobj,
4208  cl_int * err = NULL)
4209  {
4210  cl_int error;
4211  object_ = ::clCreateFromGLRenderbuffer(
4212  context(),
4213  flags,
4214  bufobj,
4215  &error);
4216 
4217  detail::errHandler(error, __CREATE_GL_RENDER_BUFFER_ERR);
4218  if (err != NULL) {
4219  *err = error;
4220  }
4221  }
4222 
4225 
4233  explicit BufferRenderGL(const cl_mem& buffer, bool retainObject = false) :
4234  Buffer(buffer, retainObject) { }
4235 
4240  BufferRenderGL& operator = (const cl_mem& rhs)
4241  {
4242  Buffer::operator=(rhs);
4243  return *this;
4244  }
4245 
4249  BufferRenderGL(const BufferRenderGL& buf) : Buffer(buf) {}
4250 
4254  BufferRenderGL& operator = (const BufferRenderGL &buf)
4255  {
4256  Buffer::operator=(buf);
4257  return *this;
4258  }
4259 
4263  BufferRenderGL(BufferRenderGL&& buf) CL_HPP_NOEXCEPT_ : Buffer(std::move(buf)) {}
4264 
4268  BufferRenderGL& operator = (BufferRenderGL &&buf)
4269  {
4270  Buffer::operator=(std::move(buf));
4271  return *this;
4272  }
4273 
4276  cl_gl_object_type *type,
4277  cl_GLuint * gl_object_name)
4278  {
4279  return detail::errHandler(
4280  ::clGetGLObjectInfo(object_,type,gl_object_name),
4281  __GET_GL_OBJECT_INFO_ERR);
4282  }
4283 };
4284 
4291 class Image : public Memory
4292 {
4293 protected:
4295  Image() : Memory() { }
4296 
4304  explicit Image(const cl_mem& image, bool retainObject = false) :
4305  Memory(image, retainObject) { }
4306 
4311  Image& operator = (const cl_mem& rhs)
4312  {
4313  Memory::operator=(rhs);
4314  return *this;
4315  }
4316 
4320  Image(const Image& img) : Memory(img) {}
4321 
4325  Image& operator = (const Image &img)
4326  {
4327  Memory::operator=(img);
4328  return *this;
4329  }
4330 
4334  Image(Image&& img) CL_HPP_NOEXCEPT_ : Memory(std::move(img)) {}
4335 
4339  Image& operator = (Image &&img)
4340  {
4341  Memory::operator=(std::move(img));
4342  return *this;
4343  }
4344 
4345 
4346 public:
4348  template <typename T>
4349  cl_int getImageInfo(cl_image_info name, T* param) const
4350  {
4351  return detail::errHandler(
4352  detail::getInfo(&::clGetImageInfo, object_, name, param),
4353  __GET_IMAGE_INFO_ERR);
4354  }
4355 
4357  template <cl_int name> typename
4359  getImageInfo(cl_int* err = NULL) const
4360  {
4361  typename detail::param_traits<
4362  detail::cl_image_info, name>::param_type param;
4363  cl_int result = getImageInfo(name, &param);
4364  if (err != NULL) {
4365  *err = result;
4366  }
4367  return param;
4368  }
4369 };
4370 
4371 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4372 
4378 class Image1D : public Image
4379 {
4380 public:
4386  const Context& context,
4387  cl_mem_flags flags,
4388  ImageFormat format,
4389  size_type width,
4390  void* host_ptr = NULL,
4391  cl_int* err = NULL)
4392  {
4393  cl_int error;
4394  cl_image_desc desc =
4395  {
4396  CL_MEM_OBJECT_IMAGE1D,
4397  width,
4398  0, 0, 0, 0, 0, 0, 0, 0
4399  };
4400  object_ = ::clCreateImage(
4401  context(),
4402  flags,
4403  &format,
4404  &desc,
4405  host_ptr,
4406  &error);
4407 
4408  detail::errHandler(error, __CREATE_IMAGE_ERR);
4409  if (err != NULL) {
4410  *err = error;
4411  }
4412  }
4413 
4415  Image1D() { }
4416 
4424  explicit Image1D(const cl_mem& image1D, bool retainObject = false) :
4425  Image(image1D, retainObject) { }
4426 
4431  Image1D& operator = (const cl_mem& rhs)
4432  {
4433  Image::operator=(rhs);
4434  return *this;
4435  }
4436 
4440  Image1D(const Image1D& img) : Image(img) {}
4441 
4445  Image1D& operator = (const Image1D &img)
4446  {
4447  Image::operator=(img);
4448  return *this;
4449  }
4450 
4454  Image1D(Image1D&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4455 
4459  Image1D& operator = (Image1D &&img)
4460  {
4461  Image::operator=(std::move(img));
4462  return *this;
4463  }
4464 
4465 };
4466 
4470 class Image1DBuffer : public Image
4471 {
4472 public:
4473  Image1DBuffer(
4474  const Context& context,
4475  cl_mem_flags flags,
4476  ImageFormat format,
4477  size_type width,
4478  const Buffer &buffer,
4479  cl_int* err = NULL)
4480  {
4481  cl_int error;
4482  cl_image_desc desc =
4483  {
4484  CL_MEM_OBJECT_IMAGE1D_BUFFER,
4485  width,
4486  0, 0, 0, 0, 0, 0, 0,
4487  buffer()
4488  };
4489  object_ = ::clCreateImage(
4490  context(),
4491  flags,
4492  &format,
4493  &desc,
4494  NULL,
4495  &error);
4496 
4497  detail::errHandler(error, __CREATE_IMAGE_ERR);
4498  if (err != NULL) {
4499  *err = error;
4500  }
4501  }
4502 
4503  Image1DBuffer() { }
4504 
4512  explicit Image1DBuffer(const cl_mem& image1D, bool retainObject = false) :
4513  Image(image1D, retainObject) { }
4514 
4515  Image1DBuffer& operator = (const cl_mem& rhs)
4516  {
4517  Image::operator=(rhs);
4518  return *this;
4519  }
4520 
4524  Image1DBuffer(const Image1DBuffer& img) : Image(img) {}
4525 
4529  Image1DBuffer& operator = (const Image1DBuffer &img)
4530  {
4531  Image::operator=(img);
4532  return *this;
4533  }
4534 
4538  Image1DBuffer(Image1DBuffer&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4539 
4543  Image1DBuffer& operator = (Image1DBuffer &&img)
4544  {
4545  Image::operator=(std::move(img));
4546  return *this;
4547  }
4548 
4549 };
4550 
4554 class Image1DArray : public Image
4555 {
4556 public:
4557  Image1DArray(
4558  const Context& context,
4559  cl_mem_flags flags,
4560  ImageFormat format,
4561  size_type arraySize,
4562  size_type width,
4563  size_type rowPitch,
4564  void* host_ptr = NULL,
4565  cl_int* err = NULL)
4566  {
4567  cl_int error;
4568  cl_image_desc desc =
4569  {
4570  CL_MEM_OBJECT_IMAGE1D_ARRAY,
4571  width,
4572  0, 0, // height, depth (unused)
4573  arraySize,
4574  rowPitch,
4575  0, 0, 0, 0
4576  };
4577  object_ = ::clCreateImage(
4578  context(),
4579  flags,
4580  &format,
4581  &desc,
4582  host_ptr,
4583  &error);
4584 
4585  detail::errHandler(error, __CREATE_IMAGE_ERR);
4586  if (err != NULL) {
4587  *err = error;
4588  }
4589  }
4590 
4591  Image1DArray() { }
4592 
4600  explicit Image1DArray(const cl_mem& imageArray, bool retainObject = false) :
4601  Image(imageArray, retainObject) { }
4602 
4603 
4604  Image1DArray& operator = (const cl_mem& rhs)
4605  {
4606  Image::operator=(rhs);
4607  return *this;
4608  }
4609 
4613  Image1DArray(const Image1DArray& img) : Image(img) {}
4614 
4618  Image1DArray& operator = (const Image1DArray &img)
4619  {
4620  Image::operator=(img);
4621  return *this;
4622  }
4623 
4627  Image1DArray(Image1DArray&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4628 
4632  Image1DArray& operator = (Image1DArray &&img)
4633  {
4634  Image::operator=(std::move(img));
4635  return *this;
4636  }
4637 
4638 };
4639 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4640 
4641 
4648 class Image2D : public Image
4649 {
4650 public:
4656  const Context& context,
4657  cl_mem_flags flags,
4658  ImageFormat format,
4659  size_type width,
4660  size_type height,
4661  size_type row_pitch = 0,
4662  void* host_ptr = NULL,
4663  cl_int* err = NULL)
4664  {
4665  cl_int error;
4666  bool useCreateImage;
4667 
4668 #if CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
4669  // Run-time decision based on the actual platform
4670  {
4671  cl_uint version = detail::getContextPlatformVersion(context());
4672  useCreateImage = (version >= 0x10002); // OpenCL 1.2 or above
4673  }
4674 #elif CL_HPP_TARGET_OPENCL_VERSION >= 120
4675  useCreateImage = true;
4676 #else
4677  useCreateImage = false;
4678 #endif
4679 
4680 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4681  if (useCreateImage)
4682  {
4683  cl_image_desc desc =
4684  {
4685  CL_MEM_OBJECT_IMAGE2D,
4686  width,
4687  height,
4688  0, 0, // depth, array size (unused)
4689  row_pitch,
4690  0, 0, 0, 0
4691  };
4692  object_ = ::clCreateImage(
4693  context(),
4694  flags,
4695  &format,
4696  &desc,
4697  host_ptr,
4698  &error);
4699 
4700  detail::errHandler(error, __CREATE_IMAGE_ERR);
4701  if (err != NULL) {
4702  *err = error;
4703  }
4704  }
4705 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
4706 #if CL_HPP_MINIMUM_OPENCL_VERSION < 120
4707  if (!useCreateImage)
4708  {
4709  object_ = ::clCreateImage2D(
4710  context(), flags,&format, width, height, row_pitch, host_ptr, &error);
4711 
4712  detail::errHandler(error, __CREATE_IMAGE2D_ERR);
4713  if (err != NULL) {
4714  *err = error;
4715  }
4716  }
4717 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 120
4718  }
4719 
4720 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 || defined(CL_HPP_USE_CL_IMAGE2D_FROM_BUFFER_KHR)
4721 
4727  const Context& context,
4728  ImageFormat format,
4729  const Buffer &sourceBuffer,
4730  size_type width,
4731  size_type height,
4732  size_type row_pitch = 0,
4733  cl_int* err = nullptr)
4734  {
4735  cl_int error;
4736 
4737  cl_image_desc desc =
4738  {
4739  CL_MEM_OBJECT_IMAGE2D,
4740  width,
4741  height,
4742  0, 0, // depth, array size (unused)
4743  row_pitch,
4744  0, 0, 0,
4745  // Use buffer as input to image
4746  sourceBuffer()
4747  };
4748  object_ = ::clCreateImage(
4749  context(),
4750  0, // flags inherited from buffer
4751  &format,
4752  &desc,
4753  nullptr,
4754  &error);
4755 
4756  detail::errHandler(error, __CREATE_IMAGE_ERR);
4757  if (err != nullptr) {
4758  *err = error;
4759  }
4760  }
4761 #endif //#if CL_HPP_TARGET_OPENCL_VERSION >= 200 || defined(CL_HPP_USE_CL_IMAGE2D_FROM_BUFFER_KHR)
4762 
4763 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
4764 
4777  const Context& context,
4778  cl_channel_order order,
4779  const Image &sourceImage,
4780  cl_int* err = nullptr)
4781  {
4782  cl_int error;
4783 
4784  // Descriptor fields have to match source image
4785  size_type sourceWidth =
4786  sourceImage.getImageInfo<CL_IMAGE_WIDTH>();
4787  size_type sourceHeight =
4788  sourceImage.getImageInfo<CL_IMAGE_HEIGHT>();
4789  size_type sourceRowPitch =
4790  sourceImage.getImageInfo<CL_IMAGE_ROW_PITCH>();
4791  cl_uint sourceNumMIPLevels =
4792  sourceImage.getImageInfo<CL_IMAGE_NUM_MIP_LEVELS>();
4793  cl_uint sourceNumSamples =
4794  sourceImage.getImageInfo<CL_IMAGE_NUM_SAMPLES>();
4795  cl_image_format sourceFormat =
4796  sourceImage.getImageInfo<CL_IMAGE_FORMAT>();
4797 
4798  // Update only the channel order.
4799  // Channel format inherited from source.
4800  sourceFormat.image_channel_order = order;
4801  cl_image_desc desc =
4802  {
4803  CL_MEM_OBJECT_IMAGE2D,
4804  sourceWidth,
4805  sourceHeight,
4806  0, 0, // depth (unused), array size (unused)
4807  sourceRowPitch,
4808  0, // slice pitch (unused)
4809  sourceNumMIPLevels,
4810  sourceNumSamples,
4811  // Use buffer as input to image
4812  sourceImage()
4813  };
4814  object_ = ::clCreateImage(
4815  context(),
4816  0, // flags should be inherited from mem_object
4817  &sourceFormat,
4818  &desc,
4819  nullptr,
4820  &error);
4821 
4822  detail::errHandler(error, __CREATE_IMAGE_ERR);
4823  if (err != nullptr) {
4824  *err = error;
4825  }
4826  }
4827 #endif //#if CL_HPP_TARGET_OPENCL_VERSION >= 200
4828 
4830  Image2D() { }
4831 
4839  explicit Image2D(const cl_mem& image2D, bool retainObject = false) :
4840  Image(image2D, retainObject) { }
4841 
4846  Image2D& operator = (const cl_mem& rhs)
4847  {
4848  Image::operator=(rhs);
4849  return *this;
4850  }
4851 
4855  Image2D(const Image2D& img) : Image(img) {}
4856 
4860  Image2D& operator = (const Image2D &img)
4861  {
4862  Image::operator=(img);
4863  return *this;
4864  }
4865 
4869  Image2D(Image2D&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
4870 
4874  Image2D& operator = (Image2D &&img)
4875  {
4876  Image::operator=(std::move(img));
4877  return *this;
4878  }
4879 
4880 };
4881 
4882 
4883 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
4884 
4893 class CL_EXT_PREFIX__VERSION_1_1_DEPRECATED Image2DGL : public Image2D
4894 {
4895 public:
4902  const Context& context,
4903  cl_mem_flags flags,
4904  cl_GLenum target,
4905  cl_GLint miplevel,
4906  cl_GLuint texobj,
4907  cl_int * err = NULL)
4908  {
4909  cl_int error;
4910  object_ = ::clCreateFromGLTexture2D(
4911  context(),
4912  flags,
4913  target,
4914  miplevel,
4915  texobj,
4916  &error);
4917 
4918  detail::errHandler(error, __CREATE_GL_TEXTURE_2D_ERR);
4919  if (err != NULL) {
4920  *err = error;
4921  }
4922 
4923  }
4924 
4926  Image2DGL() : Image2D() { }
4927 
4935  explicit Image2DGL(const cl_mem& image, bool retainObject = false) :
4936  Image2D(image, retainObject) { }
4937 
4942  Image2DGL& operator = (const cl_mem& rhs)
4943  {
4944  Image2D::operator=(rhs);
4945  return *this;
4946  }
4947 
4951  Image2DGL(const Image2DGL& img) : Image2D(img) {}
4952 
4956  Image2DGL& operator = (const Image2DGL &img)
4957  {
4958  Image2D::operator=(img);
4959  return *this;
4960  }
4961 
4965  Image2DGL(Image2DGL&& img) CL_HPP_NOEXCEPT_ : Image2D(std::move(img)) {}
4966 
4970  Image2DGL& operator = (Image2DGL &&img)
4971  {
4972  Image2D::operator=(std::move(img));
4973  return *this;
4974  }
4975 
4976 } CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED;
4977 #endif // CL_USE_DEPRECATED_OPENCL_1_1_APIS
4978 
4979 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
4980 
4983 class Image2DArray : public Image
4984 {
4985 public:
4986  Image2DArray(
4987  const Context& context,
4988  cl_mem_flags flags,
4989  ImageFormat format,
4990  size_type arraySize,
4991  size_type width,
4992  size_type height,
4993  size_type rowPitch,
4994  size_type slicePitch,
4995  void* host_ptr = NULL,
4996  cl_int* err = NULL)
4997  {
4998  cl_int error;
4999  cl_image_desc desc =
5000  {
5001  CL_MEM_OBJECT_IMAGE2D_ARRAY,
5002  width,
5003  height,
5004  0, // depth (unused)
5005  arraySize,
5006  rowPitch,
5007  slicePitch,
5008  0, 0, 0
5009  };
5010  object_ = ::clCreateImage(
5011  context(),
5012  flags,
5013  &format,
5014  &desc,
5015  host_ptr,
5016  &error);
5017 
5018  detail::errHandler(error, __CREATE_IMAGE_ERR);
5019  if (err != NULL) {
5020  *err = error;
5021  }
5022  }
5023 
5024  Image2DArray() { }
5025 
5033  explicit Image2DArray(const cl_mem& imageArray, bool retainObject = false) : Image(imageArray, retainObject) { }
5034 
5035  Image2DArray& operator = (const cl_mem& rhs)
5036  {
5037  Image::operator=(rhs);
5038  return *this;
5039  }
5040 
5044  Image2DArray(const Image2DArray& img) : Image(img) {}
5045 
5049  Image2DArray& operator = (const Image2DArray &img)
5050  {
5051  Image::operator=(img);
5052  return *this;
5053  }
5054 
5058  Image2DArray(Image2DArray&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
5059 
5063  Image2DArray& operator = (Image2DArray &&img)
5064  {
5065  Image::operator=(std::move(img));
5066  return *this;
5067  }
5068 };
5069 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 120
5070 
5077 class Image3D : public Image
5078 {
5079 public:
5085  const Context& context,
5086  cl_mem_flags flags,
5087  ImageFormat format,
5088  size_type width,
5089  size_type height,
5090  size_type depth,
5091  size_type row_pitch = 0,
5092  size_type slice_pitch = 0,
5093  void* host_ptr = NULL,
5094  cl_int* err = NULL)
5095  {
5096  cl_int error;
5097  bool useCreateImage;
5098 
5099 #if CL_HPP_TARGET_OPENCL_VERSION >= 120 && CL_HPP_MINIMUM_OPENCL_VERSION < 120
5100  // Run-time decision based on the actual platform
5101  {
5102  cl_uint version = detail::getContextPlatformVersion(context());
5103  useCreateImage = (version >= 0x10002); // OpenCL 1.2 or above
5104  }
5105 #elif CL_HPP_TARGET_OPENCL_VERSION >= 120
5106  useCreateImage = true;
5107 #else
5108  useCreateImage = false;
5109 #endif
5110 
5111 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
5112  if (useCreateImage)
5113  {
5114  cl_image_desc desc =
5115  {
5116  CL_MEM_OBJECT_IMAGE3D,
5117  width,
5118  height,
5119  depth,
5120  0, // array size (unused)
5121  row_pitch,
5122  slice_pitch,
5123  0, 0, 0
5124  };
5125  object_ = ::clCreateImage(
5126  context(),
5127  flags,
5128  &format,
5129  &desc,
5130  host_ptr,
5131  &error);
5132 
5133  detail::errHandler(error, __CREATE_IMAGE_ERR);
5134  if (err != NULL) {
5135  *err = error;
5136  }
5137  }
5138 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
5139 #if CL_HPP_MINIMUM_OPENCL_VERSION < 120
5140  if (!useCreateImage)
5141  {
5142  object_ = ::clCreateImage3D(
5143  context(), flags, &format, width, height, depth, row_pitch,
5144  slice_pitch, host_ptr, &error);
5145 
5146  detail::errHandler(error, __CREATE_IMAGE3D_ERR);
5147  if (err != NULL) {
5148  *err = error;
5149  }
5150  }
5151 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 120
5152  }
5153 
5155  Image3D() : Image() { }
5156 
5164  explicit Image3D(const cl_mem& image3D, bool retainObject = false) :
5165  Image(image3D, retainObject) { }
5166 
5171  Image3D& operator = (const cl_mem& rhs)
5172  {
5173  Image::operator=(rhs);
5174  return *this;
5175  }
5176 
5180  Image3D(const Image3D& img) : Image(img) {}
5181 
5185  Image3D& operator = (const Image3D &img)
5186  {
5187  Image::operator=(img);
5188  return *this;
5189  }
5190 
5194  Image3D(Image3D&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
5195 
5199  Image3D& operator = (Image3D &&img)
5200  {
5201  Image::operator=(std::move(img));
5202  return *this;
5203  }
5204 };
5205 
5206 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
5207 
5215 class Image3DGL : public Image3D
5216 {
5217 public:
5224  const Context& context,
5225  cl_mem_flags flags,
5226  cl_GLenum target,
5227  cl_GLint miplevel,
5228  cl_GLuint texobj,
5229  cl_int * err = NULL)
5230  {
5231  cl_int error;
5232  object_ = ::clCreateFromGLTexture3D(
5233  context(),
5234  flags,
5235  target,
5236  miplevel,
5237  texobj,
5238  &error);
5239 
5240  detail::errHandler(error, __CREATE_GL_TEXTURE_3D_ERR);
5241  if (err != NULL) {
5242  *err = error;
5243  }
5244  }
5245 
5247  Image3DGL() : Image3D() { }
5248 
5256  explicit Image3DGL(const cl_mem& image, bool retainObject = false) :
5257  Image3D(image, retainObject) { }
5258 
5263  Image3DGL& operator = (const cl_mem& rhs)
5264  {
5265  Image3D::operator=(rhs);
5266  return *this;
5267  }
5268 
5272  Image3DGL(const Image3DGL& img) : Image3D(img) {}
5273 
5277  Image3DGL& operator = (const Image3DGL &img)
5278  {
5279  Image3D::operator=(img);
5280  return *this;
5281  }
5282 
5286  Image3DGL(Image3DGL&& img) CL_HPP_NOEXCEPT_ : Image3D(std::move(img)) {}
5287 
5291  Image3DGL& operator = (Image3DGL &&img)
5292  {
5293  Image3D::operator=(std::move(img));
5294  return *this;
5295  }
5296 };
5297 #endif // CL_USE_DEPRECATED_OPENCL_1_1_APIS
5298 
5299 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
5300 
5306 class ImageGL : public Image
5307 {
5308 public:
5309  ImageGL(
5310  const Context& context,
5311  cl_mem_flags flags,
5312  cl_GLenum target,
5313  cl_GLint miplevel,
5314  cl_GLuint texobj,
5315  cl_int * err = NULL)
5316  {
5317  cl_int error;
5318  object_ = ::clCreateFromGLTexture(
5319  context(),
5320  flags,
5321  target,
5322  miplevel,
5323  texobj,
5324  &error);
5325 
5326  detail::errHandler(error, __CREATE_GL_TEXTURE_ERR);
5327  if (err != NULL) {
5328  *err = error;
5329  }
5330  }
5331 
5332  ImageGL() : Image() { }
5333 
5341  explicit ImageGL(const cl_mem& image, bool retainObject = false) :
5342  Image(image, retainObject) { }
5343 
5344  ImageGL& operator = (const cl_mem& rhs)
5345  {
5346  Image::operator=(rhs);
5347  return *this;
5348  }
5349 
5353  ImageGL(const ImageGL& img) : Image(img) {}
5354 
5358  ImageGL& operator = (const ImageGL &img)
5359  {
5360  Image::operator=(img);
5361  return *this;
5362  }
5363 
5367  ImageGL(ImageGL&& img) CL_HPP_NOEXCEPT_ : Image(std::move(img)) {}
5368 
5372  ImageGL& operator = (ImageGL &&img)
5373  {
5374  Image::operator=(std::move(img));
5375  return *this;
5376  }
5377 };
5378 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
5379 
5380 
5381 
5382 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5383 
5389 class Pipe : public Memory
5390 {
5391 public:
5392 
5403  const Context& context,
5404  cl_uint packet_size,
5405  cl_uint max_packets,
5406  cl_int* err = NULL)
5407  {
5408  cl_int error;
5409 
5410  cl_mem_flags flags = CL_MEM_READ_WRITE | CL_MEM_HOST_NO_ACCESS;
5411  object_ = ::clCreatePipe(context(), flags, packet_size, max_packets, nullptr, &error);
5412 
5413  detail::errHandler(error, __CREATE_PIPE_ERR);
5414  if (err != NULL) {
5415  *err = error;
5416  }
5417  }
5418 
5428  cl_uint packet_size,
5429  cl_uint max_packets,
5430  cl_int* err = NULL)
5431  {
5432  cl_int error;
5433 
5434  Context context = Context::getDefault(err);
5435 
5436  cl_mem_flags flags = CL_MEM_READ_WRITE | CL_MEM_HOST_NO_ACCESS;
5437  object_ = ::clCreatePipe(context(), flags, packet_size, max_packets, nullptr, &error);
5438 
5439  detail::errHandler(error, __CREATE_PIPE_ERR);
5440  if (err != NULL) {
5441  *err = error;
5442  }
5443  }
5444 
5446  Pipe() : Memory() { }
5447 
5455  explicit Pipe(const cl_mem& pipe, bool retainObject = false) :
5456  Memory(pipe, retainObject) { }
5457 
5462  Pipe& operator = (const cl_mem& rhs)
5463  {
5464  Memory::operator=(rhs);
5465  return *this;
5466  }
5467 
5471  Pipe(const Pipe& pipe) : Memory(pipe) {}
5472 
5476  Pipe& operator = (const Pipe &pipe)
5477  {
5478  Memory::operator=(pipe);
5479  return *this;
5480  }
5481 
5485  Pipe(Pipe&& pipe) CL_HPP_NOEXCEPT_ : Memory(std::move(pipe)) {}
5486 
5490  Pipe& operator = (Pipe &&pipe)
5491  {
5492  Memory::operator=(std::move(pipe));
5493  return *this;
5494  }
5495 
5497  template <typename T>
5498  cl_int getInfo(cl_pipe_info name, T* param) const
5499  {
5500  return detail::errHandler(
5501  detail::getInfo(&::clGetPipeInfo, object_, name, param),
5502  __GET_PIPE_INFO_ERR);
5503  }
5504 
5506  template <cl_int name> typename
5508  getInfo(cl_int* err = NULL) const
5509  {
5510  typename detail::param_traits<
5511  detail::cl_pipe_info, name>::param_type param;
5512  cl_int result = getInfo(name, &param);
5513  if (err != NULL) {
5514  *err = result;
5515  }
5516  return param;
5517  }
5518 }; // class Pipe
5519 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
5520 
5521 
5530 class Sampler : public detail::Wrapper<cl_sampler>
5531 {
5532 public:
5534  Sampler() { }
5535 
5541  const Context& context,
5542  cl_bool normalized_coords,
5543  cl_addressing_mode addressing_mode,
5544  cl_filter_mode filter_mode,
5545  cl_int* err = NULL)
5546  {
5547  cl_int error;
5548 
5549 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5550  cl_sampler_properties sampler_properties[] = {
5551  CL_SAMPLER_NORMALIZED_COORDS, normalized_coords,
5552  CL_SAMPLER_ADDRESSING_MODE, addressing_mode,
5553  CL_SAMPLER_FILTER_MODE, filter_mode,
5554  0 };
5555  object_ = ::clCreateSamplerWithProperties(
5556  context(),
5557  sampler_properties,
5558  &error);
5559 
5560  detail::errHandler(error, __CREATE_SAMPLER_WITH_PROPERTIES_ERR);
5561  if (err != NULL) {
5562  *err = error;
5563  }
5564 #else
5565  object_ = ::clCreateSampler(
5566  context(),
5567  normalized_coords,
5568  addressing_mode,
5569  filter_mode,
5570  &error);
5571 
5572  detail::errHandler(error, __CREATE_SAMPLER_ERR);
5573  if (err != NULL) {
5574  *err = error;
5575  }
5576 #endif
5577  }
5578 
5587  explicit Sampler(const cl_sampler& sampler, bool retainObject = false) :
5588  detail::Wrapper<cl_type>(sampler, retainObject) { }
5589 
5595  Sampler& operator = (const cl_sampler& rhs)
5596  {
5598  return *this;
5599  }
5600 
5604  Sampler(const Sampler& sam) : detail::Wrapper<cl_type>(sam) {}
5605 
5609  Sampler& operator = (const Sampler &sam)
5610  {
5612  return *this;
5613  }
5614 
5618  Sampler(Sampler&& sam) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(sam)) {}
5619 
5623  Sampler& operator = (Sampler &&sam)
5624  {
5625  detail::Wrapper<cl_type>::operator=(std::move(sam));
5626  return *this;
5627  }
5628 
5630  template <typename T>
5631  cl_int getInfo(cl_sampler_info name, T* param) const
5632  {
5633  return detail::errHandler(
5634  detail::getInfo(&::clGetSamplerInfo, object_, name, param),
5635  __GET_SAMPLER_INFO_ERR);
5636  }
5637 
5639  template <cl_int name> typename
5641  getInfo(cl_int* err = NULL) const
5642  {
5643  typename detail::param_traits<
5644  detail::cl_sampler_info, name>::param_type param;
5645  cl_int result = getInfo(name, &param);
5646  if (err != NULL) {
5647  *err = result;
5648  }
5649  return param;
5650  }
5651 };
5652 
5653 class Program;
5654 class CommandQueue;
5655 class DeviceCommandQueue;
5656 class Kernel;
5657 
5659 class NDRange
5660 {
5661 private:
5662  size_type sizes_[3];
5663  cl_uint dimensions_;
5664 
5665 public:
5668  : dimensions_(0)
5669  {
5670  sizes_[0] = 0;
5671  sizes_[1] = 0;
5672  sizes_[2] = 0;
5673  }
5674 
5676  NDRange(size_type size0)
5677  : dimensions_(1)
5678  {
5679  sizes_[0] = size0;
5680  sizes_[1] = 1;
5681  sizes_[2] = 1;
5682  }
5683 
5685  NDRange(size_type size0, size_type size1)
5686  : dimensions_(2)
5687  {
5688  sizes_[0] = size0;
5689  sizes_[1] = size1;
5690  sizes_[2] = 1;
5691  }
5692 
5694  NDRange(size_type size0, size_type size1, size_type size2)
5695  : dimensions_(3)
5696  {
5697  sizes_[0] = size0;
5698  sizes_[1] = size1;
5699  sizes_[2] = size2;
5700  }
5701 
5706  operator const size_type*() const {
5707  return sizes_;
5708  }
5709 
5711  size_type dimensions() const
5712  {
5713  return dimensions_;
5714  }
5715 
5717  // runtime number of dimensions
5718  size_type size() const
5719  {
5720  return dimensions_*sizeof(size_type);
5721  }
5722 
5723  size_type* get()
5724  {
5725  return sizes_;
5726  }
5727 
5728  const size_type* get() const
5729  {
5730  return sizes_;
5731  }
5732 };
5733 
5735 static const NDRange NullRange;
5736 
5739 {
5740  size_type size_;
5741 };
5742 
5743 namespace detail {
5744 
5745 template <typename T, class Enable = void>
5747 
5748 // Enable for objects that are not subclasses of memory
5749 // Pointers, constants etc
5750 template <typename T>
5751 struct KernelArgumentHandler<T, typename std::enable_if<!std::is_base_of<cl::Memory, T>::value>::type>
5752 {
5753  static size_type size(const T&) { return sizeof(T); }
5754  static const T* ptr(const T& value) { return &value; }
5755 };
5756 
5757 // Enable for subclasses of memory where we want to get a reference to the cl_mem out
5758 // and pass that in for safety
5759 template <typename T>
5760 struct KernelArgumentHandler<T, typename std::enable_if<std::is_base_of<cl::Memory, T>::value>::type>
5761 {
5762  static size_type size(const T&) { return sizeof(cl_mem); }
5763  static const cl_mem* ptr(const T& value) { return &(value()); }
5764 };
5765 
5766 // Specialization for DeviceCommandQueue defined later
5767 
5768 template <>
5770 {
5771  static size_type size(const LocalSpaceArg& value) { return value.size_; }
5772  static const void* ptr(const LocalSpaceArg&) { return NULL; }
5773 };
5774 
5775 }
5777 
5781 inline LocalSpaceArg
5782 Local(size_type size)
5783 {
5784  LocalSpaceArg ret = { size };
5785  return ret;
5786 }
5787 
5796 class Kernel : public detail::Wrapper<cl_kernel>
5797 {
5798 public:
5799  inline Kernel(const Program& program, const char* name, cl_int* err = NULL);
5800 
5802  Kernel() { }
5803 
5812  explicit Kernel(const cl_kernel& kernel, bool retainObject = false) :
5813  detail::Wrapper<cl_type>(kernel, retainObject) { }
5814 
5820  Kernel& operator = (const cl_kernel& rhs)
5821  {
5823  return *this;
5824  }
5825 
5829  Kernel(const Kernel& kernel) : detail::Wrapper<cl_type>(kernel) {}
5830 
5834  Kernel& operator = (const Kernel &kernel)
5835  {
5837  return *this;
5838  }
5839 
5843  Kernel(Kernel&& kernel) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(kernel)) {}
5844 
5848  Kernel& operator = (Kernel &&kernel)
5849  {
5850  detail::Wrapper<cl_type>::operator=(std::move(kernel));
5851  return *this;
5852  }
5853 
5854  template <typename T>
5855  cl_int getInfo(cl_kernel_info name, T* param) const
5856  {
5857  return detail::errHandler(
5858  detail::getInfo(&::clGetKernelInfo, object_, name, param),
5859  __GET_KERNEL_INFO_ERR);
5860  }
5861 
5862  template <cl_int name> typename
5864  getInfo(cl_int* err = NULL) const
5865  {
5866  typename detail::param_traits<
5867  detail::cl_kernel_info, name>::param_type param;
5868  cl_int result = getInfo(name, &param);
5869  if (err != NULL) {
5870  *err = result;
5871  }
5872  return param;
5873  }
5874 
5875 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
5876  template <typename T>
5877  cl_int getArgInfo(cl_uint argIndex, cl_kernel_arg_info name, T* param) const
5878  {
5879  return detail::errHandler(
5880  detail::getInfo(&::clGetKernelArgInfo, object_, argIndex, name, param),
5881  __GET_KERNEL_ARG_INFO_ERR);
5882  }
5883 
5884  template <cl_int name> typename
5886  getArgInfo(cl_uint argIndex, cl_int* err = NULL) const
5887  {
5888  typename detail::param_traits<
5889  detail::cl_kernel_arg_info, name>::param_type param;
5890  cl_int result = getArgInfo(argIndex, name, &param);
5891  if (err != NULL) {
5892  *err = result;
5893  }
5894  return param;
5895  }
5896 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
5897 
5898  template <typename T>
5899  cl_int getWorkGroupInfo(
5900  const Device& device, cl_kernel_work_group_info name, T* param) const
5901  {
5902  return detail::errHandler(
5903  detail::getInfo(
5904  &::clGetKernelWorkGroupInfo, object_, device(), name, param),
5905  __GET_KERNEL_WORK_GROUP_INFO_ERR);
5906  }
5907 
5908  template <cl_int name> typename
5910  getWorkGroupInfo(const Device& device, cl_int* err = NULL) const
5911  {
5912  typename detail::param_traits<
5913  detail::cl_kernel_work_group_info, name>::param_type param;
5914  cl_int result = getWorkGroupInfo(device, name, &param);
5915  if (err != NULL) {
5916  *err = result;
5917  }
5918  return param;
5919  }
5920 
5921 #if (CL_HPP_TARGET_OPENCL_VERSION >= 200 && defined(CL_HPP_USE_CL_SUB_GROUPS_KHR)) || CL_HPP_TARGET_OPENCL_VERSION >= 210
5922  cl_int getSubGroupInfo(const cl::Device &dev, cl_kernel_sub_group_info name, const cl::NDRange &range, size_type* param) const
5923  {
5924 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
5925 
5926  return detail::errHandler(
5927  clGetKernelSubGroupInfo(object_, dev(), name, range.size(), range.get(), sizeof(size_type), param, nullptr),
5928  __GET_KERNEL_SUB_GROUP_INFO_ERR);
5929 
5930 #else // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
5931 
5932  typedef clGetKernelSubGroupInfoKHR_fn PFN_clGetKernelSubGroupInfoKHR;
5933  static PFN_clGetKernelSubGroupInfoKHR pfn_clGetKernelSubGroupInfoKHR = NULL;
5934  CL_HPP_INIT_CL_EXT_FCN_PTR_(clGetKernelSubGroupInfoKHR);
5935 
5936  return detail::errHandler(
5937  pfn_clGetKernelSubGroupInfoKHR(object_, dev(), name, range.size(), range.get(), sizeof(size_type), param, nullptr),
5938  __GET_KERNEL_SUB_GROUP_INFO_ERR);
5939 
5940 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
5941  }
5942 
5943  template <cl_int name>
5944  size_type getSubGroupInfo(const cl::Device &dev, const cl::NDRange &range, cl_int* err = NULL) const
5945  {
5946  size_type param;
5947  cl_int result = getSubGroupInfo(dev, name, range, &param);
5948  if (err != NULL) {
5949  *err = result;
5950  }
5951  return param;
5952  }
5953 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5954 
5955 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5956 
5958  template<typename T, class D>
5959  cl_int setArg(cl_uint index, const cl::pointer<T, D> &argPtr)
5960  {
5961  return detail::errHandler(
5962  ::clSetKernelArgSVMPointer(object_, index, argPtr.get()),
5963  __SET_KERNEL_ARGS_ERR);
5964  }
5965 
5968  template<typename T, class Alloc>
5969  cl_int setArg(cl_uint index, const cl::vector<T, Alloc> &argPtr)
5970  {
5971  return detail::errHandler(
5972  ::clSetKernelArgSVMPointer(object_, index, argPtr.data()),
5973  __SET_KERNEL_ARGS_ERR);
5974  }
5975 
5978  template<typename T>
5979  typename std::enable_if<std::is_pointer<T>::value, cl_int>::type
5980  setArg(cl_uint index, const T argPtr)
5981  {
5982  return detail::errHandler(
5983  ::clSetKernelArgSVMPointer(object_, index, argPtr),
5984  __SET_KERNEL_ARGS_ERR);
5985  }
5986 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
5987 
5990  template <typename T>
5991  typename std::enable_if<!std::is_pointer<T>::value, cl_int>::type
5992  setArg(cl_uint index, const T &value)
5993  {
5994  return detail::errHandler(
5995  ::clSetKernelArg(
5996  object_,
5997  index,
6000  __SET_KERNEL_ARGS_ERR);
6001  }
6002 
6003  cl_int setArg(cl_uint index, size_type size, const void* argPtr)
6004  {
6005  return detail::errHandler(
6006  ::clSetKernelArg(object_, index, size, argPtr),
6007  __SET_KERNEL_ARGS_ERR);
6008  }
6009 
6010 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
6011 
6015  cl_int setSVMPointers(const vector<void*> &pointerList)
6016  {
6017  return detail::errHandler(
6018  ::clSetKernelExecInfo(
6019  object_,
6020  CL_KERNEL_EXEC_INFO_SVM_PTRS,
6021  sizeof(void*)*pointerList.size(),
6022  pointerList.data()));
6023  }
6024 
6029  template<int ArrayLength>
6030  cl_int setSVMPointers(const std::array<void*, ArrayLength> &pointerList)
6031  {
6032  return detail::errHandler(
6033  ::clSetKernelExecInfo(
6034  object_,
6035  CL_KERNEL_EXEC_INFO_SVM_PTRS,
6036  sizeof(void*)*pointerList.size(),
6037  pointerList.data()));
6038  }
6039 
6051  cl_int enableFineGrainedSystemSVM(bool svmEnabled)
6052  {
6053  cl_bool svmEnabled_ = svmEnabled ? CL_TRUE : CL_FALSE;
6054  return detail::errHandler(
6055  ::clSetKernelExecInfo(
6056  object_,
6057  CL_KERNEL_EXEC_INFO_SVM_FINE_GRAIN_SYSTEM,
6058  sizeof(cl_bool),
6059  &svmEnabled_
6060  )
6061  );
6062  }
6063 
6064  template<int index, int ArrayLength, class D, typename T0, typename T1, typename... Ts>
6065  void setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, const pointer<T0, D> &t0, const pointer<T1, D> &t1, Ts & ... ts)
6066  {
6067  pointerList[index] = static_cast<void*>(t0.get());
6068  setSVMPointersHelper<index + 1, ArrayLength>(pointerList, t1, ts...);
6069  }
6070 
6071  template<int index, int ArrayLength, typename T0, typename T1, typename... Ts>
6072  typename std::enable_if<std::is_pointer<T0>::value, void>::type
6073  setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, T0 t0, T1 t1, Ts... ts)
6074  {
6075  pointerList[index] = static_cast<void*>(t0);
6076  setSVMPointersHelper<index + 1, ArrayLength>(pointerList, t1, ts...);
6077  }
6078 
6079  template<int index, int ArrayLength, typename T0, class D>
6080  void setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, const pointer<T0, D> &t0)
6081  {
6082  pointerList[index] = static_cast<void*>(t0.get());
6083  }
6084 
6085 
6086  template<int index, int ArrayLength, typename T0>
6087  typename std::enable_if<std::is_pointer<T0>::value, void>::type
6088  setSVMPointersHelper(std::array<void*, ArrayLength> &pointerList, T0 t0)
6089  {
6090  pointerList[index] = static_cast<void*>(t0);
6091  }
6092 
6093  template<typename T0, typename... Ts>
6094  cl_int setSVMPointers(const T0 &t0, Ts & ... ts)
6095  {
6096  std::array<void*, 1 + sizeof...(Ts)> pointerList;
6097 
6098  setSVMPointersHelper<0, 1 + sizeof...(Ts)>(pointerList, t0, ts...);
6099  return detail::errHandler(
6100  ::clSetKernelExecInfo(
6101  object_,
6102  CL_KERNEL_EXEC_INFO_SVM_PTRS,
6103  sizeof(void*)*(1 + sizeof...(Ts)),
6104  pointerList.data()));
6105  }
6106 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
6107 
6108 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6109 
6115  {
6116  cl_int error;
6117  Kernel retValue(clCloneKernel(this->get(), &error));
6118 
6119  detail::errHandler(error, __CLONE_KERNEL_ERR);
6120  return retValue;
6121  }
6122 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6123 };
6124 
6128 class Program : public detail::Wrapper<cl_program>
6129 {
6130 public:
6131 #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6132  typedef vector<vector<unsigned char>> Binaries;
6133  typedef vector<string> Sources;
6134 #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6135  typedef vector<std::pair<const void*, size_type> > Binaries;
6136  typedef vector<std::pair<const char*, size_type> > Sources;
6137 #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6138 
6139  Program(
6140  const string& source,
6141  bool build = false,
6142  cl_int* err = NULL)
6143  {
6144  cl_int error;
6145 
6146  const char * strings = source.c_str();
6147  const size_type length = source.size();
6148 
6149  Context context = Context::getDefault(err);
6150 
6151  object_ = ::clCreateProgramWithSource(
6152  context(), (cl_uint)1, &strings, &length, &error);
6153 
6154  detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
6155 
6156  if (error == CL_SUCCESS && build) {
6157 
6158  error = ::clBuildProgram(
6159  object_,
6160  0,
6161  NULL,
6162 #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6163  "-cl-std=CL2.0",
6164 #else
6165  "",
6166 #endif // #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6167  NULL,
6168  NULL);
6169 
6170  detail::buildErrHandler(error, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6171  }
6172 
6173  if (err != NULL) {
6174  *err = error;
6175  }
6176  }
6177 
6178  Program(
6179  const Context& context,
6180  const string& source,
6181  bool build = false,
6182  cl_int* err = NULL)
6183  {
6184  cl_int error;
6185 
6186  const char * strings = source.c_str();
6187  const size_type length = source.size();
6188 
6189  object_ = ::clCreateProgramWithSource(
6190  context(), (cl_uint)1, &strings, &length, &error);
6191 
6192  detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
6193 
6194  if (error == CL_SUCCESS && build) {
6195  error = ::clBuildProgram(
6196  object_,
6197  0,
6198  NULL,
6199 #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6200  "-cl-std=CL2.0",
6201 #else
6202  "",
6203 #endif // #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6204  NULL,
6205  NULL);
6206 
6207  detail::buildErrHandler(error, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6208  }
6209 
6210  if (err != NULL) {
6211  *err = error;
6212  }
6213  }
6214 
6220  const Sources& sources,
6221  cl_int* err = NULL)
6222  {
6223  cl_int error;
6224  Context context = Context::getDefault(err);
6225 
6226  const size_type n = (size_type)sources.size();
6227 
6228  vector<size_type> lengths(n);
6229  vector<const char*> strings(n);
6230 
6231  for (size_type i = 0; i < n; ++i) {
6232 #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6233  strings[i] = sources[(int)i].data();
6234  lengths[i] = sources[(int)i].length();
6235 #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6236  strings[i] = sources[(int)i].first;
6237  lengths[i] = sources[(int)i].second;
6238 #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6239  }
6240 
6241  object_ = ::clCreateProgramWithSource(
6242  context(), (cl_uint)n, strings.data(), lengths.data(), &error);
6243 
6244  detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
6245  if (err != NULL) {
6246  *err = error;
6247  }
6248  }
6249 
6255  const Context& context,
6256  const Sources& sources,
6257  cl_int* err = NULL)
6258  {
6259  cl_int error;
6260 
6261  const size_type n = (size_type)sources.size();
6262 
6263  vector<size_type> lengths(n);
6264  vector<const char*> strings(n);
6265 
6266  for (size_type i = 0; i < n; ++i) {
6267 #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6268  strings[i] = sources[(int)i].data();
6269  lengths[i] = sources[(int)i].length();
6270 #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6271  strings[i] = sources[(int)i].first;
6272  lengths[i] = sources[(int)i].second;
6273 #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6274  }
6275 
6276  object_ = ::clCreateProgramWithSource(
6277  context(), (cl_uint)n, strings.data(), lengths.data(), &error);
6278 
6279  detail::errHandler(error, __CREATE_PROGRAM_WITH_SOURCE_ERR);
6280  if (err != NULL) {
6281  *err = error;
6282  }
6283  }
6284 
6285 
6286 #if CL_HPP_TARGET_OPENCL_VERSION >= 210 || (CL_HPP_TARGET_OPENCL_VERSION==200 && defined(CL_HPP_USE_IL_KHR))
6287 
6292  const vector<char>& IL,
6293  bool build = false,
6294  cl_int* err = NULL)
6295  {
6296  cl_int error;
6297 
6298  Context context = Context::getDefault(err);
6299 
6300 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6301 
6302  object_ = ::clCreateProgramWithIL(
6303  context(), static_cast<const void*>(IL.data()), IL.size(), &error);
6304 
6305 #else // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6306 
6307  typedef clCreateProgramWithILKHR_fn PFN_clCreateProgramWithILKHR;
6308  static PFN_clCreateProgramWithILKHR pfn_clCreateProgramWithILKHR = NULL;
6309  CL_HPP_INIT_CL_EXT_FCN_PTR_(clCreateProgramWithILKHR);
6310 
6311  return detail::errHandler(
6312  pfn_clCreateProgramWithILKHR(
6313  context(), static_cast<const void*>(IL.data()), IL.size(), &error);
6314 
6315 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6316 
6317  detail::errHandler(error, __CREATE_PROGRAM_WITH_IL_ERR);
6318 
6319  if (error == CL_SUCCESS && build) {
6320 
6321  error = ::clBuildProgram(
6322  object_,
6323  0,
6324  NULL,
6325 #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6326  "-cl-std=CL2.0",
6327 #else
6328  "",
6329 #endif // #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6330  NULL,
6331  NULL);
6332 
6333  detail::buildErrHandler(error, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6334  }
6335 
6336  if (err != NULL) {
6337  *err = error;
6338  }
6339  }
6340 
6347  const Context& context,
6348  const vector<char>& IL,
6349  bool build = false,
6350  cl_int* err = NULL)
6351  {
6352  cl_int error;
6353 
6354 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6355 
6356  object_ = ::clCreateProgramWithIL(
6357  context(), static_cast<const void*>(IL.data()), IL.size(), &error);
6358 
6359 #else // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6360 
6361  typedef clCreateProgramWithILKHR_fn PFN_clCreateProgramWithILKHR;
6362  static PFN_clCreateProgramWithILKHR pfn_clCreateProgramWithILKHR = NULL;
6363  CL_HPP_INIT_CL_EXT_FCN_PTR_(clCreateProgramWithILKHR);
6364 
6365  return detail::errHandler(
6366  pfn_clCreateProgramWithILKHR(
6367  context(), static_cast<const void*>(IL.data()), IL.size(), &error);
6368 
6369 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6370 
6371  detail::errHandler(error, __CREATE_PROGRAM_WITH_IL_ERR);
6372 
6373  if (error == CL_SUCCESS && build) {
6374  error = ::clBuildProgram(
6375  object_,
6376  0,
6377  NULL,
6378 #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6379  "-cl-std=CL2.0",
6380 #else
6381  "",
6382 #endif // #if !defined(CL_HPP_CL_1_2_DEFAULT_BUILD)
6383  NULL,
6384  NULL);
6385 
6386  detail::buildErrHandler(error, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6387  }
6388 
6389  if (err != NULL) {
6390  *err = error;
6391  }
6392  }
6393 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
6394 
6415  const Context& context,
6416  const vector<Device>& devices,
6417  const Binaries& binaries,
6418  vector<cl_int>* binaryStatus = NULL,
6419  cl_int* err = NULL)
6420  {
6421  cl_int error;
6422 
6423  const size_type numDevices = devices.size();
6424 
6425  // Catch size mismatch early and return
6426  if(binaries.size() != numDevices) {
6427  error = CL_INVALID_VALUE;
6428  detail::errHandler(error, __CREATE_PROGRAM_WITH_BINARY_ERR);
6429  if (err != NULL) {
6430  *err = error;
6431  }
6432  return;
6433  }
6434 
6435 
6436  vector<size_type> lengths(numDevices);
6437  vector<const unsigned char*> images(numDevices);
6438 #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6439  for (size_type i = 0; i < numDevices; ++i) {
6440  images[i] = binaries[i].data();
6441  lengths[i] = binaries[(int)i].size();
6442  }
6443 #else // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6444  for (size_type i = 0; i < numDevices; ++i) {
6445  images[i] = (const unsigned char*)binaries[i].first;
6446  lengths[i] = binaries[(int)i].second;
6447  }
6448 #endif // #if !defined(CL_HPP_ENABLE_PROGRAM_CONSTRUCTION_FROM_ARRAY_COMPATIBILITY)
6449 
6450  vector<cl_device_id> deviceIDs(numDevices);
6451  for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
6452  deviceIDs[deviceIndex] = (devices[deviceIndex])();
6453  }
6454 
6455  if(binaryStatus) {
6456  binaryStatus->resize(numDevices);
6457  }
6458 
6459  object_ = ::clCreateProgramWithBinary(
6460  context(), (cl_uint) devices.size(),
6461  deviceIDs.data(),
6462  lengths.data(), images.data(), (binaryStatus != NULL && numDevices > 0)
6463  ? &binaryStatus->front()
6464  : NULL, &error);
6465 
6466  detail::errHandler(error, __CREATE_PROGRAM_WITH_BINARY_ERR);
6467  if (err != NULL) {
6468  *err = error;
6469  }
6470  }
6471 
6472 
6473 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
6474 
6479  const Context& context,
6480  const vector<Device>& devices,
6481  const string& kernelNames,
6482  cl_int* err = NULL)
6483  {
6484  cl_int error;
6485 
6486 
6487  size_type numDevices = devices.size();
6488  vector<cl_device_id> deviceIDs(numDevices);
6489  for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
6490  deviceIDs[deviceIndex] = (devices[deviceIndex])();
6491  }
6492 
6493  object_ = ::clCreateProgramWithBuiltInKernels(
6494  context(),
6495  (cl_uint) devices.size(),
6496  deviceIDs.data(),
6497  kernelNames.c_str(),
6498  &error);
6499 
6500  detail::errHandler(error, __CREATE_PROGRAM_WITH_BUILT_IN_KERNELS_ERR);
6501  if (err != NULL) {
6502  *err = error;
6503  }
6504  }
6505 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
6506 
6507  Program() { }
6508 
6509 
6516  explicit Program(const cl_program& program, bool retainObject = false) :
6517  detail::Wrapper<cl_type>(program, retainObject) { }
6518 
6519  Program& operator = (const cl_program& rhs)
6520  {
6522  return *this;
6523  }
6524 
6528  Program(const Program& program) : detail::Wrapper<cl_type>(program) {}
6529 
6533  Program& operator = (const Program &program)
6534  {
6536  return *this;
6537  }
6538 
6542  Program(Program&& program) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(program)) {}
6543 
6547  Program& operator = (Program &&program)
6548  {
6549  detail::Wrapper<cl_type>::operator=(std::move(program));
6550  return *this;
6551  }
6552 
6553  cl_int build(
6554  const vector<Device>& devices,
6555  const char* options = NULL,
6556  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6557  void* data = NULL) const
6558  {
6559  size_type numDevices = devices.size();
6560  vector<cl_device_id> deviceIDs(numDevices);
6561 
6562  for( size_type deviceIndex = 0; deviceIndex < numDevices; ++deviceIndex ) {
6563  deviceIDs[deviceIndex] = (devices[deviceIndex])();
6564  }
6565 
6566  cl_int buildError = ::clBuildProgram(
6567  object_,
6568  (cl_uint)
6569  devices.size(),
6570  deviceIDs.data(),
6571  options,
6572  notifyFptr,
6573  data);
6574 
6575  return detail::buildErrHandler(buildError, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6576  }
6577 
6578  cl_int build(
6579  const char* options = NULL,
6580  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6581  void* data = NULL) const
6582  {
6583  cl_int buildError = ::clBuildProgram(
6584  object_,
6585  0,
6586  NULL,
6587  options,
6588  notifyFptr,
6589  data);
6590 
6591 
6592  return detail::buildErrHandler(buildError, __BUILD_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6593  }
6594 
6595 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
6596  cl_int compile(
6597  const char* options = NULL,
6598  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6599  void* data = NULL) const
6600  {
6601  cl_int error = ::clCompileProgram(
6602  object_,
6603  0,
6604  NULL,
6605  options,
6606  0,
6607  NULL,
6608  NULL,
6609  notifyFptr,
6610  data);
6611  return detail::buildErrHandler(error, __COMPILE_PROGRAM_ERR, getBuildInfo<CL_PROGRAM_BUILD_LOG>());
6612  }
6613 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
6614 
6615  template <typename T>
6616  cl_int getInfo(cl_program_info name, T* param) const
6617  {
6618  return detail::errHandler(
6619  detail::getInfo(&::clGetProgramInfo, object_, name, param),
6620  __GET_PROGRAM_INFO_ERR);
6621  }
6622 
6623  template <cl_int name> typename
6625  getInfo(cl_int* err = NULL) const
6626  {
6627  typename detail::param_traits<
6628  detail::cl_program_info, name>::param_type param;
6629  cl_int result = getInfo(name, &param);
6630  if (err != NULL) {
6631  *err = result;
6632  }
6633  return param;
6634  }
6635 
6636  template <typename T>
6637  cl_int getBuildInfo(
6638  const Device& device, cl_program_build_info name, T* param) const
6639  {
6640  return detail::errHandler(
6641  detail::getInfo(
6642  &::clGetProgramBuildInfo, object_, device(), name, param),
6643  __GET_PROGRAM_BUILD_INFO_ERR);
6644  }
6645 
6646  template <cl_int name> typename
6648  getBuildInfo(const Device& device, cl_int* err = NULL) const
6649  {
6650  typename detail::param_traits<
6651  detail::cl_program_build_info, name>::param_type param;
6652  cl_int result = getBuildInfo(device, name, &param);
6653  if (err != NULL) {
6654  *err = result;
6655  }
6656  return param;
6657  }
6658 
6664  template <cl_int name>
6665  vector<std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, name>::param_type>>
6666  getBuildInfo(cl_int *err = NULL) const
6667  {
6668  cl_int result = CL_SUCCESS;
6669 
6670  auto devs = getInfo<CL_PROGRAM_DEVICES>(&result);
6671  vector<std::pair<cl::Device, typename detail::param_traits<detail::cl_program_build_info, name>::param_type>>
6672  devInfo;
6673 
6674  // If there was an initial error from getInfo return the error
6675  if (result != CL_SUCCESS) {
6676  if (err != NULL) {
6677  *err = result;
6678  }
6679  return devInfo;
6680  }
6681 
6682  for (const cl::Device &d : devs) {
6683  typename detail::param_traits<
6684  detail::cl_program_build_info, name>::param_type param;
6685  result = getBuildInfo(d, name, &param);
6686  devInfo.push_back(
6688  (d, param));
6689  if (result != CL_SUCCESS) {
6690  // On error, leave the loop and return the error code
6691  break;
6692  }
6693  }
6694  if (err != NULL) {
6695  *err = result;
6696  }
6697  if (result != CL_SUCCESS) {
6698  devInfo.clear();
6699  }
6700  return devInfo;
6701  }
6702 
6703  cl_int createKernels(vector<Kernel>* kernels)
6704  {
6705  cl_uint numKernels;
6706  cl_int err = ::clCreateKernelsInProgram(object_, 0, NULL, &numKernels);
6707  if (err != CL_SUCCESS) {
6708  return detail::errHandler(err, __CREATE_KERNELS_IN_PROGRAM_ERR);
6709  }
6710 
6711  vector<cl_kernel> value(numKernels);
6712 
6713  err = ::clCreateKernelsInProgram(
6714  object_, numKernels, value.data(), NULL);
6715  if (err != CL_SUCCESS) {
6716  return detail::errHandler(err, __CREATE_KERNELS_IN_PROGRAM_ERR);
6717  }
6718 
6719  if (kernels) {
6720  kernels->resize(value.size());
6721 
6722  // Assign to param, constructing with retain behaviour
6723  // to correctly capture each underlying CL object
6724  for (size_type i = 0; i < value.size(); i++) {
6725  // We do not need to retain because this kernel is being created
6726  // by the runtime
6727  (*kernels)[i] = Kernel(value[i], false);
6728  }
6729  }
6730  return CL_SUCCESS;
6731  }
6732 };
6733 
6734 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
6735 inline Program linkProgram(
6736  Program input1,
6737  Program input2,
6738  const char* options = NULL,
6739  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6740  void* data = NULL,
6741  cl_int* err = NULL)
6742 {
6743  cl_int error_local = CL_SUCCESS;
6744 
6745  cl_program programs[2] = { input1(), input2() };
6746 
6747  Context ctx = input1.getInfo<CL_PROGRAM_CONTEXT>(&error_local);
6748  if(error_local!=CL_SUCCESS) {
6749  detail::errHandler(error_local, __LINK_PROGRAM_ERR);
6750  }
6751 
6752  cl_program prog = ::clLinkProgram(
6753  ctx(),
6754  0,
6755  NULL,
6756  options,
6757  2,
6758  programs,
6759  notifyFptr,
6760  data,
6761  &error_local);
6762 
6763  detail::errHandler(error_local,__COMPILE_PROGRAM_ERR);
6764  if (err != NULL) {
6765  *err = error_local;
6766  }
6767 
6768  return Program(prog);
6769 }
6770 
6771 inline Program linkProgram(
6772  vector<Program> inputPrograms,
6773  const char* options = NULL,
6774  void (CL_CALLBACK * notifyFptr)(cl_program, void *) = NULL,
6775  void* data = NULL,
6776  cl_int* err = NULL)
6777 {
6778  cl_int error_local = CL_SUCCESS;
6779 
6780  vector<cl_program> programs(inputPrograms.size());
6781 
6782  for (unsigned int i = 0; i < inputPrograms.size(); i++) {
6783  programs[i] = inputPrograms[i]();
6784  }
6785 
6786  Context ctx;
6787  if(inputPrograms.size() > 0) {
6788  ctx = inputPrograms[0].getInfo<CL_PROGRAM_CONTEXT>(&error_local);
6789  if(error_local!=CL_SUCCESS) {
6790  detail::errHandler(error_local, __LINK_PROGRAM_ERR);
6791  }
6792  }
6793  cl_program prog = ::clLinkProgram(
6794  ctx(),
6795  0,
6796  NULL,
6797  options,
6798  (cl_uint)inputPrograms.size(),
6799  programs.data(),
6800  notifyFptr,
6801  data,
6802  &error_local);
6803 
6804  detail::errHandler(error_local,__COMPILE_PROGRAM_ERR);
6805  if (err != NULL) {
6806  *err = error_local;
6807  }
6808 
6809  return Program(prog, false);
6810 }
6811 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
6812 
6813 // Template specialization for CL_PROGRAM_BINARIES
6814 template <>
6815 inline cl_int cl::Program::getInfo(cl_program_info name, vector<vector<unsigned char>>* param) const
6816 {
6817  if (name != CL_PROGRAM_BINARIES) {
6818  return CL_INVALID_VALUE;
6819  }
6820  if (param) {
6821  // Resize the parameter array appropriately for each allocation
6822  // and pass down to the helper
6823 
6824  vector<size_type> sizes = getInfo<CL_PROGRAM_BINARY_SIZES>();
6825  size_type numBinaries = sizes.size();
6826 
6827  // Resize the parameter array and constituent arrays
6828  param->resize(numBinaries);
6829  for (size_type i = 0; i < numBinaries; ++i) {
6830  (*param)[i].resize(sizes[i]);
6831  }
6832 
6833  return detail::errHandler(
6834  detail::getInfo(&::clGetProgramInfo, object_, name, param),
6835  __GET_PROGRAM_INFO_ERR);
6836  }
6837 
6838  return CL_SUCCESS;
6839 }
6840 
6841 template<>
6842 inline vector<vector<unsigned char>> cl::Program::getInfo<CL_PROGRAM_BINARIES>(cl_int* err) const
6843 {
6844  vector<vector<unsigned char>> binariesVectors;
6845 
6846  cl_int result = getInfo(CL_PROGRAM_BINARIES, &binariesVectors);
6847  if (err != NULL) {
6848  *err = result;
6849  }
6850  return binariesVectors;
6851 }
6852 
6853 inline Kernel::Kernel(const Program& program, const char* name, cl_int* err)
6854 {
6855  cl_int error;
6856 
6857  object_ = ::clCreateKernel(program(), name, &error);
6858  detail::errHandler(error, __CREATE_KERNEL_ERR);
6859 
6860  if (err != NULL) {
6861  *err = error;
6862  }
6863 
6864 }
6865 
6866 enum class QueueProperties : cl_command_queue_properties
6867 {
6868  None = 0,
6869  Profiling = CL_QUEUE_PROFILING_ENABLE,
6870  OutOfOrder = CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE,
6871 };
6872 
6873 inline QueueProperties operator|(QueueProperties lhs, QueueProperties rhs)
6874 {
6875  return static_cast<QueueProperties>(static_cast<cl_command_queue_properties>(lhs) | static_cast<cl_command_queue_properties>(rhs));
6876 }
6877 
6881 class CommandQueue : public detail::Wrapper<cl_command_queue>
6882 {
6883 private:
6884  static std::once_flag default_initialized_;
6885  static CommandQueue default_;
6886  static cl_int default_error_;
6887 
6893  static void makeDefault()
6894  {
6895  /* We don't want to throw an error from this function, so we have to
6896  * catch and set the error flag.
6897  */
6898 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
6899  try
6900 #endif
6901  {
6902  int error;
6903  Context context = Context::getDefault(&error);
6904 
6905  if (error != CL_SUCCESS) {
6906  default_error_ = error;
6907  }
6908  else {
6909  Device device = Device::getDefault();
6910  default_ = CommandQueue(context, device, 0, &default_error_);
6911  }
6912  }
6913 #if defined(CL_HPP_ENABLE_EXCEPTIONS)
6914  catch (cl::Error &e) {
6915  default_error_ = e.err();
6916  }
6917 #endif
6918  }
6919 
6925  static void makeDefaultProvided(const CommandQueue &c) {
6926  default_ = c;
6927  }
6928 
6929 public:
6930 #ifdef CL_HPP_UNIT_TEST_ENABLE
6931 
6937  static void unitTestClearDefault() {
6938  default_ = CommandQueue();
6939  }
6940 #endif // #ifdef CL_HPP_UNIT_TEST_ENABLE
6941 
6942 
6948  cl_command_queue_properties properties,
6949  cl_int* err = NULL)
6950  {
6951  cl_int error;
6952 
6953  Context context = Context::getDefault(&error);
6954  detail::errHandler(error, __CREATE_CONTEXT_ERR);
6955 
6956  if (error != CL_SUCCESS) {
6957  if (err != NULL) {
6958  *err = error;
6959  }
6960  }
6961  else {
6962  Device device = context.getInfo<CL_CONTEXT_DEVICES>()[0];
6963  bool useWithProperties;
6964 
6965 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
6966  // Run-time decision based on the actual platform
6967  {
6968  cl_uint version = detail::getContextPlatformVersion(context());
6969  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
6970  }
6971 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
6972  useWithProperties = true;
6973 #else
6974  useWithProperties = false;
6975 #endif
6976 
6977 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
6978  if (useWithProperties) {
6979  cl_queue_properties queue_properties[] = {
6980  CL_QUEUE_PROPERTIES, properties, 0 };
6981  if ((properties & CL_QUEUE_ON_DEVICE) == 0) {
6982  object_ = ::clCreateCommandQueueWithProperties(
6983  context(), device(), queue_properties, &error);
6984  }
6985  else {
6986  error = CL_INVALID_QUEUE_PROPERTIES;
6987  }
6988 
6989  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
6990  if (err != NULL) {
6991  *err = error;
6992  }
6993  }
6994 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
6995 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
6996  if (!useWithProperties) {
6997  object_ = ::clCreateCommandQueue(
6998  context(), device(), properties, &error);
6999 
7000  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
7001  if (err != NULL) {
7002  *err = error;
7003  }
7004  }
7005 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
7006  }
7007  }
7008 
7014  QueueProperties properties,
7015  cl_int* err = NULL)
7016  {
7017  cl_int error;
7018 
7019  Context context = Context::getDefault(&error);
7020  detail::errHandler(error, __CREATE_CONTEXT_ERR);
7021 
7022  if (error != CL_SUCCESS) {
7023  if (err != NULL) {
7024  *err = error;
7025  }
7026  }
7027  else {
7028  Device device = context.getInfo<CL_CONTEXT_DEVICES>()[0];
7029  bool useWithProperties;
7030 
7031 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
7032  // Run-time decision based on the actual platform
7033  {
7034  cl_uint version = detail::getContextPlatformVersion(context());
7035  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
7036  }
7037 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
7038  useWithProperties = true;
7039 #else
7040  useWithProperties = false;
7041 #endif
7042 
7043 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7044  if (useWithProperties) {
7045  cl_queue_properties queue_properties[] = {
7046  CL_QUEUE_PROPERTIES, static_cast<cl_queue_properties>(properties), 0 };
7047 
7048  object_ = ::clCreateCommandQueueWithProperties(
7049  context(), device(), queue_properties, &error);
7050 
7051  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
7052  if (err != NULL) {
7053  *err = error;
7054  }
7055  }
7056 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
7057 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
7058  if (!useWithProperties) {
7059  object_ = ::clCreateCommandQueue(
7060  context(), device(), static_cast<cl_command_queue_properties>(properties), &error);
7061 
7062  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
7063  if (err != NULL) {
7064  *err = error;
7065  }
7066  }
7067 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
7068 
7069  }
7070  }
7071 
7076  explicit CommandQueue(
7077  const Context& context,
7078  cl_command_queue_properties properties = 0,
7079  cl_int* err = NULL)
7080  {
7081  cl_int error;
7082  bool useWithProperties;
7083  vector<cl::Device> devices;
7084  error = context.getInfo(CL_CONTEXT_DEVICES, &devices);
7085 
7086  detail::errHandler(error, __CREATE_CONTEXT_ERR);
7087 
7088  if (error != CL_SUCCESS)
7089  {
7090  if (err != NULL) {
7091  *err = error;
7092  }
7093  return;
7094  }
7095 
7096 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
7097  // Run-time decision based on the actual platform
7098  {
7099  cl_uint version = detail::getContextPlatformVersion(context());
7100  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
7101  }
7102 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
7103  useWithProperties = true;
7104 #else
7105  useWithProperties = false;
7106 #endif
7107 
7108 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7109  if (useWithProperties) {
7110  cl_queue_properties queue_properties[] = {
7111  CL_QUEUE_PROPERTIES, properties, 0 };
7112  if ((properties & CL_QUEUE_ON_DEVICE) == 0) {
7113  object_ = ::clCreateCommandQueueWithProperties(
7114  context(), devices[0](), queue_properties, &error);
7115  }
7116  else {
7117  error = CL_INVALID_QUEUE_PROPERTIES;
7118  }
7119 
7120  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
7121  if (err != NULL) {
7122  *err = error;
7123  }
7124  }
7125 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
7126 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
7127  if (!useWithProperties) {
7128  object_ = ::clCreateCommandQueue(
7129  context(), devices[0](), properties, &error);
7130 
7131  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
7132  if (err != NULL) {
7133  *err = error;
7134  }
7135  }
7136 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
7137  }
7138 
7143  explicit CommandQueue(
7144  const Context& context,
7145  QueueProperties properties,
7146  cl_int* err = NULL)
7147  {
7148  cl_int error;
7149  bool useWithProperties;
7150  vector<cl::Device> devices;
7151  error = context.getInfo(CL_CONTEXT_DEVICES, &devices);
7152 
7153  detail::errHandler(error, __CREATE_CONTEXT_ERR);
7154 
7155  if (error != CL_SUCCESS)
7156  {
7157  if (err != NULL) {
7158  *err = error;
7159  }
7160  return;
7161  }
7162 
7163 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
7164  // Run-time decision based on the actual platform
7165  {
7166  cl_uint version = detail::getContextPlatformVersion(context());
7167  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
7168  }
7169 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
7170  useWithProperties = true;
7171 #else
7172  useWithProperties = false;
7173 #endif
7174 
7175 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7176  if (useWithProperties) {
7177  cl_queue_properties queue_properties[] = {
7178  CL_QUEUE_PROPERTIES, static_cast<cl_queue_properties>(properties), 0 };
7179  object_ = ::clCreateCommandQueueWithProperties(
7180  context(), devices[0](), queue_properties, &error);
7181 
7182  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
7183  if (err != NULL) {
7184  *err = error;
7185  }
7186  }
7187 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
7188 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
7189  if (!useWithProperties) {
7190  object_ = ::clCreateCommandQueue(
7191  context(), devices[0](), static_cast<cl_command_queue_properties>(properties), &error);
7192 
7193  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
7194  if (err != NULL) {
7195  *err = error;
7196  }
7197  }
7198 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
7199  }
7200 
7206  const Context& context,
7207  const Device& device,
7208  cl_command_queue_properties properties = 0,
7209  cl_int* err = NULL)
7210  {
7211  cl_int error;
7212  bool useWithProperties;
7213 
7214 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
7215  // Run-time decision based on the actual platform
7216  {
7217  cl_uint version = detail::getContextPlatformVersion(context());
7218  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
7219  }
7220 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
7221  useWithProperties = true;
7222 #else
7223  useWithProperties = false;
7224 #endif
7225 
7226 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7227  if (useWithProperties) {
7228  cl_queue_properties queue_properties[] = {
7229  CL_QUEUE_PROPERTIES, properties, 0 };
7230  object_ = ::clCreateCommandQueueWithProperties(
7231  context(), device(), queue_properties, &error);
7232 
7233  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
7234  if (err != NULL) {
7235  *err = error;
7236  }
7237  }
7238 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
7239 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
7240  if (!useWithProperties) {
7241  object_ = ::clCreateCommandQueue(
7242  context(), device(), properties, &error);
7243 
7244  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
7245  if (err != NULL) {
7246  *err = error;
7247  }
7248  }
7249 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
7250  }
7251 
7257  const Context& context,
7258  const Device& device,
7259  QueueProperties properties,
7260  cl_int* err = NULL)
7261  {
7262  cl_int error;
7263  bool useWithProperties;
7264 
7265 #if CL_HPP_TARGET_OPENCL_VERSION >= 200 && CL_HPP_MINIMUM_OPENCL_VERSION < 200
7266  // Run-time decision based on the actual platform
7267  {
7268  cl_uint version = detail::getContextPlatformVersion(context());
7269  useWithProperties = (version >= 0x20000); // OpenCL 2.0 or above
7270  }
7271 #elif CL_HPP_TARGET_OPENCL_VERSION >= 200
7272  useWithProperties = true;
7273 #else
7274  useWithProperties = false;
7275 #endif
7276 
7277 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7278  if (useWithProperties) {
7279  cl_queue_properties queue_properties[] = {
7280  CL_QUEUE_PROPERTIES, static_cast<cl_queue_properties>(properties), 0 };
7281  object_ = ::clCreateCommandQueueWithProperties(
7282  context(), device(), queue_properties, &error);
7283 
7284  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
7285  if (err != NULL) {
7286  *err = error;
7287  }
7288  }
7289 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
7290 #if CL_HPP_MINIMUM_OPENCL_VERSION < 200
7291  if (!useWithProperties) {
7292  object_ = ::clCreateCommandQueue(
7293  context(), device(), static_cast<cl_command_queue_properties>(properties), &error);
7294 
7295  detail::errHandler(error, __CREATE_COMMAND_QUEUE_ERR);
7296  if (err != NULL) {
7297  *err = error;
7298  }
7299  }
7300 #endif // CL_HPP_MINIMUM_OPENCL_VERSION < 200
7301  }
7302 
7303  static CommandQueue getDefault(cl_int * err = NULL)
7304  {
7305  std::call_once(default_initialized_, makeDefault);
7306 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7307  detail::errHandler(default_error_, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
7308 #else // CL_HPP_TARGET_OPENCL_VERSION >= 200
7309  detail::errHandler(default_error_, __CREATE_COMMAND_QUEUE_ERR);
7310 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 200
7311  if (err != NULL) {
7312  *err = default_error_;
7313  }
7314  return default_;
7315  }
7316 
7324  static CommandQueue setDefault(const CommandQueue &default_queue)
7325  {
7326  std::call_once(default_initialized_, makeDefaultProvided, std::cref(default_queue));
7327  detail::errHandler(default_error_);
7328  return default_;
7329  }
7330 
7331  CommandQueue() { }
7332 
7333 
7340  explicit CommandQueue(const cl_command_queue& commandQueue, bool retainObject = false) :
7341  detail::Wrapper<cl_type>(commandQueue, retainObject) { }
7342 
7343  CommandQueue& operator = (const cl_command_queue& rhs)
7344  {
7346  return *this;
7347  }
7348 
7352  CommandQueue(const CommandQueue& queue) : detail::Wrapper<cl_type>(queue) {}
7353 
7357  CommandQueue& operator = (const CommandQueue &queue)
7358  {
7360  return *this;
7361  }
7362 
7366  CommandQueue(CommandQueue&& queue) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(queue)) {}
7367 
7371  CommandQueue& operator = (CommandQueue &&queue)
7372  {
7373  detail::Wrapper<cl_type>::operator=(std::move(queue));
7374  return *this;
7375  }
7376 
7377  template <typename T>
7378  cl_int getInfo(cl_command_queue_info name, T* param) const
7379  {
7380  return detail::errHandler(
7381  detail::getInfo(
7382  &::clGetCommandQueueInfo, object_, name, param),
7383  __GET_COMMAND_QUEUE_INFO_ERR);
7384  }
7385 
7386  template <cl_int name> typename
7388  getInfo(cl_int* err = NULL) const
7389  {
7390  typename detail::param_traits<
7391  detail::cl_command_queue_info, name>::param_type param;
7392  cl_int result = getInfo(name, &param);
7393  if (err != NULL) {
7394  *err = result;
7395  }
7396  return param;
7397  }
7398 
7399  cl_int enqueueReadBuffer(
7400  const Buffer& buffer,
7401  cl_bool blocking,
7402  size_type offset,
7403  size_type size,
7404  void* ptr,
7405  const vector<Event>* events = NULL,
7406  Event* event = NULL) const
7407  {
7408  cl_event tmp;
7409  cl_int err = detail::errHandler(
7410  ::clEnqueueReadBuffer(
7411  object_, buffer(), blocking, offset, size,
7412  ptr,
7413  (events != NULL) ? (cl_uint) events->size() : 0,
7414  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7415  (event != NULL) ? &tmp : NULL),
7416  __ENQUEUE_READ_BUFFER_ERR);
7417 
7418  if (event != NULL && err == CL_SUCCESS)
7419  *event = tmp;
7420 
7421  return err;
7422  }
7423 
7424  cl_int enqueueWriteBuffer(
7425  const Buffer& buffer,
7426  cl_bool blocking,
7427  size_type offset,
7428  size_type size,
7429  const void* ptr,
7430  const vector<Event>* events = NULL,
7431  Event* event = NULL) const
7432  {
7433  cl_event tmp;
7434  cl_int err = detail::errHandler(
7435  ::clEnqueueWriteBuffer(
7436  object_, buffer(), blocking, offset, size,
7437  ptr,
7438  (events != NULL) ? (cl_uint) events->size() : 0,
7439  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7440  (event != NULL) ? &tmp : NULL),
7441  __ENQUEUE_WRITE_BUFFER_ERR);
7442 
7443  if (event != NULL && err == CL_SUCCESS)
7444  *event = tmp;
7445 
7446  return err;
7447  }
7448 
7449  cl_int enqueueCopyBuffer(
7450  const Buffer& src,
7451  const Buffer& dst,
7452  size_type src_offset,
7453  size_type dst_offset,
7454  size_type size,
7455  const vector<Event>* events = NULL,
7456  Event* event = NULL) const
7457  {
7458  cl_event tmp;
7459  cl_int err = detail::errHandler(
7460  ::clEnqueueCopyBuffer(
7461  object_, src(), dst(), src_offset, dst_offset, size,
7462  (events != NULL) ? (cl_uint) events->size() : 0,
7463  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7464  (event != NULL) ? &tmp : NULL),
7465  __ENQEUE_COPY_BUFFER_ERR);
7466 
7467  if (event != NULL && err == CL_SUCCESS)
7468  *event = tmp;
7469 
7470  return err;
7471  }
7472 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
7473  cl_int enqueueReadBufferRect(
7474  const Buffer& buffer,
7475  cl_bool blocking,
7476  const array<size_type, 3>& buffer_offset,
7477  const array<size_type, 3>& host_offset,
7478  const array<size_type, 3>& region,
7479  size_type buffer_row_pitch,
7480  size_type buffer_slice_pitch,
7481  size_type host_row_pitch,
7482  size_type host_slice_pitch,
7483  void *ptr,
7484  const vector<Event>* events = NULL,
7485  Event* event = NULL) const
7486  {
7487  cl_event tmp;
7488  cl_int err = detail::errHandler(
7489  ::clEnqueueReadBufferRect(
7490  object_,
7491  buffer(),
7492  blocking,
7493  buffer_offset.data(),
7494  host_offset.data(),
7495  region.data(),
7496  buffer_row_pitch,
7497  buffer_slice_pitch,
7498  host_row_pitch,
7499  host_slice_pitch,
7500  ptr,
7501  (events != NULL) ? (cl_uint) events->size() : 0,
7502  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7503  (event != NULL) ? &tmp : NULL),
7504  __ENQUEUE_READ_BUFFER_RECT_ERR);
7505 
7506  if (event != NULL && err == CL_SUCCESS)
7507  *event = tmp;
7508 
7509  return err;
7510  }
7511 
7512  cl_int enqueueWriteBufferRect(
7513  const Buffer& buffer,
7514  cl_bool blocking,
7515  const array<size_type, 3>& buffer_offset,
7516  const array<size_type, 3>& host_offset,
7517  const array<size_type, 3>& region,
7518  size_type buffer_row_pitch,
7519  size_type buffer_slice_pitch,
7520  size_type host_row_pitch,
7521  size_type host_slice_pitch,
7522  const void *ptr,
7523  const vector<Event>* events = NULL,
7524  Event* event = NULL) const
7525  {
7526  cl_event tmp;
7527  cl_int err = detail::errHandler(
7528  ::clEnqueueWriteBufferRect(
7529  object_,
7530  buffer(),
7531  blocking,
7532  buffer_offset.data(),
7533  host_offset.data(),
7534  region.data(),
7535  buffer_row_pitch,
7536  buffer_slice_pitch,
7537  host_row_pitch,
7538  host_slice_pitch,
7539  ptr,
7540  (events != NULL) ? (cl_uint) events->size() : 0,
7541  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7542  (event != NULL) ? &tmp : NULL),
7543  __ENQUEUE_WRITE_BUFFER_RECT_ERR);
7544 
7545  if (event != NULL && err == CL_SUCCESS)
7546  *event = tmp;
7547 
7548  return err;
7549  }
7550 
7551  cl_int enqueueCopyBufferRect(
7552  const Buffer& src,
7553  const Buffer& dst,
7554  const array<size_type, 3>& src_origin,
7555  const array<size_type, 3>& dst_origin,
7556  const array<size_type, 3>& region,
7557  size_type src_row_pitch,
7558  size_type src_slice_pitch,
7559  size_type dst_row_pitch,
7560  size_type dst_slice_pitch,
7561  const vector<Event>* events = NULL,
7562  Event* event = NULL) const
7563  {
7564  cl_event tmp;
7565  cl_int err = detail::errHandler(
7566  ::clEnqueueCopyBufferRect(
7567  object_,
7568  src(),
7569  dst(),
7570  src_origin.data(),
7571  dst_origin.data(),
7572  region.data(),
7573  src_row_pitch,
7574  src_slice_pitch,
7575  dst_row_pitch,
7576  dst_slice_pitch,
7577  (events != NULL) ? (cl_uint) events->size() : 0,
7578  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7579  (event != NULL) ? &tmp : NULL),
7580  __ENQEUE_COPY_BUFFER_RECT_ERR);
7581 
7582  if (event != NULL && err == CL_SUCCESS)
7583  *event = tmp;
7584 
7585  return err;
7586  }
7587 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
7588 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
7589 
7600  template<typename PatternType>
7602  const Buffer& buffer,
7603  PatternType pattern,
7604  size_type offset,
7605  size_type size,
7606  const vector<Event>* events = NULL,
7607  Event* event = NULL) const
7608  {
7609  cl_event tmp;
7610  cl_int err = detail::errHandler(
7611  ::clEnqueueFillBuffer(
7612  object_,
7613  buffer(),
7614  static_cast<void*>(&pattern),
7615  sizeof(PatternType),
7616  offset,
7617  size,
7618  (events != NULL) ? (cl_uint) events->size() : 0,
7619  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7620  (event != NULL) ? &tmp : NULL),
7621  __ENQUEUE_FILL_BUFFER_ERR);
7622 
7623  if (event != NULL && err == CL_SUCCESS)
7624  *event = tmp;
7625 
7626  return err;
7627  }
7628 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
7629 
7630  cl_int enqueueReadImage(
7631  const Image& image,
7632  cl_bool blocking,
7633  const array<size_type, 3>& origin,
7634  const array<size_type, 3>& region,
7635  size_type row_pitch,
7636  size_type slice_pitch,
7637  void* ptr,
7638  const vector<Event>* events = NULL,
7639  Event* event = NULL) const
7640  {
7641  cl_event tmp;
7642  cl_int err = detail::errHandler(
7643  ::clEnqueueReadImage(
7644  object_,
7645  image(),
7646  blocking,
7647  origin.data(),
7648  region.data(),
7649  row_pitch,
7650  slice_pitch,
7651  ptr,
7652  (events != NULL) ? (cl_uint) events->size() : 0,
7653  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7654  (event != NULL) ? &tmp : NULL),
7655  __ENQUEUE_READ_IMAGE_ERR);
7656 
7657  if (event != NULL && err == CL_SUCCESS)
7658  *event = tmp;
7659 
7660  return err;
7661  }
7662 
7663  cl_int enqueueWriteImage(
7664  const Image& image,
7665  cl_bool blocking,
7666  const array<size_type, 3>& origin,
7667  const array<size_type, 3>& region,
7668  size_type row_pitch,
7669  size_type slice_pitch,
7670  const void* ptr,
7671  const vector<Event>* events = NULL,
7672  Event* event = NULL) const
7673  {
7674  cl_event tmp;
7675  cl_int err = detail::errHandler(
7676  ::clEnqueueWriteImage(
7677  object_,
7678  image(),
7679  blocking,
7680  origin.data(),
7681  region.data(),
7682  row_pitch,
7683  slice_pitch,
7684  ptr,
7685  (events != NULL) ? (cl_uint) events->size() : 0,
7686  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7687  (event != NULL) ? &tmp : NULL),
7688  __ENQUEUE_WRITE_IMAGE_ERR);
7689 
7690  if (event != NULL && err == CL_SUCCESS)
7691  *event = tmp;
7692 
7693  return err;
7694  }
7695 
7696  cl_int enqueueCopyImage(
7697  const Image& src,
7698  const Image& dst,
7699  const array<size_type, 3>& src_origin,
7700  const array<size_type, 3>& dst_origin,
7701  const array<size_type, 3>& region,
7702  const vector<Event>* events = NULL,
7703  Event* event = NULL) const
7704  {
7705  cl_event tmp;
7706  cl_int err = detail::errHandler(
7707  ::clEnqueueCopyImage(
7708  object_,
7709  src(),
7710  dst(),
7711  src_origin.data(),
7712  dst_origin.data(),
7713  region.data(),
7714  (events != NULL) ? (cl_uint) events->size() : 0,
7715  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7716  (event != NULL) ? &tmp : NULL),
7717  __ENQUEUE_COPY_IMAGE_ERR);
7718 
7719  if (event != NULL && err == CL_SUCCESS)
7720  *event = tmp;
7721 
7722  return err;
7723  }
7724 
7725 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
7726 
7734  const Image& image,
7735  cl_float4 fillColor,
7736  const array<size_type, 3>& origin,
7737  const array<size_type, 3>& region,
7738  const vector<Event>* events = NULL,
7739  Event* event = NULL) const
7740  {
7741  cl_event tmp;
7742  cl_int err = detail::errHandler(
7743  ::clEnqueueFillImage(
7744  object_,
7745  image(),
7746  static_cast<void*>(&fillColor),
7747  origin.data(),
7748  region.data(),
7749  (events != NULL) ? (cl_uint) events->size() : 0,
7750  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7751  (event != NULL) ? &tmp : NULL),
7752  __ENQUEUE_FILL_IMAGE_ERR);
7753 
7754  if (event != NULL && err == CL_SUCCESS)
7755  *event = tmp;
7756 
7757  return err;
7758  }
7759 
7768  const Image& image,
7769  cl_int4 fillColor,
7770  const array<size_type, 3>& origin,
7771  const array<size_type, 3>& region,
7772  const vector<Event>* events = NULL,
7773  Event* event = NULL) const
7774  {
7775  cl_event tmp;
7776  cl_int err = detail::errHandler(
7777  ::clEnqueueFillImage(
7778  object_,
7779  image(),
7780  static_cast<void*>(&fillColor),
7781  origin.data(),
7782  region.data(),
7783  (events != NULL) ? (cl_uint) events->size() : 0,
7784  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7785  (event != NULL) ? &tmp : NULL),
7786  __ENQUEUE_FILL_IMAGE_ERR);
7787 
7788  if (event != NULL && err == CL_SUCCESS)
7789  *event = tmp;
7790 
7791  return err;
7792  }
7793 
7802  const Image& image,
7803  cl_uint4 fillColor,
7804  const array<size_type, 3>& origin,
7805  const array<size_type, 3>& region,
7806  const vector<Event>* events = NULL,
7807  Event* event = NULL) const
7808  {
7809  cl_event tmp;
7810  cl_int err = detail::errHandler(
7811  ::clEnqueueFillImage(
7812  object_,
7813  image(),
7814  static_cast<void*>(&fillColor),
7815  origin.data(),
7816  region.data(),
7817  (events != NULL) ? (cl_uint) events->size() : 0,
7818  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7819  (event != NULL) ? &tmp : NULL),
7820  __ENQUEUE_FILL_IMAGE_ERR);
7821 
7822  if (event != NULL && err == CL_SUCCESS)
7823  *event = tmp;
7824 
7825  return err;
7826  }
7827 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
7828 
7829  cl_int enqueueCopyImageToBuffer(
7830  const Image& src,
7831  const Buffer& dst,
7832  const array<size_type, 3>& src_origin,
7833  const array<size_type, 3>& region,
7834  size_type dst_offset,
7835  const vector<Event>* events = NULL,
7836  Event* event = NULL) const
7837  {
7838  cl_event tmp;
7839  cl_int err = detail::errHandler(
7840  ::clEnqueueCopyImageToBuffer(
7841  object_,
7842  src(),
7843  dst(),
7844  src_origin.data(),
7845  region.data(),
7846  dst_offset,
7847  (events != NULL) ? (cl_uint) events->size() : 0,
7848  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7849  (event != NULL) ? &tmp : NULL),
7850  __ENQUEUE_COPY_IMAGE_TO_BUFFER_ERR);
7851 
7852  if (event != NULL && err == CL_SUCCESS)
7853  *event = tmp;
7854 
7855  return err;
7856  }
7857 
7858  cl_int enqueueCopyBufferToImage(
7859  const Buffer& src,
7860  const Image& dst,
7861  size_type src_offset,
7862  const array<size_type, 3>& dst_origin,
7863  const array<size_type, 3>& region,
7864  const vector<Event>* events = NULL,
7865  Event* event = NULL) const
7866  {
7867  cl_event tmp;
7868  cl_int err = detail::errHandler(
7869  ::clEnqueueCopyBufferToImage(
7870  object_,
7871  src(),
7872  dst(),
7873  src_offset,
7874  dst_origin.data(),
7875  region.data(),
7876  (events != NULL) ? (cl_uint) events->size() : 0,
7877  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7878  (event != NULL) ? &tmp : NULL),
7879  __ENQUEUE_COPY_BUFFER_TO_IMAGE_ERR);
7880 
7881  if (event != NULL && err == CL_SUCCESS)
7882  *event = tmp;
7883 
7884  return err;
7885  }
7886 
7887  void* enqueueMapBuffer(
7888  const Buffer& buffer,
7889  cl_bool blocking,
7890  cl_map_flags flags,
7891  size_type offset,
7892  size_type size,
7893  const vector<Event>* events = NULL,
7894  Event* event = NULL,
7895  cl_int* err = NULL) const
7896  {
7897  cl_event tmp;
7898  cl_int error;
7899  void * result = ::clEnqueueMapBuffer(
7900  object_, buffer(), blocking, flags, offset, size,
7901  (events != NULL) ? (cl_uint) events->size() : 0,
7902  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7903  (event != NULL) ? &tmp : NULL,
7904  &error);
7905 
7906  detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
7907  if (err != NULL) {
7908  *err = error;
7909  }
7910  if (event != NULL && error == CL_SUCCESS)
7911  *event = tmp;
7912 
7913  return result;
7914  }
7915 
7916  void* enqueueMapImage(
7917  const Image& buffer,
7918  cl_bool blocking,
7919  cl_map_flags flags,
7920  const array<size_type, 3>& origin,
7921  const array<size_type, 3>& region,
7922  size_type * row_pitch,
7923  size_type * slice_pitch,
7924  const vector<Event>* events = NULL,
7925  Event* event = NULL,
7926  cl_int* err = NULL) const
7927  {
7928  cl_event tmp;
7929  cl_int error;
7930  void * result = ::clEnqueueMapImage(
7931  object_, buffer(), blocking, flags,
7932  origin.data(),
7933  region.data(),
7934  row_pitch, slice_pitch,
7935  (events != NULL) ? (cl_uint) events->size() : 0,
7936  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
7937  (event != NULL) ? &tmp : NULL,
7938  &error);
7939 
7940  detail::errHandler(error, __ENQUEUE_MAP_IMAGE_ERR);
7941  if (err != NULL) {
7942  *err = error;
7943  }
7944  if (event != NULL && error == CL_SUCCESS)
7945  *event = tmp;
7946  return result;
7947  }
7948 
7949 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
7950 
7954  template<typename T>
7956  T* ptr,
7957  cl_bool blocking,
7958  cl_map_flags flags,
7959  size_type size,
7960  const vector<Event>* events = NULL,
7961  Event* event = NULL) const
7962  {
7963  cl_event tmp;
7964  cl_int err = detail::errHandler(::clEnqueueSVMMap(
7965  object_, blocking, flags, static_cast<void*>(ptr), size,
7966  (events != NULL) ? (cl_uint)events->size() : 0,
7967  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
7968  (event != NULL) ? &tmp : NULL),
7969  __ENQUEUE_MAP_BUFFER_ERR);
7970 
7971  if (event != NULL && err == CL_SUCCESS)
7972  *event = tmp;
7973 
7974  return err;
7975  }
7976 
7977 
7982  template<typename T, class D>
7984  cl::pointer<T, D> &ptr,
7985  cl_bool blocking,
7986  cl_map_flags flags,
7987  size_type size,
7988  const vector<Event>* events = NULL,
7989  Event* event = NULL) const
7990  {
7991  cl_event tmp;
7992  cl_int err = detail::errHandler(::clEnqueueSVMMap(
7993  object_, blocking, flags, static_cast<void*>(ptr.get()), size,
7994  (events != NULL) ? (cl_uint)events->size() : 0,
7995  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
7996  (event != NULL) ? &tmp : NULL),
7997  __ENQUEUE_MAP_BUFFER_ERR);
7998 
7999  if (event != NULL && err == CL_SUCCESS)
8000  *event = tmp;
8001 
8002  return err;
8003  }
8004 
8009  template<typename T, class Alloc>
8011  cl::vector<T, Alloc> &container,
8012  cl_bool blocking,
8013  cl_map_flags flags,
8014  const vector<Event>* events = NULL,
8015  Event* event = NULL) const
8016  {
8017  cl_event tmp;
8018  cl_int err = detail::errHandler(::clEnqueueSVMMap(
8019  object_, blocking, flags, static_cast<void*>(container.data()), container.size(),
8020  (events != NULL) ? (cl_uint)events->size() : 0,
8021  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
8022  (event != NULL) ? &tmp : NULL),
8023  __ENQUEUE_MAP_BUFFER_ERR);
8024 
8025  if (event != NULL && err == CL_SUCCESS)
8026  *event = tmp;
8027 
8028  return err;
8029  }
8030 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8031 
8032  cl_int enqueueUnmapMemObject(
8033  const Memory& memory,
8034  void* mapped_ptr,
8035  const vector<Event>* events = NULL,
8036  Event* event = NULL) const
8037  {
8038  cl_event tmp;
8039  cl_int err = detail::errHandler(
8040  ::clEnqueueUnmapMemObject(
8041  object_, memory(), mapped_ptr,
8042  (events != NULL) ? (cl_uint) events->size() : 0,
8043  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8044  (event != NULL) ? &tmp : NULL),
8045  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8046 
8047  if (event != NULL && err == CL_SUCCESS)
8048  *event = tmp;
8049 
8050  return err;
8051  }
8052 
8053 
8054 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8055 
8059  template<typename T>
8061  T* ptr,
8062  const vector<Event>* events = NULL,
8063  Event* event = NULL) const
8064  {
8065  cl_event tmp;
8066  cl_int err = detail::errHandler(
8067  ::clEnqueueSVMUnmap(
8068  object_, static_cast<void*>(ptr),
8069  (events != NULL) ? (cl_uint)events->size() : 0,
8070  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
8071  (event != NULL) ? &tmp : NULL),
8072  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8073 
8074  if (event != NULL && err == CL_SUCCESS)
8075  *event = tmp;
8076 
8077  return err;
8078  }
8079 
8084  template<typename T, class D>
8086  cl::pointer<T, D> &ptr,
8087  const vector<Event>* events = NULL,
8088  Event* event = NULL) const
8089  {
8090  cl_event tmp;
8091  cl_int err = detail::errHandler(
8092  ::clEnqueueSVMUnmap(
8093  object_, static_cast<void*>(ptr.get()),
8094  (events != NULL) ? (cl_uint)events->size() : 0,
8095  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
8096  (event != NULL) ? &tmp : NULL),
8097  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8098 
8099  if (event != NULL && err == CL_SUCCESS)
8100  *event = tmp;
8101 
8102  return err;
8103  }
8104 
8109  template<typename T, class Alloc>
8111  cl::vector<T, Alloc> &container,
8112  const vector<Event>* events = NULL,
8113  Event* event = NULL) const
8114  {
8115  cl_event tmp;
8116  cl_int err = detail::errHandler(
8117  ::clEnqueueSVMUnmap(
8118  object_, static_cast<void*>(container.data()),
8119  (events != NULL) ? (cl_uint)events->size() : 0,
8120  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
8121  (event != NULL) ? &tmp : NULL),
8122  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8123 
8124  if (event != NULL && err == CL_SUCCESS)
8125  *event = tmp;
8126 
8127  return err;
8128  }
8129 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8130 
8131 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
8132 
8144  const vector<Event> *events = 0,
8145  Event *event = 0) const
8146  {
8147  cl_event tmp;
8148  cl_int err = detail::errHandler(
8149  ::clEnqueueMarkerWithWaitList(
8150  object_,
8151  (events != NULL) ? (cl_uint) events->size() : 0,
8152  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8153  (event != NULL) ? &tmp : NULL),
8154  __ENQUEUE_MARKER_WAIT_LIST_ERR);
8155 
8156  if (event != NULL && err == CL_SUCCESS)
8157  *event = tmp;
8158 
8159  return err;
8160  }
8161 
8174  const vector<Event> *events = 0,
8175  Event *event = 0) const
8176  {
8177  cl_event tmp;
8178  cl_int err = detail::errHandler(
8179  ::clEnqueueBarrierWithWaitList(
8180  object_,
8181  (events != NULL) ? (cl_uint) events->size() : 0,
8182  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8183  (event != NULL) ? &tmp : NULL),
8184  __ENQUEUE_BARRIER_WAIT_LIST_ERR);
8185 
8186  if (event != NULL && err == CL_SUCCESS)
8187  *event = tmp;
8188 
8189  return err;
8190  }
8191 
8197  const vector<Memory> &memObjects,
8198  cl_mem_migration_flags flags,
8199  const vector<Event>* events = NULL,
8200  Event* event = NULL
8201  ) const
8202  {
8203  cl_event tmp;
8204 
8205  vector<cl_mem> localMemObjects(memObjects.size());
8206 
8207  for( int i = 0; i < (int)memObjects.size(); ++i ) {
8208  localMemObjects[i] = memObjects[i]();
8209  }
8210 
8211  cl_int err = detail::errHandler(
8212  ::clEnqueueMigrateMemObjects(
8213  object_,
8214  (cl_uint)memObjects.size(),
8215  localMemObjects.data(),
8216  flags,
8217  (events != NULL) ? (cl_uint) events->size() : 0,
8218  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8219  (event != NULL) ? &tmp : NULL),
8220  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
8221 
8222  if (event != NULL && err == CL_SUCCESS)
8223  *event = tmp;
8224 
8225  return err;
8226  }
8227 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
8228 
8229 
8230 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
8231 
8236  template<typename T>
8238  const cl::vector<T*> &svmRawPointers,
8239  const cl::vector<size_type> &sizes,
8240  cl_mem_migration_flags flags = 0,
8241  const vector<Event>* events = NULL,
8242  Event* event = NULL) const
8243  {
8244  cl_event tmp;
8245  cl_int err = detail::errHandler(::clEnqueueSVMMigrateMem(
8246  object_,
8247  svmRawPointers.size(), static_cast<void**>(svmRawPointers.data()),
8248  sizes.data(), // array of sizes not passed
8249  flags,
8250  (events != NULL) ? (cl_uint)events->size() : 0,
8251  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
8252  (event != NULL) ? &tmp : NULL),
8253  __ENQUEUE_MIGRATE_SVM_ERR);
8254 
8255  if (event != NULL && err == CL_SUCCESS)
8256  *event = tmp;
8257 
8258  return err;
8259  }
8260 
8265  template<typename T>
8267  const cl::vector<T*> &svmRawPointers,
8268  cl_mem_migration_flags flags = 0,
8269  const vector<Event>* events = NULL,
8270  Event* event = NULL) const
8271  {
8272  return enqueueMigrateSVM(svmRawPointers, cl::vector<size_type>(svmRawPointers.size()), flags, events, event);
8273  }
8274 
8275 
8281  template<typename T, class D>
8283  const cl::vector<cl::pointer<T, D>> &svmPointers,
8284  const cl::vector<size_type> &sizes,
8285  cl_mem_migration_flags flags = 0,
8286  const vector<Event>* events = NULL,
8287  Event* event = NULL) const
8288  {
8289  cl_event tmp;
8290  cl::vector<void*> svmRawPointers;
8291  svmRawPointers.reserve(svmPointers.size());
8292  for (auto p : svmPointers) {
8293  svmRawPointers.push_back(static_cast<void*>(p.get()));
8294  }
8295 
8296  return enqueueMigrateSVM(svmRawPointers, sizes, flags, events, event);
8297  }
8298 
8299 
8304  template<typename T, class D>
8306  const cl::vector<cl::pointer<T, D>> &svmPointers,
8307  cl_mem_migration_flags flags = 0,
8308  const vector<Event>* events = NULL,
8309  Event* event = NULL) const
8310  {
8311  return enqueueMigrateSVM(svmPointers, cl::vector<size_type>(svmPointers.size()), flags, events, event);
8312  }
8313 
8319  template<typename T, class Alloc>
8321  const cl::vector<cl::vector<T, Alloc>> &svmContainers,
8322  const cl::vector<size_type> &sizes,
8323  cl_mem_migration_flags flags = 0,
8324  const vector<Event>* events = NULL,
8325  Event* event = NULL) const
8326  {
8327  cl_event tmp;
8328  cl::vector<void*> svmRawPointers;
8329  svmRawPointers.reserve(svmContainers.size());
8330  for (auto p : svmContainers) {
8331  svmRawPointers.push_back(static_cast<void*>(p.data()));
8332  }
8333 
8334  return enqueueMigrateSVM(svmRawPointers, sizes, flags, events, event);
8335  }
8336 
8341  template<typename T, class Alloc>
8343  const cl::vector<cl::vector<T, Alloc>> &svmContainers,
8344  cl_mem_migration_flags flags = 0,
8345  const vector<Event>* events = NULL,
8346  Event* event = NULL) const
8347  {
8348  return enqueueMigrateSVM(svmContainers, cl::vector<size_type>(svmContainers.size()), flags, events, event);
8349  }
8350 
8351 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
8352 
8353  cl_int enqueueNDRangeKernel(
8354  const Kernel& kernel,
8355  const NDRange& offset,
8356  const NDRange& global,
8357  const NDRange& local = NullRange,
8358  const vector<Event>* events = NULL,
8359  Event* event = NULL) const
8360  {
8361  cl_event tmp;
8362  cl_int err = detail::errHandler(
8363  ::clEnqueueNDRangeKernel(
8364  object_, kernel(), (cl_uint) global.dimensions(),
8365  offset.dimensions() != 0 ? (const size_type*) offset : NULL,
8366  (const size_type*) global,
8367  local.dimensions() != 0 ? (const size_type*) local : NULL,
8368  (events != NULL) ? (cl_uint) events->size() : 0,
8369  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8370  (event != NULL) ? &tmp : NULL),
8371  __ENQUEUE_NDRANGE_KERNEL_ERR);
8372 
8373  if (event != NULL && err == CL_SUCCESS)
8374  *event = tmp;
8375 
8376  return err;
8377  }
8378 
8379 #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
8380  CL_EXT_PREFIX__VERSION_1_2_DEPRECATED cl_int enqueueTask(
8381  const Kernel& kernel,
8382  const vector<Event>* events = NULL,
8383  Event* event = NULL) const CL_EXT_SUFFIX__VERSION_1_2_DEPRECATED
8384  {
8385  cl_event tmp;
8386  cl_int err = detail::errHandler(
8387  ::clEnqueueTask(
8388  object_, kernel(),
8389  (events != NULL) ? (cl_uint) events->size() : 0,
8390  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8391  (event != NULL) ? &tmp : NULL),
8392  __ENQUEUE_TASK_ERR);
8393 
8394  if (event != NULL && err == CL_SUCCESS)
8395  *event = tmp;
8396 
8397  return err;
8398  }
8399 #endif // #if defined(CL_USE_DEPRECATED_OPENCL_1_2_APIS)
8400 
8401  cl_int enqueueNativeKernel(
8402  void (CL_CALLBACK *userFptr)(void *),
8403  std::pair<void*, size_type> args,
8404  const vector<Memory>* mem_objects = NULL,
8405  const vector<const void*>* mem_locs = NULL,
8406  const vector<Event>* events = NULL,
8407  Event* event = NULL) const
8408  {
8409  size_type elements = 0;
8410  if (mem_objects != NULL) {
8411  elements = mem_objects->size();
8412  }
8413  vector<cl_mem> mems(elements);
8414  for (unsigned int i = 0; i < elements; i++) {
8415  mems[i] = ((*mem_objects)[i])();
8416  }
8417 
8418  cl_event tmp;
8419  cl_int err = detail::errHandler(
8420  ::clEnqueueNativeKernel(
8421  object_, userFptr, args.first, args.second,
8422  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8423  mems.data(),
8424  (mem_locs != NULL && mem_locs->size() > 0) ? (const void **) &mem_locs->front() : NULL,
8425  (events != NULL) ? (cl_uint) events->size() : 0,
8426  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8427  (event != NULL) ? &tmp : NULL),
8428  __ENQUEUE_NATIVE_KERNEL);
8429 
8430  if (event != NULL && err == CL_SUCCESS)
8431  *event = tmp;
8432 
8433  return err;
8434  }
8435 
8439 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
8440  CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
8441  cl_int enqueueMarker(Event* event = NULL) const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
8442  {
8443  cl_event tmp;
8444  cl_int err = detail::errHandler(
8445  ::clEnqueueMarker(
8446  object_,
8447  (event != NULL) ? &tmp : NULL),
8448  __ENQUEUE_MARKER_ERR);
8449 
8450  if (event != NULL && err == CL_SUCCESS)
8451  *event = tmp;
8452 
8453  return err;
8454  }
8455 
8456  CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
8457  cl_int enqueueWaitForEvents(const vector<Event>& events) const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
8458  {
8459  return detail::errHandler(
8460  ::clEnqueueWaitForEvents(
8461  object_,
8462  (cl_uint) events.size(),
8463  events.size() > 0 ? (const cl_event*) &events.front() : NULL),
8464  __ENQUEUE_WAIT_FOR_EVENTS_ERR);
8465  }
8466 #endif // defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
8467 
8468  cl_int enqueueAcquireGLObjects(
8469  const vector<Memory>* mem_objects = NULL,
8470  const vector<Event>* events = NULL,
8471  Event* event = NULL) const
8472  {
8473  cl_event tmp;
8474  cl_int err = detail::errHandler(
8475  ::clEnqueueAcquireGLObjects(
8476  object_,
8477  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8478  (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
8479  (events != NULL) ? (cl_uint) events->size() : 0,
8480  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8481  (event != NULL) ? &tmp : NULL),
8482  __ENQUEUE_ACQUIRE_GL_ERR);
8483 
8484  if (event != NULL && err == CL_SUCCESS)
8485  *event = tmp;
8486 
8487  return err;
8488  }
8489 
8490  cl_int enqueueReleaseGLObjects(
8491  const vector<Memory>* mem_objects = NULL,
8492  const vector<Event>* events = NULL,
8493  Event* event = NULL) const
8494  {
8495  cl_event tmp;
8496  cl_int err = detail::errHandler(
8497  ::clEnqueueReleaseGLObjects(
8498  object_,
8499  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8500  (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
8501  (events != NULL) ? (cl_uint) events->size() : 0,
8502  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8503  (event != NULL) ? &tmp : NULL),
8504  __ENQUEUE_RELEASE_GL_ERR);
8505 
8506  if (event != NULL && err == CL_SUCCESS)
8507  *event = tmp;
8508 
8509  return err;
8510  }
8511 
8512 #if defined (CL_HPP_USE_DX_INTEROP)
8513 typedef CL_API_ENTRY cl_int (CL_API_CALL *PFN_clEnqueueAcquireD3D10ObjectsKHR)(
8514  cl_command_queue command_queue, cl_uint num_objects,
8515  const cl_mem* mem_objects, cl_uint num_events_in_wait_list,
8516  const cl_event* event_wait_list, cl_event* event);
8517 typedef CL_API_ENTRY cl_int (CL_API_CALL *PFN_clEnqueueReleaseD3D10ObjectsKHR)(
8518  cl_command_queue command_queue, cl_uint num_objects,
8519  const cl_mem* mem_objects, cl_uint num_events_in_wait_list,
8520  const cl_event* event_wait_list, cl_event* event);
8521 
8522  cl_int enqueueAcquireD3D10Objects(
8523  const vector<Memory>* mem_objects = NULL,
8524  const vector<Event>* events = NULL,
8525  Event* event = NULL) const
8526  {
8527  static PFN_clEnqueueAcquireD3D10ObjectsKHR pfn_clEnqueueAcquireD3D10ObjectsKHR = NULL;
8528 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
8529  cl_context context = getInfo<CL_QUEUE_CONTEXT>();
8530  cl::Device device(getInfo<CL_QUEUE_DEVICE>());
8531  cl_platform_id platform = device.getInfo<CL_DEVICE_PLATFORM>();
8532  CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, clEnqueueAcquireD3D10ObjectsKHR);
8533 #endif
8534 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
8535  CL_HPP_INIT_CL_EXT_FCN_PTR_(clEnqueueAcquireD3D10ObjectsKHR);
8536 #endif
8537 
8538  cl_event tmp;
8539  cl_int err = detail::errHandler(
8540  pfn_clEnqueueAcquireD3D10ObjectsKHR(
8541  object_,
8542  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8543  (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
8544  (events != NULL) ? (cl_uint) events->size() : 0,
8545  (events != NULL) ? (cl_event*) &events->front() : NULL,
8546  (event != NULL) ? &tmp : NULL),
8547  __ENQUEUE_ACQUIRE_GL_ERR);
8548 
8549  if (event != NULL && err == CL_SUCCESS)
8550  *event = tmp;
8551 
8552  return err;
8553  }
8554 
8555  cl_int enqueueReleaseD3D10Objects(
8556  const vector<Memory>* mem_objects = NULL,
8557  const vector<Event>* events = NULL,
8558  Event* event = NULL) const
8559  {
8560  static PFN_clEnqueueReleaseD3D10ObjectsKHR pfn_clEnqueueReleaseD3D10ObjectsKHR = NULL;
8561 #if CL_HPP_TARGET_OPENCL_VERSION >= 120
8562  cl_context context = getInfo<CL_QUEUE_CONTEXT>();
8563  cl::Device device(getInfo<CL_QUEUE_DEVICE>());
8564  cl_platform_id platform = device.getInfo<CL_DEVICE_PLATFORM>();
8565  CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_(platform, clEnqueueReleaseD3D10ObjectsKHR);
8566 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 120
8567 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
8568  CL_HPP_INIT_CL_EXT_FCN_PTR_(clEnqueueReleaseD3D10ObjectsKHR);
8569 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
8570 
8571  cl_event tmp;
8572  cl_int err = detail::errHandler(
8573  pfn_clEnqueueReleaseD3D10ObjectsKHR(
8574  object_,
8575  (mem_objects != NULL) ? (cl_uint) mem_objects->size() : 0,
8576  (mem_objects != NULL && mem_objects->size() > 0) ? (const cl_mem *) &mem_objects->front(): NULL,
8577  (events != NULL) ? (cl_uint) events->size() : 0,
8578  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
8579  (event != NULL) ? &tmp : NULL),
8580  __ENQUEUE_RELEASE_GL_ERR);
8581 
8582  if (event != NULL && err == CL_SUCCESS)
8583  *event = tmp;
8584 
8585  return err;
8586  }
8587 #endif
8588 
8592 #if defined(CL_USE_DEPRECATED_OPENCL_1_1_APIS)
8593  CL_EXT_PREFIX__VERSION_1_1_DEPRECATED
8594  cl_int enqueueBarrier() const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
8595  {
8596  return detail::errHandler(
8597  ::clEnqueueBarrier(object_),
8598  __ENQUEUE_BARRIER_ERR);
8599  }
8600 #endif // CL_USE_DEPRECATED_OPENCL_1_1_APIS
8601 
8602  cl_int flush() const
8603  {
8604  return detail::errHandler(::clFlush(object_), __FLUSH_ERR);
8605  }
8606 
8607  cl_int finish() const
8608  {
8609  return detail::errHandler(::clFinish(object_), __FINISH_ERR);
8610  }
8611 }; // CommandQueue
8612 
8613 CL_HPP_DEFINE_STATIC_MEMBER_ std::once_flag CommandQueue::default_initialized_;
8614 CL_HPP_DEFINE_STATIC_MEMBER_ CommandQueue CommandQueue::default_;
8615 CL_HPP_DEFINE_STATIC_MEMBER_ cl_int CommandQueue::default_error_ = CL_SUCCESS;
8616 
8617 
8618 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8619 enum class DeviceQueueProperties : cl_command_queue_properties
8620 {
8621  None = 0,
8622  Profiling = CL_QUEUE_PROFILING_ENABLE,
8623 };
8624 
8625 inline DeviceQueueProperties operator|(DeviceQueueProperties lhs, DeviceQueueProperties rhs)
8626 {
8627  return static_cast<DeviceQueueProperties>(static_cast<cl_command_queue_properties>(lhs) | static_cast<cl_command_queue_properties>(rhs));
8628 }
8629 
8633 class DeviceCommandQueue : public detail::Wrapper<cl_command_queue>
8634 {
8635 public:
8636 
8641 
8645  DeviceCommandQueue(DeviceQueueProperties properties, cl_int* err = NULL)
8646  {
8647  cl_int error;
8650 
8651  cl_command_queue_properties mergedProperties =
8652  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | static_cast<cl_command_queue_properties>(properties);
8653 
8654  cl_queue_properties queue_properties[] = {
8655  CL_QUEUE_PROPERTIES, mergedProperties, 0 };
8656  object_ = ::clCreateCommandQueueWithProperties(
8657  context(), device(), queue_properties, &error);
8658 
8659  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8660  if (err != NULL) {
8661  *err = error;
8662  }
8663  }
8664 
8669  const Context& context,
8670  const Device& device,
8671  DeviceQueueProperties properties = DeviceQueueProperties::None,
8672  cl_int* err = NULL)
8673  {
8674  cl_int error;
8675 
8676  cl_command_queue_properties mergedProperties =
8677  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | static_cast<cl_command_queue_properties>(properties);
8678  cl_queue_properties queue_properties[] = {
8679  CL_QUEUE_PROPERTIES, mergedProperties, 0 };
8680  object_ = ::clCreateCommandQueueWithProperties(
8681  context(), device(), queue_properties, &error);
8682 
8683  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8684  if (err != NULL) {
8685  *err = error;
8686  }
8687  }
8688 
8693  const Context& context,
8694  const Device& device,
8695  cl_uint queueSize,
8696  DeviceQueueProperties properties = DeviceQueueProperties::None,
8697  cl_int* err = NULL)
8698  {
8699  cl_int error;
8700 
8701  cl_command_queue_properties mergedProperties =
8702  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | static_cast<cl_command_queue_properties>(properties);
8703  cl_queue_properties queue_properties[] = {
8704  CL_QUEUE_PROPERTIES, mergedProperties,
8705  CL_QUEUE_SIZE, queueSize,
8706  0 };
8707  object_ = ::clCreateCommandQueueWithProperties(
8708  context(), device(), queue_properties, &error);
8709 
8710  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8711  if (err != NULL) {
8712  *err = error;
8713  }
8714  }
8715 
8722  explicit DeviceCommandQueue(const cl_command_queue& commandQueue, bool retainObject = false) :
8723  detail::Wrapper<cl_type>(commandQueue, retainObject) { }
8724 
8725  DeviceCommandQueue& operator = (const cl_command_queue& rhs)
8726  {
8728  return *this;
8729  }
8730 
8734  DeviceCommandQueue(const DeviceCommandQueue& queue) : detail::Wrapper<cl_type>(queue) {}
8735 
8739  DeviceCommandQueue& operator = (const DeviceCommandQueue &queue)
8740  {
8742  return *this;
8743  }
8744 
8748  DeviceCommandQueue(DeviceCommandQueue&& queue) CL_HPP_NOEXCEPT_ : detail::Wrapper<cl_type>(std::move(queue)) {}
8749 
8754  {
8755  detail::Wrapper<cl_type>::operator=(std::move(queue));
8756  return *this;
8757  }
8758 
8759  template <typename T>
8760  cl_int getInfo(cl_command_queue_info name, T* param) const
8761  {
8762  return detail::errHandler(
8763  detail::getInfo(
8764  &::clGetCommandQueueInfo, object_, name, param),
8765  __GET_COMMAND_QUEUE_INFO_ERR);
8766  }
8767 
8768  template <cl_int name> typename
8770  getInfo(cl_int* err = NULL) const
8771  {
8772  typename detail::param_traits<
8773  detail::cl_command_queue_info, name>::param_type param;
8774  cl_int result = getInfo(name, &param);
8775  if (err != NULL) {
8776  *err = result;
8777  }
8778  return param;
8779  }
8780 
8788  cl_int *err = nullptr)
8789  {
8790  cl_int error;
8793 
8794  cl_command_queue_properties properties =
8795  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | CL_QUEUE_ON_DEVICE_DEFAULT;
8796  cl_queue_properties queue_properties[] = {
8797  CL_QUEUE_PROPERTIES, properties,
8798  0 };
8799  DeviceCommandQueue deviceQueue(
8800  ::clCreateCommandQueueWithProperties(
8801  context(), device(), queue_properties, &error));
8802 
8803  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8804  if (err != NULL) {
8805  *err = error;
8806  }
8807 
8808  return deviceQueue;
8809  }
8810 
8818  const Context &context, const Device &device, cl_int *err = nullptr)
8819  {
8820  cl_int error;
8821 
8822  cl_command_queue_properties properties =
8823  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | CL_QUEUE_ON_DEVICE_DEFAULT;
8824  cl_queue_properties queue_properties[] = {
8825  CL_QUEUE_PROPERTIES, properties,
8826  0 };
8827  DeviceCommandQueue deviceQueue(
8828  ::clCreateCommandQueueWithProperties(
8829  context(), device(), queue_properties, &error));
8830 
8831  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8832  if (err != NULL) {
8833  *err = error;
8834  }
8835 
8836  return deviceQueue;
8837  }
8838 
8846  const Context &context, const Device &device, cl_uint queueSize, cl_int *err = nullptr)
8847  {
8848  cl_int error;
8849 
8850  cl_command_queue_properties properties =
8851  CL_QUEUE_OUT_OF_ORDER_EXEC_MODE_ENABLE | CL_QUEUE_ON_DEVICE | CL_QUEUE_ON_DEVICE_DEFAULT;
8852  cl_queue_properties queue_properties[] = {
8853  CL_QUEUE_PROPERTIES, properties,
8854  CL_QUEUE_SIZE, queueSize,
8855  0 };
8856  DeviceCommandQueue deviceQueue(
8857  ::clCreateCommandQueueWithProperties(
8858  context(), device(), queue_properties, &error));
8859 
8860  detail::errHandler(error, __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR);
8861  if (err != NULL) {
8862  *err = error;
8863  }
8864 
8865  return deviceQueue;
8866  }
8867 
8868 
8869 
8870 #if CL_HPP_TARGET_OPENCL_VERSION >= 210
8871 
8877  static DeviceCommandQueue updateDefault(const Context &context, const Device &device, const DeviceCommandQueue &default_queue, cl_int *err = nullptr)
8878  {
8879  cl_int error;
8880  error = clSetDefaultDeviceCommandQueue(context.get(), device.get(), default_queue.get());
8881 
8882  detail::errHandler(error, __SET_DEFAULT_DEVICE_COMMAND_QUEUE_ERR);
8883  if (err != NULL) {
8884  *err = error;
8885  }
8886  return default_queue;
8887  }
8888 
8892  static DeviceCommandQueue getDefault(const CommandQueue &queue, cl_int * err = NULL)
8893  {
8894  return queue.getInfo<CL_QUEUE_DEVICE_DEFAULT>(err);
8895  }
8896 
8897 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 210
8898 }; // DeviceCommandQueue
8899 
8900 namespace detail
8901 {
8902  // Specialization for device command queue
8903  template <>
8905  {
8906  static size_type size(const cl::DeviceCommandQueue&) { return sizeof(cl_command_queue); }
8907  static const cl_command_queue* ptr(const cl::DeviceCommandQueue& value) { return &(value()); }
8908  };
8909 } // namespace detail
8910 
8911 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
8912 
8913 
8914 template< typename IteratorType >
8916  const Context &context,
8917  IteratorType startIterator,
8918  IteratorType endIterator,
8919  bool readOnly,
8920  bool useHostPtr,
8921  cl_int* err)
8922 {
8923  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
8924  cl_int error;
8925 
8926  cl_mem_flags flags = 0;
8927  if( readOnly ) {
8928  flags |= CL_MEM_READ_ONLY;
8929  }
8930  else {
8931  flags |= CL_MEM_READ_WRITE;
8932  }
8933  if( useHostPtr ) {
8934  flags |= CL_MEM_USE_HOST_PTR;
8935  }
8936 
8937  size_type size = sizeof(DataType)*(endIterator - startIterator);
8938 
8939  if( useHostPtr ) {
8940  object_ = ::clCreateBuffer(context(), flags, size, static_cast<DataType*>(&*startIterator), &error);
8941  } else {
8942  object_ = ::clCreateBuffer(context(), flags, size, 0, &error);
8943  }
8944 
8945  detail::errHandler(error, __CREATE_BUFFER_ERR);
8946  if (err != NULL) {
8947  *err = error;
8948  }
8949 
8950  if( !useHostPtr ) {
8951  CommandQueue queue(context, 0, &error);
8952  detail::errHandler(error, __CREATE_BUFFER_ERR);
8953  if (err != NULL) {
8954  *err = error;
8955  }
8956 
8957  error = cl::copy(queue, startIterator, endIterator, *this);
8958  detail::errHandler(error, __CREATE_BUFFER_ERR);
8959  if (err != NULL) {
8960  *err = error;
8961  }
8962  }
8963 }
8964 
8965 template< typename IteratorType >
8967  const CommandQueue &queue,
8968  IteratorType startIterator,
8969  IteratorType endIterator,
8970  bool readOnly,
8971  bool useHostPtr,
8972  cl_int* err)
8973 {
8974  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
8975  cl_int error;
8976 
8977  cl_mem_flags flags = 0;
8978  if (readOnly) {
8979  flags |= CL_MEM_READ_ONLY;
8980  }
8981  else {
8982  flags |= CL_MEM_READ_WRITE;
8983  }
8984  if (useHostPtr) {
8985  flags |= CL_MEM_USE_HOST_PTR;
8986  }
8987 
8988  size_type size = sizeof(DataType)*(endIterator - startIterator);
8989 
8990  Context context = queue.getInfo<CL_QUEUE_CONTEXT>();
8991 
8992  if (useHostPtr) {
8993  object_ = ::clCreateBuffer(context(), flags, size, static_cast<DataType*>(&*startIterator), &error);
8994  }
8995  else {
8996  object_ = ::clCreateBuffer(context(), flags, size, 0, &error);
8997  }
8998 
8999  detail::errHandler(error, __CREATE_BUFFER_ERR);
9000  if (err != NULL) {
9001  *err = error;
9002  }
9003 
9004  if (!useHostPtr) {
9005  error = cl::copy(queue, startIterator, endIterator, *this);
9006  detail::errHandler(error, __CREATE_BUFFER_ERR);
9007  if (err != NULL) {
9008  *err = error;
9009  }
9010  }
9011 }
9012 
9013 inline cl_int enqueueReadBuffer(
9014  const Buffer& buffer,
9015  cl_bool blocking,
9016  size_type offset,
9017  size_type size,
9018  void* ptr,
9019  const vector<Event>* events = NULL,
9020  Event* event = NULL)
9021 {
9022  cl_int error;
9023  CommandQueue queue = CommandQueue::getDefault(&error);
9024 
9025  if (error != CL_SUCCESS) {
9026  return error;
9027  }
9028 
9029  return queue.enqueueReadBuffer(buffer, blocking, offset, size, ptr, events, event);
9030 }
9031 
9032 inline cl_int enqueueWriteBuffer(
9033  const Buffer& buffer,
9034  cl_bool blocking,
9035  size_type offset,
9036  size_type size,
9037  const void* ptr,
9038  const vector<Event>* events = NULL,
9039  Event* event = NULL)
9040 {
9041  cl_int error;
9042  CommandQueue queue = CommandQueue::getDefault(&error);
9043 
9044  if (error != CL_SUCCESS) {
9045  return error;
9046  }
9047 
9048  return queue.enqueueWriteBuffer(buffer, blocking, offset, size, ptr, events, event);
9049 }
9050 
9051 inline void* enqueueMapBuffer(
9052  const Buffer& buffer,
9053  cl_bool blocking,
9054  cl_map_flags flags,
9055  size_type offset,
9056  size_type size,
9057  const vector<Event>* events = NULL,
9058  Event* event = NULL,
9059  cl_int* err = NULL)
9060 {
9061  cl_int error;
9062  CommandQueue queue = CommandQueue::getDefault(&error);
9063  detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
9064  if (err != NULL) {
9065  *err = error;
9066  }
9067 
9068  void * result = ::clEnqueueMapBuffer(
9069  queue(), buffer(), blocking, flags, offset, size,
9070  (events != NULL) ? (cl_uint) events->size() : 0,
9071  (events != NULL && events->size() > 0) ? (cl_event*) &events->front() : NULL,
9072  (cl_event*) event,
9073  &error);
9074 
9075  detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
9076  if (err != NULL) {
9077  *err = error;
9078  }
9079  return result;
9080 }
9081 
9082 
9083 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
9084 
9089 template<typename T>
9090 inline cl_int enqueueMapSVM(
9091  T* ptr,
9092  cl_bool blocking,
9093  cl_map_flags flags,
9094  size_type size,
9095  const vector<Event>* events,
9096  Event* event)
9097 {
9098  cl_int error;
9099  CommandQueue queue = CommandQueue::getDefault(&error);
9100  if (error != CL_SUCCESS) {
9101  return detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
9102  }
9103 
9104  return queue.enqueueMapSVM(
9105  ptr, blocking, flags, size, events, event);
9106 }
9107 
9113 template<typename T, class D>
9114 inline cl_int enqueueMapSVM(
9115  cl::pointer<T, D> ptr,
9116  cl_bool blocking,
9117  cl_map_flags flags,
9118  size_type size,
9119  const vector<Event>* events = NULL,
9120  Event* event = NULL)
9121 {
9122  cl_int error;
9123  CommandQueue queue = CommandQueue::getDefault(&error);
9124  if (error != CL_SUCCESS) {
9125  return detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
9126  }
9127 
9128  return queue.enqueueMapSVM(
9129  ptr, blocking, flags, size, events, event);
9130 }
9131 
9137 template<typename T, class Alloc>
9138 inline cl_int enqueueMapSVM(
9139  cl::vector<T, Alloc> container,
9140  cl_bool blocking,
9141  cl_map_flags flags,
9142  const vector<Event>* events = NULL,
9143  Event* event = NULL)
9144 {
9145  cl_int error;
9146  CommandQueue queue = CommandQueue::getDefault(&error);
9147  if (error != CL_SUCCESS) {
9148  return detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
9149  }
9150 
9151  return queue.enqueueMapSVM(
9152  container, blocking, flags, events, event);
9153 }
9154 
9155 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
9156 
9157 inline cl_int enqueueUnmapMemObject(
9158  const Memory& memory,
9159  void* mapped_ptr,
9160  const vector<Event>* events = NULL,
9161  Event* event = NULL)
9162 {
9163  cl_int error;
9164  CommandQueue queue = CommandQueue::getDefault(&error);
9165  detail::errHandler(error, __ENQUEUE_MAP_BUFFER_ERR);
9166  if (error != CL_SUCCESS) {
9167  return error;
9168  }
9169 
9170  cl_event tmp;
9171  cl_int err = detail::errHandler(
9172  ::clEnqueueUnmapMemObject(
9173  queue(), memory(), mapped_ptr,
9174  (events != NULL) ? (cl_uint)events->size() : 0,
9175  (events != NULL && events->size() > 0) ? (cl_event*)&events->front() : NULL,
9176  (event != NULL) ? &tmp : NULL),
9177  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
9178 
9179  if (event != NULL && err == CL_SUCCESS)
9180  *event = tmp;
9181 
9182  return err;
9183 }
9184 
9185 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
9186 
9191 template<typename T>
9192 inline cl_int enqueueUnmapSVM(
9193  T* ptr,
9194  const vector<Event>* events = NULL,
9195  Event* event = NULL)
9196 {
9197  cl_int error;
9198  CommandQueue queue = CommandQueue::getDefault(&error);
9199  if (error != CL_SUCCESS) {
9200  return detail::errHandler(error, __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
9201  }
9202 
9203  return detail::errHandler(queue.enqueueUnmapSVM(ptr, events, event),
9204  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
9205 
9206 }
9207 
9213 template<typename T, class D>
9214 inline cl_int enqueueUnmapSVM(
9215  cl::pointer<T, D> &ptr,
9216  const vector<Event>* events = NULL,
9217  Event* event = NULL)
9218 {
9219  cl_int error;
9220  CommandQueue queue = CommandQueue::getDefault(&error);
9221  if (error != CL_SUCCESS) {
9222  return detail::errHandler(error, __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
9223  }
9224 
9225  return detail::errHandler(queue.enqueueUnmapSVM(ptr, events, event),
9226  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
9227 }
9228 
9234 template<typename T, class Alloc>
9235 inline cl_int enqueueUnmapSVM(
9236  cl::vector<T, Alloc> &container,
9237  const vector<Event>* events = NULL,
9238  Event* event = NULL)
9239 {
9240  cl_int error;
9241  CommandQueue queue = CommandQueue::getDefault(&error);
9242  if (error != CL_SUCCESS) {
9243  return detail::errHandler(error, __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
9244  }
9245 
9246  return detail::errHandler(queue.enqueueUnmapSVM(container, events, event),
9247  __ENQUEUE_UNMAP_MEM_OBJECT_ERR);
9248 }
9249 
9250 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
9251 
9252 inline cl_int enqueueCopyBuffer(
9253  const Buffer& src,
9254  const Buffer& dst,
9255  size_type src_offset,
9256  size_type dst_offset,
9257  size_type size,
9258  const vector<Event>* events = NULL,
9259  Event* event = NULL)
9260 {
9261  cl_int error;
9262  CommandQueue queue = CommandQueue::getDefault(&error);
9263 
9264  if (error != CL_SUCCESS) {
9265  return error;
9266  }
9267 
9268  return queue.enqueueCopyBuffer(src, dst, src_offset, dst_offset, size, events, event);
9269 }
9270 
9276 template< typename IteratorType >
9277 inline cl_int copy( IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer )
9278 {
9279  cl_int error;
9280  CommandQueue queue = CommandQueue::getDefault(&error);
9281  if (error != CL_SUCCESS)
9282  return error;
9283 
9284  return cl::copy(queue, startIterator, endIterator, buffer);
9285 }
9286 
9292 template< typename IteratorType >
9293 inline cl_int copy( const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator )
9294 {
9295  cl_int error;
9296  CommandQueue queue = CommandQueue::getDefault(&error);
9297  if (error != CL_SUCCESS)
9298  return error;
9299 
9300  return cl::copy(queue, buffer, startIterator, endIterator);
9301 }
9302 
9308 template< typename IteratorType >
9309 inline cl_int copy( const CommandQueue &queue, IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer )
9310 {
9311  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
9312  cl_int error;
9313 
9314  size_type length = endIterator-startIterator;
9315  size_type byteLength = length*sizeof(DataType);
9316 
9317  DataType *pointer =
9318  static_cast<DataType*>(queue.enqueueMapBuffer(buffer, CL_TRUE, CL_MAP_WRITE, 0, byteLength, 0, 0, &error));
9319  // if exceptions enabled, enqueueMapBuffer will throw
9320  if( error != CL_SUCCESS ) {
9321  return error;
9322  }
9323 #if defined(_MSC_VER)
9324  std::copy(
9325  startIterator,
9326  endIterator,
9327  stdext::checked_array_iterator<DataType*>(
9328  pointer, length));
9329 #else
9330  std::copy(startIterator, endIterator, pointer);
9331 #endif
9332  Event endEvent;
9333  error = queue.enqueueUnmapMemObject(buffer, pointer, 0, &endEvent);
9334  // if exceptions enabled, enqueueUnmapMemObject will throw
9335  if( error != CL_SUCCESS ) {
9336  return error;
9337  }
9338  endEvent.wait();
9339  return CL_SUCCESS;
9340 }
9341 
9347 template< typename IteratorType >
9348 inline cl_int copy( const CommandQueue &queue, const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator )
9349 {
9350  typedef typename std::iterator_traits<IteratorType>::value_type DataType;
9351  cl_int error;
9352 
9353  size_type length = endIterator-startIterator;
9354  size_type byteLength = length*sizeof(DataType);
9355 
9356  DataType *pointer =
9357  static_cast<DataType*>(queue.enqueueMapBuffer(buffer, CL_TRUE, CL_MAP_READ, 0, byteLength, 0, 0, &error));
9358  // if exceptions enabled, enqueueMapBuffer will throw
9359  if( error != CL_SUCCESS ) {
9360  return error;
9361  }
9362  std::copy(pointer, pointer + length, startIterator);
9363  Event endEvent;
9364  error = queue.enqueueUnmapMemObject(buffer, pointer, 0, &endEvent);
9365  // if exceptions enabled, enqueueUnmapMemObject will throw
9366  if( error != CL_SUCCESS ) {
9367  return error;
9368  }
9369  endEvent.wait();
9370  return CL_SUCCESS;
9371 }
9372 
9373 
9374 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
9375 
9378 template<typename T, class Alloc>
9379 inline cl_int mapSVM(cl::vector<T, Alloc> &container)
9380 {
9381  return enqueueMapSVM(container, CL_TRUE, CL_MAP_READ | CL_MAP_WRITE);
9382 }
9383 
9387 template<typename T, class Alloc>
9388 inline cl_int unmapSVM(cl::vector<T, Alloc> &container)
9389 {
9390  return enqueueUnmapSVM(container);
9391 }
9392 
9393 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
9394 
9395 #if CL_HPP_TARGET_OPENCL_VERSION >= 110
9396 inline cl_int enqueueReadBufferRect(
9397  const Buffer& buffer,
9398  cl_bool blocking,
9399  const array<size_type, 3>& buffer_offset,
9400  const array<size_type, 3>& host_offset,
9401  const array<size_type, 3>& region,
9402  size_type buffer_row_pitch,
9403  size_type buffer_slice_pitch,
9404  size_type host_row_pitch,
9405  size_type host_slice_pitch,
9406  void *ptr,
9407  const vector<Event>* events = NULL,
9408  Event* event = NULL)
9409 {
9410  cl_int error;
9411  CommandQueue queue = CommandQueue::getDefault(&error);
9412 
9413  if (error != CL_SUCCESS) {
9414  return error;
9415  }
9416 
9417  return queue.enqueueReadBufferRect(
9418  buffer,
9419  blocking,
9420  buffer_offset,
9421  host_offset,
9422  region,
9423  buffer_row_pitch,
9424  buffer_slice_pitch,
9425  host_row_pitch,
9426  host_slice_pitch,
9427  ptr,
9428  events,
9429  event);
9430 }
9431 
9432 inline cl_int enqueueWriteBufferRect(
9433  const Buffer& buffer,
9434  cl_bool blocking,
9435  const array<size_type, 3>& buffer_offset,
9436  const array<size_type, 3>& host_offset,
9437  const array<size_type, 3>& region,
9438  size_type buffer_row_pitch,
9439  size_type buffer_slice_pitch,
9440  size_type host_row_pitch,
9441  size_type host_slice_pitch,
9442  const void *ptr,
9443  const vector<Event>* events = NULL,
9444  Event* event = NULL)
9445 {
9446  cl_int error;
9447  CommandQueue queue = CommandQueue::getDefault(&error);
9448 
9449  if (error != CL_SUCCESS) {
9450  return error;
9451  }
9452 
9453  return queue.enqueueWriteBufferRect(
9454  buffer,
9455  blocking,
9456  buffer_offset,
9457  host_offset,
9458  region,
9459  buffer_row_pitch,
9460  buffer_slice_pitch,
9461  host_row_pitch,
9462  host_slice_pitch,
9463  ptr,
9464  events,
9465  event);
9466 }
9467 
9468 inline cl_int enqueueCopyBufferRect(
9469  const Buffer& src,
9470  const Buffer& dst,
9471  const array<size_type, 3>& src_origin,
9472  const array<size_type, 3>& dst_origin,
9473  const array<size_type, 3>& region,
9474  size_type src_row_pitch,
9475  size_type src_slice_pitch,
9476  size_type dst_row_pitch,
9477  size_type dst_slice_pitch,
9478  const vector<Event>* events = NULL,
9479  Event* event = NULL)
9480 {
9481  cl_int error;
9482  CommandQueue queue = CommandQueue::getDefault(&error);
9483 
9484  if (error != CL_SUCCESS) {
9485  return error;
9486  }
9487 
9488  return queue.enqueueCopyBufferRect(
9489  src,
9490  dst,
9491  src_origin,
9492  dst_origin,
9493  region,
9494  src_row_pitch,
9495  src_slice_pitch,
9496  dst_row_pitch,
9497  dst_slice_pitch,
9498  events,
9499  event);
9500 }
9501 #endif // CL_HPP_TARGET_OPENCL_VERSION >= 110
9502 
9503 inline cl_int enqueueReadImage(
9504  const Image& image,
9505  cl_bool blocking,
9506  const array<size_type, 3>& origin,
9507  const array<size_type, 3>& region,
9508  size_type row_pitch,
9509  size_type slice_pitch,
9510  void* ptr,
9511  const vector<Event>* events = NULL,
9512  Event* event = NULL)
9513 {
9514  cl_int error;
9515  CommandQueue queue = CommandQueue::getDefault(&error);
9516 
9517  if (error != CL_SUCCESS) {
9518  return error;
9519  }
9520 
9521  return queue.enqueueReadImage(
9522  image,
9523  blocking,
9524  origin,
9525  region,
9526  row_pitch,
9527  slice_pitch,
9528  ptr,
9529  events,
9530  event);
9531 }
9532 
9533 inline cl_int enqueueWriteImage(
9534  const Image& image,
9535  cl_bool blocking,
9536  const array<size_type, 3>& origin,
9537  const array<size_type, 3>& region,
9538  size_type row_pitch,
9539  size_type slice_pitch,
9540  const void* ptr,
9541  const vector<Event>* events = NULL,
9542  Event* event = NULL)
9543 {
9544  cl_int error;
9545  CommandQueue queue = CommandQueue::getDefault(&error);
9546 
9547  if (error != CL_SUCCESS) {
9548  return error;
9549  }
9550 
9551  return queue.enqueueWriteImage(
9552  image,
9553  blocking,
9554  origin,
9555  region,
9556  row_pitch,
9557  slice_pitch,
9558  ptr,
9559  events,
9560  event);
9561 }
9562 
9563 inline cl_int enqueueCopyImage(
9564  const Image& src,
9565  const Image& dst,
9566  const array<size_type, 3>& src_origin,
9567  const array<size_type, 3>& dst_origin,
9568  const array<size_type, 3>& region,
9569  const vector<Event>* events = NULL,
9570  Event* event = NULL)
9571 {
9572  cl_int error;
9573  CommandQueue queue = CommandQueue::getDefault(&error);
9574 
9575  if (error != CL_SUCCESS) {
9576  return error;
9577  }
9578 
9579  return queue.enqueueCopyImage(
9580  src,
9581  dst,
9582  src_origin,
9583  dst_origin,
9584  region,
9585  events,
9586  event);
9587 }
9588 
9589 inline cl_int enqueueCopyImageToBuffer(
9590  const Image& src,
9591  const Buffer& dst,
9592  const array<size_type, 3>& src_origin,
9593  const array<size_type, 3>& region,
9594  size_type dst_offset,
9595  const vector<Event>* events = NULL,
9596  Event* event = NULL)
9597 {
9598  cl_int error;
9599  CommandQueue queue = CommandQueue::getDefault(&error);
9600 
9601  if (error != CL_SUCCESS) {
9602  return error;
9603  }
9604 
9605  return queue.enqueueCopyImageToBuffer(
9606  src,
9607  dst,
9608  src_origin,
9609  region,
9610  dst_offset,
9611  events,
9612  event);
9613 }
9614 
9615 inline cl_int enqueueCopyBufferToImage(
9616  const Buffer& src,
9617  const Image& dst,
9618  size_type src_offset,
9619  const array<size_type, 3>& dst_origin,
9620  const array<size_type, 3>& region,
9621  const vector<Event>* events = NULL,
9622  Event* event = NULL)
9623 {
9624  cl_int error;
9625  CommandQueue queue = CommandQueue::getDefault(&error);
9626 
9627  if (error != CL_SUCCESS) {
9628  return error;
9629  }
9630 
9631  return queue.enqueueCopyBufferToImage(
9632  src,
9633  dst,
9634  src_offset,
9635  dst_origin,
9636  region,
9637  events,
9638  event);
9639 }
9640 
9641 
9642 inline cl_int flush(void)
9643 {
9644  cl_int error;
9645  CommandQueue queue = CommandQueue::getDefault(&error);
9646 
9647  if (error != CL_SUCCESS) {
9648  return error;
9649  }
9650 
9651  return queue.flush();
9652 }
9653 
9654 inline cl_int finish(void)
9655 {
9656  cl_int error;
9657  CommandQueue queue = CommandQueue::getDefault(&error);
9658 
9659  if (error != CL_SUCCESS) {
9660  return error;
9661  }
9662 
9663 
9664  return queue.finish();
9665 }
9666 
9668 {
9669 private:
9670  CommandQueue queue_;
9671  const NDRange offset_;
9672  const NDRange global_;
9673  const NDRange local_;
9674  vector<Event> events_;
9675 
9676  template<typename... Ts>
9677  friend class KernelFunctor;
9678 
9679 public:
9680  EnqueueArgs(NDRange global) :
9681  queue_(CommandQueue::getDefault()),
9682  offset_(NullRange),
9683  global_(global),
9684  local_(NullRange)
9685  {
9686 
9687  }
9688 
9689  EnqueueArgs(NDRange global, NDRange local) :
9690  queue_(CommandQueue::getDefault()),
9691  offset_(NullRange),
9692  global_(global),
9693  local_(local)
9694  {
9695 
9696  }
9697 
9698  EnqueueArgs(NDRange offset, NDRange global, NDRange local) :
9699  queue_(CommandQueue::getDefault()),
9700  offset_(offset),
9701  global_(global),
9702  local_(local)
9703  {
9704 
9705  }
9706 
9707  EnqueueArgs(Event e, NDRange global) :
9708  queue_(CommandQueue::getDefault()),
9709  offset_(NullRange),
9710  global_(global),
9711  local_(NullRange)
9712  {
9713  events_.push_back(e);
9714  }
9715 
9716  EnqueueArgs(Event e, NDRange global, NDRange local) :
9717  queue_(CommandQueue::getDefault()),
9718  offset_(NullRange),
9719  global_(global),
9720  local_(local)
9721  {
9722  events_.push_back(e);
9723  }
9724 
9725  EnqueueArgs(Event e, NDRange offset, NDRange global, NDRange local) :
9726  queue_(CommandQueue::getDefault()),
9727  offset_(offset),
9728  global_(global),
9729  local_(local)
9730  {
9731  events_.push_back(e);
9732  }
9733 
9734  EnqueueArgs(const vector<Event> &events, NDRange global) :
9735  queue_(CommandQueue::getDefault()),
9736  offset_(NullRange),
9737  global_(global),
9738  local_(NullRange),
9739  events_(events)
9740  {
9741 
9742  }
9743 
9744  EnqueueArgs(const vector<Event> &events, NDRange global, NDRange local) :
9745  queue_(CommandQueue::getDefault()),
9746  offset_(NullRange),
9747  global_(global),
9748  local_(local),
9749  events_(events)
9750  {
9751 
9752  }
9753 
9754  EnqueueArgs(const vector<Event> &events, NDRange offset, NDRange global, NDRange local) :
9755  queue_(CommandQueue::getDefault()),
9756  offset_(offset),
9757  global_(global),
9758  local_(local),
9759  events_(events)
9760  {
9761 
9762  }
9763 
9764  EnqueueArgs(CommandQueue &queue, NDRange global) :
9765  queue_(queue),
9766  offset_(NullRange),
9767  global_(global),
9768  local_(NullRange)
9769  {
9770 
9771  }
9772 
9773  EnqueueArgs(CommandQueue &queue, NDRange global, NDRange local) :
9774  queue_(queue),
9775  offset_(NullRange),
9776  global_(global),
9777  local_(local)
9778  {
9779 
9780  }
9781 
9782  EnqueueArgs(CommandQueue &queue, NDRange offset, NDRange global, NDRange local) :
9783  queue_(queue),
9784  offset_(offset),
9785  global_(global),
9786  local_(local)
9787  {
9788 
9789  }
9790 
9791  EnqueueArgs(CommandQueue &queue, Event e, NDRange global) :
9792  queue_(queue),
9793  offset_(NullRange),
9794  global_(global),
9795  local_(NullRange)
9796  {
9797  events_.push_back(e);
9798  }
9799 
9800  EnqueueArgs(CommandQueue &queue, Event e, NDRange global, NDRange local) :
9801  queue_(queue),
9802  offset_(NullRange),
9803  global_(global),
9804  local_(local)
9805  {
9806  events_.push_back(e);
9807  }
9808 
9809  EnqueueArgs(CommandQueue &queue, Event e, NDRange offset, NDRange global, NDRange local) :
9810  queue_(queue),
9811  offset_(offset),
9812  global_(global),
9813  local_(local)
9814  {
9815  events_.push_back(e);
9816  }
9817 
9818  EnqueueArgs(CommandQueue &queue, const vector<Event> &events, NDRange global) :
9819  queue_(queue),
9820  offset_(NullRange),
9821  global_(global),
9822  local_(NullRange),
9823  events_(events)
9824  {
9825 
9826  }
9827 
9828  EnqueueArgs(CommandQueue &queue, const vector<Event> &events, NDRange global, NDRange local) :
9829  queue_(queue),
9830  offset_(NullRange),
9831  global_(global),
9832  local_(local),
9833  events_(events)
9834  {
9835 
9836  }
9837 
9838  EnqueueArgs(CommandQueue &queue, const vector<Event> &events, NDRange offset, NDRange global, NDRange local) :
9839  queue_(queue),
9840  offset_(offset),
9841  global_(global),
9842  local_(local),
9843  events_(events)
9844  {
9845 
9846  }
9847 };
9848 
9849 
9850 //----------------------------------------------------------------------------------------------
9851 
9852 
9857 template<typename... Ts>
9859 {
9860 private:
9861  Kernel kernel_;
9862 
9863  template<int index, typename T0, typename... T1s>
9864  void setArgs(T0&& t0, T1s&&... t1s)
9865  {
9866  kernel_.setArg(index, t0);
9867  setArgs<index + 1, T1s...>(std::forward<T1s>(t1s)...);
9868  }
9869 
9870  template<int index, typename T0>
9871  void setArgs(T0&& t0)
9872  {
9873  kernel_.setArg(index, t0);
9874  }
9875 
9876  template<int index>
9877  void setArgs()
9878  {
9879  }
9880 
9881 
9882 public:
9883  KernelFunctor(Kernel kernel) : kernel_(kernel)
9884  {}
9885 
9886  KernelFunctor(
9887  const Program& program,
9888  const string name,
9889  cl_int * err = NULL) :
9890  kernel_(program, name.c_str(), err)
9891  {}
9892 
9895 
9901  Event operator() (
9902  const EnqueueArgs& args,
9903  Ts... ts)
9904  {
9905  Event event;
9906  setArgs<0>(std::forward<Ts>(ts)...);
9907 
9908  args.queue_.enqueueNDRangeKernel(
9909  kernel_,
9910  args.offset_,
9911  args.global_,
9912  args.local_,
9913  &args.events_,
9914  &event);
9915 
9916  return event;
9917  }
9918 
9925  Event operator() (
9926  const EnqueueArgs& args,
9927  Ts... ts,
9928  cl_int &error)
9929  {
9930  Event event;
9931  setArgs<0>(std::forward<Ts>(ts)...);
9932 
9933  error = args.queue_.enqueueNDRangeKernel(
9934  kernel_,
9935  args.offset_,
9936  args.global_,
9937  args.local_,
9938  &args.events_,
9939  &event);
9940 
9941  return event;
9942  }
9943 
9944 #if CL_HPP_TARGET_OPENCL_VERSION >= 200
9945  cl_int setSVMPointers(const vector<void*> &pointerList)
9946  {
9947  return kernel_.setSVMPointers(pointerList);
9948  }
9949 
9950  template<typename T0, typename... T1s>
9951  cl_int setSVMPointers(const T0 &t0, T1s &... ts)
9952  {
9953  return kernel_.setSVMPointers(t0, ts...);
9954  }
9955 #endif // #if CL_HPP_TARGET_OPENCL_VERSION >= 200
9956 
9957  Kernel getKernel()
9958  {
9959  return kernel_;
9960  }
9961 };
9962 
9963 namespace compatibility {
9968  template<typename... Ts>
9970  {
9971  typedef KernelFunctor<Ts...> FunctorType;
9972 
9973  FunctorType functor_;
9974 
9975  make_kernel(
9976  const Program& program,
9977  const string name,
9978  cl_int * err = NULL) :
9979  functor_(FunctorType(program, name, err))
9980  {}
9981 
9982  make_kernel(
9983  const Kernel kernel) :
9984  functor_(FunctorType(kernel))
9985  {}
9986 
9989 
9991  typedef Event type_(
9992  const EnqueueArgs&,
9993  Ts...);
9994 
9995  Event operator()(
9996  const EnqueueArgs& enqueueArgs,
9997  Ts... args)
9998  {
9999  return functor_(
10000  enqueueArgs, args...);
10001  }
10002  };
10003 } // namespace compatibility
10004 
10005 
10006 //----------------------------------------------------------------------------------------------------------------------
10007 
10008 #undef CL_HPP_ERR_STR_
10009 #if !defined(CL_HPP_USER_OVERRIDE_ERROR_STRINGS)
10010 #undef __GET_DEVICE_INFO_ERR
10011 #undef __GET_PLATFORM_INFO_ERR
10012 #undef __GET_DEVICE_IDS_ERR
10013 #undef __GET_PLATFORM_IDS_ERR
10014 #undef __GET_CONTEXT_INFO_ERR
10015 #undef __GET_EVENT_INFO_ERR
10016 #undef __GET_EVENT_PROFILE_INFO_ERR
10017 #undef __GET_MEM_OBJECT_INFO_ERR
10018 #undef __GET_IMAGE_INFO_ERR
10019 #undef __GET_SAMPLER_INFO_ERR
10020 #undef __GET_KERNEL_INFO_ERR
10021 #undef __GET_KERNEL_ARG_INFO_ERR
10022 #undef __GET_KERNEL_SUB_GROUP_INFO_ERR
10023 #undef __GET_KERNEL_WORK_GROUP_INFO_ERR
10024 #undef __GET_PROGRAM_INFO_ERR
10025 #undef __GET_PROGRAM_BUILD_INFO_ERR
10026 #undef __GET_COMMAND_QUEUE_INFO_ERR
10027 #undef __CREATE_CONTEXT_ERR
10028 #undef __CREATE_CONTEXT_FROM_TYPE_ERR
10029 #undef __GET_SUPPORTED_IMAGE_FORMATS_ERR
10030 #undef __CREATE_BUFFER_ERR
10031 #undef __COPY_ERR
10032 #undef __CREATE_SUBBUFFER_ERR
10033 #undef __CREATE_GL_BUFFER_ERR
10034 #undef __CREATE_GL_RENDER_BUFFER_ERR
10035 #undef __GET_GL_OBJECT_INFO_ERR
10036 #undef __CREATE_IMAGE_ERR
10037 #undef __CREATE_GL_TEXTURE_ERR
10038 #undef __IMAGE_DIMENSION_ERR
10039 #undef __SET_MEM_OBJECT_DESTRUCTOR_CALLBACK_ERR
10040 #undef __CREATE_USER_EVENT_ERR
10041 #undef __SET_USER_EVENT_STATUS_ERR
10042 #undef __SET_EVENT_CALLBACK_ERR
10043 #undef __WAIT_FOR_EVENTS_ERR
10044 #undef __CREATE_KERNEL_ERR
10045 #undef __SET_KERNEL_ARGS_ERR
10046 #undef __CREATE_PROGRAM_WITH_SOURCE_ERR
10047 #undef __CREATE_PROGRAM_WITH_IL_ERR
10048 #undef __CREATE_PROGRAM_WITH_BINARY_ERR
10049 #undef __CREATE_PROGRAM_WITH_IL_ERR
10050 #undef __CREATE_PROGRAM_WITH_BUILT_IN_KERNELS_ERR
10051 #undef __BUILD_PROGRAM_ERR
10052 #undef __COMPILE_PROGRAM_ERR
10053 #undef __LINK_PROGRAM_ERR
10054 #undef __CREATE_KERNELS_IN_PROGRAM_ERR
10055 #undef __CREATE_COMMAND_QUEUE_WITH_PROPERTIES_ERR
10056 #undef __CREATE_SAMPLER_WITH_PROPERTIES_ERR
10057 #undef __SET_COMMAND_QUEUE_PROPERTY_ERR
10058 #undef __ENQUEUE_READ_BUFFER_ERR
10059 #undef __ENQUEUE_READ_BUFFER_RECT_ERR
10060 #undef __ENQUEUE_WRITE_BUFFER_ERR
10061 #undef __ENQUEUE_WRITE_BUFFER_RECT_ERR
10062 #undef __ENQEUE_COPY_BUFFER_ERR
10063 #undef __ENQEUE_COPY_BUFFER_RECT_ERR
10064 #undef __ENQUEUE_FILL_BUFFER_ERR
10065 #undef __ENQUEUE_READ_IMAGE_ERR
10066 #undef __ENQUEUE_WRITE_IMAGE_ERR
10067 #undef __ENQUEUE_COPY_IMAGE_ERR
10068 #undef __ENQUEUE_FILL_IMAGE_ERR
10069 #undef __ENQUEUE_COPY_IMAGE_TO_BUFFER_ERR
10070 #undef __ENQUEUE_COPY_BUFFER_TO_IMAGE_ERR
10071 #undef __ENQUEUE_MAP_BUFFER_ERR
10072 #undef __ENQUEUE_MAP_IMAGE_ERR
10073 #undef __ENQUEUE_UNMAP_MEM_OBJECT_ERR
10074 #undef __ENQUEUE_NDRANGE_KERNEL_ERR
10075 #undef __ENQUEUE_NATIVE_KERNEL
10076 #undef __ENQUEUE_MIGRATE_MEM_OBJECTS_ERR
10077 #undef __ENQUEUE_MIGRATE_SVM_ERR
10078 #undef __ENQUEUE_ACQUIRE_GL_ERR
10079 #undef __ENQUEUE_RELEASE_GL_ERR
10080 #undef __CREATE_PIPE_ERR
10081 #undef __GET_PIPE_INFO_ERR
10082 #undef __RETAIN_ERR
10083 #undef __RELEASE_ERR
10084 #undef __FLUSH_ERR
10085 #undef __FINISH_ERR
10086 #undef __VECTOR_CAPACITY_ERR
10087 #undef __CREATE_SUB_DEVICES_ERR
10088 #undef __CREATE_SUB_DEVICES_ERR
10089 #undef __ENQUEUE_MARKER_ERR
10090 #undef __ENQUEUE_WAIT_FOR_EVENTS_ERR
10091 #undef __ENQUEUE_BARRIER_ERR
10092 #undef __UNLOAD_COMPILER_ERR
10093 #undef __CREATE_GL_TEXTURE_2D_ERR
10094 #undef __CREATE_GL_TEXTURE_3D_ERR
10095 #undef __CREATE_IMAGE2D_ERR
10096 #undef __CREATE_IMAGE3D_ERR
10097 #undef __CREATE_COMMAND_QUEUE_ERR
10098 #undef __ENQUEUE_TASK_ERR
10099 #undef __CREATE_SAMPLER_ERR
10100 #undef __ENQUEUE_MARKER_WAIT_LIST_ERR
10101 #undef __ENQUEUE_BARRIER_WAIT_LIST_ERR
10102 #undef __CLONE_KERNEL_ERR
10103 #undef __GET_HOST_TIMER_ERR
10104 #undef __GET_DEVICE_AND_HOST_TIMER_ERR
10105 
10106 #endif //CL_HPP_USER_OVERRIDE_ERROR_STRINGS
10107 
10108 // Extensions
10109 #undef CL_HPP_INIT_CL_EXT_FCN_PTR_
10110 #undef CL_HPP_INIT_CL_EXT_FCN_PTR_PLATFORM_
10111 
10112 #if defined(CL_HPP_USE_CL_DEVICE_FISSION)
10113 #undef CL_HPP_PARAM_NAME_DEVICE_FISSION_
10114 #endif // CL_HPP_USE_CL_DEVICE_FISSION
10115 
10116 #undef CL_HPP_NOEXCEPT_
10117 #undef CL_HPP_DEFINE_STATIC_MEMBER_
10118 
10119 } // namespace cl
10120 
10121 #endif // CL_HPP_
Memory()
Default constructor - initializes to NULL.
Definition: cl2.hpp:3293
Event type_(const EnqueueArgs &, Ts...)
Function signature of kernel functor with no event dependency.
Definition: cl2.hpp:9991
cl_int enqueueMigrateSVM(const cl::vector< cl::vector< T, Alloc >> &svmContainers, cl_mem_migration_flags flags=0, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8342
BufferRenderGL(const cl_mem &buffer, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4233
CommandQueue(CommandQueue &&queue) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:7366
Image interface for arrays of 2D images.
Definition: cl2.hpp:4983
Image1DBuffer(const cl_mem &image1D, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4512
Image interface for arrays of 1D images.
Definition: cl2.hpp:4554
Image2DGL()
Default constructor - initializes to NULL.
Definition: cl2.hpp:4926
cl_int getInfo(cl_context_info name, T *param) const
Wrapper for clGetContextInfo().
Definition: cl2.hpp:2994
Adds constructors and member functions for cl_image_format.
Definition: cl2.hpp:2008
Context(const cl_context &context, bool retainObject=false)
Constructor from cl_context - takes ownership.
Definition: cl2.hpp:2978
Image & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:4311
Image3D(const Image3D &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5180
static DeviceCommandQueue makeDefault(const Context &context, const Device &device, cl_uint queueSize, cl_int *err=nullptr)
Definition: cl2.hpp:8845
Image2DGL(const Image2DGL &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4951
std::enable_if< std::is_pointer< T >::value, cl_int >::type setArg(cl_uint index, const T argPtr)
setArg overload taking a pointer type
Definition: cl2.hpp:5980
Image1DBuffer(const Image1DBuffer &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4524
Image2D(const Context &context, cl_mem_flags flags, ImageFormat format, size_type width, size_type height, size_type row_pitch=0, void *host_ptr=NULL, cl_int *err=NULL)
Constructs a 2D Image in a specified context.
Definition: cl2.hpp:4655
Image1D(const Context &context, cl_mem_flags flags, ImageFormat format, size_type width, void *host_ptr=NULL, cl_int *err=NULL)
Constructs a 1D Image in a specified context.
Definition: cl2.hpp:4385
Image2D & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:4846
cl_int setSVMPointers(const vector< void *> &pointerList)
Definition: cl2.hpp:6015
The OpenCL C++ bindings are defined within this namespace.
Definition: cl2.hpp:578
CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_int enqueueBarrier() const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
Definition: cl2.hpp:8594
Image(Image &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4334
detail::param_traits< detail::cl_device_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetDeviceInfo() that returns by value.
Definition: cl2.hpp:2163
cl_int unmapSVM(cl::vector< T, Alloc > &container)
Definition: cl2.hpp:9388
Context(const Context &ctx)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:2916
Local address wrapper for use with Kernel::setArg.
Definition: cl2.hpp:5738
detail::param_traits< detail::cl_pipe_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetMemObjectInfo() that returns by value.
Definition: cl2.hpp:5508
cl_int enqueueMapSVM(cl::vector< T, Alloc > &container, cl_bool blocking, cl_map_flags flags, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8010
Class interface for GL 3D Image Memory objects.
Definition: cl2.hpp:5215
CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_int enqueueMarker(Event *event=NULL) const CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
Definition: cl2.hpp:8441
CommandQueue(const Context &context, const Device &device, QueueProperties properties, cl_int *err=NULL)
Constructs a CommandQueue for a passed device and context Will return an CL_INVALID_QUEUE_PROPERTIES ...
Definition: cl2.hpp:7256
CL_EXT_PREFIX__VERSION_1_1_DEPRECATED cl_int UnloadCompiler() CL_EXT_SUFFIX__VERSION_1_1_DEPRECATED
Definition: cl2.hpp:2683
static DeviceCommandQueue makeDefault(const Context &context, const Device &device, cl_int *err=nullptr)
Definition: cl2.hpp:8817
cl_int getDevices(cl_device_type type, vector< Device > *devices) const
Gets a list of devices for this platform.
Definition: cl2.hpp:2464
Program(const Context &context, const vector< Device > &devices, const Binaries &binaries, vector< cl_int > *binaryStatus=NULL, cl_int *err=NULL)
Definition: cl2.hpp:6414
Buffer(IteratorType startIterator, IteratorType endIterator, bool readOnly, bool useHostPtr=false, cl_int *err=NULL)
Construct a Buffer from a host container via iterators. IteratorType must be random access...
Definition: cl2.hpp:3839
Class interface for cl_mem.
Definition: cl2.hpp:3289
Pipe(cl_uint packet_size, cl_uint max_packets, cl_int *err=NULL)
Constructs a Pipe in a the default context.
Definition: cl2.hpp:5427
cl_int enqueueMigrateMemObjects(const vector< Memory > &memObjects, cl_mem_migration_flags flags, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8196
Device(const Device &dev)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:2126
BufferRenderGL(BufferRenderGL &&buf) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4263
Image2DArray(Image2DArray &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5058
detail::param_traits< detail::cl_image_info, name >::param_type getImageInfo(cl_int *err=NULL) const
Wrapper for clGetImageInfo() that returns by value.
Definition: cl2.hpp:4359
Memory(Memory &&mem) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:3337
cl_int enqueueMarkerWithWaitList(const vector< Event > *events=0, Event *event=0) const
Definition: cl2.hpp:8143
Image3D(Image3D &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5194
cl_int getSupportedImageFormats(cl_mem_flags flags, cl_mem_object_type type, vector< ImageFormat > *formats) const
Gets a list of supported image formats.
Definition: cl2.hpp:3019
BufferRenderGL()
Default constructor - initializes to NULL.
Definition: cl2.hpp:4224
Image(const Image &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4320
cl_int enqueueFillBuffer(const Buffer &buffer, PatternType pattern, size_type offset, size_type size, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7601
Image1DArray(const Image1DArray &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4613
Kernel(const Kernel &kernel)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5829
CommandQueue(const Context &context, cl_command_queue_properties properties=0, cl_int *err=NULL)
Constructs a CommandQueue for an implementation defined device in the given context Will return an CL...
Definition: cl2.hpp:7076
cl_int enqueueMigrateSVM(const cl::vector< cl::pointer< T, D >> &svmPointers, cl_mem_migration_flags flags=0, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8305
Buffer createSubBuffer(cl_mem_flags flags, cl_buffer_create_type buffer_create_type, const void *buffer_create_info, cl_int *err=NULL)
Creates a new buffer object from this.
Definition: cl2.hpp:3957
cl_int enqueueBarrierWithWaitList(const vector< Event > *events=0, Event *event=0) const
Definition: cl2.hpp:8173
Kernel clone()
Definition: cl2.hpp:6114
Buffer(Buffer &&buf) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:3941
vector< T, cl::SVMAllocator< int, cl::SVMTraitCoarse<> >> coarse_svm_vector
Vector alias to simplify contruction of coarse-grained SVM containers.
Definition: cl2.hpp:3756
static Device getDefault(cl_int *errResult=NULL)
Returns the first device on the default context.
Definition: cl2.hpp:2088
cl_int enqueueUnmapSVM(T *ptr, const vector< Event > *events=NULL, Event *event=NULL)
Definition: cl2.hpp:9192
Sampler(const Sampler &sam)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5604
cl_int setCallback(cl_int type, void(CL_CALLBACK *pfn_notify)(cl_event, cl_int, void *), void *user_data=NULL)
Registers a user callback function for a specific command execution status.
Definition: cl2.hpp:3196
size_type dimensions() const
Queries the number of dimensions in the range.
Definition: cl2.hpp:5711
Event()
Default constructor - initializes to NULL.
Definition: cl2.hpp:3110
ImageGL(const cl_mem &image, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:5341
Image3D(const Context &context, cl_mem_flags flags, ImageFormat format, size_type width, size_type height, size_type depth, size_type row_pitch=0, size_type slice_pitch=0, void *host_ptr=NULL, cl_int *err=NULL)
Constructs a 3D Image in a specified context.
Definition: cl2.hpp:5084
cl_int enqueueMigrateSVM(const cl::vector< T *> &svmRawPointers, cl_mem_migration_flags flags=0, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8266
cl_int setArg(cl_uint index, const cl::vector< T, Alloc > &argPtr)
setArg overload taking a vector type.
Definition: cl2.hpp:5969
detail::param_traits< detail::cl_profiling_info, name >::param_type getProfilingInfo(cl_int *err=NULL) const
Wrapper for clGetEventProfilingInfo() that returns by value.
Definition: cl2.hpp:3169
Image1D(const Image1D &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4440
BufferRenderGL(const BufferRenderGL &buf)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4249
DeviceCommandQueue(DeviceQueueProperties properties, cl_int *err=NULL)
Definition: cl2.hpp:8645
Pipe(const Context &context, cl_uint packet_size, cl_uint max_packets, cl_int *err=NULL)
Constructs a Pipe in a specified context.
Definition: cl2.hpp:5402
Image3D()
Default constructor - initializes to NULL.
Definition: cl2.hpp:5155
CommandQueue(const Context &context, const Device &device, cl_command_queue_properties properties=0, cl_int *err=NULL)
Constructs a CommandQueue for a passed device and context Will return an CL_INVALID_QUEUE_PROPERTIES ...
Definition: cl2.hpp:7205
cl_ulong getHostTimer(cl_int *error=nullptr)
Definition: cl2.hpp:2182
Image2DGL(Image2DGL &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4965
cl_int getInfo(cl_sampler_info name, T *param) const
Wrapper for clGetSamplerInfo().
Definition: cl2.hpp:5631
Kernel()
Default constructor - initializes to NULL.
Definition: cl2.hpp:5802
Image2DGL(const Context &context, cl_mem_flags flags, cl_GLenum target, cl_GLint miplevel, cl_GLuint texobj, cl_int *err=NULL)
Constructs an Image2DGL in a specified context, from a given GL Texture.
Definition: cl2.hpp:4901
Sampler()
Default constructor - initializes to NULL.
Definition: cl2.hpp:5534
Image1D(Image1D &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4454
Program(Program &&program) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:6542
cl_int getObjectInfo(cl_gl_object_type *type, cl_GLuint *gl_object_name)
Wrapper for clGetGLObjectInfo().
Definition: cl2.hpp:4275
cl_int unloadCompiler()
Wrapper for clUnloadCompiler().
Definition: cl2.hpp:2660
std::enable_if<!std::is_pointer< T >::value, cl_int >::type setArg(cl_uint index, const T &value)
setArg overload taking a POD type
Definition: cl2.hpp:5992
size_type max_size() const CL_HPP_NOEXCEPT_
Definition: cl2.hpp:3621
Image1DArray(const cl_mem &imageArray, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4600
Class interface for cl_event.
Definition: cl2.hpp:3106
vector< T, cl::SVMAllocator< int, cl::SVMTraitFine<> >> fine_svm_vector
Vector alias to simplify contruction of fine-grained SVM containers.
Definition: cl2.hpp:3762
Image2DGL(const cl_mem &image, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4935
NDRange(size_type size0)
Constructs one-dimensional range.
Definition: cl2.hpp:5676
Program(const Context &context, const Sources &sources, cl_int *err=NULL)
Definition: cl2.hpp:6254
vector< T, cl::SVMAllocator< int, cl::SVMTraitAtomic<> >> atomic_svm_vector
Vector alias to simplify contruction of fine-grained SVM containers that support platform atomics...
Definition: cl2.hpp:3768
static DeviceCommandQueue getDefault(const CommandQueue &queue, cl_int *err=NULL)
Definition: cl2.hpp:8892
Buffer & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:3918
detail::param_traits< detail::cl_event_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetEventInfo() that returns by value.
Definition: cl2.hpp:3146
Class interface for Pipe Memory Objects.
Definition: cl2.hpp:5389
Program(const Context &context, const vector< char > &IL, bool build=false, cl_int *err=NULL)
Definition: cl2.hpp:6346
cl_int enqueueUnmapSVM(cl::pointer< T, D > &ptr, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8085
Image2D(const Image2D &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4855
Buffer()
Default constructor - initializes to NULL.
Definition: cl2.hpp:3902
DeviceCommandQueue(const Context &context, const Device &device, DeviceQueueProperties properties=DeviceQueueProperties::None, cl_int *err=NULL)
Definition: cl2.hpp:8668
cl_int setArg(cl_uint index, const cl::pointer< T, D > &argPtr)
setArg overload taking a shared_ptr type
Definition: cl2.hpp:5959
Image3DGL(const Image3DGL &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5272
cl_int copy(IteratorType startIterator, IteratorType endIterator, cl::Buffer &buffer)
Definition: cl2.hpp:9277
Context(const vector< Device > &devices, cl_context_properties *properties=NULL, void(CL_CALLBACK *notifyFptr)(const char *, const void *, size_type, void *)=NULL, void *data=NULL, cl_int *err=NULL)
Constructs a context including a list of specified devices.
Definition: cl2.hpp:2769
DeviceCommandQueue(const DeviceCommandQueue &queue)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:8734
Sampler(const cl_sampler &sampler, bool retainObject=false)
Constructor from cl_sampler - takes ownership.
Definition: cl2.hpp:5587
Image()
Default constructor - initializes to NULL.
Definition: cl2.hpp:4295
static cl_int release(cl_device_id device)
Definition: cl2.hpp:1568
cl_int getInfo(cl_device_info name, T *param) const
Wrapper for clGetDeviceInfo().
Definition: cl2.hpp:2153
Event(const cl_event &event, bool retainObject=false)
Constructor from cl_event - takes ownership.
Definition: cl2.hpp:3120
DeviceCommandQueue(const Context &context, const Device &device, cl_uint queueSize, DeviceQueueProperties properties=DeviceQueueProperties::None, cl_int *err=NULL)
Definition: cl2.hpp:8692
NDRange(size_type size0, size_type size1)
Constructs two-dimensional range.
Definition: cl2.hpp:5685
Image3DGL(const Context &context, cl_mem_flags flags, cl_GLenum target, cl_GLint miplevel, cl_GLuint texobj, cl_int *err=NULL)
Constructs an Image3DGL in a specified context, from a given GL Texture.
Definition: cl2.hpp:5223
Image2D()
Default constructor - initializes to NULL.
Definition: cl2.hpp:4830
Image3D(const cl_mem &image3D, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:5164
Image2D(Image2D &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4869
DeviceCommandQueue interface for device cl_command_queues.
Definition: cl2.hpp:8633
Image1DArray(Image1DArray &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4627
Pipe()
Default constructor - initializes to NULL.
Definition: cl2.hpp:5446
CommandQueue(const cl_command_queue &commandQueue, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:7340
Pipe(const Pipe &pipe)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5471
cl_int getInfo(cl_event_info name, T *param) const
Wrapper for clGetEventInfo().
Definition: cl2.hpp:3136
Image1D()
Default constructor - initializes to NULL.
Definition: cl2.hpp:4415
Buffer(const cl_mem &buffer, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:3911
Sampler(Sampler &&sam) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5618
static CommandQueue setDefault(const CommandQueue &default_queue)
Definition: cl2.hpp:7324
Program(const Context &context, const vector< Device > &devices, const string &kernelNames, cl_int *err=NULL)
Definition: cl2.hpp:6478
CommandQueue(const CommandQueue &queue)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:7352
cl_int enqueueMigrateSVM(const cl::vector< cl::pointer< T, D >> &svmPointers, const cl::vector< size_type > &sizes, cl_mem_migration_flags flags=0, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8282
cl_int enqueueMapSVM(T *ptr, cl_bool blocking, cl_map_flags flags, size_type size, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7955
Kernel(Kernel &&kernel) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5843
cl_int enqueueFillImage(const Image &image, cl_uint4 fillColor, const array< size_type, 3 > &origin, const array< size_type, 3 > &region, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7801
Image2D(const Context &context, ImageFormat format, const Buffer &sourceBuffer, size_type width, size_type height, size_type row_pitch=0, cl_int *err=nullptr)
Constructs a 2D Image from a buffer.
Definition: cl2.hpp:4726
Program(const Sources &sources, cl_int *err=NULL)
Definition: cl2.hpp:6219
Device(const cl_device_id &device, bool retainObject=false)
Constructor from cl_device_id.
Definition: cl2.hpp:2081
NDRange()
Default constructor - resulting range has zero dimensions.
Definition: cl2.hpp:5667
BufferGL(const BufferGL &buf)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4152
Image3D & operator=(const cl_mem &rhs)
Assignment from cl_mem - performs shallow copy.
Definition: cl2.hpp:5171
CommandQueue(const Context &context, QueueProperties properties, cl_int *err=NULL)
Constructs a CommandQueue for an implementation defined device in the given context Will return an CL...
Definition: cl2.hpp:7143
cl_int setDestructorCallback(void(CL_CALLBACK *pfn_notify)(cl_mem, void *), void *user_data=NULL)
Registers a callback function to be called when the memory object is no longer needed.
Definition: cl2.hpp:3386
CommandQueue(cl_command_queue_properties properties, cl_int *err=NULL)
Constructs a CommandQueue based on passed properties. Will return an CL_INVALID_QUEUE_PROPERTIES erro...
Definition: cl2.hpp:6947
Buffer(const Context &context, cl_mem_flags flags, size_type size, void *host_ptr=NULL, cl_int *err=NULL)
Constructs a Buffer in a specified context.
Definition: cl2.hpp:3790
cl_int wait() const
Blocks the calling thread until this event completes.
Definition: cl2.hpp:3184
ImageFormat()
Default constructor - performs no initialization.
Definition: cl2.hpp:2011
Event result_type
Return type of the functor.
Definition: cl2.hpp:9894
Class interface for cl_platform_id.
Definition: cl2.hpp:2318
cl_int setSVMPointers(const std::array< void *, ArrayLength > &pointerList)
Definition: cl2.hpp:6030
Image2DArray(const cl_mem &imageArray, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:5033
Buffer(const Buffer &buf)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:3927
Image(const cl_mem &image, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4304
Memory(const Memory &mem)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:3323
Program(const Program &program)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:6528
cl_int getObjectInfo(cl_gl_object_type *type, cl_GLuint *gl_object_name)
Wrapper for clGetGLObjectInfo().
Definition: cl2.hpp:4178
static cl_int retain(cl_device_id device)
Definition: cl2.hpp:1557
Image3DGL()
Default constructor - initializes to NULL.
Definition: cl2.hpp:5247
Image2D(const Context &context, cl_channel_order order, const Image &sourceImage, cl_int *err=nullptr)
Constructs a 2D Image from an image.
Definition: cl2.hpp:4776
CommandQueue interface for cl_command_queue.
Definition: cl2.hpp:6881
Image2D(const cl_mem &image2D, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4839
cl_int enqueueUnmapSVM(cl::vector< T, Alloc > &container, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8110
Sampler(const Context &context, cl_bool normalized_coords, cl_addressing_mode addressing_mode, cl_filter_mode filter_mode, cl_int *err=NULL)
Constructs a Sampler in a specified context.
Definition: cl2.hpp:5540
Image1D(const cl_mem &image1D, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4424
pointer allocate(size_type size, typename cl::SVMAllocator< void, SVMTrait >::const_pointer=0)
Definition: cl2.hpp:3579
std::pair< cl_ulong, cl_ulong > getDeviceAndHostTimer(cl_int *error=nullptr)
Definition: cl2.hpp:2206
static DeviceCommandQueue makeDefault(cl_int *err=nullptr)
Definition: cl2.hpp:8787
UserEvent()
Default constructor - initializes to NULL.
Definition: cl2.hpp:3253
Program(const vector< char > &IL, bool build=false, cl_int *err=NULL)
Definition: cl2.hpp:6291
cl_int enqueueMigrateSVM(const cl::vector< T *> &svmRawPointers, const cl::vector< size_type > &sizes, cl_mem_migration_flags flags=0, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8237
DeviceCommandQueue(const cl_command_queue &commandQueue, bool retainObject=false)
Constructor from cl_command_queue - takes ownership.
Definition: cl2.hpp:8722
cl_int enableFineGrainedSystemSVM(bool svmEnabled)
Enable fine-grained system SVM.
Definition: cl2.hpp:6051
ImageFormat(cl_channel_order order, cl_channel_type type)
Initializing constructor.
Definition: cl2.hpp:2014
Pipe(const cl_mem &pipe, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:5455
detail::param_traits< detail::cl_context_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetContextInfo() that returns by value.
Definition: cl2.hpp:3004
Pipe(Pipe &&pipe) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5485
cl_int mapSVM(cl::vector< T, Alloc > &container)
Definition: cl2.hpp:9379
cl::pointer< T, detail::Deleter< Alloc > > allocate_pointer(const Alloc &alloc_, Args &&... args)
Definition: cl2.hpp:3709
Class interface for user events (a subset of cl_event&#39;s).
Definition: cl2.hpp:3230
bool operator==(SVMAllocator const &rhs)
Definition: cl2.hpp:3649
cl_int enqueueFillImage(const Image &image, cl_int4 fillColor, const array< size_type, 3 > &origin, const array< size_type, 3 > &region, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7767
Program(const cl_program &program, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:6516
cl_int enqueueUnmapSVM(T *ptr, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8060
Kernel(const cl_kernel &kernel, bool retainObject=false)
Constructor from cl_kernel - takes ownership.
Definition: cl2.hpp:5812
vector< std::pair< cl::Device, typename detail::param_traits< detail::cl_program_build_info, name >::param_type > > getBuildInfo(cl_int *err=NULL) const
Definition: cl2.hpp:6666
cl_int getProfilingInfo(cl_profiling_info name, T *param) const
Wrapper for clGetEventProfilingInfo().
Definition: cl2.hpp:3159
Class interface for cl_sampler.
Definition: cl2.hpp:5530
ImageGL(const ImageGL &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5353
LocalSpaceArg Local(size_type size)
Helper function for generating LocalSpaceArg objects.
Definition: cl2.hpp:5782
Event result_type
Return type of the functor.
Definition: cl2.hpp:9988
detail::param_traits< detail::cl_mem_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetMemObjectInfo() that returns by value.
Definition: cl2.hpp:3361
C++ base class for Image Memory objects.
Definition: cl2.hpp:4291
cl_int enqueueMapSVM(T *ptr, cl_bool blocking, cl_map_flags flags, size_type size, const vector< Event > *events=NULL, Event *event=NULL)
Definition: cl2.hpp:9090
Buffer(cl_mem_flags flags, size_type size, void *host_ptr=NULL, cl_int *err=NULL)
Constructs a Buffer in the default context.
Definition: cl2.hpp:3815
static Context setDefault(const Context &default_context)
Definition: cl2.hpp:2963
cl_int enqueueMapSVM(cl::pointer< T, D > &ptr, cl_bool blocking, cl_map_flags flags, size_type size, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7983
BufferGL(BufferGL &&buf) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4166
Class interface for Buffer Memory Objects.
Definition: cl2.hpp:3779
detail::param_traits< detail::cl_sampler_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetSamplerInfo() that returns by value.
Definition: cl2.hpp:5641
static Device setDefault(const Device &default_device)
Definition: cl2.hpp:2106
Image1DBuffer(Image1DBuffer &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:4538
Class interface for specifying NDRange values.
Definition: cl2.hpp:5659
Class interface for 2D Image Memory objects.
Definition: cl2.hpp:4648
ImageGL(ImageGL &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5367
Class interface for 1D Image Memory objects.
Definition: cl2.hpp:4378
Class interface for cl_kernel.
Definition: cl2.hpp:5796
NDRange(size_type size0, size_type size1, size_type size2)
Constructs three-dimensional range.
Definition: cl2.hpp:5694
static cl_int waitForEvents(const vector< Event > &events)
Blocks the calling thread until every event specified is complete.
Definition: cl2.hpp:3216
Class interface for 3D Image Memory objects.
Definition: cl2.hpp:5077
Class interface for GL Render Buffer Memory Objects.
Definition: cl2.hpp:4196
static DeviceCommandQueue updateDefault(const Context &context, const Device &device, const DeviceCommandQueue &default_queue, cl_int *err=nullptr)
Definition: cl2.hpp:8877
general image interface for GL interop. We abstract the 2D and 3D GL images into a single instance he...
Definition: cl2.hpp:5306
cl_int setStatus(cl_int status)
Sets the execution status of a user event object.
Definition: cl2.hpp:3259
detail::param_traits< detail::cl_platform_info, name >::param_type getInfo(cl_int *err=NULL) const
Wrapper for clGetPlatformInfo() that returns by value.
Definition: cl2.hpp:2449
Device()
Default constructor - initializes to NULL.
Definition: cl2.hpp:2075
Context()
Default constructor - initializes to NULL.
Definition: cl2.hpp:2971
Image2DArray(const Image2DArray &img)
Copy constructor to forward copy to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5044
Class interface for cl_device_id.
Definition: cl2.hpp:2038
Context(Context &&ctx) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:2930
static Context getDefault(cl_int *err=NULL)
Returns a singleton context including all devices of CL_DEVICE_TYPE_DEFAULT.
Definition: cl2.hpp:2946
Context(cl_device_type type, cl_context_properties *properties=NULL, void(CL_CALLBACK *notifyFptr)(const char *, const void *, size_type, void *)=NULL, void *data=NULL, cl_int *err=NULL)
Constructs a context including all or a subset of devices of a specified type.
Definition: cl2.hpp:2830
Memory(const cl_mem &memory, bool retainObject)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:3306
Platform(const cl_platform_id &platform, bool retainObject=false)
Constructor from cl_platform_id.
Definition: cl2.hpp:2400
Device(Device &&dev) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:2140
BufferRenderGL(const Context &context, cl_mem_flags flags, cl_GLuint bufobj, cl_int *err=NULL)
Constructs a BufferRenderGL in a specified context, from a given GL Renderbuffer. ...
Definition: cl2.hpp:4204
cl_int getImageInfo(cl_image_info name, T *param) const
Wrapper for clGetImageInfo().
Definition: cl2.hpp:4349
BufferGL()
Default constructor - initializes to NULL.
Definition: cl2.hpp:4127
UserEvent(const Context &context, cl_int *err=NULL)
Constructs a user event on a given context.
Definition: cl2.hpp:3237
cl_int enqueueFillImage(const Image &image, cl_float4 fillColor, const array< size_type, 3 > &origin, const array< size_type, 3 > &region, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:7733
CommandQueue(QueueProperties properties, cl_int *err=NULL)
Constructs a CommandQueue based on passed properties. Will return an CL_INVALID_QUEUE_PROPERTIES erro...
Definition: cl2.hpp:7013
cl_int getInfo(cl_pipe_info name, T *param) const
Wrapper for clGetMemObjectInfo().
Definition: cl2.hpp:5498
Image interface for 1D buffer images.
Definition: cl2.hpp:4470
Program interface that implements cl_program.
Definition: cl2.hpp:6128
Image3DGL(const cl_mem &image, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:5256
Image3DGL(Image3DGL &&img) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:5286
cl_int getInfo(cl_platform_info name, string *param) const
Wrapper for clGetPlatformInfo().
Definition: cl2.hpp:2439
Class interface for GL Buffer Memory Objects.
Definition: cl2.hpp:4099
DeviceCommandQueue(DeviceCommandQueue &&queue) CL_HPP_NOEXCEPT_
Move constructor to forward move to the superclass correctly. Required for MSVC.
Definition: cl2.hpp:8748
cl_int copy(const CommandQueue &queue, const cl::Buffer &buffer, IteratorType startIterator, IteratorType endIterator)
Definition: cl2.hpp:9348
static Platform setDefault(const Platform &default_platform)
Definition: cl2.hpp:2431
Class interface for GL 2D Image Memory objects.
Definition: cl2.hpp:4893
Class interface for cl_context.
Definition: cl2.hpp:2697
BufferGL(const Context &context, cl_mem_flags flags, cl_GLuint bufobj, cl_int *err=NULL)
Constructs a BufferGL in a specified context, from a given GL buffer.
Definition: cl2.hpp:4107
Platform()
Default constructor - initializes to NULL.
Definition: cl2.hpp:2391
cl_int enqueueMigrateSVM(const cl::vector< cl::vector< T, Alloc >> &svmContainers, const cl::vector< size_type > &sizes, cl_mem_migration_flags flags=0, const vector< Event > *events=NULL, Event *event=NULL) const
Definition: cl2.hpp:8320
Memory & operator=(const cl_mem &rhs)
Assignment operator from cl_mem - takes ownership.
Definition: cl2.hpp:3314
size_type size() const
Returns the size of the object in bytes based on the.
Definition: cl2.hpp:5718
BufferGL(const cl_mem &buffer, bool retainObject=false)
Constructor from cl_mem - takes ownership.
Definition: cl2.hpp:4136
cl_int getInfo(cl_mem_info name, T *param) const
Wrapper for clGetMemObjectInfo().
Definition: cl2.hpp:3351