#ifndef HALIDE_RUNTIME_VULKAN_RESOURCES_H
#define HALIDE_RUNTIME_VULKAN_RESOURCES_H
int vk_create_command_pool(void *user_context, VulkanMemoryAllocator *allocator, uint32_t queue_index, VkCommandPool *command_pool) {
    debug(user_context)
        << " vk_create_command_pool (user_context: " << user_context << ", "
        << "allocator: " << (void *)allocator << ", "
        << "queue_index: " << queue_index << ")\n";

    if (allocator == nullptr) {
        error(user_context) << "Vulkan: Failed to create command pool ... invalid allocator pointer!\n";
        return halide_error_code_generic_error;
    }

    VkCommandPoolCreateInfo command_pool_info = {
        VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,  // struct type
        nullptr,                                     // pointer to struct extending this
        VK_COMMAND_POOL_CREATE_TRANSIENT_BIT,        // flags: command buffers will be short-lived
        queue_index,                                 // queue family index for the compute queue
    };

    VkResult result = vkCreateCommandPool(allocator->current_device(), &command_pool_info, allocator->callbacks(), command_pool);
    if (result != VK_SUCCESS) {
        error(user_context) << "Vulkan: Failed to create command pool!\n";
        return halide_error_code_generic_error;
    }
    return halide_error_code_success;
}
int vk_destroy_command_pool(void *user_context, VulkanMemoryAllocator *allocator, VkCommandPool command_pool) {
    debug(user_context)
        << " vk_destroy_command_pool (user_context: " << user_context << ", "
        << "allocator: " << (void *)allocator << ", "
        << "command_pool: " << (void *)command_pool << ")\n";

    if (allocator == nullptr) {
        error(user_context) << "Vulkan: Failed to destroy command pool ... invalid allocator pointer!\n";
        return halide_error_code_generic_error;
    }
    vkDestroyCommandPool(allocator->current_device(), command_pool, allocator->callbacks());
    return halide_error_code_success;
}
int vk_create_command_buffer(void *user_context, VulkanMemoryAllocator *allocator, VkCommandPool command_pool, VkCommandBuffer *command_buffer) {
    debug(user_context)
        << " vk_create_command_buffer (user_context: " << user_context << ", "
        << "allocator: " << (void *)allocator << ", "
        << "command_pool: " << (void *)command_pool << ")\n";

    if (allocator == nullptr) {
        error(user_context) << "Vulkan: Failed to create command buffer ... invalid allocator pointer!\n";
        return halide_error_code_generic_error;
    }

    VkCommandBufferAllocateInfo command_buffer_info = {
        VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,  // struct type
        nullptr,                                         // pointer to struct extending this
        command_pool,                                    // command pool to allocate from
        VK_COMMAND_BUFFER_LEVEL_PRIMARY,                 // command buffer level
        1,                                               // number of command buffers to allocate
    };

    VkResult result = vkAllocateCommandBuffers(allocator->current_device(), &command_buffer_info, command_buffer);
    if (result != VK_SUCCESS) {
        error(user_context) << "Vulkan: Failed to allocate command buffers!\n";
        return halide_error_code_generic_error;
    }
    return halide_error_code_success;
}
int vk_destroy_command_buffer(void *user_context, VulkanMemoryAllocator *allocator, VkCommandPool command_pool, VkCommandBuffer command_buffer) {
    debug(user_context)
        << " vk_destroy_command_buffer (user_context: " << user_context << ", "
        << "allocator: " << (void *)allocator << ", "
        << "command_pool: " << (void *)command_pool << ", "
        << "command_buffer: " << (void *)command_buffer << ")\n";

    if (allocator == nullptr) {
        error(user_context) << "Vulkan: Failed to destroy command buffer ... invalid allocator pointer!\n";
        return halide_error_code_generic_error;
    }
    vkFreeCommandBuffers(allocator->current_device(), command_pool, 1, &command_buffer);
    return halide_error_code_success;
}
struct ScopedVulkanCommandBufferAndPool {
    void *user_context = nullptr;
    VulkanMemoryAllocator *allocator = nullptr;
    VkCommandPool command_pool = VK_NULL_HANDLE;
    VkCommandBuffer command_buffer = VK_NULL_HANDLE;

    ScopedVulkanCommandBufferAndPool(void *uc, VulkanMemoryAllocator *vma, uint32_t queue_family_index)
        : user_context(uc), allocator(vma) {
        if (vk_create_command_pool(user_context, allocator, queue_family_index, &command_pool) == halide_error_code_success) {
            vk_create_command_buffer(user_context, allocator, command_pool, &command_buffer);
        }
    }
    ~ScopedVulkanCommandBufferAndPool() {
        if ((allocator != nullptr) && (command_pool != VK_NULL_HANDLE)) {
            if (command_buffer != VK_NULL_HANDLE) {
                vk_destroy_command_buffer(user_context, allocator, command_pool, command_buffer);
            }
            vk_destroy_command_pool(user_context, allocator, command_pool);
        }
    }
};
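
// Illustrative usage sketch (not part of the runtime): the RAII wrapper above exists so that a
// transient command pool and its command buffer are released on every exit path. The queue family
// index and the dispatch/submit helpers used below refer to helpers declared later in this file;
// the surrounding variables (device, queue, compute_pipeline, etc.) are placeholders.
//
//   ScopedVulkanCommandBufferAndPool cmds(user_context, allocator, queue_family_index);
//   if (cmds.command_buffer == VK_NULL_HANDLE) {
//       return halide_error_code_generic_error;  // pool or buffer creation failed
//   }
//   vk_fill_command_buffer_with_dispatch_call(user_context, device, cmds.command_buffer,
//                                             compute_pipeline, pipeline_layout, descriptor_set,
//                                             /*descriptor_set_index*/ 0, blocksX, blocksY, blocksZ);
//   vk_submit_command_buffer(user_context, queue, cmds.command_buffer);
//   vkQueueWaitIdle(queue);  // wait for completion before the wrapper tears everything down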
int vk_fill_command_buffer_with_dispatch_call(void *user_context,
                                              VkDevice device,
                                              VkCommandBuffer command_buffer,
                                              VkPipeline compute_pipeline,
                                              VkPipelineLayout pipeline_layout,
                                              VkDescriptorSet descriptor_set,
                                              uint32_t descriptor_set_index,
                                              int blocksX, int blocksY, int blocksZ) {
    debug(user_context)
        << " vk_fill_command_buffer_with_dispatch_call (user_context: " << user_context << ", "
        << "device: " << (void *)device << ", "
        << "command_buffer: " << (void *)command_buffer << ", "
        << "pipeline_layout: " << (void *)pipeline_layout << ", "
        << "descriptor_set: " << (void *)descriptor_set << ", "
        << "descriptor_set_index: " << descriptor_set_index << ", "
        << "blocks: " << blocksX << ", " << blocksY << ", " << blocksZ << ")\n";

    VkCommandBufferBeginInfo command_buffer_begin_info = {
        VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,  // struct type
        nullptr,                                      // pointer to struct extending this
        VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT,  // flags: buffer will be submitted once and reset
        nullptr,                                      // inheritance info (secondary command buffers only)
    };

    VkResult result = vkBeginCommandBuffer(command_buffer, &command_buffer_begin_info);
    if (result != VK_SUCCESS) {
        error(user_context) << "vkBeginCommandBuffer returned " << vk_get_error_name(result) << "\n";
        return halide_error_code_generic_error;
    }

    vkCmdBindPipeline(command_buffer, VK_PIPELINE_BIND_POINT_COMPUTE, compute_pipeline);
    vkCmdBindDescriptorSets(command_buffer, VK_PIPELINE_BIND_POINT_COMPUTE, pipeline_layout,
                            descriptor_set_index, 1, &descriptor_set, 0, nullptr);
    vkCmdDispatch(command_buffer, blocksX, blocksY, blocksZ);

    result = vkEndCommandBuffer(command_buffer);
    if (result != VK_SUCCESS) {
        error(user_context) << "vkEndCommandBuffer returned " << vk_get_error_name(result) << "\n";
        return halide_error_code_generic_error;
    }
    return halide_error_code_success;
}
int vk_submit_command_buffer(void *user_context, VkQueue queue, VkCommandBuffer command_buffer) {
    debug(user_context)
        << " vk_submit_command_buffer (user_context: " << user_context << ", "
        << "queue: " << (void *)queue << ", "
        << "command_buffer: " << (void *)command_buffer << ")\n";

    VkSubmitInfo submit_info = {
        VK_STRUCTURE_TYPE_SUBMIT_INFO,  // struct type
        nullptr,                        // pointer to struct extending this
        0, nullptr,                     // semaphores to wait on before execution (none)
        nullptr,                        // pipeline stages at which each wait occurs
        1, &command_buffer,             // command buffers to execute
        0, nullptr,                     // semaphores to signal on completion (none)
    };

    VkResult result = vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
    if (result != VK_SUCCESS) {
        error(user_context) << "Vulkan: vkQueueSubmit returned " << vk_get_error_name(result) << "\n";
        return halide_error_code_generic_error;
    }
    return halide_error_code_success;
}
// Returns 1 if any scalar (non-buffer) args are present, since these get packed into a uniform buffer
uint32_t vk_needs_scalar_uniform_buffer(void *user_context, size_t arg_sizes[], void *args[], int8_t arg_is_buffer[]) {
    int i = 0;
    while (arg_sizes[i] > 0) {
        if (!arg_is_buffer[i]) {
            return 1;
        }
        i++;
    }
    return 0;
}

// Returns the number of descriptor bindings: an optional scalar-args uniform buffer,
// plus one storage buffer binding per buffer argument
uint32_t vk_count_bindings_for_descriptor_set(void *user_context, size_t arg_sizes[], void *args[], int8_t arg_is_buffer[]) {
    uint32_t bindings_count = vk_needs_scalar_uniform_buffer(user_context, arg_sizes, args, arg_is_buffer);
    int i = 0;
    while (arg_sizes[i] > 0) {
        if (arg_is_buffer[i]) {
            bindings_count++;
        }
        i++;
    }
    return bindings_count;
}
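
// Worked example (hypothetical values): for a kernel taking two scalars and two device buffers,
// the Halide argument arrays are terminated by a zero-sized entry:
//
//   size_t arg_sizes[]     = {4, 8, 8, 4, 0};   // int32, buffer, buffer, float, terminator
//   int8_t arg_is_buffer[] = {0, 1, 1, 0, 0};
//
// vk_needs_scalar_uniform_buffer() returns 1 (scalar args are present), and
// vk_count_bindings_for_descriptor_set() returns 3: one uniform buffer packing the scalars,
// plus one storage buffer binding per buffer argument.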
int vk_create_descriptor_pool(void *user_context,
                              VulkanMemoryAllocator *allocator,
                              uint32_t uniform_buffer_count,
                              uint32_t storage_buffer_count,
                              VkDescriptorPool *descriptor_pool) {
    debug(user_context)
        << " vk_create_descriptor_pool (user_context: " << user_context << ", "
        << "allocator: " << (void *)allocator << ", "
        << "uniform_buffer_count: " << (uint32_t)uniform_buffer_count << ", "
        << "storage_buffer_count: " << (uint32_t)storage_buffer_count << ")\n";

    if (allocator == nullptr) {
        error(user_context) << "Vulkan: Failed to create descriptor pool ... invalid allocator pointer!\n";
        return halide_error_code_generic_error;
    }

    BlockStorage::Config pool_config;
    pool_config.entry_size = sizeof(VkDescriptorPoolSize);
    pool_config.minimum_capacity = (uniform_buffer_count ? 1 : 0) + (storage_buffer_count ? 1 : 0);
    BlockStorage pool_sizes(user_context, pool_config);

    if (uniform_buffer_count > 0) {
        VkDescriptorPoolSize uniform_buffer_size = {
            VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,  // descriptor type
            uniform_buffer_count,               // descriptor count
        };
        pool_sizes.append(user_context, &uniform_buffer_size);
    }

    if (storage_buffer_count > 0) {
        VkDescriptorPoolSize storage_buffer_size = {
            VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,  // descriptor type
            storage_buffer_count,               // descriptor count
        };
        pool_sizes.append(user_context, &storage_buffer_size);
    }

    VkDescriptorPoolCreateInfo descriptor_pool_info = {
        VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,    // struct type
        nullptr,                                          // pointer to struct extending this
        0,                                                // flags
        1,                                                // max number of sets that can be allocated
        (uint32_t)pool_sizes.size(),                      // pool size count
        (const VkDescriptorPoolSize *)pool_sizes.data(),  // pointer to descriptor pool sizes
    };

    VkResult result = vkCreateDescriptorPool(allocator->current_device(), &descriptor_pool_info, allocator->callbacks(), descriptor_pool);
    if (result != VK_SUCCESS) {
        error(user_context) << "Vulkan: Failed to create descriptor pool! vkCreateDescriptorPool returned " << vk_get_error_name(result) << "\n";
        return halide_error_code_generic_error;
    }
    return halide_error_code_success;
}
int vk_destroy_descriptor_pool(void *user_context,
                               VulkanMemoryAllocator *allocator,
                               VkDescriptorPool descriptor_pool) {
    debug(user_context)
        << " vk_destroy_descriptor_pool (user_context: " << user_context << ", "
        << "allocator: " << (void *)allocator << ", "
        << "descriptor_pool: " << (void *)descriptor_pool << ")\n";

    if (allocator == nullptr) {
        error(user_context) << "Vulkan: Failed to destroy descriptor pool ... invalid allocator pointer!\n";
        return halide_error_code_generic_error;
    }
    vkDestroyDescriptorPool(allocator->current_device(), descriptor_pool, allocator->callbacks());
    return halide_error_code_success;
}
int vk_create_descriptor_set_layout(void *user_context,
                                    VulkanMemoryAllocator *allocator,
                                    uint32_t uniform_buffer_count,
                                    uint32_t storage_buffer_count,
                                    VkDescriptorSetLayout *layout) {
    debug(user_context)
        << " vk_create_descriptor_set_layout (user_context: " << user_context << ", "
        << "allocator: " << (void *)allocator << ", "
        << "uniform_buffer_count: " << uniform_buffer_count << ", "
        << "storage_buffer_count: " << storage_buffer_count << ", "
        << "layout: " << (void *)layout << ")\n";

    if (allocator == nullptr) {
        error(user_context) << "Vulkan: Failed to create descriptor set layout ... invalid allocator pointer!\n";
        return halide_error_code_generic_error;
    }

    BlockStorage::Config layout_config;
    layout_config.entry_size = sizeof(VkDescriptorSetLayoutBinding);
    layout_config.minimum_capacity = uniform_buffer_count + storage_buffer_count;
    BlockStorage layout_bindings(user_context, layout_config);

    // add all uniform buffers first
    for (uint32_t n = 0; n < uniform_buffer_count; ++n) {
        VkDescriptorSetLayoutBinding uniform_buffer_layout = {
            (uint32_t)layout_bindings.size(),   // binding index
            VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,  // descriptor type
            1,                                  // descriptor count
            VK_SHADER_STAGE_COMPUTE_BIT,        // stage flags
            nullptr,                            // immutable samplers
        };
        debug(user_context)
            << " [" << (uint32_t)layout_bindings.size() << "] : UNIFORM_BUFFER\n";
        layout_bindings.append(user_context, &uniform_buffer_layout);
    }

    // then add all storage buffers
    for (uint32_t n = 0; n < storage_buffer_count; ++n) {
        VkDescriptorSetLayoutBinding storage_buffer_layout = {
            (uint32_t)layout_bindings.size(),   // binding index
            VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,  // descriptor type
            1,                                  // descriptor count
            VK_SHADER_STAGE_COMPUTE_BIT,        // stage flags
            nullptr,                            // immutable samplers
        };
        debug(user_context)
            << " [" << (uint32_t)layout_bindings.size() << "] : STORAGE_BUFFER\n";
        layout_bindings.append(user_context, &storage_buffer_layout);
    }

    // create the layout info struct
    VkDescriptorSetLayoutCreateInfo layout_info = {
        VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,     // struct type
        nullptr,                                                 // pointer to struct extending this
        0,                                                       // flags
        (uint32_t)layout_bindings.size(),                        // binding count
        (VkDescriptorSetLayoutBinding *)layout_bindings.data(),  // pointer to bindings array
    };

    VkResult result = vkCreateDescriptorSetLayout(allocator->current_device(), &layout_info, allocator->callbacks(), layout);
    if (result != VK_SUCCESS) {
        error(user_context) << "vkCreateDescriptorSetLayout returned " << vk_get_error_name(result) << "\n";
        return halide_error_code_generic_error;
    }
    return halide_error_code_success;
}
int vk_destroy_descriptor_set_layout(void *user_context,
                                     VulkanMemoryAllocator *allocator,
                                     VkDescriptorSetLayout descriptor_set_layout) {
    debug(user_context)
        << " vk_destroy_descriptor_set_layout (user_context: " << user_context << ", "
        << "allocator: " << (void *)allocator << ", "
        << "layout: " << (void *)descriptor_set_layout << ")\n";

    if (allocator == nullptr) {
        error(user_context) << "Vulkan: Failed to destroy descriptor set layout ... invalid allocator pointer!\n";
        return halide_error_code_generic_error;
    }
    vkDestroyDescriptorSetLayout(allocator->current_device(), descriptor_set_layout, allocator->callbacks());
    return halide_error_code_success;
}
int vk_create_descriptor_set(void *user_context,
                             VulkanMemoryAllocator *allocator,
                             VkDescriptorSetLayout descriptor_set_layout,
                             VkDescriptorPool descriptor_pool,
                             VkDescriptorSet *descriptor_set) {
    debug(user_context)
        << " vk_create_descriptor_set (user_context: " << user_context << ", "
        << "allocator: " << (void *)allocator << ", "
        << "descriptor_set_layout: " << (void *)descriptor_set_layout << ", "
        << "descriptor_pool: " << (void *)descriptor_pool << ")\n";

    if (allocator == nullptr) {
        error(user_context) << "Vulkan: Failed to create descriptor set ... invalid allocator pointer!\n";
        return halide_error_code_generic_error;
    }

    VkDescriptorSetAllocateInfo descriptor_set_info = {
        VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,  // struct type
        nullptr,                                         // pointer to struct extending this
        descriptor_pool,                                 // pool from which to allocate the set
        1,                                               // number of descriptor sets
        &descriptor_set_layout,                          // pointer to descriptor set layouts
    };

    VkResult result = vkAllocateDescriptorSets(allocator->current_device(), &descriptor_set_info, descriptor_set);
    if (result != VK_SUCCESS) {
        error(user_context) << "Vulkan: vkAllocateDescriptorSets returned " << vk_get_error_name(result) << "\n";
        return halide_error_code_generic_error;
    }
    return halide_error_code_success;
}
int vk_update_descriptor_set(void *user_context,
                             VulkanMemoryAllocator *allocator,
                             VkBuffer *scalar_args_buffer,
                             size_t uniform_buffer_count,
                             size_t storage_buffer_count,
                             size_t arg_sizes[],
                             void *args[],
                             int8_t arg_is_buffer[],
                             VkDescriptorSet descriptor_set) {
    debug(user_context)
        << " vk_update_descriptor_set (user_context: " << user_context << ", "
        << "allocator: " << (void *)allocator << ", "
        << "scalar_args_buffer: " << (void *)scalar_args_buffer << ", "
        << "uniform_buffer_count: " << (uint32_t)uniform_buffer_count << ", "
        << "storage_buffer_count: " << (uint32_t)storage_buffer_count << ", "
        << "descriptor_set: " << (void *)descriptor_set << ")\n";

    if (allocator == nullptr) {
        error(user_context) << "Vulkan: Failed to update descriptor set ... invalid allocator pointer!\n";
        return halide_error_code_generic_error;
    }

    BlockStorage::Config dbi_config;
    dbi_config.entry_size = sizeof(VkDescriptorBufferInfo);
    dbi_config.minimum_capacity = storage_buffer_count + uniform_buffer_count;
    BlockStorage descriptor_buffer_info(user_context, dbi_config);

    BlockStorage::Config wds_config;
    wds_config.entry_size = sizeof(VkWriteDescriptorSet);
    wds_config.minimum_capacity = storage_buffer_count + uniform_buffer_count;
    BlockStorage write_descriptor_set(user_context, wds_config);

    // First binding will be the scalar args buffer (if needed), bound as a uniform buffer
    VkDescriptorBufferInfo *scalar_args_entry = nullptr;
    if (scalar_args_buffer != nullptr) {
        VkDescriptorBufferInfo scalar_args_descriptor_buffer_info = {
            *scalar_args_buffer,  // the buffer
            0,                    // offset
            VK_WHOLE_SIZE,        // range
        };
        descriptor_buffer_info.append(user_context, &scalar_args_descriptor_buffer_info);
        scalar_args_entry = (VkDescriptorBufferInfo *)descriptor_buffer_info.back();

        debug(user_context) << " [0] UNIFORM_BUFFER : "
                            << "buffer=" << (void *)scalar_args_buffer << " "
                            << "offset=" << (uint32_t)(0) << " "
                            << "size=VK_WHOLE_SIZE\n";

        VkWriteDescriptorSet uniform_buffer_write_descriptor_set = {
            VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,  // struct type
            nullptr,                                 // pointer to struct extending this
            descriptor_set,                          // descriptor set to update
            0,                                       // binding slot
            0,                                       // array element
            1,                                       // descriptor count
            VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER,       // descriptor type
            nullptr,                                 // image info
            scalar_args_entry,                       // buffer info
            nullptr,                                 // texel buffer view
        };
        write_descriptor_set.append(user_context, &uniform_buffer_write_descriptor_set);
    }

    // Add all the other device buffers as storage buffer bindings
    for (size_t i = 0; arg_sizes[i] > 0; i++) {
        if (arg_is_buffer[i]) {

            // get the allocated region for the buffer, and the owning region that holds the device handle
            MemoryRegion *device_region = reinterpret_cast<MemoryRegion *>(((halide_buffer_t *)args[i])->device);
            MemoryRegion *owner = allocator->owner_of(user_context, device_region);

            VkBuffer *device_buffer = reinterpret_cast<VkBuffer *>(owner->handle);
            if (device_buffer == nullptr) {
                error(user_context) << "Vulkan: Failed to retrieve buffer for device memory!\n";
                return halide_error_code_internal_error;
            }

            VkDeviceSize range_offset = device_region->range.head_offset;
            VkDeviceSize range_size = device_region->size - device_region->range.head_offset - device_region->range.tail_offset;
            VkDescriptorBufferInfo device_buffer_info = {
                *device_buffer,  // the buffer
                range_offset,    // offset
                range_size,      // range
            };
            descriptor_buffer_info.append(user_context, &device_buffer_info);
            VkDescriptorBufferInfo *device_buffer_entry = (VkDescriptorBufferInfo *)descriptor_buffer_info.back();

            debug(user_context) << " [" << (uint32_t)write_descriptor_set.size() << "] STORAGE_BUFFER : "
                                << "region=" << (void *)device_region << " "
                                << "buffer=" << (void *)device_buffer << " "
                                << "offset=" << (uint32_t)(range_offset) << " "
                                << "size=" << (uint32_t)(range_size) << "\n";

            VkWriteDescriptorSet storage_buffer_write_descriptor_set = {
                VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET,  // struct type
                nullptr,                                 // pointer to struct extending this
                descriptor_set,                          // descriptor set to update
                (uint32_t)write_descriptor_set.size(),   // binding slot
                0,                                       // array element
                1,                                       // descriptor count
                VK_DESCRIPTOR_TYPE_STORAGE_BUFFER,       // descriptor type
                nullptr,                                 // image info
                device_buffer_entry,                     // buffer info
                nullptr,                                 // texel buffer view
            };
            write_descriptor_set.append(user_context, &storage_buffer_write_descriptor_set);
        }
    }

    // issue the update call to populate the descriptor set
    vkUpdateDescriptorSets(allocator->current_device(), (uint32_t)write_descriptor_set.size(),
                           (const VkWriteDescriptorSet *)write_descriptor_set.data(), 0, nullptr);
    return halide_error_code_success;
}
size_t vk_estimate_scalar_uniform_buffer_size(void *user_context,
                                              size_t arg_sizes[], void *args[], int8_t arg_is_buffer[]) {
    int i = 0;
    int scalar_uniform_buffer_size = 0;
    while (arg_sizes[i] > 0) {
        if (!arg_is_buffer[i]) {
            scalar_uniform_buffer_size += arg_sizes[i];
        }
        i++;
    }
    return scalar_uniform_buffer_size;
}
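
// Worked example (hypothetical values): with the argument arrays
//
//   size_t arg_sizes[]     = {4, 8, 4, 0};   // int32 scalar, device buffer, float scalar, terminator
//   int8_t arg_is_buffer[] = {0, 1, 0, 0};
//
// only the non-buffer entries contribute, so the estimated scalar uniform buffer size is
// 4 + 4 = 8 bytes. vk_update_scalar_uniform_buffer() below uses the same packing order,
// memcpy'ing each scalar arg at an increasing arg_offset into the mapped region.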
MemoryRegion *vk_create_scalar_uniform_buffer(void *user_context,
                                              VulkanMemoryAllocator *allocator,
                                              size_t scalar_buffer_size) {
    debug(user_context)
        << " vk_create_scalar_uniform_buffer (user_context: " << user_context << ", "
        << "allocator: " << (void *)allocator << ", "
        << "scalar_buffer_size: " << (uint32_t)scalar_buffer_size << ")\n";

    if (allocator == nullptr) {
        error(user_context) << "Vulkan: Failed to create scalar uniform buffer ... invalid allocator pointer!\n";
        return nullptr;
    }

    MemoryRequest request = {0};
    request.size = scalar_buffer_size;
    request.properties.usage = MemoryUsage::UniformStorage;
    request.properties.caching = MemoryCaching::UncachedCoherent;
    request.properties.visibility = MemoryVisibility::HostToDevice;

    // allocate a new region for the uniform buffer
    MemoryRegion *region = allocator->reserve(user_context, request);
    if ((region == nullptr) || (region->handle == nullptr)) {
        error(user_context) << "Vulkan: Failed to create scalar uniform buffer ... unable to allocate device memory!\n";
        return nullptr;
    }

    // return the allocated region for the uniform buffer
    return region;
}
int vk_update_scalar_uniform_buffer(void *user_context,
                                    VulkanMemoryAllocator *allocator,
                                    MemoryRegion *region,
                                    size_t arg_sizes[], void *args[], int8_t arg_is_buffer[]) {
    debug(user_context)
        << " vk_update_scalar_uniform_buffer (user_context: " << user_context << ", "
        << "region: " << (void *)region << ")\n";

    if (allocator == nullptr) {
        error(user_context) << "Vulkan: Failed to update scalar uniform buffer ... invalid allocator pointer!\n";
        return halide_error_code_generic_error;
    }

    if ((region == nullptr) || (region->handle == nullptr)) {
        error(user_context) << "Vulkan: Failed to update scalar uniform buffer ... invalid memory region!\n";
        return halide_error_code_internal_error;
    }

    // map the region to a host pointer
    uint8_t *host_ptr = (uint8_t *)allocator->map(user_context, region);
    if (host_ptr == nullptr) {
        error(user_context) << "Vulkan: Failed to update scalar uniform buffer ... unable to map host pointer to device memory!\n";
        return halide_error_code_internal_error;
    }

    // copy the scalar args into the buffer
    size_t arg_offset = 0;
    for (size_t i = 0; arg_sizes[i] > 0; i++) {
        if (!arg_is_buffer[i]) {
            memcpy(host_ptr + arg_offset, args[i], arg_sizes[i]);
            arg_offset += arg_sizes[i];
        }
    }

    // unmap the region when done
    allocator->unmap(user_context, region);
    return halide_error_code_success;
}
int vk_destroy_scalar_uniform_buffer(void *user_context,
                                     VulkanMemoryAllocator *allocator,
                                     MemoryRegion *scalar_args_region) {
    debug(user_context)
        << " vk_destroy_scalar_uniform_buffer (user_context: " << user_context << ", "
        << "allocator: " << (void *)allocator << ", "
        << "scalar_args_region: " << (void *)scalar_args_region << ")\n";

    if (allocator == nullptr) {
        error(user_context) << "Vulkan: Failed to destroy scalar uniform buffer ... invalid allocator pointer!\n";
        return halide_error_code_generic_error;
    }
    if (!scalar_args_region) {
        return halide_error_code_success;  // nothing to destroy
    }

    // return the region to the allocator (or reclaim its memory if allocations can't be reused)
    int error_code = halide_error_code_success;
    if (halide_can_reuse_device_allocations(user_context)) {
        error_code = allocator->release(user_context, scalar_args_region);
    } else {
        error_code = allocator->reclaim(user_context, scalar_args_region);
    }
    return error_code;
}
int vk_create_pipeline_layout(void *user_context,
                              VulkanMemoryAllocator *allocator,
                              uint32_t descriptor_set_count,
                              VkDescriptorSetLayout *descriptor_set_layouts,
                              VkPipelineLayout *pipeline_layout) {
    debug(user_context)
        << " vk_create_pipeline_layout (user_context: " << user_context << ", "
        << "allocator: " << (void *)allocator << ", "
        << "descriptor_set_count: " << descriptor_set_count << ", "
        << "descriptor_set_layouts: " << (void *)descriptor_set_layouts << ", "
        << "pipeline_layout: " << (void *)pipeline_layout << ")\n";

    if (allocator == nullptr) {
        error(user_context) << "Vulkan: Failed to create pipeline layout ... invalid allocator pointer!\n";
        return halide_error_code_generic_error;
    }

    uint32_t max_bound_descriptor_sets = allocator->current_physical_device_limits().maxBoundDescriptorSets;
    if (descriptor_set_count > max_bound_descriptor_sets) {
        error(user_context) << "Vulkan: Number of descriptor sets for pipeline layout exceeds the number that can be bound by device!\n"
                            << " requested: " << descriptor_set_count << ","
                            << " available: " << max_bound_descriptor_sets << "\n";
        return halide_error_code_incompatible_device_interface;
    }

    VkPipelineLayoutCreateInfo pipeline_layout_info = {
        VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,  // struct type
        nullptr,                                        // pointer to struct extending this
        0,                                              // flags
        descriptor_set_count,                           // number of descriptor set layouts
        descriptor_set_layouts,                         // pointer to the descriptor set layouts
        0,                                              // push constant range count
        nullptr,                                        // push constant ranges
    };

    VkResult result = vkCreatePipelineLayout(allocator->current_device(), &pipeline_layout_info, allocator->callbacks(), pipeline_layout);
    if (result != VK_SUCCESS) {
        error(user_context) << "Vulkan: vkCreatePipelineLayout returned " << vk_get_error_name(result) << "\n";
        return halide_error_code_generic_error;
    }
    return halide_error_code_success;
}
int vk_destroy_pipeline_layout(void *user_context,
                               VulkanMemoryAllocator *allocator,
                               VkPipelineLayout pipeline_layout) {
    debug(user_context)
        << " vk_destroy_pipeline_layout (user_context: " << user_context << ", "
        << "allocator: " << (void *)allocator << ", "
        << "pipeline_layout: " << (void *)pipeline_layout << ")\n";

    if (allocator == nullptr) {
        error(user_context) << "Vulkan: Failed to destroy pipeline layout ... invalid allocator pointer!\n";
        return halide_error_code_generic_error;
    }
    vkDestroyPipelineLayout(allocator->current_device(), pipeline_layout, allocator->callbacks());
    return halide_error_code_success;
}
int vk_create_compute_pipeline(void *user_context,
                               VulkanMemoryAllocator *allocator,
                               const char *pipeline_name,
                               VkShaderModule shader_module,
                               VkPipelineLayout pipeline_layout,
                               VkSpecializationInfo *specialization_info,
                               VkPipeline *compute_pipeline) {
    debug(user_context)
        << " vk_create_compute_pipeline (user_context: " << user_context << ", "
        << "allocator: " << (void *)allocator << ", "
        << "shader_module: " << (void *)shader_module << ", "
        << "pipeline_layout: " << (void *)pipeline_layout << ")\n";

    if (allocator == nullptr) {
        error(user_context) << "Vulkan: Failed to create compute pipeline ... invalid allocator pointer!\n";
        return halide_error_code_generic_error;
    }

    VkComputePipelineCreateInfo compute_pipeline_info = {
        VK_STRUCTURE_TYPE_COMPUTE_PIPELINE_CREATE_INFO,  // struct type
        nullptr,                                         // pointer to struct extending this
        0,                                               // flags
        {
            // VkPipelineShaderStageCreateInfo for the compute stage
            VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO,  // struct type
            nullptr,                                              // pointer to struct extending this
            0,                                                    // flags
            VK_SHADER_STAGE_COMPUTE_BIT,                          // stage
            shader_module,                                        // shader module
            pipeline_name,                                        // entry point name
            specialization_info,                                  // pointer to optional specialization info
        },
        pipeline_layout,  // pipeline layout
        VK_NULL_HANDLE,   // base pipeline handle
        0,                // base pipeline index
    };

    VkResult result = vkCreateComputePipelines(allocator->current_device(), VK_NULL_HANDLE, 1, &compute_pipeline_info, allocator->callbacks(), compute_pipeline);
    if (result != VK_SUCCESS) {
        error(user_context) << "Vulkan: Failed to create compute pipeline! vkCreateComputePipelines returned " << vk_get_error_name(result) << "\n";
        return halide_error_code_generic_error;
    }
    return halide_error_code_success;
}
int vk_setup_compute_pipeline(void *user_context,
                              VulkanMemoryAllocator *allocator,
                              VulkanShaderBinding *shader_bindings,
                              VulkanDispatchData *dispatch_data,
                              VkShaderModule shader_module,
                              VkPipelineLayout pipeline_layout,
                              VkPipeline *compute_pipeline) {
    debug(user_context)
        << " vk_setup_compute_pipeline (user_context: " << user_context << ", "
        << "entry_point_name: '" << shader_bindings->entry_point_name << "', "
        << "allocator: " << (void *)allocator << ", "
        << "shader_bindings: " << (void *)shader_bindings << ", "
        << "dispatch_data: " << (void *)dispatch_data << ", "
        << "shader_module: " << (void *)shader_module << ", "
        << "pipeline_layout: " << (void *)pipeline_layout << ")\n";

    if (allocator == nullptr) {
        error(user_context) << "Vulkan: Failed to setup compute pipeline ... invalid allocator pointer!\n";
        return halide_error_code_generic_error;
    }

    if (shader_bindings == nullptr) {
        error(user_context) << "Vulkan: Failed to setup compute pipeline ... invalid shader bindings!\n";
        return halide_error_code_generic_error;
    }

    if (dispatch_data == nullptr) {
        error(user_context) << "Vulkan: Failed to setup compute pipeline ... invalid dispatch data!\n";
        return halide_error_code_generic_error;
    }

    VkResult result = VK_SUCCESS;
    const char *entry_point_name = shader_bindings->entry_point_name;
    if (entry_point_name == nullptr) {
        error(user_context) << "Vulkan: Failed to setup compute pipeline ... missing entry point name!\n";
        return halide_error_code_generic_error;
    }

    uint32_t dispatch_constant_index = 0;
    uint32_t dispatch_constant_ids[4] = {0, 0, 0, 0};
    uint32_t dispatch_constant_values[4] = {0, 0, 0, 0};

    // locate the mapping for overriding any dynamic shared memory allocation sizes
    if (shader_bindings->shared_memory_allocations_count && dispatch_data->shared_mem_bytes) {

        uint32_t shared_mem_constant_id = 0;
        uint32_t static_shared_mem_bytes = 0;
        uint32_t shared_mem_type_size = 0;

        for (uint32_t sm = 0; sm < shader_bindings->shared_memory_allocations_count; sm++) {
            VulkanSharedMemoryAllocation *allocation = &(shader_bindings->shared_memory_allocations[sm]);
            if (allocation->constant_id == 0) {
                // static fixed-size allocation
                static_shared_mem_bytes += allocation->type_size * allocation->array_size;
            } else {
                // dynamic allocation (only one is supported)
                if (shared_mem_constant_id > 0) {
                    error(user_context) << "Vulkan: Multiple dynamic shared memory allocations found! Only one is supported!\n";
                    result = VK_ERROR_TOO_MANY_OBJECTS;
                    break;
                }
                shared_mem_constant_id = allocation->constant_id;
                shared_mem_type_size = allocation->type_size;
            }
        }

        uint32_t shared_mem_bytes_avail = (dispatch_data->shared_mem_bytes - static_shared_mem_bytes);
        debug(user_context) << " pipeline uses " << static_shared_mem_bytes << " bytes of static shared memory\n";
        debug(user_context) << " dispatch requests " << dispatch_data->shared_mem_bytes << " bytes of shared memory\n";
        debug(user_context) << " dynamic shared memory " << shared_mem_bytes_avail << " bytes available\n";

        // setup the dynamic array size via its specialization constant
        if ((shared_mem_constant_id > 0) && (shared_mem_bytes_avail > 0)) {
            uint32_t dynamic_array_size = (uint32_t)shared_mem_bytes_avail / shared_mem_type_size;
            debug(user_context) << " setting shared memory to " << (uint32_t)dynamic_array_size << " elements "
                                << "(or " << (uint32_t)shared_mem_bytes_avail << " bytes)\n";

            dispatch_constant_ids[dispatch_constant_index] = shared_mem_constant_id;
            dispatch_constant_values[dispatch_constant_index] = dynamic_array_size;
            dispatch_constant_index++;
        }

        // verify the device can actually support the amount of shared memory requested
        uint32_t device_shared_mem_size = allocator->current_physical_device_limits().maxComputeSharedMemorySize;
        if (static_shared_mem_bytes > device_shared_mem_size) {
            error(user_context) << "Vulkan: Amount of static shared memory used exceeds device limit!\n"
                                << " requested: " << static_shared_mem_bytes << " bytes,"
                                << " available: " << device_shared_mem_size << " bytes\n";
            return halide_error_code_incompatible_device_interface;
        }
        if (dispatch_data->shared_mem_bytes > device_shared_mem_size) {
            error(user_context) << "Vulkan: Amount of dynamic shared memory used exceeds device limit!\n"
                                << " requested: " << dispatch_data->shared_mem_bytes << " bytes,"
                                << " available: " << device_shared_mem_size << " bytes\n";
            return halide_error_code_incompatible_device_interface;
        }
    }

    // locate the mapping for overriding any dynamic workgroup local sizes
    if (shader_bindings->dispatch_data.local_size_binding.constant_id[0] != 0) {
        for (uint32_t dim = 0; dim < 3; dim++) {
            dispatch_constant_ids[dispatch_constant_index] = shader_bindings->dispatch_data.local_size_binding.constant_id[dim];
            dispatch_constant_values[dispatch_constant_index] = dispatch_data->local_size[dim];
            dispatch_constant_index++;
        }
    }

    // verify that each dispatch constant actually maps onto a specialization constant in the shader
    const uint32_t invalid_index = uint32_t(-1);
    for (uint32_t dc = 0; dc < dispatch_constant_index; dc++) {
        uint32_t found_index = invalid_index;
        for (uint32_t sc = 0; sc < shader_bindings->specialization_constants_count; sc++) {
            if (shader_bindings->specialization_constants[sc].constant_id == dispatch_constant_ids[dc]) {
                debug(user_context) << " binding specialization constant [" << dispatch_constant_ids[dc] << "] "
                                    << "'" << shader_bindings->specialization_constants[sc].constant_name << "' "
                                    << " => " << dispatch_constant_values[dc] << "\n";
                found_index = sc;
                break;
            }
        }
        if (found_index == invalid_index) {
            error(user_context) << "Vulkan: Failed to locate dispatch constant index for shader binding!\n";
            result = VK_ERROR_INITIALIZATION_FAILED;
        }
    }

    // don't even attempt to create the pipeline if we encountered errors in the shader binding
    if (result != VK_SUCCESS) {
        error(user_context) << "Vulkan: Failed to decode shader bindings! " << vk_get_error_name(result) << "\n";
        return halide_error_code_generic_error;
    }

    // prepare the specialization mapping for all dispatch constants
    uint32_t dispatch_constant_count = 0;
    VkSpecializationMapEntry specialization_map_entries[4];
    memset(specialization_map_entries, 0, 4 * sizeof(VkSpecializationMapEntry));
    for (uint32_t dc = 0; dc < dispatch_constant_index && dc < 4; dc++) {
        specialization_map_entries[dc].constantID = dispatch_constant_ids[dc];
        specialization_map_entries[dc].size = sizeof(uint32_t);
        specialization_map_entries[dc].offset = dc * sizeof(uint32_t);
        dispatch_constant_count++;
    }

    if (dispatch_constant_count > 0) {
        // prepare the specialization info block for the shader stage
        VkSpecializationInfo specialization_info{};
        specialization_info.dataSize = dispatch_constant_count * sizeof(uint32_t);
        specialization_info.mapEntryCount = dispatch_constant_count;
        specialization_info.pMapEntries = specialization_map_entries;
        specialization_info.pData = dispatch_constant_values;
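
        // Worked example (hypothetical values): if this dispatch needs to bind a dynamic shared
        // memory array size plus a 3-D workgroup size, the arrays built above might contain
        //
        //   dispatch_constant_ids[]    = {7, 1, 2, 3};       // SPIR-V SpecId decorations
        //   dispatch_constant_values[] = {1024, 16, 16, 1};  // element count, local_size x/y/z
        //
        // Each VkSpecializationMapEntry maps constantID ids[i] to the 4-byte word at offset
        // i * sizeof(uint32_t) inside pData, so the driver patches those constants when the
        // compute pipeline is created below.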

        // recreate the pipeline with the requested shared memory allocation bound in
        if (shader_bindings->compute_pipeline) {
            int error_code = vk_destroy_compute_pipeline(user_context, allocator, shader_bindings->compute_pipeline);
            if (error_code != halide_error_code_success) {
                error(user_context) << "Vulkan: Failed to destroy compute pipeline!\n";
                return halide_error_code_generic_error;
            }
            shader_bindings->compute_pipeline = VK_NULL_HANDLE;
        }

        int error_code = vk_create_compute_pipeline(user_context, allocator, entry_point_name, shader_module, pipeline_layout, &specialization_info, &(shader_bindings->compute_pipeline));
        if (error_code != halide_error_code_success) {
            error(user_context) << "Vulkan: Failed to create compute pipeline!\n";
            return error_code;
        }

    } else {

        // construct a fixed pipeline (no specialization constants) and reuse it across dispatches
        if (shader_bindings->compute_pipeline == VK_NULL_HANDLE) {
            int error_code = vk_create_compute_pipeline(user_context, allocator, entry_point_name, shader_module, pipeline_layout, nullptr, &(shader_bindings->compute_pipeline));
            if (error_code != halide_error_code_success) {
                error(user_context) << "Vulkan: Failed to create compute pipeline!\n";
                return error_code;
            }
        }
    }

    return halide_error_code_success;
}
int vk_destroy_compute_pipeline(void *user_context,
                                VulkanMemoryAllocator *allocator,
                                VkPipeline compute_pipeline) {
    debug(user_context)
        << " vk_destroy_compute_pipeline (user_context: " << user_context << ", "
        << "allocator: " << (void *)allocator << ", "
        << "compute_pipeline: " << (void *)compute_pipeline << ")\n";

    if (allocator == nullptr) {
        error(user_context) << "Vulkan: Failed to destroy compute pipeline ... invalid allocator pointer!\n";
        return halide_error_code_generic_error;
    }
    vkDestroyPipeline(allocator->current_device(), compute_pipeline, allocator->callbacks());
    return halide_error_code_success;
}
VulkanShaderBinding *vk_decode_shader_bindings(void *user_context, VulkanMemoryAllocator *allocator,
                                               const uint32_t *module_ptr, uint32_t module_size) {
    debug(user_context)
        << " vk_decode_shader_bindings (user_context: " << user_context << ", "
        << "allocator: " << (void *)allocator << ", "
        << "module_ptr: " << (void *)module_ptr << ", "
        << "module_size: " << module_size << ")\n";

    int64_t t_before = halide_current_time_ns(user_context);

    if (allocator == nullptr) {
        error(user_context) << "Vulkan: Failed to decode shader bindings ... invalid allocator pointer!\n";
        return nullptr;
    }

    if ((module_ptr == nullptr) || (module_size < (2 * sizeof(uint32_t)))) {
        error(user_context) << "Vulkan: Failed to decode shader bindings ... invalid module buffer!\n";
        return nullptr;
    }

    // decode the sidecar header that precedes the SPIR-V binary
    uint32_t module_entries = module_size / sizeof(uint32_t);
    uint32_t idx = 1;  // skip the header_word_count at index 0
    uint32_t shader_count = module_ptr[idx++];
    if (shader_count < 1) {
        error(user_context) << "Vulkan: Failed to decode shader bindings ... no descriptors found!\n";
        return nullptr;
    }

    // allocate an array of shader bindings (one per entry point in the module)
    VkSystemAllocationScope alloc_scope = VkSystemAllocationScope::VK_SYSTEM_ALLOCATION_SCOPE_OBJECT;
    size_t shader_bindings_size = shader_count * sizeof(VulkanShaderBinding);
    VulkanShaderBinding *shader_bindings = (VulkanShaderBinding *)vk_host_malloc(user_context, shader_bindings_size, 0, alloc_scope, allocator->callbacks());
    if (shader_bindings == nullptr) {
        error(user_context) << "Vulkan: Failed to allocate shader_bindings! Out of memory!\n";
        return nullptr;
    }
    memset(shader_bindings, 0, shader_bindings_size);

    // decode the shader binding for each entry point
    for (uint32_t n = 0; (n < shader_count) && (idx < module_entries); n++) {

        // length of the entry point name (in words, padded to the nearest word size)
        uint32_t entry_point_name_length = module_ptr[idx++];

        // entry point name string data (padded with null chars)
        const char *entry_point_name = (const char *)(module_ptr + idx);  // NOTE: module owns the string data
        idx += entry_point_name_length;

        // number of uniform and storage buffers
        uint32_t uniform_buffer_count = module_ptr[idx++];
        uint32_t storage_buffer_count = module_ptr[idx++];

        // number of specialization constants
        uint32_t specialization_constants_count = module_ptr[idx++];

        // decode all specialization constants
        VulkanSpecializationConstant *specialization_constants = nullptr;
        if (specialization_constants_count > 0) {

            // allocate an array to store the decoded specialization constant data
            size_t specialization_constants_size = specialization_constants_count * sizeof(VulkanSpecializationConstant);
            specialization_constants = (VulkanSpecializationConstant *)vk_host_malloc(user_context, specialization_constants_size, 0, alloc_scope, allocator->callbacks());
            if (specialization_constants == nullptr) {
                error(user_context) << "Vulkan: Failed to allocate specialization_constants! Out of memory!\n";
                return nullptr;
            }
            memset(specialization_constants, 0, specialization_constants_size);

            for (uint32_t sc = 0; sc < specialization_constants_count; sc++) {

                // length of the constant name (in words, padded to the nearest word size)
                uint32_t constant_name_length = module_ptr[idx++];

                // constant name string data (padded with null chars)
                const char *constant_name = (const char *)(module_ptr + idx);
                specialization_constants[sc].constant_name = constant_name;  // NOTE: module owns the string data
                idx += constant_name_length;

                // constant id (as used in VkSpecializationMapEntry for binding)
                specialization_constants[sc].constant_id = module_ptr[idx++];

                // size of the data type (in bytes)
                specialization_constants[sc].type_size = module_ptr[idx++];
            }
        }

        // number of shared memory allocations
        uint32_t shared_memory_allocations_count = module_ptr[idx++];

        // decode all shared memory allocations
        VulkanSharedMemoryAllocation *shared_memory_allocations = nullptr;
        if (shared_memory_allocations_count > 0) {

            // allocate an array to store the decoded shared memory allocation data
            size_t shared_memory_allocations_size = shared_memory_allocations_count * sizeof(VulkanSharedMemoryAllocation);
            shared_memory_allocations = (VulkanSharedMemoryAllocation *)vk_host_malloc(user_context, shared_memory_allocations_size, 0, alloc_scope, allocator->callbacks());
            if (shared_memory_allocations == nullptr) {
                error(user_context) << "Vulkan: Failed to allocate shared_memory_allocations! Out of memory!\n";
                return nullptr;
            }
            memset(shared_memory_allocations, 0, shared_memory_allocations_size);

            for (uint32_t sm = 0; sm < shared_memory_allocations_count && (idx < module_entries); sm++) {

                // length of the variable name (in words, padded to the nearest word size)
                uint32_t variable_name_length = module_ptr[idx++];

                // variable name string data (padded with null chars)
                const char *variable_name = (const char *)(module_ptr + idx);
                shared_memory_allocations[sm].variable_name = variable_name;  // NOTE: module owns the string data
                idx += variable_name_length;

                // constant id used to override the array size (zero if not bound to a specialization constant)
                shared_memory_allocations[sm].constant_id = module_ptr[idx++];

                // size of the data type (in bytes)
                shared_memory_allocations[sm].type_size = module_ptr[idx++];

                // size of the array (element count ... zero if dynamic)
                shared_memory_allocations[sm].array_size = module_ptr[idx++];
            }
        }

        // workgroup local-size specialization constant ids (one per dimension)
        for (uint32_t dim = 0; dim < 3 && (idx < module_entries); dim++) {
            shader_bindings[n].dispatch_data.local_size_binding.constant_id[dim] = module_ptr[idx++];
        }

        debug(user_context) << " [" << n << "] '" << (const char *)entry_point_name << "'\n";
        debug(user_context) << " uniform_buffer_count=" << uniform_buffer_count << "\n"
                            << " storage_buffer_count=" << storage_buffer_count << "\n";
        debug(user_context) << " specialization_constants_count=" << specialization_constants_count << "\n";
        for (uint32_t sc = 0; sc < specialization_constants_count; sc++) {
            debug(user_context) << " [" << sc << "] "
                                << "constant_name='" << (const char *)specialization_constants[sc].constant_name << "' "
                                << "constant_id=" << specialization_constants[sc].constant_id << " "
                                << "type_size=" << specialization_constants[sc].type_size << "\n";
        }
        debug(user_context) << " shared_memory_allocations_count=" << shared_memory_allocations_count << "\n";
        for (uint32_t sm = 0; sm < shared_memory_allocations_count; sm++) {
            debug(user_context) << " [" << sm << "] "
                                << "variable_name='" << (const char *)shared_memory_allocations[sm].variable_name << "' "
                                << "constant_id=" << shared_memory_allocations[sm].constant_id << " "
                                << "type_size=" << shared_memory_allocations[sm].type_size << " "
                                << "array_size=" << shared_memory_allocations[sm].array_size << "\n";
        }
        debug(user_context) << " local_size_binding=[";
        for (uint32_t dim = 0; dim < 3 && (idx < module_entries); dim++) {
            debug(user_context) << shader_bindings[n].dispatch_data.local_size_binding.constant_id[dim] << " ";
        }
        debug(user_context) << "]\n";

        shader_bindings[n].entry_point_name = entry_point_name;  // NOTE: module owns the string data
        shader_bindings[n].uniform_buffer_count = uniform_buffer_count;
        shader_bindings[n].storage_buffer_count = storage_buffer_count;
        shader_bindings[n].specialization_constants_count = specialization_constants_count;
        shader_bindings[n].specialization_constants = specialization_constants;
        shader_bindings[n].shared_memory_allocations_count = shared_memory_allocations_count;
        shader_bindings[n].shared_memory_allocations = shared_memory_allocations;
    }

    int64_t t_after = halide_current_time_ns(user_context);
    debug(user_context) << " Time: " << (t_after - t_before) / 1.0e6 << " ms\n";

    return shader_bindings;
}
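
// Worked example (hypothetical word stream, inferred from the decode loop above): for a module
// containing a single entry point named "blur" with one uniform buffer, two storage buffers, no
// specialization constants, no shared memory allocations, and no local-size bindings, the sidecar
// decoded above would look like:
//
//   [ header_word_count, 1,        // shader_count
//     2, <"blur" packed into 2 padded words>,  // entry point name length (in words) + string data
//     1, 2,                        // uniform_buffer_count, storage_buffer_count
//     0,                           // specialization_constants_count
//     0,                           // shared_memory_allocations_count
//     0, 0, 0 ]                    // local_size_binding constant ids (none)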
int vk_validate_shader_for_device(void *user_context, VulkanMemoryAllocator *allocator,
                                  const VulkanShaderBinding *shader_bindings, uint32_t shader_count) {
    debug(user_context)
        << " vk_validate_shader_for_device (user_context: " << user_context << ", "
        << "allocator: " << (void *)allocator << ", "
        << "shader_bindings: " << (void *)shader_bindings << ", "
        << "shader_count: " << shader_count << ")\n";

    // validate that the amount of static shared memory used fits within the device limits
    if (shader_bindings->shared_memory_allocations_count) {
        uint32_t static_shared_mem_bytes = 0;

        for (uint32_t sm = 0; sm < shader_bindings->shared_memory_allocations_count; sm++) {
            VulkanSharedMemoryAllocation *allocation = &(shader_bindings->shared_memory_allocations[sm]);
            if (allocation->constant_id == 0) {
                // static fixed-size allocation
                static_shared_mem_bytes += allocation->type_size * allocation->array_size;
            }
        }

        uint32_t device_shared_mem_size = allocator->current_physical_device_limits().maxComputeSharedMemorySize;
        if (static_shared_mem_bytes > device_shared_mem_size) {
            error(user_context) << "Vulkan: Amount of static shared memory used exceeds device limit!\n"
                                << " requested: " << static_shared_mem_bytes << " bytes,"
                                << " available: " << device_shared_mem_size << " bytes\n";
            return halide_error_code_incompatible_device_interface;
        }
    }

    // validate that the number of descriptor sets fits within the device limits
    uint32_t max_descriptors = allocator->current_physical_device_limits().maxBoundDescriptorSets;
    if (shader_count > max_descriptors) {
        error(user_context) << "Vulkan: Number of required descriptor sets exceeds the amount available for device!\n"
                            << " requested: " << shader_count << ","
                            << " available: " << max_descriptors << "\n";
        return halide_error_code_incompatible_device_interface;
    }
    return halide_error_code_success;
}
VulkanCompilationCacheEntry *vk_compile_kernel_module(void *user_context, VulkanMemoryAllocator *allocator,
                                                      const char *ptr, int size) {
    debug(user_context)
        << " vk_compile_kernel_module (user_context: " << user_context << ", "
        << "allocator: " << (void *)allocator << ", "
        << "module: " << (void *)ptr << ", "
        << "size: " << size << ")\n";

    int64_t t_before = halide_current_time_ns(user_context);

    if (allocator == nullptr) {
        debug(user_context) << "Vulkan: Failed to compile kernel module ... invalid allocator pointer!\n";
        return nullptr;
    }

    if ((ptr == nullptr) || (size <= 0)) {
        debug(user_context) << "Vulkan: Failed to compile kernel module ... invalid module!\n";
        return nullptr;
    }

    VkSystemAllocationScope alloc_scope = VkSystemAllocationScope::VK_SYSTEM_ALLOCATION_SCOPE_OBJECT;
    VulkanCompilationCacheEntry *cache_entry = (VulkanCompilationCacheEntry *)vk_host_malloc(user_context, sizeof(VulkanCompilationCacheEntry), 0, alloc_scope, allocator->callbacks());
    if (cache_entry == nullptr) {
        debug(user_context) << "Vulkan: Failed to allocate compilation cache entry! Out of memory!\n";
        return nullptr;
    }
    memset(cache_entry, 0, sizeof(VulkanCompilationCacheEntry));
    cache_entry->allocator = allocator;

    // the module starts with a header listing the kernel count, kernel names and binary sizes,
    // followed by the concatenated SPIR-V binaries for each kernel
    const uint32_t *module_header = (const uint32_t *)ptr;
    if ((size_t)size < sizeof(uint32_t)) {
        debug(user_context) << "Vulkan: Code module size is invalid!\n";
        return nullptr;
    }

    uint32_t word_offset = 0;
    uint32_t kernel_count = module_header[word_offset++];
    debug(user_context) << " kernel_count=" << kernel_count << "\n";

    cache_entry->compiled_modules = (VulkanCompiledShaderModule **)vk_host_malloc(user_context, sizeof(VulkanCompiledShaderModule *) * kernel_count, 0, alloc_scope, allocator->callbacks());
    if (cache_entry->compiled_modules == nullptr) {
        debug(user_context) << "Vulkan: Failed to allocate host memory!\n";
        return nullptr;
    }
    cache_entry->module_count = kernel_count;

    // decode the binary size and name for each kernel from the module header
    uint64_t *binary_sizes = (uint64_t *)malloc(kernel_count * sizeof(uint64_t));
    if (binary_sizes == nullptr) {
        debug(user_context) << "Vulkan: Failed to allocate system memory!\n";
        return nullptr;
    }

    size_t byte_offset = 0;
    for (uint32_t i = 0; (i < kernel_count) && (byte_offset < (size_t)size); ++i) {
        // binary size for the kernel's compiled SPIR-V module
        binary_sizes[i] = module_header[word_offset++];

        // length of the kernel name entry (in words), followed by the padded string data
        uint32_t kernel_name_entry_size = module_header[word_offset++];
        const char *kernel_name = (const char *)(module_header + word_offset);
        word_offset += kernel_name_entry_size;

        byte_offset = (word_offset * sizeof(uint32_t));
        debug(user_context) << " kernel[" << i << "] name: " << kernel_name
                            << " binary_size: " << binary_sizes[i] << " bytes\n";
    }

    // compile each kernel's SPIR-V binary into its own shader module
    for (uint32_t i = 0; (i < kernel_count) && (byte_offset < (size_t)size); ++i) {
        const char *spirv_ptr = (ptr + byte_offset);
        size_t spirv_size = binary_sizes[i];

        debug(user_context) << " spirv_size[" << i << "] = " << spirv_size << " bytes\n";
        debug(user_context) << " spirv_ptr[" << i << "] = " << (void *)spirv_ptr << "\n";

        cache_entry->compiled_modules[i] = vk_compile_shader_module(user_context, allocator, (const char *)spirv_ptr, (int)spirv_size);
        if (cache_entry->compiled_modules[i] == nullptr) {
            debug(user_context) << "Vulkan: Failed to compile shader module!\n";
            // clean up the partially constructed cache entry and fail
            vk_destroy_compilation_cache_entry(cache_entry);
            cache_entry = nullptr;
            break;
        }
        byte_offset += binary_sizes[i];
    }
    free(binary_sizes);

    int64_t t_after = halide_current_time_ns(user_context);
    debug(user_context) << " Time: " << (t_after - t_before) / 1.0e6 << " ms\n";

    return cache_entry;
}
VulkanCompiledShaderModule *vk_compile_shader_module(void *user_context, VulkanMemoryAllocator *allocator,
                                                     const char *ptr, int size) {
    debug(user_context)
        << " vk_compile_shader_module (user_context: " << user_context << ", "
        << "allocator: " << (void *)allocator << ", "
        << "module: " << (void *)ptr << ", "
        << "size: " << size << ")\n";

    int64_t t_before = halide_current_time_ns(user_context);

    if (allocator == nullptr) {
        error(user_context) << "Vulkan: Failed to compile shader modules ... invalid allocator pointer!\n";
        return nullptr;
    }

    if ((ptr == nullptr) || (size <= 0)) {
        error(user_context) << "Vulkan: Failed to compile shader modules ... invalid program source buffer!\n";
        return nullptr;
    }

    const uint32_t *module_ptr = (const uint32_t *)ptr;
    const uint32_t module_size = (uint32_t)size;

    uint32_t header_word_count = module_ptr[0];
    uint32_t shader_count = module_ptr[1];
    uint32_t header_size = header_word_count * sizeof(uint32_t);

    // skip past the preamble header to the start of the SPIR-V binary
    const uint32_t *binary_ptr = (module_ptr + header_word_count);
    size_t binary_size = (size - header_size);

    debug(user_context)
        << "  decoding module ("
        << "module_ptr: " << (void *)module_ptr << ", "
        << "header_word_count: " << header_word_count << ", "
        << "header_size: " << header_size << ", "
        << "binary_ptr: " << (void *)binary_ptr << ", "
        << "binary_size: " << (uint32_t)binary_size << ")\n";

    VkShaderModuleCreateInfo shader_info = {
        VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO,  // struct type
        nullptr,                                      // pointer to struct extending this
        0,                                            // flags
        (size_t)binary_size,                          // code size (in bytes)
        (const uint32_t *)binary_ptr,                 // SPIR-V code
    };

    VkSystemAllocationScope alloc_scope = VkSystemAllocationScope::VK_SYSTEM_ALLOCATION_SCOPE_OBJECT;
    VulkanCompiledShaderModule *compiled_module = (VulkanCompiledShaderModule *)vk_host_malloc(user_context, sizeof(VulkanCompiledShaderModule), 0, alloc_scope, allocator->callbacks());
    if (compiled_module == nullptr) {
        error(user_context) << "Vulkan: Failed to allocate compilation cache entry! Out of memory!\n";
        return nullptr;
    }
    memset(compiled_module, 0, sizeof(VulkanCompiledShaderModule));

    // decode the entry point data and shader bindings for the module
    VulkanShaderBinding *decoded_bindings = vk_decode_shader_bindings(user_context, allocator, module_ptr, module_size);
    if (decoded_bindings == nullptr) {
        error(user_context) << "Vulkan: Failed to decode shader bindings!\n";
        return nullptr;
    }

    // validate that the shader can actually run on the current device
    int valid_status = vk_validate_shader_for_device(user_context, allocator, decoded_bindings, shader_count);
    if (valid_status != halide_error_code_success) {
        return nullptr;
    }

    // save the decoded shader data into the compiled module
    compiled_module->shader_bindings = decoded_bindings;
    compiled_module->shader_count = shader_count;

    VkResult result = vkCreateShaderModule(allocator->current_device(), &shader_info, allocator->callbacks(), &compiled_module->shader_module);
    if (result != VK_SUCCESS) {
        error(user_context) << "Vulkan: vkCreateShaderModule Failed! Error returned: " << vk_get_error_name(result) << "\n";
        return nullptr;
    }

    // allocate an array to hold the descriptor set layouts (one per entry point)
    if (compiled_module->shader_count) {
        compiled_module->descriptor_set_layouts = (VkDescriptorSetLayout *)vk_host_malloc(user_context, compiled_module->shader_count * sizeof(VkDescriptorSetLayout), 0, alloc_scope, allocator->callbacks());
        if (compiled_module->descriptor_set_layouts == nullptr) {
            error(user_context) << "Vulkan: Failed to allocate descriptor set layouts for cache entry! Out of memory!\n";
            return nullptr;
        }
        memset(compiled_module->descriptor_set_layouts, 0, compiled_module->shader_count * sizeof(VkDescriptorSetLayout));
    }

    int64_t t_after = halide_current_time_ns(user_context);
    debug(user_context) << " Time: " << (t_after - t_before) / 1.0e6 << " ms\n";

    return compiled_module;
}
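
// Worked example (hypothetical layout, inferred from the parsing in vk_compile_kernel_module and
// vk_compile_shader_module above): a code module holding two kernels is a word-aligned header
// followed by the concatenated per-kernel binaries:
//
//   [ kernel_count = 2,
//     binary_size_0, name_len_0, <"kernel_0" padded to word size>,
//     binary_size_1, name_len_1, <"kernel_1" padded to word size> ]
//   [ binary for kernel_0 ]   <- byte_offset points here after the header
//   [ binary for kernel_1 ]
//
// Each per-kernel binary in turn begins with its own sidecar header (header_word_count,
// shader_count, shader bindings ...) which vk_compile_shader_module skips past to reach
// the raw SPIR-V words.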
void vk_destroy_compiled_shader_module(VulkanCompiledShaderModule *shader_module, VulkanMemoryAllocator *allocator) {
    void *user_context = nullptr;
    debug(user_context)
        << " vk_destroy_compiled_shader_module (shader_module: "
        << shader_module << ", allocator: " << allocator << ")\n";

    if (shader_module == nullptr) {
        return;
    }
    if (allocator == nullptr) {
        return;
    }

    if (shader_module->descriptor_set_layouts) {
        for (uint32_t n = 0; n < shader_module->shader_count; n++) {
            debug(user_context) << " destroying descriptor set layout [" << n << "] "
                                << shader_module->shader_bindings[n].entry_point_name << "\n";
            vk_destroy_descriptor_set_layout(user_context, allocator, shader_module->descriptor_set_layouts[n]);
            shader_module->descriptor_set_layouts[n] = VK_NULL_HANDLE;
        }
        vk_host_free(user_context, shader_module->descriptor_set_layouts, allocator->callbacks());
        shader_module->descriptor_set_layouts = nullptr;
    }
    if (shader_module->pipeline_layout) {
        debug(user_context) << " destroying pipeline layout " << (void *)shader_module->pipeline_layout << "\n";
        vk_destroy_pipeline_layout(user_context, allocator, shader_module->pipeline_layout);
        shader_module->pipeline_layout = VK_NULL_HANDLE;
    }

    if (shader_module->shader_bindings) {
        for (uint32_t n = 0; n < shader_module->shader_count; n++) {
            if (shader_module->shader_bindings[n].args_region) {
                vk_destroy_scalar_uniform_buffer(user_context, allocator, shader_module->shader_bindings[n].args_region);
                shader_module->shader_bindings[n].args_region = nullptr;
            }
            if (shader_module->shader_bindings[n].descriptor_pool) {
                vk_destroy_descriptor_pool(user_context, allocator, shader_module->shader_bindings[n].descriptor_pool);
                shader_module->shader_bindings[n].descriptor_pool = VK_NULL_HANDLE;
            }
            if (shader_module->shader_bindings[n].specialization_constants) {
                vk_host_free(user_context, shader_module->shader_bindings[n].specialization_constants, allocator->callbacks());
                shader_module->shader_bindings[n].specialization_constants = nullptr;
            }
            if (shader_module->shader_bindings[n].shared_memory_allocations) {
                vk_host_free(user_context, shader_module->shader_bindings[n].shared_memory_allocations, allocator->callbacks());
                shader_module->shader_bindings[n].shared_memory_allocations = nullptr;
            }
            if (shader_module->shader_bindings[n].compute_pipeline) {
                vk_destroy_compute_pipeline(user_context, allocator, shader_module->shader_bindings[n].compute_pipeline);
                shader_module->shader_bindings[n].compute_pipeline = VK_NULL_HANDLE;
            }
        }
        vk_host_free(user_context, shader_module->shader_bindings, allocator->callbacks());
        shader_module->shader_bindings = nullptr;
    }
    if (shader_module->shader_module) {
        debug(user_context) << " . destroying shader module " << (void *)shader_module->shader_module << "\n";
        vkDestroyShaderModule(allocator->current_device(), shader_module->shader_module, allocator->callbacks());
        shader_module->shader_module = VK_NULL_HANDLE;
    }
    shader_module->shader_count = 0;
    vk_host_free(user_context, shader_module, allocator->callbacks());
    shader_module = nullptr;
}
void vk_destroy_compilation_cache_entry(VulkanCompilationCacheEntry *cache_entry) {
    void *user_context = nullptr;
    debug(user_context)
        << " vk_destroy_compilation_cache_entry (cache_entry: " << cache_entry << ")\n";

    if (cache_entry == nullptr) {
        return;
    }

    VulkanMemoryAllocator *allocator = cache_entry->allocator;
    if (allocator == nullptr) {
        return;
    }

    // destroy each compiled shader module held by the cache entry
    for (uint32_t m = 0; m < cache_entry->module_count; m++) {
        VulkanCompiledShaderModule *shader_module = cache_entry->compiled_modules[m];
        vk_destroy_compiled_shader_module(shader_module, allocator);
    }
    vk_host_free(user_context, cache_entry->compiled_modules, allocator->callbacks());
    cache_entry->compiled_modules = nullptr;
    cache_entry->module_count = 0;
    cache_entry->allocator = nullptr;
    vk_host_free(user_context, cache_entry, allocator->callbacks());
    cache_entry = nullptr;
}
int vk_destroy_shader_modules(void *user_context, VulkanMemoryAllocator *allocator) {
    debug(user_context)
        << " vk_destroy_shader_modules (user_context: " << user_context << ")\n";

    int64_t t_before = halide_current_time_ns(user_context);

    // release all cached compilation entries for this device
    compilation_cache.delete_context(user_context, allocator->current_device(), vk_destroy_compilation_cache_entry);

    int64_t t_after = halide_current_time_ns(user_context);
    debug(user_context) << " Time: " << (t_after - t_before) / 1.0e6 << " ms\n";
    return halide_error_code_success;
}
int vk_do_multidimensional_copy(void *user_context, VkCommandBuffer command_buffer,
                                const device_copy &c,
                                uint64_t src_offset, uint64_t dst_offset,
                                int d, bool from_host, bool to_host) {
    if (d == 0) {

        if ((!from_host && to_host) ||
            (from_host && !to_host) ||
            (!from_host && !to_host)) {

            // a device buffer is involved, so record a buffer copy command
            VkBufferCopy buffer_copy = {
                c.src_begin + src_offset,  // srcOffset
                dst_offset,                // dstOffset
                c.chunk_size,              // size
            };

            VkBuffer *src_buffer = reinterpret_cast<VkBuffer *>(c.src);
            VkBuffer *dst_buffer = reinterpret_cast<VkBuffer *>(c.dst);
            if (!src_buffer || !dst_buffer) {
                error(user_context) << "Vulkan: Failed to retrieve buffer for device memory!\n";
                return halide_error_code_internal_error;
            }

            vkCmdCopyBuffer(command_buffer, *src_buffer, *dst_buffer, 1, &buffer_copy);

        } else if ((c.dst + dst_offset) != (c.src + src_offset)) {
            // host-to-host copy
            memcpy((void *)(c.dst + dst_offset), (void *)(c.src + src_offset), c.chunk_size);
        }
    } else {
        // recurse over the outer dimensions, accumulating the per-dimension stride offsets
        for (uint64_t i = 0; i < c.extent[d - 1]; i++) {
            uint64_t src_off = i * c.src_stride_bytes[d - 1];
            uint64_t dst_off = i * c.dst_stride_bytes[d - 1];
            int err = vk_do_multidimensional_copy(user_context, command_buffer, c,
                                                  src_offset + src_off,
                                                  dst_offset + dst_off,
                                                  d - 1, from_host, to_host);
            if (err) {
                return err;
            }
        }
    }
    return halide_error_code_success;
}
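
// Worked example (hypothetical values): copying a 2-D sub-image of 16 rows, where each row is
// 256 contiguous bytes within a 1024-byte stride, decomposes as
//
//   c.chunk_size          = 256;   // innermost contiguous run, handled by the d == 0 case
//   c.extent[0]           = 16;    // rows
//   c.src_stride_bytes[0] = c.dst_stride_bytes[0] = 1024;
//
// so vk_do_multidimensional_copy(..., d = 1, ...) recurses once per row, and each d == 0 call
// records a single vkCmdCopyBuffer region (or performs a memcpy for host-to-host copies) at the
// accumulated src/dst offsets.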
int vk_device_crop_from_offset(void *user_context,
                               const struct halide_buffer_t *src,
                               int64_t offset_bytes,
                               struct halide_buffer_t *dst) {
    VulkanContext ctx(user_context);
    if (ctx.error != halide_error_code_success) {
        error(user_context) << "Vulkan: Failed to acquire context!\n";
        return ctx.error;
    }

    int64_t t_before = halide_current_time_ns(user_context);

    if (offset_bytes < 0) {
        error(user_context) << "Vulkan: Invalid offset for device crop!\n";
        return halide_error_code_device_crop_failed;
    }

    // get the allocated region for the device
    MemoryRegion *device_region = reinterpret_cast<MemoryRegion *>(src->device);
    if (device_region == nullptr) {
        error(user_context) << "Vulkan: Failed to crop region! Invalid device region!\n";
        return halide_error_code_device_crop_failed;
    }

    // create the cropped region from the allocated region
    MemoryRegion *cropped_region = ctx.allocator->create_crop(user_context, device_region, (uint64_t)offset_bytes);
    if ((cropped_region == nullptr) || (cropped_region->handle == nullptr)) {
        error(user_context) << "Vulkan: Failed to crop region! Unable to create memory region!\n";
        return halide_error_code_device_crop_failed;
    }

    // update the destination buffer to point at the cropped region
    dst->device = (uint64_t)cropped_region;
    dst->device_interface = src->device_interface;

    int64_t t_after = halide_current_time_ns(user_context);
    debug(user_context) << " Time: " << (t_after - t_before) / 1.0e6 << " ms\n";

    return halide_error_code_success;
}
#endif  // HALIDE_RUNTIME_VULKAN_RESOURCES_H