1 #ifndef HALIDE_RUNTIME_VULKAN_INTERNAL_H
2 #define HALIDE_RUNTIME_VULKAN_INTERNAL_H
// Forward declarations of internal runtime types (defined elsewhere in the
// Vulkan runtime sources).
25 class VulkanMemoryAllocator;
26 struct VulkanShaderBinding;
27 struct VulkanCompilationCacheEntry;
// -- Memory allocator lifecycle and device-buffer helpers (declarations).
// NOTE(review): these prototypes appear truncated by extraction (unbalanced
// parentheses, missing trailing parameters) — restore full signatures from
// the upstream header before compiling.
39 VulkanMemoryAllocator *vk_create_memory_allocator(
void *user_context, VkDevice device, VkPhysicalDevice physical_device,
42 int vk_destroy_memory_allocator(
void *user_context, VulkanMemoryAllocator *allocator);
43 int vk_clear_device_buffer(
void *user_context,
44 VulkanMemoryAllocator *allocator,
45 VkCommandPool command_pool,
46 VkQueue command_queue,
47 VkBuffer device_buffer);
// -- Context, instance, and device management (declarations).
// NOTE(review): several prototypes below are truncated by extraction
// (missing parameter lines / closing parens) — verify against upstream.
52 int vk_create_context(
54 VulkanMemoryAllocator **allocator,
57 VkPhysicalDevice *physical_device,
58 VkCommandPool *command_pool,
59 VkQueue *queue,
uint32_t *queue_family_index);
// Reports the compute capability version (major/minor) via out-parameters.
61 int vk_find_compute_capability(
void *user_context,
int *major,
int *minor);
64 int vk_destroy_instance(
void *user_context, VkInstance instance,
const VkAllocationCallbacks *alloc_callbacks);
// Selects which physical device to use for the given context.
66 int vk_select_device_for_context(
void *user_context,
67 VkInstance *instance, VkDevice *device,
68 VkPhysicalDevice *physical_device,
71 int vk_create_device(
void *user_context,
const StringTable &requested_layers, VkInstance *instance, VkDevice *device, VkQueue *queue,
// -- Extension queries and command pool/buffer management (declarations).
// NOTE(review): some prototypes below are truncated by extraction — verify
// full parameter lists against upstream.
// Fills ext_table with the extensions supported by the physical device;
// returns the number of entries.
82 uint32_t vk_get_supported_device_extensions(
void *user_context, VkPhysicalDevice physical_device,
StringTable &ext_table);
83 bool vk_validate_required_extension_support(
void *user_context,
92 int vk_create_command_pool(
void *user_context, VulkanMemoryAllocator *allocator,
uint32_t queue_index, VkCommandPool *command_pool);
93 int vk_destroy_command_pool(
void *user_context, VulkanMemoryAllocator *allocator, VkCommandPool command_pool);
96 int vk_create_command_buffer(
void *user_context, VulkanMemoryAllocator *allocator, VkCommandPool pool, VkCommandBuffer *command_buffer);
97 int vk_destroy_command_buffer(
void *user_context, VulkanMemoryAllocator *allocator, VkCommandPool command_pool, VkCommandBuffer command_buffer);
// -- Command buffer recording and submission (declarations).
// Records a compute dispatch (blocksX x blocksY x blocksZ workgroups) into
// the given command buffer using the supplied pipeline and descriptor set.
// NOTE(review): prototype truncated by extraction — verify full parameter
// list against upstream.
99 int vk_fill_command_buffer_with_dispatch_call(
void *user_context,
101 VkCommandBuffer command_buffer,
102 VkPipeline compute_pipeline,
104 VkDescriptorSet descriptor_set,
106 int blocksX,
int blocksY,
int blocksZ);
108 int vk_submit_command_buffer(
void *user_context, VkQueue queue, VkCommandBuffer command_buffer);
// -- Scalar uniform buffer management (declarations).
// NOTE(review): prototypes below are truncated by extraction (missing
// parameter lines / closing parens) — verify against upstream.
111 bool vk_needs_scalar_uniform_buffer(
void *user_context,
116 size_t vk_estimate_scalar_uniform_buffer_size(
void *user_context,
// Allocates a uniform buffer region of the given size via the allocator.
121 MemoryRegion *vk_create_scalar_uniform_buffer(
void *user_context,
122 VulkanMemoryAllocator *allocator,
123 size_t scalar_buffer_size);
125 int vk_update_scalar_uniform_buffer(
void *user_context,
126 VulkanMemoryAllocator *allocator,
132 int vk_destroy_scalar_uniform_buffer(
void *user_context, VulkanMemoryAllocator *allocator,
// -- Descriptor pool / layout / set management (declarations).
// NOTE(review): several prototypes below are truncated by extraction —
// verify full parameter lists against upstream.
135 int vk_create_descriptor_pool(
void *user_context,
136 VulkanMemoryAllocator *allocator,
139 VkDescriptorPool *descriptor_pool);
141 int vk_destroy_descriptor_pool(
void *user_context,
142 VulkanMemoryAllocator *allocator,
143 VkDescriptorPool descriptor_pool);
// Returns the number of bindings required for a descriptor set.
146 uint32_t vk_count_bindings_for_descriptor_set(
void *user_context,
151 int vk_create_descriptor_set_layout(
void *user_context,
152 VulkanMemoryAllocator *allocator,
155 VkDescriptorSetLayout *layout);
157 int vk_destroy_descriptor_set_layout(
void *user_context,
158 VulkanMemoryAllocator *allocator,
159 VkDescriptorSetLayout descriptor_set_layout);
162 int vk_create_descriptor_set(
void *user_context,
163 VulkanMemoryAllocator *allocator,
164 VkDescriptorSetLayout descriptor_set_layout,
165 VkDescriptorPool descriptor_pool,
166 VkDescriptorSet *descriptor_set);
// Binds the scalar-args uniform buffer plus the uniform/storage buffers
// into the given descriptor set.
168 int vk_update_descriptor_set(
void *user_context,
169 VulkanMemoryAllocator *allocator,
170 VkBuffer *scalar_args_buffer,
171 size_t uniform_buffer_count,
172 size_t storage_buffer_count,
176 VkDescriptorSet descriptor_set);
// -- Pipeline layout and compute pipeline management (declarations).
// NOTE(review): prototypes below are truncated by extraction (missing
// parameter lines / closing parens) — verify against upstream.
179 int vk_create_pipeline_layout(
void *user_context,
180 VulkanMemoryAllocator *allocator,
185 int vk_destroy_pipeline_layout(
void *user_context,
186 VulkanMemoryAllocator *allocator,
// Creates the named compute pipeline; result returned via out-parameter.
189 int vk_create_compute_pipeline(
void *user_context,
190 VulkanMemoryAllocator *allocator,
191 const char *pipeline_name,
195 VkPipeline *compute_pipeline);
197 int vk_setup_compute_pipeline(
void *user_context,
198 VulkanMemoryAllocator *allocator,
202 VkPipeline *compute_pipeline);
204 int vk_destroy_compute_pipeline(
void *user_context,
205 VulkanMemoryAllocator *allocator,
206 VkPipeline compute_pipeline);
// -- Shader module compilation and copy helpers (declarations).
// NOTE(review): prototypes below are truncated by extraction — verify full
// parameter lists against upstream.
209 VulkanShaderBinding *vk_decode_shader_bindings(
void *user_context, VulkanMemoryAllocator *allocator,
// Compiles shader source of the given size into a cached shader module.
212 VulkanCompilationCacheEntry *vk_compile_shader_module(
void *user_context, VulkanMemoryAllocator *allocator,
213 const char *src,
int size);
215 int vk_destroy_shader_modules(
void *user_context, VulkanMemoryAllocator *allocator);
// Recursive d-dimensional copy; from_host/to_host select the direction.
218 int vk_do_multidimensional_copy(
void *user_context, VkCommandBuffer command_buffer,
220 int d,
bool from_host,
bool to_host);
227 const char *vk_get_error_name(
VkResult error) {
232 return "VK_NOT_READY";
236 return "VK_EVENT_SET";
238 return "VK_EVENT_RESET";
240 return "VK_INCOMPLETE";
242 return "VK_ERROR_OUT_OF_HOST_MEMORY";
244 return "VK_ERROR_OUT_OF_DEVICE_MEMORY";
246 return "VK_ERROR_INITIALIZATION_FAILED";
248 return "VK_ERROR_DEVICE_LOST";
250 return "VK_ERROR_MEMORY_MAP_FAILED";
252 return "VK_ERROR_LAYER_NOT_PRESENT";
254 return "VK_ERROR_EXTENSION_NOT_PRESENT";
256 return "VK_ERROR_FEATURE_NOT_PRESENT";
258 return "VK_ERROR_INCOMPATIBLE_DRIVER";
260 return "VK_ERROR_TOO_MANY_OBJECTS";
262 return "VK_ERROR_FORMAT_NOT_SUPPORTED";
264 return "VK_ERROR_FRAGMENTED_POOL";
266 return "VK_ERROR_SURFACE_LOST_KHR";
268 return "VK_ERROR_NATIVE_WINDOW_IN_USE_KHR";
270 return "VK_SUBOPTIMAL_KHR";
272 return "VK_ERROR_OUT_OF_DATE_KHR";
274 return "VK_ERROR_INCOMPATIBLE_DISPLAY_KHR";
276 return "VK_ERROR_VALIDATION_FAILED_EXT";
278 return "VK_ERROR_INVALID_SHADER_NV";
280 return "VK_ERROR_OUT_OF_POOL_MEMORY_KHR";
282 return "VK_ERROR_INVALID_EXTERNAL_HANDLE_KHR";
284 return "<Unknown Vulkan Result Code>";
296 #endif // HALIDE_RUNTIME_VULKAN_INTERNAL_H