/*
 * Copyright 2015 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <hardware/hwvulkan.h>

#include <errno.h>
#include <inttypes.h>
#include <malloc.h>  // for malloc_usable_size
#include <stdlib.h>
#include <string.h>
#include <unistd.h>

#include <algorithm>
#include <array>

#include <log/log.h>

#include "null_driver_gen.h"

using namespace null_driver;

struct VkPhysicalDevice_T {
    hwvulkan_dispatch_t dispatch;
};

struct VkInstance_T {
    hwvulkan_dispatch_t dispatch;
    VkAllocationCallbacks allocator;
    VkPhysicalDevice_T physical_device;
    uint64_t next_callback_handle;
};

struct VkQueue_T {
    hwvulkan_dispatch_t dispatch;
};

struct VkCommandBuffer_T {
    hwvulkan_dispatch_t dispatch;
};

namespace {
// Handles for non-dispatchable objects are either pointers, or arbitrary
// 64-bit non-zero values. We only use pointers when we need to keep state for
// the object even in a null driver. For the rest, we form a handle as:
//   [63:63] = 1 to distinguish from pointer handles*
//   [62:56] = non-zero handle type enum value
//   [55: 0] = per-handle-type incrementing counter
// * This works because virtual addresses with the high bit set are reserved
//   for kernel data in all ABIs we run on.
//
// We never reclaim handles on vkDestroy*. It's not even necessary for us to
// have distinct handles for live objects, and practically speaking we won't
// ever create 2^56 objects of the same type from a single VkDevice in a null
// driver.
//
// Using a namespace here instead of 'enum class' since we want scoped
// constants but also want implicit conversions to integral types.
namespace HandleType {
enum Enum {
    kBufferView,
    kDebugReportCallbackEXT,
    kDescriptorPool,
    kDescriptorSet,
    kDescriptorSetLayout,
    kEvent,
    kFence,
    kFramebuffer,
    kImageView,
    kPipeline,
    kPipelineCache,
    kPipelineLayout,
    kQueryPool,
    kRenderPass,
    kSampler,
    kSemaphore,
    kShaderModule,

    kNumTypes
};
}  // namespace HandleType

const VkDeviceSize kMaxDeviceMemory = 0x10000000;  // 256 MiB, arbitrary

}  // anonymous namespace

struct VkDevice_T {
    hwvulkan_dispatch_t dispatch;
    VkAllocationCallbacks allocator;
    VkInstance_T* instance;
    VkQueue_T queue;
    std::array<uint64_t, HandleType::kNumTypes> next_handle;
};

// -----------------------------------------------------------------------------
// Declare HAL_MODULE_INFO_SYM early so it can be referenced by nulldrv_device
// later.

namespace {
int OpenDevice(const hw_module_t* module, const char* id, hw_device_t** device);
hw_module_methods_t nulldrv_module_methods = {.open = OpenDevice};
}  // namespace

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wmissing-variable-declarations"
__attribute__((visibility("default"))) hwvulkan_module_t HAL_MODULE_INFO_SYM = {
    .common =
        {
            .tag = HARDWARE_MODULE_TAG,
            .module_api_version = HWVULKAN_MODULE_API_VERSION_0_1,
            .hal_api_version = HARDWARE_HAL_API_VERSION,
            .id = HWVULKAN_HARDWARE_MODULE_ID,
            .name = "Null Vulkan Driver",
            .author = "The Android Open Source Project",
            .methods = &nulldrv_module_methods,
        },
};
#pragma clang diagnostic pop

// -----------------------------------------------------------------------------

namespace {

int CloseDevice(struct hw_device_t* /*device*/) {
    // nothing to do - opening a device doesn't allocate any resources
    return 0;
}

hwvulkan_device_t nulldrv_device = {
    .common =
        {
            .tag = HARDWARE_DEVICE_TAG,
            .version = HWVULKAN_DEVICE_API_VERSION_0_1,
            .module = &HAL_MODULE_INFO_SYM.common,
            .close = CloseDevice,
        },
    .EnumerateInstanceExtensionProperties =
        EnumerateInstanceExtensionProperties,
    .CreateInstance = CreateInstance,
    .GetInstanceProcAddr = GetInstanceProcAddr};

int OpenDevice(const hw_module_t* /*module*/,
               const char* id,
               hw_device_t** device) {
    if (strcmp(id, HWVULKAN_DEVICE_0) == 0) {
        *device = &nulldrv_device.common;
        return 0;
    }
    return -ENOENT;
}

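// Recover the owning VkInstance_T from a VkPhysicalDevice_T pointer. The
// physical device is embedded by value inside the instance, so subtracting
// its member offset (a container_of-style computation) yields the instance.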
VkInstance_T* GetInstanceFromPhysicalDevice(
    VkPhysicalDevice_T* physical_device) {
    return reinterpret_cast<VkInstance_T*>(
        reinterpret_cast<uintptr_t>(physical_device) -
        offsetof(VkInstance_T, physical_device));
}

uint64_t AllocHandle(uint64_t type, uint64_t* next_handle) {
    const uint64_t kHandleMask = (UINT64_C(1) << 56) - 1;
    ALOGE_IF(*next_handle == kHandleMask,
             "non-dispatchable handles of type=%" PRIu64
             " are about to overflow",
             type);
    return (UINT64_C(1) << 63) | ((type & 0x7) << 56) |
           ((*next_handle)++ & kHandleMask);
}
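// For example, the first handle of type kFence (enum value 6) handed out by a
// device is 0x8600000000000000: bit 63 set, 6 in the type field, counter 0.
// Note that (type & 0x7) keeps only the low three bits of the type, so enum
// values of 8 and above alias into the same field; that's harmless here since
// handles are never required to be distinct (see the comment above).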

template <class Handle>
Handle AllocHandle(VkInstance instance, HandleType::Enum type) {
    return reinterpret_cast<Handle>(
        AllocHandle(type, &instance->next_callback_handle));
}

template <class Handle>
Handle AllocHandle(VkDevice device, HandleType::Enum type) {
    return reinterpret_cast<Handle>(
        AllocHandle(type, &device->next_handle[type]));
}

VKAPI_ATTR void* DefaultAllocate(void*,
                                 size_t size,
                                 size_t alignment,
                                 VkSystemAllocationScope) {
    void* ptr = nullptr;
    // Vulkan requires 'alignment' to be a power of two, but posix_memalign
    // additionally requires that it be at least sizeof(void*).
    int ret = posix_memalign(&ptr, std::max(alignment, sizeof(void*)), size);
    return ret == 0 ? ptr : nullptr;
}

VKAPI_ATTR void* DefaultReallocate(void*,
                                   void* ptr,
                                   size_t size,
                                   size_t alignment,
                                   VkSystemAllocationScope) {
    if (size == 0) {
        free(ptr);
        return nullptr;
    }

    // TODO(jessehall): Right now we never shrink allocations; if the new
    // request is smaller than the existing chunk, we just continue using it.
    // The null driver never reallocs, so this doesn't matter. If that changes,
    // or if this code is copied into some other project, this should probably
    // have a heuristic to allocate-copy-free when doing so will save "enough"
    // space.
    size_t old_size = ptr ? malloc_usable_size(ptr) : 0;
    if (size <= old_size)
        return ptr;

    void* new_ptr = nullptr;
    if (posix_memalign(&new_ptr, std::max(alignment, sizeof(void*)), size) != 0)
        return nullptr;
    if (ptr) {
        memcpy(new_ptr, ptr, std::min(old_size, size));
        free(ptr);
    }
    return new_ptr;
}

VKAPI_ATTR void DefaultFree(void*, void* ptr) {
    free(ptr);
}

const VkAllocationCallbacks kDefaultAllocCallbacks = {
    .pUserData = nullptr,
    .pfnAllocation = DefaultAllocate,
    .pfnReallocation = DefaultReallocate,
    .pfnFree = DefaultFree,
};

}  // namespace

namespace null_driver {

#define DEFINE_OBJECT_HANDLE_CONVERSION(T)              \
    T* Get##T##FromHandle(Vk##T h);                     \
    T* Get##T##FromHandle(Vk##T h) {                    \
        return reinterpret_cast<T*>(uintptr_t(h));      \
    }                                                   \
    Vk##T GetHandleTo##T(const T* obj);                 \
    Vk##T GetHandleTo##T(const T* obj) {                \
        return Vk##T(reinterpret_cast<uintptr_t>(obj)); \
    }
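// For instance, DEFINE_OBJECT_HANDLE_CONVERSION(CommandPool) expands to
// GetCommandPoolFromHandle()/GetHandleToCommandPool(), which simply cast
// between a CommandPool* and its VkCommandPool handle; these are the
// pointer-backed handles mentioned at the top of this file.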

// -----------------------------------------------------------------------------
// Global

VKAPI_ATTR
VkResult EnumerateInstanceVersion(uint32_t* pApiVersion) {
    *pApiVersion = VK_API_VERSION_1_1;
    return VK_SUCCESS;
}

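// This follows the standard Vulkan two-call enumeration idiom; a caller would
// typically do something like (illustrative):
//   uint32_t count = 0;
//   EnumerateInstanceExtensionProperties(nullptr, &count, nullptr);
//   std::vector<VkExtensionProperties> ext(count);
//   EnumerateInstanceExtensionProperties(nullptr, &count, ext.data());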
VKAPI_ATTR
VkResult EnumerateInstanceExtensionProperties(
    const char* layer_name,
    uint32_t* count,
    VkExtensionProperties* properties) {
    if (layer_name) {
        ALOGW(
            "Driver vkEnumerateInstanceExtensionProperties shouldn't be called "
            "with a layer name ('%s')",
            layer_name);
    }

    const VkExtensionProperties kExtensions[] = {
        {VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME,
         VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_SPEC_VERSION}};
    const uint32_t kExtensionsCount =
        sizeof(kExtensions) / sizeof(kExtensions[0]);

    if (!properties || *count > kExtensionsCount)
        *count = kExtensionsCount;
    if (properties)
        std::copy(kExtensions, kExtensions + *count, properties);
    return *count < kExtensionsCount ? VK_INCOMPLETE : VK_SUCCESS;
}

VKAPI_ATTR
VkResult CreateInstance(const VkInstanceCreateInfo* create_info,
                        const VkAllocationCallbacks* allocator,
                        VkInstance* out_instance) {
    if (!allocator)
        allocator = &kDefaultAllocCallbacks;

    VkInstance_T* instance =
        static_cast<VkInstance_T*>(allocator->pfnAllocation(
            allocator->pUserData, sizeof(VkInstance_T), alignof(VkInstance_T),
            VK_SYSTEM_ALLOCATION_SCOPE_INSTANCE));
    if (!instance)
        return VK_ERROR_OUT_OF_HOST_MEMORY;

    instance->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    instance->allocator = *allocator;
    instance->physical_device.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    instance->next_callback_handle = 0;

    for (uint32_t i = 0; i < create_info->enabledExtensionCount; i++) {
        if (strcmp(create_info->ppEnabledExtensionNames[i],
                   VK_KHR_GET_PHYSICAL_DEVICE_PROPERTIES_2_EXTENSION_NAME) ==
            0) {
            ALOGV("instance extension '%s' requested",
                  create_info->ppEnabledExtensionNames[i]);
        } else if (strcmp(create_info->ppEnabledExtensionNames[i],
                          VK_EXT_DEBUG_REPORT_EXTENSION_NAME) == 0) {
            ALOGV("instance extension '%s' requested",
                  create_info->ppEnabledExtensionNames[i]);
        } else {
            ALOGW("unsupported extension '%s' requested",
                  create_info->ppEnabledExtensionNames[i]);
        }
    }

    *out_instance = instance;
    return VK_SUCCESS;
}

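// With a NULL instance only global commands may be queried, so fall back to
// the global lookup helper; otherwise defer to the per-instance lookup helper.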
VKAPI_ATTR
PFN_vkVoidFunction GetInstanceProcAddr(VkInstance instance, const char* name) {
    return instance ? GetInstanceProcAddr(name) : GetGlobalProcAddr(name);
}

VKAPI_ATTR
PFN_vkVoidFunction GetDeviceProcAddr(VkDevice, const char* name) {
    return GetInstanceProcAddr(name);
}

// -----------------------------------------------------------------------------
// Instance

void DestroyInstance(VkInstance instance,
                     const VkAllocationCallbacks* /*allocator*/) {
    instance->allocator.pfnFree(instance->allocator.pUserData, instance);
}

// -----------------------------------------------------------------------------
// PhysicalDevice

VkResult EnumeratePhysicalDevices(VkInstance instance,
                                  uint32_t* physical_device_count,
                                  VkPhysicalDevice* physical_devices) {
    if (!physical_devices)
        *physical_device_count = 1;
    else if (*physical_device_count == 0)
        return VK_INCOMPLETE;
    else {
        physical_devices[0] = &instance->physical_device;
        *physical_device_count = 1;
    }
    return VK_SUCCESS;
}

VkResult EnumerateDeviceLayerProperties(VkPhysicalDevice /*gpu*/,
                                        uint32_t* count,
                                        VkLayerProperties* /*properties*/) {
    ALOGW("Driver vkEnumerateDeviceLayerProperties shouldn't be called");
    *count = 0;
    return VK_SUCCESS;
}

VkResult EnumerateDeviceExtensionProperties(VkPhysicalDevice /*gpu*/,
                                            const char* layer_name,
                                            uint32_t* count,
                                            VkExtensionProperties* properties) {
    if (layer_name) {
        ALOGW(
            "Driver vkEnumerateDeviceExtensionProperties shouldn't be called "
            "with a layer name ('%s')",
            layer_name);
        *count = 0;
        return VK_SUCCESS;
    }

    const VkExtensionProperties kExtensions[] = {
        {VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME,
         VK_ANDROID_NATIVE_BUFFER_SPEC_VERSION}};
    const uint32_t kExtensionsCount =
        sizeof(kExtensions) / sizeof(kExtensions[0]);

    if (!properties || *count > kExtensionsCount)
        *count = kExtensionsCount;
    if (properties)
        std::copy(kExtensions, kExtensions + *count, properties);
    return *count < kExtensionsCount ? VK_INCOMPLETE : VK_SUCCESS;
}

void GetPhysicalDeviceProperties(VkPhysicalDevice,
                                 VkPhysicalDeviceProperties* properties) {
    properties->apiVersion = VK_MAKE_VERSION(1, 0, VK_HEADER_VERSION);
    properties->driverVersion = VK_MAKE_VERSION(0, 0, 1);
    properties->vendorID = 0;
    properties->deviceID = 0;
    properties->deviceType = VK_PHYSICAL_DEVICE_TYPE_OTHER;
    strcpy(properties->deviceName, "Android Vulkan Null Driver");
    memset(properties->pipelineCacheUUID, 0,
           sizeof(properties->pipelineCacheUUID));
    properties->limits = VkPhysicalDeviceLimits{
        4096,        // maxImageDimension1D
        4096,        // maxImageDimension2D
        256,         // maxImageDimension3D
        4096,        // maxImageDimensionCube
        256,         // maxImageArrayLayers
        65536,       // maxTexelBufferElements
        16384,       // maxUniformBufferRange
        1 << 27,     // maxStorageBufferRange
        128,         // maxPushConstantsSize
        4096,        // maxMemoryAllocationCount
        4000,        // maxSamplerAllocationCount
        1,           // bufferImageGranularity
        0,           // sparseAddressSpaceSize
        4,           // maxBoundDescriptorSets
        16,          // maxPerStageDescriptorSamplers
        12,          // maxPerStageDescriptorUniformBuffers
        4,           // maxPerStageDescriptorStorageBuffers
        16,          // maxPerStageDescriptorSampledImages
        4,           // maxPerStageDescriptorStorageImages
        4,           // maxPerStageDescriptorInputAttachments
        128,         // maxPerStageResources
        96,          // maxDescriptorSetSamplers
        72,          // maxDescriptorSetUniformBuffers
        8,           // maxDescriptorSetUniformBuffersDynamic
        24,          // maxDescriptorSetStorageBuffers
        4,           // maxDescriptorSetStorageBuffersDynamic
        96,          // maxDescriptorSetSampledImages
        24,          // maxDescriptorSetStorageImages
        4,           // maxDescriptorSetInputAttachments
        16,          // maxVertexInputAttributes
        16,          // maxVertexInputBindings
        2047,        // maxVertexInputAttributeOffset
        2048,        // maxVertexInputBindingStride
        64,          // maxVertexOutputComponents
        0,           // maxTessellationGenerationLevel
        0,           // maxTessellationPatchSize
        0,           // maxTessellationControlPerVertexInputComponents
        0,           // maxTessellationControlPerVertexOutputComponents
        0,           // maxTessellationControlPerPatchOutputComponents
        0,           // maxTessellationControlTotalOutputComponents
        0,           // maxTessellationEvaluationInputComponents
        0,           // maxTessellationEvaluationOutputComponents
        0,           // maxGeometryShaderInvocations
        0,           // maxGeometryInputComponents
        0,           // maxGeometryOutputComponents
        0,           // maxGeometryOutputVertices
        0,           // maxGeometryTotalOutputComponents
        64,          // maxFragmentInputComponents
        4,           // maxFragmentOutputAttachments
        0,           // maxFragmentDualSrcAttachments
        4,           // maxFragmentCombinedOutputResources
        16384,       // maxComputeSharedMemorySize
        {65536, 65536, 65536},  // maxComputeWorkGroupCount[3]
        128,         // maxComputeWorkGroupInvocations
        {128, 128, 64},  // maxComputeWorkGroupSize[3]
        4,           // subPixelPrecisionBits
        4,           // subTexelPrecisionBits
        4,           // mipmapPrecisionBits
        UINT32_MAX,  // maxDrawIndexedIndexValue
        1,           // maxDrawIndirectCount
        2,           // maxSamplerLodBias
        1,           // maxSamplerAnisotropy
        1,           // maxViewports
        {4096, 4096},  // maxViewportDimensions[2]
        {-8192.0f, 8191.0f},  // viewportBoundsRange[2]
        0,           // viewportSubPixelBits
        64,          // minMemoryMapAlignment
        256,         // minTexelBufferOffsetAlignment
        256,         // minUniformBufferOffsetAlignment
        256,         // minStorageBufferOffsetAlignment
        -8,          // minTexelOffset
        7,           // maxTexelOffset
        0,           // minTexelGatherOffset
        0,           // maxTexelGatherOffset
        0.0f,        // minInterpolationOffset
        0.0f,        // maxInterpolationOffset
        0,           // subPixelInterpolationOffsetBits
        4096,        // maxFramebufferWidth
        4096,        // maxFramebufferHeight
        256,         // maxFramebufferLayers
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // framebufferColorSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // framebufferDepthSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // framebufferStencilSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // framebufferNoAttachmentsSampleCounts
        4,           // maxColorAttachments
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // sampledImageColorSampleCounts
        VK_SAMPLE_COUNT_1_BIT,      // sampledImageIntegerSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // sampledImageDepthSampleCounts
        VK_SAMPLE_COUNT_1_BIT |
            VK_SAMPLE_COUNT_4_BIT,  // sampledImageStencilSampleCounts
        VK_SAMPLE_COUNT_1_BIT,      // storageImageSampleCounts
        1,           // maxSampleMaskWords
        VK_TRUE,     // timestampComputeAndGraphics
        1,           // timestampPeriod
        0,           // maxClipDistances
        0,           // maxCullDistances
        0,           // maxCombinedClipAndCullDistances
        2,           // discreteQueuePriorities
        {1.0f, 1.0f},  // pointSizeRange[2]
        {1.0f, 1.0f},  // lineWidthRange[2]
        0.0f,        // pointSizeGranularity
        0.0f,        // lineWidthGranularity
        VK_TRUE,     // strictLines
        VK_TRUE,     // standardSampleLocations
        1,           // optimalBufferCopyOffsetAlignment
        1,           // optimalBufferCopyRowPitchAlignment
        64,          // nonCoherentAtomSize
    };
}

void GetPhysicalDeviceProperties2KHR(VkPhysicalDevice physical_device,
                                     VkPhysicalDeviceProperties2KHR* properties) {
    GetPhysicalDeviceProperties(physical_device, &properties->properties);

    while (properties->pNext) {
        properties =
            reinterpret_cast<VkPhysicalDeviceProperties2KHR*>(properties->pNext);

#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Wold-style-cast"
        switch ((VkFlags)properties->sType) {
            case VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PRESENTATION_PROPERTIES_ANDROID: {
                VkPhysicalDevicePresentationPropertiesANDROID* presentation_properties =
                    reinterpret_cast<VkPhysicalDevicePresentationPropertiesANDROID*>(
                        properties);
#pragma clang diagnostic pop

                // Claim that we do all the right things for the loader to
                // expose KHR_shared_presentable_image on our behalf.
                presentation_properties->sharedImage = VK_TRUE;
            } break;

            default:
                // Silently ignore other extension query structs
                break;
        }
    }
}

void GetPhysicalDeviceQueueFamilyProperties(
    VkPhysicalDevice,
    uint32_t* count,
    VkQueueFamilyProperties* properties) {
    if (!properties || *count > 1)
        *count = 1;
    if (properties && *count == 1) {
        properties->queueFlags = VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT |
                                 VK_QUEUE_TRANSFER_BIT;
        properties->queueCount = 1;
        properties->timestampValidBits = 64;
        properties->minImageTransferGranularity = VkExtent3D{1, 1, 1};
    }
}

void GetPhysicalDeviceQueueFamilyProperties2KHR(
    VkPhysicalDevice physical_device,
    uint32_t* count,
    VkQueueFamilyProperties2KHR* properties) {
    // Note: the caller may pass an array of extensible structures, but
    // forwarding only the embedded core structure is safe here since we expose
    // just one queue family.
    GetPhysicalDeviceQueueFamilyProperties(
        physical_device, count,
        properties ? &properties->queueFamilyProperties : nullptr);
}

void GetPhysicalDeviceMemoryProperties(
    VkPhysicalDevice,
    VkPhysicalDeviceMemoryProperties* properties) {
    properties->memoryTypeCount = 1;
    properties->memoryTypes[0].propertyFlags =
        VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT |
        VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
        VK_MEMORY_PROPERTY_HOST_COHERENT_BIT |
        VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
    properties->memoryTypes[0].heapIndex = 0;
    properties->memoryHeapCount = 1;
    properties->memoryHeaps[0].size = kMaxDeviceMemory;
    properties->memoryHeaps[0].flags = VK_MEMORY_HEAP_DEVICE_LOCAL_BIT;
}

void GetPhysicalDeviceMemoryProperties2KHR(
    VkPhysicalDevice physical_device,
    VkPhysicalDeviceMemoryProperties2KHR* properties) {
    GetPhysicalDeviceMemoryProperties(physical_device,
                                      &properties->memoryProperties);
}

void GetPhysicalDeviceFeatures(VkPhysicalDevice /*gpu*/,
                               VkPhysicalDeviceFeatures* features) {
    *features = VkPhysicalDeviceFeatures{
        VK_TRUE,   // robustBufferAccess
        VK_FALSE,  // fullDrawIndexUint32
        VK_FALSE,  // imageCubeArray
        VK_FALSE,  // independentBlend
        VK_FALSE,  // geometryShader
        VK_FALSE,  // tessellationShader
        VK_FALSE,  // sampleRateShading
        VK_FALSE,  // dualSrcBlend
        VK_FALSE,  // logicOp
        VK_FALSE,  // multiDrawIndirect
        VK_FALSE,  // drawIndirectFirstInstance
        VK_FALSE,  // depthClamp
        VK_FALSE,  // depthBiasClamp
        VK_FALSE,  // fillModeNonSolid
        VK_FALSE,  // depthBounds
        VK_FALSE,  // wideLines
        VK_FALSE,  // largePoints
        VK_FALSE,  // alphaToOne
        VK_FALSE,  // multiViewport
        VK_FALSE,  // samplerAnisotropy
        VK_FALSE,  // textureCompressionETC2
        VK_FALSE,  // textureCompressionASTC_LDR
        VK_FALSE,  // textureCompressionBC
        VK_FALSE,  // occlusionQueryPrecise
        VK_FALSE,  // pipelineStatisticsQuery
        VK_FALSE,  // vertexPipelineStoresAndAtomics
        VK_FALSE,  // fragmentStoresAndAtomics
        VK_FALSE,  // shaderTessellationAndGeometryPointSize
        VK_FALSE,  // shaderImageGatherExtended
        VK_FALSE,  // shaderStorageImageExtendedFormats
        VK_FALSE,  // shaderStorageImageMultisample
        VK_FALSE,  // shaderStorageImageReadWithoutFormat
        VK_FALSE,  // shaderStorageImageWriteWithoutFormat
        VK_FALSE,  // shaderUniformBufferArrayDynamicIndexing
        VK_FALSE,  // shaderSampledImageArrayDynamicIndexing
        VK_FALSE,  // shaderStorageBufferArrayDynamicIndexing
        VK_FALSE,  // shaderStorageImageArrayDynamicIndexing
        VK_FALSE,  // shaderClipDistance
        VK_FALSE,  // shaderCullDistance
        VK_FALSE,  // shaderFloat64
        VK_FALSE,  // shaderInt64
        VK_FALSE,  // shaderInt16
        VK_FALSE,  // shaderResourceResidency
        VK_FALSE,  // shaderResourceMinLod
        VK_FALSE,  // sparseBinding
        VK_FALSE,  // sparseResidencyBuffer
        VK_FALSE,  // sparseResidencyImage2D
        VK_FALSE,  // sparseResidencyImage3D
        VK_FALSE,  // sparseResidency2Samples
        VK_FALSE,  // sparseResidency4Samples
        VK_FALSE,  // sparseResidency8Samples
        VK_FALSE,  // sparseResidency16Samples
        VK_FALSE,  // sparseResidencyAliased
        VK_FALSE,  // variableMultisampleRate
        VK_FALSE,  // inheritedQueries
    };
}

void GetPhysicalDeviceFeatures2KHR(VkPhysicalDevice physical_device,
                                   VkPhysicalDeviceFeatures2KHR* features) {
    GetPhysicalDeviceFeatures(physical_device, &features->features);
}

// -----------------------------------------------------------------------------
// Device

VkResult CreateDevice(VkPhysicalDevice physical_device,
                      const VkDeviceCreateInfo* create_info,
                      const VkAllocationCallbacks* allocator,
                      VkDevice* out_device) {
    VkInstance_T* instance = GetInstanceFromPhysicalDevice(physical_device);
    if (!allocator)
        allocator = &instance->allocator;
    VkDevice_T* device = static_cast<VkDevice_T*>(allocator->pfnAllocation(
        allocator->pUserData, sizeof(VkDevice_T), alignof(VkDevice_T),
        VK_SYSTEM_ALLOCATION_SCOPE_DEVICE));
    if (!device)
        return VK_ERROR_OUT_OF_HOST_MEMORY;

    device->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    device->allocator = *allocator;
    device->instance = instance;
    device->queue.dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    std::fill(device->next_handle.begin(), device->next_handle.end(),
              UINT64_C(0));

    for (uint32_t i = 0; i < create_info->enabledExtensionCount; i++) {
        if (strcmp(create_info->ppEnabledExtensionNames[i],
                   VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME) == 0) {
            ALOGV("Enabling " VK_ANDROID_NATIVE_BUFFER_EXTENSION_NAME);
        }
    }

    *out_device = device;
    return VK_SUCCESS;
}

void DestroyDevice(VkDevice device,
                   const VkAllocationCallbacks* /*allocator*/) {
    if (!device)
        return;
    device->allocator.pfnFree(device->allocator.pUserData, device);
}

void GetDeviceQueue(VkDevice device, uint32_t, uint32_t, VkQueue* queue) {
    *queue = &device->queue;
}

// -----------------------------------------------------------------------------
// CommandPool

struct CommandPool {
    typedef VkCommandPool HandleType;
    VkAllocationCallbacks allocator;
};
DEFINE_OBJECT_HANDLE_CONVERSION(CommandPool)

VkResult CreateCommandPool(VkDevice device,
                           const VkCommandPoolCreateInfo* /*create_info*/,
                           const VkAllocationCallbacks* allocator,
                           VkCommandPool* cmd_pool) {
    if (!allocator)
        allocator = &device->allocator;
    CommandPool* pool = static_cast<CommandPool*>(allocator->pfnAllocation(
        allocator->pUserData, sizeof(CommandPool), alignof(CommandPool),
        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
    if (!pool)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    pool->allocator = *allocator;
    *cmd_pool = GetHandleToCommandPool(pool);
    return VK_SUCCESS;
}

void DestroyCommandPool(VkDevice /*device*/,
                        VkCommandPool cmd_pool,
                        const VkAllocationCallbacks* /*allocator*/) {
    CommandPool* pool = GetCommandPoolFromHandle(cmd_pool);
    pool->allocator.pfnFree(pool->allocator.pUserData, pool);
}

// -----------------------------------------------------------------------------
// CmdBuffer

VkResult AllocateCommandBuffers(VkDevice /*device*/,
                                const VkCommandBufferAllocateInfo* alloc_info,
                                VkCommandBuffer* cmdbufs) {
    VkResult result = VK_SUCCESS;
    CommandPool& pool = *GetCommandPoolFromHandle(alloc_info->commandPool);
    std::fill(cmdbufs, cmdbufs + alloc_info->commandBufferCount, nullptr);
    for (uint32_t i = 0; i < alloc_info->commandBufferCount; i++) {
        cmdbufs[i] =
            static_cast<VkCommandBuffer_T*>(pool.allocator.pfnAllocation(
                pool.allocator.pUserData, sizeof(VkCommandBuffer_T),
                alignof(VkCommandBuffer_T), VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
        if (!cmdbufs[i]) {
            result = VK_ERROR_OUT_OF_HOST_MEMORY;
            break;
        }
        cmdbufs[i]->dispatch.magic = HWVULKAN_DISPATCH_MAGIC;
    }
    if (result != VK_SUCCESS) {
        for (uint32_t i = 0; i < alloc_info->commandBufferCount; i++) {
            if (!cmdbufs[i])
                break;
            pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
        }
    }
    return result;
}

void FreeCommandBuffers(VkDevice /*device*/,
                        VkCommandPool cmd_pool,
                        uint32_t count,
                        const VkCommandBuffer* cmdbufs) {
    CommandPool& pool = *GetCommandPoolFromHandle(cmd_pool);
    for (uint32_t i = 0; i < count; i++)
        pool.allocator.pfnFree(pool.allocator.pUserData, cmdbufs[i]);
}

// -----------------------------------------------------------------------------
// DeviceMemory

struct DeviceMemory {
    typedef VkDeviceMemory HandleType;
    VkDeviceSize size;
    alignas(16) uint8_t data[0];
};
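// The trailing flexible data[] array above is the allocation's backing store;
// MapMemory below simply returns a pointer into it, offset as requested.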
DEFINE_OBJECT_HANDLE_CONVERSION(DeviceMemory)

VkResult AllocateMemory(VkDevice device,
                        const VkMemoryAllocateInfo* alloc_info,
                        const VkAllocationCallbacks* allocator,
                        VkDeviceMemory* mem_handle) {
    if (SIZE_MAX - sizeof(DeviceMemory) <= alloc_info->allocationSize)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    if (!allocator)
        allocator = &device->allocator;

    size_t size = sizeof(DeviceMemory) + size_t(alloc_info->allocationSize);
    DeviceMemory* mem = static_cast<DeviceMemory*>(allocator->pfnAllocation(
        allocator->pUserData, size, alignof(DeviceMemory),
        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
    if (!mem)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    mem->size = size;
    *mem_handle = GetHandleToDeviceMemory(mem);
    return VK_SUCCESS;
}

void FreeMemory(VkDevice device,
                VkDeviceMemory mem_handle,
                const VkAllocationCallbacks* allocator) {
    if (!allocator)
        allocator = &device->allocator;
    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
    allocator->pfnFree(allocator->pUserData, mem);
}

VkResult MapMemory(VkDevice,
                   VkDeviceMemory mem_handle,
                   VkDeviceSize offset,
                   VkDeviceSize,
                   VkMemoryMapFlags,
                   void** out_ptr) {
    DeviceMemory* mem = GetDeviceMemoryFromHandle(mem_handle);
    *out_ptr = &mem->data[0] + offset;
    return VK_SUCCESS;
}

// -----------------------------------------------------------------------------
// Buffer

struct Buffer {
    typedef VkBuffer HandleType;
    VkDeviceSize size;
};
DEFINE_OBJECT_HANDLE_CONVERSION(Buffer)

VkResult CreateBuffer(VkDevice device,
                      const VkBufferCreateInfo* create_info,
                      const VkAllocationCallbacks* allocator,
                      VkBuffer* buffer_handle) {
    ALOGW_IF(create_info->size > kMaxDeviceMemory,
             "CreateBuffer: requested size 0x%" PRIx64
             " exceeds max device memory size 0x%" PRIx64,
             create_info->size, kMaxDeviceMemory);
    if (!allocator)
        allocator = &device->allocator;
    Buffer* buffer = static_cast<Buffer*>(allocator->pfnAllocation(
        allocator->pUserData, sizeof(Buffer), alignof(Buffer),
        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
    if (!buffer)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    buffer->size = create_info->size;
    *buffer_handle = GetHandleToBuffer(buffer);
    return VK_SUCCESS;
}

void GetBufferMemoryRequirements(VkDevice,
                                 VkBuffer buffer_handle,
                                 VkMemoryRequirements* requirements) {
    Buffer* buffer = GetBufferFromHandle(buffer_handle);
    requirements->size = buffer->size;
    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
    requirements->memoryTypeBits = 0x1;
}

void DestroyBuffer(VkDevice device,
                   VkBuffer buffer_handle,
                   const VkAllocationCallbacks* allocator) {
    if (!allocator)
        allocator = &device->allocator;
    Buffer* buffer = GetBufferFromHandle(buffer_handle);
    allocator->pfnFree(allocator->pUserData, buffer);
}

// -----------------------------------------------------------------------------
// Image

struct Image {
    typedef VkImage HandleType;
    VkDeviceSize size;
};
DEFINE_OBJECT_HANDLE_CONVERSION(Image)

VkResult CreateImage(VkDevice device,
                     const VkImageCreateInfo* create_info,
                     const VkAllocationCallbacks* allocator,
                     VkImage* image_handle) {
    if (create_info->imageType != VK_IMAGE_TYPE_2D ||
        create_info->format != VK_FORMAT_R8G8B8A8_UNORM ||
        create_info->mipLevels != 1) {
        ALOGE("CreateImage: not yet implemented: type=%d format=%d mips=%u",
              create_info->imageType, create_info->format,
              create_info->mipLevels);
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    }

    VkDeviceSize size =
        VkDeviceSize(create_info->extent.width * create_info->extent.height) *
        create_info->arrayLayers * create_info->samples * 4u;
    ALOGW_IF(size > kMaxDeviceMemory,
             "CreateImage: image size 0x%" PRIx64
             " exceeds max device memory size 0x%" PRIx64,
             size, kMaxDeviceMemory);

    if (!allocator)
        allocator = &device->allocator;
    Image* image = static_cast<Image*>(allocator->pfnAllocation(
        allocator->pUserData, sizeof(Image), alignof(Image),
        VK_SYSTEM_ALLOCATION_SCOPE_OBJECT));
    if (!image)
        return VK_ERROR_OUT_OF_HOST_MEMORY;
    image->size = size;
    *image_handle = GetHandleToImage(image);
    return VK_SUCCESS;
}

void GetImageMemoryRequirements(VkDevice,
                                VkImage image_handle,
                                VkMemoryRequirements* requirements) {
    Image* image = GetImageFromHandle(image_handle);
    requirements->size = image->size;
    requirements->alignment = 16;  // allow fast Neon/SSE memcpy
    requirements->memoryTypeBits = 0x1;
}

void DestroyImage(VkDevice device,
                  VkImage image_handle,
                  const VkAllocationCallbacks* allocator) {
    if (!allocator)
        allocator = &device->allocator;
    Image* image = GetImageFromHandle(image_handle);
    allocator->pfnFree(allocator->pUserData, image);
}

VkResult GetSwapchainGrallocUsageANDROID(VkDevice,
                                         VkFormat,
                                         VkImageUsageFlags,
                                         int* grallocUsage) {
    // The null driver never reads or writes the gralloc buffer
    *grallocUsage = 0;
    return VK_SUCCESS;
}

VkResult GetSwapchainGrallocUsage2ANDROID(VkDevice,
                                          VkFormat,
                                          VkImageUsageFlags,
                                          VkSwapchainImageUsageFlagsANDROID,
                                          uint64_t* grallocConsumerUsage,
                                          uint64_t* grallocProducerUsage) {
    // The null driver never reads or writes the gralloc buffer
    *grallocConsumerUsage = 0;
    *grallocProducerUsage = 0;
    return VK_SUCCESS;
}

VkResult AcquireImageANDROID(VkDevice,
                             VkImage,
                             int fence,
                             VkSemaphore,
                             VkFence) {
    close(fence);
    return VK_SUCCESS;
}

VkResult QueueSignalReleaseImageANDROID(VkQueue,
                                        uint32_t,
                                        const VkSemaphore*,
                                        VkImage,
                                        int* fence) {
    *fence = -1;
    return VK_SUCCESS;
}

// -----------------------------------------------------------------------------
// No-op types

VkResult CreateBufferView(VkDevice device,
                          const VkBufferViewCreateInfo*,
                          const VkAllocationCallbacks* /*allocator*/,
                          VkBufferView* view) {
    *view = AllocHandle<VkBufferView>(device, HandleType::kBufferView);
    return VK_SUCCESS;
}

VkResult CreateDescriptorPool(VkDevice device,
                              const VkDescriptorPoolCreateInfo*,
                              const VkAllocationCallbacks* /*allocator*/,
                              VkDescriptorPool* pool) {
    *pool = AllocHandle<VkDescriptorPool>(device, HandleType::kDescriptorPool);
    return VK_SUCCESS;
}

VkResult AllocateDescriptorSets(VkDevice device,
                                const VkDescriptorSetAllocateInfo* alloc_info,
                                VkDescriptorSet* descriptor_sets) {
    for (uint32_t i = 0; i < alloc_info->descriptorSetCount; i++)
        descriptor_sets[i] =
            AllocHandle<VkDescriptorSet>(device, HandleType::kDescriptorSet);
    return VK_SUCCESS;
}

VkResult CreateDescriptorSetLayout(VkDevice device,
                                   const VkDescriptorSetLayoutCreateInfo*,
                                   const VkAllocationCallbacks* /*allocator*/,
                                   VkDescriptorSetLayout* layout) {
    *layout = AllocHandle<VkDescriptorSetLayout>(
        device, HandleType::kDescriptorSetLayout);
    return VK_SUCCESS;
}

VkResult CreateEvent(VkDevice device,
                     const VkEventCreateInfo*,
                     const VkAllocationCallbacks* /*allocator*/,
                     VkEvent* event) {
    *event = AllocHandle<VkEvent>(device, HandleType::kEvent);
    return VK_SUCCESS;
}

VkResult CreateFence(VkDevice device,
                     const VkFenceCreateInfo*,
                     const VkAllocationCallbacks* /*allocator*/,
                     VkFence* fence) {
    *fence = AllocHandle<VkFence>(device, HandleType::kFence);
    return VK_SUCCESS;
}

VkResult CreateFramebuffer(VkDevice device,
                           const VkFramebufferCreateInfo*,
                           const VkAllocationCallbacks* /*allocator*/,
                           VkFramebuffer* framebuffer) {
    *framebuffer = AllocHandle<VkFramebuffer>(device, HandleType::kFramebuffer);
    return VK_SUCCESS;
}

VkResult CreateImageView(VkDevice device,
                         const VkImageViewCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkImageView* view) {
    *view = AllocHandle<VkImageView>(device, HandleType::kImageView);
    return VK_SUCCESS;
}

VkResult CreateGraphicsPipelines(VkDevice device,
                                 VkPipelineCache,
                                 uint32_t count,
                                 const VkGraphicsPipelineCreateInfo*,
                                 const VkAllocationCallbacks* /*allocator*/,
                                 VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle<VkPipeline>(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreateComputePipelines(VkDevice device,
                                VkPipelineCache,
                                uint32_t count,
                                const VkComputePipelineCreateInfo*,
                                const VkAllocationCallbacks* /*allocator*/,
                                VkPipeline* pipelines) {
    for (uint32_t i = 0; i < count; i++)
        pipelines[i] = AllocHandle<VkPipeline>(device, HandleType::kPipeline);
    return VK_SUCCESS;
}

VkResult CreatePipelineCache(VkDevice device,
                             const VkPipelineCacheCreateInfo*,
                             const VkAllocationCallbacks* /*allocator*/,
                             VkPipelineCache* cache) {
    *cache = AllocHandle<VkPipelineCache>(device, HandleType::kPipelineCache);
    return VK_SUCCESS;
}

VkResult CreatePipelineLayout(VkDevice device,
                              const VkPipelineLayoutCreateInfo*,
                              const VkAllocationCallbacks* /*allocator*/,
                              VkPipelineLayout* layout) {
    *layout =
        AllocHandle<VkPipelineLayout>(device, HandleType::kPipelineLayout);
    return VK_SUCCESS;
}

VkResult CreateQueryPool(VkDevice device,
                         const VkQueryPoolCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkQueryPool* pool) {
    *pool = AllocHandle<VkQueryPool>(device, HandleType::kQueryPool);
    return VK_SUCCESS;
}

VkResult CreateRenderPass(VkDevice device,
                          const VkRenderPassCreateInfo*,
                          const VkAllocationCallbacks* /*allocator*/,
                          VkRenderPass* renderpass) {
    *renderpass = AllocHandle<VkRenderPass>(device, HandleType::kRenderPass);
    return VK_SUCCESS;
}

VkResult CreateSampler(VkDevice device,
                       const VkSamplerCreateInfo*,
                       const VkAllocationCallbacks* /*allocator*/,
                       VkSampler* sampler) {
    *sampler = AllocHandle<VkSampler>(device, HandleType::kSampler);
    return VK_SUCCESS;
}

VkResult CreateSemaphore(VkDevice device,
                         const VkSemaphoreCreateInfo*,
                         const VkAllocationCallbacks* /*allocator*/,
                         VkSemaphore* semaphore) {
    *semaphore = AllocHandle<VkSemaphore>(device, HandleType::kSemaphore);
    return VK_SUCCESS;
}

VkResult CreateShaderModule(VkDevice device,
                            const VkShaderModuleCreateInfo*,
                            const VkAllocationCallbacks* /*allocator*/,
                            VkShaderModule* module) {
    *module = AllocHandle<VkShaderModule>(device, HandleType::kShaderModule);
    return VK_SUCCESS;
}

VkResult CreateDebugReportCallbackEXT(VkInstance instance,
                                      const VkDebugReportCallbackCreateInfoEXT*,
                                      const VkAllocationCallbacks*,
                                      VkDebugReportCallbackEXT* callback) {
    *callback = AllocHandle<VkDebugReportCallbackEXT>(
        instance, HandleType::kDebugReportCallbackEXT);
    return VK_SUCCESS;
}

// -----------------------------------------------------------------------------
// No-op entrypoints

1129 // clang-format off
1130 #pragma clang diagnostic push
1131 #pragma clang diagnostic ignored "-Wunused-parameter"
1132
GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice,VkFormat format,VkFormatProperties * pFormatProperties)1133 void GetPhysicalDeviceFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties* pFormatProperties) {
1134 ALOGV("TODO: vk%s", __FUNCTION__);
1135 }
1136
GetPhysicalDeviceFormatProperties2KHR(VkPhysicalDevice physicalDevice,VkFormat format,VkFormatProperties2KHR * pFormatProperties)1137 void GetPhysicalDeviceFormatProperties2KHR(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2KHR* pFormatProperties) {
1138 ALOGV("TODO: vk%s", __FUNCTION__);
1139 }
1140
GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice,VkFormat format,VkImageType type,VkImageTiling tiling,VkImageUsageFlags usage,VkImageCreateFlags flags,VkImageFormatProperties * pImageFormatProperties)1141 VkResult GetPhysicalDeviceImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkImageTiling tiling, VkImageUsageFlags usage, VkImageCreateFlags flags, VkImageFormatProperties* pImageFormatProperties) {
1142 ALOGV("TODO: vk%s", __FUNCTION__);
1143 return VK_SUCCESS;
1144 }
1145
GetPhysicalDeviceImageFormatProperties2KHR(VkPhysicalDevice physicalDevice,const VkPhysicalDeviceImageFormatInfo2KHR * pImageFormatInfo,VkImageFormatProperties2KHR * pImageFormatProperties)1146 VkResult GetPhysicalDeviceImageFormatProperties2KHR(VkPhysicalDevice physicalDevice,
1147 const VkPhysicalDeviceImageFormatInfo2KHR* pImageFormatInfo,
1148 VkImageFormatProperties2KHR* pImageFormatProperties) {
1149 ALOGV("TODO: vk%s", __FUNCTION__);
1150 return VK_SUCCESS;
1151 }
1152
EnumerateInstanceLayerProperties(uint32_t * pCount,VkLayerProperties * pProperties)1153 VkResult EnumerateInstanceLayerProperties(uint32_t* pCount, VkLayerProperties* pProperties) {
1154 ALOGV("TODO: vk%s", __FUNCTION__);
1155 return VK_SUCCESS;
1156 }
1157
QueueSubmit(VkQueue queue,uint32_t submitCount,const VkSubmitInfo * pSubmitInfo,VkFence fence)1158 VkResult QueueSubmit(VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmitInfo, VkFence fence) {
1159 return VK_SUCCESS;
1160 }
1161
QueueWaitIdle(VkQueue queue)1162 VkResult QueueWaitIdle(VkQueue queue) {
1163 ALOGV("TODO: vk%s", __FUNCTION__);
1164 return VK_SUCCESS;
1165 }
1166
DeviceWaitIdle(VkDevice device)1167 VkResult DeviceWaitIdle(VkDevice device) {
1168 ALOGV("TODO: vk%s", __FUNCTION__);
1169 return VK_SUCCESS;
1170 }
1171
UnmapMemory(VkDevice device,VkDeviceMemory mem)1172 void UnmapMemory(VkDevice device, VkDeviceMemory mem) {
1173 }
1174
FlushMappedMemoryRanges(VkDevice device,uint32_t memRangeCount,const VkMappedMemoryRange * pMemRanges)1175 VkResult FlushMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
1176 ALOGV("TODO: vk%s", __FUNCTION__);
1177 return VK_SUCCESS;
1178 }
1179
InvalidateMappedMemoryRanges(VkDevice device,uint32_t memRangeCount,const VkMappedMemoryRange * pMemRanges)1180 VkResult InvalidateMappedMemoryRanges(VkDevice device, uint32_t memRangeCount, const VkMappedMemoryRange* pMemRanges) {
1181 ALOGV("TODO: vk%s", __FUNCTION__);
1182 return VK_SUCCESS;
1183 }
1184
GetDeviceMemoryCommitment(VkDevice device,VkDeviceMemory memory,VkDeviceSize * pCommittedMemoryInBytes)1185 void GetDeviceMemoryCommitment(VkDevice device, VkDeviceMemory memory, VkDeviceSize* pCommittedMemoryInBytes) {
1186 ALOGV("TODO: vk%s", __FUNCTION__);
1187 }
1188
BindBufferMemory(VkDevice device,VkBuffer buffer,VkDeviceMemory mem,VkDeviceSize memOffset)1189 VkResult BindBufferMemory(VkDevice device, VkBuffer buffer, VkDeviceMemory mem, VkDeviceSize memOffset) {
1190 return VK_SUCCESS;
1191 }
1192
BindImageMemory(VkDevice device,VkImage image,VkDeviceMemory mem,VkDeviceSize memOffset)1193 VkResult BindImageMemory(VkDevice device, VkImage image, VkDeviceMemory mem, VkDeviceSize memOffset) {
1194 return VK_SUCCESS;
1195 }
1196
GetImageSparseMemoryRequirements(VkDevice device,VkImage image,uint32_t * pNumRequirements,VkSparseImageMemoryRequirements * pSparseMemoryRequirements)1197 void GetImageSparseMemoryRequirements(VkDevice device, VkImage image, uint32_t* pNumRequirements, VkSparseImageMemoryRequirements* pSparseMemoryRequirements) {
1198 ALOGV("TODO: vk%s", __FUNCTION__);
1199 }
1200
GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice,VkFormat format,VkImageType type,VkSampleCountFlagBits samples,VkImageUsageFlags usage,VkImageTiling tiling,uint32_t * pNumProperties,VkSparseImageFormatProperties * pProperties)1201 void GetPhysicalDeviceSparseImageFormatProperties(VkPhysicalDevice physicalDevice, VkFormat format, VkImageType type, VkSampleCountFlagBits samples, VkImageUsageFlags usage, VkImageTiling tiling, uint32_t* pNumProperties, VkSparseImageFormatProperties* pProperties) {
1202 ALOGV("TODO: vk%s", __FUNCTION__);
1203 }
1204
GetPhysicalDeviceSparseImageFormatProperties2KHR(VkPhysicalDevice physicalDevice,VkPhysicalDeviceSparseImageFormatInfo2KHR const * pInfo,unsigned int * pNumProperties,VkSparseImageFormatProperties2KHR * pProperties)1205 void GetPhysicalDeviceSparseImageFormatProperties2KHR(VkPhysicalDevice physicalDevice,
1206 VkPhysicalDeviceSparseImageFormatInfo2KHR const* pInfo,
1207 unsigned int* pNumProperties,
1208 VkSparseImageFormatProperties2KHR* pProperties) {
1209 ALOGV("TODO: vk%s", __FUNCTION__);
1210 }
1211
1212
QueueBindSparse(VkQueue queue,uint32_t bindInfoCount,const VkBindSparseInfo * pBindInfo,VkFence fence)1213 VkResult QueueBindSparse(VkQueue queue, uint32_t bindInfoCount, const VkBindSparseInfo* pBindInfo, VkFence fence) {
1214 ALOGV("TODO: vk%s", __FUNCTION__);
1215 return VK_SUCCESS;
1216 }
1217
DestroyFence(VkDevice device,VkFence fence,const VkAllocationCallbacks * allocator)1218 void DestroyFence(VkDevice device, VkFence fence, const VkAllocationCallbacks* allocator) {
1219 }
1220
ResetFences(VkDevice device,uint32_t fenceCount,const VkFence * pFences)1221 VkResult ResetFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences) {
1222 return VK_SUCCESS;
1223 }
1224
GetFenceStatus(VkDevice device,VkFence fence)1225 VkResult GetFenceStatus(VkDevice device, VkFence fence) {
1226 ALOGV("TODO: vk%s", __FUNCTION__);
1227 return VK_SUCCESS;
1228 }
1229
WaitForFences(VkDevice device,uint32_t fenceCount,const VkFence * pFences,VkBool32 waitAll,uint64_t timeout)1230 VkResult WaitForFences(VkDevice device, uint32_t fenceCount, const VkFence* pFences, VkBool32 waitAll, uint64_t timeout) {
1231 return VK_SUCCESS;
1232 }
1233
DestroySemaphore(VkDevice device,VkSemaphore semaphore,const VkAllocationCallbacks * allocator)1234 void DestroySemaphore(VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks* allocator) {
1235 }
1236
DestroyEvent(VkDevice device,VkEvent event,const VkAllocationCallbacks * allocator)1237 void DestroyEvent(VkDevice device, VkEvent event, const VkAllocationCallbacks* allocator) {
1238 }
1239
GetEventStatus(VkDevice device,VkEvent event)1240 VkResult GetEventStatus(VkDevice device, VkEvent event) {
1241 ALOGV("TODO: vk%s", __FUNCTION__);
1242 return VK_SUCCESS;
1243 }
1244
SetEvent(VkDevice device,VkEvent event)1245 VkResult SetEvent(VkDevice device, VkEvent event) {
1246 ALOGV("TODO: vk%s", __FUNCTION__);
1247 return VK_SUCCESS;
1248 }
1249
ResetEvent(VkDevice device,VkEvent event)1250 VkResult ResetEvent(VkDevice device, VkEvent event) {
1251 ALOGV("TODO: vk%s", __FUNCTION__);
1252 return VK_SUCCESS;
1253 }
1254
DestroyQueryPool(VkDevice device,VkQueryPool queryPool,const VkAllocationCallbacks * allocator)1255 void DestroyQueryPool(VkDevice device, VkQueryPool queryPool, const VkAllocationCallbacks* allocator) {
1256 }
1257
GetQueryPoolResults(VkDevice device,VkQueryPool queryPool,uint32_t startQuery,uint32_t queryCount,size_t dataSize,void * pData,VkDeviceSize stride,VkQueryResultFlags flags)1258 VkResult GetQueryPoolResults(VkDevice device, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, size_t dataSize, void* pData, VkDeviceSize stride, VkQueryResultFlags flags) {
1259 ALOGV("TODO: vk%s", __FUNCTION__);
1260 return VK_SUCCESS;
1261 }
1262
DestroyBufferView(VkDevice device,VkBufferView bufferView,const VkAllocationCallbacks * allocator)1263 void DestroyBufferView(VkDevice device, VkBufferView bufferView, const VkAllocationCallbacks* allocator) {
1264 }
1265
GetImageSubresourceLayout(VkDevice device,VkImage image,const VkImageSubresource * pSubresource,VkSubresourceLayout * pLayout)1266 void GetImageSubresourceLayout(VkDevice device, VkImage image, const VkImageSubresource* pSubresource, VkSubresourceLayout* pLayout) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

void DestroyImageView(VkDevice device, VkImageView imageView, const VkAllocationCallbacks* allocator) {
}

void DestroyShaderModule(VkDevice device, VkShaderModule shaderModule, const VkAllocationCallbacks* allocator) {
}

void DestroyPipelineCache(VkDevice device, VkPipelineCache pipelineCache, const VkAllocationCallbacks* allocator) {
}

VkResult GetPipelineCacheData(VkDevice device, VkPipelineCache pipelineCache, size_t* pDataSize, void* pData) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult MergePipelineCaches(VkDevice device, VkPipelineCache destCache, uint32_t srcCacheCount, const VkPipelineCache* pSrcCaches) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyPipeline(VkDevice device, VkPipeline pipeline, const VkAllocationCallbacks* allocator) {
}

void DestroyPipelineLayout(VkDevice device, VkPipelineLayout pipelineLayout, const VkAllocationCallbacks* allocator) {
}

void DestroySampler(VkDevice device, VkSampler sampler, const VkAllocationCallbacks* allocator) {
}

void DestroyDescriptorSetLayout(VkDevice device, VkDescriptorSetLayout descriptorSetLayout, const VkAllocationCallbacks* allocator) {
}

void DestroyDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, const VkAllocationCallbacks* allocator) {
}

VkResult ResetDescriptorPool(VkDevice device, VkDescriptorPool descriptorPool, VkDescriptorPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void UpdateDescriptorSets(VkDevice device, uint32_t writeCount, const VkWriteDescriptorSet* pDescriptorWrites, uint32_t copyCount, const VkCopyDescriptorSet* pDescriptorCopies) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult FreeDescriptorSets(VkDevice device, VkDescriptorPool descriptorPool, uint32_t count, const VkDescriptorSet* pDescriptorSets) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

void DestroyFramebuffer(VkDevice device, VkFramebuffer framebuffer, const VkAllocationCallbacks* allocator) {
}

void DestroyRenderPass(VkDevice device, VkRenderPass renderPass, const VkAllocationCallbacks* allocator) {
}

void GetRenderAreaGranularity(VkDevice device, VkRenderPass renderPass, VkExtent2D* pGranularity) {
    ALOGV("TODO: vk%s", __FUNCTION__);
}

VkResult ResetCommandPool(VkDevice device, VkCommandPool cmdPool, VkCommandPoolResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

VkResult BeginCommandBuffer(VkCommandBuffer cmdBuffer, const VkCommandBufferBeginInfo* pBeginInfo) {
    return VK_SUCCESS;
}

VkResult EndCommandBuffer(VkCommandBuffer cmdBuffer) {
    return VK_SUCCESS;
}

VkResult ResetCommandBuffer(VkCommandBuffer cmdBuffer, VkCommandBufferResetFlags flags) {
    ALOGV("TODO: vk%s", __FUNCTION__);
    return VK_SUCCESS;
}

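// Command-buffer recording entry points. The null driver does not record or
// execute commands, so every vkCmd* implementation below is an empty stub.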
void CmdBindPipeline(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipeline pipeline) {
}

void CmdSetViewport(VkCommandBuffer cmdBuffer, uint32_t firstViewport, uint32_t viewportCount, const VkViewport* pViewports) {
}

void CmdSetScissor(VkCommandBuffer cmdBuffer, uint32_t firstScissor, uint32_t scissorCount, const VkRect2D* pScissors) {
}

void CmdSetLineWidth(VkCommandBuffer cmdBuffer, float lineWidth) {
}

void CmdSetDepthBias(VkCommandBuffer cmdBuffer, float depthBias, float depthBiasClamp, float slopeScaledDepthBias) {
}

void CmdSetBlendConstants(VkCommandBuffer cmdBuffer, const float blendConst[4]) {
}

void CmdSetDepthBounds(VkCommandBuffer cmdBuffer, float minDepthBounds, float maxDepthBounds) {
}

void CmdSetStencilCompareMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilCompareMask) {
}

void CmdSetStencilWriteMask(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilWriteMask) {
}

void CmdSetStencilReference(VkCommandBuffer cmdBuffer, VkStencilFaceFlags faceMask, uint32_t stencilReference) {
}

void CmdBindDescriptorSets(VkCommandBuffer cmdBuffer, VkPipelineBindPoint pipelineBindPoint, VkPipelineLayout layout, uint32_t firstSet, uint32_t setCount, const VkDescriptorSet* pDescriptorSets, uint32_t dynamicOffsetCount, const uint32_t* pDynamicOffsets) {
}

void CmdBindIndexBuffer(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, VkIndexType indexType) {
}

void CmdBindVertexBuffers(VkCommandBuffer cmdBuffer, uint32_t startBinding, uint32_t bindingCount, const VkBuffer* pBuffers, const VkDeviceSize* pOffsets) {
}

void CmdDraw(VkCommandBuffer cmdBuffer, uint32_t vertexCount, uint32_t instanceCount, uint32_t firstVertex, uint32_t firstInstance) {
}

void CmdDrawIndexed(VkCommandBuffer cmdBuffer, uint32_t indexCount, uint32_t instanceCount, uint32_t firstIndex, int32_t vertexOffset, uint32_t firstInstance) {
}

void CmdDrawIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDrawIndexedIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset, uint32_t count, uint32_t stride) {
}

void CmdDispatch(VkCommandBuffer cmdBuffer, uint32_t x, uint32_t y, uint32_t z) {
}

void CmdDispatchIndirect(VkCommandBuffer cmdBuffer, VkBuffer buffer, VkDeviceSize offset) {
}

void CmdCopyBuffer(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkBuffer destBuffer, uint32_t regionCount, const VkBufferCopy* pRegions) {
}

void CmdCopyImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageCopy* pRegions) {
}

void CmdBlitImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageBlit* pRegions, VkFilter filter) {
}

void CmdCopyBufferToImage(VkCommandBuffer cmdBuffer, VkBuffer srcBuffer, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdCopyImageToBuffer(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkBuffer destBuffer, uint32_t regionCount, const VkBufferImageCopy* pRegions) {
}

void CmdUpdateBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize dataSize, const void* pData) {
}

void CmdFillBuffer(VkCommandBuffer cmdBuffer, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize fillSize, uint32_t data) {
}

void CmdClearColorImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearColorValue* pColor, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearDepthStencilImage(VkCommandBuffer cmdBuffer, VkImage image, VkImageLayout imageLayout, const VkClearDepthStencilValue* pDepthStencil, uint32_t rangeCount, const VkImageSubresourceRange* pRanges) {
}

void CmdClearAttachments(VkCommandBuffer cmdBuffer, uint32_t attachmentCount, const VkClearAttachment* pAttachments, uint32_t rectCount, const VkClearRect* pRects) {
}

void CmdResolveImage(VkCommandBuffer cmdBuffer, VkImage srcImage, VkImageLayout srcImageLayout, VkImage destImage, VkImageLayout destImageLayout, uint32_t regionCount, const VkImageResolve* pRegions) {
}

void CmdSetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdResetEvent(VkCommandBuffer cmdBuffer, VkEvent event, VkPipelineStageFlags stageMask) {
}

void CmdWaitEvents(VkCommandBuffer commandBuffer, uint32_t eventCount, const VkEvent* pEvents, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
}

void CmdPipelineBarrier(VkCommandBuffer commandBuffer, VkPipelineStageFlags srcStageMask, VkPipelineStageFlags dstStageMask, VkDependencyFlags dependencyFlags, uint32_t memoryBarrierCount, const VkMemoryBarrier* pMemoryBarriers, uint32_t bufferMemoryBarrierCount, const VkBufferMemoryBarrier* pBufferMemoryBarriers, uint32_t imageMemoryBarrierCount, const VkImageMemoryBarrier* pImageMemoryBarriers) {
}

void CmdBeginQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot, VkQueryControlFlags flags) {
}

void CmdEndQuery(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t slot) {
}

void CmdResetQueryPool(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount) {
}

void CmdWriteTimestamp(VkCommandBuffer cmdBuffer, VkPipelineStageFlagBits pipelineStage, VkQueryPool queryPool, uint32_t slot) {
}

void CmdCopyQueryPoolResults(VkCommandBuffer cmdBuffer, VkQueryPool queryPool, uint32_t startQuery, uint32_t queryCount, VkBuffer destBuffer, VkDeviceSize destOffset, VkDeviceSize destStride, VkQueryResultFlags flags) {
}

void CmdPushConstants(VkCommandBuffer cmdBuffer, VkPipelineLayout layout, VkShaderStageFlags stageFlags, uint32_t start, uint32_t length, const void* values) {
}

void CmdBeginRenderPass(VkCommandBuffer cmdBuffer, const VkRenderPassBeginInfo* pRenderPassBegin, VkSubpassContents contents) {
}

void CmdNextSubpass(VkCommandBuffer cmdBuffer, VkSubpassContents contents) {
}

void CmdEndRenderPass(VkCommandBuffer cmdBuffer) {
}

void CmdExecuteCommands(VkCommandBuffer cmdBuffer, uint32_t cmdBuffersCount, const VkCommandBuffer* pCmdBuffers) {
}

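// VK_EXT_debug_report entry points handled in this section are likewise
// empty stubs.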
void DestroyDebugReportCallbackEXT(VkInstance instance, VkDebugReportCallbackEXT callback, const VkAllocationCallbacks* pAllocator) {
}

void DebugReportMessageEXT(VkInstance instance, VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objectType, uint64_t object, size_t location, int32_t messageCode, const char* pLayerPrefix, const char* pMessage) {
}

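// Vulkan 1.1 entry points. As with the rest of this section, they either do
// nothing or unconditionally return VK_SUCCESS.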
VkResult BindBufferMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo* pBindInfos) {
    return VK_SUCCESS;
}

VkResult BindImageMemory2(VkDevice device, uint32_t bindInfoCount, const VkBindImageMemoryInfo* pBindInfos) {
    return VK_SUCCESS;
}

void GetDeviceGroupPeerMemoryFeatures(VkDevice device, uint32_t heapIndex, uint32_t localDeviceIndex, uint32_t remoteDeviceIndex, VkPeerMemoryFeatureFlags* pPeerMemoryFeatures) {
}

void CmdSetDeviceMask(VkCommandBuffer commandBuffer, uint32_t deviceMask) {
}

void CmdDispatchBase(VkCommandBuffer commandBuffer, uint32_t baseGroupX, uint32_t baseGroupY, uint32_t baseGroupZ, uint32_t groupCountX, uint32_t groupCountY, uint32_t groupCountZ) {
}

VkResult EnumeratePhysicalDeviceGroups(VkInstance instance, uint32_t* pPhysicalDeviceGroupCount, VkPhysicalDeviceGroupProperties* pPhysicalDeviceGroupProperties) {
    return VK_SUCCESS;
}

void GetImageMemoryRequirements2(VkDevice device, const VkImageMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {
}

void GetBufferMemoryRequirements2(VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo, VkMemoryRequirements2* pMemoryRequirements) {
}

void GetImageSparseMemoryRequirements2(VkDevice device, const VkImageSparseMemoryRequirementsInfo2* pInfo, uint32_t* pSparseMemoryRequirementCount, VkSparseImageMemoryRequirements2* pSparseMemoryRequirements) {
}

void GetPhysicalDeviceFeatures2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceFeatures2* pFeatures) {
}

void GetPhysicalDeviceProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceProperties2* pProperties) {
}

void GetPhysicalDeviceFormatProperties2(VkPhysicalDevice physicalDevice, VkFormat format, VkFormatProperties2* pFormatProperties) {
}

VkResult GetPhysicalDeviceImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo, VkImageFormatProperties2* pImageFormatProperties) {
    return VK_SUCCESS;
}

void GetPhysicalDeviceQueueFamilyProperties2(VkPhysicalDevice physicalDevice, uint32_t* pQueueFamilyPropertyCount, VkQueueFamilyProperties2* pQueueFamilyProperties) {
}

void GetPhysicalDeviceMemoryProperties2(VkPhysicalDevice physicalDevice, VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
}

void GetPhysicalDeviceSparseImageFormatProperties2(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceSparseImageFormatInfo2* pFormatInfo, uint32_t* pPropertyCount, VkSparseImageFormatProperties2* pProperties) {
}

void TrimCommandPool(VkDevice device, VkCommandPool commandPool, VkCommandPoolTrimFlags flags) {
}

void GetDeviceQueue2(VkDevice device, const VkDeviceQueueInfo2* pQueueInfo, VkQueue* pQueue) {
}

VkResult CreateSamplerYcbcrConversion(VkDevice device, const VkSamplerYcbcrConversionCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkSamplerYcbcrConversion* pYcbcrConversion) {
    return VK_SUCCESS;
}

void DestroySamplerYcbcrConversion(VkDevice device, VkSamplerYcbcrConversion ycbcrConversion, const VkAllocationCallbacks* pAllocator) {
}

VkResult CreateDescriptorUpdateTemplate(VkDevice device, const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo, const VkAllocationCallbacks* pAllocator, VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
    return VK_SUCCESS;
}

void DestroyDescriptorUpdateTemplate(VkDevice device, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const VkAllocationCallbacks* pAllocator) {
}

void UpdateDescriptorSetWithTemplate(VkDevice device, VkDescriptorSet descriptorSet, VkDescriptorUpdateTemplate descriptorUpdateTemplate, const void* pData) {
}

void GetPhysicalDeviceExternalBufferProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalBufferInfo* pExternalBufferInfo, VkExternalBufferProperties* pExternalBufferProperties) {
}

void GetPhysicalDeviceExternalFenceProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo, VkExternalFenceProperties* pExternalFenceProperties) {
}

void GetPhysicalDeviceExternalSemaphoreProperties(VkPhysicalDevice physicalDevice, const VkPhysicalDeviceExternalSemaphoreInfo* pExternalSemaphoreInfo, VkExternalSemaphoreProperties* pExternalSemaphoreProperties) {
}

void GetDescriptorSetLayoutSupport(VkDevice device, const VkDescriptorSetLayoutCreateInfo* pCreateInfo, VkDescriptorSetLayoutSupport* pSupport) {
}

#pragma clang diagnostic pop
// clang-format on

} // namespace null_driver