1 // Copyright (C) 2018 The Android Open Source Project
2 // Copyright (C) 2018 Google Inc.
3 //
4 // Licensed under the Apache License, Version 2.0 (the "License");
5 // you may not use this file except in compliance with the License.
6 // You may obtain a copy of the License at
7 //
8 // http://www.apache.org/licenses/LICENSE-2.0
9 //
10 // Unless required by applicable law or agreed to in writing, software
11 // distributed under the License is distributed on an "AS IS" BASIS,
12 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 // See the License for the specific language governing permissions and
14 // limitations under the License.
15
16 #include "ResourceTracker.h"
17
18 #include "android/base/Optional.h"
19 #include "android/base/threads/AndroidWorkPool.h"
20
21 #include "goldfish_vk_private_defs.h"
22
23 #include "../OpenglSystemCommon/EmulatorFeatureInfo.h"
24 #include "../OpenglSystemCommon/HostConnection.h"
25
26 #ifdef VK_USE_PLATFORM_ANDROID_KHR
27
28 #include "../egl/goldfish_sync.h"
29
// Stub out Zircon (Fuchsia) handle types and functions on Android builds so
// the shared tracking code below can reference zx_handle_t unconditionally.
typedef uint32_t zx_handle_t;
#define ZX_HANDLE_INVALID ((zx_handle_t)0)
// No-op stand-ins; real implementations exist only on Fuchsia.
void zx_handle_close(zx_handle_t) { }
void zx_event_create(int, zx_handle_t*) { }
34
35 #include "AndroidHardwareBuffer.h"
36
37 #ifndef HOST_BUILD
38 #include <drm/virtgpu_drm.h>
39 #include <xf86drm.h>
40 #endif
41
42 #include "VirtioGpuNext.h"
43
44 #endif // VK_USE_PLATFORM_ANDROID_KHR
45
46 #ifdef VK_USE_PLATFORM_FUCHSIA
47
48 #include <cutils/native_handle.h>
49 #include <fuchsia/hardware/goldfish/llcpp/fidl.h>
50 #include <fuchsia/sysmem/llcpp/fidl.h>
51 #include <lib/zx/channel.h>
52 #include <lib/zx/vmo.h>
53 #include <zircon/process.h>
54 #include <zircon/syscalls.h>
55 #include <zircon/syscalls/object.h>
56
57 #include "services/service_connector.h"
58
// Evaluates to |member| of a FIDL call result when the call succeeded,
// ZX_OK otherwise.
#define GET_STATUS_SAFE(result, member) \
    ((result).ok() ? ((result).Unwrap()->member) : ZX_OK)

struct AHardwareBuffer;

// Stub out AHardwareBuffer entry points on Fuchsia; AHBs are Android-only.
void AHardwareBuffer_release(AHardwareBuffer*) { }

native_handle_t *AHardwareBuffer_getNativeHandle(AHardwareBuffer*) { return NULL; }
67
getAndroidHardwareBufferUsageFromVkUsage(const VkImageCreateFlags vk_create,const VkImageUsageFlags vk_usage)68 uint64_t getAndroidHardwareBufferUsageFromVkUsage(
69 const VkImageCreateFlags vk_create,
70 const VkImageUsageFlags vk_usage) {
71 return AHARDWAREBUFFER_USAGE_GPU_SAMPLED_IMAGE;
72 }
73
importAndroidHardwareBuffer(Gralloc * grallocHelper,const VkImportAndroidHardwareBufferInfoANDROID * info,struct AHardwareBuffer ** importOut)74 VkResult importAndroidHardwareBuffer(
75 Gralloc *grallocHelper,
76 const VkImportAndroidHardwareBufferInfoANDROID* info,
77 struct AHardwareBuffer **importOut) {
78 return VK_SUCCESS;
79 }
80
createAndroidHardwareBuffer(bool hasDedicatedImage,bool hasDedicatedBuffer,const VkExtent3D & imageExtent,uint32_t imageLayers,VkFormat imageFormat,VkImageUsageFlags imageUsage,VkImageCreateFlags imageCreateFlags,VkDeviceSize bufferSize,VkDeviceSize allocationInfoAllocSize,struct AHardwareBuffer ** out)81 VkResult createAndroidHardwareBuffer(
82 bool hasDedicatedImage,
83 bool hasDedicatedBuffer,
84 const VkExtent3D& imageExtent,
85 uint32_t imageLayers,
86 VkFormat imageFormat,
87 VkImageUsageFlags imageUsage,
88 VkImageCreateFlags imageCreateFlags,
89 VkDeviceSize bufferSize,
90 VkDeviceSize allocationInfoAllocSize,
91 struct AHardwareBuffer **out) {
92 return VK_SUCCESS;
93 }
94
namespace goldfish_vk {
struct HostVisibleMemoryVirtualizationInfo;
}

// Fuchsia stubs for the remaining AHardwareBuffer property queries;
// they always report success and fill nothing in.
VkResult getAndroidHardwareBufferPropertiesANDROID(
    Gralloc *grallocHelper,
    const goldfish_vk::HostVisibleMemoryVirtualizationInfo*,
    VkDevice,
    const AHardwareBuffer*,
    VkAndroidHardwareBufferPropertiesANDROID*) { return VK_SUCCESS; }

VkResult getMemoryAndroidHardwareBufferANDROID(struct AHardwareBuffer **) { return VK_SUCCESS; }
107
108 #endif // VK_USE_PLATFORM_FUCHSIA
109
110 #include "HostVisibleMemoryVirtualization.h"
111 #include "Resources.h"
112 #include "VkEncoder.h"
113
114 #include "android/base/AlignedBuf.h"
115 #include "android/base/synchronization/AndroidLock.h"
116
117 #include "goldfish_address_space.h"
118 #include "goldfish_vk_private_defs.h"
119 #include "vk_format_info.h"
120 #include "vk_util.h"
121
122 #include <set>
123 #include <string>
124 #include <unordered_map>
125 #include <unordered_set>
126
127 #include <vndk/hardware_buffer.h>
128 #include <log/log.h>
129 #include <stdlib.h>
130 #include <sync/sync.h>
131
132 #ifdef VK_USE_PLATFORM_ANDROID_KHR
133
134 #include <sys/mman.h>
135 #include <sys/syscall.h>
136
137 #ifdef HOST_BUILD
138 #include "android/utils/tempfile.h"
139 #endif
140
// memfd_create shim: host builds have no memfd syscall, so back the fd with a
// temp file instead; guest builds invoke the raw syscall (the libc may not
// provide a memfd_create wrapper).
static inline int
inline_memfd_create(const char *name, unsigned int flags) {
#ifdef HOST_BUILD
    TempFile* tmpFile = tempfile_create();
    return open(tempfile_path(tmpFile), O_RDWR);
    // TODO: Windows is not suppose to support VkSemaphoreGetFdInfoKHR
#else
    return syscall(SYS_memfd_create, name, flags);
#endif
}
#define memfd_create inline_memfd_create
152 #endif // !VK_USE_PLATFORM_ANDROID_KHR
153
154 #define RESOURCE_TRACKER_DEBUG 0
155
156 #if RESOURCE_TRACKER_DEBUG
157 #undef D
158 #define D(fmt,...) ALOGD("%s: " fmt, __func__, ##__VA_ARGS__);
159 #else
160 #ifndef D
161 #define D(fmt,...)
162 #endif
163 #endif
164
165 using android::aligned_buf_alloc;
166 using android::aligned_buf_free;
167 using android::base::Optional;
168 using android::base::guest::AutoLock;
169 using android::base::guest::Lock;
170 using android::base::guest::WorkPool;
171
172 namespace goldfish_vk {
173
// Generates the three mapHandles_* overrides for one handle type:
//  - in-place map over an array of handles,
//  - map handles to their uint64_t wire representation,
//  - map uint64_t wire values back into handles.
#define MAKE_HANDLE_MAPPING_FOREACH(type_name, map_impl, map_to_u64_impl, map_from_u64_impl) \
    void mapHandles_##type_name(type_name* handles, size_t count) override { \
        for (size_t i = 0; i < count; ++i) { \
            map_impl; \
        } \
    } \
    void mapHandles_##type_name##_u64(const type_name* handles, uint64_t* handle_u64s, size_t count) override { \
        for (size_t i = 0; i < count; ++i) { \
            map_to_u64_impl; \
        } \
    } \
    void mapHandles_u64_##type_name(const uint64_t* handle_u64s, type_name* handles, size_t count) override { \
        for (size_t i = 0; i < count; ++i) { \
            map_from_u64_impl; \
        } \
    } \

// Declares a VulkanHandleMapping subclass whose per-type behavior comes from
// applying the |impl| macro to every Vulkan handle type.
#define DEFINE_RESOURCE_TRACKING_CLASS(class_name, impl) \
class class_name : public VulkanHandleMapping { \
public: \
    virtual ~class_name() { } \
    GOLDFISH_VK_LIST_HANDLE_TYPES(impl) \
}; \

// Wrap a freshly-created host handle in a new guest handle and register it
// with the tracker.
#define CREATE_MAPPING_IMPL_FOR_TYPE(type_name) \
    MAKE_HANDLE_MAPPING_FOREACH(type_name, \
        handles[i] = new_from_host_##type_name(handles[i]); ResourceTracker::get()->register_##type_name(handles[i]);, \
        handle_u64s[i] = (uint64_t)new_from_host_##type_name(handles[i]), \
        handles[i] = (type_name)new_from_host_u64_##type_name(handle_u64s[i]); ResourceTracker::get()->register_##type_name(handles[i]);)

// Resolve a guest handle to the underlying host handle (no registration).
#define UNWRAP_MAPPING_IMPL_FOR_TYPE(type_name) \
    MAKE_HANDLE_MAPPING_FOREACH(type_name, \
        handles[i] = get_host_##type_name(handles[i]), \
        handle_u64s[i] = (uint64_t)get_host_u64_##type_name(handles[i]), \
        handles[i] = (type_name)get_host_##type_name((type_name)handle_u64s[i]))

// Unregister the guest handle from the tracker and free its wrapper object.
#define DESTROY_MAPPING_IMPL_FOR_TYPE(type_name) \
    MAKE_HANDLE_MAPPING_FOREACH(type_name, \
        ResourceTracker::get()->unregister_##type_name(handles[i]); delete_goldfish_##type_name(handles[i]), \
        (void)handle_u64s[i]; delete_goldfish_##type_name(handles[i]), \
        (void)handles[i]; delete_goldfish_##type_name((type_name)handle_u64s[i]))

DEFINE_RESOURCE_TRACKING_CLASS(CreateMapping, CREATE_MAPPING_IMPL_FOR_TYPE)
DEFINE_RESOURCE_TRACKING_CLASS(UnwrapMapping, UNWRAP_MAPPING_IMPL_FOR_TYPE)
DEFINE_RESOURCE_TRACKING_CLASS(DestroyMapping, DESTROY_MAPPING_IMPL_FOR_TYPE)
219
// Hidden implementation state for ResourceTracker (pimpl idiom).
class ResourceTracker::Impl {
public:
    Impl() = default;
    // Handle-mapping strategies handed to the encoder:
    CreateMapping createMapping;     // wrap new host handles and register them
    UnwrapMapping unwrapMapping;     // guest wrapper -> underlying host handle
    DestroyMapping destroyMapping;   // unregister and delete guest wrappers
    DefaultHandleMapping defaultMapping;

// For handle types with no interesting guest-side state, keep a placeholder
// info struct so the register/unregister macros below still compile.
#define HANDLE_DEFINE_TRIVIAL_INFO_STRUCT(type) \
    struct type##_Info { \
        uint32_t unused; \
    }; \

    GOLDFISH_VK_LIST_TRIVIAL_HANDLE_TYPES(HANDLE_DEFINE_TRIVIAL_INFO_STRUCT)
234
    // Guest-side state tracked per VkInstance.
    struct VkInstance_Info {
        uint32_t highestApiVersion;
        std::set<std::string> enabledExtensions;
        // Fodder for vkEnumeratePhysicalDevices.
        std::vector<VkPhysicalDevice> physicalDevices;
    };

    // Host memory blocks backing virtualized host-visible allocations;
    // one block list per memory type.
    using HostMemBlocks = std::vector<HostMemAlloc>;
    using HostMemBlockIndex = size_t;

#define INVALID_HOST_MEM_BLOCK (-1)

    // Guest-side state tracked per VkDevice.
    struct VkDevice_Info {
        VkPhysicalDevice physdev;
        VkPhysicalDeviceProperties props;
        VkPhysicalDeviceMemoryProperties memProps;
        // Indexed by memory type index.
        std::vector<HostMemBlocks> hostMemBlocks { VK_MAX_MEMORY_TYPES };
        uint32_t apiVersion;
        std::set<std::string> enabledExtensions;
    };

    // Identifies the virtio-gpu hostmem resource backing a memory object.
    struct VirtioGpuHostmemResourceInfo {
        uint32_t resourceId = 0;
        int primeFd = -1;
    };
260
    // Guest-side state tracked per VkDeviceMemory allocation, including any
    // external backing (AHardwareBuffer on Android, VMO on Fuchsia).
    struct VkDeviceMemory_Info {
        VkDeviceSize allocationSize = 0;
        VkDeviceSize mappedSize = 0;
        uint8_t* mappedPtr = nullptr;
        uint32_t memoryTypeIndex = 0;
        bool virtualHostVisibleBacking = false;
        bool directMapped = false;
        GoldfishAddressSpaceBlock*
            goldfishAddressSpaceBlock = nullptr; // owned; deleted on unregister
        VirtioGpuHostmemResourceInfo resInfo;
        SubAlloc subAlloc;                       // valid when directMapped
        AHardwareBuffer* ahw = nullptr;          // ref released on unregister
        zx_handle_t vmoHandle = ZX_HANDLE_INVALID; // closed on unregister
    };

    // Caches the last encoder used on a command buffer, plus a sequence
    // number; the encoder cell is heap-allocated and freed on unregister.
    struct VkCommandBuffer_Info {
        VkEncoder** lastUsedEncoderPtr = nullptr;
        uint32_t sequenceNumber = 0;
    };

    // Same shape as VkCommandBuffer_Info, tracked per queue.
    struct VkQueue_Info {
        VkEncoder** lastUsedEncoderPtr = nullptr;
        uint32_t sequenceNumber = 0;
    };
285
    // custom guest-side structs for images/buffers because of AHardwareBuffer :((
    struct VkImage_Info {
        VkDevice device;
        VkImageCreateInfo createInfo;
        bool external = false;
        VkExternalMemoryImageCreateInfo externalCreateInfo;
        // Memory currently bound to this image, if any.
        VkDeviceMemory currentBacking = VK_NULL_HANDLE;
        VkDeviceSize currentBackingOffset = 0;
        VkDeviceSize currentBackingSize = 0;
        bool baseRequirementsKnown = false;
        VkMemoryRequirements baseRequirements;
#ifdef VK_USE_PLATFORM_FUCHSIA
        bool isSysmemBackedMemory = false;
#endif
    };

    struct VkBuffer_Info {
        VkDevice device;
        VkBufferCreateInfo createInfo;
        bool external = false;
        VkExternalMemoryBufferCreateInfo externalCreateInfo;
        // Memory currently bound to this buffer, if any.
        VkDeviceMemory currentBacking = VK_NULL_HANDLE;
        VkDeviceSize currentBackingOffset = 0;
        VkDeviceSize currentBackingSize = 0;
        bool baseRequirementsKnown = false;
        VkMemoryRequirements baseRequirements;
#ifdef VK_USE_PLATFORM_FUCHSIA
        bool isSysmemBackedMemory = false;
#endif
    };

    // External semaphore payloads: a Zircon event (Fuchsia) or sync fd (Android).
    struct VkSemaphore_Info {
        VkDevice device;
        zx_handle_t eventHandle = ZX_HANDLE_INVALID; // closed on unregister
        int syncFd = -1;
    };

    // Pre-flattened copy of a descriptor update template so updates can be
    // streamed without re-walking the template entries every time.
    struct VkDescriptorUpdateTemplate_Info {
        std::vector<VkDescriptorUpdateTemplateEntry> templateEntries;

        // Flattened versions
        std::vector<uint32_t> imageInfoEntryIndices;
        std::vector<uint32_t> bufferInfoEntryIndices;
        std::vector<uint32_t> bufferViewEntryIndices;
        std::vector<VkDescriptorImageInfo> imageInfos;
        std::vector<VkDescriptorBufferInfo> bufferInfos;
        std::vector<VkBufferView> bufferViews;
    };
334
    struct VkFence_Info {
        VkDevice device;
        bool external = false;
        VkExportFenceCreateInfo exportFenceCreateInfo;
#ifdef VK_USE_PLATFORM_ANDROID_KHR
        int syncFd = -1; // owned; closed on unregister
#endif
    };

    struct VkDescriptorPool_Info {
        // Sets currently alive that were allocated from this pool.
        std::unordered_set<VkDescriptorSet> allocedSets;
        VkDescriptorPoolCreateFlags createFlags;
    };

    struct VkDescriptorSet_Info {
        VkDescriptorPool pool; // owning pool
        // Indexed by binding number; true when that binding uses immutable
        // samplers (see initDescriptorSetStateLocked).
        std::vector<bool> bindingIsImmutableSampler;
    };

    struct VkDescriptorSetLayout_Info {
        std::vector<VkDescriptorSetLayoutBinding> bindings;
    };
357
// Generates, per handle type: the info map plus a register_<type>() that
// inserts a default-constructed info entry under the tracker lock.
#define HANDLE_REGISTER_IMPL_IMPL(type) \
    std::unordered_map<type, type##_Info> info_##type; \
    void register_##type(type obj) { \
        AutoLock lock(mLock); \
        info_##type[obj] = type##_Info(); \
    } \

// Default unregister: erase the entry under the lock. Handle types needing
// extra teardown get hand-written overloads below instead.
#define HANDLE_UNREGISTER_IMPL_IMPL(type) \
    void unregister_##type(type obj) { \
        AutoLock lock(mLock); \
        info_##type.erase(obj); \
    } \

    GOLDFISH_VK_LIST_HANDLE_TYPES(HANDLE_REGISTER_IMPL_IMPL)
    GOLDFISH_VK_LIST_TRIVIAL_HANDLE_TYPES(HANDLE_UNREGISTER_IMPL_IMPL)
373
    // Forgets tracking info for |instance|.
    void unregister_VkInstance(VkInstance instance) {
        AutoLock lock(mLock);

        auto it = info_VkInstance.find(instance);
        if (it == info_VkInstance.end()) return;
        // Copy the info out so its containers are destroyed after the lock is
        // released below (at end of scope), keeping the critical section short.
        auto info = it->second;
        info_VkInstance.erase(instance);
        lock.unlock();
    }
383
    // Forgets tracking info for |device|.
    void unregister_VkDevice(VkDevice device) {
        AutoLock lock(mLock);

        auto it = info_VkDevice.find(device);
        if (it == info_VkDevice.end()) return;
        // Copy the info out so its containers are destroyed after the lock is
        // released below (at end of scope), keeping the critical section short.
        auto info = it->second;
        info_VkDevice.erase(device);
        lock.unlock();
    }
393
    // Forgets |commandBuffer|, freeing the heap cell that caches its most
    // recently used encoder.
    void unregister_VkCommandBuffer(VkCommandBuffer commandBuffer) {
        AutoLock lock(mLock);

        auto it = info_VkCommandBuffer.find(commandBuffer);
        if (it == info_VkCommandBuffer.end()) return;
        auto& info = it->second;
        auto lastUsedEncoder =
            info.lastUsedEncoderPtr ?
            *(info.lastUsedEncoderPtr) : nullptr;

        // NOTE(review): the holder cell is only freed when it currently points
        // at an encoder; if lastUsedEncoderPtr is non-null but *ptr is null the
        // cell appears to leak — confirm against the allocation site.
        if (lastUsedEncoder) {
            delete info.lastUsedEncoderPtr;
            info.lastUsedEncoderPtr = nullptr;
        }

        info_VkCommandBuffer.erase(commandBuffer);
    }
411
    // Forgets |queue|, freeing the heap cell that caches its most recently
    // used encoder (mirror of unregister_VkCommandBuffer).
    void unregister_VkQueue(VkQueue queue) {
        AutoLock lock(mLock);

        auto it = info_VkQueue.find(queue);
        if (it == info_VkQueue.end()) return;
        auto& info = it->second;
        auto lastUsedEncoder =
            info.lastUsedEncoderPtr ?
            *(info.lastUsedEncoderPtr) : nullptr;

        // NOTE(review): same holder-cell caveat as unregister_VkCommandBuffer —
        // a non-null cell pointing at a null encoder would not be freed here.
        if (lastUsedEncoder) {
            delete info.lastUsedEncoderPtr;
            info.lastUsedEncoderPtr = nullptr;
        }

        info_VkQueue.erase(queue);
    }
429
    // Forgets |mem| and releases whatever backing it had: an AHardwareBuffer
    // reference, a Zircon VMO handle, a guest-owned staging buffer, a
    // sub-allocation from a host-visible block, and/or an address-space block.
    void unregister_VkDeviceMemory(VkDeviceMemory mem) {
        AutoLock lock(mLock);

        auto it = info_VkDeviceMemory.find(mem);
        if (it == info_VkDeviceMemory.end()) return;

        auto& memInfo = it->second;

        if (memInfo.ahw) {
            AHardwareBuffer_release(memInfo.ahw);
        }

        if (memInfo.vmoHandle != ZX_HANDLE_INVALID) {
            zx_handle_close(memInfo.vmoHandle);
        }

        // mappedPtr is only owned by us (aligned_buf_alloc) when the memory is
        // neither virtualized host-visible nor direct-mapped.
        if (memInfo.mappedPtr &&
            !memInfo.virtualHostVisibleBacking &&
            !memInfo.directMapped) {
            aligned_buf_free(memInfo.mappedPtr);
        }

        if (memInfo.directMapped) {
            subFreeHostMemory(&memInfo.subAlloc);
        }

        delete memInfo.goldfishAddressSpaceBlock;

        info_VkDeviceMemory.erase(mem);
    }
460
unregister_VkImage(VkImage img)461 void unregister_VkImage(VkImage img) {
462 AutoLock lock(mLock);
463
464 auto it = info_VkImage.find(img);
465 if (it == info_VkImage.end()) return;
466
467 auto& imageInfo = it->second;
468
469 info_VkImage.erase(img);
470 }
471
unregister_VkBuffer(VkBuffer buf)472 void unregister_VkBuffer(VkBuffer buf) {
473 AutoLock lock(mLock);
474
475 auto it = info_VkBuffer.find(buf);
476 if (it == info_VkBuffer.end()) return;
477
478 info_VkBuffer.erase(buf);
479 }
480
unregister_VkSemaphore(VkSemaphore sem)481 void unregister_VkSemaphore(VkSemaphore sem) {
482 AutoLock lock(mLock);
483
484 auto it = info_VkSemaphore.find(sem);
485 if (it == info_VkSemaphore.end()) return;
486
487 auto& semInfo = it->second;
488
489 if (semInfo.eventHandle != ZX_HANDLE_INVALID) {
490 zx_handle_close(semInfo.eventHandle);
491 }
492
493 info_VkSemaphore.erase(sem);
494 }
495
unregister_VkDescriptorUpdateTemplate(VkDescriptorUpdateTemplate templ)496 void unregister_VkDescriptorUpdateTemplate(VkDescriptorUpdateTemplate templ) {
497 info_VkDescriptorUpdateTemplate.erase(templ);
498 }
499
unregister_VkFence(VkFence fence)500 void unregister_VkFence(VkFence fence) {
501 AutoLock lock(mLock);
502 auto it = info_VkFence.find(fence);
503 if (it == info_VkFence.end()) return;
504
505 auto& fenceInfo = it->second;
506 (void)fenceInfo;
507
508 #ifdef VK_USE_PLATFORM_ANDROID_KHR
509 if (fenceInfo.syncFd >= 0) {
510 close(fenceInfo.syncFd);
511 }
512 #endif
513
514 info_VkFence.erase(fence);
515 }
516
unregister_VkDescriptorSet_locked(VkDescriptorSet set)517 void unregister_VkDescriptorSet_locked(VkDescriptorSet set) {
518 auto it = info_VkDescriptorSet.find(set);
519 if (it == info_VkDescriptorSet.end()) return;
520
521 const auto& setInfo = it->second;
522
523 auto poolIt = info_VkDescriptorPool.find(setInfo.pool);
524
525 info_VkDescriptorSet.erase(set);
526
527 if (poolIt == info_VkDescriptorPool.end()) return;
528
529 auto& poolInfo = poolIt->second;
530 poolInfo.allocedSets.erase(set);
531 }
532
    // Locking wrapper around unregister_VkDescriptorSet_locked().
    void unregister_VkDescriptorSet(VkDescriptorSet set) {
        AutoLock lock(mLock);
        unregister_VkDescriptorSet_locked(set);
    }
537
    // Forgets the cached binding list for |setLayout|.
    void unregister_VkDescriptorSetLayout(VkDescriptorSetLayout setLayout) {
        AutoLock lock(mLock);
        info_VkDescriptorSetLayout.erase(setLayout);
    }
542
initDescriptorSetStateLocked(const VkDescriptorSetAllocateInfo * ci,const VkDescriptorSet * sets)543 void initDescriptorSetStateLocked(const VkDescriptorSetAllocateInfo* ci, const VkDescriptorSet* sets) {
544 auto it = info_VkDescriptorPool.find(ci->descriptorPool);
545 if (it == info_VkDescriptorPool.end()) return;
546
547 auto& info = it->second;
548 for (uint32_t i = 0; i < ci->descriptorSetCount; ++i) {
549 info.allocedSets.insert(sets[i]);
550
551 auto setIt = info_VkDescriptorSet.find(sets[i]);
552 if (setIt == info_VkDescriptorSet.end()) continue;
553
554 auto& setInfo = setIt->second;
555 setInfo.pool = ci->descriptorPool;
556
557 VkDescriptorSetLayout setLayout = ci->pSetLayouts[i];
558 auto layoutIt = info_VkDescriptorSetLayout.find(setLayout);
559 if (layoutIt == info_VkDescriptorSetLayout.end()) continue;
560
561 const auto& layoutInfo = layoutIt->second;
562 for (size_t i = 0; i < layoutInfo.bindings.size(); ++i) {
563 // Bindings can be sparsely defined
564 const auto& binding = layoutInfo.bindings[i];
565 uint32_t bindingIndex = binding.binding;
566 if (setInfo.bindingIsImmutableSampler.size() <= bindingIndex) {
567 setInfo.bindingIsImmutableSampler.resize(bindingIndex + 1, false);
568 }
569 setInfo.bindingIsImmutableSampler[bindingIndex] =
570 binding.descriptorCount > 0 &&
571 (binding.descriptorType == VK_DESCRIPTOR_TYPE_SAMPLER ||
572 binding.descriptorType ==
573 VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) &&
574 binding.pImmutableSamplers;
575 }
576 }
577 }
578
579 VkWriteDescriptorSet
createImmutableSamplersFilteredWriteDescriptorSetLocked(const VkWriteDescriptorSet * descriptorWrite,std::vector<VkDescriptorImageInfo> * imageInfoArray)580 createImmutableSamplersFilteredWriteDescriptorSetLocked(
581 const VkWriteDescriptorSet* descriptorWrite,
582 std::vector<VkDescriptorImageInfo>* imageInfoArray) {
583
584 VkWriteDescriptorSet res = *descriptorWrite;
585
586 if (descriptorWrite->descriptorCount == 0) return res;
587
588 if (descriptorWrite->descriptorType != VK_DESCRIPTOR_TYPE_SAMPLER &&
589 descriptorWrite->descriptorType != VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) return res;
590
591 VkDescriptorSet set = descriptorWrite->dstSet;
592 auto descSetIt = info_VkDescriptorSet.find(set);
593 if (descSetIt == info_VkDescriptorSet.end()) {
594 ALOGE("%s: error: descriptor set 0x%llx not found\n", __func__,
595 (unsigned long long)set);
596 return res;
597 }
598
599 const auto& descInfo = descSetIt->second;
600 uint32_t binding = descriptorWrite->dstBinding;
601
602 bool immutableSampler = descInfo.bindingIsImmutableSampler[binding];
603
604 if (!immutableSampler) return res;
605
606 for (uint32_t i = 0; i < descriptorWrite->descriptorCount; ++i) {
607 VkDescriptorImageInfo imageInfo = descriptorWrite->pImageInfo[i];
608 imageInfo.sampler = 0;
609 imageInfoArray->push_back(imageInfo);
610 }
611
612 res.pImageInfo = imageInfoArray->data();
613
614 return res;
615 }
616
617 // Also unregisters underlying descriptor sets
618 // and deletes their guest-side wrapped handles.
clearDescriptorPoolLocked(VkDescriptorPool pool)619 void clearDescriptorPoolLocked(VkDescriptorPool pool) {
620 auto it = info_VkDescriptorPool.find(pool);
621 if (it == info_VkDescriptorPool.end()) return;
622
623 std::vector<VkDescriptorSet> toClear;
624 for (auto set : it->second.allocedSets) {
625 toClear.push_back(set);
626 }
627
628 for (auto set : toClear) {
629 unregister_VkDescriptorSet_locked(set);
630 delete_goldfish_VkDescriptorSet(set);
631 }
632 }
633
    // Tears down all sets still alive in |pool|, then forgets the pool itself.
    void unregister_VkDescriptorPool(VkDescriptorPool pool) {
        AutoLock lock(mLock);
        clearDescriptorPoolLocked(pool);
        info_VkDescriptorPool.erase(pool);
    }
639
descriptorPoolSupportsIndividualFreeLocked(VkDescriptorPool pool)640 bool descriptorPoolSupportsIndividualFreeLocked(VkDescriptorPool pool) {
641 auto it = info_VkDescriptorPool.find(pool);
642 if (it == info_VkDescriptorPool.end()) return false;
643
644 const auto& info = it->second;
645
646 return VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT &
647 info.createFlags;
648 }
649
descriptorSetReallyAllocedFromPoolLocked(VkDescriptorSet set,VkDescriptorPool pool)650 bool descriptorSetReallyAllocedFromPoolLocked(VkDescriptorSet set, VkDescriptorPool pool) {
651 auto it = info_VkDescriptorSet.find(set);
652 if (it == info_VkDescriptorSet.end()) return false;
653
654 const auto& info = it->second;
655
656 if (pool != info.pool) return false;
657
658 auto poolIt = info_VkDescriptorPool.find(info.pool);
659 if (poolIt == info_VkDescriptorPool.end()) return false;
660
661 const auto& poolInfo = poolIt->second;
662
663 if (poolInfo.allocedSets.find(set) == poolInfo.allocedSets.end()) return false;
664
665 return true;
666 }
667
668 static constexpr uint32_t kDefaultApiVersion = VK_MAKE_VERSION(1, 1, 0);
669
setInstanceInfo(VkInstance instance,uint32_t enabledExtensionCount,const char * const * ppEnabledExtensionNames,uint32_t apiVersion)670 void setInstanceInfo(VkInstance instance,
671 uint32_t enabledExtensionCount,
672 const char* const* ppEnabledExtensionNames,
673 uint32_t apiVersion) {
674 AutoLock lock(mLock);
675 auto& info = info_VkInstance[instance];
676 info.highestApiVersion = apiVersion;
677
678 if (!ppEnabledExtensionNames) return;
679
680 for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
681 info.enabledExtensions.insert(ppEnabledExtensionNames[i]);
682 }
683 }
684
    // Records per-device state at vkCreateDevice time and (re)derives the
    // host-visible memory virtualization tables.
    void setDeviceInfo(VkDevice device,
                       VkPhysicalDevice physdev,
                       VkPhysicalDeviceProperties props,
                       VkPhysicalDeviceMemoryProperties memProps,
                       uint32_t enabledExtensionCount,
                       const char* const* ppEnabledExtensionNames) {
        AutoLock lock(mLock);
        auto& info = info_VkDevice[device];
        info.physdev = physdev;
        info.props = props;
        info.memProps = memProps;
        // NOTE(review): passes the |memProps| parameter, not info.memProps —
        // confirm whether the virtualization pass is meant to see (or rewrite)
        // the caller's copy rather than the stored one.
        initHostVisibleMemoryVirtualizationInfo(
            physdev, &memProps,
            mFeatureInfo.get(),
            &mHostVisibleMemoryVirtInfo);
        info.apiVersion = props.apiVersion;

        if (!ppEnabledExtensionNames) return;

        for (uint32_t i = 0; i < enabledExtensionCount; ++i) {
            info.enabledExtensions.insert(ppEnabledExtensionNames[i]);
        }
    }
708
    // Records mapping/backing info for |memory|. The trailing defaults cover
    // plain allocations with no AHardwareBuffer or VMO attached.
    void setDeviceMemoryInfo(VkDevice device,
                             VkDeviceMemory memory,
                             VkDeviceSize allocationSize,
                             VkDeviceSize mappedSize,
                             uint8_t* ptr,
                             uint32_t memoryTypeIndex,
                             AHardwareBuffer* ahw = nullptr,
                             zx_handle_t vmoHandle = ZX_HANDLE_INVALID) {
        AutoLock lock(mLock);
        // deviceInfo is not read here, but operator[] default-inserts a
        // VkDevice entry as a side effect — keep the lookup.
        auto& deviceInfo = info_VkDevice[device];
        auto& info = info_VkDeviceMemory[memory];

        info.allocationSize = allocationSize;
        info.mappedSize = mappedSize;
        info.mappedPtr = ptr;
        info.memoryTypeIndex = memoryTypeIndex;
        info.ahw = ahw;
        info.vmoHandle = vmoHandle;
    }
728
setImageInfo(VkImage image,VkDevice device,const VkImageCreateInfo * pCreateInfo)729 void setImageInfo(VkImage image,
730 VkDevice device,
731 const VkImageCreateInfo *pCreateInfo) {
732 AutoLock lock(mLock);
733 auto& info = info_VkImage[image];
734
735 info.device = device;
736 info.createInfo = *pCreateInfo;
737 }
738
isMemoryTypeHostVisible(VkDevice device,uint32_t typeIndex) const739 bool isMemoryTypeHostVisible(VkDevice device, uint32_t typeIndex) const {
740 AutoLock lock(mLock);
741 const auto it = info_VkDevice.find(device);
742
743 if (it == info_VkDevice.end()) return false;
744
745 const auto& info = it->second;
746 return info.memProps.memoryTypes[typeIndex].propertyFlags &
747 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
748 }
749
getMappedPointer(VkDeviceMemory memory)750 uint8_t* getMappedPointer(VkDeviceMemory memory) {
751 AutoLock lock(mLock);
752 const auto it = info_VkDeviceMemory.find(memory);
753 if (it == info_VkDeviceMemory.end()) return nullptr;
754
755 const auto& info = it->second;
756 return info.mappedPtr;
757 }
758
getMappedSize(VkDeviceMemory memory)759 VkDeviceSize getMappedSize(VkDeviceMemory memory) {
760 AutoLock lock(mLock);
761 const auto it = info_VkDeviceMemory.find(memory);
762 if (it == info_VkDeviceMemory.end()) return 0;
763
764 const auto& info = it->second;
765 return info.mappedSize;
766 }
767
getNonCoherentExtendedSize(VkDevice device,VkDeviceSize basicSize) const768 VkDeviceSize getNonCoherentExtendedSize(VkDevice device, VkDeviceSize basicSize) const {
769 AutoLock lock(mLock);
770 const auto it = info_VkDevice.find(device);
771 if (it == info_VkDevice.end()) return basicSize;
772 const auto& info = it->second;
773
774 VkDeviceSize nonCoherentAtomSize =
775 info.props.limits.nonCoherentAtomSize;
776 VkDeviceSize atoms =
777 (basicSize + nonCoherentAtomSize - 1) / nonCoherentAtomSize;
778 return atoms * nonCoherentAtomSize;
779 }
780
isValidMemoryRange(const VkMappedMemoryRange & range) const781 bool isValidMemoryRange(const VkMappedMemoryRange& range) const {
782 AutoLock lock(mLock);
783 const auto it = info_VkDeviceMemory.find(range.memory);
784 if (it == info_VkDeviceMemory.end()) return false;
785 const auto& info = it->second;
786
787 if (!info.mappedPtr) return false;
788
789 VkDeviceSize offset = range.offset;
790 VkDeviceSize size = range.size;
791
792 if (size == VK_WHOLE_SIZE) {
793 return offset <= info.mappedSize;
794 }
795
796 return offset + size <= info.mappedSize;
797 }
798
    // Latches emulator feature flags (first non-null caller wins) and brings
    // up the transports those features need: the goldfish address-space
    // provider for direct mapping, Fuchsia control/sysmem channels, wire
    // stream feature bits, and the virtio-gpu rendernode.
    void setupFeatures(const EmulatorFeatureInfo* features) {
        // Idempotent: only the first call with a non-null feature set sticks.
        if (!features || mFeatureInfo) return;
        mFeatureInfo.reset(new EmulatorFeatureInfo);
        *mFeatureInfo = *features;

        if (mFeatureInfo->hasDirectMem) {
            mGoldfishAddressSpaceBlockProvider.reset(
                new GoldfishAddressSpaceBlockProvider(
                    GoldfishAddressSpaceSubdeviceType::NoSubdevice));
        }

#ifdef VK_USE_PLATFORM_FUCHSIA
        if (mFeatureInfo->hasVulkan) {
            zx::channel channel(GetConnectToServiceFunction()("/dev/class/goldfish-control/000"));
            // The control device is mandatory for Vulkan on Fuchsia.
            if (!channel) {
                ALOGE("failed to open control device");
                abort();
            }
            mControlDevice = std::make_unique<
                llcpp::fuchsia::hardware::goldfish::ControlDevice::SyncClient>(
                std::move(channel));

            zx::channel sysmem_channel(GetConnectToServiceFunction()("/svc/fuchsia.sysmem.Allocator"));
            // Sysmem failure is logged but non-fatal.
            if (!sysmem_channel) {
                ALOGE("failed to open sysmem connection");
            }
            mSysmemAllocator =
                std::make_unique<llcpp::fuchsia::sysmem::Allocator::SyncClient>(
                    std::move(sysmem_channel));
        }
#endif

        // Translate emulator features into wire-protocol stream feature bits.
        if (mFeatureInfo->hasVulkanNullOptionalStrings) {
            mStreamFeatureBits |= VULKAN_STREAM_FEATURE_NULL_OPTIONAL_STRINGS_BIT;
        }
        if (mFeatureInfo->hasVulkanIgnoredHandles) {
            mStreamFeatureBits |= VULKAN_STREAM_FEATURE_IGNORED_HANDLES_BIT;
        }
        if (mFeatureInfo->hasVulkanShaderFloat16Int8) {
            mStreamFeatureBits |= VULKAN_STREAM_FEATURE_SHADER_FLOAT16_INT8_BIT;
        }

#if !defined(HOST_BUILD) && defined(VK_USE_PLATFORM_ANDROID_KHR)
        if (mFeatureInfo->hasVirtioGpuNext) {
            ALOGD("%s: has virtio-gpu-next; create hostmem rendernode\n", __func__);
            mRendernodeFd = drmOpenRender(128 /* RENDERNODE_MINOR */);
        }
#endif
    }
848
    // Installs the callbacks used to reach per-thread host connections/encoders.
    void setThreadingCallbacks(const ResourceTracker::ThreadingCallbacks& callbacks) {
        mThreadingCallbacks = callbacks;
    }
852
hostSupportsVulkan() const853 bool hostSupportsVulkan() const {
854 if (!mFeatureInfo) return false;
855
856 return mFeatureInfo->hasVulkan;
857 }
858
    // True when host-visible memory virtualization (direct mapping) is active.
    bool usingDirectMapping() const {
        return mHostVisibleMemoryVirtInfo.virtualizationSupported;
    }

    // Bitmask of VULKAN_STREAM_FEATURE_* bits negotiated in setupFeatures().
    uint32_t getStreamFeatures() const {
        return mStreamFeatureBits;
    }
866
supportsDeferredCommands() const867 bool supportsDeferredCommands() const {
868 if (!mFeatureInfo) return false;
869 return mFeatureInfo->hasDeferredVulkanCommands;
870 }
871
supportsAsyncQueueSubmit() const872 bool supportsAsyncQueueSubmit() const {
873 if (!mFeatureInfo) return false;
874 return mFeatureInfo->hasVulkanAsyncQueueSubmit;
875 }
876
supportsCreateResourcesWithRequirements() const877 bool supportsCreateResourcesWithRequirements() const {
878 if (!mFeatureInfo) return false;
879 return mFeatureInfo->hasVulkanCreateResourcesWithRequirements;
880 }
881
getHostInstanceExtensionIndex(const std::string & extName) const882 int getHostInstanceExtensionIndex(const std::string& extName) const {
883 int i = 0;
884 for (const auto& prop : mHostInstanceExtensions) {
885 if (extName == std::string(prop.extensionName)) {
886 return i;
887 }
888 ++i;
889 }
890 return -1;
891 }
892
getHostDeviceExtensionIndex(const std::string & extName) const893 int getHostDeviceExtensionIndex(const std::string& extName) const {
894 int i = 0;
895 for (const auto& prop : mHostDeviceExtensions) {
896 if (extName == std::string(prop.extensionName)) {
897 return i;
898 }
899 ++i;
900 }
901 return -1;
902 }
903
    // Rewrites guest-side memory handles/offsets/sizes/type indices into their
    // host equivalents before encoding, when host-visible memory
    // virtualization is active. The parallel count parameters mirror the
    // generated transform calling convention.
    void deviceMemoryTransform_tohost(
        VkDeviceMemory* memory, uint32_t memoryCount,
        VkDeviceSize* offset, uint32_t offsetCount,
        VkDeviceSize* size, uint32_t sizeCount,
        uint32_t* typeIndex, uint32_t typeIndexCount,
        uint32_t* typeBits, uint32_t typeBitsCount) {

        (void)memoryCount;
        (void)offsetCount;
        (void)sizeCount;

        const auto& hostVirt =
            mHostVisibleMemoryVirtInfo;

        if (!hostVirt.virtualizationSupported) return;

        if (memory) {
            AutoLock lock (mLock);

            for (uint32_t i = 0; i < memoryCount; ++i) {
                VkDeviceMemory mem = memory[i];

                auto it = info_VkDeviceMemory.find(mem);
                // NOTE(review): bails out of the entire transform on the first
                // unknown handle (return, not continue), leaving later entries
                // and the typeIndex/typeBits passes untouched — confirm intended.
                if (it == info_VkDeviceMemory.end()) return;

                const auto& info = it->second;

                if (!info.directMapped) continue;

                // Replace the sub-allocation with its backing block memory,
                // shifting the offset and expanding VK_WHOLE_SIZE accordingly.
                memory[i] = info.subAlloc.baseMemory;

                if (offset) {
                    offset[i] = info.subAlloc.baseOffset + offset[i];
                }

                if (size) {
                    if (size[i] == VK_WHOLE_SIZE) {
                        size[i] = info.subAlloc.subMappedSize;
                    }
                }

                // TODO
                (void)memory;
                (void)offset;
                (void)size;
            }
        }

        // Map each guest memory type index to its host index.
        for (uint32_t i = 0; i < typeIndexCount; ++i) {
            typeIndex[i] =
                hostVirt.memoryTypeIndexMappingToHost[typeIndex[i]];
        }

        // Remap each guest memory-type bitmask bit-by-bit into host bits.
        for (uint32_t i = 0; i < typeBitsCount; ++i) {
            uint32_t bits = 0;
            for (uint32_t j = 0; j < VK_MAX_MEMORY_TYPES; ++j) {
                bool guestHas = typeBits[i] & (1 << j);
                uint32_t hostIndex =
                    hostVirt.memoryTypeIndexMappingToHost[j];
                bits |= guestHas ? (1 << hostIndex) : 0;
            }
            typeBits[i] = bits;
        }
    }
968
    // Inverse of deviceMemoryTransform_tohost: rewrites host-side memory
    // type indices and bitmasks back into the guest's numbering. The
    // memory/offset/size transform is not implemented yet (TODO below).
    void deviceMemoryTransform_fromhost(
        VkDeviceMemory* memory, uint32_t memoryCount,
        VkDeviceSize* offset, uint32_t offsetCount,
        VkDeviceSize* size, uint32_t sizeCount,
        uint32_t* typeIndex, uint32_t typeIndexCount,
        uint32_t* typeBits, uint32_t typeBitsCount) {

        (void)memoryCount;
        (void)offsetCount;
        (void)sizeCount;

        const auto& hostVirt =
            mHostVisibleMemoryVirtInfo;

        // Without virtualization, guest and host views already agree.
        if (!hostVirt.virtualizationSupported) return;

        AutoLock lock (mLock);

        // Placeholder: handle/offset/size reverse-mapping not implemented.
        for (uint32_t i = 0; i < memoryCount; ++i) {
            // TODO
            (void)memory;
            (void)offset;
            (void)size;
        }

        // Renumber host memory type indices into the guest's numbering.
        for (uint32_t i = 0; i < typeIndexCount; ++i) {
            typeIndex[i] =
                hostVirt.memoryTypeIndexMappingFromHost[typeIndex[i]];
        }

        // Rebuild each type bitmask under the host-to-guest mapping. A
        // host type flagged as "advertise both" keeps its original bit in
        // addition to the mapped guest bit.
        for (uint32_t i = 0; i < typeBitsCount; ++i) {
            uint32_t bits = 0;
            for (uint32_t j = 0; j < VK_MAX_MEMORY_TYPES; ++j) {
                bool hostHas = typeBits[i] & (1 << j);
                uint32_t guestIndex =
                    hostVirt.memoryTypeIndexMappingFromHost[j];
                bits |= hostHas ? (1 << guestIndex) : 0;

                if (hostVirt.memoryTypeBitsShouldAdvertiseBoth[j]) {
                    bits |= hostHas ? (1 << j) : 0;
                }
            }
            typeBits[i] = bits;
        }
    }
1014
    // Implements vkEnumerateInstanceExtensionProperties for the guest:
    // fetches (and caches) the host's instance extensions, intersects
    // them with an allowlist, appends platform-specific emulated entries,
    // and serves the standard two-call count/fill protocol.
    VkResult on_vkEnumerateInstanceExtensionProperties(
        void* context,
        VkResult,
        const char*,
        uint32_t* pPropertyCount,
        VkExtensionProperties* pProperties) {
        std::vector<const char*> allowedExtensionNames = {
            "VK_KHR_get_physical_device_properties2",
            "VK_KHR_sampler_ycbcr_conversion",
#ifdef VK_USE_PLATFORM_ANDROID_KHR
            "VK_KHR_external_semaphore_capabilities",
            "VK_KHR_external_memory_capabilities",
            "VK_KHR_external_fence_capabilities",
#endif
            // TODO:
            // VK_KHR_external_memory_capabilities
        };

        VkEncoder* enc = (VkEncoder*)context;

        // Only advertise a select set of extensions.
        if (mHostInstanceExtensions.empty()) {
            // First call: cache the host's full instance extension list.
            // NOTE(review): the count query's VkResult is ignored; on
            // failure hostPropCount stays 0 — confirm that is intended.
            uint32_t hostPropCount = 0;
            enc->vkEnumerateInstanceExtensionProperties(nullptr, &hostPropCount, nullptr);
            mHostInstanceExtensions.resize(hostPropCount);

            VkResult hostRes =
                enc->vkEnumerateInstanceExtensionProperties(
                    nullptr, &hostPropCount, mHostInstanceExtensions.data());

            if (hostRes != VK_SUCCESS) {
                return hostRes;
            }
        }

        // Intersection of the allowlist with what the host advertises,
        // preserving the host's VkExtensionProperties (spec versions).
        std::vector<VkExtensionProperties> filteredExts;

        for (size_t i = 0; i < allowedExtensionNames.size(); ++i) {
            auto extIndex = getHostInstanceExtensionIndex(allowedExtensionNames[i]);
            if (extIndex != -1) {
                filteredExts.push_back(mHostInstanceExtensions[extIndex]);
            }
        }

        // Extensions emulated by the guest driver regardless of host
        // support.
        VkExtensionProperties anbExtProps[] = {
#ifdef VK_USE_PLATFORM_FUCHSIA
            { "VK_KHR_external_memory_capabilities", 1},
            { "VK_KHR_external_semaphore_capabilities", 1},
#endif
        };

        for (auto& anbExtProp: anbExtProps) {
            filteredExts.push_back(anbExtProp);
        }

        // Spec:
        //
        // https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkEnumerateInstanceExtensionProperties.html
        //
        // If pProperties is NULL, then the number of extensions properties
        // available is returned in pPropertyCount. Otherwise, pPropertyCount
        // must point to a variable set by the user to the number of elements
        // in the pProperties array, and on return the variable is overwritten
        // with the number of structures actually written to pProperties. If
        // pPropertyCount is less than the number of extension properties
        // available, at most pPropertyCount structures will be written. If
        // pPropertyCount is smaller than the number of extensions available,
        // VK_INCOMPLETE will be returned instead of VK_SUCCESS, to indicate
        // that not all the available properties were returned.
        //
        // pPropertyCount must be a valid pointer to a uint32_t value
        if (!pPropertyCount) return VK_ERROR_INITIALIZATION_FAILED;

        if (!pProperties) {
            // Count-only query.
            *pPropertyCount = (uint32_t)filteredExts.size();
            return VK_SUCCESS;
        } else {
            // Fill query: write at most *pPropertyCount entries and report
            // VK_INCOMPLETE if the caller's buffer was too small.
            auto actualExtensionCount = (uint32_t)filteredExts.size();
            if (*pPropertyCount > actualExtensionCount) {
                *pPropertyCount = actualExtensionCount;
            }

            for (uint32_t i = 0; i < *pPropertyCount; ++i) {
                pProperties[i] = filteredExts[i];
            }

            if (actualExtensionCount > *pPropertyCount) {
                return VK_INCOMPLETE;
            }

            return VK_SUCCESS;
        }
    }
1108
    // Implements vkEnumerateDeviceExtensionProperties for the guest:
    // caches the host's device extensions, intersects them with an
    // allowlist, appends platform-specific and translated/emulated
    // extensions, and serves the standard two-call count/fill protocol.
    VkResult on_vkEnumerateDeviceExtensionProperties(
        void* context,
        VkResult,
        VkPhysicalDevice physdev,
        const char*,
        uint32_t* pPropertyCount,
        VkExtensionProperties* pProperties) {

        std::vector<const char*> allowedExtensionNames = {
            "VK_KHR_maintenance1",
            "VK_KHR_maintenance2",
            "VK_KHR_maintenance3",
            "VK_KHR_get_memory_requirements2",
            "VK_KHR_dedicated_allocation",
            "VK_KHR_bind_memory2",
            "VK_KHR_sampler_ycbcr_conversion",
            "VK_KHR_shader_float16_int8",
            "VK_AMD_gpu_shader_half_float",
            "VK_NV_shader_subgroup_partitioned",
#ifdef VK_USE_PLATFORM_ANDROID_KHR
            "VK_KHR_external_semaphore",
            "VK_KHR_external_semaphore_fd",
            // "VK_KHR_external_semaphore_win32", not exposed because it's translated to fd
            "VK_KHR_external_memory",
            "VK_KHR_external_fence",
            "VK_KHR_external_fence_fd",
#endif
            // TODO:
            // VK_KHR_external_memory_capabilities
        };

        VkEncoder* enc = (VkEncoder*)context;

        if (mHostDeviceExtensions.empty()) {
            // First call: cache the host's full device extension list.
            // NOTE(review): the count query's VkResult is ignored; on
            // failure hostPropCount stays 0 — confirm that is intended.
            uint32_t hostPropCount = 0;
            enc->vkEnumerateDeviceExtensionProperties(physdev, nullptr, &hostPropCount, nullptr);
            mHostDeviceExtensions.resize(hostPropCount);

            VkResult hostRes =
                enc->vkEnumerateDeviceExtensionProperties(
                    physdev, nullptr, &hostPropCount, mHostDeviceExtensions.data());

            if (hostRes != VK_SUCCESS) {
                return hostRes;
            }
        }

        // External semaphore support may come from either the win32 or
        // the fd flavor on the host; the guest only ever sees the fd
        // flavor (win32 is translated).
        bool hostHasWin32ExternalSemaphore =
            getHostDeviceExtensionIndex(
                "VK_KHR_external_semaphore_win32") != -1;

        bool hostHasPosixExternalSemaphore =
            getHostDeviceExtensionIndex(
                "VK_KHR_external_semaphore_fd") != -1;

        ALOGD("%s: host has ext semaphore? win32 %d posix %d\n", __func__,
              hostHasWin32ExternalSemaphore,
              hostHasPosixExternalSemaphore);

        bool hostSupportsExternalSemaphore =
            hostHasWin32ExternalSemaphore ||
            hostHasPosixExternalSemaphore;

        // Intersection of the allowlist with what the host advertises.
        std::vector<VkExtensionProperties> filteredExts;

        for (size_t i = 0; i < allowedExtensionNames.size(); ++i) {
            auto extIndex = getHostDeviceExtensionIndex(allowedExtensionNames[i]);
            if (extIndex != -1) {
                filteredExts.push_back(mHostDeviceExtensions[extIndex]);
            }
        }

        // Extensions the guest driver implements itself, independent of
        // host support.
        VkExtensionProperties anbExtProps[] = {
#ifdef VK_USE_PLATFORM_ANDROID_KHR
            { "VK_ANDROID_native_buffer", 7 },
#endif
#ifdef VK_USE_PLATFORM_FUCHSIA
            { "VK_KHR_external_memory", 1 },
            { "VK_KHR_external_semaphore", 1 },
            { "VK_FUCHSIA_external_semaphore", 1 },
#endif
        };

        for (auto& anbExtProp: anbExtProps) {
            filteredExts.push_back(anbExtProp);
        }

#ifndef VK_USE_PLATFORM_FUCHSIA
        // If the host only has the win32 flavor, still advertise the fd
        // flavor to the guest (the transport translates it).
        if (hostSupportsExternalSemaphore &&
            !hostHasPosixExternalSemaphore) {
            filteredExts.push_back(
                { "VK_KHR_external_semaphore_fd", 1});
        }
#endif

        // External memory is considered available if the host has any of
        // the win32/fd flavors, or is running on MoltenVK (note: the
        // MoltenVK check queries the *instance* extension list).
        bool win32ExtMemAvailable =
            getHostDeviceExtensionIndex(
                "VK_KHR_external_memory_win32") != -1;
        bool posixExtMemAvailable =
            getHostDeviceExtensionIndex(
                "VK_KHR_external_memory_fd") != -1;
        bool moltenVkExtAvailable =
            getHostInstanceExtensionIndex(
                "VK_MVK_moltenvk") != -1;

        bool hostHasExternalMemorySupport =
            win32ExtMemAvailable || posixExtMemAvailable || moltenVkExtAvailable;

        if (hostHasExternalMemorySupport) {
#ifdef VK_USE_PLATFORM_ANDROID_KHR
            filteredExts.push_back({
                "VK_ANDROID_external_memory_android_hardware_buffer", 7
            });
            filteredExts.push_back({
                "VK_EXT_queue_family_foreign", 1
            });
#endif
#ifdef VK_USE_PLATFORM_FUCHSIA
            filteredExts.push_back({
                "VK_FUCHSIA_external_memory", 1
            });
            filteredExts.push_back({
                "VK_FUCHSIA_buffer_collection", 1
            });
#endif
        }

        // Spec:
        //
        // https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkEnumerateDeviceExtensionProperties.html
        //
        // pPropertyCount is a pointer to an integer related to the number of
        // extension properties available or queried, and is treated in the
        // same fashion as the
        // vkEnumerateInstanceExtensionProperties::pPropertyCount parameter.
        //
        // https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkEnumerateInstanceExtensionProperties.html
        //
        // If pProperties is NULL, then the number of extensions properties
        // available is returned in pPropertyCount. Otherwise, pPropertyCount
        // must point to a variable set by the user to the number of elements
        // in the pProperties array, and on return the variable is overwritten
        // with the number of structures actually written to pProperties. If
        // pPropertyCount is less than the number of extension properties
        // available, at most pPropertyCount structures will be written. If
        // pPropertyCount is smaller than the number of extensions available,
        // VK_INCOMPLETE will be returned instead of VK_SUCCESS, to indicate
        // that not all the available properties were returned.
        //
        // pPropertyCount must be a valid pointer to a uint32_t value

        if (!pPropertyCount) return VK_ERROR_INITIALIZATION_FAILED;

        if (!pProperties) {
            // Count-only query.
            *pPropertyCount = (uint32_t)filteredExts.size();
            return VK_SUCCESS;
        } else {
            // Fill query: write at most *pPropertyCount entries and report
            // VK_INCOMPLETE if the caller's buffer was too small.
            auto actualExtensionCount = (uint32_t)filteredExts.size();
            if (*pPropertyCount > actualExtensionCount) {
                *pPropertyCount = actualExtensionCount;
            }

            for (uint32_t i = 0; i < *pPropertyCount; ++i) {
                pProperties[i] = filteredExts[i];
            }

            if (actualExtensionCount > *pPropertyCount) {
                return VK_INCOMPLETE;
            }

            return VK_SUCCESS;
        }
    }
1282
    // Implements vkEnumeratePhysicalDevices: lazily caches the host's full
    // physical device list in the instance info, then answers the guest's
    // count/fill query per the Vulkan spec. The lock is dropped around
    // encoder calls to avoid holding mLock across host round-trips.
    VkResult on_vkEnumeratePhysicalDevices(
        void* context, VkResult,
        VkInstance instance, uint32_t* pPhysicalDeviceCount,
        VkPhysicalDevice* pPhysicalDevices) {

        VkEncoder* enc = (VkEncoder*)context;

        if (!instance) return VK_ERROR_INITIALIZATION_FAILED;

        if (!pPhysicalDeviceCount) return VK_ERROR_INITIALIZATION_FAILED;

        AutoLock lock(mLock);

        // When this function is called, we actually need to do two things:
        // - Get full information about physical devices from the host,
        // even if the guest did not ask for it
        // - Serve the guest query according to the spec:
        //
        // https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkEnumeratePhysicalDevices.html

        auto it = info_VkInstance.find(instance);

        if (it == info_VkInstance.end()) return VK_ERROR_INITIALIZATION_FAILED;

        auto& info = it->second;

        // Get the full host information here if it doesn't exist already.
        if (info.physicalDevices.empty()) {
            uint32_t hostPhysicalDeviceCount = 0;

            // Unlock while talking to the host; relock to mutate info.
            lock.unlock();
            VkResult countRes = enc->vkEnumeratePhysicalDevices(
                instance, &hostPhysicalDeviceCount, nullptr);
            lock.lock();

            if (countRes != VK_SUCCESS) {
                ALOGE("%s: failed: could not count host physical devices. "
                      "Error %d\n", __func__, countRes);
                return countRes;
            }

            info.physicalDevices.resize(hostPhysicalDeviceCount);

            lock.unlock();
            VkResult enumRes = enc->vkEnumeratePhysicalDevices(
                instance, &hostPhysicalDeviceCount, info.physicalDevices.data());
            lock.lock();

            if (enumRes != VK_SUCCESS) {
                ALOGE("%s: failed: could not retrieve host physical devices. "
                      "Error %d\n", __func__, enumRes);
                return enumRes;
            }
        }

        // Serve the guest query according to the spec.
        //
        // https://www.khronos.org/registry/vulkan/specs/1.1-extensions/man/html/vkEnumeratePhysicalDevices.html
        //
        // If pPhysicalDevices is NULL, then the number of physical devices
        // available is returned in pPhysicalDeviceCount. Otherwise,
        // pPhysicalDeviceCount must point to a variable set by the user to the
        // number of elements in the pPhysicalDevices array, and on return the
        // variable is overwritten with the number of handles actually written
        // to pPhysicalDevices. If pPhysicalDeviceCount is less than the number
        // of physical devices available, at most pPhysicalDeviceCount
        // structures will be written. If pPhysicalDeviceCount is smaller than
        // the number of physical devices available, VK_INCOMPLETE will be
        // returned instead of VK_SUCCESS, to indicate that not all the
        // available physical devices were returned.

        if (!pPhysicalDevices) {
            // Count-only query.
            *pPhysicalDeviceCount = (uint32_t)info.physicalDevices.size();
            return VK_SUCCESS;
        } else {
            // Fill query: clamp to the caller's capacity and report
            // VK_INCOMPLETE if anything was left out.
            uint32_t actualDeviceCount = (uint32_t)info.physicalDevices.size();
            uint32_t toWrite = actualDeviceCount < *pPhysicalDeviceCount ? actualDeviceCount : *pPhysicalDeviceCount;

            for (uint32_t i = 0; i < toWrite; ++i) {
                pPhysicalDevices[i] = info.physicalDevices[i];
            }

            *pPhysicalDeviceCount = toWrite;

            if (actualDeviceCount > *pPhysicalDeviceCount) {
                return VK_INCOMPLETE;
            }

            return VK_SUCCESS;
        }
    }
1374
on_vkGetPhysicalDeviceProperties(void *,VkPhysicalDevice,VkPhysicalDeviceProperties * pProperties)1375 void on_vkGetPhysicalDeviceProperties(
1376 void*,
1377 VkPhysicalDevice,
1378 VkPhysicalDeviceProperties* pProperties) {
1379 if (pProperties) {
1380 pProperties->deviceType = VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU;
1381 }
1382 }
1383
on_vkGetPhysicalDeviceProperties2(void *,VkPhysicalDevice,VkPhysicalDeviceProperties2 * pProperties)1384 void on_vkGetPhysicalDeviceProperties2(
1385 void*,
1386 VkPhysicalDevice,
1387 VkPhysicalDeviceProperties2* pProperties) {
1388 if (pProperties) {
1389 pProperties->properties.deviceType =
1390 VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU;
1391 }
1392 }
1393
on_vkGetPhysicalDeviceMemoryProperties(void *,VkPhysicalDevice physdev,VkPhysicalDeviceMemoryProperties * out)1394 void on_vkGetPhysicalDeviceMemoryProperties(
1395 void*,
1396 VkPhysicalDevice physdev,
1397 VkPhysicalDeviceMemoryProperties* out) {
1398
1399 initHostVisibleMemoryVirtualizationInfo(
1400 physdev,
1401 out,
1402 mFeatureInfo.get(),
1403 &mHostVisibleMemoryVirtInfo);
1404
1405 if (mHostVisibleMemoryVirtInfo.virtualizationSupported) {
1406 *out = mHostVisibleMemoryVirtInfo.guestMemoryProperties;
1407 }
1408 }
1409
on_vkGetPhysicalDeviceMemoryProperties2(void *,VkPhysicalDevice physdev,VkPhysicalDeviceMemoryProperties2 * out)1410 void on_vkGetPhysicalDeviceMemoryProperties2(
1411 void*,
1412 VkPhysicalDevice physdev,
1413 VkPhysicalDeviceMemoryProperties2* out) {
1414
1415 initHostVisibleMemoryVirtualizationInfo(
1416 physdev,
1417 &out->memoryProperties,
1418 mFeatureInfo.get(),
1419 &mHostVisibleMemoryVirtInfo);
1420
1421 if (mHostVisibleMemoryVirtInfo.virtualizationSupported) {
1422 out->memoryProperties = mHostVisibleMemoryVirtInfo.guestMemoryProperties;
1423 }
1424 }
1425
on_vkCreateInstance(void * context,VkResult input_result,const VkInstanceCreateInfo * createInfo,const VkAllocationCallbacks *,VkInstance * pInstance)1426 VkResult on_vkCreateInstance(
1427 void* context,
1428 VkResult input_result,
1429 const VkInstanceCreateInfo* createInfo,
1430 const VkAllocationCallbacks*,
1431 VkInstance* pInstance) {
1432
1433 if (input_result != VK_SUCCESS) return input_result;
1434
1435 VkEncoder* enc = (VkEncoder*)context;
1436
1437 uint32_t apiVersion;
1438 VkResult enumInstanceVersionRes =
1439 enc->vkEnumerateInstanceVersion(&apiVersion);
1440
1441 setInstanceInfo(
1442 *pInstance,
1443 createInfo->enabledExtensionCount,
1444 createInfo->ppEnabledExtensionNames,
1445 apiVersion);
1446
1447 return input_result;
1448 }
1449
on_vkCreateDevice(void * context,VkResult input_result,VkPhysicalDevice physicalDevice,const VkDeviceCreateInfo * pCreateInfo,const VkAllocationCallbacks *,VkDevice * pDevice)1450 VkResult on_vkCreateDevice(
1451 void* context,
1452 VkResult input_result,
1453 VkPhysicalDevice physicalDevice,
1454 const VkDeviceCreateInfo* pCreateInfo,
1455 const VkAllocationCallbacks*,
1456 VkDevice* pDevice) {
1457
1458 if (input_result != VK_SUCCESS) return input_result;
1459
1460 VkEncoder* enc = (VkEncoder*)context;
1461
1462 VkPhysicalDeviceProperties props;
1463 VkPhysicalDeviceMemoryProperties memProps;
1464 enc->vkGetPhysicalDeviceProperties(physicalDevice, &props);
1465 enc->vkGetPhysicalDeviceMemoryProperties(physicalDevice, &memProps);
1466
1467 setDeviceInfo(
1468 *pDevice, physicalDevice, props, memProps,
1469 pCreateInfo->enabledExtensionCount, pCreateInfo->ppEnabledExtensionNames);
1470
1471 return input_result;
1472 }
1473
on_vkDestroyDevice_pre(void * context,VkDevice device,const VkAllocationCallbacks *)1474 void on_vkDestroyDevice_pre(
1475 void* context,
1476 VkDevice device,
1477 const VkAllocationCallbacks*) {
1478
1479 AutoLock lock(mLock);
1480
1481 auto it = info_VkDevice.find(device);
1482 if (it == info_VkDevice.end()) return;
1483 auto info = it->second;
1484
1485 lock.unlock();
1486
1487 VkEncoder* enc = (VkEncoder*)context;
1488
1489 bool freeMemorySyncSupported =
1490 mFeatureInfo->hasVulkanFreeMemorySync;
1491 for (uint32_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i) {
1492 for (auto& block : info.hostMemBlocks[i]) {
1493 destroyHostMemAlloc(
1494 freeMemorySyncSupported,
1495 enc, device, &block);
1496 }
1497 }
1498 }
1499
on_vkGetAndroidHardwareBufferPropertiesANDROID(void *,VkResult,VkDevice device,const AHardwareBuffer * buffer,VkAndroidHardwareBufferPropertiesANDROID * pProperties)1500 VkResult on_vkGetAndroidHardwareBufferPropertiesANDROID(
1501 void*, VkResult,
1502 VkDevice device,
1503 const AHardwareBuffer* buffer,
1504 VkAndroidHardwareBufferPropertiesANDROID* pProperties) {
1505 auto grallocHelper =
1506 mThreadingCallbacks.hostConnectionGetFunc()->grallocHelper();
1507 return getAndroidHardwareBufferPropertiesANDROID(
1508 grallocHelper,
1509 &mHostVisibleMemoryVirtInfo,
1510 device, buffer, pProperties);
1511 }
1512
on_vkGetMemoryAndroidHardwareBufferANDROID(void *,VkResult,VkDevice device,const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo,struct AHardwareBuffer ** pBuffer)1513 VkResult on_vkGetMemoryAndroidHardwareBufferANDROID(
1514 void*, VkResult,
1515 VkDevice device,
1516 const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo,
1517 struct AHardwareBuffer** pBuffer) {
1518
1519 if (!pInfo) return VK_ERROR_INITIALIZATION_FAILED;
1520 if (!pInfo->memory) return VK_ERROR_INITIALIZATION_FAILED;
1521
1522 AutoLock lock(mLock);
1523
1524 auto deviceIt = info_VkDevice.find(device);
1525
1526 if (deviceIt == info_VkDevice.end()) {
1527 return VK_ERROR_INITIALIZATION_FAILED;
1528 }
1529
1530 auto memoryIt = info_VkDeviceMemory.find(pInfo->memory);
1531
1532 if (memoryIt == info_VkDeviceMemory.end()) {
1533 return VK_ERROR_INITIALIZATION_FAILED;
1534 }
1535
1536 auto& info = memoryIt->second;
1537
1538 VkResult queryRes =
1539 getMemoryAndroidHardwareBufferANDROID(&info.ahw);
1540
1541 if (queryRes != VK_SUCCESS) return queryRes;
1542
1543 *pBuffer = info.ahw;
1544
1545 return queryRes;
1546 }
1547
1548 #ifdef VK_USE_PLATFORM_FUCHSIA
    // Exports the VMO backing a VkDeviceMemory allocation as a duplicated
    // zircon handle owned by the caller.
    VkResult on_vkGetMemoryZirconHandleFUCHSIA(
        void*, VkResult,
        VkDevice device,
        const VkMemoryGetZirconHandleInfoFUCHSIA* pInfo,
        uint32_t* pHandle) {

        if (!pInfo) return VK_ERROR_INITIALIZATION_FAILED;
        if (!pInfo->memory) return VK_ERROR_INITIALIZATION_FAILED;

        AutoLock lock(mLock);

        auto deviceIt = info_VkDevice.find(device);

        if (deviceIt == info_VkDevice.end()) {
            return VK_ERROR_INITIALIZATION_FAILED;
        }

        auto memoryIt = info_VkDeviceMemory.find(pInfo->memory);

        if (memoryIt == info_VkDeviceMemory.end()) {
            return VK_ERROR_INITIALIZATION_FAILED;
        }

        auto& info = memoryIt->second;

        // Only allocations that carry a VMO can be exported.
        if (info.vmoHandle == ZX_HANDLE_INVALID) {
            ALOGE("%s: memory cannot be exported", __func__);
            return VK_ERROR_INITIALIZATION_FAILED;
        }

        // NOTE(review): the status of zx_handle_duplicate is ignored; on
        // failure *pHandle stays ZX_HANDLE_INVALID yet VK_SUCCESS is
        // returned — confirm whether the status should be checked.
        *pHandle = ZX_HANDLE_INVALID;
        zx_handle_duplicate(info.vmoHandle, ZX_RIGHT_SAME_RIGHTS, pHandle);
        return VK_SUCCESS;
    }
1583
on_vkGetMemoryZirconHandlePropertiesFUCHSIA(void *,VkResult,VkDevice device,VkExternalMemoryHandleTypeFlagBits handleType,uint32_t handle,VkMemoryZirconHandlePropertiesFUCHSIA * pProperties)1584 VkResult on_vkGetMemoryZirconHandlePropertiesFUCHSIA(
1585 void*, VkResult,
1586 VkDevice device,
1587 VkExternalMemoryHandleTypeFlagBits handleType,
1588 uint32_t handle,
1589 VkMemoryZirconHandlePropertiesFUCHSIA* pProperties) {
1590 if (handleType != VK_EXTERNAL_MEMORY_HANDLE_TYPE_TEMP_ZIRCON_VMO_BIT_FUCHSIA) {
1591 return VK_ERROR_INITIALIZATION_FAILED;
1592 }
1593
1594 AutoLock lock(mLock);
1595
1596 auto deviceIt = info_VkDevice.find(device);
1597
1598 if (deviceIt == info_VkDevice.end()) {
1599 return VK_ERROR_INITIALIZATION_FAILED;
1600 }
1601
1602 auto& info = deviceIt->second;
1603
1604 // Device local memory type supported.
1605 pProperties->memoryTypeBits = 0;
1606 for (uint32_t i = 0; i < info.memProps.memoryTypeCount; ++i) {
1607 if (info.memProps.memoryTypes[i].propertyFlags &
1608 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
1609 pProperties->memoryTypeBits |= 1ull << i;
1610 }
1611 }
1612 return VK_SUCCESS;
1613 }
1614
on_vkImportSemaphoreZirconHandleFUCHSIA(void *,VkResult,VkDevice device,const VkImportSemaphoreZirconHandleInfoFUCHSIA * pInfo)1615 VkResult on_vkImportSemaphoreZirconHandleFUCHSIA(
1616 void*, VkResult,
1617 VkDevice device,
1618 const VkImportSemaphoreZirconHandleInfoFUCHSIA* pInfo) {
1619
1620 if (!pInfo) return VK_ERROR_INITIALIZATION_FAILED;
1621 if (!pInfo->semaphore) return VK_ERROR_INITIALIZATION_FAILED;
1622
1623 AutoLock lock(mLock);
1624
1625 auto deviceIt = info_VkDevice.find(device);
1626
1627 if (deviceIt == info_VkDevice.end()) {
1628 return VK_ERROR_INITIALIZATION_FAILED;
1629 }
1630
1631 auto semaphoreIt = info_VkSemaphore.find(pInfo->semaphore);
1632
1633 if (semaphoreIt == info_VkSemaphore.end()) {
1634 return VK_ERROR_INITIALIZATION_FAILED;
1635 }
1636
1637 auto& info = semaphoreIt->second;
1638
1639 if (info.eventHandle != ZX_HANDLE_INVALID) {
1640 zx_handle_close(info.eventHandle);
1641 }
1642 info.eventHandle = pInfo->handle;
1643
1644 return VK_SUCCESS;
1645 }
1646
on_vkGetSemaphoreZirconHandleFUCHSIA(void *,VkResult,VkDevice device,const VkSemaphoreGetZirconHandleInfoFUCHSIA * pInfo,uint32_t * pHandle)1647 VkResult on_vkGetSemaphoreZirconHandleFUCHSIA(
1648 void*, VkResult,
1649 VkDevice device,
1650 const VkSemaphoreGetZirconHandleInfoFUCHSIA* pInfo,
1651 uint32_t* pHandle) {
1652
1653 if (!pInfo) return VK_ERROR_INITIALIZATION_FAILED;
1654 if (!pInfo->semaphore) return VK_ERROR_INITIALIZATION_FAILED;
1655
1656 AutoLock lock(mLock);
1657
1658 auto deviceIt = info_VkDevice.find(device);
1659
1660 if (deviceIt == info_VkDevice.end()) {
1661 return VK_ERROR_INITIALIZATION_FAILED;
1662 }
1663
1664 auto semaphoreIt = info_VkSemaphore.find(pInfo->semaphore);
1665
1666 if (semaphoreIt == info_VkSemaphore.end()) {
1667 return VK_ERROR_INITIALIZATION_FAILED;
1668 }
1669
1670 auto& info = semaphoreIt->second;
1671
1672 if (info.eventHandle == ZX_HANDLE_INVALID) {
1673 return VK_ERROR_INITIALIZATION_FAILED;
1674 }
1675
1676 *pHandle = ZX_HANDLE_INVALID;
1677 zx_handle_duplicate(info.eventHandle, ZX_RIGHT_SAME_RIGHTS, pHandle);
1678 return VK_SUCCESS;
1679 }
1680
    // Creates a sysmem buffer collection. If the caller supplied a
    // collection token, it is adopted (ownership transfers to this
    // function); otherwise a fresh shared collection is allocated. The
    // resulting VkBufferCollectionFUCHSIA wraps a heap-allocated sysmem
    // SyncClient, freed by on_vkDestroyBufferCollectionFUCHSIA.
    VkResult on_vkCreateBufferCollectionFUCHSIA(
        void*, VkResult, VkDevice,
        const VkBufferCollectionCreateInfoFUCHSIA* pInfo,
        const VkAllocationCallbacks*,
        VkBufferCollectionFUCHSIA* pCollection) {
        zx::channel token_client;

        if (pInfo->collectionToken) {
            // Adopt the caller-provided token channel.
            token_client = zx::channel(pInfo->collectionToken);
        } else {
            // No token given: allocate a new shared collection and keep
            // the client end of the token channel.
            zx::channel token_server;
            zx_status_t status =
                zx::channel::create(0, &token_server, &token_client);
            if (status != ZX_OK) {
                ALOGE("zx_channel_create failed: %d", status);
                return VK_ERROR_INITIALIZATION_FAILED;
            }

            auto result = mSysmemAllocator->AllocateSharedCollection(
                std::move(token_server));
            if (!result.ok()) {
                ALOGE("AllocateSharedCollection failed: %d", result.status());
                return VK_ERROR_INITIALIZATION_FAILED;
            }
        }

        // Exchange the token for a BufferCollection channel.
        zx::channel collection_client, collection_server;
        zx_status_t status =
            zx::channel::create(0, &collection_client, &collection_server);
        if (status != ZX_OK) {
            ALOGE("zx_channel_create failed: %d", status);
            return VK_ERROR_INITIALIZATION_FAILED;
        }

        auto result = mSysmemAllocator->BindSharedCollection(
            std::move(token_client), std::move(collection_server));
        if (!result.ok()) {
            ALOGE("BindSharedCollection failed: %d", result.status());
            return VK_ERROR_INITIALIZATION_FAILED;
        }

        // The opaque Vulkan handle is the SyncClient pointer itself.
        auto sysmem_collection =
            new llcpp::fuchsia::sysmem::BufferCollection::SyncClient(
                std::move(collection_client));
        *pCollection = reinterpret_cast<VkBufferCollectionFUCHSIA>(sysmem_collection);

        return VK_SUCCESS;
    }
1729
on_vkDestroyBufferCollectionFUCHSIA(void *,VkResult,VkDevice,VkBufferCollectionFUCHSIA collection,const VkAllocationCallbacks *)1730 void on_vkDestroyBufferCollectionFUCHSIA(
1731 void*, VkResult, VkDevice,
1732 VkBufferCollectionFUCHSIA collection,
1733 const VkAllocationCallbacks*) {
1734 auto sysmem_collection = reinterpret_cast<
1735 llcpp::fuchsia::sysmem::BufferCollection::SyncClient*>(collection);
1736 if (sysmem_collection) {
1737 sysmem_collection->Close();
1738 }
1739 delete sysmem_collection;
1740 }
1741
1742 inline llcpp::fuchsia::sysmem::BufferCollectionConstraints
defaultBufferCollectionConstraints(size_t min_size_bytes,size_t buffer_count)1743 defaultBufferCollectionConstraints(size_t min_size_bytes,
1744 size_t buffer_count) {
1745 llcpp::fuchsia::sysmem::BufferCollectionConstraints constraints = {};
1746 constraints.min_buffer_count = buffer_count;
1747 constraints.has_buffer_memory_constraints = true;
1748 llcpp::fuchsia::sysmem::BufferMemoryConstraints& buffer_constraints =
1749 constraints.buffer_memory_constraints;
1750
1751 buffer_constraints.min_size_bytes = min_size_bytes;
1752 buffer_constraints.max_size_bytes = 0xffffffff;
1753 buffer_constraints.physically_contiguous_required = false;
1754 buffer_constraints.secure_required = false;
1755 buffer_constraints.ram_domain_supported = false;
1756 buffer_constraints.cpu_domain_supported = false;
1757 buffer_constraints.inaccessible_domain_supported = true;
1758 buffer_constraints.heap_permitted_count = 1;
1759 buffer_constraints.heap_permitted[0] =
1760 llcpp::fuchsia::sysmem::HeapType::GOLDFISH_DEVICE_LOCAL;
1761
1762 return constraints;
1763 }
1764
getBufferCollectionConstraintsVulkanImageUsage(const VkImageCreateInfo * pImageInfo)1765 uint32_t getBufferCollectionConstraintsVulkanImageUsage(
1766 const VkImageCreateInfo* pImageInfo) {
1767 uint32_t usage = 0u;
1768 VkImageUsageFlags imageUsage = pImageInfo->usage;
1769
1770 #define SetUsageBit(USAGE) \
1771 if (imageUsage & VK_IMAGE_USAGE_##USAGE##_BIT) { \
1772 usage |= llcpp::fuchsia::sysmem::VULKAN_IMAGE_USAGE_##USAGE; \
1773 }
1774
1775 SetUsageBit(COLOR_ATTACHMENT);
1776 SetUsageBit(TRANSFER_SRC);
1777 SetUsageBit(TRANSFER_DST);
1778 SetUsageBit(SAMPLED);
1779
1780 #undef SetUsageBit
1781 return usage;
1782 }
1783
getBufferCollectionConstraintsVulkanBufferUsage(const VkBufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo)1784 uint32_t getBufferCollectionConstraintsVulkanBufferUsage(
1785 const VkBufferConstraintsInfoFUCHSIA* pBufferConstraintsInfo) {
1786 uint32_t usage = 0u;
1787 VkBufferUsageFlags bufferUsage =
1788 pBufferConstraintsInfo->pBufferCreateInfo->usage;
1789
1790 #define SetUsageBit(USAGE) \
1791 if (bufferUsage & VK_BUFFER_USAGE_##USAGE##_BIT) { \
1792 usage |= llcpp::fuchsia::sysmem::VULKAN_BUFFER_USAGE_##USAGE; \
1793 }
1794
1795 SetUsageBit(TRANSFER_SRC);
1796 SetUsageBit(TRANSFER_DST);
1797 SetUsageBit(UNIFORM_TEXEL_BUFFER);
1798 SetUsageBit(STORAGE_TEXEL_BUFFER);
1799 SetUsageBit(UNIFORM_BUFFER);
1800 SetUsageBit(STORAGE_BUFFER);
1801 SetUsageBit(INDEX_BUFFER);
1802 SetUsageBit(VERTEX_BUFFER);
1803 SetUsageBit(INDIRECT_BUFFER);
1804
1805 #undef SetUsageBit
1806 return usage;
1807 }
1808
setBufferCollectionConstraints(llcpp::fuchsia::sysmem::BufferCollection::SyncClient * collection,const VkImageCreateInfo * pImageInfo)1809 VkResult setBufferCollectionConstraints(
1810 llcpp::fuchsia::sysmem::BufferCollection::SyncClient* collection,
1811 const VkImageCreateInfo* pImageInfo) {
1812 if (pImageInfo == nullptr) {
1813 ALOGE("setBufferCollectionConstraints: pImageInfo cannot be null.");
1814 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
1815 }
1816
1817 // TODO(liyl): Currently the size only works for RGBA8 and BGRA8 images.
1818 // We should set the size based on its actual format.
1819 llcpp::fuchsia::sysmem::BufferCollectionConstraints constraints =
1820 defaultBufferCollectionConstraints(
1821 /* min_size_bytes */ pImageInfo->extent.width *
1822 pImageInfo->extent.height * 4,
1823 /* buffer_count */ 1u);
1824
1825 constraints.usage.vulkan =
1826 getBufferCollectionConstraintsVulkanImageUsage(pImageInfo);
1827
1828 // Set image format constraints for VkImage allocation.
1829 if (pImageInfo) {
1830 std::vector<VkFormat> formats{pImageInfo->format};
1831 if (pImageInfo->format == VK_FORMAT_UNDEFINED) {
1832 // This is a hack to allow the client to say it supports every
1833 // vulkan format the driver does. TODO(fxb/13247): Modify this
1834 // function to take a list of vulkan formats to use.
1835 formats = std::vector<VkFormat>{
1836 VK_FORMAT_B8G8R8A8_UNORM,
1837 VK_FORMAT_R8G8B8A8_UNORM,
1838 };
1839 }
1840 constraints.image_format_constraints_count = formats.size();
1841 uint32_t format_index = 0;
1842 for (VkFormat format : formats) {
1843 llcpp::fuchsia::sysmem::ImageFormatConstraints&
1844 image_constraints =
1845 constraints.image_format_constraints[format_index++];
1846 switch (format) {
1847 case VK_FORMAT_B8G8R8A8_SINT:
1848 case VK_FORMAT_B8G8R8A8_UNORM:
1849 case VK_FORMAT_B8G8R8A8_SRGB:
1850 case VK_FORMAT_B8G8R8A8_SNORM:
1851 case VK_FORMAT_B8G8R8A8_SSCALED:
1852 case VK_FORMAT_B8G8R8A8_USCALED:
1853 image_constraints.pixel_format.type =
1854 llcpp::fuchsia::sysmem::PixelFormatType::BGRA32;
1855 break;
1856 case VK_FORMAT_R8G8B8A8_SINT:
1857 case VK_FORMAT_R8G8B8A8_UNORM:
1858 case VK_FORMAT_R8G8B8A8_SRGB:
1859 case VK_FORMAT_R8G8B8A8_SNORM:
1860 case VK_FORMAT_R8G8B8A8_SSCALED:
1861 case VK_FORMAT_R8G8B8A8_USCALED:
1862 image_constraints.pixel_format.type =
1863 llcpp::fuchsia::sysmem::PixelFormatType::R8G8B8A8;
1864 break;
1865 default:
1866 return VK_ERROR_FORMAT_NOT_SUPPORTED;
1867 }
1868 image_constraints.color_spaces_count = 1;
1869 image_constraints.color_space[0].type =
1870 llcpp::fuchsia::sysmem::ColorSpaceType::SRGB;
1871 image_constraints.min_coded_width = pImageInfo->extent.width;
1872 image_constraints.max_coded_width = 0xfffffff;
1873 image_constraints.min_coded_height = pImageInfo->extent.height;
1874 image_constraints.max_coded_height = 0xffffffff;
1875 image_constraints.min_bytes_per_row =
1876 pImageInfo->extent.width * 4;
1877 image_constraints.max_bytes_per_row = 0xffffffff;
1878 image_constraints.max_coded_width_times_coded_height =
1879 0xffffffff;
1880 image_constraints.layers = 1;
1881 image_constraints.coded_width_divisor = 1;
1882 image_constraints.coded_height_divisor = 1;
1883 image_constraints.bytes_per_row_divisor = 1;
1884 image_constraints.start_offset_divisor = 1;
1885 image_constraints.display_width_divisor = 1;
1886 image_constraints.display_height_divisor = 1;
1887 }
1888 }
1889
1890 auto result = collection->SetConstraints(true, std::move(constraints));
1891 if (!result.ok()) {
1892 ALOGE("setBufferCollectionConstraints: SetConstraints failed: %d",
1893 result.status());
1894 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
1895 }
1896 return VK_SUCCESS;
1897 }
1898
setBufferCollectionBufferConstraints(llcpp::fuchsia::sysmem::BufferCollection::SyncClient * collection,const VkBufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo)1899 VkResult setBufferCollectionBufferConstraints(
1900 llcpp::fuchsia::sysmem::BufferCollection::SyncClient* collection,
1901 const VkBufferConstraintsInfoFUCHSIA* pBufferConstraintsInfo) {
1902 if (pBufferConstraintsInfo == nullptr) {
1903 ALOGE(
1904 "setBufferCollectionBufferConstraints: "
1905 "pBufferConstraintsInfo cannot be null.");
1906 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
1907 }
1908
1909 llcpp::fuchsia::sysmem::BufferCollectionConstraints constraints =
1910 defaultBufferCollectionConstraints(
1911 /* min_size_bytes */ pBufferConstraintsInfo->pBufferCreateInfo
1912 ->size,
1913 /* buffer_count */ pBufferConstraintsInfo->minCount);
1914 constraints.usage.vulkan =
1915 getBufferCollectionConstraintsVulkanBufferUsage(
1916 pBufferConstraintsInfo);
1917
1918 auto result = collection->SetConstraints(true, std::move(constraints));
1919 if (!result.ok()) {
1920 ALOGE("setBufferCollectionConstraints: SetConstraints failed: %d",
1921 result.status());
1922 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
1923 }
1924 return VK_SUCCESS;
1925 }
1926
on_vkSetBufferCollectionConstraintsFUCHSIA(void *,VkResult,VkDevice,VkBufferCollectionFUCHSIA collection,const VkImageCreateInfo * pImageInfo)1927 VkResult on_vkSetBufferCollectionConstraintsFUCHSIA(
1928 void*, VkResult, VkDevice,
1929 VkBufferCollectionFUCHSIA collection,
1930 const VkImageCreateInfo* pImageInfo) {
1931 auto sysmem_collection = reinterpret_cast<
1932 llcpp::fuchsia::sysmem::BufferCollection::SyncClient*>(collection);
1933 return setBufferCollectionConstraints(sysmem_collection, pImageInfo);
1934 }
1935
on_vkSetBufferCollectionBufferConstraintsFUCHSIA(void *,VkResult,VkDevice,VkBufferCollectionFUCHSIA collection,const VkBufferConstraintsInfoFUCHSIA * pBufferConstraintsInfo)1936 VkResult on_vkSetBufferCollectionBufferConstraintsFUCHSIA(
1937 void*,
1938 VkResult,
1939 VkDevice,
1940 VkBufferCollectionFUCHSIA collection,
1941 const VkBufferConstraintsInfoFUCHSIA* pBufferConstraintsInfo) {
1942 auto sysmem_collection = reinterpret_cast<
1943 llcpp::fuchsia::sysmem::BufferCollection::SyncClient*>(collection);
1944 return setBufferCollectionBufferConstraints(sysmem_collection,
1945 pBufferConstraintsInfo);
1946 }
1947
on_vkGetBufferCollectionPropertiesFUCHSIA(void *,VkResult,VkDevice device,VkBufferCollectionFUCHSIA collection,VkBufferCollectionPropertiesFUCHSIA * pProperties)1948 VkResult on_vkGetBufferCollectionPropertiesFUCHSIA(
1949 void*, VkResult,
1950 VkDevice device,
1951 VkBufferCollectionFUCHSIA collection,
1952 VkBufferCollectionPropertiesFUCHSIA* pProperties) {
1953 auto sysmem_collection = reinterpret_cast<
1954 llcpp::fuchsia::sysmem::BufferCollection::SyncClient*>(collection);
1955
1956 auto result = sysmem_collection->WaitForBuffersAllocated();
1957 if (!result.ok() || result.Unwrap()->status != ZX_OK) {
1958 ALOGE("Failed wait for allocation: %d %d", result.status(),
1959 GET_STATUS_SAFE(result, status));
1960 return VK_ERROR_INITIALIZATION_FAILED;
1961 }
1962 llcpp::fuchsia::sysmem::BufferCollectionInfo_2 info =
1963 std::move(result.Unwrap()->buffer_collection_info);
1964
1965 if (!info.settings.has_image_format_constraints) {
1966 return VK_ERROR_INITIALIZATION_FAILED;
1967 }
1968 pProperties->count = info.buffer_count;
1969
1970 AutoLock lock(mLock);
1971
1972 auto deviceIt = info_VkDevice.find(device);
1973
1974 if (deviceIt == info_VkDevice.end()) {
1975 return VK_ERROR_INITIALIZATION_FAILED;
1976 }
1977
1978 auto& deviceInfo = deviceIt->second;
1979
1980 // Device local memory type supported.
1981 pProperties->memoryTypeBits = 0;
1982 for (uint32_t i = 0; i < deviceInfo.memProps.memoryTypeCount; ++i) {
1983 if (deviceInfo.memProps.memoryTypes[i].propertyFlags &
1984 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) {
1985 pProperties->memoryTypeBits |= 1ull << i;
1986 }
1987 }
1988 return VK_SUCCESS;
1989 }
1990 #endif
1991
    // Returns the index of a host memory block in |blocks| that can
    // sub-allocate |pAllocateInfo->allocationSize| bytes, allocating and
    // mapping a fresh block on the host when no existing block fits.
    //
    // Caller must hold mLock (hence the "Locked" suffix). The lock is
    // temporarily dropped around encoder calls, which round-trip to the
    // host.
    //
    // Returns INVALID_HOST_MEM_BLOCK if host allocation or direct mapping
    // fails; failed blocks are left in |blocks| with initResult recording
    // the error so they are skipped by future searches.
    HostMemBlockIndex getOrAllocateHostMemBlockLocked(
        HostMemBlocks& blocks,
        const VkMemoryAllocateInfo* pAllocateInfo,
        VkEncoder* enc,
        VkDevice device,
        const VkDevice_Info& deviceInfo) {

        HostMemBlockIndex res = 0;
        bool found = false;

        while (!found) {
            // First pass: reuse any successfully-initialized block with
            // enough free space.
            for (HostMemBlockIndex i = 0; i < blocks.size(); ++i) {
                if (blocks[i].initialized &&
                    blocks[i].initResult == VK_SUCCESS &&
                    canSubAlloc(
                        blocks[i].subAlloc,
                        pAllocateInfo->allocationSize)) {
                    res = i;
                    found = true;
                    return res;
                }
            }

            blocks.push_back({});

            auto& hostMemAlloc = blocks.back();

            // Uninitialized block; allocate on host.
            static constexpr VkDeviceSize oneMb = 1048576;
            static constexpr VkDeviceSize kDefaultHostMemBlockSize =
                16 * oneMb; // 16 mb
            // Round the request up to a whole number of MB so the block can
            // serve future sub-allocations too.
            VkDeviceSize roundedUpAllocSize =
                oneMb * ((pAllocateInfo->allocationSize + oneMb - 1) / oneMb);

            VkDeviceSize virtualHeapSize = VIRTUAL_HOST_VISIBLE_HEAP_SIZE;

            // Block size: the default (capped by the virtual heap size),
            // but never smaller than the rounded-up request.
            VkDeviceSize blockSizeNeeded =
                std::max(roundedUpAllocSize,
                        std::min(virtualHeapSize,
                            kDefaultHostMemBlockSize));

            VkMemoryAllocateInfo allocInfoForHost = *pAllocateInfo;

            allocInfoForHost.allocationSize = blockSizeNeeded;

            // TODO: Support dedicated/external host visible allocation
            allocInfoForHost.pNext = nullptr;

            // Drop the lock while encoding: vkAllocateMemory round-trips to
            // the host.
            mLock.unlock();
            VkResult host_res =
                enc->vkAllocateMemory(
                    device,
                    &allocInfoForHost,
                    nullptr,
                    &hostMemAlloc.memory);
            mLock.lock();

            if (host_res != VK_SUCCESS) {
                ALOGE("Could not allocate backing for virtual host visible memory: %d",
                      host_res);
                // Record the failure so the search loop skips this block.
                hostMemAlloc.initialized = true;
                hostMemAlloc.initResult = host_res;
                return INVALID_HOST_MEM_BLOCK;
            }

            // Bookkeeping for the block's VkDeviceMemory handle.
            auto& hostMemInfo = info_VkDeviceMemory[hostMemAlloc.memory];
            hostMemInfo.allocationSize = allocInfoForHost.allocationSize;
            VkDeviceSize nonCoherentAtomSize =
                deviceInfo.props.limits.nonCoherentAtomSize;
            hostMemInfo.mappedSize = hostMemInfo.allocationSize;
            hostMemInfo.memoryTypeIndex =
                pAllocateInfo->memoryTypeIndex;
            hostMemAlloc.nonCoherentAtomSize = nonCoherentAtomSize;

            uint64_t directMappedAddr = 0;


            // Obtain a guest-visible mapping of the host allocation, either
            // via the direct-mem extension or via a virtio-gpu blob
            // resource.
            VkResult directMapResult = VK_SUCCESS;
            if (mFeatureInfo->hasDirectMem) {
                mLock.unlock();
                directMapResult =
                    enc->vkMapMemoryIntoAddressSpaceGOOGLE(
                        device, hostMemAlloc.memory, &directMappedAddr);
                mLock.lock();
            } else if (mFeatureInfo->hasVirtioGpuNext) {
#if !defined(HOST_BUILD) && defined(VK_USE_PLATFORM_ANDROID_KHR)
            // hvaSizeId: {host virtual address offset, size, blob id}.
            uint64_t hvaSizeId[3];

            mLock.unlock();
            enc->vkGetMemoryHostAddressInfoGOOGLE(
                device, hostMemAlloc.memory,
                &hvaSizeId[0], &hvaSizeId[1], &hvaSizeId[2]);
            ALOGD("%s: hvaOff, size: 0x%llx 0x%llx id: 0x%llx\n", __func__,
                  (unsigned long long)hvaSizeId[0],
                  (unsigned long long)hvaSizeId[1],
                  (unsigned long long)hvaSizeId[2]);
            mLock.lock();

            // Create a mappable virtio-gpu blob resource backed by the
            // host allocation, then mmap it into the guest.
            struct drm_virtgpu_resource_create_blob drm_rc_blob = { 0 };
            drm_rc_blob.blob_mem = VIRTGPU_BLOB_MEM_HOST;
            drm_rc_blob.blob_flags = VIRTGPU_BLOB_FLAG_MAPPABLE;
            drm_rc_blob.blob_id = hvaSizeId[2];
            drm_rc_blob.size = hvaSizeId[1];

            int res = drmIoctl(
                mRendernodeFd, DRM_IOCTL_VIRTGPU_RESOURCE_CREATE_BLOB, &drm_rc_blob);

            if (res) {
                ALOGE("%s: Failed to resource create v2: sterror: %s errno: %d\n", __func__,
                      strerror(errno), errno);
                abort();
            }

            struct drm_virtgpu_map map_info = {
                .handle = drm_rc_blob.bo_handle,
            };

            res = drmIoctl(mRendernodeFd, DRM_IOCTL_VIRTGPU_MAP, &map_info);
            if (res) {
                ALOGE("%s: Failed to virtgpu map: sterror: %s errno: %d\n", __func__,
                      strerror(errno), errno);
                abort();
            }

            directMappedAddr = (uint64_t)(uintptr_t)
                mmap64(0, hvaSizeId[1], PROT_WRITE, MAP_SHARED, mRendernodeFd, map_info.offset);

            if (!directMappedAddr) {
                ALOGE("%s: mmap of virtio gpu resource failed\n", __func__);
                abort();
            }

            // add the host's page offset
            directMappedAddr += (uint64_t)(uintptr_t)(hvaSizeId[0]) & (PAGE_SIZE - 1);
            directMapResult = VK_SUCCESS;
#endif // VK_USE_PLATFORM_ANDROID_KHR
            }

            if (directMapResult != VK_SUCCESS) {
                // Mapping failed: mark the block failed and release the
                // host-side allocation (lock dropped around the encode).
                hostMemAlloc.initialized = true;
                hostMemAlloc.initResult = directMapResult;
                mLock.unlock();
                enc->vkFreeMemory(device, hostMemAlloc.memory, nullptr);
                mLock.lock();
                return INVALID_HOST_MEM_BLOCK;
            }

            hostMemInfo.mappedPtr =
                (uint8_t*)(uintptr_t)directMappedAddr;
            hostMemInfo.virtualHostVisibleBacking = true;

            // Set up the sub-allocator over the mapped range; on success
            // the next loop iteration finds and returns this block.
            VkResult hostMemAllocRes =
                finishHostMemAllocInit(
                    enc,
                    device,
                    pAllocateInfo->memoryTypeIndex,
                    nonCoherentAtomSize,
                    hostMemInfo.allocationSize,
                    hostMemInfo.mappedSize,
                    hostMemInfo.mappedPtr,
                    &hostMemAlloc);

            if (hostMemAllocRes != VK_SUCCESS) {
                return INVALID_HOST_MEM_BLOCK;
            }
        }

        // unreachable, but we need to make Werror happy
        return INVALID_HOST_MEM_BLOCK;
    }
2162
on_vkAllocateMemory(void * context,VkResult input_result,VkDevice device,const VkMemoryAllocateInfo * pAllocateInfo,const VkAllocationCallbacks * pAllocator,VkDeviceMemory * pMemory)2163 VkResult on_vkAllocateMemory(
2164 void* context,
2165 VkResult input_result,
2166 VkDevice device,
2167 const VkMemoryAllocateInfo* pAllocateInfo,
2168 const VkAllocationCallbacks* pAllocator,
2169 VkDeviceMemory* pMemory) {
2170
2171 if (input_result != VK_SUCCESS) return input_result;
2172
2173 VkEncoder* enc = (VkEncoder*)context;
2174
2175 VkMemoryAllocateInfo finalAllocInfo = vk_make_orphan_copy(*pAllocateInfo);
2176 vk_struct_chain_iterator structChainIter = vk_make_chain_iterator(&finalAllocInfo);
2177
2178 VkMemoryDedicatedAllocateInfo dedicatedAllocInfo;
2179 VkImportColorBufferGOOGLE importCbInfo = {
2180 VK_STRUCTURE_TYPE_IMPORT_COLOR_BUFFER_GOOGLE, 0,
2181 };
2182 VkImportBufferGOOGLE importBufferInfo = {
2183 VK_STRUCTURE_TYPE_IMPORT_BUFFER_GOOGLE,
2184 0,
2185 };
2186 // VkImportPhysicalAddressGOOGLE importPhysAddrInfo = {
2187 // VK_STRUCTURE_TYPE_IMPORT_PHYSICAL_ADDRESS_GOOGLE, 0,
2188 // };
2189
2190 const VkExportMemoryAllocateInfo* exportAllocateInfoPtr =
2191 vk_find_struct<VkExportMemoryAllocateInfo>(pAllocateInfo);
2192
2193 const VkImportAndroidHardwareBufferInfoANDROID* importAhbInfoPtr =
2194 vk_find_struct<VkImportAndroidHardwareBufferInfoANDROID>(pAllocateInfo);
2195
2196 const VkImportMemoryBufferCollectionFUCHSIA* importBufferCollectionInfoPtr =
2197 vk_find_struct<VkImportMemoryBufferCollectionFUCHSIA>(pAllocateInfo);
2198
2199 const VkImportMemoryZirconHandleInfoFUCHSIA* importVmoInfoPtr =
2200 vk_find_struct<VkImportMemoryZirconHandleInfoFUCHSIA>(pAllocateInfo);
2201
2202 const VkMemoryDedicatedAllocateInfo* dedicatedAllocInfoPtr =
2203 vk_find_struct<VkMemoryDedicatedAllocateInfo>(pAllocateInfo);
2204
2205 bool shouldPassThroughDedicatedAllocInfo =
2206 !exportAllocateInfoPtr &&
2207 !importAhbInfoPtr &&
2208 !importBufferCollectionInfoPtr &&
2209 !importVmoInfoPtr &&
2210 !isHostVisibleMemoryTypeIndexForGuest(
2211 &mHostVisibleMemoryVirtInfo,
2212 pAllocateInfo->memoryTypeIndex);
2213
2214 if (!exportAllocateInfoPtr &&
2215 (importAhbInfoPtr || importBufferCollectionInfoPtr || importVmoInfoPtr) &&
2216 dedicatedAllocInfoPtr &&
2217 isHostVisibleMemoryTypeIndexForGuest(
2218 &mHostVisibleMemoryVirtInfo,
2219 pAllocateInfo->memoryTypeIndex)) {
2220 ALOGE("FATAL: It is not yet supported to import-allocate "
2221 "external memory that is both host visible and dedicated.");
2222 abort();
2223 }
2224
2225 if (shouldPassThroughDedicatedAllocInfo &&
2226 dedicatedAllocInfoPtr) {
2227 dedicatedAllocInfo = vk_make_orphan_copy(*dedicatedAllocInfoPtr);
2228 vk_append_struct(&structChainIter, &dedicatedAllocInfo);
2229 }
2230
2231 // State needed for import/export.
2232 bool exportAhb = false;
2233 bool exportVmo = false;
2234 bool importAhb = false;
2235 bool importBufferCollection = false;
2236 bool importVmo = false;
2237 (void)exportVmo;
2238
2239 // Even if we export allocate, the underlying operation
2240 // for the host is always going to be an import operation.
2241 // This is also how Intel's implementation works,
2242 // and is generally simpler;
2243 // even in an export allocation,
2244 // we perform AHardwareBuffer allocation
2245 // on the guest side, at this layer,
2246 // and then we attach a new VkDeviceMemory
2247 // to the AHardwareBuffer on the host via an "import" operation.
2248 AHardwareBuffer* ahw = nullptr;
2249
2250 if (exportAllocateInfoPtr) {
2251 exportAhb =
2252 exportAllocateInfoPtr->handleTypes &
2253 VK_EXTERNAL_MEMORY_HANDLE_TYPE_ANDROID_HARDWARE_BUFFER_BIT_ANDROID;
2254 exportVmo =
2255 exportAllocateInfoPtr->handleTypes &
2256 VK_EXTERNAL_MEMORY_HANDLE_TYPE_TEMP_ZIRCON_VMO_BIT_FUCHSIA;
2257 } else if (importAhbInfoPtr) {
2258 importAhb = true;
2259 } else if (importBufferCollectionInfoPtr) {
2260 importBufferCollection = true;
2261 } else if (importVmoInfoPtr) {
2262 importVmo = true;
2263 }
2264
2265 if (exportAhb) {
2266 bool hasDedicatedImage = dedicatedAllocInfoPtr &&
2267 (dedicatedAllocInfoPtr->image != VK_NULL_HANDLE);
2268 bool hasDedicatedBuffer = dedicatedAllocInfoPtr &&
2269 (dedicatedAllocInfoPtr->buffer != VK_NULL_HANDLE);
2270 VkExtent3D imageExtent = { 0, 0, 0 };
2271 uint32_t imageLayers = 0;
2272 VkFormat imageFormat = VK_FORMAT_UNDEFINED;
2273 VkImageUsageFlags imageUsage = 0;
2274 VkImageCreateFlags imageCreateFlags = 0;
2275 VkDeviceSize bufferSize = 0;
2276 VkDeviceSize allocationInfoAllocSize =
2277 finalAllocInfo.allocationSize;
2278
2279 if (hasDedicatedImage) {
2280 AutoLock lock(mLock);
2281
2282 auto it = info_VkImage.find(
2283 dedicatedAllocInfoPtr->image);
2284 if (it == info_VkImage.end()) return VK_ERROR_INITIALIZATION_FAILED;
2285 const auto& info = it->second;
2286 const auto& imgCi = info.createInfo;
2287
2288 imageExtent = imgCi.extent;
2289 imageLayers = imgCi.arrayLayers;
2290 imageFormat = imgCi.format;
2291 imageUsage = imgCi.usage;
2292 imageCreateFlags = imgCi.flags;
2293 }
2294
2295 if (hasDedicatedBuffer) {
2296 AutoLock lock(mLock);
2297
2298 auto it = info_VkBuffer.find(
2299 dedicatedAllocInfoPtr->buffer);
2300 if (it == info_VkBuffer.end()) return VK_ERROR_INITIALIZATION_FAILED;
2301 const auto& info = it->second;
2302 const auto& bufCi = info.createInfo;
2303
2304 bufferSize = bufCi.size;
2305 }
2306
2307 VkResult ahbCreateRes =
2308 createAndroidHardwareBuffer(
2309 hasDedicatedImage,
2310 hasDedicatedBuffer,
2311 imageExtent,
2312 imageLayers,
2313 imageFormat,
2314 imageUsage,
2315 imageCreateFlags,
2316 bufferSize,
2317 allocationInfoAllocSize,
2318 &ahw);
2319
2320 if (ahbCreateRes != VK_SUCCESS) {
2321 return ahbCreateRes;
2322 }
2323 }
2324
2325 if (importAhb) {
2326 ahw = importAhbInfoPtr->buffer;
2327 // We still need to acquire the AHardwareBuffer.
2328 importAndroidHardwareBuffer(
2329 mThreadingCallbacks.hostConnectionGetFunc()->grallocHelper(),
2330 importAhbInfoPtr, nullptr);
2331 }
2332
2333 if (ahw) {
2334 ALOGD("%s: Import AHardwareBuffer", __func__);
2335 importCbInfo.colorBuffer =
2336 mThreadingCallbacks.hostConnectionGetFunc()->grallocHelper()->
2337 getHostHandle(AHardwareBuffer_getNativeHandle(ahw));
2338 vk_append_struct(&structChainIter, &importCbInfo);
2339 }
2340
2341 zx_handle_t vmo_handle = ZX_HANDLE_INVALID;
2342
2343 if (importBufferCollection) {
2344
2345 #ifdef VK_USE_PLATFORM_FUCHSIA
2346 auto collection = reinterpret_cast<
2347 llcpp::fuchsia::sysmem::BufferCollection::SyncClient*>(
2348 importBufferCollectionInfoPtr->collection);
2349 auto result = collection->WaitForBuffersAllocated();
2350 if (!result.ok() || result.Unwrap()->status != ZX_OK) {
2351 ALOGE("WaitForBuffersAllocated failed: %d %d", result.status(),
2352 GET_STATUS_SAFE(result, status));
2353 return VK_ERROR_INITIALIZATION_FAILED;
2354 }
2355 llcpp::fuchsia::sysmem::BufferCollectionInfo_2& info =
2356 result.Unwrap()->buffer_collection_info;
2357 uint32_t index = importBufferCollectionInfoPtr->index;
2358 if (info.buffer_count < index) {
2359 ALOGE("Invalid buffer index: %d %d", index);
2360 return VK_ERROR_INITIALIZATION_FAILED;
2361 }
2362 vmo_handle = info.buffers[index].vmo.release();
2363 #endif
2364
2365 }
2366
2367 if (importVmo) {
2368 vmo_handle = importVmoInfoPtr->handle;
2369 }
2370
2371 #ifdef VK_USE_PLATFORM_FUCHSIA
2372 if (exportVmo) {
2373 bool hasDedicatedImage = dedicatedAllocInfoPtr &&
2374 (dedicatedAllocInfoPtr->image != VK_NULL_HANDLE);
2375 bool hasDedicatedBuffer =
2376 dedicatedAllocInfoPtr &&
2377 (dedicatedAllocInfoPtr->buffer != VK_NULL_HANDLE);
2378
2379 if (hasDedicatedImage && hasDedicatedBuffer) {
2380 ALOGE(
2381 "Invalid VkMemoryDedicatedAllocationInfo: At least one "
2382 "of image and buffer must be VK_NULL_HANDLE.");
2383 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
2384 }
2385
2386 const VkImageCreateInfo* pImageCreateInfo = nullptr;
2387
2388 VkBufferConstraintsInfoFUCHSIA bufferConstraintsInfo = {
2389 .sType =
2390 VK_STRUCTURE_TYPE_BUFFER_COLLECTION_CREATE_INFO_FUCHSIA,
2391 .pNext = nullptr,
2392 .pBufferCreateInfo = nullptr,
2393 .requiredFormatFeatures = 0,
2394 .minCount = 1,
2395 };
2396 const VkBufferConstraintsInfoFUCHSIA* pBufferConstraintsInfo =
2397 nullptr;
2398
2399 if (hasDedicatedImage) {
2400 AutoLock lock(mLock);
2401
2402 auto it = info_VkImage.find(dedicatedAllocInfoPtr->image);
2403 if (it == info_VkImage.end()) return VK_ERROR_INITIALIZATION_FAILED;
2404 const auto& imageInfo = it->second;
2405
2406 pImageCreateInfo = &imageInfo.createInfo;
2407 }
2408
2409 if (hasDedicatedBuffer) {
2410 AutoLock lock(mLock);
2411
2412 auto it = info_VkBuffer.find(dedicatedAllocInfoPtr->buffer);
2413 if (it == info_VkBuffer.end())
2414 return VK_ERROR_INITIALIZATION_FAILED;
2415 const auto& bufferInfo = it->second;
2416
2417 bufferConstraintsInfo.pBufferCreateInfo =
2418 &bufferInfo.createInfo;
2419 pBufferConstraintsInfo = &bufferConstraintsInfo;
2420 }
2421
2422 hasDedicatedImage = hasDedicatedImage &&
2423 getBufferCollectionConstraintsVulkanImageUsage(
2424 pImageCreateInfo);
2425 hasDedicatedBuffer =
2426 hasDedicatedBuffer &&
2427 getBufferCollectionConstraintsVulkanBufferUsage(
2428 pBufferConstraintsInfo);
2429
2430 if (hasDedicatedImage || hasDedicatedBuffer) {
2431 zx::channel token_server, token_client;
2432 zx_status_t status =
2433 zx::channel::create(0, &token_server, &token_client);
2434 if (status != ZX_OK) {
2435 ALOGE("zx_channel_create failed: %d", status);
2436 abort();
2437 }
2438
2439 {
2440 auto result = mSysmemAllocator->AllocateSharedCollection(
2441 std::move(token_server));
2442 if (!result.ok()) {
2443 ALOGE("AllocateSharedCollection failed: %d",
2444 result.status());
2445 abort();
2446 }
2447 }
2448
2449 zx::channel collection_server, collection_client;
2450 status = zx::channel::create(0, &collection_server,
2451 &collection_client);
2452 if (status != ZX_OK) {
2453 ALOGE("zx_channel_create failed: %d", status);
2454 abort();
2455 }
2456
2457 {
2458 auto result = mSysmemAllocator->BindSharedCollection(
2459 std::move(token_client), std::move(collection_server));
2460 if (!result.ok()) {
2461 ALOGE("BindSharedCollection failed: %d",
2462 result.status());
2463 abort();
2464 }
2465 }
2466
2467 llcpp::fuchsia::sysmem::BufferCollection::SyncClient collection(
2468 std::move(collection_client));
2469 if (hasDedicatedImage) {
2470 VkResult res = setBufferCollectionConstraints(
2471 &collection, pImageCreateInfo);
2472 if (res != VK_SUCCESS) {
2473 ALOGE("setBufferCollectionConstraints failed: %d", res);
2474 abort();
2475 }
2476 }
2477
2478 if (hasDedicatedBuffer) {
2479 VkResult res = setBufferCollectionBufferConstraints(
2480 &collection, pBufferConstraintsInfo);
2481 if (res != VK_SUCCESS) {
2482 ALOGE("setBufferCollectionBufferConstraints failed: %d",
2483 res);
2484 abort();
2485 }
2486 }
2487
2488 {
2489 auto result = collection.WaitForBuffersAllocated();
2490 if (result.ok() && result.Unwrap()->status == ZX_OK) {
2491 llcpp::fuchsia::sysmem::BufferCollectionInfo_2& info =
2492 result.Unwrap()->buffer_collection_info;
2493 if (!info.buffer_count) {
2494 ALOGE(
2495 "WaitForBuffersAllocated returned "
2496 "invalid count: %d",
2497 info.buffer_count);
2498 abort();
2499 }
2500 vmo_handle = info.buffers[0].vmo.release();
2501 } else {
2502 ALOGE("WaitForBuffersAllocated failed: %d %d",
2503 result.status(), GET_STATUS_SAFE(result, status));
2504 abort();
2505 }
2506 }
2507
2508 collection.Close();
2509
2510 zx::vmo vmo_copy;
2511 status = zx_handle_duplicate(vmo_handle, ZX_RIGHT_SAME_RIGHTS,
2512 vmo_copy.reset_and_get_address());
2513 if (status != ZX_OK) {
2514 ALOGE("Failed to duplicate VMO: %d", status);
2515 abort();
2516 }
2517
2518 if (pImageCreateInfo) {
2519 llcpp::fuchsia::hardware::goldfish::ColorBufferFormatType
2520 format;
2521 switch (pImageCreateInfo->format) {
2522 case VK_FORMAT_B8G8R8A8_SINT:
2523 case VK_FORMAT_B8G8R8A8_UNORM:
2524 case VK_FORMAT_B8G8R8A8_SRGB:
2525 case VK_FORMAT_B8G8R8A8_SNORM:
2526 case VK_FORMAT_B8G8R8A8_SSCALED:
2527 case VK_FORMAT_B8G8R8A8_USCALED:
2528 format = llcpp::fuchsia::hardware::goldfish::
2529 ColorBufferFormatType::BGRA;
2530 break;
2531 case VK_FORMAT_R8G8B8A8_SINT:
2532 case VK_FORMAT_R8G8B8A8_UNORM:
2533 case VK_FORMAT_R8G8B8A8_SRGB:
2534 case VK_FORMAT_R8G8B8A8_SNORM:
2535 case VK_FORMAT_R8G8B8A8_SSCALED:
2536 case VK_FORMAT_R8G8B8A8_USCALED:
2537 format = llcpp::fuchsia::hardware::goldfish::
2538 ColorBufferFormatType::RGBA;
2539 break;
2540 default:
2541 ALOGE("Unsupported format: %d",
2542 pImageCreateInfo->format);
2543 abort();
2544 }
2545
2546 auto result = mControlDevice->CreateColorBuffer(
2547 std::move(vmo_copy), pImageCreateInfo->extent.width,
2548 pImageCreateInfo->extent.height, format);
2549 if (!result.ok() || result.Unwrap()->res != ZX_OK) {
2550 ALOGE("CreateColorBuffer failed: %d:%d",
2551 result.status(), GET_STATUS_SAFE(result, res));
2552 abort();
2553 }
2554 }
2555
2556 if (pBufferConstraintsInfo) {
2557 auto result = mControlDevice->CreateBuffer(
2558 std::move(vmo_copy),
2559 pBufferConstraintsInfo->pBufferCreateInfo->size);
2560 if (!result.ok() || result.Unwrap()->res != ZX_OK) {
2561 ALOGE("CreateBuffer failed: %d:%d", result.status(),
2562 GET_STATUS_SAFE(result, res));
2563 abort();
2564 }
2565 }
2566 }
2567 }
2568
2569 if (vmo_handle != ZX_HANDLE_INVALID) {
2570 zx::vmo vmo_copy;
2571 zx_status_t status = zx_handle_duplicate(vmo_handle,
2572 ZX_RIGHT_SAME_RIGHTS,
2573 vmo_copy.reset_and_get_address());
2574 if (status != ZX_OK) {
2575 ALOGE("Failed to duplicate VMO: %d", status);
2576 abort();
2577 }
2578 zx_status_t status2 = ZX_OK;
2579
2580 auto result = mControlDevice->GetBufferHandle(std::move(vmo_copy));
2581 if (!result.ok() || result.Unwrap()->res != ZX_OK) {
2582 ALOGE("GetBufferHandle failed: %d:%d", result.status(),
2583 GET_STATUS_SAFE(result, res));
2584 } else {
2585 llcpp::fuchsia::hardware::goldfish::BufferHandleType
2586 handle_type = result.Unwrap()->type;
2587 uint32_t buffer_handle = result.Unwrap()->id;
2588
2589 if (handle_type == llcpp::fuchsia::hardware::goldfish::
2590 BufferHandleType::BUFFER) {
2591 importBufferInfo.buffer = buffer_handle;
2592 vk_append_struct(&structChainIter, &importBufferInfo);
2593 } else {
2594 importCbInfo.colorBuffer = buffer_handle;
2595 vk_append_struct(&structChainIter, &importCbInfo);
2596 }
2597 }
2598 }
2599 #endif
2600
2601 if (!isHostVisibleMemoryTypeIndexForGuest(
2602 &mHostVisibleMemoryVirtInfo,
2603 finalAllocInfo.memoryTypeIndex)) {
2604 input_result =
2605 enc->vkAllocateMemory(
2606 device, &finalAllocInfo, pAllocator, pMemory);
2607
2608 if (input_result != VK_SUCCESS) return input_result;
2609
2610 VkDeviceSize allocationSize = finalAllocInfo.allocationSize;
2611 setDeviceMemoryInfo(
2612 device, *pMemory,
2613 finalAllocInfo.allocationSize,
2614 0, nullptr,
2615 finalAllocInfo.memoryTypeIndex,
2616 ahw,
2617 vmo_handle);
2618
2619 return VK_SUCCESS;
2620 }
2621
2622 // Device-local memory dealing is over. What follows:
2623 // host-visible memory.
2624
2625 if (ahw) {
2626 ALOGE("%s: Host visible export/import allocation "
2627 "of Android hardware buffers is not supported.",
2628 __func__);
2629 abort();
2630 }
2631
2632 if (vmo_handle != ZX_HANDLE_INVALID) {
2633 ALOGE("%s: Host visible export/import allocation "
2634 "of VMO is not supported yet.",
2635 __func__);
2636 abort();
2637 }
2638
2639 // Host visible memory, non external
2640 bool directMappingSupported = usingDirectMapping();
2641 if (!directMappingSupported) {
2642 input_result =
2643 enc->vkAllocateMemory(
2644 device, &finalAllocInfo, pAllocator, pMemory);
2645
2646 if (input_result != VK_SUCCESS) return input_result;
2647
2648 VkDeviceSize mappedSize =
2649 getNonCoherentExtendedSize(device,
2650 finalAllocInfo.allocationSize);
2651 uint8_t* mappedPtr = (uint8_t*)aligned_buf_alloc(4096, mappedSize);
2652 D("host visible alloc (non-direct): "
2653 "size 0x%llx host ptr %p mapped size 0x%llx",
2654 (unsigned long long)finalAllocInfo.allocationSize, mappedPtr,
2655 (unsigned long long)mappedSize);
2656 setDeviceMemoryInfo(
2657 device, *pMemory,
2658 finalAllocInfo.allocationSize,
2659 mappedSize, mappedPtr,
2660 finalAllocInfo.memoryTypeIndex);
2661 return VK_SUCCESS;
2662 }
2663
2664 // Host visible memory with direct mapping via
2665 // VkImportPhysicalAddressGOOGLE
2666 // if (importPhysAddr) {
2667 // vkAllocateMemory(device, &finalAllocInfo, pAllocator, pMemory);
2668 // host maps the host pointer to the guest physical address
2669 // TODO: the host side page offset of the
2670 // host pointer needs to be returned somehow.
2671 // }
2672
2673 // Host visible memory with direct mapping
2674 AutoLock lock(mLock);
2675
2676 auto it = info_VkDevice.find(device);
2677 if (it == info_VkDevice.end()) return VK_ERROR_DEVICE_LOST;
2678 auto& deviceInfo = it->second;
2679
2680 auto& hostMemBlocksForTypeIndex =
2681 deviceInfo.hostMemBlocks[finalAllocInfo.memoryTypeIndex];
2682
2683 HostMemBlockIndex blockIndex =
2684 getOrAllocateHostMemBlockLocked(
2685 hostMemBlocksForTypeIndex,
2686 &finalAllocInfo,
2687 enc,
2688 device,
2689 deviceInfo);
2690
2691 if (blockIndex == (HostMemBlockIndex) INVALID_HOST_MEM_BLOCK) {
2692 return VK_ERROR_OUT_OF_HOST_MEMORY;
2693 }
2694
2695 VkDeviceMemory_Info virtualMemInfo;
2696
2697 subAllocHostMemory(
2698 &hostMemBlocksForTypeIndex[blockIndex],
2699 &finalAllocInfo,
2700 &virtualMemInfo.subAlloc);
2701
2702 virtualMemInfo.allocationSize = virtualMemInfo.subAlloc.subAllocSize;
2703 virtualMemInfo.mappedSize = virtualMemInfo.subAlloc.subMappedSize;
2704 virtualMemInfo.mappedPtr = virtualMemInfo.subAlloc.mappedPtr;
2705 virtualMemInfo.memoryTypeIndex = finalAllocInfo.memoryTypeIndex;
2706 virtualMemInfo.directMapped = true;
2707
2708 D("host visible alloc (direct, suballoc): "
2709 "size 0x%llx ptr %p mapped size 0x%llx",
2710 (unsigned long long)virtualMemInfo.allocationSize, virtualMemInfo.mappedPtr,
2711 (unsigned long long)virtualMemInfo.mappedSize);
2712
2713 info_VkDeviceMemory[
2714 virtualMemInfo.subAlloc.subMemory] = virtualMemInfo;
2715
2716 *pMemory = virtualMemInfo.subAlloc.subMemory;
2717
2718 return VK_SUCCESS;
2719 }
2720
on_vkFreeMemory(void * context,VkDevice device,VkDeviceMemory memory,const VkAllocationCallbacks * pAllocateInfo)2721 void on_vkFreeMemory(
2722 void* context,
2723 VkDevice device,
2724 VkDeviceMemory memory,
2725 const VkAllocationCallbacks* pAllocateInfo) {
2726
2727 AutoLock lock(mLock);
2728
2729 auto it = info_VkDeviceMemory.find(memory);
2730 if (it == info_VkDeviceMemory.end()) return;
2731 auto& info = it->second;
2732
2733 if (!info.directMapped) {
2734 lock.unlock();
2735 VkEncoder* enc = (VkEncoder*)context;
2736 enc->vkFreeMemory(device, memory, pAllocateInfo);
2737 return;
2738 }
2739
2740 subFreeHostMemory(&info.subAlloc);
2741 }
2742
on_vkMapMemory(void *,VkResult host_result,VkDevice,VkDeviceMemory memory,VkDeviceSize offset,VkDeviceSize size,VkMemoryMapFlags,void ** ppData)2743 VkResult on_vkMapMemory(
2744 void*,
2745 VkResult host_result,
2746 VkDevice,
2747 VkDeviceMemory memory,
2748 VkDeviceSize offset,
2749 VkDeviceSize size,
2750 VkMemoryMapFlags,
2751 void** ppData) {
2752
2753 if (host_result != VK_SUCCESS) return host_result;
2754
2755 AutoLock lock(mLock);
2756
2757 auto it = info_VkDeviceMemory.find(memory);
2758 if (it == info_VkDeviceMemory.end()) return VK_ERROR_MEMORY_MAP_FAILED;
2759
2760 auto& info = it->second;
2761
2762 if (!info.mappedPtr) return VK_ERROR_MEMORY_MAP_FAILED;
2763
2764 if (size != VK_WHOLE_SIZE &&
2765 (info.mappedPtr + offset + size > info.mappedPtr + info.allocationSize)) {
2766 return VK_ERROR_MEMORY_MAP_FAILED;
2767 }
2768
2769 *ppData = info.mappedPtr + offset;
2770
2771 return host_result;
2772 }
2773
    // Guest-side vkUnmapMemory.
    void on_vkUnmapMemory(
        void*,
        VkDevice,
        VkDeviceMemory) {
        // no-op: mappings handed out by on_vkMapMemory point into
        // persistent backing that stays mapped, so there is nothing to
        // tear down here.
    }
2780
transformNonExternalResourceMemoryTypeBitsForGuest(uint32_t hostBits)2781 uint32_t transformNonExternalResourceMemoryTypeBitsForGuest(
2782 uint32_t hostBits) {
2783 uint32_t res = 0;
2784 for (uint32_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i) {
2785 if (hostBits & (1 << i)) {
2786 res |= (1 << i);
2787 }
2788 }
2789 return res;
2790 }
2791
transformExternalResourceMemoryTypeBitsForGuest(uint32_t normalBits)2792 uint32_t transformExternalResourceMemoryTypeBitsForGuest(
2793 uint32_t normalBits) {
2794 uint32_t res = 0;
2795 for (uint32_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i) {
2796 if (normalBits & (1 << i) &&
2797 !isHostVisibleMemoryTypeIndexForGuest(
2798 &mHostVisibleMemoryVirtInfo, i)) {
2799 res |= (1 << i);
2800 }
2801 }
2802 return res;
2803 }
2804
transformNonExternalResourceMemoryRequirementsForGuest(VkMemoryRequirements * reqs)2805 void transformNonExternalResourceMemoryRequirementsForGuest(
2806 VkMemoryRequirements* reqs) {
2807 reqs->memoryTypeBits =
2808 transformNonExternalResourceMemoryTypeBitsForGuest(
2809 reqs->memoryTypeBits);
2810 }
2811
transformExternalResourceMemoryRequirementsForGuest(VkMemoryRequirements * reqs)2812 void transformExternalResourceMemoryRequirementsForGuest(
2813 VkMemoryRequirements* reqs) {
2814 reqs->memoryTypeBits =
2815 transformExternalResourceMemoryTypeBitsForGuest(
2816 reqs->memoryTypeBits);
2817 }
2818
transformExternalResourceMemoryDedicatedRequirementsForGuest(VkMemoryDedicatedRequirements * dedicatedReqs)2819 void transformExternalResourceMemoryDedicatedRequirementsForGuest(
2820 VkMemoryDedicatedRequirements* dedicatedReqs) {
2821 dedicatedReqs->prefersDedicatedAllocation = VK_TRUE;
2822 dedicatedReqs->requiresDedicatedAllocation = VK_TRUE;
2823 }
2824
transformImageMemoryRequirementsForGuestLocked(VkImage image,VkMemoryRequirements * reqs)2825 void transformImageMemoryRequirementsForGuestLocked(
2826 VkImage image,
2827 VkMemoryRequirements* reqs) {
2828
2829 auto it = info_VkImage.find(image);
2830 if (it == info_VkImage.end()) return;
2831
2832 auto& info = it->second;
2833
2834 if (!info.external ||
2835 !info.externalCreateInfo.handleTypes) {
2836 transformNonExternalResourceMemoryRequirementsForGuest(reqs);
2837 } else {
2838 transformExternalResourceMemoryRequirementsForGuest(reqs);
2839 }
2840 setMemoryRequirementsForSysmemBackedImage(image, reqs);
2841 }
2842
transformBufferMemoryRequirementsForGuestLocked(VkBuffer buffer,VkMemoryRequirements * reqs)2843 void transformBufferMemoryRequirementsForGuestLocked(
2844 VkBuffer buffer,
2845 VkMemoryRequirements* reqs) {
2846
2847 auto it = info_VkBuffer.find(buffer);
2848 if (it == info_VkBuffer.end()) return;
2849
2850 auto& info = it->second;
2851
2852 if (!info.external ||
2853 !info.externalCreateInfo.handleTypes) {
2854 transformNonExternalResourceMemoryRequirementsForGuest(reqs);
2855 return;
2856 }
2857
2858 transformExternalResourceMemoryRequirementsForGuest(reqs);
2859 }
2860
transformImageMemoryRequirements2ForGuest(VkImage image,VkMemoryRequirements2 * reqs2)2861 void transformImageMemoryRequirements2ForGuest(
2862 VkImage image,
2863 VkMemoryRequirements2* reqs2) {
2864
2865 AutoLock lock(mLock);
2866
2867 auto it = info_VkImage.find(image);
2868 if (it == info_VkImage.end()) return;
2869
2870 auto& info = it->second;
2871
2872 if (!info.external ||
2873 !info.externalCreateInfo.handleTypes) {
2874 transformNonExternalResourceMemoryRequirementsForGuest(
2875 &reqs2->memoryRequirements);
2876 setMemoryRequirementsForSysmemBackedImage(image, &reqs2->memoryRequirements);
2877 return;
2878 }
2879
2880 transformExternalResourceMemoryRequirementsForGuest(&reqs2->memoryRequirements);
2881
2882 setMemoryRequirementsForSysmemBackedImage(image, &reqs2->memoryRequirements);
2883
2884 VkMemoryDedicatedRequirements* dedicatedReqs =
2885 vk_find_struct<VkMemoryDedicatedRequirements>(reqs2);
2886
2887 if (!dedicatedReqs) return;
2888
2889 transformExternalResourceMemoryDedicatedRequirementsForGuest(
2890 dedicatedReqs);
2891 }
2892
transformBufferMemoryRequirements2ForGuest(VkBuffer buffer,VkMemoryRequirements2 * reqs2)2893 void transformBufferMemoryRequirements2ForGuest(
2894 VkBuffer buffer,
2895 VkMemoryRequirements2* reqs2) {
2896
2897 AutoLock lock(mLock);
2898
2899 auto it = info_VkBuffer.find(buffer);
2900 if (it == info_VkBuffer.end()) return;
2901
2902 auto& info = it->second;
2903
2904 if (!info.external ||
2905 !info.externalCreateInfo.handleTypes) {
2906 transformNonExternalResourceMemoryRequirementsForGuest(
2907 &reqs2->memoryRequirements);
2908 return;
2909 }
2910
2911 transformExternalResourceMemoryRequirementsForGuest(&reqs2->memoryRequirements);
2912
2913 VkMemoryDedicatedRequirements* dedicatedReqs =
2914 vk_find_struct<VkMemoryDedicatedRequirements>(reqs2);
2915
2916 if (!dedicatedReqs) return;
2917
2918 transformExternalResourceMemoryDedicatedRequirementsForGuest(
2919 dedicatedReqs);
2920 }
2921
// Creates an image on the host, rewriting the pNext chain for the
// guest/host split:
//  - external-memory, native-buffer, and (non-appended) external-format
//    structs are deep-copied into a local chain;
//  - on Fuchsia, sysmem buffer collections are resolved to a vmo and a
//    goldfish color buffer is created for it;
//  - when the host supports it, creation and memory-requirements query
//    are fused into one call (vkCreateImageWithRequirementsGOOGLE).
// Tracking info for the new VkImage is recorded under mLock.
VkResult on_vkCreateImage(
    void* context, VkResult,
    VkDevice device, const VkImageCreateInfo *pCreateInfo,
    const VkAllocationCallbacks *pAllocator,
    VkImage *pImage) {
    VkEncoder* enc = (VkEncoder*)context;

    // Deep-copy the create info so the pNext chain can be rebuilt with
    // only the structs the host should see.
    VkImageCreateInfo localCreateInfo = vk_make_orphan_copy(*pCreateInfo);
    vk_struct_chain_iterator structChainIter = vk_make_chain_iterator(&localCreateInfo);
    VkExternalMemoryImageCreateInfo localExtImgCi;

    const VkExternalMemoryImageCreateInfo* extImgCiPtr =
        vk_find_struct<VkExternalMemoryImageCreateInfo>(pCreateInfo);
    if (extImgCiPtr) {
        localExtImgCi = vk_make_orphan_copy(*extImgCiPtr);
        vk_append_struct(&structChainIter, &localExtImgCi);
    }

#ifdef VK_USE_PLATFORM_ANDROID_KHR
    VkNativeBufferANDROID localAnb;
    const VkNativeBufferANDROID* anbInfoPtr =
        vk_find_struct<VkNativeBufferANDROID>(pCreateInfo);
    if (anbInfoPtr) {
        localAnb = vk_make_orphan_copy(*anbInfoPtr);
        vk_append_struct(&structChainIter, &localAnb);
    }

    VkExternalFormatANDROID localExtFormatAndroid;
    const VkExternalFormatANDROID* extFormatAndroidPtr =
        vk_find_struct<VkExternalFormatANDROID>(pCreateInfo);
    if (extFormatAndroidPtr) {
        localExtFormatAndroid = vk_make_orphan_copy(*extFormatAndroidPtr);

        // Do not append external format android;
        // instead, replace the local image localCreateInfo format
        // with the corresponding Vulkan format
        if (extFormatAndroidPtr->externalFormat) {
            localCreateInfo.format =
                vk_format_from_android(extFormatAndroidPtr->externalFormat);
            // Unknown external formats cannot be created on the host.
            if (localCreateInfo.format == VK_FORMAT_UNDEFINED)
                return VK_ERROR_VALIDATION_FAILED_EXT;
        }
    }
#endif

#ifdef VK_USE_PLATFORM_FUCHSIA
    const VkBufferCollectionImageCreateInfoFUCHSIA* extBufferCollectionPtr =
        vk_find_struct<VkBufferCollectionImageCreateInfoFUCHSIA>(pCreateInfo);
    bool isSysmemBackedMemory = false;

    // A temp zircon vmo handle type also marks the image sysmem-backed.
    if (extImgCiPtr &&
        (extImgCiPtr->handleTypes &
         VK_EXTERNAL_MEMORY_HANDLE_TYPE_TEMP_ZIRCON_VMO_BIT_FUCHSIA)) {
        isSysmemBackedMemory = true;
    }

    if (extBufferCollectionPtr) {
        auto collection = reinterpret_cast<
            llcpp::fuchsia::sysmem::BufferCollection::SyncClient*>(
            extBufferCollectionPtr->collection);
        uint32_t index = extBufferCollectionPtr->index;
        zx::vmo vmo;

        llcpp::fuchsia::sysmem::BufferCollectionInfo_2 info;

        // Block until sysmem has allocated the collection, then take the
        // vmo for the requested buffer index (only if the collection has
        // image format constraints).
        auto result = collection->WaitForBuffersAllocated();
        if (result.ok() && result.Unwrap()->status == ZX_OK) {
            info = std::move(result.Unwrap()->buffer_collection_info);
            if (index < info.buffer_count && info.settings.has_image_format_constraints) {
                vmo = std::move(info.buffers[index].vmo);
            }
        } else {
            ALOGE("WaitForBuffersAllocated failed: %d %d", result.status(),
                  GET_STATUS_SAFE(result, status));
        }

        if (vmo.is_valid()) {
            // Register the vmo with the goldfish control device as a
            // color buffer; failure is logged but not fatal here.
            auto result = mControlDevice->CreateColorBuffer(
                std::move(vmo),
                info.settings.image_format_constraints.min_coded_width,
                info.settings.image_format_constraints.min_coded_height,
                info.settings.image_format_constraints.pixel_format.type ==
                        llcpp::fuchsia::sysmem::PixelFormatType::R8G8B8A8
                    ? llcpp::fuchsia::hardware::goldfish::
                          ColorBufferFormatType::RGBA
                    : llcpp::fuchsia::hardware::goldfish::
                          ColorBufferFormatType::BGRA);
            if (!result.ok() || result.Unwrap()->res != ZX_OK) {
                ALOGE("CreateColorBuffer failed: %d:%d", result.status(),
                      GET_STATUS_SAFE(result, res));
            }
        }
        isSysmemBackedMemory = true;
    }
#endif

    VkResult res;
    VkMemoryRequirements memReqs;

    // Prefer the fused create+get-requirements path when the host
    // supports it; saves a round trip for the later requirements query.
    if (supportsCreateResourcesWithRequirements()) {
        res = enc->vkCreateImageWithRequirementsGOOGLE(device, &localCreateInfo, pAllocator, pImage, &memReqs);
    } else {
        res = enc->vkCreateImage(device, &localCreateInfo, pAllocator, pImage);
    }

    if (res != VK_SUCCESS) return res;

    AutoLock lock(mLock);

    auto it = info_VkImage.find(*pImage);
    if (it == info_VkImage.end()) return VK_ERROR_INITIALIZATION_FAILED;

    auto& info = it->second;

    info.device = device;
    // Keep a copy of the original create info; drop pNext since the
    // chained structs are caller-owned and may not outlive this call.
    info.createInfo = *pCreateInfo;
    info.createInfo.pNext = nullptr;

    if (supportsCreateResourcesWithRequirements()) {
        info.baseRequirementsKnown = true;
    }

    if (extImgCiPtr) {
        info.external = true;
        info.externalCreateInfo = *extImgCiPtr;
    }

#ifdef VK_USE_PLATFORM_FUCHSIA
    if (isSysmemBackedMemory) {
        info.isSysmemBackedMemory = true;
    }
#endif

    if (info.baseRequirementsKnown) {
        // Cache the guest-transformed requirements for later queries.
        transformImageMemoryRequirementsForGuestLocked(*pImage, &memReqs);
        info.baseRequirements = memReqs;
    }
    return res;
}
3061
on_vkCreateSamplerYcbcrConversion(void * context,VkResult,VkDevice device,const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSamplerYcbcrConversion * pYcbcrConversion)3062 VkResult on_vkCreateSamplerYcbcrConversion(
3063 void* context, VkResult,
3064 VkDevice device,
3065 const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
3066 const VkAllocationCallbacks* pAllocator,
3067 VkSamplerYcbcrConversion* pYcbcrConversion) {
3068
3069 VkSamplerYcbcrConversionCreateInfo localCreateInfo = vk_make_orphan_copy(*pCreateInfo);
3070
3071 #ifdef VK_USE_PLATFORM_ANDROID_KHR
3072 const VkExternalFormatANDROID* extFormatAndroidPtr =
3073 vk_find_struct<VkExternalFormatANDROID>(pCreateInfo);
3074 if (extFormatAndroidPtr) {
3075 if (extFormatAndroidPtr->externalFormat == AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM) {
3076 // We don't support external formats on host and it causes RGB565
3077 // to fail in CtsGraphicsTestCases android.graphics.cts.BasicVulkanGpuTest
3078 // when passed as an external format.
3079 // We may consider doing this for all external formats.
3080 // See b/134771579.
3081 *pYcbcrConversion = VK_YCBCR_CONVERSION_DO_NOTHING;
3082 return VK_SUCCESS;
3083 } else if (extFormatAndroidPtr->externalFormat) {
3084 localCreateInfo.format =
3085 vk_format_from_android(extFormatAndroidPtr->externalFormat);
3086 }
3087 }
3088 #endif
3089
3090 VkEncoder* enc = (VkEncoder*)context;
3091 VkResult res = enc->vkCreateSamplerYcbcrConversion(
3092 device, &localCreateInfo, pAllocator, pYcbcrConversion);
3093
3094 if (*pYcbcrConversion == VK_YCBCR_CONVERSION_DO_NOTHING) {
3095 ALOGE("FATAL: vkCreateSamplerYcbcrConversion returned a reserved value (VK_YCBCR_CONVERSION_DO_NOTHING)");
3096 abort();
3097 }
3098 return res;
3099 }
3100
on_vkDestroySamplerYcbcrConversion(void * context,VkDevice device,VkSamplerYcbcrConversion ycbcrConversion,const VkAllocationCallbacks * pAllocator)3101 void on_vkDestroySamplerYcbcrConversion(
3102 void* context,
3103 VkDevice device,
3104 VkSamplerYcbcrConversion ycbcrConversion,
3105 const VkAllocationCallbacks* pAllocator) {
3106 VkEncoder* enc = (VkEncoder*)context;
3107 if (ycbcrConversion != VK_YCBCR_CONVERSION_DO_NOTHING) {
3108 enc->vkDestroySamplerYcbcrConversion(device, ycbcrConversion, pAllocator);
3109 }
3110 }
3111
on_vkCreateSamplerYcbcrConversionKHR(void * context,VkResult,VkDevice device,const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSamplerYcbcrConversion * pYcbcrConversion)3112 VkResult on_vkCreateSamplerYcbcrConversionKHR(
3113 void* context, VkResult,
3114 VkDevice device,
3115 const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
3116 const VkAllocationCallbacks* pAllocator,
3117 VkSamplerYcbcrConversion* pYcbcrConversion) {
3118
3119 VkSamplerYcbcrConversionCreateInfo localCreateInfo = vk_make_orphan_copy(*pCreateInfo);
3120
3121 #ifdef VK_USE_PLATFORM_ANDROID_KHR
3122 const VkExternalFormatANDROID* extFormatAndroidPtr =
3123 vk_find_struct<VkExternalFormatANDROID>(pCreateInfo);
3124 if (extFormatAndroidPtr) {
3125 if (extFormatAndroidPtr->externalFormat == AHARDWAREBUFFER_FORMAT_R5G6B5_UNORM) {
3126 // We don't support external formats on host and it causes RGB565
3127 // to fail in CtsGraphicsTestCases android.graphics.cts.BasicVulkanGpuTest
3128 // when passed as an external format.
3129 // We may consider doing this for all external formats.
3130 // See b/134771579.
3131 *pYcbcrConversion = VK_YCBCR_CONVERSION_DO_NOTHING;
3132 return VK_SUCCESS;
3133 } else if (extFormatAndroidPtr->externalFormat) {
3134 localCreateInfo.format =
3135 vk_format_from_android(extFormatAndroidPtr->externalFormat);
3136 }
3137 }
3138 #endif
3139
3140 VkEncoder* enc = (VkEncoder*)context;
3141 VkResult res = enc->vkCreateSamplerYcbcrConversionKHR(
3142 device, &localCreateInfo, pAllocator, pYcbcrConversion);
3143
3144 if (*pYcbcrConversion == VK_YCBCR_CONVERSION_DO_NOTHING) {
3145 ALOGE("FATAL: vkCreateSamplerYcbcrConversionKHR returned a reserved value (VK_YCBCR_CONVERSION_DO_NOTHING)");
3146 abort();
3147 }
3148 return res;
3149 }
3150
on_vkDestroySamplerYcbcrConversionKHR(void * context,VkDevice device,VkSamplerYcbcrConversion ycbcrConversion,const VkAllocationCallbacks * pAllocator)3151 void on_vkDestroySamplerYcbcrConversionKHR(
3152 void* context,
3153 VkDevice device,
3154 VkSamplerYcbcrConversion ycbcrConversion,
3155 const VkAllocationCallbacks* pAllocator) {
3156 VkEncoder* enc = (VkEncoder*)context;
3157 if (ycbcrConversion != VK_YCBCR_CONVERSION_DO_NOTHING) {
3158 enc->vkDestroySamplerYcbcrConversionKHR(device, ycbcrConversion, pAllocator);
3159 }
3160 }
3161
on_vkCreateSampler(void * context,VkResult,VkDevice device,const VkSamplerCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSampler * pSampler)3162 VkResult on_vkCreateSampler(
3163 void* context, VkResult,
3164 VkDevice device,
3165 const VkSamplerCreateInfo* pCreateInfo,
3166 const VkAllocationCallbacks* pAllocator,
3167 VkSampler* pSampler) {
3168
3169 VkSamplerCreateInfo localCreateInfo = vk_make_orphan_copy(*pCreateInfo);
3170 vk_struct_chain_iterator structChainIter = vk_make_chain_iterator(&localCreateInfo);
3171
3172 #if defined(VK_USE_PLATFORM_ANDROID_KHR) || defined(VK_USE_PLATFORM_FUCHSIA)
3173 VkSamplerYcbcrConversionInfo localVkSamplerYcbcrConversionInfo;
3174 const VkSamplerYcbcrConversionInfo* samplerYcbcrConversionInfo =
3175 vk_find_struct<VkSamplerYcbcrConversionInfo>(pCreateInfo);
3176 if (samplerYcbcrConversionInfo) {
3177 if (samplerYcbcrConversionInfo->conversion != VK_YCBCR_CONVERSION_DO_NOTHING) {
3178 localVkSamplerYcbcrConversionInfo = vk_make_orphan_copy(*samplerYcbcrConversionInfo);
3179 vk_append_struct(&structChainIter, &localVkSamplerYcbcrConversionInfo);
3180 }
3181 }
3182 #endif
3183
3184 VkEncoder* enc = (VkEncoder*)context;
3185 return enc->vkCreateSampler(device, &localCreateInfo, pAllocator, pSampler);
3186 }
3187
on_vkGetPhysicalDeviceExternalFenceProperties(void * context,VkPhysicalDevice physicalDevice,const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo,VkExternalFenceProperties * pExternalFenceProperties)3188 void on_vkGetPhysicalDeviceExternalFenceProperties(
3189 void* context,
3190 VkPhysicalDevice physicalDevice,
3191 const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo,
3192 VkExternalFenceProperties* pExternalFenceProperties) {
3193
3194 (void)context;
3195 (void)physicalDevice;
3196
3197 pExternalFenceProperties->exportFromImportedHandleTypes = 0;
3198 pExternalFenceProperties->compatibleHandleTypes = 0;
3199 pExternalFenceProperties->externalFenceFeatures = 0;
3200
3201 bool syncFd =
3202 pExternalFenceInfo->handleType &
3203 VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT;
3204
3205 if (!syncFd) {
3206 return;
3207 }
3208
3209 #ifdef VK_USE_PLATFORM_ANDROID_KHR
3210 pExternalFenceProperties->exportFromImportedHandleTypes =
3211 VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT;
3212 pExternalFenceProperties->compatibleHandleTypes =
3213 VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT;
3214 pExternalFenceProperties->externalFenceFeatures =
3215 VK_EXTERNAL_FENCE_FEATURE_IMPORTABLE_BIT |
3216 VK_EXTERNAL_FENCE_FEATURE_EXPORTABLE_BIT;
3217
3218 ALOGD("%s: asked for sync fd, set the features\n", __func__);
3219 #endif
3220 }
3221
on_vkCreateFence(void * context,VkResult input_result,VkDevice device,const VkFenceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkFence * pFence)3222 VkResult on_vkCreateFence(
3223 void* context,
3224 VkResult input_result,
3225 VkDevice device,
3226 const VkFenceCreateInfo* pCreateInfo,
3227 const VkAllocationCallbacks* pAllocator, VkFence* pFence) {
3228
3229 VkEncoder* enc = (VkEncoder*)context;
3230 VkFenceCreateInfo finalCreateInfo = *pCreateInfo;
3231
3232 const VkExportFenceCreateInfo* exportFenceInfoPtr =
3233 vk_find_struct<VkExportFenceCreateInfo>(pCreateInfo);
3234
3235 #ifdef VK_USE_PLATFORM_ANDROID_KHR
3236 bool exportSyncFd =
3237 exportFenceInfoPtr &&
3238 (exportFenceInfoPtr->handleTypes &
3239 VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT);
3240
3241 if (exportSyncFd) {
3242 ALOGV("%s: exporting sync fd, do not send pNext to host\n", __func__);
3243 finalCreateInfo.pNext = nullptr;
3244 }
3245 #endif
3246
3247 input_result = enc->vkCreateFence(
3248 device, &finalCreateInfo, pAllocator, pFence);
3249
3250 if (input_result != VK_SUCCESS) return input_result;
3251
3252 #ifdef VK_USE_PLATFORM_ANDROID_KHR
3253 if (exportSyncFd) {
3254 ALOGV("%s: ensure sync device\n", __func__);
3255 ensureSyncDeviceFd();
3256
3257 ALOGV("%s: getting fence info\n", __func__);
3258 AutoLock lock(mLock);
3259 auto it = info_VkFence.find(*pFence);
3260
3261 if (it == info_VkFence.end())
3262 return VK_ERROR_INITIALIZATION_FAILED;
3263
3264 auto& info = it->second;
3265
3266 info.external = true;
3267 info.exportFenceCreateInfo = *exportFenceInfoPtr;
3268 ALOGV("%s: info set (fence still -1). fence: %p\n", __func__, (void*)(*pFence));
3269 // syncFd is still -1 because we expect user to explicitly
3270 // export it via vkGetFenceFdKHR
3271 }
3272 #endif
3273
3274 return input_result;
3275 }
3276
on_vkDestroyFence(void * context,VkDevice device,VkFence fence,const VkAllocationCallbacks * pAllocator)3277 void on_vkDestroyFence(
3278 void* context,
3279 VkDevice device,
3280 VkFence fence,
3281 const VkAllocationCallbacks* pAllocator) {
3282 VkEncoder* enc = (VkEncoder*)context;
3283 enc->vkDestroyFence(device, fence, pAllocator);
3284 }
3285
on_vkResetFences(void * context,VkResult,VkDevice device,uint32_t fenceCount,const VkFence * pFences)3286 VkResult on_vkResetFences(
3287 void* context,
3288 VkResult,
3289 VkDevice device,
3290 uint32_t fenceCount,
3291 const VkFence* pFences) {
3292
3293 VkEncoder* enc = (VkEncoder*)context;
3294 VkResult res = enc->vkResetFences(device, fenceCount, pFences);
3295
3296 if (res != VK_SUCCESS) return res;
3297
3298 if (!fenceCount) return res;
3299
3300 // Permanence: temporary
3301 // on fence reset, close the fence fd
3302 // and act like we need to GetFenceFdKHR/ImportFenceFdKHR again
3303 AutoLock lock(mLock);
3304 for (uint32_t i = 0; i < fenceCount; ++i) {
3305 VkFence fence = pFences[i];
3306 auto it = info_VkFence.find(fence);
3307 auto& info = it->second;
3308 if (!info.external) continue;
3309
3310 #ifdef VK_USE_PLATFORM_ANDROID_KHR
3311 if (info.syncFd >= 0) {
3312 ALOGV("%s: resetting fence. make fd -1\n", __func__);
3313 goldfish_sync_signal(info.syncFd);
3314 close(info.syncFd);
3315 info.syncFd = -1;
3316 }
3317 #endif
3318 }
3319
3320 return res;
3321 }
3322
on_vkImportFenceFdKHR(void * context,VkResult,VkDevice device,const VkImportFenceFdInfoKHR * pImportFenceFdInfo)3323 VkResult on_vkImportFenceFdKHR(
3324 void* context,
3325 VkResult,
3326 VkDevice device,
3327 const VkImportFenceFdInfoKHR* pImportFenceFdInfo) {
3328
3329 (void)context;
3330 (void)device;
3331 (void)pImportFenceFdInfo;
3332
3333 // Transference: copy
3334 // meaning dup() the incoming fd
3335
3336 VkEncoder* enc = (VkEncoder*)context;
3337
3338 bool hasFence = pImportFenceFdInfo->fence != VK_NULL_HANDLE;
3339
3340 if (!hasFence) return VK_ERROR_OUT_OF_HOST_MEMORY;
3341
3342 #ifdef VK_USE_PLATFORM_ANDROID_KHR
3343
3344 bool syncFdImport =
3345 pImportFenceFdInfo->handleType & VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT;
3346
3347 if (!syncFdImport) {
3348 ALOGV("%s: VK_ERROR_OUT_OF_HOST_MEMORY: no sync fd import\n", __func__);
3349 return VK_ERROR_OUT_OF_HOST_MEMORY;
3350 }
3351
3352 AutoLock lock(mLock);
3353 auto it = info_VkFence.find(pImportFenceFdInfo->fence);
3354 if (it == info_VkFence.end()) {
3355 ALOGV("%s: VK_ERROR_OUT_OF_HOST_MEMORY: no fence info\n", __func__);
3356 return VK_ERROR_OUT_OF_HOST_MEMORY;
3357 }
3358
3359 auto& info = it->second;
3360
3361 if (info.syncFd >= 0) {
3362 ALOGV("%s: previous sync fd exists, close it\n", __func__);
3363 goldfish_sync_signal(info.syncFd);
3364 close(info.syncFd);
3365 }
3366
3367 if (pImportFenceFdInfo->fd < 0) {
3368 ALOGV("%s: import -1, set to -1 and exit\n", __func__);
3369 info.syncFd = -1;
3370 } else {
3371 ALOGV("%s: import actual fd, dup and close()\n", __func__);
3372 info.syncFd = dup(pImportFenceFdInfo->fd);
3373 close(pImportFenceFdInfo->fd);
3374 }
3375 return VK_SUCCESS;
3376 #else
3377 return VK_ERROR_OUT_OF_HOST_MEMORY;
3378 #endif
3379 }
3380
on_vkGetFenceFdKHR(void * context,VkResult,VkDevice device,const VkFenceGetFdInfoKHR * pGetFdInfo,int * pFd)3381 VkResult on_vkGetFenceFdKHR(
3382 void* context,
3383 VkResult,
3384 VkDevice device,
3385 const VkFenceGetFdInfoKHR* pGetFdInfo,
3386 int* pFd) {
3387
3388 // export operation.
3389 // first check if fence is signaled
3390 // then if so, return -1
3391 // else, queue work
3392
3393 VkEncoder* enc = (VkEncoder*)context;
3394
3395 bool hasFence = pGetFdInfo->fence != VK_NULL_HANDLE;
3396
3397 if (!hasFence) {
3398 ALOGV("%s: VK_ERROR_OUT_OF_HOST_MEMORY: no fence\n", __func__);
3399 return VK_ERROR_OUT_OF_HOST_MEMORY;
3400 }
3401
3402 #ifdef VK_USE_PLATFORM_ANDROID_KHR
3403 bool syncFdExport =
3404 pGetFdInfo->handleType & VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT;
3405
3406 if (!syncFdExport) {
3407 ALOGV("%s: VK_ERROR_OUT_OF_HOST_MEMORY: no sync fd fence\n", __func__);
3408 return VK_ERROR_OUT_OF_HOST_MEMORY;
3409 }
3410
3411 VkResult currentFenceStatus = enc->vkGetFenceStatus(device, pGetFdInfo->fence);
3412
3413 if (VK_SUCCESS == currentFenceStatus) { // Fence already signaled
3414 ALOGV("%s: VK_SUCCESS: already signaled\n", __func__);
3415 *pFd = -1;
3416 return VK_SUCCESS;
3417 }
3418
3419 if (VK_ERROR_DEVICE_LOST == currentFenceStatus) { // Other error
3420 ALOGV("%s: VK_ERROR_DEVICE_LOST: Other error\n", __func__);
3421 *pFd = -1;
3422 return VK_ERROR_DEVICE_LOST;
3423 }
3424
3425 if (VK_NOT_READY == currentFenceStatus) { // Fence unsignaled; create fd here
3426 AutoLock lock(mLock);
3427
3428 auto it = info_VkFence.find(pGetFdInfo->fence);
3429 if (it == info_VkFence.end()) {
3430 ALOGV("%s: VK_ERROR_OUT_OF_HOST_MEMORY: no fence info\n", __func__);
3431 return VK_ERROR_OUT_OF_HOST_MEMORY;
3432 }
3433
3434 auto& info = it->second;
3435
3436 bool syncFdCreated =
3437 info.external &&
3438 (info.exportFenceCreateInfo.handleTypes &
3439 VK_EXTERNAL_FENCE_HANDLE_TYPE_SYNC_FD_BIT);
3440
3441 if (!syncFdCreated) {
3442 ALOGV("%s: VK_ERROR_OUT_OF_HOST_MEMORY: no sync fd created\n", __func__);
3443 return VK_ERROR_OUT_OF_HOST_MEMORY;
3444 }
3445
3446 goldfish_sync_queue_work(
3447 mSyncDeviceFd,
3448 get_host_u64_VkFence(pGetFdInfo->fence) /* the handle */,
3449 GOLDFISH_SYNC_VULKAN_SEMAPHORE_SYNC /* thread handle (doubling as type field) */,
3450 pFd);
3451 // relinquish ownership
3452 info.syncFd = -1;
3453 ALOGV("%s: got fd: %d\n", __func__, *pFd);
3454 return VK_SUCCESS;
3455 }
3456 return VK_ERROR_DEVICE_LOST;
3457 #else
3458 return VK_ERROR_OUT_OF_HOST_MEMORY;
3459 #endif
3460 }
3461
// Waits on a mix of host-backed fences and guest sync-fd-backed fences.
// On Android the set is partitioned: fences with a live sync fd are
// waited on guest-side via sync_wait on work-pool threads, while the
// rest are forwarded to the host driver on another work-pool thread;
// the wait group is then joined with waitAll/waitAny according to the
// caller's waitAll flag. Elsewhere this is a plain host wait.
VkResult on_vkWaitForFences(
    void* context,
    VkResult,
    VkDevice device,
    uint32_t fenceCount,
    const VkFence* pFences,
    VkBool32 waitAll,
    uint64_t timeout) {

    VkEncoder* enc = (VkEncoder*)context;

#ifdef VK_USE_PLATFORM_ANDROID_KHR
    std::vector<VkFence> fencesExternal;
    std::vector<int> fencesExternalWaitFds;
    std::vector<VkFence> fencesNonExternal;

    // NOTE(review): mLock stays held for the rest of this function,
    // including across the work-pool waits below — confirm no scheduled
    // task needs mLock, or this could deadlock.
    AutoLock lock(mLock);

    // Partition: fences with a live sync fd vs. host-only fences.
    // Untracked fences are silently skipped.
    for (uint32_t i = 0; i < fenceCount; ++i) {
        auto it = info_VkFence.find(pFences[i]);
        if (it == info_VkFence.end()) continue;
        const auto& info = it->second;
        if (info.syncFd >= 0) {
            fencesExternal.push_back(pFences[i]);
            fencesExternalWaitFds.push_back(info.syncFd);
        } else {
            fencesNonExternal.push_back(pFences[i]);
        }
    }

    if (fencesExternal.empty()) {
        // No need for work pool, just wait with host driver.
        return enc->vkWaitForFences(
            device, fenceCount, pFences, waitAll, timeout);
    } else {
        // Depending on wait any or wait all,
        // schedule a wait group with waitAny/waitAll
        std::vector<WorkPool::Task> tasks;

        ALOGV("%s: scheduling ext waits\n", __func__);

        for (auto fd : fencesExternalWaitFds) {
            ALOGV("%s: wait on %d\n", __func__, fd);
            tasks.push_back([fd] {
                // NOTE(review): each sync fd wait is capped at 3000 ms
                // regardless of the caller's |timeout| — confirm this
                // hard-coded cap is intended.
                sync_wait(fd, 3000);
                ALOGV("done waiting on fd %d\n", fd);
            });
        }

        if (!fencesNonExternal.empty()) {
            // Host-side fences are waited on from a work-pool thread,
            // which must use its own host connection and encoder.
            tasks.push_back([this,
                             fencesNonExternal /* copy of vector */,
                             device, waitAll, timeout] {
                auto hostConn = mThreadingCallbacks.hostConnectionGetFunc();
                auto vkEncoder = mThreadingCallbacks.vkEncoderGetFunc(hostConn);
                ALOGV("%s: vkWaitForFences to host\n", __func__);
                vkEncoder->vkWaitForFences(device, fencesNonExternal.size(), fencesNonExternal.data(), waitAll, timeout);
            });
        }

        auto waitGroupHandle = mWorkPool.schedule(tasks);

        // Convert timeout to microseconds from nanoseconds
        bool waitRes = false;
        if (waitAll) {
            waitRes = mWorkPool.waitAll(waitGroupHandle, timeout / 1000);
        } else {
            waitRes = mWorkPool.waitAny(waitGroupHandle, timeout / 1000);
        }

        if (waitRes) {
            ALOGV("%s: VK_SUCCESS\n", __func__);
            return VK_SUCCESS;
        } else {
            ALOGV("%s: VK_TIMEOUT\n", __func__);
            return VK_TIMEOUT;
        }
    }
#else
    return enc->vkWaitForFences(
        device, fenceCount, pFences, waitAll, timeout);
#endif
}
3545
on_vkCreateDescriptorPool(void * context,VkResult,VkDevice device,const VkDescriptorPoolCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorPool * pDescriptorPool)3546 VkResult on_vkCreateDescriptorPool(
3547 void* context,
3548 VkResult,
3549 VkDevice device,
3550 const VkDescriptorPoolCreateInfo* pCreateInfo,
3551 const VkAllocationCallbacks* pAllocator,
3552 VkDescriptorPool* pDescriptorPool) {
3553
3554 VkEncoder* enc = (VkEncoder*)context;
3555
3556 VkResult res = enc->vkCreateDescriptorPool(
3557 device, pCreateInfo, pAllocator, pDescriptorPool);
3558
3559 if (res != VK_SUCCESS) return res;
3560
3561 AutoLock lock(mLock);
3562 auto it = info_VkDescriptorPool.find(*pDescriptorPool);
3563 if (it == info_VkDescriptorPool.end()) return res;
3564
3565 auto &info = it->second;
3566 info.createFlags = pCreateInfo->flags;
3567
3568 return res;
3569 }
3570
on_vkDestroyDescriptorPool(void * context,VkDevice device,VkDescriptorPool descriptorPool,const VkAllocationCallbacks * pAllocator)3571 void on_vkDestroyDescriptorPool(
3572 void* context,
3573 VkDevice device,
3574 VkDescriptorPool descriptorPool,
3575 const VkAllocationCallbacks* pAllocator) {
3576
3577 VkEncoder* enc = (VkEncoder*)context;
3578
3579 enc->vkDestroyDescriptorPool(device, descriptorPool, pAllocator);
3580 }
3581
on_vkResetDescriptorPool(void * context,VkResult,VkDevice device,VkDescriptorPool descriptorPool,VkDescriptorPoolResetFlags flags)3582 VkResult on_vkResetDescriptorPool(
3583 void* context,
3584 VkResult,
3585 VkDevice device,
3586 VkDescriptorPool descriptorPool,
3587 VkDescriptorPoolResetFlags flags) {
3588
3589 VkEncoder* enc = (VkEncoder*)context;
3590
3591 VkResult res = enc->vkResetDescriptorPool(device, descriptorPool, flags);
3592
3593 if (res != VK_SUCCESS) return res;
3594
3595 AutoLock lock(mLock);
3596 clearDescriptorPoolLocked(descriptorPool);
3597 return res;
3598 }
3599
on_vkAllocateDescriptorSets(void * context,VkResult,VkDevice device,const VkDescriptorSetAllocateInfo * pAllocateInfo,VkDescriptorSet * pDescriptorSets)3600 VkResult on_vkAllocateDescriptorSets(
3601 void* context,
3602 VkResult,
3603 VkDevice device,
3604 const VkDescriptorSetAllocateInfo* pAllocateInfo,
3605 VkDescriptorSet* pDescriptorSets) {
3606
3607 VkEncoder* enc = (VkEncoder*)context;
3608
3609 VkResult res = enc->vkAllocateDescriptorSets(device, pAllocateInfo, pDescriptorSets);
3610
3611 if (res != VK_SUCCESS) return res;
3612
3613 AutoLock lock(mLock);
3614 initDescriptorSetStateLocked(pAllocateInfo, pDescriptorSets);
3615 return res;
3616 }
3617
on_vkFreeDescriptorSets(void * context,VkResult,VkDevice device,VkDescriptorPool descriptorPool,uint32_t descriptorSetCount,const VkDescriptorSet * pDescriptorSets)3618 VkResult on_vkFreeDescriptorSets(
3619 void* context,
3620 VkResult,
3621 VkDevice device,
3622 VkDescriptorPool descriptorPool,
3623 uint32_t descriptorSetCount,
3624 const VkDescriptorSet* pDescriptorSets) {
3625
3626 VkEncoder* enc = (VkEncoder*)context;
3627
3628 // Bit of robustness so that we can double free descriptor sets
3629 // and do other invalid usages
3630 // https://github.com/KhronosGroup/Vulkan-Docs/issues/1070
3631 // (people expect VK_SUCCESS to always be returned by vkFreeDescriptorSets)
3632 std::vector<VkDescriptorSet> toActuallyFree;
3633 {
3634 AutoLock lock(mLock);
3635
3636 if (!descriptorPoolSupportsIndividualFreeLocked(descriptorPool))
3637 return VK_SUCCESS;
3638
3639 for (uint32_t i = 0; i < descriptorSetCount; ++i) {
3640 if (descriptorSetReallyAllocedFromPoolLocked(
3641 pDescriptorSets[i], descriptorPool)) {
3642 toActuallyFree.push_back(pDescriptorSets[i]);
3643 }
3644 }
3645
3646 if (toActuallyFree.empty()) return VK_SUCCESS;
3647 }
3648
3649 return enc->vkFreeDescriptorSets(
3650 device, descriptorPool,
3651 (uint32_t)toActuallyFree.size(),
3652 toActuallyFree.data());
3653 }
3654
on_vkCreateDescriptorSetLayout(void * context,VkResult,VkDevice device,const VkDescriptorSetLayoutCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorSetLayout * pSetLayout)3655 VkResult on_vkCreateDescriptorSetLayout(
3656 void* context,
3657 VkResult,
3658 VkDevice device,
3659 const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
3660 const VkAllocationCallbacks* pAllocator,
3661 VkDescriptorSetLayout* pSetLayout) {
3662
3663 VkEncoder* enc = (VkEncoder*)context;
3664
3665 VkResult res = enc->vkCreateDescriptorSetLayout(
3666 device, pCreateInfo, pAllocator, pSetLayout);
3667
3668 if (res != VK_SUCCESS) return res;
3669
3670 AutoLock lock(mLock);
3671
3672 auto it = info_VkDescriptorSetLayout.find(*pSetLayout);
3673 if (it == info_VkDescriptorSetLayout.end()) return res;
3674
3675 auto& info = it->second;
3676 for (uint32_t i = 0; i < pCreateInfo->bindingCount; ++i) {
3677 info.bindings.push_back(pCreateInfo->pBindings[i]);
3678 }
3679
3680 return res;
3681 }
3682
on_vkUpdateDescriptorSets(void * context,VkDevice device,uint32_t descriptorWriteCount,const VkWriteDescriptorSet * pDescriptorWrites,uint32_t descriptorCopyCount,const VkCopyDescriptorSet * pDescriptorCopies)3683 void on_vkUpdateDescriptorSets(
3684 void* context,
3685 VkDevice device,
3686 uint32_t descriptorWriteCount,
3687 const VkWriteDescriptorSet* pDescriptorWrites,
3688 uint32_t descriptorCopyCount,
3689 const VkCopyDescriptorSet* pDescriptorCopies) {
3690
3691 VkEncoder* enc = (VkEncoder*)context;
3692
3693 std::vector<std::vector<VkDescriptorImageInfo>> imageInfosPerWrite(
3694 descriptorWriteCount);
3695
3696 std::vector<VkWriteDescriptorSet> writesWithSuppressedSamplers;
3697
3698 {
3699 AutoLock lock(mLock);
3700 for (uint32_t i = 0; i < descriptorWriteCount; ++i) {
3701 writesWithSuppressedSamplers.push_back(
3702 createImmutableSamplersFilteredWriteDescriptorSetLocked(
3703 pDescriptorWrites + i,
3704 imageInfosPerWrite.data() + i));
3705 }
3706 }
3707
3708 enc->vkUpdateDescriptorSets(
3709 device, descriptorWriteCount, writesWithSuppressedSamplers.data(),
3710 descriptorCopyCount, pDescriptorCopies);
3711 }
3712
on_vkDestroyImage(void * context,VkDevice device,VkImage image,const VkAllocationCallbacks * pAllocator)3713 void on_vkDestroyImage(
3714 void* context,
3715 VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
3716 VkEncoder* enc = (VkEncoder*)context;
3717 enc->vkDestroyImage(device, image, pAllocator);
3718 }
3719
setMemoryRequirementsForSysmemBackedImage(VkImage image,VkMemoryRequirements * pMemoryRequirements)3720 void setMemoryRequirementsForSysmemBackedImage(
3721 VkImage image, VkMemoryRequirements *pMemoryRequirements) {
3722 #ifdef VK_USE_PLATFORM_FUCHSIA
3723 auto it = info_VkImage.find(image);
3724 if (it == info_VkImage.end()) return;
3725 auto& info = it->second;
3726 if (info.isSysmemBackedMemory) {
3727 auto width = info.createInfo.extent.width;
3728 auto height = info.createInfo.extent.height;
3729 pMemoryRequirements->size = width * height * 4;
3730 }
3731 #else
3732 // Bypass "unused parameter" checks.
3733 (void)image;
3734 (void)pMemoryRequirements;
3735 #endif
3736 }
3737
    // Returns image memory requirements, serving from the guest-side cache
    // when possible to avoid a host round trip. On a cache miss, queries the
    // host, rewrites the result for the guest's view, and caches it.
    void on_vkGetImageMemoryRequirements(
        void *context, VkDevice device, VkImage image,
        VkMemoryRequirements *pMemoryRequirements) {

        AutoLock lock(mLock);

        auto it = info_VkImage.find(image);
        if (it == info_VkImage.end()) return;

        auto& info = it->second;

        // Fast path: requirements captured at creation time or by a previous
        // query.
        if (info.baseRequirementsKnown) {
            *pMemoryRequirements = info.baseRequirements;
            return;
        }

        // Drop the lock around the host round trip so other threads are not
        // blocked behind it.
        // NOTE(review): |info| is a reference into info_VkImage and is reused
        // after re-locking; this assumes the entry is not erased concurrently
        // — confirm against the image-destroy path.
        lock.unlock();

        VkEncoder* enc = (VkEncoder*)context;

        enc->vkGetImageMemoryRequirements(
            device, image, pMemoryRequirements);

        lock.lock();

        // Adjust the host-reported requirements (e.g. memory type bits) for
        // the guest before caching.
        transformImageMemoryRequirementsForGuestLocked(
            image, pMemoryRequirements);

        info.baseRequirementsKnown = true;
        info.baseRequirements = *pMemoryRequirements;
    }
3769
on_vkGetImageMemoryRequirements2(void * context,VkDevice device,const VkImageMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)3770 void on_vkGetImageMemoryRequirements2(
3771 void *context, VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
3772 VkMemoryRequirements2 *pMemoryRequirements) {
3773 VkEncoder* enc = (VkEncoder*)context;
3774 enc->vkGetImageMemoryRequirements2(
3775 device, pInfo, pMemoryRequirements);
3776 transformImageMemoryRequirements2ForGuest(
3777 pInfo->image, pMemoryRequirements);
3778 }
3779
on_vkGetImageMemoryRequirements2KHR(void * context,VkDevice device,const VkImageMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)3780 void on_vkGetImageMemoryRequirements2KHR(
3781 void *context, VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
3782 VkMemoryRequirements2 *pMemoryRequirements) {
3783 VkEncoder* enc = (VkEncoder*)context;
3784 enc->vkGetImageMemoryRequirements2KHR(
3785 device, pInfo, pMemoryRequirements);
3786 transformImageMemoryRequirements2ForGuest(
3787 pInfo->image, pMemoryRequirements);
3788 }
3789
on_vkBindImageMemory(void * context,VkResult,VkDevice device,VkImage image,VkDeviceMemory memory,VkDeviceSize memoryOffset)3790 VkResult on_vkBindImageMemory(
3791 void* context, VkResult,
3792 VkDevice device, VkImage image, VkDeviceMemory memory,
3793 VkDeviceSize memoryOffset) {
3794 VkEncoder* enc = (VkEncoder*)context;
3795 return enc->vkBindImageMemory(device, image, memory, memoryOffset);
3796 }
3797
on_vkBindImageMemory2(void * context,VkResult,VkDevice device,uint32_t bindingCount,const VkBindImageMemoryInfo * pBindInfos)3798 VkResult on_vkBindImageMemory2(
3799 void* context, VkResult,
3800 VkDevice device, uint32_t bindingCount, const VkBindImageMemoryInfo* pBindInfos) {
3801 VkEncoder* enc = (VkEncoder*)context;
3802 return enc->vkBindImageMemory2(device, bindingCount, pBindInfos);
3803 }
3804
on_vkBindImageMemory2KHR(void * context,VkResult,VkDevice device,uint32_t bindingCount,const VkBindImageMemoryInfo * pBindInfos)3805 VkResult on_vkBindImageMemory2KHR(
3806 void* context, VkResult,
3807 VkDevice device, uint32_t bindingCount, const VkBindImageMemoryInfo* pBindInfos) {
3808 VkEncoder* enc = (VkEncoder*)context;
3809 return enc->vkBindImageMemory2KHR(device, bindingCount, pBindInfos);
3810 }
3811
    // Creates a buffer on the host and registers guest-side tracking state.
    // On Fuchsia, also resolves sysmem buffer-collection backing (importing
    // the collection's VMO into the goldfish control device) before the host
    // create call.
    VkResult on_vkCreateBuffer(
        void* context, VkResult,
        VkDevice device, const VkBufferCreateInfo *pCreateInfo,
        const VkAllocationCallbacks *pAllocator,
        VkBuffer *pBuffer) {
        VkEncoder* enc = (VkEncoder*)context;

#ifdef VK_USE_PLATFORM_FUCHSIA
        Optional<zx::vmo> vmo;
        bool isSysmemBackedMemory = false;

        // Exporting a zircon VMO implies sysmem backing even without an
        // explicit buffer-collection struct.
        const VkExternalMemoryBufferCreateInfo* extBufCiPtr =
            vk_find_struct<VkExternalMemoryBufferCreateInfo>(pCreateInfo);
        if (extBufCiPtr &&
            (extBufCiPtr->handleTypes &
             VK_EXTERNAL_MEMORY_HANDLE_TYPE_TEMP_ZIRCON_VMO_BIT_FUCHSIA)) {
            isSysmemBackedMemory = true;
        }

        const auto* extBufferCollectionPtr =
            vk_find_struct<VkBufferCollectionBufferCreateInfoFUCHSIA>(
                pCreateInfo);

        if (extBufferCollectionPtr) {
            // |collection| is an opaque handle wrapping the sysmem FIDL
            // client; blocks until sysmem has allocated the buffers.
            auto collection = reinterpret_cast<
                llcpp::fuchsia::sysmem::BufferCollection::SyncClient*>(
                extBufferCollectionPtr->collection);
            uint32_t index = extBufferCollectionPtr->index;

            auto result = collection->WaitForBuffersAllocated();
            if (result.ok() && result.Unwrap()->status == ZX_OK) {
                auto& info = result.Unwrap()->buffer_collection_info;
                if (index < info.buffer_count) {
                    // Take ownership of the VMO backing this buffer slot.
                    vmo = android::base::makeOptional(
                        std::move(info.buffers[index].vmo));
                }
            } else {
                ALOGE("WaitForBuffersAllocated failed: %d %d", result.status(),
                      GET_STATUS_SAFE(result, status));
            }

            if (vmo && vmo->is_valid()) {
                // Register the VMO with the goldfish control device.
                // ZX_ERR_ALREADY_EXISTS is tolerated: the VMO may have been
                // registered by an earlier create against the same slot.
                // NOTE: this |result| intentionally shadows the outer one.
                auto result = mControlDevice->CreateBuffer(std::move(*vmo),
                                                           pCreateInfo->size);
                if (!result.ok() ||
                    (result.Unwrap()->res != ZX_OK &&
                     result.Unwrap()->res != ZX_ERR_ALREADY_EXISTS)) {
                    ALOGE("CreateBuffer failed: %d:%d", result.status(),
                          GET_STATUS_SAFE(result, res));
                }
                isSysmemBackedMemory = true;
            }
        }
#endif // VK_USE_PLATFORM_FUCHSIA

        VkResult res;
        VkMemoryRequirements memReqs;

        // Prefer the combined create+get-requirements host call when
        // available; it saves a round trip on the first requirements query.
        if (supportsCreateResourcesWithRequirements()) {
            res = enc->vkCreateBufferWithRequirementsGOOGLE(device, pCreateInfo, pAllocator, pBuffer, &memReqs);
        } else {
            res = enc->vkCreateBuffer(device, pCreateInfo, pAllocator, pBuffer);
        }

        if (res != VK_SUCCESS) return res;

        AutoLock lock(mLock);

        auto it = info_VkBuffer.find(*pBuffer);
        if (it == info_VkBuffer.end()) return VK_ERROR_INITIALIZATION_FAILED;

        auto& info = it->second;

        // Cache the create info; drop pNext since the chained structs are
        // stack-allocated by the caller and will dangle.
        info.createInfo = *pCreateInfo;
        info.createInfo.pNext = nullptr;

        if (supportsCreateResourcesWithRequirements()) {
            info.baseRequirementsKnown = true;
        }

        const VkExternalMemoryBufferCreateInfo* extBufCi =
            vk_find_struct<VkExternalMemoryBufferCreateInfo>(pCreateInfo);

        if (extBufCi) {
            info.external = true;
            info.externalCreateInfo = *extBufCi;
        }

#ifdef VK_USE_PLATFORM_FUCHSIA
        if (isSysmemBackedMemory) {
            info.isSysmemBackedMemory = true;
        }
#endif

        // Transform and cache the requirements captured by the combined
        // create call above.
        if (info.baseRequirementsKnown) {
            transformBufferMemoryRequirementsForGuestLocked(*pBuffer, &memReqs);
            info.baseRequirements = memReqs;
        }

        return res;
    }
3913
on_vkDestroyBuffer(void * context,VkDevice device,VkBuffer buffer,const VkAllocationCallbacks * pAllocator)3914 void on_vkDestroyBuffer(
3915 void* context,
3916 VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
3917 VkEncoder* enc = (VkEncoder*)context;
3918 enc->vkDestroyBuffer(device, buffer, pAllocator);
3919 }
3920
    // Returns buffer memory requirements, serving from the guest-side cache
    // when possible; mirrors on_vkGetImageMemoryRequirements.
    void on_vkGetBufferMemoryRequirements(
        void* context, VkDevice device, VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements) {

        AutoLock lock(mLock);

        auto it = info_VkBuffer.find(buffer);
        if (it == info_VkBuffer.end()) return;

        auto& info = it->second;

        // Fast path: requirements captured at creation time or by a previous
        // query.
        if (info.baseRequirementsKnown) {
            *pMemoryRequirements = info.baseRequirements;
            return;
        }

        // Drop the lock around the host round trip.
        // NOTE(review): |info| is a reference into info_VkBuffer and is
        // reused after re-locking; assumes the entry is not erased
        // concurrently — confirm against the buffer-destroy path.
        lock.unlock();

        VkEncoder* enc = (VkEncoder*)context;
        enc->vkGetBufferMemoryRequirements(
            device, buffer, pMemoryRequirements);

        lock.lock();

        // Adjust host-reported requirements for the guest before caching.
        transformBufferMemoryRequirementsForGuestLocked(
            buffer, pMemoryRequirements);
        info.baseRequirementsKnown = true;
        info.baseRequirements = *pMemoryRequirements;
    }
3949
on_vkGetBufferMemoryRequirements2(void * context,VkDevice device,const VkBufferMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)3950 void on_vkGetBufferMemoryRequirements2(
3951 void* context, VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo,
3952 VkMemoryRequirements2* pMemoryRequirements) {
3953 VkEncoder* enc = (VkEncoder*)context;
3954 enc->vkGetBufferMemoryRequirements2(device, pInfo, pMemoryRequirements);
3955 transformBufferMemoryRequirements2ForGuest(
3956 pInfo->buffer, pMemoryRequirements);
3957 }
3958
on_vkGetBufferMemoryRequirements2KHR(void * context,VkDevice device,const VkBufferMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)3959 void on_vkGetBufferMemoryRequirements2KHR(
3960 void* context, VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo,
3961 VkMemoryRequirements2* pMemoryRequirements) {
3962 VkEncoder* enc = (VkEncoder*)context;
3963 enc->vkGetBufferMemoryRequirements2KHR(device, pInfo, pMemoryRequirements);
3964 transformBufferMemoryRequirements2ForGuest(
3965 pInfo->buffer, pMemoryRequirements);
3966 }
3967
on_vkBindBufferMemory(void * context,VkResult,VkDevice device,VkBuffer buffer,VkDeviceMemory memory,VkDeviceSize memoryOffset)3968 VkResult on_vkBindBufferMemory(
3969 void *context, VkResult,
3970 VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset) {
3971 VkEncoder *enc = (VkEncoder *)context;
3972 return enc->vkBindBufferMemory(
3973 device, buffer, memory, memoryOffset);
3974 }
3975
on_vkBindBufferMemory2(void * context,VkResult,VkDevice device,uint32_t bindInfoCount,const VkBindBufferMemoryInfo * pBindInfos)3976 VkResult on_vkBindBufferMemory2(
3977 void *context, VkResult,
3978 VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo *pBindInfos) {
3979 VkEncoder *enc = (VkEncoder *)context;
3980 return enc->vkBindBufferMemory2(
3981 device, bindInfoCount, pBindInfos);
3982 }
3983
on_vkBindBufferMemory2KHR(void * context,VkResult,VkDevice device,uint32_t bindInfoCount,const VkBindBufferMemoryInfo * pBindInfos)3984 VkResult on_vkBindBufferMemory2KHR(
3985 void *context, VkResult,
3986 VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo *pBindInfos) {
3987 VkEncoder *enc = (VkEncoder *)context;
3988 return enc->vkBindBufferMemory2KHR(
3989 device, bindInfoCount, pBindInfos);
3990 }
3991
ensureSyncDeviceFd()3992 void ensureSyncDeviceFd() {
3993 if (mSyncDeviceFd >= 0) return;
3994 #ifdef VK_USE_PLATFORM_ANDROID_KHR
3995 mSyncDeviceFd = goldfish_sync_open();
3996 if (mSyncDeviceFd >= 0) {
3997 ALOGD("%s: created sync device for current Vulkan process: %d\n", __func__, mSyncDeviceFd);
3998 } else {
3999 ALOGD("%s: failed to create sync device for current Vulkan process\n", __func__);
4000 }
4001 #endif
4002 }
4003
on_vkCreateSemaphore(void * context,VkResult input_result,VkDevice device,const VkSemaphoreCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSemaphore * pSemaphore)4004 VkResult on_vkCreateSemaphore(
4005 void* context, VkResult input_result,
4006 VkDevice device, const VkSemaphoreCreateInfo* pCreateInfo,
4007 const VkAllocationCallbacks* pAllocator,
4008 VkSemaphore* pSemaphore) {
4009
4010 VkEncoder* enc = (VkEncoder*)context;
4011
4012 VkSemaphoreCreateInfo finalCreateInfo = *pCreateInfo;
4013
4014 const VkExportSemaphoreCreateInfoKHR* exportSemaphoreInfoPtr =
4015 vk_find_struct<VkExportSemaphoreCreateInfoKHR>(pCreateInfo);
4016
4017 #ifdef VK_USE_PLATFORM_FUCHSIA
4018 bool exportEvent = exportSemaphoreInfoPtr &&
4019 (exportSemaphoreInfoPtr->handleTypes &
4020 VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_TEMP_ZIRCON_EVENT_BIT_FUCHSIA);
4021
4022 if (exportEvent) {
4023 finalCreateInfo.pNext = nullptr;
4024 }
4025 #endif
4026
4027 #ifdef VK_USE_PLATFORM_ANDROID_KHR
4028 bool exportSyncFd = exportSemaphoreInfoPtr &&
4029 (exportSemaphoreInfoPtr->handleTypes &
4030 VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT);
4031
4032 if (exportSyncFd) {
4033 finalCreateInfo.pNext = nullptr;
4034 }
4035 #endif
4036 input_result = enc->vkCreateSemaphore(
4037 device, &finalCreateInfo, pAllocator, pSemaphore);
4038
4039 zx_handle_t event_handle = ZX_HANDLE_INVALID;
4040
4041 #ifdef VK_USE_PLATFORM_FUCHSIA
4042 if (exportEvent) {
4043 zx_event_create(0, &event_handle);
4044 }
4045 #endif
4046
4047 AutoLock lock(mLock);
4048
4049 auto it = info_VkSemaphore.find(*pSemaphore);
4050 if (it == info_VkSemaphore.end()) return VK_ERROR_INITIALIZATION_FAILED;
4051
4052 auto& info = it->second;
4053
4054 info.device = device;
4055 info.eventHandle = event_handle;
4056
4057 #ifdef VK_USE_PLATFORM_ANDROID_KHR
4058 if (exportSyncFd) {
4059
4060 ensureSyncDeviceFd();
4061
4062 if (exportSyncFd) {
4063 int syncFd = -1;
4064 goldfish_sync_queue_work(
4065 mSyncDeviceFd,
4066 get_host_u64_VkSemaphore(*pSemaphore) /* the handle */,
4067 GOLDFISH_SYNC_VULKAN_SEMAPHORE_SYNC /* thread handle (doubling as type field) */,
4068 &syncFd);
4069 info.syncFd = syncFd;
4070 }
4071 }
4072 #endif
4073
4074 return VK_SUCCESS;
4075 }
4076
on_vkDestroySemaphore(void * context,VkDevice device,VkSemaphore semaphore,const VkAllocationCallbacks * pAllocator)4077 void on_vkDestroySemaphore(
4078 void* context,
4079 VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator) {
4080 VkEncoder* enc = (VkEncoder*)context;
4081 enc->vkDestroySemaphore(device, semaphore, pAllocator);
4082 }
4083
4084 // https://www.khronos.org/registry/vulkan/specs/1.0-extensions/html/vkspec.html#vkGetSemaphoreFdKHR
4085 // Each call to vkGetSemaphoreFdKHR must create a new file descriptor and transfer ownership
4086 // of it to the application. To avoid leaking resources, the application must release ownership
4087 // of the file descriptor when it is no longer needed.
on_vkGetSemaphoreFdKHR(void * context,VkResult,VkDevice device,const VkSemaphoreGetFdInfoKHR * pGetFdInfo,int * pFd)4088 VkResult on_vkGetSemaphoreFdKHR(
4089 void* context, VkResult,
4090 VkDevice device, const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
4091 int* pFd) {
4092 #ifdef VK_USE_PLATFORM_ANDROID_KHR
4093 VkEncoder* enc = (VkEncoder*)context;
4094 bool getSyncFd =
4095 pGetFdInfo->handleType & VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT;
4096
4097 if (getSyncFd) {
4098 AutoLock lock(mLock);
4099 auto it = info_VkSemaphore.find(pGetFdInfo->semaphore);
4100 if (it == info_VkSemaphore.end()) return VK_ERROR_OUT_OF_HOST_MEMORY;
4101 auto& semInfo = it->second;
4102 *pFd = dup(semInfo.syncFd);
4103 return VK_SUCCESS;
4104 } else {
4105 // opaque fd
4106 int hostFd = 0;
4107 VkResult result = enc->vkGetSemaphoreFdKHR(device, pGetFdInfo, &hostFd);
4108 if (result != VK_SUCCESS) {
4109 return result;
4110 }
4111 *pFd = memfd_create("vk_opaque_fd", 0);
4112 write(*pFd, &hostFd, sizeof(hostFd));
4113 return VK_SUCCESS;
4114 }
4115 #else
4116 (void)context;
4117 (void)device;
4118 (void)pGetFdInfo;
4119 (void)pFd;
4120 return VK_ERROR_INCOMPATIBLE_DRIVER;
4121 #endif
4122 }
4123
on_vkImportSemaphoreFdKHR(void * context,VkResult input_result,VkDevice device,const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo)4124 VkResult on_vkImportSemaphoreFdKHR(
4125 void* context, VkResult input_result,
4126 VkDevice device,
4127 const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) {
4128 #ifdef VK_USE_PLATFORM_ANDROID_KHR
4129 VkEncoder* enc = (VkEncoder*)context;
4130 if (input_result != VK_SUCCESS) {
4131 return input_result;
4132 }
4133
4134 if (pImportSemaphoreFdInfo->handleType &
4135 VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT) {
4136 VkImportSemaphoreFdInfoKHR tmpInfo = *pImportSemaphoreFdInfo;
4137
4138 AutoLock lock(mLock);
4139
4140 auto semaphoreIt = info_VkSemaphore.find(pImportSemaphoreFdInfo->semaphore);
4141 auto& info = semaphoreIt->second;
4142
4143 if (info.syncFd >= 0) {
4144 close(info.syncFd);
4145 }
4146
4147 info.syncFd = pImportSemaphoreFdInfo->fd;
4148
4149 return VK_SUCCESS;
4150 } else {
4151 int fd = pImportSemaphoreFdInfo->fd;
4152 int err = lseek(fd, 0, SEEK_SET);
4153 if (err == -1) {
4154 ALOGE("lseek fail on import semaphore");
4155 }
4156 int hostFd = 0;
4157 read(fd, &hostFd, sizeof(hostFd));
4158 VkImportSemaphoreFdInfoKHR tmpInfo = *pImportSemaphoreFdInfo;
4159 tmpInfo.fd = hostFd;
4160 VkResult result = enc->vkImportSemaphoreFdKHR(device, &tmpInfo);
4161 close(fd);
4162 return result;
4163 }
4164 #else
4165 (void)context;
4166 (void)input_result;
4167 (void)device;
4168 (void)pImportSemaphoreFdInfo;
4169 return VK_ERROR_INCOMPATIBLE_DRIVER;
4170 #endif
4171 }
4172
    // Queue submission with guest-side emulation of external semaphore
    // payloads:
    //  - wait semaphores backed by OS objects (zircon events / sync fds) are
    //    waited on in worker threads, then "pre-signaled" via an extra
    //    submit, before the real submission;
    //  - signal semaphores backed by OS objects (and an external fence fd, if
    //    any) are signaled from a worker task after vkQueueWaitIdle.
    VkResult on_vkQueueSubmit(
        void* context, VkResult input_result,
        VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence) {

        std::vector<VkSemaphore> pre_signal_semaphores;
        std::vector<zx_handle_t> pre_signal_events;
        std::vector<int> pre_signal_sync_fds;
        std::vector<zx_handle_t> post_wait_events;
        std::vector<int> post_wait_sync_fds;

        VkEncoder* enc = (VkEncoder*)context;

        AutoLock lock(mLock);

        // Collect the OS-level payloads attached to the wait and signal
        // semaphores of every submit.
        for (uint32_t i = 0; i < submitCount; ++i) {
            for (uint32_t j = 0; j < pSubmits[i].waitSemaphoreCount; ++j) {
                auto it = info_VkSemaphore.find(pSubmits[i].pWaitSemaphores[j]);
                if (it != info_VkSemaphore.end()) {
                    auto& semInfo = it->second;
#ifdef VK_USE_PLATFORM_FUCHSIA
                    if (semInfo.eventHandle) {
                        pre_signal_events.push_back(semInfo.eventHandle);
                        pre_signal_semaphores.push_back(pSubmits[i].pWaitSemaphores[j]);
                    }
#endif
#ifdef VK_USE_PLATFORM_ANDROID_KHR
                    if (semInfo.syncFd >= 0) {
                        pre_signal_sync_fds.push_back(semInfo.syncFd);
                        pre_signal_semaphores.push_back(pSubmits[i].pWaitSemaphores[j]);
                    }
#endif
                }
            }
            for (uint32_t j = 0; j < pSubmits[i].signalSemaphoreCount; ++j) {
                auto it = info_VkSemaphore.find(pSubmits[i].pSignalSemaphores[j]);
                if (it != info_VkSemaphore.end()) {
                    auto& semInfo = it->second;
#ifdef VK_USE_PLATFORM_FUCHSIA
                    if (semInfo.eventHandle) {
                        post_wait_events.push_back(semInfo.eventHandle);
                    }
#endif
#ifdef VK_USE_PLATFORM_ANDROID_KHR
                    if (semInfo.syncFd >= 0) {
                        post_wait_sync_fds.push_back(semInfo.syncFd);
                    }
#endif
                }
            }
        }
        lock.unlock();

        if (pre_signal_semaphores.empty()) {
            // No external waits: submit directly (async if the host supports
            // it, in which case VK_SUCCESS is assumed).
            if (supportsAsyncQueueSubmit()) {
                enc->vkQueueSubmitAsyncGOOGLE(queue, submitCount, pSubmits, fence);
                input_result = VK_SUCCESS;
            } else {
                input_result = enc->vkQueueSubmit(queue, submitCount, pSubmits, fence);
                if (input_result != VK_SUCCESS) return input_result;
            }
        } else {
            // Schedule waits on the OS external objects and
            // signal the wait semaphores
            // in a separate thread.
            std::vector<WorkPool::Task> preSignalTasks;
            std::vector<WorkPool::Task> preSignalQueueSubmitTasks;;
#ifdef VK_USE_PLATFORM_FUCHSIA
            for (auto event : pre_signal_events) {
                preSignalTasks.push_back([event] {
                    zx_object_wait_one(
                        event,
                        ZX_EVENT_SIGNALED,
                        ZX_TIME_INFINITE,
                        nullptr);
                });
            }
#endif
#ifdef VK_USE_PLATFORM_ANDROID_KHR
            for (auto fd : pre_signal_sync_fds) {
                preSignalTasks.push_back([fd] {
                    // NOTE(review): 3000 ms timeout — a hung fence is treated
                    // as signaled after that; confirm this is intentional.
                    sync_wait(fd, 3000);
                });
            }
#endif
            // Block until all external waits have resolved.
            auto waitGroupHandle = mWorkPool.schedule(preSignalTasks);
            mWorkPool.waitAll(waitGroupHandle);

            // Signal the wait semaphores host-side so the real submission's
            // waits can complete.
            VkSubmitInfo submit_info = {
                .sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
                .waitSemaphoreCount = 0,
                .pWaitSemaphores = nullptr,
                .pWaitDstStageMask = nullptr,
                .signalSemaphoreCount = static_cast<uint32_t>(pre_signal_semaphores.size()),
                .pSignalSemaphores = pre_signal_semaphores.data()};

            if (supportsAsyncQueueSubmit()) {
                enc->vkQueueSubmitAsyncGOOGLE(queue, 1, &submit_info, VK_NULL_HANDLE);
            } else {
                enc->vkQueueSubmit(queue, 1, &submit_info, VK_NULL_HANDLE);
            }

            // Now perform the caller's actual submission.
            if (supportsAsyncQueueSubmit()) {
                enc->vkQueueSubmitAsyncGOOGLE(queue, submitCount, pSubmits, fence);
                input_result = VK_SUCCESS;
            } else {
                input_result = enc->vkQueueSubmit(queue, submitCount, pSubmits, fence);
                if (input_result != VK_SUCCESS) return input_result;
            }
        }

        lock.lock();
        int externalFenceFdToSignal = -1;

#ifdef VK_USE_PLATFORM_ANDROID_KHR
        // If the submission fence is backed by a sync fd, it must be signaled
        // once the queue work completes.
        if (fence != VK_NULL_HANDLE) {
            auto it = info_VkFence.find(fence);
            if (it != info_VkFence.end()) {
                const auto& info = it->second;
                if (info.syncFd >= 0) {
                    externalFenceFdToSignal = info.syncFd;
                }
            }
        }
#endif
        if (externalFenceFdToSignal >= 0 ||
            !post_wait_events.empty() ||
            !post_wait_sync_fds.empty()) {

            std::vector<WorkPool::Task> tasks;

            // Worker task: wait for the queue to go idle on a fresh encoder
            // (this thread's connection), then signal all external objects.
            // The handle/fd vectors are captured by copy since the task
            // outlives this call; the work-group handle is recorded per-queue
            // so on_vkQueueWaitIdle can rendezvous with it.
            tasks.push_back([this, queue, externalFenceFdToSignal,
                             post_wait_events /* copy of zx handles */,
                             post_wait_sync_fds /* copy of sync fds */] {
                auto hostConn = mThreadingCallbacks.hostConnectionGetFunc();
                auto vkEncoder = mThreadingCallbacks.vkEncoderGetFunc(hostConn);
                auto waitIdleRes = vkEncoder->vkQueueWaitIdle(queue);
#ifdef VK_USE_PLATFORM_FUCHSIA
                (void)externalFenceFdToSignal;
                for (auto& event : post_wait_events) {
                    zx_object_signal(event, 0, ZX_EVENT_SIGNALED);
                }
#endif
#ifdef VK_USE_PLATFORM_ANDROID_KHR
                for (auto& fd : post_wait_sync_fds) {
                    goldfish_sync_signal(fd);
                }

                if (externalFenceFdToSignal >= 0) {
                    ALOGV("%s: external fence real signal: %d\n", __func__, externalFenceFdToSignal);
                    goldfish_sync_signal(externalFenceFdToSignal);
                }
#endif
            });
            auto queueAsyncWaitHandle = mWorkPool.schedule(tasks);
            auto& queueWorkItems = mQueueSensitiveWorkPoolItems[queue];
            queueWorkItems.push_back(queueAsyncWaitHandle);
        }

        return VK_SUCCESS;
    }
4333
on_vkQueueWaitIdle(void * context,VkResult,VkQueue queue)4334 VkResult on_vkQueueWaitIdle(
4335 void* context, VkResult,
4336 VkQueue queue) {
4337
4338 VkEncoder* enc = (VkEncoder*)context;
4339
4340 AutoLock lock(mLock);
4341 std::vector<WorkPool::WaitGroupHandle> toWait =
4342 mQueueSensitiveWorkPoolItems[queue];
4343 mQueueSensitiveWorkPoolItems[queue].clear();
4344 lock.unlock();
4345
4346 if (toWait.empty()) {
4347 ALOGV("%s: No queue-specific work pool items\n", __func__);
4348 return enc->vkQueueWaitIdle(queue);
4349 }
4350
4351 for (auto handle : toWait) {
4352 ALOGV("%s: waiting on work group item: %llu\n", __func__,
4353 (unsigned long long)handle);
4354 mWorkPool.waitAll(handle);
4355 }
4356
4357 // now done waiting, get the host's opinion
4358 return enc->vkQueueWaitIdle(queue);
4359 }
4360
    // Rewrites the VkNativeBufferANDROID chained to |local_pCreateInfo| so
    // that its handle field holds the HOST color-buffer handle corresponding
    // to the guest native_handle_t in |pCreateInfo|'s chain. |local_pCreateInfo|
    // is the deep-copied version of the create info about to be encoded.
    void unwrap_VkNativeBufferANDROID(
        const VkImageCreateInfo* pCreateInfo,
        VkImageCreateInfo* local_pCreateInfo) {

        if (!pCreateInfo->pNext) return;

        const VkNativeBufferANDROID* nativeInfo =
            vk_find_struct<VkNativeBufferANDROID>(pCreateInfo);
        if (!nativeInfo) {
            return;
        }

        if (!nativeInfo->handle) return;

        // NOTE(review): assumes the copied chain's first pNext is the
        // VkNativeBufferANDROID struct (mirroring the original chain) —
        // confirm against the deep-copy helper.
        VkNativeBufferANDROID* nativeInfoOut =
            reinterpret_cast<VkNativeBufferANDROID*>(
                const_cast<void*>(
                    local_pCreateInfo->pNext));

        if (!nativeInfoOut->handle) {
            ALOGE("FATAL: Local native buffer info not properly allocated!");
            abort();
        }

        // Overwrite the first 32 bits of the copied handle storage with the
        // host-side handle resolved through gralloc.
        *(uint32_t*)(nativeInfoOut->handle) =
            mThreadingCallbacks.hostConnectionGetFunc()->
                grallocHelper()->getHostHandle(
                    (const native_handle_t*)nativeInfo->handle);
    }
4390
unwrap_vkAcquireImageANDROID_nativeFenceFd(int fd,int *)4391 void unwrap_vkAcquireImageANDROID_nativeFenceFd(int fd, int*) {
4392 #ifdef VK_USE_PLATFORM_ANDROID_KHR
4393 if (fd != -1) {
4394 // Implicit Synchronization
4395 sync_wait(fd, 3000);
4396 // From libvulkan's swapchain.cpp:
4397 // """
4398 // NOTE: we're relying on AcquireImageANDROID to close fence_clone,
4399 // even if the call fails. We could close it ourselves on failure, but
4400 // that would create a race condition if the driver closes it on a
4401 // failure path: some other thread might create an fd with the same
4402 // number between the time the driver closes it and the time we close
4403 // it. We must assume one of: the driver *always* closes it even on
4404 // failure, or *never* closes it on failure.
4405 // """
4406 // Therefore, assume contract where we need to close fd in this driver
4407 close(fd);
4408 }
4409 #endif
4410 }
4411
4412 // Action of vkMapMemoryIntoAddressSpaceGOOGLE:
4413 // 1. preprocess (on_vkMapMemoryIntoAddressSpaceGOOGLE_pre):
4414 // uses address space device to reserve the right size of
4415 // memory.
4416 // 2. the reservation results in a physical address. the physical
4417 // address is set as |*pAddress|.
4418 // 3. after pre, the API call is encoded to the host, where the
4419 // value of pAddress is also sent (the physical address).
4420 // 4. the host will obtain the actual gpu pointer and send it
4421 // back out in |*pAddress|.
4422 // 5. postprocess (on_vkMapMemoryIntoAddressSpaceGOOGLE) will run,
4423 // using the mmap() method of GoldfishAddressSpaceBlock to obtain
4424 // a pointer in guest userspace corresponding to the host pointer.
on_vkMapMemoryIntoAddressSpaceGOOGLE_pre(void *,VkResult,VkDevice,VkDeviceMemory memory,uint64_t * pAddress)4425 VkResult on_vkMapMemoryIntoAddressSpaceGOOGLE_pre(
4426 void*,
4427 VkResult,
4428 VkDevice,
4429 VkDeviceMemory memory,
4430 uint64_t* pAddress) {
4431
4432 AutoLock lock(mLock);
4433
4434 auto it = info_VkDeviceMemory.find(memory);
4435 if (it == info_VkDeviceMemory.end()) {
4436 return VK_ERROR_OUT_OF_HOST_MEMORY;
4437 }
4438
4439 auto& memInfo = it->second;
4440 memInfo.goldfishAddressSpaceBlock =
4441 new GoldfishAddressSpaceBlock;
4442 auto& block = *(memInfo.goldfishAddressSpaceBlock);
4443
4444 block.allocate(
4445 mGoldfishAddressSpaceBlockProvider.get(),
4446 memInfo.mappedSize);
4447
4448 *pAddress = block.physAddr();
4449
4450 return VK_SUCCESS;
4451 }
4452
    // Postprocess step: |*pAddress| now holds the host gpu address; mmap the
    // reserved address-space block at it and return the guest userspace
    // pointer in |*pAddress|. See the flow comment above the _pre function.
    VkResult on_vkMapMemoryIntoAddressSpaceGOOGLE(
        void*,
        VkResult input_result,
        VkDevice,
        VkDeviceMemory memory,
        uint64_t* pAddress) {

        if (input_result != VK_SUCCESS) {
            return input_result;
        }

        // Now pAddress points to the gpu addr from host.
        AutoLock lock(mLock);

        auto it = info_VkDeviceMemory.find(memory);
        if (it == info_VkDeviceMemory.end()) {
            return VK_ERROR_OUT_OF_HOST_MEMORY;
        }

        auto& memInfo = it->second;
        // NOTE(review): assumes the _pre step ran and allocated
        // goldfishAddressSpaceBlock; a null here would crash — confirm the
        // call ordering is guaranteed by the dispatch layer.
        auto& block = *(memInfo.goldfishAddressSpaceBlock);

        uint64_t gpuAddr = *pAddress;

        // Map the reserved block at the host-provided gpu address.
        void* userPtr = block.mmap(gpuAddr);

        D("%s: Got new host visible alloc. "
          "Sizeof void: %zu map size: %zu Range: [%p %p]",
          __func__,
          sizeof(void*), (size_t)memInfo.mappedSize,
          userPtr,
          (unsigned char*)userPtr + memInfo.mappedSize);

        *pAddress = (uint64_t)(uintptr_t)userPtr;

        return input_result;
    }
4490
isDescriptorTypeImageInfo(VkDescriptorType descType)4491 bool isDescriptorTypeImageInfo(VkDescriptorType descType) {
4492 return (descType == VK_DESCRIPTOR_TYPE_SAMPLER) ||
4493 (descType == VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER) ||
4494 (descType == VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE) ||
4495 (descType == VK_DESCRIPTOR_TYPE_STORAGE_IMAGE) ||
4496 (descType == VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT);
4497 }
4498
isDescriptorTypeBufferInfo(VkDescriptorType descType)4499 bool isDescriptorTypeBufferInfo(VkDescriptorType descType) {
4500 return (descType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER) ||
4501 (descType == VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC) ||
4502 (descType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER) ||
4503 (descType == VK_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC);
4504 }
4505
isDescriptorTypeBufferView(VkDescriptorType descType)4506 bool isDescriptorTypeBufferView(VkDescriptorType descType) {
4507 return (descType == VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER) ||
4508 (descType == VK_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER);
4509 }
4510
initDescriptorUpdateTemplateBuffers(const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo,VkDescriptorUpdateTemplate descriptorUpdateTemplate)4511 VkResult initDescriptorUpdateTemplateBuffers(
4512 const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
4513 VkDescriptorUpdateTemplate descriptorUpdateTemplate) {
4514
4515 AutoLock lock(mLock);
4516
4517 auto it = info_VkDescriptorUpdateTemplate.find(descriptorUpdateTemplate);
4518 if (it == info_VkDescriptorUpdateTemplate.end()) {
4519 return VK_ERROR_INITIALIZATION_FAILED;
4520 }
4521
4522 auto& info = it->second;
4523
4524 size_t imageInfosNeeded = 0;
4525 size_t bufferInfosNeeded = 0;
4526 size_t bufferViewsNeeded = 0;
4527
4528 for (uint32_t i = 0; i < pCreateInfo->descriptorUpdateEntryCount; ++i) {
4529 const auto& entry = pCreateInfo->pDescriptorUpdateEntries[i];
4530 uint32_t descCount = entry.descriptorCount;
4531 VkDescriptorType descType = entry.descriptorType;
4532
4533 info.templateEntries.push_back(entry);
4534
4535 for (uint32_t j = 0; j < descCount; ++j) {
4536 if (isDescriptorTypeImageInfo(descType)) {
4537 ++imageInfosNeeded;
4538 info.imageInfoEntryIndices.push_back(i);
4539 } else if (isDescriptorTypeBufferInfo(descType)) {
4540 ++bufferInfosNeeded;
4541 info.bufferInfoEntryIndices.push_back(i);
4542 } else if (isDescriptorTypeBufferView(descType)) {
4543 ++bufferViewsNeeded;
4544 info.bufferViewEntryIndices.push_back(i);
4545 } else {
4546 ALOGE("%s: FATAL: Unknown descriptor type %d\n", __func__, descType);
4547 abort();
4548 }
4549 }
4550 }
4551
4552 // To be filled in later (our flat structure)
4553 info.imageInfos.resize(imageInfosNeeded);
4554 info.bufferInfos.resize(bufferInfosNeeded);
4555 info.bufferViews.resize(bufferViewsNeeded);
4556
4557 return VK_SUCCESS;
4558 }
4559
on_vkCreateDescriptorUpdateTemplate(void * context,VkResult input_result,VkDevice device,const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate)4560 VkResult on_vkCreateDescriptorUpdateTemplate(
4561 void* context, VkResult input_result,
4562 VkDevice device,
4563 const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
4564 const VkAllocationCallbacks* pAllocator,
4565 VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
4566
4567 (void)context;
4568 (void)device;
4569 (void)pAllocator;
4570
4571 if (input_result != VK_SUCCESS) return input_result;
4572
4573 return initDescriptorUpdateTemplateBuffers(pCreateInfo, *pDescriptorUpdateTemplate);
4574 }
4575
on_vkCreateDescriptorUpdateTemplateKHR(void * context,VkResult input_result,VkDevice device,const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate)4576 VkResult on_vkCreateDescriptorUpdateTemplateKHR(
4577 void* context, VkResult input_result,
4578 VkDevice device,
4579 const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
4580 const VkAllocationCallbacks* pAllocator,
4581 VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
4582
4583 (void)context;
4584 (void)device;
4585 (void)pAllocator;
4586
4587 if (input_result != VK_SUCCESS) return input_result;
4588
4589 return initDescriptorUpdateTemplateBuffers(pCreateInfo, *pDescriptorUpdateTemplate);
4590 }
4591
    // Flattens the caller-provided raw template data (pData) into this
    // template's contiguous imageInfos/bufferInfos/bufferViews arrays
    // (sized earlier by initDescriptorUpdateTemplateBuffers), then forwards
    // the whole update in a single sized GOOGLE call to the host.
    void on_vkUpdateDescriptorSetWithTemplate(
        void* context,
        VkDevice device,
        VkDescriptorSet descriptorSet,
        VkDescriptorUpdateTemplate descriptorUpdateTemplate,
        const void* pData) {

        VkEncoder* enc = (VkEncoder*)context;

        uint8_t* userBuffer = (uint8_t*)pData;
        if (!userBuffer) return;

        AutoLock lock(mLock);

        auto it = info_VkDescriptorUpdateTemplate.find(descriptorUpdateTemplate);
        if (it == info_VkDescriptorUpdateTemplate.end()) {
            return;
        }

        auto& info = it->second;

        // Byte write-cursors into the flat destination arrays.
        size_t currImageInfoOffset = 0;
        size_t currBufferInfoOffset = 0;
        size_t currBufferViewOffset = 0;

        for (const auto& entry : info.templateEntries) {
            VkDescriptorType descType = entry.descriptorType;

            auto offset = entry.offset;
            auto stride = entry.stride;

            uint32_t descCount = entry.descriptorCount;

            if (isDescriptorTypeImageInfo(descType)) {
                // Zero stride is treated as tightly packed elements.
                if (!stride) stride = sizeof(VkDescriptorImageInfo);
                for (uint32_t j = 0; j < descCount; ++j) {
                    memcpy(((uint8_t*)info.imageInfos.data()) + currImageInfoOffset,
                           userBuffer + offset + j * stride,
                           sizeof(VkDescriptorImageInfo));
                    currImageInfoOffset += sizeof(VkDescriptorImageInfo);
                }
            } else if (isDescriptorTypeBufferInfo(descType)) {
                if (!stride) stride = sizeof(VkDescriptorBufferInfo);
                for (uint32_t j = 0; j < descCount; ++j) {
                    memcpy(((uint8_t*)info.bufferInfos.data()) + currBufferInfoOffset,
                           userBuffer + offset + j * stride,
                           sizeof(VkDescriptorBufferInfo));
                    currBufferInfoOffset += sizeof(VkDescriptorBufferInfo);
                }
            } else if (isDescriptorTypeBufferView(descType)) {
                if (!stride) stride = sizeof(VkBufferView);
                for (uint32_t j = 0; j < descCount; ++j) {
                    memcpy(((uint8_t*)info.bufferViews.data()) + currBufferViewOffset,
                           userBuffer + offset + j * stride,
                           sizeof(VkBufferView));
                    currBufferViewOffset += sizeof(VkBufferView);
                }
            } else {
                // Template entries were validated at creation time, so an
                // unknown type here indicates corrupted tracker state.
                ALOGE("%s: FATAL: Unknown descriptor type %d\n", __func__, descType);
                abort();
            }
        }

        // Note: the encoder call is made while still holding mLock, since it
        // reads the info.* arrays populated above.
        enc->vkUpdateDescriptorSetWithTemplateSizedGOOGLE(
            device,
            descriptorSet,
            descriptorUpdateTemplate,
            (uint32_t)info.imageInfos.size(),
            (uint32_t)info.bufferInfos.size(),
            (uint32_t)info.bufferViews.size(),
            info.imageInfoEntryIndices.data(),
            info.bufferInfoEntryIndices.data(),
            info.bufferViewEntryIndices.data(),
            info.imageInfos.data(),
            info.bufferInfos.data(),
            info.bufferViews.data());
    }
4669
on_vkGetPhysicalDeviceImageFormatProperties2_common(bool isKhr,void * context,VkResult input_result,VkPhysicalDevice physicalDevice,const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo,VkImageFormatProperties2 * pImageFormatProperties)4670 VkResult on_vkGetPhysicalDeviceImageFormatProperties2_common(
4671 bool isKhr,
4672 void* context, VkResult input_result,
4673 VkPhysicalDevice physicalDevice,
4674 const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
4675 VkImageFormatProperties2* pImageFormatProperties) {
4676
4677 VkEncoder* enc = (VkEncoder*)context;
4678 (void)input_result;
4679
4680 VkAndroidHardwareBufferUsageANDROID* output_ahw_usage =
4681 vk_find_struct<VkAndroidHardwareBufferUsageANDROID>(pImageFormatProperties);
4682
4683 VkResult hostRes;
4684
4685 if (isKhr) {
4686 hostRes = enc->vkGetPhysicalDeviceImageFormatProperties2KHR(
4687 physicalDevice, pImageFormatInfo,
4688 pImageFormatProperties);
4689 } else {
4690 hostRes = enc->vkGetPhysicalDeviceImageFormatProperties2(
4691 physicalDevice, pImageFormatInfo,
4692 pImageFormatProperties);
4693 }
4694
4695 if (hostRes != VK_SUCCESS) return hostRes;
4696
4697 if (output_ahw_usage) {
4698 output_ahw_usage->androidHardwareBufferUsage =
4699 getAndroidHardwareBufferUsageFromVkUsage(
4700 pImageFormatInfo->flags,
4701 pImageFormatInfo->usage);
4702 }
4703
4704 return hostRes;
4705 }
4706
on_vkGetPhysicalDeviceImageFormatProperties2(void * context,VkResult input_result,VkPhysicalDevice physicalDevice,const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo,VkImageFormatProperties2 * pImageFormatProperties)4707 VkResult on_vkGetPhysicalDeviceImageFormatProperties2(
4708 void* context, VkResult input_result,
4709 VkPhysicalDevice physicalDevice,
4710 const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
4711 VkImageFormatProperties2* pImageFormatProperties) {
4712 return on_vkGetPhysicalDeviceImageFormatProperties2_common(
4713 false /* not KHR */, context, input_result,
4714 physicalDevice, pImageFormatInfo, pImageFormatProperties);
4715 }
4716
on_vkGetPhysicalDeviceImageFormatProperties2KHR(void * context,VkResult input_result,VkPhysicalDevice physicalDevice,const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo,VkImageFormatProperties2 * pImageFormatProperties)4717 VkResult on_vkGetPhysicalDeviceImageFormatProperties2KHR(
4718 void* context, VkResult input_result,
4719 VkPhysicalDevice physicalDevice,
4720 const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
4721 VkImageFormatProperties2* pImageFormatProperties) {
4722 return on_vkGetPhysicalDeviceImageFormatProperties2_common(
4723 true /* is KHR */, context, input_result,
4724 physicalDevice, pImageFormatInfo, pImageFormatProperties);
4725 }
4726
registerEncoderCleanupCallback(const VkEncoder * encoder,void * object,CleanupCallback callback)4727 void registerEncoderCleanupCallback(const VkEncoder* encoder, void* object, CleanupCallback callback) {
4728 AutoLock lock(mLock);
4729 auto& callbacks = mEncoderCleanupCallbacks[encoder];
4730 callbacks[object] = callback;
4731 }
4732
unregisterEncoderCleanupCallback(const VkEncoder * encoder,void * object)4733 void unregisterEncoderCleanupCallback(const VkEncoder* encoder, void* object) {
4734 AutoLock lock(mLock);
4735 mEncoderCleanupCallbacks[encoder].erase(object);
4736 }
4737
onEncoderDeleted(const VkEncoder * encoder)4738 void onEncoderDeleted(const VkEncoder* encoder) {
4739 AutoLock lock(mLock);
4740 if (mEncoderCleanupCallbacks.find(encoder) == mEncoderCleanupCallbacks.end()) return;
4741
4742 std::unordered_map<void*, CleanupCallback> callbackCopies = mEncoderCleanupCallbacks[encoder];
4743
4744 mEncoderCleanupCallbacks.erase(encoder);
4745 lock.unlock();
4746
4747 for (auto it : callbackCopies) {
4748 it.second();
4749 }
4750 }
4751
syncEncodersForCommandBuffer(VkCommandBuffer commandBuffer,VkEncoder * currentEncoder)4752 uint32_t syncEncodersForCommandBuffer(VkCommandBuffer commandBuffer, VkEncoder* currentEncoder) {
4753 AutoLock lock(mLock);
4754
4755 auto it = info_VkCommandBuffer.find(commandBuffer);
4756 if (it == info_VkCommandBuffer.end()) return 0;
4757
4758 auto& info = it->second;
4759
4760 if (!info.lastUsedEncoderPtr) {
4761 info.lastUsedEncoderPtr = new VkEncoder*;
4762 *(info.lastUsedEncoderPtr) = currentEncoder;
4763 }
4764
4765 auto lastUsedEncoderPtr = info.lastUsedEncoderPtr;
4766
4767 auto lastEncoder = *(lastUsedEncoderPtr);
4768
4769 // We always make lastUsedEncoderPtr track
4770 // the current encoder, even if the last encoder
4771 // is null.
4772 *(lastUsedEncoderPtr) = currentEncoder;
4773
4774 if (!lastEncoder) return 0;
4775 if (lastEncoder == currentEncoder) return 0;
4776
4777 auto oldSeq = info.sequenceNumber;
4778
4779 lock.unlock();
4780
4781 lastEncoder->vkCommandBufferHostSyncGOOGLE(commandBuffer, false, oldSeq + 1);
4782 lastEncoder->flush();
4783 currentEncoder->vkCommandBufferHostSyncGOOGLE(commandBuffer, true, oldSeq + 2);
4784
4785 unregisterEncoderCleanupCallback(lastEncoder, commandBuffer);
4786
4787 registerEncoderCleanupCallback(currentEncoder, commandBuffer, [this, currentEncoder, commandBuffer]() {
4788 AutoLock lock(mLock);
4789 auto it = info_VkCommandBuffer.find(commandBuffer);
4790 if (it == info_VkCommandBuffer.end()) return;
4791
4792 auto& info = it->second;
4793 if (!info.lastUsedEncoderPtr) return;
4794 if (!*(info.lastUsedEncoderPtr)) return;
4795
4796 if (currentEncoder == *(info.lastUsedEncoderPtr)) {
4797 *(info.lastUsedEncoderPtr) = nullptr;
4798 }
4799 });
4800
4801 return 1;
4802 }
4803
    // Queue analog of syncEncodersForCommandBuffer: when a queue migrates to
    // a new encoder (thread), emit paired host sync points so the host
    // replays the two encoders' streams in order. Only active when async
    // queue submit is supported. Returns 1 if a sync was emitted, else 0.
    uint32_t syncEncodersForQueue(VkQueue queue, VkEncoder* currentEncoder) {
        if (!supportsAsyncQueueSubmit()) {
            return 0;
        }

        AutoLock lock(mLock);

        auto it = info_VkQueue.find(queue);
        if (it == info_VkQueue.end()) return 0;

        auto& info = it->second;

        // First use of this queue: start tracking; no handoff needed yet.
        if (!info.lastUsedEncoderPtr) {
            info.lastUsedEncoderPtr = new VkEncoder*;
            *(info.lastUsedEncoderPtr) = currentEncoder;
        }

        auto lastUsedEncoderPtr = info.lastUsedEncoderPtr;

        auto lastEncoder = *(lastUsedEncoderPtr);

        // We always make lastUsedEncoderPtr track
        // the current encoder, even if the last encoder
        // is null.
        *(lastUsedEncoderPtr) = currentEncoder;

        if (!lastEncoder) return 0;
        if (lastEncoder == currentEncoder) return 0;

        auto oldSeq = info.sequenceNumber;

        // Reserve the two sequence numbers used for the handoff below.
        info.sequenceNumber += 2;

        lock.unlock();

        // at this point the seqno for the old thread is determined

        // Handoff: the previous encoder gets (false, oldSeq + 1), the new
        // one (true, oldSeq + 2); the host orders the streams accordingly.
        lastEncoder->vkQueueHostSyncGOOGLE(queue, false, oldSeq + 1);
        lastEncoder->flush();
        currentEncoder->vkQueueHostSyncGOOGLE(queue, true, oldSeq + 2);

        unregisterEncoderCleanupCallback(lastEncoder, queue);

        // If the now-current encoder is destroyed later, clear the dangling
        // last-used pointer for this queue.
        registerEncoderCleanupCallback(currentEncoder, queue, [this, currentEncoder, queue]() {
            AutoLock lock(mLock);
            auto it = info_VkQueue.find(queue);
            if (it == info_VkQueue.end()) return;

            auto& info = it->second;
            if (!info.lastUsedEncoderPtr) return;
            if (!*(info.lastUsedEncoderPtr)) return;

            if (currentEncoder == *(info.lastUsedEncoderPtr)) {
                *(info.lastUsedEncoderPtr) = nullptr;
            }
        });

        return 1;
    }
4863
on_vkBeginCommandBuffer(void * context,VkResult input_result,VkCommandBuffer commandBuffer,const VkCommandBufferBeginInfo * pBeginInfo)4864 VkResult on_vkBeginCommandBuffer(
4865 void* context, VkResult input_result,
4866 VkCommandBuffer commandBuffer,
4867 const VkCommandBufferBeginInfo* pBeginInfo) {
4868
4869 VkEncoder* enc = (VkEncoder*)context;
4870 (void)input_result;
4871
4872 if (!supportsDeferredCommands()) {
4873 return enc->vkBeginCommandBuffer(commandBuffer, pBeginInfo);
4874 }
4875
4876 enc->vkBeginCommandBufferAsyncGOOGLE(commandBuffer, pBeginInfo);
4877
4878 return VK_SUCCESS;
4879 }
4880
on_vkEndCommandBuffer(void * context,VkResult input_result,VkCommandBuffer commandBuffer)4881 VkResult on_vkEndCommandBuffer(
4882 void* context, VkResult input_result,
4883 VkCommandBuffer commandBuffer) {
4884
4885 VkEncoder* enc = (VkEncoder*)context;
4886 (void)input_result;
4887
4888 if (!supportsDeferredCommands()) {
4889 return enc->vkEndCommandBuffer(commandBuffer);
4890 }
4891
4892 enc->vkEndCommandBufferAsyncGOOGLE(commandBuffer);
4893
4894 return VK_SUCCESS;
4895 }
4896
on_vkResetCommandBuffer(void * context,VkResult input_result,VkCommandBuffer commandBuffer,VkCommandBufferResetFlags flags)4897 VkResult on_vkResetCommandBuffer(
4898 void* context, VkResult input_result,
4899 VkCommandBuffer commandBuffer,
4900 VkCommandBufferResetFlags flags) {
4901
4902 VkEncoder* enc = (VkEncoder*)context;
4903 (void)input_result;
4904
4905 if (!supportsDeferredCommands()) {
4906 return enc->vkResetCommandBuffer(commandBuffer, flags);
4907 }
4908
4909 enc->vkResetCommandBufferAsyncGOOGLE(commandBuffer, flags);
4910 return VK_SUCCESS;
4911 }
4912
on_vkCreateImageView(void * context,VkResult input_result,VkDevice device,const VkImageViewCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkImageView * pView)4913 VkResult on_vkCreateImageView(
4914 void* context, VkResult input_result,
4915 VkDevice device,
4916 const VkImageViewCreateInfo* pCreateInfo,
4917 const VkAllocationCallbacks* pAllocator,
4918 VkImageView* pView) {
4919
4920 VkEncoder* enc = (VkEncoder*)context;
4921 (void)input_result;
4922
4923 VkImageViewCreateInfo localCreateInfo = vk_make_orphan_copy(*pCreateInfo);
4924
4925 #ifdef VK_USE_PLATFORM_ANDROID_KHR
4926 const VkExternalFormatANDROID* extFormatAndroidPtr =
4927 vk_find_struct<VkExternalFormatANDROID>(pCreateInfo);
4928 if (extFormatAndroidPtr) {
4929 if (extFormatAndroidPtr->externalFormat) {
4930 localCreateInfo.format =
4931 vk_format_from_android(extFormatAndroidPtr->externalFormat);
4932 }
4933 }
4934 #endif
4935
4936 return enc->vkCreateImageView(device, &localCreateInfo, pAllocator, pView);
4937 }
4938
getApiVersionFromInstance(VkInstance instance) const4939 uint32_t getApiVersionFromInstance(VkInstance instance) const {
4940 AutoLock lock(mLock);
4941 uint32_t api = kDefaultApiVersion;
4942
4943 auto it = info_VkInstance.find(instance);
4944 if (it == info_VkInstance.end()) return api;
4945
4946 api = it->second.highestApiVersion;
4947
4948 return api;
4949 }
4950
getApiVersionFromDevice(VkDevice device) const4951 uint32_t getApiVersionFromDevice(VkDevice device) const {
4952 AutoLock lock(mLock);
4953
4954 uint32_t api = kDefaultApiVersion;
4955
4956 auto it = info_VkDevice.find(device);
4957 if (it == info_VkDevice.end()) return api;
4958
4959 api = it->second.apiVersion;
4960
4961 return api;
4962 }
4963
hasInstanceExtension(VkInstance instance,const std::string & name) const4964 bool hasInstanceExtension(VkInstance instance, const std::string& name) const {
4965 AutoLock lock(mLock);
4966
4967 auto it = info_VkInstance.find(instance);
4968 if (it == info_VkInstance.end()) return false;
4969
4970 return it->second.enabledExtensions.find(name) !=
4971 it->second.enabledExtensions.end();
4972 }
4973
hasDeviceExtension(VkDevice device,const std::string & name) const4974 bool hasDeviceExtension(VkDevice device, const std::string& name) const {
4975 AutoLock lock(mLock);
4976
4977 auto it = info_VkDevice.find(device);
4978 if (it == info_VkDevice.end()) return false;
4979
4980 return it->second.enabledExtensions.find(name) !=
4981 it->second.enabledExtensions.end();
4982 }
4983
private:
    // Guards all mutable tracker state below; mutable so const query
    // methods (e.g. hasInstanceExtension) can take it.
    mutable Lock mLock;
    HostVisibleMemoryVirtualizationInfo mHostVisibleMemoryVirtInfo;
    // Feature flags received from the emulator; set via setupFeatures().
    std::unique_ptr<EmulatorFeatureInfo> mFeatureInfo;
    ResourceTracker::ThreadingCallbacks mThreadingCallbacks;
    uint32_t mStreamFeatureBits = 0;
    std::unique_ptr<GoldfishAddressSpaceBlockProvider> mGoldfishAddressSpaceBlockProvider;

    // Instance/device extension lists as reported by the host.
    std::vector<VkExtensionProperties> mHostInstanceExtensions;
    std::vector<VkExtensionProperties> mHostDeviceExtensions;

    // File descriptor of the sync device; -1 when unopened/unavailable.
    int mSyncDeviceFd = -1;
#ifdef VK_USE_PLATFORM_ANDROID_KHR
    // virtio-gpu render node fd; -1 when unopened/unavailable.
    int mRendernodeFd = -1;
#endif

#ifdef VK_USE_PLATFORM_FUCHSIA
    // FIDL clients for the goldfish control device and sysmem allocator.
    std::unique_ptr<
        llcpp::fuchsia::hardware::goldfish::ControlDevice::SyncClient>
        mControlDevice;
    std::unique_ptr<llcpp::fuchsia::sysmem::Allocator::SyncClient>
        mSysmemAllocator;
#endif

    // Worker pool (4 threads) plus per-queue wait-group handles for work
    // whose completion must be tracked per VkQueue.
    WorkPool mWorkPool { 4 };
    std::unordered_map<VkQueue, std::vector<WorkPool::WaitGroupHandle>>
        mQueueSensitiveWorkPoolItems;

    // Per-encoder cleanup callbacks keyed by an object pointer (command
    // buffer or queue); executed in onEncoderDeleted().
    std::unordered_map<const VkEncoder*, std::unordered_map<void*, CleanupCallback>> mEncoderCleanupCallbacks;

};
5015
// ResourceTracker is a pimpl wrapper: all state and logic live in Impl.
ResourceTracker::ResourceTracker() : mImpl(new ResourceTracker::Impl()) { }
ResourceTracker::~ResourceTracker() { }
// Handle-mapping accessors: expose Impl's mapping objects (create/unwrap/
// destroy/default strategies) to the generated encoder code.
VulkanHandleMapping* ResourceTracker::createMapping() {
    return &mImpl->createMapping;
}
VulkanHandleMapping* ResourceTracker::unwrapMapping() {
    return &mImpl->unwrapMapping;
}
VulkanHandleMapping* ResourceTracker::destroyMapping() {
    return &mImpl->destroyMapping;
}
VulkanHandleMapping* ResourceTracker::defaultMapping() {
    return &mImpl->defaultMapping;
}
static ResourceTracker* sTracker = nullptr;
// static
// Process-wide singleton accessor; the instance is never deleted.
// NOTE(review): the lazy initialization is not synchronized; presumably the
// first call happens before any concurrent use - confirm with callers.
ResourceTracker* ResourceTracker::get() {
    if (!sTracker) {
        // To be initialized once on vulkan device open.
        sTracker = new ResourceTracker;
    }
    return sTracker;
}
5039
// Stamps out ResourceTracker::register_<T> / unregister_<T> forwarders to
// the Impl for every Vulkan handle type listed in
// GOLDFISH_VK_LIST_HANDLE_TYPES.
#define HANDLE_REGISTER_IMPL(type) \
    void ResourceTracker::register_##type(type obj) { \
        mImpl->register_##type(obj); \
    } \
    void ResourceTracker::unregister_##type(type obj) { \
        mImpl->unregister_##type(obj); \
    } \

GOLDFISH_VK_LIST_HANDLE_TYPES(HANDLE_REGISTER_IMPL)
5049
// --- ResourceTracker public API: thin pimpl forwarders ---
// Each method below delegates directly to the identically named method on
// mImpl, where the actual tracking logic lives.
bool ResourceTracker::isMemoryTypeHostVisible(
    VkDevice device, uint32_t typeIndex) const {
    return mImpl->isMemoryTypeHostVisible(device, typeIndex);
}

uint8_t* ResourceTracker::getMappedPointer(VkDeviceMemory memory) {
    return mImpl->getMappedPointer(memory);
}

VkDeviceSize ResourceTracker::getMappedSize(VkDeviceMemory memory) {
    return mImpl->getMappedSize(memory);
}

VkDeviceSize ResourceTracker::getNonCoherentExtendedSize(VkDevice device, VkDeviceSize basicSize) const {
    return mImpl->getNonCoherentExtendedSize(device, basicSize);
}

bool ResourceTracker::isValidMemoryRange(const VkMappedMemoryRange& range) const {
    return mImpl->isValidMemoryRange(range);
}

void ResourceTracker::setupFeatures(const EmulatorFeatureInfo* features) {
    mImpl->setupFeatures(features);
}

void ResourceTracker::setThreadingCallbacks(const ResourceTracker::ThreadingCallbacks& callbacks) {
    mImpl->setThreadingCallbacks(callbacks);
}

bool ResourceTracker::hostSupportsVulkan() const {
    return mImpl->hostSupportsVulkan();
}

bool ResourceTracker::usingDirectMapping() const {
    return mImpl->usingDirectMapping();
}

uint32_t ResourceTracker::getStreamFeatures() const {
    return mImpl->getStreamFeatures();
}

uint32_t ResourceTracker::getApiVersionFromInstance(VkInstance instance) const {
    return mImpl->getApiVersionFromInstance(instance);
}

uint32_t ResourceTracker::getApiVersionFromDevice(VkDevice device) const {
    return mImpl->getApiVersionFromDevice(device);
}
bool ResourceTracker::hasInstanceExtension(VkInstance instance, const std::string &name) const {
    return mImpl->hasInstanceExtension(instance, name);
}
bool ResourceTracker::hasDeviceExtension(VkDevice device, const std::string &name) const {
    return mImpl->hasDeviceExtension(device, name);
}

VkResult ResourceTracker::on_vkEnumerateInstanceExtensionProperties(
    void* context,
    VkResult input_result,
    const char* pLayerName,
    uint32_t* pPropertyCount,
    VkExtensionProperties* pProperties) {
    return mImpl->on_vkEnumerateInstanceExtensionProperties(
        context, input_result, pLayerName, pPropertyCount, pProperties);
}

VkResult ResourceTracker::on_vkEnumerateDeviceExtensionProperties(
    void* context,
    VkResult input_result,
    VkPhysicalDevice physicalDevice,
    const char* pLayerName,
    uint32_t* pPropertyCount,
    VkExtensionProperties* pProperties) {
    return mImpl->on_vkEnumerateDeviceExtensionProperties(
        context, input_result, physicalDevice, pLayerName, pPropertyCount, pProperties);
}

VkResult ResourceTracker::on_vkEnumeratePhysicalDevices(
    void* context, VkResult input_result,
    VkInstance instance, uint32_t* pPhysicalDeviceCount,
    VkPhysicalDevice* pPhysicalDevices) {
    return mImpl->on_vkEnumeratePhysicalDevices(
        context, input_result, instance, pPhysicalDeviceCount,
        pPhysicalDevices);
}

void ResourceTracker::on_vkGetPhysicalDeviceProperties(
    void* context,
    VkPhysicalDevice physicalDevice,
    VkPhysicalDeviceProperties* pProperties) {
    mImpl->on_vkGetPhysicalDeviceProperties(context, physicalDevice,
                                            pProperties);
}

void ResourceTracker::on_vkGetPhysicalDeviceProperties2(
    void* context,
    VkPhysicalDevice physicalDevice,
    VkPhysicalDeviceProperties2* pProperties) {
    mImpl->on_vkGetPhysicalDeviceProperties2(context, physicalDevice,
                                             pProperties);
}

// The KHR alias intentionally shares the core Properties2 implementation.
void ResourceTracker::on_vkGetPhysicalDeviceProperties2KHR(
    void* context,
    VkPhysicalDevice physicalDevice,
    VkPhysicalDeviceProperties2* pProperties) {
    mImpl->on_vkGetPhysicalDeviceProperties2(context, physicalDevice,
                                             pProperties);
}

void ResourceTracker::on_vkGetPhysicalDeviceMemoryProperties(
    void* context,
    VkPhysicalDevice physicalDevice,
    VkPhysicalDeviceMemoryProperties* pMemoryProperties) {
    mImpl->on_vkGetPhysicalDeviceMemoryProperties(
        context, physicalDevice, pMemoryProperties);
}

void ResourceTracker::on_vkGetPhysicalDeviceMemoryProperties2(
    void* context,
    VkPhysicalDevice physicalDevice,
    VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
    mImpl->on_vkGetPhysicalDeviceMemoryProperties2(
        context, physicalDevice, pMemoryProperties);
}

// The KHR alias intentionally shares the core MemoryProperties2 impl.
void ResourceTracker::on_vkGetPhysicalDeviceMemoryProperties2KHR(
    void* context,
    VkPhysicalDevice physicalDevice,
    VkPhysicalDeviceMemoryProperties2* pMemoryProperties) {
    mImpl->on_vkGetPhysicalDeviceMemoryProperties2(
        context, physicalDevice, pMemoryProperties);
}
5182
on_vkCreateInstance(void * context,VkResult input_result,const VkInstanceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkInstance * pInstance)5183 VkResult ResourceTracker::on_vkCreateInstance(
5184 void* context,
5185 VkResult input_result,
5186 const VkInstanceCreateInfo* pCreateInfo,
5187 const VkAllocationCallbacks* pAllocator,
5188 VkInstance* pInstance) {
5189 return mImpl->on_vkCreateInstance(
5190 context, input_result, pCreateInfo, pAllocator, pInstance);
5191 }
5192
on_vkCreateDevice(void * context,VkResult input_result,VkPhysicalDevice physicalDevice,const VkDeviceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDevice * pDevice)5193 VkResult ResourceTracker::on_vkCreateDevice(
5194 void* context,
5195 VkResult input_result,
5196 VkPhysicalDevice physicalDevice,
5197 const VkDeviceCreateInfo* pCreateInfo,
5198 const VkAllocationCallbacks* pAllocator,
5199 VkDevice* pDevice) {
5200 return mImpl->on_vkCreateDevice(
5201 context, input_result, physicalDevice, pCreateInfo, pAllocator, pDevice);
5202 }
5203
on_vkDestroyDevice_pre(void * context,VkDevice device,const VkAllocationCallbacks * pAllocator)5204 void ResourceTracker::on_vkDestroyDevice_pre(
5205 void* context,
5206 VkDevice device,
5207 const VkAllocationCallbacks* pAllocator) {
5208 mImpl->on_vkDestroyDevice_pre(context, device, pAllocator);
5209 }
5210
on_vkAllocateMemory(void * context,VkResult input_result,VkDevice device,const VkMemoryAllocateInfo * pAllocateInfo,const VkAllocationCallbacks * pAllocator,VkDeviceMemory * pMemory)5211 VkResult ResourceTracker::on_vkAllocateMemory(
5212 void* context,
5213 VkResult input_result,
5214 VkDevice device,
5215 const VkMemoryAllocateInfo* pAllocateInfo,
5216 const VkAllocationCallbacks* pAllocator,
5217 VkDeviceMemory* pMemory) {
5218 return mImpl->on_vkAllocateMemory(
5219 context, input_result, device, pAllocateInfo, pAllocator, pMemory);
5220 }
5221
on_vkFreeMemory(void * context,VkDevice device,VkDeviceMemory memory,const VkAllocationCallbacks * pAllocator)5222 void ResourceTracker::on_vkFreeMemory(
5223 void* context,
5224 VkDevice device,
5225 VkDeviceMemory memory,
5226 const VkAllocationCallbacks* pAllocator) {
5227 return mImpl->on_vkFreeMemory(
5228 context, device, memory, pAllocator);
5229 }
5230
on_vkMapMemory(void * context,VkResult input_result,VkDevice device,VkDeviceMemory memory,VkDeviceSize offset,VkDeviceSize size,VkMemoryMapFlags flags,void ** ppData)5231 VkResult ResourceTracker::on_vkMapMemory(
5232 void* context,
5233 VkResult input_result,
5234 VkDevice device,
5235 VkDeviceMemory memory,
5236 VkDeviceSize offset,
5237 VkDeviceSize size,
5238 VkMemoryMapFlags flags,
5239 void** ppData) {
5240 return mImpl->on_vkMapMemory(
5241 context, input_result, device, memory, offset, size, flags, ppData);
5242 }
5243
on_vkUnmapMemory(void * context,VkDevice device,VkDeviceMemory memory)5244 void ResourceTracker::on_vkUnmapMemory(
5245 void* context,
5246 VkDevice device,
5247 VkDeviceMemory memory) {
5248 mImpl->on_vkUnmapMemory(context, device, memory);
5249 }
5250
on_vkCreateImage(void * context,VkResult input_result,VkDevice device,const VkImageCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkImage * pImage)5251 VkResult ResourceTracker::on_vkCreateImage(
5252 void* context, VkResult input_result,
5253 VkDevice device, const VkImageCreateInfo *pCreateInfo,
5254 const VkAllocationCallbacks *pAllocator,
5255 VkImage *pImage) {
5256 return mImpl->on_vkCreateImage(
5257 context, input_result,
5258 device, pCreateInfo, pAllocator, pImage);
5259 }
5260
on_vkDestroyImage(void * context,VkDevice device,VkImage image,const VkAllocationCallbacks * pAllocator)5261 void ResourceTracker::on_vkDestroyImage(
5262 void* context,
5263 VkDevice device, VkImage image, const VkAllocationCallbacks *pAllocator) {
5264 mImpl->on_vkDestroyImage(context,
5265 device, image, pAllocator);
5266 }
5267
on_vkGetImageMemoryRequirements(void * context,VkDevice device,VkImage image,VkMemoryRequirements * pMemoryRequirements)5268 void ResourceTracker::on_vkGetImageMemoryRequirements(
5269 void *context, VkDevice device, VkImage image,
5270 VkMemoryRequirements *pMemoryRequirements) {
5271 mImpl->on_vkGetImageMemoryRequirements(
5272 context, device, image, pMemoryRequirements);
5273 }
5274
on_vkGetImageMemoryRequirements2(void * context,VkDevice device,const VkImageMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)5275 void ResourceTracker::on_vkGetImageMemoryRequirements2(
5276 void *context, VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
5277 VkMemoryRequirements2 *pMemoryRequirements) {
5278 mImpl->on_vkGetImageMemoryRequirements2(
5279 context, device, pInfo, pMemoryRequirements);
5280 }
5281
on_vkGetImageMemoryRequirements2KHR(void * context,VkDevice device,const VkImageMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)5282 void ResourceTracker::on_vkGetImageMemoryRequirements2KHR(
5283 void *context, VkDevice device, const VkImageMemoryRequirementsInfo2 *pInfo,
5284 VkMemoryRequirements2 *pMemoryRequirements) {
5285 mImpl->on_vkGetImageMemoryRequirements2KHR(
5286 context, device, pInfo, pMemoryRequirements);
5287 }
5288
on_vkBindImageMemory(void * context,VkResult input_result,VkDevice device,VkImage image,VkDeviceMemory memory,VkDeviceSize memoryOffset)5289 VkResult ResourceTracker::on_vkBindImageMemory(
5290 void* context, VkResult input_result,
5291 VkDevice device, VkImage image, VkDeviceMemory memory,
5292 VkDeviceSize memoryOffset) {
5293 return mImpl->on_vkBindImageMemory(
5294 context, input_result, device, image, memory, memoryOffset);
5295 }
5296
on_vkBindImageMemory2(void * context,VkResult input_result,VkDevice device,uint32_t bindingCount,const VkBindImageMemoryInfo * pBindInfos)5297 VkResult ResourceTracker::on_vkBindImageMemory2(
5298 void* context, VkResult input_result,
5299 VkDevice device, uint32_t bindingCount, const VkBindImageMemoryInfo* pBindInfos) {
5300 return mImpl->on_vkBindImageMemory2(
5301 context, input_result, device, bindingCount, pBindInfos);
5302 }
5303
on_vkBindImageMemory2KHR(void * context,VkResult input_result,VkDevice device,uint32_t bindingCount,const VkBindImageMemoryInfo * pBindInfos)5304 VkResult ResourceTracker::on_vkBindImageMemory2KHR(
5305 void* context, VkResult input_result,
5306 VkDevice device, uint32_t bindingCount, const VkBindImageMemoryInfo* pBindInfos) {
5307 return mImpl->on_vkBindImageMemory2KHR(
5308 context, input_result, device, bindingCount, pBindInfos);
5309 }
5310
on_vkCreateBuffer(void * context,VkResult input_result,VkDevice device,const VkBufferCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkBuffer * pBuffer)5311 VkResult ResourceTracker::on_vkCreateBuffer(
5312 void* context, VkResult input_result,
5313 VkDevice device, const VkBufferCreateInfo *pCreateInfo,
5314 const VkAllocationCallbacks *pAllocator,
5315 VkBuffer *pBuffer) {
5316 return mImpl->on_vkCreateBuffer(
5317 context, input_result,
5318 device, pCreateInfo, pAllocator, pBuffer);
5319 }
5320
on_vkDestroyBuffer(void * context,VkDevice device,VkBuffer buffer,const VkAllocationCallbacks * pAllocator)5321 void ResourceTracker::on_vkDestroyBuffer(
5322 void* context,
5323 VkDevice device, VkBuffer buffer, const VkAllocationCallbacks *pAllocator) {
5324 mImpl->on_vkDestroyBuffer(context, device, buffer, pAllocator);
5325 }
5326
on_vkGetBufferMemoryRequirements(void * context,VkDevice device,VkBuffer buffer,VkMemoryRequirements * pMemoryRequirements)5327 void ResourceTracker::on_vkGetBufferMemoryRequirements(
5328 void* context, VkDevice device, VkBuffer buffer, VkMemoryRequirements *pMemoryRequirements) {
5329 mImpl->on_vkGetBufferMemoryRequirements(context, device, buffer, pMemoryRequirements);
5330 }
5331
on_vkGetBufferMemoryRequirements2(void * context,VkDevice device,const VkBufferMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)5332 void ResourceTracker::on_vkGetBufferMemoryRequirements2(
5333 void* context, VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo,
5334 VkMemoryRequirements2* pMemoryRequirements) {
5335 mImpl->on_vkGetBufferMemoryRequirements2(
5336 context, device, pInfo, pMemoryRequirements);
5337 }
5338
on_vkGetBufferMemoryRequirements2KHR(void * context,VkDevice device,const VkBufferMemoryRequirementsInfo2 * pInfo,VkMemoryRequirements2 * pMemoryRequirements)5339 void ResourceTracker::on_vkGetBufferMemoryRequirements2KHR(
5340 void* context, VkDevice device, const VkBufferMemoryRequirementsInfo2* pInfo,
5341 VkMemoryRequirements2* pMemoryRequirements) {
5342 mImpl->on_vkGetBufferMemoryRequirements2KHR(
5343 context, device, pInfo, pMemoryRequirements);
5344 }
5345
on_vkBindBufferMemory(void * context,VkResult input_result,VkDevice device,VkBuffer buffer,VkDeviceMemory memory,VkDeviceSize memoryOffset)5346 VkResult ResourceTracker::on_vkBindBufferMemory(
5347 void* context, VkResult input_result,
5348 VkDevice device, VkBuffer buffer, VkDeviceMemory memory, VkDeviceSize memoryOffset) {
5349 return mImpl->on_vkBindBufferMemory(
5350 context, input_result,
5351 device, buffer, memory, memoryOffset);
5352 }
5353
on_vkBindBufferMemory2(void * context,VkResult input_result,VkDevice device,uint32_t bindInfoCount,const VkBindBufferMemoryInfo * pBindInfos)5354 VkResult ResourceTracker::on_vkBindBufferMemory2(
5355 void* context, VkResult input_result,
5356 VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo *pBindInfos) {
5357 return mImpl->on_vkBindBufferMemory2(
5358 context, input_result,
5359 device, bindInfoCount, pBindInfos);
5360 }
5361
on_vkBindBufferMemory2KHR(void * context,VkResult input_result,VkDevice device,uint32_t bindInfoCount,const VkBindBufferMemoryInfo * pBindInfos)5362 VkResult ResourceTracker::on_vkBindBufferMemory2KHR(
5363 void* context, VkResult input_result,
5364 VkDevice device, uint32_t bindInfoCount, const VkBindBufferMemoryInfo *pBindInfos) {
5365 return mImpl->on_vkBindBufferMemory2KHR(
5366 context, input_result,
5367 device, bindInfoCount, pBindInfos);
5368 }
5369
on_vkCreateSemaphore(void * context,VkResult input_result,VkDevice device,const VkSemaphoreCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSemaphore * pSemaphore)5370 VkResult ResourceTracker::on_vkCreateSemaphore(
5371 void* context, VkResult input_result,
5372 VkDevice device, const VkSemaphoreCreateInfo *pCreateInfo,
5373 const VkAllocationCallbacks *pAllocator,
5374 VkSemaphore *pSemaphore) {
5375 return mImpl->on_vkCreateSemaphore(
5376 context, input_result,
5377 device, pCreateInfo, pAllocator, pSemaphore);
5378 }
5379
on_vkDestroySemaphore(void * context,VkDevice device,VkSemaphore semaphore,const VkAllocationCallbacks * pAllocator)5380 void ResourceTracker::on_vkDestroySemaphore(
5381 void* context,
5382 VkDevice device, VkSemaphore semaphore, const VkAllocationCallbacks *pAllocator) {
5383 mImpl->on_vkDestroySemaphore(context, device, semaphore, pAllocator);
5384 }
5385
on_vkQueueSubmit(void * context,VkResult input_result,VkQueue queue,uint32_t submitCount,const VkSubmitInfo * pSubmits,VkFence fence)5386 VkResult ResourceTracker::on_vkQueueSubmit(
5387 void* context, VkResult input_result,
5388 VkQueue queue, uint32_t submitCount, const VkSubmitInfo* pSubmits, VkFence fence) {
5389 return mImpl->on_vkQueueSubmit(
5390 context, input_result, queue, submitCount, pSubmits, fence);
5391 }
5392
on_vkQueueWaitIdle(void * context,VkResult input_result,VkQueue queue)5393 VkResult ResourceTracker::on_vkQueueWaitIdle(
5394 void* context, VkResult input_result,
5395 VkQueue queue) {
5396 return mImpl->on_vkQueueWaitIdle(context, input_result, queue);
5397 }
5398
on_vkGetSemaphoreFdKHR(void * context,VkResult input_result,VkDevice device,const VkSemaphoreGetFdInfoKHR * pGetFdInfo,int * pFd)5399 VkResult ResourceTracker::on_vkGetSemaphoreFdKHR(
5400 void* context, VkResult input_result,
5401 VkDevice device,
5402 const VkSemaphoreGetFdInfoKHR* pGetFdInfo,
5403 int* pFd) {
5404 return mImpl->on_vkGetSemaphoreFdKHR(context, input_result, device, pGetFdInfo, pFd);
5405 }
5406
on_vkImportSemaphoreFdKHR(void * context,VkResult input_result,VkDevice device,const VkImportSemaphoreFdInfoKHR * pImportSemaphoreFdInfo)5407 VkResult ResourceTracker::on_vkImportSemaphoreFdKHR(
5408 void* context, VkResult input_result,
5409 VkDevice device,
5410 const VkImportSemaphoreFdInfoKHR* pImportSemaphoreFdInfo) {
5411 return mImpl->on_vkImportSemaphoreFdKHR(context, input_result, device, pImportSemaphoreFdInfo);
5412 }
5413
unwrap_VkNativeBufferANDROID(const VkImageCreateInfo * pCreateInfo,VkImageCreateInfo * local_pCreateInfo)5414 void ResourceTracker::unwrap_VkNativeBufferANDROID(
5415 const VkImageCreateInfo* pCreateInfo,
5416 VkImageCreateInfo* local_pCreateInfo) {
5417 mImpl->unwrap_VkNativeBufferANDROID(pCreateInfo, local_pCreateInfo);
5418 }
5419
unwrap_vkAcquireImageANDROID_nativeFenceFd(int fd,int * fd_out)5420 void ResourceTracker::unwrap_vkAcquireImageANDROID_nativeFenceFd(int fd, int* fd_out) {
5421 mImpl->unwrap_vkAcquireImageANDROID_nativeFenceFd(fd, fd_out);
5422 }
5423
5424 #ifdef VK_USE_PLATFORM_FUCHSIA
on_vkGetMemoryZirconHandleFUCHSIA(void * context,VkResult input_result,VkDevice device,const VkMemoryGetZirconHandleInfoFUCHSIA * pInfo,uint32_t * pHandle)5425 VkResult ResourceTracker::on_vkGetMemoryZirconHandleFUCHSIA(
5426 void* context, VkResult input_result,
5427 VkDevice device,
5428 const VkMemoryGetZirconHandleInfoFUCHSIA* pInfo,
5429 uint32_t* pHandle) {
5430 return mImpl->on_vkGetMemoryZirconHandleFUCHSIA(
5431 context, input_result, device, pInfo, pHandle);
5432 }
5433
on_vkGetMemoryZirconHandlePropertiesFUCHSIA(void * context,VkResult input_result,VkDevice device,VkExternalMemoryHandleTypeFlagBits handleType,uint32_t handle,VkMemoryZirconHandlePropertiesFUCHSIA * pProperties)5434 VkResult ResourceTracker::on_vkGetMemoryZirconHandlePropertiesFUCHSIA(
5435 void* context, VkResult input_result,
5436 VkDevice device,
5437 VkExternalMemoryHandleTypeFlagBits handleType,
5438 uint32_t handle,
5439 VkMemoryZirconHandlePropertiesFUCHSIA* pProperties) {
5440 return mImpl->on_vkGetMemoryZirconHandlePropertiesFUCHSIA(
5441 context, input_result, device, handleType, handle, pProperties);
5442 }
5443
on_vkGetSemaphoreZirconHandleFUCHSIA(void * context,VkResult input_result,VkDevice device,const VkSemaphoreGetZirconHandleInfoFUCHSIA * pInfo,uint32_t * pHandle)5444 VkResult ResourceTracker::on_vkGetSemaphoreZirconHandleFUCHSIA(
5445 void* context, VkResult input_result,
5446 VkDevice device,
5447 const VkSemaphoreGetZirconHandleInfoFUCHSIA* pInfo,
5448 uint32_t* pHandle) {
5449 return mImpl->on_vkGetSemaphoreZirconHandleFUCHSIA(
5450 context, input_result, device, pInfo, pHandle);
5451 }
5452
on_vkImportSemaphoreZirconHandleFUCHSIA(void * context,VkResult input_result,VkDevice device,const VkImportSemaphoreZirconHandleInfoFUCHSIA * pInfo)5453 VkResult ResourceTracker::on_vkImportSemaphoreZirconHandleFUCHSIA(
5454 void* context, VkResult input_result,
5455 VkDevice device,
5456 const VkImportSemaphoreZirconHandleInfoFUCHSIA* pInfo) {
5457 return mImpl->on_vkImportSemaphoreZirconHandleFUCHSIA(
5458 context, input_result, device, pInfo);
5459 }
5460
on_vkCreateBufferCollectionFUCHSIA(void * context,VkResult input_result,VkDevice device,const VkBufferCollectionCreateInfoFUCHSIA * pInfo,const VkAllocationCallbacks * pAllocator,VkBufferCollectionFUCHSIA * pCollection)5461 VkResult ResourceTracker::on_vkCreateBufferCollectionFUCHSIA(
5462 void* context, VkResult input_result,
5463 VkDevice device,
5464 const VkBufferCollectionCreateInfoFUCHSIA* pInfo,
5465 const VkAllocationCallbacks* pAllocator,
5466 VkBufferCollectionFUCHSIA* pCollection) {
5467 return mImpl->on_vkCreateBufferCollectionFUCHSIA(
5468 context, input_result, device, pInfo, pAllocator, pCollection);
5469 }
5470
on_vkDestroyBufferCollectionFUCHSIA(void * context,VkResult input_result,VkDevice device,VkBufferCollectionFUCHSIA collection,const VkAllocationCallbacks * pAllocator)5471 void ResourceTracker::on_vkDestroyBufferCollectionFUCHSIA(
5472 void* context, VkResult input_result,
5473 VkDevice device,
5474 VkBufferCollectionFUCHSIA collection,
5475 const VkAllocationCallbacks* pAllocator) {
5476 return mImpl->on_vkDestroyBufferCollectionFUCHSIA(
5477 context, input_result, device, collection, pAllocator);
5478 }
5479
on_vkSetBufferCollectionConstraintsFUCHSIA(void * context,VkResult input_result,VkDevice device,VkBufferCollectionFUCHSIA collection,const VkImageCreateInfo * pImageInfo)5480 VkResult ResourceTracker::on_vkSetBufferCollectionConstraintsFUCHSIA(
5481 void* context, VkResult input_result,
5482 VkDevice device,
5483 VkBufferCollectionFUCHSIA collection,
5484 const VkImageCreateInfo* pImageInfo) {
5485 return mImpl->on_vkSetBufferCollectionConstraintsFUCHSIA(
5486 context, input_result, device, collection, pImageInfo);
5487 }
5488
on_vkSetBufferCollectionBufferConstraintsFUCHSIA(void * context,VkResult input_result,VkDevice device,VkBufferCollectionFUCHSIA collection,const VkBufferConstraintsInfoFUCHSIA * pBufferDConstraintsInfo)5489 VkResult ResourceTracker::on_vkSetBufferCollectionBufferConstraintsFUCHSIA(
5490 void* context, VkResult input_result,
5491 VkDevice device,
5492 VkBufferCollectionFUCHSIA collection,
5493 const VkBufferConstraintsInfoFUCHSIA* pBufferDConstraintsInfo) {
5494 return mImpl->on_vkSetBufferCollectionBufferConstraintsFUCHSIA(
5495 context, input_result, device, collection, pBufferDConstraintsInfo);
5496 }
5497
on_vkGetBufferCollectionPropertiesFUCHSIA(void * context,VkResult input_result,VkDevice device,VkBufferCollectionFUCHSIA collection,VkBufferCollectionPropertiesFUCHSIA * pProperties)5498 VkResult ResourceTracker::on_vkGetBufferCollectionPropertiesFUCHSIA(
5499 void* context, VkResult input_result,
5500 VkDevice device,
5501 VkBufferCollectionFUCHSIA collection,
5502 VkBufferCollectionPropertiesFUCHSIA* pProperties) {
5503 return mImpl->on_vkGetBufferCollectionPropertiesFUCHSIA(
5504 context, input_result, device, collection, pProperties);
5505 }
5506 #endif
5507
on_vkGetAndroidHardwareBufferPropertiesANDROID(void * context,VkResult input_result,VkDevice device,const AHardwareBuffer * buffer,VkAndroidHardwareBufferPropertiesANDROID * pProperties)5508 VkResult ResourceTracker::on_vkGetAndroidHardwareBufferPropertiesANDROID(
5509 void* context, VkResult input_result,
5510 VkDevice device,
5511 const AHardwareBuffer* buffer,
5512 VkAndroidHardwareBufferPropertiesANDROID* pProperties) {
5513 return mImpl->on_vkGetAndroidHardwareBufferPropertiesANDROID(
5514 context, input_result, device, buffer, pProperties);
5515 }
on_vkGetMemoryAndroidHardwareBufferANDROID(void * context,VkResult input_result,VkDevice device,const VkMemoryGetAndroidHardwareBufferInfoANDROID * pInfo,struct AHardwareBuffer ** pBuffer)5516 VkResult ResourceTracker::on_vkGetMemoryAndroidHardwareBufferANDROID(
5517 void* context, VkResult input_result,
5518 VkDevice device,
5519 const VkMemoryGetAndroidHardwareBufferInfoANDROID *pInfo,
5520 struct AHardwareBuffer** pBuffer) {
5521 return mImpl->on_vkGetMemoryAndroidHardwareBufferANDROID(
5522 context, input_result,
5523 device, pInfo, pBuffer);
5524 }
5525
on_vkCreateSamplerYcbcrConversion(void * context,VkResult input_result,VkDevice device,const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSamplerYcbcrConversion * pYcbcrConversion)5526 VkResult ResourceTracker::on_vkCreateSamplerYcbcrConversion(
5527 void* context, VkResult input_result,
5528 VkDevice device,
5529 const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
5530 const VkAllocationCallbacks* pAllocator,
5531 VkSamplerYcbcrConversion* pYcbcrConversion) {
5532 return mImpl->on_vkCreateSamplerYcbcrConversion(
5533 context, input_result, device, pCreateInfo, pAllocator, pYcbcrConversion);
5534 }
5535
on_vkDestroySamplerYcbcrConversion(void * context,VkDevice device,VkSamplerYcbcrConversion ycbcrConversion,const VkAllocationCallbacks * pAllocator)5536 void ResourceTracker::on_vkDestroySamplerYcbcrConversion(
5537 void* context,
5538 VkDevice device,
5539 VkSamplerYcbcrConversion ycbcrConversion,
5540 const VkAllocationCallbacks* pAllocator) {
5541 mImpl->on_vkDestroySamplerYcbcrConversion(
5542 context, device, ycbcrConversion, pAllocator);
5543 }
5544
on_vkCreateSamplerYcbcrConversionKHR(void * context,VkResult input_result,VkDevice device,const VkSamplerYcbcrConversionCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSamplerYcbcrConversion * pYcbcrConversion)5545 VkResult ResourceTracker::on_vkCreateSamplerYcbcrConversionKHR(
5546 void* context, VkResult input_result,
5547 VkDevice device,
5548 const VkSamplerYcbcrConversionCreateInfo* pCreateInfo,
5549 const VkAllocationCallbacks* pAllocator,
5550 VkSamplerYcbcrConversion* pYcbcrConversion) {
5551 return mImpl->on_vkCreateSamplerYcbcrConversionKHR(
5552 context, input_result, device, pCreateInfo, pAllocator, pYcbcrConversion);
5553 }
5554
on_vkDestroySamplerYcbcrConversionKHR(void * context,VkDevice device,VkSamplerYcbcrConversion ycbcrConversion,const VkAllocationCallbacks * pAllocator)5555 void ResourceTracker::on_vkDestroySamplerYcbcrConversionKHR(
5556 void* context,
5557 VkDevice device,
5558 VkSamplerYcbcrConversion ycbcrConversion,
5559 const VkAllocationCallbacks* pAllocator) {
5560 mImpl->on_vkDestroySamplerYcbcrConversionKHR(
5561 context, device, ycbcrConversion, pAllocator);
5562 }
5563
on_vkCreateSampler(void * context,VkResult input_result,VkDevice device,const VkSamplerCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkSampler * pSampler)5564 VkResult ResourceTracker::on_vkCreateSampler(
5565 void* context, VkResult input_result,
5566 VkDevice device,
5567 const VkSamplerCreateInfo* pCreateInfo,
5568 const VkAllocationCallbacks* pAllocator,
5569 VkSampler* pSampler) {
5570 return mImpl->on_vkCreateSampler(
5571 context, input_result, device, pCreateInfo, pAllocator, pSampler);
5572 }
5573
on_vkGetPhysicalDeviceExternalFenceProperties(void * context,VkPhysicalDevice physicalDevice,const VkPhysicalDeviceExternalFenceInfo * pExternalFenceInfo,VkExternalFenceProperties * pExternalFenceProperties)5574 void ResourceTracker::on_vkGetPhysicalDeviceExternalFenceProperties(
5575 void* context,
5576 VkPhysicalDevice physicalDevice,
5577 const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo,
5578 VkExternalFenceProperties* pExternalFenceProperties) {
5579 mImpl->on_vkGetPhysicalDeviceExternalFenceProperties(
5580 context, physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
5581 }
5582
// KHR-suffixed entry point for querying external fence properties.
// Forwards to the core (non-KHR) implementation method on mImpl: the
// VK_KHR_external_fence_capabilities entry point is an alias of the core
// Vulkan 1.1 function, so one implementation can serve both.
// NOTE(review): every other *KHR wrapper in this file forwards to a matching
// *KHR impl method; confirm the shared implementation here is intentional.
void ResourceTracker::on_vkGetPhysicalDeviceExternalFencePropertiesKHR(
    void* context,
    VkPhysicalDevice physicalDevice,
    const VkPhysicalDeviceExternalFenceInfo* pExternalFenceInfo,
    VkExternalFenceProperties* pExternalFenceProperties) {
    mImpl->on_vkGetPhysicalDeviceExternalFenceProperties(
        context, physicalDevice, pExternalFenceInfo, pExternalFenceProperties);
}
5591
on_vkCreateFence(void * context,VkResult input_result,VkDevice device,const VkFenceCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkFence * pFence)5592 VkResult ResourceTracker::on_vkCreateFence(
5593 void* context,
5594 VkResult input_result,
5595 VkDevice device,
5596 const VkFenceCreateInfo* pCreateInfo,
5597 const VkAllocationCallbacks* pAllocator, VkFence* pFence) {
5598 return mImpl->on_vkCreateFence(
5599 context, input_result, device, pCreateInfo, pAllocator, pFence);
5600 }
5601
on_vkDestroyFence(void * context,VkDevice device,VkFence fence,const VkAllocationCallbacks * pAllocator)5602 void ResourceTracker::on_vkDestroyFence(
5603 void* context,
5604 VkDevice device,
5605 VkFence fence,
5606 const VkAllocationCallbacks* pAllocator) {
5607 mImpl->on_vkDestroyFence(
5608 context, device, fence, pAllocator);
5609 }
5610
on_vkResetFences(void * context,VkResult input_result,VkDevice device,uint32_t fenceCount,const VkFence * pFences)5611 VkResult ResourceTracker::on_vkResetFences(
5612 void* context,
5613 VkResult input_result,
5614 VkDevice device,
5615 uint32_t fenceCount,
5616 const VkFence* pFences) {
5617 return mImpl->on_vkResetFences(
5618 context, input_result, device, fenceCount, pFences);
5619 }
5620
on_vkImportFenceFdKHR(void * context,VkResult input_result,VkDevice device,const VkImportFenceFdInfoKHR * pImportFenceFdInfo)5621 VkResult ResourceTracker::on_vkImportFenceFdKHR(
5622 void* context,
5623 VkResult input_result,
5624 VkDevice device,
5625 const VkImportFenceFdInfoKHR* pImportFenceFdInfo) {
5626 return mImpl->on_vkImportFenceFdKHR(
5627 context, input_result, device, pImportFenceFdInfo);
5628 }
5629
on_vkGetFenceFdKHR(void * context,VkResult input_result,VkDevice device,const VkFenceGetFdInfoKHR * pGetFdInfo,int * pFd)5630 VkResult ResourceTracker::on_vkGetFenceFdKHR(
5631 void* context,
5632 VkResult input_result,
5633 VkDevice device,
5634 const VkFenceGetFdInfoKHR* pGetFdInfo,
5635 int* pFd) {
5636 return mImpl->on_vkGetFenceFdKHR(
5637 context, input_result, device, pGetFdInfo, pFd);
5638 }
5639
on_vkWaitForFences(void * context,VkResult input_result,VkDevice device,uint32_t fenceCount,const VkFence * pFences,VkBool32 waitAll,uint64_t timeout)5640 VkResult ResourceTracker::on_vkWaitForFences(
5641 void* context,
5642 VkResult input_result,
5643 VkDevice device,
5644 uint32_t fenceCount,
5645 const VkFence* pFences,
5646 VkBool32 waitAll,
5647 uint64_t timeout) {
5648 return mImpl->on_vkWaitForFences(
5649 context, input_result, device, fenceCount, pFences, waitAll, timeout);
5650 }
5651
on_vkCreateDescriptorPool(void * context,VkResult input_result,VkDevice device,const VkDescriptorPoolCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorPool * pDescriptorPool)5652 VkResult ResourceTracker::on_vkCreateDescriptorPool(
5653 void* context,
5654 VkResult input_result,
5655 VkDevice device,
5656 const VkDescriptorPoolCreateInfo* pCreateInfo,
5657 const VkAllocationCallbacks* pAllocator,
5658 VkDescriptorPool* pDescriptorPool) {
5659 return mImpl->on_vkCreateDescriptorPool(
5660 context, input_result, device, pCreateInfo, pAllocator, pDescriptorPool);
5661 }
5662
on_vkDestroyDescriptorPool(void * context,VkDevice device,VkDescriptorPool descriptorPool,const VkAllocationCallbacks * pAllocator)5663 void ResourceTracker::on_vkDestroyDescriptorPool(
5664 void* context,
5665 VkDevice device,
5666 VkDescriptorPool descriptorPool,
5667 const VkAllocationCallbacks* pAllocator) {
5668 mImpl->on_vkDestroyDescriptorPool(context, device, descriptorPool, pAllocator);
5669 }
5670
on_vkResetDescriptorPool(void * context,VkResult input_result,VkDevice device,VkDescriptorPool descriptorPool,VkDescriptorPoolResetFlags flags)5671 VkResult ResourceTracker::on_vkResetDescriptorPool(
5672 void* context,
5673 VkResult input_result,
5674 VkDevice device,
5675 VkDescriptorPool descriptorPool,
5676 VkDescriptorPoolResetFlags flags) {
5677 return mImpl->on_vkResetDescriptorPool(
5678 context, input_result, device, descriptorPool, flags);
5679 }
5680
on_vkAllocateDescriptorSets(void * context,VkResult input_result,VkDevice device,const VkDescriptorSetAllocateInfo * pAllocateInfo,VkDescriptorSet * pDescriptorSets)5681 VkResult ResourceTracker::on_vkAllocateDescriptorSets(
5682 void* context,
5683 VkResult input_result,
5684 VkDevice device,
5685 const VkDescriptorSetAllocateInfo* pAllocateInfo,
5686 VkDescriptorSet* pDescriptorSets) {
5687 return mImpl->on_vkAllocateDescriptorSets(
5688 context, input_result, device, pAllocateInfo, pDescriptorSets);
5689 }
5690
on_vkFreeDescriptorSets(void * context,VkResult input_result,VkDevice device,VkDescriptorPool descriptorPool,uint32_t descriptorSetCount,const VkDescriptorSet * pDescriptorSets)5691 VkResult ResourceTracker::on_vkFreeDescriptorSets(
5692 void* context,
5693 VkResult input_result,
5694 VkDevice device,
5695 VkDescriptorPool descriptorPool,
5696 uint32_t descriptorSetCount,
5697 const VkDescriptorSet* pDescriptorSets) {
5698 return mImpl->on_vkFreeDescriptorSets(
5699 context, input_result, device, descriptorPool, descriptorSetCount, pDescriptorSets);
5700 }
5701
on_vkCreateDescriptorSetLayout(void * context,VkResult input_result,VkDevice device,const VkDescriptorSetLayoutCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorSetLayout * pSetLayout)5702 VkResult ResourceTracker::on_vkCreateDescriptorSetLayout(
5703 void* context,
5704 VkResult input_result,
5705 VkDevice device,
5706 const VkDescriptorSetLayoutCreateInfo* pCreateInfo,
5707 const VkAllocationCallbacks* pAllocator,
5708 VkDescriptorSetLayout* pSetLayout) {
5709 return mImpl->on_vkCreateDescriptorSetLayout(
5710 context, input_result, device, pCreateInfo, pAllocator, pSetLayout);
5711 }
5712
on_vkUpdateDescriptorSets(void * context,VkDevice device,uint32_t descriptorWriteCount,const VkWriteDescriptorSet * pDescriptorWrites,uint32_t descriptorCopyCount,const VkCopyDescriptorSet * pDescriptorCopies)5713 void ResourceTracker::on_vkUpdateDescriptorSets(
5714 void* context,
5715 VkDevice device,
5716 uint32_t descriptorWriteCount,
5717 const VkWriteDescriptorSet* pDescriptorWrites,
5718 uint32_t descriptorCopyCount,
5719 const VkCopyDescriptorSet* pDescriptorCopies) {
5720 return mImpl->on_vkUpdateDescriptorSets(
5721 context, device, descriptorWriteCount, pDescriptorWrites, descriptorCopyCount, pDescriptorCopies);
5722 }
5723
on_vkMapMemoryIntoAddressSpaceGOOGLE_pre(void * context,VkResult input_result,VkDevice device,VkDeviceMemory memory,uint64_t * pAddress)5724 VkResult ResourceTracker::on_vkMapMemoryIntoAddressSpaceGOOGLE_pre(
5725 void* context,
5726 VkResult input_result,
5727 VkDevice device,
5728 VkDeviceMemory memory,
5729 uint64_t* pAddress) {
5730 return mImpl->on_vkMapMemoryIntoAddressSpaceGOOGLE_pre(
5731 context, input_result, device, memory, pAddress);
5732 }
5733
on_vkMapMemoryIntoAddressSpaceGOOGLE(void * context,VkResult input_result,VkDevice device,VkDeviceMemory memory,uint64_t * pAddress)5734 VkResult ResourceTracker::on_vkMapMemoryIntoAddressSpaceGOOGLE(
5735 void* context,
5736 VkResult input_result,
5737 VkDevice device,
5738 VkDeviceMemory memory,
5739 uint64_t* pAddress) {
5740 return mImpl->on_vkMapMemoryIntoAddressSpaceGOOGLE(
5741 context, input_result, device, memory, pAddress);
5742 }
5743
on_vkCreateDescriptorUpdateTemplate(void * context,VkResult input_result,VkDevice device,const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate)5744 VkResult ResourceTracker::on_vkCreateDescriptorUpdateTemplate(
5745 void* context, VkResult input_result,
5746 VkDevice device,
5747 const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
5748 const VkAllocationCallbacks* pAllocator,
5749 VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
5750 return mImpl->on_vkCreateDescriptorUpdateTemplate(
5751 context, input_result,
5752 device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
5753 }
5754
on_vkCreateDescriptorUpdateTemplateKHR(void * context,VkResult input_result,VkDevice device,const VkDescriptorUpdateTemplateCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkDescriptorUpdateTemplate * pDescriptorUpdateTemplate)5755 VkResult ResourceTracker::on_vkCreateDescriptorUpdateTemplateKHR(
5756 void* context, VkResult input_result,
5757 VkDevice device,
5758 const VkDescriptorUpdateTemplateCreateInfo* pCreateInfo,
5759 const VkAllocationCallbacks* pAllocator,
5760 VkDescriptorUpdateTemplate* pDescriptorUpdateTemplate) {
5761 return mImpl->on_vkCreateDescriptorUpdateTemplateKHR(
5762 context, input_result,
5763 device, pCreateInfo, pAllocator, pDescriptorUpdateTemplate);
5764 }
5765
on_vkUpdateDescriptorSetWithTemplate(void * context,VkDevice device,VkDescriptorSet descriptorSet,VkDescriptorUpdateTemplate descriptorUpdateTemplate,const void * pData)5766 void ResourceTracker::on_vkUpdateDescriptorSetWithTemplate(
5767 void* context,
5768 VkDevice device,
5769 VkDescriptorSet descriptorSet,
5770 VkDescriptorUpdateTemplate descriptorUpdateTemplate,
5771 const void* pData) {
5772 mImpl->on_vkUpdateDescriptorSetWithTemplate(
5773 context, device, descriptorSet,
5774 descriptorUpdateTemplate, pData);
5775 }
5776
on_vkGetPhysicalDeviceImageFormatProperties2(void * context,VkResult input_result,VkPhysicalDevice physicalDevice,const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo,VkImageFormatProperties2 * pImageFormatProperties)5777 VkResult ResourceTracker::on_vkGetPhysicalDeviceImageFormatProperties2(
5778 void* context, VkResult input_result,
5779 VkPhysicalDevice physicalDevice,
5780 const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
5781 VkImageFormatProperties2* pImageFormatProperties) {
5782 return mImpl->on_vkGetPhysicalDeviceImageFormatProperties2(
5783 context, input_result, physicalDevice, pImageFormatInfo,
5784 pImageFormatProperties);
5785 }
5786
on_vkGetPhysicalDeviceImageFormatProperties2KHR(void * context,VkResult input_result,VkPhysicalDevice physicalDevice,const VkPhysicalDeviceImageFormatInfo2 * pImageFormatInfo,VkImageFormatProperties2 * pImageFormatProperties)5787 VkResult ResourceTracker::on_vkGetPhysicalDeviceImageFormatProperties2KHR(
5788 void* context, VkResult input_result,
5789 VkPhysicalDevice physicalDevice,
5790 const VkPhysicalDeviceImageFormatInfo2* pImageFormatInfo,
5791 VkImageFormatProperties2* pImageFormatProperties) {
5792 return mImpl->on_vkGetPhysicalDeviceImageFormatProperties2KHR(
5793 context, input_result, physicalDevice, pImageFormatInfo,
5794 pImageFormatProperties);
5795 }
5796
registerEncoderCleanupCallback(const VkEncoder * encoder,void * handle,ResourceTracker::CleanupCallback callback)5797 void ResourceTracker::registerEncoderCleanupCallback(const VkEncoder* encoder, void* handle, ResourceTracker::CleanupCallback callback) {
5798 mImpl->registerEncoderCleanupCallback(encoder, handle, callback);
5799 }
5800
unregisterEncoderCleanupCallback(const VkEncoder * encoder,void * handle)5801 void ResourceTracker::unregisterEncoderCleanupCallback(const VkEncoder* encoder, void* handle) {
5802 mImpl->unregisterEncoderCleanupCallback(encoder, handle);
5803 }
5804
onEncoderDeleted(const VkEncoder * encoder)5805 void ResourceTracker::onEncoderDeleted(const VkEncoder* encoder) {
5806 mImpl->onEncoderDeleted(encoder);
5807 }
5808
syncEncodersForCommandBuffer(VkCommandBuffer commandBuffer,VkEncoder * current)5809 uint32_t ResourceTracker::syncEncodersForCommandBuffer(VkCommandBuffer commandBuffer, VkEncoder* current) {
5810 return mImpl->syncEncodersForCommandBuffer(commandBuffer, current);
5811 }
5812
syncEncodersForQueue(VkQueue queue,VkEncoder * current)5813 uint32_t ResourceTracker::syncEncodersForQueue(VkQueue queue, VkEncoder* current) {
5814 return mImpl->syncEncodersForQueue(queue, current);
5815 }
5816
5817
on_vkBeginCommandBuffer(void * context,VkResult input_result,VkCommandBuffer commandBuffer,const VkCommandBufferBeginInfo * pBeginInfo)5818 VkResult ResourceTracker::on_vkBeginCommandBuffer(
5819 void* context, VkResult input_result,
5820 VkCommandBuffer commandBuffer,
5821 const VkCommandBufferBeginInfo* pBeginInfo) {
5822 return mImpl->on_vkBeginCommandBuffer(
5823 context, input_result, commandBuffer, pBeginInfo);
5824 }
5825
on_vkEndCommandBuffer(void * context,VkResult input_result,VkCommandBuffer commandBuffer)5826 VkResult ResourceTracker::on_vkEndCommandBuffer(
5827 void* context, VkResult input_result,
5828 VkCommandBuffer commandBuffer) {
5829 return mImpl->on_vkEndCommandBuffer(
5830 context, input_result, commandBuffer);
5831 }
5832
on_vkResetCommandBuffer(void * context,VkResult input_result,VkCommandBuffer commandBuffer,VkCommandBufferResetFlags flags)5833 VkResult ResourceTracker::on_vkResetCommandBuffer(
5834 void* context, VkResult input_result,
5835 VkCommandBuffer commandBuffer,
5836 VkCommandBufferResetFlags flags) {
5837 return mImpl->on_vkResetCommandBuffer(
5838 context, input_result, commandBuffer, flags);
5839 }
5840
on_vkCreateImageView(void * context,VkResult input_result,VkDevice device,const VkImageViewCreateInfo * pCreateInfo,const VkAllocationCallbacks * pAllocator,VkImageView * pView)5841 VkResult ResourceTracker::on_vkCreateImageView(
5842 void* context, VkResult input_result,
5843 VkDevice device,
5844 const VkImageViewCreateInfo* pCreateInfo,
5845 const VkAllocationCallbacks* pAllocator,
5846 VkImageView* pView) {
5847 return mImpl->on_vkCreateImageView(
5848 context, input_result, device, pCreateInfo, pAllocator, pView);
5849 }
5850
deviceMemoryTransform_tohost(VkDeviceMemory * memory,uint32_t memoryCount,VkDeviceSize * offset,uint32_t offsetCount,VkDeviceSize * size,uint32_t sizeCount,uint32_t * typeIndex,uint32_t typeIndexCount,uint32_t * typeBits,uint32_t typeBitsCount)5851 void ResourceTracker::deviceMemoryTransform_tohost(
5852 VkDeviceMemory* memory, uint32_t memoryCount,
5853 VkDeviceSize* offset, uint32_t offsetCount,
5854 VkDeviceSize* size, uint32_t sizeCount,
5855 uint32_t* typeIndex, uint32_t typeIndexCount,
5856 uint32_t* typeBits, uint32_t typeBitsCount) {
5857 mImpl->deviceMemoryTransform_tohost(
5858 memory, memoryCount,
5859 offset, offsetCount,
5860 size, sizeCount,
5861 typeIndex, typeIndexCount,
5862 typeBits, typeBitsCount);
5863 }
5864
deviceMemoryTransform_fromhost(VkDeviceMemory * memory,uint32_t memoryCount,VkDeviceSize * offset,uint32_t offsetCount,VkDeviceSize * size,uint32_t sizeCount,uint32_t * typeIndex,uint32_t typeIndexCount,uint32_t * typeBits,uint32_t typeBitsCount)5865 void ResourceTracker::deviceMemoryTransform_fromhost(
5866 VkDeviceMemory* memory, uint32_t memoryCount,
5867 VkDeviceSize* offset, uint32_t offsetCount,
5868 VkDeviceSize* size, uint32_t sizeCount,
5869 uint32_t* typeIndex, uint32_t typeIndexCount,
5870 uint32_t* typeBits, uint32_t typeBitsCount) {
5871 mImpl->deviceMemoryTransform_fromhost(
5872 memory, memoryCount,
5873 offset, offsetCount,
5874 size, sizeCount,
5875 typeIndex, typeIndexCount,
5876 typeBits, typeBitsCount);
5877 }
5878
// Stamps out empty tohost/fromhost transform member functions for one type.
// Expanded below via LIST_TRANSFORMED_TYPES so that every transformed type
// gets a (no-op) definition in this translation unit.
#define DEFINE_TRANSFORMED_TYPE_IMPL(type) \
    void ResourceTracker::transformImpl_##type##_tohost(const type*, uint32_t) { } \
    void ResourceTracker::transformImpl_##type##_fromhost(const type*, uint32_t) { } \

// X-macro expansion: one pair of no-op transform definitions per listed type.
LIST_TRANSFORMED_TYPES(DEFINE_TRANSFORMED_TYPE_IMPL)
5884
5885 } // namespace goldfish_vk
5886