Index: src/gpu/vk/GrVkBackendContext.cpp
diff --git a/src/gpu/vk/GrVkBackendContext.cpp b/src/gpu/vk/GrVkBackendContext.cpp
index ae61aa5337b28bfbc36807f9e5ab1aa31b6c0756..8b6c1dadc65bd43a58fd0c9ee9ab1a4d66d75357 100644
--- a/src/gpu/vk/GrVkBackendContext.cpp
+++ b/src/gpu/vk/GrVkBackendContext.cpp
@@ -40,7 +40,9 @@ const uint32_t kGrVkMinimumVersion = VK_MAKE_VERSION(1, 0, 8);
 // Create the base Vulkan objects needed by the GrVkGpu object
 const GrVkBackendContext* GrVkBackendContext::Create(uint32_t* presentQueueIndexPtr,
-                                                     bool(*canPresent)(VkInstance, VkPhysicalDevice, uint32_t queueIndex)) {
+                                                     bool(*canPresent)(VkInstance, VkPhysicalDevice, uint32_t queueIndex,
+                                                                       void* platformData),
+                                                     void* platformData) {
     VkPhysicalDevice physDev;
     VkDevice device;
     VkInstance inst;
@@ -91,11 +93,11 @@ const GrVkBackendContext* GrVkBackendContext::Create(uint32_t* presentQueueIndex
     if (extensions.hasInstanceExtension(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME)) {
         instanceExtensionNames.push_back(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME);
         extensionFlags |= kKHR_android_surface_GrVkExtensionFlag;
-}
+    }
 #elif SK_BUILD_FOR_UNIX
-    if (extensions.hasInstanceExtension(VK_KHR_XLIB_SURFACE_EXTENSION_NAME)) {
-        instanceExtensionNames.push_back(VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
-        extensionFlags |= kKHR_xlib_surface_GrVkExtensionFlag;
+    if (extensions.hasInstanceExtension(VK_KHR_XCB_SURFACE_EXTENSION_NAME)) {
+        instanceExtensionNames.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME);
+        extensionFlags |= kKHR_xcb_surface_GrVkExtensionFlag;
     }
 #endif
@@ -159,7 +161,7 @@ const GrVkBackendContext* GrVkBackendContext::Create(uint32_t* presentQueueIndex
     uint32_t presentQueueIndex = graphicsQueueIndex;
     if (presentQueueIndexPtr && canPresent) {
         for (uint32_t i = 0; i < queueCount; i++) {
-            if (canPresent(inst, physDev, i)) {
+            if (canPresent(inst, physDev, i, platformData)) {
                 presentQueueIndex = i;
                 break;
             }
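
A minimal sketch of how a caller might implement the updated canPresent callback on the
SK_BUILD_FOR_UNIX/XCB path. The PlatformData struct, its fields, and the CanPresentXcb helper
below are illustrative assumptions, not part of this patch; the patch itself only threads the
opaque platformData pointer from Create() through to the callback.

    // Sketch only: PlatformData is hypothetical; the patch leaves the interpretation of
    // platformData entirely to the caller.
    #define VK_USE_PLATFORM_XCB_KHR
    #include <vulkan/vulkan.h>

    struct PlatformData {
        xcb_connection_t* fConnection;  // hypothetical: caller-owned XCB connection
        xcb_visualid_t    fVisualID;    // hypothetical: visual of the target window
    };

    // Matches the new callback signature accepted by GrVkBackendContext::Create().
    static bool CanPresentXcb(VkInstance instance, VkPhysicalDevice physDev,
                              uint32_t queueIndex, void* platformData) {
        PlatformData* data = static_cast<PlatformData*>(platformData);
        // vkGetPhysicalDeviceXcbPresentationSupportKHR comes from VK_KHR_xcb_surface,
        // the instance extension this patch enables on SK_BUILD_FOR_UNIX.
        PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR getSupport =
                reinterpret_cast<PFN_vkGetPhysicalDeviceXcbPresentationSupportKHR>(
                        vkGetInstanceProcAddr(
                                instance, "vkGetPhysicalDeviceXcbPresentationSupportKHR"));
        if (!getSupport) {
            return false;
        }
        return getSupport(physDev, queueIndex, data->fConnection, data->fVisualID) == VK_TRUE;
    }

    // Usage (illustrative): the callback and the opaque pointer travel together.
    // PlatformData pd = { connection, visualID };
    // const GrVkBackendContext* ctx =
    //         GrVkBackendContext::Create(&presentQueueIndex, CanPresentXcb, &pd);

Passing window-system state through a void* keeps GrVkBackendContext::Create() unaware of the
concrete type (here an XCB connection and visual); only the caller's callback needs the
platform-specific headers.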