#include <vulkan/vulkan.h>
#include <vulkan/vulkan_win32.h>
/*#include <vulkan/vulkan_xlib.h>*/
#include <X11/Xlib-xcb.h>
#include <vulkan/vulkan_xcb.h>
	/* if rpasses[rpidx].vkobj != vkrpass, the framebuf is invalid */
	VkImageView imgv[MAX_FB_IMGV];
static struct rpass *rpasses;
static struct framebuf *framebufs;
static int create_instance(void);
static int create_surface(void);
static int choose_phys_dev(void);
static int create_device(void);
static int create_swapchain(void);
static int choose_pixfmt(void);
static int eval_pdev_score(VkPhysicalDevice dev);
static int have_inst_layer(const char *name);
static int have_ext(VkExtensionProperties *ext, int next, const char *name);
static VkPhysicalDevice vkpdev;
static int vkqfam_idx, vkqfam_maxq;
static VkDevice vkdev;
static VkSurfaceKHR vksurf;
static VkSurfaceCapabilitiesKHR vksurf_caps;
static int vksurf_numfmt, vksurf_selfmt;
static VkSurfaceFormatKHR *vksurf_fmt;
static VkSwapchainKHR vksc;
static int vksc_numimg;
static VkImage *vksc_img;
static VkExtent2D vksc_extent;
static VkImageView *vksc_view;
static VkLayerProperties *inst_layers;
static VkExtensionProperties *inst_ext, *dev_ext;
static uint32_t inst_ext_count, dev_ext_count, inst_layers_count;
static VkPhysicalDevice *pdev_list;
static uint32_t num_pdev;
static int have_raytrace, have_debug_report;
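
/* typical usage (a rough sketch; error checking and the parts of this file
 * not shown here are omitted; dpy/win/width/height stand for whatever X11
 * handles and window size the caller has):
 *
 *   unsigned int used;
 *   vk_init_xwin(dpy, win);
 *   if(vk_init(VKINIT_RAY, &used) == -1) {
 *       abort();
 *   }
 *   vk_reshape(width, height);
 *   ...
 *   vk_cleanup();
 *
 * the VKINIT_RAY flag is dropped from *used if the device can't do ray tracing
 */

/* remember the X display and window, so that create_surface can create the
 * presentation surface for them later
 */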
void vk_init_xwin(Display *d, Window w)
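/* bring up the vulkan context: instance, window surface, physical device
 * selection, and logical device. flags that can't be honored are dropped,
 * and the set actually enabled is returned through *usedflags.
 */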
int vk_init(unsigned int flags, unsigned int *usedflags)
	if(create_instance() == -1) return -1;
	if(create_surface() == -1) return -1;
	if(choose_phys_dev() == -1) return -1;
	if(create_device() == -1) return -1;
	if(initflags != flags) {
		*usedflags = initflags;
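/* tear everything down in reverse order: image views, swapchain, device,
 * surface, and finally the instance
 */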
void vk_cleanup(void)
	for(i=0; i<vksc_numimg; i++) {
		vkDestroyImageView(vkdev, vksc_view[i], 0);
	vkDestroySwapchainKHR(vkdev, vksc, 0);
	vkDestroyDevice(vkdev, 0);
	vkDestroySurfaceKHR(vk, vksurf, 0);
	vkDestroyInstance(vk, 0);
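/* (re)create the swapchain for the new window size; does nothing if the
 * size hasn't changed
 */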
int vk_reshape(int xsz, int ysz)
	if(vksc && vksc_extent.width == xsz && vksc_extent.height == ysz) {
	for(i=0; i<vksc_numimg; i++) {
		vkDestroyImageView(vkdev, vksc_view[i], 0);
	if(vksc) vkDestroySwapchainKHR(vkdev, vksc, 0);
	vksc_extent.width = xsz;
	vksc_extent.height = ysz;
	if(create_swapchain() == -1) return -1;
	/* TODO create depth/stencil buffers as needed (initflags) */
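/* render passes are handed out as integer handles: slot 0 is a dummy entry,
 * freed slots get reused, and the actual VkRenderPass is only created lazily
 * by vk_rpass() from the parameters set through the vk_rpass_* calls
 */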
int vk_create_rpass(void)
	struct rpass rpass = {0}, *rp = &rpass;
	rpasses = darr_alloc(0, sizeof *rpasses);
	darr_push(rpasses, &rpass);	/* add dummy rpass */
	for(i=1; i<darr_size(rpasses); i++) {
		if(!rpasses[i].used) {
	/* init renderpass defaults */
	rp->fmt = vksurf_fmt[vksurf_selfmt].format;
	rp->zfmt = VK_FORMAT_D24_UNORM_S8_UINT;
	darr_push(rpasses, rp);
	return darr_size(rpasses) - 1;
void vk_free_rpass(int rp)
	if(!rpasses || rp < 1 || rp >= darr_size(rpasses)) {
	if(rpasses[rp].used && rpasses[rp].vkobj) {
		vkDestroyRenderPass(vkdev, rpasses[rp].vkobj, 0);
	rpasses[rp].used = 0;
void vk_rpass_colorbuf(int rp, int fmt, int n)
	rpasses[rp].fmt = fmt;
	rpasses[rp].num_colbuf = n;
	rpasses[rp].vkobj_valid = 0;
void vk_rpass_msaa(int rp, int nsamp)
	rpasses[rp].num_samples = nsamp;
	rpasses[rp].vkobj_valid = 0;
void vk_rpass_clear(int rp, int clear)
	rpasses[rp].clear = clear;
	rpasses[rp].vkobj_valid = 0;
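/* return the VkRenderPass for handle rp, (re)creating it if any of its
 * parameters changed since the last time it was built
 */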
VkRenderPass vk_rpass(int rp)
	VkAttachmentDescription att[17];
	VkAttachmentReference catref[16], zatref;
	VkSubpassDescription subpass;
	VkRenderPassCreateInfo pinf;
	if(!r->vkobj_valid) {
		vkDestroyRenderPass(vkdev, r->vkobj, 0);
		zidx = r->num_colbuf;
		memset(att, 0, sizeof att);
		for(i=0; i<r->num_colbuf; i++) {
			att[i].format = r->fmt;
			att[i].samples = r->num_samples;
			att[i].loadOp = r->clear ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_DONT_CARE;
			att[i].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
			att[i].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
			att[i].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
			att[i].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
			att[i].finalLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
		att[zidx].format = r->zfmt;
		att[zidx].samples = 1;
		att[zidx].loadOp = r->clear ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_DONT_CARE;
		att[zidx].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
		att[zidx].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
		att[zidx].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
		att[zidx].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
		att[zidx].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
		for(i=0; i<r->num_colbuf; i++) {
			catref[i].attachment = i;
			catref[i].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
		zatref.attachment = zidx;
		zatref.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
		memset(&subpass, 0, sizeof subpass);
		subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
		subpass.colorAttachmentCount = r->num_colbuf;
		subpass.pColorAttachments = catref;
		subpass.pDepthStencilAttachment = &zatref;
		memset(&pinf, 0, sizeof pinf);
		pinf.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
		pinf.attachmentCount = r->num_colbuf + 1;
		pinf.pAttachments = att;
		pinf.subpassCount = 1;
		pinf.pSubpasses = &subpass;
		if(vkCreateRenderPass(vkdev, &pinf, 0, &r->vkobj) != 0) {
			fprintf(stderr, "failed to create render pass!\n");
int vk_create_fb(void)
	struct framebuf framebuf = {0}, *fb = &framebuf;
	framebufs = darr_alloc(0, sizeof *framebufs);
	darr_push(framebufs, &framebuf);	/* add dummy framebuf */
	for(i=1; i<darr_size(framebufs); i++) {
		if(!framebufs[i].used) {
	/* init framebuffer defaults */
	memset(fb, 0, sizeof *fb);
	if(fb == &framebuf) {
		darr_push(framebufs, fb);
		return darr_size(framebufs) - 1;
	return fb - framebufs;
void vk_free_fb(int fb)
	if(!framebufs || fb < 1 || fb >= darr_size(framebufs)) {
	if(framebufs[fb].used && framebufs[fb].vkobj) {
		vkDestroyFramebuffer(vkdev, framebufs[fb].vkobj, 0);
	framebufs[fb].used = 0;
void vk_fb_size(int fb, int x, int y)
	framebufs[fb].width = x;
	framebufs[fb].height = y;
	framebufs[fb].vkobj_valid = 0;
void vk_fb_rpass(int fb, int rpass)
	if(rpass < 0 || rpass >= darr_size(rpasses) || !rpasses[rpass].used) {
		fprintf(stderr, "vk_fb_rpass: %d is not a valid renderpass\n", rpass);
	framebufs[fb].rpidx = rpass;
	if(rpasses[rpass].vkobj_valid) {
		framebufs[fb].vkrpass = rpasses[rpass].vkobj;
		framebufs[fb].vkrpass = 0;
	framebufs[fb].vkobj_valid = 0;
void vk_fb_images(int fb, int n, ...)
	if(n > MAX_FB_IMGV) {
		fprintf(stderr, "vk_fb_images: %d is too many images\n", n);
		framebufs[fb].imgv[i] = va_arg(ap, VkImageView);
	framebufs[fb].num_imgv = n;
	framebufs[fb].vkobj_valid = 0;
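/* return the VkFramebuffer for handle fb, rebuilding it if its parameters
 * or the underlying render pass changed
 */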
VkFramebuffer vk_fb(int fb)
	VkFramebufferCreateInfo fbinf;
	if(!(rpass = vk_rpass(f->rpidx))) {
	if(rpass != f->vkrpass || !f->vkobj_valid) {
		vkDestroyFramebuffer(vkdev, f->vkobj, 0);
		memset(&fbinf, 0, sizeof fbinf);
		fbinf.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
		fbinf.renderPass = rpass;
		fbinf.attachmentCount = f->num_imgv;
		fbinf.pAttachments = f->imgv;
		fbinf.width = f->width;
		fbinf.height = f->height;
		if(vkCreateFramebuffer(vkdev, &fbinf, 0, &f->vkobj) != 0) {
			fprintf(stderr, "vk_fb: failed to create framebuffer\n");
#define ARRSZ(arr) (sizeof arr / sizeof *arr)
static const char *known_layer_list[] = {
	"VK_LAYER_GOOGLE_threading",
	"VK_LAYER_LUNARG_parameter_validation",
	"VK_LAYER_LUNARG_object_tracker",
	"VK_LAYER_LUNARG_image",
	"VK_LAYER_LUNARG_core_validation",
	"VK_LAYER_LUNARG_swapchain",
	"VK_LAYER_GOOGLE_unique_objects"
} known_instext_list[] = {
	{"VK_KHR_surface", 1},
	{"VK_KHR_win32_surface", 1},
	/*{"VK_KHR_xlib_surface", 1},*/
	{"VK_KHR_xcb_surface", 1},
	{"VK_EXT_debug_report", 0}
} known_devext_list[] = {
	{"VK_KHR_swapchain", 1},
	{"VK_KHR_acceleration_structure", 0},
	{"VK_KHR_ray_tracing_pipeline", 0}
static int create_instance(void)
	int i, nlayers = 0, next = 0;
	VkInstanceCreateInfo instinf;
	VkApplicationInfo appinf;
	const char *layers[ARRSZ(known_layer_list)];
	const char *ext[ARRSZ(known_instext_list)];
	vkEnumerateInstanceVersion(&apiver);
	printf("Vulkan API version: %d.%d.%d\n", (apiver >> 22) & 0x7f,
			(apiver >> 12) & 0x3ff, apiver & 0xfff);
	memset(&appinf, 0, sizeof appinf);
	appinf.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
	appinf.pApplicationName = "vkray";
	appinf.pEngineName = "vkray";
	appinf.apiVersion = apiver;
	vkEnumerateInstanceLayerProperties(&inst_layers_count, 0);
	inst_layers = malloc_nf(inst_layers_count * sizeof *inst_layers);
	vkEnumerateInstanceLayerProperties(&inst_layers_count, inst_layers);
	vkEnumerateInstanceExtensionProperties(0, &inst_ext_count, 0);
	inst_ext = malloc_nf(inst_ext_count * sizeof *inst_ext);
	vkEnumerateInstanceExtensionProperties(0, &inst_ext_count, inst_ext);
	for(i=0; i<inst_layers_count; i++) {
		printf(" - %s: %s\n", inst_layers[i].layerName, inst_layers[i].description);
	printf("Instance extensions:\n");
	for(i=0; i<inst_ext_count; i++) {
		printf(" - %s\n", inst_ext[i].extensionName);
	have_debug_report = have_ext(inst_ext, inst_ext_count, "VK_EXT_debug_report");
	for(i=0; i<ARRSZ(known_layer_list); i++) {
		if(have_inst_layer(known_layer_list[i])) {
			layers[nlayers++] = known_layer_list[i];
	for(i=0; i<ARRSZ(known_instext_list); i++) {
		if(have_ext(inst_ext, inst_ext_count, known_instext_list[i].name)) {
			ext[next++] = known_instext_list[i].name;
		} else if(known_instext_list[i].required) {
			fprintf(stderr, "Vulkan implementation lacks required instance extension: %s\n",
					known_instext_list[i].name);
	memset(&instinf, 0, sizeof instinf);
	instinf.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
	instinf.pApplicationInfo = &appinf;
	instinf.enabledLayerCount = nlayers;
	instinf.ppEnabledLayerNames = layers;
	instinf.enabledExtensionCount = next;
	instinf.ppEnabledExtensionNames = ext;
	if(vkCreateInstance(&instinf, 0, &vk) != 0) {
		fprintf(stderr, "failed to create vulkan instance\n");
static int create_surface(void)
	VkXlibSurfaceCreateInfoKHR xinf = {0};
	xinf.sType = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR;
	if(vkCreateXlibSurfaceKHR(vk, &xinf, 0, &vksurf) != 0) {
		fprintf(stderr, "failed to create Xlib window surface\n");
	VkXcbSurfaceCreateInfoKHR xinf = {0};
	xinf.sType = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR;
	xinf.connection = XGetXCBConnection(dpy);
	xinf.window = (xcb_window_t)win;
	if(vkCreateXcbSurfaceKHR(vk, &xinf, 0, &vksurf) != 0) {
		fprintf(stderr, "failed to create XCB window surface\n");
static int choose_phys_dev(void)
	uint32_t i, num_pdev, num_qfam, score, best_score, best_dev;
	VkPhysicalDevice *pdev;
	VkPhysicalDeviceProperties pdevprop;
	VkQueueFamilyProperties *qfam;
	vkEnumeratePhysicalDevices(vk, &num_pdev, 0);
		fprintf(stderr, "no vulkan devices found\n");
	pdev = malloc_nf(num_pdev * sizeof *pdev);
	vkEnumeratePhysicalDevices(vk, &num_pdev, pdev);
	printf("Found %d physical devices\n", num_pdev);
	for(i=0; i<num_pdev; i++) {
		if((score = eval_pdev_score(pdev[i])) && score > best_score) {
		vkGetPhysicalDeviceProperties(pdev[i], &pdevprop);
		printf(" %d: %s (score: %d)\n", i, pdevprop.deviceName, score);
		fprintf(stderr, "no suitable vulkan device found\n");
	vkpdev = pdev[best_dev];
	vkGetPhysicalDeviceQueueFamilyProperties(vkpdev, &num_qfam, 0);
	qfam = malloc_nf(num_qfam * sizeof *qfam);
	vkGetPhysicalDeviceQueueFamilyProperties(vkpdev, &num_qfam, qfam);
	for(i=0; i<num_qfam; i++) {
		vkGetPhysicalDeviceSurfaceSupportKHR(vkpdev, i, vksurf, &can_pres);
		if(qfam[i].queueCount && (qfam[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) && can_pres) {
			vkqfam_maxq = qfam[i].queueCount;
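/* create the logical device with a single graphics/present queue, enabling
 * every known device extension that's available; VKINIT_RAY is dropped from
 * initflags if the ray tracing extensions are missing
 */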
static int create_device(void)
	VkDeviceQueueCreateInfo qinf = {0};
	VkPhysicalDeviceFeatures feat = {0};
	VkDeviceCreateInfo devinf = {0};
	const char *ext[ARRSZ(known_devext_list) + 16];
	vkEnumerateDeviceExtensionProperties(vkpdev, 0, &dev_ext_count, 0);
	dev_ext = malloc_nf(dev_ext_count * sizeof *dev_ext);
	vkEnumerateDeviceExtensionProperties(vkpdev, 0, &dev_ext_count, dev_ext);
	for(i=0; i<ARRSZ(known_devext_list); i++) {
		if(have_ext(dev_ext, dev_ext_count, known_devext_list[i].name)) {
			ext[num_ext++] = known_devext_list[i].name;
		} else if(known_devext_list[i].required) {
			fprintf(stderr, "Vulkan device lacks required extension: %s\n",
					known_devext_list[i].name);
	if(initflags & VKINIT_RAY) {
		if(have_ext(dev_ext, dev_ext_count, "VK_KHR_acceleration_structure") &&
				have_ext(dev_ext, dev_ext_count, "VK_KHR_ray_tracing_pipeline")) {
			ext[num_ext++] = "VK_KHR_acceleration_structure";
			ext[num_ext++] = "VK_KHR_ray_tracing_pipeline";
			initflags &= ~VKINIT_RAY;
	qinf.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
	qinf.queueFamilyIndex = vkqfam_idx;
	qinf.pQueuePriorities = &prio;
	devinf.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
	devinf.pQueueCreateInfos = &qinf;
	devinf.queueCreateInfoCount = 1;
	devinf.pEnabledFeatures = &feat;
	devinf.enabledExtensionCount = num_ext;
	devinf.ppEnabledExtensionNames = ext;
	if(vkCreateDevice(vkpdev, &devinf, 0, &vkdev) != 0) {
		fprintf(stderr, "failed to create vulkan device\n");
	vkGetDeviceQueue(vkdev, vkqfam_idx, 0, &vkq);
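/* (re)create the swapchain at vksc_extent (double-buffered, FIFO present
 * mode), and an image view for each swapchain image
 */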
static int create_swapchain(void)
	VkSwapchainCreateInfoKHR scinf = {0};
	VkImageViewCreateInfo ivinf;
	if(vksc_extent.width <= 0 || vksc_extent.height <= 0) {
	scinf.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
	scinf.surface = vksurf;
	scinf.minImageCount = 2;
	scinf.imageFormat = vksurf_fmt[vksurf_selfmt].format;
	scinf.imageColorSpace = vksurf_fmt[vksurf_selfmt].colorSpace;
	scinf.imageExtent = vksc_extent;
	scinf.imageArrayLayers = 1;
	scinf.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
	scinf.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
	scinf.preTransform = vksurf_caps.currentTransform;
	scinf.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
	scinf.presentMode = VK_PRESENT_MODE_FIFO_KHR;
	scinf.clipped = VK_TRUE;
	if(vkCreateSwapchainKHR(vkdev, &scinf, 0, &vksc) != 0) {
		fprintf(stderr, "failed to create swapchain\n");
	if(!vksc_img || vksc_numimg != num) {
		vkGetSwapchainImagesKHR(vkdev, vksc, &num, 0);
		vksc_img = malloc_nf(num * sizeof *vksc_img);
		vkGetSwapchainImagesKHR(vkdev, vksc, &num, vksc_img);
	if(!vksc_view || vksc_numimg != num) {
		vksc_view = malloc_nf(num * sizeof *vksc_view);
	for(i=0; i<num; i++) {
		memset(&ivinf, 0, sizeof ivinf);
		ivinf.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
		ivinf.image = vksc_img[i];
		ivinf.format = vksurf_fmt[vksurf_selfmt].format;
		ivinf.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
		ivinf.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
		ivinf.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
		ivinf.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
		ivinf.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
		ivinf.subresourceRange.levelCount = 1;
		ivinf.subresourceRange.layerCount = 1;
		ivinf.viewType = VK_IMAGE_VIEW_TYPE_2D;
		if(vkCreateImageView(vkdev, &ivinf, 0, vksc_view + i) != 0) {
			fprintf(stderr, "failed to create image view (%d)\n", i);
static int eval_pdev_score(VkPhysicalDevice dev)
	uint32_t i, num_fmt, num_qfam, num_ext;
	VkQueueFamilyProperties *qfam;
	VkExtensionProperties *ext;
	VkPhysicalDeviceProperties prop;
	VkPhysicalDeviceFeatures feat;
	VkSurfaceFormatKHR *sfmt;
	vkGetPhysicalDeviceProperties(dev, &prop);
	vkGetPhysicalDeviceFeatures(dev, &feat);
	/* check if we have the swapchain extension */
	vkEnumerateDeviceExtensionProperties(dev, 0, &num_ext, 0);
	ext = malloc_nf(num_ext * sizeof *ext);
	vkEnumerateDeviceExtensionProperties(dev, 0, &num_ext, ext);
	if(!have_ext(ext, num_ext, "VK_KHR_swapchain")) {
	/* populate format and present modes arrays, and make sure we have some of each */
	vkGetPhysicalDeviceSurfaceFormatsKHR(dev, vksurf, &num_fmt, 0);
	sfmt = malloc_nf(num_fmt * sizeof *sfmt);
	vkGetPhysicalDeviceSurfaceFormatsKHR(dev, vksurf, &num_fmt, sfmt);
	vkGetPhysicalDeviceSurfaceCapabilitiesKHR(dev, vksurf, &vksurf_caps);
	/* find a queue family which can do graphics and can present */
	vkGetPhysicalDeviceQueueFamilyProperties(dev, &num_qfam, 0);
	qfam = malloc_nf(num_qfam * sizeof *qfam);
	vkGetPhysicalDeviceQueueFamilyProperties(dev, &num_qfam, qfam);
	for(i=0; i<num_qfam; i++) {
		vkGetPhysicalDeviceSurfaceSupportKHR(dev, i, vksurf, &can_pres);
		if(qfam[i].queueCount && (qfam[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) && can_pres) {
	switch(prop.deviceType) {
	case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU:
	case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU:
	case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU:
	if(initflags & VKINIT_RAY) {
		if(have_ext(ext, num_ext, "VK_KHR_acceleration_structure") &&
				have_ext(ext, num_ext, "VK_KHR_ray_tracing_pipeline")) {
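/* pick the swapchain pixel format: prefer the 8 bit per channel UNORM
 * formats with an sRGB non-linear color space
 */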
static int choose_pixfmt(void)
	static const VkFormat pref[] = {
		VK_FORMAT_B8G8R8_UNORM,
		VK_FORMAT_R8G8B8_UNORM,
		VK_FORMAT_B8G8R8A8_UNORM,
		VK_FORMAT_R8G8B8A8_UNORM
	vkGetPhysicalDeviceSurfaceFormatsKHR(vkpdev, vksurf, &num_fmt, 0);
	if(!num_fmt) return -1;
	vksurf_fmt = malloc_nf(num_fmt * sizeof *vksurf_fmt);
	vkGetPhysicalDeviceSurfaceFormatsKHR(vkpdev, vksurf, &num_fmt, vksurf_fmt);
	for(i=0; i<num_fmt; i++) {
		if(vksurf_fmt[i].colorSpace != VK_COLOR_SPACE_SRGB_NONLINEAR_KHR) {
		for(j=0; j<sizeof pref / sizeof *pref; j++) {
			if(vksurf_fmt[i].format == pref[j]) {
				vksurf_numfmt = num_fmt;
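/* linear searches through the layer and extension lists */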
static int have_inst_layer(const char *name)
	for(i=0; i<inst_layers_count; i++) {
		if(strcmp(inst_layers[i].layerName, name) == 0) {
static int have_ext(VkExtensionProperties *ext, int next, const char *name)
	for(i=0; i<next; i++) {
		if(strcmp(ext[i].extensionName, name) == 0) {