framebuffers
[vktest3] / src / vk.c
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdint.h>
#include <stdarg.h>
#include <vulkan/vulkan.h>
#include "vk.h"
#include "util.h"
#include "darray.h"

#ifdef __WIN32__
#include <vulkan/vulkan_win32.h>
#else
/*#include <vulkan/vulkan_xlib.h>*/
#include <X11/Xlib-xcb.h>
#include <vulkan/vulkan_xcb.h>
#endif

struct rpass {
	int used;
	int fmt;
	int zfmt;
	int num_colbuf;
	int num_samples;
	int clear;

	int vkobj_valid;
	VkRenderPass vkobj;
};

#define MAX_FB_IMGV	8
struct framebuf {
	int used;
	int width, height;

	/* if rpasses[rpidx].vkobj != vkrpass, the framebuf is invalid */
	int rpidx;
	VkRenderPass vkrpass;

	VkImageView imgv[MAX_FB_IMGV];
	int num_imgv;

	int vkobj_valid;
	VkFramebuffer vkobj;
};


static struct rpass *rpasses;
static struct framebuf *framebufs;


static int create_instance(void);
static int create_surface(void);
static int choose_phys_dev(void);
static int create_device(void);
static int create_swapchain(void);

static int choose_pixfmt(void);
static int eval_pdev_score(VkPhysicalDevice dev);
static int have_inst_layer(const char *name);
static int have_ext(VkExtensionProperties *ext, int next, const char *name);

static Display *dpy;
static Window win;
static int initflags;

static VkInstance vk;
static VkPhysicalDevice vkpdev;
static int vkqfam_idx, vkqfam_maxq;
static VkDevice vkdev;
static VkQueue vkq;
static VkSurfaceKHR vksurf;
static VkSurfaceCapabilitiesKHR vksurf_caps;
static int vksurf_numfmt, vksurf_selfmt;
static VkSurfaceFormatKHR *vksurf_fmt;
static VkSwapchainKHR vksc;
static int vksc_numimg;
static VkImage *vksc_img;
static VkExtent2D vksc_extent;
static VkImageView *vksc_view;

static VkLayerProperties *inst_layers;
static VkExtensionProperties *inst_ext, *dev_ext;
static uint32_t inst_ext_count, dev_ext_count, inst_layers_count;

static VkPhysicalDevice *pdev_list;
static uint32_t num_pdev;

static int have_raytrace, have_debug_report;

void vk_init_xwin(Display *d, Window w)
{
	dpy = d;
	win = w;
}

int vk_init(unsigned int flags, unsigned int *usedflags)
{
	initflags = flags;
	if(create_instance() == -1) return -1;
	if(create_surface() == -1) return -1;
	if(choose_phys_dev() == -1) return -1;
	if(create_device() == -1) return -1;

	if(initflags != flags) {
		if(usedflags) {
			*usedflags = initflags;
		} else {
			vk_cleanup();
			return -1;
		}
	}
	return 0;
}

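/* Illustrative usage sketch for the init path above (not part of this file's
 * interface beyond what vk.h declares). VKINIT_DEPTH is an assumed flag name;
 * only VKINIT_RAY is visible in this file. Kept inside #if 0 so it never
 * builds.
 */
#if 0
static int start_vulkan(Display *xdpy, Window xwin, int width, int height)
{
	unsigned int used;

	vk_init_xwin(xdpy, xwin);	/* hand the X11 window over before vk_init */
	if(vk_init(VKINIT_DEPTH | VKINIT_RAY, &used) == -1) {
		return -1;		/* no usable instance/device */
	}
	if(!(used & VKINIT_RAY)) {
		fprintf(stderr, "ray tracing not available, continuing without it\n");
	}
	/* the swapchain is only created once we know the window size */
	return vk_reshape(width, height);
}
#endif
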
void vk_cleanup(void)
{
	int i;

	/* destroy the swapchain image views before freeing the arrays */
	if(vksc_view) {
		for(i=0; i<vksc_numimg; i++) {
			vkDestroyImageView(vkdev, vksc_view[i], 0);
		}
		free(vksc_view);
		vksc_view = 0;
	}
	free(vksc_img);
	vksc_img = 0;
	if(vksc) {
		vkDestroySwapchainKHR(vkdev, vksc, 0);
		vksc = 0;
	}
	if(vkdev) {
		vkDestroyDevice(vkdev, 0);
		vkdev = 0;
	}
	if(vksurf) {
		vkDestroySurfaceKHR(vk, vksurf, 0);
		vksurf = 0;
	}
	if(vk) {
		vkDestroyInstance(vk, 0);
		vk = 0;
	}
	free(inst_layers);
	inst_layers = 0;
	free(inst_ext);
	inst_ext = 0;
	free(dev_ext);
	dev_ext = 0;
	free(pdev_list);
	pdev_list = 0;
}

int vk_reshape(int xsz, int ysz)
{
	int i;

	if(vksc && vksc_extent.width == xsz && vksc_extent.height == ysz) {
		return 0;
	}

	if(vksc_view) {
		for(i=0; i<vksc_numimg; i++) {
			vkDestroyImageView(vkdev, vksc_view[i], 0);
		}
	}
	if(vksc) vkDestroySwapchainKHR(vkdev, vksc, 0);

	vksc_extent.width = xsz;
	vksc_extent.height = ysz;

	if(create_swapchain() == -1) return -1;

	/* TODO create depth/stencil buffers as needed (initflags) */
	return 0;
}

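/* A rough, illustrative sketch of what the TODO in vk_reshape would involve:
 * create a depth/stencil image matching the swapchain extent, bind memory to
 * it, and make an image view to attach to framebuffers. None of these names
 * (zimg, zimg_mem, zimg_view, find_mem_type) exist in this file; find_mem_type
 * stands for a hypothetical helper that would pick a memory type index via
 * vkGetPhysicalDeviceMemoryProperties. Disabled with #if 0 on purpose.
 */
#if 0
static VkImage zimg;
static VkDeviceMemory zimg_mem;
static VkImageView zimg_view;

static int create_zbuffer(void)
{
	VkImageCreateInfo iinf = {0};
	VkMemoryAllocateInfo ainf = {0};
	VkImageViewCreateInfo vinf = {0};
	VkMemoryRequirements mreq;

	iinf.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
	iinf.imageType = VK_IMAGE_TYPE_2D;
	iinf.format = VK_FORMAT_D24_UNORM_S8_UINT;
	iinf.extent.width = vksc_extent.width;
	iinf.extent.height = vksc_extent.height;
	iinf.extent.depth = 1;
	iinf.mipLevels = 1;
	iinf.arrayLayers = 1;
	iinf.samples = VK_SAMPLE_COUNT_1_BIT;
	iinf.tiling = VK_IMAGE_TILING_OPTIMAL;
	iinf.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
	iinf.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
	if(vkCreateImage(vkdev, &iinf, 0, &zimg) != 0) {
		return -1;
	}

	vkGetImageMemoryRequirements(vkdev, zimg, &mreq);
	ainf.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
	ainf.allocationSize = mreq.size;
	ainf.memoryTypeIndex = find_mem_type(mreq.memoryTypeBits,
			VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);	/* hypothetical helper */
	if(vkAllocateMemory(vkdev, &ainf, 0, &zimg_mem) != 0) {
		return -1;
	}
	vkBindImageMemory(vkdev, zimg, zimg_mem, 0);

	vinf.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
	vinf.image = zimg;
	vinf.viewType = VK_IMAGE_VIEW_TYPE_2D;
	vinf.format = iinf.format;
	vinf.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
	vinf.subresourceRange.levelCount = 1;
	vinf.subresourceRange.layerCount = 1;
	if(vkCreateImageView(vkdev, &vinf, 0, &zimg_view) != 0) {
		return -1;
	}
	return 0;
}
#endif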

int vk_create_rpass(void)
{
	int i;
	struct rpass rpass = {0}, *rp = &rpass;

	if(!rpasses) {
		rpasses = darr_alloc(0, sizeof *rpasses);
		darr_push(rpasses, &rpass);	/* add dummy rpass */
	}

	/* reuse the first free slot, if there is one */
	for(i=1; i<darr_size(rpasses); i++) {
		if(!rpasses[i].used) {
			rp = rpasses + i;
			break;
		}
	}

	/* init renderpass defaults */
	rp->used = 1;
	rp->fmt = vksurf_fmt[vksurf_selfmt].format;
	rp->zfmt = VK_FORMAT_D24_UNORM_S8_UINT;
	rp->num_colbuf = 1;
	rp->num_samples = 1;
	rp->clear = 1;
	rp->vkobj_valid = 0;
	rp->vkobj = 0;

	if(rp == &rpass) {
		darr_push(rpasses, rp);
		return darr_size(rpasses) - 1;
	}
	return rp - rpasses;
}

void vk_free_rpass(int rp)
{
	if(!rpasses || rp < 1 || rp >= darr_size(rpasses)) {
		return;
	}

	if(rpasses[rp].used && rpasses[rp].vkobj) {
		vkDestroyRenderPass(vkdev, rpasses[rp].vkobj, 0);
	}
	rpasses[rp].used = 0;
}

void vk_rpass_colorbuf(int rp, int fmt, int n)
{
	rpasses[rp].fmt = fmt;
	rpasses[rp].num_colbuf = n;
	rpasses[rp].vkobj_valid = 0;
}

void vk_rpass_msaa(int rp, int nsamp)
{
	rpasses[rp].num_samples = nsamp;
	rpasses[rp].vkobj_valid = 0;
}

void vk_rpass_clear(int rp, int clear)
{
	rpasses[rp].clear = clear;
	rpasses[rp].vkobj_valid = 0;
}

VkRenderPass vk_rpass(int rp)
{
	int i, zidx;
	struct rpass *r;
	VkAttachmentDescription att[17];
	VkAttachmentReference catref[16], zatref;
	VkSubpassDescription subpass;
	VkRenderPassCreateInfo pinf;

	r = rpasses + rp;

	if(!r->vkobj_valid) {
		if(r->vkobj) {
			vkDestroyRenderPass(vkdev, r->vkobj, 0);
			r->vkobj = 0;
		}

		zidx = r->num_colbuf;
		memset(att, 0, sizeof att);
		for(i=0; i<r->num_colbuf; i++) {
			att[i].format = r->fmt;
			att[i].samples = r->num_samples;
			att[i].loadOp = r->clear ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_DONT_CARE;
			att[i].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
			att[i].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
			att[i].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
			att[i].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
			att[i].finalLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
		}
		att[zidx].format = r->zfmt;
		att[zidx].samples = 1;
		att[zidx].loadOp = r->clear ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_DONT_CARE;
		att[zidx].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
		att[zidx].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
		att[zidx].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
		att[zidx].initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
		att[zidx].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

		for(i=0; i<r->num_colbuf; i++) {
			catref[i].attachment = i;
			catref[i].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
		}
		zatref.attachment = zidx;
		zatref.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;

		memset(&subpass, 0, sizeof subpass);
		subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
		subpass.colorAttachmentCount = r->num_colbuf;
		subpass.pColorAttachments = catref;
		subpass.pDepthStencilAttachment = &zatref;

		memset(&pinf, 0, sizeof pinf);
		pinf.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
		pinf.attachmentCount = r->num_colbuf + 1;
		pinf.pAttachments = att;
		pinf.subpassCount = 1;
		pinf.pSubpasses = &subpass;

		if(vkCreateRenderPass(vkdev, &pinf, 0, &r->vkobj) != 0) {
			fprintf(stderr, "failed to create render pass!\n");
			return 0;
		}
		r->vkobj_valid = 1;
	}

	return r->vkobj;
}

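/* Illustrative usage of the render pass interface above; only functions
 * defined in this file are used. Disabled with #if 0, it is not part of the
 * actual code.
 */
#if 0
static void example_rpass(void)
{
	int rp;
	VkRenderPass vkrp;

	rp = vk_create_rpass();		/* slot with defaults: 1 color buf, depth/stencil, clear on */
	vk_rpass_msaa(rp, 4);		/* request 4 samples; marks the cached VkRenderPass stale */
	vk_rpass_clear(rp, 0);		/* keep previous contents instead of clearing */

	if(!(vkrp = vk_rpass(rp))) {	/* lazily (re)creates the VkRenderPass */
		fprintf(stderr, "render pass creation failed\n");
	}

	vk_free_rpass(rp);		/* releases the slot and the vulkan object */
}
#endif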

int vk_create_fb(void)
{
	int i;
	struct framebuf framebuf = {0}, *fb = &framebuf;

	if(!framebufs) {
		framebufs = darr_alloc(0, sizeof *framebufs);
		darr_push(framebufs, &framebuf);	/* add dummy framebuffer */
	}

	/* reuse the first free slot, if there is one */
	for(i=1; i<darr_size(framebufs); i++) {
		if(!framebufs[i].used) {
			fb = framebufs + i;
			break;
		}
	}

	/* init framebuffer defaults */
	memset(fb, 0, sizeof *fb);
	fb->used = 1;

	if(fb == &framebuf) {
		darr_push(framebufs, fb);
		return darr_size(framebufs) - 1;
	}
	return fb - framebufs;
}

void vk_free_fb(int fb)
{
	if(!framebufs || fb < 1 || fb >= darr_size(framebufs)) {
		return;
	}

	if(framebufs[fb].used && framebufs[fb].vkobj) {
		vkDestroyFramebuffer(vkdev, framebufs[fb].vkobj, 0);
	}
	framebufs[fb].used = 0;
}

void vk_fb_size(int fb, int x, int y)
{
	framebufs[fb].width = x;
	framebufs[fb].height = y;
	framebufs[fb].vkobj_valid = 0;
}

void vk_fb_rpass(int fb, int rpass)
{
	if(rpass < 0 || rpass >= darr_size(rpasses) || !rpasses[rpass].used) {
		fprintf(stderr, "vk_fb_rpass: %d is not a valid renderpass\n", rpass);
		return;
	}

	framebufs[fb].rpidx = rpass;
	if(rpasses[rpass].vkobj_valid) {
		framebufs[fb].vkrpass = rpasses[rpass].vkobj;
	} else {
		framebufs[fb].vkrpass = 0;
	}
	framebufs[fb].vkobj_valid = 0;
}

void vk_fb_images(int fb, int n, ...)
{
	int i;
	va_list ap;

	if(n > MAX_FB_IMGV) {
		fprintf(stderr, "vk_fb_images: %d is too many images\n", n);
		n = MAX_FB_IMGV;
	}

	va_start(ap, n);
	for(i=0; i<n; i++) {
		framebufs[fb].imgv[i] = va_arg(ap, VkImageView);
	}
	va_end(ap);
	framebufs[fb].num_imgv = n;
	framebufs[fb].vkobj_valid = 0;
}

VkFramebuffer vk_fb(int fb)
{
	VkFramebufferCreateInfo fbinf;
	VkRenderPass rpass;
	struct framebuf *f;

	f = framebufs + fb;

	if(!(rpass = vk_rpass(f->rpidx))) {
		return 0;
	}

	if(rpass != f->vkrpass || !f->vkobj_valid) {
		f->vkrpass = rpass;
		if(f->vkobj) {
			vkDestroyFramebuffer(vkdev, f->vkobj, 0);
			f->vkobj = 0;
		}

		memset(&fbinf, 0, sizeof fbinf);
		fbinf.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
		fbinf.renderPass = rpass;
		fbinf.attachmentCount = f->num_imgv;
		fbinf.pAttachments = f->imgv;
		fbinf.width = f->width;
		fbinf.height = f->height;
		fbinf.layers = 1;

		if(vkCreateFramebuffer(vkdev, &fbinf, 0, &f->vkobj) != 0) {
			fprintf(stderr, "vk_fb: failed to create framebuffer\n");
			return 0;
		}
		f->vkobj_valid = 1;
	}
	return f->vkobj;
}

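/* Illustrative sketch of tying a render pass slot and image views together
 * through the framebuffer interface above. color_view and depth_view stand in
 * for VkImageView handles obtained elsewhere (e.g. a swapchain image view and
 * a depth image view); they are not provided by this file. Disabled with #if 0.
 */
#if 0
static VkFramebuffer example_fb(int rp, VkImageView color_view, VkImageView depth_view,
		int width, int height)
{
	int fb;

	fb = vk_create_fb();
	vk_fb_rpass(fb, rp);		/* rp comes from vk_create_rpass */
	vk_fb_size(fb, width, height);
	/* attachment order must match the render pass: color first, then depth */
	vk_fb_images(fb, 2, color_view, depth_view);
	return vk_fb(fb);		/* lazily creates or recreates the VkFramebuffer */
}
#endif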

#define ARRSZ(arr)	(sizeof arr / sizeof *arr)
static const char *known_layer_list[] = {
	"VK_LAYER_GOOGLE_threading",
	"VK_LAYER_LUNARG_parameter_validation",
	"VK_LAYER_LUNARG_object_tracker",
	"VK_LAYER_LUNARG_image",
	"VK_LAYER_LUNARG_core_validation",
	"VK_LAYER_LUNARG_swapchain",
	"VK_LAYER_GOOGLE_unique_objects"
};

static struct {
	const char *name;
	int required;
} known_instext_list[] = {
	{"VK_KHR_surface", 1},
#ifdef __WIN32__
	{"VK_KHR_win32_surface", 1},
#else
	/*{"VK_KHR_xlib_surface", 1},*/
	{"VK_KHR_xcb_surface", 1},
#endif
	{"VK_EXT_debug_report", 0}
};

static struct {
	const char *name;
	int required;
} known_devext_list[] = {
	{"VK_KHR_swapchain", 1},
	{"VK_KHR_acceleration_structure", 0},
	{"VK_KHR_ray_tracing_pipeline", 0}
};

static int create_instance(void)
{
	int i, nlayers = 0, next = 0;
	VkInstanceCreateInfo instinf;
	VkApplicationInfo appinf;
	const char *layers[ARRSZ(known_layer_list)];
	const char *ext[ARRSZ(known_instext_list)];
	uint32_t apiver;

	vkEnumerateInstanceVersion(&apiver);
	printf("Vulkan API version: %d.%d.%d\n", (apiver >> 22) & 0x7f,
			(apiver >> 12) & 0x3ff, apiver & 0xfff);

	memset(&appinf, 0, sizeof appinf);
	appinf.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
	appinf.pApplicationName = "vkray";
	appinf.pEngineName = "vkray";
	appinf.apiVersion = apiver;

	vkEnumerateInstanceLayerProperties(&inst_layers_count, 0);
	inst_layers = malloc_nf(inst_layers_count * sizeof *inst_layers);
	vkEnumerateInstanceLayerProperties(&inst_layers_count, inst_layers);

	vkEnumerateInstanceExtensionProperties(0, &inst_ext_count, 0);
	inst_ext = malloc_nf(inst_ext_count * sizeof *inst_ext);
	vkEnumerateInstanceExtensionProperties(0, &inst_ext_count, inst_ext);

	printf("Layers:\n");
	for(i=0; i<inst_layers_count; i++) {
		printf(" - %s: %s\n", inst_layers[i].layerName, inst_layers[i].description);
	}
	printf("Instance extensions:\n");
	for(i=0; i<inst_ext_count; i++) {
		printf(" - %s\n", inst_ext[i].extensionName);
	}

	have_debug_report = have_ext(inst_ext, inst_ext_count, "VK_EXT_debug_report");

	for(i=0; i<ARRSZ(known_layer_list); i++) {
		if(have_inst_layer(known_layer_list[i])) {
			layers[nlayers++] = known_layer_list[i];
		}
	}
	for(i=0; i<ARRSZ(known_instext_list); i++) {
		if(have_ext(inst_ext, inst_ext_count, known_instext_list[i].name)) {
			ext[next++] = known_instext_list[i].name;
		} else if(known_instext_list[i].required) {
			fprintf(stderr, "Vulkan implementation lacks required instance extension: %s\n",
					known_instext_list[i].name);
			return -1;
		}
	}

	memset(&instinf, 0, sizeof instinf);
	instinf.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
	instinf.pApplicationInfo = &appinf;
	instinf.enabledLayerCount = nlayers;
	instinf.ppEnabledLayerNames = layers;
	instinf.enabledExtensionCount = next;
	instinf.ppEnabledExtensionNames = ext;
	if(vkCreateInstance(&instinf, 0, &vk) != 0) {
		fprintf(stderr, "failed to create vulkan instance\n");
		return -1;
	}

	return 0;
}

static int create_surface(void)
{
	/*
	VkXlibSurfaceCreateInfoKHR xinf = {0};
	xinf.sType = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR;
	xinf.dpy = dpy;
	xinf.window = win;

	if(vkCreateXlibSurfaceKHR(vk, &xinf, 0, &vksurf) != 0) {
		fprintf(stderr, "failed to create Xlib window surface\n");
		return -1;
	}
	*/
	VkXcbSurfaceCreateInfoKHR xinf = {0};
	xinf.sType = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR;
	xinf.connection = XGetXCBConnection(dpy);
	xinf.window = (xcb_window_t)win;

	if(vkCreateXcbSurfaceKHR(vk, &xinf, 0, &vksurf) != 0) {
		fprintf(stderr, "failed to create XCB window surface\n");
		return -1;
	}
	return 0;
}

static int choose_phys_dev(void)
{
	uint32_t i, num_pdev, num_qfam, score, best_score, best_dev;
	VkPhysicalDevice *pdev;
	VkPhysicalDeviceProperties pdevprop;
	VkQueueFamilyProperties *qfam;
	VkBool32 can_pres;

	vkEnumeratePhysicalDevices(vk, &num_pdev, 0);
	if(!num_pdev) {
		fprintf(stderr, "no vulkan devices found\n");
		return -1;
	}
	pdev = malloc_nf(num_pdev * sizeof *pdev);
	vkEnumeratePhysicalDevices(vk, &num_pdev, pdev);

	printf("Found %d physical devices\n", num_pdev);

	best_score = 0;
	best_dev = -1;
	for(i=0; i<num_pdev; i++) {
		if((score = eval_pdev_score(pdev[i])) && score > best_score) {
			best_score = score;
			best_dev = i;
		}

		vkGetPhysicalDeviceProperties(pdev[i], &pdevprop);
		printf(" %d: %s (score: %d)\n", i, pdevprop.deviceName, score);
	}
	if(best_dev == -1) {
		fprintf(stderr, "no suitable vulkan device found\n");
		free(pdev);
		return -1;
	}
	vkpdev = pdev[best_dev];

	/* re-query the surface capabilities for the device we actually chose;
	 * eval_pdev_score leaves behind those of the last device it examined
	 */
	vkGetPhysicalDeviceSurfaceCapabilitiesKHR(vkpdev, vksurf, &vksurf_caps);

	vkGetPhysicalDeviceQueueFamilyProperties(vkpdev, &num_qfam, 0);
	qfam = malloc_nf(num_qfam * sizeof *qfam);
	vkGetPhysicalDeviceQueueFamilyProperties(vkpdev, &num_qfam, qfam);

	vkqfam_idx = -1;
	for(i=0; i<num_qfam; i++) {
		vkGetPhysicalDeviceSurfaceSupportKHR(vkpdev, i, vksurf, &can_pres);
		if(qfam[i].queueCount && (qfam[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) && can_pres) {
			vkqfam_maxq = qfam[i].queueCount;
			vkqfam_idx = i;
			break;
		}
	}

	free(qfam);
	free(pdev);

	if(vkqfam_idx == -1) {
		fprintf(stderr, "no queue family can do graphics and present\n");
		return -1;
	}
	if(choose_pixfmt() == -1) {
		fprintf(stderr, "failed to find a suitable surface format\n");
		return -1;
	}
	return 0;
}

static int create_device(void)
{
	float prio = 1.0f;
	VkDeviceQueueCreateInfo qinf = {0};
	VkPhysicalDeviceFeatures feat = {0};
	VkDeviceCreateInfo devinf = {0};
	const char *ext[ARRSZ(known_devext_list) + 16];
	int i, num_ext;

	vkEnumerateDeviceExtensionProperties(vkpdev, 0, &dev_ext_count, 0);
	dev_ext = malloc_nf(dev_ext_count * sizeof *dev_ext);
	vkEnumerateDeviceExtensionProperties(vkpdev, 0, &dev_ext_count, dev_ext);

	num_ext = 0;
	for(i=0; i<ARRSZ(known_devext_list); i++) {
		if(have_ext(dev_ext, dev_ext_count, known_devext_list[i].name)) {
			ext[num_ext++] = known_devext_list[i].name;
		} else if(known_devext_list[i].required) {
			fprintf(stderr, "Vulkan device lacks required extension: %s\n",
					known_devext_list[i].name);
			return -1;
		}
	}

	/* the raytracing extensions are optional entries in known_devext_list, so
	 * when present they are already in ext[]; here we only drop VKINIT_RAY if
	 * they are missing
	 */
	if(initflags & VKINIT_RAY) {
		if(!have_ext(dev_ext, dev_ext_count, "VK_KHR_acceleration_structure") ||
				!have_ext(dev_ext, dev_ext_count, "VK_KHR_ray_tracing_pipeline")) {
			initflags &= ~VKINIT_RAY;
		}
	}

	qinf.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
	qinf.queueFamilyIndex = vkqfam_idx;
	qinf.queueCount = 1;
	qinf.pQueuePriorities = &prio;

	devinf.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
	devinf.pQueueCreateInfos = &qinf;
	devinf.queueCreateInfoCount = 1;
	devinf.pEnabledFeatures = &feat;
	devinf.enabledExtensionCount = num_ext;
	devinf.ppEnabledExtensionNames = ext;

	if(vkCreateDevice(vkpdev, &devinf, 0, &vkdev) != 0) {
		fprintf(stderr, "failed to create vulkan device\n");
		return -1;
	}

	vkGetDeviceQueue(vkdev, vkqfam_idx, 0, &vkq);
	return 0;
}

static int create_swapchain(void)
{
	int i;
	uint32_t num;
	VkSwapchainCreateInfoKHR scinf = {0};
	VkImageViewCreateInfo ivinf;

	if(vksc_extent.width <= 0 || vksc_extent.height <= 0) {
		return -1;
	}

	scinf.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
	scinf.surface = vksurf;
	scinf.minImageCount = 2;
	scinf.imageFormat = vksurf_fmt[vksurf_selfmt].format;
	scinf.imageColorSpace = vksurf_fmt[vksurf_selfmt].colorSpace;
	scinf.imageExtent = vksc_extent;
	scinf.imageArrayLayers = 1;
	scinf.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
	scinf.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
	scinf.preTransform = vksurf_caps.currentTransform;
	scinf.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
	scinf.presentMode = VK_PRESENT_MODE_FIFO_KHR;
	scinf.clipped = VK_TRUE;

	if(vkCreateSwapchainKHR(vkdev, &scinf, 0, &vksc) != 0) {
		fprintf(stderr, "failed to create swapchain\n");
		return -1;
	}

	/* query the image count before using it to decide if the arrays need to grow */
	vkGetSwapchainImagesKHR(vkdev, vksc, &num, 0);
	if(!vksc_img || vksc_numimg != num) {
		free(vksc_img);
		vksc_img = malloc_nf(num * sizeof *vksc_img);
	}
	vkGetSwapchainImagesKHR(vkdev, vksc, &num, vksc_img);
	if(!vksc_view || vksc_numimg != num) {
		free(vksc_view);
		vksc_view = malloc_nf(num * sizeof *vksc_view);
	}
	vksc_numimg = num;

	for(i=0; i<num; i++) {
		memset(&ivinf, 0, sizeof ivinf);
		ivinf.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
		ivinf.image = vksc_img[i];
		ivinf.format = vksurf_fmt[vksurf_selfmt].format;
		ivinf.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
		ivinf.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
		ivinf.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
		ivinf.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
		ivinf.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
		ivinf.subresourceRange.levelCount = 1;
		ivinf.subresourceRange.layerCount = 1;
		ivinf.viewType = VK_IMAGE_VIEW_TYPE_2D;

		if(vkCreateImageView(vkdev, &ivinf, 0, vksc_view + i) != 0) {
			fprintf(stderr, "failed to create image view (%d)\n", i);
			return -1;
		}
	}
	return 0;
}


static int eval_pdev_score(VkPhysicalDevice dev)
{
	int score = 0;
	uint32_t i, num_fmt, num_qfam, num_ext;
	VkQueueFamilyProperties *qfam;
	VkExtensionProperties *ext;
	VkPhysicalDeviceProperties prop;
	VkPhysicalDeviceFeatures feat;
	VkSurfaceFormatKHR *sfmt;
	VkBool32 can_pres;

	vkGetPhysicalDeviceProperties(dev, &prop);
	vkGetPhysicalDeviceFeatures(dev, &feat);

	/* check if we have the swapchain extension */
	vkEnumerateDeviceExtensionProperties(dev, 0, &num_ext, 0);
	ext = malloc_nf(num_ext * sizeof *ext);
	vkEnumerateDeviceExtensionProperties(dev, 0, &num_ext, ext);

	if(!have_ext(ext, num_ext, "VK_KHR_swapchain")) {
		free(ext);
		return 0;
	}

	/* populate format and present modes arrays, and make sure we have some of each */
	vkGetPhysicalDeviceSurfaceFormatsKHR(dev, vksurf, &num_fmt, 0);
	if(!num_fmt) {
		free(ext);
		return 0;
	}
	sfmt = malloc_nf(num_fmt * sizeof *sfmt);
	vkGetPhysicalDeviceSurfaceFormatsKHR(dev, vksurf, &num_fmt, sfmt);

	vkGetPhysicalDeviceSurfaceCapabilitiesKHR(dev, vksurf, &vksurf_caps);

	/* find a queue family which can do graphics and can present */
	vkGetPhysicalDeviceQueueFamilyProperties(dev, &num_qfam, 0);
	qfam = malloc_nf(num_qfam * sizeof *qfam);
	vkGetPhysicalDeviceQueueFamilyProperties(dev, &num_qfam, qfam);

	for(i=0; i<num_qfam; i++) {
		vkGetPhysicalDeviceSurfaceSupportKHR(dev, i, vksurf, &can_pres);
		if(qfam[i].queueCount && (qfam[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) && can_pres) {
			score = 1;
		}
	}

	switch(prop.deviceType) {
	case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU:
		score++;
		break;
	case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU:
		score += 2;
		break;
	case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU:
		score += 4;
		break;
	default:
		break;
	}

	if(initflags & VKINIT_RAY) {
		if(have_ext(ext, num_ext, "VK_KHR_acceleration_structure") &&
				have_ext(ext, num_ext, "VK_KHR_ray_tracing_pipeline")) {
			score += 100;
		}
	}

	free(ext);
	free(sfmt);
	free(qfam);
	return score;
}

static int choose_pixfmt(void)
{
	static const VkFormat pref[] = {
		VK_FORMAT_B8G8R8_UNORM,
		VK_FORMAT_R8G8B8_UNORM,
		VK_FORMAT_B8G8R8A8_UNORM,
		VK_FORMAT_R8G8B8A8_UNORM
	};
	int i, j;
	uint32_t num_fmt;

	vkGetPhysicalDeviceSurfaceFormatsKHR(vkpdev, vksurf, &num_fmt, 0);
	if(!num_fmt) return -1;
	vksurf_fmt = malloc_nf(num_fmt * sizeof *vksurf_fmt);
	vkGetPhysicalDeviceSurfaceFormatsKHR(vkpdev, vksurf, &num_fmt, vksurf_fmt);

	vksurf_selfmt = 0;
	for(i=0; i<num_fmt; i++) {
		if(vksurf_fmt[i].colorSpace != VK_COLOR_SPACE_SRGB_NONLINEAR_KHR) {
			continue;
		}
		for(j=0; j<sizeof pref / sizeof *pref; j++) {
			if(vksurf_fmt[i].format == pref[j]) {
				vksurf_selfmt = i;
				vksurf_numfmt = num_fmt;
				return i;
			}
		}
	}
	free(vksurf_fmt);
	vksurf_fmt = 0;
	return -1;
}


static int have_inst_layer(const char *name)
{
	int i;
	for(i=0; i<inst_layers_count; i++) {
		if(strcmp(inst_layers[i].layerName, name) == 0) {
			return 1;
		}
	}
	return 0;
}

static int have_ext(VkExtensionProperties *ext, int next, const char *name)
{
	int i;
	for(i=0; i<next; i++) {
		if(strcmp(ext[i].extensionName, name) == 0) {
			return 1;
		}
	}
	return 0;
}