4 #include <vulkan/vulkan.h>
9 #include <vulkan/vulkan_win32.h>
11 /*#include <vulkan/vulkan_xlib.h>*/
12 #include <X11/Xlib-xcb.h>
13 #include <vulkan/vulkan_xcb.h>
16 static int create_instance(void);
17 static int create_surface(void);
18 static int choose_phys_dev(void);
19 static int create_device(void);
20 static int create_swapchain(void);
22 static int choose_pixfmt(void);
23 static int eval_pdev_score(VkPhysicalDevice dev);
24 static int have_inst_layer(const char *name);
25 static int have_ext(VkExtensionProperties *ext, int next, const char *name);
/* --- file-scope Vulkan state: one instance/device/surface/swapchain --- */
static VkPhysicalDevice vkpdev;
static int vkqfam_idx, vkqfam_maxq;	/* chosen queue family index and its queue count */
static VkDevice vkdev;
static VkSurfaceKHR vksurf;
static VkSurfaceCapabilitiesKHR vksurf_caps;
static int vksurf_numfmt, vksurf_selfmt;	/* surface format count and selected index into vksurf_fmt */
static VkSurfaceFormatKHR *vksurf_fmt;
static VkSwapchainKHR vksc;
static int vksc_numimg;	/* number of swapchain images */
static VkImage *vksc_img;	/* swapchain images (owned by the swapchain) */
static VkExtent2D vksc_extent;
static VkImageView *vksc_view;	/* one view per swapchain image */
static VkLayerProperties *inst_layers;
static VkExtensionProperties *inst_ext, *dev_ext;	/* cached instance/device extension lists */
static uint32_t inst_ext_count, dev_ext_count, inst_layers_count;
static VkPhysicalDevice *pdev_list;
static uint32_t num_pdev;
static int have_raytrace, have_debug_report;	/* optional-feature availability flags */
55 void vk_init_xwin(Display *d, Window w)
/* Initialize Vulkan: instance, window surface, physical device selection,
 * and logical device, in that order; returns -1 on any failure.
 * On success *usedflags (if set) reports which requested flags were honored. */
int vk_init(unsigned int flags, unsigned int *usedflags)
if(create_instance() == -1) return -1;
if(create_surface() == -1) return -1;
if(choose_phys_dev() == -1) return -1;
if(create_device() == -1) return -1;
/* report back any flags that had to be dropped (e.g. ray tracing) */
if(initflags != flags) {
*usedflags = initflags;
/* --- teardown path (header of the cleanup function is not visible in this
 * excerpt): destroy objects in reverse creation order --- */
for(i=0; i<vksc_numimg; i++) {
vkDestroyImageView(vkdev, vksc_view[i], 0);
vkDestroySwapchainKHR(vkdev, vksc, 0);
vkDestroyDevice(vkdev, 0);
vkDestroySurfaceKHR(vk, vksurf, 0);
vkDestroyInstance(vk, 0);
/* Resize the swapchain to xsz by ysz; no-op when the size is unchanged.
 * Destroys the old image views and swapchain before re-creating them. */
int vk_reshape(int xsz, int ysz)
if(vksc && vksc_extent.width == xsz && vksc_extent.height == ysz) {
for(i=0; i<vksc_numimg; i++) {
vkDestroyImageView(vkdev, vksc_view[i], 0);
if(vksc) vkDestroySwapchainKHR(vkdev, vksc, 0);
vksc_extent.width = xsz;
vksc_extent.height = ysz;
if(create_swapchain() == -1) return -1;
/* TODO create depth/stencil buffers as needed (initflags) */
/* TODO: stuff about depth-stencil */
static struct rpass *rpasses;	/* dynamic array (darr) of render passes; slot 0 is a dummy */

/* Allocate a render pass slot and return its handle (index into rpasses).
 * Slot 0 is reserved as a dummy so 0 is never a valid handle; freed slots
 * are reused before the array grows. */
int vk_create_rpass(void)
struct rpass rpass = {0}, *rp = &rpass;
rpasses = darr_alloc(0, sizeof *rpasses);
darr_push(rpasses, &rpass); /* add dummy rpass */
/* reuse a previously freed slot if one exists */
for(i=1; i<darr_size(rpasses); i++) {
if(!rpasses[i].used) {
/* init renderpass defaults */
rp->fmt = vksurf_fmt[vksurf_selfmt].format;
/* no free slot found: append a new one */
darr_push(rpasses, rp);
return darr_size(rpasses) - 1;
/* Release render pass handle rp; out-of-range or unused handles are ignored. */
void vk_free_rpass(int rp)
if(!rpasses || rp < 1 || rp >= darr_size(rpasses)) {
if(rpasses[rp].used && rpasses[rp].vkobj) {
vkDestroyRenderPass(vkdev, rpasses[rp].vkobj, 0);
rpasses[rp].used = 0;	/* mark slot reusable by vk_create_rpass */
/* Set the color attachment format and count for render pass rp. */
void vk_rpass_colorbuf(int rp, int fmt, int n)
rpasses[rp].fmt = fmt;
rpasses[rp].num_colbuf = n;
rpasses[rp].vkobj_valid = 0;	/* force lazy re-creation on next vk_rpass() */
/* Set the multisample count for render pass rp. */
void vk_rpass_msaa(int rp, int nsamp)
rpasses[rp].num_samples = nsamp;
214 void vk_rpass_clear(int clear)
216 rpasses[rp].clear = clear;
/* Return the VkRenderPass object for handle rp, (re)creating it lazily when
 * the cached object is missing or has been invalidated by a setter. */
VkRenderPass vk_rpass(int rp)
VkAttachmentDescription cat;
VkAttachmentReference catref[16];
VkSubpassDescription subpass;
VkRenderPassCreateInfo pinf;
VkSubpassDependency dep;
if(!r->vkobj_valid) {
memset(&cat, 0, sizeof cat);
cat.samples = r->num_samples;
cat.loadOp = r->clear ? VK_ATTACHMENT_LOAD_OP_CLEAR : VK_ATTACHMENT_LOAD_OP_DONT_CARE;
cat.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
cat.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
cat.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
cat.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
cat.finalLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
for(i=0; i<r->num_colbuf; i++) {
/* FIXME: should be `sizeof catref[i]` - zeroing `sizeof catref` bytes
 * starting at &catref[i] writes past the end of the array for i > 0 */
memset(&catref[i], 0, sizeof catref);
catref[i].layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
memset(&subpass, 0, sizeof subpass);
subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
subpass.colorAttachmentCount = r->num_colbuf;
subpass.pColorAttachments = catref;
memset(&dep, 0, sizeof dep);
return rpasses[rp].vkobj;
#define ARRSZ(arr) (sizeof arr / sizeof *arr)	/* element count; only valid on true arrays */

/* candidate validation layers, enabled when the loader reports them */
static const char *known_layer_list[] = {
"VK_LAYER_GOOGLE_threading",
"VK_LAYER_LUNARG_parameter_validation",
"VK_LAYER_LUNARG_object_tracker",
"VK_LAYER_LUNARG_image",
"VK_LAYER_LUNARG_core_validation",
"VK_LAYER_LUNARG_swapchain",
"VK_LAYER_GOOGLE_unique_objects"

/* instance extensions: name plus required flag (init fails when a required
 * one is missing; optional ones are simply enabled when available) */
} known_instext_list[] = {
{"VK_KHR_surface", 1},
{"VK_KHR_win32_surface", 1},
/*{"VK_KHR_xlib_surface", 1},*/
{"VK_KHR_xcb_surface", 1},
/* NOTE(review): debug report is an EXT extension - the registered name is
 * "VK_EXT_debug_report", so this entry probably never matches; confirm */
{"VK_KHR_debug_report", 0}

/* device extensions: swapchain mandatory, ray tracing pair optional */
} known_devext_list[] = {
{"VK_KHR_swapchain", 1},
{"VK_KHR_acceleration_structure", 0},
{"VK_KHR_ray_tracing_pipeline", 0}
/* Create the Vulkan instance, enabling every known layer and instance
 * extension the implementation supports; returns -1 if creation fails or a
 * required extension is missing. Caches layer/extension lists in globals. */
static int create_instance(void)
int i, nlayers = 0, next = 0;
VkInstanceCreateInfo instinf;
VkApplicationInfo appinf;
const char *layers[ARRSZ(known_layer_list)];
const char *ext[ARRSZ(known_instext_list)];
vkEnumerateInstanceVersion(&apiver);
/* Vulkan version packing: major 7 bits (>>22), minor 10 bits, patch 12 bits */
printf("Vulkan API version: %d.%d.%d\n", (apiver >> 22) & 0x7f,
(apiver >> 12) & 0x3ff, apiver & 0xfff);
memset(&appinf, 0, sizeof appinf);
appinf.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
appinf.pApplicationName = "vkray";
appinf.pEngineName = "vkray";
appinf.apiVersion = apiver;
/* two-call pattern: query counts, allocate, then fetch the data */
vkEnumerateInstanceLayerProperties(&inst_layers_count, 0);
inst_layers = malloc_nf(inst_layers_count * sizeof *inst_layers);
vkEnumerateInstanceLayerProperties(&inst_layers_count, inst_layers);
vkEnumerateInstanceExtensionProperties(0, &inst_ext_count, 0);
inst_ext = malloc_nf(inst_ext_count * sizeof *inst_ext);
vkEnumerateInstanceExtensionProperties(0, &inst_ext_count, inst_ext);
for(i=0; i<inst_layers_count; i++) {
printf(" - %s: %s\n", inst_layers[i].layerName, inst_layers[i].description);
printf("Instance extensions:\n");
for(i=0; i<inst_ext_count; i++) {
printf(" - %s\n", inst_ext[i].extensionName);
/* NOTE(review): the registered extension is "VK_EXT_debug_report"; with the
 * KHR spelling this flag will presumably always end up 0 - confirm */
have_debug_report = have_ext(inst_ext, inst_ext_count, "VK_KHR_debug_report");
/* enable every known layer that is actually available */
for(i=0; i<ARRSZ(known_layer_list); i++) {
if(have_inst_layer(known_layer_list[i])) {
layers[nlayers++] = known_layer_list[i];
/* enable known extensions; a missing required one is fatal */
for(i=0; i<ARRSZ(known_instext_list); i++) {
if(have_ext(inst_ext, inst_ext_count, known_instext_list[i].name)) {
ext[next++] = known_instext_list[i].name;
} else if(known_instext_list[i].required) {
fprintf(stderr, "Vulkan implementation lacks required instance extension: %s\n",
known_instext_list[i].name);
memset(&instinf, 0, sizeof instinf);
instinf.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
instinf.pApplicationInfo = &appinf;
instinf.enabledLayerCount = nlayers;
instinf.ppEnabledLayerNames = layers;
instinf.enabledExtensionCount = next;
instinf.ppEnabledExtensionNames = ext;
if(vkCreateInstance(&instinf, 0, &vk) != 0) {
fprintf(stderr, "failed to create vulkan instance\n");
/* Create the platform window surface (vksurf). The Xlib path matches the
 * commented-out vulkan_xlib.h include at the top of the file; the XCB path
 * translates the Xlib display/window into XCB handles. */
static int create_surface(void)
VkXlibSurfaceCreateInfoKHR xinf = {0};
xinf.sType = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR;
if(vkCreateXlibSurfaceKHR(vk, &xinf, 0, &vksurf) != 0) {
fprintf(stderr, "failed to create Xlib window surface\n");
/* XCB variant */
VkXcbSurfaceCreateInfoKHR xinf = {0};
xinf.sType = VK_STRUCTURE_TYPE_XCB_SURFACE_CREATE_INFO_KHR;
xinf.connection = XGetXCBConnection(dpy);
xinf.window = (xcb_window_t)win;
if(vkCreateXcbSurfaceKHR(vk, &xinf, 0, &vksurf) != 0) {
fprintf(stderr, "failed to create XCB window surface\n");
/* Pick the highest-scoring physical device (see eval_pdev_score) and find a
 * queue family that supports both graphics and presentation to vksurf.
 * NOTE(review): the prototype above declares this static; the definition is
 * probably meant to carry static too - confirm. */
int choose_phys_dev(void)
uint32_t i, num_pdev, num_qfam, score, best_score, best_dev;	/* NOTE: local num_pdev shadows the file-scope global */
VkPhysicalDevice *pdev;
VkPhysicalDeviceProperties pdevprop;
VkQueueFamilyProperties *qfam;
vkEnumeratePhysicalDevices(vk, &num_pdev, 0);
fprintf(stderr, "no vulkan devices found\n");
pdev = malloc_nf(num_pdev * sizeof *pdev);
vkEnumeratePhysicalDevices(vk, &num_pdev, pdev);
printf("Found %d physical devices\n", num_pdev);
/* keep the best-scoring device; score 0 means unusable */
for(i=0; i<num_pdev; i++) {
if((score = eval_pdev_score(pdev[i])) && score > best_score) {
vkGetPhysicalDeviceProperties(pdev[i], &pdevprop);
printf(" %d: %s (score: %d)\n", i, pdevprop.deviceName, score);
fprintf(stderr, "no suitable vulkan device found\n");
vkpdev = pdev[best_dev];
vkGetPhysicalDeviceQueueFamilyProperties(vkpdev, &num_qfam, 0);
qfam = malloc_nf(num_qfam * sizeof *qfam);
vkGetPhysicalDeviceQueueFamilyProperties(vkpdev, &num_qfam, qfam);
/* find a queue family that can do graphics and present */
for(i=0; i<num_qfam; i++) {
vkGetPhysicalDeviceSurfaceSupportKHR(vkpdev, i, vksurf, &can_pres);
if(qfam[i].queueCount && (qfam[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) && can_pres) {
vkqfam_maxq = qfam[i].queueCount;
/* Create the logical device with one graphics+present queue, enabling the
 * swapchain extension and, when requested and supported, ray tracing. */
static int create_device(void)
VkDeviceQueueCreateInfo qinf = {0};
VkPhysicalDeviceFeatures feat = {0};
VkDeviceCreateInfo devinf = {0};
const char *ext[ARRSZ(known_devext_list) + 16];
vkEnumerateDeviceExtensionProperties(vkpdev, 0, &dev_ext_count, 0);
dev_ext = malloc_nf(dev_ext_count * sizeof *dev_ext);
vkEnumerateDeviceExtensionProperties(vkpdev, 0, &dev_ext_count, dev_ext);
/* enable known device extensions; a missing required one is fatal */
for(i=0; i<ARRSZ(known_devext_list); i++) {
if(have_ext(dev_ext, dev_ext_count, known_devext_list[i].name)) {
ext[num_ext++] = known_devext_list[i].name;
} else if(known_devext_list[i].required) {
fprintf(stderr, "Vulkan device lacks required extension: %s\n",
known_devext_list[i].name);
/* ray tracing: need both extensions, otherwise drop the VKINIT_RAY flag.
 * NOTE(review): these names are also in known_devext_list, so when present
 * they may already have been pushed above - possible duplicates in ext */
if(initflags & VKINIT_RAY) {
if(have_ext(dev_ext, dev_ext_count, "VK_KHR_acceleration_structure") &&
have_ext(dev_ext, dev_ext_count, "VK_KHR_ray_tracing_pipeline")) {
ext[num_ext++] = "VK_KHR_acceleration_structure";
ext[num_ext++] = "VK_KHR_ray_tracing_pipeline";
initflags &= ~VKINIT_RAY;
qinf.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
qinf.queueFamilyIndex = vkqfam_idx;
qinf.pQueuePriorities = &prio;
devinf.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
devinf.pQueueCreateInfos = &qinf;
devinf.queueCreateInfoCount = 1;
devinf.pEnabledFeatures = &feat;
devinf.enabledExtensionCount = num_ext;
devinf.ppEnabledExtensionNames = ext;
if(vkCreateDevice(vkpdev, &devinf, 0, &vkdev) != 0) {
fprintf(stderr, "failed to create vulkan device\n");
vkGetDeviceQueue(vkdev, vkqfam_idx, 0, &vkq);
/* (Re)create the swapchain at vksc_extent plus one 2D color image view per
 * swapchain image; reuses the image/view arrays when the count is unchanged. */
static int create_swapchain(void)
VkSwapchainCreateInfoKHR scinf = {0};
VkImageViewCreateInfo ivinf;
/* degenerate size (e.g. before the first reshape): nothing to do yet */
if(vksc_extent.width <= 0 || vksc_extent.height <= 0) {
scinf.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
scinf.surface = vksurf;
scinf.minImageCount = 2;	/* double buffering */
scinf.imageFormat = vksurf_fmt[vksurf_selfmt].format;
scinf.imageColorSpace = vksurf_fmt[vksurf_selfmt].colorSpace;
scinf.imageExtent = vksc_extent;
scinf.imageArrayLayers = 1;
scinf.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
scinf.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;	/* single queue family */
scinf.preTransform = vksurf_caps.currentTransform;
scinf.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
scinf.presentMode = VK_PRESENT_MODE_FIFO_KHR;	/* vsync; always available */
scinf.clipped = VK_TRUE;
if(vkCreateSwapchainKHR(vkdev, &scinf, 0, &vksc) != 0) {
fprintf(stderr, "failed to create swapchain\n");
/* re-fetch the image handles; reallocate only if the count changed */
if(!vksc_img || vksc_numimg != num) {
vkGetSwapchainImagesKHR(vkdev, vksc, &num, 0);
vksc_img = malloc_nf(num * sizeof *vksc_img);
vkGetSwapchainImagesKHR(vkdev, vksc, &num, vksc_img);
if(!vksc_view || vksc_numimg != num) {
vksc_view = malloc_nf(num * sizeof *vksc_view);
/* create one identity-swizzled 2D color view per swapchain image */
for(i=0; i<num; i++) {
memset(&ivinf, 0, sizeof ivinf);
ivinf.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
ivinf.image = vksc_img[i];
ivinf.format = vksurf_fmt[vksurf_selfmt].format;
ivinf.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
ivinf.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
ivinf.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
ivinf.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
ivinf.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
ivinf.subresourceRange.levelCount = 1;
ivinf.subresourceRange.layerCount = 1;
ivinf.viewType = VK_IMAGE_VIEW_TYPE_2D;
if(vkCreateImageView(vkdev, &ivinf, 0, vksc_view + i) != 0) {
fprintf(stderr, "failed to create image view (%d)\n", i);
/* Score a physical device for suitability: 0 means unusable (no swapchain
 * extension, no surface formats, or no graphics+present queue family);
 * otherwise weighted by device type, with a bonus when ray tracing was
 * requested and the device supports it. */
static int eval_pdev_score(VkPhysicalDevice dev)
uint32_t i, num_fmt, num_qfam, num_ext;
VkQueueFamilyProperties *qfam;
VkExtensionProperties *ext;
VkPhysicalDeviceProperties prop;
VkPhysicalDeviceFeatures feat;
VkSurfaceFormatKHR *sfmt;
vkGetPhysicalDeviceProperties(dev, &prop);
vkGetPhysicalDeviceFeatures(dev, &feat);
/* check if we have the swapchain extension */
vkEnumerateDeviceExtensionProperties(dev, 0, &num_ext, 0);
ext = malloc_nf(num_ext * sizeof *ext);
vkEnumerateDeviceExtensionProperties(dev, 0, &num_ext, ext);
if(!have_ext(ext, num_ext, "VK_KHR_swapchain")) {
/* populate format and present modes arrays, and make sure we have some of each */
vkGetPhysicalDeviceSurfaceFormatsKHR(dev, vksurf, &num_fmt, 0);
sfmt = malloc_nf(num_fmt * sizeof *sfmt);
vkGetPhysicalDeviceSurfaceFormatsKHR(dev, vksurf, &num_fmt, sfmt);
/* NOTE(review): writes the global vksurf_caps even for devices that end up
 * not being selected - the last evaluated device wins; confirm intent */
vkGetPhysicalDeviceSurfaceCapabilitiesKHR(dev, vksurf, &vksurf_caps);
/* find a queue family which can do graphics and can present */
vkGetPhysicalDeviceQueueFamilyProperties(dev, &num_qfam, 0);
qfam = malloc_nf(num_qfam * sizeof *qfam);
vkGetPhysicalDeviceQueueFamilyProperties(dev, &num_qfam, qfam);
for(i=0; i<num_qfam; i++) {
vkGetPhysicalDeviceSurfaceSupportKHR(dev, i, vksurf, &can_pres);
if(qfam[i].queueCount && (qfam[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) && can_pres) {
/* weight by device type: discrete > integrated > virtual */
switch(prop.deviceType) {
case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU:
case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU:
case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU:
if(initflags & VKINIT_RAY) {
if(have_ext(ext, num_ext, "VK_KHR_acceleration_structure") &&
have_ext(ext, num_ext, "VK_KHR_ray_tracing_pipeline")) {
/* Query the surface formats, select the most preferred sRGB-nonlinear format
 * from pref[], and record the list and selection in the file-scope globals;
 * returns -1 when no formats are available. */
static int choose_pixfmt(void)
static const VkFormat pref[] = {
VK_FORMAT_B8G8R8_UNORM,
VK_FORMAT_R8G8B8_UNORM,
VK_FORMAT_B8G8R8A8_UNORM,
VK_FORMAT_R8G8B8A8_UNORM
vkGetPhysicalDeviceSurfaceFormatsKHR(vkpdev, vksurf, &num_fmt, 0);
if(!num_fmt) return -1;
vksurf_fmt = malloc_nf(num_fmt * sizeof *vksurf_fmt);
vkGetPhysicalDeviceSurfaceFormatsKHR(vkpdev, vksurf, &num_fmt, vksurf_fmt);
for(i=0; i<num_fmt; i++) {
/* only consider sRGB-nonlinear colorspace entries */
if(vksurf_fmt[i].colorSpace != VK_COLOR_SPACE_SRGB_NONLINEAR_KHR) {
for(j=0; j<sizeof pref / sizeof *pref; j++) {
if(vksurf_fmt[i].format == pref[j]) {
vksurf_numfmt = num_fmt;
/* Return nonzero if an instance layer with the given name is available
 * in the cached inst_layers list. */
static int have_inst_layer(const char *name)
for(i=0; i<inst_layers_count; i++) {
if(strcmp(inst_layers[i].layerName, name) == 0) {

/* Return nonzero if name appears among the next entries of ext. */
static int have_ext(VkExtensionProperties *ext, int next, const char *name)
for(i=0; i<next; i++) {
if(strcmp(ext[i].extensionName, name) == 0) {