21 #define VK_NO_PROTOTYPES
22 #define VK_ENABLE_BETA_EXTENSIONS
26 #include <versionhelpers.h>
53 #include <va/va_drmcommon.h>
56 #include <sys/sysmacros.h>
60 #include <drm_fourcc.h>
64 #if HAVE_LINUX_DMA_BUF_H
65 #include <sys/ioctl.h>
66 #include <linux/dma-buf.h>
/* Convenience wrapper for CUDA driver-API calls: evaluates `x` and routes the
 * result through FF_CUDA_CHECK_DL using the dynamically loaded contexts
 * `cuda_cu` and `cu`. NOTE(review): error logging/return semantics are
 * defined by FF_CUDA_CHECK_DL elsewhere — confirm there. */
72 #define CHECK_CU(x) FF_CUDA_CHECK_DL(cuda_cu, cu, x)
86 #ifdef VK_EXT_shader_long_vector
87 VkPhysicalDeviceShaderLongVectorFeaturesEXT long_vector;
90 #ifdef VK_EXT_shader_replicated_composites
91 VkPhysicalDeviceShaderReplicatedCompositesFeaturesEXT replicated_composites;
94 #ifdef VK_EXT_zero_initialize_device_memory
95 VkPhysicalDeviceZeroInitializeDeviceMemoryFeaturesEXT zero_initialize;
98 #ifdef VK_KHR_shader_expect_assume
99 VkPhysicalDeviceShaderExpectAssumeFeaturesKHR expect_assume;
103 #ifdef VK_KHR_video_maintenance2
104 VkPhysicalDeviceVideoMaintenance2FeaturesKHR video_maintenance_2;
106 #ifdef VK_KHR_video_decode_vp9
107 VkPhysicalDeviceVideoDecodeVP9FeaturesKHR vp9_decode;
109 #ifdef VK_KHR_video_encode_av1
110 VkPhysicalDeviceVideoEncodeAV1FeaturesKHR av1_encode;
117 #ifdef VK_KHR_shader_relaxed_extended_instruction
118 VkPhysicalDeviceShaderRelaxedExtendedInstructionFeaturesKHR relaxed_extended_instruction;
121 #ifdef VK_KHR_internally_synchronized_queues
122 VkPhysicalDeviceInternallySynchronizedQueuesFeaturesKHR internal_queue_sync;
142 VkPhysicalDeviceExternalMemoryHostPropertiesEXT
hprops;
230 feats->
device = (VkPhysicalDeviceFeatures2) {
231 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2,
235 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_1_FEATURES);
237 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_2_FEATURES);
239 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VULKAN_1_3_FEATURES);
242 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES);
244 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_SUBGROUP_ROTATE_FEATURES_KHR);
246 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_HOST_IMAGE_COPY_FEATURES_EXT);
248 #ifdef VK_EXT_shader_long_vector
250 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_LONG_VECTOR_FEATURES_EXT);
253 #ifdef VK_EXT_shader_replicated_composites
255 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_REPLICATED_COMPOSITES_FEATURES_EXT);
258 #ifdef VK_EXT_zero_initialize_device_memory
260 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ZERO_INITIALIZE_DEVICE_MEMORY_FEATURES_EXT);
263 #ifdef VK_KHR_shader_expect_assume
265 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_EXPECT_ASSUME_FEATURES_KHR);
269 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_1_FEATURES_KHR);
270 #ifdef VK_KHR_video_maintenance2
272 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_MAINTENANCE_2_FEATURES_KHR);
274 #ifdef VK_KHR_video_decode_vp9
276 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_DECODE_VP9_FEATURES_KHR);
278 #ifdef VK_KHR_video_encode_av1
280 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_VIDEO_ENCODE_AV1_FEATURES_KHR);
284 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_OBJECT_FEATURES_EXT);
286 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_COOPERATIVE_MATRIX_FEATURES_KHR);
288 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_FLOAT_FEATURES_EXT);
290 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_WORKGROUP_MEMORY_EXPLICIT_LAYOUT_FEATURES_KHR);
292 #ifdef VK_KHR_shader_relaxed_extended_instruction
294 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_RELAXED_EXTENDED_INSTRUCTION_FEATURES_KHR);
297 #ifdef VK_KHR_internally_synchronized_queues
299 VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_INTERNALLY_SYNCHRONIZED_QUEUES_FEATURES_KHR);
306 #define COPY_VAL(VAL) \
308 dst->VAL = src->VAL; \
311 COPY_VAL(device.features.shaderImageGatherExtended);
312 COPY_VAL(device.features.shaderStorageImageReadWithoutFormat);
313 COPY_VAL(device.features.shaderStorageImageWriteWithoutFormat);
314 COPY_VAL(device.features.fragmentStoresAndAtomics);
315 COPY_VAL(device.features.vertexPipelineStoresAndAtomics);
316 COPY_VAL(device.features.shaderInt64);
317 COPY_VAL(device.features.shaderInt16);
318 COPY_VAL(device.features.shaderFloat64);
319 COPY_VAL(device.features.shaderStorageImageReadWithoutFormat);
320 COPY_VAL(device.features.shaderStorageImageWriteWithoutFormat);
322 COPY_VAL(vulkan_1_1.samplerYcbcrConversion);
323 COPY_VAL(vulkan_1_1.storagePushConstant16);
324 COPY_VAL(vulkan_1_1.storageBuffer16BitAccess);
325 COPY_VAL(vulkan_1_1.uniformAndStorageBuffer16BitAccess);
327 COPY_VAL(vulkan_1_2.timelineSemaphore);
328 COPY_VAL(vulkan_1_2.scalarBlockLayout);
329 COPY_VAL(vulkan_1_2.bufferDeviceAddress);
330 COPY_VAL(vulkan_1_2.hostQueryReset);
331 COPY_VAL(vulkan_1_2.storagePushConstant8);
333 COPY_VAL(vulkan_1_2.storageBuffer8BitAccess);
334 COPY_VAL(vulkan_1_2.uniformAndStorageBuffer8BitAccess);
336 COPY_VAL(vulkan_1_2.shaderBufferInt64Atomics);
337 COPY_VAL(vulkan_1_2.shaderSharedInt64Atomics);
338 COPY_VAL(vulkan_1_2.vulkanMemoryModel);
339 COPY_VAL(vulkan_1_2.vulkanMemoryModelDeviceScope);
340 COPY_VAL(vulkan_1_2.vulkanMemoryModelAvailabilityVisibilityChains);
341 COPY_VAL(vulkan_1_2.uniformBufferStandardLayout);
342 COPY_VAL(vulkan_1_2.runtimeDescriptorArray);
343 COPY_VAL(vulkan_1_2.shaderSubgroupExtendedTypes);
344 COPY_VAL(vulkan_1_2.shaderUniformBufferArrayNonUniformIndexing);
345 COPY_VAL(vulkan_1_2.shaderSampledImageArrayNonUniformIndexing);
346 COPY_VAL(vulkan_1_2.shaderStorageBufferArrayNonUniformIndexing);
347 COPY_VAL(vulkan_1_2.shaderStorageImageArrayNonUniformIndexing);
349 COPY_VAL(vulkan_1_3.dynamicRendering);
351 COPY_VAL(vulkan_1_3.synchronization2);
352 COPY_VAL(vulkan_1_3.computeFullSubgroups);
353 COPY_VAL(vulkan_1_3.subgroupSizeControl);
354 COPY_VAL(vulkan_1_3.shaderZeroInitializeWorkgroupMemory);
355 COPY_VAL(vulkan_1_3.dynamicRendering);
357 COPY_VAL(timeline_semaphore.timelineSemaphore);
358 COPY_VAL(subgroup_rotate.shaderSubgroupRotate);
359 COPY_VAL(host_image_copy.hostImageCopy);
361 #ifdef VK_EXT_shader_long_vector
365 #ifdef VK_EXT_shader_replicated_composites
366 COPY_VAL(replicated_composites.shaderReplicatedComposites);
369 #ifdef VK_EXT_zero_initialize_device_memory
370 COPY_VAL(zero_initialize.zeroInitializeDeviceMemory);
373 COPY_VAL(video_maintenance_1.videoMaintenance1);
374 #ifdef VK_KHR_video_maintenance2
375 COPY_VAL(video_maintenance_2.videoMaintenance2);
378 #ifdef VK_KHR_video_decode_vp9
379 COPY_VAL(vp9_decode.videoDecodeVP9);
382 #ifdef VK_KHR_video_encode_av1
383 COPY_VAL(av1_encode.videoEncodeAV1);
386 COPY_VAL(shader_object.shaderObject);
388 COPY_VAL(cooperative_matrix.cooperativeMatrix);
390 COPY_VAL(atomic_float.shaderBufferFloat32Atomics);
391 COPY_VAL(atomic_float.shaderBufferFloat32AtomicAdd);
393 COPY_VAL(explicit_mem_layout.workgroupMemoryExplicitLayout);
394 COPY_VAL(explicit_mem_layout.workgroupMemoryExplicitLayoutScalarBlockLayout);
395 COPY_VAL(explicit_mem_layout.workgroupMemoryExplicitLayout8BitAccess);
396 COPY_VAL(explicit_mem_layout.workgroupMemoryExplicitLayout16BitAccess);
398 #ifdef VK_KHR_shader_relaxed_extended_instruction
399 COPY_VAL(relaxed_extended_instruction.shaderRelaxedExtendedInstruction);
402 #ifdef VK_KHR_shader_expect_assume
403 COPY_VAL(expect_assume.shaderExpectAssume);
406 #ifdef VK_KHR_internally_synchronized_queues
407 COPY_VAL(internal_queue_sync.internallySynchronizedQueues);
/* Aspect masks that select every plane of a multi-planar Vulkan image:
 * ASPECT_2PLANE covers planes 0-1 of two-plane formats, ASPECT_3PLANE
 * covers planes 0-2 of three-plane formats. */
413 #define ASPECT_2PLANE (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT)
414 #define ASPECT_3PLANE (VK_IMAGE_ASPECT_PLANE_0_BIT | VK_IMAGE_ASPECT_PLANE_1_BIT | VK_IMAGE_ASPECT_PLANE_2_BIT)
426 { VK_FORMAT_R8_UNORM,
AV_PIX_FMT_GRAY8, VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 1, { VK_FORMAT_R8_UNORM } },
427 { VK_FORMAT_R16_UNORM,
AV_PIX_FMT_GRAY10, VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 1, { VK_FORMAT_R16_UNORM } },
428 { VK_FORMAT_R16_UNORM,
AV_PIX_FMT_GRAY12, VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 1, { VK_FORMAT_R16_UNORM } },
429 { VK_FORMAT_R16_UNORM,
AV_PIX_FMT_GRAY14, VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 1, { VK_FORMAT_R16_UNORM } },
430 { VK_FORMAT_R16_UNORM,
AV_PIX_FMT_GRAY16, VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 1, { VK_FORMAT_R16_UNORM } },
431 { VK_FORMAT_R32_UINT,
AV_PIX_FMT_GRAY32, VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 1, { VK_FORMAT_R32_UINT } },
432 { VK_FORMAT_R32_SFLOAT,
AV_PIX_FMT_GRAYF32, VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 1, { VK_FORMAT_R32_SFLOAT } },
436 { VK_FORMAT_R8G8B8A8_UNORM,
AV_PIX_FMT_RGBA, VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 1, { VK_FORMAT_R8G8B8A8_UNORM } },
437 { VK_FORMAT_R8G8B8_UNORM,
AV_PIX_FMT_RGB24, VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 1, { VK_FORMAT_R8G8B8_UNORM } },
438 { VK_FORMAT_B8G8R8_UNORM,
AV_PIX_FMT_BGR24, VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 1, { VK_FORMAT_B8G8R8_UNORM } },
439 { VK_FORMAT_R16G16B16_UNORM,
AV_PIX_FMT_RGB48, VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 1, { VK_FORMAT_R16G16B16_UNORM } },
440 { VK_FORMAT_R16G16B16A16_UNORM,
AV_PIX_FMT_RGBA64, VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 1, { VK_FORMAT_R16G16B16A16_UNORM } },
442 { VK_FORMAT_R8G8B8A8_UNORM,
AV_PIX_FMT_RGB0, VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 1, { VK_FORMAT_R8G8B8A8_UNORM } },
445 { VK_FORMAT_R32G32B32_SFLOAT,
AV_PIX_FMT_RGBF32, VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 1, { VK_FORMAT_R32G32B32_SFLOAT } },
446 { VK_FORMAT_R32G32B32A32_SFLOAT,
AV_PIX_FMT_RGBAF32, VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 1, { VK_FORMAT_R32G32B32A32_SFLOAT } },
447 { VK_FORMAT_R32G32B32_UINT,
AV_PIX_FMT_RGB96, VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 1, { VK_FORMAT_R32G32B32_UINT } },
448 { VK_FORMAT_R32G32B32A32_UINT,
AV_PIX_FMT_RGBA128, VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 1, { VK_FORMAT_R32G32B32A32_UINT } },
451 { VK_FORMAT_R8_UNORM,
AV_PIX_FMT_GBRP, VK_IMAGE_ASPECT_COLOR_BIT, 3, 3, 3, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM } },
452 { VK_FORMAT_R16_UNORM,
AV_PIX_FMT_GBRP10, VK_IMAGE_ASPECT_COLOR_BIT, 3, 3, 3, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
453 { VK_FORMAT_R16_UNORM,
AV_PIX_FMT_GBRP12, VK_IMAGE_ASPECT_COLOR_BIT, 3, 3, 3, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
454 { VK_FORMAT_R16_UNORM,
AV_PIX_FMT_GBRP14, VK_IMAGE_ASPECT_COLOR_BIT, 3, 3, 3, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
455 { VK_FORMAT_R16_UNORM,
AV_PIX_FMT_GBRP16, VK_IMAGE_ASPECT_COLOR_BIT, 3, 3, 3, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
456 { VK_FORMAT_R16_SFLOAT,
AV_PIX_FMT_GBRPF16, VK_IMAGE_ASPECT_COLOR_BIT, 3, 3, 3, { VK_FORMAT_R16_SFLOAT, VK_FORMAT_R16_SFLOAT, VK_FORMAT_R16_SFLOAT } },
457 { VK_FORMAT_R32_SFLOAT,
AV_PIX_FMT_GBRPF32, VK_IMAGE_ASPECT_COLOR_BIT, 3, 3, 3, { VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32_SFLOAT } },
460 { VK_FORMAT_R8_UNORM,
AV_PIX_FMT_GBRAP, VK_IMAGE_ASPECT_COLOR_BIT, 4, 4, 4, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM } },
461 { VK_FORMAT_R16_UNORM,
AV_PIX_FMT_GBRAP10, VK_IMAGE_ASPECT_COLOR_BIT, 4, 4, 4, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
462 { VK_FORMAT_R16_UNORM,
AV_PIX_FMT_GBRAP12, VK_IMAGE_ASPECT_COLOR_BIT, 4, 4, 4, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
463 { VK_FORMAT_R16_UNORM,
AV_PIX_FMT_GBRAP14, VK_IMAGE_ASPECT_COLOR_BIT, 4, 4, 4, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
464 { VK_FORMAT_R16_UNORM,
AV_PIX_FMT_GBRAP16, VK_IMAGE_ASPECT_COLOR_BIT, 4, 4, 4, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
465 { VK_FORMAT_R16_UNORM,
AV_PIX_FMT_GBRAPF16, VK_IMAGE_ASPECT_COLOR_BIT, 4, 4, 4, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
466 { VK_FORMAT_R32_UINT,
AV_PIX_FMT_GBRAP32, VK_IMAGE_ASPECT_COLOR_BIT, 4, 4, 4, { VK_FORMAT_R32_UINT, VK_FORMAT_R32_UINT, VK_FORMAT_R32_UINT, VK_FORMAT_R32_UINT } },
467 { VK_FORMAT_R32_SFLOAT,
AV_PIX_FMT_GBRAPF32, VK_IMAGE_ASPECT_COLOR_BIT, 4, 4, 4, { VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32_SFLOAT, VK_FORMAT_R32_SFLOAT } },
474 { VK_FORMAT_G10X6_B10X6R10X6_2PLANE_420_UNORM_3PACK16,
AV_PIX_FMT_P010,
ASPECT_2PLANE, 2, 1, 2, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16G16_UNORM } },
475 { VK_FORMAT_G12X4_B12X4R12X4_2PLANE_420_UNORM_3PACK16,
AV_PIX_FMT_P012,
ASPECT_2PLANE, 2, 1, 2, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16G16_UNORM } },
480 { VK_FORMAT_G10X6_B10X6R10X6_2PLANE_422_UNORM_3PACK16,
AV_PIX_FMT_P210,
ASPECT_2PLANE, 2, 1, 2, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16G16_UNORM } },
481 { VK_FORMAT_G12X4_B12X4R12X4_2PLANE_422_UNORM_3PACK16,
AV_PIX_FMT_P212,
ASPECT_2PLANE, 2, 1, 2, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16G16_UNORM } },
486 { VK_FORMAT_G10X6_B10X6R10X6_2PLANE_444_UNORM_3PACK16,
AV_PIX_FMT_P410,
ASPECT_2PLANE, 2, 1, 2, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16G16_UNORM } },
487 { VK_FORMAT_G12X4_B12X4R12X4_2PLANE_444_UNORM_3PACK16,
AV_PIX_FMT_P412,
ASPECT_2PLANE, 2, 1, 2, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16G16_UNORM } },
505 { VK_FORMAT_G8B8G8R8_422_UNORM,
AV_PIX_FMT_YUYV422, VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 1, { VK_FORMAT_R8G8B8A8_UNORM } },
506 { VK_FORMAT_B8G8R8G8_422_UNORM,
AV_PIX_FMT_UYVY422, VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 1, { VK_FORMAT_R8G8B8A8_UNORM } },
507 { VK_FORMAT_G10X6B10X6G10X6R10X6_422_UNORM_4PACK16,
AV_PIX_FMT_Y210, VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 1, { VK_FORMAT_R16G16B16A16_UNORM } },
508 { VK_FORMAT_G12X4B12X4G12X4R12X4_422_UNORM_4PACK16,
AV_PIX_FMT_Y212, VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 1, { VK_FORMAT_R16G16B16A16_UNORM } },
509 { VK_FORMAT_G16B16G16R16_422_UNORM,
AV_PIX_FMT_Y216, VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 1, { VK_FORMAT_R16G16B16A16_UNORM } },
512 { VK_FORMAT_R8_UNORM,
AV_PIX_FMT_YUVA420P, VK_IMAGE_ASPECT_COLOR_BIT, 4, 4, 4, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM } },
513 { VK_FORMAT_R16_UNORM,
AV_PIX_FMT_YUVA420P10, VK_IMAGE_ASPECT_COLOR_BIT, 4, 4, 4, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
514 { VK_FORMAT_R16_UNORM,
AV_PIX_FMT_YUVA420P16, VK_IMAGE_ASPECT_COLOR_BIT, 4, 4, 4, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
517 { VK_FORMAT_R8_UNORM,
AV_PIX_FMT_YUVA422P, VK_IMAGE_ASPECT_COLOR_BIT, 4, 4, 4, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM } },
518 { VK_FORMAT_R16_UNORM,
AV_PIX_FMT_YUVA422P10, VK_IMAGE_ASPECT_COLOR_BIT, 4, 4, 4, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
519 { VK_FORMAT_R16_UNORM,
AV_PIX_FMT_YUVA422P12, VK_IMAGE_ASPECT_COLOR_BIT, 4, 4, 4, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
520 { VK_FORMAT_R16_UNORM,
AV_PIX_FMT_YUVA422P16, VK_IMAGE_ASPECT_COLOR_BIT, 4, 4, 4, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
523 { VK_FORMAT_R8_UNORM,
AV_PIX_FMT_YUVA444P, VK_IMAGE_ASPECT_COLOR_BIT, 4, 4, 4, { VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM, VK_FORMAT_R8_UNORM } },
524 { VK_FORMAT_R16_UNORM,
AV_PIX_FMT_YUVA444P10, VK_IMAGE_ASPECT_COLOR_BIT, 4, 4, 4, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
525 { VK_FORMAT_R16_UNORM,
AV_PIX_FMT_YUVA444P12, VK_IMAGE_ASPECT_COLOR_BIT, 4, 4, 4, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
526 { VK_FORMAT_R16_UNORM,
AV_PIX_FMT_YUVA444P16, VK_IMAGE_ASPECT_COLOR_BIT, 4, 4, 4, { VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM, VK_FORMAT_R16_UNORM } },
530 { VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16,
AV_PIX_FMT_XV36, VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 1, { VK_FORMAT_R16G16B16A16_UNORM } },
531 { VK_FORMAT_R16G16B16A16_UNORM,
AV_PIX_FMT_XV48, VK_IMAGE_ASPECT_COLOR_BIT, 1, 1, 1, { VK_FORMAT_R16G16B16A16_UNORM } },
552 VkImageTiling tiling,
555 VkImageAspectFlags *
aspect,
556 VkImageUsageFlags *supported_usage,
557 int disable_multiplane,
int need_storage)
563 const VkFormatFeatureFlagBits2 basic_flags = VK_FORMAT_FEATURE_2_SAMPLED_IMAGE_BIT |
564 VK_FORMAT_FEATURE_2_TRANSFER_SRC_BIT |
565 VK_FORMAT_FEATURE_2_TRANSFER_DST_BIT;
569 VkFormatProperties3 fprops = {
570 .sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_3,
572 VkFormatProperties2 prop = {
573 .sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
576 VkFormatFeatureFlagBits2 feats_primary, feats_secondary;
577 int basics_primary = 0, basics_secondary = 0;
578 int storage_primary = 0, storage_secondary = 0;
580 vk->GetPhysicalDeviceFormatProperties2(hwctx->
phys_dev,
584 feats_primary = tiling == VK_IMAGE_TILING_LINEAR ?
585 fprops.linearTilingFeatures : fprops.optimalTilingFeatures;
586 basics_primary = (feats_primary & basic_flags) == basic_flags;
587 storage_primary = !!(feats_primary & VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT);
590 vk->GetPhysicalDeviceFormatProperties2(hwctx->
phys_dev,
593 feats_secondary = tiling == VK_IMAGE_TILING_LINEAR ?
594 fprops.linearTilingFeatures : fprops.optimalTilingFeatures;
595 basics_secondary = (feats_secondary & basic_flags) == basic_flags;
596 storage_secondary = !!(feats_secondary & VK_FORMAT_FEATURE_2_STORAGE_IMAGE_BIT);
598 basics_secondary = basics_primary;
599 storage_secondary = storage_primary;
602 if (basics_primary &&
604 (!need_storage || (need_storage && (storage_primary | storage_secondary)))) {
619 ((need_storage && (storage_primary | storage_secondary)) ?
620 VK_IMAGE_USAGE_STORAGE_BIT : 0);
622 }
else if (basics_secondary &&
623 (!need_storage || (need_storage && storage_secondary))) {
644 #if CONFIG_VULKAN_STATIC
645 VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vkGetInstanceProcAddr(VkInstance instance,
654 #if CONFIG_VULKAN_STATIC
657 static const char *lib_names[] = {
660 #elif defined(__APPLE__)
671 p->libvulkan = dlopen(lib_names[
i], RTLD_NOW | RTLD_LOCAL);
681 hwctx->
get_proc_addr = (PFN_vkGetInstanceProcAddr)dlsym(
p->libvulkan,
"vkGetInstanceProcAddr");
710 #ifdef VK_KHR_shader_relaxed_extended_instruction
713 #ifdef VK_EXT_shader_long_vector
716 #ifdef VK_EXT_shader_replicated_composites
719 #ifdef VK_EXT_zero_initialize_device_memory
722 #ifdef VK_KHR_shader_expect_assume
726 #ifdef VK_KHR_video_maintenance2
729 #ifdef VK_KHR_internally_synchronized_queues
752 #ifdef VK_KHR_video_decode_vp9
755 #ifdef VK_KHR_video_encode_av1
791 VkDebugUtilsMessageTypeFlagsEXT messageType,
792 const VkDebugUtilsMessengerCallbackDataEXT *
data,
799 switch (
data->messageIdNumber) {
808 case VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT: l =
AV_LOG_VERBOSE;
break;
809 case VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT: l =
AV_LOG_INFO;
break;
810 case VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT: l =
AV_LOG_WARNING;
break;
811 case VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT: l =
AV_LOG_ERROR;
break;
816 for (
int i = 0;
i <
data->cmdBufLabelCount;
i++)
822 #define ADD_VAL_TO_LIST(list, count, val) \
824 list = av_realloc_array(list, ++count, sizeof(*list)); \
826 err = AVERROR(ENOMEM); \
829 list[count - 1] = av_strdup(val); \
830 if (!list[count - 1]) { \
831 err = AVERROR(ENOMEM); \
836 #define RELEASE_PROPS(props, count) \
838 for (int i = 0; i < count; i++) \
839 av_free((void *)((props)[i])); \
840 av_free((void *)props); \
846 VkDeviceSize max_vram = 0, max_visible_vram = 0;
850 for (
int i = 0;
i <
p->mprops.memoryTypeCount;
i++) {
851 const VkMemoryType
type =
p->mprops.memoryTypes[
i];
852 const VkMemoryHeap heap =
p->mprops.memoryHeaps[
type.heapIndex];
853 if (!(
type.propertyFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT))
855 max_vram =
FFMAX(max_vram, heap.size);
856 if (
type.propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT)
857 max_visible_vram =
FFMAX(max_visible_vram, heap.size);
860 return max_vram - max_visible_vram < 1024;
876 const char *
const **
dst, uint32_t *num,
880 const char **extension_names =
NULL;
884 int err = 0, found, extensions_found = 0;
887 int optional_exts_num;
888 uint32_t sup_ext_count;
889 char *user_exts_str =
NULL;
891 VkExtensionProperties *sup_ext;
901 if (!user_exts_str) {
906 vk->EnumerateInstanceExtensionProperties(
NULL, &sup_ext_count,
NULL);
907 sup_ext =
av_malloc_array(sup_ext_count,
sizeof(VkExtensionProperties));
910 vk->EnumerateInstanceExtensionProperties(
NULL, &sup_ext_count, sup_ext);
918 if (!user_exts_str) {
923 vk->EnumerateDeviceExtensionProperties(hwctx->
phys_dev,
NULL,
924 &sup_ext_count,
NULL);
925 sup_ext =
av_malloc_array(sup_ext_count,
sizeof(VkExtensionProperties));
928 vk->EnumerateDeviceExtensionProperties(hwctx->
phys_dev,
NULL,
929 &sup_ext_count, sup_ext);
932 for (
int i = 0;
i < optional_exts_num;
i++) {
933 tstr = optional_exts[
i].
name;
937 if (!strcmp(tstr, VK_EXT_HOST_IMAGE_COPY_EXTENSION_NAME) &&
945 (!strcmp(tstr, VK_EXT_SHADER_OBJECT_EXTENSION_NAME))) {
949 for (
int j = 0; j < sup_ext_count; j++) {
950 if (!strcmp(tstr, sup_ext[j].extensionName)) {
959 p->vkctx.extensions |= optional_exts[
i].
flag;
967 tstr = VK_EXT_DEBUG_UTILS_EXTENSION_NAME;
969 for (
int j = 0; j < sup_ext_count; j++) {
970 if (!strcmp(tstr, sup_ext[j].extensionName)) {
986 #ifdef VK_KHR_shader_relaxed_extended_instruction
988 tstr = VK_KHR_SHADER_RELAXED_EXTENDED_INSTRUCTION_EXTENSION_NAME;
990 for (
int j = 0; j < sup_ext_count; j++) {
991 if (!strcmp(tstr, sup_ext[j].extensionName)) {
1005 if (user_exts_str) {
1006 char *save, *token =
av_strtok(user_exts_str,
"+", &save);
1009 for (
int j = 0; j < sup_ext_count; j++) {
1010 if (!strcmp(token, sup_ext[j].extensionName)) {
1026 *
dst = extension_names;
1027 *num = extensions_found;
1041 const char *
const **
dst, uint32_t *num,
1048 static const char layer_standard_validation[] = {
"VK_LAYER_KHRONOS_validation" };
1049 int layer_standard_validation_found = 0;
1051 uint32_t sup_layer_count;
1052 VkLayerProperties *sup_layers;
1055 char *user_layers_str =
NULL;
1058 const char **enabled_layers =
NULL;
1059 uint32_t enabled_layers_count = 0;
1067 vk->EnumerateInstanceLayerProperties(&sup_layer_count,
NULL);
1068 sup_layers =
av_malloc_array(sup_layer_count,
sizeof(VkLayerProperties));
1071 vk->EnumerateInstanceLayerProperties(&sup_layer_count, sup_layers);
1074 for (
int i = 0;
i < sup_layer_count;
i++)
1078 if (!debug_opt && !user_layers)
1083 if (!strcmp(debug_opt->
value,
"printf")) {
1085 }
else if (!strcmp(debug_opt->
value,
"validate")) {
1087 }
else if (!strcmp(debug_opt->
value,
"practices")) {
1090 char *end_ptr =
NULL;
1091 int idx = strtol(debug_opt->
value, &end_ptr, 10);
1092 if (end_ptr == debug_opt->
value || end_ptr[0] !=
'\0' ||
1107 for (
int i = 0;
i < sup_layer_count;
i++) {
1108 if (!strcmp(layer_standard_validation, sup_layers[
i].layerName)) {
1110 layer_standard_validation);
1111 ADD_VAL_TO_LIST(enabled_layers, enabled_layers_count, layer_standard_validation);
1113 layer_standard_validation_found = 1;
1117 if (!layer_standard_validation_found) {
1119 "Validation Layer \"%s\" not supported\n", layer_standard_validation);
1130 if (!user_layers_str) {
1135 token =
av_strtok(user_layers_str,
"+", &save);
1140 if (!strcmp(layer_standard_validation, token) && layer_standard_validation_found) {
1146 for (
int j = 0; j < sup_layer_count; j++) {
1147 if (!strcmp(token, sup_layers[j].layerName)) {
1158 if (!strcmp(layer_standard_validation, token))
1162 "Layer \"%s\" not supported\n", token);
1179 *
dst = enabled_layers;
1180 *num = enabled_layers_count;
1195 VkApplicationInfo application_info = {
1196 .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO,
1197 .pApplicationName =
"ffmpeg",
1201 .pEngineName =
"libavutil",
1202 .apiVersion = VK_API_VERSION_1_3,
1207 VkValidationFeaturesEXT validation_features = {
1208 .sType = VK_STRUCTURE_TYPE_VALIDATION_FEATURES_EXT,
1210 VkInstanceCreateInfo inst_props = {
1211 .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
1212 .pApplicationInfo = &application_info,
1228 &inst_props.enabledLayerCount, debug_mode);
1234 &inst_props.enabledExtensionCount, *debug_mode);
1242 static const VkValidationFeatureEnableEXT feat_list_validate[] = {
1243 VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT,
1244 VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT,
1245 VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_EXT,
1247 validation_features.pEnabledValidationFeatures = feat_list_validate;
1248 validation_features.enabledValidationFeatureCount =
FF_ARRAY_ELEMS(feat_list_validate);
1249 inst_props.pNext = &validation_features;
1251 static const VkValidationFeatureEnableEXT feat_list_debug[] = {
1252 VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT,
1253 VK_VALIDATION_FEATURE_ENABLE_GPU_ASSISTED_RESERVE_BINDING_SLOT_EXT,
1254 VK_VALIDATION_FEATURE_ENABLE_DEBUG_PRINTF_EXT,
1256 validation_features.pEnabledValidationFeatures = feat_list_debug;
1257 validation_features.enabledValidationFeatureCount =
FF_ARRAY_ELEMS(feat_list_debug);
1258 inst_props.pNext = &validation_features;
1260 static const VkValidationFeatureEnableEXT feat_list_practices[] = {
1261 VK_VALIDATION_FEATURE_ENABLE_SYNCHRONIZATION_VALIDATION_EXT,
1262 VK_VALIDATION_FEATURE_ENABLE_BEST_PRACTICES_EXT,
1264 validation_features.pEnabledValidationFeatures = feat_list_practices;
1265 validation_features.enabledValidationFeatureCount =
FF_ARRAY_ELEMS(feat_list_practices);
1266 inst_props.pNext = &validation_features;
1270 for (
int i = 0;
i < inst_props.enabledExtensionCount;
i++) {
1271 if (!strcmp(VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME,
1272 inst_props.ppEnabledExtensionNames[
i])) {
1273 inst_props.flags |= VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR;
1280 ret = vk->CreateInstance(&inst_props, hwctx->
alloc, &hwctx->
inst);
1283 if (
ret != VK_SUCCESS) {
1300 VkDebugUtilsMessengerCreateInfoEXT dbg = {
1301 .sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT,
1302 .messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT |
1303 VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT |
1304 VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT |
1305 VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT,
1306 .messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT |
1307 VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT |
1308 VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT,
1313 vk->CreateDebugUtilsMessengerEXT(hwctx->
inst, &dbg,
1314 hwctx->
alloc, &
p->debug_ctx);
1320 RELEASE_PROPS(inst_props.ppEnabledLayerNames, inst_props.enabledLayerCount);
1339 case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU:
return "integrated";
1340 case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU:
return "discrete";
1341 case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU:
return "virtual";
1342 case VK_PHYSICAL_DEVICE_TYPE_CPU:
return "software";
1343 default:
return "unknown";
1350 int err = 0, choice = -1;
1356 VkPhysicalDevice *devices =
NULL;
1357 VkPhysicalDeviceIDProperties *idp =
NULL;
1358 VkPhysicalDeviceProperties2 *prop =
NULL;
1359 VkPhysicalDeviceDriverProperties *driver_prop =
NULL;
1360 VkPhysicalDeviceDrmPropertiesEXT *drm_prop =
NULL;
1362 ret = vk->EnumeratePhysicalDevices(hwctx->
inst, &num,
NULL);
1363 if (
ret != VK_SUCCESS || !num) {
1372 ret = vk->EnumeratePhysicalDevices(hwctx->
inst, &num, devices);
1373 if (
ret != VK_SUCCESS) {
1392 driver_prop =
av_calloc(num,
sizeof(*driver_prop));
1399 drm_prop =
av_calloc(num,
sizeof(*drm_prop));
1407 for (
int i = 0;
i < num;
i++) {
1409 drm_prop[
i].sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRM_PROPERTIES_EXT;
1410 driver_prop[
i].pNext = &drm_prop[
i];
1412 driver_prop[
i].sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES;
1413 idp[
i].pNext = &driver_prop[
i];
1414 idp[
i].sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ID_PROPERTIES;
1415 prop[
i].sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
1416 prop[
i].pNext = &idp[
i];
1418 vk->GetPhysicalDeviceProperties2(devices[
i], &prop[
i]);
1420 prop[
i].properties.deviceName,
1422 prop[
i].properties.deviceID);
1426 for (
int i = 0;
i < num;
i++) {
1427 if (!strncmp(idp[
i].deviceUUID, select->
uuid, VK_UUID_SIZE)) {
1436 for (
int i = 0;
i < num;
i++) {
1437 if ((select->
drm_major == drm_prop[
i].primaryMajor &&
1438 select->
drm_minor == drm_prop[
i].primaryMinor) ||
1439 (select->
drm_major == drm_prop[
i].renderMajor &&
1440 select->
drm_minor == drm_prop[
i].renderMinor)) {
1449 }
else if (select->
name) {
1451 for (
int i = 0;
i < num;
i++) {
1452 if (strstr(prop[
i].properties.deviceName, select->
name)) {
1463 for (
int i = 0;
i < num;
i++) {
1464 if (select->
pci_device == prop[
i].properties.deviceID) {
1475 for (
int i = 0;
i < num;
i++) {
1476 if (select->
vendor_id == prop[
i].properties.vendorID) {
1486 if (select->
index < num) {
1487 choice = select->
index;
1499 choice, prop[choice].properties.deviceName,
1501 prop[choice].properties.deviceID);
1503 p->props = prop[choice];
1504 p->props.pNext =
NULL;
1505 p->dprops = driver_prop[choice];
1506 p->dprops.pNext =
NULL;
1520 VkQueueFlagBits
flags)
1523 uint32_t min_score = UINT32_MAX;
1525 for (
int i = 0;
i < num_qf;
i++) {
1526 VkQueueFlagBits qflags = qf[
i].queueFamilyProperties.queueFlags;
1529 if ((
flags & VK_QUEUE_TRANSFER_BIT) &&
1530 (qflags & (VK_QUEUE_GRAPHICS_BIT | VK_QUEUE_COMPUTE_BIT)))
1531 qflags |= VK_QUEUE_TRANSFER_BIT;
1533 if (qflags &
flags) {
1534 uint32_t score =
av_popcount(qflags) + qf[
i].queueFamilyProperties.timestampValidBits;
1535 if (score < min_score) {
1543 qf[
index].queueFamilyProperties.timestampValidBits++;
1549 VkQueueFamilyVideoPropertiesKHR *qf_vid, uint32_t num_qf,
1550 VkVideoCodecOperationFlagsKHR
flags)
1553 uint32_t min_score = UINT32_MAX;
1555 for (
int i = 0;
i < num_qf;
i++) {
1556 const VkQueueFlags qflags = qf[
i].queueFamilyProperties.queueFlags;
1557 const VkVideoCodecOperationFlagsKHR vflags = qf_vid[
i].videoCodecOperations;
1559 if (!(qflags & (VK_QUEUE_VIDEO_ENCODE_BIT_KHR | VK_QUEUE_VIDEO_DECODE_BIT_KHR)))
1562 if (vflags &
flags) {
1563 uint32_t score =
av_popcount(vflags) + qf[
i].queueFamilyProperties.timestampValidBits;
1564 if (score < min_score) {
1572 qf[
index].queueFamilyProperties.timestampValidBits++;
1584 VkQueueFamilyProperties2 *qf =
NULL;
1585 VkQueueFamilyVideoPropertiesKHR *qf_vid =
NULL;
1588 vk->GetPhysicalDeviceQueueFamilyProperties(hwctx->
phys_dev, &num,
NULL);
1599 qf_vid =
av_malloc_array(num,
sizeof(VkQueueFamilyVideoPropertiesKHR));
1603 for (uint32_t
i = 0;
i < num;
i++) {
1604 qf_vid[
i] = (VkQueueFamilyVideoPropertiesKHR) {
1605 .sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_VIDEO_PROPERTIES_KHR,
1607 qf[
i] = (VkQueueFamilyProperties2) {
1608 .sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2,
1614 vk->GetPhysicalDeviceQueueFamilyProperties2(hwctx->
phys_dev, &num, qf);
1617 for (
int i = 0;
i < num;
i++) {
1619 ((qf[
i].queueFamilyProperties.queueFlags) & VK_QUEUE_GRAPHICS_BIT) ?
" graphics" :
"",
1620 ((qf[
i].queueFamilyProperties.queueFlags) & VK_QUEUE_COMPUTE_BIT) ?
" compute" :
"",
1621 ((qf[
i].queueFamilyProperties.queueFlags) & VK_QUEUE_TRANSFER_BIT) ?
" transfer" :
"",
1622 ((qf[
i].queueFamilyProperties.queueFlags) & VK_QUEUE_VIDEO_ENCODE_BIT_KHR) ?
" encode" :
"",
1623 ((qf[
i].queueFamilyProperties.queueFlags) & VK_QUEUE_VIDEO_DECODE_BIT_KHR) ?
" decode" :
"",
1624 ((qf[
i].queueFamilyProperties.queueFlags) & VK_QUEUE_SPARSE_BINDING_BIT) ?
" sparse" :
"",
1625 ((qf[
i].queueFamilyProperties.queueFlags) & VK_QUEUE_OPTICAL_FLOW_BIT_NV) ?
" optical_flow" :
"",
1626 ((qf[
i].queueFamilyProperties.queueFlags) & VK_QUEUE_PROTECTED_BIT) ?
" protected" :
"",
1627 qf[
i].queueFamilyProperties.queueCount);
1631 qf[
i].queueFamilyProperties.timestampValidBits = 0;
1637 #define PICK_QF(type, vid_op) \
1643 idx = pick_video_queue_family(qf, qf_vid, num, vid_op); \
1645 idx = pick_queue_family(qf, num, type); \
1650 for (i = 0; i < hwctx->nb_qf; i++) { \
1651 if (hwctx->qf[i].idx == idx) { \
1652 hwctx->qf[i].flags |= type; \
1653 hwctx->qf[i].video_caps |= vid_op; \
1657 if (i == hwctx->nb_qf) { \
1658 hwctx->qf[i].idx = idx; \
1659 hwctx->qf[i].num = qf[idx].queueFamilyProperties.queueCount; \
1660 if (p->limit_queues || \
1661 p->dprops.driverID == VK_DRIVER_ID_NVIDIA_PROPRIETARY) { \
1662 int max = p->limit_queues; \
1663 if (type == VK_QUEUE_GRAPHICS_BIT) \
1664 hwctx->qf[i].num = FFMIN(hwctx->qf[i].num, \
1667 hwctx->qf[i].num = FFMIN(hwctx->qf[i].num, max); \
1669 hwctx->qf[i].flags = type; \
1670 hwctx->qf[i].video_caps = vid_op; \
1675 PICK_QF(VK_QUEUE_GRAPHICS_BIT, VK_VIDEO_CODEC_OPERATION_NONE_KHR);
1676 PICK_QF(VK_QUEUE_COMPUTE_BIT, VK_VIDEO_CODEC_OPERATION_NONE_KHR);
1677 PICK_QF(VK_QUEUE_TRANSFER_BIT, VK_VIDEO_CODEC_OPERATION_NONE_KHR);
1679 PICK_QF(VK_QUEUE_VIDEO_ENCODE_BIT_KHR, VK_VIDEO_CODEC_OPERATION_ENCODE_H264_BIT_KHR);
1680 PICK_QF(VK_QUEUE_VIDEO_DECODE_BIT_KHR, VK_VIDEO_CODEC_OPERATION_DECODE_H264_BIT_KHR);
1682 PICK_QF(VK_QUEUE_VIDEO_ENCODE_BIT_KHR, VK_VIDEO_CODEC_OPERATION_ENCODE_H265_BIT_KHR);
1683 PICK_QF(VK_QUEUE_VIDEO_DECODE_BIT_KHR, VK_VIDEO_CODEC_OPERATION_DECODE_H265_BIT_KHR);
1685 #ifdef VK_KHR_video_decode_vp9
1686 PICK_QF(VK_QUEUE_VIDEO_DECODE_BIT_KHR, VK_VIDEO_CODEC_OPERATION_DECODE_VP9_BIT_KHR);
1689 #ifdef VK_KHR_video_encode_av1
1690 PICK_QF(VK_QUEUE_VIDEO_ENCODE_BIT_KHR, VK_VIDEO_CODEC_OPERATION_ENCODE_AV1_BIT_KHR);
1692 PICK_QF(VK_QUEUE_VIDEO_DECODE_BIT_KHR, VK_VIDEO_CODEC_OPERATION_DECODE_AV1_BIT_KHR);
/* Fragment: builds the VkDeviceQueueCreateInfo array (cd->pQueueCreateInfos)
 * from the chosen hwctx->qf[] queue families — one entry per family, with its
 * index and queue count; duplicate family indices are detected by scanning
 * the already-appended entries. Non-contiguous capture, statements below are
 * not adjacent in the original file. */
1700 sizeof(VkDeviceQueueCreateInfo));
1701 if (!cd->pQueueCreateInfos)
1704 for (uint32_t
i = 0;
i < hwctx->
nb_qf;
i++) {
1707 VkDeviceQueueCreateInfo *pc;
/* Skip families already present in the create-info array. */
1708 for (uint32_t j = 0; j < cd->queueCreateInfoCount; j++) {
1709 if (hwctx->
qf[
i].
idx == cd->pQueueCreateInfos[j].queueFamilyIndex) {
/* Error-path cleanup: frees each entry's priority array, then the array. */
1719 for (uint32_t j = 0; j < cd->queueCreateInfoCount; j++)
/* NOTE(review): the loop variable is j but the element freed is indexed by
 * i — looks like it should be [j]; verify against the full source. */
1720 av_free((
void *)cd->pQueueCreateInfos[
i].pQueuePriorities);
1721 av_free((
void *)cd->pQueueCreateInfos);
1725 for (uint32_t j = 0; j < hwctx->
qf[
i].
num; j++)
1728 pc = (VkDeviceQueueCreateInfo *)cd->pQueueCreateInfos;
1729 VkDeviceQueueCreateFlags qflags = 0;
1730 #ifdef VK_KHR_internally_synchronized_queues
1732 qflags |= VK_DEVICE_QUEUE_CREATE_INTERNALLY_SYNCHRONIZED_BIT_KHR;
/* Append the new entry for this family. */
1734 pc[cd->queueCreateInfoCount++] = (VkDeviceQueueCreateInfo) {
1735 .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
1737 .queueFamilyIndex = hwctx->
qf[
i].
idx,
1738 .queueCount = hwctx->
qf[
i].
num,
/* Fragment: legacy compatibility shim (guarded by FF_API_VULKAN_FIXED_QUEUES)
 * that back-fills the deprecated per-purpose queue-family fields from the new
 * hwctx->qf[] array: first matching family wins (field < 0 means unset).
 * Non-contiguous capture; the macro body and loop are incomplete here. */
1743 #if FF_API_VULKAN_FIXED_QUEUES
1752 #define SET_OLD_QF(field, nb_field, type) \
1754 if (field < 0 && hwctx->qf[i].flags & type) { \
1755 field = hwctx->qf[i].idx; \
1756 nb_field = hwctx->qf[i].num; \
1760 for (uint32_t
i = 0;
i < hwctx->
nb_qf;
i++) {
/* Fragment of device/instance teardown: destroys the debug messenger (if
 * created), the VkInstance, unloads the Vulkan loader library, and begins
 * tearing down per-queue-family state. Non-contiguous capture. */
1790 vk->DestroyDebugUtilsMessengerEXT(hwctx->
inst,
p->debug_ctx,
1794 vk->DestroyInstance(hwctx->
inst, hwctx->
alloc);
/* presumably only reached when the loader was dlopen()ed by us —
 * TODO confirm against the missing surrounding condition. */
1797 dlclose(
p->libvulkan);
1808 for (uint32_t
i = 0;
i <
p->nb_tot_qfs;
i++) {
/* Fragment of internal device creation: fills VkDeviceCreateInfo, queries
 * supported features, chains the requested feature structs, creates the
 * device, frees temporary queue/extension arrays, and reads user AVOptions
 * (limit_queues, linear_images, disable_multiplane, avoid_host_import).
 * Non-contiguous capture; error paths below are incomplete. */
1820 int disable_multiplane,
1831 VkDeviceCreateInfo dev_info = {
1832 .sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
1844 vk->GetPhysicalDeviceMemoryProperties(hwctx->
phys_dev, &
p->mprops);
/* Error path after extension/queue setup failure: free priorities + array. */
1848 &dev_info.enabledExtensionCount, debug_mode))) {
1849 for (
int i = 0;
i < dev_info.queueCreateInfoCount;
i++)
1850 av_free((
void *)dev_info.pQueueCreateInfos[
i].pQueuePriorities);
1851 av_free((
void *)dev_info.pQueueCreateInfos);
1857 vk->GetPhysicalDeviceFeatures2(hwctx->
phys_dev, &supported_feats.
device);
/* Chain the negotiated feature structs into the device create info. */
1862 dev_info.pNext =
p->feats.device.pNext;
1863 dev_info.pEnabledFeatures = &
p->feats.device.features;
1868 p->limit_queues = strtol(opt_d->
value,
NULL, 10);
/* Queue create infos are consumed by vkCreateDevice; free them afterwards. */
1878 for (
int i = 0;
i < dev_info.queueCreateInfoCount;
i++)
1879 av_free((
void *)dev_info.pQueueCreateInfos[
i].pQueuePriorities);
1880 av_free((
void *)dev_info.pQueueCreateInfos);
1882 if (
ret != VK_SUCCESS) {
1885 for (
int i = 0;
i < dev_info.enabledExtensionCount;
i++)
1886 av_free((
void *)dev_info.ppEnabledExtensionNames[
i]);
1887 av_free((
void *)dev_info.ppEnabledExtensionNames);
/* User-tunable behavior flags, parsed from AVDictionary options. */
1895 p->use_linear_images = strtol(opt_d->
value,
NULL, 10);
1898 p->disable_multiplane = disable_multiplane;
1899 if (!
p->disable_multiplane) {
1902 p->disable_multiplane = strtol(opt_d->
value,
NULL, 10);
/* Default: avoid host memory import on the NVIDIA proprietary driver,
 * overridable via option below. */
1906 p->avoid_host_import =
p->dprops.driverID == VK_DRIVER_ID_NVIDIA_PROPRIETARY;
1909 p->avoid_host_import = strtol(opt_d->
value,
NULL, 10);
/* Fragment of device init: queries physical-device properties (chained
 * props -> hprops -> dprops), logs alignment limits, probes external
 * semaphore capabilities for opaque FD/Win32 handles, enumerates queue
 * families (+ video properties), and allocates the per-queue mutex table.
 * Non-contiguous capture. */
1946 VkQueueFamilyProperties2 *qf;
1947 VkQueueFamilyVideoPropertiesKHR *qf_vid;
1948 VkPhysicalDeviceExternalSemaphoreInfo ext_sem_props_info;
1949 int graph_index, comp_index, tx_index, enc_index, dec_index;
/* pNext chain: props -> host-memory-import props -> driver-id props. */
1968 p->props.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PROPERTIES_2;
1969 p->props.pNext = &
p->hprops;
1970 p->hprops.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_MEMORY_HOST_PROPERTIES_EXT;
1971 p->hprops.pNext = &
p->dprops;
1972 p->dprops.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DRIVER_PROPERTIES;
1974 vk->GetPhysicalDeviceProperties2(hwctx->
phys_dev, &
p->props);
1976 p->props.properties.deviceName);
1979 p->props.properties.limits.optimalBufferCopyRowPitchAlignment);
1981 p->props.properties.limits.minMemoryMapAlignment);
1983 p->props.properties.limits.nonCoherentAtomSize);
1986 p->hprops.minImportedHostPointerAlignment);
1988 vk->GetPhysicalDeviceQueueFamilyProperties(hwctx->
phys_dev, &qf_num,
NULL);
/* Probe exportability of the platform's opaque semaphore handle type. */
1994 ext_sem_props_info = (VkPhysicalDeviceExternalSemaphoreInfo) {
1995 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_SEMAPHORE_INFO,
1999 ext_sem_props_info.handleType =
2001 IsWindows8OrGreater()
2002 ? VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT
2003 : VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT;
2005 VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT;
2007 p->ext_sem_props_opaque.sType = VK_STRUCTURE_TYPE_EXTERNAL_SEMAPHORE_PROPERTIES;
2008 vk->GetPhysicalDeviceExternalSemaphoreProperties(hwctx->
phys_dev,
2009 &ext_sem_props_info,
2010 &
p->ext_sem_props_opaque);
2016 qf_vid =
av_malloc_array(qf_num,
sizeof(VkQueueFamilyVideoPropertiesKHR));
/* Initialize the sType of every entry before the batched properties query. */
2022 for (uint32_t
i = 0;
i < qf_num;
i++) {
2023 qf_vid[
i] = (VkQueueFamilyVideoPropertiesKHR) {
2024 .sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_VIDEO_PROPERTIES_KHR,
2026 qf[
i] = (VkQueueFamilyProperties2) {
2027 .sType = VK_STRUCTURE_TYPE_QUEUE_FAMILY_PROPERTIES_2,
2032 vk->GetPhysicalDeviceQueueFamilyProperties2(hwctx->
phys_dev, &qf_num, qf);
2034 p->nb_tot_qfs = qf_num;
/* One mutex array per family, one mutex per queue in the family. */
2037 p->qf_mutex =
av_calloc(qf_num,
sizeof(*
p->qf_mutex));
2043 for (uint32_t
i = 0;
i < qf_num;
i++) {
2044 p->qf_mutex[
i] =
av_calloc(qf[
i].queueFamilyProperties.queueCount,
2045 sizeof(**
p->qf_mutex));
2046 if (!
p->qf_mutex[
i]) {
2050 for (uint32_t j = 0; j < qf[
i].queueFamilyProperties.queueCount; j++) {
2070 #define CHECK_QUEUE(type, required, fidx, ctx_qf, qc) \
2072 if (ctx_qf < 0 && required) { \
2073 av_log(ctx, AV_LOG_ERROR, "%s queue family is required, but marked as missing" \
2074 " in the context!\n", type); \
2075 err = AVERROR(EINVAL); \
2077 } else if (fidx < 0 || ctx_qf < 0) { \
2079 } else if (ctx_qf >= qf_num) { \
2080 av_log(ctx, AV_LOG_ERROR, "Invalid %s family index %i (device has %i families)!\n", \
2081 type, ctx_qf, qf_num); \
2082 err = AVERROR(EINVAL); \
2086 av_log(ctx, AV_LOG_VERBOSE, "Using queue family %i (queues: %i)" \
2087 " for%s%s%s%s%s\n", \
2089 ctx_qf == graph_index ? " graphics" : "", \
2090 ctx_qf == comp_index ? " compute" : "", \
2091 ctx_qf == tx_index ? " transfers" : "", \
2092 ctx_qf == enc_index ? " encode" : "", \
2093 ctx_qf == dec_index ? " decode" : ""); \
2094 graph_index = (ctx_qf == graph_index) ? -1 : graph_index; \
2095 comp_index = (ctx_qf == comp_index) ? -1 : comp_index; \
2096 tx_index = (ctx_qf == tx_index) ? -1 : tx_index; \
2097 enc_index = (ctx_qf == enc_index) ? -1 : enc_index; \
2098 dec_index = (ctx_qf == dec_index) ? -1 : dec_index; \
/* Fragment: if the user supplied no hwctx->qf[] entries, rebuild them from
 * the legacy per-purpose indices via ADD_QUEUE(); then validate that video
 * queues are usable and deduplicate family indices into p->img_qfs[] (the
 * set of families images may be shared across). Non-contiguous capture. */
2111 if (!hwctx->
nb_qf) {
2112 #define ADD_QUEUE(ctx_qf, qc, flag) \
2114 if (ctx_qf != -1) { \
2115 hwctx->qf[hwctx->nb_qf++] = (AVVulkanDeviceQueueFamily) { \
/* Scan for families flagged for video encode/decode. */
2133 for (
int i = 0;
i < hwctx->
nb_qf;
i++) {
2135 hwctx->
qf[
i].
flags & (VK_QUEUE_VIDEO_DECODE_BIT_KHR |
2136 VK_QUEUE_VIDEO_ENCODE_BIT_KHR)) {
/* Deduplicate: only append a family index not already collected. */
2143 for (
int i = 0;
i < hwctx->
nb_qf;
i++) {
2147 for (
int j = (
i - 1); j >= 0; j--) {
2154 p->img_qfs[
p->nb_img_qfs++] = hwctx->
qf[
i].
idx;
/* Fragment: tail of device init (memory-properties query, internal vkctx
 * wiring) followed by the start of device selection: the "device" string is
 * parsed as a numeric index first; if not numeric, it is treated as a device
 * name. Non-contiguous capture. */
2157 #if FF_API_VULKAN_SYNC_QUEUES
2167 vk->GetPhysicalDeviceMemoryProperties(hwctx->
phys_dev, &
p->mprops);
2169 p->vkctx.device =
ctx;
2170 p->vkctx.hwctx = hwctx;
2177 vk->GetPhysicalDeviceMemoryProperties(hwctx->
phys_dev, &
p->mprops);
2189 if (device && device[0]) {
/* strtol leaves end == device when no digits were consumed. */
2191 dev_select.
index = strtol(device, &end, 10);
2192 if (end == device) {
2193 dev_select.
index = 0;
2194 dev_select.
name = device;
/* Fragment of device derivation: picks the matching Vulkan physical device
 * from a source context — VAAPI (PCI ID attribute or vendor string), DRM
 * (node major/minor + PCI device id via libdrm), or CUDA (device UUID).
 * Non-contiguous capture; case labels and error handling are missing. */
2210 switch(src_ctx->
type) {
2214 VADisplay dpy = src_hwctx->
display;
2215 #if VA_CHECK_VERSION(1, 15, 0)
2217 VADisplayAttribute attr = {
2218 .type = VADisplayPCIID,
2223 #if VA_CHECK_VERSION(1, 15, 0)
2224 vas = vaGetDisplayAttributes(dpy, &attr, 1);
2225 if (vas == VA_STATUS_SUCCESS && attr.flags != VA_DISPLAY_ATTRIB_NOT_SUPPORTED)
/* Low 16 bits of the PCI ID attribute hold the device id. */
2226 dev_select.pci_device = (attr.value & 0xFFFF);
2229 if (!dev_select.pci_device) {
/* Fallback: match by vendor id derived from the VA vendor string. */
2230 vendor = vaQueryVendorString(dpy);
2236 if (strstr(vendor,
"AMD"))
2237 dev_select.vendor_id = 0x1002;
2246 struct stat drm_node_info;
2247 drmDevice *drm_dev_info;
2250 err = fstat(src_hwctx->
fd, &drm_node_info);
2257 dev_select.drm_major = major(drm_node_info.st_dev);
2258 dev_select.drm_minor = minor(drm_node_info.st_dev);
2259 dev_select.has_drm = 1;
2261 err = drmGetDevice(src_hwctx->
fd, &drm_dev_info);
2268 if (drm_dev_info->bustype == DRM_BUS_PCI)
2269 dev_select.pci_device = drm_dev_info->deviceinfo.pci->device_id;
2271 drmFreeDevice(&drm_dev_info);
2281 CudaFunctions *cu = cu_internal->
cuda_dl;
2283 int ret =
CHECK_CU(cu->cuDeviceGetUuid((CUuuid *)&dev_select.uuid,
2290 dev_select.has_uuid = 1;
/* Fragment of frames-constraints query: tiling follows the linear-images
 * option, and max frame dimensions come from the device's 2D image limit.
 * Non-contiguous capture. */
2305 const void *hwconfig,
2313 p->use_linear_images ? VK_IMAGE_TILING_LINEAR :
2314 VK_IMAGE_TILING_OPTIMAL,
2326 p->use_linear_images ? VK_IMAGE_TILING_LINEAR :
2327 VK_IMAGE_TILING_OPTIMAL,
2337 constraints->
max_width =
p->props.properties.limits.maxImageDimension2D;
2338 constraints->
max_height =
p->props.properties.limits.maxImageDimension2D;
/* Fragment of the memory allocator: walks the device's memory types to find
 * one allowed by req->memoryTypeBits that has all req_flags property bits
 * and whose heap is large enough, then calls vkAllocateMemory and reports
 * the chosen type's property flags back through *mem_flags.
 * Non-contiguous capture. */
2351 VkMemoryPropertyFlagBits req_flags,
const void *alloc_extension,
2352 VkMemoryPropertyFlagBits *mem_flags, VkDeviceMemory *mem)
2359 VkMemoryAllocateInfo alloc_info = {
2360 .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
2361 .pNext = alloc_extension,
2362 .allocationSize = req->size,
2367 for (
int i = 0;
i <
p->mprops.memoryTypeCount;
i++) {
2368 const VkMemoryType *
type = &
p->mprops.memoryTypes[
i];
/* Type must be permitted by the resource's memoryTypeBits mask. */
2371 if (!(req->memoryTypeBits & (1 <<
i)))
/* All requested property flags must be present. */
2375 if ((
type->propertyFlags & req_flags) != req_flags)
/* Heap must be able to hold the allocation. */
2379 if (req->size >
p->mprops.memoryHeaps[
type->heapIndex].size)
2393 alloc_info.memoryTypeIndex =
index;
2395 ret = vk->AllocateMemory(dev_hwctx->
act_dev, &alloc_info,
2396 dev_hwctx->
alloc, mem);
2397 if (
ret != VK_SUCCESS) {
2403 *mem_flags |=
p->mprops.memoryTypes[
index].propertyFlags;
/* Fragment of internal-frame cleanup: releases per-plane CUDA interop
 * objects (external semaphores, mipmapped arrays, external memory), their
 * Win32 handles on Windows, the DRM implicit-sync semaphore, and then, in
 * frame free, waits on the frame's timeline semaphores before destroying
 * per-image resources. Non-contiguous capture. */
2413 if (internal->cuda_fc_ref) {
2419 CudaFunctions *cu = cu_internal->
cuda_dl;
2422 if (internal->cu_sem[
i])
2423 CHECK_CU(cu->cuDestroyExternalSemaphore(internal->cu_sem[
i]));
2424 if (internal->cu_mma[
i])
2425 CHECK_CU(cu->cuMipmappedArrayDestroy(internal->cu_mma[
i]));
2426 if (internal->ext_mem[
i])
2427 CHECK_CU(cu->cuDestroyExternalMemory(internal->ext_mem[
i]));
/* Windows-only: close the duplicated OS handles as well. */
2429 if (internal->ext_sem_handle[
i])
2430 CloseHandle(internal->ext_sem_handle[
i]);
2431 if (internal->ext_mem_handle[
i])
2432 CloseHandle(internal->ext_mem_handle[
i]);
2440 if (internal->drm_sync_sem != VK_NULL_HANDLE)
2441 p->vkctx.vkfn.DestroySemaphore(
p->p.act_dev, internal->drm_sync_sem,
/* Wait for all timeline semaphores to reach their last submitted value
 * before tearing the frame down. */
2461 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
2463 .pSemaphores =
f->sem,
2464 .pValues =
f->sem_value,
2465 .semaphoreCount = nb_sems,
2473 for (
int i = 0;
i < nb_images;
i++) {
/* Fragment of per-image memory allocation + binding: for each image in the
 * frame, queries memory requirements (with dedicated-allocation info),
 * pads linear images to the map alignment, allocates host-visible memory
 * for linear tiling or device-local otherwise, then binds all images in one
 * vkBindImageMemory2 call. Non-contiguous capture. */
2488 void *alloc_pnext,
size_t alloc_pnext_stride)
2490 int img_cnt = 0, err;
/* f->img[] is terminated by a VK_NULL_HANDLE entry. */
2498 while (
f->img[img_cnt]) {
2500 VkImageMemoryRequirementsInfo2 req_desc = {
2501 .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
2502 .image =
f->img[img_cnt],
2504 VkMemoryDedicatedAllocateInfo ded_alloc = {
2505 .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
/* Chain the caller-provided per-image pNext (stride-indexed array). */
2506 .pNext = (
void *)(((uint8_t *)alloc_pnext) + img_cnt*alloc_pnext_stride),
2508 VkMemoryDedicatedRequirements ded_req = {
2509 .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
2511 VkMemoryRequirements2 req = {
2512 .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
2516 vk->GetImageMemoryRequirements2(hwctx->
act_dev, &req_desc, &req);
2519 "plane %d: driver reports prefersDedicatedAllocation=%i requiresDedicatedAllocation=%i\n",
2520 img_cnt, ded_req.prefersDedicatedAllocation, ded_req.requiresDedicatedAllocation);
/* Linear images may be mapped: pad size to the map alignment limit. */
2522 if (
f->tiling == VK_IMAGE_TILING_LINEAR)
2523 req.memoryRequirements.size =
FFALIGN(req.memoryRequirements.size,
2524 p->props.properties.limits.minMemoryMapAlignment);
2527 use_ded_mem = ded_req.prefersDedicatedAllocation |
2528 ded_req.requiresDedicatedAllocation;
2532 ded_alloc.image =
f->img[img_cnt];
2536 f->tiling == VK_IMAGE_TILING_LINEAR ?
2537 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT :
2538 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
2539 use_ded_mem ? &ded_alloc : (
void *)ded_alloc.pNext,
2540 &
f->flags, &
f->mem[img_cnt])))
2543 f->size[img_cnt] = req.memoryRequirements.size;
2544 bind_info[img_cnt].sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
2545 bind_info[img_cnt].image =
f->img[img_cnt];
2546 bind_info[img_cnt].memory =
f->mem[img_cnt];
/* Single batched bind for all images of the frame. */
2552 ret = vk->BindImageMemory2(hwctx->
act_dev, img_cnt, bind_info);
2553 if (
ret != VK_SUCCESS) {
/* Fragment: selects the target VkImageLayout and access mask for a frame
 * depending on the preparation mode (general write, transfer dst, RW,
 * decode dst/dpb, encode dpb). Case labels are missing from this capture;
 * the pairs below correspond to successive switch cases. */
2573 VkAccessFlags2 *new_access)
2577 *new_layout = VK_IMAGE_LAYOUT_GENERAL;
2578 *new_access = VK_ACCESS_TRANSFER_WRITE_BIT;
2581 *new_layout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
2582 *new_access = VK_ACCESS_TRANSFER_WRITE_BIT;
2585 *new_layout = VK_IMAGE_LAYOUT_GENERAL;
2586 *new_access = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT;
2589 *new_layout = VK_IMAGE_LAYOUT_GENERAL;
2590 *new_access = VK_ACCESS_MEMORY_READ_BIT | VK_ACCESS_MEMORY_WRITE_BIT;
2593 *new_layout = VK_IMAGE_LAYOUT_VIDEO_DECODE_DST_KHR;
2594 *new_access = VK_ACCESS_TRANSFER_WRITE_BIT;
2597 *new_layout = VK_IMAGE_LAYOUT_VIDEO_DECODE_DPB_KHR;
2598 *new_access = VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_TRANSFER_WRITE_BIT;
2601 *new_layout = VK_IMAGE_LAYOUT_VIDEO_ENCODE_DPB_KHR;
2602 *new_access = VK_ACCESS_TRANSFER_READ_BIT | VK_ACCESS_TRANSFER_WRITE_BIT;
/* Fragment of GPU-side frame preparation: records image barriers
 * transitioning the frame to the chosen layout/access and (when exporting)
 * releasing ownership to VK_QUEUE_FAMILY_EXTERNAL_KHR, then submits via
 * vkCmdPipelineBarrier2. Non-contiguous capture. */
2616 VkImageLayout new_layout;
2617 VkAccessFlags2 new_access;
/* Concurrent sharing (multiple image QFs) needs no ownership transfer. */
2620 uint32_t dst_qf =
p->nb_img_qfs > 1 ? VK_QUEUE_FAMILY_IGNORED :
p->img_qfs[0];
2621 VkPipelineStageFlagBits2 src_stage = VK_PIPELINE_STAGE_2_NONE;
2623 dst_qf = VK_QUEUE_FAMILY_EXTERNAL_KHR;
2624 src_stage = VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT;
/* Temporary AVFrame wrapper so generic exec helpers can reference hwfc. */
2631 .
data = (uint8_t *)hwfc,
2635 .hw_frames_ctx = &tmp_ref,
2638 VkCommandBuffer cmd_buf;
2640 cmd_buf = exec->
buf;
2644 VK_PIPELINE_STAGE_2_NONE,
2645 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT);
2651 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT,
2652 new_access, new_layout, dst_qf);
2654 vk->CmdPipelineBarrier2(cmd_buf, &(VkDependencyInfo) {
2655 .sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO,
2656 .pImageMemoryBarriers = img_bar,
2657 .imageMemoryBarrierCount = nb_img_bar,
/* Fragment of the host-side (VK_EXT_host_image_copy) layout transition path:
 * verifies the target layout is in the device's supported copy-dst layout
 * list, then transitions every image of the frame on the host via
 * vkTransitionImageLayoutEXT. Non-contiguous capture. */
2679 VkImageLayout new_layout;
2680 VkAccessFlags2 new_access;
/* Bail out (fall back) if new_layout is not host-copy-transitionable. */
2684 for (
i = 0;
i <
p->vkctx.host_image_props.copyDstLayoutCount;
i++) {
2685 if (
p->vkctx.host_image_props.pCopyDstLayouts[
i] == new_layout)
2688 if (
i ==
p->vkctx.host_image_props.copyDstLayoutCount)
2691 for (
i = 0;
i < nb_images;
i++) {
2692 layout_change[
i] = (VkHostImageLayoutTransitionInfoEXT) {
2693 .sType = VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT,
2695 .oldLayout =
frame->layout[
i],
2696 .newLayout = new_layout,
2697 .subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
2698 .subresourceRange.layerCount = 1,
2699 .subresourceRange.levelCount = 1,
/* Track the new layout in the frame's bookkeeping immediately. */
2701 frame->layout[
i] = new_layout;
2704 ret = vk->TransitionImageLayoutEXT(
p->vkctx.hwctx->act_dev,
2705 nb_images, layout_change);
2706 if (
ret != VK_SUCCESS) {
/* Fragment of frame/image creation: for every plane format, builds a
 * VkImageCreateInfo (2D, caller-provided usage/tiling/flags, concurrent
 * sharing across image queue families when more than one), creates the
 * image and a timeline semaphore (exportable as opaque FD/Win32 when the
 * device supports it), and records per-image bookkeeping (queue family,
 * layout, semaphore value). Non-contiguous capture. */
2720 if (hwfc_vk->
usage & VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT &&
2732 int frame_w,
int frame_h,
int plane)
2749 VkImageTiling tiling, VkImageUsageFlagBits
usage,
2750 VkImageCreateFlags
flags,
int nb_layers,
/* Timeline semaphores, one per image, used to order all frame accesses. */
2762 VkSemaphoreTypeCreateInfo sem_type_info = {
2763 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
2764 .semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE,
2767 VkSemaphoreCreateInfo sem_spawn = {
2768 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
2769 .pNext = &sem_type_info,
2772 VkExportSemaphoreCreateInfo ext_sem_info_opaque = {
2773 .sType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
2775 .handleTypes = IsWindows8OrGreater()
2776 ? VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT
2777 : VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
2779 .handleTypes = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
/* Only chain the export info if the device can export this handle type. */
2784 if (
p->ext_sem_props_opaque.externalSemaphoreFeatures & VK_EXTERNAL_SEMAPHORE_FEATURE_EXPORTABLE_BIT) {
/* One VkImage per plane format; the format array is
 * VK_FORMAT_UNDEFINED-terminated. */
2798 for (
int i = 0; (hwfc_vk->
format[
i] != VK_FORMAT_UNDEFINED);
i++) {
2799 VkImageCreateInfo create_info = {
2800 .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
2801 .pNext = create_pnext,
2802 .imageType = VK_IMAGE_TYPE_2D,
2806 .arrayLayers = nb_layers,
2809 .initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
2811 .samples = VK_SAMPLE_COUNT_1_BIT,
2812 .pQueueFamilyIndices =
p->img_qfs,
2813 .queueFamilyIndexCount =
p->nb_img_qfs,
2814 .sharingMode =
p->nb_img_qfs > 1 ? VK_SHARING_MODE_CONCURRENT :
2815 VK_SHARING_MODE_EXCLUSIVE,
/* Plane dimensions account for chroma subsampling. */
2818 get_plane_wh(&create_info.extent.width, &create_info.extent.height,
2821 ret = vk->CreateImage(hwctx->
act_dev, &create_info,
2823 if (
ret != VK_SUCCESS) {
2831 ret = vk->CreateSemaphore(hwctx->
act_dev, &sem_spawn,
2833 if (
ret != VK_SUCCESS) {
2840 f->queue_family[
i] =
p->nb_img_qfs > 1 ? VK_QUEUE_FAMILY_IGNORED :
p->img_qfs[0];
2841 f->layout[
i] = create_info.initialLayout;
2843 f->sem_value[
i] = 0;
/* Fragment of external-memory capability probing: queries
 * vkGetPhysicalDeviceImageFormatProperties2 for each candidate DRM format
 * modifier (or once, without modifiers) with the requested external handle
 * type, and accumulates the compatible handle types the driver reports.
 * Non-contiguous capture. */
2859 VkExternalMemoryHandleTypeFlags *comp_handle_types,
2860 VkExternalMemoryHandleTypeFlags *iexp,
2861 VkExternalMemoryHandleTypeFlagBits
exp)
2869 const VkImageDrmFormatModifierListCreateInfoEXT *drm_mod_info =
2871 VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_LIST_CREATE_INFO_EXT);
2872 int has_mods = hwctx->
tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT && drm_mod_info;
2875 VkExternalImageFormatProperties eprops = {
2876 .sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR,
2878 VkImageFormatProperties2 props = {
2879 .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
2882 VkPhysicalDeviceImageDrmFormatModifierInfoEXT phy_dev_mod_info = {
2883 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT,
2885 .pQueueFamilyIndices =
p->img_qfs,
2886 .queueFamilyIndexCount =
p->nb_img_qfs,
2887 .sharingMode =
p->nb_img_qfs > 1 ? VK_SHARING_MODE_CONCURRENT :
2888 VK_SHARING_MODE_EXCLUSIVE,
2890 VkPhysicalDeviceExternalImageFormatInfo enext = {
2891 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
2893 .pNext = has_mods ? &phy_dev_mod_info :
NULL,
2895 VkPhysicalDeviceImageFormatInfo2 pinfo = {
2896 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
/* Without an export handle type, skip the external-format chain. */
2897 .pNext = !
exp ?
NULL : &enext,
2899 .type = VK_IMAGE_TYPE_2D,
2901 .usage = hwctx->
usage,
2902 .flags = (hwctx->
tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT && has_mods) ?
2903 (hwctx->
img_flags) : (VkImageCreateFlags)(VK_IMAGE_CREATE_ALIAS_BIT),
/* One probe per modifier; one probe total when modifiers don't apply. */
2906 nb_mods = has_mods ? drm_mod_info->drmFormatModifierCount : 1;
2907 for (
int i = 0;
i < nb_mods;
i++) {
2909 phy_dev_mod_info.drmFormatModifier = drm_mod_info->pDrmFormatModifiers[
i];
2911 ret = vk->GetPhysicalDeviceImageFormatProperties2(dev_hwctx->
phys_dev,
2915 av_log(hwfc,
AV_LOG_VERBOSE,
"GetPhysicalDeviceImageFormatProperties2: mod[%d]=0x%llx -> %s\n",
2916 i, (
unsigned long long)phy_dev_mod_info.drmFormatModifier,
2917 ret == VK_SUCCESS ?
"OK" :
"FAIL");
2918 if (
ret == VK_SUCCESS) {
2920 *comp_handle_types |= eprops.externalMemoryProperties.compatibleHandleTypes;
2921 if (
exp == VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT) {
2924 VK_EXTERNAL_MEMORY_FEATURE_DEDICATED_ONLY_BIT);
/* Fragment of pool allocation: chooses exportable external-memory handle
 * types per platform (opaque Win32, opaque FD, or dma-buf when using DRM
 * modifier tiling), fills per-image export info, creates the frame, and
 * then selects the preparation mode from the image usage bits
 * (decode-DPB-only, decode dst, encode DPB, transfer dst).
 * Non-contiguous capture. */
2939 VkExternalMemoryHandleTypeFlags e = 0x0;
2942 VkExternalMemoryImageCreateInfo eiinfo = {
2943 .sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
2950 ? VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT
2951 : VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT);
2954 (hwctx->
tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT))
2956 VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT);
2959 hwctx->
tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT)
2961 VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT);
2965 eminfo[
i].sType = VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO;
2967 eminfo[
i].handleTypes = e;
2974 av_log(hwfc,
AV_LOG_ERROR,
"vulkan_pool_alloc failed: create_frame failed: %d\n", err);
/* DPB-only decode images (no DST usage) are prepared differently from
 * output/decode-dst/encode-dpb/transfer images. */
2982 if ( (hwctx->
usage & VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR) &&
2983 !(hwctx->
usage & VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR))
2985 else if (hwctx->
usage & VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR)
2987 else if (hwctx->
usage & VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR)
2989 else if (hwctx->
usage & VK_IMAGE_USAGE_TRANSFER_DST_BIT)
/* Fragment of frames init: resolves plane formats (multiplane may be
 * disabled, lone-DPB forces single-image), applies the linear-tiling option,
 * augments usage with everything the device supports (unless a lone DPB or
 * DRM-modifier+video case forbids it), sets image create flags (mutable /
 * alias / extended usage), opts into video-profile independence where
 * profile lists allow it, and sizes the internal exec pools.
 * Non-contiguous capture. */
3045 VkImageUsageFlags supported_usage;
3048 int disable_multiplane =
p->disable_multiplane ||
/* A "lone DPB" is an encode DPB, or a decode DPB with no DST usage. */
3050 int is_lone_dpb = ((hwctx->
usage & VK_IMAGE_USAGE_VIDEO_ENCODE_DPB_BIT_KHR) ||
3051 ((hwctx->
usage & VK_IMAGE_USAGE_VIDEO_DECODE_DPB_BIT_KHR) &&
3052 !(hwctx->
usage & VK_IMAGE_USAGE_VIDEO_DECODE_DST_BIT_KHR)));
3059 if (
p->use_linear_images &&
3060 (hwctx->
tiling != VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT))
3061 hwctx->
tiling = VK_IMAGE_TILING_LINEAR;
3071 if (hwctx->
format[0] != VK_FORMAT_UNDEFINED) {
3076 "for the current sw_format %s!\n",
3088 (hwctx->
usage & VK_IMAGE_USAGE_STORAGE_BIT));
3097 NULL, &supported_usage,
3100 (hwctx->
usage & VK_IMAGE_USAGE_STORAGE_BIT));
3109 int drm_mod_with_video = (hwctx->
tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT &&
3111 VK_STRUCTURE_TYPE_VIDEO_PROFILE_LIST_INFO_KHR));
/* Default usage: add whatever transfer/storage/sampling the device offers. */
3113 if (!is_lone_dpb && !drm_mod_with_video) {
3115 hwctx->
usage |= supported_usage & (VK_IMAGE_USAGE_TRANSFER_DST_BIT |
3116 VK_IMAGE_USAGE_TRANSFER_SRC_BIT |
3117 VK_IMAGE_USAGE_STORAGE_BIT |
3118 VK_IMAGE_USAGE_SAMPLED_BIT);
/* Host transfer is skipped on the NVIDIA proprietary driver and MoltenVK. */
3121 !(
p->dprops.driverID == VK_DRIVER_ID_NVIDIA_PROPRIETARY) &&
3122 !(
p->dprops.driverID == VK_DRIVER_ID_MOLTENVK))
3123 hwctx->
usage |= supported_usage & VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT;
3126 if ((supported_usage & VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR) &&
3129 hwctx->
usage |= VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR;
3135 int sampleable = hwctx->
usage & (VK_IMAGE_USAGE_SAMPLED_BIT |
3136 VK_IMAGE_USAGE_STORAGE_BIT);
3137 hwctx->
img_flags = VK_IMAGE_CREATE_MUTABLE_FORMAT_BIT;
3139 hwctx->
img_flags |= VK_IMAGE_CREATE_ALIAS_BIT;
3141 hwctx->
img_flags |= VK_IMAGE_CREATE_EXTENDED_USAGE_BIT;
3150 if ((hwctx->
usage & VK_IMAGE_USAGE_VIDEO_ENCODE_SRC_BIT_KHR) &&
3153 const VkVideoProfileListInfoKHR *pl;
3156 hwctx->
img_flags |= VK_IMAGE_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR;
/* Only profile-independent if no profile uses reserved high bits. */
3159 for (
i = 0;
i < pl->profileCount;
i++) {
3161 if (pl->pProfiles[
i].videoCodecOperation & 0xFFFF0000)
3164 if (
i == pl->profileCount)
3165 hwctx->
img_flags |= VK_IMAGE_CREATE_VIDEO_PROFILE_INDEPENDENT_BIT_KHR;
/* Internal exec pool sizing from queue counts. */
3176 p->compute_qf->num, 0, 0, 0,
NULL);
3181 p->transfer_qf->num*2, 0, 0, 0,
NULL);
3186 p->transfer_qf->num, 0, 0, 0,
NULL);
/* Fragment of DRM export: reads the image's actual DRM format modifier,
 * then enumerates the device's modifier property list for the fallback
 * format (two-call pattern: count then fill) and locates the entry matching
 * the image's modifier. Non-contiguous capture. */
3198 VkImageDrmFormatModifierPropertiesEXT drm_mod = {
3199 .sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
3201 err = vk->GetImageDrmFormatModifierPropertiesEXT(dev_hwctx->
act_dev,
f->img[0],
3203 if (err != VK_SUCCESS) {
3209 VkDrmFormatModifierPropertiesListEXT modp;
3210 VkFormatProperties2 fmtp;
3211 VkDrmFormatModifierPropertiesEXT *mod_props =
NULL;
3213 modp = (VkDrmFormatModifierPropertiesListEXT) {
3214 .sType = VK_STRUCTURE_TYPE_DRM_FORMAT_MODIFIER_PROPERTIES_LIST_EXT,
3216 fmtp = (VkFormatProperties2) {
3217 .sType = VK_STRUCTURE_TYPE_FORMAT_PROPERTIES_2,
/* First call: count only (pDrmFormatModifierProperties is NULL). */
3222 vk->GetPhysicalDeviceFormatProperties2(dev_hwctx->
phys_dev, fmt->
fallback[
i], &fmtp);
3224 modp.pDrmFormatModifierProperties =
3225 av_calloc(modp.drmFormatModifierCount,
sizeof(*modp.pDrmFormatModifierProperties));
3226 if (!modp.pDrmFormatModifierProperties) {
/* Second call: fill the allocated array. */
3230 vk->GetPhysicalDeviceFormatProperties2(dev_hwctx->
phys_dev, fmt->
fallback[
i], &fmtp);
/* NOTE(review): the inner loop variable shadows the outer `i` — harmless
 * here but worth confirming against the full source. */
3232 for (uint32_t
i = 0;
i < modp.drmFormatModifierCount; ++
i) {
3233 VkDrmFormatModifierPropertiesEXT *m = &modp.pDrmFormatModifierProperties[
i];
3234 if (m->drmFormatModifier == drm_mod.drmFormatModifier) {
3240 if (mod_props ==
NULL) {
3241 av_log(hwfc,
AV_LOG_ERROR,
"No DRM format modifier properties found for modifier 0x%016"PRIx64
"\n",
3242 drm_mod.drmFormatModifier);
3243 av_free(modp.pDrmFormatModifierProperties);
3249 av_free(modp.pDrmFormatModifierProperties);
/* DRM fourcc -> VkFormat translation table and lookup helper used by the
 * DRM frame import path. Non-contiguous capture: the struct's VkFormat
 * member declaration, some table entries and the lookup's loop header are
 * missing from this excerpt. */
3313 static const struct {
3314 uint32_t drm_fourcc;
3316 } vulkan_drm_format_map[] = {
3317 { DRM_FORMAT_R8, VK_FORMAT_R8_UNORM },
3318 { DRM_FORMAT_R16, VK_FORMAT_R16_UNORM },
3319 { DRM_FORMAT_GR88, VK_FORMAT_R8G8_UNORM },
3320 { DRM_FORMAT_RG88, VK_FORMAT_R8G8_UNORM },
3321 { DRM_FORMAT_GR1616, VK_FORMAT_R16G16_UNORM },
3322 { DRM_FORMAT_RG1616, VK_FORMAT_R16G16_UNORM },
3323 { DRM_FORMAT_ARGB8888, VK_FORMAT_B8G8R8A8_UNORM },
3324 { DRM_FORMAT_XRGB8888, VK_FORMAT_B8G8R8A8_UNORM },
3325 { DRM_FORMAT_ABGR8888, VK_FORMAT_R8G8B8A8_UNORM },
3326 { DRM_FORMAT_XBGR8888, VK_FORMAT_R8G8B8A8_UNORM },
3327 { DRM_FORMAT_ARGB2101010, VK_FORMAT_A2B10G10R10_UNORM_PACK32 },
3328 { DRM_FORMAT_ABGR2101010, VK_FORMAT_A2R10G10B10_UNORM_PACK32 },
3329 { DRM_FORMAT_XRGB2101010, VK_FORMAT_A2B10G10R10_UNORM_PACK32 },
3330 { DRM_FORMAT_XBGR2101010, VK_FORMAT_A2R10G10B10_UNORM_PACK32 },
/* Newer fourccs, guarded for older drm_fourcc.h headers. */
3333 #ifdef DRM_FORMAT_XYUV8888
3334 { DRM_FORMAT_XYUV8888, VK_FORMAT_R8G8B8A8_UNORM },
3335 { DRM_FORMAT_XVYU2101010, VK_FORMAT_A2R10G10B10_UNORM_PACK32 } ,
3336 { DRM_FORMAT_XVYU12_16161616, VK_FORMAT_R12X4G12X4B12X4A12X4_UNORM_4PACK16 } ,
3337 { DRM_FORMAT_XVYU16161616, VK_FORMAT_R16G16B16A16_UNORM } ,
/* Linear search of the table; VK_FORMAT_UNDEFINED signals "unsupported". */
3341 static inline VkFormat drm_to_vulkan_fmt(uint32_t drm_fourcc)
3344 if (vulkan_drm_format_map[
i].drm_fourcc == drm_fourcc)
3345 return vulkan_drm_format_map[
i].vk_format;
3346 return VK_FORMAT_UNDEFINED;
/* Fragment of DRM dma-buf frame import: validates every layer's fourcc is
 * mappable, creates one VkImage per layer with explicit DRM modifier plane
 * layouts, checks format support, imports each layer's dma-buf fd (dup()ed)
 * as dedicated device memory, and finally binds memory to all image planes
 * in a single vkBindImageMemory2 call. Non-contiguous capture. */
3355 int bind_counts = 0;
/* Reject descriptors containing any unmappable fourcc up front. */
3365 if (drm_to_vulkan_fmt(
desc->layers[
i].format) == VK_FORMAT_UNDEFINED) {
3367 desc->layers[
i].format);
3378 f->tiling = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT;
3380 for (
int i = 0;
i <
desc->nb_layers;
i++) {
3384 VkSemaphoreTypeCreateInfo sem_type_info = {
3385 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
3386 .semaphoreType = VK_SEMAPHORE_TYPE_TIMELINE,
3389 VkSemaphoreCreateInfo sem_spawn = {
3390 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
3391 .pNext = &sem_type_info,
/* Explicit modifier + per-plane layouts taken from the DRM descriptor. */
3396 VkImageDrmFormatModifierExplicitCreateInfoEXT ext_img_mod_spec = {
3397 .sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_EXPLICIT_CREATE_INFO_EXT,
3398 .drmFormatModifier =
desc->objects[0].format_modifier,
3399 .drmFormatModifierPlaneCount =
planes,
3400 .pPlaneLayouts = (
const VkSubresourceLayout *)&ext_img_layouts,
3402 VkExternalMemoryImageCreateInfo ext_img_spec = {
3403 .sType = VK_STRUCTURE_TYPE_EXTERNAL_MEMORY_IMAGE_CREATE_INFO,
3404 .pNext = &ext_img_mod_spec,
3405 .handleTypes = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
3407 VkImageCreateInfo create_info = {
3408 .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
3409 .pNext = &ext_img_spec,
3410 .imageType = VK_IMAGE_TYPE_2D,
3411 .format = drm_to_vulkan_fmt(
desc->layers[
i].format),
3416 .tiling = VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT,
3417 .initialLayout = VK_IMAGE_LAYOUT_UNDEFINED,
3419 .samples = VK_SAMPLE_COUNT_1_BIT,
3420 .pQueueFamilyIndices =
p->img_qfs,
3421 .queueFamilyIndexCount =
p->nb_img_qfs,
3422 .sharingMode =
p->nb_img_qfs > 1 ? VK_SHARING_MODE_CONCURRENT :
3423 VK_SHARING_MODE_EXCLUSIVE,
/* Ask the driver whether this format+modifier+usage combo is importable. */
3427 VkExternalImageFormatProperties ext_props = {
3428 .sType = VK_STRUCTURE_TYPE_EXTERNAL_IMAGE_FORMAT_PROPERTIES_KHR,
3430 VkImageFormatProperties2 props_ret = {
3431 .sType = VK_STRUCTURE_TYPE_IMAGE_FORMAT_PROPERTIES_2,
3432 .pNext = &ext_props,
3434 VkPhysicalDeviceImageDrmFormatModifierInfoEXT props_drm_mod = {
3435 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_DRM_FORMAT_MODIFIER_INFO_EXT,
3436 .drmFormatModifier = ext_img_mod_spec.drmFormatModifier,
3437 .pQueueFamilyIndices = create_info.pQueueFamilyIndices,
3438 .queueFamilyIndexCount = create_info.queueFamilyIndexCount,
3439 .sharingMode = create_info.sharingMode,
3441 VkPhysicalDeviceExternalImageFormatInfo props_ext = {
3442 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_IMAGE_FORMAT_INFO,
3443 .pNext = &props_drm_mod,
3444 .handleType = ext_img_spec.handleTypes,
3446 VkPhysicalDeviceImageFormatInfo2 fmt_props;
3449 create_info.usage |= VK_IMAGE_USAGE_SAMPLED_BIT |
3450 VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
3452 create_info.usage |= VK_IMAGE_USAGE_STORAGE_BIT |
3453 VK_IMAGE_USAGE_TRANSFER_DST_BIT;
3455 fmt_props = (VkPhysicalDeviceImageFormatInfo2) {
3456 .sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_IMAGE_FORMAT_INFO_2,
3457 .pNext = &props_ext,
3458 .format = create_info.format,
3459 .type = create_info.imageType,
3460 .tiling = create_info.tiling,
3461 .usage = create_info.usage,
3462 .flags = create_info.flags,
3466 ret = vk->GetPhysicalDeviceImageFormatProperties2(hwctx->
phys_dev,
3467 &fmt_props, &props_ret);
3468 if (
ret != VK_SUCCESS) {
3476 get_plane_wh(&create_info.extent.width, &create_info.extent.height,
/* Copy offset/pitch per plane; size/array/depth pitch must be zero for
 * the explicit-modifier create path. */
3480 for (
int j = 0; j <
planes; j++) {
3481 ext_img_layouts[j].offset =
desc->layers[
i].planes[j].offset;
3482 ext_img_layouts[j].rowPitch =
desc->layers[
i].planes[j].pitch;
3483 ext_img_layouts[j].size = 0;
3484 ext_img_layouts[j].arrayPitch = 0;
3485 ext_img_layouts[j].depthPitch = 0;
3489 ret = vk->CreateImage(hwctx->
act_dev, &create_info,
3491 if (
ret != VK_SUCCESS) {
3498 ret = vk->CreateSemaphore(hwctx->
act_dev, &sem_spawn,
3500 if (
ret != VK_SUCCESS) {
/* Imported images keep external queue-family ownership until acquired. */
3507 f->queue_family[
i] = VK_QUEUE_FAMILY_EXTERNAL;
3508 f->layout[
i] = create_info.initialLayout;
3510 f->sem_value[
i] = 0;
/* Second pass: import each layer's dma-buf fd as VkDeviceMemory. */
3513 for (
int i = 0;
i <
desc->nb_layers;
i++) {
3515 VkImageMemoryRequirementsInfo2 req_desc = {
3516 .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2,
3519 VkMemoryDedicatedRequirements ded_req = {
3520 .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS,
3522 VkMemoryRequirements2 req2 = {
3523 .sType = VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2,
3528 VkMemoryFdPropertiesKHR fdmp = {
3529 .sType = VK_STRUCTURE_TYPE_MEMORY_FD_PROPERTIES_KHR,
/* dup() the fd: vkAllocateMemory takes ownership of the imported fd. */
3535 VkImportMemoryFdInfoKHR idesc = {
3536 .sType = VK_STRUCTURE_TYPE_IMPORT_MEMORY_FD_INFO_KHR,
3537 .fd = dup(
desc->objects[
desc->layers[
i].planes[0].object_index].fd),
3538 .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
3540 VkMemoryDedicatedAllocateInfo ded_alloc = {
3541 .sType = VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO,
3543 .image = req_desc.image,
3547 ret = vk->GetMemoryFdPropertiesKHR(hwctx->
act_dev,
3548 VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
3550 if (
ret != VK_SUCCESS) {
3558 vk->GetImageMemoryRequirements2(hwctx->
act_dev, &req_desc, &req2);
/* Memory type must come from the fd's reported memoryTypeBits. */
3561 req2.memoryRequirements.memoryTypeBits = fdmp.memoryTypeBits;
3564 VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT,
3565 (ded_req.prefersDedicatedAllocation ||
3566 ded_req.requiresDedicatedAllocation) ?
3567 &ded_alloc : ded_alloc.pNext,
3568 &
f->flags, &
f->mem[
i]);
3574 f->size[
i] = req2.memoryRequirements.size;
/* Third pass: bind memory to every (disjoint) memory plane of each image. */
3577 for (
int i = 0;
i <
desc->nb_layers;
i++) {
3579 for (
int j = 0; j <
planes; j++) {
3580 VkImageAspectFlagBits aspect = j == 0 ? VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT :
3581 j == 1 ? VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT :
3582 VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT;
3584 plane_info[bind_counts].sType = VK_STRUCTURE_TYPE_BIND_IMAGE_PLANE_MEMORY_INFO;
3586 plane_info[bind_counts].planeAspect = aspect;
3588 bind_info[bind_counts].sType = VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO;
3590 bind_info[bind_counts].image =
f->img[
i];
3591 bind_info[bind_counts].memory =
f->mem[
i];
/* Offsets live in the plane layouts; the bind offset stays zero. */
3594 bind_info[bind_counts].memoryOffset = 0;
3601 ret = vk->BindImageMemory2(hwctx->
act_dev, bind_counts, bind_info);
3602 if (
ret != VK_SUCCESS) {
/* Fragment of DRM implicit-sync import (guarded by
 * DMA_BUF_IOCTL_EXPORT_SYNC_FILE): for each dma-buf object, exports its
 * implicit fence as a sync file, wraps it in a binary VkSemaphore via
 * temporary import, then records a submission that waits on those
 * semaphores and transitions the images to GENERAL layout.
 * Non-contiguous capture; error/cleanup paths are partial. */
3630 #ifdef DMA_BUF_IOCTL_EXPORT_SYNC_FILE
3632 VkCommandBuffer cmd_buf;
3638 for (
int i = 0;
i <
desc->nb_objects;
i++) {
3639 VkSemaphoreTypeCreateInfo sem_type_info = {
3640 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
/* Binary (not timeline) semaphores: sync-fd import requires binary. */
3641 .semaphoreType = VK_SEMAPHORE_TYPE_BINARY,
3643 VkSemaphoreCreateInfo sem_spawn = {
3644 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
3645 .pNext = &sem_type_info,
3647 VkImportSemaphoreFdInfoKHR import_info;
3648 struct dma_buf_export_sync_file implicit_fd_info = {
3649 .flags = DMA_BUF_SYNC_READ,
/* Pull the dma-buf's implicit read fence out as an fd. */
3653 if (ioctl(
desc->objects[
i].fd, DMA_BUF_IOCTL_EXPORT_SYNC_FILE,
3654 &implicit_fd_info)) {
3659 vk->DestroySemaphore(hwctx->
act_dev, drm_sync_sem[
i], hwctx->
alloc);
3663 ret = vk->CreateSemaphore(hwctx->
act_dev, &sem_spawn,
3664 hwctx->
alloc, &drm_sync_sem[
i]);
3665 if (
ret != VK_SUCCESS) {
3670 vk->DestroySemaphore(hwctx->
act_dev, drm_sync_sem[
i], hwctx->
alloc);
/* VK_SEMAPHORE_IMPORT_TEMPORARY_BIT: payload is consumed by one wait. */
3674 import_info = (VkImportSemaphoreFdInfoKHR) {
3675 .sType = VK_STRUCTURE_TYPE_IMPORT_SEMAPHORE_FD_INFO_KHR,
3676 .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
3677 .flags = VK_SEMAPHORE_IMPORT_TEMPORARY_BIT,
3678 .semaphore = drm_sync_sem[
i],
3679 .fd = implicit_fd_info.fd,
3682 ret = vk->ImportSemaphoreFdKHR(hwctx->
act_dev, &import_info);
3683 if (
ret != VK_SUCCESS) {
3688 vk->DestroySemaphore(hwctx->
act_dev, drm_sync_sem[
i], hwctx->
alloc);
3694 cmd_buf = exec->
buf;
/* Queue the wait on every imported implicit-sync semaphore. */
3700 drm_sync_sem,
desc->nb_objects,
3701 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT, 1);
3706 VK_PIPELINE_STAGE_2_NONE,
3707 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT);
3712 VK_PIPELINE_STAGE_2_NONE,
3713 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT,
3715 VK_ACCESS_2_SHADER_SAMPLED_READ_BIT : 0x0) |
3717 VK_ACCESS_2_SHADER_STORAGE_WRITE_BIT : 0x0),
3718 VK_IMAGE_LAYOUT_GENERAL,
3719 p->nb_img_qfs > 1 ? VK_QUEUE_FAMILY_IGNORED :
p->img_qfs[0]);
3721 vk->CmdPipelineBarrier2(cmd_buf, &(VkDependencyInfo) {
3722 .sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO,
3723 .pImageMemoryBarriers = img_bar,
3724 .imageMemoryBarrierCount = nb_img_bar,
3735 "image may be corrupted.\n");
3751 if ((err = vulkan_map_from_drm_frame_desc(hwfc, &
f,
src,
flags)))
3755 dst->data[0] = (uint8_t *)
f;
3757 dst->height =
src->height;
3760 &vulkan_unmap_from_drm,
f);
3764 err = vulkan_map_from_drm_frame_sync(hwfc,
dst,
desc,
flags);
3787 VASurfaceID surface_id = (VASurfaceID)(uintptr_t)
src->data[3];
3793 vaSyncSurface(vaapi_ctx->display, surface_id);
3801 err = vulkan_map_from_drm(dst_fc,
dst,
tmp,
flags);
3818 VkDeviceMemory mem,
size_t size)
3826 CUDA_EXTERNAL_MEMORY_HANDLE_DESC ext_desc = {
3827 .type = IsWindows8OrGreater()
3828 ? CU_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32
3829 : CU_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT,
3832 VkMemoryGetWin32HandleInfoKHR export_info = {
3833 .sType = VK_STRUCTURE_TYPE_MEMORY_GET_WIN32_HANDLE_INFO_KHR,
3835 .handleType = IsWindows8OrGreater()
3836 ? VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_BIT
3837 : VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
3840 ret = vk->GetMemoryWin32HandleKHR(hwctx->
act_dev, &export_info,
3841 &ext_desc.handle.win32.handle);
3842 if (
ret != VK_SUCCESS) {
3847 dst_int->ext_mem_handle[idx] = ext_desc.handle.win32.handle;
3849 CUDA_EXTERNAL_MEMORY_HANDLE_DESC ext_desc = {
3850 .type = CU_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD,
3853 VkMemoryGetFdInfoKHR export_info = {
3854 .sType = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
3856 .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_OPAQUE_FD_BIT_KHR,
3859 ret = vk->GetMemoryFdKHR(hwctx->
act_dev, &export_info,
3860 &ext_desc.handle.fd);
3861 if (
ret != VK_SUCCESS) {
3868 ret =
CHECK_CU(cu->cuImportExternalMemory(&dst_int->ext_mem[idx], &ext_desc));
3871 close(ext_desc.handle.fd);
3890 VkSemaphoreGetWin32HandleInfoKHR sem_export = {
3891 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_WIN32_HANDLE_INFO_KHR,
3893 .handleType = IsWindows8OrGreater()
3894 ? VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_BIT
3895 : VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_WIN32_KMT_BIT,
3897 CUDA_EXTERNAL_SEMAPHORE_HANDLE_DESC ext_sem_desc = {
3901 VkSemaphoreGetFdInfoKHR sem_export = {
3902 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR,
3904 .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_OPAQUE_FD_BIT,
3906 CUDA_EXTERNAL_SEMAPHORE_HANDLE_DESC ext_sem_desc = {
3912 ret = vk->GetSemaphoreWin32HandleKHR(hwctx->
act_dev, &sem_export,
3913 &ext_sem_desc.handle.win32.handle);
3915 ret = vk->GetSemaphoreFdKHR(hwctx->
act_dev, &sem_export,
3916 &ext_sem_desc.handle.fd);
3918 if (
ret != VK_SUCCESS) {
3924 dst_int->ext_sem_handle[idx] = ext_sem_desc.handle.win32.handle;
3927 ret =
CHECK_CU(cu->cuImportExternalSemaphore(&dst_int->cu_sem[idx],
3931 close(ext_sem_desc.handle.fd);
3959 CudaFunctions *cu = cu_internal->
cuda_dl;
3960 CUarray_format cufmt =
desc->comp[0].depth > 8 ? CU_AD_FORMAT_UNSIGNED_INT16 :
3961 CU_AD_FORMAT_UNSIGNED_INT8;
3966 if (!dst_int->cuda_fc_ref) {
3970 if (!dst_int->cuda_fc_ref)
3974 for (
int i = 0;
i < nb_images;
i++) {
3975 err = export_mem_to_cuda(
ctx, cuda_cu, cu, dst_int,
i,
3980 err = export_sem_to_cuda(
ctx, cuda_cu, cu, dst_int,
i,
3986 if (nb_images !=
planes) {
3988 VkImageSubresource subres = {
3989 .aspectMask =
i == 2 ? VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT :
3990 i == 1 ? VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT :
3991 VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT
3993 VkSubresourceLayout
layout = { 0 };
3994 vk->GetImageSubresourceLayout(hwctx->
act_dev, dst_f->
img[
FFMIN(
i, nb_images - 1)],
4001 CUDA_EXTERNAL_MEMORY_MIPMAPPED_ARRAY_DESC tex_desc = {
4006 .NumChannels = 1 + ((
planes == 2) &&
i),
4014 tex_desc.arrayDesc.Width = p_w;
4015 tex_desc.arrayDesc.Height = p_h;
4017 ret =
CHECK_CU(cu->cuExternalMemoryGetMappedMipmappedArray(&dst_int->cu_mma[
i],
4018 dst_int->ext_mem[
FFMIN(
i, nb_images - 1)],
4025 ret =
CHECK_CU(cu->cuMipmappedArrayGetLevel(&dst_int->cu_array[
i],
4026 dst_int->cu_mma[
i], 0));
4058 CudaFunctions *cu = cu_internal->
cuda_dl;
4068 err =
CHECK_CU(cu->cuCtxPushCurrent(cuda_dev->cuda_ctx));
4072 err = vulkan_export_to_cuda(hwfc,
src->hw_frames_ctx,
dst);
4081 s_w_par[
i].params.fence.value = dst_f->
sem_value[
i] + 0;
4082 s_s_par[
i].params.fence.value = dst_f->
sem_value[
i] + 1;
4085 err =
CHECK_CU(cu->cuWaitExternalSemaphoresAsync(dst_int->cu_sem, s_w_par,
4086 planes, cuda_dev->stream));
4091 CUDA_MEMCPY2D cpy = {
4092 .srcMemoryType = CU_MEMORYTYPE_DEVICE,
4093 .srcDevice = (CUdeviceptr)
src->data[
i],
4094 .srcPitch =
src->linesize[
i],
4097 .dstMemoryType = CU_MEMORYTYPE_ARRAY,
4098 .dstArray = dst_int->cu_array[
i],
4104 cpy.WidthInBytes = p_w *
desc->comp[
i].step;
4107 err =
CHECK_CU(cu->cuMemcpy2DAsync(&cpy, cuda_dev->stream));
4112 err =
CHECK_CU(cu->cuSignalExternalSemaphoresAsync(dst_int->cu_sem, s_s_par,
4113 planes, cuda_dev->stream));
4139 switch (
src->format) {
4144 return vulkan_map_from_vaapi(hwfc,
dst,
src,
flags);
4150 return vulkan_map_from_drm(hwfc,
dst,
src,
flags);
4160 typedef struct VulkanDRMMapping {
4179 static inline uint32_t vulkan_fmt_to_drm(
VkFormat vkfmt)
4182 if (vulkan_drm_format_map[
i].vk_format == vkfmt)
4183 return vulkan_drm_format_map[
i].drm_fourcc;
4184 return DRM_FORMAT_INVALID;
/* Upper bound on DRM memory planes handled per image; matches the four
 * VK_IMAGE_ASPECT_MEMORY_PLANE_{0..3}_BIT_EXT aspects used below. */
#define MAX_MEMORY_PLANES 4
4188 static VkImageAspectFlags plane_index_to_aspect(
int plane) {
4189 if (plane == 0)
return VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT;
4190 if (plane == 1)
return VK_IMAGE_ASPECT_MEMORY_PLANE_1_BIT_EXT;
4191 if (plane == 2)
return VK_IMAGE_ASPECT_MEMORY_PLANE_2_BIT_EXT;
4192 if (plane == 3)
return VK_IMAGE_ASPECT_MEMORY_PLANE_3_BIT_EXT;
4195 return VK_IMAGE_ASPECT_MEMORY_PLANE_0_BIT_EXT;
4198 #ifdef DMA_BUF_IOCTL_EXPORT_SYNC_FILE
4208 if (
f->internal->drm_sync_sem == VK_NULL_HANDLE) {
4209 VkExportSemaphoreCreateInfo exp_info = {
4210 .sType = VK_STRUCTURE_TYPE_EXPORT_SEMAPHORE_CREATE_INFO,
4211 .handleTypes = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
4213 VkSemaphoreTypeCreateInfo type_info = {
4214 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_TYPE_CREATE_INFO,
4216 .semaphoreType = VK_SEMAPHORE_TYPE_BINARY,
4218 VkSemaphoreCreateInfo sem_create = {
4219 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
4220 .pNext = &type_info,
4223 &
f->internal->drm_sync_sem);
4224 if (
ret != VK_SUCCESS) {
4236 for (
int i = 0;
i < nb_sems;
i++)
4239 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT);
4241 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT, 0);
4243 VkSemaphoreGetFdInfoKHR get_fd_info = {
4244 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_GET_FD_INFO_KHR,
4245 .semaphore =
f->internal->drm_sync_sem,
4246 .handleType = VK_EXTERNAL_SEMAPHORE_HANDLE_TYPE_SYNC_FD_BIT,
4248 ret = vk->GetSemaphoreFdKHR(hwctx->
act_dev, &get_fd_info, &sync_fd);
4249 if (
ret != VK_SUCCESS) {
4251 "Failed to get sync fd from DRM map export semaphore: %s\n",
4276 VkImageDrmFormatModifierPropertiesEXT drm_mod = {
4277 .sType = VK_STRUCTURE_TYPE_IMAGE_DRM_FORMAT_MODIFIER_PROPERTIES_EXT,
4279 const int nb_sems = nb_images;
4280 int free_drm_desc_on_err = 1;
4291 #ifdef DMA_BUF_IOCTL_EXPORT_SYNC_FILE
4293 f->tiling == VK_IMAGE_TILING_DRM_FORMAT_MODIFIER_EXT &&
4294 vk->GetSemaphoreFdKHR && vk->CreateSemaphore) {
4295 err = vulkan_drm_export_sync_fd(hwfc,
f, fp, nb_sems);
4304 VkSemaphoreWaitInfo wait_info = {
4305 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
4307 .semaphoreCount = nb_sems,
4308 .pSemaphores =
f->sem,
4309 .pValues =
f->sem_value,
4311 vk->WaitSemaphores(hwctx->
act_dev, &wait_info, UINT64_MAX);
4319 free_drm_desc_on_err = 0;
4321 ret = vk->GetImageDrmFormatModifierPropertiesEXT(hwctx->
act_dev,
f->img[0],
4323 if (
ret != VK_SUCCESS) {
4329 for (
int i = 0; (
i <
planes) && (
f->mem[
i]);
i++) {
4330 VkMemoryGetFdInfoKHR export_info = {
4331 .sType = VK_STRUCTURE_TYPE_MEMORY_GET_FD_INFO_KHR,
4332 .memory =
f->mem[
i],
4333 .handleType = VK_EXTERNAL_MEMORY_HANDLE_TYPE_DMA_BUF_BIT_EXT,
4336 ret = vk->GetMemoryFdKHR(hwctx->
act_dev, &export_info,
4338 if (
ret != VK_SUCCESS) {
4344 #if HAVE_LINUX_DMA_BUF_H && defined(DMA_BUF_IOCTL_IMPORT_SYNC_FILE)
4346 int dup_fd = dup(sync_fd);
4348 struct dma_buf_import_sync_file import_info = {
4349 .flags = DMA_BUF_SYNC_WRITE,
4352 if (ioctl(drm_desc->
objects[
i].
fd, DMA_BUF_IOCTL_IMPORT_SYNC_FILE, &import_info) < 0)
4371 drm_desc->
layers[
i].
format = vulkan_fmt_to_drm(plane_vkfmt);
4381 VkSubresourceLayout
layout;
4382 int aspect_plane = (nb_images == 1) ?
i : j;
4383 VkImageSubresource sub = {
4384 .aspectMask = plane_index_to_aspect(aspect_plane),
4401 if (
f->tiling == VK_IMAGE_TILING_OPTIMAL)
4407 dst->height =
src->height;
4408 dst->data[0] = (uint8_t *)drm_desc;
4421 if (free_drm_desc_on_err)
4461 switch (
dst->format) {
4471 return vulkan_map_to_vaapi(hwfc,
dst,
src,
flags);
4483 AVFrame *swf, VkBufferImageCopy *region,
4493 region[
i].bufferRowLength,
4497 region[
i].imageExtent.height);
4500 if (err != VK_SUCCESS) {
4507 if (err != VK_SUCCESS) {
4517 region[
i].bufferRowLength,
4519 region[
i].imageExtent.height);
4526 AVFrame *swf, VkBufferImageCopy *region,
int upload)
4533 VkBufferUsageFlags buf_usage = upload ? VK_BUFFER_USAGE_TRANSFER_SRC_BIT :
4534 VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4536 size_t buf_offset = 0;
4540 region[
i] = (VkBufferImageCopy) {
4541 .bufferOffset = buf_offset,
4543 p->props.properties.limits.optimalBufferCopyRowPitchAlignment),
4544 .bufferImageHeight = p_h,
4545 .imageSubresource.layerCount = 1,
4546 .imageExtent = (VkExtent3D){ p_w, p_h, 1 },
4550 buf_offset +=
FFALIGN(p_h*region[
i].bufferRowLength,
4551 p->props.properties.limits.optimalBufferCopyOffsetAlignment);
4556 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
4557 p->vkctx.host_cached_flag);
4565 AVFrame *swf, VkBufferImageCopy *region,
int upload)
4572 VkBufferUsageFlags buf_usage = upload ? VK_BUFFER_USAGE_TRANSFER_SRC_BIT :
4573 VK_BUFFER_USAGE_TRANSFER_DST_BIT;
4582 while (swf->
buf[nb_src_bufs])
4586 if (nb_src_bufs == 1) {
4597 }
else if (nb_src_bufs ==
planes) {
4616 for (
int i = 0;
i < (*nb_bufs);
i++)
4637 int nb_layout_ch = 0;
4641 for (
int i = 0;
i < nb_images;
i++) {
4643 for (
int j = 0; j <
p->vkctx.host_image_props.copySrcLayoutCount; j++) {
4644 if (hwf_vk->
layout[
i] ==
p->vkctx.host_image_props.pCopySrcLayouts[j]) {
4652 layout_ch_info[nb_layout_ch] = (VkHostImageLayoutTransitionInfoEXT) {
4653 .sType = VK_STRUCTURE_TYPE_HOST_IMAGE_LAYOUT_TRANSITION_INFO_EXT,
4654 .image = hwf_vk->
img[
i],
4655 .oldLayout = hwf_vk->
layout[
i],
4656 .newLayout = VK_IMAGE_LAYOUT_GENERAL,
4657 .subresourceRange = {
4658 .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
4664 hwf_vk->
layout[
i] = layout_ch_info[nb_layout_ch].newLayout;
4669 .sType = VK_STRUCTURE_TYPE_SEMAPHORE_WAIT_INFO,
4670 .pSemaphores = hwf_vk->
sem,
4672 .semaphoreCount = nb_images,
4678 vk->TransitionImageLayoutEXT(hwctx->
act_dev,
4679 nb_layout_ch, layout_ch_info);
4682 VkMemoryToImageCopyEXT region_info = {
4683 .sType = VK_STRUCTURE_TYPE_MEMORY_TO_IMAGE_COPY_EXT,
4684 .imageSubresource = {
4688 VkCopyMemoryToImageInfoEXT copy_info = {
4689 .sType = VK_STRUCTURE_TYPE_COPY_MEMORY_TO_IMAGE_INFO_EXT,
4691 .pRegions = ®ion_info,
4694 int img_idx =
FFMIN(
i, (nb_images - 1));
4698 region_info.pHostPointer = swf->
data[
i];
4699 region_info.memoryRowLength = swf->
linesize[
i] /
desc->comp[
i].step;
4701 region_info.imageExtent = (VkExtent3D){ p_w, p_h, 1 };
4702 copy_info.dstImage = hwf_vk->
img[img_idx];
4703 copy_info.dstImageLayout = hwf_vk->
layout[img_idx];
4705 vk->CopyMemoryToImageEXT(hwctx->
act_dev, ©_info);
4708 VkImageToMemoryCopyEXT region_info = {
4709 .sType = VK_STRUCTURE_TYPE_IMAGE_TO_MEMORY_COPY_EXT,
4710 .imageSubresource = {
4714 VkCopyImageToMemoryInfoEXT copy_info = {
4715 .sType = VK_STRUCTURE_TYPE_COPY_IMAGE_TO_MEMORY_INFO_EXT,
4717 .pRegions = ®ion_info,
4720 int img_idx =
FFMIN(
i, (nb_images - 1));
4724 region_info.pHostPointer = swf->
data[
i];
4725 region_info.memoryRowLength = swf->
linesize[
i] /
desc->comp[
i].step;
4727 region_info.imageExtent = (VkExtent3D){ p_w, p_h, 1 };
4728 copy_info.srcImage = hwf_vk->
img[img_idx];
4729 copy_info.srcImageLayout = hwf_vk->
layout[img_idx];
4731 vk->CopyImageToMemoryEXT(hwctx->
act_dev, ©_info);
4750 int host_mapped = 0;
4765 VkCommandBuffer cmd_buf;
4777 if (hwctx->
usage & VK_IMAGE_USAGE_HOST_TRANSFER_BIT_EXT &&
4778 !(
p->dprops.driverID == VK_DRIVER_ID_NVIDIA_PROPRIETARY))
4786 region[
i] = (VkBufferImageCopy) {
4789 .bufferImageHeight = p_h,
4790 .imageSubresource.layerCount = 1,
4791 .imageExtent = (VkExtent3D){ p_w, p_h, 1 },
4817 cmd_buf = exec->
buf;
4823 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT,
4824 VK_PIPELINE_STAGE_2_TRANSFER_BIT);
4848 VK_PIPELINE_STAGE_2_ALL_COMMANDS_BIT,
4849 VK_PIPELINE_STAGE_2_TRANSFER_BIT_KHR,
4850 upload ? VK_ACCESS_TRANSFER_WRITE_BIT :
4851 VK_ACCESS_TRANSFER_READ_BIT,
4852 upload ? VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL :
4853 VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
4854 p->nb_img_qfs > 1 ? VK_QUEUE_FAMILY_IGNORED :
p->img_qfs[0]);
4856 vk->CmdPipelineBarrier2(cmd_buf, &(VkDependencyInfo) {
4857 .sType = VK_STRUCTURE_TYPE_DEPENDENCY_INFO,
4858 .pImageMemoryBarriers = img_bar,
4859 .imageMemoryBarrierCount = nb_img_bar,
4863 int buf_idx =
FFMIN(
i, (nb_bufs - 1));
4864 int img_idx =
FFMIN(
i, (nb_images - 1));
4867 uint32_t orig_stride = region[
i].bufferRowLength;
4868 region[
i].bufferRowLength /=
desc->comp[
i].step;
4872 vk->CmdCopyBufferToImage(cmd_buf, vkbuf->
buf,
4873 hwf_vk->
img[img_idx],
4874 img_bar[img_idx].newLayout,
4877 vk->CmdCopyImageToBuffer(cmd_buf, hwf_vk->
img[img_idx],
4878 img_bar[img_idx].newLayout,
4882 region[
i].bufferRowLength = orig_stride;
4888 }
else if (!upload) {
4895 for (
int i = 0;
i < nb_bufs;
i++)
4906 switch (
src->format) {
4916 return vulkan_transfer_data_from_cuda(hwfc,
dst,
src);
4919 if (
src->hw_frames_ctx)
4944 CudaFunctions *cu = cu_internal->
cuda_dl;
4955 err =
CHECK_CU(cu->cuCtxPushCurrent(cuda_dev->cuda_ctx));
4959 err = vulkan_export_to_cuda(hwfc,
dst->hw_frames_ctx,
src);
4968 s_w_par[
i].params.fence.value = dst_f->
sem_value[
i] + 0;
4969 s_s_par[
i].params.fence.value = dst_f->
sem_value[
i] + 1;
4972 err =
CHECK_CU(cu->cuWaitExternalSemaphoresAsync(dst_int->cu_sem, s_w_par,
4973 nb_images, cuda_dev->stream));
4978 CUDA_MEMCPY2D cpy = {
4979 .dstMemoryType = CU_MEMORYTYPE_DEVICE,
4980 .dstDevice = (CUdeviceptr)
dst->data[
i],
4981 .dstPitch =
dst->linesize[
i],
4984 .srcMemoryType = CU_MEMORYTYPE_ARRAY,
4985 .srcArray = dst_int->cu_array[
i],
4991 cpy.WidthInBytes =
w *
desc->comp[
i].step;
4994 err =
CHECK_CU(cu->cuMemcpy2DAsync(&cpy, cuda_dev->stream));
4999 err =
CHECK_CU(cu->cuSignalExternalSemaphoresAsync(dst_int->cu_sem, s_s_par,
5000 nb_images, cuda_dev->stream));
5026 switch (
dst->format) {
5036 return vulkan_transfer_data_to_cuda(hwfc,
dst,
src);
5039 if (
dst->hw_frames_ctx)