From fa846b87b0ce62372f1b6ae270677a841a4e541f Mon Sep 17 00:00:00 2001
From: ened
Date: Thu, 16 Oct 2025 02:07:59 +0900
Subject: [PATCH] WIP

---
 .../app/src/main/cpp/vulkan_renderer.cpp     | 50 +++++++++++++++++--
 .../src/Decoder/MediaCodecSurfaceManager.cpp | 26 ++++++----
 2 files changed, 62 insertions(+), 14 deletions(-)

diff --git a/vav2/platforms/android/applications/vav2player/app/src/main/cpp/vulkan_renderer.cpp b/vav2/platforms/android/applications/vav2player/app/src/main/cpp/vulkan_renderer.cpp
index 26d0bf7..0f0d17e 100644
--- a/vav2/platforms/android/applications/vav2player/app/src/main/cpp/vulkan_renderer.cpp
+++ b/vav2/platforms/android/applications/vav2player/app/src/main/cpp/vulkan_renderer.cpp
@@ -2547,11 +2547,48 @@ bool VulkanVideoRenderer::RenderVulkanImage(VkImage sourceImage, VkSamplerYcbcrC
 
     LOGI("Created YCbCr ImageView successfully (ImageView=%p)", (void*)ycbcrImageView);
 
-    // Update descriptor set to bind single YCbCr image with combined sampler
+    // CRITICAL FIX: Create YCbCr-aware sampler with matching conversion
+    // According to Vulkan spec, BOTH ImageView AND Sampler must have the same YCbCr conversion attached
+    VkSampler ycbcrSampler = VK_NULL_HANDLE;
+
+    VkSamplerYcbcrConversionInfo samplerYcbcrInfo = {};
+    samplerYcbcrInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO;
+    samplerYcbcrInfo.pNext = nullptr;
+    samplerYcbcrInfo.conversion = ycbcrConversion;
+
+    VkSamplerCreateInfo samplerInfo = {};
+    samplerInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
+    samplerInfo.pNext = &samplerYcbcrInfo;  // Chain YCbCr conversion to sampler
+    samplerInfo.magFilter = VK_FILTER_LINEAR;
+    samplerInfo.minFilter = VK_FILTER_LINEAR;
+    samplerInfo.addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
+    samplerInfo.addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
+    samplerInfo.addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
+    samplerInfo.anisotropyEnable = VK_FALSE;
+    samplerInfo.maxAnisotropy = 1.0f;
+    samplerInfo.borderColor = VK_BORDER_COLOR_INT_OPAQUE_BLACK;
+    samplerInfo.unnormalizedCoordinates = VK_FALSE;
+    samplerInfo.compareEnable = VK_FALSE;
+    samplerInfo.compareOp = VK_COMPARE_OP_ALWAYS;
+    samplerInfo.mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR;
+    samplerInfo.mipLodBias = 0.0f;
+    samplerInfo.minLod = 0.0f;
+    samplerInfo.maxLod = 0.0f;
+
+    result = vkCreateSampler(m_device, &samplerInfo, nullptr, &ycbcrSampler);
+    if (result != VK_SUCCESS) {
+        LOGE("Failed to create YCbCr sampler: %d", result);
+        vkDestroyImageView(m_device, ycbcrImageView, nullptr);
+        return false;
+    }
+
+    LOGI("Created YCbCr Sampler successfully (Sampler=%p) with matching conversion", (void*)ycbcrSampler);
+
+    // Update descriptor set to bind single YCbCr image with YCbCr-aware sampler
     VkDescriptorImageInfo imageInfo = {};
     imageInfo.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
     imageInfo.imageView = ycbcrImageView;
-    imageInfo.sampler = m_textureSampler;
+    imageInfo.sampler = ycbcrSampler;  // Use YCbCr-aware sampler instead of generic m_textureSampler
 
     VkWriteDescriptorSet descriptorWrite = {};
     descriptorWrite.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
@@ -2572,6 +2609,7 @@ bool VulkanVideoRenderer::RenderVulkanImage(VkImage sourceImage, VkSamplerYcbcrC
     // Record and submit command buffer (uses existing pipeline)
     if (!RecordCommandBuffer(imageIndex)) {
         LOGE("Failed to record command buffer");
+        vkDestroySampler(m_device, ycbcrSampler, nullptr);
         vkDestroyImageView(m_device, ycbcrImageView, nullptr);
         return false;
     }
@@ -2579,6 +2617,7 @@ bool VulkanVideoRenderer::RenderVulkanImage(VkImage sourceImage, VkSamplerYcbcrC
     // End frame and present
     if (!EndFrame(imageIndex)) {
         LOGE("Failed to end frame");
+        vkDestroySampler(m_device, ycbcrSampler, nullptr);
         vkDestroyImageView(m_device, ycbcrImageView, nullptr);
         return false;
     }
@@ -2586,10 +2625,11 @@ bool VulkanVideoRenderer::RenderVulkanImage(VkImage sourceImage, VkSamplerYcbcrC
     // Update performance metrics
     UpdatePerformanceMetrics();
 
-    // Clean up YCbCr ImageView after frame is submitted and fence is signaled
-    // The ImageView is now safe to destroy since EndFrame() waits for submission
+    // Clean up YCbCr Sampler and ImageView after frame is submitted and fence is signaled
+    // Both are now safe to destroy since EndFrame() waits for submission
+    vkDestroySampler(m_device, ycbcrSampler, nullptr);
     vkDestroyImageView(m_device, ycbcrImageView, nullptr);
-    LOGI("YCbCr ImageView destroyed after frame submission");
+    LOGI("YCbCr Sampler and ImageView destroyed after frame submission");
 
     LOGI("RenderVulkanImage completed successfully");
     return true;
diff --git a/vav2/platforms/windows/vavcore/src/Decoder/MediaCodecSurfaceManager.cpp b/vav2/platforms/windows/vavcore/src/Decoder/MediaCodecSurfaceManager.cpp
index 3159ffa..125c092 100644
--- a/vav2/platforms/windows/vavcore/src/Decoder/MediaCodecSurfaceManager.cpp
+++ b/vav2/platforms/windows/vavcore/src/Decoder/MediaCodecSurfaceManager.cpp
@@ -346,19 +346,27 @@ bool MediaCodecSurfaceManager::CreateVulkanImage(void* vk_device, void* vk_insta
 
     ycbcrConversionCreateInfo.format = vulkan_format;
 
-    // Force BT.601 color space (most video content uses BT.601, not BT.709)
-    // MediaCodec often suggests BT.709 which causes incorrect colors
-    ycbcrConversionCreateInfo.ycbcrModel = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601;  // BT.601
-    ycbcrConversionCreateInfo.ycbcrRange = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW;  // Limited range (16-235)
+    // Use MediaCodec suggested YCbCr conversion settings
+    // MediaCodec provides optimal color space settings based on video metadata
+    ycbcrConversionCreateInfo.ycbcrModel = static_cast<VkSamplerYcbcrModelConversion>(ahb_format_props.suggestedYcbcrModel);
+    ycbcrConversionCreateInfo.ycbcrRange = static_cast<VkSamplerYcbcrRange>(ahb_format_props.suggestedYcbcrRange);
 
-    // Log suggested vs actual
+    // Log color space settings
     LogInfo("YCbCr conversion:");
-    LogInfo("  Suggested model: " + std::to_string(ahb_format_props.suggestedYcbcrModel) + " (ignoring - using BT.601)");
-    LogInfo("  Suggested range: " + std::to_string(ahb_format_props.suggestedYcbcrRange));
-    LogInfo("  Using model: BT.601 (1)");
-    LogInfo("  Using range: ITU_NARROW (1)");
+    LogInfo("  MediaCodec suggested model: " + std::to_string(ahb_format_props.suggestedYcbcrModel));
+    LogInfo("  MediaCodec suggested range: " + std::to_string(ahb_format_props.suggestedYcbcrRange));
+    LogInfo("  Using MediaCodec suggested color space settings");
+    // Log component mapping from MediaCodec
+    LogInfo("  MediaCodec suggested components:");
+    LogInfo("    r: " + std::to_string(ahb_format_props.samplerYcbcrConversionComponents.r));
+    LogInfo("    g: " + std::to_string(ahb_format_props.samplerYcbcrConversionComponents.g));
+    LogInfo("    b: " + std::to_string(ahb_format_props.samplerYcbcrConversionComponents.b));
+    LogInfo("    a: " + std::to_string(ahb_format_props.samplerYcbcrConversionComponents.a));
+
+    // Use MediaCodec suggested component mapping
 
     ycbcrConversionCreateInfo.components = ahb_format_props.samplerYcbcrConversionComponents;
+    LogInfo("  Using MediaCodec suggested component mapping");
     ycbcrConversionCreateInfo.xChromaOffset = ahb_format_props.suggestedXChromaOffset;
     ycbcrConversionCreateInfo.yChromaOffset = ahb_format_props.suggestedYChromaOffset;
     ycbcrConversionCreateInfo.chromaFilter = VK_FILTER_LINEAR;
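
The renderer change above hinges on one rule: the VkImageView and the VkSampler that feed a combined image sampler must reference the same VkSamplerYcbcrConversion through a VkSamplerYcbcrConversionInfo in their pNext chains. A minimal standalone sketch of that pairing follows; the helper name CreateYcbcrPair, and the assumption that device, image, format and conversion are valid handles created elsewhere, are illustrative and not taken from vulkan_renderer.cpp.

// Sketch only: build a matched ImageView + Sampler pair sharing one YCbCr conversion.
#include <vulkan/vulkan.h>

struct YcbcrPair {
    VkImageView view = VK_NULL_HANDLE;
    VkSampler sampler = VK_NULL_HANDLE;
};

static bool CreateYcbcrPair(VkDevice device, VkImage image, VkFormat format,
                            VkSamplerYcbcrConversion conversion, YcbcrPair* out) {
    // The same conversion handle is chained into BOTH create-info structs.
    VkSamplerYcbcrConversionInfo convInfo{};
    convInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_YCBCR_CONVERSION_INFO;
    convInfo.conversion = conversion;

    VkImageViewCreateInfo viewInfo{};
    viewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
    viewInfo.pNext = &convInfo;                 // conversion on the view
    viewInfo.image = image;
    viewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
    viewInfo.format = format;                   // must match the conversion's format
    viewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
    viewInfo.subresourceRange.levelCount = 1;
    viewInfo.subresourceRange.layerCount = 1;
    if (vkCreateImageView(device, &viewInfo, nullptr, &out->view) != VK_SUCCESS)
        return false;

    VkSamplerCreateInfo samplerInfo{};
    samplerInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
    samplerInfo.pNext = &convInfo;              // identical conversion on the sampler
    samplerInfo.magFilter = VK_FILTER_LINEAR;   // min/mag should match the conversion's chromaFilter
    samplerInfo.minFilter = VK_FILTER_LINEAR;
    samplerInfo.addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
    samplerInfo.addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
    samplerInfo.addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
    samplerInfo.anisotropyEnable = VK_FALSE;    // must be VK_FALSE with a YCbCr conversion
    samplerInfo.compareEnable = VK_FALSE;
    samplerInfo.unnormalizedCoordinates = VK_FALSE;
    if (vkCreateSampler(device, &samplerInfo, nullptr, &out->sampler) != VK_SUCCESS) {
        vkDestroyImageView(device, out->view, nullptr);
        out->view = VK_NULL_HANDLE;
        return false;
    }
    return true;
}

Since the conversion itself does not change from frame to frame, a pair built this way could also be cached and reused rather than re-created and destroyed inside every RenderVulkanImage call, which may be worth considering as a follow-up to this WIP.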