// video-v1/vav2/platforms/windows/applications/vav2player/Vav2Player/VideoPlayerControl.xaml.cpp
#include "pch.h"
#include "VideoPlayerControl.xaml.h"
#if __has_include("VideoPlayerControl.g.cpp")
#include "VideoPlayerControl.g.cpp"
#endif
// Note: VideoTypes.h not included due to VavCore migration guard
#include <winrt/Microsoft.UI.Dispatching.h>
#include <winrt/Windows.Storage.Streams.h>
#include <winrt/Windows.Storage.h>
#include <windows.storage.streams.h>
#include <robuffer.h> // ::Windows::Storage::Streams::IBufferByteAccess for direct pixel-buffer access
#include <chrono>
#include <algorithm>
#include <cstring>
#include <cassert>
// Include log manager for logging
#include "src/Logger/LogManager.h"
// Using alias to avoid namespace conflicts
using LogMgr = Vav2Player::LogManager;
using namespace winrt;
using namespace winrt::Microsoft::UI::Xaml;
using namespace winrt::Microsoft::UI::Xaml::Controls;
using namespace winrt::Microsoft::UI::Dispatching;
namespace winrt::Vav2Player::implementation
{
VideoPlayerControl::VideoPlayerControl()
: m_useHardwareRendering(true) // Default to GPU rendering
, m_vavCorePlayer(nullptr)
, m_memoryPool(std::make_unique<MemoryPool>())
, m_performanceMonitor(std::make_unique<AdvancedPerformanceMonitor>())
{
InitializeComponent();
// Load decoder settings from Windows.Storage.ApplicationData
LoadDecoderSettings();
// Initialize VavCore library (only once)
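// Note: this check-then-set guard is not itself thread-safe; it assumes all
// VideoPlayerControl instances are constructed on the UI thread.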
static bool vavCoreInitialized = false;
if (!vavCoreInitialized) {
VavCoreResult result = vavcore_initialize();
vavCoreInitialized = (result == VAVCORE_SUCCESS);
}
// Create VavCore player
m_vavCorePlayer = vavcore_create_player();
}
VideoPlayerControl::~VideoPlayerControl()
{
// Stop all playback immediately
m_isPlaying = false;
m_shouldStopTiming = true;
// Clean up VavCore player
if (m_vavCorePlayer) {
vavcore_destroy_player(m_vavCorePlayer);
m_vavCorePlayer = nullptr;
}
// Clean up GPU renderer
if (m_gpuRenderer) {
m_gpuRenderer->Shutdown();
m_gpuRenderer.reset();
}
// Clean up timing thread
if (m_timingThread && m_timingThread->joinable()) {
m_timingThread->join();
m_timingThread.reset();
}
}
// Event Handlers
void VideoPlayerControl::UserControl_Loaded(winrt::Windows::Foundation::IInspectable const&, winrt::Microsoft::UI::Xaml::RoutedEventArgs const&)
{
try
{
m_isInitialized = true;
UpdateStatus(L"Ready");
// Auto load video if source is set
if (!m_videoSource.empty())
{
LoadVideo(m_videoSource);
}
// Setup container size change handler for AspectFit updates
VideoDisplayArea().SizeChanged([this](auto&&, auto&&) {
ApplyAspectFitIfReady();
});
// Ready for user interaction
}
catch (...)
{
UpdateStatus(L"Error during initialization");
}
}
void VideoPlayerControl::UserControl_Unloaded(winrt::Windows::Foundation::IInspectable const&, winrt::Microsoft::UI::Xaml::RoutedEventArgs const&)
{
try
{
// Stop all playback immediately (avoid seeking to prevent deadlock)
m_isPlaying = false;
m_isLoaded = false;
m_isInitialized = false;
// Stop timing thread safely
m_shouldStopTiming = true;
if (m_timingThread && m_timingThread->joinable()) {
m_timingThread->join();
m_timingThread.reset();
}
// Stop UI timer
if (m_playbackTimer)
{
m_playbackTimer.Stop();
m_playbackTimer = nullptr;
}
// GPU renderer cleanup
if (m_gpuRenderer)
{
m_gpuRenderer->Shutdown();
m_gpuRenderer.reset();
}
// Clean up VavCore player (this will handle internal cleanup safely)
if (m_vavCorePlayer) {
vavcore_destroy_player(m_vavCorePlayer);
m_vavCorePlayer = nullptr;
}
m_renderBitmap = nullptr;
UpdateStatus(L"Unloaded");
}
catch (...)
{
// Ignore cleanup errors during unload
}
}
void VideoPlayerControl::UserControl_SizeChanged(winrt::Windows::Foundation::IInspectable const&, winrt::Microsoft::UI::Xaml::SizeChangedEventArgs const& e)
{
// Recalculate AspectFit when container size changes
if (m_hasValidVideoSize && m_videoWidth > 0 && m_videoHeight > 0) {
UpdateVideoImageAspectFit(m_videoWidth, m_videoHeight);
}
// Retry GPU rendering initialization if user prefers hardware rendering
// but we're currently using CPU rendering due to previous container size issues
if (m_useHardwareRendering && m_isLoaded) {
auto container = VideoDisplayArea();
if (container) {
double containerWidth = container.ActualWidth();
double containerHeight = container.ActualHeight();
// If container size is now valid and we're not showing GPU panel, retry GPU init
if (containerWidth > 0 && containerHeight > 0 &&
VideoSwapChainPanel().Visibility() == winrt::Microsoft::UI::Xaml::Visibility::Collapsed) {
InitializeVideoRenderer();
}
}
}
}
void VideoPlayerControl::HoverDetector_PointerEntered(winrt::Windows::Foundation::IInspectable const&, winrt::Microsoft::UI::Xaml::Input::PointerRoutedEventArgs const&)
{
// Controls are disabled for now
}
void VideoPlayerControl::HoverDetector_PointerExited(winrt::Windows::Foundation::IInspectable const&, winrt::Microsoft::UI::Xaml::Input::PointerRoutedEventArgs const&)
{
// Controls are disabled for now
}
// Public Properties
winrt::hstring VideoPlayerControl::VideoSource()
{
return m_videoSource;
}
void VideoPlayerControl::VideoSource(winrt::hstring const& value)
{
if (m_videoSource != value)
{
m_videoSource = value;
if (m_isInitialized && !value.empty())
{
LoadVideo(value);
}
}
}
bool VideoPlayerControl::ShowControls()
{
return m_showControls;
}
void VideoPlayerControl::ShowControls(bool value)
{
m_showControls = value;
if (m_isInitialized)
{
// Update controls visibility based on value and loaded state
}
}
bool VideoPlayerControl::AutoPlay()
{
return m_autoPlay;
}
void VideoPlayerControl::AutoPlay(bool value)
{
m_autoPlay = value;
}
Vav2Player::VideoDecoderType VideoPlayerControl::DecoderType()
{
switch (m_decoderType)
{
case VAVCORE_DECODER_AUTO:
return Vav2Player::VideoDecoderType::Auto;
case VAVCORE_DECODER_DAV1D:
return Vav2Player::VideoDecoderType::Software;
case VAVCORE_DECODER_NVDEC:
return Vav2Player::VideoDecoderType::Software; // Temporarily map to Software
case VAVCORE_DECODER_MEDIA_FOUNDATION:
return Vav2Player::VideoDecoderType::HardwareMF;
default:
return Vav2Player::VideoDecoderType::Auto;
}
}
void VideoPlayerControl::DecoderType(Vav2Player::VideoDecoderType value)
{
VavCoreDecoderType newType;
switch (value)
{
case Vav2Player::VideoDecoderType::Auto:
newType = VAVCORE_DECODER_AUTO;
break;
case Vav2Player::VideoDecoderType::Software:
newType = VAVCORE_DECODER_DAV1D;
break;
// case Vav2Player::VideoDecoderType::HardwareNV:
// newType = VAVCORE_DECODER_NVDEC;
// break;
case Vav2Player::VideoDecoderType::HardwareMF:
newType = VAVCORE_DECODER_MEDIA_FOUNDATION;
break;
default:
newType = VAVCORE_DECODER_AUTO;
break;
}
SetInternalDecoderType(newType);
}
bool VideoPlayerControl::UseHardwareRendering()
{
return m_useHardwareRendering;
}
void VideoPlayerControl::UseHardwareRendering(bool value)
{
if (m_useHardwareRendering != value)
{
m_useHardwareRendering = value;
// Reinitialize renderer if video is already loaded
if (m_isLoaded && m_vavCorePlayer)
{
InitializeVideoRenderer();
}
else
{
// Just switch visibility for now
if (value)
{
VideoSwapChainPanel().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Visible);
VideoImage().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Collapsed);
}
else
{
VideoSwapChainPanel().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Collapsed);
VideoImage().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Visible);
}
}
}
}
VavCoreDecoderType VideoPlayerControl::GetInternalDecoderType()
{
return m_decoderType;
}
void VideoPlayerControl::SetInternalDecoderType(VavCoreDecoderType value)
{
if (m_decoderType != value)
{
m_decoderType = value;
// Update VavCore decoder type if player is active
if (m_isLoaded && m_vavCorePlayer)
{
vavcore_set_decoder_type(m_vavCorePlayer, value);
}
}
}
// Public Methods
void VideoPlayerControl::LoadVideo(winrt::hstring const& filePath)
{
std::string filePathStr = winrt::to_string(filePath);
UpdateStatus(L"Loading video...");
LoadingRing().IsActive(true);
// Log video load attempt
LogMgr::GetInstance().LogInfo(L"Attempting to load video: " + std::wstring(filePath), L"VideoPlayerControl");
// Reset video state
ResetVideoState();
if (!m_vavCorePlayer) {
UpdateStatus(L"VavCore player not initialized");
LoadingRing().IsActive(false);
LogMgr::GetInstance().LogError(L"VavCore player not initialized", L"VideoPlayerControl");
return;
}
// Set decoder type before opening file
vavcore_set_decoder_type(m_vavCorePlayer, m_decoderType);
// Log decoder type selection
std::wstring decoderName = L"Unknown";
switch (m_decoderType) {
case VAVCORE_DECODER_AUTO: decoderName = L"Auto"; break;
case VAVCORE_DECODER_DAV1D: decoderName = L"Software (dav1d)"; break;
case VAVCORE_DECODER_MEDIA_FOUNDATION: decoderName = L"Hardware (Media Foundation)"; break;
case VAVCORE_DECODER_NVDEC: decoderName = L"Hardware (NVDEC)"; break;
case VAVCORE_DECODER_VPL: decoderName = L"Hardware (Intel VPL)"; break;
case VAVCORE_DECODER_AMF: decoderName = L"Hardware (AMD AMF)"; break;
}
LogMgr::GetInstance().LogDecoderInfo(decoderName, L"Decoder type selected");
// Open video file using VavCore API
VavCoreResult result = vavcore_open_file(m_vavCorePlayer, filePathStr.c_str());
if (result != VAVCORE_SUCCESS) {
UpdateStatus(L"Failed to open video file");
LoadingRing().IsActive(false);
LogMgr::GetInstance().LogVideoError(L"Failed to open file", std::wstring(filePath));
return;
}
// Get video metadata from VavCore
VavCoreVideoMetadata metadata;
result = vavcore_get_metadata(m_vavCorePlayer, &metadata);
if (result != VAVCORE_SUCCESS) {
UpdateStatus(L"Failed to get video metadata");
LoadingRing().IsActive(false);
LogMgr::GetInstance().LogVideoError(L"Failed to get metadata", std::wstring(filePath));
return;
}
// Set up video properties
m_videoWidth = metadata.width;
m_videoHeight = metadata.height;
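// Fall back to 30 fps when the metadata reports a non-positive frame rate.
// Duration is derived from the frame count, which assumes a constant frame rate
// (e.g. 900 frames at 30 fps -> 30 s).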
m_frameRate = metadata.frame_rate > 0 ? metadata.frame_rate : 30.0;
m_totalFrames = metadata.total_frames;
m_duration = metadata.total_frames / m_frameRate;
// Initialize D3D surface support if hardware rendering is enabled
if (m_useHardwareRendering) {
InitializeD3DSurfaceSupport();
}
// Log video info
std::wstring videoInfo = L"Resolution: " + std::to_wstring(m_videoWidth) + L"x" + std::to_wstring(m_videoHeight) +
L", FPS: " + std::to_wstring(static_cast<int>(m_frameRate)) +
L", Frames: " + std::to_wstring(m_totalFrames) +
L", Duration: " + std::to_wstring(static_cast<int>(m_duration)) + L"s";
LogMgr::GetInstance().LogInfo(videoInfo, L"VideoPlayerControl");
InitializeVideoRenderer();
m_hasValidVideoSize = true;
m_isLoaded = true;
ApplyAspectFitIfReady();
LoadingRing().IsActive(false);
UpdateStatus(L"Video loaded");
LogMgr::GetInstance().LogVideoLoad(std::wstring(filePath), true);
if (m_autoPlay) {
LogMgr::GetInstance().LogInfo(L"Auto-play enabled, starting playback", L"VideoPlayerControl");
Play();
}
}
void VideoPlayerControl::Play()
{
if (!m_isLoaded || m_isPlaying) {
if (!m_isLoaded) {
LogMgr::GetInstance().LogWarning(L"Cannot play: Video not loaded", L"VideoPlayerControl");
}
return;
}
m_isPlaying = true;
UpdateStatus(L"Playing");
LogMgr::GetInstance().LogVideoPlay(std::wstring(m_videoSource));
// Record playback start time for accurate speed measurement
m_playbackStartTime = std::chrono::high_resolution_clock::now();
// Stop any existing timer/thread
if (m_playbackTimer)
{
m_playbackTimer.Stop();
m_playbackTimer = nullptr;
}
if (m_timingThread && m_timingThread->joinable()) {
m_shouldStopTiming = true;
m_timingThread->join();
m_timingThread.reset();
}
// Start high-resolution timing thread
m_shouldStopTiming = false;
auto weakThis = get_weak();
double targetIntervalMs = 1000.0 / m_frameRate;
m_timingThread = std::make_unique<std::thread>([weakThis, targetIntervalMs]() {
auto start = std::chrono::high_resolution_clock::now();
while (true) {
if (auto strongThis = weakThis.get()) {
if (strongThis->m_shouldStopTiming || !strongThis->m_isPlaying) {
break;
}
// Process frame on UI thread
strongThis->DispatcherQueue().TryEnqueue([strongThis]() {
if (strongThis->m_isPlaying && strongThis->m_isLoaded) {
strongThis->ProcessSingleFrame();
}
});
// High-precision sleep until next frame
auto nextFrame = start + std::chrono::microseconds(
static_cast<long long>(targetIntervalMs * 1000));
std::this_thread::sleep_until(nextFrame);
start = nextFrame;
} else {
break; // Object was destroyed
}
}
});
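// Frame pacing: targetIntervalMs = 1000 / fps (about 33.33 ms at 30 fps).
// Advancing a fixed deadline and using sleep_until keeps long-run timing free of
// the per-iteration drift that sleep_for would accumulate.
// Decode and present the first frame immediately instead of waiting one interval.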
ProcessSingleFrame();
}
void VideoPlayerControl::Pause()
{
m_isPlaying = false;
m_shouldStopTiming = true;
if (m_playbackTimer)
{
m_playbackTimer.Stop();
}
if (m_timingThread && m_timingThread->joinable()) {
m_timingThread->join();
m_timingThread.reset();
}
UpdateStatus(L"Paused");
LogMgr::GetInstance().LogVideoPause(std::wstring(m_videoSource));
}
void VideoPlayerControl::Stop()
{
m_isPlaying = false;
m_shouldStopTiming = true;
// Properly cleanup timer and thread to prevent resource leaks
if (m_playbackTimer)
{
m_playbackTimer.Stop();
m_playbackTimer = nullptr; // Release timer completely
}
if (m_timingThread && m_timingThread->joinable()) {
m_timingThread->join();
m_timingThread.reset();
}
m_currentFrame = 0;
m_currentTime = 0.0;
// Reset VavCore player to beginning for next playback
if (m_vavCorePlayer && m_isLoaded) {
VavCoreResult result = vavcore_reset(m_vavCorePlayer);
if (result != VAVCORE_SUCCESS) {
UpdateStatus(L"Stop - Reset failed");
LogMgr::GetInstance().LogError(L"Failed to reset VavCore player", L"VideoPlayerControl");
} else {
LogMgr::GetInstance().LogInfo(L"VavCore player reset to beginning", L"VideoPlayerControl");
}
}
UpdateStatus(L"Stopped - Ready to play from beginning");
LogMgr::GetInstance().LogVideoStop(std::wstring(m_videoSource));
}
void VideoPlayerControl::ProcessSingleFrame()
{
// Simple validation
if (!m_isPlaying || !m_vavCorePlayer) {
return;
}
// Choose decode path based on D3D surface support
if (m_useD3DSurfaces) {
ProcessSingleFrameWithSurfaces();
return;
}
// Phase 2 Optimization: Start frame timing
m_performanceMonitor->RecordFrameStart();
// Phase 2 Optimization: Start decode timing
m_performanceMonitor->RecordDecodeStart();
// Decode next frame using VavCore
VavCoreVideoFrame vavFrame;
VavCoreResult result = vavcore_decode_next_frame(m_vavCorePlayer, &vavFrame);
// Phase 2 Optimization: End decode timing
m_performanceMonitor->RecordDecodeEnd();
if (result == VAVCORE_END_OF_STREAM) {
// End of video - stop playback
m_isPlaying = false;
if (m_playbackTimer) m_playbackTimer.Stop();
UpdateStatus(L"Playback completed");
LogMgr::GetInstance().LogInfo(L"Playback completed - End of stream reached", L"VideoPlayerControl");
return;
}
if (result != VAVCORE_SUCCESS) {
// Decode error - count but continue processing
m_framesDecodeErrors++;
m_currentFrame++;
m_currentTime = m_currentFrame / m_frameRate;
// Log decode error occasionally
if (m_framesDecodeErrors % 10 == 1) {
LogMgr::GetInstance().LogError(L"Decode error count: " + std::to_wstring(m_framesDecodeErrors), L"VideoPlayerControl");
wchar_t errorMsg[256];
swprintf_s(errorMsg, L"VavCore decode error #%llu at frame %llu", m_framesDecodeErrors, m_currentFrame);
OutputDebugStringW(errorMsg);
OutputDebugStringW(L"\n");
}
return;
}
// Phase 2 Optimization: Start render timing
m_performanceMonitor->RecordRenderStart();
// Render frame
RenderFrameToScreen(vavFrame);
// Phase 2 Optimization: End render timing
m_performanceMonitor->RecordRenderEnd();
// Update counters
m_currentFrame++;
m_currentTime = m_currentFrame / m_frameRate;
// Phase 2 Optimization: End frame timing
m_performanceMonitor->RecordFrameEnd();
// Phase 2 Optimization: Enhanced performance logging every 60 frames
if (m_currentFrame % 60 == 0) {
auto stats = m_performanceMonitor->GetStats();
// Check for adaptive quality adjustment
if (m_performanceMonitor->ShouldReduceQuality()) {
LogMgr::GetInstance().LogDebug(L"VavPlayer: QUALITY REDUCTION triggered - FPS: " +
std::to_wstring(stats.CurrentFPS), L"VideoPlayerControl");
} else if (m_performanceMonitor->ShouldRestoreQuality()) {
LogMgr::GetInstance().LogDebug(L"VavPlayer: QUALITY RESTORATION triggered - FPS: " +
std::to_wstring(stats.CurrentFPS), L"VideoPlayerControl");
}
// Enhanced performance output
std::wstring perfInfo = L"VavPlayer: PERFORMANCE STATS\n" +
std::wstring(L" FPS: ") + std::to_wstring(stats.CurrentFPS).substr(0, 4) +
L" | Decode: " + std::to_wstring(stats.AverageDecodeTime).substr(0, 4) + L"ms" +
L" | Render: " + std::to_wstring(stats.AverageRenderTime).substr(0, 4) + L"ms\n" +
L" Total: " + std::to_wstring(stats.AverageTotalTime).substr(0, 4) + L"ms" +
L" | Quality Reduction: " + (stats.QualityReductionActive ? L"True" : L"False");
LogMgr::GetInstance().LogDebug(perfInfo, L"VideoPlayerControl");
// Memory Pool Statistics
m_memoryPool->PrintStats();
// Also output to debug console for analysis
std::wstring shortStatus = L"Frame " + std::to_wstring(m_currentFrame) +
L" - FPS: " + std::to_wstring(stats.CurrentFPS).substr(0, 4) +
L", Decode: " + std::to_wstring(stats.AverageDecodeTime).substr(0, 4) + L"ms" +
L", Render: " + std::to_wstring(stats.AverageRenderTime).substr(0, 4) + L"ms";
UpdateStatus(shortStatus.c_str());
OutputDebugStringW((shortStatus + L"\n").c_str());
}
}
void VideoPlayerControl::ProcessSingleFrameLegacy()
{
// Legacy method - calls ProcessSingleFrame for compatibility
ProcessSingleFrame();
}
void VideoPlayerControl::RenderFrameToScreen(const VavCoreVideoFrame& frame)
{
// GPU rendering re-enabled for VavCore
// Try GPU rendering first if available and enabled
if (m_gpuRenderer && m_useHardwareRendering) {
// Direct VavCoreVideoFrame usage - no adapter needed
if (m_gpuRenderer->TryRenderFrame(frame)) {
return; // GPU rendering successful
}
// Fall through to CPU rendering if GPU fails
}
// CPU rendering (either by user choice or GPU fallback)
auto cpuStart = std::chrono::high_resolution_clock::now();
RenderFrameSoftware(frame);
auto cpuEnd = std::chrono::high_resolution_clock::now();
double cpuTime = std::chrono::duration<double, std::milli>(cpuEnd - cpuStart).count();
// Log CPU rendering time occasionally for debugging
if (m_currentFrame % 60 == 0) { // roughly every 2 seconds at 30 fps
wchar_t cpuMsg[256];
swprintf_s(cpuMsg, L"CPU render time: %.2fms", cpuTime);
OutputDebugStringW(cpuMsg);
OutputDebugStringW(L"\n");
}
}
void VideoPlayerControl::RenderFrameSoftware(const VavCoreVideoFrame& frame)
{
if (!frame.y_plane || frame.width == 0 || frame.height == 0) return;
try {
// Phase 2 Optimization: Check if bitmap needs recreation using Memory Pool
bool needNewBitmap = !m_renderBitmap ||
m_lastFrameWidth != static_cast<uint32_t>(frame.width) ||
m_lastFrameHeight != static_cast<uint32_t>(frame.height);
if (needNewBitmap) {
// Phase 2 Optimization: Return old bitmap to pool if exists
if (m_renderBitmap) {
m_memoryPool->ReturnBitmap(m_renderBitmap);
}
// Phase 2 Optimization: Get bitmap from Memory Pool
m_renderBitmap = m_memoryPool->GetBitmap(frame.width, frame.height);
VideoImage().Source(m_renderBitmap);
// Cache dimensions to avoid repeated checks
m_lastFrameWidth = static_cast<uint32_t>(frame.width);
m_lastFrameHeight = static_cast<uint32_t>(frame.height);
// Update video dimensions and apply AspectFit
if (m_videoWidth != static_cast<uint32_t>(frame.width) || m_videoHeight != static_cast<uint32_t>(frame.height)) {
m_videoWidth = static_cast<uint32_t>(frame.width);
m_videoHeight = static_cast<uint32_t>(frame.height);
m_hasValidVideoSize = true;
UpdateVideoImageAspectFit(frame.width, frame.height);
}
VideoImage().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Visible);
}
// Fast path: direct conversion to bitmap buffer
auto buffer = m_renderBitmap.PixelBuffer();
auto bufferByteAccess = buffer.as<::Windows::Storage::Streams::IBufferByteAccess>();
uint8_t* bufferData = nullptr;
winrt::check_hresult(bufferByteAccess->Buffer(&bufferData));
// Optimized YUV to BGRA conversion (direct to target buffer)
ConvertYUVToBGRA(frame, bufferData, frame.width, frame.height);
buffer.Length(frame.width * frame.height * 4);
// Minimal UI update
m_renderBitmap.Invalidate();
} catch (...) {
// Ignore render errors to maintain playback
}
}
void VideoPlayerControl::ConvertYUVToBGRA(const VavCoreVideoFrame& yuv_frame, uint8_t* bgra_buffer, uint32_t width, uint32_t height)
{
// YUV420p to BGRA conversion using fixed-point BT.601 limited-range coefficients
const uint8_t* y_plane = yuv_frame.y_plane;
const uint8_t* u_plane = yuv_frame.u_plane;
const uint8_t* v_plane = yuv_frame.v_plane;
if (!y_plane || !u_plane || !v_plane) {
return;
}
const uint32_t y_stride = yuv_frame.y_stride;
const uint32_t u_stride = yuv_frame.u_stride;
const uint32_t v_stride = yuv_frame.v_stride;
for (uint32_t y = 0; y < height; y++) {
const uint8_t* y_row = y_plane + y * y_stride;
const uint8_t* u_row = u_plane + (y / 2) * u_stride;
const uint8_t* v_row = v_plane + (y / 2) * v_stride;
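// 4:2:0 chroma subsampling: each U/V sample covers a 2x2 block of luma pixels,
// hence the y/2 row selection above and the x/2 column indexing below.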
uint8_t* bgra_row = bgra_buffer + y * width * 4;
for (uint32_t x = 0; x < width; x++) {
const uint8_t Y = y_row[x];
const uint8_t U = u_row[x / 2];
const uint8_t V = v_row[x / 2];
// Fixed-point BT.601 limited-range YUV to RGB conversion (coefficients scaled by 256)
const int C = Y - 16;
const int D = U - 128;
const int E = V - 128;
int R = (298 * C + 409 * E + 128) >> 8;
int G = (298 * C - 100 * D - 208 * E + 128) >> 8;
int B = (298 * C + 516 * D + 128) >> 8;
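// Worked example: Y=235, U=V=128 (nominal white) gives C=219, D=E=0, so
// R=G=B=(298*219+128)>>8 = 255; Y=16 (nominal black) maps to 0.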
// Clamp to [0, 255]
R = std::max(0, std::min(255, R));
G = std::max(0, std::min(255, G));
B = std::max(0, std::min(255, B));
// Store as BGRA
bgra_row[x * 4 + 0] = static_cast<uint8_t>(B); // Blue
bgra_row[x * 4 + 1] = static_cast<uint8_t>(G); // Green
bgra_row[x * 4 + 2] = static_cast<uint8_t>(R); // Red
bgra_row[x * 4 + 3] = 255; // Alpha
}
}
}
void VideoPlayerControl::UpdateStatus(winrt::hstring const& message)
{
m_status = message;
}
void VideoPlayerControl::InitializeVideoRenderer()
{
// GPU rendering re-enabled for VavCore
// Try hardware rendering if enabled, fallback to software
bool useGPU = m_useHardwareRendering && TryInitializeGPURenderer();
SetRenderingMode(useGPU);
// If GPU initialization failed, ensure software rendering is ready
if (!useGPU) {
VideoImage().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Visible);
VideoSwapChainPanel().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Collapsed);
}
}
// GPU rendering methods re-enabled for VavCore
bool VideoPlayerControl::TryInitializeGPURenderer()
{
// Create GPU renderer if needed
if (!m_gpuRenderer) {
m_gpuRenderer = std::make_unique<SimpleGPURenderer>();
}
// Get container dimensions
auto container = VideoDisplayArea();
uint32_t width = static_cast<uint32_t>(container.ActualWidth());
uint32_t height = static_cast<uint32_t>(container.ActualHeight());
// Container must be ready with valid dimensions
if (width == 0 || height == 0) {
return false;
}
// Initialize GPU renderer
HRESULT hr = m_gpuRenderer->InitializeWithSwapChain(VideoSwapChainPanel(), width, height);
return SUCCEEDED(hr);
}
void VideoPlayerControl::SetRenderingMode(bool useGPU)
{
if (useGPU) {
VideoSwapChainPanel().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Visible);
VideoImage().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Collapsed);
} else {
VideoSwapChainPanel().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Collapsed);
VideoImage().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Visible);
}
}
void VideoPlayerControl::ResetVideoState()
{
m_currentFrame = 0;
m_currentTime = 0.0;
m_isLoaded = false;
m_isPlaying = false;
// Reset AspectFit state
m_hasValidVideoSize = false;
m_videoWidth = 0;
m_videoHeight = 0;
// Stop and reset playback timer
if (m_playbackTimer)
{
m_playbackTimer.Stop();
}
}
void VideoPlayerControl::ApplyAspectFitIfReady()
{
if (!m_hasValidVideoSize || !m_isLoaded) {
return;
}
auto container = VideoDisplayArea();
if (!container) return;
double containerWidth = container.ActualWidth();
double containerHeight = container.ActualHeight();
if (containerWidth <= 0 || containerHeight <= 0) {
return;
}
UpdateVideoImageAspectFit(m_videoWidth, m_videoHeight);
}
void VideoPlayerControl::UpdateVideoImageAspectFit(int videoWidth, int videoHeight)
{
// Store video dimensions for future use
m_videoWidth = static_cast<uint32_t>(videoWidth);
m_videoHeight = static_cast<uint32_t>(videoHeight);
m_hasValidVideoSize = true;
// AspectFit calculation for proper video scaling
auto container = VideoDisplayArea();
if (!container) {
return;
}
double containerWidth = container.ActualWidth();
double containerHeight = container.ActualHeight();
if (containerWidth <= 0 || containerHeight <= 0) {
return;
}
double videoAspectRatio = static_cast<double>(videoWidth) / videoHeight;
double containerAspectRatio = containerWidth / containerHeight;
double displayWidth, displayHeight;
if (videoAspectRatio > containerAspectRatio) {
// Video is wider - fit to container width
displayWidth = containerWidth;
displayHeight = containerWidth / videoAspectRatio;
} else {
// Video is taller - fit to container height
displayHeight = containerHeight;
displayWidth = containerHeight * videoAspectRatio;
}
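// Example: a 1920x1080 (16:9) video in an 800x800 container is wider than the
// container (1.78 > 1.0), so it fits to width: 800 x 450.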
// Apply AspectFit to both CPU and GPU rendering controls
VideoImage().Width(displayWidth);
VideoImage().Height(displayHeight);
VideoImage().MaxWidth(displayWidth);
VideoImage().MaxHeight(displayHeight);
// Also apply to GPU rendering SwapChainPanel
VideoSwapChainPanel().Width(displayWidth);
VideoSwapChainPanel().Height(displayHeight);
}
void VideoPlayerControl::Seek(double timeSeconds)
{
if (!m_isLoaded || !m_vavCorePlayer) return;
// Stop playback during seek
bool wasPlaying = m_isPlaying;
if (m_isPlaying) {
Pause();
}
// Seek to the specified time using VavCore API
VavCoreResult result = vavcore_seek_to_time(m_vavCorePlayer, timeSeconds);
if (result == VAVCORE_SUCCESS) {
m_currentTime = timeSeconds;
m_currentFrame = static_cast<uint64_t>(timeSeconds * m_frameRate);
// Process one frame to update display
ProcessSingleFrame();
// Resume playback if it was playing before seek
if (wasPlaying) {
Play();
}
UpdateStatus(L"Seeked");
} else {
UpdateStatus(L"Seek failed");
}
}
bool VideoPlayerControl::IsVideoPlaying() { return m_isPlaying; }
bool VideoPlayerControl::IsVideoLoaded() { return m_isLoaded; }
double VideoPlayerControl::CurrentTime() { return m_currentTime; }
double VideoPlayerControl::Duration() { return m_duration; }
winrt::hstring VideoPlayerControl::Status() { return m_status; }
void VideoPlayerControl::LoadDecoderSettings()
{
try {
// Load from Windows.Storage.ApplicationData.Current.LocalSettings
auto localSettings = winrt::Windows::Storage::ApplicationData::Current().LocalSettings();
auto values = localSettings.Values();
// Load decoder type (default: AUTO)
if (values.HasKey(L"DecoderType")) {
auto decoderValue = values.Lookup(L"DecoderType");
if (decoderValue) {
int32_t decoderInt = winrt::unbox_value<int32_t>(decoderValue);
m_decoderType = static_cast<VavCoreDecoderType>(decoderInt);
// Log loaded decoder setting
std::wstring decoderName = L"Unknown";
switch (m_decoderType) {
case VAVCORE_DECODER_AUTO: decoderName = L"Auto"; break;
case VAVCORE_DECODER_DAV1D: decoderName = L"Software (dav1d)"; break;
case VAVCORE_DECODER_MEDIA_FOUNDATION: decoderName = L"Hardware (Media Foundation)"; break;
case VAVCORE_DECODER_NVDEC: decoderName = L"Hardware (NVDEC)"; break;
case VAVCORE_DECODER_VPL: decoderName = L"Hardware (Intel VPL)"; break;
case VAVCORE_DECODER_AMF: decoderName = L"Hardware (AMD AMF)"; break;
}
LogMgr::GetInstance().LogInfo(L"Loaded decoder setting: " + decoderName, L"VideoPlayerControl");
}
} else {
m_decoderType = VAVCORE_DECODER_AUTO;
LogMgr::GetInstance().LogInfo(L"Using default decoder: Auto", L"VideoPlayerControl");
}
} catch (...) {
// If settings loading fails, use default
m_decoderType = VAVCORE_DECODER_AUTO;
LogMgr::GetInstance().LogWarning(L"Failed to load decoder settings, using default: Auto", L"VideoPlayerControl");
}
}
void VideoPlayerControl::RefreshDecoderSettings()
{
// Reload decoder settings from storage
LoadDecoderSettings();
// If a video is currently loaded, update the VavCore player with new decoder type
if (m_vavCorePlayer && m_isLoaded) {
vavcore_set_decoder_type(m_vavCorePlayer, m_decoderType);
std::wstring decoderName = L"Unknown";
switch (m_decoderType) {
case VAVCORE_DECODER_AUTO: decoderName = L"Auto"; break;
case VAVCORE_DECODER_DAV1D: decoderName = L"Software (dav1d)"; break;
case VAVCORE_DECODER_MEDIA_FOUNDATION: decoderName = L"Hardware (Media Foundation)"; break;
case VAVCORE_DECODER_NVDEC: decoderName = L"Hardware (NVDEC)"; break;
case VAVCORE_DECODER_VPL: decoderName = L"Hardware (Intel VPL)"; break;
case VAVCORE_DECODER_AMF: decoderName = L"Hardware (AMD AMF)"; break;
}
LogMgr::GetInstance().LogInfo(L"Applied new decoder setting: " + decoderName, L"VideoPlayerControl");
}
}
// D3D Surface Support Methods
bool VideoPlayerControl::InitializeD3DSurfaceSupport()
{
try {
// Check if decoder supports any D3D surface types
VavCoreSurfaceType supportedTypes[] = {
VAVCORE_SURFACE_D3D11_TEXTURE,
VAVCORE_SURFACE_D3D12_RESOURCE,
VAVCORE_SURFACE_CUDA_DEVICE,
VAVCORE_SURFACE_AMF_SURFACE
};
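// The array order defines preference: the first supported type wins, with
// D3D11 textures listed first for direct SwapChainPanel interop.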
for (auto surfaceType : supportedTypes) {
if (vavcore_supports_surface_type(m_vavCorePlayer, surfaceType)) {
m_supportedSurfaceType = surfaceType;
break;
}
}
if (m_supportedSurfaceType == VAVCORE_SURFACE_CPU) {
LogMgr::GetInstance().LogInfo(L"No D3D surface types supported, using CPU decoding", L"VideoPlayerControl");
return false;
}
// For now, prioritize D3D11 texture support for SwapChainPanel compatibility
if (m_supportedSurfaceType == VAVCORE_SURFACE_D3D11_TEXTURE) {
// TODO: Get D3D11 device from SwapChainPanel or create one
// m_d3dDevice = GetD3D11DeviceFromSwapChainPanel();
// For now, set to nullptr - will be initialized when needed
VavCoreResult result = vavcore_set_d3d_device(m_vavCorePlayer, m_d3dDevice, m_supportedSurfaceType);
if (result == VAVCORE_SUCCESS) {
m_useD3DSurfaces = true;
LogMgr::GetInstance().LogInfo(L"D3D11 surface decoding enabled", L"VideoPlayerControl");
return true;
}
}
LogMgr::GetInstance().LogWarning(L"Failed to initialize D3D surface support", L"VideoPlayerControl");
return false;
}
catch (...) {
LogMgr::GetInstance().LogError(L"Exception during D3D surface initialization", L"VideoPlayerControl");
return false;
}
}
void VideoPlayerControl::ProcessSingleFrameWithSurfaces()
{
try {
// Simple validation
if (!m_isPlaying || !m_vavCorePlayer) {
return;
}
auto totalStart = std::chrono::high_resolution_clock::now();
// Create or reuse D3D texture for this frame
void* d3dTexture = nullptr;
if (!CreateD3DTexture(m_videoWidth, m_videoHeight, &d3dTexture)) {
LogMgr::GetInstance().LogError(L"Failed to create D3D texture", L"VideoPlayerControl");
return;
}
// Decode directly to D3D surface
VavCoreVideoFrame vavFrame;
VavCoreResult result = vavcore_decode_to_surface(m_vavCorePlayer, m_supportedSurfaceType, d3dTexture, &vavFrame);
if (result == VAVCORE_END_OF_STREAM) {
// End of video - stop playback
m_isPlaying = false;
if (m_playbackTimer) m_playbackTimer.Stop();
UpdateStatus(L"Playback completed");
LogMgr::GetInstance().LogInfo(L"Playback completed - End of stream reached", L"VideoPlayerControl");
return;
}
if (result != VAVCORE_SUCCESS) {
// Decode error - count but continue processing
m_framesDecodeErrors++;
m_currentFrame++;
m_currentTime = m_currentFrame / m_frameRate;
// Log decode error occasionally
if (m_framesDecodeErrors % 10 == 1) {
LogMgr::GetInstance().LogError(L"D3D surface decode error count: " + std::to_wstring(m_framesDecodeErrors), L"VideoPlayerControl");
}
return;
}
// Render D3D surface directly to screen
RenderD3DSurfaceToScreen(d3dTexture, vavFrame);
// Update counters
m_currentFrame++;
m_currentTime = m_currentFrame / m_frameRate;
// Free VavCore frame (surface data remains in d3dTexture)
vavcore_free_frame(&vavFrame);
}
catch (...) {
LogMgr::GetInstance().LogError(L"Exception in ProcessSingleFrameWithSurfaces", L"VideoPlayerControl");
}
}
bool VideoPlayerControl::CreateD3DTexture(uint32_t width, uint32_t height, void** texture)
{
// TODO: Implement D3D11 texture creation.
// Not implemented yet: return false so the caller skips D3D-surface decoding for this frame.
*texture = nullptr;
return false;
}
void VideoPlayerControl::RenderD3DSurfaceToScreen(void* d3dTexture, const VavCoreVideoFrame& frame)
{
// TODO: Implement direct D3D surface rendering to SwapChainPanel
// For now, fall back to software rendering
RenderFrameSoftware(frame);
}
// ===============================
// Phase 2 Optimization: Memory Pool Implementation
// ===============================
winrt::Microsoft::UI::Xaml::Media::Imaging::WriteableBitmap VideoPlayerControl::MemoryPool::GetBitmap(uint32_t width, uint32_t height)
{
std::lock_guard<std::mutex> lock(_poolMutex);
if (!_bitmapPool.empty()) {
auto bitmap = _bitmapPool.front();
_bitmapPool.pop();
// Check if size matches
if (bitmap.PixelWidth() == static_cast<int32_t>(width) &&
bitmap.PixelHeight() == static_cast<int32_t>(height)) {
_bitmapPoolHits++;
return bitmap;
} else {
// Size mismatch, will create new one
bitmap = nullptr;
}
}
_bitmapPoolMisses++;
return winrt::Microsoft::UI::Xaml::Media::Imaging::WriteableBitmap(width, height);
}
void VideoPlayerControl::MemoryPool::ReturnBitmap(winrt::Microsoft::UI::Xaml::Media::Imaging::WriteableBitmap bitmap)
{
std::lock_guard<std::mutex> lock(_poolMutex);
if (_bitmapPool.size() < MAX_POOL_SIZE && bitmap) {
_bitmapPool.push(bitmap);
}
// If the pool is full or the bitmap is null, drop the reference so the bitmap is released
}
std::vector<uint8_t> VideoPlayerControl::MemoryPool::GetBuffer(size_t size)
{
std::lock_guard<std::mutex> lock(_poolMutex);
if (!_bufferPool.empty()) {
auto buffer = _bufferPool.front();
_bufferPool.pop();
// Check if size is adequate
if (buffer.size() >= size) {
_bufferPoolHits++;
buffer.resize(size); // Resize to exact size needed
return buffer;
}
// Size too small, will create new one
}
_bufferPoolMisses++;
return std::vector<uint8_t>(size);
}
void VideoPlayerControl::MemoryPool::ReturnBuffer(std::vector<uint8_t> buffer)
{
std::lock_guard<std::mutex> lock(_poolMutex);
if (_bufferPool.size() < MAX_POOL_SIZE) {
_bufferPool.push(std::move(buffer));
}
// If pool is full, let it be destroyed
}
void VideoPlayerControl::MemoryPool::PrintStats()
{
std::lock_guard<std::mutex> lock(_poolMutex);
int totalBitmapRequests = _bitmapPoolHits + _bitmapPoolMisses;
int totalBufferRequests = _bufferPoolHits + _bufferPoolMisses;
if (totalBitmapRequests > 0) {
double bitmapHitRate = (static_cast<double>(_bitmapPoolHits) / totalBitmapRequests) * 100.0;
LogMgr::GetInstance().LogDebug(
L"Memory Pool Stats - Bitmap: " + std::to_wstring(bitmapHitRate) +
L"% hit rate (" + std::to_wstring(_bitmapPoolHits) + L"/" + std::to_wstring(totalBitmapRequests) + L")",
L"VideoPlayerControl");
}
if (totalBufferRequests > 0) {
double bufferHitRate = (static_cast<double>(_bufferPoolHits) / totalBufferRequests) * 100.0;
LogMgr::GetInstance().LogDebug(
L"Memory Pool Stats - Buffer: " + std::to_wstring(bufferHitRate) +
L"% hit rate (" + std::to_wstring(_bufferPoolHits) + L"/" + std::to_wstring(totalBufferRequests) + L")",
L"VideoPlayerControl");
}
}
// ===============================
// Phase 2 Optimization: Advanced Performance Monitor Implementation
// ===============================
void VideoPlayerControl::AdvancedPerformanceMonitor::RecordFrameStart()
{
_frameStartTime = std::chrono::high_resolution_clock::now();
}
void VideoPlayerControl::AdvancedPerformanceMonitor::RecordDecodeStart()
{
_decodeStartTime = std::chrono::high_resolution_clock::now();
}
void VideoPlayerControl::AdvancedPerformanceMonitor::RecordDecodeEnd()
{
auto decodeEndTime = std::chrono::high_resolution_clock::now();
auto decodeTime = std::chrono::duration<double, std::milli>(decodeEndTime - _decodeStartTime).count();
_decodingTimes.push(decodeTime);
if (_decodingTimes.size() > SAMPLE_SIZE) {
_decodingTimes.pop();
}
}
void VideoPlayerControl::AdvancedPerformanceMonitor::RecordRenderStart()
{
_renderStartTime = std::chrono::high_resolution_clock::now();
}
void VideoPlayerControl::AdvancedPerformanceMonitor::RecordRenderEnd()
{
auto renderEndTime = std::chrono::high_resolution_clock::now();
auto renderTime = std::chrono::duration<double, std::milli>(renderEndTime - _renderStartTime).count();
_renderingTimes.push(renderTime);
if (_renderingTimes.size() > SAMPLE_SIZE) {
_renderingTimes.pop();
}
}
void VideoPlayerControl::AdvancedPerformanceMonitor::RecordFrameEnd()
{
auto frameEndTime = std::chrono::high_resolution_clock::now();
auto totalTime = std::chrono::duration<double, std::milli>(frameEndTime - _frameStartTime).count();
_totalFrameTimes.push(totalTime);
if (_totalFrameTimes.size() > SAMPLE_SIZE) {
_totalFrameTimes.pop();
}
// Check for adaptive quality adjustment
CheckForQualityAdjustment(totalTime);
}
VideoPlayerControl::AdvancedPerformanceMonitor::PerformanceStats VideoPlayerControl::AdvancedPerformanceMonitor::GetStats()
{
PerformanceStats stats = {};
if (!_decodingTimes.empty()) {
double sum = 0;
std::queue<double> temp = _decodingTimes;
while (!temp.empty()) {
sum += temp.front();
temp.pop();
}
stats.AverageDecodeTime = sum / _decodingTimes.size();
}
if (!_renderingTimes.empty()) {
double sum = 0;
std::queue<double> temp = _renderingTimes;
while (!temp.empty()) {
sum += temp.front();
temp.pop();
}
stats.AverageRenderTime = sum / _renderingTimes.size();
}
if (!_totalFrameTimes.empty()) {
double sum = 0;
std::queue<double> temp = _totalFrameTimes;
while (!temp.empty()) {
sum += temp.front();
temp.pop();
}
stats.AverageTotalTime = sum / _totalFrameTimes.size();
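// FPS is derived from the average total frame time, e.g. 16.7 ms/frame ~= 60 fps.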
stats.CurrentFPS = 1000.0 / stats.AverageTotalTime;
}
stats.QualityReductionActive = _qualityReductionActive;
return stats;
}
bool VideoPlayerControl::AdvancedPerformanceMonitor::ShouldReduceQuality()
{
return (_consecutiveSlowFrames >= SLOW_FRAME_THRESHOLD && !_qualityReductionActive);
}
bool VideoPlayerControl::AdvancedPerformanceMonitor::ShouldRestoreQuality()
{
return (_consecutiveFastFrames >= FAST_FRAME_THRESHOLD && _qualityReductionActive);
}
void VideoPlayerControl::AdvancedPerformanceMonitor::CheckForQualityAdjustment(double frameTime)
{
const double SLOW_THRESHOLD = 40.0; // 25fps (too slow)
const double FAST_THRESHOLD = 25.0; // 40fps (fast enough)
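// The gap between the two thresholds (25-40 ms) provides hysteresis, so quality
// does not oscillate when frame times hover near a single cutoff.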
if (frameTime > SLOW_THRESHOLD) {
_consecutiveSlowFrames++;
_consecutiveFastFrames = 0;
} else if (frameTime < FAST_THRESHOLD) {
_consecutiveFastFrames++;
_consecutiveSlowFrames = 0;
} else {
// Reset counters for moderate frame times
_consecutiveSlowFrames = 0;
_consecutiveFastFrames = 0;
}
// Update quality reduction state
if (ShouldReduceQuality()) {
_qualityReductionActive = true;
LogMgr::GetInstance().LogDebug(L"QUALITY REDUCTION triggered - Frame time: " + std::to_wstring(frameTime) + L"ms", L"VideoPlayerControl");
} else if (ShouldRestoreQuality()) {
_qualityReductionActive = false;
LogMgr::GetInstance().LogDebug(L"QUALITY RESTORATION triggered - Frame time: " + std::to_wstring(frameTime) + L"ms", L"VideoPlayerControl");
}
}
}