// video-v1/vav2/Vav2Player/Vav2Player/VideoPlayerControl.xaml.cpp
#include "pch.h"
#include "VideoPlayerControl.xaml.h"
#if __has_include("VideoPlayerControl.g.cpp")
#include "VideoPlayerControl.g.cpp"
#endif
#include <winrt/Microsoft.UI.Dispatching.h>
#include <winrt/Windows.Storage.Streams.h>
#include <windows.storage.streams.h>
#include <chrono>
#include <algorithm>
#include <cstring>
#include <cassert>
#include "headless/AV1Decoder_Headless.h"
#include "src/Rendering/SimpleGPURenderer.h"
#include "src/Common/VideoTypes.h"
#include "src/FileIO/WebMFileReader.h"
#include "src/Decoder/VideoDecoderFactory.h"
using namespace winrt;
using namespace winrt::Microsoft::UI::Xaml;
using namespace winrt::Microsoft::UI::Xaml::Controls;
using namespace winrt::Microsoft::UI::Dispatching;
namespace winrt::Vav2Player::implementation
{
VideoPlayerControl::VideoPlayerControl()
: m_useHardwareRendering(true) // TEST: Enable GPU rendering to test the upload buffer fix
{
InitializeComponent();
}
VideoPlayerControl::~VideoPlayerControl()
{
// Defined out-of-line so the unique_ptr members see complete types (from the includes above) when destroyed
}
// Event Handlers
void VideoPlayerControl::UserControl_Loaded(winrt::Windows::Foundation::IInspectable const&, winrt::Microsoft::UI::Xaml::RoutedEventArgs const&)
{
try
{
m_isInitialized = true;
UpdateStatus(L"Ready");
// Auto load video if source is set
if (!m_videoSource.empty())
{
LoadVideo(m_videoSource);
}
// Setup container size change handler for AspectFit updates
VideoDisplayArea().SizeChanged([this](auto&&, auto&&) {
ApplyAspectFitIfReady();
});
OutputDebugStringA("VideoPlayerControl loaded successfully\n");
// Show purple outline placeholder while waiting
ShowPurpleOutlinePlaceholder();
// After 3 seconds, try to load the hard-coded test video below
auto delayTimer = winrt::Microsoft::UI::Xaml::DispatcherTimer();
delayTimer.Interval(std::chrono::seconds(3));
delayTimer.Tick([this, delayTimer](auto&&, auto&&) mutable {
delayTimer.Stop();
// Try to load test video file
auto testVideoPath = L"D:/Project/video-av1/sample/simple_test.webm";
OutputDebugStringA("[DEBUG] Attempting to load test video after 3 second delay\n");
LoadVideo(testVideoPath);
});
delayTimer.Start();
}
catch (...)
{
UpdateStatus(L"Error during initialization");
}
}
void VideoPlayerControl::ShowPurpleOutlinePlaceholder()
{
try
{
OutputDebugStringA("Showing purple outline placeholder...\n");
// Get container size for full screen placeholder
auto container = VideoDisplayArea();
if (!container) return;
int width = static_cast<int>(container.ActualWidth());
int height = static_cast<int>(container.ActualHeight());
// Wait for container to be ready - don't use arbitrary fallback sizes
if (width <= 0 || height <= 0) {
OutputDebugStringA("[DEBUG] Container size not ready, deferring rendering initialization\n");
return; // Wait for layout to complete
}
m_renderBitmap = winrt::Microsoft::UI::Xaml::Media::Imaging::WriteableBitmap(width, height);
VideoImage().Source(m_renderBitmap);
VideoImage().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Visible);
VideoImage().Width(width);
VideoImage().Height(height);
auto buffer = m_renderBitmap.PixelBuffer();
auto bufferByteAccess = buffer.as<::Windows::Storage::Streams::IBufferByteAccess>();
uint8_t* bufferData = nullptr;
winrt::check_hresult(bufferByteAccess->Buffer(&bufferData));
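// PixelBuffer() exposes the bitmap's backing store as an IBuffer; querying it for
// IBufferByteAccess (declared in windows.storage.streams.h) yields a raw pointer to the
// BGRA8 pixel bytes so they can be written in place.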
// Fill with transparent background
for (int i = 0; i < width * height; i++)
{
bufferData[i * 4 + 0] = 0; // Blue
bufferData[i * 4 + 1] = 0; // Green
bufferData[i * 4 + 2] = 0; // Red
bufferData[i * 4 + 3] = 0; // Alpha (transparent)
}
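// Note (assumption worth verifying for WinUI 3): XAML treats WriteableBitmap pixels as
// premultiplied BGRA, so a fully transparent fill should keep the color channels at 0 as well,
// which the fill above already does.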
// Draw purple outline (border thickness = 4 pixels)
int borderThickness = 4;
for (int y = 0; y < height; y++)
{
for (int x = 0; x < width; x++)
{
bool isOutline = (x < borderThickness || x >= width - borderThickness ||
y < borderThickness || y >= height - borderThickness);
if (isOutline)
{
int index = (y * width + x) * 4;
bufferData[index + 0] = 128; // Blue
bufferData[index + 1] = 0; // Green
bufferData[index + 2] = 128; // Red (Purple = Red + Blue)
bufferData[index + 3] = 255; // Alpha (opaque)
}
}
}
buffer.Length(width * height * 4);
m_renderBitmap.Invalidate();
OutputDebugStringA("Purple outline placeholder completed\n");
}
catch (...)
{
OutputDebugStringA("Purple outline placeholder failed\n");
}
}
void VideoPlayerControl::UserControl_Unloaded(winrt::Windows::Foundation::IInspectable const&, winrt::Microsoft::UI::Xaml::RoutedEventArgs const&)
{
try
{
// Stop all playback immediately
m_isPlaying = false;
m_isLoaded = false;
m_isInitialized = false;
Stop();
if (m_playbackTimer)
{
m_playbackTimer.Stop();
}
StopControlsHideTimer();
if (m_gpuRenderer)
{
m_gpuRenderer->Shutdown();
m_gpuRenderer.reset();
}
if (m_decoder) {
m_decoder.reset();
}
if (m_fileReader) {
// Explicitly close file before destroying the reader
if (m_fileReader->IsFileOpen()) {
m_fileReader->CloseFile();
OutputDebugStringA("[DEBUG] File explicitly closed\n");
}
m_fileReader.reset();
}
m_renderBitmap = nullptr;
// Flags were already cleared at the top of this handler; log that cleanup finished
OutputDebugStringA("[DEBUG] UserControl_Unloaded: All resources cleaned up\n");
OutputDebugStringA("VideoPlayerControl unloaded\n");
}
catch (...)
{
// Ignore cleanup errors
}
}
void VideoPlayerControl::HoverDetector_PointerEntered(winrt::Windows::Foundation::IInspectable const&, winrt::Microsoft::UI::Xaml::Input::PointerRoutedEventArgs const&)
{
if (m_showControls && m_isLoaded)
{
ShowControlsInternal();
StopControlsHideTimer();
}
}
void VideoPlayerControl::HoverDetector_PointerExited(winrt::Windows::Foundation::IInspectable const&, winrt::Microsoft::UI::Xaml::Input::PointerRoutedEventArgs const&)
{
if (m_showControls && m_isLoaded && m_isPlaying)
{
StartControlsHideTimer();
}
}
// Public Properties
winrt::hstring VideoPlayerControl::VideoSource()
{
return m_videoSource;
}
void VideoPlayerControl::VideoSource(winrt::hstring const& value)
{
if (m_videoSource != value)
{
m_videoSource = value;
if (m_isInitialized && !value.empty())
{
LoadVideo(value);
}
}
}
bool VideoPlayerControl::ShowControls()
{
return m_showControls;
}
void VideoPlayerControl::ShowControls(bool value)
{
m_showControls = value;
if (m_isInitialized)
{
// Update controls visibility based on value and loaded state
}
}
bool VideoPlayerControl::AutoPlay()
{
return m_autoPlay;
}
void VideoPlayerControl::AutoPlay(bool value)
{
m_autoPlay = value;
}
Vav2Player::VideoDecoderType VideoPlayerControl::DecoderType()
{
switch (m_decoderType)
{
case VideoDecoderFactory::DecoderType::AUTO:
return Vav2Player::VideoDecoderType::Auto;
case VideoDecoderFactory::DecoderType::SOFTWARE:
return Vav2Player::VideoDecoderType::Software;
case VideoDecoderFactory::DecoderType::HARDWARE_MF:
return Vav2Player::VideoDecoderType::HardwareMF;
default:
return Vav2Player::VideoDecoderType::Auto;
}
}
void VideoPlayerControl::DecoderType(Vav2Player::VideoDecoderType value)
{
VideoDecoderFactory::DecoderType newType;
switch (value)
{
case Vav2Player::VideoDecoderType::Auto:
newType = VideoDecoderFactory::DecoderType::AUTO;
break;
case Vav2Player::VideoDecoderType::Software:
newType = VideoDecoderFactory::DecoderType::SOFTWARE;
break;
case Vav2Player::VideoDecoderType::HardwareMF:
newType = VideoDecoderFactory::DecoderType::HARDWARE_MF;
break;
default:
newType = VideoDecoderFactory::DecoderType::AUTO;
break;
}
SetInternalDecoderType(newType);
}
bool VideoPlayerControl::UseHardwareRendering()
{
return m_useHardwareRendering;
}
void VideoPlayerControl::UseHardwareRendering(bool value)
{
if (m_useHardwareRendering != value)
{
m_useHardwareRendering = value;
// Reinitialize renderer if video is already loaded
if (m_isLoaded && m_fileReader && m_fileReader->IsFileOpen())
{
InitializeVideoRenderer();
OutputDebugStringA(("Switched to " +
std::string(value ? "hardware D3D12" : "software CPU") +
" rendering\n").c_str());
}
else
{
// Just switch visibility for now
if (value)
{
VideoSwapChainPanel().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Visible);
VideoImage().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Collapsed);
}
else
{
VideoSwapChainPanel().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Collapsed);
VideoImage().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Visible);
}
}
}
}
VideoDecoderFactory::DecoderType VideoPlayerControl::GetInternalDecoderType()
{
return m_decoderType;
}
void VideoPlayerControl::SetInternalDecoderType(VideoDecoderFactory::DecoderType value)
{
if (m_decoderType != value)
{
m_decoderType = value;
// Reset decoder if currently loaded
if (m_isLoaded)
{
m_decoder.reset();
CreateDecoder();
}
}
}
// Public Methods
void VideoPlayerControl::LoadVideo(winrt::hstring const& filePath)
{
OutputDebugStringA("[DEBUG] LoadVideo called\n");
try
{
std::string filePathStr = winrt::to_string(filePath);
OutputDebugStringA(("[DEBUG] Loading file: " + filePathStr + "\n").c_str());
UpdateStatus(L"Loading video...");
LoadingRing().IsActive(true);
// Reset previous state
ResetVideoState();
// Create or reuse file reader
if (!m_fileReader)
{
OutputDebugStringA("[DEBUG] Creating WebMFileReader\n");
m_fileReader = std::make_unique<WebMFileReader>();
}
else
{
// Explicitly close previous file if open
if (m_fileReader->IsFileOpen())
{
OutputDebugStringA("[DEBUG] Closing previous file before loading new one\n");
m_fileReader->CloseFile();
}
}
// Open file
OutputDebugStringA("[DEBUG] Opening file...\n");
if (!m_fileReader->OpenFile(filePathStr))
{
OutputDebugStringA("[DEBUG] Failed to open file\n");
UpdateStatus(L"Failed to open video file");
LoadingRing().IsActive(false);
return;
}
OutputDebugStringA("[DEBUG] File opened successfully\n");
// Get video tracks
OutputDebugStringA("[DEBUG] Getting video tracks...\n");
auto tracks = m_fileReader->GetVideoTracks();
if (tracks.empty())
{
OutputDebugStringA("[DEBUG] No video tracks found\n");
UpdateStatus(L"No video tracks found");
LoadingRing().IsActive(false);
return;
}
OutputDebugStringA(("[DEBUG] Found " + std::to_string(tracks.size()) + " video tracks\n").c_str());
// Select first video track
OutputDebugStringA("[DEBUG] Selecting video track...\n");
if (!m_fileReader->SelectVideoTrack(tracks[0].track_number))
{
OutputDebugStringA("[DEBUG] Failed to select video track\n");
UpdateStatus(L"Failed to select video track");
LoadingRing().IsActive(false);
return;
}
OutputDebugStringA("[DEBUG] Video track selected successfully\n");
// Get metadata
OutputDebugStringA("[DEBUG] Getting video metadata...\n");
auto metadata = m_fileReader->GetVideoMetadata();
m_totalFrames = metadata.total_frames;
m_frameRate = metadata.frame_rate > 0 ? metadata.frame_rate : 30.0;
m_duration = m_totalFrames / m_frameRate;
OutputDebugStringA(("[DEBUG] Video metadata: " + std::to_string(metadata.width) + "x" + std::to_string(metadata.height) +
", " + std::to_string(m_totalFrames) + " frames, " + std::to_string(m_frameRate) + " fps\n").c_str());
// Try GPU rendering first, fall back to CPU if needed
UseHardwareRendering(true);
// Create and initialize decoder
OutputDebugStringA("[DEBUG] Creating decoder...\n");
if (!CreateDecoder())
{
OutputDebugStringA("[DEBUG] Failed to create decoder\n");
UpdateStatus(L"Failed to create decoder");
LoadingRing().IsActive(false);
return;
}
OutputDebugStringA("[DEBUG] Initializing decoder...\n");
if (!InitializeDecoder())
{
OutputDebugStringA("[DEBUG] Failed to initialize decoder\n");
UpdateStatus(L"Failed to initialize decoder");
LoadingRing().IsActive(false);
return;
}
OutputDebugStringA("[DEBUG] Decoder initialized successfully\n");
// Initialize video renderer
InitializeVideoRenderer();
// Set video dimensions from metadata for AspectFit
m_videoWidth = metadata.width;
m_videoHeight = metadata.height;
m_hasValidVideoSize = true;
m_isLoaded = true; // Set loaded state before AspectFit
OutputDebugStringA(("[DEBUG] Video dimensions set: " + std::to_string(m_videoWidth) + "x" + std::to_string(m_videoHeight) + "\n").c_str());
// Apply AspectFit now that we have video dimensions
ApplyAspectFitIfReady();
LoadingRing().IsActive(false);
// Keep placeholder visible until first frame is rendered
UpdateStatus(L"Video loaded successfully");
OutputDebugStringA(("[DEBUG] Video loaded successfully: " + filePathStr + "\n").c_str());
// Auto play if enabled
if (m_autoPlay)
{
OutputDebugStringA("[DEBUG] Auto play enabled - starting playback\n");
Play();
}
}
catch (...)
{
OutputDebugStringA("[DEBUG] Exception in LoadVideo\n");
UpdateStatus(L"Error loading video");
LoadingRing().IsActive(false);
}
}
void VideoPlayerControl::Play()
{
OutputDebugStringA("[DEBUG] Play() called\n");
if (!m_isLoaded || m_isPlaying)
{
OutputDebugStringA("[DEBUG] Play() - not ready or already playing\n");
return;
}
m_isPlaying = true;
UpdateStatus(L"Playing");
// Setup playback timer for continuous frame processing
if (!m_playbackTimer)
{
OutputDebugStringA("[DEBUG] Creating playback timer\n");
m_playbackTimer = winrt::Microsoft::UI::Xaml::DispatcherTimer();
// Store weak reference to avoid circular dependency
auto weakThis = get_weak();
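// The DispatcherTimer holds its Tick delegate alive; capturing a strong reference here would
// keep the control (and the running timer) alive after the page unloads. get_weak() lets the
// handler detect that the control is gone and bail out instead.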
m_playbackTimer.Tick([weakThis](auto&&, auto&&) {
if (auto strongThis = weakThis.get())
{
OutputDebugStringA("[DEBUG] Timer tick - checking conditions\n");
OutputDebugStringA(("[DEBUG] m_isPlaying: " + std::string(strongThis->m_isPlaying ? "true" : "false") +
", m_isLoaded: " + std::string(strongThis->m_isLoaded ? "true" : "false") + "\n").c_str());
if (strongThis->m_isPlaying && strongThis->m_isLoaded)
{
try {
strongThis->ProcessSingleFrame();
}
catch (...) {
OutputDebugStringA("[DEBUG] Exception in timer ProcessSingleFrame\n");
strongThis->m_isPlaying = false;
strongThis->m_playbackTimer.Stop();
}
}
else
{
OutputDebugStringA("[DEBUG] Timer tick - conditions not met, skipping frame processing\n");
}
}
else
{
OutputDebugStringA("[DEBUG] Timer tick - object destroyed, stopping timer\n");
}
});
}
// Set timer interval based on frame rate (default 30fps = 33.33ms)
auto interval = std::chrono::milliseconds(static_cast<int>(1000.0 / m_frameRate));
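// Note: truncating to whole milliseconds drifts slightly for fractional frame rates
// (e.g. 29.97 fps -> 33 ms per tick, roughly 30.3 fps effective); acceptable for this simple player.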
m_playbackTimer.Interval(interval);
OutputDebugStringA(("[DEBUG] Timer interval set to: " + std::to_string(interval.count()) + "ms\n").c_str());
OutputDebugStringA(("[DEBUG] Timer object valid: " + std::string(m_playbackTimer ? "true" : "false") + "\n").c_str());
m_playbackTimer.Start();
OutputDebugStringA("[DEBUG] Timer.Start() called\n");
// Immediate test: try to process one frame manually to verify the pipeline works
OutputDebugStringA("[DEBUG] Testing immediate frame processing...\n");
ProcessSingleFrame();
OutputDebugStringA(("[DEBUG] Started playback timer at " + std::to_string(m_frameRate) + " fps\n").c_str());
// Backup approach: a second DispatcherTimer in case the member timer's Tick never fires.
// Note: while playback is active this runs alongside m_playbackTimer, so frames can be
// processed more often than the nominal frame rate; it stops itself once playback ends.
OutputDebugStringA("[DEBUG] Setting up backup manual timer approach\n");
auto manualTimer = winrt::Microsoft::UI::Xaml::DispatcherTimer();
manualTimer.Interval(interval);
// Capture a weak reference so the backup timer cannot keep the control alive after unload
auto weakSelf = get_weak();
manualTimer.Tick([weakSelf, manualTimer](auto&&, auto&&) mutable {
auto self = weakSelf.get();
if (self && self->m_isPlaying && self->m_isLoaded) {
self->ProcessSingleFrame();
}
else {
// Control destroyed or playback stopped - shut the backup timer down
manualTimer.Stop();
}
});
manualTimer.Start();
OutputDebugStringA("[DEBUG] Manual timer also started\n");
}
void VideoPlayerControl::Pause()
{
OutputDebugStringA("[DEBUG] Pause() called\n");
m_isPlaying = false;
if (m_playbackTimer)
{
m_playbackTimer.Stop();
}
UpdateStatus(L"Paused");
}
void VideoPlayerControl::Stop()
{
OutputDebugStringA("[DEBUG] Stop() called\n");
m_isPlaying = false;
if (m_playbackTimer)
{
m_playbackTimer.Stop();
}
// Reset position to beginning
m_currentFrame = 0;
m_currentTime = 0.0;
// Reset file reader to beginning for next play
if (m_fileReader && m_fileReader->IsFileOpen())
{
OutputDebugStringA("[DEBUG] Resetting file reader to beginning\n");
m_fileReader->Reset();
}
// Reset decoder state
if (m_decoder)
{
OutputDebugStringA("[DEBUG] Resetting decoder state\n");
m_decoder->Reset();
}
UpdateStatus(L"Stopped");
}
void VideoPlayerControl::ProcessSingleFrame()
{
// Enhanced safety checks to prevent crashes during transitions
if (!m_isLoaded || !m_fileReader || !m_decoder || !m_isInitialized || !m_isPlaying) {
OutputDebugStringA("[DEBUG] ProcessSingleFrame: Not ready or stopping - skipping frame\n");
return;
}
// Additional check for file reader state
if (!m_fileReader->IsFileOpen()) {
OutputDebugStringA("[DEBUG] ProcessSingleFrame: File not open - stopping playback\n");
m_isPlaying = false;
if (m_playbackTimer) m_playbackTimer.Stop();
return;
}
VideoPacket packet;
if (!m_fileReader->ReadNextPacket(packet))
{
// End of video - stop playback
m_isPlaying = false;
if (m_playbackTimer) m_playbackTimer.Stop();
UpdateStatus(L"Playback completed");
return;
}
VideoFrame frame;
if (!m_decoder->DecodeFrame(packet, frame)) return; // Skip failed frames
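// Note: dropping a failed inter-coded frame can leave visible artifacts until the next
// keyframe, but it keeps the timer loop simple and avoids stalling playback.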
RenderFrameToScreen(frame);
m_currentFrame++;
m_currentTime = m_currentFrame / m_frameRate;
}
void VideoPlayerControl::ProcessSingleFrameLegacy()
{
// Legacy method - calls ProcessSingleFrame for compatibility
ProcessSingleFrame();
}
void VideoPlayerControl::RenderFrameToScreen(const VideoFrame& frame)
{
// GPU rendering attempt
if (m_useHardwareRendering && m_gpuRenderer && m_gpuRenderer->IsInitialized()) {
if (m_gpuRenderer->TryRenderFrame(frame)) {
// GPU rendering successful - AspectFit was already applied during initialization
return; // Success - done
}
}
// CPU rendering fallback (always works)
RenderFrameSoftware(frame);
}
void VideoPlayerControl::RenderFrameSoftware(const VideoFrame& frame)
{
OutputDebugStringA(("[DEBUG] RenderFrameSoftware() called - " + std::to_string(frame.width) + "x" + std::to_string(frame.height) + "\n").c_str());
OutputDebugStringA(("[DEBUG] y_plane: " + std::string(frame.y_plane ? "valid" : "null") + "\n").c_str());
if (!frame.y_plane || frame.width == 0 || frame.height == 0)
{
OutputDebugStringA("[DEBUG] Invalid frame data - returning\n");
return;
}
try {
// Create or reuse WriteableBitmap for the frame
if (!m_renderBitmap ||
static_cast<uint32_t>(m_renderBitmap.PixelWidth()) != frame.width ||
static_cast<uint32_t>(m_renderBitmap.PixelHeight()) != frame.height) {
OutputDebugStringA("[DEBUG] Creating new WriteableBitmap\n");
m_renderBitmap = winrt::Microsoft::UI::Xaml::Media::Imaging::WriteableBitmap(
frame.width, frame.height);
VideoImage().Source(m_renderBitmap);
// Update video dimensions and apply AspectFit when bitmap size changes
if (m_videoWidth != frame.width || m_videoHeight != frame.height) {
m_videoWidth = frame.width;
m_videoHeight = frame.height;
m_hasValidVideoSize = true;
UpdateVideoImageAspectFit(frame.width, frame.height);
}
// Ensure video is visible
VideoImage().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Visible);
OutputDebugStringA(("Created WriteableBitmap: " + std::to_string(frame.width) + "x" + std::to_string(frame.height) + "\n").c_str());
}
// Convert YUV to BGRA and render to bitmap
auto buffer = m_renderBitmap.PixelBuffer();
uint32_t capacity = buffer.Capacity();
OutputDebugStringA(("[DEBUG] Buffer capacity: " + std::to_string(capacity) + "\n").c_str());
if (capacity >= frame.width * frame.height * 4) {
OutputDebugStringA("[DEBUG] Converting YUV to BGRA...\n");
// Simple approach: create BGRA data and copy to buffer
std::vector<uint8_t> bgra_data(frame.width * frame.height * 4);
ConvertYUVToBGRA(frame, bgra_data.data(), frame.width, frame.height);
OutputDebugStringA("[DEBUG] Copying to bitmap buffer...\n");
// Copy BGRA data directly to bitmap buffer
auto bufferByteAccess = buffer.as<::Windows::Storage::Streams::IBufferByteAccess>();
uint8_t* bufferData = nullptr;
winrt::check_hresult(bufferByteAccess->Buffer(&bufferData));
std::memcpy(bufferData, bgra_data.data(), frame.width * frame.height * 4);
buffer.Length(frame.width * frame.height * 4);
// Trigger UI update
m_renderBitmap.Invalidate();
OutputDebugStringA(("[DEBUG] Frame rendered successfully: " + std::to_string(frame.width) + "x" + std::to_string(frame.height) + "\n").c_str());
} else {
OutputDebugStringA("[DEBUG] Buffer capacity too small\n");
}
} catch (...) {
OutputDebugStringA("[DEBUG] Software rendering failed\n");
}
}
void VideoPlayerControl::ConvertYUVToBGRA(const VideoFrame& yuv_frame, uint8_t* bgra_buffer, uint32_t width, uint32_t height)
{
OutputDebugStringA("[DEBUG] ConvertYUVToBGRA() called\n");
// YUV420P to BGRA conversion using BT.601 (limited-range) coefficients
const uint8_t* y_plane = yuv_frame.y_plane.get();
const uint8_t* u_plane = yuv_frame.u_plane.get();
const uint8_t* v_plane = yuv_frame.v_plane.get();
if (!y_plane || !u_plane || !v_plane) {
OutputDebugStringA("[DEBUG] ConvertYUVToBGRA: Invalid plane data\n");
return;
}
const uint32_t y_stride = yuv_frame.y_stride;
const uint32_t u_stride = yuv_frame.u_stride;
const uint32_t v_stride = yuv_frame.v_stride;
OutputDebugStringA(("[DEBUG] YUV strides: Y=" + std::to_string(y_stride) + " U=" + std::to_string(u_stride) + " V=" + std::to_string(v_stride) + "\n").c_str());
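// The integer math below is a fixed-point sketch of the BT.601 limited-range transform
// (coefficients scaled by 256, +128 for rounding):
//   R = 1.164*(Y-16) + 1.596*(V-128)
//   G = 1.164*(Y-16) - 0.391*(U-128) - 0.813*(V-128)
//   B = 1.164*(Y-16) + 2.018*(U-128)
// 4:2:0 subsampling: each U/V sample covers a 2x2 block of luma, hence the y/2 and x/2 indexing.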
for (uint32_t y = 0; y < height; y++) {
const uint8_t* y_row = y_plane + y * y_stride;
const uint8_t* u_row = u_plane + (y / 2) * u_stride;
const uint8_t* v_row = v_plane + (y / 2) * v_stride;
uint8_t* bgra_row = bgra_buffer + y * width * 4;
for (uint32_t x = 0; x < width; x++) {
const uint8_t Y = y_row[x];
const uint8_t U = u_row[x / 2];
const uint8_t V = v_row[x / 2];
// BT.601 limited-range YUV to RGB conversion (fixed-point, scaled by 256)
const int C = Y - 16;
const int D = U - 128;
const int E = V - 128;
int R = (298 * C + 409 * E + 128) >> 8;
int G = (298 * C - 100 * D - 208 * E + 128) >> 8;
int B = (298 * C + 516 * D + 128) >> 8;
// Clamp to [0, 255]
R = std::max(0, std::min(255, R));
G = std::max(0, std::min(255, G));
B = std::max(0, std::min(255, B));
// Store as BGRA
bgra_row[x * 4 + 0] = static_cast<uint8_t>(B); // Blue
bgra_row[x * 4 + 1] = static_cast<uint8_t>(G); // Green
bgra_row[x * 4 + 2] = static_cast<uint8_t>(R); // Red
bgra_row[x * 4 + 3] = 255; // Alpha
}
}
OutputDebugStringA("[DEBUG] YUV to BGRA conversion completed\n");
}
void VideoPlayerControl::UpdateStatus(winrt::hstring const& message)
{
m_status = message;
OutputDebugStringA(("[DEBUG] Status: " + winrt::to_string(message) + "\n").c_str());
}
void VideoPlayerControl::ShowControlsInternal()
{
// Show video controls - simplified implementation
}
void VideoPlayerControl::InitializeVideoRenderer()
{
OutputDebugStringA("[DEBUG] InitializeVideoRenderer() called\n");
// Initialize CPU rendering mode (Phase 1)
if (!m_useHardwareRendering)
{
// Ensure software rendering UI is visible
VideoSwapChainPanel().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Collapsed);
VideoImage().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Visible);
OutputDebugStringA("[DEBUG] Initialized CPU rendering mode\n");
}
else
{
// Hardware rendering setup
VideoSwapChainPanel().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Visible);
VideoImage().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Collapsed);
if (!m_gpuRenderer)
{
m_gpuRenderer = std::make_unique<SimpleGPURenderer>();
}
// Initialize GPU renderer with actual container size
auto container = VideoDisplayArea();
uint32_t containerWidth = static_cast<uint32_t>(container.ActualWidth());
uint32_t containerHeight = static_cast<uint32_t>(container.ActualHeight());
// The swap chain needs a real container size - don't guess with arbitrary fallback dimensions
if (containerWidth == 0 || containerHeight == 0) {
OutputDebugStringA("[DEBUG] GPU renderer: Container size not ready\n");
// Fall back to CPU rendering for this session since the container size is unavailable
VideoSwapChainPanel().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Collapsed);
VideoImage().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Visible);
m_useHardwareRendering = false;
OutputDebugStringA("[DEBUG] Switched to CPU rendering due to container size unavailable\n");
return;
}
OutputDebugStringA(("[DEBUG] Initializing GPU renderer with size: " + std::to_string(containerWidth) + "x" + std::to_string(containerHeight) + "\n").c_str());
HRESULT hr = m_gpuRenderer->Initialize(VideoSwapChainPanel(), containerWidth, containerHeight);
if (SUCCEEDED(hr))
{
OutputDebugStringA("[DEBUG] GPU rendering initialized successfully\n");
}
else
{
OutputDebugStringA("[DEBUG] GPU rendering initialization failed, falling back to CPU\n");
m_useHardwareRendering = false;
m_gpuRenderer.reset();
VideoSwapChainPanel().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Collapsed);
VideoImage().Visibility(winrt::Microsoft::UI::Xaml::Visibility::Visible);
}
}
}
void VideoPlayerControl::ResetVideoState()
{
OutputDebugStringA("[DEBUG] ResetVideoState() called\n");
m_currentFrame = 0;
m_currentTime = 0.0;
m_isLoaded = false;
m_isPlaying = false;
// Reset AspectFit state
m_hasValidVideoSize = false;
m_videoWidth = 0;
m_videoHeight = 0;
// Stop and reset playback timer
if (m_playbackTimer)
{
m_playbackTimer.Stop();
}
}
bool VideoPlayerControl::CreateDecoder()
{
OutputDebugStringA("[DEBUG] CreateDecoder() called\n");
m_decoder = VideoDecoderFactory::CreateDecoder(VideoCodecType::AV1, m_decoderType);
bool success = m_decoder != nullptr;
OutputDebugStringA(("[DEBUG] Decoder created: " + std::string(success ? "success" : "failed") + "\n").c_str());
return success;
}
bool VideoPlayerControl::InitializeDecoder()
{
OutputDebugStringA("[DEBUG] InitializeDecoder() called\n");
if (!m_decoder) {
OutputDebugStringA("[DEBUG] No decoder available\n");
return false;
}
auto metadata = m_fileReader->GetVideoMetadata();
bool success = m_decoder->Initialize(metadata);
OutputDebugStringA(("[DEBUG] Decoder initialized: " + std::string(success ? "success" : "failed") + "\n").c_str());
return success;
}
void VideoPlayerControl::ApplyAspectFitIfReady()
{
if (!m_hasValidVideoSize || !m_isLoaded) {
OutputDebugStringA("[DEBUG] ApplyAspectFitIfReady: Not ready, skipping\n");
return;
}
auto container = VideoDisplayArea();
if (!container) return;
double containerWidth = container.ActualWidth();
double containerHeight = container.ActualHeight();
OutputDebugStringA(("[DEBUG] ApplyAspectFitIfReady: Container size: " + std::to_string(containerWidth) + "x" + std::to_string(containerHeight) + "\n").c_str());
if (containerWidth <= 0 || containerHeight <= 0) {
OutputDebugStringA("[DEBUG] ApplyAspectFitIfReady: Container size invalid, skipping (will retry on SizeChanged)\n");
return;
}
UpdateVideoImageAspectFit(m_videoWidth, m_videoHeight);
}
void VideoPlayerControl::UpdateVideoImageAspectFit(int videoWidth, int videoHeight)
{
OutputDebugStringA(("[DEBUG] UpdateVideoImageAspectFit: " + std::to_string(videoWidth) + "x" + std::to_string(videoHeight) + "\n").c_str());
// Store video dimensions for future use
m_videoWidth = static_cast<uint32_t>(videoWidth);
m_videoHeight = static_cast<uint32_t>(videoHeight);
m_hasValidVideoSize = true;
// AspectFit calculation for proper video scaling
auto container = VideoDisplayArea();
if (!container) {
OutputDebugStringA("[DEBUG] UpdateVideoImageAspectFit: No container\n");
return;
}
double containerWidth = container.ActualWidth();
double containerHeight = container.ActualHeight();
OutputDebugStringA(("[DEBUG] UpdateVideoImageAspectFit: Container size: " + std::to_string(containerWidth) + "x" + std::to_string(containerHeight) + "\n").c_str());
if (containerWidth <= 0 || containerHeight <= 0) {
OutputDebugStringA("[DEBUG] UpdateVideoImageAspectFit: Invalid container size, skipping\n");
return;
}
double videoAspectRatio = static_cast<double>(videoWidth) / videoHeight;
double containerAspectRatio = containerWidth / containerHeight;
double displayWidth, displayHeight;
if (videoAspectRatio > containerAspectRatio) {
// Video is wider - fit to container width
displayWidth = containerWidth;
displayHeight = containerWidth / videoAspectRatio;
} else {
// Video is taller - fit to container height
displayHeight = containerHeight;
displayWidth = containerHeight * videoAspectRatio;
}
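// Worked example (hypothetical numbers): a 1920x1080 video (aspect ~1.78) inside a 1280x960
// container (aspect ~1.33) is wider than the container, so it fits to width:
// displayWidth = 1280, displayHeight = 1280 / (1920/1080) = 720, leaving letterbox bars top and bottom.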
OutputDebugStringA(("[DEBUG] UpdateVideoImageAspectFit: Calculated display size: " + std::to_string(displayWidth) + "x" + std::to_string(displayHeight) + "\n").c_str());
// Apply AspectFit to both CPU and GPU rendering controls
VideoImage().Width(displayWidth);
VideoImage().Height(displayHeight);
VideoImage().MaxWidth(displayWidth);
VideoImage().MaxHeight(displayHeight);
// Also apply to GPU rendering SwapChainPanel
VideoSwapChainPanel().Width(displayWidth);
VideoSwapChainPanel().Height(displayHeight);
OutputDebugStringA("[DEBUG] UpdateVideoImageAspectFit: AspectFit applied successfully\n");
}
void VideoPlayerControl::Seek(double timeSeconds)
{
OutputDebugStringA(("[DEBUG] Seek to: " + std::to_string(timeSeconds) + "s\n").c_str());
if (!m_isLoaded || !m_fileReader) return;
// Stop playback during seek
bool wasPlaying = m_isPlaying;
if (m_isPlaying) {
Pause();
}
// Seek to the specified time
if (m_fileReader->SeekToTime(timeSeconds)) {
m_currentTime = timeSeconds;
m_currentFrame = static_cast<uint64_t>(timeSeconds * m_frameRate);
// Process one frame to update display
ProcessSingleFrame();
// Resume playback if it was playing before seek
if (wasPlaying) {
Play();
}
UpdateStatus(L"Seeked");
} else {
OutputDebugStringA("[DEBUG] Seek operation failed\n");
}
}
bool VideoPlayerControl::IsVideoPlaying() { return m_isPlaying; }
bool VideoPlayerControl::IsVideoLoaded() { return m_isLoaded; }
double VideoPlayerControl::CurrentTime() { return m_currentTime; }
double VideoPlayerControl::Duration() { return m_duration; }
winrt::hstring VideoPlayerControl::Status() { return m_status; }
void VideoPlayerControl::StartControlsHideTimer() { /* Simplified implementation */ }
void VideoPlayerControl::StopControlsHideTimer() { /* Simplified implementation */ }
}