Split platform implementation

2025-11-19 23:08:37 +09:00
parent d592a28224
commit eae1b0be1b
7 changed files with 1728 additions and 345 deletions

View File

@@ -61,7 +61,7 @@ endif()
# Common source files (cross-platform) - no PCH for Android
set(VAVCORE_COMMON_SOURCES
${VAVCORE_ROOT}/src/Decoder/VideoDecoderFactory.cpp
${VAVCORE_ROOT}/src/VavCore.cpp
${VAVCORE_ROOT}/src/VavCore_Android.cpp
)
# Android-specific source files

View File

@@ -60,7 +60,7 @@
<Link>
<SubSystem>Console</SubSystem>
<GenerateDebugInformation>true</GenerateDebugInformation>
<AdditionalLibraryDirectories>$(ProjectDir)..\..\vavcore\lib;$(ProjectDir)..\..\..\..\..\lib\libwebm;$(ProjectDir)..\..\..\..\..\lib\dav1d;$(ProjectDir)..\..\..\..\..\lib\amf;$(ProjectDir)..\..\..\..\..\lib\libvpl;$(ProjectDir)..\..\..\..\..\oss\nvidia-video-codec\Lib\x64;C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v13.0\lib\x64</AdditionalLibraryDirectories>
<AdditionalLibraryDirectories>$(ProjectDir)..\..\vavcore\lib;$(ProjectDir)..\..\..\..\..\lib\windows-x64\libwebm;$(ProjectDir)..\..\..\..\..\lib\windows-x64\dav1d;$(ProjectDir)..\..\..\..\..\lib\windows-x64\amf;$(ProjectDir)..\..\..\..\..\lib\windows-x64\libvpl;$(ProjectDir)..\..\..\..\..\oss\nvidia-video-codec\Lib\x64;C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v13.0\lib\x64</AdditionalLibraryDirectories>
<AdditionalDependencies>VavCore-debug.lib;webm-debug.lib;dav1d-debug.lib;amf-debug.lib;vpld.lib;nvcuvid.lib;cuda.lib;mfplat.lib;mf.lib;mfuuid.lib;d3d11.lib;d3d12.lib;dxgi.lib;kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;odbccp32.lib;%(AdditionalDependencies)</AdditionalDependencies>
</Link>
<PostBuildEvent>
@@ -87,7 +87,7 @@ echo VavCore Debug DLL copy completed.</Command>
<EnableCOMDATFolding>true</EnableCOMDATFolding>
<OptimizeReferences>true</OptimizeReferences>
<GenerateDebugInformation>true</GenerateDebugInformation>
<AdditionalLibraryDirectories>$(ProjectDir)..\..\vavcore\lib;$(ProjectDir)..\..\..\..\..\lib\libwebm;$(ProjectDir)..\..\..\..\..\lib\dav1d;$(ProjectDir)..\..\..\..\..\lib\amf;$(ProjectDir)..\..\..\..\..\lib\libvpl;$(ProjectDir)..\..\..\..\..\oss\nvidia-video-codec\Lib\x64;C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v13.0\lib\x64</AdditionalLibraryDirectories>
<AdditionalLibraryDirectories>$(ProjectDir)..\..\vavcore\lib;$(ProjectDir)..\..\..\..\..\lib\windows-x64\libwebm;$(ProjectDir)..\..\..\..\..\lib\windows-x64\dav1d;$(ProjectDir)..\..\..\..\..\lib\windows-x64\amf;$(ProjectDir)..\..\..\..\..\lib\windows-x64\libvpl;$(ProjectDir)..\..\..\..\..\oss\nvidia-video-codec\Lib\x64;C:\Program Files\NVIDIA GPU Computing Toolkit\CUDA\v13.0\lib\x64</AdditionalLibraryDirectories>
<AdditionalDependencies>VavCore.lib;webm.lib;dav1d.lib;amf.lib;vpl.lib;nvcuvid.lib;cuda.lib;mfplat.lib;mf.lib;mfuuid.lib;d3d11.lib;d3d12.lib;dxgi.lib;kernel32.lib;user32.lib;gdi32.lib;winspool.lib;comdlg32.lib;advapi32.lib;shell32.lib;ole32.lib;oleaut32.lib;uuid.lib;odbc32.lib;odbccp32.lib;%(AdditionalDependencies)</AdditionalDependencies>
</Link>
<PostBuildEvent>

View File

@@ -142,7 +142,7 @@
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">Create</PrecompiledHeader>
<PrecompiledHeader Condition="'$(Configuration)|$(Platform)'=='Release|x64'">Create</PrecompiledHeader>
</ClCompile>
<ClCompile Include="src\VavCore.cpp" />
<ClCompile Include="src\VavCore_Windows.cpp" />
<ClCompile Include="src\DllMain.cpp" />
<ClCompile Include="src\Common\VavCoreLogger.cpp" />
<ClCompile Include="src\Common\ImageUtils.cpp" />

View File

@@ -388,18 +388,10 @@ bool MediaCodecSurfaceManager::CreateVulkanImage(void* vk_device, void* vk_insta
ycbcrConversionCreateInfo.format = vulkan_format;
// DIAGNOSTIC: Try BT.601 instead of BT.709 for NV21 devices
// Qualcomm devices may expect BT.601 color matrix
if (is_nv21_device) {
ycbcrConversionCreateInfo.ycbcrModel = VK_SAMPLER_YCBCR_MODEL_CONVERSION_YCBCR_601; // BT.601
ycbcrConversionCreateInfo.ycbcrRange = VK_SAMPLER_YCBCR_RANGE_ITU_NARROW; // Limited range
LogInfo(" Using BT.601 + ITU_NARROW for NV21 device");
} else {
// Use MediaCodec suggested settings
ycbcrConversionCreateInfo.ycbcrModel = static_cast<VkSamplerYcbcrModelConversion>(ahb_format_props.suggestedYcbcrModel);
ycbcrConversionCreateInfo.ycbcrRange = static_cast<VkSamplerYcbcrRange>(ahb_format_props.suggestedYcbcrRange);
LogInfo(" Using MediaCodec suggested YCbCr model and range");
}
// Use MediaCodec suggested YCbCr model and range for all devices
ycbcrConversionCreateInfo.ycbcrModel = static_cast<VkSamplerYcbcrModelConversion>(ahb_format_props.suggestedYcbcrModel);
ycbcrConversionCreateInfo.ycbcrRange = static_cast<VkSamplerYcbcrRange>(ahb_format_props.suggestedYcbcrRange);
LogInfo(" Using MediaCodec suggested YCbCr model and range");
// Log color space settings
LogInfo("YCbCr conversion:");
@@ -414,28 +406,25 @@ bool MediaCodecSurfaceManager::CreateVulkanImage(void* vk_device, void* vk_insta
LogInfo(" b: " + std::to_string(ahb_format_props.samplerYcbcrConversionComponents.b));
LogInfo(" a: " + std::to_string(ahb_format_props.samplerYcbcrConversionComponents.a));
// Use already-detected is_nv21_device from above
if (is_nv21_device || ahb_format_props.samplerYcbcrConversionComponents.r == 0) {
// NV21 device: Need to swap Cb/Cr which appear as G/B in conversion output
// In YCbCr conversion, Cb maps to B-Y and Cr maps to R-Y
// For NV21 (CrCb), we get the order reversed
ycbcrConversionCreateInfo.components.r = VK_COMPONENT_SWIZZLE_IDENTITY;
ycbcrConversionCreateInfo.components.g = VK_COMPONENT_SWIZZLE_IDENTITY;
ycbcrConversionCreateInfo.components.b = VK_COMPONENT_SWIZZLE_IDENTITY;
ycbcrConversionCreateInfo.components.a = VK_COMPONENT_SWIZZLE_IDENTITY;
LogInfo(" Using IDENTITY components (will use BT.601 instead of BT.709)");
// Qualcomm Adreno GPU requires component swizzle because it outputs NV21 (CrCb)
// but VkFormat expects NV12 (CbCr)
if (is_qualcomm_gpu) {
ycbcrConversionCreateInfo.components.r = VK_COMPONENT_SWIZZLE_B; // R (Cr) from B position
ycbcrConversionCreateInfo.components.g = VK_COMPONENT_SWIZZLE_G; // G (Y) unchanged
ycbcrConversionCreateInfo.components.b = VK_COMPONENT_SWIZZLE_R; // B (Cb) from R position
ycbcrConversionCreateInfo.components.a = VK_COMPONENT_SWIZZLE_A; // A unchanged
LogInfo(" Using SWAPPED components (R<-B, B<-R) for Adreno NV21");
} else {
// Use MediaCodec suggested component mapping
ycbcrConversionCreateInfo.components = ahb_format_props.samplerYcbcrConversionComponents;
LogInfo(" Using MediaCodec suggested component mapping");
LogInfo(" Using MediaCodec suggested components");
}
// DIAGNOSTIC: Override with MIDPOINT chroma offset (standard for JPEG/MPEG)
ycbcrConversionCreateInfo.xChromaOffset = VK_CHROMA_LOCATION_MIDPOINT;
ycbcrConversionCreateInfo.yChromaOffset = VK_CHROMA_LOCATION_MIDPOINT;
// Use MediaCodec suggested chroma offset (DON'T override!)
ycbcrConversionCreateInfo.xChromaOffset = static_cast<VkChromaLocation>(ahb_format_props.suggestedXChromaOffset);
ycbcrConversionCreateInfo.yChromaOffset = static_cast<VkChromaLocation>(ahb_format_props.suggestedYChromaOffset);
LogInfo(" MediaCodec suggested xChromaOffset: " + std::to_string(ahb_format_props.suggestedXChromaOffset));
LogInfo(" MediaCodec suggested yChromaOffset: " + std::to_string(ahb_format_props.suggestedYChromaOffset));
LogInfo(" Overriding with MIDPOINT (0) chroma offset");
LogInfo(" Using MediaCodec suggested chroma offset");
ycbcrConversionCreateInfo.chromaFilter = VK_FILTER_LINEAR;
ycbcrConversionCreateInfo.forceExplicitReconstruction = VK_FALSE;
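A consolidated sketch of the YCbCr setup after this change, reusing the variable names from the hunk above (the g and a swizzles are left at the driver-suggested values here for brevity, so this is illustrative rather than a line-for-line copy of the diff):
ycbcrConversionCreateInfo.format = vulkan_format;                                         // external AHB format, as before
ycbcrConversionCreateInfo.ycbcrModel = static_cast<VkSamplerYcbcrModelConversion>(ahb_format_props.suggestedYcbcrModel);
ycbcrConversionCreateInfo.ycbcrRange = static_cast<VkSamplerYcbcrRange>(ahb_format_props.suggestedYcbcrRange);
ycbcrConversionCreateInfo.components = ahb_format_props.samplerYcbcrConversionComponents; // suggested mapping by default
if (is_qualcomm_gpu) {                                                                    // Adreno delivers NV21 (CrCb) where NV12 (CbCr) is expected
    ycbcrConversionCreateInfo.components.r = VK_COMPONENT_SWIZZLE_B;
    ycbcrConversionCreateInfo.components.b = VK_COMPONENT_SWIZZLE_R;
}
ycbcrConversionCreateInfo.xChromaOffset = static_cast<VkChromaLocation>(ahb_format_props.suggestedXChromaOffset);  // no MIDPOINT override
ycbcrConversionCreateInfo.yChromaOffset = static_cast<VkChromaLocation>(ahb_format_props.suggestedYChromaOffset);
ycbcrConversionCreateInfo.chromaFilter = VK_FILTER_LINEAR;
ycbcrConversionCreateInfo.forceExplicitReconstruction = VK_FALSE;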

View File

@@ -1,7 +1,10 @@
// VavCore_Android_Full.cpp - Complete Android implementation of VavCore C API
// All platform-specific code consolidated in this file
#include "pch.h"
#include "VavCore/VavCore.h"
#include "Common/VideoTypes.h" // Internal VavCore types
#include "Common/AdaptiveTypes.h" // Adaptive types
#include "Common/VideoTypes.h"
#include "Common/AdaptiveTypes.h"
#include "Decoder/IVideoDecoder.h"
#include "Decoder/VideoDecoderFactory.h"
#include "FileIO/WebMFileReader.h"
@@ -13,59 +16,42 @@
#include <cstring>
#ifdef ANDROID
#include <jni.h> // For JNI functions and types
#endif
#include <jni.h>
// Use VavCore namespace internally
using namespace VavCore;
// Forward declarations for DllMain-based initialization
extern "C" bool PerformSafeDllInitialization();
extern "C" bool IsDllReadyForInitialization();
// Forward declaration for Android JavaVM access
#ifdef ANDROID
namespace VavCore {
JavaVM* GetAndroidJavaVM();
}
#endif
// Global state
static bool g_initialized = false;
static bool g_jni_loaded = false;
static std::mutex g_mutex;
#ifdef ANDROID
static JavaVM* g_android_java_vm = nullptr; // Global JavaVM for Android JNI operations
static JavaVM* g_android_java_vm = nullptr;
// Android JNI initialization - equivalent to DllMain for lazy loading
JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void* reserved) {
std::lock_guard<std::mutex> lock(g_mutex);
g_android_java_vm = vm; // Store JavaVM for later use
g_android_java_vm = vm;
g_jni_loaded = true;
LOGF_INFO("[VavCore Android] JNI_OnLoad: JavaVM registered at %p", vm);
return JNI_VERSION_1_6;
}
JNIEXPORT void JNICALL JNI_OnUnload(JavaVM* vm, void* reserved) {
std::lock_guard<std::mutex> lock(g_mutex);
// Perform cleanup if initialized
if (g_initialized) {
// Note: We can't call vavcore_cleanup() here as it might not be safe
// The cleanup should be handled by the application calling vavcore_cleanup()
g_initialized = false;
}
LOGF_INFO("[VavCore Android] JNI_OnUnload: JavaVM=%p", vm);
g_jni_loaded = false;
}
// Internal function to get JavaVM for use by MediaCodec decoders
// Defined here so it's available before first use
namespace VavCore {
JavaVM* GetAndroidJavaVM() {
std::lock_guard<std::mutex> lock(g_mutex);
// DEBUG: Log every call to GetAndroidJavaVM()
LOGF_INFO("[GetAndroidJavaVM] Called - g_android_java_vm = %p", g_android_java_vm);
LOGF_INFO("[GetAndroidJavaVM] g_jni_loaded = %d", g_jni_loaded);
@@ -83,7 +69,6 @@ namespace VavCore {
static bool IsAndroidLibraryReady() {
return g_jni_loaded;
}
#endif
// Error message mapping
static const char* get_error_message(VavCoreResult result) {
@@ -104,7 +89,7 @@ class VavCorePlayerImpl;
// C-compatible player structure (pimpl pattern)
struct VavCorePlayer {
VavCorePlayerImpl* impl; // Opaque pointer to C++ implementation
VavCorePlayerImpl* impl;
};
// C++ implementation class (hidden from C API)
@@ -120,11 +105,7 @@ public:
double currentTimeSeconds;
std::string decoderName;
// Store D3D device before decoder creation
void* pendingD3DDevice;
VavCoreSurfaceType pendingD3DSurfaceType;
// Store Vulkan device before decoder creation
// Store Vulkan device before decoder creation (Android-specific)
void* vulkan_device;
void* vulkan_instance;
void* vulkan_physical_device;
@@ -132,7 +113,7 @@ public:
// Debug options
VavCoreDebugOptions debugOptions;
std::string debugOutputPath; // Owned copy of debug_output_path
std::string debugOutputPath;
VavCorePlayerImpl()
: qualityMode(VAVCORE_QUALITY_CONSERVATIVE)
@@ -141,8 +122,6 @@ public:
, currentFrame(0)
, currentTimeSeconds(0.0)
, decoderName("unknown")
, pendingD3DDevice(nullptr)
, pendingD3DSurfaceType(VAVCORE_SURFACE_CPU)
, vulkan_device(nullptr)
, vulkan_instance(nullptr)
, vulkan_physical_device(nullptr)
@@ -151,7 +130,6 @@ public:
{
fileReader = std::make_unique<WebMFileReader>();
// Initialize debug options with defaults
debugOptions.enable_first_frame_debug = false;
debugOptions.first_frame_debug_count = 3;
debugOptions.enable_rgba_debug = false;
@@ -164,7 +142,6 @@ public:
}
void close_internal() {
// Clean up decoder and file reader
if (decoder) {
decoder->Cleanup();
decoder.reset();
@@ -176,7 +153,6 @@ public:
currentFrame = 0;
currentTimeSeconds = 0.0;
}
};
// Convert internal quality mode to adaptive quality mode
@@ -194,10 +170,7 @@ static VavCore::VideoDecoderFactory::DecoderType to_decoder_type(VavCoreDecoderT
switch (type) {
case VAVCORE_DECODER_AUTO: return VavCore::VideoDecoderFactory::DecoderType::AUTO;
case VAVCORE_DECODER_DAV1D: return VavCore::VideoDecoderFactory::DecoderType::DAV1D;
case VAVCORE_DECODER_NVDEC: return VavCore::VideoDecoderFactory::DecoderType::NVDEC;
case VAVCORE_DECODER_MEDIA_FOUNDATION: return VavCore::VideoDecoderFactory::DecoderType::MEDIA_FOUNDATION;
case VAVCORE_DECODER_VPL: return VavCore::VideoDecoderFactory::DecoderType::VPL;
case VAVCORE_DECODER_AMF: return VavCore::VideoDecoderFactory::DecoderType::AMF;
case VAVCORE_DECODER_MEDIACODEC: return VavCore::VideoDecoderFactory::DecoderType::MEDIACODEC;
default: return VavCore::VideoDecoderFactory::DecoderType::AUTO;
}
}
@@ -208,25 +181,20 @@ static void copy_frame_data(const VideoFrame& src, VavCoreVideoFrame* dst) {
dst->width = src.width;
dst->height = src.height;
dst->timestamp_us = static_cast<uint64_t>(src.timestamp_seconds * 1000000.0); // Convert seconds to microseconds
dst->timestamp_us = static_cast<uint64_t>(src.timestamp_seconds * 1000000.0);
dst->frame_number = src.frame_index;
// Set default surface type to CPU
dst->surface_type = VAVCORE_SURFACE_CPU;
// Use actual plane sizes from source frame
size_t y_size = src.y_size;
size_t u_size = src.u_size;
size_t v_size = src.v_size;
// Allocate memory for frame data
dst->y_plane = static_cast<uint8_t*>(malloc(y_size));
dst->u_plane = static_cast<uint8_t*>(malloc(u_size));
dst->v_plane = static_cast<uint8_t*>(malloc(v_size));
if (dst->y_plane && dst->u_plane && dst->v_plane &&
src.y_plane && src.u_plane && src.v_plane) {
// Copy frame data from individual planes
memcpy(dst->y_plane, src.y_plane.get(), y_size);
memcpy(dst->u_plane, src.u_plane.get(), u_size);
memcpy(dst->v_plane, src.v_plane.get(), v_size);
@@ -236,7 +204,6 @@ static void copy_frame_data(const VideoFrame& src, VavCoreVideoFrame* dst) {
dst->u_stride = src.u_stride;
dst->v_stride = src.v_stride;
// Initialize CPU surface data for backward compatibility
dst->surface_data.cpu.planes[0] = dst->y_plane;
dst->surface_data.cpu.planes[1] = dst->u_plane;
dst->surface_data.cpu.planes[2] = dst->v_plane;
@@ -245,7 +212,10 @@ static void copy_frame_data(const VideoFrame& src, VavCoreVideoFrame* dst) {
dst->surface_data.cpu.strides[2] = dst->v_stride;
}
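Because copy_frame_data() malloc's the three planes, every successfully decoded frame must be released with vavcore_free_frame(). A minimal caller-side sketch of the CPU path, assuming an already-opened player (render_yuv is an app-side placeholder, error handling trimmed):
VavCoreVideoFrame frame = {};
if (vavcore_decode_next_frame(player, &frame) == VAVCORE_SUCCESS) {
    // y/u/v planes were malloc'ed by copy_frame_data(); strides come from the source frame
    render_yuv(frame.y_plane, frame.u_plane, frame.v_plane,
               frame.y_stride, frame.u_stride, frame.v_stride,
               frame.width, frame.height);
    vavcore_free_frame(&frame);   // releases the malloc'ed planes
}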
// API Implementation
// ============================================================================
// C API Implementation - Android Platform
// ============================================================================
extern "C" {
VAVCORE_API VavCoreResult vavcore_initialize(void) {
@@ -255,30 +225,16 @@ VAVCORE_API VavCoreResult vavcore_initialize(void) {
return VAVCORE_SUCCESS;
}
#ifndef ANDROID
// Check if DLL is ready for safe initialization
if (!IsDllReadyForInitialization()) {
return VAVCORE_ERROR_INIT_FAILED;
}
// Perform safe DLL-level initialization
if (!PerformSafeDllInitialization()) {
return VAVCORE_ERROR_INIT_FAILED;
}
#else
// Android: Check if JNI library is ready
// Android-specific: Check if JNI library is ready
if (!IsAndroidLibraryReady()) {
LOGF_ERROR("[VavCore Android] JNI not loaded, cannot initialize");
return VAVCORE_ERROR_INIT_FAILED;
}
#endif
// Initialize decoder factory
// Note: InitializeFactory() handles platform-specific decoder registration internally
// - Android: Explicitly calls RegisterMediaCodecDecoders() due to JNI initialization order
// - Windows: Uses static initialization for decoder registration
// Initialize decoder factory (Android explicitly calls RegisterMediaCodecDecoders)
VideoDecoderFactory::InitializeFactory();
g_initialized = true;
LOGF_INFO("[VavCore Android] Initialization complete (JavaVM=%p)", g_android_java_vm);
return VAVCORE_SUCCESS;
}
@@ -286,8 +242,8 @@ VAVCORE_API void vavcore_cleanup(void) {
std::lock_guard<std::mutex> lock(g_mutex);
if (g_initialized) {
// Cleanup subsystems
g_initialized = false;
LOGF_INFO("[VavCore Android] Cleanup complete");
}
}
@@ -311,7 +267,6 @@ VAVCORE_API VavCorePlayer* vavcore_create_player(void) {
VavCorePlayer* player = new VavCorePlayer();
player->impl = new VavCorePlayerImpl();
// Verify fileReader was created successfully
if (!player->impl->fileReader) {
delete player->impl;
delete player;
@@ -338,16 +293,13 @@ VAVCORE_API VavCoreResult vavcore_open_file(VavCorePlayer* player, const char* f
return VAVCORE_ERROR_INVALID_PARAM;
}
// Verify fileReader exists before proceeding
if (!player->impl->fileReader) {
return VAVCORE_ERROR_INIT_FAILED;
}
try {
// Debug log
LOGF_DEBUG("[VavCore] Opening file: %s", filepath);
// Open file with WebM reader
if (!player->impl->fileReader->OpenFile(filepath)) {
LOGF_DEBUG("[VavCore] OpenFile() returned false");
return VAVCORE_ERROR_FILE_NOT_FOUND;
@@ -355,9 +307,7 @@ VAVCORE_API VavCoreResult vavcore_open_file(VavCorePlayer* player, const char* f
LOGF_DEBUG("[VavCore] OpenFile() succeeded");
// Get video tracks and select the first AV1 track
auto tracks = player->impl->fileReader->GetVideoTracks();
LOGF_DEBUG("[VavCore] Found %zu video tracks", tracks.size());
bool foundAV1 = false;
@@ -369,7 +319,6 @@ VAVCORE_API VavCoreResult vavcore_open_file(VavCorePlayer* player, const char* f
LOGF_DEBUG("[VavCore] AV1 track found! Selecting track...");
if (player->impl->fileReader->SelectVideoTrack(track.track_number)) {
LOGF_DEBUG("[VavCore] Track selected successfully");
// Get full metadata from WebMFileReader (includes codec_private_data)
player->impl->metadata = player->impl->fileReader->GetVideoMetadata();
foundAV1 = true;
break;
@@ -383,11 +332,10 @@ VAVCORE_API VavCoreResult vavcore_open_file(VavCorePlayer* player, const char* f
return VAVCORE_ERROR_NOT_SUPPORTED;
}
// Create appropriate decoder
LOGF_DEBUG("[VavCore] Creating decoder...");
auto decoderType = to_decoder_type(player->impl->decoderType);
LOGF_DEBUG("[VavCore] Decoder type requested: %d (0=AUTO, 1=NVDEC, 2=VPL, 3=AMF, 4=DAV1D, 5=MF, 6=MEDIACODEC)",
LOGF_DEBUG("[VavCore] Decoder type requested: %d (0=AUTO, 4=DAV1D, 6=MEDIACODEC)",
static_cast<int>(decoderType));
player->impl->decoder = VavCore::VideoDecoderFactory::CreateDecoder(VavCore::VideoCodecType::AV1, decoderType);
@@ -400,29 +348,13 @@ VAVCORE_API VavCoreResult vavcore_open_file(VavCorePlayer* player, const char* f
LOGF_DEBUG("[VavCore] Decoder created successfully.");
// Apply pending D3D device if it was set before decoder creation
if (player->impl->pendingD3DDevice) {
LOGF_DEBUG("[VavCore] Applying pending D3D device before decoder initialization...");
LOGF_DEBUG("[VavCore] Pending D3D device: %p, Type: %d",
player->impl->pendingD3DDevice, static_cast<int>(player->impl->pendingD3DSurfaceType));
player->impl->decoder->SetD3DDevice(player->impl->pendingD3DDevice, player->impl->pendingD3DSurfaceType);
// Clear pending device after applying
player->impl->pendingD3DDevice = nullptr;
player->impl->pendingD3DSurfaceType = VAVCORE_SURFACE_CPU;
}
#ifdef ANDROID
// CRITICAL: Apply Vulkan device BEFORE decoder initialization
// This allows MediaCodec to be created with ImageReader surface from the start
// Android-specific: Apply pending Vulkan device BEFORE decoder initialization
if (player->impl->has_vulkan_device) {
LOGF_DEBUG("[VavCore] Applying pending Vulkan device BEFORE decoder initialization...");
LOGF_DEBUG("[VavCore] Vulkan device: %p, instance: %p, physical device: %p",
player->impl->vulkan_device, player->impl->vulkan_instance, player->impl->vulkan_physical_device);
// Pre-check: Vulkan device requires JavaVM for ImageReader initialization
// If JavaVM is not available, decoder initialization is guaranteed to fail
JavaVM* javaVM = VavCore::GetAndroidJavaVM();
if (!javaVM) {
LOGF_ERROR("[VavCore] CRITICAL: Vulkan device set but JavaVM unavailable!");
@@ -449,11 +381,9 @@ VAVCORE_API VavCoreResult vavcore_open_file(VavCorePlayer* player, const char* f
return VAVCORE_ERROR_INIT_FAILED;
}
}
#endif
LOGF_DEBUG("[VavCore] Initializing decoder...");
// Initialize decoder (now with Vulkan device already set!)
if (!player->impl->decoder->Initialize(player->impl->metadata)) {
LOGF_ERROR("[VavCore] Decoder initialization failed (unsupported format or hardware unavailable)");
player->impl->decoder.reset();
@@ -463,22 +393,11 @@ VAVCORE_API VavCoreResult vavcore_open_file(VavCorePlayer* player, const char* f
LOGF_DEBUG("[VavCore] Decoder initialized successfully!");
// Apply debug options to newly created decoder
player->impl->decoder->SetDebugOptions(&player->impl->debugOptions);
LOGF_DEBUG("[VavCore] Debug options applied to decoder");
// Store the actual decoder name for later retrieval
player->impl->decoderName = player->impl->decoder->GetCodecName();
// Set adaptive quality mode if supported
// TODO: Implement adaptive quality support in VavCore v1.1
// Currently disabled as adaptive decoders don't implement IAdaptiveVideoDecoder interface yet
// auto adaptiveDecoder = dynamic_cast<VavCore::IAdaptiveVideoDecoder*>(player->impl->decoder.get());
// if (adaptiveDecoder) {
// adaptiveDecoder->SetQualityMode(to_adaptive_quality_mode(player->impl->qualityMode));
// }
// Final verification - both fileReader and decoder should be ready
if (!player->impl->fileReader || !player->impl->decoder) {
if (player->impl->fileReader) {
player->impl->fileReader->CloseFile();
@@ -520,19 +439,16 @@ VAVCORE_API VavCoreResult vavcore_decode_next_frame(VavCorePlayer* player, VavCo
}
try {
// Read next packet
VideoPacket packet;
if (!player->impl->fileReader->ReadNextPacket(packet)) {
return VAVCORE_END_OF_STREAM; // End of file
return VAVCORE_END_OF_STREAM;
}
// Decode frame
VideoFrame videoFrame;
if (!player->impl->decoder->DecodeFrame(packet, videoFrame)) {
return VAVCORE_ERROR_DECODE_FAILED;
}
// Copy frame data to C structure
copy_frame_data(videoFrame, frame);
player->impl->currentFrame++;
@@ -577,7 +493,6 @@ VAVCORE_API VavCoreResult vavcore_seek_to_frame(VavCorePlayer* player, uint64_t
}
}
// Test function to verify linking
VAVCORE_API VavCoreResult vavcore_test_function(void) {
return VAVCORE_SUCCESS;
}
@@ -592,7 +507,6 @@ VAVCORE_API VavCoreResult vavcore_reset(VavCorePlayer* player) {
}
try {
// Reset decoder if available
if (player->impl->decoder) {
if (!player->impl->decoder->Reset()) {
// Continue anyway - not fatal
@@ -601,7 +515,6 @@ VAVCORE_API VavCoreResult vavcore_reset(VavCorePlayer* player) {
return VAVCORE_ERROR_INIT_FAILED;
}
// Reset file reader if available
if (player->impl->fileReader) {
if (!player->impl->fileReader->Reset()) {
// Continue anyway - not fatal
@@ -610,7 +523,6 @@ VAVCORE_API VavCoreResult vavcore_reset(VavCorePlayer* player) {
return VAVCORE_ERROR_INIT_FAILED;
}
// Reset state variables
player->impl->currentFrame = 0;
player->impl->currentTimeSeconds = 0.0;
@@ -633,7 +545,7 @@ VAVCORE_API VavCoreResult vavcore_get_metadata(VavCorePlayer* player, VavCoreVid
metadata->frame_rate = player->impl->metadata.frame_rate;
metadata->duration_seconds = player->impl->metadata.duration_seconds;
metadata->total_frames = player->impl->metadata.total_frames;
metadata->codec_name = "AV1"; // Static for now
metadata->codec_name = "AV1";
return VAVCORE_SUCCESS;
}
@@ -648,7 +560,7 @@ VAVCORE_API double vavcore_get_current_time(VavCorePlayer* player) {
VAVCORE_API int vavcore_is_end_of_file(VavCorePlayer* player) {
if (!player || !player->impl || !player->impl->isOpen || !player->impl->fileReader) {
return 1; // Consider as EOF if invalid
return 1;
}
return player->impl->fileReader->IsEndOfFile() ? 1 : 0;
}
@@ -669,10 +581,6 @@ VAVCORE_API VavCoreResult vavcore_set_quality_mode(VavCorePlayer* player, VavCor
if (player->impl->isOpen && player->impl->decoder) {
// TODO: Implement adaptive quality support in VavCore v1.1
// auto adaptiveDecoder = dynamic_cast<VavCore::IAdaptiveVideoDecoder*>(player->impl->decoder.get());
// if (adaptiveDecoder) {
// adaptiveDecoder->SetQualityMode(to_adaptive_quality_mode(mode));
// }
}
return VAVCORE_SUCCESS;
@@ -688,19 +596,8 @@ VAVCORE_API VavCoreResult vavcore_get_performance_metrics(VavCorePlayer* player,
}
// TODO: Implement adaptive performance metrics in VavCore v1.1
// auto adaptiveDecoder = dynamic_cast<VavCore::IAdaptiveVideoDecoder*>(player->decoder.get());
// if (adaptiveDecoder) {
// auto perfMetrics = adaptiveDecoder->GetPerformanceMetrics();
// metrics->average_decode_time_ms = perfMetrics.average_decode_time_ms;
// metrics->current_fps = perfMetrics.current_fps;
// metrics->frames_decoded = perfMetrics.frames_decoded;
// metrics->frames_dropped = perfMetrics.frames_dropped;
// metrics->current_quality_level = static_cast<int>(adaptiveDecoder->GetCurrentQualityLevel());
// } else {
// Default metrics for non-adaptive decoders
memset(metrics, 0, sizeof(VavCorePerformanceMetrics));
metrics->current_quality_level = 4; // ULTRA quality
// }
memset(metrics, 0, sizeof(VavCorePerformanceMetrics));
metrics->current_quality_level = 4;
return VAVCORE_SUCCESS;
}
@@ -720,12 +617,6 @@ VAVCORE_API VavCoreResult vavcore_enable_adaptive_quality(VavCorePlayer* player,
}
// TODO: Implement adaptive mode control in VavCore v1.1
// auto adaptiveDecoder = dynamic_cast<VavCore::IAdaptiveVideoDecoder*>(player->decoder.get());
// if (adaptiveDecoder) {
// adaptiveDecoder->EnableAdaptiveMode(enable != 0);
// return VAVCORE_SUCCESS;
// }
return VAVCORE_ERROR_NOT_SUPPORTED;
}
@@ -735,12 +626,6 @@ VAVCORE_API VavCoreResult vavcore_set_target_framerate(VavCorePlayer* player, do
}
// TODO: Implement adaptive framerate control in VavCore v1.1
// auto adaptiveDecoder = dynamic_cast<VavCore::IAdaptiveVideoDecoder*>(player->decoder.get());
// if (adaptiveDecoder) {
// adaptiveDecoder->SetTargetFrameRate(fps);
// return VAVCORE_SUCCESS;
// }
return VAVCORE_ERROR_NOT_SUPPORTED;
}
@@ -756,39 +641,83 @@ VAVCORE_API void vavcore_free_frame(VavCoreVideoFrame* frame) {
frame->v_plane = nullptr;
}
// D3D Surface decoding API functions
// Android-specific Vulkan Surface decoding API functions
VAVCORE_API int vavcore_supports_surface_type(VavCorePlayer* player, VavCoreSurfaceType type) {
if (!player || !player->impl || !player->impl->decoder) {
return 0; // false
return 0;
}
return player->impl->decoder->SupportsSurfaceType(type) ? 1 : 0;
}
VAVCORE_API VavCoreResult vavcore_set_d3d_device(VavCorePlayer* player, void* d3d_device, VavCoreSurfaceType type) {
if (!player || !player->impl || !d3d_device) {
VAVCORE_API VavCoreResult vavcore_set_vulkan_device(VavCorePlayer* player, void* vk_device, void* vk_instance, void* vk_physical_device) {
if (!player || !player->impl) {
return VAVCORE_ERROR_INVALID_PARAM;
}
// Always store for pending use (in case decoder is recreated)
player->impl->pendingD3DDevice = d3d_device;
player->impl->pendingD3DSurfaceType = type;
// If decoder exists, also apply immediately
if (player->impl->decoder) {
bool success = player->impl->decoder->SetD3DDevice(d3d_device, type);
if (success) {
LOGF_DEBUG("[vavcore_set_d3d_device] D3D device applied to existing decoder");
return VAVCORE_SUCCESS;
} else {
LOGF_ERROR("[vavcore_set_d3d_device] WARNING: Failed to apply D3D device to existing decoder (will retry on next decode)");
// Still return success - device is stored for later use
return VAVCORE_SUCCESS;
}
} else {
LOGF_DEBUG("[vavcore_set_d3d_device] Decoder not created yet, D3D device stored for later");
return VAVCORE_SUCCESS;
if (!vk_device || !vk_instance || !vk_physical_device) {
LOGF_ERROR("[vavcore_set_vulkan_device] Invalid Vulkan handles");
return VAVCORE_ERROR_INVALID_PARAM;
}
LOGF_INFO("[vavcore_set_vulkan_device] Registering Vulkan device with VavCore");
LOGF_DEBUG("[vavcore_set_vulkan_device] VkDevice: %p, VkInstance: %p, VkPhysicalDevice: %p",
vk_device, vk_instance, vk_physical_device);
// Store Vulkan device for later use (when decoder is created)
player->impl->vulkan_device = vk_device;
player->impl->vulkan_instance = vk_instance;
player->impl->vulkan_physical_device = vk_physical_device;
player->impl->has_vulkan_device = true;
LOGF_INFO("[vavcore_set_vulkan_device] Vulkan device registered successfully - will be passed to decoder during initialization");
return VAVCORE_SUCCESS;
}
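The Vulkan handles are only stored here; they are applied to the decoder inside vavcore_open_file(), so registration must happen before the file is opened. A caller-side sketch of that ordering (the app-side handle names and the file path are illustrative):
vavcore_initialize();
VavCorePlayer* player = vavcore_create_player();
vavcore_set_android_java_vm(app_java_vm);     // only needed if JNI_OnLoad did not run in this process
vavcore_set_vulkan_device(player, vk_device, vk_instance, vk_physical_device);
vavcore_open_file(player, "/sdcard/Movies/clip.webm");   // decoder is created here and picks up the stored handles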
VAVCORE_API VavCoreResult vavcore_set_current_frame_fence(VavCorePlayer* player, void* vk_fence) {
if (!player || !player->impl) {
return VAVCORE_ERROR_INVALID_PARAM;
}
if (!player->impl->decoder) {
LOGF_WARNING("[vavcore_set_current_frame_fence] Decoder not initialized yet");
return VAVCORE_ERROR_INIT_FAILED;
}
LOGF_DEBUG("[vavcore_set_current_frame_fence] Setting VkFence=%p for current frame", vk_fence);
bool success = player->impl->decoder->SetCurrentFrameFence(vk_fence);
if (success) {
LOGF_DEBUG("[vavcore_set_current_frame_fence] VkFence set successfully");
return VAVCORE_SUCCESS;
} else {
LOGF_ERROR("[vavcore_set_current_frame_fence] Failed to set VkFence (decoder may not support this operation)");
return VAVCORE_ERROR_NOT_SUPPORTED;
}
}
VAVCORE_API VavCoreResult vavcore_set_android_java_vm(void* java_vm) {
if (!java_vm) {
LOGF_ERROR("[vavcore_set_android_java_vm] Invalid JavaVM pointer");
return VAVCORE_ERROR_INVALID_PARAM;
}
std::lock_guard<std::mutex> lock(g_mutex);
g_android_java_vm = static_cast<JavaVM*>(java_vm);
LOGF_INFO("[vavcore_set_android_java_vm] JavaVM registered successfully: %p", java_vm);
return VAVCORE_SUCCESS;
}
VAVCORE_API VavCoreResult vavcore_set_android_surface(VavCorePlayer* player, void* native_window) {
if (!player || !player->impl) {
return VAVCORE_ERROR_INVALID_PARAM;
}
// TODO: Implement Android surface registration
LOGF_DEBUG("[vavcore_set_android_surface] Android surface registration requested (NOT YET IMPLEMENTED)");
return VAVCORE_SUCCESS;
}
VAVCORE_API void* vavcore_get_sync_fence(VavCorePlayer* player) {
@@ -810,7 +739,6 @@ VAVCORE_API VavCoreResult vavcore_decode_to_surface(VavCorePlayer* player,
return VAVCORE_ERROR_INIT_FAILED;
}
// Check if decoder supports the requested surface type
if (!player->impl->decoder->SupportsSurfaceType(target_type)) {
return VAVCORE_ERROR_NOT_SUPPORTED;
}
@@ -820,12 +748,9 @@ VAVCORE_API VavCoreResult vavcore_decode_to_surface(VavCorePlayer* player,
size_t packet_size = 0;
VideoPacket packet;
// Drain mode: target_surface==NULL means flush buffered frames
if (target_surface == nullptr) {
LOGF_DEBUG("[vavcore_decode_to_surface] Drain mode - flushing buffered frames");
// packet_data remains NULL to signal drain mode
} else {
// Normal mode: Read next packet from file
if (!player->impl->fileReader->ReadNextPacket(packet)) {
if (player->impl->fileReader->IsEndOfFile()) {
LOGF_DEBUG("[vavcore_decode_to_surface] End of file reached");
@@ -837,7 +762,6 @@ VAVCORE_API VavCoreResult vavcore_decode_to_surface(VavCorePlayer* player,
packet_size = packet.size;
}
// Decode to surface (handles both normal and drain modes)
VideoFrame videoFrame;
bool success = player->impl->decoder->DecodeToSurface(
packet_data, packet_size,
@@ -845,51 +769,28 @@ VAVCORE_API VavCoreResult vavcore_decode_to_surface(VavCorePlayer* player,
videoFrame
);
// Interpret result based on videoFrame content
if (!success) {
// Decoder returned false
if (videoFrame.width == 0 && videoFrame.height == 0) {
// No frame output yet - priming/buffering phase
LOGF_DEBUG("[vavcore_decode_to_surface] Packet accepted, no output yet (priming)");
return VAVCORE_PACKET_ACCEPTED;
} else {
// Actual decode error
LOGF_ERROR("[vavcore_decode_to_surface] Decode failed");
return VAVCORE_ERROR_DECODE_FAILED;
}
}
// Success - check if we actually got a frame
if (videoFrame.width == 0 || videoFrame.height == 0 || !videoFrame.is_valid) {
// Decoder returned true but no valid frame (should not happen, but handle it)
LOGF_WARNING("[vavcore_decode_to_surface] Decoder returned success but frame invalid");
return VAVCORE_PACKET_ACCEPTED;
}
// Convert to VavCoreVideoFrame with surface data
frame->width = videoFrame.width;
frame->height = videoFrame.height;
frame->timestamp_us = static_cast<uint64_t>(videoFrame.timestamp_seconds * 1000000.0);
frame->frame_number = videoFrame.frame_index;
frame->surface_type = target_type;
// Set surface-specific data
switch (target_type) {
case VAVCORE_SURFACE_D3D11_TEXTURE:
frame->surface_data.d3d11.d3d11_texture = target_surface;
break;
case VAVCORE_SURFACE_D3D12_RESOURCE:
frame->surface_data.d3d12.d3d12_resource = target_surface;
// CRITICAL FIX: Copy CUDA fence value for D3D12-CUDA synchronization
// This fence value is set by NVDECAV1Decoder after CUDA kernel completion
frame->surface_data.d3d12.fence_value = videoFrame.sync_fence_value;
break;
case VAVCORE_SURFACE_CUDA_DEVICE:
// CUDA device pointer will be set by decoder implementation
break;
case VAVCORE_SURFACE_AMF_SURFACE:
frame->surface_data.amf.amf_surface = target_surface;
break;
case VAVCORE_SURFACE_VULKAN_IMAGE:
// Android MediaCodec → ImageReader → VkImage pipeline
frame->surface_data.vulkan.vk_image = videoFrame.surface_data.vulkan.vk_image;
@@ -904,7 +805,6 @@ VAVCORE_API VavCoreResult vavcore_decode_to_surface(VavCorePlayer* player,
break;
case VAVCORE_SURFACE_CPU:
default:
// Fallback to CPU decoding
copy_frame_data(videoFrame, frame);
break;
}
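A minimal sketch of a caller loop driven by the return codes above; acquire_target_image, present_frame, and frame_fence are app-side placeholders, not part of this API:
for (;;) {
    VavCoreVideoFrame frame = {};
    VavCoreResult rc = vavcore_decode_to_surface(player, VAVCORE_SURFACE_VULKAN_IMAGE,
                                                 acquire_target_image(), &frame);
    if (rc == VAVCORE_PACKET_ACCEPTED) continue;   // decoder is still priming, feed the next packet
    if (rc == VAVCORE_END_OF_STREAM) break;        // optionally call again with a NULL target to drain
    if (rc != VAVCORE_SUCCESS) break;              // decode error
    present_frame(&frame);                         // app-side render/present step
    vavcore_set_current_frame_fence(player, frame_fence);   // lets the decoder release the Image after GPU work
}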
@@ -936,19 +836,16 @@ VAVCORE_API VavCoreResult vavcore_set_debug_options(VavCorePlayer* player, const
return VAVCORE_ERROR_INVALID_PARAM;
}
// Copy debug options
player->impl->debugOptions.enable_first_frame_debug = options->enable_first_frame_debug;
player->impl->debugOptions.first_frame_debug_count = options->first_frame_debug_count;
player->impl->debugOptions.enable_rgba_debug = options->enable_rgba_debug;
player->impl->debugOptions.rgba_debug_count = options->rgba_debug_count;
// Copy debug output path if provided
if (options->debug_output_path) {
player->impl->debugOutputPath = options->debug_output_path;
player->impl->debugOptions.debug_output_path = player->impl->debugOutputPath.c_str();
}
// Pass debug options to decoder if it exists
if (player->impl->decoder) {
player->impl->decoder->SetDebugOptions(&player->impl->debugOptions);
}
@@ -965,7 +862,6 @@ VAVCORE_API VavCoreResult vavcore_get_debug_options(VavCorePlayer* player, VavCo
return VAVCORE_ERROR_INVALID_PARAM;
}
// Copy current debug options to output
*options = player->impl->debugOptions;
return VAVCORE_SUCCESS;
@@ -979,130 +875,25 @@ VAVCORE_API int vavcore_get_pending_decode_count(VavCorePlayer* player) {
return player->impl->decoder->GetPendingDecodeCount();
}
// Android GPU Surface API stubs (Phase 1-3 implementation)
// TODO: Implement Vulkan device registration for MediaCodec → Vulkan pipeline
VAVCORE_API VavCoreResult vavcore_set_vulkan_device(VavCorePlayer* player, void* vk_device, void* vk_instance, void* vk_physical_device) {
if (!player || !player->impl) {
return VAVCORE_ERROR_INVALID_PARAM;
}
if (!vk_device || !vk_instance || !vk_physical_device) {
LOGF_ERROR("[vavcore_set_vulkan_device] Invalid Vulkan handles");
return VAVCORE_ERROR_INVALID_PARAM;
}
LOGF_INFO("[vavcore_set_vulkan_device] Registering Vulkan device with VavCore");
LOGF_DEBUG("[vavcore_set_vulkan_device] VkDevice: %p, VkInstance: %p, VkPhysicalDevice: %p",
vk_device, vk_instance, vk_physical_device);
#ifdef ANDROID
// Store Vulkan device for later use (when decoder is created)
player->impl->vulkan_device = vk_device;
player->impl->vulkan_instance = vk_instance;
player->impl->vulkan_physical_device = vk_physical_device;
player->impl->has_vulkan_device = true;
LOGF_INFO("[vavcore_set_vulkan_device] Vulkan device registered successfully - will be passed to decoder during initialization");
// Note: Vulkan device will be passed to MediaCodec surface manager during decoder initialization
// in vavcore_open_file() after the decoder is created
return VAVCORE_SUCCESS;
#else
LOGF_WARNING("[vavcore_set_vulkan_device] Vulkan device registration not supported on this platform");
// Stub implementations for unsupported GPU APIs on Android
VAVCORE_API VavCoreResult vavcore_set_d3d_device(VavCorePlayer* player, void* d3d_device, VavCoreSurfaceType type) {
LOGF_WARNING("[vavcore_set_d3d_device] D3D device registration not supported on Android");
return VAVCORE_ERROR_NOT_SUPPORTED;
#endif
}
VAVCORE_API VavCoreResult vavcore_set_current_frame_fence(VavCorePlayer* player, void* vk_fence) {
if (!player || !player->impl) {
return VAVCORE_ERROR_INVALID_PARAM;
}
#ifdef ANDROID
// Check if decoder exists and is MediaCodec-based
if (!player->impl->decoder) {
LOGF_WARNING("[vavcore_set_current_frame_fence] Decoder not initialized yet");
return VAVCORE_ERROR_INIT_FAILED;
}
// Pass fence to decoder's surface manager
// This allows GPU-synchronized Image release in the next frame
LOGF_DEBUG("[vavcore_set_current_frame_fence] Setting VkFence=%p for current frame", vk_fence);
// Get decoder's surface manager and set the fence
// The fence will be waited on before releasing the Image in the next ProcessAsyncOutputFrame call
bool success = player->impl->decoder->SetCurrentFrameFence(vk_fence);
if (success) {
LOGF_DEBUG("[vavcore_set_current_frame_fence] VkFence set successfully");
return VAVCORE_SUCCESS;
} else {
LOGF_ERROR("[vavcore_set_current_frame_fence] Failed to set VkFence (decoder may not support this operation)");
return VAVCORE_ERROR_NOT_SUPPORTED;
}
#else
LOGF_WARNING("[vavcore_set_current_frame_fence] VkFence setting not supported on this platform");
return VAVCORE_ERROR_NOT_SUPPORTED;
#endif
}
VAVCORE_API VavCoreResult vavcore_set_android_java_vm(void* java_vm) {
#ifdef ANDROID
if (!java_vm) {
LOGF_ERROR("[vavcore_set_android_java_vm] Invalid JavaVM pointer");
return VAVCORE_ERROR_INVALID_PARAM;
}
std::lock_guard<std::mutex> lock(g_mutex);
g_android_java_vm = static_cast<JavaVM*>(java_vm);
LOGF_INFO("[vavcore_set_android_java_vm] JavaVM registered successfully: %p", java_vm);
return VAVCORE_SUCCESS;
#else
LOGF_WARNING("[vavcore_set_android_java_vm] JavaVM registration not supported on this platform");
return VAVCORE_ERROR_NOT_SUPPORTED;
#endif
}
VAVCORE_API VavCoreResult vavcore_set_android_surface(VavCorePlayer* player, void* native_window) {
if (!player || !player->impl) {
return VAVCORE_ERROR_INVALID_PARAM;
}
// TODO: Implement Android surface registration
LOGF_DEBUG("[vavcore_set_android_surface] Android surface registration requested (NOT YET IMPLEMENTED)");
return VAVCORE_SUCCESS;
}
VAVCORE_API VavCoreResult vavcore_set_opengl_es_context(VavCorePlayer* player, void* egl_context) {
if (!player || !player->impl) {
return VAVCORE_ERROR_INVALID_PARAM;
}
// TODO: Implement OpenGL ES context registration
LOGF_DEBUG("[vavcore_set_opengl_es_context] OpenGL ES context registration requested (NOT YET IMPLEMENTED)");
return VAVCORE_SUCCESS;
}
VAVCORE_API VavCoreResult vavcore_set_opengl_context(VavCorePlayer* player, void* gl_context) {
if (!player || !player->impl) {
return VAVCORE_ERROR_INVALID_PARAM;
}
// TODO: Implement OpenGL context registration
LOGF_DEBUG("[vavcore_set_opengl_context] OpenGL context registration requested (NOT YET IMPLEMENTED)");
return VAVCORE_SUCCESS;
}
VAVCORE_API VavCoreResult vavcore_set_metal_device(VavCorePlayer* player, void* metal_device) {
if (!player || !player->impl) {
return VAVCORE_ERROR_INVALID_PARAM;
}
// TODO: Implement Metal device registration
LOGF_DEBUG("[vavcore_set_metal_device] Metal device registration requested (NOT YET IMPLEMENTED)");
return VAVCORE_SUCCESS;
LOGF_WARNING("[vavcore_set_metal_device] Metal device registration not supported on Android");
return VAVCORE_ERROR_NOT_SUPPORTED;
}
VAVCORE_API VavCoreResult vavcore_convert_yuv_to_rgb(
@@ -1114,9 +905,10 @@ VAVCORE_API VavCoreResult vavcore_convert_yuv_to_rgb(
return VAVCORE_ERROR_INVALID_PARAM;
}
// TODO: Implement YUV to RGB conversion
LOGF_DEBUG("[vavcore_convert_yuv_to_rgb] YUV→RGB conversion requested (NOT YET IMPLEMENTED)");
return VAVCORE_ERROR_NOT_SUPPORTED;
}
} // extern "C"
} // extern "C"
#endif // ANDROID

View File

@@ -0,0 +1,783 @@
// VavCore_Apple_Full.cpp - Complete iOS/macOS implementation of VavCore C API
// All platform-specific code consolidated in this file
#include "pch.h"
#include "VavCore/VavCore.h"
#include "Common/VideoTypes.h"
#include "Common/AdaptiveTypes.h"
#include "Decoder/IVideoDecoder.h"
#include "Decoder/VideoDecoderFactory.h"
#include "FileIO/WebMFileReader.h"
#include "Common/VavCoreLogger.h"
#include <memory>
#include <string>
#include <mutex>
#include <cstring>
#if defined(__APPLE__)
#include <TargetConditionals.h>
// Use VavCore namespace internally
using namespace VavCore;
// Global state
static bool g_initialized = false;
static std::mutex g_mutex;
// Error message mapping
static const char* get_error_message(VavCoreResult result) {
switch (result) {
case VAVCORE_SUCCESS: return "Success";
case VAVCORE_ERROR_INIT_FAILED: return "Initialization failed";
case VAVCORE_ERROR_INVALID_PARAM: return "Invalid parameter";
case VAVCORE_ERROR_FILE_NOT_FOUND: return "File not found";
case VAVCORE_ERROR_DECODE_FAILED: return "Decode failed";
case VAVCORE_ERROR_OUT_OF_MEMORY: return "Out of memory";
case VAVCORE_ERROR_NOT_SUPPORTED: return "Not supported";
default: return "Unknown error";
}
}
// Forward declaration of implementation class
class VavCorePlayerImpl;
// C-compatible player structure (pimpl pattern)
struct VavCorePlayer {
VavCorePlayerImpl* impl;
};
// C++ implementation class (hidden from C API)
class VavCorePlayerImpl {
public:
std::unique_ptr<IVideoDecoder> decoder;
std::unique_ptr<WebMFileReader> fileReader;
VideoMetadata metadata;
VavCoreQualityMode qualityMode;
VavCoreDecoderType decoderType;
bool isOpen;
uint64_t currentFrame;
double currentTimeSeconds;
std::string decoderName;
// Store Metal device before decoder creation (Apple-specific)
void* metal_device;
bool has_metal_device;
// Debug options
VavCoreDebugOptions debugOptions;
std::string debugOutputPath;
VavCorePlayerImpl()
: qualityMode(VAVCORE_QUALITY_CONSERVATIVE)
, decoderType(VAVCORE_DECODER_AUTO)
, isOpen(false)
, currentFrame(0)
, currentTimeSeconds(0.0)
, decoderName("unknown")
, metal_device(nullptr)
, has_metal_device(false)
, debugOutputPath("./debug_output")
{
fileReader = std::make_unique<WebMFileReader>();
debugOptions.enable_first_frame_debug = false;
debugOptions.first_frame_debug_count = 3;
debugOptions.enable_rgba_debug = false;
debugOptions.rgba_debug_count = 1;
debugOptions.debug_output_path = debugOutputPath.c_str();
}
~VavCorePlayerImpl() {
close_internal();
}
void close_internal() {
if (decoder) {
decoder->Cleanup();
decoder.reset();
}
if (fileReader) {
fileReader->CloseFile();
}
isOpen = false;
currentFrame = 0;
currentTimeSeconds = 0.0;
}
};
// Convert internal quality mode to adaptive quality mode
static VavCore::AdaptiveQualityMode to_adaptive_quality_mode(VavCoreQualityMode mode) {
switch (mode) {
case VAVCORE_QUALITY_CONSERVATIVE: return VavCore::AdaptiveQualityMode::CONSERVATIVE;
case VAVCORE_QUALITY_FAST: return VavCore::AdaptiveQualityMode::FAST;
case VAVCORE_QUALITY_ULTRA_FAST: return VavCore::AdaptiveQualityMode::ULTRA_FAST;
default: return VavCore::AdaptiveQualityMode::CONSERVATIVE;
}
}
// Convert internal decoder type to factory decoder type
static VavCore::VideoDecoderFactory::DecoderType to_decoder_type(VavCoreDecoderType type) {
switch (type) {
case VAVCORE_DECODER_AUTO: return VavCore::VideoDecoderFactory::DecoderType::AUTO;
case VAVCORE_DECODER_DAV1D: return VavCore::VideoDecoderFactory::DecoderType::DAV1D;
case VAVCORE_DECODER_VIDEO_TOOLBOX: return VavCore::VideoDecoderFactory::DecoderType::VIDEO_TOOLBOX;
default: return VavCore::VideoDecoderFactory::DecoderType::AUTO;
}
}
// Convert VideoFrame to VavCoreVideoFrame
static void copy_frame_data(const VideoFrame& src, VavCoreVideoFrame* dst) {
if (!dst) return;
dst->width = src.width;
dst->height = src.height;
dst->timestamp_us = static_cast<uint64_t>(src.timestamp_seconds * 1000000.0);
dst->frame_number = src.frame_index;
dst->surface_type = VAVCORE_SURFACE_CPU;
size_t y_size = src.y_size;
size_t u_size = src.u_size;
size_t v_size = src.v_size;
dst->y_plane = static_cast<uint8_t*>(malloc(y_size));
dst->u_plane = static_cast<uint8_t*>(malloc(u_size));
dst->v_plane = static_cast<uint8_t*>(malloc(v_size));
if (dst->y_plane && dst->u_plane && dst->v_plane &&
src.y_plane && src.u_plane && src.v_plane) {
memcpy(dst->y_plane, src.y_plane.get(), y_size);
memcpy(dst->u_plane, src.u_plane.get(), u_size);
memcpy(dst->v_plane, src.v_plane.get(), v_size);
}
dst->y_stride = src.y_stride;
dst->u_stride = src.u_stride;
dst->v_stride = src.v_stride;
dst->surface_data.cpu.planes[0] = dst->y_plane;
dst->surface_data.cpu.planes[1] = dst->u_plane;
dst->surface_data.cpu.planes[2] = dst->v_plane;
dst->surface_data.cpu.strides[0] = dst->y_stride;
dst->surface_data.cpu.strides[1] = dst->u_stride;
dst->surface_data.cpu.strides[2] = dst->v_stride;
}
// ============================================================================
// C API Implementation - Apple Platform (iOS/macOS)
// ============================================================================
extern "C" {
VAVCORE_API VavCoreResult vavcore_initialize(void) {
std::lock_guard<std::mutex> lock(g_mutex);
if (g_initialized) {
return VAVCORE_SUCCESS;
}
// Apple platforms don't need special initialization like DllMain or JNI
// Hardware decoders (VideoToolbox) are automatically available
VideoDecoderFactory::InitializeFactory();
g_initialized = true;
LOGF_INFO("[VavCore Apple] Initialization complete");
return VAVCORE_SUCCESS;
}
VAVCORE_API void vavcore_cleanup(void) {
std::lock_guard<std::mutex> lock(g_mutex);
if (g_initialized) {
g_initialized = false;
LOGF_INFO("[VavCore Apple] Cleanup complete");
}
}
VAVCORE_API const char* vavcore_get_version_string(void) {
static std::string version = std::to_string(VAVCORE_VERSION_MAJOR) + "." +
std::to_string(VAVCORE_VERSION_MINOR) + "." +
std::to_string(VAVCORE_VERSION_PATCH);
return version.c_str();
}
VAVCORE_API const char* vavcore_get_error_string(VavCoreResult result) {
return get_error_message(result);
}
VAVCORE_API VavCorePlayer* vavcore_create_player(void) {
if (!g_initialized) {
return nullptr;
}
try {
VavCorePlayer* player = new VavCorePlayer();
player->impl = new VavCorePlayerImpl();
if (!player->impl->fileReader) {
delete player->impl;
delete player;
return nullptr;
}
return player;
} catch (const std::exception& e) {
return nullptr;
} catch (...) {
return nullptr;
}
}
VAVCORE_API void vavcore_destroy_player(VavCorePlayer* player) {
if (player) {
delete player->impl;
delete player;
}
}
VAVCORE_API VavCoreResult vavcore_open_file(VavCorePlayer* player, const char* filepath) {
if (!player || !player->impl || !filepath) {
return VAVCORE_ERROR_INVALID_PARAM;
}
if (!player->impl->fileReader) {
return VAVCORE_ERROR_INIT_FAILED;
}
try {
LOGF_DEBUG("[VavCore] Opening file: %s", filepath);
if (!player->impl->fileReader->OpenFile(filepath)) {
LOGF_DEBUG("[VavCore] OpenFile() returned false");
return VAVCORE_ERROR_FILE_NOT_FOUND;
}
LOGF_DEBUG("[VavCore] OpenFile() succeeded");
auto tracks = player->impl->fileReader->GetVideoTracks();
LOGF_DEBUG("[VavCore] Found %zu video tracks", tracks.size());
bool foundAV1 = false;
for (const auto& track : tracks) {
LOGF_DEBUG("[VavCore] Track %lld: codec_type=%d (AV1=%d)",
track.track_number, (int)track.codec_type, (int)VideoCodecType::AV1);
if (track.codec_type == VideoCodecType::AV1) {
LOGF_DEBUG("[VavCore] AV1 track found! Selecting track...");
if (player->impl->fileReader->SelectVideoTrack(track.track_number)) {
LOGF_DEBUG("[VavCore] Track selected successfully");
player->impl->metadata = player->impl->fileReader->GetVideoMetadata();
foundAV1 = true;
break;
}
}
}
if (!foundAV1) {
LOGF_ERROR("[VavCore] No AV1 tracks found - returning VAVCORE_ERROR_NOT_SUPPORTED");
player->impl->fileReader->CloseFile();
return VAVCORE_ERROR_NOT_SUPPORTED;
}
LOGF_DEBUG("[VavCore] Creating decoder...");
auto decoderType = to_decoder_type(player->impl->decoderType);
LOGF_DEBUG("[VavCore] Decoder type requested: %d (0=AUTO, 4=DAV1D, 7=VIDEOTOOLBOX)",
static_cast<int>(decoderType));
player->impl->decoder = VavCore::VideoDecoderFactory::CreateDecoder(VavCore::VideoCodecType::AV1, decoderType);
if (!player->impl->decoder) {
LOGF_ERROR("[VavCore] No suitable decoder found (VideoDecoderFactory returned NULL)");
player->impl->fileReader->CloseFile();
return VAVCORE_ERROR_NO_DECODER;
}
LOGF_DEBUG("[VavCore] Decoder created successfully.");
// Apple-specific: Apply pending Metal device if it was set before decoder creation
if (player->impl->has_metal_device) {
LOGF_DEBUG("[VavCore] Applying pending Metal device before decoder initialization...");
LOGF_DEBUG("[VavCore] Metal device: %p", player->impl->metal_device);
// TODO: Implement SetMetalDevice in decoder interface
// bool metal_success = player->impl->decoder->SetMetalDevice(player->impl->metal_device);
}
LOGF_DEBUG("[VavCore] Initializing decoder...");
if (!player->impl->decoder->Initialize(player->impl->metadata)) {
LOGF_ERROR("[VavCore] Decoder initialization failed (unsupported format or hardware unavailable)");
player->impl->decoder.reset();
player->impl->fileReader->CloseFile();
return VAVCORE_ERROR_DECODER_UNAVAILABLE;
}
LOGF_DEBUG("[VavCore] Decoder initialized successfully!");
player->impl->decoder->SetDebugOptions(&player->impl->debugOptions);
LOGF_DEBUG("[VavCore] Debug options applied to decoder");
player->impl->decoderName = player->impl->decoder->GetCodecName();
if (!player->impl->fileReader || !player->impl->decoder) {
if (player->impl->fileReader) {
player->impl->fileReader->CloseFile();
}
if (player->impl->decoder) {
player->impl->decoder.reset();
}
return VAVCORE_ERROR_INIT_FAILED;
}
player->impl->isOpen = true;
player->impl->currentFrame = 0;
player->impl->currentTimeSeconds = 0.0;
return VAVCORE_SUCCESS;
} catch (const std::exception& e) {
return VAVCORE_ERROR_INIT_FAILED;
} catch (...) {
return VAVCORE_ERROR_INIT_FAILED;
}
}
VAVCORE_API VavCoreResult vavcore_close_file(VavCorePlayer* player) {
if (!player) {
return VAVCORE_ERROR_INVALID_PARAM;
}
player->impl->close_internal();
return VAVCORE_SUCCESS;
}
VAVCORE_API int vavcore_is_open(VavCorePlayer* player) {
return (player && player->impl && player->impl->isOpen) ? 1 : 0;
}
VAVCORE_API VavCoreResult vavcore_decode_next_frame(VavCorePlayer* player, VavCoreVideoFrame* frame) {
if (!player || !player->impl || !frame || !player->impl->isOpen) {
return VAVCORE_ERROR_INVALID_PARAM;
}
try {
VideoPacket packet;
if (!player->impl->fileReader->ReadNextPacket(packet)) {
return VAVCORE_END_OF_STREAM;
}
VideoFrame videoFrame;
if (!player->impl->decoder->DecodeFrame(packet, videoFrame)) {
return VAVCORE_ERROR_DECODE_FAILED;
}
copy_frame_data(videoFrame, frame);
player->impl->currentFrame++;
player->impl->currentTimeSeconds = packet.timestamp_seconds;
return VAVCORE_SUCCESS;
} catch (...) {
return VAVCORE_ERROR_DECODE_FAILED;
}
}
VAVCORE_API VavCoreResult vavcore_seek_to_time(VavCorePlayer* player, double time_seconds) {
if (!player || !player->impl || !player->impl->isOpen) {
return VAVCORE_ERROR_INVALID_PARAM;
}
try {
if (player->impl->fileReader->SeekToTime(time_seconds)) {
player->impl->currentTimeSeconds = time_seconds;
return VAVCORE_SUCCESS;
}
return VAVCORE_ERROR_DECODE_FAILED;
} catch (...) {
return VAVCORE_ERROR_DECODE_FAILED;
}
}
VAVCORE_API VavCoreResult vavcore_seek_to_frame(VavCorePlayer* player, uint64_t frame_number) {
if (!player || !player->impl || !player->impl->isOpen) {
return VAVCORE_ERROR_INVALID_PARAM;
}
try {
if (player->impl->fileReader->SeekToFrame(frame_number)) {
player->impl->currentFrame = frame_number;
player->impl->currentTimeSeconds = static_cast<double>(frame_number) / player->impl->metadata.frame_rate;
return VAVCORE_SUCCESS;
}
return VAVCORE_ERROR_DECODE_FAILED;
} catch (...) {
return VAVCORE_ERROR_DECODE_FAILED;
}
}
VAVCORE_API VavCoreResult vavcore_test_function(void) {
return VAVCORE_SUCCESS;
}
VAVCORE_API VavCoreResult vavcore_reset(VavCorePlayer* player) {
if (!player || !player->impl) {
return VAVCORE_ERROR_INVALID_PARAM;
}
if (!player->impl->isOpen) {
return VAVCORE_ERROR_INVALID_PARAM;
}
try {
if (player->impl->decoder) {
if (!player->impl->decoder->Reset()) {
// Continue anyway - not fatal
}
} else {
return VAVCORE_ERROR_INIT_FAILED;
}
if (player->impl->fileReader) {
if (!player->impl->fileReader->Reset()) {
// Continue anyway - not fatal
}
} else {
return VAVCORE_ERROR_INIT_FAILED;
}
player->impl->currentFrame = 0;
player->impl->currentTimeSeconds = 0.0;
return VAVCORE_SUCCESS;
} catch (const std::exception& e) {
return VAVCORE_ERROR_INIT_FAILED;
} catch (...) {
return VAVCORE_ERROR_INIT_FAILED;
}
}
VAVCORE_API VavCoreResult vavcore_get_metadata(VavCorePlayer* player, VavCoreVideoMetadata* metadata) {
if (!player || !player->impl || !metadata || !player->impl->isOpen) {
return VAVCORE_ERROR_INVALID_PARAM;
}
metadata->width = player->impl->metadata.width;
metadata->height = player->impl->metadata.height;
metadata->frame_rate = player->impl->metadata.frame_rate;
metadata->duration_seconds = player->impl->metadata.duration_seconds;
metadata->total_frames = player->impl->metadata.total_frames;
metadata->codec_name = "AV1";
return VAVCORE_SUCCESS;
}
VAVCORE_API uint64_t vavcore_get_current_frame(VavCorePlayer* player) {
return (player && player->impl) ? player->impl->currentFrame : 0;
}
VAVCORE_API double vavcore_get_current_time(VavCorePlayer* player) {
return (player && player->impl) ? player->impl->currentTimeSeconds : 0.0;
}
VAVCORE_API int vavcore_is_end_of_file(VavCorePlayer* player) {
if (!player || !player->impl || !player->impl->isOpen || !player->impl->fileReader) {
return 1;
}
return player->impl->fileReader->IsEndOfFile() ? 1 : 0;
}
VAVCORE_API const char* vavcore_get_codec_name(VavCorePlayer* player) {
if (!player || !player->impl) {
return "unknown";
}
return player->impl->decoderName.c_str();
}
VAVCORE_API VavCoreResult vavcore_set_quality_mode(VavCorePlayer* player, VavCoreQualityMode mode) {
if (!player || !player->impl) {
return VAVCORE_ERROR_INVALID_PARAM;
}
player->impl->qualityMode = mode;
if (player->impl->isOpen && player->impl->decoder) {
// TODO: Implement adaptive quality support in VavCore v1.1
}
return VAVCORE_SUCCESS;
}
VAVCORE_API VavCoreQualityMode vavcore_get_quality_mode(VavCorePlayer* player) {
return (player && player->impl) ? player->impl->qualityMode : VAVCORE_QUALITY_CONSERVATIVE;
}
VAVCORE_API VavCoreResult vavcore_get_performance_metrics(VavCorePlayer* player, VavCorePerformanceMetrics* metrics) {
if (!player || !player->impl || !metrics || !player->impl->isOpen) {
return VAVCORE_ERROR_INVALID_PARAM;
}
// TODO: Implement adaptive performance metrics in VavCore v1.1
memset(metrics, 0, sizeof(VavCorePerformanceMetrics));
metrics->current_quality_level = 4;
return VAVCORE_SUCCESS;
}
VAVCORE_API VavCoreResult vavcore_set_decoder_type(VavCorePlayer* player, VavCoreDecoderType type) {
if (!player || !player->impl) {
return VAVCORE_ERROR_INVALID_PARAM;
}
player->impl->decoderType = type;
return VAVCORE_SUCCESS;
}
VAVCORE_API VavCoreResult vavcore_enable_adaptive_quality(VavCorePlayer* player, int enable) {
if (!player || !player->impl || !player->impl->isOpen) {
return VAVCORE_ERROR_INVALID_PARAM;
}
// TODO: Implement adaptive mode control in VavCore v1.1
return VAVCORE_ERROR_NOT_SUPPORTED;
}
VAVCORE_API VavCoreResult vavcore_set_target_framerate(VavCorePlayer* player, double fps) {
if (!player || !player->impl || !player->impl->isOpen || fps <= 0.0) {
return VAVCORE_ERROR_INVALID_PARAM;
}
// TODO: Implement adaptive framerate control in VavCore v1.1
return VAVCORE_ERROR_NOT_SUPPORTED;
}
VAVCORE_API void vavcore_free_frame(VavCoreVideoFrame* frame) {
if (!frame) return;
free(frame->y_plane);
free(frame->u_plane);
free(frame->v_plane);
frame->y_plane = nullptr;
frame->u_plane = nullptr;
frame->v_plane = nullptr;
}
// Apple-specific Metal Surface decoding API functions
VAVCORE_API int vavcore_supports_surface_type(VavCorePlayer* player, VavCoreSurfaceType type) {
if (!player || !player->impl || !player->impl->decoder) {
return 0;
}
return player->impl->decoder->SupportsSurfaceType(type) ? 1 : 0;
}
VAVCORE_API VavCoreResult vavcore_set_metal_device(VavCorePlayer* player, void* metal_device) {
if (!player || !player->impl) {
return VAVCORE_ERROR_INVALID_PARAM;
}
player->impl->metal_device = metal_device;
player->impl->has_metal_device = true;
LOGF_DEBUG("[vavcore_set_metal_device] Metal device registration requested (NOT YET IMPLEMENTED)");
return VAVCORE_SUCCESS;
}
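As on Android, the device handle is only cached until the decoder is created, so it should be registered before vavcore_open_file(). A caller-side sketch (metal_device is an id<MTLDevice> bridged to void* by the app; names and path are illustrative):
vavcore_initialize();
VavCorePlayer* player = vavcore_create_player();
vavcore_set_metal_device(player, metal_device);   // cached in the impl; decoder hookup is still a TODO above
vavcore_open_file(player, "/path/to/clip.webm");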
VAVCORE_API void* vavcore_get_sync_fence(VavCorePlayer* player) {
if (!player || !player->impl || !player->impl->decoder) {
return nullptr;
}
return player->impl->decoder->GetSyncFence();
}
VAVCORE_API VavCoreResult vavcore_decode_to_surface(VavCorePlayer* player,
VavCoreSurfaceType target_type,
void* target_surface,
VavCoreVideoFrame* frame) {
if (!player || !player->impl || !player->impl->decoder || !frame) {
return VAVCORE_ERROR_INVALID_PARAM;
}
if (!player->impl->isOpen) {
return VAVCORE_ERROR_INIT_FAILED;
}
if (!player->impl->decoder->SupportsSurfaceType(target_type)) {
return VAVCORE_ERROR_NOT_SUPPORTED;
}
try {
const uint8_t* packet_data = nullptr;
size_t packet_size = 0;
VideoPacket packet;
if (target_surface == nullptr) {
LOGF_DEBUG("[vavcore_decode_to_surface] Drain mode - flushing buffered frames");
} else {
if (!player->impl->fileReader->ReadNextPacket(packet)) {
if (player->impl->fileReader->IsEndOfFile()) {
LOGF_DEBUG("[vavcore_decode_to_surface] End of file reached");
return VAVCORE_END_OF_STREAM;
}
return VAVCORE_ERROR_DECODE_FAILED;
}
packet_data = packet.data.get();
packet_size = packet.size;
}
VideoFrame videoFrame;
bool success = player->impl->decoder->DecodeToSurface(
packet_data, packet_size,
target_type, target_surface,
videoFrame
);
if (!success) {
if (videoFrame.width == 0 && videoFrame.height == 0) {
LOGF_DEBUG("[vavcore_decode_to_surface] Packet accepted, no output yet (priming)");
return VAVCORE_PACKET_ACCEPTED;
} else {
LOGF_ERROR("[vavcore_decode_to_surface] Decode failed");
return VAVCORE_ERROR_DECODE_FAILED;
}
}
if (videoFrame.width == 0 || videoFrame.height == 0 || !videoFrame.is_valid) {
LOGF_WARNING("[vavcore_decode_to_surface] Decoder returned success but frame invalid");
return VAVCORE_PACKET_ACCEPTED;
}
frame->width = videoFrame.width;
frame->height = videoFrame.height;
frame->timestamp_us = static_cast<uint64_t>(videoFrame.timestamp_seconds * 1000000.0);
frame->frame_number = videoFrame.frame_index;
frame->surface_type = target_type;
switch (target_type) {
case VAVCORE_SURFACE_METAL_TEXTURE:
// TODO: Implement Metal texture support
break;
case VAVCORE_SURFACE_CPU:
default:
copy_frame_data(videoFrame, frame);
break;
}
player->impl->currentFrame++;
player->impl->currentTimeSeconds = videoFrame.timestamp_seconds;
return VAVCORE_SUCCESS;
}
catch (const std::exception&) {
return VAVCORE_ERROR_DECODE_FAILED;
}
}
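// Illustrative caller-side sketch (comments only, not compiled): how the
// surface decode path above is intended to be driven. `target` stands in for a
// hypothetical caller-provided surface handle matching `type`; for the CPU
// path each returned frame owns malloc'd planes and needs vavcore_free_frame().
//
//   VavCoreSurfaceType type = vavcore_get_optimal_surface_type(player);
//   VavCoreVideoFrame frame = {};
//   for (;;) {
//       VavCoreResult r = vavcore_decode_to_surface(player, type, target, &frame);
//       if (r == VAVCORE_PACKET_ACCEPTED) continue;   // decoder still priming
//       if (r == VAVCORE_END_OF_STREAM) break;        // drain below
//       if (r != VAVCORE_SUCCESS) break;              // decode error
//       /* present frame; call vavcore_free_frame(&frame) for CPU surfaces */
//   }
//   // A null target surface switches to drain mode and flushes buffered frames.
//   while (vavcore_decode_to_surface(player, type, nullptr, &frame) == VAVCORE_SUCCESS) {
//       /* present remaining frames */
//   }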
VAVCORE_API VavCoreSurfaceType vavcore_get_optimal_surface_type(VavCorePlayer* player) {
if (!player || !player->impl || !player->impl->decoder) {
return VAVCORE_SURFACE_CPU;
}
return player->impl->decoder->GetOptimalSurfaceType();
}
VAVCORE_API VavCoreResult vavcore_set_debug_options(VavCorePlayer* player, const VavCoreDebugOptions* options) {
if (!player || !player->impl) {
return VAVCORE_ERROR_INVALID_PARAM;
}
if (!options) {
return VAVCORE_ERROR_INVALID_PARAM;
}
player->impl->debugOptions.enable_first_frame_debug = options->enable_first_frame_debug;
player->impl->debugOptions.first_frame_debug_count = options->first_frame_debug_count;
player->impl->debugOptions.enable_rgba_debug = options->enable_rgba_debug;
player->impl->debugOptions.rgba_debug_count = options->rgba_debug_count;
if (options->debug_output_path) {
player->impl->debugOutputPath = options->debug_output_path;
player->impl->debugOptions.debug_output_path = player->impl->debugOutputPath.c_str();
}
if (player->impl->decoder) {
player->impl->decoder->SetDebugOptions(&player->impl->debugOptions);
}
return VAVCORE_SUCCESS;
}
VAVCORE_API VavCoreResult vavcore_get_debug_options(VavCorePlayer* player, VavCoreDebugOptions* options) {
if (!player || !player->impl) {
return VAVCORE_ERROR_INVALID_PARAM;
}
if (!options) {
return VAVCORE_ERROR_INVALID_PARAM;
}
*options = player->impl->debugOptions;
return VAVCORE_SUCCESS;
}
VAVCORE_API int vavcore_get_pending_decode_count(VavCorePlayer* player) {
if (!player || !player->impl || !player->impl->decoder) {
return 0;
}
return player->impl->decoder->GetPendingDecodeCount();
}
// Stub implementations for GPU and platform APIs that are unsupported or not yet implemented on Apple platforms
VAVCORE_API VavCoreResult vavcore_set_d3d_device(VavCorePlayer* player, void* d3d_device, VavCoreSurfaceType type) {
LOGF_WARNING("[vavcore_set_d3d_device] D3D device registration not supported on Apple platforms");
return VAVCORE_ERROR_NOT_SUPPORTED;
}
VAVCORE_API VavCoreResult vavcore_set_vulkan_device(VavCorePlayer* player, void* vk_device, void* vk_instance, void* vk_physical_device) {
LOGF_WARNING("[vavcore_set_vulkan_device] Vulkan device registration not supported on Apple platforms");
return VAVCORE_ERROR_NOT_SUPPORTED;
}
VAVCORE_API VavCoreResult vavcore_set_current_frame_fence(VavCorePlayer* player, void* vk_fence) {
LOGF_WARNING("[vavcore_set_current_frame_fence] VkFence setting not supported on Apple platforms");
return VAVCORE_ERROR_NOT_SUPPORTED;
}
VAVCORE_API VavCoreResult vavcore_set_android_java_vm(void* java_vm) {
LOGF_WARNING("[vavcore_set_android_java_vm] JavaVM registration not supported on Apple platforms");
return VAVCORE_ERROR_NOT_SUPPORTED;
}
VAVCORE_API VavCoreResult vavcore_set_android_surface(VavCorePlayer* player, void* native_window) {
LOGF_WARNING("[vavcore_set_android_surface] Android surface registration not supported on Apple platforms");
return VAVCORE_ERROR_NOT_SUPPORTED;
}
VAVCORE_API VavCoreResult vavcore_set_opengl_es_context(VavCorePlayer* player, void* egl_context) {
LOGF_DEBUG("[vavcore_set_opengl_es_context] OpenGL ES context registration requested (NOT YET IMPLEMENTED)");
return VAVCORE_SUCCESS;
}
VAVCORE_API VavCoreResult vavcore_set_opengl_context(VavCorePlayer* player, void* gl_context) {
LOGF_DEBUG("[vavcore_set_opengl_context] OpenGL context registration requested (NOT YET IMPLEMENTED)");
return VAVCORE_SUCCESS;
}
VAVCORE_API VavCoreResult vavcore_convert_yuv_to_rgb(
VavCoreVideoFrame* yuv_frame,
uint8_t* rgb_buffer,
int rgb_stride)
{
if (!yuv_frame || !rgb_buffer) {
return VAVCORE_ERROR_INVALID_PARAM;
}
LOGF_DEBUG("[vavcore_convert_yuv_to_rgb] YUV→RGB conversion requested (NOT YET IMPLEMENTED)");
return VAVCORE_ERROR_NOT_SUPPORTED;
}
} // extern "C"
#endif // __APPLE__

View File

@@ -0,0 +1,819 @@
// VavCore_Windows.cpp - Complete Windows implementation of VavCore C API
// All platform-specific code consolidated in this file
#include "pch.h"
#include "VavCore/VavCore.h"
#include "Common/VideoTypes.h"
#include "Common/AdaptiveTypes.h"
#include "Decoder/IVideoDecoder.h"
#include "Decoder/VideoDecoderFactory.h"
#include "FileIO/WebMFileReader.h"
#include "Common/VavCoreLogger.h"
#include <memory>
#include <string>
#include <mutex>
#include <cstring>
// Use VavCore namespace internally
using namespace VavCore;
// Forward declarations for DllMain-based initialization
extern "C" bool PerformSafeDllInitialization();
extern "C" bool IsDllReadyForInitialization();
// Global state
static bool g_initialized = false;
static std::mutex g_mutex;
// Error message mapping
static const char* get_error_message(VavCoreResult result) {
switch (result) {
case VAVCORE_SUCCESS: return "Success";
case VAVCORE_END_OF_STREAM: return "End of stream";
case VAVCORE_PACKET_ACCEPTED: return "Packet accepted, no frame output yet";
case VAVCORE_ERROR_INIT_FAILED: return "Initialization failed";
case VAVCORE_ERROR_INVALID_PARAM: return "Invalid parameter";
case VAVCORE_ERROR_FILE_NOT_FOUND: return "File not found";
case VAVCORE_ERROR_DECODE_FAILED: return "Decode failed";
case VAVCORE_ERROR_OUT_OF_MEMORY: return "Out of memory";
case VAVCORE_ERROR_NOT_SUPPORTED: return "Not supported";
case VAVCORE_ERROR_NO_DECODER: return "No suitable decoder found";
case VAVCORE_ERROR_DECODER_UNAVAILABLE: return "Decoder unavailable";
default: return "Unknown error";
}
}
// Forward declaration of implementation class
class VavCorePlayerImpl;
// C-compatible player structure (pimpl pattern)
struct VavCorePlayer {
VavCorePlayerImpl* impl;
};
// C++ implementation class (hidden from C API)
class VavCorePlayerImpl {
public:
std::unique_ptr<IVideoDecoder> decoder;
std::unique_ptr<WebMFileReader> fileReader;
VideoMetadata metadata;
VavCoreQualityMode qualityMode;
VavCoreDecoderType decoderType;
bool isOpen;
uint64_t currentFrame;
double currentTimeSeconds;
std::string decoderName;
// Store D3D device before decoder creation
void* pendingD3DDevice;
VavCoreSurfaceType pendingD3DSurfaceType;
// Debug options
VavCoreDebugOptions debugOptions;
std::string debugOutputPath;
VavCorePlayerImpl()
: qualityMode(VAVCORE_QUALITY_CONSERVATIVE)
, decoderType(VAVCORE_DECODER_AUTO)
, isOpen(false)
, currentFrame(0)
, currentTimeSeconds(0.0)
, decoderName("unknown")
, pendingD3DDevice(nullptr)
, pendingD3DSurfaceType(VAVCORE_SURFACE_CPU)
, debugOutputPath("./debug_output")
{
fileReader = std::make_unique<WebMFileReader>();
debugOptions.enable_first_frame_debug = false;
debugOptions.first_frame_debug_count = 3;
debugOptions.enable_rgba_debug = false;
debugOptions.rgba_debug_count = 1;
debugOptions.debug_output_path = debugOutputPath.c_str();
}
~VavCorePlayerImpl() {
close_internal();
}
void close_internal() {
if (decoder) {
decoder->Cleanup();
decoder.reset();
}
if (fileReader) {
fileReader->CloseFile();
}
isOpen = false;
currentFrame = 0;
currentTimeSeconds = 0.0;
}
};
// Convert internal quality mode to adaptive quality mode
static VavCore::AdaptiveQualityMode to_adaptive_quality_mode(VavCoreQualityMode mode) {
switch (mode) {
case VAVCORE_QUALITY_CONSERVATIVE: return VavCore::AdaptiveQualityMode::CONSERVATIVE;
case VAVCORE_QUALITY_FAST: return VavCore::AdaptiveQualityMode::FAST;
case VAVCORE_QUALITY_ULTRA_FAST: return VavCore::AdaptiveQualityMode::ULTRA_FAST;
default: return VavCore::AdaptiveQualityMode::CONSERVATIVE;
}
}
// Convert internal decoder type to factory decoder type
static VavCore::VideoDecoderFactory::DecoderType to_decoder_type(VavCoreDecoderType type) {
switch (type) {
case VAVCORE_DECODER_AUTO: return VavCore::VideoDecoderFactory::DecoderType::AUTO;
case VAVCORE_DECODER_DAV1D: return VavCore::VideoDecoderFactory::DecoderType::DAV1D;
case VAVCORE_DECODER_NVDEC: return VavCore::VideoDecoderFactory::DecoderType::NVDEC;
case VAVCORE_DECODER_MEDIA_FOUNDATION: return VavCore::VideoDecoderFactory::DecoderType::MEDIA_FOUNDATION;
case VAVCORE_DECODER_VPL: return VavCore::VideoDecoderFactory::DecoderType::VPL;
case VAVCORE_DECODER_AMF: return VavCore::VideoDecoderFactory::DecoderType::AMF;
default: return VavCore::VideoDecoderFactory::DecoderType::AUTO;
}
}
// Convert VideoFrame to VavCoreVideoFrame (CPU path).
// The destination planes are heap-allocated here and must be released by the caller via vavcore_free_frame().
static void copy_frame_data(const VideoFrame& src, VavCoreVideoFrame* dst) {
if (!dst) return;
dst->width = src.width;
dst->height = src.height;
dst->timestamp_us = static_cast<uint64_t>(src.timestamp_seconds * 1000000.0);
dst->frame_number = src.frame_index;
dst->surface_type = VAVCORE_SURFACE_CPU;
size_t y_size = src.y_size;
size_t u_size = src.u_size;
size_t v_size = src.v_size;
dst->y_plane = static_cast<uint8_t*>(malloc(y_size));
dst->u_plane = static_cast<uint8_t*>(malloc(u_size));
dst->v_plane = static_cast<uint8_t*>(malloc(v_size));
if (dst->y_plane && dst->u_plane && dst->v_plane &&
src.y_plane && src.u_plane && src.v_plane) {
memcpy(dst->y_plane, src.y_plane.get(), y_size);
memcpy(dst->u_plane, src.u_plane.get(), u_size);
memcpy(dst->v_plane, src.v_plane.get(), v_size);
} else {
// Allocation failed or source planes missing: release any partial
// allocations so the caller never sees uninitialized plane data.
free(dst->y_plane);
free(dst->u_plane);
free(dst->v_plane);
dst->y_plane = nullptr;
dst->u_plane = nullptr;
dst->v_plane = nullptr;
}
dst->y_stride = src.y_stride;
dst->u_stride = src.u_stride;
dst->v_stride = src.v_stride;
dst->surface_data.cpu.planes[0] = dst->y_plane;
dst->surface_data.cpu.planes[1] = dst->u_plane;
dst->surface_data.cpu.planes[2] = dst->v_plane;
dst->surface_data.cpu.strides[0] = dst->y_stride;
dst->surface_data.cpu.strides[1] = dst->u_stride;
dst->surface_data.cpu.strides[2] = dst->v_stride;
}
// ============================================================================
// C API Implementation - Windows Platform
// ============================================================================
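// Illustrative lifecycle sketch (comments only, not compiled): the intended
// call order for this C API, using only functions defined in this file;
// "sample.webm" is a placeholder path.
//
//   if (vavcore_initialize() == VAVCORE_SUCCESS) {
//       VavCorePlayer* player = vavcore_create_player();
//       if (player && vavcore_open_file(player, "sample.webm") == VAVCORE_SUCCESS) {
//           VavCoreVideoFrame frame = {};
//           while (vavcore_decode_next_frame(player, &frame) == VAVCORE_SUCCESS) {
//               /* consume frame.y_plane / u_plane / v_plane ... */
//               vavcore_free_frame(&frame);   // releases the malloc'd planes
//           }
//           vavcore_close_file(player);
//       }
//       vavcore_destroy_player(player);
//       vavcore_cleanup();
//   }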
extern "C" {
VAVCORE_API VavCoreResult vavcore_initialize(void) {
std::lock_guard<std::mutex> lock(g_mutex);
if (g_initialized) {
return VAVCORE_SUCCESS;
}
// Windows-specific: Check if DLL is ready for safe initialization
if (!IsDllReadyForInitialization()) {
LOGF_ERROR("[VavCore Windows] DLL not ready for initialization");
return VAVCORE_ERROR_INIT_FAILED;
}
// Windows-specific: Perform safe DLL-level initialization
if (!PerformSafeDllInitialization()) {
LOGF_ERROR("[VavCore Windows] DLL initialization failed");
return VAVCORE_ERROR_INIT_FAILED;
}
// Initialize decoder factory (Windows uses static initialization for decoder registration)
VideoDecoderFactory::InitializeFactory();
g_initialized = true;
LOGF_INFO("[VavCore Windows] Initialization complete");
return VAVCORE_SUCCESS;
}
VAVCORE_API void vavcore_cleanup(void) {
std::lock_guard<std::mutex> lock(g_mutex);
if (g_initialized) {
g_initialized = false;
LOGF_INFO("[VavCore Windows] Cleanup complete");
}
}
VAVCORE_API const char* vavcore_get_version_string(void) {
static std::string version = std::to_string(VAVCORE_VERSION_MAJOR) + "." +
std::to_string(VAVCORE_VERSION_MINOR) + "." +
std::to_string(VAVCORE_VERSION_PATCH);
return version.c_str();
}
VAVCORE_API const char* vavcore_get_error_string(VavCoreResult result) {
return get_error_message(result);
}
VAVCORE_API VavCorePlayer* vavcore_create_player(void) {
if (!g_initialized) {
return nullptr;
}
try {
VavCorePlayer* player = new VavCorePlayer();
player->impl = new VavCorePlayerImpl();
if (!player->impl->fileReader) {
delete player->impl;
delete player;
return nullptr;
}
return player;
} catch (const std::exception&) {
return nullptr;
} catch (...) {
return nullptr;
}
}
VAVCORE_API void vavcore_destroy_player(VavCorePlayer* player) {
if (player) {
delete player->impl;
delete player;
}
}
VAVCORE_API VavCoreResult vavcore_open_file(VavCorePlayer* player, const char* filepath) {
if (!player || !player->impl || !filepath) {
return VAVCORE_ERROR_INVALID_PARAM;
}
if (!player->impl->fileReader) {
return VAVCORE_ERROR_INIT_FAILED;
}
try {
LOGF_DEBUG("[VavCore] Opening file: %s", filepath);
if (!player->impl->fileReader->OpenFile(filepath)) {
LOGF_DEBUG("[VavCore] OpenFile() returned false");
return VAVCORE_ERROR_FILE_NOT_FOUND;
}
LOGF_DEBUG("[VavCore] OpenFile() succeeded");
auto tracks = player->impl->fileReader->GetVideoTracks();
LOGF_DEBUG("[VavCore] Found %zu video tracks", tracks.size());
bool foundAV1 = false;
for (const auto& track : tracks) {
LOGF_DEBUG("[VavCore] Track %lld: codec_type=%d (AV1=%d)",
track.track_number, (int)track.codec_type, (int)VideoCodecType::AV1);
if (track.codec_type == VideoCodecType::AV1) {
LOGF_DEBUG("[VavCore] AV1 track found! Selecting track...");
if (player->impl->fileReader->SelectVideoTrack(track.track_number)) {
LOGF_DEBUG("[VavCore] Track selected successfully");
player->impl->metadata = player->impl->fileReader->GetVideoMetadata();
foundAV1 = true;
break;
}
}
}
if (!foundAV1) {
LOGF_ERROR("[VavCore] No AV1 tracks found - returning VAVCORE_ERROR_NOT_SUPPORTED");
player->impl->fileReader->CloseFile();
return VAVCORE_ERROR_NOT_SUPPORTED;
}
LOGF_DEBUG("[VavCore] Creating decoder...");
auto decoderType = to_decoder_type(player->impl->decoderType);
LOGF_DEBUG("[VavCore] Decoder type requested: %d (0=AUTO, 1=NVDEC, 2=VPL, 3=AMF, 4=DAV1D, 5=MF)",
static_cast<int>(decoderType));
player->impl->decoder = VavCore::VideoDecoderFactory::CreateDecoder(VavCore::VideoCodecType::AV1, decoderType);
if (!player->impl->decoder) {
LOGF_ERROR("[VavCore] No suitable decoder found (VideoDecoderFactory returned NULL)");
player->impl->fileReader->CloseFile();
return VAVCORE_ERROR_NO_DECODER;
}
LOGF_DEBUG("[VavCore] Decoder created successfully.");
// Windows-specific: Apply pending D3D device if it was set before decoder creation
if (player->impl->pendingD3DDevice) {
LOGF_DEBUG("[VavCore] Applying pending D3D device before decoder initialization...");
LOGF_DEBUG("[VavCore] Pending D3D device: %p, Type: %d",
player->impl->pendingD3DDevice, static_cast<int>(player->impl->pendingD3DSurfaceType));
player->impl->decoder->SetD3DDevice(player->impl->pendingD3DDevice, player->impl->pendingD3DSurfaceType);
player->impl->pendingD3DDevice = nullptr;
player->impl->pendingD3DSurfaceType = VAVCORE_SURFACE_CPU;
}
LOGF_DEBUG("[VavCore] Initializing decoder...");
if (!player->impl->decoder->Initialize(player->impl->metadata)) {
LOGF_ERROR("[VavCore] Decoder initialization failed (unsupported format or hardware unavailable)");
player->impl->decoder.reset();
player->impl->fileReader->CloseFile();
return VAVCORE_ERROR_DECODER_UNAVAILABLE;
}
LOGF_DEBUG("[VavCore] Decoder initialized successfully!");
player->impl->decoder->SetDebugOptions(&player->impl->debugOptions);
LOGF_DEBUG("[VavCore] Debug options applied to decoder");
player->impl->decoderName = player->impl->decoder->GetCodecName();
if (!player->impl->fileReader || !player->impl->decoder) {
if (player->impl->fileReader) {
player->impl->fileReader->CloseFile();
}
if (player->impl->decoder) {
player->impl->decoder.reset();
}
return VAVCORE_ERROR_INIT_FAILED;
}
player->impl->isOpen = true;
player->impl->currentFrame = 0;
player->impl->currentTimeSeconds = 0.0;
return VAVCORE_SUCCESS;
} catch (const std::exception&) {
return VAVCORE_ERROR_INIT_FAILED;
} catch (...) {
return VAVCORE_ERROR_INIT_FAILED;
}
}
VAVCORE_API VavCoreResult vavcore_close_file(VavCorePlayer* player) {
if (!player || !player->impl) {
return VAVCORE_ERROR_INVALID_PARAM;
}
player->impl->close_internal();
return VAVCORE_SUCCESS;
}
VAVCORE_API int vavcore_is_open(VavCorePlayer* player) {
return (player && player->impl && player->impl->isOpen) ? 1 : 0;
}
VAVCORE_API VavCoreResult vavcore_decode_next_frame(VavCorePlayer* player, VavCoreVideoFrame* frame) {
if (!player || !player->impl || !frame || !player->impl->isOpen) {
return VAVCORE_ERROR_INVALID_PARAM;
}
try {
VideoPacket packet;
if (!player->impl->fileReader->ReadNextPacket(packet)) {
return VAVCORE_END_OF_STREAM;
}
VideoFrame videoFrame;
if (!player->impl->decoder->DecodeFrame(packet, videoFrame)) {
return VAVCORE_ERROR_DECODE_FAILED;
}
copy_frame_data(videoFrame, frame);
player->impl->currentFrame++;
player->impl->currentTimeSeconds = packet.timestamp_seconds;
return VAVCORE_SUCCESS;
} catch (...) {
return VAVCORE_ERROR_DECODE_FAILED;
}
}
VAVCORE_API VavCoreResult vavcore_seek_to_time(VavCorePlayer* player, double time_seconds) {
if (!player || !player->impl || !player->impl->isOpen) {
return VAVCORE_ERROR_INVALID_PARAM;
}
try {
if (player->impl->fileReader->SeekToTime(time_seconds)) {
player->impl->currentTimeSeconds = time_seconds;
return VAVCORE_SUCCESS;
}
return VAVCORE_ERROR_DECODE_FAILED;
} catch (...) {
return VAVCORE_ERROR_DECODE_FAILED;
}
}
VAVCORE_API VavCoreResult vavcore_seek_to_frame(VavCorePlayer* player, uint64_t frame_number) {
if (!player || !player->impl || !player->impl->isOpen) {
return VAVCORE_ERROR_INVALID_PARAM;
}
try {
if (player->impl->fileReader->SeekToFrame(frame_number)) {
player->impl->currentFrame = frame_number;
if (player->impl->metadata.frame_rate > 0.0) {
player->impl->currentTimeSeconds = static_cast<double>(frame_number) / player->impl->metadata.frame_rate;
}
return VAVCORE_SUCCESS;
}
return VAVCORE_ERROR_DECODE_FAILED;
} catch (...) {
return VAVCORE_ERROR_DECODE_FAILED;
}
}
VAVCORE_API VavCoreResult vavcore_test_function(void) {
return VAVCORE_SUCCESS;
}
VAVCORE_API VavCoreResult vavcore_reset(VavCorePlayer* player) {
if (!player || !player->impl) {
return VAVCORE_ERROR_INVALID_PARAM;
}
if (!player->impl->isOpen) {
return VAVCORE_ERROR_INVALID_PARAM;
}
try {
if (player->impl->decoder) {
if (!player->impl->decoder->Reset()) {
// Continue anyway - not fatal
}
} else {
return VAVCORE_ERROR_INIT_FAILED;
}
if (player->impl->fileReader) {
if (!player->impl->fileReader->Reset()) {
// Continue anyway - not fatal
}
} else {
return VAVCORE_ERROR_INIT_FAILED;
}
player->impl->currentFrame = 0;
player->impl->currentTimeSeconds = 0.0;
return VAVCORE_SUCCESS;
} catch (const std::exception&) {
return VAVCORE_ERROR_INIT_FAILED;
} catch (...) {
return VAVCORE_ERROR_INIT_FAILED;
}
}
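// Illustrative looping-playback sketch (comments only, not compiled): on
// VAVCORE_END_OF_STREAM, vavcore_reset() above resets the decoder and the
// file reader and clears the frame/time counters, so decoding is intended to
// restart from the beginning.
//
//   VavCoreVideoFrame frame = {};
//   for (;;) {
//       VavCoreResult r = vavcore_decode_next_frame(player, &frame);
//       if (r == VAVCORE_SUCCESS) {
//           /* present frame */
//           vavcore_free_frame(&frame);
//       } else if (r == VAVCORE_END_OF_STREAM) {
//           if (vavcore_reset(player) != VAVCORE_SUCCESS) break;
//       } else {
//           break;   // decode error
//       }
//   }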
VAVCORE_API VavCoreResult vavcore_get_metadata(VavCorePlayer* player, VavCoreVideoMetadata* metadata) {
if (!player || !player->impl || !metadata || !player->impl->isOpen) {
return VAVCORE_ERROR_INVALID_PARAM;
}
metadata->width = player->impl->metadata.width;
metadata->height = player->impl->metadata.height;
metadata->frame_rate = player->impl->metadata.frame_rate;
metadata->duration_seconds = player->impl->metadata.duration_seconds;
metadata->total_frames = player->impl->metadata.total_frames;
metadata->codec_name = "AV1";
return VAVCORE_SUCCESS;
}
VAVCORE_API uint64_t vavcore_get_current_frame(VavCorePlayer* player) {
return (player && player->impl) ? player->impl->currentFrame : 0;
}
VAVCORE_API double vavcore_get_current_time(VavCorePlayer* player) {
return (player && player->impl) ? player->impl->currentTimeSeconds : 0.0;
}
VAVCORE_API int vavcore_is_end_of_file(VavCorePlayer* player) {
if (!player || !player->impl || !player->impl->isOpen || !player->impl->fileReader) {
return 1;
}
return player->impl->fileReader->IsEndOfFile() ? 1 : 0;
}
VAVCORE_API const char* vavcore_get_codec_name(VavCorePlayer* player) {
if (!player || !player->impl) {
return "unknown";
}
return player->impl->decoderName.c_str();
}
VAVCORE_API VavCoreResult vavcore_set_quality_mode(VavCorePlayer* player, VavCoreQualityMode mode) {
if (!player || !player->impl) {
return VAVCORE_ERROR_INVALID_PARAM;
}
player->impl->qualityMode = mode;
if (player->impl->isOpen && player->impl->decoder) {
// TODO: Implement adaptive quality support in VavCore v1.1
}
return VAVCORE_SUCCESS;
}
VAVCORE_API VavCoreQualityMode vavcore_get_quality_mode(VavCorePlayer* player) {
return (player && player->impl) ? player->impl->qualityMode : VAVCORE_QUALITY_CONSERVATIVE;
}
VAVCORE_API VavCoreResult vavcore_get_performance_metrics(VavCorePlayer* player, VavCorePerformanceMetrics* metrics) {
if (!player || !player->impl || !metrics || !player->impl->isOpen) {
return VAVCORE_ERROR_INVALID_PARAM;
}
// TODO: Implement adaptive performance metrics in VavCore v1.1
memset(metrics, 0, sizeof(VavCorePerformanceMetrics));
metrics->current_quality_level = 4;
return VAVCORE_SUCCESS;
}
VAVCORE_API VavCoreResult vavcore_set_decoder_type(VavCorePlayer* player, VavCoreDecoderType type) {
if (!player || !player->impl) {
return VAVCORE_ERROR_INVALID_PARAM;
}
player->impl->decoderType = type;
return VAVCORE_SUCCESS;
}
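// Ordering note (sketch, comments only): the selected decoder type only takes
// effect when vavcore_open_file() creates the decoder, so choose it first.
//
//   vavcore_set_decoder_type(player, VAVCORE_DECODER_NVDEC);   // or AUTO, DAV1D, ...
//   vavcore_open_file(player, "sample.webm");                  // decoder created here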
VAVCORE_API VavCoreResult vavcore_enable_adaptive_quality(VavCorePlayer* player, int enable) {
if (!player || !player->impl || !player->impl->isOpen) {
return VAVCORE_ERROR_INVALID_PARAM;
}
// TODO: Implement adaptive mode control in VavCore v1.1
return VAVCORE_ERROR_NOT_SUPPORTED;
}
VAVCORE_API VavCoreResult vavcore_set_target_framerate(VavCorePlayer* player, double fps) {
if (!player || !player->impl || !player->impl->isOpen || fps <= 0.0) {
return VAVCORE_ERROR_INVALID_PARAM;
}
// TODO: Implement adaptive framerate control in VavCore v1.1
return VAVCORE_ERROR_NOT_SUPPORTED;
}
VAVCORE_API void vavcore_free_frame(VavCoreVideoFrame* frame) {
if (!frame) return;
free(frame->y_plane);
free(frame->u_plane);
free(frame->v_plane);
frame->y_plane = nullptr;
frame->u_plane = nullptr;
frame->v_plane = nullptr;
}
// Windows-specific D3D Surface decoding API functions
VAVCORE_API int vavcore_supports_surface_type(VavCorePlayer* player, VavCoreSurfaceType type) {
if (!player || !player->impl || !player->impl->decoder) {
return 0;
}
return player->impl->decoder->SupportsSurfaceType(type) ? 1 : 0;
}
VAVCORE_API VavCoreResult vavcore_set_d3d_device(VavCorePlayer* player, void* d3d_device, VavCoreSurfaceType type) {
if (!player || !player->impl || !d3d_device) {
return VAVCORE_ERROR_INVALID_PARAM;
}
player->impl->pendingD3DDevice = d3d_device;
player->impl->pendingD3DSurfaceType = type;
if (player->impl->decoder) {
bool success = player->impl->decoder->SetD3DDevice(d3d_device, type);
if (success) {
LOGF_DEBUG("[vavcore_set_d3d_device] D3D device applied to existing decoder");
return VAVCORE_SUCCESS;
} else {
LOGF_ERROR("[vavcore_set_d3d_device] WARNING: Failed to apply D3D device to existing decoder (will retry on next decode)");
return VAVCORE_SUCCESS;
}
} else {
LOGF_DEBUG("[vavcore_set_d3d_device] Decoder not created yet, D3D device stored for later");
return VAVCORE_SUCCESS;
}
}
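// Illustrative registration sketch (comments only, not compiled): a D3D device
// registered before vavcore_open_file() is kept as pendingD3DDevice and handed
// to the decoder just before Initialize(). `d3d11_device` is a hypothetical
// caller-owned ID3D11Device*.
//
//   vavcore_set_d3d_device(player, d3d11_device, VAVCORE_SURFACE_D3D11_TEXTURE);
//   vavcore_open_file(player, "sample.webm");   // pending device applied here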
VAVCORE_API void* vavcore_get_sync_fence(VavCorePlayer* player) {
if (!player || !player->impl || !player->impl->decoder) {
return nullptr;
}
return player->impl->decoder->GetSyncFence();
}
VAVCORE_API VavCoreResult vavcore_decode_to_surface(VavCorePlayer* player,
VavCoreSurfaceType target_type,
void* target_surface,
VavCoreVideoFrame* frame) {
if (!player || !player->impl || !player->impl->decoder || !frame) {
return VAVCORE_ERROR_INVALID_PARAM;
}
if (!player->impl->isOpen) {
return VAVCORE_ERROR_INIT_FAILED;
}
if (!player->impl->decoder->SupportsSurfaceType(target_type)) {
return VAVCORE_ERROR_NOT_SUPPORTED;
}
try {
const uint8_t* packet_data = nullptr;
size_t packet_size = 0;
VideoPacket packet;
if (target_surface == nullptr) {
LOGF_DEBUG("[vavcore_decode_to_surface] Drain mode - flushing buffered frames");
} else {
if (!player->impl->fileReader->ReadNextPacket(packet)) {
if (player->impl->fileReader->IsEndOfFile()) {
LOGF_DEBUG("[vavcore_decode_to_surface] End of file reached");
return VAVCORE_END_OF_STREAM;
}
return VAVCORE_ERROR_DECODE_FAILED;
}
packet_data = packet.data.get();
packet_size = packet.size;
}
VideoFrame videoFrame;
bool success = player->impl->decoder->DecodeToSurface(
packet_data, packet_size,
target_type, target_surface,
videoFrame
);
if (!success) {
if (videoFrame.width == 0 && videoFrame.height == 0) {
LOGF_DEBUG("[vavcore_decode_to_surface] Packet accepted, no output yet (priming)");
return VAVCORE_PACKET_ACCEPTED;
} else {
LOGF_ERROR("[vavcore_decode_to_surface] Decode failed");
return VAVCORE_ERROR_DECODE_FAILED;
}
}
if (videoFrame.width == 0 || videoFrame.height == 0 || !videoFrame.is_valid) {
LOGF_WARNING("[vavcore_decode_to_surface] Decoder returned success but frame invalid");
return VAVCORE_PACKET_ACCEPTED;
}
frame->width = videoFrame.width;
frame->height = videoFrame.height;
frame->timestamp_us = static_cast<uint64_t>(videoFrame.timestamp_seconds * 1000000.0);
frame->frame_number = videoFrame.frame_index;
frame->surface_type = target_type;
switch (target_type) {
case VAVCORE_SURFACE_D3D11_TEXTURE:
frame->surface_data.d3d11.d3d11_texture = target_surface;
break;
case VAVCORE_SURFACE_D3D12_RESOURCE:
frame->surface_data.d3d12.d3d12_resource = target_surface;
frame->surface_data.d3d12.fence_value = videoFrame.sync_fence_value;
break;
case VAVCORE_SURFACE_CUDA_DEVICE:
break;
case VAVCORE_SURFACE_AMF_SURFACE:
frame->surface_data.amf.amf_surface = target_surface;
break;
case VAVCORE_SURFACE_CPU:
default:
copy_frame_data(videoFrame, frame);
break;
}
player->impl->currentFrame++;
player->impl->currentTimeSeconds = videoFrame.timestamp_seconds;
return VAVCORE_SUCCESS;
}
catch (const std::exception&) {
return VAVCORE_ERROR_DECODE_FAILED;
}
}
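// Illustrative GPU-surface decode sketch (comments only, not compiled),
// mirroring the return codes handled above. `d3d11_texture` is a hypothetical
// caller-owned ID3D11Texture2D* used as the decode target.
//
//   VavCoreVideoFrame frame = {};
//   if (vavcore_supports_surface_type(player, VAVCORE_SURFACE_D3D11_TEXTURE)) {
//       for (;;) {
//           VavCoreResult r = vavcore_decode_to_surface(player,
//               VAVCORE_SURFACE_D3D11_TEXTURE, d3d11_texture, &frame);
//           if (r == VAVCORE_PACKET_ACCEPTED) continue;   // decoder still priming
//           if (r == VAVCORE_END_OF_STREAM) break;
//           if (r != VAVCORE_SUCCESS) break;
//           /* render; D3D12 targets would additionally wait on
//              frame.surface_data.d3d12.fence_value */
//       }
//       // A null target surface switches to drain mode and flushes buffered frames.
//       while (vavcore_decode_to_surface(player, VAVCORE_SURFACE_D3D11_TEXTURE,
//                                        nullptr, &frame) == VAVCORE_SUCCESS) {
//           /* render remaining frames */
//       }
//   }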
VAVCORE_API VavCoreSurfaceType vavcore_get_optimal_surface_type(VavCorePlayer* player) {
if (!player || !player->impl || !player->impl->decoder) {
return VAVCORE_SURFACE_CPU;
}
return player->impl->decoder->GetOptimalSurfaceType();
}
VAVCORE_API VavCoreResult vavcore_set_debug_options(VavCorePlayer* player, const VavCoreDebugOptions* options) {
if (!player || !player->impl) {
return VAVCORE_ERROR_INVALID_PARAM;
}
if (!options) {
return VAVCORE_ERROR_INVALID_PARAM;
}
player->impl->debugOptions.enable_first_frame_debug = options->enable_first_frame_debug;
player->impl->debugOptions.first_frame_debug_count = options->first_frame_debug_count;
player->impl->debugOptions.enable_rgba_debug = options->enable_rgba_debug;
player->impl->debugOptions.rgba_debug_count = options->rgba_debug_count;
if (options->debug_output_path) {
player->impl->debugOutputPath = options->debug_output_path;
player->impl->debugOptions.debug_output_path = player->impl->debugOutputPath.c_str();
}
if (player->impl->decoder) {
player->impl->decoder->SetDebugOptions(&player->impl->debugOptions);
}
return VAVCORE_SUCCESS;
}
VAVCORE_API VavCoreResult vavcore_get_debug_options(VavCorePlayer* player, VavCoreDebugOptions* options) {
if (!player || !player->impl) {
return VAVCORE_ERROR_INVALID_PARAM;
}
if (!options) {
return VAVCORE_ERROR_INVALID_PARAM;
}
*options = player->impl->debugOptions;
return VAVCORE_SUCCESS;
}
VAVCORE_API int vavcore_get_pending_decode_count(VavCorePlayer* player) {
if (!player || !player->impl || !player->impl->decoder) {
return 0;
}
return player->impl->decoder->GetPendingDecodeCount();
}
// Stub implementations for GPU and platform APIs that are unsupported or not yet implemented on Windows
VAVCORE_API VavCoreResult vavcore_set_vulkan_device(VavCorePlayer* player, void* vk_device, void* vk_instance, void* vk_physical_device) {
LOGF_WARNING("[vavcore_set_vulkan_device] Vulkan device registration not supported on Windows");
return VAVCORE_ERROR_NOT_SUPPORTED;
}
VAVCORE_API VavCoreResult vavcore_set_current_frame_fence(VavCorePlayer* player, void* vk_fence) {
LOGF_WARNING("[vavcore_set_current_frame_fence] VkFence setting not supported on Windows");
return VAVCORE_ERROR_NOT_SUPPORTED;
}
VAVCORE_API VavCoreResult vavcore_set_android_java_vm(void* java_vm) {
LOGF_WARNING("[vavcore_set_android_java_vm] JavaVM registration not supported on Windows");
return VAVCORE_ERROR_NOT_SUPPORTED;
}
VAVCORE_API VavCoreResult vavcore_set_android_surface(VavCorePlayer* player, void* native_window) {
LOGF_WARNING("[vavcore_set_android_surface] Android surface registration not supported on Windows");
return VAVCORE_ERROR_NOT_SUPPORTED;
}
VAVCORE_API VavCoreResult vavcore_set_opengl_es_context(VavCorePlayer* player, void* egl_context) {
LOGF_DEBUG("[vavcore_set_opengl_es_context] OpenGL ES context registration requested (NOT YET IMPLEMENTED)");
return VAVCORE_SUCCESS;
}
VAVCORE_API VavCoreResult vavcore_set_opengl_context(VavCorePlayer* player, void* gl_context) {
LOGF_DEBUG("[vavcore_set_opengl_context] OpenGL context registration requested (NOT YET IMPLEMENTED)");
return VAVCORE_SUCCESS;
}
VAVCORE_API VavCoreResult vavcore_set_metal_device(VavCorePlayer* player, void* metal_device) {
LOGF_WARNING("[vavcore_set_metal_device] Metal device registration not supported on Windows");
return VAVCORE_ERROR_NOT_SUPPORTED;
}
VAVCORE_API VavCoreResult vavcore_convert_yuv_to_rgb(
VavCoreVideoFrame* yuv_frame,
uint8_t* rgb_buffer,
int rgb_stride)
{
if (!yuv_frame || !rgb_buffer) {
return VAVCORE_ERROR_INVALID_PARAM;
}
LOGF_DEBUG("[vavcore_convert_yuv_to_rgb] YUV→RGB conversion requested (NOT YET IMPLEMENTED)");
return VAVCORE_ERROR_NOT_SUPPORTED;
}
} // extern "C"