// File: video-orchestra/android/jni/vp9_decoder.cpp
// Hardware VP9 decoding for multiple streams via the Android NDK MediaCodec API.
#include "vp9_decoder.h"
#include <android/log.h>
#include <media/NdkMediaError.h>
#include <cstring>
#define LOG_TAG "VP9Orchestra"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO, LOG_TAG, __VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)
// Global decoder instance.
// Singleton owned by the C interface below: created lazily in vp9_initialize()
// and destroyed in vp9_release(). Not thread-safe — callers must serialize
// access (TODO confirm the caller guarantees this).
AndroidVP9Decoder* g_decoder = nullptr;
// Construct the decoder with every stream slot reset to an empty,
// uninitialized state. No resources are acquired until initialize().
AndroidVP9Decoder::AndroidVP9Decoder()
    : global_initialized(false), egl_display(EGL_NO_DISPLAY), egl_context(EGL_NO_CONTEXT) {
    for (auto& stream : streams) {
        stream.codec = nullptr;
        stream.surface = nullptr;
        stream.texture_id = 0;
        stream.initialized = false;
        stream.width = 0;
        stream.height = 0;
    }
}
// Destructor: tears down all codecs, surfaces, textures and the EGL display
// via release(). Safe to run on a never-initialized instance (release() checks
// each resource before touching it).
AndroidVP9Decoder::~AndroidVP9Decoder() {
release();
}
// Initialize the decoder: verify VP9 decode support, bring up EGL, then create
// a surface texture and a started MediaCodec for each of the MAX_VP9_STREAMS
// slots at the given resolution.
//
// @param width   Expected frame width in pixels (applied to every stream).
// @param height  Expected frame height in pixels (applied to every stream).
// @return true on success (or if already initialized); false on any failure,
//         in which case all partially-acquired resources are released.
bool AndroidVP9Decoder::initialize(int width, int height) {
    LOGI("Initializing VP9 decoder with resolution %dx%d", width, height);
    if (global_initialized) {
        LOGI("VP9 decoder already initialized");
        return true;
    }

    // Check decode support before allocating anything.
    if (!isHardwareDecodingSupported()) {
        LOGE("VP9 hardware decoding not supported on this device");
        return false;
    }

    // Initialize EGL context for texture management.
    egl_display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    if (egl_display == EGL_NO_DISPLAY) {
        LOGE("Failed to get EGL display");
        return false;
    }
    if (!eglInitialize(egl_display, nullptr, nullptr)) {
        LOGE("Failed to initialize EGL");
        // Drop the handle so a later release() does not call eglTerminate()
        // on a display that was never initialized.
        egl_display = EGL_NO_DISPLAY;
        return false;
    }

    // Initialize all decoder streams.
    for (int i = 0; i < MAX_VP9_STREAMS; i++) {
        // BUG FIX: create the output surface/texture BEFORE configuring the
        // codec — initializeMediaCodec() passes streams[i].surface to
        // AMediaCodec_configure(), so the surface must already exist here.
        // (The original order configured every codec with a null surface.)
        if (!createSurfaceTexture(i)) {
            LOGE("Failed to create surface texture for stream %d", i);
            release();
            return false;
        }
        if (!initializeMediaCodec(i, width, height)) {
            LOGE("Failed to initialize MediaCodec for stream %d", i);
            release();
            return false;
        }
        streams[i].width = width;
        streams[i].height = height;
        streams[i].initialized = true;
    }

    global_initialized = true;
    LOGI("VP9 decoder initialization completed successfully");
    return true;
}
bool AndroidVP9Decoder::initializeMediaCodec(int stream_id, int width, int height) {
if (stream_id < 0 || stream_id >= MAX_VP9_STREAMS) {
return false;
}
// Create MediaCodec for VP9
streams[stream_id].codec = AMediaCodec_createDecoderByType("video/x-vnd.on2.vp9");
if (!streams[stream_id].codec) {
LOGE("Failed to create VP9 MediaCodec for stream %d", stream_id);
return false;
}
// Create media format
AMediaFormat* format = AMediaFormat_new();
AMediaFormat_setString(format, AMEDIAFORMAT_KEY_MIME, "video/x-vnd.on2.vp9");
AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_WIDTH, width);
AMediaFormat_setInt32(format, AMEDIAFORMAT_KEY_HEIGHT, height);
// Configure the codec
media_status_t status = AMediaCodec_configure(
streams[stream_id].codec,
format,
streams[stream_id].surface,
nullptr,
0
);
AMediaFormat_delete(format);
if (status != AMEDIA_OK) {
LOGE("Failed to configure MediaCodec for stream %d: %d", stream_id, status);
AMediaCodec_delete(streams[stream_id].codec);
streams[stream_id].codec = nullptr;
return false;
}
// Start the codec
status = AMediaCodec_start(streams[stream_id].codec);
if (status != AMEDIA_OK) {
LOGE("Failed to start MediaCodec for stream %d: %d", stream_id, status);
AMediaCodec_delete(streams[stream_id].codec);
streams[stream_id].codec = nullptr;
return false;
}
LOGI("MediaCodec initialized successfully for stream %d", stream_id);
return true;
}
bool AndroidVP9Decoder::createSurfaceTexture(int stream_id) {
if (stream_id < 0 || stream_id >= MAX_VP9_STREAMS) {
return false;
}
// Generate OpenGL texture
glGenTextures(1, &streams[stream_id].texture_id);
glBindTexture(GL_TEXTURE_EXTERNAL_OES, streams[stream_id].texture_id);
// Set texture parameters
glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
// Create surface from texture (requires additional JNI calls in real implementation)
// This is a simplified version - actual implementation would need SurfaceTexture creation
// streams[stream_id].surface = ANativeWindow_fromSurface(env, surface);
LOGI("Surface texture created for stream %d with texture ID %u",
stream_id, streams[stream_id].texture_id);
return true;
}
bool AndroidVP9Decoder::decodeFrame(const uint8_t* data, size_t data_size, int stream_id) {
if (!global_initialized || stream_id < 0 || stream_id >= MAX_VP9_STREAMS ||
!streams[stream_id].initialized || !data || data_size == 0) {
return false;
}
AMediaCodec* codec = streams[stream_id].codec;
if (!codec) {
return false;
}
// Get input buffer
ssize_t input_buffer_index = AMediaCodec_dequeueInputBuffer(codec, 10000); // 10ms timeout
if (input_buffer_index < 0) {
LOGE("Failed to dequeue input buffer for stream %d", stream_id);
return false;
}
// Get input buffer pointer
size_t input_buffer_size;
uint8_t* input_buffer = AMediaCodec_getInputBuffer(codec, input_buffer_index, &input_buffer_size);
if (!input_buffer || input_buffer_size < data_size) {
LOGE("Input buffer too small for stream %d", stream_id);
return false;
}
// Copy frame data to input buffer
memcpy(input_buffer, data, data_size);
// Queue input buffer
media_status_t status = AMediaCodec_queueInputBuffer(
codec,
input_buffer_index,
0,
data_size,
0, // presentation time (not used for single frames)
0 // flags
);
if (status != AMEDIA_OK) {
LOGE("Failed to queue input buffer for stream %d: %d", stream_id, status);
return false;
}
// Try to get output buffer
AMediaCodecBufferInfo buffer_info;
ssize_t output_buffer_index = AMediaCodec_dequeueOutputBuffer(codec, &buffer_info, 10000);
if (output_buffer_index >= 0) {
// Frame decoded successfully, release output buffer
AMediaCodec_releaseOutputBuffer(codec, output_buffer_index, true); // true = render to surface
return true;
} else if (output_buffer_index == AMEDIACODEC_INFO_OUTPUT_FORMAT_CHANGED) {
// Output format changed, this is normal
LOGI("Output format changed for stream %d", stream_id);
return true;
} else {
LOGE("Failed to dequeue output buffer for stream %d: %zd", stream_id, output_buffer_index);
return false;
}
}
// Return the GL texture ID bound to the given stream, or 0 when the decoder
// is not initialized or the stream id is out of range / inactive.
uint32_t AndroidVP9Decoder::getTextureId(int stream_id) {
    const bool valid_query =
        global_initialized &&
        stream_id >= 0 && stream_id < MAX_VP9_STREAMS &&
        streams[stream_id].initialized;
    return valid_query ? streams[stream_id].texture_id : 0;
}
bool AndroidVP9Decoder::isHardwareDecodingSupported() {
// Create a temporary codec to check support
AMediaCodec* test_codec = AMediaCodec_createDecoderByType("video/x-vnd.on2.vp9");
if (test_codec) {
AMediaCodec_delete(test_codec);
return true;
}
return false;
}
// Public alias for the internal capability probe.
bool AndroidVP9Decoder::isHardwareSupported() {
    return isHardwareDecodingSupported();
}
// Build a snapshot of the decoder state: initialization flag, capability
// probe result, number of active streams, and per-stream frame counters
// (counters are not yet tracked and always report 0).
vp9_status_t AndroidVP9Decoder::getStatus() {
    vp9_status_t status = {};
    status.is_initialized = global_initialized;
    status.hardware_supported = isHardwareDecodingSupported();
    status.active_streams = 0;
    for (int i = 0; i < MAX_VP9_STREAMS; ++i) {
        status.active_streams += streams[i].initialized ? 1 : 0;
        status.decoded_frames[i] = 0; // TODO: Implement frame counting
    }
    return status;
}
void AndroidVP9Decoder::releaseSurfaceTexture(int stream_id) {
if (stream_id < 0 || stream_id >= MAX_VP9_STREAMS) {
return;
}
if (streams[stream_id].texture_id != 0) {
glDeleteTextures(1, &streams[stream_id].texture_id);
streams[stream_id].texture_id = 0;
}
if (streams[stream_id].surface) {
ANativeWindow_release(streams[stream_id].surface);
streams[stream_id].surface = nullptr;
}
}
void AndroidVP9Decoder::release() {
LOGI("Releasing VP9 decoder");
// Release all streams
for (int i = 0; i < MAX_VP9_STREAMS; i++) {
if (streams[i].codec) {
AMediaCodec_stop(streams[i].codec);
AMediaCodec_delete(streams[i].codec);
streams[i].codec = nullptr;
}
releaseSurfaceTexture(i);
streams[i].initialized = false;
}
// Release EGL resources
if (egl_display != EGL_NO_DISPLAY) {
eglTerminate(egl_display);
egl_display = EGL_NO_DISPLAY;
}
global_initialized = false;
LOGI("VP9 decoder released successfully");
}
// C interface implementation
extern "C" {
// C entry point: lazily construct the singleton decoder and initialize it.
bool vp9_initialize(int width, int height) {
    if (g_decoder == nullptr) {
        g_decoder = new AndroidVP9Decoder();
    }
    return g_decoder->initialize(width, height);
}
// C entry point: decode one frame; false when the decoder does not exist yet.
bool vp9_decode_frame(const uint8_t* data, size_t data_size, int stream_id) {
    return g_decoder != nullptr && g_decoder->decodeFrame(data, data_size, stream_id);
}
// C entry point: texture ID for a stream, or 0 when no decoder exists.
uint32_t vp9_get_texture_id(int stream_id) {
    return g_decoder != nullptr ? g_decoder->getTextureId(stream_id) : 0;
}
bool vp9_is_hardware_supported() {
if (!g_decoder) {
AndroidVP9Decoder temp;
return temp.isHardwareSupported();
}
return g_decoder->isHardwareSupported();
}
// C entry point: decoder status snapshot; all-zero when no decoder exists.
vp9_status_t vp9_get_status() {
    if (g_decoder != nullptr) {
        return g_decoder->getStatus();
    }
    vp9_status_t empty = {};
    return empty;
}
// C entry point: destroy the singleton decoder (deleting a null pointer is a
// well-defined no-op) and clear the global so a later vp9_initialize()
// recreates it.
void vp9_release() {
    delete g_decoder;
    g_decoder = nullptr;
}
// JNI exports for Godot
// JNI shim for Godot: forwards to the C interface.
JNIEXPORT jboolean JNICALL
Java_org_godotengine_vp9orchestra_VP9Orchestra_nativeInitialize(JNIEnv* env, jclass clazz,
                                                                jint width, jint height) {
    return vp9_initialize(width, height) ? JNI_TRUE : JNI_FALSE;
}
// JNI shim for Godot: copy the Java byte[] into native memory and decode it.
// The array elements are released with JNI_ABORT (read-only access, no
// copy-back) regardless of the decode result.
JNIEXPORT jboolean JNICALL
Java_org_godotengine_vp9orchestra_VP9Orchestra_nativeDecodeFrame(JNIEnv* env, jclass clazz,
                                                                 jbyteArray data, jint streamId) {
    if (!data) return JNI_FALSE;
    const jsize data_size = env->GetArrayLength(data);
    if (data_size <= 0) return JNI_FALSE;

    jbyte* data_ptr = env->GetByteArrayElements(data, nullptr);
    // BUG FIX: GetByteArrayElements may return nullptr (e.g. out of memory);
    // the original dereferenced it unchecked, which would crash the VM.
    if (!data_ptr) return JNI_FALSE;

    const bool result = vp9_decode_frame(reinterpret_cast<const uint8_t*>(data_ptr),
                                         static_cast<size_t>(data_size), streamId);
    env->ReleaseByteArrayElements(data, data_ptr, JNI_ABORT);
    return result ? JNI_TRUE : JNI_FALSE;
}
// JNI shim for Godot: returns the stream's GL texture ID (0 when unavailable).
JNIEXPORT jint JNICALL
Java_org_godotengine_vp9orchestra_VP9Orchestra_nativeGetTextureId(JNIEnv* env, jclass clazz,
                                                                  jint streamId) {
    return static_cast<jint>(vp9_get_texture_id(streamId));
}
// JNI shim for Godot: tears down the global decoder and all its resources.
JNIEXPORT void JNICALL
Java_org_godotengine_vp9orchestra_VP9Orchestra_nativeRelease(JNIEnv* env, jclass clazz) {
    vp9_release();
}
} // extern "C"