Fix various bugs in vav2player on Android
@@ -32,7 +32,11 @@ bool VavCoreVulkanBridge::Initialize(ANativeWindow* window, const VideoPlayerCon
        return false;
    }

    // Acquire our own reference to the native window
    // This ensures the window remains valid for the lifetime of this bridge
    m_nativeWindow = window;
    ANativeWindow_acquire(m_nativeWindow);

    m_config = config;

    LOGI("Initializing VavCore-Vulkan bridge...");
@@ -69,8 +73,13 @@ void VavCoreVulkanBridge::Cleanup() {
    CleanupVulkanRenderer();
    CleanupVavCore();

    // Release our reference to the native window
    if (m_nativeWindow != nullptr) {
        ANativeWindow_release(m_nativeWindow);
        m_nativeWindow = nullptr;
    }

    m_initialized = false;
    m_nativeWindow = nullptr;

    LOGI("VavCore-Vulkan bridge cleanup completed");
}
@@ -282,7 +291,12 @@ bool VavCoreVulkanBridge::ProcessNextFrame() {

bool VavCoreVulkanBridge::ConvertVavCoreFrameToVulkan(const VavCoreVideoFrame* vavFrame, DecodedFrameData& frameData) {
    if (!vavFrame || !vavFrame->y_plane) {
        LOGE("Invalid VavCore frame");
        LOGE("Invalid VavCore frame - missing Y plane");
        return false;
    }

    if (!vavFrame->u_plane || !vavFrame->v_plane) {
        LOGE("Invalid VavCore frame - missing U or V plane");
        return false;
    }

@@ -317,11 +331,20 @@ void VavCoreVulkanBridge::UpdateVideoProperties(const VavCoreVideoMetadata* meta
}

void VavCoreVulkanBridge::SetPlaybackState(PlaybackState newState) {
    PlaybackState oldState = m_playbackState;
    m_playbackState = newState;
    PlaybackState oldState;
    StateChangeCallback callback;

    if (m_stateChangeCallback && oldState != newState) {
        m_stateChangeCallback(oldState, newState);
    // Lock only for state change, not for callback execution
    {
        std::lock_guard<std::mutex> lock(m_stateMutex);
        oldState = m_playbackState;
        m_playbackState = newState;
        callback = m_stateChangeCallback; // Copy callback pointer
    }

    // Execute callback OUTSIDE of mutex to avoid deadlock
    if (callback && oldState != newState) {
        callback(oldState, newState);
    }
}
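Note: removed and added lines are shown interleaved above, so for readability here is a consolidated sketch of the reworked SetPlaybackState(). It is illustrative only, reusing the member names from the hunk, not a verbatim copy of the final file:

    void VavCoreVulkanBridge::SetPlaybackState(PlaybackState newState) {
        PlaybackState oldState;
        StateChangeCallback callback;
        {
            // Hold m_stateMutex only while touching shared state
            std::lock_guard<std::mutex> lock(m_stateMutex);
            oldState = m_playbackState;
            m_playbackState = newState;
            callback = m_stateChangeCallback; // copy so it can be invoked after unlocking
        }
        // Invoke the callback outside the mutex, so a callback that calls back into
        // the bridge (e.g. GetPlaybackState(), which now also locks) cannot deadlock
        if (callback && oldState != newState) {
            callback(oldState, newState);
        }
    }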
@@ -569,9 +592,15 @@ void VavCoreVulkanBridge::PlaybackThreadMain() {
}

bool VavCoreVulkanBridge::ShouldContinuePlayback() const {
    std::lock_guard<std::mutex> lock(m_stateMutex);
    return m_shouldContinuePlayback.load() &&
           m_playbackState == PlaybackState::PLAYING &&
           m_fileLoaded;
}

PlaybackState VavCoreVulkanBridge::GetPlaybackState() const {
    std::lock_guard<std::mutex> lock(m_stateMutex);
    return m_playbackState;
}

} // namespace VavCore
@@ -76,7 +76,7 @@ public:
    bool SetQualityMode(VavCoreQualityMode qualityMode);

    // State management
    PlaybackState GetPlaybackState() const { return m_playbackState; }
    PlaybackState GetPlaybackState() const;
    bool IsInitialized() const { return m_initialized; }
    bool IsFileLoaded() const { return m_fileLoaded; }

@@ -163,7 +163,7 @@ private:
    // Continuous playback thread
    std::thread m_playbackThread;
    std::atomic<bool> m_shouldContinuePlayback{false};
    std::mutex m_stateMutex;
    mutable std::mutex m_stateMutex; // mutable to allow locking in const methods
    std::chrono::microseconds m_frameDurationUs{33333}; // Default: 30fps
};

@@ -25,11 +25,17 @@ jobject CreateJavaPerformanceMetrics(JNIEnv* env, const PerformanceMetrics& metr
    jmethodID constructor = env->GetMethodID(metricsClass, "<init>", "(Ljava/lang/String;FIIFIFI)V");
    if (constructor == nullptr) {
        LOGE("Could not find PerformanceMonitor.Metrics constructor");
        env->DeleteLocalRef(metricsClass); // Clean up before returning
        return nullptr;
    }

    // Create decoder type string
    jstring decoderType = env->NewStringUTF("VavCore-Vulkan");
    if (decoderType == nullptr) {
        LOGE("Failed to create decoder type string");
        env->DeleteLocalRef(metricsClass);
        return nullptr;
    }

    // Create metrics object
    jobject metricsObject = env->NewObject(metricsClass,
@@ -44,7 +50,10 @@ jobject CreateJavaPerformanceMetrics(JNIEnv* env, const PerformanceMetrics& metr
                                           metrics.gpuUtilizationPercent,
                                           0.0f); // Additional GPU usage

    // Clean up local references
    env->DeleteLocalRef(decoderType);
    env->DeleteLocalRef(metricsClass);

    return metricsObject;
}

@@ -241,17 +250,23 @@ Java_com_vavcore_player_VulkanVideoView_nativeGetVideoInfo(JNIEnv* env, jobject
    jmethodID constructor = env->GetMethodID(videoInfoClass, "<init>", "(IIJJD)V");
    if (constructor == nullptr) {
        LOGE("Could not find VideoInfo constructor");
        env->DeleteLocalRef(videoInfoClass); // Clean up before returning
        return nullptr;
    }

    // Create VideoInfo object
    return env->NewObject(videoInfoClass,
                          constructor,
                          (jint)player->GetVideoWidth(),
                          (jint)player->GetVideoHeight(),
                          (jlong)player->GetDurationUs(),
                          (jlong)player->GetCurrentPositionUs(),
                          (jdouble)player->GetFrameRate());
    jobject videoInfo = env->NewObject(videoInfoClass,
                                       constructor,
                                       (jint)player->GetVideoWidth(),
                                       (jint)player->GetVideoHeight(),
                                       (jlong)player->GetDurationUs(),
                                       (jlong)player->GetCurrentPositionUs(),
                                       (jdouble)player->GetFrameRate());

    // Clean up local reference
    env->DeleteLocalRef(videoInfoClass);

    return videoInfo;
}

/**
@@ -323,6 +338,18 @@ Java_com_vavcore_player_VulkanVideoView_nativeIsInitialized(JNIEnv* env, jobject
    return player->IsInitialized() ? JNI_TRUE : JNI_FALSE;
}

/**
 * Get current playback position in microseconds
 */
JNIEXPORT jlong JNICALL
Java_com_vavcore_player_VulkanVideoView_nativeGetCurrentPosition(JNIEnv* env, jobject thiz, jlong playerPtr) {
    VavCoreVulkanBridge* player = reinterpret_cast<VavCoreVulkanBridge*>(playerPtr);
    if (player == nullptr) {
        return 0;
    }
    return static_cast<jlong>(player->GetCurrentPositionUs());
}

/**
 * Check if video file is loaded
 */

@@ -61,108 +61,126 @@ bool VulkanVideoRenderer::Initialize(ANativeWindow* window) {
    // Step 1: Create Vulkan instance
    if (!CreateInstance()) {
        LOGE("Failed to create Vulkan instance");
        Cleanup();
        return false;
    }

    // Step 2: Create surface
    if (!CreateSurface()) {
        LOGE("Failed to create Vulkan surface");
        Cleanup();
        return false;
    }

    // Step 3: Pick physical device
    if (!PickPhysicalDevice()) {
        LOGE("Failed to find suitable physical device");
        Cleanup();
        return false;
    }

    // Step 4: Create logical device
    if (!CreateLogicalDevice()) {
        LOGE("Failed to create logical device");
        Cleanup();
        return false;
    }

    // Step 5: Create swapchain
    if (!CreateSwapchain()) {
        LOGE("Failed to create swapchain");
        Cleanup();
        return false;
    }

    // Step 6: Create image views
    if (!CreateImageViews()) {
        LOGE("Failed to create image views");
        Cleanup();
        return false;
    }

    // Step 7: Create render pass
    if (!CreateRenderPass()) {
        LOGE("Failed to create render pass");
        Cleanup();
        return false;
    }

    // Step 8: Create descriptor set layout
    if (!CreateDescriptorSetLayout()) {
        LOGE("Failed to create descriptor set layout");
        Cleanup();
        return false;
    }

    // Step 9: Create graphics pipeline
    if (!CreateGraphicsPipeline()) {
        LOGE("Failed to create graphics pipeline");
        Cleanup();
        return false;
    }

    // Step 10: Create framebuffers
    if (!CreateFramebuffers()) {
        LOGE("Failed to create framebuffers");
        Cleanup();
        return false;
    }

    // Step 11: Create command pool
    if (!CreateCommandPool()) {
        LOGE("Failed to create command pool");
        Cleanup();
        return false;
    }

    // Step 12: Create vertex buffer
    if (!CreateVertexBuffer()) {
        LOGE("Failed to create vertex buffer");
        Cleanup();
        return false;
    }

    // Step 13: Create uniform buffer
    if (!CreateUniformBuffer()) {
        LOGE("Failed to create uniform buffer");
        Cleanup();
        return false;
    }

    // Step 14: Create descriptor pool
    if (!CreateDescriptorPool()) {
        LOGE("Failed to create descriptor pool");
        Cleanup();
        return false;
    }

    // Step 15: Create descriptor sets
    if (!CreateDescriptorSets()) {
        LOGE("Failed to create descriptor sets");
        Cleanup();
        return false;
    }

    // Step 16: Create texture sampler
    if (!CreateTextureSampler()) {
        LOGE("Failed to create texture sampler");
        Cleanup();
        return false;
    }

    // Step 17: Create synchronization objects
    if (!CreateSyncObjects()) {
        LOGE("Failed to create sync objects");
        Cleanup();
        return false;
    }

    // Step 18: Create timestamp query pool
    if (!CreateTimestampQueryPool()) {
        LOGE("Failed to create timestamp query pool");
        Cleanup();
        return false;
    }

@@ -172,9 +190,8 @@ bool VulkanVideoRenderer::Initialize(ANativeWindow* window) {
}

void VulkanVideoRenderer::Cleanup() {
    if (!m_initialized) {
        return;
    }
    // Allow cleanup even if initialization failed partway through
    // Individual resource checks handle null/invalid handles safely

    LOGI("Cleaning up Vulkan renderer...");

@@ -1533,34 +1550,80 @@ QueueFamilyIndices VulkanVideoRenderer::FindQueueFamilies(VkPhysicalDevice devic

bool VulkanVideoRenderer::CreateTextureFromYUV(const uint8_t* yPlane, const uint8_t* uPlane, const uint8_t* vPlane,
                                               uint32_t width, uint32_t height, uint32_t yStride, uint32_t uStride, uint32_t vStride) {
    LOGI("Creating YUV textures (%dx%d)", width, height);

    // Calculate UV dimensions (assuming 4:2:0 format)
    uint32_t uvWidth = width / 2;
    uint32_t uvHeight = height / 2;

    // Create Y texture
    if (!CreateYUVPlaneTexture(yPlane, width, height, yStride, m_yTexture, m_yTextureMemory, m_yTextureView)) {
        LOGE("Failed to create Y texture");
        return false;
    // Check if textures already exist with matching dimensions
    bool needsRecreation = (m_yTexture == VK_NULL_HANDLE) || (m_videoWidth != width) || (m_videoHeight != height);

    if (needsRecreation) {
        LOGI("Creating YUV textures (%dx%d)", width, height);

        // Destroy old textures if they exist
        if (m_yTexture != VK_NULL_HANDLE) {
            vkDestroyImageView(m_device, m_yTextureView, nullptr);
            vkDestroyImage(m_device, m_yTexture, nullptr);
            vkFreeMemory(m_device, m_yTextureMemory, nullptr);
            m_yTexture = VK_NULL_HANDLE;
            m_yTextureView = VK_NULL_HANDLE;
            m_yTextureMemory = VK_NULL_HANDLE;
        }
        if (m_uTexture != VK_NULL_HANDLE) {
            vkDestroyImageView(m_device, m_uTextureView, nullptr);
            vkDestroyImage(m_device, m_uTexture, nullptr);
            vkFreeMemory(m_device, m_uTextureMemory, nullptr);
            m_uTexture = VK_NULL_HANDLE;
            m_uTextureView = VK_NULL_HANDLE;
            m_uTextureMemory = VK_NULL_HANDLE;
        }
        if (m_vTexture != VK_NULL_HANDLE) {
            vkDestroyImageView(m_device, m_vTextureView, nullptr);
            vkDestroyImage(m_device, m_vTexture, nullptr);
            vkFreeMemory(m_device, m_vTextureMemory, nullptr);
            m_vTexture = VK_NULL_HANDLE;
            m_vTextureView = VK_NULL_HANDLE;
            m_vTextureMemory = VK_NULL_HANDLE;
        }

        // Create Y texture
        if (!CreateYUVPlaneTexture(yPlane, width, height, yStride, m_yTexture, m_yTextureMemory, m_yTextureView)) {
            LOGE("Failed to create Y texture");
            return false;
        }

        // Create U texture
        if (!CreateYUVPlaneTexture(uPlane, uvWidth, uvHeight, uStride, m_uTexture, m_uTextureMemory, m_uTextureView)) {
            LOGE("Failed to create U texture");
            return false;
        }

        // Create V texture
        if (!CreateYUVPlaneTexture(vPlane, uvWidth, uvHeight, vStride, m_vTexture, m_vTextureMemory, m_vTextureView)) {
            LOGE("Failed to create V texture");
            return false;
        }

        // Update descriptor sets with new textures
        UpdateYUVDescriptorSets();

        LOGI("YUV textures created successfully");
    } else {
        // Update existing textures with new frame data
        if (!UpdateYUVPlaneTexture(yPlane, width, height, yStride, m_yTexture)) {
            LOGE("Failed to update Y texture");
            return false;
        }
        if (!UpdateYUVPlaneTexture(uPlane, uvWidth, uvHeight, uStride, m_uTexture)) {
            LOGE("Failed to update U texture");
            return false;
        }
        if (!UpdateYUVPlaneTexture(vPlane, uvWidth, uvHeight, vStride, m_vTexture)) {
            LOGE("Failed to update V texture");
            return false;
        }
    }

    // Create U texture
    if (!CreateYUVPlaneTexture(uPlane, uvWidth, uvHeight, uStride, m_uTexture, m_uTextureMemory, m_uTextureView)) {
        LOGE("Failed to create U texture");
        return false;
    }

    // Create V texture
    if (!CreateYUVPlaneTexture(vPlane, uvWidth, uvHeight, vStride, m_vTexture, m_vTextureMemory, m_vTextureView)) {
        LOGE("Failed to create V texture");
        return false;
    }

    // Update descriptor sets with new textures
    UpdateYUVDescriptorSets();

    LOGI("YUV textures created successfully");
    return true;
}
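Because removed and added lines are interleaved in the hunk above, the new control flow of CreateTextureFromYUV() is easier to see as a skeleton. This is an illustrative outline only; the elided bodies are exactly the blocks shown in the diff:

    bool VulkanVideoRenderer::CreateTextureFromYUV(const uint8_t* yPlane, const uint8_t* uPlane, const uint8_t* vPlane,
                                                   uint32_t width, uint32_t height, uint32_t yStride, uint32_t uStride, uint32_t vStride) {
        // 4:2:0 chroma planes are half the luma size in each dimension
        uint32_t uvWidth = width / 2;
        uint32_t uvHeight = height / 2;

        // Recreate GPU textures only when none exist yet or the video size changed
        bool needsRecreation = (m_yTexture == VK_NULL_HANDLE) ||
                               (m_videoWidth != width) || (m_videoHeight != height);
        if (needsRecreation) {
            // destroy any existing Y/U/V images, views and memory,
            // create fresh ones via CreateYUVPlaneTexture(), then
            UpdateYUVDescriptorSets();
        } else {
            // fast path for every subsequent frame: upload the new plane data
            // into the existing images via UpdateYUVPlaneTexture() for Y, U and V
        }
        return true;
    }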
@@ -1630,6 +1693,50 @@ bool VulkanVideoRenderer::CreateYUVPlaneTexture(const uint8_t* data, uint32_t wi
    return true;
}

bool VulkanVideoRenderer::UpdateYUVPlaneTexture(const uint8_t* data, uint32_t width, uint32_t height, uint32_t stride, VkImage texture) {
    VkDeviceSize imageSize = static_cast<VkDeviceSize>(height) * stride;

    // Create staging buffer
    VkBuffer stagingBuffer;
    VkDeviceMemory stagingBufferMemory;
    if (!CreateBuffer(imageSize, VK_BUFFER_USAGE_TRANSFER_SRC_BIT,
                      VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
                      stagingBuffer, stagingBufferMemory)) {
        LOGE("Failed to create staging buffer for texture update");
        return false;
    }

    // Copy data to staging buffer
    void* mappedData;
    vkMapMemory(m_device, stagingBufferMemory, 0, imageSize, 0, &mappedData);
    if (stride == width) {
        // Simple copy if no padding
        memcpy(mappedData, data, imageSize);
    } else {
        // Copy row by row if there's stride padding
        uint8_t* dst = static_cast<uint8_t*>(mappedData);
        for (uint32_t y = 0; y < height; y++) {
            memcpy(dst + y * width, data + y * stride, width);
        }
    }
    vkUnmapMemory(m_device, stagingBufferMemory);

    // Transition image layout for transfer
    TransitionImageLayout(texture, VK_FORMAT_R8_UNORM, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL);

    // Copy buffer to image
    CopyBufferToImage(stagingBuffer, texture, width, height);

    // Transition image layout back to shader read
    TransitionImageLayout(texture, VK_FORMAT_R8_UNORM, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL);

    // Cleanup staging buffer
    vkDestroyBuffer(m_device, stagingBuffer, nullptr);
    vkFreeMemory(m_device, stagingBufferMemory, nullptr);

    return true;
}

void VulkanVideoRenderer::TransitionImageLayout(VkImage image, VkFormat format, VkImageLayout oldLayout, VkImageLayout newLayout) {
    VkCommandBufferAllocateInfo allocInfo = {};
    allocInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;

@@ -263,6 +263,7 @@ private:
                               uint32_t width, uint32_t height, uint32_t yStride, uint32_t uStride, uint32_t vStride);
    bool CreateYUVPlaneTexture(const uint8_t* data, uint32_t width, uint32_t height, uint32_t stride,
                               VkImage& texture, VkDeviceMemory& textureMemory, VkImageView& textureView);
    bool UpdateYUVPlaneTexture(const uint8_t* data, uint32_t width, uint32_t height, uint32_t stride, VkImage texture);
    void UpdateYUVDescriptorSets();

    // Queue family helpers

@@ -110,13 +110,13 @@ public class FileBrowserActivity extends AppCompatActivity {
    text2.setTextColor(getResources().getColor(R.color.text_secondary, null));

    if (item.isParentDir) {
        text1.setText("📁 " + item.name + " (Go up)");
        text1.setText("[DIR] " + item.name + " (Go up)");
        text2.setText("Parent directory");
    } else if (item.isDirectory) {
        text1.setText("📁 " + item.name);
        text1.setText("[DIR] " + item.name);
        text2.setText("Directory • " + item.getFormattedDate());
    } else {
        text1.setText("🎬 " + item.name);
        text1.setText("[VIDEO] " + item.name);
        text2.setText(item.getFormattedSize() + " • " + item.getFormattedDate());
    }

@@ -52,8 +52,7 @@ public class MainActivity extends AppCompatActivity {

    // Core Components
    private PerformanceMonitor performanceMonitor;
    private boolean isFrameProcessing = false;
    private Thread frameProcessingThread;
    // Removed: Java frame processing thread (native side handles playback)

    // Progress tracking
    private long videoDurationUs = 0;
@@ -395,7 +394,7 @@ public class MainActivity extends AppCompatActivity {
    if (success) {
        statusText.setText("Playing");
        performanceMonitor.startMonitoring();
        startFrameProcessing();
        // Removed: startFrameProcessing() - native side handles this
        startProgressUpdates();
        // Update overlay state
        videoPlayerOverlay.setPlaybackState(true);
@@ -412,7 +411,7 @@ public class MainActivity extends AppCompatActivity {
    if (success) {
        statusText.setText("Paused");
        performanceMonitor.pauseMonitoring();
        stopFrameProcessing();
        // Removed: stopFrameProcessing() - native side handles this
        stopProgressUpdates();
        // Update overlay state
        videoPlayerOverlay.setPlaybackState(false);
@@ -426,7 +425,7 @@ public class MainActivity extends AppCompatActivity {
        statusText.setText("Stopped");
    }
    performanceMonitor.stopMonitoring();
    stopFrameProcessing();
    // Removed: stopFrameProcessing() - native side handles this
    stopProgressUpdates();
    progressBar.setProgress(0);
    currentTimeText.setText("00:00");
@@ -463,44 +462,9 @@ public class MainActivity extends AppCompatActivity {
        performanceText.setText(perfText);
    }

    private void startFrameProcessing() {
        if (isFrameProcessing) {
            return;
        }

        isFrameProcessing = true;
        frameProcessingThread = new Thread(() -> {
            while (isFrameProcessing) {
                try {
                    if (vulkanVideoView.getPlaybackState() == VulkanVideoView.PlaybackState.PLAYING) {
                        vulkanVideoView.processFrame();
                        Thread.sleep(33); // ~30 FPS
                    } else {
                        Thread.sleep(100);
                    }
                } catch (InterruptedException e) {
                    break;
                } catch (Exception e) {
                    runOnUiThread(() -> showError("Frame processing error: " + e.getMessage()));
                    break;
                }
            }
        });
        frameProcessingThread.start();
    }

    private void stopFrameProcessing() {
        isFrameProcessing = false;
        if (frameProcessingThread != null) {
            frameProcessingThread.interrupt();
            try {
                frameProcessingThread.join(1000);
            } catch (InterruptedException e) {
                // Ignore
            }
            frameProcessingThread = null;
        }
    }
    // Removed: startFrameProcessing() and stopFrameProcessing()
    // Frame processing is now handled entirely by the native playback thread
    // This eliminates the duplicate frame processing issue and improves performance

    private void showError(String message) {
        Toast.makeText(this, "Error: " + message, Toast.LENGTH_LONG).show();
@@ -523,7 +487,7 @@ public class MainActivity extends AppCompatActivity {
    @Override
    protected void onDestroy() {
        super.onDestroy();
        stopFrameProcessing();
        // Removed: stopFrameProcessing() - native side handles this
        vulkanVideoView.cleanup();
        performanceMonitor.cleanup();
        stopProgressUpdates();
@@ -540,23 +504,20 @@ public class MainActivity extends AppCompatActivity {
    }

    private void updateProgressDisplay() {
        // Note: For now, this is a placeholder.
        // Actual current position tracking would require additional native methods
        // For demonstration, we'll just show that the system is working
        VulkanVideoView.PlaybackState state = vulkanVideoView.getPlaybackState();
        if (state == VulkanVideoView.PlaybackState.PLAYING && videoDurationUs > 0) {
            // This is a simple simulation - in a real implementation,
            // we would get the actual current position from the video player
            int currentProgress = progressBar.getProgress();
            if (currentProgress < 100) {
                // Increment by 1% every 500ms for demonstration
                progressBar.setProgress(Math.min(100, currentProgress + 1));
                long currentPositionUs = (videoDurationUs * progressBar.getProgress()) / 100;
                currentTimeText.setText(formatTime(currentPositionUs));
        if (videoDurationUs > 0) {
            // Get actual current position from native player
            long currentPositionUs = vulkanVideoView.getCurrentPositionUs();

                // Update overlay progress as well
                videoPlayerOverlay.updateProgress(currentPositionUs, videoDurationUs);
            }
            // Update progress bar (0-100)
            int progress = (int) ((currentPositionUs * 100) / videoDurationUs);
            progressBar.setProgress(Math.min(100, Math.max(0, progress)));

            // Update time display
            currentTimeText.setText(formatTime(currentPositionUs));

            // Update overlay progress
            videoPlayerOverlay.updateProgress(currentPositionUs, videoDurationUs);
        }
    }
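For readability, the reworked updateProgressDisplay() boils down to the following sketch, using only the fields and helpers referenced in the hunk:

    private void updateProgressDisplay() {
        if (videoDurationUs > 0) {
            // The position now comes from the native player instead of being simulated
            long currentPositionUs = vulkanVideoView.getCurrentPositionUs();

            // Map the position to a 0-100 progress value
            int progress = (int) ((currentPositionUs * 100) / videoDurationUs);
            progressBar.setProgress(Math.min(100, Math.max(0, progress)));

            currentTimeText.setText(formatTime(currentPositionUs));
            videoPlayerOverlay.updateProgress(currentPositionUs, videoDurationUs);
        }
    }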
@@ -270,15 +270,11 @@ public class PerformanceMonitor {

    private float getCpuUsage() {
        try {
            // Simple CPU usage estimation
            Runtime runtime = Runtime.getRuntime();
            long totalMemory = runtime.totalMemory();
            long freeMemory = runtime.freeMemory();
            long usedMemory = totalMemory - freeMemory;

            // Convert to rough CPU usage percentage
            return Math.min(100.0f, (usedMemory / (float) totalMemory) * 100.0f);

            // Note: Accurate CPU usage measurement requires /proc/stat parsing
            // which needs additional permissions. For simplicity, we return 0
            // and rely on native performance metrics instead.
            // TODO: Implement proper CPU usage if needed
            return 0.0f;
        } catch (Exception e) {
            return 0.0f;
        }
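If real CPU sampling is wanted later (the TODO above), one option that avoids the restricted system-wide /proc/stat is reading the app's own /proc/self/stat. The sketch below is a hypothetical addition, not part of this commit; it assumes the common 100 ticks-per-second clock and field layout from proc(5):

    // Hypothetical helper, not in this commit: per-process CPU sampling via /proc/self/stat
    private long lastCpuTicks = -1;
    private long lastSampleUptimeMs = -1;

    private float sampleProcessCpuPercent() {
        try (java.io.BufferedReader reader =
                 new java.io.BufferedReader(new java.io.FileReader("/proc/self/stat"))) {
            String line = reader.readLine();
            // The comm field is wrapped in parentheses and may contain spaces,
            // so only split the part after the closing parenthesis.
            String[] fields = line.substring(line.lastIndexOf(')') + 2).split(" ");
            long ticks = Long.parseLong(fields[11]) + Long.parseLong(fields[12]); // utime + stime
            long nowMs = android.os.SystemClock.uptimeMillis();
            float percent = 0.0f;
            if (lastCpuTicks >= 0 && nowMs > lastSampleUptimeMs) {
                float cpuMs = (ticks - lastCpuTicks) * (1000.0f / 100.0f); // assumes 100 Hz ticks
                percent = Math.min(100.0f, (cpuMs / (nowMs - lastSampleUptimeMs)) * 100.0f);
            }
            lastCpuTicks = ticks;
            lastSampleUptimeMs = nowMs;
            return percent;
        } catch (Exception e) {
            return 0.0f;
        }
    }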
@@ -1,353 +0,0 @@
package com.vavcore.player;

import android.content.Context;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;

/**
 * Video playback controller that manages VavCore integration
 *
 * Handles:
 * - Video loading and playback control
 * - VavCore C API integration
 * - Background decoding thread management
 * - State management and callbacks
 */
public class VideoController {
    private static final String TAG = "VideoController";
    private static final int RENDER_INTERVAL_MS = 33; // ~30 FPS

    public interface OnVideoStateChangedListener {
        void onVideoLoaded(String filename, int width, int height, double duration);
        void onVideoPlaying();
        void onVideoPaused();
        void onVideoStopped();
        void onProgressUpdate(double currentTime, double totalTime);
        void onError(String message);
    }

    // Components
    private Context context;
    private OnVideoStateChangedListener listener;
    private VulkanVideoView videoView;

    // VavCore player
    private long vavCorePlayer = 0; // VavCorePlayer* pointer

    // State
    private VideoState currentState = VideoState.IDLE;
    private String currentFilePath = "";
    private int videoWidth = 0;
    private int videoHeight = 0;
    private double videoDuration = 0.0;
    private double currentTime = 0.0;

    // Threading
    private HandlerThread decodingThread;
    private Handler decodingHandler;
    private Handler mainHandler;
    private Runnable renderRunnable;

    private enum VideoState {
        IDLE,       // No video loaded
        LOADED,     // Video file loaded and ready
        PLAYING,    // Currently playing
        PAUSED,     // Paused (can resume)
        STOPPED     // Stopped (reset to beginning)
    }

    public VideoController(Context context) {
        this.context = context;
        this.mainHandler = new Handler();

        // Initialize decoding thread
        decodingThread = new HandlerThread("VideoDecodingThread");
        decodingThread.start();
        decodingHandler = new Handler(decodingThread.getLooper());

        // Load VavCore JNI library
        if (!VavCore.initializeVavCore()) {
            Log.e(TAG, "Failed to initialize VavCore");
        }
    }

    public void setVideoView(VulkanVideoView videoView) {
        this.videoView = videoView;
    }

    public void setOnVideoStateChangedListener(OnVideoStateChangedListener listener) {
        this.listener = listener;
    }

    /**
     * Load a video file for playback
     */
    public void loadVideo(String filePath) {
        decodingHandler.post(() -> {
            try {
                // Cleanup previous video
                if (vavCorePlayer != 0) {
                    VavCore.destroyPlayer(vavCorePlayer);
                    vavCorePlayer = 0;
                }

                // Create new VavCore player
                vavCorePlayer = VavCore.createPlayer();
                if (vavCorePlayer == 0) {
                    notifyError("Failed to create VavCore player");
                    return;
                }

                // Open video file
                int result = VavCore.openFile(vavCorePlayer, filePath);
                if (result != VavCore.VAVCORE_SUCCESS) {
                    notifyError("Failed to open video file: " + VavCore.getErrorString(result));
                    return;
                }

                // Get video metadata
                VavCore.VideoMetadata metadata = VavCore.getMetadata(vavCorePlayer);
                if (metadata != null) {
                    videoWidth = metadata.width;
                    videoHeight = metadata.height;
                    videoDuration = metadata.durationSeconds;
                    currentFilePath = filePath;

                    // Update video view size
                    mainHandler.post(() -> {
                        if (videoView != null) {
                            videoView.setVideoSize(videoWidth, videoHeight);
                        }
                    });

                    // Notify loaded
                    currentState = VideoState.LOADED;
                    currentTime = 0.0;

                    String filename = filePath.substring(filePath.lastIndexOf('/') + 1);
                    notifyVideoLoaded(filename, videoWidth, videoHeight, videoDuration);

                } else {
                    notifyError("Failed to get video metadata");
                }

            } catch (Exception e) {
                Log.e(TAG, "Error loading video", e);
                notifyError("Error loading video: " + e.getMessage());
            }
        });
    }

    /**
     * Start video playback
     */
    public void play() {
        if (currentState != VideoState.LOADED && currentState != VideoState.PAUSED) {
            return;
        }

        currentState = VideoState.PLAYING;
        startRenderLoop();
        notifyVideoPlaying();
    }

    /**
     * Pause video playback
     */
    public void pause() {
        if (currentState != VideoState.PLAYING) {
            return;
        }

        currentState = VideoState.PAUSED;
        stopRenderLoop();
        notifyVideoPaused();
    }

    /**
     * Stop video playback and reset to beginning
     */
    public void stop() {
        if (currentState == VideoState.IDLE) {
            return;
        }

        currentState = VideoState.STOPPED;
        stopRenderLoop();
        currentTime = 0.0;

        // Reset player to beginning
        if (vavCorePlayer != 0) {
            decodingHandler.post(() -> {
                VavCore.seekToTime(vavCorePlayer, 0.0);
            });
        }

        notifyVideoStopped();
    }

    /**
     * Seek to specific time position
     */
    public void seekTo(double timeSeconds) {
        if (vavCorePlayer != 0) {
            decodingHandler.post(() -> {
                int result = VavCore.seekToTime(vavCorePlayer, timeSeconds);
                if (result == VavCore.VAVCORE_SUCCESS) {
                    currentTime = timeSeconds;
                    mainHandler.post(() -> notifyProgressUpdate(currentTime, videoDuration));
                }
            });
        }
    }

    private void startRenderLoop() {
        if (renderRunnable != null) {
            stopRenderLoop();
        }

        renderRunnable = new Runnable() {
            @Override
            public void run() {
                if (currentState == VideoState.PLAYING) {
                    decodeAndRenderFrame();
                    decodingHandler.postDelayed(this, RENDER_INTERVAL_MS);
                }
            }
        };

        decodingHandler.post(renderRunnable);
    }

    private void stopRenderLoop() {
        if (renderRunnable != null) {
            decodingHandler.removeCallbacks(renderRunnable);
            renderRunnable = null;
        }
    }

    private void decodeAndRenderFrame() {
        if (vavCorePlayer == 0 || videoView == null) {
            return;
        }

        try {
            // Decode next frame
            VavCore.VideoFrame frame = VavCore.decodeNextFrame(vavCorePlayer);
            if (frame != null) {
                // Update current time
                currentTime = VavCore.getCurrentTime(vavCorePlayer);

                // Process frame on main thread
                mainHandler.post(() -> {
                    videoView.processFrame();
                    notifyProgressUpdate(currentTime, videoDuration);
                });

            } else {
                // Check if end of file
                if (VavCore.isEndOfFile(vavCorePlayer)) {
                    mainHandler.post(() -> stop());
                }
            }

        } catch (Exception e) {
            Log.e(TAG, "Error decoding frame", e);
            mainHandler.post(() -> notifyError("Decoding error: " + e.getMessage()));
        }
    }

    // State query methods
    public boolean isLoaded() {
        return currentState != VideoState.IDLE;
    }

    public boolean isPlaying() {
        return currentState == VideoState.PLAYING;
    }

    public double getCurrentTime() {
        return currentTime;
    }

    public double getDuration() {
        return videoDuration;
    }

    public int getVideoWidth() {
        return videoWidth;
    }

    public int getVideoHeight() {
        return videoHeight;
    }

    // Notification methods
    private void notifyVideoLoaded(String filename, int width, int height, double duration) {
        mainHandler.post(() -> {
            if (listener != null) {
                listener.onVideoLoaded(filename, width, height, duration);
            }
        });
    }

    private void notifyVideoPlaying() {
        mainHandler.post(() -> {
            if (listener != null) {
                listener.onVideoPlaying();
            }
        });
    }

    private void notifyVideoPaused() {
        mainHandler.post(() -> {
            if (listener != null) {
                listener.onVideoPaused();
            }
        });
    }

    private void notifyVideoStopped() {
        mainHandler.post(() -> {
            if (listener != null) {
                listener.onVideoStopped();
            }
        });
    }

    private void notifyProgressUpdate(double currentTime, double totalTime) {
        if (listener != null) {
            listener.onProgressUpdate(currentTime, totalTime);
        }
    }

    private void notifyError(String message) {
        Log.e(TAG, message);
        mainHandler.post(() -> {
            if (listener != null) {
                listener.onError(message);
            }
        });
    }

    public void cleanup() {
        stopRenderLoop();

        // Cleanup VavCore player
        if (vavCorePlayer != 0) {
            VavCore.destroyPlayer(vavCorePlayer);
            vavCorePlayer = 0;
        }

        // Stop decoding thread
        if (decodingThread != null) {
            decodingThread.quitSafely();
            try {
                decodingThread.join();
            } catch (InterruptedException e) {
                Log.e(TAG, "Error stopping decoding thread", e);
            }
        }
    }
}
@@ -57,7 +57,8 @@ public class VulkanVideoView extends SurfaceView implements SurfaceHolder.Callba

    // Surface state
    private SurfaceHolder surfaceHolder;
    private boolean surfaceCreated = false;
    private volatile boolean surfaceCreated = false; // volatile for thread visibility
    private final Object surfaceLock = new Object(); // Synchronization lock
    private String pendingVideoPath = null;

    // Gesture detection
@@ -213,12 +214,16 @@ public class VulkanVideoView extends SurfaceView implements SurfaceHolder.Callba

    /**
     * Process next frame (for continuous playback)
     * @deprecated No longer needed - native side handles frame processing automatically
     */
    @Deprecated
    public boolean processFrame() {
        if (!isInitialized) {
            return false;
        synchronized (surfaceLock) {
            if (!isInitialized || !surfaceCreated || nativeVideoPlayer == 0) {
                return false;
            }
            return nativeProcessFrame(nativeVideoPlayer);
        }
        return nativeProcessFrame(nativeVideoPlayer);
    }

    /**
@@ -252,6 +257,16 @@ public class VulkanVideoView extends SurfaceView implements SurfaceHolder.Callba
        return PlaybackState.ERROR_STATE;
    }

    /**
     * Get current playback position in microseconds
     */
    public long getCurrentPositionUs() {
        if (nativeVideoPlayer != 0) {
            return nativeGetCurrentPosition(nativeVideoPlayer);
        }
        return 0;
    }

    /**
     * Set gesture listener for handling video playback gestures
     */
@@ -307,16 +322,17 @@ public class VulkanVideoView extends SurfaceView implements SurfaceHolder.Callba

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        // Mark surface as created
        surfaceCreated = true;
        android.util.Log.i(TAG, "Surface created, ready for video loading");
        synchronized (surfaceLock) {
            surfaceCreated = true;
            android.util.Log.i(TAG, "Surface created, ready for video loading");

            // If there's a pending video load, process it now
            if (pendingVideoPath != null) {
                android.util.Log.i(TAG, "Processing pending video load: " + pendingVideoPath);
                String path = pendingVideoPath;
                pendingVideoPath = null;
                loadVideo(path);
            // If there's a pending video load, process it now
            if (pendingVideoPath != null && nativeVideoPlayer != 0) {
                android.util.Log.i(TAG, "Processing pending video load: " + pendingVideoPath);
                String path = pendingVideoPath;
                pendingVideoPath = null;
                loadVideo(path);
            }
        }
    }
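Again the removed and added lines are interleaved above; the new surfaceCreated() effectively reads as follows (sketch, same identifiers as the hunk):

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        synchronized (surfaceLock) {
            surfaceCreated = true;
            android.util.Log.i(TAG, "Surface created, ready for video loading");

            // A video queued before the surface (and native player) existed is loaded now
            if (pendingVideoPath != null && nativeVideoPlayer != 0) {
                String path = pendingVideoPath;
                pendingVideoPath = null;
                loadVideo(path);
            }
        }
    }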
@@ -330,7 +346,17 @@ public class VulkanVideoView extends SurfaceView implements SurfaceHolder.Callba

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        surfaceCreated = false;
        synchronized (surfaceLock) {
            surfaceCreated = false;
        }

        // Wait briefly for any in-flight rendering to complete
        try {
            Thread.sleep(50);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
        }

        if (nativeVideoPlayer != 0) {
            nativeSurfaceDestroyed(nativeVideoPlayer);
        }
@@ -485,6 +511,7 @@ public class VulkanVideoView extends SurfaceView implements SurfaceHolder.Callba
    private native void nativeSurfaceDestroyed(long playerPtr);
    private native VideoInfo nativeGetVideoInfo(long playerPtr);
    private native int nativeGetPlaybackState(long playerPtr);
    private native long nativeGetCurrentPosition(long playerPtr);
    private native PerformanceMonitor.Metrics nativeGetPerformanceMetrics(long playerPtr);
    private native boolean nativeSetDecoderType(long playerPtr, int decoderType);
    private native boolean nativeSetQualityMode(long playerPtr, int qualityMode);

@@ -22,7 +22,7 @@
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:padding="12dp"
        android:text="/sdcard/"
        android:text="@string/default_path"
        android:textColor="@color/text_secondary"
        android:textSize="12sp"
        android:fontFamily="monospace"
@@ -46,7 +46,7 @@
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:padding="12dp"
        android:text="Showing AV1/WebM video files only"
        android:text="@string/file_filter_hint"
        android:textColor="@color/text_secondary"
        android:textSize="11sp"
        android:gravity="center"

@@ -119,7 +119,7 @@
        android:id="@+id/current_time"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:text="00:00"
        android:text="@string/time_default"
        android:textColor="@color/text_secondary"
        android:textSize="12sp"
        android:fontFamily="monospace"
@@ -135,7 +135,7 @@
        android:id="@+id/duration_time"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:text="00:00"
        android:text="@string/time_default"
        android:textColor="@color/text_secondary"
        android:textSize="12sp"
        android:fontFamily="monospace"

@@ -30,7 +30,7 @@
    <TextView
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:text="AV1 Decoder Type"
        android:text="@string/settings_decoder_title"
        android:textColor="@color/text_primary"
        android:textSize="18sp"
        android:textStyle="bold"
@@ -39,7 +39,7 @@
    <TextView
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:text="Choose the AV1 decoder type for video playback"
        android:text="@string/settings_decoder_description"
        android:textColor="@color/text_secondary"
        android:textSize="14sp"
        android:layout_marginBottom="16dp" />
@@ -54,7 +54,7 @@
        android:id="@+id/radio_auto"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:text="Auto (Recommended)"
        android:text="@string/settings_decoder_auto"
        android:textColor="@color/text_primary"
        android:textSize="16sp"
        android:padding="12dp"
@@ -63,7 +63,7 @@
    <TextView
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:text="Automatically selects the best available decoder"
        android:text="@string/settings_decoder_auto_description"
        android:textColor="@color/text_secondary"
        android:textSize="12sp"
        android:layout_marginStart="32dp"
@@ -73,7 +73,7 @@
        android:id="@+id/radio_hardware"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:text="Hardware Acceleration"
        android:text="@string/settings_decoder_hardware"
        android:textColor="@color/text_primary"
        android:textSize="16sp"
        android:padding="12dp" />
@@ -81,7 +81,7 @@
    <TextView
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:text="Uses MediaCodec hardware acceleration for better performance"
        android:text="@string/settings_decoder_hardware_description"
        android:textColor="@color/text_secondary"
        android:textSize="12sp"
        android:layout_marginStart="32dp"
@@ -91,7 +91,7 @@
        android:id="@+id/radio_software"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:text="Software (dav1d)"
        android:text="@string/settings_decoder_software"
        android:textColor="@color/text_primary"
        android:textSize="16sp"
        android:padding="12dp" />
@@ -99,7 +99,7 @@
    <TextView
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:text="Uses dav1d software decoder for maximum compatibility"
        android:text="@string/settings_decoder_software_description"
        android:textColor="@color/text_secondary"
        android:textSize="12sp"
        android:layout_marginStart="32dp"
@@ -111,7 +111,7 @@
    <TextView
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:text="Advanced Settings"
        android:text="@string/settings_advanced_title"
        android:textColor="@color/text_primary"
        android:textSize="18sp"
        android:textStyle="bold"
@@ -135,14 +135,14 @@
    <TextView
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:text="Asynchronous MediaCodec"
        android:text="@string/settings_async_mediacodec"
        android:textColor="@color/text_primary"
        android:textSize="16sp" />

    <TextView
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:text="Enable for high-end devices (Snapdragon 8 Gen 1+)"
        android:text="@string/settings_async_mediacodec_description"
        android:textColor="@color/text_secondary"
        android:textSize="12sp"
        android:layout_marginTop="2dp" />
@@ -175,14 +175,14 @@
    <TextView
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:text="Hardware Decoder Priming"
        android:text="@string/settings_hardware_priming"
        android:textColor="@color/text_primary"
        android:textSize="16sp" />

    <TextView
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:text="Pre-warm hardware decoder to reduce initial latency"
        android:text="@string/settings_hardware_priming_description"
        android:textColor="@color/text_secondary"
        android:textSize="12sp"
        android:layout_marginTop="2dp" />
@@ -201,7 +201,7 @@
    <TextView
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:text="Device Information"
        android:text="@string/settings_device_info_title"
        android:textColor="@color/text_primary"
        android:textSize="18sp"
        android:textStyle="bold"
@@ -211,7 +211,7 @@
        android:id="@+id/device_info_text"
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:text="Loading device information..."
        android:text="@string/settings_device_info_loading"
        android:textColor="@color/text_secondary"
        android:textSize="14sp"
        android:fontFamily="monospace"
@@ -223,7 +223,7 @@
    <TextView
        android:layout_width="match_parent"
        android:layout_height="wrap_content"
        android:text="Note: Settings changes will take effect after reloading the video. Hardware acceleration provides better performance and battery life for supported devices."
        android:text="@string/settings_note"
        android:textColor="@color/text_secondary"
        android:textSize="12sp"
        android:textStyle="italic"

@@ -46,7 +46,7 @@
        android:layout_weight="1"
        android:layout_marginStart="16dp"
        android:layout_marginEnd="16dp"
        android:text="Video Title"
        android:text="@string/overlay_video_title"
        android:textColor="@android:color/white"
        android:textSize="18sp"
        android:textStyle="bold"
@@ -99,7 +99,7 @@
        android:id="@+id/overlay_current_time"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:text="00:00"
        android:text="@string/time_default"
        android:textColor="@android:color/white"
        android:textSize="14sp"
        android:fontFamily="monospace"
@@ -158,7 +158,7 @@
        android:id="@+id/overlay_duration_time"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:text="00:00"
        android:text="@string/time_default"
        android:textColor="@android:color/white"
        android:textSize="14sp"
        android:fontFamily="monospace"

@@ -51,4 +51,32 @@
    <string name="content_description_stop_button">Stop video</string>
    <string name="content_description_load_button">Load video file</string>
    <string name="content_description_progress_bar">Video playback progress</string>

    <!-- File Browser -->
    <string name="default_path">/sdcard/</string>
    <string name="file_filter_hint">Showing AV1/WebM video files only</string>

    <!-- Time Display -->
    <string name="time_default">00:00</string>

    <!-- Settings -->
    <string name="settings_decoder_title">AV1 Decoder Type</string>
    <string name="settings_decoder_description">Choose the AV1 decoder type for video playback</string>
    <string name="settings_decoder_auto">Auto (Recommended)</string>
    <string name="settings_decoder_auto_description">Automatically selects the best available decoder</string>
    <string name="settings_decoder_hardware">Hardware Acceleration</string>
    <string name="settings_decoder_hardware_description">Uses MediaCodec hardware acceleration for better performance</string>
    <string name="settings_decoder_software">Software (dav1d)</string>
    <string name="settings_decoder_software_description">Uses dav1d software decoder for maximum compatibility</string>
    <string name="settings_advanced_title">Advanced Settings</string>
    <string name="settings_async_mediacodec">Asynchronous MediaCodec</string>
    <string name="settings_async_mediacodec_description">Enable for high-end devices (Snapdragon 8 Gen 1+)</string>
    <string name="settings_hardware_priming">Hardware Decoder Priming</string>
    <string name="settings_hardware_priming_description">Pre-warm hardware decoder to reduce initial latency</string>
    <string name="settings_device_info_title">Device Information</string>
    <string name="settings_device_info_loading">Loading device information...</string>
    <string name="settings_note">Note: Settings changes will take effect after reloading the video. Hardware acceleration provides better performance and battery life for supported devices.</string>

    <!-- Overlay -->
    <string name="overlay_video_title">Video Title</string>
</resources>