Files
video-v1/vav1/Vav1Player/Video/VideoDecoderPipeline.cs
2025-09-19 04:42:07 +09:00

709 lines
31 KiB
C#

using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Vav1Player.Container;
using Vav1Player.Decoder;
namespace Vav1Player.Video
{
/// <summary>
/// Video decoder pipeline that continuously decodes frames from file reader to frame buffer
/// </summary>
public class VideoDecoderPipeline : IDisposable
{
// --- Collaborators (injected via the constructor; not disposed by this class) ---
private readonly VideoFileReader _fileReader;
private readonly Dav1dDecoder _decoder;
private readonly FrameBuffer _frameBuffer;
// Cancels the background decoding task; created in the constructor, disposed in Dispose().
private readonly CancellationTokenSource _cancellationTokenSource;
// The long-running background task executing DecodingLoop().
private readonly Task _decodingTask;
// volatile: written on the caller's thread (Dispose/Pause/Resume), read by the decoding task.
private volatile bool _disposed = false;
private volatile bool _isPaused = false;
// Incremented only on the decoding task after a frame is enqueued; exposed via DecodedFrameCount.
private int _frameCounter = 0;
// True while the background decoding task is still executing and the pipeline is not disposed.
public bool IsRunning => !_decodingTask.IsCompleted && !_disposed;
public bool IsPaused => _isPaused;
public VideoFileReader FileReader => _fileReader;
public FrameBuffer FrameBuffer => _frameBuffer;
// Number of frames decoded AND successfully enqueued so far (dropped frames are not counted).
public int DecodedFrameCount => _frameCounter;
/// <summary>
/// Wires the file reader, dav1d decoder and frame buffer together and
/// immediately starts the background decoding task.
/// </summary>
/// <param name="fileReader">Source of demuxed AV1 samples.</param>
/// <param name="decoder">dav1d decoder instance fed with OBU data.</param>
/// <param name="frameBuffer">Destination queue for decoded frames.</param>
/// <exception cref="ArgumentNullException">If any dependency is null.</exception>
public VideoDecoderPipeline(VideoFileReader fileReader, Dav1dDecoder decoder, FrameBuffer frameBuffer)
{
_fileReader = fileReader ?? throw new ArgumentNullException(nameof(fileReader));
_decoder = decoder ?? throw new ArgumentNullException(nameof(decoder));
_frameBuffer = frameBuffer ?? throw new ArgumentNullException(nameof(frameBuffer));
// Initialize decoder with av1C configuration if available
InitializeDecoderWithConfig();
// The CTS must exist before the task starts: DecodingLoop reads its Token immediately.
_cancellationTokenSource = new CancellationTokenSource();
// NOTE(review): the decoding task starts before the constructor returns, so the
// loop may run against a not-yet-published instance — confirm no subclass or
// caller depends on construction completing first.
_decodingTask = Task.Run(DecodingLoop, _cancellationTokenSource.Token);
System.Diagnostics.Debug.WriteLine("[VideoDecoderPipeline] Started decoding pipeline");
}
/// <summary>
/// Primes the decoder with the sequence header OBUs carried by the container's
/// codec configuration record (av1C for MP4, CodecPrivate for WebM/MKV).
/// If no usable configuration is present the decoder is left to initialize
/// itself from the first keyframe in the stream.
/// </summary>
private void InitializeDecoderWithConfig()
{
    var config = _fileReader.TrackInfo?.Av1ConfigurationRecord;
    if (config == null)
    {
        System.Diagnostics.Debug.WriteLine("[VideoDecoderPipeline] No AV1 configuration available - using first frame for initialization");
        return;
    }
    System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] AV1 configuration available: {config.Length} bytes");
    // The record layout depends on the container that carried it.
    var extension = System.IO.Path.GetExtension(_fileReader.FilePath).ToLowerInvariant();
    byte[]? sequenceOBUs = null;
    switch (extension)
    {
        case ".mp4":
            // MP4 uses av1C format (ISO BMFF)
            sequenceOBUs = ParseAv1ConfigurationRecord(config);
            System.Diagnostics.Debug.WriteLine("[VideoDecoderPipeline] Parsed MP4 av1C configuration");
            break;
        case ".webm":
        case ".mkv":
            // WebM/MKV uses raw AV1 OBUs in CodecPrivate
            sequenceOBUs = ParseMatroskaCodecPrivate(config);
            System.Diagnostics.Debug.WriteLine("[VideoDecoderPipeline] Parsed WebM CodecPrivate configuration");
            break;
    }
    if (sequenceOBUs == null || sequenceOBUs.Length == 0)
    {
        System.Diagnostics.Debug.WriteLine("[VideoDecoderPipeline] No sequence header found in configuration - relying on stream data");
        return;
    }
    System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Extracted sequence OBUs: {sequenceOBUs.Length} bytes");
    // Dump the first bytes to help diagnose malformed headers.
    var hexData = string.Join(" ", sequenceOBUs.Take(Math.Min(16, sequenceOBUs.Length)).Select(b => b.ToString("X2")));
    System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Sequence OBU data: {hexData}");
    // Feed the sequence header to the decoder; no frame output is expected here.
    var initialized = _decoder.DecodeFrame(sequenceOBUs, out var _);
    System.Diagnostics.Debug.WriteLine(initialized
        ? "[VideoDecoderPipeline] Successfully initialized decoder with sequence header"
        : "[VideoDecoderPipeline] Failed to initialize decoder with sequence header");
}
/// <summary>
/// Extracts a sequence header OBU from a Matroska/WebM CodecPrivate blob.
/// Per the Matroska AV1 mapping, CodecPrivate starts with 4 octets mirroring
/// the ISOBMFF AV1CodecConfigurationRecord; some muxers (non-standard but
/// common) append sequence header OBUs after those 4 bytes.
/// Returns the (possibly reconstructed) sequence header OBU, or null when the
/// blob carries no sequence header (it will then arrive with the first keyframe).
/// Fixes: the corrected size field is now written as proper LEB128 (the old
/// code cast it to a single byte, corrupting payloads of 128+ bytes), and the
/// payload offset is derived from the parsed OBU header instead of assuming a
/// 1-byte size field with no extension byte.
/// </summary>
private byte[]? ParseMatroskaCodecPrivate(byte[] codecPrivate)
{
    // Encode a value as AV1 leb128: low 7 bits per byte, MSB set on all but the last byte.
    static List<byte> EncodeLeb128(uint value)
    {
        var bytes = new List<byte>();
        do
        {
            byte b = (byte)(value & 0x7F);
            value >>= 7;
            if (value != 0) b |= 0x80;
            bytes.Add(b);
        } while (value != 0);
        return bytes;
    }
    try
    {
        if (codecPrivate == null || codecPrivate.Length == 0)
            return null;
        System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Parsing WebM CodecPrivate: {codecPrivate.Length} bytes");
        // Log raw CodecPrivate data for debugging
        var hexData = string.Join(" ", codecPrivate.Take(Math.Min(32, codecPrivate.Length)).Select(b => b.ToString("X2")));
        System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] CodecPrivate data: {hexData}");
        if (codecPrivate.Length == 4)
        {
            // Standard compliant: only the 4-byte configuration, no sequence header.
            System.Diagnostics.Debug.WriteLine("[VideoDecoderPipeline] Standard 4-byte CodecPrivate (no sequence header included)");
            ParseAv1ConfigBytes(codecPrivate);
            return null; // Sequence header will come from the first keyframe
        }
        // Extended form: parse the 4-byte configuration, then look for a sequence header OBU.
        System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Extended CodecPrivate with potential sequence header");
        var configBytes = new byte[4];
        Array.Copy(codecPrivate, 0, configBytes, 0, 4);
        ParseAv1ConfigBytes(configBytes);
        var obuInfo = ParseObuHeader(codecPrivate, 4);
        if (obuInfo != null && obuInfo.Value.obuType == 1) // Sequence Header OBU
        {
            System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Found sequence header OBU at offset 4");
            System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Original OBU size: {obuInfo.Value.obuSize}, has size field: {obuInfo.Value.hasSizeField}");
            // nextPosition = payloadStart + claimed size, so subtracting the claimed
            // size recovers the true payload start even when the stored size field
            // is wrong (the very situation this reconstruction exists to fix).
            int payloadStart = obuInfo.Value.nextPosition - (int)obuInfo.Value.obuSize;
            int actualPayloadSize = codecPrivate.Length - payloadStart;
            if (actualPayloadSize > 0)
            {
                System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Reconstructing OBU with correct size field: {actualPayloadSize} bytes");
                // Rebuild: header byte (forcing obu_has_size_field=1 so the output is
                // valid low-overhead format), optional extension byte, corrected
                // LEB128 size field, then the actual payload.
                byte headerByte = (byte)(codecPrivate[4] | 0x02);
                var sequenceOBU = new List<byte> { headerByte };
                if ((codecPrivate[4] & 0x04) != 0 && codecPrivate.Length > 5)
                {
                    sequenceOBU.Add(codecPrivate[5]); // preserve the extension byte
                }
                sequenceOBU.AddRange(EncodeLeb128((uint)actualPayloadSize));
                for (int i = payloadStart; i < codecPrivate.Length; i++)
                {
                    sequenceOBU.Add(codecPrivate[i]);
                }
                var reconstructedOBU = sequenceOBU.ToArray();
                var sequenceHex = string.Join(" ", reconstructedOBU.Take(Math.Min(16, reconstructedOBU.Length)).Select(b => b.ToString("X2")));
                System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Reconstructed sequence OBU: {sequenceHex} (length: {reconstructedOBU.Length})");
                return reconstructedOBU;
            }
            System.Diagnostics.Debug.WriteLine("[VideoDecoderPipeline] Sequence header OBU at offset 4 has an empty payload");
        }
        else
        {
            System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Invalid OBU at offset 4: type={obuInfo?.obuType ?? -1}");
        }
        // Fallback: non-standard layout, scan every offset for a sequence header OBU.
        for (int i = 0; i < codecPrivate.Length; i++)
        {
            var candidate = ParseObuHeader(codecPrivate, i);
            if (candidate != null && candidate.Value.obuType == 1) // Sequence Header OBU
            {
                System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Found valid sequence header OBU at offset {i} (fallback search)");
                System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] OBU size: {candidate.Value.obuSize}, has size field: {candidate.Value.hasSizeField}");
                // Extract the complete OBU including header and size field.
                int obuLength = candidate.Value.nextPosition - i;
                var sequenceOBU = new byte[obuLength];
                Array.Copy(codecPrivate, i, sequenceOBU, 0, obuLength);
                var sequenceHex = string.Join(" ", sequenceOBU.Take(Math.Min(16, sequenceOBU.Length)).Select(b => b.ToString("X2")));
                System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Extracted sequence OBU: {sequenceHex} (length: {obuLength})");
                return sequenceOBU;
            }
        }
        System.Diagnostics.Debug.WriteLine("[VideoDecoderPipeline] No sequence header found in CodecPrivate");
        return null;
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Error parsing WebM CodecPrivate: {ex.Message}");
        return null;
    }
}
/// <summary>
/// Logs the fields of the 4-byte AV1 configuration header
/// (marker/version, seq_profile, seq_level_idx_0). Logging only; no state is kept.
/// </summary>
private void ParseAv1ConfigBytes(byte[] configBytes)
{
    // Only the exact 4-byte record is understood; anything else is ignored.
    if (configBytes.Length != 4)
        return;
    byte first = configBytes[0];
    byte second = configBytes[1];
    // Bit layout: byte0 = marker(1) | version(7); byte1 = seq_profile(3) | seq_level_idx(5).
    bool marker = (first & 0x80) != 0;
    int version = first & 0x7F;
    int seqProfile = (second >> 5) & 0x07;
    int seqLevelIdx = second & 0x1F;
    System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] AV1 Config - Marker: {marker}, Version: {version}, Profile: {seqProfile}, Level: {seqLevelIdx}");
}
/// <summary>
/// Walks the OBUs of a keyframe and logs whether it satisfies the Matroska AV1
/// mapping's keyframe requirements (a Sequence Header OBU plus either a Frame
/// Header or a full Frame OBU). Diagnostic logging only; nothing is returned.
/// </summary>
private void ValidateAv1Keyframe(byte[] frameData, long sampleIndex)
{
    if (frameData == null || frameData.Length == 0)
        return;
    System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Validating AV1 keyframe at sample {sampleIndex}");
    bool seenSequenceHeader = false;
    bool seenFrameHeader = false;
    bool seenFrameObu = false;
    int parsed = 0;
    int offset = 0;
    // Walk the OBU chain; bounded to 10 OBUs to keep the diagnostics cheap.
    while (offset < frameData.Length && parsed < 10)
    {
        var header = ParseObuHeader(frameData, offset);
        if (header == null) break;
        parsed++;
        // Only the first few OBUs are logged in detail.
        if (parsed <= 5)
        {
            string obuTypeName = GetOBUTypeName(header.Value.obuType);
            System.Diagnostics.Debug.WriteLine($" OBU {parsed}: Type {header.Value.obuType} ({obuTypeName}), HasSize: {header.Value.hasSizeField}, Size: {header.Value.obuSize}");
        }
        // Track the OBU types the spec validation below cares about.
        switch (header.Value.obuType)
        {
            case 1: seenSequenceHeader = true; break;
            case 3: seenFrameHeader = true; break;
            case 6: seenFrameObu = true; break;
        }
        offset = header.Value.nextPosition;
    }
    if (!seenSequenceHeader)
    {
        System.Diagnostics.Debug.WriteLine($" ⚠️ SPEC VIOLATION: Keyframe at sample {sampleIndex} missing Sequence Header OBU");
    }
    // Either a Frame Header (Type 3) or a full Frame OBU (Type 6) must be present.
    if (!seenFrameHeader && !seenFrameObu)
    {
        System.Diagnostics.Debug.WriteLine($" ⚠️ SPEC VIOLATION: Keyframe at sample {sampleIndex} missing Frame Header (Type 3) or Frame OBU (Type 6)");
    }
    if (seenSequenceHeader && (seenFrameHeader || seenFrameObu))
    {
        System.Diagnostics.Debug.WriteLine($" ✅ Keyframe at sample {sampleIndex} appears spec compliant");
    }
}
/// <summary>
/// Parses one OBU header at <paramref name="position"/> per the AV1 spec.
/// Returns the OBU type, whether a size field was present, the payload size,
/// and the index of the byte after this OBU — or null when the header is
/// invalid (out of range, forbidden bit set, or truncated LEB128 size).
/// When no size field is present the OBU is assumed to run to the end of the data.
/// </summary>
private (int obuType, bool hasSizeField, uint obuSize, int nextPosition)? ParseObuHeader(byte[] data, int position)
{
    if (position >= data.Length) return null;
    byte headerByte = data[position];
    // obu_forbidden_bit (MSB) must be zero on every valid OBU header.
    if ((headerByte & 0x80) != 0)
    {
        System.Diagnostics.Debug.WriteLine($" ⚠️ SPEC VIOLATION: OBU forbidden bit is 1 at position {position}");
        return null;
    }
    int type = (headerByte >> 3) & 0x0F;
    bool hasExtension = (headerByte & 0x04) != 0;
    bool hasSize = (headerByte & 0x02) != 0;
    int cursor = position + 1;
    // Extension header carries temporal/spatial ids; skip its single byte.
    if (hasExtension && cursor < data.Length)
    {
        cursor++;
    }
    uint size;
    if (hasSize)
    {
        // obu_size is a LEB128-coded byte count of the payload.
        var leb = ParseLeb128(data, cursor);
        if (leb == null) return null;
        size = leb.Value.value;
        cursor = leb.Value.nextPosition;
    }
    else
    {
        // Without a size field the OBU extends to the end of the buffer —
        // allowed in annex-b style streams, but not in low-overhead format.
        size = (uint)(data.Length - cursor);
        System.Diagnostics.Debug.WriteLine($" ⚠️ SPEC VIOLATION: Low Overhead Format requires obu_has_size_field=1, but found 0");
    }
    return (type, hasSize, size, cursor + (int)size);
}
/// <summary>
/// Decodes an AV1 leb128() value starting at <paramref name="position"/>:
/// 7 value bits per byte, MSB is the continuation flag. Returns the decoded
/// value and the index after the last consumed byte, or null when the data
/// runs out (or exceeds the 5 bytes a 32-bit value may occupy) before a byte
/// without the continuation bit is seen.
/// </summary>
private (uint value, int nextPosition)? ParseLeb128(byte[] data, int position)
{
    uint result = 0;
    for (int shift = 0; shift < 35 && position < data.Length; shift += 7)
    {
        byte current = data[position++];
        result |= (uint)(current & 0x7F) << shift;
        if ((current & 0x80) == 0) // terminal byte: continuation bit clear
        {
            return (result, position);
        }
    }
    System.Diagnostics.Debug.WriteLine($" ⚠️ Invalid LEB128 encoding at position {position}");
    return null;
}
/// <summary>
/// Splits a buffer of concatenated OBUs into individual OBU byte arrays
/// (header + payload each). Parsing stops at the first invalid header;
/// an OBU whose declared size overruns the buffer is skipped, not copied.
/// </summary>
private List<byte[]> SplitOBUsFromData(byte[] data)
{
    var result = new List<byte[]>();
    int cursor = 0;
    while (cursor < data.Length)
    {
        var header = ParseObuHeader(data, cursor);
        if (header == null) break;
        int end = header.Value.nextPosition;
        int length = end - cursor;
        // Copy header + payload as one unit; guard against sizes past the buffer end.
        if (length > 0 && end <= data.Length)
        {
            var slice = new byte[length];
            Array.Copy(data, cursor, slice, 0, length);
            result.Add(slice);
            System.Diagnostics.Debug.WriteLine($"[SplitOBUs] Extracted OBU Type {header.Value.obuType}: {length} bytes");
        }
        cursor = end;
    }
    System.Diagnostics.Debug.WriteLine($"[SplitOBUs] Split data into {result.Count} OBUs");
    return result;
}
/// <summary>
/// Maps an AV1 obu_type value to its human-readable spec name,
/// or "Unknown(n)" for values without a defined name.
/// </summary>
private string GetOBUTypeName(int obuType)
{
    // Type 15 is the only named value outside the contiguous 0-8 range.
    if (obuType == 15)
        return "Padding";
    string[] names =
    {
        "Reserved",
        "Sequence Header",
        "Temporal Delimiter",
        "Frame Header",
        "Tile Group",
        "Metadata",
        "Frame",
        "Redundant Frame Header",
        "Tile List",
    };
    return (obuType >= 0 && obuType < names.Length) ? names[obuType] : $"Unknown({obuType})";
}
/// <summary>
/// Extracts the configOBUs (sequence header / metadata OBUs) that follow the
/// 4-byte fixed header of an ISO BMFF av1C record. Returns null when the
/// record is too short, the marker bit is absent, or no configOBUs follow.
/// av1C layout:
///   byte 0: marker (bit 7) + version (bits 6-0)
///   byte 1: seq_profile (3 bits) + seq_level_idx_0 (5 bits)
///   byte 2: flags, byte 3: chroma/color info
///   bytes 4..: configOBUs
/// </summary>
private byte[]? ParseAv1ConfigurationRecord(byte[] av1C)
{
    try
    {
        if (av1C.Length < 4)
            return null;
        // The marker bit must be set on every valid av1C record.
        if ((av1C[0] & 0x80) == 0)
        {
            System.Diagnostics.Debug.WriteLine("[VideoDecoderPipeline] Invalid av1C: missing marker bit");
            return null;
        }
        if (av1C.Length <= 4)
        {
            System.Diagnostics.Debug.WriteLine("[VideoDecoderPipeline] av1C contains no configOBUs");
            return null;
        }
        // Everything after the 4-byte fixed header is configOBU data.
        var configOBUs = av1C.Skip(4).ToArray();
        System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Extracted configOBUs: {configOBUs.Length} bytes");
        var hexData = string.Join(" ", configOBUs.Take(Math.Min(16, configOBUs.Length)).Select(b => b.ToString("X2")));
        System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] ConfigOBUs data: {hexData}");
        return configOBUs;
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Error parsing av1C: {ex.Message}");
        return null;
    }
}
/// <summary>
/// Pause the decoding pipeline
/// </summary>
/// <remarks>
/// Only sets a flag; the decoding task notices it on its next 10 ms poll,
/// so a chunk already being processed may still be decoded and enqueued.
/// </remarks>
public void Pause()
{
_isPaused = true;
System.Diagnostics.Debug.WriteLine("[VideoDecoderPipeline] Paused");
}
/// <summary>
/// Resume the decoding pipeline
/// </summary>
/// <remarks>
/// Clears the pause flag; the decoding task resumes within its 10 ms poll interval.
/// </remarks>
public void Resume()
{
_isPaused = false;
System.Diagnostics.Debug.WriteLine("[VideoDecoderPipeline] Resumed");
}
/// <summary>
/// Seek to a specific time position
/// </summary>
/// <param name="time">Target presentation time.</param>
/// <returns>True when the underlying file reader repositioned successfully.</returns>
/// <remarks>
/// NOTE(review): the decoding loop is not paused here, so a frame decoded from
/// the pre-seek position may still be enqueued after Clear() — confirm callers
/// pause the pipeline around seeks. Also, if the loop has already exited at
/// end-of-stream, decoding does not restart after a successful seek.
/// </remarks>
public async Task<bool> SeekAsync(TimeSpan time)
{
System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Seeking to {time}");
// Drop buffered frames from the old position before repositioning the reader.
_frameBuffer.Clear();
// Seek in the file reader
var success = await _fileReader.SeekToTimeAsync(time, _cancellationTokenSource.Token);
if (success)
{
System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Seek successful, resumed at sample {_fileReader.CurrentSampleIndex}");
}
else
{
System.Diagnostics.Debug.WriteLine("[VideoDecoderPipeline] Seek failed");
}
return success;
}
/// <summary>
/// Main decoding loop that runs continuously
/// </summary>
/// <remarks>
/// Runs on a thread-pool task started by the constructor. Each iteration:
/// waits while paused, backs off while the frame buffer is over 90% full,
/// then reads one chunk from the file reader and hands it to ProcessVideoChunk.
/// Exits on cancellation (Dispose) or end of stream.
/// </remarks>
private async Task DecodingLoop()
{
try
{
System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Starting decoding loop for {_fileReader.TotalSamples} samples");
while (!_cancellationTokenSource.Token.IsCancellationRequested)
{
// Pause handling
while (_isPaused && !_cancellationTokenSource.Token.IsCancellationRequested)
{
await Task.Delay(10, _cancellationTokenSource.Token);
}
// Check if we have space in the buffer
var bufferStats = _frameBuffer.GetStats();
if (bufferStats.FrameCount >= bufferStats.MaxFrameCount * 0.9) // 90% full
{
// Buffer is nearly full, wait a bit
await Task.Delay(10, _cancellationTokenSource.Token);
continue;
}
// Read next chunk from file
var chunk = await _fileReader.ReadNextChunkAsync(_cancellationTokenSource.Token);
if (chunk == null)
{
// End of file reached
_frameBuffer.MarkEndOfStream();
System.Diagnostics.Debug.WriteLine("[VideoDecoderPipeline] End of stream reached");
break;
}
System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Processing {chunk}");
// Process the chunk based on container format
await ProcessVideoChunk(chunk);
}
}
catch (OperationCanceledException)
{
// Task.Delay / ReadNextChunkAsync throw here when Dispose cancels the token.
System.Diagnostics.Debug.WriteLine("[VideoDecoderPipeline] Decoding loop cancelled");
}
catch (Exception ex)
{
// Any other fault ends the loop silently (logged, not rethrown).
// NOTE(review): consumers only observe this via IsRunning / missing
// frames — confirm that is the intended failure mode.
System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Decoding loop error: {ex.Message}");
}
finally
{
System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Decoding loop finished. Decoded {_frameCounter} frames");
}
}
/// <summary>
/// Process a video chunk based on container format.
/// MP4 samples are re-parsed into raw OBUs; Matroska payloads are fed directly.
/// Large Matroska keyframes are split into individual OBUs and fed one at a
/// time until the decoder produces a frame.
/// Fix: the decode-and-enqueue sequence that was duplicated between the
/// keyframe and regular paths is factored into TryDecodeAndEnqueueAsync.
/// </summary>
private async Task ProcessVideoChunk(VideoDataChunk chunk)
{
    try
    {
        byte[] decodingData;
        // Handle different container formats
        var extension = System.IO.Path.GetExtension(_fileReader.FilePath).ToLowerInvariant();
        if (extension == ".mp4")
        {
            // MP4: Parse AV1 sample to extract OBUs
            var obuList = Av1BitstreamParser.ParseMp4Sample(chunk.Data);
            if (obuList.Count == 0)
            {
                System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] No OBUs found in MP4 sample");
                return;
            }
            // Log OBU types for the first few chunks only, to avoid log spam.
            if (chunk.SampleIndex < 3)
            {
                for (int i = 0; i < obuList.Count; i++)
                {
                    Av1BitstreamParser.LogOBUInfo(obuList[i], $"[VideoDecoderPipeline] Chunk {chunk.SampleIndex} OBU {i}: ");
                }
            }
            // Combine all OBUs for decoding
            decodingData = Av1BitstreamParser.CombineOBUs(obuList);
            System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] MP4: Extracted {obuList.Count} OBUs, combined size: {decodingData.Length}");
        }
        else
        {
            // WebM/MKV: Use data directly as AV1 OBUs in "Low Overhead Bitstream Format"
            System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Matroska: Using data directly, size: {chunk.Data.Length}");
            if (chunk.IsKeyFrame && chunk.Data.Length > 1000) // Large keyframes likely have multiple OBUs
            {
                ValidateAv1Keyframe(chunk.Data, chunk.SampleIndex);
                // Feed each OBU individually and stop once the decoder yields a frame.
                bool decodingSuccess = false;
                foreach (var obuData in SplitOBUsFromData(chunk.Data))
                {
                    if (obuData.Length == 0)
                        continue;
                    System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Processing individual OBU: {obuData.Length} bytes");
                    var result = await TryDecodeAndEnqueueAsync(obuData, chunk);
                    if (result.FrameProduced)
                    {
                        // Success only counts if the frame was actually enqueued.
                        decodingSuccess = result.Enqueued;
                        break; // Successfully decoded, stop processing more OBUs
                    }
                }
                if (!decodingSuccess)
                {
                    System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Failed to decode keyframe with split OBUs");
                }
                return; // Keyframe fully handled above
            }
            decodingData = chunk.Data;
        }
        // Regular path: decode the whole payload in one call.
        var outcome = await TryDecodeAndEnqueueAsync(decodingData, chunk);
        if (!outcome.DecodeOk)
        {
            System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Failed to decode chunk of size {decodingData.Length}");
        }
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Error processing chunk: {ex.Message}");
    }
}
/// <summary>
/// Feeds one buffer of OBU data to the decoder and, when a frame is produced,
/// wraps it with the chunk's timing info and enqueues it into the frame buffer.
/// Shared by the keyframe split-OBU path and the regular path.
/// </summary>
/// <param name="data">Raw OBU data to decode.</param>
/// <param name="chunk">Source chunk supplying presentation time and keyframe flag.</param>
/// <returns>
/// DecodeOk: the decoder accepted the data; FrameProduced: a frame came out;
/// Enqueued: the frame was placed into the buffer (false means it was dropped).
/// </returns>
private async Task<(bool DecodeOk, bool FrameProduced, bool Enqueued)> TryDecodeAndEnqueueAsync(byte[] data, VideoDataChunk chunk)
{
    if (!_decoder.DecodeFrame(data, out var decodedFrame))
    {
        return (false, false, false);
    }
    if (!decodedFrame.HasValue)
    {
        // Decoder consumed the data but has no frame ready yet.
        return (true, false, false);
    }
    var frame = decodedFrame.Value;
    System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Decoded frame #{_frameCounter}: {frame.Width}x{frame.Height}");
    // Create video frame with timing information
    var videoFrame = new VideoFrame(frame, chunk.PresentationTimeMs, _frameCounter, chunk.IsKeyFrame);
    var enqueued = await _frameBuffer.TryEnqueueAsync(videoFrame, _cancellationTokenSource.Token);
    if (enqueued)
    {
        _frameCounter++;
        // Log buffer status periodically
        if (_frameCounter % 10 == 0)
        {
            var stats = _frameBuffer.GetStats();
            System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Buffer: {stats}");
        }
    }
    else
    {
        // Buffer is full; dispose immediately so the frame's resources are released.
        videoFrame.Dispose();
        System.Diagnostics.Debug.WriteLine("[VideoDecoderPipeline] Buffer full, dropped frame");
    }
    return (true, true, enqueued);
}
/// <summary>
/// Stops the background decoding task and releases the cancellation source.
/// The injected reader, decoder and frame buffer are NOT disposed here (they
/// are not owned by the pipeline). Safe to call multiple times.
/// </summary>
public void Dispose()
{
if (_disposed)
return;
System.Diagnostics.Debug.WriteLine("[VideoDecoderPipeline] Disposing");
_disposed = true;
// Signal cancellation, then give the loop a bounded window to wind down.
_cancellationTokenSource.Cancel();
try
{
// Wait() surfaces loop faults as AggregateException; any exception is
// logged and swallowed so Dispose itself never throws.
_decodingTask.Wait(TimeSpan.FromSeconds(5));
}
catch (Exception ex)
{
System.Diagnostics.Debug.WriteLine($"[VideoDecoderPipeline] Error waiting for decoding task: {ex.Message}");
}
_cancellationTokenSource.Dispose();
System.Diagnostics.Debug.WriteLine("[VideoDecoderPipeline] Disposed");
}
}
}