// --- Repository-viewer header preserved as a comment (was raw scraped text that would not compile) ---
// File: video-v1/vav1/Vav1Player.Tests/Container/Av1BitstreamParserPerformanceTests.cs
// Snapshot: 2025-09-17 04:16:34 +09:00 — 258 lines, 8.5 KiB, C# (Raw/Blame/History view)
// NOTE(review): the viewer warned this file contains ambiguous Unicode characters that might be
// confused with other characters; verify none remain in byte/string literals below.
using FluentAssertions;
using System.Diagnostics;
using Vav1Player.Container;
namespace Vav1Player.Tests.Container;
/// <summary>
/// Performance-oriented tests for <see cref="Av1BitstreamParser"/>: large valid samples,
/// scaling with OBU count, LEB128 length-prefix widths, fast rejection of invalid data,
/// and repeated-parse stress. Timing thresholds are wall-clock based and intentionally
/// generous; correctness is always asserted before timing.
/// </summary>
public class Av1BitstreamParserPerformanceTests
{
    [Fact]
    public void ParseMp4Sample_WithLargeValidSample_ShouldCompleteWithinTimeLimit()
    {
        // Arrange - large but valid sample: 100 OBUs x 50KB = ~5MB total
        const int obuCount = 100;
        const int obuSize = 50_000; // 50KB per OBU
        var sampleData = CreateLargeSampleData(obuCount, obuSize);
        var stopwatch = Stopwatch.StartNew();

        // Act
        var result = Av1BitstreamParser.ParseMp4Sample(sampleData);
        stopwatch.Stop();

        // Assert - correctness first, then timing
        result.Should().HaveCount(obuCount);
        result.All(obu => obu.Length == obuSize).Should().BeTrue();

        // Performance assertion - should complete within 100ms for 5MB
        stopwatch.ElapsedMilliseconds.Should().BeLessThan(100,
            "because parsing 5MB of valid OBU data should be fast");
    }

    [Fact]
    public void ParseMp4Sample_WithManySmallOBUs_ShouldScaleLinearly()
    {
        // Arrange - identical OBU payload size, increasing OBU counts
        var testCases = new[] { 100, 500, 1000 };
        var timings = new List<long>();

        foreach (var obuCount in testCases)
        {
            var sampleData = CreateLargeSampleData(obuCount, 100); // 100 bytes per OBU
            var stopwatch = Stopwatch.StartNew();

            // Act
            var result = Av1BitstreamParser.ParseMp4Sample(sampleData);
            stopwatch.Stop();
            timings.Add(stopwatch.ElapsedMilliseconds);

            // Assert correctness for each case
            result.Should().HaveCount(obuCount);
        }

        // Performance assertion - should scale roughly linearly: the largest count
        // (10x the smallest) must take less than 10x the smallest's time.
        // timings[^1] tracks the last test case even if testCases grows;
        // Math.Max guards against a 0ms baseline on fast machines.
        var ratio = (double)timings[^1] / Math.Max(timings[0], 1);
        ratio.Should().BeLessThan(10.0,
            "because parsing should scale roughly linearly with OBU count");
    }

    [Fact]
    public void CombineOBUs_WithLargeDataSet_ShouldCompleteQuickly()
    {
        // Arrange - 1000 OBUs of 1KB each whose headers deliberately lack the
        // size field (bit 1 clear), so CombineOBUs must insert size fields itself.
        var obuList = new List<byte[]>(1000);
        for (int i = 0; i < 1000; i++)
        {
            var obu = new byte[1024];
            obu[0] = (byte)(i & 0xFC); // mask keeps bit 1 (has-size flag) at 0
            for (int j = 1; j < obu.Length; j++)
            {
                obu[j] = (byte)((i + j) & 0xFF); // deterministic filler pattern
            }
            obuList.Add(obu);
        }
        var stopwatch = Stopwatch.StartNew();

        // Act
        var result = Av1BitstreamParser.CombineOBUs(obuList);
        stopwatch.Stop();

        // Assert - output must be larger than the raw 1MB input because size
        // fields were added to every OBU.
        result.Length.Should().BeGreaterThan(1000 * 1024);

        // Performance assertion - should complete within 50ms for ~1MB
        stopwatch.ElapsedMilliseconds.Should().BeLessThan(50,
            "because combining 1MB of OBU data should be fast");
    }

    [Fact]
    public void ParseMp4Sample_WithMaxSizeLEB128_ShouldHandleEfficiently()
    {
        // Arrange - one OBU per sample, sized to the maximum value of each
        // LEB128 encoding width (1, 2, and 3 bytes).
        var samples = new List<byte[]>
        {
            CreateSampleWithLEB128Size(127, 1),     // largest 1-byte LEB128
            CreateSampleWithLEB128Size(16383, 2),   // largest 2-byte LEB128
            CreateSampleWithLEB128Size(2097151, 3)  // largest 3-byte LEB128
        };

        foreach (var sampleData in samples)
        {
            var stopwatch = Stopwatch.StartNew();

            // Act
            var result = Av1BitstreamParser.ParseMp4Sample(sampleData);
            stopwatch.Stop();

            // Assert
            result.Should().HaveCount(1);

            // Performance assertion - even large LEB128 should parse quickly
            stopwatch.ElapsedMilliseconds.Should().BeLessThan(10,
                "because LEB128 parsing should be efficient regardless of size");
        }
    }

    [Fact]
    public void ParseMp4Sample_WithInvalidData_ShouldFailFast()
    {
        // Arrange - various malformed inputs that must be rejected quickly
        var invalidSamples = new[]
        {
            new byte[] { 0x80, 0x80, 0x80, 0x80, 0x80 }, // Truncated LEB128
            new byte[] { 0xFF, 0xFF, 0xFF, 0xFF, 0x0F }, // Oversized LEB128
            new byte[] { 0x80, 0x00 }, // Invalid multi-byte zero
            new byte[0], // Empty data
            new byte[] { 0x10, 0x01, 0x02 } // Length=16 but only 2 bytes
        };

        foreach (var invalidData in invalidSamples)
        {
            var stopwatch = Stopwatch.StartNew();

            // Act
            var result = Av1BitstreamParser.ParseMp4Sample(invalidData);
            stopwatch.Stop();

            // Assert - invalid input yields no OBUs rather than throwing
            result.Should().BeEmpty();

            // Performance assertion - should fail fast
            stopwatch.ElapsedMilliseconds.Should().BeLessThan(5,
                "because invalid data should be rejected quickly");
        }
    }

    [Fact]
    public void ParseMp4Sample_StressTest_ShouldHandleRepeatedParsing()
    {
        // Arrange - medium-sized sample (50 OBUs of 1KB) parsed many times
        var sampleData = CreateLargeSampleData(50, 1000);
        const int iterations = 1000;
        var stopwatch = Stopwatch.StartNew();

        // Act - parse the same data repeatedly. The in-loop sanity check is a
        // plain comparison: a FluentAssertions call on every iteration would be
        // included in the measured time and inflate the average.
        for (int i = 0; i < iterations; i++)
        {
            var result = Av1BitstreamParser.ParseMp4Sample(sampleData);
            if (result.Count() != 50)
            {
                result.Should().HaveCount(50); // pay the assertion cost only on failure
            }
        }
        stopwatch.Stop();

        // Assert - Elapsed.TotalMilliseconds keeps sub-millisecond precision,
        // which matters for a <1.0ms-per-parse threshold (ElapsedMilliseconds
        // truncates to whole milliseconds).
        var avgTimePerParse = stopwatch.Elapsed.TotalMilliseconds / iterations;
        avgTimePerParse.Should().BeLessThan(1.0,
            "because repeated parsing of the same data should be consistently fast");
    }

    [Theory]
    [InlineData(10, 100)] // Small: 10 OBUs x 100 bytes
    [InlineData(100, 1000)] // Medium: 100 OBUs x 1KB
    [InlineData(50, 10000)] // Large: 50 OBUs x 10KB
    public void ParseMp4Sample_VariousDataSizes_ShouldCompleteReasonably(int obuCount, int obuSize)
    {
        // Arrange
        var sampleData = CreateLargeSampleData(obuCount, obuSize);
        var totalSize = obuCount * obuSize;
        var stopwatch = Stopwatch.StartNew();

        // Act
        var result = Av1BitstreamParser.ParseMp4Sample(sampleData);
        stopwatch.Stop();

        // Assert - correctness, then a generous 1-second ceiling
        result.Should().HaveCount(obuCount);
        stopwatch.ElapsedMilliseconds.Should().BeLessThan(1000,
            $"because parsing {totalSize} bytes should complete within 1 second (actual: {stopwatch.ElapsedMilliseconds}ms)");
    }

    /// <summary>
    /// Builds a length-delimited MP4 sample containing <paramref name="obuCount"/> OBUs,
    /// each prefixed with its LEB128-encoded length and filled with a deterministic
    /// byte pattern of <paramref name="obuSize"/> bytes.
    /// </summary>
    private static byte[] CreateLargeSampleData(int obuCount, int obuSize)
    {
        // Presize: payload plus at most 5 LEB128 length bytes per OBU, so the
        // multi-megabyte buffers used above are built without repeated growth.
        var sampleList = new List<byte>(obuCount * (obuSize + 5));
        for (int i = 0; i < obuCount; i++)
        {
            // LEB128 length prefix, then the OBU payload pattern
            sampleList.AddRange(EncodeLEB128((uint)obuSize));
            for (int j = 0; j < obuSize; j++)
            {
                sampleList.Add((byte)((i + j) & 0xFF));
            }
        }
        return sampleList.ToArray();
    }

    /// <summary>
    /// Builds a single-OBU sample whose LEB128 length prefix must occupy exactly
    /// <paramref name="expectedBytes"/> bytes; the width is verified as a fixture
    /// sanity check so a wrong test constant fails loudly.
    /// </summary>
    private static byte[] CreateSampleWithLEB128Size(uint value, int expectedBytes)
    {
        var lengthBytes = EncodeLEB128(value);
        lengthBytes.Length.Should().Be(expectedBytes, $"LEB128 encoding of {value} should use {expectedBytes} bytes");

        var obuData = new byte[value];
        for (int i = 0; i < obuData.Length; i++)
        {
            obuData[i] = (byte)(i & 0xFF); // deterministic payload pattern
        }

        var result = new byte[lengthBytes.Length + obuData.Length];
        Array.Copy(lengthBytes, 0, result, 0, lengthBytes.Length);
        Array.Copy(obuData, 0, result, lengthBytes.Length, obuData.Length);
        return result;
    }

    /// <summary>
    /// Encodes <paramref name="value"/> as unsigned LEB128: 7 payload bits per byte,
    /// least-significant group first, continuation (high) bit set on every byte
    /// except the last.
    /// </summary>
    private static byte[] EncodeLEB128(uint value)
    {
        var bytes = new List<byte>();
        while (value >= 0x80)
        {
            bytes.Add((byte)((value & 0x7F) | 0x80)); // continuation bit set
            value >>= 7;
        }
        bytes.Add((byte)value); // final byte: value < 0x80, so no masking needed
        return bytes.ToArray();
    }
}