diff --git a/Snappier.Tests/SnappyStreamTests.cs b/Snappier.Tests/SnappyStreamTests.cs index 4c186da..cf79415 100644 --- a/Snappier.Tests/SnappyStreamTests.cs +++ b/Snappier.Tests/SnappyStreamTests.cs @@ -1,4 +1,5 @@ -using System.IO; +using System; +using System.IO; using System.IO.Compression; using System.Text; using System.Threading.Tasks; @@ -101,5 +102,54 @@ public async Task CompressAndDecompressAsync(string filename) Assert.Equal(sourceText, decompressedText); } + + [Theory] + [InlineData("alice29.txt")] + [InlineData("asyoulik.txt")] + [InlineData("fireworks.jpeg")] + [InlineData("geo.protodata")] + [InlineData("html")] + [InlineData("html_x_4")] + [InlineData("kppkn.gtb")] + [InlineData("lcet10.txt")] + [InlineData("paper-100k.pdf")] + [InlineData("plrabn12.txt")] + [InlineData("urls.10K")] + // Test writing lots of small chunks to catch errors where reading needs to break mid-chunk. + public void CompressAndDecompressChunkStressTest(string filename) + { + var resource = typeof(SnappyStreamTests).Assembly.GetManifestResourceStream($"Snappier.Tests.TestData.{filename}"); + using var resourceMem = new MemoryStream(); + resource.CopyTo(resourceMem); + var originalBytes = resourceMem.ToArray(); + + var rand = new Random(123); + + using var compresed = new MemoryStream(); + using (var inputStream = new MemoryStream(originalBytes)) + using (var compressor = new SnappyStream(compresed, CompressionMode.Compress, true)) + { + // Write lots of small randomly sized chunks to increase chance of hitting error conditions. + byte[] buffer = new byte[100]; + var requestedSize = rand.Next(1, buffer.Length); + int n; + while ((n = inputStream.Read(buffer.AsSpan(0, requestedSize))) != 0) + { + compressor.Write(buffer.AsSpan(0, n)); + // Flush after every write so we get lots of small chunks in the compressed output. 
+ compressor.Flush(); + } + } + compresed.Position = 0; + + using var decompressed = new MemoryStream(); + using (var decompressor = new SnappyStream(compresed, CompressionMode.Decompress, true)) + { + decompressor.CopyTo(decompressed); + } + + Assert.Equal(originalBytes.Length, decompressed.Length); + Assert.Equal(originalBytes, decompressed.ToArray()); + } } } diff --git a/Snappier/Internal/SnappyDecompressor.cs b/Snappier/Internal/SnappyDecompressor.cs index 60d7e46..6b74845 100644 --- a/Snappier/Internal/SnappyDecompressor.cs +++ b/Snappier/Internal/SnappyDecompressor.cs @@ -94,7 +94,7 @@ public void Reset() bool foundEnd = false; var i = 0; - while (input.Length > 0) + while (input.Length > i) { byte c = input[i]; i += 1; diff --git a/Snappier/Internal/SnappyStreamDecompressor.cs b/Snappier/Internal/SnappyStreamDecompressor.cs index 799cb4f..fbd0d4a 100644 --- a/Snappier/Internal/SnappyStreamDecompressor.cs +++ b/Snappier/Internal/SnappyStreamDecompressor.cs @@ -220,7 +220,7 @@ private unsafe uint ReadChunkHeader(ref byte* buffer, byte* bufferEnd) var bytesToCopyToScratch = 4 - _scratchLength; fixed (byte* scratch = _scratch) { - Buffer.MemoryCopy(buffer, scratch, ScratchBufferSize, bytesToCopyToScratch); + Buffer.MemoryCopy(buffer, scratch + _scratchLength, ScratchBufferSize, bytesToCopyToScratch); buffer += bytesToCopyToScratch; _scratchLength += bytesToCopyToScratch; @@ -280,15 +280,17 @@ private unsafe bool ReadChunkCrc(ref byte* inputPtr, byte* inputEnd, ref int chu } // Copy to scratch - new ReadOnlySpan<byte>(inputPtr, bytesAvailable) + int crcBytesAvailable = Math.Min(bytesAvailable, 4 - chunkBytesProcessed); + new ReadOnlySpan<byte>(inputPtr, crcBytesAvailable) .CopyTo(_scratch.AsSpan(_scratchLength)); - _scratchLength += bytesAvailable; - inputPtr += bytesAvailable; - chunkBytesProcessed += bytesAvailable; + _scratchLength += crcBytesAvailable; + inputPtr += crcBytesAvailable; + chunkBytesProcessed += crcBytesAvailable; if (_scratchLength >= 4) { 
_expectedChunkCrc = BinaryPrimitives.ReadUInt32LittleEndian(_scratch); + _scratchLength = 0; return true; }