Browse Source

Merge remote-tracking branch 'upstream/main' into better_handle_corrupt_png

pull/2589/head
James Jackson-South 2 years ago
parent
commit
637a109305
  1. 31
      src/ImageSharp/Common/Helpers/SimdUtils.HwIntrinsics.cs
  2. 93
      src/ImageSharp/Formats/AnimatedImageFrameMetadata.cs
  3. 32
      src/ImageSharp/Formats/AnimatedImageMetadata.cs
  4. 244
      src/ImageSharp/Formats/AnimationUtilities.cs
  5. 2
      src/ImageSharp/Formats/Gif/GifDecoderCore.cs
  6. 2
      src/ImageSharp/Formats/Gif/GifEncoder.cs
  7. 534
      src/ImageSharp/Formats/Gif/GifEncoderCore.cs
  8. 40
      src/ImageSharp/Formats/Gif/GifFrameMetadata.cs
  9. 26
      src/ImageSharp/Formats/Gif/GifMetadata.cs
  10. 12
      src/ImageSharp/Formats/Gif/LzwEncoder.cs
  11. 51
      src/ImageSharp/Formats/Gif/MetadataExtensions.cs
  12. 14
      src/ImageSharp/Formats/Png/Chunks/AnimationControl.cs
  13. 2
      src/ImageSharp/Formats/Png/Chunks/PngPhysical.cs
  14. 58
      src/ImageSharp/Formats/Png/MetadataExtensions.cs
  15. 57
      src/ImageSharp/Formats/Png/PngDecoderCore.cs
  16. 8
      src/ImageSharp/Formats/Png/PngDisposalMethod.cs
  17. 236
      src/ImageSharp/Formats/Png/PngEncoderCore.cs
  18. 18
      src/ImageSharp/Formats/Png/PngFrameMetadata.cs
  19. 33
      src/ImageSharp/Formats/Png/PngMetadata.cs
  20. 37
      src/ImageSharp/Formats/Webp/AlphaEncoder.cs
  21. 7
      src/ImageSharp/Formats/Webp/BitWriter/BitWriterBase.cs
  22. 24
      src/ImageSharp/Formats/Webp/Chunks/WebpFrameData.cs
  23. 73
      src/ImageSharp/Formats/Webp/Lossless/Vp8LEncoder.cs
  24. 64
      src/ImageSharp/Formats/Webp/Lossy/Vp8Encoder.cs
  25. 15
      src/ImageSharp/Formats/Webp/Lossy/YuvConversion.cs
  26. 48
      src/ImageSharp/Formats/Webp/MetadataExtensions.cs
  27. 8
      src/ImageSharp/Formats/Webp/WebpAnimationDecoder.cs
  28. 16
      src/ImageSharp/Formats/Webp/WebpBlendMethod.cs
  29. 56
      src/ImageSharp/Formats/Webp/WebpCommonUtils.cs
  30. 2
      src/ImageSharp/Formats/Webp/WebpDisposalMethod.cs
  31. 98
      src/ImageSharp/Formats/Webp/WebpEncoderCore.cs
  32. 10
      src/ImageSharp/Formats/Webp/WebpFrameMetadata.cs
  33. 22
      src/ImageSharp/Formats/Webp/WebpMetadata.cs
  34. 20
      src/ImageSharp/Metadata/FrameDecodingMode.cs
  35. 26
      src/ImageSharp/Metadata/ImageMetadata.cs
  36. 14
      src/ImageSharp/Processing/Processors/Dithering/ErrorDither.cs
  37. 85
      src/ImageSharp/Processing/Processors/Quantization/EuclideanPixelMap{TPixel}.cs
  38. 4
      src/ImageSharp/Processing/Processors/Quantization/QuantizerUtilities.cs
  39. 14
      tests/ImageSharp.Tests/Formats/GeneralFormatTests.cs
  40. 15
      tests/ImageSharp.Tests/Formats/Gif/GifDecoderTests.cs
  41. 117
      tests/ImageSharp.Tests/Formats/Gif/GifEncoderTests.cs
  42. 113
      tests/ImageSharp.Tests/Formats/Png/PngEncoderTests.cs
  43. 4
      tests/ImageSharp.Tests/Formats/Png/PngFrameMetadataTests.cs
  44. 4
      tests/ImageSharp.Tests/Formats/WebP/WebpDecoderTests.cs
  45. 107
      tests/ImageSharp.Tests/Formats/WebP/WebpEncoderTests.cs
  46. 5
      tests/ImageSharp.Tests/Formats/WebP/YuvConversionTests.cs
  47. 22
      tests/ImageSharp.Tests/TestImages.cs
  48. 4
      tests/ImageSharp.Tests/TestUtilities/ImagingTestCaseUtility.cs
  49. 1
      tests/ImageSharp.Tests/TestUtilities/TestEnvironment.Formats.cs
  50. 4
      tests/Images/External/ReferenceOutput/DitherTests/ApplyDiffusionFilterInBox_Rgba32_CalliphoraPartial.png
  51. 4
      tests/Images/External/ReferenceOutput/DitherTests/ApplyDitherFilterInBox_Rgba32_CalliphoraPartial.png
  52. 4
      tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_ShouldNotDependOnSinglePixelType_Bgra32_filter0.png
  53. 4
      tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_ShouldNotDependOnSinglePixelType_Rgb24_filter0.png
  54. 4
      tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_ShouldNotDependOnSinglePixelType_Rgba32_filter0.png
  55. 4
      tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_ShouldNotDependOnSinglePixelType_RgbaVector_filter0.png
  56. 4
      tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_Bike_Atkinson.png
  57. 4
      tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_Bike_Burks.png
  58. 4
      tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_Bike_FloydSteinberg.png
  59. 4
      tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_Bike_JarvisJudiceNinke.png
  60. 4
      tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_Bike_Sierra2.png
  61. 4
      tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_Bike_Sierra3.png
  62. 4
      tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_Bike_SierraLite.png
  63. 4
      tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_Bike_StevensonArce.png
  64. 4
      tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_Bike_Stucki.png
  65. 4
      tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_CalliphoraPartial_Atkinson.png
  66. 4
      tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_CalliphoraPartial_Burks.png
  67. 4
      tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_CalliphoraPartial_FloydSteinberg.png
  68. 4
      tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_CalliphoraPartial_JarvisJudiceNinke.png
  69. 4
      tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_CalliphoraPartial_Sierra2.png
  70. 4
      tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_CalliphoraPartial_Sierra3.png
  71. 4
      tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_CalliphoraPartial_SierraLite.png
  72. 4
      tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_CalliphoraPartial_StevensonArce.png
  73. 4
      tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_CalliphoraPartial_Stucki.png
  74. 4
      tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_ShouldNotDependOnSinglePixelType_Bgra32_filter0.png
  75. 4
      tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_ShouldNotDependOnSinglePixelType_Rgb24_filter0.png
  76. 4
      tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_ShouldNotDependOnSinglePixelType_Rgba32_filter0.png
  77. 4
      tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_ShouldNotDependOnSinglePixelType_RgbaVector_filter0.png
  78. 4
      tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_WorksWithAllDitherers_Bike_Bayer16x16.png
  79. 4
      tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_WorksWithAllDitherers_Bike_Bayer2x2.png
  80. 4
      tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_WorksWithAllDitherers_Bike_Bayer4x4.png
  81. 4
      tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_WorksWithAllDitherers_Bike_Bayer8x8.png
  82. 4
      tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_WorksWithAllDitherers_Bike_Ordered3x3.png
  83. 4
      tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_WorksWithAllDitherers_CalliphoraPartial_Bayer16x16.png
  84. 4
      tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_WorksWithAllDitherers_CalliphoraPartial_Bayer2x2.png
  85. 4
      tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_WorksWithAllDitherers_CalliphoraPartial_Bayer4x4.png
  86. 4
      tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_WorksWithAllDitherers_CalliphoraPartial_Bayer8x8.png
  87. 4
      tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_WorksWithAllDitherers_CalliphoraPartial_Ordered3x3.png
  88. 3
      tests/Images/External/ReferenceOutput/GifDecoderTests/Decode_VerifyRootFrameAndFrameCount_Rgba32_cheers.png
  89. 3
      tests/Images/External/ReferenceOutput/GifDecoderTests/Decode_VerifyRootFrameAndFrameCount_Rgba32_issue403_baddescriptorwidth.png
  90. 3
      tests/Images/External/ReferenceOutput/GifDecoderTests/Decode_VerifyRootFrameAndFrameCount_Rgba32_m4nb.png
  91. 3
      tests/Images/External/ReferenceOutput/GifDecoderTests/Decode_VerifyRootFrameAndFrameCount_Rgba32_mixed-disposal.png
  92. 4
      tests/Images/External/ReferenceOutput/PngEncoderTests/Issue2469_Quantized_Encode_Artifacts_Rgba32_issue_2469.png
  93. 4
      tests/Images/External/ReferenceOutput/QuantizerTests/ApplyQuantizationInBox_Bike_OctreeQuantizer_ErrorDither.png
  94. 4
      tests/Images/External/ReferenceOutput/QuantizerTests/ApplyQuantizationInBox_Bike_WebSafePaletteQuantizer_ErrorDither.png
  95. 4
      tests/Images/External/ReferenceOutput/QuantizerTests/ApplyQuantizationInBox_Bike_WernerPaletteQuantizer_ErrorDither.png
  96. 4
      tests/Images/External/ReferenceOutput/QuantizerTests/ApplyQuantizationInBox_Bike_WuQuantizer_ErrorDither.png
  97. 4
      tests/Images/External/ReferenceOutput/QuantizerTests/ApplyQuantizationInBox_CalliphoraPartial_OctreeQuantizer_ErrorDither.png
  98. 4
      tests/Images/External/ReferenceOutput/QuantizerTests/ApplyQuantizationInBox_CalliphoraPartial_WebSafePaletteQuantizer_ErrorDither.png
  99. 4
      tests/Images/External/ReferenceOutput/QuantizerTests/ApplyQuantizationInBox_CalliphoraPartial_WernerPaletteQuantizer_ErrorDither.png
  100. 4
      tests/Images/External/ReferenceOutput/QuantizerTests/ApplyQuantizationInBox_CalliphoraPartial_WuQuantizer_ErrorDither.png

31
src/ImageSharp/Common/Helpers/SimdUtils.HwIntrinsics.cs

@ -1,6 +1,7 @@
// Copyright (c) Six Labors.
// Licensed under the Six Labors Split License.
using System.Numerics;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
@ -656,6 +657,36 @@ internal static partial class SimdUtils
return AdvSimd.BitwiseSelect(signedMask, right.AsInt16(), left.AsInt16()).AsByte();
}
/// <summary>
/// Blend packed 32-bit unsigned integers from <paramref name="left"/> and <paramref name="right"/> using <paramref name="mask"/>.
/// The high bit of each corresponding <paramref name="mask"/> byte determines the selection.
/// If the high bit is set the element of <paramref name="left"/> is selected.
/// The element of <paramref name="right"/> is selected otherwise.
/// </summary>
/// <param name="left">The left vector.</param>
/// <param name="right">The right vector.</param>
/// <param name="mask">The mask vector.</param>
/// <returns>The <see cref="Vector128{T}"/>.</returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static Vector128<uint> BlendVariable(Vector128<uint> left, Vector128<uint> right, Vector128<uint> mask)
    => BlendVariable(left.AsByte(), right.AsByte(), mask.AsByte()).AsUInt32();
/// <summary>
/// Counts the number of leading zero bits in a 16-bit mask,
/// mirroring the behavior of the x86 LZCNT instruction.
/// </summary>
/// <param name="value">The value to scan.</param>
public static ushort LeadingZeroCount(ushort value)
{
    // BitOperations operates on 32-bit values; subtract the 16 high-order
    // zero bits introduced by widening the ushort operand.
    int count = BitOperations.LeadingZeroCount(value) - 16;
    return (ushort)count;
}
/// <summary>
/// Counts the number of trailing zero bits in a 16-bit value,
/// mirroring the behavior of the x86 TZCNT instruction.
/// </summary>
/// <param name="value">The value to scan.</param>
public static ushort TrailingZeroCount(ushort value)
{
    // Shift into the upper half first so a zero input reports 32 trailing
    // zeros (32 - 16 == 16), matching TZCNT semantics for a 16-bit operand.
    uint widened = (uint)(value << 16);
    return (ushort)(BitOperations.TrailingZeroCount(widened) - 16);
}
/// <summary>
/// <see cref="ByteToNormalizedFloat"/> as many elements as possible, slicing them down (keeping the remainder).
/// </summary>

93
src/ImageSharp/Formats/AnimatedImageFrameMetadata.cs

@ -0,0 +1,93 @@
// Copyright (c) Six Labors.
// Licensed under the Six Labors Split License.
namespace SixLabors.ImageSharp.Formats;
/// <summary>
/// Provides format-agnostic metadata describing a single frame of an animated image.
/// </summary>
internal class AnimatedImageFrameMetadata
{
    /// <summary>
    /// Gets or sets how long the frame is displayed.
    /// </summary>
    public TimeSpan Duration { get; set; }

    /// <summary>
    /// Gets or sets how the frame is alpha-blended onto the canvas.
    /// </summary>
    public FrameBlendMode BlendMode { get; set; }

    /// <summary>
    /// Gets or sets how the frame area is disposed of before rendering the next frame.
    /// </summary>
    public FrameDisposalMode DisposalMode { get; set; }

    /// <summary>
    /// Gets or sets whether the frame uses the shared (global) or its own (local) color table.
    /// </summary>
    public FrameColorTableMode ColorTableMode { get; set; }

    /// <summary>
    /// Gets or sets the color table for this frame, if one is present.
    /// </summary>
    public ReadOnlyMemory<Color>? ColorTable { get; set; }
}
#pragma warning disable SA1201 // Elements should appear in the correct order
internal enum FrameBlendMode
#pragma warning restore SA1201 // Elements should appear in the correct order
{
    /// <summary>
    /// Do not blend. Render the current frame on the canvas by overwriting the rectangle covered by the current frame.
    /// </summary>
    Source = 0,

    /// <summary>
    /// Blend the current frame with the previous frame in the animation sequence within the rectangle covered
    /// by the current frame.
    /// If the current frame has any transparent areas, the corresponding areas of the previous frame will be visible
    /// through these transparent regions.
    /// </summary>
    Over = 1
}
/// <summary>
/// Describes how a frame's area is treated before the next frame in the sequence is rendered.
/// </summary>
internal enum FrameDisposalMode
{
    /// <summary>
    /// No disposal specified.
    /// The decoder is not required to take any action.
    /// </summary>
    Unspecified = 0,

    /// <summary>
    /// Do not dispose. The current frame is not disposed of, or in other words, not cleared or altered when moving to
    /// the next frame. This means that the next frame is drawn over the current frame, and if the next frame contains
    /// transparency, the previous frame will be visible through these transparent areas.
    /// </summary>
    DoNotDispose = 1,

    /// <summary>
    /// Restore to background color. When transitioning to the next frame, the area occupied by the current frame is
    /// filled with the background color specified in the image metadata.
    /// This effectively erases the current frame by replacing it with the background color before the next frame is displayed.
    /// </summary>
    RestoreToBackground = 2,

    /// <summary>
    /// Restore to previous. This method restores the area affected by the current frame to what it was before the
    /// current frame was displayed. It essentially "undoes" the current frame, reverting to the state of the image
    /// before the frame was displayed, then the next frame is drawn. This is useful for animations where only a small
    /// part of the image changes from frame to frame.
    /// </summary>
    RestoreToPrevious = 3
}
/// <summary>
/// Indicates which color table a frame's indexed pixel data refers to.
/// </summary>
internal enum FrameColorTableMode
{
    /// <summary>
    /// The frame uses the shared color table specified by the image metadata.
    /// </summary>
    Global,

    /// <summary>
    /// The frame uses a color table specified by the frame metadata.
    /// </summary>
    Local
}

32
src/ImageSharp/Formats/AnimatedImageMetadata.cs

@ -0,0 +1,32 @@
// Copyright (c) Six Labors.
// Licensed under the Six Labors Split License.
namespace SixLabors.ImageSharp.Formats;
/// <summary>
/// Provides format-agnostic metadata describing an animated image as a whole.
/// </summary>
internal class AnimatedImageMetadata
{
    /// <summary>
    /// Gets or sets the number of times the animation repeats.
    /// </summary>
    /// <remarks>
    /// A value of 0 repeats indefinitely; otherwise the stored count represents
    /// n-1 additional repeats. Defaults to 1.
    /// </remarks>
    public ushort RepeatCount { get; set; }

    /// <summary>
    /// Gets or sets the default background color of the canvas when animating.
    /// This color may be used to fill the unused space on the canvas around the frames,
    /// as well as the transparent pixels of the first frame.
    /// It is also the fill color applied when a frame's disposal mode is
    /// <see cref="FrameDisposalMode.RestoreToBackground"/>.
    /// </summary>
    public Color BackgroundColor { get; set; }

    /// <summary>
    /// Gets or sets whether frames default to the shared or their own color table.
    /// </summary>
    public FrameColorTableMode ColorTableMode { get; set; }

    /// <summary>
    /// Gets or sets the shared (global) color table, if one is present.
    /// </summary>
    public ReadOnlyMemory<Color>? ColorTable { get; set; }
}

244
src/ImageSharp/Formats/AnimationUtilities.cs

@ -0,0 +1,244 @@
// Copyright (c) Six Labors.
// Licensed under the Six Labors Split License.
using System.Buffers;
using System.Numerics;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
using SixLabors.ImageSharp.Advanced;
using SixLabors.ImageSharp.Memory;
using SixLabors.ImageSharp.PixelFormats;
namespace SixLabors.ImageSharp.Formats;
/// <summary>
/// Utility methods for animated formats.
/// </summary>
internal static class AnimationUtilities
{
    /// <summary>
    /// Deduplicates pixels between the previous and current frame returning only the changed pixels and bounds.
    /// </summary>
    /// <typeparam name="TPixel">The type of pixel format.</typeparam>
    /// <param name="configuration">The configuration.</param>
    /// <param name="previousFrame">The previous frame if present.</param>
    /// <param name="currentFrame">The current frame.</param>
    /// <param name="nextFrame">The next frame if present.</param>
    /// <param name="resultFrame">The resultant output.</param>
    /// <param name="replacement">The value to use when replacing duplicate pixels.</param>
    /// <param name="blend">Whether the resultant frame represents an animation blend.</param>
    /// <param name="clampingMode">The clamping bound to apply when calculating difference bounds.</param>
    /// <returns>The <see cref="ValueTuple{Boolean, Rectangle}"/> representing the operation result.</returns>
    public static (bool Difference, Rectangle Bounds) DeDuplicatePixels<TPixel>(
        Configuration configuration,
        ImageFrame<TPixel>? previousFrame,
        ImageFrame<TPixel> currentFrame,
        ImageFrame<TPixel>? nextFrame,
        ImageFrame<TPixel> resultFrame,
        Color replacement,
        bool blend,
        ClampingMode clampingMode = ClampingMode.None)
        where TPixel : unmanaged, IPixel<TPixel>
    {
        // One cleared scratch allocation holds four row-width strips laid out
        // back-to-back: previous | current | next | result. Clearing matters:
        // when previousFrame/nextFrame are null their strips stay all-zero.
        MemoryAllocator memoryAllocator = configuration.MemoryAllocator;
        using IMemoryOwner<Rgba32> buffers = memoryAllocator.Allocate<Rgba32>(currentFrame.Width * 4, AllocationOptions.Clean);
        Span<Rgba32> previous = buffers.GetSpan()[..currentFrame.Width];
        Span<Rgba32> current = buffers.GetSpan().Slice(currentFrame.Width, currentFrame.Width);
        Span<Rgba32> next = buffers.GetSpan().Slice(currentFrame.Width * 2, currentFrame.Width);
        Span<Rgba32> result = buffers.GetSpan()[(currentFrame.Width * 3)..];

        Rgba32 bg = replacement;

        // Difference bounds accumulated across the whole image. Initialized to
        // sentinel extremes so min/max updates work; clamped before returning.
        int top = int.MinValue;
        int bottom = int.MaxValue;
        int left = int.MaxValue;
        int right = int.MinValue;

        bool hasDiff = false;

        for (int y = 0; y < currentFrame.Height; y++)
        {
            // Convert the source rows to Rgba32 so comparison is format-agnostic.
            if (previousFrame != null)
            {
                PixelOperations<TPixel>.Instance.ToRgba32(configuration, previousFrame.DangerousGetPixelRowMemory(y).Span, previous);
            }

            PixelOperations<TPixel>.Instance.ToRgba32(configuration, currentFrame.DangerousGetPixelRowMemory(y).Span, current);

            if (nextFrame != null)
            {
                PixelOperations<TPixel>.Instance.ToRgba32(configuration, nextFrame.DangerousGetPixelRowMemory(y).Span, next);
            }

            ref Vector256<byte> previousBase256 = ref Unsafe.As<Rgba32, Vector256<byte>>(ref MemoryMarshal.GetReference(previous));
            ref Vector256<byte> currentBase256 = ref Unsafe.As<Rgba32, Vector256<byte>>(ref MemoryMarshal.GetReference(current));
            ref Vector256<byte> nextBase256 = ref Unsafe.As<Rgba32, Vector256<byte>>(ref MemoryMarshal.GetReference(next));
            ref Vector256<byte> resultBase256 = ref Unsafe.As<Rgba32, Vector256<byte>>(ref MemoryMarshal.GetReference(result));

            // i is the pixel index into the row; x is the vector-element index
            // (256-bit steps first, rescaled to 128-bit steps below).
            int i = 0;
            uint x = 0;
            bool hasRowDiff = false;
            int length = current.Length;
            int remaining = current.Length;

            // AVX2 path: 8 pixels per iteration.
            if (Avx2.IsSupported && remaining >= 8)
            {
                // Replacement value to write for duplicates; all-zero when there is
                // no previous frame. vmb256 is all-ones only when blending, gating
                // whether duplicate pixels are overwritten with the replacement.
                Vector256<uint> r256 = previousFrame != null ? Vector256.Create(bg.PackedValue) : Vector256<uint>.Zero;
                Vector256<uint> vmb256 = Vector256<uint>.Zero;
                if (blend)
                {
                    vmb256 = Avx2.CompareEqual(vmb256, vmb256);
                }

                while (remaining >= 8)
                {
                    Vector256<uint> p = Unsafe.Add(ref previousBase256, x).AsUInt32();
                    Vector256<uint> c = Unsafe.Add(ref currentBase256, x).AsUInt32();

                    Vector256<uint> eq = Avx2.CompareEqual(p, c);
                    Vector256<uint> r = Avx2.BlendVariable(c, r256, Avx2.And(eq, vmb256));

                    if (nextFrame != null)
                    {
                        // A pixel only counts as a duplicate if the next frame's alpha
                        // (byte 3, hence the >> 24) is not lower than the current one's.
                        Vector256<int> n = Avx2.ShiftRightLogical(Unsafe.Add(ref nextBase256, x).AsUInt32(), 24).AsInt32();
                        eq = Avx2.AndNot(Avx2.CompareGreaterThan(Avx2.ShiftRightLogical(c, 24).AsInt32(), n).AsInt32().AsUInt32(), eq);
                    }

                    Unsafe.Add(ref resultBase256, x) = r.AsByte();

                    // MoveMask yields one bit per byte; invert so set bits mark
                    // differing bytes, then divide by 4 to recover pixel offsets.
                    uint msk = (uint)Avx2.MoveMask(eq.AsByte());
                    msk = ~msk;
                    if (msk != 0)
                    {
                        // If a diff is found, the left side is marked by the min of the previously found left side and the start position.
                        // The right is the max of the previously found right side and the end position.
                        int start = i + (BitOperations.TrailingZeroCount(msk) / sizeof(uint));
                        int end = i + (8 - (BitOperations.LeadingZeroCount(msk) / sizeof(uint)));
                        left = Math.Min(left, start);
                        right = Math.Max(right, end);
                        hasRowDiff = true;
                        hasDiff = true;
                    }

                    x++;
                    i += 8;
                    remaining -= 8;
                }
            }

            // SSE2 path: 4 pixels per iteration; also mops up a 256-bit remainder.
            if (Sse2.IsSupported && remaining >= 4)
            {
                // Update offset since we may be operating on the remainder previously incremented by pixel steps of 8.
                x *= 2;

                Vector128<uint> r128 = previousFrame != null ? Vector128.Create(bg.PackedValue) : Vector128<uint>.Zero;
                Vector128<uint> vmb128 = Vector128<uint>.Zero;
                if (blend)
                {
                    vmb128 = Sse2.CompareEqual(vmb128, vmb128);
                }

                while (remaining >= 4)
                {
                    Vector128<uint> p = Unsafe.Add(ref Unsafe.As<Vector256<byte>, Vector128<uint>>(ref previousBase256), x);
                    Vector128<uint> c = Unsafe.Add(ref Unsafe.As<Vector256<byte>, Vector128<uint>>(ref currentBase256), x);

                    Vector128<uint> eq = Sse2.CompareEqual(p, c);
                    Vector128<uint> r = SimdUtils.HwIntrinsics.BlendVariable(c, r128, Sse2.And(eq, vmb128));

                    if (nextFrame != null)
                    {
                        // Same alpha guard as the AVX2 path, on 4 pixels at a time.
                        Vector128<int> n = Sse2.ShiftRightLogical(Unsafe.Add(ref Unsafe.As<Vector256<byte>, Vector128<uint>>(ref nextBase256), x), 24).AsInt32();
                        eq = Sse2.AndNot(Sse2.CompareGreaterThan(Sse2.ShiftRightLogical(c, 24).AsInt32(), n).AsUInt32(), eq);
                    }

                    Unsafe.Add(ref Unsafe.As<Vector256<byte>, Vector128<uint>>(ref resultBase256), x) = r;

                    ushort msk = (ushort)(uint)Sse2.MoveMask(eq.AsByte());
                    msk = (ushort)~msk;
                    if (msk != 0)
                    {
                        // If a diff is found, the left side is marked by the min of the previously found left side and the start position.
                        // The right is the max of the previously found right side and the end position.
                        int start = i + (SimdUtils.HwIntrinsics.TrailingZeroCount(msk) / sizeof(uint));
                        int end = i + (4 - (SimdUtils.HwIntrinsics.LeadingZeroCount(msk) / sizeof(uint)));
                        left = Math.Min(left, start);
                        right = Math.Max(right, end);
                        hasRowDiff = true;
                        hasDiff = true;
                    }

                    x++;
                    i += 4;
                    remaining -= 4;
                }
            }

            // TODO: v4 AdvSimd when we can use .NET 8

            // Scalar tail: one pixel at a time, mirroring the vector logic.
            for (i = remaining; i > 0; i--)
            {
                x = (uint)(length - i);
                Rgba32 p = Unsafe.Add(ref MemoryMarshal.GetReference(previous), x);
                Rgba32 c = Unsafe.Add(ref MemoryMarshal.GetReference(current), x);
                Rgba32 n = Unsafe.Add(ref MemoryMarshal.GetReference(next), x);
                ref Rgba32 r = ref Unsafe.Add(ref MemoryMarshal.GetReference(result), x);

                bool peq = c.Rgba == (previousFrame != null ? p.Rgba : bg.Rgba);
                Rgba32 val = (blend & peq) ? replacement : c;

                // Alpha guard: only treat as duplicate when the next frame does
                // not become more transparent at this pixel.
                peq &= nextFrame == null || (n.Rgba >> 24 >= c.Rgba >> 24);
                r = val;

                if (!peq)
                {
                    // If a diff is found, the left side is marked by the min of the previously found left side and the diff position.
                    // The right is the max of the previously found right side and the diff position + 1.
                    left = Math.Min(left, (int)x);
                    right = Math.Max(right, (int)x + 1);
                    hasRowDiff = true;
                    hasDiff = true;
                }
            }

            // Track the vertical extent of the difference region.
            if (hasRowDiff)
            {
                if (top == int.MinValue)
                {
                    top = y;
                }

                bottom = y + 1;
            }

            PixelOperations<TPixel>.Instance.FromRgba32(configuration, result, resultFrame.DangerousGetPixelRowMemory(y).Span);
        }

        // Clamp the sentinel-initialized extremes into valid frame coordinates;
        // when no diff was found this degenerates to a minimal 1x1 rectangle.
        Rectangle bounds = Rectangle.FromLTRB(
            left = Numerics.Clamp(left, 0, resultFrame.Width - 1),
            top = Numerics.Clamp(top, 0, resultFrame.Height - 1),
            Numerics.Clamp(right, left + 1, resultFrame.Width),
            Numerics.Clamp(bottom, top + 1, resultFrame.Height));

        // Webp requires even bounds
        if (clampingMode == ClampingMode.Even)
        {
            bounds.Width = Math.Min(resultFrame.Width, bounds.Width + (bounds.X & 1));
            bounds.Height = Math.Min(resultFrame.Height, bounds.Height + (bounds.Y & 1));
            bounds.X = Math.Max(0, bounds.X - (bounds.X & 1));
            bounds.Y = Math.Max(0, bounds.Y - (bounds.Y & 1));
        }

        return (hasDiff, bounds);
    }
}
#pragma warning disable SA1201 // Elements should appear in the correct order
/// <summary>
/// Controls how difference bounds are clamped by
/// <see cref="AnimationUtilities.DeDuplicatePixels{TPixel}"/>.
/// </summary>
internal enum ClampingMode
#pragma warning restore SA1201 // Elements should appear in the correct order
{
    /// <summary>
    /// No additional clamping is applied to the calculated bounds.
    /// </summary>
    None,

    /// <summary>
    /// Bounds are expanded as required to even coordinates (used by WebP).
    /// </summary>
    Even,
}

2
src/ImageSharp/Formats/Gif/GifDecoderCore.cs

@ -797,6 +797,8 @@ internal sealed class GifDecoderCore : IImageDecoderInternals
this.gifMetadata.GlobalColorTable = colorTable;
}
}
this.gifMetadata.BackgroundColorIndex = this.logicalScreenDescriptor.BackgroundColorIndex;
}
private unsafe struct ScratchBuffer

2
src/ImageSharp/Formats/Gif/GifEncoder.cs

@ -1,8 +1,6 @@
// Copyright (c) Six Labors.
// Licensed under the Six Labors Split License.
using SixLabors.ImageSharp.Advanced;
namespace SixLabors.ImageSharp.Formats.Gif;
/// <summary>

534
src/ImageSharp/Formats/Gif/GifEncoderCore.cs

@ -4,10 +4,9 @@
using System.Buffers;
using System.Numerics;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
using SixLabors.ImageSharp.Advanced;
using SixLabors.ImageSharp.Formats.Png;
using SixLabors.ImageSharp.Formats.Webp;
using SixLabors.ImageSharp.Memory;
using SixLabors.ImageSharp.Metadata;
using SixLabors.ImageSharp.Metadata.Profiles.Xmp;
@ -86,8 +85,7 @@ internal sealed class GifEncoderCore : IImageEncoderInternals
Guard.NotNull(image, nameof(image));
Guard.NotNull(stream, nameof(stream));
ImageMetadata metadata = image.Metadata;
GifMetadata gifMetadata = metadata.GetGifMetadata();
GifMetadata gifMetadata = GetGifMetadata(image);
this.colorTableMode ??= gifMetadata.ColorTableMode;
bool useGlobalTable = this.colorTableMode == GifColorTableMode.Global;
@ -96,8 +94,7 @@ internal sealed class GifEncoderCore : IImageEncoderInternals
// Work out if there is an explicit transparent index set for the frame. We use that to ensure the
// correct value is set for the background index when quantizing.
image.Frames.RootFrame.Metadata.TryGetGifMetadata(out GifFrameMetadata? frameMetadata);
int transparencyIndex = GetTransparentIndex(quantized, frameMetadata);
GifFrameMetadata frameMetadata = GetGifFrameMetadata(image.Frames.RootFrame, -1);
if (this.quantizer is null)
{
@ -105,6 +102,7 @@ internal sealed class GifEncoderCore : IImageEncoderInternals
if (gifMetadata.ColorTableMode == GifColorTableMode.Global && gifMetadata.GlobalColorTable?.Length > 0)
{
// We avoid dithering by default to preserve the original colors.
int transparencyIndex = GetTransparentIndex(quantized, frameMetadata);
this.quantizer = new PaletteQuantizer(gifMetadata.GlobalColorTable.Value, new() { Dither = null }, transparencyIndex);
}
else
@ -131,16 +129,20 @@ internal sealed class GifEncoderCore : IImageEncoderInternals
WriteHeader(stream);
// Write the LSD.
transparencyIndex = GetTransparentIndex(quantized, frameMetadata);
byte backgroundIndex = unchecked((byte)transparencyIndex);
if (transparencyIndex == -1)
int derivedTransparencyIndex = GetTransparentIndex(quantized, null);
if (derivedTransparencyIndex >= 0)
{
backgroundIndex = gifMetadata.BackgroundColorIndex;
frameMetadata.HasTransparency = true;
frameMetadata.TransparencyIndex = ClampIndex(derivedTransparencyIndex);
}
byte backgroundIndex = derivedTransparencyIndex >= 0
? frameMetadata.TransparencyIndex
: gifMetadata.BackgroundColorIndex;
// Get the number of bits.
int bitDepth = ColorNumerics.GetBitsNeededForColorDepth(quantized.Palette.Length);
this.WriteLogicalScreenDescriptor(metadata, image.Width, image.Height, backgroundIndex, useGlobalTable, bitDepth, stream);
this.WriteLogicalScreenDescriptor(image.Metadata, image.Width, image.Height, backgroundIndex, useGlobalTable, bitDepth, stream);
if (useGlobalTable)
{
@ -157,22 +159,78 @@ internal sealed class GifEncoderCore : IImageEncoderInternals
this.WriteApplicationExtensions(stream, image.Frames.Count, gifMetadata.RepeatCount, xmpProfile);
}
this.EncodeFirstFrame(stream, frameMetadata, quantized, transparencyIndex);
this.EncodeFirstFrame(stream, frameMetadata, quantized);
// Capture the global palette for reuse on subsequent frames and cleanup the quantized frame.
TPixel[] globalPalette = image.Frames.Count == 1 ? Array.Empty<TPixel>() : quantized.Palette.ToArray();
quantized.Dispose();
this.EncodeAdditionalFrames(stream, image, globalPalette);
this.EncodeAdditionalFrames(stream, image, globalPalette, derivedTransparencyIndex, frameMetadata.DisposalMethod);
stream.WriteByte(GifConstants.EndIntroducer);
quantized?.Dispose();
}
/// <summary>
/// Resolves the <see cref="GifMetadata"/> to encode with, translating from PNG or WebP
/// animation metadata when the image originated from one of those formats.
/// </summary>
private static GifMetadata GetGifMetadata<TPixel>(Image<TPixel> image)
    where TPixel : unmanaged, IPixel<TPixel>
{
    ImageMetadata source = image.Metadata;

    // Native GIF metadata wins; clone so the caller's instance is never mutated.
    if (source.TryGetGifMetadata(out GifMetadata? gifMetadata))
    {
        return (GifMetadata)gifMetadata.DeepClone();
    }

    // Otherwise translate via the shared animated-metadata representation.
    if (source.TryGetPngMetadata(out PngMetadata? pngMetadata))
    {
        return GifMetadata.FromAnimatedMetadata(pngMetadata.ToAnimatedImageMetadata());
    }

    if (source.TryGetWebpMetadata(out WebpMetadata? webpMetadata))
    {
        return GifMetadata.FromAnimatedMetadata(webpMetadata.ToAnimatedImageMetadata());
    }

    // Return explicit new instance so we do not mutate the original metadata.
    return new();
}
/// <summary>
/// Resolves the <see cref="GifFrameMetadata"/> for a frame, translating from PNG or WebP
/// frame metadata when no native GIF frame metadata is present.
/// </summary>
/// <param name="frame">The frame to read metadata from.</param>
/// <param name="transparencyIndex">The derived global transparency index, or -1 when none.</param>
private static GifFrameMetadata GetGifFrameMetadata<TPixel>(ImageFrame<TPixel> frame, int transparencyIndex)
    where TPixel : unmanaged, IPixel<TPixel>
{
    // Native GIF frame metadata wins; clone so the caller's instance is never mutated.
    if (frame.Metadata.TryGetGifMetadata(out GifFrameMetadata? gif))
    {
        return (GifFrameMetadata)gif.DeepClone();
    }

    GifFrameMetadata? metadata = null;
    if (frame.Metadata.TryGetPngMetadata(out PngFrameMetadata? png))
    {
        AnimatedImageFrameMetadata ani = png.ToAnimatedImageFrameMetadata();
        metadata = GifFrameMetadata.FromAnimatedMetadata(ani);
    }

    // NOTE(review): not an else-if — if a frame somehow carried both PNG and WebP
    // metadata the WebP translation would overwrite the PNG one; presumably a frame
    // only ever carries one of these. Confirm against the decoders.
    if (frame.Metadata.TryGetWebpFrameMetadata(out WebpFrameMetadata? webp))
    {
        AnimatedImageFrameMetadata ani = webp.ToAnimatedImageFrameMetadata();
        metadata = GifFrameMetadata.FromAnimatedMetadata(ani);
    }

    // Frames sharing the global table inherit the globally-derived transparency index.
    if (metadata?.ColorTableMode == GifColorTableMode.Global && transparencyIndex > -1)
    {
        metadata.HasTransparency = true;
        metadata.TransparencyIndex = ClampIndex(transparencyIndex);
    }

    return metadata ?? new();
}
private void EncodeAdditionalFrames<TPixel>(
Stream stream,
Image<TPixel> image,
ReadOnlyMemory<TPixel> globalPalette)
ReadOnlyMemory<TPixel> globalPalette,
int globalTransparencyIndex,
GifDisposalMethod previousDisposalMethod)
where TPixel : unmanaged, IPixel<TPixel>
{
if (image.Frames.Count == 1)
@ -187,24 +245,22 @@ internal sealed class GifEncoderCore : IImageEncoderInternals
ImageFrame<TPixel> previousFrame = image.Frames.RootFrame;
// This frame is reused to store de-duplicated pixel buffers.
// This is more expensive memory-wise than de-duplicating indexed buffer but allows us to deduplicate
// frames using both local and global palettes.
using ImageFrame<TPixel> encodingFrame = new(previousFrame.Configuration, previousFrame.Size());
for (int i = 1; i < image.Frames.Count; i++)
{
// Gather the metadata for this frame.
ImageFrame<TPixel> currentFrame = image.Frames[i];
ImageFrameMetadata metadata = currentFrame.Metadata;
metadata.TryGetGifMetadata(out GifFrameMetadata? gifMetadata);
bool useLocal = this.colorTableMode == GifColorTableMode.Local || (gifMetadata?.ColorTableMode == GifColorTableMode.Local);
ImageFrame<TPixel>? nextFrame = i < image.Frames.Count - 1 ? image.Frames[i + 1] : null;
GifFrameMetadata gifMetadata = GetGifFrameMetadata(currentFrame, globalTransparencyIndex);
bool useLocal = this.colorTableMode == GifColorTableMode.Local || (gifMetadata.ColorTableMode == GifColorTableMode.Local);
if (!useLocal && !hasPaletteQuantizer && i > 0)
{
// The palette quantizer can reuse the same global pixel map across multiple frames since the palette is unchanging.
// This allows a reduction of memory usage across multi-frame gifs using a global palette
// and also allows use to reuse the cache from previous runs.
int transparencyIndex = gifMetadata?.HasTransparency == true ? gifMetadata.TransparencyIndex : -1;
int transparencyIndex = gifMetadata.HasTransparency ? gifMetadata.TransparencyIndex : -1;
paletteQuantizer = new(this.configuration, this.quantizer!.Options, globalPalette, transparencyIndex);
hasPaletteQuantizer = true;
}
@ -213,12 +269,15 @@ internal sealed class GifEncoderCore : IImageEncoderInternals
stream,
previousFrame,
currentFrame,
nextFrame,
encodingFrame,
useLocal,
gifMetadata,
paletteQuantizer);
paletteQuantizer,
previousDisposalMethod);
previousFrame = currentFrame;
previousDisposalMethod = gifMetadata.DisposalMethod;
}
if (hasPaletteQuantizer)
@ -229,16 +288,15 @@ internal sealed class GifEncoderCore : IImageEncoderInternals
private void EncodeFirstFrame<TPixel>(
Stream stream,
GifFrameMetadata? metadata,
IndexedImageFrame<TPixel> quantized,
int transparencyIndex)
GifFrameMetadata metadata,
IndexedImageFrame<TPixel> quantized)
where TPixel : unmanaged, IPixel<TPixel>
{
this.WriteGraphicalControlExtension(metadata, transparencyIndex, stream);
this.WriteGraphicalControlExtension(metadata, stream);
Buffer2D<byte> indices = ((IPixelSource)quantized).PixelBuffer;
Rectangle interest = indices.FullRectangle();
bool useLocal = this.colorTableMode == GifColorTableMode.Local || (metadata?.ColorTableMode == GifColorTableMode.Local);
bool useLocal = this.colorTableMode == GifColorTableMode.Local || (metadata.ColorTableMode == GifColorTableMode.Local);
int bitDepth = ColorNumerics.GetBitsNeededForColorDepth(quantized.Palette.Length);
this.WriteImageDescriptor(interest, useLocal, bitDepth, stream);
@ -248,367 +306,139 @@ internal sealed class GifEncoderCore : IImageEncoderInternals
this.WriteColorTable(quantized, bitDepth, stream);
}
this.WriteImageData(indices, interest, stream, quantized.Palette.Length, transparencyIndex);
this.WriteImageData(indices, stream, quantized.Palette.Length, metadata.TransparencyIndex);
}
private void EncodeAdditionalFrame<TPixel>(
Stream stream,
ImageFrame<TPixel> previousFrame,
ImageFrame<TPixel> currentFrame,
ImageFrame<TPixel>? nextFrame,
ImageFrame<TPixel> encodingFrame,
bool useLocal,
GifFrameMetadata? metadata,
PaletteQuantizer<TPixel> globalPaletteQuantizer)
GifFrameMetadata metadata,
PaletteQuantizer<TPixel> globalPaletteQuantizer,
GifDisposalMethod previousDisposal)
where TPixel : unmanaged, IPixel<TPixel>
{
// Capture any explicit transparency index from the metadata.
// We use it to determine the value to use to replace duplicate pixels.
int transparencyIndex = metadata?.HasTransparency == true ? metadata.TransparencyIndex : -1;
Vector4 replacement = Vector4.Zero;
if (transparencyIndex >= 0)
{
if (useLocal)
{
if (metadata?.LocalColorTable?.Length > 0)
{
ReadOnlySpan<Color> palette = metadata.LocalColorTable.Value.Span;
if (transparencyIndex < palette.Length)
{
replacement = palette[transparencyIndex].ToScaledVector4();
}
}
}
else
{
ReadOnlySpan<TPixel> palette = globalPaletteQuantizer.Palette.Span;
if (transparencyIndex < palette.Length)
{
replacement = palette[transparencyIndex].ToScaledVector4();
}
}
}
this.DeDuplicatePixels(previousFrame, currentFrame, encodingFrame, replacement);
int transparencyIndex = metadata.HasTransparency ? metadata.TransparencyIndex : -1;
IndexedImageFrame<TPixel> quantized;
if (useLocal)
{
// Reassign using the current frame and details.
if (metadata?.LocalColorTable?.Length > 0)
{
// We can use the color data from the decoded metadata here.
// We avoid dithering by default to preserve the original colors.
ReadOnlyMemory<Color> palette = metadata.LocalColorTable.Value;
PaletteQuantizer quantizer = new(palette, new() { Dither = null }, transparencyIndex);
using IQuantizer<TPixel> frameQuantizer = quantizer.CreatePixelSpecificQuantizer<TPixel>(this.configuration, quantizer.Options);
quantized = frameQuantizer.BuildPaletteAndQuantizeFrame(encodingFrame, encodingFrame.Bounds());
}
else
{
// We must quantize the frame to generate a local color table.
IQuantizer quantizer = this.hasQuantizer ? this.quantizer! : KnownQuantizers.Octree;
using IQuantizer<TPixel> frameQuantizer = quantizer.CreatePixelSpecificQuantizer<TPixel>(this.configuration, quantizer.Options);
quantized = frameQuantizer.BuildPaletteAndQuantizeFrame(encodingFrame, encodingFrame.Bounds());
}
}
else
{
// Quantize the image using the global palette.
// Individual frames, though using the shared palette, can use a different transparent index to represent transparency.
globalPaletteQuantizer.SetTransparentIndex(transparencyIndex);
quantized = globalPaletteQuantizer.QuantizeFrame(encodingFrame, encodingFrame.Bounds());
}
ImageFrame<TPixel>? previous = previousDisposal == GifDisposalMethod.RestoreToBackground ? null : previousFrame;
// Recalculate the transparency index as depending on the quantizer used could have a new value.
transparencyIndex = GetTransparentIndex(quantized, metadata);
// Deduplicate and quantize the frame capturing only required parts.
(bool difference, Rectangle bounds) =
AnimationUtilities.DeDuplicatePixels(
this.configuration,
previous,
currentFrame,
nextFrame,
encodingFrame,
Color.Transparent,
true);
// Trim down the buffer to the minimum size required.
Buffer2D<byte> indices = ((IPixelSource)quantized).PixelBuffer;
Rectangle interest = TrimTransparentPixels(indices, transparencyIndex);
using IndexedImageFrame<TPixel> quantized = this.QuantizeAdditionalFrameAndUpdateMetadata(
encodingFrame,
bounds,
metadata,
useLocal,
globalPaletteQuantizer,
difference,
transparencyIndex);
this.WriteGraphicalControlExtension(metadata, transparencyIndex, stream);
this.WriteGraphicalControlExtension(metadata, stream);
int bitDepth = ColorNumerics.GetBitsNeededForColorDepth(quantized.Palette.Length);
this.WriteImageDescriptor(interest, useLocal, bitDepth, stream);
this.WriteImageDescriptor(bounds, useLocal, bitDepth, stream);
if (useLocal)
{
this.WriteColorTable(quantized, bitDepth, stream);
}
this.WriteImageData(indices, interest, stream, quantized.Palette.Length, transparencyIndex);
Buffer2D<byte> indices = ((IPixelSource)quantized).PixelBuffer;
this.WriteImageData(indices, stream, quantized.Palette.Length, metadata.TransparencyIndex);
}
private void DeDuplicatePixels<TPixel>(
ImageFrame<TPixel> backgroundFrame,
ImageFrame<TPixel> sourceFrame,
ImageFrame<TPixel> resultFrame,
Vector4 replacement)
private IndexedImageFrame<TPixel> QuantizeAdditionalFrameAndUpdateMetadata<TPixel>(
ImageFrame<TPixel> encodingFrame,
Rectangle bounds,
GifFrameMetadata metadata,
bool useLocal,
PaletteQuantizer<TPixel> globalPaletteQuantizer,
bool hasDuplicates,
int transparencyIndex)
where TPixel : unmanaged, IPixel<TPixel>
{
IMemoryOwner<Vector4> buffers = this.memoryAllocator.Allocate<Vector4>(backgroundFrame.Width * 3);
Span<Vector4> background = buffers.GetSpan()[..backgroundFrame.Width];
Span<Vector4> source = buffers.GetSpan()[backgroundFrame.Width..];
Span<Vector4> result = buffers.GetSpan()[(backgroundFrame.Width * 2)..];
// TODO: This algorithm is greedy and will always replace matching colors, however, theoretically, if the proceeding color
// is the same, but not replaced, you would actually be better of not replacing it since longer runs compress better.
// This would require a more complex algorithm.
for (int y = 0; y < backgroundFrame.Height; y++)
{
PixelOperations<TPixel>.Instance.ToVector4(this.configuration, backgroundFrame.DangerousGetPixelRowMemory(y).Span, background, PixelConversionModifiers.Scale);
PixelOperations<TPixel>.Instance.ToVector4(this.configuration, sourceFrame.DangerousGetPixelRowMemory(y).Span, source, PixelConversionModifiers.Scale);
ref Vector256<float> backgroundBase = ref Unsafe.As<Vector4, Vector256<float>>(ref MemoryMarshal.GetReference(background));
ref Vector256<float> sourceBase = ref Unsafe.As<Vector4, Vector256<float>>(ref MemoryMarshal.GetReference(source));
ref Vector256<float> resultBase = ref Unsafe.As<Vector4, Vector256<float>>(ref MemoryMarshal.GetReference(result));
uint x = 0;
int remaining = background.Length;
if (Avx2.IsSupported && remaining >= 2)
{
Vector256<float> replacement256 = Vector256.Create(replacement.X, replacement.Y, replacement.Z, replacement.W, replacement.X, replacement.Y, replacement.Z, replacement.W);
while (remaining >= 2)
{
Vector256<float> b = Unsafe.Add(ref backgroundBase, x);
Vector256<float> s = Unsafe.Add(ref sourceBase, x);
Vector256<int> m = Avx.CompareEqual(b, s).AsInt32();
m = Avx2.HorizontalAdd(m, m);
m = Avx2.HorizontalAdd(m, m);
m = Avx2.CompareEqual(m, Vector256.Create(-4));
Unsafe.Add(ref resultBase, x) = Avx.BlendVariable(s, replacement256, m.AsSingle());
x++;
remaining -= 2;
}
}
for (int i = remaining; i >= 0; i--)
{
x = (uint)i;
Vector4 b = Unsafe.Add(ref Unsafe.As<Vector256<float>, Vector4>(ref backgroundBase), x);
Vector4 s = Unsafe.Add(ref Unsafe.As<Vector256<float>, Vector4>(ref sourceBase), x);
ref Vector4 r = ref Unsafe.Add(ref Unsafe.As<Vector256<float>, Vector4>(ref resultBase), x);
r = (b == s) ? replacement : s;
}
PixelOperations<TPixel>.Instance.FromVector4Destructive(this.configuration, result, resultFrame.DangerousGetPixelRowMemory(y).Span, PixelConversionModifiers.Scale);
}
}
private static Rectangle TrimTransparentPixels(Buffer2D<byte> buffer, int transparencyIndex)
{
if (transparencyIndex < 0)
{
return buffer.FullRectangle();
}
byte trimmableIndex = unchecked((byte)transparencyIndex);
int top = int.MinValue;
int bottom = int.MaxValue;
int left = int.MaxValue;
int right = int.MinValue;
int minY = -1;
bool isTransparentRow = true;
// Run through the buffer in a single pass. Use variables to track the min/max values.
for (int y = 0; y < buffer.Height; y++)
IndexedImageFrame<TPixel> quantized;
if (useLocal)
{
isTransparentRow = true;
Span<byte> rowSpan = buffer.DangerousGetRowSpan(y);
ref byte rowPtr = ref MemoryMarshal.GetReference(rowSpan);
nint rowLength = (nint)(uint)rowSpan.Length;
nint x = 0;
#if NET7_0_OR_GREATER
if (Vector128.IsHardwareAccelerated && rowLength >= Vector128<byte>.Count)
{
Vector256<byte> trimmableVec256 = Vector256.Create(trimmableIndex);
if (Vector256.IsHardwareAccelerated && rowLength >= Vector256<byte>.Count)
{
do
{
Vector256<byte> vec = Vector256.LoadUnsafe(ref rowPtr, (nuint)x);
Vector256<byte> notEquals = ~Vector256.Equals(vec, trimmableVec256);
uint mask = notEquals.ExtractMostSignificantBits();
if (mask != 0)
{
isTransparentRow = false;
nint start = x + (nint)uint.TrailingZeroCount(mask);
nint end = (nint)uint.LeadingZeroCount(mask);
// end is from the end, but we need the index from the beginning
end = x + Vector256<byte>.Count - 1 - end;
left = Math.Min(left, (int)start);
right = Math.Max(right, (int)end);
}
x += Vector256<byte>.Count;
}
while (x <= rowLength - Vector256<byte>.Count);
}
Vector128<byte> trimmableVec = Vector256.IsHardwareAccelerated
? trimmableVec256.GetLower()
: Vector128.Create(trimmableIndex);
while (x <= rowLength - Vector128<byte>.Count)
{
Vector128<byte> vec = Vector128.LoadUnsafe(ref rowPtr, (nuint)x);
Vector128<byte> notEquals = ~Vector128.Equals(vec, trimmableVec);
uint mask = notEquals.ExtractMostSignificantBits();
if (mask != 0)
{
isTransparentRow = false;
nint start = x + (nint)uint.TrailingZeroCount(mask);
nint end = (nint)uint.LeadingZeroCount(mask) - Vector128<byte>.Count;
// end is from the end, but we need the index from the beginning
end = x + Vector128<byte>.Count - 1 - end;
left = Math.Min(left, (int)start);
right = Math.Max(right, (int)end);
}
x += Vector128<byte>.Count;
}
}
#else
if (Sse41.IsSupported && rowLength >= Vector128<byte>.Count)
// Reassign using the current frame and details.
if (metadata.LocalColorTable?.Length > 0)
{
Vector256<byte> trimmableVec256 = Vector256.Create(trimmableIndex);
// We can use the color data from the decoded metadata here.
// We avoid dithering by default to preserve the original colors.
ReadOnlyMemory<Color> palette = metadata.LocalColorTable.Value;
if (Avx2.IsSupported && rowLength >= Vector256<byte>.Count)
if (hasDuplicates && !metadata.HasTransparency)
{
do
{
Vector256<byte> vec = Unsafe.ReadUnaligned<Vector256<byte>>(ref Unsafe.Add(ref rowPtr, x));
Vector256<byte> notEquals = Avx2.CompareEqual(vec, trimmableVec256);
notEquals = Avx2.Xor(notEquals, Vector256<byte>.AllBitsSet);
int mask = Avx2.MoveMask(notEquals);
if (mask != 0)
{
isTransparentRow = false;
nint start = x + (nint)(uint)BitOperations.TrailingZeroCount(mask);
nint end = (nint)(uint)BitOperations.LeadingZeroCount((uint)mask);
// end is from the end, but we need the index from the beginning
end = x + Vector256<byte>.Count - 1 - end;
left = Math.Min(left, (int)start);
right = Math.Max(right, (int)end);
}
x += Vector256<byte>.Count;
}
while (x <= rowLength - Vector256<byte>.Count);
// A difference was captured but the metadata does not have transparency.
metadata.HasTransparency = true;
transparencyIndex = palette.Length;
metadata.TransparencyIndex = ClampIndex(transparencyIndex);
}
Vector128<byte> trimmableVec = Sse41.IsSupported
? trimmableVec256.GetLower()
: Vector128.Create(trimmableIndex);
while (x <= rowLength - Vector128<byte>.Count)
{
Vector128<byte> vec = Unsafe.ReadUnaligned<Vector128<byte>>(ref Unsafe.Add(ref rowPtr, x));
Vector128<byte> notEquals = Sse2.CompareEqual(vec, trimmableVec);
notEquals = Sse2.Xor(notEquals, Vector128<byte>.AllBitsSet);
int mask = Sse2.MoveMask(notEquals);
if (mask != 0)
{
isTransparentRow = false;
nint start = x + (nint)(uint)BitOperations.TrailingZeroCount(mask);
nint end = (nint)(uint)BitOperations.LeadingZeroCount((uint)mask) - Vector128<byte>.Count;
// end is from the end, but we need the index from the beginning
end = x + Vector128<byte>.Count - 1 - end;
left = Math.Min(left, (int)start);
right = Math.Max(right, (int)end);
}
x += Vector128<byte>.Count;
}
PaletteQuantizer quantizer = new(palette, new() { Dither = null }, transparencyIndex);
using IQuantizer<TPixel> frameQuantizer = quantizer.CreatePixelSpecificQuantizer<TPixel>(this.configuration, quantizer.Options);
quantized = frameQuantizer.BuildPaletteAndQuantizeFrame(encodingFrame, bounds);
}
#endif
for (; x < rowLength; ++x)
else
{
if (Unsafe.Add(ref rowPtr, x) != trimmableIndex)
{
isTransparentRow = false;
left = Math.Min(left, (int)x);
right = Math.Max(right, (int)x);
}
}
// We must quantize the frame to generate a local color table.
IQuantizer quantizer = this.hasQuantizer ? this.quantizer! : KnownQuantizers.Octree;
using IQuantizer<TPixel> frameQuantizer = quantizer.CreatePixelSpecificQuantizer<TPixel>(this.configuration, quantizer.Options);
quantized = frameQuantizer.BuildPaletteAndQuantizeFrame(encodingFrame, bounds);
if (!isTransparentRow)
{
if (y == 0)
// The transparency index derived by the quantizer might differ from the index
// within the metadata. We need to update the metadata to reflect this.
int derivedTransparencyIndex = GetTransparentIndex(quantized, null);
if (derivedTransparencyIndex < 0)
{
// First row is opaque.
// Capture to prevent over assignment when a match is found below.
top = 0;
// If no index is found set to the palette length, this trick allows us to fake transparency without an explicit index.
derivedTransparencyIndex = quantized.Palette.Length;
}
// The minimum top bounds have already been captured.
// Increment the bottom to include the current opaque row.
if (minY < 0 && top != 0)
{
// Increment to the first opaque row.
top++;
}
metadata.TransparencyIndex = ClampIndex(derivedTransparencyIndex);
minY = top;
bottom = y;
}
else
{
// We've yet to hit an opaque row. Capture the top position.
if (minY < 0)
if (hasDuplicates)
{
top = Math.Max(top, y);
metadata.HasTransparency = true;
}
bottom = Math.Min(bottom, y);
}
}
if (left == int.MaxValue)
{
left = 0;
}
if (right == int.MinValue)
else
{
right = buffer.Width;
}
// Quantize the image using the global palette.
// Individual frames, though using the shared palette, can use a different transparent index to represent transparency.
if (top == bottom || left == right)
{
// The entire image is transparent.
return buffer.FullRectangle();
}
// A difference was captured but the metadata does not have transparency.
if (hasDuplicates && !metadata.HasTransparency)
{
metadata.HasTransparency = true;
transparencyIndex = globalPaletteQuantizer.Palette.Length;
metadata.TransparencyIndex = ClampIndex(transparencyIndex);
}
if (!isTransparentRow)
{
// Last row is opaque.
bottom = buffer.Height;
globalPaletteQuantizer.SetTransparentIndex(transparencyIndex);
quantized = globalPaletteQuantizer.QuantizeFrame(encodingFrame, bounds);
}
return Rectangle.FromLTRB(left, top, Math.Min(right + 1, buffer.Width), Math.Min(bottom + 1, buffer.Height));
return quantized;
}
private static byte ClampIndex(int value) => (byte)Numerics.Clamp(value, byte.MinValue, byte.MaxValue);
/// <summary>
/// Returns the index of the most transparent color in the palette.
/// </summary>
@ -800,30 +630,19 @@ internal sealed class GifEncoderCore : IImageEncoderInternals
/// Writes the optional graphics control extension to the stream.
/// </summary>
/// <param name="metadata">The metadata of the image or frame.</param>
/// <param name="transparencyIndex">The index of the color in the color palette to make transparent.</param>
/// <param name="stream">The stream to write to.</param>
private void WriteGraphicalControlExtension(GifFrameMetadata? metadata, int transparencyIndex, Stream stream)
private void WriteGraphicalControlExtension(GifFrameMetadata metadata, Stream stream)
{
GifFrameMetadata? data = metadata;
bool hasTransparency;
if (metadata is null)
{
data = new();
hasTransparency = transparencyIndex >= 0;
}
else
{
hasTransparency = metadata.HasTransparency;
}
bool hasTransparency = metadata.HasTransparency;
byte packedValue = GifGraphicControlExtension.GetPackedValue(
disposalMethod: data!.DisposalMethod,
disposalMethod: metadata.DisposalMethod,
transparencyFlag: hasTransparency);
GifGraphicControlExtension extension = new(
packed: packedValue,
delayTime: (ushort)data.FrameDelay,
transparencyIndex: hasTransparency ? unchecked((byte)transparencyIndex) : byte.MinValue);
delayTime: (ushort)metadata.FrameDelay,
transparencyIndex: hasTransparency ? metadata.TransparencyIndex : byte.MinValue);
this.WriteExtension(extension, stream);
}
@ -924,14 +743,11 @@ internal sealed class GifEncoderCore : IImageEncoderInternals
/// Writes the image pixel data to the stream.
/// </summary>
/// <param name="indices">The <see cref="Buffer2DRegion{Byte}"/> containing indexed pixels.</param>
/// <param name="interest">The region of interest.</param>
/// <param name="stream">The stream to write to.</param>
/// <param name="paletteLength">The length of the frame color palette.</param>
/// <param name="transparencyIndex">The index of the color used to represent transparency.</param>
private void WriteImageData(Buffer2D<byte> indices, Rectangle interest, Stream stream, int paletteLength, int transparencyIndex)
private void WriteImageData(Buffer2D<byte> indices, Stream stream, int paletteLength, int transparencyIndex)
{
Buffer2DRegion<byte> region = indices.GetRegion(interest);
// Pad the bit depth when required for encoding the image data.
// This is a common trick which allows to use out of range indexes for transparency and avoid allocating a larger color palette
// as decoders skip indexes that are out of range.
@ -940,6 +756,6 @@ internal sealed class GifEncoderCore : IImageEncoderInternals
: 0;
using LzwEncoder encoder = new(this.memoryAllocator, ColorNumerics.GetBitsNeededForColorDepth(paletteLength + padding));
encoder.Encode(region, stream);
encoder.Encode(indices, stream);
}
}

40
src/ImageSharp/Formats/Gif/GifFrameMetadata.cs

@ -1,6 +1,7 @@
// Copyright (c) Six Labors.
// Licensed under the Six Labors Split License.
using System.Numerics;
using SixLabors.ImageSharp.PixelFormats;
namespace SixLabors.ImageSharp.Formats.Gif;
@ -76,4 +77,43 @@ public class GifFrameMetadata : IDeepCloneable
/// <inheritdoc/>
public IDeepCloneable DeepClone() => new GifFrameMetadata(this);
internal static GifFrameMetadata FromAnimatedMetadata(AnimatedImageFrameMetadata metadata)
{
// TODO: v4 How do I link the parent metadata to the frame metadata to get the global color table?
int index = -1;
float background = 1f;
if (metadata.ColorTable.HasValue)
{
ReadOnlySpan<Color> colorTable = metadata.ColorTable.Value.Span;
for (int i = 0; i < colorTable.Length; i++)
{
Vector4 vector = (Vector4)colorTable[i];
if (vector.W < background)
{
index = i;
}
}
}
bool hasTransparency = index >= 0;
return new()
{
LocalColorTable = metadata.ColorTable,
ColorTableMode = metadata.ColorTableMode == FrameColorTableMode.Global ? GifColorTableMode.Global : GifColorTableMode.Local,
FrameDelay = (int)Math.Round(metadata.Duration.TotalMilliseconds / 10),
DisposalMethod = GetMode(metadata.DisposalMode),
HasTransparency = hasTransparency,
TransparencyIndex = hasTransparency ? unchecked((byte)index) : byte.MinValue,
};
}
private static GifDisposalMethod GetMode(FrameDisposalMode mode) => mode switch
{
FrameDisposalMode.DoNotDispose => GifDisposalMethod.NotDispose,
FrameDisposalMode.RestoreToBackground => GifDisposalMethod.RestoreToBackground,
FrameDisposalMode.RestoreToPrevious => GifDisposalMethod.RestoreToPrevious,
_ => GifDisposalMethod.Unspecified,
};
}

26
src/ImageSharp/Formats/Gif/GifMetadata.cs

@ -71,4 +71,30 @@ public class GifMetadata : IDeepCloneable
/// <inheritdoc/>
public IDeepCloneable DeepClone() => new GifMetadata(this);
internal static GifMetadata FromAnimatedMetadata(AnimatedImageMetadata metadata)
{
int index = 0;
Color background = metadata.BackgroundColor;
if (metadata.ColorTable.HasValue)
{
ReadOnlySpan<Color> colorTable = metadata.ColorTable.Value.Span;
for (int i = 0; i < colorTable.Length; i++)
{
if (background == colorTable[i])
{
index = i;
break;
}
}
}
return new()
{
GlobalColorTable = metadata.ColorTable,
ColorTableMode = metadata.ColorTableMode == FrameColorTableMode.Global ? GifColorTableMode.Global : GifColorTableMode.Local,
RepeatCount = metadata.RepeatCount,
BackgroundColorIndex = (byte)Numerics.Clamp(index, 0, 255),
};
}
}

12
src/ImageSharp/Formats/Gif/LzwEncoder.cs

@ -186,7 +186,7 @@ internal sealed class LzwEncoder : IDisposable
/// </summary>
/// <param name="indexedPixels">The 2D buffer of indexed pixels.</param>
/// <param name="stream">The stream to write to.</param>
public void Encode(Buffer2DRegion<byte> indexedPixels, Stream stream)
public void Encode(Buffer2D<byte> indexedPixels, Stream stream)
{
// Write "initial code size" byte
stream.WriteByte((byte)this.initialCodeSize);
@ -204,7 +204,7 @@ internal sealed class LzwEncoder : IDisposable
/// <param name="bitCount">The number of bits</param>
/// <returns>See <see cref="int"/></returns>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
private static int GetMaxcode(int bitCount) => (1 << bitCount) - 1;
private static int GetMaxCode(int bitCount) => (1 << bitCount) - 1;
/// <summary>
/// Add a character to the end of the current packet, and if it is 254 characters,
@ -249,7 +249,7 @@ internal sealed class LzwEncoder : IDisposable
/// <param name="indexedPixels">The 2D buffer of indexed pixels.</param>
/// <param name="initialBits">The initial bits.</param>
/// <param name="stream">The stream to write to.</param>
private void Compress(Buffer2DRegion<byte> indexedPixels, int initialBits, Stream stream)
private void Compress(Buffer2D<byte> indexedPixels, int initialBits, Stream stream)
{
// Set up the globals: globalInitialBits - initial number of bits
this.globalInitialBits = initialBits;
@ -257,7 +257,7 @@ internal sealed class LzwEncoder : IDisposable
// Set up the necessary values
this.clearFlag = false;
this.bitCount = this.globalInitialBits;
this.maxCode = GetMaxcode(this.bitCount);
this.maxCode = GetMaxCode(this.bitCount);
this.clearCode = 1 << (initialBits - 1);
this.eofCode = this.clearCode + 1;
this.freeEntry = this.clearCode + 2;
@ -383,7 +383,7 @@ internal sealed class LzwEncoder : IDisposable
{
if (this.clearFlag)
{
this.maxCode = GetMaxcode(this.bitCount = this.globalInitialBits);
this.maxCode = GetMaxCode(this.bitCount = this.globalInitialBits);
this.clearFlag = false;
}
else
@ -391,7 +391,7 @@ internal sealed class LzwEncoder : IDisposable
++this.bitCount;
this.maxCode = this.bitCount == MaxBits
? MaxMaxCode
: GetMaxcode(this.bitCount);
: GetMaxCode(this.bitCount);
}
}

51
src/ImageSharp/Formats/Gif/MetadataExtensions.cs

@ -2,6 +2,7 @@
// Licensed under the Six Labors Split License.
using System.Diagnostics.CodeAnalysis;
using SixLabors.ImageSharp.Formats;
using SixLabors.ImageSharp.Formats.Gif;
using SixLabors.ImageSharp.Metadata;
@ -20,6 +21,21 @@ public static partial class MetadataExtensions
public static GifMetadata GetGifMetadata(this ImageMetadata source)
=> source.GetFormatMetadata(GifFormat.Instance);
/// <summary>
/// Gets the gif format specific metadata for the image.
/// </summary>
/// <param name="source">The metadata this method extends.</param>
/// <param name="metadata">
/// When this method returns, contains the metadata associated with the specified image,
/// if found; otherwise, the default value for the type of the metadata parameter.
/// This parameter is passed uninitialized.
/// </param>
/// <returns>
/// <see langword="true"/> if the gif metadata exists; otherwise, <see langword="false"/>.
/// </returns>
public static bool TryGetGifMetadata(this ImageMetadata source, [NotNullWhen(true)] out GifMetadata? metadata)
=> source.TryGetFormatMetadata(GifFormat.Instance, out metadata);
/// <summary>
/// Gets the gif format specific metadata for the image frame.
/// </summary>
@ -42,4 +58,39 @@ public static partial class MetadataExtensions
/// </returns>
public static bool TryGetGifMetadata(this ImageFrameMetadata source, [NotNullWhen(true)] out GifFrameMetadata? metadata)
=> source.TryGetFormatMetadata(GifFormat.Instance, out metadata);
internal static AnimatedImageMetadata ToAnimatedImageMetadata(this GifMetadata source)
{
Color background = Color.Transparent;
if (source.GlobalColorTable != null)
{
background = source.GlobalColorTable.Value.Span[source.BackgroundColorIndex];
}
return new()
{
ColorTable = source.GlobalColorTable,
ColorTableMode = source.ColorTableMode == GifColorTableMode.Global ? FrameColorTableMode.Global : FrameColorTableMode.Local,
RepeatCount = source.RepeatCount,
BackgroundColor = background,
};
}
internal static AnimatedImageFrameMetadata ToAnimatedImageFrameMetadata(this GifFrameMetadata source)
=> new()
{
ColorTable = source.LocalColorTable,
ColorTableMode = source.ColorTableMode == GifColorTableMode.Global ? FrameColorTableMode.Global : FrameColorTableMode.Local,
Duration = TimeSpan.FromMilliseconds(source.FrameDelay * 10),
DisposalMode = GetMode(source.DisposalMethod),
BlendMode = source.DisposalMethod == GifDisposalMethod.RestoreToBackground ? FrameBlendMode.Source : FrameBlendMode.Over,
};
private static FrameDisposalMode GetMode(GifDisposalMethod method) => method switch
{
GifDisposalMethod.NotDispose => FrameDisposalMode.DoNotDispose,
GifDisposalMethod.RestoreToBackground => FrameDisposalMode.RestoreToBackground,
GifDisposalMethod.RestoreToPrevious => FrameDisposalMode.RestoreToPrevious,
_ => FrameDisposalMode.Unspecified,
};
}

14
src/ImageSharp/Formats/Png/Chunks/AnimationControl.cs

@ -9,7 +9,7 @@ internal readonly struct AnimationControl
{
public const int Size = 8;
public AnimationControl(int numberFrames, int numberPlays)
public AnimationControl(uint numberFrames, uint numberPlays)
{
this.NumberFrames = numberFrames;
this.NumberPlays = numberPlays;
@ -18,12 +18,12 @@ internal readonly struct AnimationControl
/// <summary>
/// Gets the number of frames
/// </summary>
public int NumberFrames { get; }
public uint NumberFrames { get; }
/// <summary>
/// Gets the number of times to loop this APNG. 0 indicates infinite looping.
/// </summary>
public int NumberPlays { get; }
public uint NumberPlays { get; }
/// <summary>
/// Writes the acTL to the given buffer.
@ -31,8 +31,8 @@ internal readonly struct AnimationControl
/// <param name="buffer">The buffer to write to.</param>
public void WriteTo(Span<byte> buffer)
{
BinaryPrimitives.WriteInt32BigEndian(buffer[..4], this.NumberFrames);
BinaryPrimitives.WriteInt32BigEndian(buffer[4..8], this.NumberPlays);
BinaryPrimitives.WriteInt32BigEndian(buffer[..4], (int)this.NumberFrames);
BinaryPrimitives.WriteInt32BigEndian(buffer[4..8], (int)this.NumberPlays);
}
/// <summary>
@ -42,6 +42,6 @@ internal readonly struct AnimationControl
/// <returns>The parsed acTL.</returns>
public static AnimationControl Parse(ReadOnlySpan<byte> data)
=> new(
numberFrames: BinaryPrimitives.ReadInt32BigEndian(data[..4]),
numberPlays: BinaryPrimitives.ReadInt32BigEndian(data[4..8]));
numberFrames: BinaryPrimitives.ReadUInt32BigEndian(data[..4]),
numberPlays: BinaryPrimitives.ReadUInt32BigEndian(data[4..8]));
}

2
src/ImageSharp/Formats/Png/Chunks/PngPhysical.cs

@ -61,10 +61,10 @@ internal readonly struct PngPhysical
/// <returns>The constructed PngPhysicalChunkData instance.</returns>
public static PngPhysical FromMetadata(ImageMetadata meta)
{
byte unitSpecifier = 0;
uint x;
uint y;
byte unitSpecifier;
switch (meta.ResolutionUnits)
{
case PixelResolutionUnit.AspectRatio:

58
src/ImageSharp/Formats/Png/MetadataExtensions.cs

@ -2,6 +2,7 @@
// Licensed under the Six Labors Split License.
using System.Diagnostics.CodeAnalysis;
using SixLabors.ImageSharp.Formats;
using SixLabors.ImageSharp.Formats.Png;
using SixLabors.ImageSharp.Metadata;
@ -20,17 +21,64 @@ public static partial class MetadataExtensions
public static PngMetadata GetPngMetadata(this ImageMetadata source) => source.GetFormatMetadata(PngFormat.Instance);
/// <summary>
/// Gets the aPng format specific metadata for the image frame.
/// Gets the png format specific metadata for the image.
/// </summary>
/// <param name="source">The metadata this method extends.</param>
/// <param name="metadata">The metadata.</param>
/// <returns>
/// <see langword="true"/> if the png metadata exists; otherwise, <see langword="false"/>.
/// </returns>
public static bool TryGetPngMetadata(this ImageMetadata source, [NotNullWhen(true)] out PngMetadata? metadata)
=> source.TryGetFormatMetadata(PngFormat.Instance, out metadata);
/// <summary>
/// Gets the png format specific metadata for the image frame.
/// </summary>
/// <param name="source">The metadata this method extends.</param>
/// <returns>The <see cref="PngFrameMetadata"/>.</returns>
public static PngFrameMetadata GetPngFrameMetadata(this ImageFrameMetadata source) => source.GetFormatMetadata(PngFormat.Instance);
public static PngFrameMetadata GetPngMetadata(this ImageFrameMetadata source) => source.GetFormatMetadata(PngFormat.Instance);
/// <summary>
/// Gets the aPng format specific metadata for the image frame.
/// Gets the png format specific metadata for the image frame.
/// </summary>
/// <param name="source">The metadata this method extends.</param>
/// <param name="metadata">The metadata.</param>
/// <returns>The <see cref="PngFrameMetadata"/>.</returns>
public static bool TryGetPngFrameMetadata(this ImageFrameMetadata source, [NotNullWhen(true)] out PngFrameMetadata? metadata) => source.TryGetFormatMetadata(PngFormat.Instance, out metadata);
/// <returns>
/// <see langword="true"/> if the png frame metadata exists; otherwise, <see langword="false"/>.
/// </returns>
public static bool TryGetPngMetadata(this ImageFrameMetadata source, [NotNullWhen(true)] out PngFrameMetadata? metadata)
=> source.TryGetFormatMetadata(PngFormat.Instance, out metadata);
internal static AnimatedImageMetadata ToAnimatedImageMetadata(this PngMetadata source)
=> new()
{
ColorTable = source.ColorTable,
ColorTableMode = FrameColorTableMode.Global,
RepeatCount = (ushort)Numerics.Clamp(source.RepeatCount, 0, ushort.MaxValue),
};
/// <summary>
/// Translates PNG frame metadata into the shared animated-image frame representation.
/// </summary>
/// <param name="source">The PNG frame metadata to translate.</param>
/// <returns>The equivalent <see cref="AnimatedImageFrameMetadata"/>.</returns>
internal static AnimatedImageFrameMetadata ToAnimatedImageFrameMetadata(this PngFrameMetadata source)
{
    // A zero denominator in the delay rational produces NaN (0/0) or Infinity (n/0).
    // TimeSpan.FromSeconds throws on any non-finite value, so treat both as no delay.
    double delay = source.FrameDelay.ToDouble();
    if (!double.IsFinite(delay))
    {
        delay = 0;
    }

    return new()
    {
        ColorTableMode = FrameColorTableMode.Global,

        // The frame delay is expressed in seconds; FromSeconds(delay) is
        // equivalent to FromMilliseconds(delay * 1000).
        Duration = TimeSpan.FromSeconds(delay),
        DisposalMode = GetMode(source.DisposalMethod),
        BlendMode = source.BlendMethod == PngBlendMethod.Source ? FrameBlendMode.Source : FrameBlendMode.Over,
    };
}
/// <summary>
/// Maps a PNG disposal method onto the shared frame disposal mode.
/// Unknown values map to <see cref="FrameDisposalMode.Unspecified"/>.
/// </summary>
private static FrameDisposalMode GetMode(PngDisposalMethod method)
{
    switch (method)
    {
        case PngDisposalMethod.DoNotDispose:
            return FrameDisposalMode.DoNotDispose;
        case PngDisposalMethod.RestoreToBackground:
            return FrameDisposalMode.RestoreToBackground;
        case PngDisposalMethod.RestoreToPrevious:
            return FrameDisposalMode.RestoreToPrevious;
        default:
            return FrameDisposalMode.Unspecified;
    }
}
}

57
src/ImageSharp/Formats/Png/PngDecoderCore.cs

@ -225,7 +225,7 @@ internal sealed class PngDecoderCore : IImageDecoderInternals
chunk.Length - 4,
currentFrame,
pngMetadata,
this.ReadNextDataChunkAndSkipSeq,
this.ReadNextFrameDataChunk,
currentFrameControl.Value,
cancellationToken);
@ -601,7 +601,7 @@ internal sealed class PngDecoderCore : IImageDecoderInternals
metadata);
}
PngFrameMetadata frameMetadata = image.Frames.RootFrame.Metadata.GetPngFrameMetadata();
PngFrameMetadata frameMetadata = image.Frames.RootFrame.Metadata.GetPngMetadata();
frameMetadata.FromChunk(in frameControl);
this.bytesPerPixel = this.CalculateBytesPerPixel();
@ -641,8 +641,8 @@ internal sealed class PngDecoderCore : IImageDecoderInternals
frame = image.Frames.AddFrame(previousFrame ?? image.Frames.RootFrame);
// If the first `fcTL` chunk uses a `dispose_op` of APNG_DISPOSE_OP_PREVIOUS it should be treated as APNG_DISPOSE_OP_BACKGROUND.
if (previousFrameControl.DisposeOperation == PngDisposalMethod.Background
|| (previousFrame is null && previousFrameControl.DisposeOperation == PngDisposalMethod.Previous))
if (previousFrameControl.DisposeOperation == PngDisposalMethod.RestoreToBackground
|| (previousFrame is null && previousFrameControl.DisposeOperation == PngDisposalMethod.RestoreToPrevious))
{
Rectangle restoreArea = previousFrameControl.Bounds;
Rectangle interest = Rectangle.Intersect(frame.Bounds(), restoreArea);
@ -650,7 +650,7 @@ internal sealed class PngDecoderCore : IImageDecoderInternals
pixelRegion.Clear();
}
PngFrameMetadata frameMetadata = frame.Metadata.GetPngFrameMetadata();
PngFrameMetadata frameMetadata = frame.Metadata.GetPngMetadata();
frameMetadata.FromChunk(currentFrameControl);
this.previousScanline?.Dispose();
@ -784,10 +784,12 @@ internal sealed class PngDecoderCore : IImageDecoderInternals
{
cancellationToken.ThrowIfCancellationRequested();
int bytesPerFrameScanline = this.CalculateScanlineLength((int)frameControl.Width) + 1;
Span<byte> scanlineSpan = this.scanline.GetSpan()[..bytesPerFrameScanline];
Span<byte> scanSpan = this.scanline.GetSpan()[..bytesPerFrameScanline];
Span<byte> prevSpan = this.previousScanline.GetSpan()[..bytesPerFrameScanline];
while (currentRowBytesRead < bytesPerFrameScanline)
{
int bytesRead = compressedStream.Read(scanlineSpan, currentRowBytesRead, bytesPerFrameScanline - currentRowBytesRead);
int bytesRead = compressedStream.Read(scanSpan, currentRowBytesRead, bytesPerFrameScanline - currentRowBytesRead);
if (bytesRead <= 0)
{
return;
@ -798,25 +800,25 @@ internal sealed class PngDecoderCore : IImageDecoderInternals
currentRowBytesRead = 0;
switch ((FilterType)scanlineSpan[0])
switch ((FilterType)scanSpan[0])
{
case FilterType.None:
break;
case FilterType.Sub:
SubFilter.Decode(scanlineSpan, this.bytesPerPixel);
SubFilter.Decode(scanSpan, this.bytesPerPixel);
break;
case FilterType.Up:
UpFilter.Decode(scanlineSpan, this.previousScanline.GetSpan());
UpFilter.Decode(scanSpan, prevSpan);
break;
case FilterType.Average:
AverageFilter.Decode(scanlineSpan, this.previousScanline.GetSpan(), this.bytesPerPixel);
AverageFilter.Decode(scanSpan, prevSpan, this.bytesPerPixel);
break;
case FilterType.Paeth:
PaethFilter.Decode(scanlineSpan, this.previousScanline.GetSpan(), this.bytesPerPixel);
PaethFilter.Decode(scanSpan, prevSpan, this.bytesPerPixel);
break;
default:
@ -829,7 +831,7 @@ internal sealed class PngDecoderCore : IImageDecoderInternals
break;
}
this.ProcessDefilteredScanline(frameControl, currentRow, scanlineSpan, imageFrame, pngMetadata, blendRowBuffer);
this.ProcessDefilteredScanline(frameControl, currentRow, scanSpan, imageFrame, pngMetadata, blendRowBuffer);
this.SwapScanlineBuffers();
currentRow++;
}
@ -1751,19 +1753,34 @@ internal sealed class PngDecoderCore : IImageDecoderInternals
}
/// <summary>
/// Reads the next data chunk and skip sequence number.
/// Reads the next animated frame data chunk.
/// </summary>
/// <returns>Count of bytes in the next data chunk, or 0 if there are no more data chunks left.</returns>
private int ReadNextDataChunkAndSkipSeq()
private int ReadNextFrameDataChunk()
{
int length = this.ReadNextDataChunk();
if (this.ReadNextDataChunk() is 0)
if (this.nextChunk != null)
{
return 0;
}
Span<byte> buffer = stackalloc byte[20];
_ = this.currentStream.Read(buffer, 0, 4);
if (this.TryReadChunk(buffer, out PngChunk chunk))
{
return length;
if (chunk.Type is PngChunkType.FrameData)
{
chunk.Data?.Dispose();
this.currentStream.Position += 4; // Skip sequence number
return chunk.Length - 4;
}
this.nextChunk = chunk;
}
this.currentStream.Position += 4; // Skip sequence number
return length - 4;
return 0;
}
/// <summary>

8
src/ImageSharp/Formats/Png/PngDisposalMethod.cs

@ -1,4 +1,4 @@
// Copyright (c) Six Labors.
// Copyright (c) Six Labors.
// Licensed under the Six Labors Split License.
namespace SixLabors.ImageSharp.Formats.Png;
@ -11,15 +11,15 @@ public enum PngDisposalMethod
/// <summary>
/// No disposal is done on this frame before rendering the next; the contents of the output buffer are left as is.
/// </summary>
None,
DoNotDispose,
/// <summary>
/// The frame's region of the output buffer is to be cleared to fully transparent black before rendering the next frame.
/// </summary>
Background,
RestoreToBackground,
/// <summary>
/// The frame's region of the output buffer is to be reverted to the previous contents before rendering the next frame.
/// </summary>
Previous
RestoreToPrevious
}

236
src/ImageSharp/Formats/Png/PngEncoderCore.cs

@ -7,8 +7,10 @@ using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using SixLabors.ImageSharp.Common.Helpers;
using SixLabors.ImageSharp.Compression.Zlib;
using SixLabors.ImageSharp.Formats.Gif;
using SixLabors.ImageSharp.Formats.Png.Chunks;
using SixLabors.ImageSharp.Formats.Png.Filters;
using SixLabors.ImageSharp.Formats.Webp;
using SixLabors.ImageSharp.Memory;
using SixLabors.ImageSharp.Metadata;
using SixLabors.ImageSharp.PixelFormats;
@ -116,6 +118,11 @@ internal sealed class PngEncoderCore : IImageEncoderInternals, IDisposable
/// </summary>
private IQuantizer? quantizer;
/// <summary>
/// Any explicit quantized transparent index provided by the background color.
/// </summary>
private int derivedTransparencyIndex = -1;
/// <summary>
/// Initializes a new instance of the <see cref="PngEncoderCore" /> class.
/// </summary>
@ -137,7 +144,7 @@ internal sealed class PngEncoderCore : IImageEncoderInternals, IDisposable
/// <param name="stream">The <see cref="Stream"/> to encode the image data to.</param>
/// <param name="cancellationToken">The token to request cancellation.</param>
public void Encode<TPixel>(Image<TPixel> image, Stream stream, CancellationToken cancellationToken)
where TPixel : unmanaged, IPixel<TPixel>
where TPixel : unmanaged, IPixel<TPixel>
{
Guard.NotNull(image, nameof(image));
Guard.NotNull(stream, nameof(stream));
@ -146,7 +153,7 @@ internal sealed class PngEncoderCore : IImageEncoderInternals, IDisposable
this.height = image.Height;
ImageMetadata metadata = image.Metadata;
PngMetadata pngMetadata = metadata.GetFormatMetadata(PngFormat.Instance);
PngMetadata pngMetadata = GetPngMetadata(image);
this.SanitizeAndSetEncoderOptions<TPixel>(this.encoder, pngMetadata, out this.use16Bit, out this.bytesPerPixel);
stream.Write(PngConstants.HeaderBytes);
@ -162,7 +169,11 @@ internal sealed class PngEncoderCore : IImageEncoderInternals, IDisposable
}
// Do not move this. We require an accurate bit depth for the header chunk.
IndexedImageFrame<TPixel>? quantized = this.CreateQuantizedImageAndUpdateBitDepth(pngMetadata, currentFrame, null);
IndexedImageFrame<TPixel>? quantized = this.CreateQuantizedImageAndUpdateBitDepth(
pngMetadata,
currentFrame,
currentFrame.Bounds(),
null);
this.WriteHeaderChunk(stream);
this.WriteGammaChunk(stream);
@ -176,46 +187,64 @@ internal sealed class PngEncoderCore : IImageEncoderInternals, IDisposable
if (image.Frames.Count > 1)
{
this.WriteAnimationControlChunk(stream, image.Frames.Count, pngMetadata.RepeatCount);
// TODO: We should attempt to optimize the output by clipping the indexed result to
// non-transparent bounds. That way we can assign frame control bounds and encode
// less data. See GifEncoder for the implementation there.
this.WriteAnimationControlChunk(stream, (uint)image.Frames.Count, pngMetadata.RepeatCount);
// Write the first frame.
FrameControl frameControl = this.WriteFrameControlChunk(stream, currentFrame, 0);
this.WriteDataChunks(frameControl, currentFrame, quantized, stream, false);
PngFrameMetadata frameMetadata = GetPngFrameMetadata(currentFrame);
PngDisposalMethod previousDisposal = frameMetadata.DisposalMethod;
FrameControl frameControl = this.WriteFrameControlChunk(stream, frameMetadata, currentFrame.Bounds(), 0);
this.WriteDataChunks(frameControl, currentFrame.PixelBuffer.GetRegion(), quantized, stream, false);
// Capture the global palette for reuse on subsequent frames.
ReadOnlyMemory<TPixel>? previousPalette = quantized?.Palette.ToArray();
// Write following frames.
uint increment = 0;
ImageFrame<TPixel> previousFrame = image.Frames.RootFrame;
// This frame is reused to store de-duplicated pixel buffers.
using ImageFrame<TPixel> encodingFrame = new(image.Configuration, previousFrame.Size());
for (int i = 1; i < image.Frames.Count; i++)
{
ImageFrame<TPixel>? prev = previousDisposal == PngDisposalMethod.RestoreToBackground ? null : previousFrame;
currentFrame = image.Frames[i];
ImageFrame<TPixel>? nextFrame = i < image.Frames.Count - 1 ? image.Frames[i + 1] : null;
frameMetadata = GetPngFrameMetadata(currentFrame);
bool blend = frameMetadata.BlendMethod == PngBlendMethod.Over;
(bool difference, Rectangle bounds) =
AnimationUtilities.DeDuplicatePixels(
image.Configuration,
prev,
currentFrame,
nextFrame,
encodingFrame,
Color.Transparent,
blend);
if (clearTransparency)
{
// Dispose of previous clone and reassign.
clonedFrame?.Dispose();
currentFrame = clonedFrame = currentFrame.Clone();
ClearTransparentPixels(currentFrame);
ClearTransparentPixels(encodingFrame);
}
// Each frame control sequence number must be incremented by the
// number of frame data chunks that follow.
frameControl = this.WriteFrameControlChunk(stream, currentFrame, (uint)i + increment);
// Each frame control sequence number must be incremented by the number of frame data chunks that follow.
frameControl = this.WriteFrameControlChunk(stream, frameMetadata, bounds, (uint)i + increment);
// Dispose of previous quantized frame and reassign.
quantized?.Dispose();
quantized = this.CreateQuantizedImageAndUpdateBitDepth(pngMetadata, currentFrame, previousPalette);
increment += this.WriteDataChunks(frameControl, currentFrame, quantized, stream, true);
quantized = this.CreateQuantizedImageAndUpdateBitDepth(pngMetadata, encodingFrame, bounds, previousPalette);
increment += this.WriteDataChunks(frameControl, encodingFrame.PixelBuffer.GetRegion(bounds), quantized, stream, true);
previousFrame = currentFrame;
previousDisposal = frameMetadata.DisposalMethod;
}
}
else
{
FrameControl frameControl = new((uint)this.width, (uint)this.height);
this.WriteDataChunks(frameControl, currentFrame, quantized, stream, false);
this.WriteDataChunks(frameControl, currentFrame.PixelBuffer.GetRegion(), quantized, stream, false);
}
this.WriteEndChunk(stream);
@ -234,6 +263,54 @@ internal sealed class PngEncoderCore : IImageEncoderInternals, IDisposable
this.currentScanline?.Dispose();
}
/// <summary>
/// Resolves the PNG metadata to encode with, preferring native PNG metadata and
/// falling back to converting GIF or WebP animation metadata when present.
/// </summary>
/// <typeparam name="TPixel">The pixel format.</typeparam>
/// <param name="image">The image being encoded.</param>
/// <returns>A <see cref="PngMetadata"/> instance the encoder may freely mutate.</returns>
private static PngMetadata GetPngMetadata<TPixel>(Image<TPixel> image)
    where TPixel : unmanaged, IPixel<TPixel>
{
    // Native PNG metadata wins; clone it so the source image is never mutated.
    if (image.Metadata.TryGetPngMetadata(out PngMetadata? png))
    {
        return (PngMetadata)png.DeepClone();
    }

    // Otherwise translate animation metadata from the other animated formats.
    if (image.Metadata.TryGetGifMetadata(out GifMetadata? gif))
    {
        return PngMetadata.FromAnimatedMetadata(gif.ToAnimatedImageMetadata());
    }

    if (image.Metadata.TryGetWebpMetadata(out WebpMetadata? webp))
    {
        return PngMetadata.FromAnimatedMetadata(webp.ToAnimatedImageMetadata());
    }

    // Return explicit new instance so we do not mutate the original metadata.
    return new();
}
/// <summary>
/// Resolves the PNG frame metadata to encode with, preferring native PNG frame
/// metadata and falling back to converting GIF or WebP frame metadata when present.
/// </summary>
/// <typeparam name="TPixel">The pixel format.</typeparam>
/// <param name="frame">The frame being encoded.</param>
/// <returns>A <see cref="PngFrameMetadata"/> instance the encoder may freely mutate.</returns>
private static PngFrameMetadata GetPngFrameMetadata<TPixel>(ImageFrame<TPixel> frame)
    where TPixel : unmanaged, IPixel<TPixel>
{
    // Native PNG frame metadata wins; clone it so the source frame is never mutated.
    if (frame.Metadata.TryGetPngMetadata(out PngFrameMetadata? png))
    {
        return (PngFrameMetadata)png.DeepClone();
    }

    // Otherwise translate frame metadata from the other animated formats.
    if (frame.Metadata.TryGetGifMetadata(out GifFrameMetadata? gif))
    {
        return PngFrameMetadata.FromAnimatedMetadata(gif.ToAnimatedImageFrameMetadata());
    }

    if (frame.Metadata.TryGetWebpFrameMetadata(out WebpFrameMetadata? webp))
    {
        return PngFrameMetadata.FromAnimatedMetadata(webp.ToAnimatedImageFrameMetadata());
    }

    // Return explicit new instance so we do not mutate the original metadata.
    return new();
}
/// <summary>
/// Converts transparent pixels to transparent black pixels, which can yield better compression in some cases.
/// </summary>
@ -267,15 +344,17 @@ internal sealed class PngEncoderCore : IImageEncoderInternals, IDisposable
/// <typeparam name="TPixel">The type of the pixel.</typeparam>
/// <param name="metadata">The image metadata.</param>
/// <param name="frame">The frame to quantize.</param>
/// <param name="bounds">The area of interest within the frame.</param>
/// <param name="previousPalette">Any previously derived palette.</param>
/// <returns>The quantized image.</returns>
private IndexedImageFrame<TPixel>? CreateQuantizedImageAndUpdateBitDepth<TPixel>(
PngMetadata metadata,
ImageFrame<TPixel> frame,
Rectangle bounds,
ReadOnlyMemory<TPixel>? previousPalette)
where TPixel : unmanaged, IPixel<TPixel>
{
IndexedImageFrame<TPixel>? quantized = this.CreateQuantizedFrame(this.encoder, this.colorType, this.bitDepth, metadata, frame, previousPalette);
IndexedImageFrame<TPixel>? quantized = this.CreateQuantizedFrame(this.encoder, this.colorType, this.bitDepth, metadata, frame, bounds, previousPalette);
this.bitDepth = CalculateBitDepth(this.colorType, this.bitDepth, quantized);
return quantized;
}
@ -621,7 +700,7 @@ internal sealed class PngEncoderCore : IImageEncoderInternals, IDisposable
/// <param name="stream">The <see cref="Stream"/> containing image data.</param>
/// <param name="framesCount">The number of frames.</param>
/// <param name="playsCount">The number of times to loop this APNG.</param>
private void WriteAnimationControlChunk(Stream stream, int framesCount, int playsCount)
private void WriteAnimationControlChunk(Stream stream, uint framesCount, uint playsCount)
{
AnimationControl acTL = new(framesCount, playsCount);
@ -983,19 +1062,17 @@ internal sealed class PngEncoderCore : IImageEncoderInternals, IDisposable
/// Writes the animation control chunk to the stream.
/// </summary>
/// <param name="stream">The <see cref="Stream"/> containing image data.</param>
/// <param name="imageFrame">The image frame.</param>
/// <param name="frameMetadata">The frame metadata.</param>
/// <param name="bounds">The frame area of interest.</param>
/// <param name="sequenceNumber">The frame sequence number.</param>
private FrameControl WriteFrameControlChunk(Stream stream, ImageFrame imageFrame, uint sequenceNumber)
private FrameControl WriteFrameControlChunk(Stream stream, PngFrameMetadata frameMetadata, Rectangle bounds, uint sequenceNumber)
{
PngFrameMetadata frameMetadata = imageFrame.Metadata.GetPngFrameMetadata();
// TODO: If we can clip the indexed frame for transparent bounds we can set properties here.
FrameControl fcTL = new(
sequenceNumber: sequenceNumber,
width: (uint)imageFrame.Width,
height: (uint)imageFrame.Height,
xOffset: 0,
yOffset: 0,
width: (uint)bounds.Width,
height: (uint)bounds.Height,
xOffset: (uint)bounds.Left,
yOffset: (uint)bounds.Top,
delayNumerator: (ushort)frameMetadata.FrameDelay.Numerator,
delayDenominator: (ushort)frameMetadata.FrameDelay.Denominator,
disposeOperation: frameMetadata.DisposalMethod,
@ -1013,11 +1090,11 @@ internal sealed class PngEncoderCore : IImageEncoderInternals, IDisposable
/// </summary>
/// <typeparam name="TPixel">The pixel format.</typeparam>
/// <param name="frameControl">The frame control</param>
/// <param name="pixels">The frame.</param>
/// <param name="frame">The image frame.</param>
/// <param name="quantized">The quantized pixel data. Can be null.</param>
/// <param name="stream">The stream.</param>
/// <param name="isFrame">Is writing fdAT or IDAT.</param>
private uint WriteDataChunks<TPixel>(FrameControl frameControl, ImageFrame<TPixel> pixels, IndexedImageFrame<TPixel>? quantized, Stream stream, bool isFrame)
private uint WriteDataChunks<TPixel>(FrameControl frameControl, Buffer2DRegion<TPixel> frame, IndexedImageFrame<TPixel>? quantized, Stream stream, bool isFrame)
where TPixel : unmanaged, IPixel<TPixel>
{
byte[] buffer;
@ -1031,16 +1108,16 @@ internal sealed class PngEncoderCore : IImageEncoderInternals, IDisposable
{
if (quantized is not null)
{
this.EncodeAdam7IndexedPixels(frameControl, quantized, deflateStream);
this.EncodeAdam7IndexedPixels(quantized, deflateStream);
}
else
{
this.EncodeAdam7Pixels(frameControl, pixels, deflateStream);
this.EncodeAdam7Pixels(frame, deflateStream);
}
}
else
{
this.EncodePixels(frameControl, pixels, quantized, deflateStream);
this.EncodePixels(frame, quantized, deflateStream);
}
}
@ -1105,54 +1182,43 @@ internal sealed class PngEncoderCore : IImageEncoderInternals, IDisposable
/// Encodes the pixels.
/// </summary>
/// <typeparam name="TPixel">The type of the pixel.</typeparam>
/// <param name="frameControl">The frame control</param>
/// <param name="pixels">The pixels.</param>
/// <param name="quantized">The quantized pixels span.</param>
/// <param name="pixels">The image frame pixel buffer.</param>
/// <param name="quantized">The quantized pixels.</param>
/// <param name="deflateStream">The deflate stream.</param>
private void EncodePixels<TPixel>(FrameControl frameControl, ImageFrame<TPixel> pixels, IndexedImageFrame<TPixel>? quantized, ZlibDeflateStream deflateStream)
private void EncodePixels<TPixel>(Buffer2DRegion<TPixel> pixels, IndexedImageFrame<TPixel>? quantized, ZlibDeflateStream deflateStream)
where TPixel : unmanaged, IPixel<TPixel>
{
int width = (int)frameControl.Width;
int height = (int)frameControl.Height;
int bytesPerScanline = this.CalculateScanlineLength(width);
int bytesPerScanline = this.CalculateScanlineLength(pixels.Width);
int filterLength = bytesPerScanline + 1;
this.AllocateScanlineBuffers(bytesPerScanline);
using IMemoryOwner<byte> filterBuffer = this.memoryAllocator.Allocate<byte>(filterLength, AllocationOptions.Clean);
using IMemoryOwner<byte> attemptBuffer = this.memoryAllocator.Allocate<byte>(filterLength, AllocationOptions.Clean);
pixels.ProcessPixelRows(accessor =>
Span<byte> filter = filterBuffer.GetSpan();
Span<byte> attempt = attemptBuffer.GetSpan();
for (int y = 0; y < pixels.Height; y++)
{
Span<byte> filter = filterBuffer.GetSpan();
Span<byte> attempt = attemptBuffer.GetSpan();
for (int y = (int)frameControl.YOffset; y < frameControl.YMax; y++)
{
this.CollectAndFilterPixelRow(accessor.GetRowSpan(y), ref filter, ref attempt, quantized, y);
deflateStream.Write(filter);
this.SwapScanlineBuffers();
}
});
this.CollectAndFilterPixelRow(pixels.DangerousGetRowSpan(y), ref filter, ref attempt, quantized, y);
deflateStream.Write(filter);
this.SwapScanlineBuffers();
}
}
/// <summary>
/// Interlaced encoding the pixels.
/// </summary>
/// <typeparam name="TPixel">The type of the pixel.</typeparam>
/// <param name="frameControl">The frame control</param>
/// <param name="frame">The image frame.</param>
/// <param name="pixels">The image frame pixel buffer.</param>
/// <param name="deflateStream">The deflate stream.</param>
private void EncodeAdam7Pixels<TPixel>(FrameControl frameControl, ImageFrame<TPixel> frame, ZlibDeflateStream deflateStream)
private void EncodeAdam7Pixels<TPixel>(Buffer2DRegion<TPixel> pixels, ZlibDeflateStream deflateStream)
where TPixel : unmanaged, IPixel<TPixel>
{
int width = (int)frameControl.XMax;
int height = (int)frameControl.YMax;
Buffer2D<TPixel> pixelBuffer = frame.PixelBuffer;
for (int pass = 0; pass < 7; pass++)
{
int startRow = Adam7.FirstRow[pass] + (int)frameControl.YOffset;
int startCol = Adam7.FirstColumn[pass] + (int)frameControl.XOffset;
int blockWidth = Adam7.ComputeBlockWidth(width, pass);
int startRow = Adam7.FirstRow[pass];
int startCol = Adam7.FirstColumn[pass];
int blockWidth = Adam7.ComputeBlockWidth(pixels.Width, pass);
int bytesPerScanline = this.bytesPerPixel <= 1
? ((blockWidth * this.bitDepth) + 7) / 8
@ -1169,13 +1235,13 @@ internal sealed class PngEncoderCore : IImageEncoderInternals, IDisposable
Span<byte> filter = filterBuffer.GetSpan();
Span<byte> attempt = attemptBuffer.GetSpan();
for (int row = startRow; row < height; row += Adam7.RowIncrement[pass])
for (int row = startRow; row < pixels.Height; row += Adam7.RowIncrement[pass])
{
// Collect pixel data
Span<TPixel> srcRow = pixelBuffer.DangerousGetRowSpan(row);
for (int col = startCol, i = 0; col < frameControl.XMax; col += Adam7.ColumnIncrement[pass])
Span<TPixel> srcRow = pixels.DangerousGetRowSpan(row);
for (int col = startCol, i = 0; col < pixels.Width; col += Adam7.ColumnIncrement[pass], i++)
{
block[i++] = srcRow[col];
block[i] = srcRow[col];
}
// Encode data
@ -1193,19 +1259,16 @@ internal sealed class PngEncoderCore : IImageEncoderInternals, IDisposable
/// Interlaced encoding the quantized (indexed, with palette) pixels.
/// </summary>
/// <typeparam name="TPixel">The type of the pixel.</typeparam>
/// <param name="frameControl">The frame control</param>
/// <param name="quantized">The quantized.</param>
/// <param name="deflateStream">The deflate stream.</param>
private void EncodeAdam7IndexedPixels<TPixel>(FrameControl frameControl, IndexedImageFrame<TPixel> quantized, ZlibDeflateStream deflateStream)
private void EncodeAdam7IndexedPixels<TPixel>(IndexedImageFrame<TPixel> quantized, ZlibDeflateStream deflateStream)
where TPixel : unmanaged, IPixel<TPixel>
{
int width = (int)frameControl.Width;
int endRow = (int)frameControl.YMax;
for (int pass = 0; pass < 7; pass++)
{
int startRow = Adam7.FirstRow[pass] + (int)frameControl.YOffset;
int startCol = Adam7.FirstColumn[pass] + (int)frameControl.XOffset;
int blockWidth = Adam7.ComputeBlockWidth(width, pass);
int startRow = Adam7.FirstRow[pass];
int startCol = Adam7.FirstColumn[pass];
int blockWidth = Adam7.ComputeBlockWidth(quantized.Width, pass);
int bytesPerScanline = this.bytesPerPixel <= 1
? ((blockWidth * this.bitDepth) + 7) / 8
@ -1223,16 +1286,13 @@ internal sealed class PngEncoderCore : IImageEncoderInternals, IDisposable
Span<byte> filter = filterBuffer.GetSpan();
Span<byte> attempt = attemptBuffer.GetSpan();
for (int row = startRow; row < endRow; row += Adam7.RowIncrement[pass])
for (int row = startRow; row < quantized.Height; row += Adam7.RowIncrement[pass])
{
// Collect data
ReadOnlySpan<byte> srcRow = quantized.DangerousGetRowSpan(row);
for (int col = startCol, i = 0;
col < frameControl.XMax;
col += Adam7.ColumnIncrement[pass])
for (int col = startCol, i = 0; col < quantized.Width; col += Adam7.ColumnIncrement[pass], i++)
{
block[i] = srcRow[col];
i++;
}
// Encode data
@ -1404,6 +1464,7 @@ internal sealed class PngEncoderCore : IImageEncoderInternals, IDisposable
/// <param name="bitDepth">The bits per component.</param>
/// <param name="metadata">The image metadata.</param>
/// <param name="frame">The frame to quantize.</param>
/// <param name="bounds">The frame area of interest.</param>
/// <param name="previousPalette">Any previously derived palette.</param>
private IndexedImageFrame<TPixel>? CreateQuantizedFrame<TPixel>(
QuantizingImageEncoder encoder,
@ -1411,6 +1472,7 @@ internal sealed class PngEncoderCore : IImageEncoderInternals, IDisposable
byte bitDepth,
PngMetadata metadata,
ImageFrame<TPixel> frame,
Rectangle bounds,
ReadOnlyMemory<TPixel>? previousPalette)
where TPixel : unmanaged, IPixel<TPixel>
{
@ -1422,9 +1484,13 @@ internal sealed class PngEncoderCore : IImageEncoderInternals, IDisposable
if (previousPalette is not null)
{
// Use the previously derived palette created by quantizing the root frame to quantize the current frame.
using PaletteQuantizer<TPixel> paletteQuantizer = new(this.configuration, this.quantizer!.Options, previousPalette.Value, -1);
using PaletteQuantizer<TPixel> paletteQuantizer = new(
this.configuration,
this.quantizer!.Options,
previousPalette.Value,
this.derivedTransparencyIndex);
paletteQuantizer.BuildPalette(encoder.PixelSamplingStrategy, frame);
return paletteQuantizer.QuantizeFrame(frame, frame.Bounds());
return paletteQuantizer.QuantizeFrame(frame, bounds);
}
// Use the metadata to determine what quantization depth to use if no quantizer has been set.
@ -1432,8 +1498,10 @@ internal sealed class PngEncoderCore : IImageEncoderInternals, IDisposable
{
if (metadata.ColorTable is not null)
{
// Use the provided palette. The caller is responsible for setting values.
this.quantizer = new PaletteQuantizer(metadata.ColorTable.Value);
// We can use the color data from the decoded metadata here.
// We avoid dithering by default to preserve the original colors.
this.derivedTransparencyIndex = metadata.ColorTable.Value.Span.IndexOf(Color.Transparent);
this.quantizer = new PaletteQuantizer(metadata.ColorTable.Value, new() { Dither = null }, this.derivedTransparencyIndex);
}
else
{
@ -1445,7 +1513,7 @@ internal sealed class PngEncoderCore : IImageEncoderInternals, IDisposable
using IQuantizer<TPixel> frameQuantizer = this.quantizer.CreatePixelSpecificQuantizer<TPixel>(frame.Configuration);
frameQuantizer.BuildPalette(encoder.PixelSamplingStrategy, frame);
return frameQuantizer.QuantizeFrame(frame, frame.Bounds());
return frameQuantizer.QuantizeFrame(frame, bounds);
}
/// <summary>

18
src/ImageSharp/Formats/Png/PngFrameMetadata.cs

@ -34,7 +34,7 @@ public class PngFrameMetadata : IDeepCloneable
/// wait before continuing with the processing of the Data Stream.
/// The clock starts ticking immediately after the graphic is rendered.
/// </summary>
public Rational FrameDelay { get; set; }
public Rational FrameDelay { get; set; } = new(0);
/// <summary>
/// Gets or sets the type of frame area disposal to be done after rendering this frame
@ -59,4 +59,20 @@ public class PngFrameMetadata : IDeepCloneable
/// <inheritdoc/>
public IDeepCloneable DeepClone() => new PngFrameMetadata(this);
/// <summary>
/// Creates PNG frame metadata from the shared animated-image frame representation.
/// </summary>
/// <param name="metadata">The shared animated frame metadata.</param>
/// <returns>The equivalent <see cref="PngFrameMetadata"/>.</returns>
internal static PngFrameMetadata FromAnimatedMetadata(AnimatedImageFrameMetadata metadata)
{
    return new PngFrameMetadata
    {
        // The duration is stored as a rational number of seconds.
        FrameDelay = new(metadata.Duration.TotalMilliseconds / 1000),
        DisposalMethod = GetMode(metadata.DisposalMode),
        BlendMethod = metadata.BlendMode == FrameBlendMode.Source ? PngBlendMethod.Source : PngBlendMethod.Over,
    };
}
/// <summary>
/// Maps a shared frame disposal mode onto the PNG disposal method.
/// Unspecified and any unknown value map to <see cref="PngDisposalMethod.DoNotDispose"/>.
/// </summary>
private static PngDisposalMethod GetMode(FrameDisposalMode mode)
{
    switch (mode)
    {
        case FrameDisposalMode.RestoreToBackground:
            return PngDisposalMethod.RestoreToBackground;
        case FrameDisposalMode.RestoreToPrevious:
            return PngDisposalMethod.RestoreToPrevious;
        default:
            return PngDisposalMethod.DoNotDispose;
    }
}
}

33
src/ImageSharp/Formats/Png/PngMetadata.cs

@ -2,7 +2,6 @@
// Licensed under the Six Labors Split License.
using SixLabors.ImageSharp.Formats.Png.Chunks;
using SixLabors.ImageSharp.PixelFormats;
namespace SixLabors.ImageSharp.Formats.Png;
@ -82,8 +81,38 @@ public class PngMetadata : IDeepCloneable
/// <summary>
/// Gets or sets the number of times to loop this APNG. 0 indicates infinite looping.
/// </summary>
public int RepeatCount { get; set; }
public uint RepeatCount { get; set; } = 1;
/// <inheritdoc/>
public IDeepCloneable DeepClone() => new PngMetadata(this);
/// <summary>
/// Creates PNG metadata from the shared animated-image representation.
/// </summary>
/// <param name="metadata">The shared animated metadata.</param>
/// <returns>The equivalent <see cref="PngMetadata"/>.</returns>
internal static PngMetadata FromAnimatedMetadata(AnimatedImageMetadata metadata)
{
    // Should the conversion be from a format that uses 24-bit palette entries (gif)
    // we need to clone and adjust the color table to allow for transparency.
    Color[]? colorTable = metadata.ColorTable?.ToArray();
    if (colorTable != null)
    {
        for (int i = 0; i < colorTable.Length; i++)
        {
            if (colorTable[i] == metadata.BackgroundColor)
            {
                // Png treats background as fully empty; only the first match is replaced.
                colorTable[i] = Color.Transparent;
                break;
            }
        }
    }

    bool hasPalette = colorTable != null;
    return new()
    {
        ColorType = hasPalette ? PngColorType.Palette : null,
        BitDepth = hasPalette
            ? (PngBitDepth)Numerics.Clamp(ColorNumerics.GetBitsNeededForColorDepth(colorTable!.Length), 1, 8)
            : null,
        ColorTable = colorTable,
        RepeatCount = metadata.RepeatCount,
    };
}
}

37
src/ImageSharp/Formats/Webp/AlphaEncoder.cs

@ -27,7 +27,7 @@ internal static class AlphaEncoder
/// <param name="size">The size in bytes of the alpha data.</param>
/// <returns>The encoded alpha data.</returns>
public static IMemoryOwner<byte> EncodeAlpha<TPixel>(
ImageFrame<TPixel> frame,
Buffer2DRegion<TPixel> frame,
Configuration configuration,
MemoryAllocator memoryAllocator,
bool skipMetadata,
@ -35,8 +35,6 @@ internal static class AlphaEncoder
out int size)
where TPixel : unmanaged, IPixel<TPixel>
{
int width = frame.Width;
int height = frame.Height;
IMemoryOwner<byte> alphaData = ExtractAlphaChannel(frame, configuration, memoryAllocator);
if (compress)
@ -46,8 +44,8 @@ internal static class AlphaEncoder
using Vp8LEncoder lossLessEncoder = new(
memoryAllocator,
configuration,
width,
height,
frame.Width,
frame.Height,
quality,
skipMetadata,
effort,
@ -58,14 +56,14 @@ internal static class AlphaEncoder
// The transparency information will be stored in the green channel of the ARGB quadruplet.
// The green channel is allowed extra transformation steps in the specification -- unlike the other channels,
// that can improve compression.
using ImageFrame<Rgba32> alphaAsFrame = DispatchAlphaToGreen(frame, alphaData.GetSpan());
using ImageFrame<Bgra32> alphaAsFrame = DispatchAlphaToGreen(configuration, frame, alphaData.GetSpan());
size = lossLessEncoder.EncodeAlphaImageData(alphaAsFrame, alphaData);
size = lossLessEncoder.EncodeAlphaImageData(alphaAsFrame.PixelBuffer.GetRegion(), alphaData);
return alphaData;
}
size = width * height;
size = frame.Width * frame.Height;
return alphaData;
}
@ -73,25 +71,28 @@ internal static class AlphaEncoder
/// Store the transparency in the green channel.
/// </summary>
/// <typeparam name="TPixel">The pixel format.</typeparam>
/// <param name="frame">The <see cref="ImageFrame{TPixel}"/> to encode from.</param>
/// <param name="configuration">The configuration.</param>
/// <param name="frame">The pixel buffer to encode from.</param>
/// <param name="alphaData">A byte sequence of length width * height, containing all the 8-bit transparency values in scan order.</param>
/// <returns>The transparency frame.</returns>
private static ImageFrame<Rgba32> DispatchAlphaToGreen<TPixel>(ImageFrame<TPixel> frame, Span<byte> alphaData)
private static ImageFrame<Bgra32> DispatchAlphaToGreen<TPixel>(Configuration configuration, Buffer2DRegion<TPixel> frame, Span<byte> alphaData)
where TPixel : unmanaged, IPixel<TPixel>
{
int width = frame.Width;
int height = frame.Height;
ImageFrame<Rgba32> alphaAsFrame = new ImageFrame<Rgba32>(Configuration.Default, width, height);
ImageFrame<Bgra32> alphaAsFrame = new(configuration, width, height);
for (int y = 0; y < height; y++)
{
Memory<Rgba32> rowBuffer = alphaAsFrame.DangerousGetPixelRowMemory(y);
Span<Rgba32> pixelRow = rowBuffer.Span;
Memory<Bgra32> rowBuffer = alphaAsFrame.DangerousGetPixelRowMemory(y);
Span<Bgra32> pixelRow = rowBuffer.Span;
Span<byte> alphaRow = alphaData.Slice(y * width, width);
// TODO: This can be probably simd optimized.
for (int x = 0; x < width; x++)
{
// Leave A/R/B channels zero'd.
pixelRow[x] = new Rgba32(0, alphaRow[x], 0, 0);
pixelRow[x] = new Bgra32(0, alphaRow[x], 0, 0);
}
}
@ -106,12 +107,12 @@ internal static class AlphaEncoder
/// <param name="configuration">The global configuration.</param>
/// <param name="memoryAllocator">The memory manager.</param>
/// <returns>A byte sequence of length width * height, containing all the 8-bit transparency values in scan order.</returns>
private static IMemoryOwner<byte> ExtractAlphaChannel<TPixel>(ImageFrame<TPixel> frame, Configuration configuration, MemoryAllocator memoryAllocator)
private static IMemoryOwner<byte> ExtractAlphaChannel<TPixel>(Buffer2DRegion<TPixel> frame, Configuration configuration, MemoryAllocator memoryAllocator)
where TPixel : unmanaged, IPixel<TPixel>
{
Buffer2D<TPixel> imageBuffer = frame.PixelBuffer;
int height = frame.Height;
int width = frame.Width;
int height = frame.Height;
IMemoryOwner<byte> alphaDataBuffer = memoryAllocator.Allocate<byte>(width * height);
Span<byte> alphaData = alphaDataBuffer.GetSpan();
@ -120,7 +121,7 @@ internal static class AlphaEncoder
for (int y = 0; y < height; y++)
{
Span<TPixel> rowSpan = imageBuffer.DangerousGetRowSpan(y);
Span<TPixel> rowSpan = frame.DangerousGetRowSpan(y);
PixelOperations<TPixel>.Instance.ToRgba32(configuration, rowSpan, rgbaRow);
int offset = y * width;
for (int x = 0; x < width; x++)

7
src/ImageSharp/Formats/Webp/BitWriter/BitWriterBase.cs

@ -1,7 +1,6 @@
// Copyright (c) Six Labors.
// Licensed under the Six Labors Split License.
using System.Diagnostics;
using SixLabors.ImageSharp.Common.Helpers;
using SixLabors.ImageSharp.Formats.Webp.Chunks;
using SixLabors.ImageSharp.Metadata.Profiles.Exif;
@ -100,9 +99,7 @@ internal abstract class BitWriterBase
bool hasAnimation)
{
// Write file size later
long pos = RiffHelper.BeginWriteRiffFile(stream, WebpConstants.WebpFourCc);
Debug.Assert(pos is 4, "Stream should be written from position 0.");
RiffHelper.BeginWriteRiffFile(stream, WebpConstants.WebpFourCc);
// Write VP8X, header if necessary.
bool isVp8X = exifProfile != null || xmpProfile != null || iccProfile != null || hasAlpha || hasAnimation;
@ -160,7 +157,7 @@ internal abstract class BitWriterBase
/// <param name="loopCount">The number of times to loop the animation. If it is 0, this means infinitely.</param>
public static void WriteAnimationParameter(Stream stream, Color background, ushort loopCount)
{
WebpAnimationParameter chunk = new(background.ToRgba32().Rgba, loopCount);
WebpAnimationParameter chunk = new(background.ToBgra32().PackedValue, loopCount);
chunk.WriteTo(stream);
}

24
src/ImageSharp/Formats/Webp/Chunks/WebpFrameData.cs

@ -12,7 +12,7 @@ internal readonly struct WebpFrameData
/// </summary>
public const uint HeaderSize = 16;
public WebpFrameData(uint dataSize, uint x, uint y, uint width, uint height, uint duration, WebpBlendingMethod blendingMethod, WebpDisposalMethod disposalMethod)
public WebpFrameData(uint dataSize, uint x, uint y, uint width, uint height, uint duration, WebpBlendMethod blendingMethod, WebpDisposalMethod disposalMethod)
{
this.DataSize = dataSize;
this.X = x;
@ -32,12 +32,12 @@ internal readonly struct WebpFrameData
width,
height,
duration,
(flags & 2) != 0 ? WebpBlendingMethod.DoNotBlend : WebpBlendingMethod.AlphaBlending,
(flags & 1) == 1 ? WebpDisposalMethod.Dispose : WebpDisposalMethod.DoNotDispose)
(flags & 2) == 0 ? WebpBlendMethod.Over : WebpBlendMethod.Source,
(flags & 1) == 1 ? WebpDisposalMethod.RestoreToBackground : WebpDisposalMethod.DoNotDispose)
{
}
public WebpFrameData(uint x, uint y, uint width, uint height, uint duration, WebpBlendingMethod blendingMethod, WebpDisposalMethod disposalMethod)
public WebpFrameData(uint x, uint y, uint width, uint height, uint duration, WebpBlendMethod blendingMethod, WebpDisposalMethod disposalMethod)
: this(0, x, y, width, height, duration, blendingMethod, disposalMethod)
{
}
@ -76,14 +76,14 @@ internal readonly struct WebpFrameData
/// <summary>
/// Gets how transparent pixels of the current frame are to be blended with corresponding pixels of the previous canvas.
/// </summary>
public WebpBlendingMethod BlendingMethod { get; }
public WebpBlendMethod BlendingMethod { get; }
/// <summary>
/// Gets how the current frame is to be treated after it has been displayed (before rendering the next frame) on the canvas.
/// </summary>
public WebpDisposalMethod DisposalMethod { get; }
public Rectangle Bounds => new((int)this.X * 2, (int)this.Y * 2, (int)this.Width, (int)this.Height);
public Rectangle Bounds => new((int)this.X, (int)this.Y, (int)this.Width, (int)this.Height);
/// <summary>
/// Writes the animation frame(<see cref="WebpChunkType.FrameData"/>) to the stream.
@ -93,13 +93,13 @@ internal readonly struct WebpFrameData
{
byte flags = 0;
if (this.BlendingMethod is WebpBlendingMethod.DoNotBlend)
if (this.BlendingMethod is WebpBlendMethod.Source)
{
// Set blending flag.
flags |= 2;
}
if (this.DisposalMethod is WebpDisposalMethod.Dispose)
if (this.DisposalMethod is WebpDisposalMethod.RestoreToBackground)
{
// Set disposal flag.
flags |= 1;
@ -107,8 +107,8 @@ internal readonly struct WebpFrameData
long pos = RiffHelper.BeginWriteChunk(stream, (uint)WebpChunkType.FrameData);
WebpChunkParsingUtils.WriteUInt24LittleEndian(stream, this.X);
WebpChunkParsingUtils.WriteUInt24LittleEndian(stream, this.Y);
WebpChunkParsingUtils.WriteUInt24LittleEndian(stream, (uint)Math.Round(this.X / 2f));
WebpChunkParsingUtils.WriteUInt24LittleEndian(stream, (uint)Math.Round(this.Y / 2f));
WebpChunkParsingUtils.WriteUInt24LittleEndian(stream, this.Width - 1);
WebpChunkParsingUtils.WriteUInt24LittleEndian(stream, this.Height - 1);
WebpChunkParsingUtils.WriteUInt24LittleEndian(stream, this.Duration);
@ -128,8 +128,8 @@ internal readonly struct WebpFrameData
WebpFrameData data = new(
dataSize: WebpChunkParsingUtils.ReadChunkSize(stream, buffer),
x: WebpChunkParsingUtils.ReadUInt24LittleEndian(stream, buffer),
y: WebpChunkParsingUtils.ReadUInt24LittleEndian(stream, buffer),
x: WebpChunkParsingUtils.ReadUInt24LittleEndian(stream, buffer) * 2,
y: WebpChunkParsingUtils.ReadUInt24LittleEndian(stream, buffer) * 2,
width: WebpChunkParsingUtils.ReadUInt24LittleEndian(stream, buffer) + 1,
height: WebpChunkParsingUtils.ReadUInt24LittleEndian(stream, buffer) + 1,
duration: WebpChunkParsingUtils.ReadUInt24LittleEndian(stream, buffer),

73
src/ImageSharp/Formats/Webp/Lossless/Vp8LEncoder.cs

@ -240,7 +240,7 @@ internal class Vp8LEncoder : IDisposable
public void EncodeHeader<TPixel>(Image<TPixel> image, Stream stream, bool hasAnimation)
where TPixel : unmanaged, IPixel<TPixel>
{
// Write bytes from the bitwriter buffer to the stream.
// Write bytes from the bit-writer buffer to the stream.
ImageMetadata metadata = image.Metadata;
metadata.SyncProfiles();
@ -259,15 +259,15 @@ internal class Vp8LEncoder : IDisposable
if (hasAnimation)
{
WebpMetadata webpMetadata = metadata.GetWebpMetadata();
BitWriterBase.WriteAnimationParameter(stream, webpMetadata.AnimationBackground, webpMetadata.AnimationLoopCount);
WebpMetadata webpMetadata = WebpCommonUtils.GetWebpMetadata(image);
BitWriterBase.WriteAnimationParameter(stream, webpMetadata.BackgroundColor, webpMetadata.RepeatCount);
}
}
public void EncodeFooter<TPixel>(Image<TPixel> image, Stream stream)
where TPixel : unmanaged, IPixel<TPixel>
{
// Write bytes from the bitwriter buffer to the stream.
// Write bytes from the bit-writer buffer to the stream.
ImageMetadata metadata = image.Metadata;
ExifProfile exifProfile = this.skipMetadata ? null : metadata.ExifProfile;
@ -280,26 +280,25 @@ internal class Vp8LEncoder : IDisposable
/// Encodes the image as lossless webp to the specified stream.
/// </summary>
/// <typeparam name="TPixel">The pixel format.</typeparam>
/// <param name="frame">The <see cref="ImageFrame{TPixel}"/> to encode from.</param>
/// <param name="frame">The image frame to encode from.</param>
/// <param name="bounds">The region of interest within the frame to encode.</param>
/// <param name="frameMetadata">The frame metadata.</param>
/// <param name="stream">The <see cref="Stream"/> to encode the image data to.</param>
/// <param name="hasAnimation">Flag indicating, if an animation parameter is present.</param>
public void Encode<TPixel>(ImageFrame<TPixel> frame, Stream stream, bool hasAnimation)
public void Encode<TPixel>(ImageFrame<TPixel> frame, Rectangle bounds, WebpFrameMetadata frameMetadata, Stream stream, bool hasAnimation)
where TPixel : unmanaged, IPixel<TPixel>
{
int width = frame.Width;
int height = frame.Height;
// Convert image pixels to bgra array.
bool hasAlpha = this.ConvertPixelsToBgra(frame, width, height);
bool hasAlpha = this.ConvertPixelsToBgra(frame.PixelBuffer.GetRegion(bounds));
// Write the image size.
this.WriteImageSize(width, height);
this.WriteImageSize(bounds.Width, bounds.Height);
// Write the non-trivial Alpha flag and lossless version.
this.WriteAlphaAndVersion(hasAlpha);
// Encode the main image stream.
this.EncodeStream(frame);
this.EncodeStream(bounds.Width, bounds.Height);
this.bitWriter.Finish();
@ -307,21 +306,18 @@ internal class Vp8LEncoder : IDisposable
if (hasAnimation)
{
WebpFrameMetadata frameMetadata = frame.Metadata.GetWebpMetadata();
// TODO: If we can clip the indexed frame for transparent bounds we can set properties here.
prevPosition = new WebpFrameData(
0,
0,
(uint)frame.Width,
(uint)frame.Height,
(uint)bounds.Left,
(uint)bounds.Top,
(uint)bounds.Width,
(uint)bounds.Height,
frameMetadata.FrameDelay,
frameMetadata.BlendMethod,
frameMetadata.DisposalMethod)
.WriteHeaderTo(stream);
}
// Write bytes from the bitwriter buffer to the stream.
// Write bytes from the bit-writer buffer to the stream.
this.bitWriter.WriteEncodedImageToStream(stream);
if (hasAnimation)
@ -334,12 +330,12 @@ internal class Vp8LEncoder : IDisposable
/// Encodes the alpha image data using the webp lossless compression.
/// </summary>
/// <typeparam name="TPixel">The type of the pixel.</typeparam>
/// <param name="frame">The <see cref="ImageFrame{TPixel}"/> to encode from.</param>
/// <param name="frame">The alpha-pixel data to encode from.</param>
/// <param name="alphaData">The destination buffer to write the encoded alpha data to.</param>
/// <returns>The size of the compressed data in bytes.
/// If the size of the data is the same as the pixel count, the compression would not yield in smaller data and is left uncompressed.
/// </returns>
public int EncodeAlphaImageData<TPixel>(ImageFrame<TPixel> frame, IMemoryOwner<byte> alphaData)
public int EncodeAlphaImageData<TPixel>(Buffer2DRegion<TPixel> frame, IMemoryOwner<byte> alphaData)
where TPixel : unmanaged, IPixel<TPixel>
{
int width = frame.Width;
@ -347,10 +343,10 @@ internal class Vp8LEncoder : IDisposable
int pixelCount = width * height;
// Convert image pixels to bgra array.
this.ConvertPixelsToBgra(frame, width, height);
this.ConvertPixelsToBgra(frame);
// The image-stream will NOT contain any headers describing the image dimension, the dimension is already known.
this.EncodeStream(frame);
this.EncodeStream(width, height);
this.bitWriter.Finish();
int size = this.bitWriter.NumBytes;
if (size >= pixelCount)
@ -364,7 +360,7 @@ internal class Vp8LEncoder : IDisposable
}
/// <summary>
/// Writes the image size to the bitwriter buffer.
/// Writes the image size to the bit writer buffer.
/// </summary>
/// <param name="inputImgWidth">The input image width.</param>
/// <param name="inputImgHeight">The input image height.</param>
@ -381,7 +377,7 @@ internal class Vp8LEncoder : IDisposable
}
/// <summary>
/// Writes a flag indicating if alpha channel is used and the VP8L version to the bitwriter buffer.
/// Writes a flag indicating if alpha channel is used and the VP8L version to the bit-writer buffer.
/// </summary>
/// <param name="hasAlpha">Indicates if a alpha channel is present.</param>
private void WriteAlphaAndVersion(bool hasAlpha)
@ -393,14 +389,10 @@ internal class Vp8LEncoder : IDisposable
/// <summary>
/// Encodes the image stream using lossless webp format.
/// </summary>
/// <typeparam name="TPixel">The pixel type.</typeparam>
/// <param name="frame">The frame to encode.</param>
private void EncodeStream<TPixel>(ImageFrame<TPixel> frame)
where TPixel : unmanaged, IPixel<TPixel>
/// <param name="width">The image frame width.</param>
/// <param name="height">The image frame height.</param>
private void EncodeStream(int width, int height)
{
int width = frame.Width;
int height = frame.Height;
Span<uint> bgra = this.Bgra.GetSpan();
Span<uint> encodedData = this.EncodedData.GetSpan();
bool lowEffort = this.method == 0;
@ -508,23 +500,20 @@ internal class Vp8LEncoder : IDisposable
/// Converts the pixels of the image to bgra.
/// </summary>
/// <typeparam name="TPixel">The type of the pixels.</typeparam>
/// <param name="frame">The frame to convert.</param>
/// <param name="width">The width of the image.</param>
/// <param name="height">The height of the image.</param>
/// <param name="pixels">The frame pixel buffer to convert.</param>
/// <returns>true, if the image is non opaque.</returns>
private bool ConvertPixelsToBgra<TPixel>(ImageFrame<TPixel> frame, int width, int height)
private bool ConvertPixelsToBgra<TPixel>(Buffer2DRegion<TPixel> pixels)
where TPixel : unmanaged, IPixel<TPixel>
{
Buffer2D<TPixel> imageBuffer = frame.PixelBuffer;
bool nonOpaque = false;
Span<uint> bgra = this.Bgra.GetSpan();
Span<byte> bgraBytes = MemoryMarshal.Cast<uint, byte>(bgra);
int widthBytes = width * 4;
for (int y = 0; y < height; y++)
int widthBytes = pixels.Width * 4;
for (int y = 0; y < pixels.Height; y++)
{
Span<TPixel> rowSpan = imageBuffer.DangerousGetRowSpan(y);
Span<TPixel> rowSpan = pixels.DangerousGetRowSpan(y);
Span<byte> rowBytes = bgraBytes.Slice(y * widthBytes, widthBytes);
PixelOperations<TPixel>.Instance.ToBgra32Bytes(this.configuration, rowSpan, rowBytes, width);
PixelOperations<TPixel>.Instance.ToBgra32Bytes(this.configuration, rowSpan, rowBytes, pixels.Width);
if (!nonOpaque)
{
Span<Bgra32> rowBgra = MemoryMarshal.Cast<byte, Bgra32>(rowBytes);

64
src/ImageSharp/Formats/Webp/Lossy/Vp8Encoder.cs

@ -333,8 +333,8 @@ internal class Vp8Encoder : IDisposable
if (hasAnimation)
{
WebpMetadata webpMetadata = metadata.GetWebpMetadata();
BitWriterBase.WriteAnimationParameter(stream, webpMetadata.AnimationBackground, webpMetadata.AnimationLoopCount);
WebpMetadata webpMetadata = WebpCommonUtils.GetWebpMetadata(image);
BitWriterBase.WriteAnimationParameter(stream, webpMetadata.BackgroundColor, webpMetadata.RepeatCount);
}
}
@ -351,44 +351,53 @@ internal class Vp8Encoder : IDisposable
}
/// <summary>
/// Encodes the image to the specified stream from the <see cref="Image{TPixel}"/>.
/// Encodes the animated image frame to the specified stream.
/// </summary>
/// <typeparam name="TPixel">The pixel format.</typeparam>
/// <param name="frame">The <see cref="ImageFrame{TPixel}"/> to encode from.</param>
/// <param name="stream">The <see cref="Stream"/> to encode the image data to.</param>
public void EncodeAnimation<TPixel>(ImageFrame<TPixel> frame, Stream stream)
/// <param name="frame">The image frame to encode from.</param>
/// <param name="stream">The stream to encode the image data to.</param>
/// <param name="bounds">The region of interest within the frame to encode.</param>
/// <param name="frameMetadata">The frame metadata.</param>
public void EncodeAnimation<TPixel>(ImageFrame<TPixel> frame, Stream stream, Rectangle bounds, WebpFrameMetadata frameMetadata)
where TPixel : unmanaged, IPixel<TPixel> =>
this.Encode(frame, stream, true, null);
this.Encode(stream, frame, bounds, frameMetadata, true, null);
/// <summary>
/// Encodes the image to the specified stream from the <see cref="Image{TPixel}"/>.
/// Encodes the static image frame to the specified stream.
/// </summary>
/// <typeparam name="TPixel">The pixel format.</typeparam>
/// <param name="image">The <see cref="Image{TPixel}"/> to encode from.</param>
/// <param name="stream">The <see cref="Stream"/> to encode the image data to.</param>
public void EncodeStatic<TPixel>(Image<TPixel> image, Stream stream)
where TPixel : unmanaged, IPixel<TPixel> =>
this.Encode(image.Frames.RootFrame, stream, false, image);
/// <param name="stream">The stream to encode the image data to.</param>
/// <param name="image">The image to encode from.</param>
public void EncodeStatic<TPixel>(Stream stream, Image<TPixel> image)
where TPixel : unmanaged, IPixel<TPixel>
{
ImageFrame<TPixel> frame = image.Frames.RootFrame;
this.Encode(stream, frame, image.Bounds, WebpCommonUtils.GetWebpFrameMetadata(frame), false, image);
}
/// <summary>
/// Encodes the image to the specified stream from the <see cref="Image{TPixel}"/>.
/// Encodes the image to the specified stream.
/// </summary>
/// <typeparam name="TPixel">The pixel format.</typeparam>
/// <param name="frame">The <see cref="ImageFrame{TPixel}"/> to encode from.</param>
/// <param name="stream">The <see cref="Stream"/> to encode the image data to.</param>
/// <param name="stream">The stream to encode the image data to.</param>
/// <param name="frame">The image frame to encode from.</param>
/// <param name="bounds">The region of interest within the frame to encode.</param>
/// <param name="frameMetadata">The frame metadata.</param>
/// <param name="hasAnimation">Flag indicating, if an animation parameter is present.</param>
/// <param name="image">The <see cref="Image{TPixel}"/> to encode from.</param>
private void Encode<TPixel>(ImageFrame<TPixel> frame, Stream stream, bool hasAnimation, Image<TPixel> image)
/// <param name="image">The image to encode from.</param>
private void Encode<TPixel>(Stream stream, ImageFrame<TPixel> frame, Rectangle bounds, WebpFrameMetadata frameMetadata, bool hasAnimation, Image<TPixel> image)
where TPixel : unmanaged, IPixel<TPixel>
{
int width = frame.Width;
int height = frame.Height;
int width = bounds.Width;
int height = bounds.Height;
int pixelCount = width * height;
Span<byte> y = this.Y.GetSpan();
Span<byte> u = this.U.GetSpan();
Span<byte> v = this.V.GetSpan();
bool hasAlpha = YuvConversion.ConvertRgbToYuv(frame, this.configuration, this.memoryAllocator, y, u, v);
Buffer2DRegion<TPixel> pixels = frame.PixelBuffer.GetRegion(bounds);
bool hasAlpha = YuvConversion.ConvertRgbToYuv(pixels, this.configuration, this.memoryAllocator, y, u, v);
if (!hasAnimation)
{
@ -456,7 +465,7 @@ internal class Vp8Encoder : IDisposable
{
// TODO: This can potentially run in an separate task.
encodedAlphaData = AlphaEncoder.EncodeAlpha(
frame,
pixels,
this.configuration,
this.memoryAllocator,
this.skipMetadata,
@ -477,14 +486,11 @@ internal class Vp8Encoder : IDisposable
if (hasAnimation)
{
WebpFrameMetadata frameMetadata = frame.Metadata.GetWebpMetadata();
// TODO: If we can clip the indexed frame for transparent bounds we can set properties here.
prevPosition = new WebpFrameData(
0,
0,
(uint)frame.Width,
(uint)frame.Height,
(uint)bounds.X,
(uint)bounds.Y,
(uint)bounds.Width,
(uint)bounds.Height,
frameMetadata.FrameDelay,
frameMetadata.BlendMethod,
frameMetadata.DisposalMethod)

15
src/ImageSharp/Formats/Webp/Lossy/YuvConversion.cs

@ -259,7 +259,7 @@ internal static class YuvConversion
}
/// <summary>
/// Converts the RGB values of the image to YUV.
/// Converts the pixel values of the image to YUV.
/// </summary>
/// <typeparam name="TPixel">The pixel type of the image.</typeparam>
/// <param name="frame">The frame to convert.</param>
@ -269,12 +269,11 @@ internal static class YuvConversion
/// <param name="u">Span to store the u component of the image.</param>
/// <param name="v">Span to store the v component of the image.</param>
/// <returns>true, if the image contains alpha data.</returns>
public static bool ConvertRgbToYuv<TPixel>(ImageFrame<TPixel> frame, Configuration configuration, MemoryAllocator memoryAllocator, Span<byte> y, Span<byte> u, Span<byte> v)
public static bool ConvertRgbToYuv<TPixel>(Buffer2DRegion<TPixel> frame, Configuration configuration, MemoryAllocator memoryAllocator, Span<byte> y, Span<byte> u, Span<byte> v)
where TPixel : unmanaged, IPixel<TPixel>
{
Buffer2D<TPixel> imageBuffer = frame.PixelBuffer;
int width = imageBuffer.Width;
int height = imageBuffer.Height;
int width = frame.Width;
int height = frame.Height;
int uvWidth = (width + 1) >> 1;
// Temporary storage for accumulated R/G/B values during conversion to U/V.
@ -289,8 +288,8 @@ internal static class YuvConversion
bool hasAlpha = false;
for (rowIndex = 0; rowIndex < height - 1; rowIndex += 2)
{
Span<TPixel> rowSpan = imageBuffer.DangerousGetRowSpan(rowIndex);
Span<TPixel> nextRowSpan = imageBuffer.DangerousGetRowSpan(rowIndex + 1);
Span<TPixel> rowSpan = frame.DangerousGetRowSpan(rowIndex);
Span<TPixel> nextRowSpan = frame.DangerousGetRowSpan(rowIndex + 1);
PixelOperations<TPixel>.Instance.ToBgra32(configuration, rowSpan, bgraRow0);
PixelOperations<TPixel>.Instance.ToBgra32(configuration, nextRowSpan, bgraRow1);
@ -320,7 +319,7 @@ internal static class YuvConversion
// Extra last row.
if ((height & 1) != 0)
{
Span<TPixel> rowSpan = imageBuffer.DangerousGetRowSpan(rowIndex);
Span<TPixel> rowSpan = frame.DangerousGetRowSpan(rowIndex);
PixelOperations<TPixel>.Instance.ToBgra32(configuration, rowSpan, bgraRow0);
ConvertRgbaToY(bgraRow0, y[(rowIndex * width)..], width);

48
src/ImageSharp/Formats/Webp/MetadataExtensions.cs

@ -1,6 +1,8 @@
// Copyright (c) Six Labors.
// Licensed under the Six Labors Split License.
using System.Diagnostics.CodeAnalysis;
using SixLabors.ImageSharp.Formats;
using SixLabors.ImageSharp.Formats.Webp;
using SixLabors.ImageSharp.Metadata;
@ -18,10 +20,56 @@ public static partial class MetadataExtensions
/// <returns>The <see cref="WebpMetadata"/>.</returns>
public static WebpMetadata GetWebpMetadata(this ImageMetadata metadata) => metadata.GetFormatMetadata(WebpFormat.Instance);
/// <summary>
/// Gets the webp format specific metadata for the image.
/// </summary>
/// <param name="source">The metadata this method extends.</param>
/// <param name="metadata">The metadata.</param>
/// <returns>
/// <see langword="true"/> if the webp metadata exists; otherwise, <see langword="false"/>.
/// </returns>
public static bool TryGetWebpMetadata(this ImageMetadata source, [NotNullWhen(true)] out WebpMetadata? metadata)
=> source.TryGetFormatMetadata(WebpFormat.Instance, out metadata);
/// <summary>
/// Gets the webp format specific metadata for the image frame.
/// </summary>
/// <param name="metadata">The metadata this method extends.</param>
/// <returns>The <see cref="WebpFrameMetadata"/>.</returns>
public static WebpFrameMetadata GetWebpMetadata(this ImageFrameMetadata metadata) => metadata.GetFormatMetadata(WebpFormat.Instance);
/// <summary>
/// Gets the webp format specific metadata for the image frame.
/// </summary>
/// <param name="source">The metadata this method extends.</param>
/// <param name="metadata">The metadata.</param>
/// <returns>
/// <see langword="true"/> if the webp frame metadata exists; otherwise, <see langword="false"/>.
/// </returns>
public static bool TryGetWebpFrameMetadata(this ImageFrameMetadata source, [NotNullWhen(true)] out WebpFrameMetadata? metadata)
=> source.TryGetFormatMetadata(WebpFormat.Instance, out metadata);
internal static AnimatedImageMetadata ToAnimatedImageMetadata(this WebpMetadata source)
=> new()
{
ColorTableMode = FrameColorTableMode.Global,
RepeatCount = source.RepeatCount,
BackgroundColor = source.BackgroundColor
};
internal static AnimatedImageFrameMetadata ToAnimatedImageFrameMetadata(this WebpFrameMetadata source)
=> new()
{
ColorTableMode = FrameColorTableMode.Global,
Duration = TimeSpan.FromMilliseconds(source.FrameDelay),
DisposalMode = GetMode(source.DisposalMethod),
BlendMode = source.BlendMethod == WebpBlendMethod.Over ? FrameBlendMode.Over : FrameBlendMode.Source,
};
private static FrameDisposalMode GetMode(WebpDisposalMethod method) => method switch
{
WebpDisposalMethod.RestoreToBackground => FrameDisposalMode.RestoreToBackground,
WebpDisposalMethod.DoNotDispose => FrameDisposalMode.DoNotDispose,
_ => FrameDisposalMode.DoNotDispose,
};
}

8
src/ImageSharp/Formats/Webp/WebpAnimationDecoder.cs

@ -89,7 +89,7 @@ internal class WebpAnimationDecoder : IDisposable
this.metadata = new ImageMetadata();
this.webpMetadata = this.metadata.GetWebpMetadata();
this.webpMetadata.AnimationLoopCount = features.AnimationLoopCount;
this.webpMetadata.RepeatCount = features.AnimationLoopCount;
Span<byte> buffer = stackalloc byte[4];
uint frameCount = 0;
@ -195,14 +195,14 @@ internal class WebpAnimationDecoder : IDisposable
Rectangle regionRectangle = frameData.Bounds;
if (frameData.DisposalMethod is WebpDisposalMethod.Dispose)
if (frameData.DisposalMethod is WebpDisposalMethod.RestoreToBackground)
{
this.RestoreToBackground(imageFrame, backgroundColor);
}
using Buffer2D<TPixel> decodedImageFrame = this.DecodeImageFrameData<TPixel>(frameData, webpInfo);
bool blend = previousFrame != null && frameData.BlendingMethod == WebpBlendingMethod.AlphaBlending;
bool blend = previousFrame != null && frameData.BlendingMethod == WebpBlendMethod.Over;
DrawDecodedImageFrameOnCanvas(decodedImageFrame, imageFrame, regionRectangle, blend);
previousFrame = currentFrame ?? image.Frames.RootFrame;
@ -253,7 +253,7 @@ internal class WebpAnimationDecoder : IDisposable
private Buffer2D<TPixel> DecodeImageFrameData<TPixel>(WebpFrameData frameData, WebpImageInfo webpInfo)
where TPixel : unmanaged, IPixel<TPixel>
{
ImageFrame<TPixel> decodedFrame = new(Configuration.Default, (int)frameData.Width, (int)frameData.Height);
ImageFrame<TPixel> decodedFrame = new(this.configuration, (int)frameData.Width, (int)frameData.Height);
try
{

16
src/ImageSharp/Formats/Webp/WebpBlendingMethod.cs → src/ImageSharp/Formats/Webp/WebpBlendMethod.cs

@ -1,4 +1,4 @@
// Copyright (c) Six Labors.
// Copyright (c) Six Labors.
// Licensed under the Six Labors Split License.
namespace SixLabors.ImageSharp.Formats.Webp;
@ -6,17 +6,17 @@ namespace SixLabors.ImageSharp.Formats.Webp;
/// <summary>
/// Indicates how transparent pixels of the current frame are to be blended with corresponding pixels of the previous canvas.
/// </summary>
public enum WebpBlendingMethod
public enum WebpBlendMethod
{
/// <summary>
/// Use alpha blending. After disposing of the previous frame, render the current frame on the canvas using alpha-blending.
/// If the current frame does not have an alpha channel, assume alpha value of 255, effectively replacing the rectangle.
/// Do not blend. After disposing of the previous frame,
/// render the current frame on the canvas by overwriting the rectangle covered by the current frame.
/// </summary>
AlphaBlending = 0,
Source = 0,
/// <summary>
/// Do not blend. After disposing of the previous frame,
/// render the current frame on the canvas by overwriting the rectangle covered by the current frame.
/// Use alpha blending. After disposing of the previous frame, render the current frame on the canvas using alpha-blending.
/// If the current frame does not have an alpha channel, assume alpha value of 255, effectively replacing the rectangle.
/// </summary>
DoNotBlend = 1
Over = 1,
}

56
src/ImageSharp/Formats/Webp/WebpCommonUtils.cs

@ -4,6 +4,8 @@
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
using SixLabors.ImageSharp.Formats.Gif;
using SixLabors.ImageSharp.Formats.Png;
using SixLabors.ImageSharp.PixelFormats;
namespace SixLabors.ImageSharp.Formats.Webp;
@ -13,6 +15,54 @@ namespace SixLabors.ImageSharp.Formats.Webp;
/// </summary>
internal static class WebpCommonUtils
{
public static WebpMetadata GetWebpMetadata<TPixel>(Image<TPixel> image)
where TPixel : unmanaged, IPixel<TPixel>
{
if (image.Metadata.TryGetWebpMetadata(out WebpMetadata? webp))
{
return (WebpMetadata)webp.DeepClone();
}
if (image.Metadata.TryGetGifMetadata(out GifMetadata? gif))
{
AnimatedImageMetadata ani = gif.ToAnimatedImageMetadata();
return WebpMetadata.FromAnimatedMetadata(ani);
}
if (image.Metadata.TryGetPngMetadata(out PngMetadata? png))
{
AnimatedImageMetadata ani = png.ToAnimatedImageMetadata();
return WebpMetadata.FromAnimatedMetadata(ani);
}
// Return explicit new instance so we do not mutate the original metadata.
return new();
}
public static WebpFrameMetadata GetWebpFrameMetadata<TPixel>(ImageFrame<TPixel> frame)
where TPixel : unmanaged, IPixel<TPixel>
{
if (frame.Metadata.TryGetWebpFrameMetadata(out WebpFrameMetadata? webp))
{
return (WebpFrameMetadata)webp.DeepClone();
}
if (frame.Metadata.TryGetGifMetadata(out GifFrameMetadata? gif))
{
AnimatedImageFrameMetadata ani = gif.ToAnimatedImageFrameMetadata();
return WebpFrameMetadata.FromAnimatedMetadata(ani);
}
if (frame.Metadata.TryGetPngMetadata(out PngFrameMetadata? png))
{
AnimatedImageFrameMetadata ani = png.ToAnimatedImageFrameMetadata();
return WebpFrameMetadata.FromAnimatedMetadata(ani);
}
// Return explicit new instance so we do not mutate the original metadata.
return new();
}
/// <summary>
/// Checks if the pixel row is not opaque.
/// </summary>
@ -27,7 +77,7 @@ internal static class WebpCommonUtils
int length = (row.Length * 4) - 3;
fixed (byte* src = rowBytes)
{
var alphaMaskVector256 = Vector256.Create(0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255);
Vector256<byte> alphaMaskVector256 = Vector256.Create(0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255);
Vector256<byte> all0x80Vector256 = Vector256.Create((byte)0x80).AsByte();
for (; i + 128 <= length; i += 128)
@ -124,7 +174,7 @@ internal static class WebpCommonUtils
private static unsafe bool IsNoneOpaque64Bytes(byte* src, int i)
{
var alphaMask = Vector128.Create(0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255);
Vector128<byte> alphaMask = Vector128.Create(0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255);
Vector128<byte> a0 = Sse2.LoadVector128(src + i).AsByte();
Vector128<byte> a1 = Sse2.LoadVector128(src + i + 16).AsByte();
@ -144,7 +194,7 @@ internal static class WebpCommonUtils
private static unsafe bool IsNoneOpaque32Bytes(byte* src, int i)
{
var alphaMask = Vector128.Create(0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255);
Vector128<byte> alphaMask = Vector128.Create(0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255, 0, 0, 0, 255);
Vector128<byte> a0 = Sse2.LoadVector128(src + i).AsByte();
Vector128<byte> a1 = Sse2.LoadVector128(src + i + 16).AsByte();

2
src/ImageSharp/Formats/Webp/WebpDisposalMethod.cs

@ -16,5 +16,5 @@ public enum WebpDisposalMethod
/// <summary>
/// Dispose to background color. Fill the rectangle on the canvas covered by the current frame with background color specified in the ANIM chunk.
/// </summary>
Dispose = 1
RestoreToBackground = 1
}

98
src/ImageSharp/Formats/Webp/WebpEncoderCore.cs

@ -123,12 +123,14 @@ internal sealed class WebpEncoderCore : IImageEncoderInternals
}
else
{
WebpMetadata webpMetadata = image.Metadata.GetWebpMetadata();
WebpMetadata webpMetadata = WebpCommonUtils.GetWebpMetadata(image);
lossless = webpMetadata.FileFormat == WebpFileFormatType.Lossless;
}
if (lossless)
{
bool hasAnimation = image.Frames.Count > 1;
using Vp8LEncoder encoder = new(
this.memoryAllocator,
this.configuration,
@ -141,17 +143,46 @@ internal sealed class WebpEncoderCore : IImageEncoderInternals
this.nearLossless,
this.nearLosslessQuality);
bool hasAnimation = image.Frames.Count > 1;
encoder.EncodeHeader(image, stream, hasAnimation);
// Encode the first frame.
ImageFrame<TPixel> previousFrame = image.Frames.RootFrame;
WebpFrameMetadata frameMetadata = WebpCommonUtils.GetWebpFrameMetadata(previousFrame);
encoder.Encode(previousFrame, previousFrame.Bounds(), frameMetadata, stream, hasAnimation);
if (hasAnimation)
{
foreach (ImageFrame<TPixel> imageFrame in image.Frames)
WebpDisposalMethod previousDisposal = frameMetadata.DisposalMethod;
// Encode additional frames
// This frame is reused to store de-duplicated pixel buffers.
using ImageFrame<TPixel> encodingFrame = new(image.Configuration, previousFrame.Size());
for (int i = 1; i < image.Frames.Count; i++)
{
using Vp8LEncoder enc = new(
ImageFrame<TPixel>? prev = previousDisposal == WebpDisposalMethod.RestoreToBackground ? null : previousFrame;
ImageFrame<TPixel> currentFrame = image.Frames[i];
ImageFrame<TPixel>? nextFrame = i < image.Frames.Count - 1 ? image.Frames[i + 1] : null;
frameMetadata = WebpCommonUtils.GetWebpFrameMetadata(currentFrame);
bool blend = frameMetadata.BlendMethod == WebpBlendMethod.Over;
(bool difference, Rectangle bounds) =
AnimationUtilities.DeDuplicatePixels(
image.Configuration,
prev,
currentFrame,
nextFrame,
encodingFrame,
Color.Transparent,
blend,
ClampingMode.Even);
using Vp8LEncoder animatedEncoder = new(
this.memoryAllocator,
this.configuration,
image.Width,
image.Height,
bounds.Width,
bounds.Height,
this.quality,
this.skipMetadata,
this.method,
@@ -159,13 +190,12 @@ internal sealed class WebpEncoderCore : IImageEncoderInternals
this.nearLossless,
this.nearLosslessQuality);
enc.Encode(imageFrame, stream, true);
animatedEncoder.Encode(encodingFrame, bounds, frameMetadata, stream, hasAnimation);
previousFrame = currentFrame;
previousDisposal = frameMetadata.DisposalMethod;
}
}
else
{
encoder.Encode(image.Frames.RootFrame, stream, false);
}
encoder.EncodeFooter(image, stream);
}
@@ -183,17 +213,48 @@ internal sealed class WebpEncoderCore : IImageEncoderInternals
this.filterStrength,
this.spatialNoiseShaping,
this.alphaCompression);
if (image.Frames.Count > 1)
{
// TODO: What about alpha here?
encoder.EncodeHeader(image, stream, false, true);
foreach (ImageFrame<TPixel> imageFrame in image.Frames)
// Encode the first frame.
ImageFrame<TPixel> previousFrame = image.Frames.RootFrame;
WebpFrameMetadata frameMetadata = WebpCommonUtils.GetWebpFrameMetadata(previousFrame);
WebpDisposalMethod previousDisposal = frameMetadata.DisposalMethod;
encoder.EncodeAnimation(previousFrame, stream, previousFrame.Bounds(), frameMetadata);
// Encode additional frames
// This frame is reused to store de-duplicated pixel buffers.
using ImageFrame<TPixel> encodingFrame = new(image.Configuration, previousFrame.Size());
for (int i = 1; i < image.Frames.Count; i++)
{
using Vp8Encoder enc = new(
ImageFrame<TPixel>? prev = previousDisposal == WebpDisposalMethod.RestoreToBackground ? null : previousFrame;
ImageFrame<TPixel> currentFrame = image.Frames[i];
ImageFrame<TPixel>? nextFrame = i < image.Frames.Count - 1 ? image.Frames[i + 1] : null;
frameMetadata = WebpCommonUtils.GetWebpFrameMetadata(currentFrame);
bool blend = frameMetadata.BlendMethod == WebpBlendMethod.Over;
(bool difference, Rectangle bounds) =
AnimationUtilities.DeDuplicatePixels(
image.Configuration,
prev,
currentFrame,
nextFrame,
encodingFrame,
Color.Transparent,
blend,
ClampingMode.Even);
using Vp8Encoder animatedEncoder = new(
this.memoryAllocator,
this.configuration,
image.Width,
image.Height,
bounds.Width,
bounds.Height,
this.quality,
this.skipMetadata,
this.method,
@@ -202,12 +263,15 @@ internal sealed class WebpEncoderCore : IImageEncoderInternals
this.spatialNoiseShaping,
this.alphaCompression);
enc.EncodeAnimation(imageFrame, stream);
animatedEncoder.EncodeAnimation(encodingFrame, stream, bounds, frameMetadata);
previousFrame = currentFrame;
previousDisposal = frameMetadata.DisposalMethod;
}
}
else
{
encoder.EncodeStatic(image, stream);
encoder.EncodeStatic(stream, image);
}
encoder.EncodeFooter(image, stream);

10
src/ImageSharp/Formats/Webp/WebpFrameMetadata.cs

@@ -29,7 +29,7 @@ public class WebpFrameMetadata : IDeepCloneable
/// <summary>
/// Gets or sets how transparent pixels of the current frame are to be blended with corresponding pixels of the previous canvas.
/// </summary>
public WebpBlendingMethod BlendMethod { get; set; }
public WebpBlendMethod BlendMethod { get; set; }
/// <summary>
/// Gets or sets how the current frame is to be treated after it has been displayed (before rendering the next frame) on the canvas.
@@ -44,4 +44,12 @@ public class WebpFrameMetadata : IDeepCloneable
/// <inheritdoc/>
public IDeepCloneable DeepClone() => new WebpFrameMetadata(this);
internal static WebpFrameMetadata FromAnimatedMetadata(AnimatedImageFrameMetadata metadata)
=> new()
{
FrameDelay = (uint)metadata.Duration.Milliseconds,
BlendMethod = metadata.BlendMode == FrameBlendMode.Source ? WebpBlendMethod.Source : WebpBlendMethod.Over,
DisposalMethod = metadata.DisposalMode == FrameDisposalMode.RestoreToBackground ? WebpDisposalMethod.RestoreToBackground : WebpDisposalMethod.DoNotDispose
};
}

22
src/ImageSharp/Formats/Webp/WebpMetadata.cs

@@ -22,8 +22,8 @@ public class WebpMetadata : IDeepCloneable
private WebpMetadata(WebpMetadata other)
{
this.FileFormat = other.FileFormat;
this.AnimationLoopCount = other.AnimationLoopCount;
this.AnimationBackground = other.AnimationBackground;
this.RepeatCount = other.RepeatCount;
this.BackgroundColor = other.BackgroundColor;
}
/// <summary>
@@ -34,16 +34,24 @@ public class WebpMetadata : IDeepCloneable
/// <summary>
/// Gets or sets the loop count. The number of times to loop the animation. 0 means infinitely.
/// </summary>
public ushort AnimationLoopCount { get; set; } = 1;
public ushort RepeatCount { get; set; } = 1;
/// <summary>
/// Gets or sets the default background color of the canvas in [Blue, Green, Red, Alpha] byte order.
/// This color MAY be used to fill the unused space on the canvas around the frames,
/// Gets or sets the default background color of the canvas when animating.
/// This color may be used to fill the unused space on the canvas around the frames,
/// as well as the transparent pixels of the first frame.
/// The background color is also used when the Disposal method is 1.
/// The background color is also used when the Disposal method is <see cref="WebpDisposalMethod.RestoreToBackground"/>.
/// </summary>
public Color AnimationBackground { get; set; }
public Color BackgroundColor { get; set; }
/// <inheritdoc/>
public IDeepCloneable DeepClone() => new WebpMetadata(this);
internal static WebpMetadata FromAnimatedMetadata(AnimatedImageMetadata metadata)
=> new()
{
FileFormat = WebpFileFormatType.Lossless,
BackgroundColor = metadata.BackgroundColor,
RepeatCount = metadata.RepeatCount
};
}

20
src/ImageSharp/Metadata/FrameDecodingMode.cs

@@ -1,20 +0,0 @@
// Copyright (c) Six Labors.
// Licensed under the Six Labors Split License.
namespace SixLabors.ImageSharp.Metadata;
/// <summary>
/// Enumerated frame process modes to apply to multi-frame images.
/// </summary>
public enum FrameDecodingMode
{
/// <summary>
/// Decodes all the frames of a multi-frame image.
/// </summary>
All,
/// <summary>
/// Decodes only the first frame of a multi-frame image.
/// </summary>
First
}

26
src/ImageSharp/Metadata/ImageMetadata.cs

@@ -183,6 +183,32 @@ public sealed class ImageMetadata : IDeepCloneable<ImageMetadata>
return newMeta;
}
/// <summary>
/// Gets the metadata value associated with the specified key.
/// </summary>
/// <typeparam name="TFormatMetadata">The type of format metadata.</typeparam>
/// <param name="key">The key of the value to get.</param>
/// <param name="metadata">
/// When this method returns, contains the metadata associated with the specified key,
/// if the key is found; otherwise, the default value for the type of the metadata parameter.
/// This parameter is passed uninitialized.
/// </param>
/// <returns>
/// <see langword="true"/> if the frame metadata exists for the specified key; otherwise, <see langword="false"/>.
/// </returns>
public bool TryGetFormatMetadata<TFormatMetadata>(IImageFormat<TFormatMetadata> key, out TFormatMetadata? metadata)
where TFormatMetadata : class, IDeepCloneable
{
if (this.formatMetadata.TryGetValue(key, out IDeepCloneable? meta))
{
metadata = (TFormatMetadata)meta;
return true;
}
metadata = default;
return false;
}
/// <inheritdoc/>
public ImageMetadata DeepClone() => new(this);

14
src/ImageSharp/Processing/Processors/Dithering/ErrorDither.cs

@@ -107,15 +107,15 @@ public readonly partial struct ErrorDither : IDither, IEquatable<ErrorDither>, I
float scale = quantizer.Options.DitherScale;
Buffer2D<TPixel> sourceBuffer = source.PixelBuffer;
for (int y = bounds.Top; y < bounds.Bottom; y++)
for (int y = 0; y < destination.Height; y++)
{
ref TPixel sourceRowRef = ref MemoryMarshal.GetReference(sourceBuffer.DangerousGetRowSpan(y));
ref byte destinationRowRef = ref MemoryMarshal.GetReference(destination.GetWritablePixelRowSpanUnsafe(y - offsetY));
ReadOnlySpan<TPixel> sourceRow = sourceBuffer.DangerousGetRowSpan(y + offsetY);
Span<byte> destinationRow = destination.GetWritablePixelRowSpanUnsafe(y);
for (int x = bounds.Left; x < bounds.Right; x++)
for (int x = 0; x < destinationRow.Length; x++)
{
TPixel sourcePixel = Unsafe.Add(ref sourceRowRef, (uint)x);
Unsafe.Add(ref destinationRowRef, (uint)(x - offsetX)) = quantizer.GetQuantizedColor(sourcePixel, out TPixel transformed);
TPixel sourcePixel = sourceRow[x + offsetX];
destinationRow[x] = quantizer.GetQuantizedColor(sourcePixel, out TPixel transformed);
this.Dither(source, bounds, sourcePixel, transformed, x, y, scale);
}
}
@@ -200,7 +200,7 @@ public readonly partial struct ErrorDither : IDither, IEquatable<ErrorDither>, I
}
ref TPixel pixel = ref rowSpan[targetX];
var result = pixel.ToVector4();
Vector4 result = pixel.ToVector4();
result += error * coefficient;
pixel.FromVector4(result);

85
src/ImageSharp/Processing/Processors/Quantization/EuclideanPixelMap{TPixel}.cs

@@ -2,7 +2,6 @@
// Licensed under the Six Labors Split License.
using System.Buffers;
using System.Numerics;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using SixLabors.ImageSharp.Memory;
@@ -23,6 +22,7 @@ internal sealed class EuclideanPixelMap<TPixel> : IDisposable
{
private Rgba32[] rgbaPalette;
private int transparentIndex;
private readonly TPixel transparentMatch;
/// <summary>
/// Do not make this readonly! Struct value would be always copied on non-readonly method calls.
@@ -54,8 +54,9 @@ internal sealed class EuclideanPixelMap<TPixel> : IDisposable
this.cache = new ColorDistanceCache(configuration.MemoryAllocator);
PixelOperations<TPixel>.Instance.ToRgba32(configuration, this.Palette.Span, this.rgbaPalette);
// If the provided transparentIndex is outside of the palette, silently ignore it.
this.transparentIndex = transparentIndex < this.Palette.Length ? transparentIndex : -1;
this.transparentIndex = transparentIndex;
Unsafe.SkipInit(out this.transparentMatch);
this.transparentMatch.FromRgba32(default);
}
/// <summary>
@@ -97,32 +98,40 @@ internal sealed class EuclideanPixelMap<TPixel> : IDisposable
this.Palette = palette;
this.rgbaPalette = new Rgba32[palette.Length];
PixelOperations<TPixel>.Instance.ToRgba32(this.configuration, this.Palette.Span, this.rgbaPalette);
this.transparentIndex = -1;
this.cache.Clear();
}
/// <summary>
/// Allows setting the transparent index after construction. If the provided transparentIndex is outside of the palette, silently ignore it.
/// Allows setting the transparent index after construction.
/// </summary>
/// <param name="index">An explicit index at which to match transparent pixels.</param>
public void SetTransparentIndex(int index) => this.transparentIndex = index < this.Palette.Length ? index : -1;
public void SetTransparentIndex(int index)
{
if (index != this.transparentIndex)
{
this.cache.Clear();
}
this.transparentIndex = index;
}
[MethodImpl(InliningOptions.ShortMethod)]
private int GetClosestColorSlow(Rgba32 rgba, ref TPixel paletteRef, out TPixel match)
{
// Loop through the palette and find the nearest match.
int index = 0;
float leastDistance = float.MaxValue;
if (this.transparentIndex >= 0 && rgba == default)
{
// We have explicit instructions. No need to search.
index = this.transparentIndex;
DebugGuard.MustBeLessThan(index, this.Palette.Length, nameof(index));
this.cache.Add(rgba, (byte)index);
match = Unsafe.Add(ref paletteRef, (uint)index);
match = this.transparentMatch;
return index;
}
float leastDistance = float.MaxValue;
for (int i = 0; i < this.rgbaPalette.Length; i++)
{
Rgba32 candidate = this.rgbaPalette[i];
@@ -175,18 +184,24 @@ internal sealed class EuclideanPixelMap<TPixel> : IDisposable
/// The granularity of the cache has been determined based upon the current
/// suite of test images and provides the lowest possible memory usage while
/// providing enough match accuracy.
/// Entry count is currently limited to 2335905 entries (4671810 bytes ~4.45MB).
/// Entry count is currently limited to 2335905 entries (4MB).
/// </para>
/// </remarks>
private unsafe struct ColorDistanceCache : IDisposable
{
private const int IndexBits = 5;
private const int IndexAlphaBits = 6;
private const int IndexCount = (1 << IndexBits) + 1;
private const int IndexAlphaCount = (1 << IndexAlphaBits) + 1;
private const int RgbShift = 8 - IndexBits;
private const int AlphaShift = 8 - IndexAlphaBits;
private const int Entries = IndexCount * IndexCount * IndexCount * IndexAlphaCount;
private const int IndexRBits = 5;
private const int IndexGBits = 5;
private const int IndexBBits = 5;
private const int IndexABits = 6;
private const int IndexRCount = (1 << IndexRBits) + 1;
private const int IndexGCount = (1 << IndexGBits) + 1;
private const int IndexBCount = (1 << IndexBBits) + 1;
private const int IndexACount = (1 << IndexABits) + 1;
private const int RShift = 8 - IndexRBits;
private const int GShift = 8 - IndexGBits;
private const int BShift = 8 - IndexBBits;
private const int AShift = 8 - IndexABits;
private const int Entries = IndexRCount * IndexGCount * IndexBCount * IndexACount;
private MemoryHandle tableHandle;
private readonly IMemoryOwner<short> table;
private readonly short* tablePointer;
@@ -200,24 +215,16 @@ internal sealed class EuclideanPixelMap<TPixel> : IDisposable
}
[MethodImpl(InliningOptions.ShortMethod)]
public void Add(Rgba32 rgba, byte index)
public readonly void Add(Rgba32 rgba, byte index)
{
int r = rgba.R >> RgbShift;
int g = rgba.G >> RgbShift;
int b = rgba.B >> RgbShift;
int a = rgba.A >> AlphaShift;
int idx = GetPaletteIndex(r, g, b, a);
int idx = GetPaletteIndex(rgba);
this.tablePointer[idx] = index;
}
[MethodImpl(InliningOptions.ShortMethod)]
public bool TryGetValue(Rgba32 rgba, out short match)
public readonly bool TryGetValue(Rgba32 rgba, out short match)
{
int r = rgba.R >> RgbShift;
int g = rgba.G >> RgbShift;
int b = rgba.B >> RgbShift;
int a = rgba.A >> AlphaShift;
int idx = GetPaletteIndex(r, g, b, a);
int idx = GetPaletteIndex(rgba);
match = this.tablePointer[idx];
return match > -1;
}
@@ -226,18 +233,20 @@ internal sealed class EuclideanPixelMap<TPixel> : IDisposable
/// Clears the cache resetting each entry to empty.
/// </summary>
[MethodImpl(InliningOptions.ShortMethod)]
public void Clear() => this.table.GetSpan().Fill(-1);
public readonly void Clear() => this.table.GetSpan().Fill(-1);
[MethodImpl(InliningOptions.ShortMethod)]
private static int GetPaletteIndex(int r, int g, int b, int a)
=> (r << ((IndexBits << 1) + IndexAlphaBits))
+ (r << (IndexBits + IndexAlphaBits + 1))
+ (g << (IndexBits + IndexAlphaBits))
+ (r << (IndexBits << 1))
+ (r << (IndexBits + 1))
+ (g << IndexBits)
+ ((r + g + b) << IndexAlphaBits)
+ r + g + b + a;
private static int GetPaletteIndex(Rgba32 rgba)
{
int rIndex = rgba.R >> RShift;
int gIndex = rgba.G >> GShift;
int bIndex = rgba.B >> BShift;
int aIndex = rgba.A >> AShift;
return (aIndex * (IndexRCount * IndexGCount * IndexBCount)) +
(rIndex * (IndexGCount * IndexBCount)) +
(gIndex * IndexBCount) + bIndex;
}
public void Dispose()
{

4
src/ImageSharp/Processing/Processors/Quantization/QuantizerUtilities.cs

@@ -156,10 +156,10 @@ public static class QuantizerUtilities
for (int y = 0; y < destination.Height; y++)
{
Span<TPixel> sourceRow = sourceBuffer.DangerousGetRowSpan(y + offsetY);
ReadOnlySpan<TPixel> sourceRow = sourceBuffer.DangerousGetRowSpan(y + offsetY);
Span<byte> destinationRow = destination.GetWritablePixelRowSpanUnsafe(y);
for (int x = 0; x < destination.Width; x++)
for (int x = 0; x < destinationRow.Length; x++)
{
destinationRow[x] = Unsafe.AsRef(quantizer).GetQuantizedColor(sourceRow[x + offsetX], out TPixel _);
}

14
tests/ImageSharp.Tests/Formats/GeneralFormatTests.cs

@@ -162,37 +162,37 @@ public class GeneralFormatTests
foreach (TestFile file in Files)
{
using Image<Rgba32> image = file.CreateRgba32Image();
using (FileStream output = File.OpenWrite(Path.Combine(path, $"{file.FileNameWithoutExtension}.bmp")))
using (FileStream output = File.Create(Path.Combine(path, $"{file.FileNameWithoutExtension}.bmp")))
{
image.SaveAsBmp(output);
}
using (FileStream output = File.OpenWrite(Path.Combine(path, $"{file.FileNameWithoutExtension}.jpg")))
using (FileStream output = File.Create(Path.Combine(path, $"{file.FileNameWithoutExtension}.jpg")))
{
image.SaveAsJpeg(output);
}
using (FileStream output = File.OpenWrite(Path.Combine(path, $"{file.FileNameWithoutExtension}.pbm")))
using (FileStream output = File.Create(Path.Combine(path, $"{file.FileNameWithoutExtension}.pbm")))
{
image.SaveAsPbm(output);
}
using (FileStream output = File.OpenWrite(Path.Combine(path, $"{file.FileNameWithoutExtension}.png")))
using (FileStream output = File.Create(Path.Combine(path, $"{file.FileNameWithoutExtension}.png")))
{
image.SaveAsPng(output);
}
using (FileStream output = File.OpenWrite(Path.Combine(path, $"{file.FileNameWithoutExtension}.gif")))
using (FileStream output = File.Create(Path.Combine(path, $"{file.FileNameWithoutExtension}.gif")))
{
image.SaveAsGif(output);
}
using (FileStream output = File.OpenWrite(Path.Combine(path, $"{file.FileNameWithoutExtension}.tga")))
using (FileStream output = File.Create(Path.Combine(path, $"{file.FileNameWithoutExtension}.tga")))
{
image.SaveAsTga(output);
}
using (FileStream output = File.OpenWrite(Path.Combine(path, $"{file.FileNameWithoutExtension}.tiff")))
using (FileStream output = File.Create(Path.Combine(path, $"{file.FileNameWithoutExtension}.tiff")))
{
image.SaveAsTiff(output);
}

15
tests/ImageSharp.Tests/Formats/Gif/GifDecoderTests.cs

@@ -101,9 +101,9 @@ public class GifDecoderTests
}
[Theory]
[WithFile(TestImages.Gif.Cheers, PixelTypes.Rgba32, 93)]
[WithFile(TestImages.Gif.M4nb, PixelTypes.Rgba32, 5)]
[WithFile(TestImages.Gif.Rings, PixelTypes.Rgba32, 1)]
[WithFile(TestImages.Gif.Issues.BadDescriptorWidth, PixelTypes.Rgba32, 36)]
[WithFile(TestImages.Gif.MixedDisposal, PixelTypes.Rgba32, 11)]
public void Decode_VerifyRootFrameAndFrameCount<TPixel>(TestImageProvider<TPixel> provider, int expectedFrameCount)
where TPixel : unmanaged, IPixel<TPixel>
{
@@ -133,7 +133,6 @@ public class GifDecoderTests
}
[Theory]
[InlineData(TestImages.Gif.Cheers, 8)]
[InlineData(TestImages.Gif.Giphy, 8)]
[InlineData(TestImages.Gif.Rings, 8)]
[InlineData(TestImages.Gif.Trans, 8)]
@@ -194,7 +193,7 @@ public class GifDecoderTests
}
}
// https://github.com/SixLabors/ImageSharp/issues/1503
// https://github.com/SixLabors/ImageSharp/issues/1530
[Theory]
[WithFile(TestImages.Gif.Issues.Issue1530, PixelTypes.Rgba32)]
public void Issue1530_BadDescriptorDimensions<TPixel>(TestImageProvider<TPixel> provider)
@@ -212,7 +211,7 @@ public class GifDecoderTests
public void Issue405_BadApplicationExtensionBlockLength<TPixel>(TestImageProvider<TPixel> provider)
where TPixel : unmanaged, IPixel<TPixel>
{
using Image<TPixel> image = provider.GetImage();
using Image<TPixel> image = provider.GetImage(GifDecoder.Instance, new() { MaxFrames = 1 });
image.DebugSave(provider);
image.CompareFirstFrameToReferenceOutput(ImageComparer.Exact, provider);
@@ -224,7 +223,7 @@ public class GifDecoderTests
public void Issue1668_InvalidColorIndex<TPixel>(TestImageProvider<TPixel> provider)
where TPixel : unmanaged, IPixel<TPixel>
{
using Image<TPixel> image = provider.GetImage();
using Image<TPixel> image = provider.GetImage(GifDecoder.Instance, new() { MaxFrames = 1 });
image.DebugSave(provider);
image.CompareFirstFrameToReferenceOutput(ImageComparer.Exact, provider);
@@ -273,7 +272,7 @@ public class GifDecoderTests
public void Issue1962<TPixel>(TestImageProvider<TPixel> provider)
where TPixel : unmanaged, IPixel<TPixel>
{
using Image<TPixel> image = provider.GetImage();
using Image<TPixel> image = provider.GetImage(GifDecoder.Instance, new() { MaxFrames = 1 });
image.DebugSave(provider);
image.CompareFirstFrameToReferenceOutput(ImageComparer.Exact, provider);
@@ -285,7 +284,7 @@ public class GifDecoderTests
public void Issue2012EmptyXmp<TPixel>(TestImageProvider<TPixel> provider)
where TPixel : unmanaged, IPixel<TPixel>
{
using Image<TPixel> image = provider.GetImage();
using Image<TPixel> image = provider.GetImage(GifDecoder.Instance, new() { MaxFrames = 1 });
image.DebugSave(provider);
image.CompareFirstFrameToReferenceOutput(ImageComparer.Exact, provider);

117
tests/ImageSharp.Tests/Formats/Gif/GifEncoderTests.cs

@@ -2,6 +2,8 @@
// Licensed under the Six Labors Split License.
using SixLabors.ImageSharp.Formats.Gif;
using SixLabors.ImageSharp.Formats.Png;
using SixLabors.ImageSharp.Formats.Webp;
using SixLabors.ImageSharp.Metadata;
using SixLabors.ImageSharp.PixelFormats;
using SixLabors.ImageSharp.Processing.Processors.Quantization;
@@ -224,8 +226,6 @@ public class GifEncoderTests
}
Assert.Equal(iMeta.FrameDelay, cMeta.FrameDelay);
Assert.Equal(iMeta.HasTransparency, cMeta.HasTransparency);
Assert.Equal(iMeta.TransparencyIndex, cMeta.TransparencyIndex);
}
image.Dispose();
@ -269,4 +269,117 @@ public class GifEncoderTests
Assert.Equal(image2.Frames.Count, count);
}
[Theory]
[WithFile(TestImages.Png.APng, PixelTypes.Rgba32)]
public void Encode_AnimatedFormatTransform_FromPng<TPixel>(TestImageProvider<TPixel> provider)
where TPixel : unmanaged, IPixel<TPixel>
{
if (TestEnvironment.RunsOnCI && !TestEnvironment.IsWindows)
{
return;
}
using Image<TPixel> image = provider.GetImage(PngDecoder.Instance);
using MemoryStream memStream = new();
image.Save(memStream, new GifEncoder());
memStream.Position = 0;
using Image<TPixel> output = Image.Load<TPixel>(memStream);
// TODO: Find a better way to compare.
// The image has been visually checked but the quantization and frame trimming pattern used in the gif encoder
// means we cannot use an exact comparison nor replicate using the quantizing processor.
ImageComparer.TolerantPercentage(1.51f).VerifySimilarity(output, image);
PngMetadata png = image.Metadata.GetPngMetadata();
GifMetadata gif = output.Metadata.GetGifMetadata();
Assert.Equal(png.RepeatCount, gif.RepeatCount);
for (int i = 0; i < image.Frames.Count; i++)
{
PngFrameMetadata pngF = image.Frames[i].Metadata.GetPngMetadata();
GifFrameMetadata gifF = output.Frames[i].Metadata.GetGifMetadata();
Assert.Equal((int)(pngF.FrameDelay.ToDouble() * 100), gifF.FrameDelay);
switch (pngF.DisposalMethod)
{
case PngDisposalMethod.RestoreToBackground:
Assert.Equal(GifDisposalMethod.RestoreToBackground, gifF.DisposalMethod);
break;
case PngDisposalMethod.DoNotDispose:
default:
Assert.Equal(GifDisposalMethod.NotDispose, gifF.DisposalMethod);
break;
}
}
}
[Theory]
[WithFile(TestImages.Webp.Lossless.Animated, PixelTypes.Rgba32)]
public void Encode_AnimatedFormatTransform_FromWebp<TPixel>(TestImageProvider<TPixel> provider)
where TPixel : unmanaged, IPixel<TPixel>
{
if (TestEnvironment.RunsOnCI && !TestEnvironment.IsWindows)
{
return;
}
using Image<TPixel> image = provider.GetImage(WebpDecoder.Instance);
using MemoryStream memStream = new();
image.Save(memStream, new GifEncoder());
memStream.Position = 0;
using Image<TPixel> output = Image.Load<TPixel>(memStream);
image.Save(provider.Utility.GetTestOutputFileName("gif"), new GifEncoder());
// TODO: Find a better way to compare.
// The image has been visually checked but the quantization and frame trimming pattern used in the gif encoder
// means we cannot use an exact comparison nor replicate using the quantizing processor.
ImageComparer.TolerantPercentage(0.776f).VerifySimilarity(output, image);
WebpMetadata webp = image.Metadata.GetWebpMetadata();
GifMetadata gif = output.Metadata.GetGifMetadata();
Assert.Equal(webp.RepeatCount, gif.RepeatCount);
for (int i = 0; i < image.Frames.Count; i++)
{
WebpFrameMetadata webpF = image.Frames[i].Metadata.GetWebpMetadata();
GifFrameMetadata gifF = output.Frames[i].Metadata.GetGifMetadata();
Assert.Equal(webpF.FrameDelay, (uint)(gifF.FrameDelay * 10));
switch (webpF.DisposalMethod)
{
case WebpDisposalMethod.RestoreToBackground:
Assert.Equal(GifDisposalMethod.RestoreToBackground, gifF.DisposalMethod);
break;
case WebpDisposalMethod.DoNotDispose:
default:
Assert.Equal(GifDisposalMethod.NotDispose, gifF.DisposalMethod);
break;
}
}
}
public static string[] Animated => TestImages.Gif.Animated;
[Theory(Skip = "Enable for visual animated testing")]
[WithFileCollection(nameof(Animated), PixelTypes.Rgba32)]
public void Encode_Animated_VisualTest<TPixel>(TestImageProvider<TPixel> provider)
where TPixel : unmanaged, IPixel<TPixel>
{
using Image<TPixel> image = provider.GetImage();
provider.Utility.SaveTestOutputFile(image, "webp", new WebpEncoder() { FileFormat = WebpFileFormatType.Lossless }, "animated");
provider.Utility.SaveTestOutputFile(image, "webp", new WebpEncoder() { FileFormat = WebpFileFormatType.Lossy }, "animated-lossy");
provider.Utility.SaveTestOutputFile(image, "png", new PngEncoder(), "animated");
provider.Utility.SaveTestOutputFile(image, "gif", new GifEncoder(), "animated");
}
}

113
tests/ImageSharp.Tests/Formats/Png/PngEncoderTests.cs

@@ -3,7 +3,9 @@
// ReSharper disable InconsistentNaming
using SixLabors.ImageSharp.Formats;
using SixLabors.ImageSharp.Formats.Gif;
using SixLabors.ImageSharp.Formats.Png;
using SixLabors.ImageSharp.Formats.Webp;
using SixLabors.ImageSharp.Metadata;
using SixLabors.ImageSharp.PixelFormats;
using SixLabors.ImageSharp.Processing.Processors.Quantization;
@@ -467,8 +469,8 @@ public partial class PngEncoderTests
for (int i = 0; i < image.Frames.Count; i++)
{
PngFrameMetadata originalFrameMetadata = image.Frames[i].Metadata.GetPngFrameMetadata();
PngFrameMetadata outputFrameMetadata = output.Frames[i].Metadata.GetPngFrameMetadata();
PngFrameMetadata originalFrameMetadata = image.Frames[i].Metadata.GetPngMetadata();
PngFrameMetadata outputFrameMetadata = output.Frames[i].Metadata.GetPngMetadata();
Assert.Equal(originalFrameMetadata.FrameDelay, outputFrameMetadata.FrameDelay);
Assert.Equal(originalFrameMetadata.BlendMethod, outputFrameMetadata.BlendMethod);
@ -476,6 +478,113 @@ public partial class PngEncoderTests
}
}
[Theory]
[WithFile(TestImages.Gif.Leo, PixelTypes.Rgba32)]
public void Encode_AnimatedFormatTransform_FromGif<TPixel>(TestImageProvider<TPixel> provider)
where TPixel : unmanaged, IPixel<TPixel>
{
if (TestEnvironment.RunsOnCI && !TestEnvironment.IsWindows)
{
return;
}
using Image<TPixel> image = provider.GetImage(GifDecoder.Instance);
using MemoryStream memStream = new();
image.Save(memStream, PngEncoder);
memStream.Position = 0;
using Image<TPixel> output = Image.Load<TPixel>(memStream);
// TODO: Find a better way to compare.
// The image has been visually checked but the quantization pattern used in the png encoder
// means we cannot use an exact comparison nor replicate using the quantizing processor.
ImageComparer.TolerantPercentage(0.46f).VerifySimilarity(output, image);
GifMetadata gif = image.Metadata.GetGifMetadata();
PngMetadata png = output.Metadata.GetPngMetadata();
Assert.Equal(gif.RepeatCount, png.RepeatCount);
for (int i = 0; i < image.Frames.Count; i++)
{
GifFrameMetadata gifF = image.Frames[i].Metadata.GetGifMetadata();
PngFrameMetadata pngF = output.Frames[i].Metadata.GetPngMetadata();
Assert.Equal(gifF.FrameDelay, (int)(pngF.FrameDelay.ToDouble() * 100));
switch (gifF.DisposalMethod)
{
case GifDisposalMethod.RestoreToBackground:
Assert.Equal(PngDisposalMethod.RestoreToBackground, pngF.DisposalMethod);
break;
case GifDisposalMethod.RestoreToPrevious:
Assert.Equal(PngDisposalMethod.RestoreToPrevious, pngF.DisposalMethod);
break;
case GifDisposalMethod.Unspecified:
case GifDisposalMethod.NotDispose:
default:
Assert.Equal(PngDisposalMethod.DoNotDispose, pngF.DisposalMethod);
break;
}
}
}
[Theory]
[WithFile(TestImages.Webp.Lossless.Animated, PixelTypes.Rgba32)]
public void Encode_AnimatedFormatTransform_FromWebp<TPixel>(TestImageProvider<TPixel> provider)
where TPixel : unmanaged, IPixel<TPixel>
{
if (TestEnvironment.RunsOnCI && !TestEnvironment.IsWindows)
{
return;
}
using Image<TPixel> image = provider.GetImage(WebpDecoder.Instance);
using MemoryStream memStream = new();
image.Save(memStream, PngEncoder);
memStream.Position = 0;
using Image<TPixel> output = Image.Load<TPixel>(memStream);
ImageComparer.Exact.VerifySimilarity(output, image);
WebpMetadata webp = image.Metadata.GetWebpMetadata();
PngMetadata png = output.Metadata.GetPngMetadata();
Assert.Equal(webp.RepeatCount, png.RepeatCount);
for (int i = 0; i < image.Frames.Count; i++)
{
WebpFrameMetadata webpF = image.Frames[i].Metadata.GetWebpMetadata();
PngFrameMetadata pngF = output.Frames[i].Metadata.GetPngMetadata();
Assert.Equal(webpF.FrameDelay, (uint)(pngF.FrameDelay.ToDouble() * 1000));
switch (webpF.BlendMethod)
{
case WebpBlendMethod.Source:
Assert.Equal(PngBlendMethod.Source, pngF.BlendMethod);
break;
case WebpBlendMethod.Over:
default:
Assert.Equal(PngBlendMethod.Over, pngF.BlendMethod);
break;
}
switch (webpF.DisposalMethod)
{
case WebpDisposalMethod.RestoreToBackground:
Assert.Equal(PngDisposalMethod.RestoreToBackground, pngF.DisposalMethod);
break;
case WebpDisposalMethod.DoNotDispose:
default:
Assert.Equal(PngDisposalMethod.DoNotDispose, pngF.DisposalMethod);
break;
}
}
}
[Theory]
[MemberData(nameof(PngTrnsFiles))]
public void Encode_PreserveTrns(string imagePath, PngBitDepth pngBitDepth, PngColorType pngColorType)

4
tests/ImageSharp.Tests/Formats/Png/PngFrameMetadataTests.cs

@@ -14,7 +14,7 @@ public class PngFrameMetadataTests
PngFrameMetadata meta = new()
{
FrameDelay = new(1, 0),
DisposalMethod = PngDisposalMethod.Background,
DisposalMethod = PngDisposalMethod.RestoreToBackground,
BlendMethod = PngBlendMethod.Over,
};
@ -25,7 +25,7 @@ public class PngFrameMetadataTests
Assert.True(meta.BlendMethod.Equals(clone.BlendMethod));
clone.FrameDelay = new(2, 1);
clone.DisposalMethod = PngDisposalMethod.Previous;
clone.DisposalMethod = PngDisposalMethod.RestoreToPrevious;
clone.BlendMethod = PngBlendMethod.Source;
Assert.False(meta.FrameDelay.Equals(clone.FrameDelay));

4
tests/ImageSharp.Tests/Formats/WebP/WebpDecoderTests.cs

@@ -307,7 +307,7 @@ public class WebpDecoderTests
image.DebugSaveMultiFrame(provider);
image.CompareToReferenceOutputMultiFrame(provider, ImageComparer.Exact);
Assert.Equal(0, webpMetaData.AnimationLoopCount);
Assert.Equal(0, webpMetaData.RepeatCount);
Assert.Equal(150U, frameMetaData.FrameDelay);
Assert.Equal(12, image.Frames.Count);
}
@ -324,7 +324,7 @@ public class WebpDecoderTests
image.DebugSaveMultiFrame(provider);
image.CompareToReferenceOutputMultiFrame(provider, ImageComparer.Tolerant(0.04f));
Assert.Equal(0, webpMetaData.AnimationLoopCount);
Assert.Equal(0, webpMetaData.RepeatCount);
Assert.Equal(150U, frameMetaData.FrameDelay);
Assert.Equal(12, image.Frames.Count);
}

107
tests/ImageSharp.Tests/Formats/WebP/WebpEncoderTests.cs

@@ -2,6 +2,8 @@
// Licensed under the Six Labors Split License.
using System.Runtime.InteropServices;
using SixLabors.ImageSharp.Formats.Gif;
using SixLabors.ImageSharp.Formats.Png;
using SixLabors.ImageSharp.Formats.Webp;
using SixLabors.ImageSharp.Metadata;
using SixLabors.ImageSharp.PixelFormats;
@@ -60,6 +62,111 @@ public class WebpEncoderTests
encoded.CompareToReferenceOutput(ImageComparer.Tolerant(0.01f), provider, null, "webp");
}
[Theory]
[WithFile(TestImages.Gif.Leo, PixelTypes.Rgba32)]
public void Encode_AnimatedFormatTransform_FromGif<TPixel>(TestImageProvider<TPixel> provider)
where TPixel : unmanaged, IPixel<TPixel>
{
if (TestEnvironment.RunsOnCI && !TestEnvironment.IsWindows)
{
return;
}
using Image<TPixel> image = provider.GetImage(GifDecoder.Instance);
using MemoryStream memStream = new();
image.Save(memStream, new WebpEncoder());
memStream.Position = 0;
using Image<TPixel> output = Image.Load<TPixel>(memStream);
ImageComparer.Exact.VerifySimilarity(output, image);
GifMetadata gif = image.Metadata.GetGifMetadata();
WebpMetadata webp = output.Metadata.GetWebpMetadata();
Assert.Equal(gif.RepeatCount, webp.RepeatCount);
for (int i = 0; i < image.Frames.Count; i++)
{
GifFrameMetadata gifF = image.Frames[i].Metadata.GetGifMetadata();
WebpFrameMetadata webpF = output.Frames[i].Metadata.GetWebpMetadata();
Assert.Equal(gifF.FrameDelay, (int)(webpF.FrameDelay / 10));
switch (gifF.DisposalMethod)
{
case GifDisposalMethod.RestoreToBackground:
Assert.Equal(WebpDisposalMethod.RestoreToBackground, webpF.DisposalMethod);
break;
case GifDisposalMethod.RestoreToPrevious:
case GifDisposalMethod.Unspecified:
case GifDisposalMethod.NotDispose:
default:
Assert.Equal(WebpDisposalMethod.DoNotDispose, webpF.DisposalMethod);
break;
}
}
}
[Theory]
[WithFile(TestImages.Png.APng, PixelTypes.Rgba32)]
public void Encode_AnimatedFormatTransform_FromPng<TPixel>(TestImageProvider<TPixel> provider)
where TPixel : unmanaged, IPixel<TPixel>
{
if (TestEnvironment.RunsOnCI && !TestEnvironment.IsWindows)
{
return;
}
using Image<TPixel> image = provider.GetImage(PngDecoder.Instance);
using MemoryStream memStream = new();
image.Save(memStream, new WebpEncoder());
memStream.Position = 0;
provider.Utility.SaveTestOutputFile(image, "gif", new GifEncoder());
provider.Utility.SaveTestOutputFile(image, "png", new PngEncoder());
provider.Utility.SaveTestOutputFile(image, "webp", new WebpEncoder());
using Image<TPixel> output = Image.Load<TPixel>(memStream);
ImageComparer.Exact.VerifySimilarity(output, image);
PngMetadata png = image.Metadata.GetPngMetadata();
WebpMetadata webp = output.Metadata.GetWebpMetadata();
Assert.Equal(png.RepeatCount, webp.RepeatCount);
for (int i = 0; i < image.Frames.Count; i++)
{
PngFrameMetadata pngF = image.Frames[i].Metadata.GetPngMetadata();
WebpFrameMetadata webpF = output.Frames[i].Metadata.GetWebpMetadata();
Assert.Equal((uint)(pngF.FrameDelay.ToDouble() * 1000), webpF.FrameDelay);
switch (pngF.BlendMethod)
{
case PngBlendMethod.Source:
Assert.Equal(WebpBlendMethod.Source, webpF.BlendMethod);
break;
case PngBlendMethod.Over:
default:
Assert.Equal(WebpBlendMethod.Over, webpF.BlendMethod);
break;
}
switch (pngF.DisposalMethod)
{
case PngDisposalMethod.RestoreToBackground:
Assert.Equal(WebpDisposalMethod.RestoreToBackground, webpF.DisposalMethod);
break;
case PngDisposalMethod.DoNotDispose:
default:
Assert.Equal(WebpDisposalMethod.DoNotDispose, webpF.DisposalMethod);
break;
}
}
}
[Theory]
[WithFile(Flag, PixelTypes.Rgba32, WebpFileFormatType.Lossy)] // If its not a webp input image, it should default to lossy.
[WithFile(Lossless.NoTransform1, PixelTypes.Rgba32, WebpFileFormatType.Lossless)]

5
tests/ImageSharp.Tests/Formats/WebP/YuvConversionTests.cs

@ -1,7 +1,6 @@
// Copyright (c) Six Labors.
// Licensed under the Six Labors Split License.
using SixLabors.ImageSharp.Advanced;
using SixLabors.ImageSharp.Formats.Webp;
using SixLabors.ImageSharp.Formats.Webp.Lossy;
using SixLabors.ImageSharp.Memory;
@ -143,7 +142,7 @@ public class YuvConversionTests
};
// act
YuvConversion.ConvertRgbToYuv(image.Frames.RootFrame, config, memoryAllocator, y, u, v);
YuvConversion.ConvertRgbToYuv(image.Frames.RootFrame.PixelBuffer.GetRegion(), config, memoryAllocator, y, u, v);
// assert
Assert.True(expectedY.AsSpan().SequenceEqual(y));
@ -249,7 +248,7 @@ public class YuvConversionTests
};
// act
YuvConversion.ConvertRgbToYuv(image.Frames.RootFrame, config, memoryAllocator, y, u, v);
YuvConversion.ConvertRgbToYuv(image.Frames.RootFrame.PixelBuffer.GetRegion(), config, memoryAllocator, y, u, v);
// assert
Assert.True(expectedY.AsSpan().SequenceEqual(y));

22
tests/ImageSharp.Tests/TestImages.cs

@ -483,6 +483,8 @@ public static class TestImages
public const string Ratio1x4 = "Gif/base_1x4.gif";
public const string LargeComment = "Gif/large_comment.gif";
public const string GlobalQuantizationTest = "Gif/GlobalQuantizationTest.gif";
public const string MixedDisposal = "Gif/mixed-disposal.gif";
public const string M4nb = "Gif/m4nb.gif";
// Test images from https://github.com/robert-ancell/pygif/tree/master/test-suite
public const string ZeroSize = "Gif/image-zero-size.gif";
@ -512,7 +514,25 @@ public static class TestImages
public const string Issue2198 = "Gif/issues/issue_2198.gif";
}
public static readonly string[] All = { Rings, Giphy, Cheers, Trans, Kumin, Leo, Ratio4x1, Ratio1x4 };
public static readonly string[] Animated =
{
M4nb,
Giphy,
Cheers,
Kumin,
Leo,
MixedDisposal,
GlobalQuantizationTest,
Issues.Issue2198,
Issues.Issue2288_A,
Issues.Issue2288_B,
Issues.Issue2288_C,
Issues.Issue2288_D,
Issues.Issue2450_A,
Issues.Issue2450_B,
Issues.BadDescriptorWidth,
Issues.Issue1530
};
}
public static class Tga

4
tests/ImageSharp.Tests/TestUtilities/ImagingTestCaseUtility.cs

@ -171,7 +171,7 @@ public class ImagingTestCaseUtility
encoder ??= TestEnvironment.GetReferenceEncoder(path);
using (FileStream stream = File.OpenWrite(path))
using (FileStream stream = File.Create(path))
{
image.Save(stream, encoder);
}
@ -227,7 +227,7 @@ public class ImagingTestCaseUtility
{
using Image<TPixel> frameImage = image.Frames.CloneFrame(file.Index);
string filePath = file.FileName;
using FileStream stream = File.OpenWrite(filePath);
using FileStream stream = File.Create(filePath);
frameImage.Save(stream, encoder);
}

1
tests/ImageSharp.Tests/TestUtilities/TestEnvironment.Formats.cs

@ -1,7 +1,6 @@
// Copyright (c) Six Labors.
// Licensed under the Six Labors Split License.
using System.Diagnostics.CodeAnalysis;
using SixLabors.ImageSharp.Formats;
using SixLabors.ImageSharp.Formats.Bmp;
using SixLabors.ImageSharp.Formats.Gif;

4
tests/Images/External/ReferenceOutput/DitherTests/ApplyDiffusionFilterInBox_Rgba32_CalliphoraPartial.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cafc426ac8e8d02a87f67c90e8c1976c5fae0e12b49deae52ad08476f7ed49a4
size 266391
oid sha256:681b0e36298cb702683fb9ffb2a82f7dfd9080b268db19a03f413809f69d0e07
size 273269

4
tests/Images/External/ReferenceOutput/DitherTests/ApplyDitherFilterInBox_Rgba32_CalliphoraPartial.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:98115a7087aced0c28cefa32a57bc72be245886cabeefc4ff7faf7984236218c
size 271226
oid sha256:a899a84c6af24bfad89f9fde75957c7a979d65bcf096ab667cb976efd71cb560
size 271171

4
tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_ShouldNotDependOnSinglePixelType_Bgra32_filter0.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0369747820c86bb692fc7b75f3519095c9b2a58a885ebd37c871c103d08405a0
size 720
oid sha256:f7c19df70d24948e1a36299705bb030715cf0d01b453d390989d472c0999d46a
size 728

4
tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_ShouldNotDependOnSinglePixelType_Rgb24_filter0.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0369747820c86bb692fc7b75f3519095c9b2a58a885ebd37c871c103d08405a0
size 720
oid sha256:f7c19df70d24948e1a36299705bb030715cf0d01b453d390989d472c0999d46a
size 728

4
tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_ShouldNotDependOnSinglePixelType_Rgba32_filter0.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0369747820c86bb692fc7b75f3519095c9b2a58a885ebd37c871c103d08405a0
size 720
oid sha256:f7c19df70d24948e1a36299705bb030715cf0d01b453d390989d472c0999d46a
size 728

4
tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_ShouldNotDependOnSinglePixelType_RgbaVector_filter0.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f63aebed17504ef50d96ac7e58dc41f5227a83a38810359ed8e9cecda137183b
size 720
oid sha256:0e7ece9d70c4fe0771abd43e4dbb33fb95f474ca56633dcb821022ee44e746d4
size 728

4
tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_Bike_Atkinson.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:471eaf2e532b40592c86dc816709d3ae4bbd64892006e00fd611ef6869d3b934
size 52070
oid sha256:38597c6144d61960d25c74d7a465b1cdf69b7c0804a6dec68128a6c953258313
size 52688

4
tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_Bike_Burks.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:91fb9966a4b3eaefd5533ddf0b98ec08fbf8cbc263e4ebd438895e6d4129dd03
size 61447
oid sha256:5f9191c71eea1f73aa4c55397ca26f240615c9c4a7fff9a05e6f2e046b5e4d8b
size 62323

4
tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_Bike_FloydSteinberg.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d74faa8d188a2915739de64ba9d71b2132b53c8d154db22510c524ae757578a5
size 61183
oid sha256:b63810145832db459bb7a6b37a028a7b778f6b6b4e6eae00e50e6e21c5a06086
size 62199

4
tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_Bike_JarvisJudiceNinke.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:080cc89d1d6568a2c9b707bf05428ab5febd2951e37223f96e349cc6646d32aa
size 56070
oid sha256:a67c14ef99a943706f050ff1ea0ef101429292d52bc14ed4610f8338736ff87e
size 56800

4
tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_Bike_Sierra2.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c7589986c1a762d52fe8ffc252e9938ff0e3a9e00b91ea7f5e36d4335b2b7870
size 58502
oid sha256:623dd82d372ba517b0d3357d06cffaf105d407a9090cbcbc6a76ae944ab33d67
size 59468

4
tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_Bike_Sierra3.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:934042746c3a9b652069da26b479e2be7cbdb17ab20e41c5e271013a76e96e46
size 58480
oid sha256:8edceef8e12c4f3d194523437045c5cf4e80c7bb95ff75f38c1f38a21872e3d0
size 59376

4
tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_Bike_SierraLite.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:03d5d5cbf1b2c0be736aa2bf726ad4bb04fca77aff393edb9663a7915a794264
size 62418
oid sha256:b1d7019e8cb170ae67496f8250446c4f6b6217378658408c3d51a95c49a4c3bc
size 63287

4
tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_Bike_StevensonArce.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:19a0d8667bfd01e18adbfca778e868ea7a6c43d427f9ae40eb4281d438ef509c
size 54464
oid sha256:d7c03ede7ab3bd4e57e6a63e53e2e8c771e938fdc7d5dfe5c9339a2c9907c9cf
size 55550

4
tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_Bike_Stucki.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:11c1056e013292e0543598f5690625b9bac0420a15fd1f37f6484daa3b8326fa
size 60074
oid sha256:79b690b91223d1fe7ddf1b8826b4474b89644822bc8aa9adee3cf819bc095b4c
size 60979

4
tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_CalliphoraPartial_Atkinson.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:3fcf9b7e4ee34e80e8811f94940aff09a5392c21019fc86b145d16fd9c6b1cd2
size 57501
oid sha256:7e22401dddf6552cd91517c1cdd142d3b9a66a7ad5c80d2e52ae07a7f583708e
size 57657

4
tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_CalliphoraPartial_Burks.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4f0d9a43d8a47e00f6e5932b57f99565370a7239496fdbe162fb774497c4ef2a
size 59377
oid sha256:819a0ce38e27e2adfa454d8c5ad5b24e818bf8954c9f2406f608dcecf506c2c4
size 59838

4
tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_CalliphoraPartial_FloydSteinberg.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d4a64da29f144d4d4c525ea45e56819e02a46030ae09542be01fdd8ffc85a295
size 60377
oid sha256:007ac609ec61b39c7bdd04bc87a698f5cdc76eadd834c1457f41eb9c135c3f7b
size 60688

4
tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_CalliphoraPartial_JarvisJudiceNinke.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:90fc8048141b2182e4851a48ac5a79c96210eab9e56468fe06f90e7e70a7c180
size 58539
oid sha256:46892c07e9a93f1df71f0e38b331a437fb9b7c52d8f40cf62780cb6bd35d3b13
size 58963

4
tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_CalliphoraPartial_Sierra2.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b312bd18eba03a37121bbcfb3b285f97fe22283b51256883ce0235bb8605b757
size 58616
oid sha256:1b83345ca3de8d1fc0fbb5d8e68329b94ad79fc29b9f10a1392a97ffe9a0733e
size 58985

4
tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_CalliphoraPartial_Sierra3.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:750ccd26984a4d5a370c1af6ca5dd1c9c5c6c66e693f7645130fd1669e3b7b4e
size 58923
oid sha256:c775a5b19ba09e1b335389e0dc12cb0c3feaff6072e904da750a676fcd6b07dc
size 59202

4
tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_CalliphoraPartial_SierraLite.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f9d3777a936883a2177a964f24d9ac86c8a106c375583bc9a8fbeb0ec39a7dc6
size 60610
oid sha256:6c88740c0553829eaa42ca751b34cc456623a84ccdff4020949a06ef4b4802d1
size 61137

4
tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_CalliphoraPartial_StevensonArce.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f638821c29d852d6fabe4cc4cfe802e386024835ad07ee496a7bec7a930e851b
size 57886
oid sha256:0a4a404b0767faac952435f768867cf7bf053848e1e3ef121624f136658a107c
size 58386

4
tests/Images/External/ReferenceOutput/DitherTests/DiffusionFilter_WorksWithAllErrorDiffusers_CalliphoraPartial_Stucki.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c6e86bfc1594ec4cb8f89a1c92a42778c59aa755ce170a97afb8cab3e623aa79
size 58376
oid sha256:8cc216ed952216d203836dc559234216614f1ed059651677cc0ea714010bd932
size 58855

4
tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_ShouldNotDependOnSinglePixelType_Bgra32_filter0.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f7e849620a297e29ba11014c54430db01d851e4192650f6e39e0410591244cb5
size 865
oid sha256:a3253003b088c9975725cf321c2fc827547a5feb199f2d1aa515c69bde59deb7
size 871

4
tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_ShouldNotDependOnSinglePixelType_Rgb24_filter0.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f7e849620a297e29ba11014c54430db01d851e4192650f6e39e0410591244cb5
size 865
oid sha256:bb3e3b9b3001e76505fb0e2db7ad200cad2a016c06f1993c60c3cab42c134863
size 867

4
tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_ShouldNotDependOnSinglePixelType_Rgba32_filter0.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f7e849620a297e29ba11014c54430db01d851e4192650f6e39e0410591244cb5
size 865
oid sha256:a3253003b088c9975725cf321c2fc827547a5feb199f2d1aa515c69bde59deb7
size 871

4
tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_ShouldNotDependOnSinglePixelType_RgbaVector_filter0.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f7e849620a297e29ba11014c54430db01d851e4192650f6e39e0410591244cb5
size 865
oid sha256:a3253003b088c9975725cf321c2fc827547a5feb199f2d1aa515c69bde59deb7
size 871

4
tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_WorksWithAllDitherers_Bike_Bayer16x16.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:58c03e354b108033873e2a4c0b043ce15919c4d0630e6ca72ff70b89cbedb979
size 44239
oid sha256:ca70bb0200776efd00c4ef7596d4e1f2f5fbc68e447b395b25ef2b3c732e5156
size 44189

4
tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_WorksWithAllDitherers_Bike_Bayer2x2.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f987f4d270568facefc11eee7f81dd156af56c26b69fe3a6d2d2e9818652befa
size 43116
oid sha256:8474b847b7d4a8f3e5c9793ca257ce46efcf49c473c731a9ca9c759851410b94
size 43066

4
tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_WorksWithAllDitherers_Bike_Bayer4x4.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ebdad83936e50bbb00fd74b7dd7d2f5a480bb7347aa3d151e7827107cd279bac
size 44441
oid sha256:20e80e7d9e68fd85bfbc63c61953327354b0634000ec142e01a42618995fd14c
size 44391

4
tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_WorksWithAllDitherers_Bike_Bayer8x8.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ccdf5937c30999e3b09071200de2e1db63b606ad9cbf6f7677a7499fb0b52963
size 44252
oid sha256:8af98bfcc5edef3f3ff33ee8f76f33ce2906a6677167e2b29e1dbe63b00a78d8
size 44202

4
tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_WorksWithAllDitherers_Bike_Ordered3x3.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:baf70b732646d7c6cec60cfbe569ec673418dfb2dd0b5937bccfb91d9821d586
size 45053
oid sha256:b149ebbd550808ae46ff05b5ddcdb1fc0eb6ae0eacbe048e9a1ff24368d8f64d
size 45003

4
tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_WorksWithAllDitherers_CalliphoraPartial_Bayer16x16.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f6a1eae610ed730e4cec41693829929ba8db674886c2bd558f1b8893d2b76802
size 51201
oid sha256:9316cbbcb137ae6ff31646f6a5ba1d0aec100db4512509f7684187e74d16a111
size 51074

4
tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_WorksWithAllDitherers_CalliphoraPartial_Bayer2x2.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ba674e0236c2e146c64a7f3e224c702030769304cd0fd624d1989536da341659
size 52814
oid sha256:08c39a43993deadebab21f1d3504027b5910a52adc437c167d77d62e5f5db46e
size 52762

4
tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_WorksWithAllDitherers_CalliphoraPartial_Bayer4x4.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:316231c8d837f864cf62dcc79fdce698dc8c45c0327372de42c2b89eac1d9f81
size 51851
oid sha256:0c9c47fa755d603f8c148011511ee91f32444e0d94367f9db57593e3bf30f2e0
size 51808

4
tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_WorksWithAllDitherers_CalliphoraPartial_Bayer8x8.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b58144146585f50960dfd6ac5dc3f52238160287ae5f9b18c6796962cc3d2fd2
size 51550
oid sha256:6d2289ed4fa0c679f0f120d260fec8ab40b1599043cc0a1fbebc6b67e238ff87
size 51428

4
tests/Images/External/ReferenceOutput/DitherTests/DitherFilter_WorksWithAllDitherers_CalliphoraPartial_Ordered3x3.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6515be764d7026a87cfeea2d58344c404e4f15908139a25f413d51cc7cc61a0c
size 52216
oid sha256:366e84ab8587735455798651096d2af5f965fc325f4852dc68356e94600598b1
size 52176

3
tests/Images/External/ReferenceOutput/GifDecoderTests/Decode_VerifyRootFrameAndFrameCount_Rgba32_cheers.png

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b199a36f9e682a54c5d7c67f3403bba174b37e1a7a8412481f66c6d5eb0349e9
size 27679

3
tests/Images/External/ReferenceOutput/GifDecoderTests/Decode_VerifyRootFrameAndFrameCount_Rgba32_issue403_baddescriptorwidth.png

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4bedcf1a0ca0281dd5531d96c74e19c5d5fd379d6e2acb899077299917215705
size 1013

3
tests/Images/External/ReferenceOutput/GifDecoderTests/Decode_VerifyRootFrameAndFrameCount_Rgba32_m4nb.png

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:04239ee46f0276ba52566bcb2407f4a6fcee35b3f51a6182394f851e2d8df3fc
size 277

3
tests/Images/External/ReferenceOutput/GifDecoderTests/Decode_VerifyRootFrameAndFrameCount_Rgba32_mixed-disposal.png

@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0b654f948a26d256ff9e28ada399465bd6a4205aedaf93ea7cdffb70483535ef
size 2216

4
tests/Images/External/ReferenceOutput/PngEncoderTests/Issue2469_Quantized_Encode_Artifacts_Rgba32_issue_2469.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ef5a85b2adde25b5f343a18420fe787f5e159029a361a15ef2d6322eb7bb81fb
size 944597
oid sha256:1af50619f835b4470afac4553445176c121c3c9fa838dff937dcc56ae37941c3
size 945821

4
tests/Images/External/ReferenceOutput/QuantizerTests/ApplyQuantizationInBox_Bike_OctreeQuantizer_ErrorDither.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f4d36c8f7e5d5c0d798af5fb6bfad28ed0d628b880bea81efe0d54ac1fde86b2
size 265268
oid sha256:a51d04953c1c82d99884af62912d2271108c6bc62f18d4b32d0b5290c01fa7f7
size 247462

4
tests/Images/External/ReferenceOutput/QuantizerTests/ApplyQuantizationInBox_Bike_WebSafePaletteQuantizer_ErrorDither.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:531594a2e47710545d96d0fd9a8cc86983233420172e0ced28df050df1a5e039
size 239844
oid sha256:4f1462733e02d499b0d8c61ab835a27c7fee560fdc7fc521d20ec09bb4ccc80f
size 216030

4
tests/Images/External/ReferenceOutput/QuantizerTests/ApplyQuantizationInBox_Bike_WernerPaletteQuantizer_ErrorDither.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:620463c14de12abb4f2cab3ee6259ad8cbb24c688212729535f41ebf492a8836
size 224490
oid sha256:c68eba122814b5470e5f2e03e34190ff79e84e4b431ad8227355ce7ffcd4a6a7
size 220192

4
tests/Images/External/ReferenceOutput/QuantizerTests/ApplyQuantizationInBox_Bike_WuQuantizer_ErrorDither.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:274c3e57f186c47bb070dfd2a79b8353032f9d91d03a3ab9ecb3aec13fdd9855
size 273333
oid sha256:f4df5b1bc2c291ec1cf599580d198b447278412576ab998e099cc21110e82b3d
size 263152

4
tests/Images/External/ReferenceOutput/QuantizerTests/ApplyQuantizationInBox_CalliphoraPartial_OctreeQuantizer_ErrorDither.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fd007786164af8f410bd9738c0a072fc75d1f9b50145e5c191c9e3df345341a5
size 318778
oid sha256:f414473561bfa792c2e6342ff5e5dddffbdec5286932781b11a093803593b52a
size 313787

4
tests/Images/External/ReferenceOutput/QuantizerTests/ApplyQuantizationInBox_CalliphoraPartial_WebSafePaletteQuantizer_ErrorDither.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:08ace2edc5089a7742c1c8005126dcce850b1adf9c017b12db6892f4daeef1bb
size 271721
oid sha256:3a2aae04edebcaca9b95f30963201794887fa0eac954b64c68bfe529b14fa9be
size 269397

4
tests/Images/External/ReferenceOutput/QuantizerTests/ApplyQuantizationInBox_CalliphoraPartial_WernerPaletteQuantizer_ErrorDither.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6ac7cdcc2fbee0572a369bed29c51e1c9056a4f09c4e0750ecb34d65daf403d4
size 287741
oid sha256:752760327cc1416c171a920f1e0e95e34eae6d78bd0c7393a3be427bf3c8e55c
size 284481

4
tests/Images/External/ReferenceOutput/QuantizerTests/ApplyQuantizationInBox_CalliphoraPartial_WuQuantizer_ErrorDither.png

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:be7812accadc6347d6df43c308f7293596f92d0d90cf0b6a8e48fac1f4144fc0
size 320157
oid sha256:90a2b7b3872c6eb1f1f039558d9f6ace92891c86951c801da01ad55b055fd670
size 316544

Some files were not shown because too many files changed in this diff

Loading…
Cancel
Save