Browse Source

BackwardReferencesTraceBackwards

pull/1552/head
Brian Popow 6 years ago
parent
commit
affec6a9ab
  1. 2
      src/ImageSharp/Formats/WebP/BitWriter/Vp8LBitWriter.cs
  2. 218
      src/ImageSharp/Formats/WebP/Lossless/BackwardReferenceEncoder.cs
  3. 20
      src/ImageSharp/Formats/WebP/Lossless/CostCacheInterval.cs
  4. 19
      src/ImageSharp/Formats/WebP/Lossless/CostInterval.cs
  5. 250
      src/ImageSharp/Formats/WebP/Lossless/CostManager.cs
  6. 105
      src/ImageSharp/Formats/WebP/Lossless/CostModel.cs
  7. 18
      src/ImageSharp/Formats/WebP/Lossless/HistogramBinInfo.cs
  8. 350
      src/ImageSharp/Formats/WebP/Lossless/HistogramEncoder.cs
  9. 3
      src/ImageSharp/Formats/WebP/Lossless/HistogramPair.cs
  10. 17
      src/ImageSharp/Formats/WebP/Lossless/HuffmanUtils.cs
  11. 4
      src/ImageSharp/Formats/WebP/Lossless/PredictorEncoder.cs
  12. 18
      src/ImageSharp/Formats/WebP/Lossless/Vp8LBitEntropy.cs
  13. 21
      src/ImageSharp/Formats/WebP/Lossless/Vp8LHashChain.cs
  14. 354
      src/ImageSharp/Formats/WebP/Lossless/Vp8LHistogram.cs
  15. 3
      src/ImageSharp/Formats/WebP/Lossless/Vp8LStreaks.cs
  16. 46
      src/ImageSharp/Formats/WebP/WebPEncoderCore.cs

2
src/ImageSharp/Formats/WebP/BitWriter/Vp8LBitWriter.cs

@ -66,7 +66,7 @@ namespace SixLabors.ImageSharp.Formats.WebP.BitWriter
this.PutBitsFlushBits();
}
this.bits |= bits << this.used;
this.bits |= (ulong)bits << this.used;
this.used += nBits;
}
}

218
src/ImageSharp/Formats/WebP/Lossless/BackwardReferenceEncoder.cs

@ -3,6 +3,7 @@
using System;
using System.Collections.Generic;
using SixLabors.ImageSharp.Formats.WebP.Lossy;
namespace SixLabors.ImageSharp.Formats.WebP.Lossless
{
@ -43,7 +44,7 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
/// <summary>
/// We want the max value to be attainable and stored in MaxLengthBits bits.
/// </summary>
private const int MaxLength = (1 << MaxLengthBits) - 1;
public const int MaxLength = (1 << MaxLengthBits) - 1;
/// <summary>
/// Minimum number of pixels for which it is cheaper to encode a
@ -51,6 +52,7 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
/// </summary>
private const int MinLength = 4;
// TODO: move to Hashchain?
public static void HashChainFill(Vp8LHashChain p, Span<uint> bgra, int quality, int xSize, int ySize)
{
int size = xSize * ySize;
@ -230,7 +232,7 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
/// <summary>
/// Evaluates best possible backward references for specified quality.
/// The input cache_bits to 'VP8LGetBackwardReferences' sets the maximum cache
/// The input cacheBits to 'GetBackwardReferences' sets the maximum cache
/// bits to use (passing 0 implies disabling the local color cache).
/// The optimal cache bits is evaluated and set for the cacheBits parameter.
/// The return value is the pointer to the best of the two backward refs viz,
@ -313,6 +315,7 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
/// The input bestCacheBits sets the maximum cache bits to use (passing 0 implies disabling the local color cache).
/// The local color cache is also disabled for the lower (smaller then 25) quality.
/// </summary>
/// <returns>Best cache size.</returns>
private static int CalculateBestCacheSize(Span<uint> bgra, int quality, Vp8LBackwardRefs refs, int bestCacheBits)
{
int cacheBitsMax = (quality <= 25) ? 0 : bestCacheBits;
@ -328,7 +331,7 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
var histos = new Vp8LHistogram[WebPConstants.MaxColorCacheBits + 1];
for (int i = 0; i < WebPConstants.MaxColorCacheBits + 1; i++)
{
histos[i] = new Vp8LHistogram();
histos[i] = new Vp8LHistogram(bestCacheBits);
colorCache[i] = new ColorCache();
colorCache[i].Init(i);
}
@ -419,19 +422,210 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
{
int distArraySize = xSize * ySize;
var distArray = new short[distArraySize];
short[] chosenPath;
BackwardReferencesHashChainDistanceOnly(xSize, ySize, bgra, cacheBits, hashChain, refsSrc, distArray);
int chosenPathSize = TraceBackwards(distArray, distArraySize);
Span<short> chosenPath = distArray.AsSpan(distArraySize - chosenPathSize);
BackwardReferencesHashChainFollowChosenPath(bgra, cacheBits, chosenPath, chosenPathSize, hashChain, refsDst);
}
/// <summary>
/// Runs the dynamic-programming pass of the backward-reference search: for every pixel,
/// distArray[i] receives the length (1 for a literal) of the cheapest step that ends at pixel i,
/// using the cost manager to spread copy-interval contributions forward.
/// </summary>
/// <param name="xSize">Width of the image in pixels.</param>
/// <param name="ySize">Height of the image in pixels.</param>
/// <param name="bgra">The image pixels.</param>
/// <param name="cacheBits">Color cache size in bits; 0 disables the cache.</param>
/// <param name="hashChain">The hash chain used to find copy candidates.</param>
/// <param name="refs">Backward references used to build the cost model.</param>
/// <param name="distArray">Receives, per pixel, the length of the cheapest step ending there.</param>
private static void BackwardReferencesHashChainDistanceOnly(int xSize, int ySize, Span<uint> bgra, int cacheBits, Vp8LHashChain hashChain, Vp8LBackwardRefs refs, short[] distArray)
{
    int pixCount = xSize * ySize;
    bool useColorCache = cacheBits > 0;
    var literalArraySize = WebPConstants.NumLiteralCodes + WebPConstants.NumLengthCodes + ((cacheBits > 0) ? (1 << cacheBits) : 0);
    var costModel = new CostModel(literalArraySize);
    int offsetPrev = -1;
    int lenPrev = -1;
    double offsetCost = -1;
    int firstOffsetIsConstant = -1; // initialized with 'impossible' value
    int reach = 0;
    var colorCache = new ColorCache();
    if (useColorCache)
    {
        colorCache.Init(cacheBits);
    }

    costModel.Build(xSize, cacheBits, refs);
    var costManager = new CostManager(distArray, pixCount, costModel);

    // We loop one pixel at a time, but store all currently best points to
    // non-processed locations from this point.
    distArray[0] = 0;

    // Add first pixel as literal.
    AddSingleLiteralWithCostModel(bgra, colorCache, costModel, 0, useColorCache, 0.0f, costManager.Costs, distArray);

    for (int i = 1; i < pixCount; i++)
    {
        float prevCost = costManager.Costs[i - 1];
        int offset = hashChain.FindOffset(i);
        int len = hashChain.FindLength(i);

        // Try adding the pixel as a literal.
        AddSingleLiteralWithCostModel(bgra, colorCache, costModel, i, useColorCache, prevCost, costManager.Costs, distArray);

        // If we are dealing with a non-literal.
        if (len >= 2)
        {
            if (offset != offsetPrev)
            {
                int code = DistanceToPlaneCode(xSize, offset);
                offsetCost = costModel.GetDistanceCost(code);
                firstOffsetIsConstant = 1;
                costManager.PushInterval(prevCost + offsetCost, i, len);
            }
            else
            {
                // Instead of considering all contributions from a pixel i by calling:
                //   costManager.PushInterval(prevCost + offsetCost, i, len);
                // we optimize these contributions in case offsetCost stays the same
                // for consecutive pixels. This describes a set of pixels similar to a
                // previous set (e.g. constant color regions).
                if (firstOffsetIsConstant != 0)
                {
                    reach = i - 1 + lenPrev - 1;
                    firstOffsetIsConstant = 0;
                }

                if (i + len - 1 > reach)
                {
                    int offsetJ = 0;
                    int lenJ = 0;
                    int j;
                    for (j = i; j <= reach; ++j)
                    {
                        // Bug fix: probe the chain into offsetJ/lenJ. The previous code
                        // overwrote 'offset'/'len' here, so 'offsetJ != offset' compared
                        // against a stale 0 and 'lenJ' was pushed as 0 below.
                        offsetJ = hashChain.FindOffset(j + 1);
                        lenJ = hashChain.FindLength(j + 1);
                        if (offsetJ != offset)
                        {
                            offsetJ = hashChain.FindOffset(j);
                            lenJ = hashChain.FindLength(j);
                            break;
                        }
                    }

                    // Update the cost at j - 1 and j.
                    costManager.UpdateCostAtIndex(j - 1, false);
                    costManager.UpdateCostAtIndex(j, false);
                    costManager.PushInterval(costManager.Costs[j - 1] + offsetCost, j, lenJ);
                    reach = j + lenJ - 1;
                }
            }
        }

        costManager.UpdateCostAtIndex(i, true);
        offsetPrev = offset;
        lenPrev = len;
    }
}
/// <summary>
/// Extracts the chosen path from the distance array by walking it backwards from the last
/// pixel: each entry tells how many pixels the cheapest step covered. The path is written,
/// in forward order, into the tail of <paramref name="distArray"/> itself (no extra buffer).
/// </summary>
/// <param name="distArray">Per-pixel step lengths produced by the distance-only pass; its tail is overwritten with the chosen path.</param>
/// <param name="distArraySize">Number of valid entries in <paramref name="distArray"/>.</param>
/// <returns>The number of steps in the chosen path (stored at the end of <paramref name="distArray"/>).</returns>
private static int TraceBackwards(short[] distArray, int distArraySize)
{
    int chosenPathSize = 0;
    int pathPos = distArraySize;
    int curPos = distArraySize - 1;
    while (curPos >= 0)
    {
        short cur = distArray[curPos];
        pathPos--;
        chosenPathSize++;
        distArray[pathPos] = cur;

        // Jump back over the pixels covered by this step.
        curPos -= cur;
    }

    return chosenPathSize;
}
/// <summary>
/// Re-walks the image following the chosen path and emits the corresponding
/// backward references (literals, cache indices or copies) into <paramref name="backwardRefs"/>.
/// </summary>
/// <param name="bgra">The image pixels.</param>
/// <param name="cacheBits">Color cache size in bits; 0 disables the cache.</param>
/// <param name="chosenPath">Step lengths chosen by the trace-backwards pass.</param>
/// <param name="chosenPathSize">Number of valid entries in <paramref name="chosenPath"/>.</param>
/// <param name="hashChain">The hash chain used to recover copy offsets.</param>
/// <param name="backwardRefs">Receives the emitted references; cleared first.</param>
private static void BackwardReferencesHashChainFollowChosenPath(Span<uint> bgra, int cacheBits, Span<short> chosenPath, int chosenPathSize, Vp8LHashChain hashChain, Vp8LBackwardRefs backwardRefs)
{
    bool useColorCache = cacheBits > 0;
    var colorCache = new ColorCache();
    if (useColorCache)
    {
        colorCache.Init(cacheBits);
    }

    backwardRefs.Refs.Clear();
    int pixelPos = 0;
    for (int pathIdx = 0; pathIdx < chosenPathSize; pathIdx++)
    {
        int len = chosenPath[pathIdx];
        if (len == 1)
        {
            // Single pixel: emit either a cache index or a plain literal.
            uint color = bgra[pixelPos];
            int cacheIdx = useColorCache ? colorCache.Contains(color) : -1;
            PixOrCopy symbol;
            if (cacheIdx >= 0)
            {
                // useColorCache is true and the cache contains this color.
                symbol = PixOrCopy.CreateCacheIdx(cacheIdx);
            }
            else
            {
                if (useColorCache)
                {
                    colorCache.Insert(color);
                }

                symbol = PixOrCopy.CreateLiteral(color);
            }

            backwardRefs.Add(symbol);
            pixelPos++;
        }
        else
        {
            // A copy of 'len' pixels; the offset comes from the hash chain.
            int offset = hashChain.FindOffset(pixelPos);
            backwardRefs.Add(PixOrCopy.CreateCopy((uint)offset, (short)len));
            if (useColorCache)
            {
                // Keep the cache in sync with the pixels covered by the copy.
                for (int k = 0; k < len; k++)
                {
                    colorCache.Insert(bgra[pixelPos + k]);
                }
            }

            pixelPos += len;
        }
    }
}
/// <summary>
/// Considers encoding the pixel at <paramref name="idx"/> as a single literal (or cache hit)
/// and records it in the cost/distance arrays when that is cheaper than the current best.
/// </summary>
/// <param name="bgra">The image pixels.</param>
/// <param name="colorCache">The color cache (only consulted when <paramref name="useColorCache"/> is true).</param>
/// <param name="costModel">Provides literal and cache-index bit-cost estimates.</param>
/// <param name="idx">Index of the pixel under consideration.</param>
/// <param name="useColorCache">Whether the color cache is enabled.</param>
/// <param name="prevCost">Accumulated cost of reaching the previous pixel.</param>
/// <param name="cost">Per-pixel minimum costs, updated in place.</param>
/// <param name="distArray">Per-pixel step lengths, updated in place.</param>
private static void AddSingleLiteralWithCostModel(Span<uint> bgra, ColorCache colorCache, CostModel costModel, int idx, bool useColorCache, float prevCost, float[] cost, short[] distArray)
{
    uint color = bgra[idx];
    int cacheIdx = useColorCache ? colorCache.Contains(color) : -1;

    double candidateCost = prevCost;
    if (cacheIdx >= 0)
    {
        // Cache hit: cache indices are favored with a smaller empirical weight.
        const double CacheHitWeight = 0.68;
        candidateCost += costModel.GetCacheCost((uint)cacheIdx) * CacheHitWeight;
    }
    else
    {
        const double LiteralWeight = 0.82;
        if (useColorCache)
        {
            colorCache.Insert(color);
        }

        candidateCost += costModel.GetLiteralCost(color) * LiteralWeight;
    }

    if (cost[idx] > candidateCost)
    {
        cost[idx] = (float)candidateCost;
        distArray[idx] = 1; // only one is inserted.
    }
}
private static void BackwardReferencesLz77(int xSize, int ySize, Span<uint> bgra, int cacheBits, Vp8LHashChain hashChain, Vp8LBackwardRefs refs)
{
int iLastCheck = -1;
int ccInit = 0;
bool useColorCache = cacheBits > 0;
int pixCount = xSize * ySize;
var colorCache = new ColorCache();
@ -526,7 +720,7 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
{
if (bgra[i] == bgra[i + 1])
{
// Max out the counts to MAX_LENGTH.
// Max out the counts to MaxLength.
counts[countsPos] = counts[countsPos + 1];
if (counts[countsPos + 1] != MaxLength)
{
@ -540,7 +734,7 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
}
// Figure out the window offsets around a pixel. They are stored in a
// spiraling order around the pixel as defined by VP8LDistanceToPlaneCode.
// spiraling order around the pixel as defined by DistanceToPlaneCode.
for (int y = 0; y <= 6; y++)
{
for (int x = -6; x <= 6; x++)
@ -819,7 +1013,7 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
refs.Add(v);
}
private static int DistanceToPlaneCode(int xSize, int dist)
public static int DistanceToPlaneCode(int xSize, int dist)
{
int yOffset = dist / xSize;
int xOffset = dist - (yOffset * xSize);

20
src/ImageSharp/Formats/WebP/Lossless/CostCacheInterval.cs

@ -0,0 +1,20 @@
// Copyright (c) Six Labors and contributors.
// Licensed under the GNU Affero General Public License, Version 3.
using System.Diagnostics;
namespace SixLabors.ImageSharp.Formats.WebP.Lossless
{
/// <summary>
/// The GetLengthCost(costModel, k) are cached in a CostCacheInterval.
/// </summary>
[DebuggerDisplay("Start: {Start}, End: {End}, Cost: {Cost}")]
internal class CostCacheInterval
{
    /// <summary>
    /// Gets or sets the first index covered by this interval (inclusive).
    /// </summary>
    public int Start { get; set; }

    /// <summary>
    /// Gets or sets the index one past the last covered index (exclusive).
    /// </summary>
    public int End { get; set; }

    /// <summary>
    /// Gets or sets the cached GetLengthCost(costModel, k) value shared by the whole interval.
    /// </summary>
    public double Cost { get; set; }
}
}

19
src/ImageSharp/Formats/WebP/Lossless/CostInterval.cs

@ -0,0 +1,19 @@
// Copyright (c) Six Labors and contributors.
// Licensed under the GNU Affero General Public License, Version 3.
using System.Diagnostics;
namespace SixLabors.ImageSharp.Formats.WebP.Lossless
{
[DebuggerDisplay("Start: {Start}, End: {End}, Cost: {Cost}")]
internal class CostInterval
{
    /// <summary>
    /// Gets or sets the first pixel position covered by this interval.
    /// </summary>
    public int Start { get; set; }

    /// <summary>
    /// Gets or sets the pixel position one past the last covered position.
    /// </summary>
    public int End { get; set; }

    /// <summary>
    /// Gets or sets the cost contributed over the interval.
    /// </summary>
    public float Cost { get; set; }

    /// <summary>
    /// Gets or sets the position the interval originates from.
    /// </summary>
    public int Index { get; set; }
}
}

250
src/ImageSharp/Formats/WebP/Lossless/CostManager.cs

@ -0,0 +1,250 @@
// Copyright (c) Six Labors and contributors.
// Licensed under the GNU Affero General Public License, Version 3.
using System.Collections.Generic;
using System.Linq;
namespace SixLabors.ImageSharp.Formats.WebP.Lossless
{
/// <summary>
/// The CostManager is in charge of managing intervals and costs.
/// It caches the different CostCacheInterval, caches the different
/// GetLengthCost(cost_model, k) in cost_cache_ and the CostInterval's.
/// </summary>
internal class CostManager
{
    /// <summary>
    /// Initializes a new instance of the <see cref="CostManager"/> class: builds the
    /// length-cost cache, compresses it into constant-cost cache intervals, and primes
    /// every pixel cost with a sentinel so the first real candidate always wins.
    /// </summary>
    /// <param name="distArray">Per-pixel step lengths, written to as costs improve.</param>
    /// <param name="pixCount">Total number of pixels.</param>
    /// <param name="costModel">Provides the length bit-cost estimates to cache.</param>
    public CostManager(short[] distArray, int pixCount, CostModel costModel)
    {
        // Never cache more lengths than the longest possible copy.
        int costCacheSize = (pixCount > BackwardReferenceEncoder.MaxLength) ? BackwardReferenceEncoder.MaxLength : pixCount;
        this.Intervals = new List<CostInterval>();
        this.CacheIntervals = new List<CostCacheInterval>();
        this.CostCache = new List<double>();
        this.Costs = new float[pixCount];
        this.DistArray = distArray;
        this.Count = 0;

        // Fill in the cost cache.
        this.CacheIntervalsSize++;
        this.CostCache.Add(costModel.GetLengthCost(0));
        for (int i = 1; i < costCacheSize; i++)
        {
            this.CostCache.Add(costModel.GetLengthCost(i));

            // Get the number of bound intervals.
            if (this.CostCache[i] != this.CostCache[i - 1])
            {
                this.CacheIntervalsSize++;
            }
        }

        // Fill in the cache intervals: one interval per run of equal cost values.
        var cur = new CostCacheInterval()
        {
            Start = 0,
            End = 1,
            Cost = this.CostCache[0]
        };
        this.CacheIntervals.Add(cur);
        for (int i = 1; i < costCacheSize; i++)
        {
            double costVal = this.CostCache[i];
            if (costVal != cur.Cost)
            {
                cur = new CostCacheInterval()
                {
                    Start = i,
                    Cost = costVal
                };
                this.CacheIntervals.Add(cur);
            }

            // Extend the current run to cover index i (End is exclusive).
            cur.End = i + 1;
        }

        // Set the initial costs_ high for every pixel as we will keep the minimum.
        for (int i = 0; i < pixCount; i++)
        {
            this.Costs[i] = 1e38f;
        }
    }

    /// <summary>
    /// Gets the number of stored intervals.
    /// NOTE(review): initialized to 0 and never updated anywhere in this class — either
    /// maintain it when intervals are added/removed, or derive it from Intervals.Count.
    /// </summary>
    public int Count { get; }

    /// <summary>
    /// Gets the costs cache. Contains the GetLengthCost(costModel, k).
    /// </summary>
    public List<double> CostCache { get; }

    // Number of distinct constant-cost runs in CostCache (== CacheIntervals entries).
    public int CacheIntervalsSize { get; }

    // Per-pixel running minimum cost; initialized to 1e38f in the constructor.
    public float[] Costs { get; }

    // Per-pixel step length of the current cheapest path (shared with the caller).
    public short[] DistArray { get; }

    // Active cost intervals contributed by PushInterval.
    public List<CostInterval> Intervals { get; }

    // Constant-cost runs of the length-cost cache, built once in the constructor.
    public List<CostCacheInterval> CacheIntervals { get; }

    /// <summary>
    /// Update the cost at index i by going over all the stored intervals that overlap with i.
    /// </summary>
    /// <param name="i">The index to update.</param>
    /// <param name="doCleanIntervals">If 'doCleanIntervals' is true, intervals that end before 'i' will be popped.</param>
    public void UpdateCostAtIndex(int i, bool doCleanIntervals)
    {
        var indicesToRemove = new List<int>();
        using List<CostInterval>.Enumerator intervalEnumerator = this.Intervals.GetEnumerator();

        // NOTE(review): stopping at the first interval with Start > i is only correct if
        // Intervals is kept sorted by Start — confirm that invariant holds once
        // InsertInterval actually populates this list (see note there).
        while (intervalEnumerator.MoveNext() && intervalEnumerator.Current.Start <= i)
        {
            if (intervalEnumerator.Current.End <= i)
            {
                if (doCleanIntervals)
                {
                    // We have an outdated interval, remove it.
                    // NOTE(review): 'i' is the pixel index, not the interval's position in
                    // the Intervals list — this removes the wrong element (or throws).
                    // Track the enumeration index instead.
                    indicesToRemove.Add(i);
                }
            }
            else
            {
                this.UpdateCost(i, intervalEnumerator.Current.Index, intervalEnumerator.Current.Cost);
            }
        }

        // NOTE(review): the lambda parameter shadows the method parameter 'i' (CS0136);
        // rename it (e.g. v => v) so this compiles.
        foreach (int index in indicesToRemove.OrderByDescending(i => i))
        {
            this.Intervals.RemoveAt(index);
        }
    }

    /// <summary>
    /// Given a new cost interval defined by its start at position, its length value
    /// and distanceCost, add its contributions to the previous intervals and costs.
    /// If handling the interval or one of its subintervals becomes to heavy, its
    /// contribution is added to the costs right away.
    /// </summary>
    public void PushInterval(double distanceCost, int position, int len)
    {
        // If the interval is small enough, no need to deal with the heavy
        // interval logic, just serialize it right away. This constant is empirical.
        int skipDistance = 10;
        if (len < skipDistance)
        {
            for (int j = position; j < position + len; ++j)
            {
                int k = j - position;
                float costTmp = (float)(distanceCost + this.CostCache[k]);
                if (this.Costs[j] > costTmp)
                {
                    this.Costs[j] = costTmp;
                    this.DistArray[j] = (short)(k + 1);
                }
            }

            return;
        }

        for (int i = 0; i < this.CacheIntervalsSize && this.CacheIntervals[i].Start < len; i++)
        {
            // Define the intersection of the ith interval with the new one.
            int start = position + this.CacheIntervals[i].Start;
            int end = position + (this.CacheIntervals[i].End > len ? len : this.CacheIntervals[i].End);
            float cost = (float)(distanceCost + this.CacheIntervals[i].Cost);

            var idx = i;

            // NOTE(review): in the reference implementation (libwebp PushInterval) this
            // inner walk is over the LIVE cost intervals (this.Intervals), not the
            // constant cache intervals. Also 'interval' is never re-read after idx++,
            // so every iteration inspects the same object. Looks like a transcription
            // still in progress — verify against backward_references_enc.c.
            CostCacheInterval interval = this.CacheIntervals[idx];
            var indicesToRemove = new List<int>();
            for (; interval.Start < end; idx++)
            {
                // Make sure we have some overlap.
                if (start >= interval.End)
                {
                    continue;
                }

                if (cost >= interval.Cost)
                {
                    // The new interval is more expensive: only keep the part of it that
                    // precedes the existing interval.
                    int startNew = interval.End;
                    this.InsertInterval(cost, position, start, interval.Start);
                    start = startNew;
                    if (start >= end)
                    {
                        break;
                    }

                    continue;
                }

                if (start <= interval.Start)
                {
                    if (interval.End <= end)
                    {
                        // The new interval fully dominates the old one: drop it.
                        indicesToRemove.Add(idx);
                    }
                    else
                    {
                        // Trim the old interval's head.
                        interval.Start = end;
                        break;
                    }
                }
                else
                {
                    if (end < interval.End)
                    {
                        // The new interval is strictly inside the old one: split the old one.
                        int endOriginal = interval.End;
                        interval.End = start;
                        this.InsertInterval(interval.Cost, idx, end, endOriginal);
                        break;
                    }
                    else
                    {
                        // Trim the old interval's tail.
                        interval.End = start;
                    }
                }
            }

            // NOTE(review): 'idx' indexes CacheIntervals, but the removals below are
            // applied to Intervals — mismatch. Also the lambda parameter shadows the
            // loop variable 'i' (CS0136).
            foreach (int indice in indicesToRemove.OrderByDescending(i => i))
            {
                this.Intervals.RemoveAt(indice);
            }

            // Insert the remaining interval from start to end.
            this.InsertInterval(cost, position, start, end);
        }
    }

    // NOTE(review): per the class summary and the callers above, this should create a
    // CostInterval (with Index = position) and keep this.Intervals sorted by Start.
    // As written it builds a CostCacheInterval and calls List.Insert with a PIXEL
    // position, which throws ArgumentOutOfRangeException once position > list count.
    private void InsertInterval(double cost, int position, int start, int end)
    {
        // TODO: use COST_CACHE_INTERVAL_SIZE_MAX
        var interval = new CostCacheInterval()
        {
            Cost = cost,
            Start = start,
            End = end
        };
        this.CacheIntervals.Insert(position, interval);
    }

    /// <summary>
    /// Given the cost and the position that define an interval, update the cost at
    /// pixel 'i' if it is smaller than the previously computed value.
    /// </summary>
    private void UpdateCost(int i, int position, float cost)
    {
        // k + 1 is the copy length that reaches pixel i from 'position'.
        int k = i - position;
        if (this.Costs[i] > cost)
        {
            this.Costs[i] = cost;
            this.DistArray[i] = (short)(k + 1);
        }
    }
}
}

105
src/ImageSharp/Formats/WebP/Lossless/CostModel.cs

@ -0,0 +1,105 @@
// Copyright (c) Six Labors and contributors.
// Licensed under the GNU Affero General Public License, Version 3.
using System;
namespace SixLabors.ImageSharp.Formats.WebP.Lossless
{
internal class CostModel
{
    private const int ValuesInBytes = 256;

    /// <summary>
    /// Initializes a new instance of the <see cref="CostModel"/> class.
    /// </summary>
    /// <param name="literalArraySize">The literal array size.</param>
    public CostModel(int literalArraySize)
    {
        this.Alpha = new double[ValuesInBytes];
        this.Red = new double[ValuesInBytes];
        this.Blue = new double[ValuesInBytes];
        this.Distance = new double[WebPConstants.NumDistanceCodes];
        this.Literal = new double[literalArraySize];
    }

    /// <summary>Gets the bit-cost estimates for the alpha channel.</summary>
    public double[] Alpha { get; }

    /// <summary>Gets the bit-cost estimates for the red channel.</summary>
    public double[] Red { get; }

    /// <summary>Gets the bit-cost estimates for the blue channel.</summary>
    public double[] Blue { get; }

    /// <summary>Gets the bit-cost estimates for the distance codes.</summary>
    public double[] Distance { get; }

    /// <summary>Gets the bit-cost estimates for literals, length codes and cache indices.</summary>
    public double[] Literal { get; }

    /// <summary>
    /// Populates the cost tables from the symbol statistics of the given backward references.
    /// </summary>
    /// <param name="xSize">Width of the image, used to convert distances to plane codes.</param>
    /// <param name="cacheBits">Color cache size in bits.</param>
    /// <param name="backwardRefs">The references to gather statistics from.</param>
    public void Build(int xSize, int cacheBits, Vp8LBackwardRefs backwardRefs)
    {
        var histogram = new Vp8LHistogram(cacheBits);

        // The following code is similar to HistogramCreate but converts the distance to plane code.
        foreach (var pixOrCopy in backwardRefs.Refs)
        {
            histogram.AddSinglePixOrCopy(pixOrCopy, true, xSize);
        }

        ConvertPopulationCountTableToBitEstimates(histogram.NumCodes(), histogram.Literal, this.Literal);
        ConvertPopulationCountTableToBitEstimates(ValuesInBytes, histogram.Red, this.Red);
        ConvertPopulationCountTableToBitEstimates(ValuesInBytes, histogram.Blue, this.Blue);
        ConvertPopulationCountTableToBitEstimates(ValuesInBytes, histogram.Alpha, this.Alpha);
        ConvertPopulationCountTableToBitEstimates(WebPConstants.NumDistanceCodes, histogram.Distance, this.Distance);
    }

    /// <summary>
    /// Gets the estimated bit cost of encoding the given copy length.
    /// </summary>
    public double GetLengthCost(int length)
    {
        int extraBits = 0;
        int prefixCode = LosslessUtils.PrefixEncodeBits(length, ref extraBits);
        return this.Literal[ValuesInBytes + prefixCode] + extraBits;
    }

    /// <summary>
    /// Gets the estimated bit cost of encoding the given distance (plane) code.
    /// </summary>
    public double GetDistanceCost(int distance)
    {
        int extraBits = 0;
        int prefixCode = LosslessUtils.PrefixEncodeBits(distance, ref extraBits);
        return this.Distance[prefixCode] + extraBits;
    }

    /// <summary>
    /// Gets the estimated bit cost of encoding a color cache index.
    /// </summary>
    public double GetCacheCost(uint idx)
    {
        // Cache indices live after the literal and length codes in the Literal table.
        int literalIdx = (int)(ValuesInBytes + WebPConstants.NumLengthCodes + idx);
        return this.Literal[literalIdx];
    }

    /// <summary>
    /// Gets the estimated bit cost of encoding the BGRA pixel as four literal channels.
    /// </summary>
    public double GetLiteralCost(uint v)
    {
        // Sum the per-channel estimates; green is stored in the Literal table.
        double cost = this.Alpha[v >> 24];
        cost += this.Red[(v >> 16) & 0xff];
        cost += this.Literal[(v >> 8) & 0xff];
        cost += this.Blue[v & 0xff];
        return cost;
    }

    // Turns raw symbol counts into entropy-based bit estimates: log2(total) - log2(count).
    private static void ConvertPopulationCountTableToBitEstimates(int numSymbols, uint[] populationCounts, double[] output)
    {
        uint total = 0;
        int nonZeroCount = 0;
        for (int i = 0; i < numSymbols; i++)
        {
            uint count = populationCounts[i];
            total += count;
            if (count > 0)
            {
                nonZeroCount++;
            }
        }

        if (nonZeroCount <= 1)
        {
            // Degenerate distribution: a single symbol costs nothing to encode.
            output.AsSpan(0, numSymbols).Fill(0);
            return;
        }

        double logTotal = LosslessUtils.FastLog2(total);
        for (int i = 0; i < numSymbols; i++)
        {
            output[i] = logTotal - LosslessUtils.FastLog2(populationCounts[i]);
        }
    }
}
}

18
src/ImageSharp/Formats/WebP/Lossless/HistogramBinInfo.cs

@ -0,0 +1,18 @@
// Copyright (c) Six Labors and contributors.
// Licensed under the GNU Affero General Public License, Version 3.
namespace SixLabors.ImageSharp.Formats.WebP.Lossless
{
/// <summary>
/// Bookkeeping for one entropy bin while combining histograms.
/// </summary>
internal struct HistogramBinInfo
{
    /// <summary>
    /// Position of the histogram that accumulates all histograms with the same binId.
    /// </summary>
    public short First;

    /// <summary>
    /// Number of combine failures per binId.
    /// </summary>
    public short NumCombineFailures;
}
}

350
src/ImageSharp/Formats/WebP/Lossless/HistogramEncoder.cs

@ -41,10 +41,10 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
origHisto.Add(new Vp8LHistogram(cacheBits));
}
// Construct the histograms from backward references.
// Construct the histograms from the backward references.
HistogramBuild(xSize, histoBits, refs, origHisto);
// Copies the histograms and computes its bit_cost. histogramSymbols is optimized.
// Copies the histograms and computes its bitCost. histogramSymbols is optimized.
HistogramCopyAndAnalyze(origHisto, imageHisto, ref numUsed, histogramSymbols);
var entropyCombine = (numUsed > entropyCombineNumBins * 2) && (quality < 100);
@ -61,22 +61,44 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
OptimizeHistogramSymbols(imageHisto, clusterMappings, numClusters, mapTmp, histogramSymbols);
}
if (!entropyCombine)
float x = quality / 100.0f;
// Cubic ramp between 1 and MaxHistoGreedy:
int thresholdSize = (int)(1 + (x * x * x * (MaxHistoGreedy - 1)));
bool doGreedy = HistogramCombineStochastic(imageHisto, ref numUsed, thresholdSize);
if (doGreedy)
{
float x = quality / 100.0f;
HistogramCombineGreedy(imageHisto);
}
// Cubic ramp between 1 and MaxHistoGreedy:
int thresholdSize = (int)(1 + (x * x * x * (MaxHistoGreedy - 1)));
bool doGreedy = HistogramCombineStochastic(imageHisto, ref numUsed, thresholdSize);
if (doGreedy)
// Find the optimal map from original histograms to the final ones.
RemoveEmptyHistograms(imageHisto);
HistogramRemap(origHisto, imageHisto, histogramSymbols);
}
/// <summary>
/// Compacts the histogram list in place by dropping all null entries while
/// preserving the relative order of the remaining histograms.
/// </summary>
/// <param name="histograms">The histogram list to compact.</param>
private static void RemoveEmptyHistograms(List<Vp8LHistogram> histograms)
{
    int size = 0;
    for (int i = 0; i < histograms.Count; i++)
    {
        if (histograms[i] == null)
        {
            continue;
        }

        // Shift each surviving histogram into its compacted slot.
        histograms[size++] = histograms[i];
    }

    // Drop the stale tail left over from the in-place compaction.
    // (Removing at the original null indices instead would delete the wrong,
    // already-shifted elements, e.g. [A, null, B, C] -> [A, C, C].)
    histograms.RemoveRange(size, histograms.Count - size);
}
/// <summary>
/// Construct the histograms from backward references.
/// Construct the histograms from the backward references.
/// </summary>
private static void HistogramBuild(int xSize, int histoBits, Vp8LBackwardRefs backwardRefs, List<Vp8LHistogram> histograms)
{
@ -137,30 +159,91 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
}
else
{
// TODO: HistogramCopy(histo, histograms[i]);
histograms[i] = (Vp8LHistogram)histo.DeepClone();
histogramSymbols[i] = (short)clusterId++;
}
}
foreach (int indice in indicesToRemove.OrderByDescending(v => v))
foreach (int index in indicesToRemove.OrderByDescending(v => v))
{
origHistograms.RemoveAt(indice);
histograms.RemoveAt(indice);
origHistograms.RemoveAt(index);
histograms.RemoveAt(index);
}
}
private static void HistogramCombineEntropyBin(List<Vp8LHistogram> histograms, ref int numUsed, short[] clusters, short[] clusterMappings, Vp8LHistogram curCombo, short[] binMap, int numBins, double combineCostFactor)
{
var binInfo = new HistogramBinInfo[BinSize];
for (int idx = 0; idx < numBins; idx++)
{
binInfo[idx].First = -1;
binInfo[idx].NumCombineFailures = 0;
}
// By default, a cluster matches itself.
for (int idx = 0; idx < histograms.Count; idx++)
{
clusterMappings[idx] = (short)idx;
}
var indicesToRemove = new List<int>();
for (int idx = 0; idx < histograms.Count; idx++)
{
if (histograms[idx] == null)
{
continue;
}
int binId = binMap[idx];
int first = binInfo[binId].First;
if (first == -1)
{
binInfo[binId].First = (short)idx;
}
else
{
// Try to merge #idx into #first (both share the same binId)
double bitCost = histograms[idx].BitCost;
double bitCostThresh = -bitCost * combineCostFactor;
double currCostDiff = histograms[first].AddEval(histograms[idx], bitCostThresh, curCombo);
if (currCostDiff < bitCostThresh)
{
// Try to merge two histograms only if the combo is a trivial one or
// the two candidate histograms are already non-trivial.
// For some images, 'tryCombine' turns out to be false for a lot of
// histogram pairs. In that case, we fallback to combining
// histograms as usual to avoid increasing the header size.
bool tryCombine = (curCombo.TrivialSymbol != NonTrivialSym) || ((histograms[idx].TrivialSymbol == NonTrivialSym) && (histograms[first].TrivialSymbol == NonTrivialSym));
int maxCombineFailures = 32;
if (tryCombine || binInfo[binId].NumCombineFailures >= maxCombineFailures)
{
// Move the (better) merged histogram to its final slot.
Vp8LHistogram tmp = curCombo;
curCombo = histograms[first];
histograms[first] = tmp;
histograms[idx] = null;
indicesToRemove.Add(idx);
clusterMappings[clusters[idx]] = clusters[first];
}
else
{
binInfo[binId].NumCombineFailures++;
}
}
}
}
foreach (int index in indicesToRemove.OrderByDescending(i => i))
{
histograms.RemoveAt(index);
}
}
/// <summary>
/// Given a Histogram set, the mapping of clusters 'clusterMapping' and the
/// current assignment of the cells in 'symbols', merge the clusters and
/// assign the smallest possible clusters values.
/// current assignment of the cells in 'symbols', merge the clusters and assign the smallest possible clusters values.
/// </summary>
private static void OptimizeHistogramSymbols(List<Vp8LHistogram> histograms, short[] clusterMappings, int numClusters, short[] clusterMappingsTmp, short[] symbols)
{
@ -194,10 +277,9 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
clusterMappingsTmp.AsSpan().Fill(0);
// Re-map the ids.
for (int i = 0; i < histograms.Count; i++)
for (int i = 0; i < symbols.Length; i++)
{
int cluster;
cluster = clusterMappings[symbols[i]];
int cluster = clusterMappings[symbols[i]];
if (cluster > 0 && clusterMappingsTmp[cluster] == 0)
{
clusterMax++;
@ -206,18 +288,6 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
symbols[i] = clusterMappingsTmp[cluster];
}
// Make sure all cluster values are used.
clusterMax = 0;
for (int i = 0; i < histograms.Count; i++)
{
if (symbols[i] <= clusterMax)
{
continue;
}
clusterMax++;
}
}
/// <summary>
@ -231,6 +301,11 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
int outerIters = numUsed;
int numTriesNoSuccess = outerIters / 2;
if (histograms.Count < minClusterSize)
{
return true;
}
// Priority queue of histogram pairs. Its size impacts the quality of the compression and the speed:
// the smaller the faster but the worse for the compression.
var histoPriorityList = new List<HistogramPair>();
@ -269,7 +344,7 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
idx2 = mappings[idx2];
// Calculate cost reduction on combination.
currCost = HistoQueuePush(histoPriorityList, histoQueueMaxSize, histograms, idx1, idx2, bestCost);
currCost = HistoPriorityListPush(histoPriorityList, histoQueueMaxSize, histograms, idx1, idx2, bestCost);
// Found a better pair?
if (currCost < 0)
@ -346,7 +421,7 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
if (doEval)
{
// Re-evaluate the cost of an updated pair.
HistoQueueUpdatePair(histograms[p.Idx1], histograms[p.Idx2], 0.0d, p);
HistoListUpdatePair(histograms[p.Idx1], histograms[p.Idx2], 0.0d, p);
if (p.CostDiff >= 0.0d)
{
indicesToRemove.Add(lastIndex);
@ -356,7 +431,7 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
}
}
HistoQueueUpdateHead(histoPriorityList, p);
HistoListUpdateHead(histoPriorityList, p);
j++;
}
@ -368,20 +443,19 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
return doGreedy;
}
private static void HistogramCombineGreedy(List<Vp8LHistogram> histograms, ref int numUsed)
private static void HistogramCombineGreedy(List<Vp8LHistogram> histograms)
{
int histoSize = histograms.Count;
// Priority list of histogram pairs.
var histoPriorityList = new List<HistogramPair>();
int maxHistoQueueSize = histoSize * histoSize;
int maxSize = histoSize * histoSize;
for (int i = 0; i < histograms.Count; i++)
{
for (int j = i + 1; j < histograms.Count; j++)
{
// Initialize queue.
HistoQueuePush(histoPriorityList, maxHistoQueueSize, histograms, i, j, 0.0d);
HistoPriorityListPush(histoPriorityList, maxSize, histograms, i, j, 0.0d);
}
}
@ -393,8 +467,8 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
histograms[idx1].BitCost = histoPriorityList[0].CostCombo;
// Remove merged histogram.
histograms.RemoveAt(idx2);
numUsed--;
// TODO: can the element be removed instead? histograms.RemoveAt(idx2);
histograms[idx2] = null;
// Remove pairs intersecting the just combined best pair.
for (int i = 0; i < histoPriorityList.Count;)
@ -402,41 +476,83 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
HistogramPair p = histoPriorityList.ElementAt(i);
if (p.Idx1 == idx1 || p.Idx2 == idx1 || p.Idx1 == idx2 || p.Idx2 == idx2)
{
// Remove last entry from the queue.
p = histoPriorityList.ElementAt(histoPriorityList.Count - 1);
histoPriorityList.RemoveAt(histoPriorityList.Count - 1); // TODO: use list instead Queue?
// Replace item at pos i with the last one and shrinking the list.
histoPriorityList[i] = histoPriorityList[histoPriorityList.Count - 1];
histoPriorityList.RemoveAt(histoPriorityList.Count - 1);
}
else
{
HistoQueueUpdateHead(histoPriorityList, p);
HistoListUpdateHead(histoPriorityList, p);
i++;
}
}
// Push new pairs formed with combined histogram to the queue.
// Push new pairs formed with combined histogram to the list.
for (int i = 0; i < histograms.Count; i++)
{
if (i == idx1)
if (i == idx1 || histograms[i] == null)
{
continue;
}
HistoQueuePush(histoPriorityList, maxHistoQueueSize, histograms, idx1, i, 0.0d);
HistoPriorityListPush(histoPriorityList, maxSize, histograms, idx1, i, 0.0d);
}
}
}
/// <summary>
/// Maps each input histogram onto the best-matching output (cluster) histogram and then
/// accumulates every input into its chosen cluster.
/// </summary>
/// <param name="input">The original per-tile histograms.</param>
/// <param name="output">The combined cluster histograms.</param>
/// <param name="symbols">Receives, per input histogram, the index of the chosen output histogram.</param>
private static void HistogramRemap(List<Vp8LHistogram> input, List<Vp8LHistogram> output, short[] symbols)
{
    int inSize = symbols.Length;
    int outSize = output.Count;
    if (outSize > 1)
    {
        for (int i = 0; i < inSize; i++)
        {
            int bestOut = 0;
            double bestBits = double.MaxValue;
            for (int k = 0; k < outSize; k++)
            {
                // NOTE(review): assumes AddThresh returns the combined bit cost of
                // (output[k] + input[i]) with an early-out once it exceeds bestBits —
                // confirm against Vp8LHistogram.AddThresh.
                double curBits = output[k].AddThresh(input[i], bestBits);
                if (k == 0 || curBits < bestBits)
                {
                    bestBits = curBits;
                    bestOut = k;
                }
            }

            symbols[i] = (short)bestOut;
        }
    }
    else
    {
        // Only one cluster left: every input maps to it.
        for (int i = 0; i < inSize; i++)
        {
            symbols[i] = 0;
        }
    }

    // Accumulate each surviving input histogram into its chosen cluster.
    for (int i = 0; i < inSize; i++)
    {
        if (input[i] == null)
        {
            continue;
        }

        int idx = symbols[i];
        // NOTE(review): assumes Add(b, dest) stores 'this + b' into dest — verify the
        // argument convention of Vp8LHistogram.Add.
        input[i].Add(output[idx], output[idx]);
    }
}
/// <summary>
/// // Create a pair from indices "idx1" and "idx2" provided its cost
/// Create a pair from indices "idx1" and "idx2" provided its cost
/// is inferior to "threshold", a negative entropy.
/// </summary>
/// <returns>The cost of the pair, or 0. if it superior to threshold.</returns>
private static double HistoQueuePush(List<HistogramPair> histoQueue, int queueMaxSize, List<Vp8LHistogram> histograms, int idx1, int idx2, double threshold)
private static double HistoPriorityListPush(List<HistogramPair> histoList, int maxSize, List<Vp8LHistogram> histograms, int idx1, int idx2, double threshold)
{
var pair = new HistogramPair();
// Stop here if the queue is full.
if (histoQueue.Count == queueMaxSize)
if (histoList.Count == maxSize)
{
return 0.0d;
}
@ -453,7 +569,7 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
Vp8LHistogram h1 = histograms[idx1];
Vp8LHistogram h2 = histograms[idx2];
HistoQueueUpdatePair(h1, h2, threshold, pair);
HistoListUpdatePair(h1, h2, threshold, pair);
// Do not even consider the pair if it does not improve the entropy.
if (pair.CostDiff >= threshold)
@ -461,140 +577,44 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
return 0.0d;
}
histoQueue.Add(pair);
histoList.Add(pair);
HistoQueueUpdateHead(histoQueue, pair);
HistoListUpdateHead(histoList, pair);
return pair.CostDiff;
}
/// <summary>
/// Update the cost diff and combo of a pair of histograms. This needs to be
/// called when the histograms have been merged with a third one.
/// </summary>
private static void HistoQueueUpdatePair(Vp8LHistogram h1, Vp8LHistogram h2, double threshold, HistogramPair pair)
private static void HistoListUpdatePair(Vp8LHistogram h1, Vp8LHistogram h2, double threshold, HistogramPair pair)
{
double sumCost = h1.BitCost + h2.BitCost;
pair.CostCombo = GetCombinedHistogramEntropy(h1, h2, sumCost + threshold);
h1.GetCombinedHistogramEntropy(h2, sumCost + threshold, costInitial: pair.CostCombo, out var cost);
pair.CostCombo = cost;
pair.CostDiff = pair.CostCombo - sumCost;
}
/// <summary>
/// Estimates the bit cost of the histogram obtained by merging 'a' and 'b',
/// returning 0 as soon as the partial cost exceeds 'costThreshold'.
/// NOTE(review): only the Literal and Red populations are accumulated here; the
/// Blue/Alpha/Distance contributions present in Vp8LHistogram.GetCombinedHistogramEntropy
/// are missing — confirm whether this overload is intentionally partial.
/// </summary>
private static double GetCombinedHistogramEntropy(Vp8LHistogram a, Vp8LHistogram b, double costThreshold)
{
    double cost = 0.0d;
    int paletteCodeBits = a.PaletteCodeBits;
    bool trivialAtEnd = false;

    // Literal population: Huffman cost of the merged distribution...
    cost += GetCombinedEntropy(a.Literal, b.Literal, Vp8LHistogram.HistogramNumCodes(paletteCodeBits), a.IsUsed[0], b.IsUsed[0], false);

    // ...plus the extra bits carried by the length codes beyond the literal range.
    cost += ExtraCostCombined(a.Literal.AsSpan(WebPConstants.NumLiteralCodes), b.Literal.AsSpan(WebPConstants.NumLiteralCodes), WebPConstants.NumLengthCodes);

    // Early out: caller only needs to know the merge is not worth it.
    if (cost > costThreshold)
    {
        return 0;
    }

    // When both histograms share the same trivial (single-color) symbol and each of its
    // A/R/B channels is saturated (0 or 0xff), the non-zero entry sits at an end of the
    // population, which GetCombinedEntropy can cost more cheaply.
    if (a.TrivialSymbol != NonTrivialSym && a.TrivialSymbol == b.TrivialSymbol)
    {
        // A, R and B are all 0 or 0xff.
        uint color_a = (a.TrivialSymbol >> 24) & 0xff;
        uint color_r = (a.TrivialSymbol >> 16) & 0xff;
        uint color_b = (a.TrivialSymbol >> 0) & 0xff;
        if ((color_a == 0 || color_a == 0xff) &&
            (color_r == 0 || color_r == 0xff) &&
            (color_b == 0 || color_b == 0xff))
        {
            trivialAtEnd = true;
        }
    }

    cost += GetCombinedEntropy(a.Red, b.Red, WebPConstants.NumLiteralCodes, a.IsUsed[1], b.IsUsed[1], trivialAtEnd);

    return cost;
}
/// <summary>
/// Returns the estimated Huffman cost of coding the element-wise sum of the
/// populations 'x' and 'y', honoring their used-flags.
/// </summary>
private static double GetCombinedEntropy(uint[] x, uint[] y, int length, bool isXUsed, bool isYUsed, bool trivialAtEnd)
{
    var streaks = new Vp8LStreaks();

    if (trivialAtEnd)
    {
        // Palettization turns an indexed pixel into 0xff000000 | (pixel << 8) in
        // BundleColorMap, so the single non-zero value lives at index 0 or length-1.
        // BitsEntropyRefine is 0 for a one-value histogram; only FinalHuffmanCost matters.
        streaks.Streaks[1][0] = 1;       // the lone non-zero entry
        streaks.Counts[0] = 1;           // one zero streak...
        streaks.Streaks[0][1] = length - 1; // ...covering everything else
        return streaks.FinalHuffmanCost();
    }

    var entropy = new Vp8LBitEntropy();
    if (isXUsed && isYUsed)
    {
        entropy.GetCombinedEntropyUnrefined(x, y, length, streaks);
    }
    else if (isXUsed)
    {
        entropy.GetEntropyUnrefined(x, length, streaks);
    }
    else if (isYUsed)
    {
        entropy.GetEntropyUnrefined(y, length, streaks);
    }
    else
    {
        // Neither population is used: cost a single all-zero streak.
        streaks.Counts[0] = 1;
        streaks.Streaks[0][length > 3 ? 1 : 0] = length;
        entropy.Init();
    }

    return entropy.BitsEntropyRefine() + streaks.FinalHuffmanCost();
}
/// <summary>
/// Returns the extra-bit cost of the combined populations 'x' and 'y':
/// symbol i (read at offset i + 2) carries (i >> 1) extra bits per occurrence.
/// </summary>
private static double ExtraCostCombined(Span<uint> x, Span<uint> y, int length)
{
    double total = 0.0d;
    for (int i = 2; i < length - 2; i++)
    {
        int combined = (int)(x[i + 2] + y[i + 2]);
        total += (i >> 1) * combined;
    }

    return total;
}
/// <summary>
/// Computes output = a + b and updates the merged trivial symbol.
/// </summary>
private static void HistogramAdd(Vp8LHistogram a, Vp8LHistogram b, Vp8LHistogram output)
{
    // Bug fix: the accumulation was left as a TODO, so 'output' never received the
    // summed populations. Vp8LHistogram.Add performs the element-wise accumulation.
    a.Add(b, output);

    // A trivial (single-color) symbol survives the merge only when both inputs agree on it.
    output.TrivialSymbol = (a.TrivialSymbol == b.TrivialSymbol)
        ? a.TrivialSymbol
        : NonTrivialSym;
}
/// <summary>
/// Check whether a pair in the list should be updated as head or not.
/// </summary>
private static void HistoQueueUpdateHead(List<HistogramPair> histoQueue, HistogramPair pair)
private static void HistoListUpdateHead(List<HistogramPair> histoList, HistogramPair pair)
{
if (pair.CostDiff < histoQueue[0].CostDiff)
if (pair.CostDiff < histoList[0].CostDiff)
{
// Replace the best pair.
histoQueue.RemoveAt(0);
histoQueue.Insert(0, pair);
var oldIdx = histoList.IndexOf(pair);
histoList[oldIdx] = histoList[0];
histoList[0] = pair;
}
}
/// <summary>
/// Computes output = a + b and updates the merged trivial symbol.
/// </summary>
private static void HistogramAdd(Vp8LHistogram a, Vp8LHistogram b, Vp8LHistogram output)
{
    // Element-wise accumulation of all populations.
    a.Add(b, output);

    // A trivial (single-color) symbol survives the merge only when both inputs agree on it.
    bool sameTrivialSymbol = a.TrivialSymbol == b.TrivialSymbol;
    output.TrivialSymbol = sameTrivialSymbol ? a.TrivialSymbol : NonTrivialSym;
}
private static double GetCombineCostFactor(int histoSize, int quality)
{
double combineCostFactor = 0.16d;

3
src/ImageSharp/Formats/WebP/Lossless/HistogramPair.cs

@ -1,11 +1,14 @@
// Copyright (c) Six Labors and contributors.
// Licensed under the GNU Affero General Public License, Version 3.
using System.Diagnostics;
namespace SixLabors.ImageSharp.Formats.WebP.Lossless
{
/// <summary>
/// Pair of histograms. Negative Idx1 value means that pair is out-of-date.
/// </summary>
[DebuggerDisplay("Idx1: {Idx1}, Idx2: {Idx2}, CostDiff: {CostDiff}, CostCombo: {CostCombo}")]
internal class HistogramPair
{
public int Idx1 { get; set; }

17
src/ImageSharp/Formats/WebP/Lossless/HuffmanUtils.cs

@ -156,7 +156,7 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
/// </summary>
/// <see cref="http://en.wikipedia.org/wiki/Huffman_coding"/>
/// <param name="tree">The huffman tree.</param>
/// <param name="histogram">The histogram.</param>
/// <param name="histogramSize">The size of the histogram.</param>
/// <param name="treeDepthLimit">The tree depth limit.</param>
/// <param name="bitDepths">How many bits are used for the symbol.</param>
@ -269,13 +269,12 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
}
}
public static int CreateCompressedHuffmanTree(HuffmanTreeCode tree, HuffmanTreeToken[] tokens)
public static int CreateCompressedHuffmanTree(HuffmanTreeCode tree, HuffmanTreeToken[] tokensArray)
{
int depthSize = tree.NumSymbols;
int prevValue = 8; // 8 is the initial value for rle.
int i = 0;
int tokenIdx = 0;
Span<HuffmanTreeToken> tokenSpan = tokens.AsSpan();
int tokenPos = 0;
while (i < depthSize)
{
int value = tree.CodeLengths[i];
@ -289,19 +288,18 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
runs = k - i;
if (value == 0)
{
tokenIdx += CodeRepeatedZeros(runs, tokens);
tokenPos += CodeRepeatedZeros(runs, tokensArray.AsSpan(tokenPos));
}
else
{
tokenIdx += CodeRepeatedValues(runs, tokens, value, prevValue);
tokenPos += CodeRepeatedValues(runs, tokensArray.AsSpan(tokenPos), value, prevValue);
prevValue = value;
}
tokenSpan.Slice(tokenIdx);
i += runs;
}
return tokenIdx;
return tokenPos;
}
public static int BuildHuffmanTable(Span<HuffmanCode> table, int rootBits, int[] codeLengths, int codeLengthsSize)
@ -458,8 +456,7 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
{
if (repetitions < 3)
{
int i;
for (i = 0; i < repetitions; ++i)
for (int i = 0; i < repetitions; i++)
{
tokens[pos].Code = 0; // 0-value
tokens[pos].ExtraBits = 0;

4
src/ImageSharp/Formats/WebP/Lossless/PredictorEncoder.cs

@ -325,7 +325,7 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
// Update the source image.
currentRow[x] = LosslessUtils.AddPixels(predict, residual);
// x is never 0 here so we do not need to update upper_row like below.
// x is never 0 here so we do not need to update upperRow like below.
}
if ((currentRow[x] & MaskAlpha) == 0)
@ -344,7 +344,7 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
// in that row as its top-right context pixel. Hence if we change the
// leftmost pixel of current_row, the corresponding change must be
// applied
// to upper_row as well where top-right context is being read from.
// to upperRow as well where top-right context is being read from.
if (x == 0 && y != 0)
{
upperRow[width] = currentRow[0];

18
src/ImageSharp/Formats/WebP/Lossless/Vp8LBitEntropy.cs

@ -79,9 +79,9 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
return (0.99 * this.Sum) + (0.01 * this.Entropy);
}
// No matter what the entropy says, we cannot be better than min_limit
// No matter what the entropy says, we cannot be better than minLimit
// with Huffman coding. I am mixing a bit of entropy into the
// min_limit since it produces much better (~0.5 %) compression results
// minLimit since it produces much better (~0.5 %) compression results
// perhaps because of better entropy clustering.
if (this.NoneZeros == 3)
{
@ -195,9 +195,21 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
private void GetEntropyUnrefined(uint val, int i, ref uint valPrev, ref int iPrev, Vp8LStreaks stats)
{
// Gather info for the bit entropy.
int streak = i - iPrev;
// Gather info for the bit entropy.
if (valPrev != 0)
{
this.Sum += (uint)(valPrev * streak);
this.NoneZeros += streak;
this.NoneZeroCode = (uint)iPrev;
this.Entropy -= LosslessUtils.FastSLog2(valPrev) * streak;
if (this.MaxVal < valPrev)
{
this.MaxVal = valPrev;
}
}
// Gather info for the Huffman cost.
stats.Counts[valPrev != 0 ? 1 : 0] += streak > 3 ? 1 : 0;
stats.Streaks[valPrev != 0 ? 1 : 0][streak > 3 ? 1 : 0] += streak;

21
src/ImageSharp/Formats/WebP/Lossless/Vp8LHashChain.cs

@ -7,11 +7,21 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
{
internal class Vp8LHashChain
{
/// <summary>
/// Initializes a new instance of the <see cref="Vp8LHashChain"/> class.
/// </summary>
/// <param name="size">The size of the chain.</param>
public Vp8LHashChain(int size)
{
    this.OffsetLength = new uint[size];

    // NOTE(review): 0xcdcdcdcd looks like an "uninitialized" sentinel pattern —
    // confirm that consumers never read an entry that was left unfilled.
    this.OffsetLength.AsSpan().Fill(0xcdcdcdcd);
    this.Size = size;
}
/// <summary>
/// The 20 most significant bits contain the offset at which the best match is found.
/// These 20 bits are the limit defined by GetWindowSizeForHashChain (through WindowSize = 1 << 20).
/// The lower 12 bits contain the length of the match. The 12 bit limit is
/// defined in MaxFindCopyLength with MAX_LENGTH=4096.
/// The lower 12 bits contain the length of the match.
/// </summary>
public uint[] OffsetLength { get; }
@ -21,13 +31,6 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
/// </summary>
public int Size { get; }
public Vp8LHashChain(int size)
{
this.OffsetLength = new uint[size];
this.OffsetLength.AsSpan().Fill(0xcdcdcdcd);
this.Size = size;
}
public int FindLength(int basePosition)
{
return (int)(this.OffsetLength[basePosition] & ((1U << BackwardReferenceEncoder.MaxLengthBits) - 1));

354
src/ImageSharp/Formats/WebP/Lossless/Vp8LHistogram.cs

@ -6,41 +6,55 @@ using System.Collections.Generic;
namespace SixLabors.ImageSharp.Formats.WebP.Lossless
{
internal class Vp8LHistogram
internal class Vp8LHistogram : IDeepCloneable
{
private const uint NonTrivialSym = 0xffffffff;
/// <summary>
/// Initializes a new instance of the <see cref="Vp8LHistogram"/> class.
/// </summary>
/// <param name="refs">The backward references to initialize the histogram with.</param>
/// <param name="paletteCodeBits">The palette code bits.</param>
public Vp8LHistogram(Vp8LBackwardRefs refs, int paletteCodeBits)
: this()
public Vp8LHistogram()
{
if (paletteCodeBits >= 0)
{
this.PaletteCodeBits = paletteCodeBits;
}
}
this.StoreRefs(refs);
/// <summary>
/// Initializes a new instance of the <see cref="Vp8LHistogram"/> class
/// as a deep copy of <paramref name="other"/>.
/// </summary>
/// <param name="other">The histogram to create an instance from.</param>
private Vp8LHistogram(Vp8LHistogram other)
    : this(other.PaletteCodeBits)
{
    other.Red.AsSpan().CopyTo(this.Red);
    other.Blue.AsSpan().CopyTo(this.Blue);
    other.Alpha.AsSpan().CopyTo(this.Alpha);
    other.Literal.AsSpan().CopyTo(this.Literal);

    // Bug fix: the distance population must be cloned as well, otherwise
    // DeepClone silently drops all distance statistics.
    other.Distance.AsSpan().CopyTo(this.Distance);

    other.IsUsed.AsSpan().CopyTo(this.IsUsed);
    this.LiteralCost = other.LiteralCost;
    this.RedCost = other.RedCost;
    this.BlueCost = other.BlueCost;
    this.BitCost = other.BitCost;
    this.TrivialSymbol = other.TrivialSymbol;

    // PaletteCodeBits is already set by the chained constructor above.
}
/// <summary>
/// Initializes a new instance of the <see cref="Vp8LHistogram"/> class.
/// </summary>
/// <param name="refs">The backward references to initialize the histogram with.</param>
/// <param name="paletteCodeBits">The palette code bits.</param>
public Vp8LHistogram(int paletteCodeBits)
: this()
public Vp8LHistogram(Vp8LBackwardRefs refs, int paletteCodeBits)
: this(paletteCodeBits)
{
this.PaletteCodeBits = paletteCodeBits;
this.StoreRefs(refs);
}
/// <summary>
/// Initializes a new instance of the <see cref="Vp8LHistogram"/> class.
/// </summary>
public Vp8LHistogram()
/// <param name="paletteCodeBits">The palette code bits.</param>
public Vp8LHistogram(int paletteCodeBits)
{
this.PaletteCodeBits = paletteCodeBits;
this.Red = new uint[WebPConstants.NumLiteralCodes + 1];
this.Blue = new uint[WebPConstants.NumLiteralCodes + 1];
this.Alpha = new uint[WebPConstants.NumLiteralCodes + 1];
@ -53,10 +67,13 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
this.IsUsed = new bool[5];
}
/// <inheritdoc/>
public IDeepCloneable DeepClone() => new Vp8LHistogram(this);
/// <summary>
/// Gets the palette code bits.
/// Gets or sets the palette code bits.
/// </summary>
public int PaletteCodeBits { get; }
public int PaletteCodeBits { get; set; }
/// <summary>
/// Gets or sets the cached value of bit cost.
@ -110,7 +127,8 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
/// </summary>
/// <param name="v">The token to add.</param>
/// <param name="useDistanceModifier">Indicates whether to use the distance modifier.</param>
public void AddSinglePixOrCopy(PixOrCopy v, bool useDistanceModifier)
/// <param name="xSize">xSize is only used when useDistanceModifier is true.</param>
public void AddSinglePixOrCopy(PixOrCopy v, bool useDistanceModifier, int xSize = 0)
{
if (v.IsLiteral())
{
@ -135,7 +153,7 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
}
else
{
// TODO: VP8LPrefixEncodeBits(distance_modifier(distance_modifier_arg0, PixOrCopyDistance(v)), &code, &extra_bits);
code = LosslessUtils.PrefixEncodeBits(BackwardReferenceEncoder.DistanceToPlaneCode(xSize, (int)v.Distance()), ref extraBits);
}
this.Distance[code]++;
@ -170,7 +188,7 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
uint notUsed = 0;
double alphaCost = PopulationCost(this.Alpha, WebPConstants.NumLiteralCodes, ref alphaSym, ref this.IsUsed[3]);
double distanceCost = PopulationCost(this.Distance, WebPConstants.NumDistanceCodes, ref notUsed, ref this.IsUsed[4]) + ExtraCost(this.Distance, WebPConstants.NumDistanceCodes);
int numCodes = HistogramNumCodes(this.PaletteCodeBits);
int numCodes = this.NumCodes();
this.LiteralCost = PopulationCost(this.Literal, numCodes, ref notUsed, ref this.IsUsed[0]) + ExtraCost(this.Literal.AsSpan(WebPConstants.NumLiteralCodes), WebPConstants.NumLengthCodes);
this.RedCost = PopulationCost(this.Red, WebPConstants.NumLiteralCodes, ref redSym, ref this.IsUsed[1]);
this.BlueCost = PopulationCost(this.Blue, WebPConstants.NumLiteralCodes, ref blueSym, ref this.IsUsed[2]);
@ -181,10 +199,295 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
}
else
{
this.TrivialSymbol = ((uint)alphaSym << 24) | (redSym << 16) | (blueSym << 0);
this.TrivialSymbol = (alphaSym << 24) | (redSym << 16) | (blueSym << 0);
}
}
/// <summary>
/// Performs output = this + b while comparing the combined cost C(this+b) against
/// 'costThreshold'. Returns Score = C(this+b) - C(this) - C(b); since C(this) + C(b)
/// is known and fixed, the partial cost is compared against
/// 'costThreshold + C(this) + C(b)' so the evaluation can bail out early.
/// </summary>
public double AddEval(Vp8LHistogram b, double costThreshold, Vp8LHistogram output)
{
    double sumCost = this.BitCost + b.BitCost;
    double bailOutAt = costThreshold + sumCost;

    bool accepted = this.GetCombinedHistogramEntropy(b, bailOutAt, costInitial: 0, out double cost);
    if (accepted)
    {
        // The merge is worthwhile: materialize it into 'output'.
        this.Add(b, output);
        output.BitCost = cost;
        output.PaletteCodeBits = this.PaletteCodeBits;
    }

    return cost - sumCost;
}
/// <summary>
/// Returns the cost of merging 'b' into this histogram, relative to this histogram's
/// own bit cost (the evaluation starts at -BitCost), bailing out at 'costThreshold'.
/// </summary>
public double AddThresh(Vp8LHistogram b, double costThreshold)
{
    this.GetCombinedHistogramEntropy(b, costThreshold, costInitial: -this.BitCost, out double cost);
    return cost;
}
/// <summary>
/// Computes output = this + b across all populations, used-flags and the trivial symbol.
/// </summary>
public void Add(Vp8LHistogram b, Vp8LHistogram output)
{
    this.AddLiteral(b, output, this.NumCodes());
    this.AddRed(b, output, WebPConstants.NumLiteralCodes);
    this.AddBlue(b, output, WebPConstants.NumLiteralCodes);
    this.AddAlpha(b, output, WebPConstants.NumLiteralCodes);
    this.AddDistance(b, output, WebPConstants.NumDistanceCodes);

    // A population of the merge is used when either side used it.
    for (int channel = 0; channel < 5; channel++)
    {
        output.IsUsed[channel] = this.IsUsed[channel] | b.IsUsed[channel];
    }

    // A trivial (single-color) symbol survives only when both inputs agree on it.
    output.TrivialSymbol = this.TrivialSymbol == b.TrivialSymbol
        ? this.TrivialSymbol
        : NonTrivialSym;
}
/// <summary>
/// Estimates the bit cost of the histogram obtained by merging this histogram with 'b',
/// starting from 'costInitial'. Returns false (with 'cost' holding the partial sum)
/// as soon as the running cost exceeds 'costThreshold'; returns true with the full
/// combined cost otherwise.
/// </summary>
public bool GetCombinedHistogramEntropy(Vp8LHistogram b, double costThreshold, double costInitial, out double cost)
{
    bool trivialAtEnd = false;
    cost = costInitial;

    // Literal population: Huffman cost of the merged distribution plus the extra bits
    // carried by the length codes beyond the literal range.
    cost += GetCombinedEntropy(this.Literal, b.Literal, this.NumCodes(), this.IsUsed[0], b.IsUsed[0], false);

    cost += ExtraCostCombined(this.Literal.AsSpan(WebPConstants.NumLiteralCodes), b.Literal.AsSpan(WebPConstants.NumLiteralCodes), WebPConstants.NumLengthCodes);

    if (cost > costThreshold)
    {
        return false;
    }

    // When both histograms share the same trivial (single-color) symbol and each of its
    // A/R/B channels is saturated (0 or 0xff), the non-zero entry sits at an end of the
    // population, which GetCombinedEntropy can cost more cheaply.
    if (this.TrivialSymbol != NonTrivialSym && this.TrivialSymbol == b.TrivialSymbol)
    {
        // A, R and B are all 0 or 0xff.
        uint colorA = (this.TrivialSymbol >> 24) & 0xff;
        uint colorR = (this.TrivialSymbol >> 16) & 0xff;
        uint colorB = (this.TrivialSymbol >> 0) & 0xff;
        if ((colorA == 0 || colorA == 0xff) &&
            (colorR == 0 || colorR == 0xff) &&
            (colorB == 0 || colorB == 0xff))
        {
            trivialAtEnd = true;
        }
    }

    // Remaining populations, each with its own early-out against the threshold.
    cost += GetCombinedEntropy(this.Red, b.Red, WebPConstants.NumLiteralCodes, this.IsUsed[1], b.IsUsed[1], trivialAtEnd);
    if (cost > costThreshold)
    {
        return false;
    }

    cost += GetCombinedEntropy(this.Blue, b.Blue, WebPConstants.NumLiteralCodes, this.IsUsed[2], b.IsUsed[2], trivialAtEnd);
    if (cost > costThreshold)
    {
        return false;
    }

    cost += GetCombinedEntropy(this.Alpha, b.Alpha, WebPConstants.NumLiteralCodes, this.IsUsed[3], b.IsUsed[3], trivialAtEnd);
    if (cost > costThreshold)
    {
        return false;
    }

    cost += GetCombinedEntropy(this.Distance, b.Distance, WebPConstants.NumDistanceCodes, this.IsUsed[4], b.IsUsed[4], false);
    if (cost > costThreshold)
    {
        return false;
    }

    // Distance codes also carry extra bits on top of the Huffman cost.
    cost += ExtraCostCombined(this.Distance, b.Distance, WebPConstants.NumDistanceCodes);
    if (cost > costThreshold)
    {
        return false;
    }

    return true;
}
// Accumulates this histogram's literal population with 'b' into 'output'.
private void AddLiteral(Vp8LHistogram b, Vp8LHistogram output, int literalSize)
{
    AddPopulations(this.Literal, b.Literal, output.Literal, literalSize, this.IsUsed[0], b.IsUsed[0]);
}

// Accumulates this histogram's red population with 'b' into 'output'.
private void AddRed(Vp8LHistogram b, Vp8LHistogram output, int size)
{
    AddPopulations(this.Red, b.Red, output.Red, size, this.IsUsed[1], b.IsUsed[1]);
}

// Accumulates this histogram's blue population with 'b' into 'output'.
private void AddBlue(Vp8LHistogram b, Vp8LHistogram output, int size)
{
    AddPopulations(this.Blue, b.Blue, output.Blue, size, this.IsUsed[2], b.IsUsed[2]);
}

// Accumulates this histogram's alpha population with 'b' into 'output'.
private void AddAlpha(Vp8LHistogram b, Vp8LHistogram output, int size)
{
    AddPopulations(this.Alpha, b.Alpha, output.Alpha, size, this.IsUsed[3], b.IsUsed[3]);
}

// Accumulates this histogram's distance population with 'b' into 'output'.
private void AddDistance(Vp8LHistogram b, Vp8LHistogram output, int size)
{
    AddPopulations(this.Distance, b.Distance, output.Distance, size, this.IsUsed[4], b.IsUsed[4]);
}

/// <summary>
/// Computes output = a + b over the first <paramref name="size"/> entries, honoring
/// the used-flags: an unused population contributes nothing, and if neither side is
/// used the output is cleared. Shared by the five per-channel Add methods, which were
/// previously five identical copies of this logic.
/// </summary>
private static void AddPopulations(uint[] a, uint[] b, uint[] output, int size, bool aUsed, bool bUsed)
{
    if (aUsed && bUsed)
    {
        AddVector(a, b, output, size);
    }
    else if (aUsed)
    {
        a.AsSpan(0, size).CopyTo(output);
    }
    else if (bUsed)
    {
        b.AsSpan(0, size).CopyTo(output);
    }
    else
    {
        output.AsSpan(0, size).Fill(0);
    }
}
/// <summary>
/// Returns the estimated Huffman cost of coding the element-wise sum of the
/// populations 'x' and 'y', honoring their used-flags.
/// </summary>
private static double GetCombinedEntropy(uint[] x, uint[] y, int length, bool isXUsed, bool isYUsed, bool trivialAtEnd)
{
    var streaks = new Vp8LStreaks();

    if (trivialAtEnd)
    {
        // Palettization turns an indexed pixel into 0xff000000 | (pixel << 8) in
        // BundleColorMap, so the single non-zero value lives at index 0 or length-1.
        // BitsEntropyRefine is 0 for a one-value histogram; only FinalHuffmanCost matters.
        streaks.Streaks[1][0] = 1;          // the lone non-zero entry
        streaks.Counts[0] = 1;              // one zero streak...
        streaks.Streaks[0][1] = length - 1; // ...covering everything else
        return streaks.FinalHuffmanCost();
    }

    var entropy = new Vp8LBitEntropy();
    if (isXUsed && isYUsed)
    {
        entropy.GetCombinedEntropyUnrefined(x, y, length, streaks);
    }
    else if (isXUsed)
    {
        entropy.GetEntropyUnrefined(x, length, streaks);
    }
    else if (isYUsed)
    {
        entropy.GetEntropyUnrefined(y, length, streaks);
    }
    else
    {
        // Neither population is used: cost a single all-zero streak.
        streaks.Counts[0] = 1;
        streaks.Streaks[0][length > 3 ? 1 : 0] = length;
        entropy.Init();
    }

    return entropy.BitsEntropyRefine() + streaks.FinalHuffmanCost();
}
/// <summary>
/// Returns the extra-bit cost of the combined populations 'x' and 'y':
/// symbol i (read at offset i + 2) carries (i >> 1) extra bits per occurrence.
/// </summary>
private static double ExtraCostCombined(Span<uint> x, Span<uint> y, int length)
{
    double total = 0.0d;
    for (int i = 2; i < length - 2; i++)
    {
        int combined = (int)(x[i + 2] + y[i + 2]);
        total += (i >> 1) * combined;
    }

    return total;
}
/// <summary>
/// Get the symbol entropy for the distribution 'population'.
/// </summary>
@ -194,13 +497,15 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
var stats = new Vp8LStreaks();
bitEntropy.BitsEntropyUnrefined(population, length, stats);
trivialSym = (bitEntropy.NoneZeros == 1) ? bitEntropy.NoneZeroCode : NonTrivialSym;
// The histogram is used if there is at least one non-zero streak.
isUsed = stats.Streaks[1][0] != 0 || stats.Streaks[1][1] != 0;
return bitEntropy.BitsEntropyRefine() + stats.FinalHuffmanCost();
}
private static double ExtraCost(Span<uint> population, int length)
private static double ExtraCost(Span<uint> population, int length)
{
double cost = 0.0d;
for (int i = 2; i < length - 2; ++i)
@ -211,9 +516,12 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
return cost;
}
public static int HistogramNumCodes(int paletteCodeBits)
private static void AddVector(uint[] a, uint[] b, uint[] output, int size)
{
return WebPConstants.NumLiteralCodes + WebPConstants.NumLengthCodes + ((paletteCodeBits > 0) ? (1 << paletteCodeBits) : 0);
for (int i = 0; i < size; i++)
{
output[i] = a[i] + b[i];
}
}
}
}

3
src/ImageSharp/Formats/WebP/Lossless/Vp8LStreaks.cs

@ -5,6 +5,9 @@ namespace SixLabors.ImageSharp.Formats.WebP.Lossless
{
internal class Vp8LStreaks
{
/// <summary>
/// Initializes a new instance of the <see cref="Vp8LStreaks"/> class.
/// </summary>
public Vp8LStreaks()
{
this.Counts = new int[2];

46
src/ImageSharp/Formats/WebP/WebPEncoderCore.cs

@ -5,8 +5,8 @@ using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
using SixLabors.ImageSharp.Advanced;
using SixLabors.ImageSharp.Formats.Jpeg.Components.Decoder;
using SixLabors.ImageSharp.Formats.WebP.BitWriter;
using SixLabors.ImageSharp.Formats.WebP.Lossless;
using SixLabors.ImageSharp.Memory;
@ -211,11 +211,15 @@ namespace SixLabors.ImageSharp.Formats.WebP
private void EncodeImage(Span<uint> bgra, Vp8LHashChain hashChain, Vp8LBackwardRefs[] refsArray, int width, int height, int quality, bool useCache, int cacheBits, int histogramBits, int initBytePosition)
{
int lz77sTypesToTrySize = 1; // TODO: harcoded for now.
int lz77sTypesToTrySize = 1; // TODO: hardcoded for now.
int[] lz77sTypesToTry = { 3 };
int histogramImageXySize = LosslessUtils.SubSampleSize(width, histogramBits) * LosslessUtils.SubSampleSize(height, histogramBits);
short[] histogramSymbols = new short[histogramImageXySize];
var histogramSymbols = new short[histogramImageXySize];
var huffTree = new HuffmanTree[3 * WebPConstants.CodeLengthCodes];
for (int i = 0; i < huffTree.Length; i++)
{
huffTree[i] = new HuffmanTree();
}
if (useCache)
{
@ -256,6 +260,10 @@ namespace SixLabors.ImageSharp.Formats.WebP
var histogramImageSize = histogramImage.Count;
var bitArraySize = 5 * histogramImageSize;
var huffmanCodes = new HuffmanTreeCode[bitArraySize];
for (int i = 0; i < huffmanCodes.Length; i++)
{
huffmanCodes[i] = new HuffmanTreeCode();
}
GetHuffBitLengthsAndCodes(histogramImage, huffmanCodes);
@ -306,6 +314,11 @@ namespace SixLabors.ImageSharp.Formats.WebP
}
var tokens = new HuffmanTreeToken[maxTokens];
for (int i = 0; i < tokens.Length; i++)
{
tokens[i] = new HuffmanTreeToken();
}
for (int i = 0; i < 5 * histogramImageSize; i++)
{
HuffmanTreeCode codes = huffmanCodes[i];
@ -347,7 +360,7 @@ namespace SixLabors.ImageSharp.Formats.WebP
/// <summary>
/// Applies the substract green transformation to the pixel data of the image.
/// </summary>
/// <param name="enc">The VP8 Encoder.</param>
/// <param name="enc">The VP8L Encoder.</param>
/// <param name="width">The width of the image.</param>
/// <param name="height">The height of the image.</param>
private void ApplySubtractGreen(Vp8LEncoder enc, int width, int height)
@ -517,32 +530,29 @@ namespace SixLabors.ImageSharp.Formats.WebP
private void StoreFullHuffmanCode(HuffmanTree[] huffTree, HuffmanTreeToken[] tokens, HuffmanTreeCode tree)
{
int numTokens;
int i;
byte[] codeLengthBitdepth = new byte[WebPConstants.CodeLengthCodes];
short[] codeLengthBitdepthSymbols = new short[WebPConstants.CodeLengthCodes];
var codeLengthBitDepth = new byte[WebPConstants.CodeLengthCodes];
var codeLengthBitDepthSymbols = new short[WebPConstants.CodeLengthCodes];
var huffmanCode = new HuffmanTreeCode();
huffmanCode.NumSymbols = WebPConstants.CodeLengthCodes;
huffmanCode.CodeLengths = codeLengthBitdepth;
huffmanCode.Codes = codeLengthBitdepthSymbols;
huffmanCode.CodeLengths = codeLengthBitDepth;
huffmanCode.Codes = codeLengthBitDepthSymbols;
this.bitWriter.PutBits(0, 1);
numTokens = HuffmanUtils.CreateCompressedHuffmanTree(tree, tokens);
uint[] histogram = new uint[WebPConstants.CodeLengthCodes + 1];
bool[] bufRle = new bool[WebPConstants.CodeLengthCodes + 1];
var numTokens = HuffmanUtils.CreateCompressedHuffmanTree(tree, tokens);
var histogram = new uint[WebPConstants.CodeLengthCodes + 1];
var bufRle = new bool[WebPConstants.CodeLengthCodes + 1];
for (i = 0; i < numTokens; i++)
{
histogram[tokens[i].Code]++;
}
HuffmanUtils.CreateHuffmanTree(histogram, 7, bufRle, huffTree, huffmanCode);
this.StoreHuffmanTreeOfHuffmanTreeToBitMask(codeLengthBitdepth);
this.StoreHuffmanTreeOfHuffmanTreeToBitMask(codeLengthBitDepth);
ClearHuffmanTreeIfOnlyOneSymbol(huffmanCode);
int trailingZeroBits = 0;
int trimmedLength = numTokens;
bool writeTrimmedLength;
int length;
i = numTokens;
while (i-- > 0)
{
@ -550,7 +560,7 @@ namespace SixLabors.ImageSharp.Formats.WebP
if (ix == 0 || ix == 17 || ix == 18)
{
trimmedLength--; // discount trailing zeros.
trailingZeroBits += codeLengthBitdepth[ix];
trailingZeroBits += codeLengthBitDepth[ix];
if (ix == 17)
{
trailingZeroBits += 3;
@ -566,8 +576,8 @@ namespace SixLabors.ImageSharp.Formats.WebP
}
}
writeTrimmedLength = trimmedLength > 1 && trailingZeroBits > 12;
length = writeTrimmedLength ? trimmedLength : numTokens;
var writeTrimmedLength = trimmedLength > 1 && trailingZeroBits > 12;
var length = writeTrimmedLength ? trimmedLength : numTokens;
this.bitWriter.PutBits((uint)(writeTrimmedLength ? 1 : 0), 1);
if (writeTrimmedLength)
{

Loading…
Cancel
Save