Browse Source

Implement Block Decoder

pull/2633/head
Ynse Hoornenborg 2 years ago
parent
commit
aa1b2992b2
  1. 5
      src/ImageSharp/Formats/Heif/Av1/Av1Decoder.cs
  2. 2
      src/ImageSharp/Formats/Heif/Av1/OpenBitstreamUnit/ObuReader.cs
  3. 19
      src/ImageSharp/Formats/Heif/Av1/Pipeline/Av1FrameDecoder.cs
  4. 6
      src/ImageSharp/Formats/Heif/Av1/Pipeline/Quantification/Av1InverseQuantizer.cs
  5. 2
      src/ImageSharp/Formats/Heif/Av1/Pipeline/Quantification/Av1QuantizationLookup.cs
  6. 10
      src/ImageSharp/Formats/Heif/Av1/Prediction/Av1DcFillPredictor.cs
  7. 10
      src/ImageSharp/Formats/Heif/Av1/Prediction/Av1DcLeftPredictor.cs
  8. 10
      src/ImageSharp/Formats/Heif/Av1/Prediction/Av1DcPredictor.cs
  9. 10
      src/ImageSharp/Formats/Heif/Av1/Prediction/Av1DcTopPredictor.cs
  10. 48
      src/ImageSharp/Formats/Heif/Av1/Prediction/Av1PredictionDecoder.cs
  11. 26
      src/ImageSharp/Formats/Heif/Av1/Prediction/Av1PredictorFactory.cs
  12. 8
      src/ImageSharp/Formats/Heif/Av1/Tiling/Av1SuperblockInfo.cs
  13. 134
      src/ImageSharp/Formats/Heif/Av1/Transform/Av1BlockDecoder.cs
  14. 2
      src/ImageSharp/Formats/Heif/Av1/Transform/Av1InverseTransformMath.cs
  15. 15
      src/ImageSharp/Formats/Heif/Av1/Transform/Av1InverseTransformer.cs
  16. 10
      tests/ImageSharp.Tests/Formats/Heif/Av1/ObuPrettyPrint.cs

5
src/ImageSharp/Formats/Heif/Av1/Av1Decoder.cs

@ -26,6 +26,8 @@ internal class Av1Decoder : IAv1TileReader
public Av1FrameInfo? FrameInfo { get; private set; }
public Av1FrameBuffer? FrameBuffer { get; private set; }
public void Decode(Span<byte> buffer)
{
Av1BitStreamReader reader = new(buffer);
@ -35,7 +37,8 @@ internal class Av1Decoder : IAv1TileReader
Guard.NotNull(this.FrameHeader, nameof(this.FrameHeader));
this.FrameInfo = this.tileReader.FrameInfo;
this.frameDecoder = new(this.SequenceHeader, this.FrameHeader, this.FrameInfo);
this.FrameBuffer = new(this.configuration, this.SequenceHeader, this.SequenceHeader.ColorConfig.GetColorFormat(), false);
this.frameDecoder = new(this.SequenceHeader, this.FrameHeader, this.FrameInfo, this.FrameBuffer);
this.frameDecoder.DecodeFrame();
}

2
src/ImageSharp/Formats/Heif/Av1/OpenBitstreamUnit/ObuReader.cs

@ -1092,7 +1092,7 @@ internal class ObuReader
frameHeader.SegmentationParameters.QMLevel[2] = new int[Av1Constants.MaxSegmentCount];
for (int segmentId = 0; segmentId < Av1Constants.MaxSegmentCount; segmentId++)
{
int qIndex = QuantizationLookup.GetQIndex(frameHeader.SegmentationParameters, segmentId, frameHeader.QuantizationParameters.BaseQIndex);
int qIndex = Av1QuantizationLookup.GetQIndex(frameHeader.SegmentationParameters, segmentId, frameHeader.QuantizationParameters.BaseQIndex);
frameHeader.QuantizationParameters.QIndex[segmentId] = qIndex;
frameHeader.LosslessArray[segmentId] = qIndex == 0 &&
frameHeader.QuantizationParameters.DeltaQDc[(int)Av1Plane.Y] == 0 &&

19
src/ImageSharp/Formats/Heif/Av1/Pipeline/Av1FrameDecoder.cs

@ -13,16 +13,20 @@ internal class Av1FrameDecoder
private readonly ObuSequenceHeader sequenceHeader;
private readonly ObuFrameHeader frameHeader;
private readonly Av1FrameInfo frameInfo;
private readonly Av1FrameBuffer frameBuffer;
private readonly Av1InverseQuantizer inverseQuantizer;
private readonly Av1DeQuantizationContext deQuants;
private readonly Av1BlockDecoder blockDecoder;
public Av1FrameDecoder(ObuSequenceHeader sequenceHeader, ObuFrameHeader frameHeader, Av1FrameInfo frameInfo)
public Av1FrameDecoder(ObuSequenceHeader sequenceHeader, ObuFrameHeader frameHeader, Av1FrameInfo frameInfo, Av1FrameBuffer frameBuffer)
{
this.sequenceHeader = sequenceHeader;
this.frameHeader = frameHeader;
this.frameInfo = frameInfo;
this.frameBuffer = frameBuffer;
this.inverseQuantizer = new(sequenceHeader, frameHeader);
this.deQuants = new();
this.blockDecoder = new(this.sequenceHeader, this.frameHeader, this.frameInfo, this.frameBuffer);
}
public void DecodeFrame()
@ -86,18 +90,19 @@ internal class Av1FrameDecoder
Av1SuperblockInfo superblockInfo = this.frameInfo.GetSuperblock(new Point(superblockColumn, superblockRow));
Point modeInfoPosition = new Point(modeInfoColumn, modeInfoRow);
this.DecodeSuperblock(modeInfoPosition, superblockInfo);
Point modeInfoPosition = new(modeInfoColumn, modeInfoRow);
this.DecodeSuperblock(modeInfoPosition, superblockInfo, new Av1TileInfo(tileRow, tileColumn, this.frameHeader));
}
}
private void DecodeSuperblock(Point modeInfoPosition, Av1SuperblockInfo superblockInfo)
private void DecodeSuperblock(Point modeInfoPosition, Av1SuperblockInfo superblockInfo, Av1TileInfo tileInfo)
{
this.blockDecoder.UpdateSuperblock(superblockInfo);
this.inverseQuantizer.UpdateDequant(this.deQuants, superblockInfo);
DecodePartition(modeInfoPosition, superblockInfo);
this.DecodePartition(modeInfoPosition, superblockInfo, tileInfo);
}
private static void DecodePartition(Point modeInfoPosition, Av1SuperblockInfo superblockInfo)
private void DecodePartition(Point modeInfoPosition, Av1SuperblockInfo superblockInfo, Av1TileInfo tileInfo)
{
Av1BlockModeInfo modeInfo = superblockInfo.GetModeInfo(modeInfoPosition);
@ -107,7 +112,7 @@ internal class Av1FrameDecoder
Av1BlockSize subSize = modeInfo.BlockSize;
Point globalPosition = new(modeInfoPosition.X, modeInfoPosition.Y);
globalPosition.Offset(subPosition);
Av1BlockDecoder.DecodeBlock(modeInfo, globalPosition, subSize, superblockInfo);
this.blockDecoder.DecodeBlock(modeInfo, globalPosition, subSize, superblockInfo, tileInfo);
}
}

6
src/ImageSharp/Formats/Heif/Av1/Pipeline/Quantification/Av1InverseQuantizer.cs

@ -64,15 +64,15 @@ internal class Av1InverseQuantizer
{
for (int i = 0; i < Av1Constants.MaxSegmentCount; i++)
{
int currentQIndex = QuantizationLookup.GetQIndex(this.frameHeader.SegmentationParameters, i, superblockInfo.SuperblockDeltaQ);
int currentQIndex = Av1QuantizationLookup.GetQIndex(this.frameHeader.SegmentationParameters, i, superblockInfo.SuperblockDeltaQ);
for (Av1Plane plane = 0; (int)plane < Av1Constants.MaxPlanes; plane++)
{
int dcDeltaQ = this.frameHeader.QuantizationParameters.DeltaQDc[(int)plane];
int acDeltaQ = this.frameHeader.QuantizationParameters.DeltaQAc[(int)plane];
this.deQuantsDeltaQ.SetDc(i, plane, QuantizationLookup.GetDcQuant(currentQIndex, dcDeltaQ, bitDepth));
this.deQuantsDeltaQ.SetAc(i, plane, QuantizationLookup.GetAcQuant(currentQIndex, acDeltaQ, bitDepth));
this.deQuantsDeltaQ.SetDc(i, plane, Av1QuantizationLookup.GetDcQuant(currentQIndex, dcDeltaQ, bitDepth));
this.deQuantsDeltaQ.SetAc(i, plane, Av1QuantizationLookup.GetAcQuant(currentQIndex, acDeltaQ, bitDepth));
}
}
}

2
src/ImageSharp/Formats/Heif/Av1/Pipeline/Quantification/QuantizationLookup.cs → src/ImageSharp/Formats/Heif/Av1/Pipeline/Quantification/Av1QuantizationLookup.cs

@ -5,7 +5,7 @@ using SixLabors.ImageSharp.Formats.Heif.Av1.OpenBitstreamUnit;
namespace SixLabors.ImageSharp.Formats.Heif.Av1.Pipeline.Quantification;
internal class QuantizationLookup
internal class Av1QuantizationLookup
{
// Coefficient scaling and quantization with AV1 TX are tailored to
// the AV1 TX transforms. Regardless of the bit-depth of the input,

10
src/ImageSharp/Formats/Heif/Av1/Prediction/Av1DcFillPredictor.cs

@ -2,6 +2,7 @@
// Licensed under the Six Labors Split License.
using System.Runtime.CompilerServices;
using SixLabors.ImageSharp.Formats.Heif.Av1.Transform;
namespace SixLabors.ImageSharp.Formats.Heif.Av1.Prediction;
@ -16,6 +17,15 @@ internal class Av1DcFillPredictor : IAv1Predictor
this.blockHeight = (uint)blockSize.Height;
}
public Av1DcFillPredictor(Av1TransformSize transformSize)
{
this.blockWidth = (uint)transformSize.GetWidth();
this.blockHeight = (uint)transformSize.GetHeight();
}
public static void PredictScalar(Av1TransformSize transformSize, ref byte destination, nuint stride, ref byte above, ref byte left)
=> new Av1DcFillPredictor(transformSize).PredictScalar(ref destination, stride, ref above, ref left);
public void PredictScalar(ref byte destination, nuint stride, ref byte above, ref byte left)
{
const byte expectedDc = 0x80;

10
src/ImageSharp/Formats/Heif/Av1/Prediction/Av1DcLeftPredictor.cs

@ -2,6 +2,7 @@
// Licensed under the Six Labors Split License.
using System.Runtime.CompilerServices;
using SixLabors.ImageSharp.Formats.Heif.Av1.Transform;
namespace SixLabors.ImageSharp.Formats.Heif.Av1.Prediction;
@ -16,6 +17,15 @@ internal class Av1DcLeftPredictor : IAv1Predictor
this.blockHeight = (uint)blockSize.Height;
}
public Av1DcLeftPredictor(Av1TransformSize transformSize)
{
this.blockWidth = (uint)transformSize.GetWidth();
this.blockHeight = (uint)transformSize.GetHeight();
}
public static void PredictScalar(Av1TransformSize transformSize, ref byte destination, nuint stride, ref byte above, ref byte left)
=> new Av1DcLeftPredictor(transformSize).PredictScalar(ref destination, stride, ref above, ref left);
public void PredictScalar(ref byte destination, nuint stride, ref byte above, ref byte left)
{
int sum = 0;

10
src/ImageSharp/Formats/Heif/Av1/Prediction/Av1DcPredictor.cs

@ -2,6 +2,7 @@
// Licensed under the Six Labors Split License.
using System.Runtime.CompilerServices;
using SixLabors.ImageSharp.Formats.Heif.Av1.Transform;
namespace SixLabors.ImageSharp.Formats.Heif.Av1.Prediction;
@ -16,6 +17,15 @@ internal class Av1DcPredictor : IAv1Predictor
this.blockHeight = (uint)blockSize.Height;
}
public Av1DcPredictor(Av1TransformSize transformSize)
{
this.blockWidth = (uint)transformSize.GetWidth();
this.blockHeight = (uint)transformSize.GetHeight();
}
public static void PredictScalar(Av1TransformSize transformSize, ref byte destination, nuint stride, ref byte above, ref byte left)
=> new Av1DcPredictor(transformSize).PredictScalar(ref destination, stride, ref above, ref left);
public void PredictScalar(ref byte destination, nuint stride, ref byte above, ref byte left)
{
int sum = 0;

10
src/ImageSharp/Formats/Heif/Av1/Prediction/Av1DcTopPredictor.cs

@ -2,6 +2,7 @@
// Licensed under the Six Labors Split License.
using System.Runtime.CompilerServices;
using SixLabors.ImageSharp.Formats.Heif.Av1.Transform;
namespace SixLabors.ImageSharp.Formats.Heif.Av1.Prediction;
@ -16,6 +17,15 @@ internal class Av1DcTopPredictor : IAv1Predictor
this.blockHeight = (uint)blockSize.Height;
}
public Av1DcTopPredictor(Av1TransformSize transformSize)
{
this.blockWidth = (uint)transformSize.GetWidth();
this.blockHeight = (uint)transformSize.GetHeight();
}
public static void PredictScalar(Av1TransformSize transformSize, ref byte destination, nuint stride, ref byte above, ref byte left)
=> new Av1DcTopPredictor(transformSize).PredictScalar(ref destination, stride, ref above, ref left);
public void PredictScalar(ref byte destination, nuint stride, ref byte above, ref byte left)
{
int sum = 0;

48
src/ImageSharp/Formats/Heif/Av1/Prediction/PredictionDecoder.cs → src/ImageSharp/Formats/Heif/Av1/Prediction/Av1PredictionDecoder.cs

@ -10,7 +10,7 @@ using SixLabors.ImageSharp.Memory;
namespace SixLabors.ImageSharp.Formats.Heif.Av1.Prediction;
internal class PredictionDecoder
internal class Av1PredictionDecoder
{
private const int MaxUpsampleSize = 16;
@ -18,49 +18,29 @@ internal class PredictionDecoder
private readonly ObuFrameHeader frameHeader;
private readonly bool is16BitPipeline;
public PredictionDecoder(ObuSequenceHeader sequenceHeader, ObuFrameHeader frameHeader, bool is16BitPipeline)
public Av1PredictionDecoder(ObuSequenceHeader sequenceHeader, ObuFrameHeader frameHeader, bool is16BitPipeline)
{
this.sequenceHeader = sequenceHeader;
this.frameHeader = frameHeader;
this.is16BitPipeline = is16BitPipeline;
}
public void DecodeFrame(
public void Decode(
Av1PartitionInfo partitionInfo,
Av1Plane plane,
Av1TransformSize transformSize,
Av1TileInfo tileInfo,
Av1FrameBuffer frameBuffer,
Span<byte> pixelBuffer,
int pixelStride,
Av1BitDepth bitDepth,
int blockModeInfoColumnOffset,
int blockModeInfoRowOffset)
{
Buffer2D<byte>? pixelBuffer = null;
switch (plane)
{
case Av1Plane.Y:
pixelBuffer = frameBuffer.BufferY;
break;
case Av1Plane.U:
pixelBuffer = frameBuffer.BufferCb;
break;
case Av1Plane.V:
pixelBuffer = frameBuffer.BufferCr;
break;
default:
break;
}
if (pixelBuffer == null)
{
return;
}
int bytesPerPixel = (bitDepth == Av1BitDepth.EightBit && !this.is16BitPipeline) ? 2 : 1;
ref byte pixelRef = ref pixelBuffer[frameBuffer.StartPosition.X, frameBuffer.StartPosition.Y];
ref byte pixelRef = ref pixelBuffer[0];
ref byte topNeighbor = ref pixelRef;
ref byte leftNeighbor = ref pixelRef;
int stride = frameBuffer.BufferY!.Width * bytesPerPixel;
int stride = pixelStride * bytesPerPixel;
topNeighbor = Unsafe.Subtract(ref topNeighbor, stride);
leftNeighbor = Unsafe.Subtract(ref leftNeighbor, 1);
@ -111,7 +91,7 @@ internal class PredictionDecoder
bitDepth);
}
private void PredictChromaFromLumaBlock(Av1PartitionInfo partitionInfo, Av1ChromaFromLumaContext? chromaFromLumaContext, ref Buffer2D<byte> pixelBuffer, int stride, Av1TransformSize transformSize, Av1Plane plane)
private void PredictChromaFromLumaBlock(Av1PartitionInfo partitionInfo, Av1ChromaFromLumaContext? chromaFromLumaContext, ref Span<byte> pixelBuffer, int stride, Av1TransformSize transformSize, Av1Plane plane)
{
Av1BlockModeInfo modeInfo = partitionInfo.ModeInfo;
bool isChromaFromLumaAllowedFlag = IsChromaFromLumaAllowedWithFrameHeader(partitionInfo, this.sequenceHeader.ColorConfig, this.frameHeader);
@ -148,7 +128,7 @@ internal class PredictionDecoder
}
ChromaFromLumaPredict(
chromaFromLumaContext.Q3Buffer!,
chromaFromLumaContext.Q3Buffer!.DangerousGetSingleSpan(),
pixelBuffer,
stride,
pixelBuffer,
@ -196,7 +176,7 @@ internal class PredictionDecoder
return Av1Math.RoundPowerOf2Signed(scaledLumaQ6, 6);
}
private static void ChromaFromLumaPredict(Buffer2D<short> predictedBufferQ3, Buffer2D<byte> predictedBuffer, int predictedStride, Buffer2D<byte> destinationBuffer, int destinationStride, int alphaQ3, Av1BitDepth bitDepth, int width, int height)
private static void ChromaFromLumaPredict(Span<short> predictedBufferQ3, Span<byte> predictedBuffer, int predictedStride, Span<byte> destinationBuffer, int destinationStride, int alphaQ3, Av1BitDepth bitDepth, int width, int height)
{
// TODO: Make SIMD variant of this method.
int maxPixelValue = (1 << bitDepth.GetBitCount()) - 1;
@ -204,9 +184,13 @@ internal class PredictionDecoder
{
for (int i = 0; i < width; i++)
{
int alphaQ0 = GetScaledLumaQ0(alphaQ3, predictedBufferQ3[i, j]);
destinationBuffer[i, j] = (byte)Av1Math.Clamp(alphaQ0 + predictedBuffer[i, j], 0, maxPixelValue);
int alphaQ0 = GetScaledLumaQ0(alphaQ3, predictedBufferQ3[i]);
destinationBuffer[i] = (byte)Av1Math.Clamp(alphaQ0 + predictedBuffer[i], 0, maxPixelValue);
}
destinationBuffer = destinationBuffer[width..];
predictedBuffer = predictedBuffer[width..];
predictedBufferQ3 = predictedBufferQ3[width..];
}
}

26
src/ImageSharp/Formats/Heif/Av1/Prediction/Av1PredictorFactory.cs

@ -8,7 +8,31 @@ namespace SixLabors.ImageSharp.Formats.Heif.Av1.Prediction;
internal class Av1PredictorFactory
{
internal static void DcPredictor(bool v1, bool v2, Av1TransformSize transformSize, ref byte destination, nuint destinationStride, Span<byte> aboveRow, Span<byte> leftColumn) => throw new NotImplementedException();
internal static void DcPredictor(bool hasLeft, bool hasAbove, Av1TransformSize transformSize, ref byte destination, nuint destinationStride, Span<byte> aboveRow, Span<byte> leftColumn)
{
    // Dispatch to the DC predictor variant matching the available neighbors:
    // both edges, left only, top only, or neither (fill with a constant DC value).
    switch (hasLeft, hasAbove)
    {
        case (true, true):
            Av1DcPredictor.PredictScalar(transformSize, ref destination, destinationStride, ref aboveRow[0], ref leftColumn[0]);
            break;
        case (true, false):
            Av1DcLeftPredictor.PredictScalar(transformSize, ref destination, destinationStride, ref aboveRow[0], ref leftColumn[0]);
            break;
        case (false, true):
            Av1DcTopPredictor.PredictScalar(transformSize, ref destination, destinationStride, ref aboveRow[0], ref leftColumn[0]);
            break;
        default:
            Av1DcFillPredictor.PredictScalar(transformSize, ref destination, destinationStride, ref aboveRow[0], ref leftColumn[0]);
            break;
    }
}
internal static void DirectionalPredictor(ref byte destination, nuint destinationStride, Av1TransformSize transformSize, Span<byte> aboveRow, Span<byte> leftColumn, bool upsampleAbove, bool upsampleLeft, int angle) => throw new NotImplementedException();

8
src/ImageSharp/Formats/Heif/Av1/Tiling/Av1SuperblockInfo.cs

@ -43,4 +43,12 @@ internal class Av1SuperblockInfo
public ref Av1TransformInfo GetTransformInfoUv() => ref this.frameInfo.GetSuperblockTransformUv(this.Position);
public Av1BlockModeInfo GetModeInfo(Point index) => this.frameInfo.GetModeInfo(this.Position, index);
public Span<int> GetCoefficients(Av1Plane plane)
{
    // Return the coefficient storage backing the requested plane;
    // any value outside Y/U/V yields an empty span.
    if (plane == Av1Plane.Y)
    {
        return this.CoefficientsY;
    }

    if (plane == Av1Plane.U)
    {
        return this.CoefficientsU;
    }

    return plane == Av1Plane.V ? this.CoefficientsV : [];
}
}

134
src/ImageSharp/Formats/Heif/Av1/Transform/Av1BlockDecoder.cs

@ -3,6 +3,8 @@
using System.Runtime.CompilerServices;
using SixLabors.ImageSharp.Formats.Heif.Av1.OpenBitstreamUnit;
using SixLabors.ImageSharp.Formats.Heif.Av1.Pipeline.Quantification;
using SixLabors.ImageSharp.Formats.Heif.Av1.Prediction;
using SixLabors.ImageSharp.Formats.Heif.Av1.Tiling;
namespace SixLabors.ImageSharp.Formats.Heif.Av1.Transform;
@ -17,20 +19,41 @@ internal class Av1BlockDecoder
private readonly Av1FrameBuffer frameBuffer;
private readonly bool isLoopFilterEnabled;
private readonly int[] currentCoefficientIndex;
/// <summary>
/// Initializes a new instance of the <see cref="Av1BlockDecoder"/> class.
/// </summary>
/// <param name="sequenceHeader">Sequence header of the bitstream being decoded.</param>
/// <param name="frameHeader">Header of the frame currently being decoded.</param>
/// <param name="frameInfo">Per-frame mode and transform information.</param>
/// <param name="frameBuffer">Destination buffer receiving reconstructed pixels.</param>
public Av1BlockDecoder(ObuSequenceHeader sequenceHeader, ObuFrameHeader frameHeader, Av1FrameInfo frameInfo, Av1FrameBuffer frameBuffer)
{
    this.sequenceHeader = sequenceHeader;
    this.frameHeader = frameHeader;
    this.frameInfo = frameInfo;
    this.frameBuffer = frameBuffer;

    // Luma coefficient area of one superblock: width * height, both 2^SuperblockSizeLog2.
    int ySize = (1 << this.sequenceHeader.SuperblockSizeLog2) * (1 << this.sequenceHeader.SuperblockSizeLog2);

    // NOTE(review): each chroma plane is quartered (>> 2) when its single
    // subsampling flag is set, but subsampling in only one dimension would
    // halve, not quarter, the plane — confirm against the SVT reference.
    int inverseQuantizationSize = ySize +
    (this.sequenceHeader.ColorConfig.SubSamplingX ? ySize >> 2 : ySize) +
    (this.sequenceHeader.ColorConfig.SubSamplingY ? ySize >> 2 : ySize);
    this.CurrentInverseQuantizationCoefficients = new int[inverseQuantizationSize];
    this.isLoopFilterEnabled = false;

    // One coefficient read cursor per plane (Y, U, V); reset in UpdateSuperblock.
    this.currentCoefficientIndex = new int[3];
}
public int[] CurrentInverseQuantizationCoefficients { get; private set; }
public void UpdateSuperblock(Av1SuperblockInfo superblockInfo)
{
    // Rewind every per-plane coefficient cursor so decoding of the next
    // superblock starts reading its coefficient arrays from the beginning.
    for (int plane = 0; plane < this.currentCoefficientIndex.Length; plane++)
    {
        this.currentCoefficientIndex[plane] = 0;
    }
}
public static void DecodeBlock(Av1BlockModeInfo modeInfo, Point modeInfoPosition, Av1BlockSize blockSize, Av1SuperblockInfo superblockInfo)
/// <summary>
/// SVT: svt_aom_decode_block
/// </summary>
public void DecodeBlock(Av1BlockModeInfo modeInfo, Point modeInfoPosition, Av1BlockSize blockSize, Av1SuperblockInfo superblockInfo, Av1TileInfo tileInfo)
{
/*
ObuColorConfig colorConfig = this.sequenceHeader.ColorConfig;
Av1TransformType transformType;
Span<int> coefficients;
Av1TransformSize transformSize;
int transformUnitCount;
bool hasChroma = Av1TileReader.HasChroma(this.sequenceHeader, modeInfoPosition, blockSize);
@ -47,6 +70,8 @@ internal class Av1BlockDecoder
bool highBitDepth = false;
bool is16BitsPipeline = false;
int loopFilterStride = this.frameHeader.ModeInfoStride;
Av1PredictionDecoder predictionDecoder = new(this.sequenceHeader, this.frameHeader, false);
Av1InverseQuantizer inverseQuantizer = new(this.sequenceHeader, this.frameHeader);
for (int plane = 0; plane < colorConfig.PlaneCount; plane++)
{
@ -79,8 +104,9 @@ internal class Av1BlockDecoder
Guard.IsFalse(transformUnitCount == 0, nameof(transformUnitCount), "Must have at least a single transform unit to decode.");
this.DeriveBlockPointers(
this.reconstructionFrameBuffer,
// SVT: svt_aom_derive_blk_pointers
DeriveBlockPointers(
this.frameBuffer,
plane,
(modeInfoPosition.X >> subX) << Av1Constants.ModeInfoSizeLog2,
(modeInfoPosition.Y >> subY) << Av1Constants.ModeInfoSizeLog2,
@ -95,15 +121,17 @@ internal class Av1BlockDecoder
int transformBlockOffset;
transformSize = transformInfo.Size;
coefficients = this.currentCoefficients[plane];
Span<int> coefficients = superblockInfo.GetCoefficients((Av1Plane)plane)[this.currentCoefficientIndex[plane]..];
transformBlockOffset = ((transformInfo.OffsetY * reconstructionStride) + transformInfo.OffsetX) << Av1Constants.ModeInfoSizeLog2;
transformBlockReconstructionBuffer = blockReconstructionBuffer.Slice(transformBlockOffset << (highBitDepth ? 1 : 0));
if (this.isLoopFilterEnabled)
{
/*
if (plane != 2)
{
// SVT: svt_aom_fill_4x4_lf_param
Fill4x4LoopFilterParameters(
this.loopFilterContext,
(modeInfoPosition.X & (~subX)) + (transformInfo.OffsetX << subX),
@ -113,20 +141,21 @@ internal class Av1BlockDecoder
subX,
subY,
plane);
}
}*/
}
// if (!inter_block)
if (true)
{
PredictIntra(
// SVT: svt_av1_predict_intra
predictionDecoder.Decode(
partitionInfo,
plane,
(Av1Plane)plane,
transformSize,
tile,
tileInfo,
transformBlockReconstructionBuffer,
reconstructionStride,
this.reconstructionFrameBuffer.BitDepth,
this.frameBuffer.BitDepth,
transformInfo.OffsetX,
transformInfo.OffsetY);
}
@ -138,22 +167,23 @@ internal class Av1BlockDecoder
Span<int> quantizationCoefficients = this.CurrentInverseQuantizationCoefficients;
int inverseQuantizationSize = transformSize.GetWidth() * transformSize.GetHeight();
quantizationCoefficients[..inverseQuantizationSize].Clear();
this.CurrentInverseQuantizationCoefficients = quantizationCoefficients[inverseQuantizationSize..];
transformType = transformInfo.Type;
numberOfCoefficients = InverseQuantize(
partitionInfo, modeInfo, coefficients, quantizationCoefficients, transformType, transformSize, plane);
// SVT: svt_aom_inverse_quantize
numberOfCoefficients = inverseQuantizer.InverseQuantize(
modeInfo, coefficients, quantizationCoefficients, transformType, transformSize, (Av1Plane)plane);
if (numberOfCoefficients != 0)
{
this.CurrentCoefficients[plane] += numberOfCoefficients + 1;
this.currentCoefficientIndex[plane] += numberOfCoefficients + 1;
if (this.reconstructionFrameBuffer.BitDepth == Av1BitDepth.EightBit && !is16BitsPipeline)
if (this.frameBuffer.BitDepth == Av1BitDepth.EightBit && !is16BitsPipeline)
{
InverseTransformReconstruction8Bit(
// SVT: svt_aom_inv_transform_recon8bit
Av1InverseTransformer.Reconstruct8Bit(
quantizationCoefficients,
(Span<byte>)transformBlockReconstructionBuffer,
transformBlockReconstructionBuffer,
reconstructionStride,
(Span<byte>)transformBlockReconstructionBuffer,
transformBlockReconstructionBuffer,
reconstructionStride,
transformSize,
transformType,
@ -169,8 +199,10 @@ internal class Av1BlockDecoder
}
// Store Luma for CFL if required!
if (plane == (int)Av1Plane.Y && StoreChromeFromLumeRequired(colorConfig, partitionInfo, this.frameHeader.IsChroma))
if (plane == (int)Av1Plane.Y && StoreChromeFromLumeRequired(colorConfig, partitionInfo, hasChroma))
{
/*
// SVT: svt_cfl_store_tx
ChromaFromLumaStoreTransform(
partitionInfo,
this.chromaFromLumaContext,
@ -182,11 +214,71 @@ internal class Av1BlockDecoder
transformBlockReconstructionBuffer,
reconstructionStride,
is16BitsPipeline);
*/
}
// increment transform pointer
transformInfo = ref Unsafe.Add(ref transformInfo, 1);
}
}*/
}
}
/// <summary>
/// SVT: svt_aom_derive_blk_pointers.
/// Computes the reconstruction span and row stride for the requested plane,
/// offset to the block position (in pixels) inside the frame buffer.
/// </summary>
/// <param name="frameBuffer">Frame buffer holding the Y/Cb/Cr planes.</param>
/// <param name="plane">0 = Y, 1 = Cb, anything else = Cr.</param>
/// <param name="blockColumnInPixels">Block left edge, in pixels of this plane.</param>
/// <param name="blockRowInPixels">Block top edge, in pixels of this plane.</param>
/// <param name="blockReconstructionBuffer">Receives the span starting at the block.</param>
/// <param name="reconstructionStride">Receives the plane's row stride in pixels.</param>
/// <param name="subX">Horizontal chroma subsampling shift.</param>
/// <param name="subY">Vertical chroma subsampling shift.</param>
private static void DeriveBlockPointers(Av1FrameBuffer frameBuffer, int plane, int blockColumnInPixels, int blockRowInPixels, out Span<byte> blockReconstructionBuffer, out int reconstructionStride, int subX, int subY)
{
    int blockOffset;
    if (plane == 0)
    {
        // Luma plane: the frame origin is not subsampled.
        blockOffset = ((frameBuffer.OriginY + blockRowInPixels) * frameBuffer.BufferY!.Width) +
            (frameBuffer.OriginX + blockColumnInPixels);
        reconstructionStride = frameBuffer.BufferY!.Width;
    }
    else if (plane == 1)
    {
        blockOffset = (((frameBuffer.OriginY >> subY) + blockRowInPixels) * frameBuffer.BufferCb!.Width) +
            ((frameBuffer.OriginX >> subX) + blockColumnInPixels);
        reconstructionStride = frameBuffer.BufferCb!.Width;
    }
    else
    {
        blockOffset = (((frameBuffer.OriginY >> subY) + blockRowInPixels) * frameBuffer.BufferCr!.Width) +
            ((frameBuffer.OriginX >> subX) + blockColumnInPixels);
        reconstructionStride = frameBuffer.BufferCr!.Width;
    }

    if (frameBuffer.BitDepth != Av1BitDepth.EightBit || frameBuffer.Is16BitPipeline)
    {
        // 16-bit pipeline stores two bytes per pixel, so double the byte offset.
        // NOTE(review): the stride is not doubled here, matching the previous
        // behavior — confirm callers scale the stride for the 16-bit path.
        blockOffset *= 2;
    }

    // Plane selection is identical for the 8-bit and 16-bit paths; only the
    // offset above differs, so the duplicated branches were collapsed.
    blockReconstructionBuffer = plane switch
    {
        0 => frameBuffer.BufferY!.DangerousGetSingleSpan()[blockOffset..],
        1 => frameBuffer.BufferCb!.DangerousGetSingleSpan()[blockOffset..],
        _ => frameBuffer.BufferCr!.DangerousGetSingleSpan()[blockOffset..],
    };
}
private static bool StoreChromeFromLumeRequired(ObuColorConfig colorConfig, Av1PartitionInfo partitionInfo, bool hasChroma) => false;
}

2
src/ImageSharp/Formats/Heif/Av1/Transform/Av1InverseTransform.cs → src/ImageSharp/Formats/Heif/Av1/Transform/Av1InverseTransformMath.cs

@ -3,7 +3,7 @@
namespace SixLabors.ImageSharp.Formats.Heif.Av1.Quantization;
internal static class Av1InverseTransform
internal static class Av1InverseTransformMath
{
public static readonly int[,] AcQLookup = new int[3, 256]
{

15
src/ImageSharp/Formats/Heif/Av1/Transform/Av1InverseTransformer.cs

@ -0,0 +1,15 @@
// Copyright (c) Six Labors.
// Licensed under the Six Labors Split License.
namespace SixLabors.ImageSharp.Formats.Heif.Av1.Transform;
/// <summary>
/// Applies the inverse transform to de-quantized coefficients, writing the
/// reconstructed pixels into the frame buffer. Currently a placeholder.
/// </summary>
internal class Av1InverseTransformer
{
    /// <summary>
    /// Reconstructs an 8-bit block from de-quantized transform coefficients.
    /// SVT: svt_aom_inv_transform_recon8bit.
    /// </summary>
    /// <param name="coefficientsBuffer">De-quantized transform coefficients.</param>
    /// <param name="transformBlockReconstructionBuffer1">First reconstruction buffer (prediction source).</param>
    /// <param name="reconstructionStride1">Row stride of the first buffer, in pixels.</param>
    /// <param name="transformBlockReconstructionBuffer2">Second reconstruction buffer (destination).</param>
    /// <param name="reconstructionStride2">Row stride of the second buffer, in pixels.</param>
    /// <param name="transformSize">Size of the transform block.</param>
    /// <param name="transformType">Transform kernel to invert.</param>
    /// <param name="plane">Plane index the block belongs to.</param>
    /// <param name="numberOfCoefficients">Count of non-zero coefficients.</param>
    /// <param name="isLossless">Whether the segment is coded losslessly.</param>
    /// <exception cref="NotImplementedException">Always; the inverse transform is not implemented yet.</exception>
    public static void Reconstruct8Bit(Span<int> coefficientsBuffer, Span<byte> transformBlockReconstructionBuffer1, int reconstructionStride1, Span<byte> transformBlockReconstructionBuffer2, int reconstructionStride2, Av1TransformSize transformSize, Av1TransformType transformType, int plane, int numberOfCoefficients, bool isLossless)
    {
        throw new NotImplementedException("Inverse transformation not implemented yet.");
    }
}

10
tests/ImageSharp.Tests/Formats/Heif/Av1/ObuPrettyPrint.cs

@ -9,7 +9,7 @@ namespace SixLabors.ImageSharp.Tests.Formats.Heif.Av1;
internal class ObuPrettyPrint
{
private static readonly char[] spaces = " ".ToCharArray();
private static readonly char[] Spaces = " ".ToCharArray();
public static string PrettyPrintProperties(object obj, int indent = 0)
{
@ -20,7 +20,7 @@ internal class ObuPrettyPrint
MemberInfo[] properties = obj.GetType().FindMembers(MemberTypes.Property, BindingFlags.Instance | BindingFlags.Public, null, null);
foreach (MemberInfo member in properties)
{
builder.Append(spaces, 0, indent);
builder.Append(Spaces, 0, indent);
if (member is PropertyInfo property)
{
builder.Append(property.Name);
@ -31,7 +31,7 @@ internal class ObuPrettyPrint
}
indent -= 2;
builder.Append(spaces, 0, indent);
builder.Append(Spaces, 0, indent);
builder.AppendLine("}");
return builder.ToString();
}
@ -46,7 +46,7 @@ internal class ObuPrettyPrint
{
builder.AppendLine("[");
indent += 2;
builder.Append(spaces, 0, indent);
builder.Append(Spaces, 0, indent);
Type elementType = value.GetType().GetElementType();
IList list = value as IList;
foreach (object item in list)
@ -55,7 +55,7 @@ internal class ObuPrettyPrint
}
indent -= 2;
builder.Append(spaces, 0, indent);
builder.Append(Spaces, 0, indent);
builder.AppendLine("]");
}
else if (value.GetType().IsClass)

Loading…
Cancel
Save