Merge pull request #696 from Simon04090/webp-lossless

WebP lossless
Harald Kuhr 2022-09-09 08:43:48 +02:00 committed by GitHub
commit e333c7d1b2
25 changed files with 1399 additions and 333 deletions


@ -0,0 +1,8 @@
package com.twelvemonkeys.imageio.plugins.webp;
public interface AlphaFiltering {
int NONE = 0;
int HORIZONTAL = 1;
int VERTICAL = 2;
int GRADIENT = 3;
}
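These constants select the prediction filter applied to ALPH chunk data before compression; decoding adds the predictor back modulo 256, which is what the reader change further down does. A minimal sketch of undoing HORIZONTAL filtering for one row of raw alpha bytes (class and method names are illustrative, not part of the plugin):

public class HorizontalAlphaFilterSketch {
    // Undo HORIZONTAL filtering within a row: each stored byte is the difference
    // to the sample on its left, so reconstruction is a running sum modulo 256.
    static void unfilterRow(byte[] row) {
        for (int x = 1; x < row.length; x++) {
            row[x] = (byte) ((row[x] + row[x - 1]) & 0xff);
        }
    }

    public static void main(String[] args) {
        byte[] row = {10, 5, 5, (byte) 0xfb}; // deltas: 10, +5, +5, -5
        unfilterRow(row);
        System.out.println(java.util.Arrays.toString(row)); // [10, 15, 20, 15]
    }
}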


@ -11,46 +11,127 @@ public final class LSBBitReader {
// TODO: Consider creating an ImageInputStream wrapper with the WebP implementation of readBit(s)?
private final ImageInputStream imageInput;
private int bitOffset = 64;
private long streamPosition = -1;
/**
* Pre buffers up to the next 8 Bytes in input.
* Contains valid bits in bits 63 to {@code bitOffset} (inclusive).
* Should always be refilled to have at least 56 valid bits (if possible)
*/
private long buffer;
public LSBBitReader(ImageInputStream imageInput) {
this.imageInput = imageInput;
}
/**
* Reads the specified number of bits from the stream in an LSB-first way and advances the bitOffset.
* The underlying ImageInputStream will be advanced to the first not (completely) read byte.
* Requesting more than 64 bits will advance the reader by the correct amount and return the lowest 64 bits of
* the read number
*
* @param bits the number of bits to read
* @return a signed long built from the requested bits (truncated to the low 64 bits)
* @throws IOException if an I/O error occurs
* @see LSBBitReader#peekBits
*/
public long readBits(int bits) throws IOException {
return readBits(bits, false);
}
/**
* Reads the specified number of bits from the buffer in an LSB-first way.
* Does not advance the bitOffset or the underlying input stream.
* As only 56 bits are buffered (in the worst case), peeking more than that is not possible without advancing the
* reader and is therefore disallowed.
*
* @param bits the number of bits to peek (max 56)
* @return a signed long built from the requested bits
* @throws IOException if an I/O error occurs
* @see LSBBitReader#readBits
*/
public long peekBits(int bits) throws IOException {
if (bits > 56) {
throw new IllegalArgumentException("Tried peeking over 56");
}
return readBits(bits, true);
}
//Driver
private long readBits(int bits, boolean peek) throws IOException {
if (bits <= 56) {
/*
Could eliminate if we never read from the underlying InputStream outside this class after the object is
created
*/
long inputStreamPosition = imageInput.getStreamPosition();
if (streamPosition != inputStreamPosition) {
//Need to reset buffer as stream was read in the meantime
resetBuffer();
}
long ret = (buffer >>> bitOffset) & ((1L << bits) - 1);
if (!peek) {
bitOffset += bits;
refillBuffer();
}
return ret;
}
else {
//FIXME Untested
long lower = readBits(56);
return (readBits(bits - 56) << (56)) | lower;
}
}
private void refillBuffer() throws IOException {
//Set to stream position consistent with buffered bytes
imageInput.seek(streamPosition + 8);
for (; bitOffset >= 8; bitOffset -= 8) {
try {
byte b = imageInput.readByte();
buffer >>>= 8;
streamPosition++;
buffer |= ((long) b << 56);
}
catch (EOFException e) {
imageInput.seek(streamPosition);
return;
}
}
/*
Reset to guarantee stream position consistent with returned bytes
Would not need to do this seeking around when the underlying ImageInputStream is never read from outside
this class after the object is created.
*/
imageInput.seek(streamPosition);
}
private void resetBuffer() throws IOException {
long inputStreamPosition = imageInput.getStreamPosition();
try {
buffer = imageInput.readLong();
bitOffset = 0;
streamPosition = inputStreamPosition;
imageInput.seek(inputStreamPosition);
}
catch (EOFException e) {
//Retry byte by byte
streamPosition = inputStreamPosition - 8;
bitOffset = 64;
refillBuffer();
}
}
//Left for backwards compatibility / Compatibility with ImageInputStream interface
public int readBit() throws IOException {
return (int) readBits(1);
}
}
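A small usage sketch of the LSB-first semantics described above (byte values chosen arbitrarily; the little-endian byte order and the zero padding up to 8 bytes mirror how the WebP reader feeds this class):

import com.twelvemonkeys.imageio.plugins.webp.LSBBitReader;

import javax.imageio.stream.MemoryCacheImageInputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.nio.ByteOrder;

public class LSBBitReaderSketch {
    public static void main(String[] args) throws IOException {
        // 0xB4 = 1011_0100; the reader hands out the low-order bits first.
        byte[] data = {(byte) 0xB4, 0x01, 0, 0, 0, 0, 0, 0};
        MemoryCacheImageInputStream input = new MemoryCacheImageInputStream(new ByteArrayInputStream(data));
        input.setByteOrder(ByteOrder.LITTLE_ENDIAN);
        LSBBitReader reader = new LSBBitReader(input);
        System.out.println(reader.readBits(3)); // 4  (binary 100, the lowest three bits of 0xB4)
        System.out.println(reader.readBits(5)); // 22 (binary 10110, the remaining bits of 0xB4)
        System.out.println(reader.readBits(8)); // 1  (the next byte)
    }
}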


@ -38,6 +38,7 @@ import java.awt.image.BufferedImage;
import java.awt.image.ColorConvertOp;
import java.awt.image.ColorModel;
import java.awt.image.DataBuffer;
import java.awt.image.Raster;
import java.awt.image.WritableRaster;
import java.io.IOException;
import java.nio.ByteOrder;
@ -141,6 +142,16 @@ final class WebPImageReader extends ImageReaderBase {
case WebP.CHUNK_ANIM:
// TODO: 32 bit bg color (hint!) + 16 bit loop count
// + expose bg color in std image metadata...
/*
int b = (int) lsbBitReader.readBits(8);
int g = (int) lsbBitReader.readBits(8);
int r = (int) lsbBitReader.readBits(8);
int a = (int) lsbBitReader.readBits(8);
Color bg = new Color(r, g, b, a);
short loopCount = (short) lsbBitReader.readBits(16);
*/
break;
case WebP.CHUNK_ANMF:
@ -153,7 +164,7 @@ final class WebPImageReader extends ImageReaderBase {
Rectangle bounds = new Rectangle(x, y, w, h);
// TODO: Expose duration/flags in image metadata
int duration = (int) lsbBitReader.readBits(24);
int flags = imageInput.readUnsignedByte(); // 6 bit reserved + blend mode + disposal mode
frames.add(new AnimationFrame(chunkLength, chunkStart, bounds, duration, flags));
@ -426,7 +437,7 @@ final class WebPImageReader extends ImageReaderBase {
AnimationFrame frame = frames.get(imageIndex);
imageInput.seek(frame.offset + 16);
opaqueAlpha(destination.getAlphaRaster());
readVP8Extended(destination, param, frame.offset + frame.length, frame.bounds.width, frame.bounds.height);
}
else {
imageInput.seek(header.offset + header.length);
@ -452,6 +463,11 @@ final class WebPImageReader extends ImageReaderBase {
}
private void readVP8Extended(BufferedImage destination, ImageReadParam param, long streamEnd) throws IOException {
readVP8Extended(destination, param, streamEnd, header.width, header.height);
}
private void readVP8Extended(BufferedImage destination, ImageReadParam param, long streamEnd, final int width,
final int height) throws IOException {
while (imageInput.getStreamPosition() < streamEnd) {
int nextChunk = imageInput.readInt();
long chunkLength = imageInput.readUnsignedInt();
@ -465,35 +481,7 @@ final class WebPImageReader extends ImageReaderBase {
switch (nextChunk) {
case WebP.CHUNK_ALPH:
readAlpha(destination, param, width, height);
break;
@ -503,7 +491,7 @@ final class WebPImageReader extends ImageReaderBase {
break;
case WebP.CHUNK_VP8L:
readVP8Lossless(RasterUtils.asByteRaster(destination.getRaster()), param, width, height);
break;
case WebP.CHUNK_ANIM:
@ -527,6 +515,107 @@ final class WebPImageReader extends ImageReaderBase {
}
}
private void readAlpha(BufferedImage destination, ImageReadParam param, final int width, final int height) throws IOException {
int reserved = (int) imageInput.readBits(2);
if (reserved != 0) {
// Spec says SHOULD be 0
processWarningOccurred(
String.format("Unexpected 'ALPH' chunk reserved value, expected 0: %d", reserved));
}
int preProcessing = (int) imageInput.readBits(2);
int filtering = (int) imageInput.readBits(2);
int compression = (int) imageInput.readBits(2);
if (DEBUG) {
System.out.println("preProcessing: " + preProcessing);
System.out.println("filtering: " + filtering);
System.out.println("compression: " + compression);
}
WritableRaster alphaRaster = destination.getAlphaRaster();
switch (compression) {
case 0:
readUncompressedAlpha(alphaRaster);
break;
case 1:
WritableRaster tempRaster = Raster.createInterleavedRaster(DataBuffer.TYPE_BYTE,
destination.getWidth(), destination.getHeight(), 4,
destination.getRaster().getBounds().getLocation());
//Simulate header
imageInput.seek(imageInput.getStreamPosition() - 5);
readVP8Lossless(tempRaster, param, width, height);
//Copy from green (band 1) in temp to alpha in destination
alphaRaster.setRect(tempRaster.createChild(0, 0, tempRaster.getWidth(),
tempRaster.getHeight(), 0, 0, new int[] {1}));
break;
default:
processWarningOccurred("Unknown WebP alpha compression: " + compression);
opaqueAlpha(alphaRaster);
break;
}
if (filtering != AlphaFiltering.NONE) {
for (int y = 0; y < destination.getHeight(); y++) {
for (int x = 0; x < destination.getWidth(); x++) {
int predictorAlpha = getPredictorAlpha(alphaRaster, filtering, y, x);
alphaRaster.setSample(x, y, 0, alphaRaster.getSample(x, y, 0) + predictorAlpha % 256);
}
}
}
}
private int getPredictorAlpha(WritableRaster alphaRaster, int filtering, int y, int x) {
switch (filtering) {
case AlphaFiltering.NONE:
return 0;
case AlphaFiltering.HORIZONTAL:
if (x == 0) {
if (y == 0) {
return 0;
}
else {
return alphaRaster.getSample(0, y - 1, 0);
}
}
else {
return alphaRaster.getSample(x - 1, y, 0);
}
case AlphaFiltering.VERTICAL:
if (y == 0) {
if (x == 0) {
return 0;
}
else {
return alphaRaster.getSample(x - 1, 0, 0);
}
}
else {
return alphaRaster.getSample(x, y - 1, 0);
}
case AlphaFiltering.GRADIENT:
if (x == 0 && y == 0) {
return 0;
}
else if (x == 0) {
return alphaRaster.getSample(0, y - 1, 0);
}
else if (y == 0) {
return alphaRaster.getSample(x - 1, 0, 0);
}
else {
int left = alphaRaster.getSample(x - 1, y, 0);
int top = alphaRaster.getSample(x, y - 1, 0);
int topLeft = alphaRaster.getSample(x - 1, y - 1, 0);
return Math.max(0, Math.min(left + top - topLeft, 255));
}
default:
processWarningOccurred("Unknown WebP alpha filtering: " + filtering);
return 0;
}
}
private void applyICCProfileIfNeeded(final BufferedImage destination) {
if (iccProfile != null) {
ColorModel colorModel = destination.getColorModel();
@ -565,8 +654,13 @@ final class WebPImageReader extends ImageReaderBase {
}
private void readVP8Lossless(final WritableRaster raster, final ImageReadParam param) throws IOException {
readVP8Lossless(raster, param, header.width, header.height);
}
private void readVP8Lossless(final WritableRaster raster, final ImageReadParam param,
final int width, final int height) throws IOException {
VP8LDecoder decoder = new VP8LDecoder(imageInput, DEBUG);
decoder.readVP8Lossless(raster, true, param, width, height);
}
private void readVP8(final WritableRaster raster, final ImageReadParam param) throws IOException {
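With the SPI change in the next file, lossless (VP8L) images are discovered through the regular ImageIO service lookup, so reading one needs no WebP-specific API. A minimal usage sketch (the file name is a placeholder; the WebP plugin is assumed to be on the class path):

import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;

public class ReadLosslessWebP {
    public static void main(String[] args) throws IOException {
        // "lossless.webp" stands in for any VP8L-encoded file.
        BufferedImage image = ImageIO.read(new File("lossless.webp"));
        System.out.println(image.getWidth() + "x" + image.getHeight()
                + ", alpha: " + image.getColorModel().hasAlpha());
    }
}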


@ -75,10 +75,8 @@ public final class WebPImageReaderSpi extends ImageReaderSpiBase {
int chunk = stream.readInt();
switch (chunk) {
case WebP.CHUNK_VP8L:
case WebP.CHUNK_VP8X:
case WebP.CHUNK_VP8_:
return true;
default:
@ -91,30 +89,6 @@ public final class WebPImageReaderSpi extends ImageReaderSpiBase {
}
}
@Override
public ImageReader createReaderInstance(final Object extension) {
return new WebPImageReader(this);


@ -32,16 +32,26 @@
package com.twelvemonkeys.imageio.plugins.webp.lossless;
import com.twelvemonkeys.imageio.plugins.webp.LSBBitReader;
import com.twelvemonkeys.imageio.plugins.webp.lossless.huffman.HuffmanCodeGroup;
import com.twelvemonkeys.imageio.plugins.webp.lossless.huffman.HuffmanInfo;
import com.twelvemonkeys.imageio.plugins.webp.lossless.transform.ColorIndexingTransform;
import com.twelvemonkeys.imageio.plugins.webp.lossless.transform.ColorTransform;
import com.twelvemonkeys.imageio.plugins.webp.lossless.transform.PredictorTransform;
import com.twelvemonkeys.imageio.plugins.webp.lossless.transform.SubtractGreenTransform;
import com.twelvemonkeys.imageio.plugins.webp.lossless.transform.Transform;
import com.twelvemonkeys.imageio.plugins.webp.lossless.transform.TransformType;
import javax.imageio.IIOException;
import javax.imageio.ImageReadParam;
import javax.imageio.stream.ImageInputStream;
import java.awt.*;
import java.awt.image.*;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static com.twelvemonkeys.imageio.util.RasterUtils.asByteRaster;
import static java.lang.Math.max;
/**
* VP8LDecoder.
@ -49,26 +59,48 @@ import static java.lang.Math.*;
* @author <a href="mailto:harald.kuhr@gmail.com">Harald Kuhr</a>
*/
public final class VP8LDecoder {
/**
* Used for decoding backward references
* Upper 4 bits are the y distance, lower 4 bits are 8 minus the x distance
*/
private final static byte[] DISTANCES = {
0x18, 0x07, 0x17, 0x19, 0x28, 0x06, 0x27, 0x29, 0x16, 0x1a,
0x26, 0x2a, 0x38, 0x05, 0x37, 0x39, 0x15, 0x1b, 0x36, 0x3a,
0x25, 0x2b, 0x48, 0x04, 0x47, 0x49, 0x14, 0x1c, 0x35, 0x3b,
0x46, 0x4a, 0x24, 0x2c, 0x58, 0x45, 0x4b, 0x34, 0x3c, 0x03,
0x57, 0x59, 0x13, 0x1d, 0x56, 0x5a, 0x23, 0x2d, 0x44, 0x4c,
0x55, 0x5b, 0x33, 0x3d, 0x68, 0x02, 0x67, 0x69, 0x12, 0x1e,
0x66, 0x6a, 0x22, 0x2e, 0x54, 0x5c, 0x43, 0x4d, 0x65, 0x6b,
0x32, 0x3e, 0x78, 0x01, 0x77, 0x79, 0x53, 0x5d, 0x11, 0x1f,
0x64, 0x6c, 0x42, 0x4e, 0x76, 0x7a, 0x21, 0x2f, 0x75, 0x7b,
0x31, 0x3f, 0x63, 0x6d, 0x52, 0x5e, 0x00, 0x74, 0x7c, 0x41,
0x4f, 0x10, 0x20, 0x62, 0x6e, 0x30, 0x73, 0x7d, 0x51, 0x5f,
0x40, 0x72, 0x7e, 0x61, 0x6f, 0x50, 0x71, 0x7f, 0x60, 0x70
};
private final ImageInputStream imageInput;
private final LSBBitReader lsbBitReader;
private final List<Transform> transforms = new ArrayList<>();
private ColorCache colorCache;
public VP8LDecoder(final ImageInputStream imageInput, final boolean debug) {
this.imageInput = imageInput;
lsbBitReader = new LSBBitReader(imageInput);
}
public void readVP8Lossless(final WritableRaster raster, final boolean topLevel, ImageReadParam param, int width,
int height) throws IOException {
//https://github.com/webmproject/libwebp/blob/666bd6c65483a512fe4c2eb63fbc198b6fb4fae4/src/dec/vp8l_dec.c#L1114
//Skip past already read parts of header (signature, width, height, alpha, version) 5 Bytes in total
if (topLevel) {
imageInput.seek(imageInput.getStreamPosition() + 5);
}
int xSize = width;
// Read transforms
while (topLevel && lsbBitReader.readBit() == 1) {
xSize = readTransform(xSize, height, transforms);
}
// Read color cache size
@ -81,82 +113,279 @@ public final class VP8LDecoder {
}
// Read Huffman codes
HuffmanInfo huffmanInfo = readHuffmanCodes(xSize, height, colorCacheBits, topLevel);
if (colorCacheBits > 0) {
colorCache = new ColorCache(colorCacheBits);
}
WritableRaster fullSizeRaster;
WritableRaster decodeRaster;
if (topLevel) {
Rectangle bounds = new Rectangle(width, height);
fullSizeRaster = getRasterForDecoding(raster, param, bounds);
//If multiple indices packed into one pixel xSize is different from raster width
decodeRaster = fullSizeRaster.createWritableChild(0, 0, xSize, height, 0, 0, null);
}
else {
//All recursive calls have Rasters of the correct sizes with origin (0, 0)
decodeRaster = fullSizeRaster = raster;
}
// Use the Huffman trees to decode the LZ77 encoded data.
decodeImage(decodeRaster, huffmanInfo, colorCache);
for (Transform transform : transforms) {
transform.applyInverse(fullSizeRaster);
}
if (fullSizeRaster != raster && param != null) {
//Copy into destination raster with settings applied
Rectangle sourceRegion = param.getSourceRegion();
int sourceXSubsampling = param.getSourceXSubsampling();
int sourceYSubsampling = param.getSourceYSubsampling();
int subsamplingXOffset = param.getSubsamplingXOffset();
int subsamplingYOffset = param.getSubsamplingYOffset();
Point destinationOffset = param.getDestinationOffset();
if (sourceRegion == null) {
sourceRegion = raster.getBounds();
}
if (sourceXSubsampling == 1 && sourceYSubsampling == 1) {
//Only apply offset (and limit to requested region)
raster.setRect(destinationOffset.x, destinationOffset.y, fullSizeRaster);
}
else {
//Manual copy, more efficient way might exist
byte[] rgba = new byte[4];
int xEnd = raster.getWidth() + raster.getMinX();
int yEnd = raster.getHeight() + raster.getMinY();
for (int xDst = destinationOffset.x, xSrc = sourceRegion.x + subsamplingXOffset;
xDst < xEnd; xDst++, xSrc += sourceXSubsampling) {
for (int yDst = destinationOffset.y, ySrc = sourceRegion.y + subsamplingYOffset;
yDst < yEnd; yDst++, ySrc += sourceYSubsampling) {
fullSizeRaster.getDataElements(xSrc, ySrc, rgba);
raster.setDataElements(xDst, yDst, rgba);
}
}
}
}
}
private WritableRaster getRasterForDecoding(WritableRaster raster, ImageReadParam param, Rectangle bounds) {
//If the ImageReadParam requires only a subregion of the image, and if the whole image does not fit into the
// Raster or subsampling is requested, we need a temporary Raster as we can only decode the whole image at once
boolean originSet = false;
if (param != null) {
if (param.getSourceRegion() != null && !param.getSourceRegion().contains(bounds) ||
param.getSourceXSubsampling() != 1 || param.getSourceYSubsampling() != 1) {
//Can't reuse existing
return Raster.createInterleavedRaster(DataBuffer.TYPE_BYTE, bounds.width, bounds.height,
4 * bounds.width, 4, new int[] {0, 1, 2, 3}, null);
}
else {
bounds.setLocation(param.getDestinationOffset());
originSet = true;
}
}
if (!raster.getBounds().contains(bounds)) {
//Can't reuse existing
return Raster.createInterleavedRaster(DataBuffer.TYPE_BYTE, bounds.width, bounds.height, 4 * bounds.width,
4, new int[] {0, 1, 2, 3}, null);
}
return originSet ?
//Recenter to (0, 0)
raster.createWritableChild(bounds.x, bounds.y, bounds.width, bounds.height, 0, 0, null) :
raster;
}
private void decodeImage(WritableRaster raster, HuffmanInfo huffmanInfo, ColorCache colorCache) throws IOException {
int width = raster.getWidth();
int height = raster.getHeight();
int huffmanMask = huffmanInfo.metaCodeBits == 0 ? -1 : ((1 << huffmanInfo.metaCodeBits) - 1);
HuffmanCodeGroup curCodeGroup = huffmanInfo.huffmanGroups[0];
byte[] rgba = new byte[4];
for (int y = 0; y < height; y++) {
for (int x = 0; x < width; x++) {
if ((x & huffmanMask) == 0 && huffmanInfo.huffmanMetaCodes != null) {
//Crossed border into new metaGroup
int index = huffmanInfo.huffmanMetaCodes.getSample(x >> huffmanInfo.metaCodeBits, y >> huffmanInfo.metaCodeBits, 0);
curCodeGroup = huffmanInfo.huffmanGroups[index];
}
short code = curCodeGroup.mainCode.readSymbol(lsbBitReader);
if (code < 256) { //Literal
decodeLiteral(raster, colorCache, curCodeGroup, rgba, y, x, code);
}
else if (code < 256 + 24) { //backward reference
int length = decodeBwRef(raster, colorCache, width, curCodeGroup, rgba, code, x, y);
//Decrement one because for loop already increments by one
x--;
y = y + ((x + length) / width);
x = (x + length) % width;
//Reset Huffman meta group
if (y < height && x < width && huffmanInfo.huffmanMetaCodes != null) {
int index = huffmanInfo.huffmanMetaCodes.getSample(x >> huffmanInfo.metaCodeBits, y >> huffmanInfo.metaCodeBits, 0);
curCodeGroup = huffmanInfo.huffmanGroups[index];
}
}
else { //colorCache
decodeCached(raster, colorCache, rgba, y, x, code);
}
}
}
}
private void decodeCached(WritableRaster raster, ColorCache colorCache, byte[] rgba, int y, int x, short code) {
int argb = colorCache.lookup(code - 256 - 24);
rgba[0] = (byte) ((argb >> 16) & 0xff);
rgba[1] = (byte) ((argb >> 8) & 0xff);
rgba[2] = (byte) (argb & 0xff);
rgba[3] = (byte) (argb >>> 24);
raster.setDataElements(x, y, rgba);
}
private void decodeLiteral(WritableRaster raster, ColorCache colorCache, HuffmanCodeGroup curCodeGroup, byte[] rgba, int y, int x, short code) throws IOException {
byte red = (byte) curCodeGroup.redCode.readSymbol(lsbBitReader);
byte blue = (byte) curCodeGroup.blueCode.readSymbol(lsbBitReader);
byte alpha = (byte) curCodeGroup.alphaCode.readSymbol(lsbBitReader);
rgba[0] = red;
rgba[1] = (byte) code;
rgba[2] = blue;
rgba[3] = alpha;
raster.setDataElements(x, y, rgba);
if (colorCache != null) {
colorCache.insert((alpha & 0xff) << 24 | (red & 0xff) << 16 | (code & 0xff) << 8 | (blue & 0xff));
}
}
private int decodeBwRef(WritableRaster raster, ColorCache colorCache, int width, HuffmanCodeGroup curCodeGroup, byte[] rgba, short code, int x, int y) throws IOException {
int length = lz77decode(code - 256);
short distancePrefix = curCodeGroup.distanceCode.readSymbol(lsbBitReader);
int distanceCode = lz77decode(distancePrefix);
int xSrc, ySrc;
if (distanceCode > 120) {
//Linear distance
int distance = distanceCode - 120;
ySrc = y - (distance / width);
xSrc = x - (distance % width);
}
else {
//See comment of distances array
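//e.g. distanceCode 1 -> DISTANCES[0] = 0x18: dy = 1, dx = 8 - 8 = 0, i.e. the pixel directly above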
xSrc = x - (8 - (DISTANCES[distanceCode - 1] & 0xf));
ySrc = y - (DISTANCES[distanceCode - 1] >> 4);
}
if (xSrc < 0) {
ySrc--;
xSrc += width;
}
else if (xSrc >= width) {
xSrc -= width;
ySrc++;
}
for (int l = length; l > 0; x++, l--) {
//Check length and xSrc, ySrc not falling outside raster? (Should not occur if image is correct)
if (x == width) {
x = 0;
y++;
}
raster.getDataElements(xSrc++, ySrc, rgba);
raster.setDataElements(x, y, rgba);
if (xSrc == width) {
xSrc = 0;
ySrc++;
}
if (colorCache != null) {
colorCache.insert((rgba[3] & 0xff) << 24 | (rgba[0] & 0xff) << 16 | (rgba[1] & 0xff) << 8 | (rgba[2] & 0xff));
}
}
return length;
}
private int lz77decode(int prefixCode) throws IOException {
//According to specification
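//Worked example (the extra bits value is assumed): prefixCode 10 -> extraBits = 4, offset = (2 + 0) << 4 = 32,
//so with readBits(4) returning 6 the decoded value is 32 + 6 + 1 = 39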
if (prefixCode < 4) {
return prefixCode + 1;
}
else {
int extraBits = (prefixCode - 2) >> 1;
int offset = (2 + (prefixCode & 1)) << extraBits;
return offset + (int) lsbBitReader.readBits(extraBits) + 1;
}
}
private int readTransform(int xSize, int ySize, List<Transform> transforms) throws IOException {
int transformType = (int) lsbBitReader.readBits(2);
// TODO: Each transform type can only be present once in the stream.
switch (transformType) {
case TransformType.PREDICTOR_TRANSFORM:
//Intentional Fallthrough
case TransformType.COLOR_TRANSFORM: {
// The two first transforms contains the exact same data, can be combined
byte sizeBits = (byte) (lsbBitReader.readBits(3) + 2);
int blockWidth = subSampleSize(xSize, sizeBits);
int blockHeight = subSampleSize(ySize, sizeBits);
WritableRaster raster =
Raster.createInterleavedRaster(DataBuffer.TYPE_BYTE, blockWidth, blockHeight, 4 * blockWidth, 4,
new int[] {0, 1, 2, 3}, null);
readVP8Lossless(raster, false, null, blockWidth, blockHeight);
//Keep data as raster for convenient (x,y) indexing
if (transformType == TransformType.PREDICTOR_TRANSFORM) {
transforms.add(0, new PredictorTransform(raster, sizeBits));
}
else {
transforms.add(0, new ColorTransform(raster, sizeBits));
}
break;
}
case TransformType.SUBTRACT_GREEN: {
// No data here
transforms.add(0, new SubtractGreenTransform());
break;
}
case TransformType.COLOR_INDEXING_TRANSFORM: {
// 8 bit value for color table size
int colorTableSize = ((int) lsbBitReader.readBits(8)) + 1; // 1-256
// If the index is equal or larger than color_table_size,
// the argb color value should be set to 0x00000000
@ -165,43 +394,37 @@ public final class VP8LDecoder {
colorTableSize > 4 ? 16 :
colorTableSize > 2 ? 4 : 2;
byte[] colorTable = new byte[safeColorTableSize * 4];
// The color table can be obtained by reading an image,
// without the RIFF header, image size, and transforms,
// assuming a height of one pixel and a width of
// color_table_size. The color table is always
// subtraction-coded to reduce image entropy.
readVP8Lossless(
Raster.createInterleavedRaster(
new DataBufferByte(colorTable, colorTableSize * 4),
colorTableSize, 1, colorTableSize * 4,
4, new int[] {0, 1, 2, 3}, null)
, false, null, colorTableSize, 1);
//resolve subtraction code
for (int i = 4; i < colorTable.length; i++) {
colorTable[i] += colorTable[i - 4];
}
// The number of pixels packed into each green sample (byte)
byte widthBits = (byte) (colorTableSize > 16 ? 0 :
colorTableSize > 4 ? 1 :
colorTableSize > 2 ? 2 : 3);
xSize = subSampleSize(xSize, widthBits);
// The colors components are stored in ARGB order at 4*index, 4*index + 1, 4*index + 2, 4*index + 3
// TODO: Can we use this to produce an image with IndexColorModel instead of expanding the values in-memory?
transforms.add(0, new ColorIndexingTransform(colorTable, widthBits));
break;
}
@ -212,147 +435,53 @@ public final class VP8LDecoder {
return xSize;
}
private HuffmanInfo readHuffmanCodes(int xSize, int ySize, int colorCacheBits, boolean readMetaCodes)
throws IOException {
int huffmanGroupNum = 1;
int huffmanXSize;
int huffmanYSize;
int metaCodeBits = 0;
WritableRaster huffmanMetaCodes = null;
if (readMetaCodes && lsbBitReader.readBit() == 1) {
//read in meta codes
metaCodeBits = (int) lsbBitReader.readBits(3) + 2;
huffmanXSize = subSampleSize(xSize, metaCodeBits);
huffmanYSize = subSampleSize(ySize, metaCodeBits);
//Raster with elements as BARG (only the RG components encode the meta group)
WritableRaster packedRaster = Raster.createPackedRaster(DataBuffer.TYPE_INT, huffmanXSize, huffmanYSize,
new int[] {0x0000ff00, 0x000000ff, 0xff000000, 0x00ff0000}, null);
readVP8Lossless(asByteRaster(packedRaster), false, null, huffmanXSize, huffmanYSize);
int[] data = ((DataBufferInt) packedRaster.getDataBuffer()).getData();
//Max metaGroup is number of meta groups
int maxCode = Integer.MIN_VALUE;
for (int code : data) {
maxCode = max(maxCode, code & 0xffff);
}
huffmanGroupNum = maxCode + 1;
/*
New Raster with just RG components exposed as single band allowing simple access of metaGroupIndex with
x,y lookup
*/
huffmanMetaCodes = Raster.createPackedRaster(packedRaster.getDataBuffer(), huffmanXSize, huffmanYSize,
huffmanXSize, new int[] {0xffff}, null);
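//Each sample now holds (red << 8 | green), the meta prefix code index as stored by the format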
}
HuffmanCodeGroup[] huffmanGroups = new HuffmanCodeGroup[huffmanGroupNum];
for (int i = 0; i < huffmanGroups.length; i++) {
huffmanGroups[i] = new HuffmanCodeGroup(lsbBitReader, colorCacheBits);
}
return new HuffmanInfo(huffmanMetaCodes, metaCodeBits, huffmanGroups);
}
private static int subSampleSize(final int size, final int samplingBits) {
return (size + (1 << samplingBits) - 1) >> samplingBits;
}
}


@ -0,0 +1,25 @@
package com.twelvemonkeys.imageio.plugins.webp.lossless.huffman;
import com.twelvemonkeys.imageio.plugins.webp.LSBBitReader;
import java.io.IOException;
public class HuffmanCodeGroup {
/**
* Used for green, backward reference length and color cache
*/
public final HuffmanTable mainCode;
public final HuffmanTable redCode;
public final HuffmanTable blueCode;
public final HuffmanTable alphaCode;
public final HuffmanTable distanceCode;
public HuffmanCodeGroup(LSBBitReader lsbBitReader, int colorCacheBits) throws IOException {
mainCode = new HuffmanTable(lsbBitReader, 256 + 24 + (colorCacheBits > 0 ? 1 << colorCacheBits : 0));
redCode = new HuffmanTable(lsbBitReader, 256);
blueCode = new HuffmanTable(lsbBitReader, 256);
alphaCode = new HuffmanTable(lsbBitReader, 256);
distanceCode = new HuffmanTable(lsbBitReader, 40);
}
}


@ -0,0 +1,17 @@
package com.twelvemonkeys.imageio.plugins.webp.lossless.huffman;
import java.awt.image.*;
public class HuffmanInfo {
public Raster huffmanMetaCodes; //Raster allows intuitive lookup by x and y
public int metaCodeBits;
public HuffmanCodeGroup[] huffmanGroups;
public HuffmanInfo(Raster huffmanMetaCodes, int metaCodeBits, HuffmanCodeGroup[] huffmanGroups) {
this.huffmanMetaCodes = huffmanMetaCodes;
this.metaCodeBits = metaCodeBits;
this.huffmanGroups = huffmanGroups;
}
}


@ -0,0 +1,334 @@
package com.twelvemonkeys.imageio.plugins.webp.lossless.huffman;
import com.twelvemonkeys.imageio.plugins.webp.LSBBitReader;
import javax.imageio.IIOException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
* Represents a single huffman tree as a table.
* <p>
* Decoding a symbol just involves reading bits from the input stream and using that read value to index into the
* lookup table.
* <p>
* Code length and the corresponding symbol are packed into one array element (int).
* This is done to avoid the overhead and the fragmentation over the whole heap involved with creating objects
* of a custom class. The upper 16 bits of each element are the code length and lower 16 bits are the symbol.
* <p>
* The max allowed code length by the WEBP specification is 15, therefore this would mean the table needs to have
* 2^15 elements. To keep a reasonable memory usage, instead the lookup table only directly holds symbols with code
* length up to {@code LEVEL1_BITS} (Currently 8 bits). For longer codes the lookup table stores a reference to a
* second level lookup table. This reference consists of an element with length as the max length of the level 2
* table and value as the index of the table in the list of level 2 tables.
* <p>
* Bits are read from the input least significant bit first (LSB), so the prefix of length i of a read value
* is its lowest i bits in reversed order.
* The lookup table is indexed directly by the next {@code LEVEL1_BITS} bits read from the input (i.e. the bits
* corresponding to the next code form a reversed suffix of the read value/index).
* For a code of length l, every index that shares the same lowest l bits must therefore decode to the same symbol,
* regardless of the {@code (LEVEL1_BITS - l)} higher bits, so the lookup table repeats the entry for this symbol
* every 2^l slots, starting from the bit-reversed code.
*/
public class HuffmanTable {
private static final int LEVEL1_BITS = 8;
/**
* Symbols of the L-code in the order they need to be read
*/
private static final int[] L_CODE_ORDER = {17, 18, 0, 1, 2, 3, 4, 5, 16, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15};
private final int[] level1 = new int[1 << LEVEL1_BITS];
private final List<int[]> level2 = new ArrayList<>();
/**
* Build a Huffman table by reading the encoded symbol lengths from the reader
*
* @param lsbBitReader the reader to read from
* @param alphabetSize the number of symbols in the alphabet to be decoded by this huffman table
* @throws IOException when reading produces an exception
*/
public HuffmanTable(LSBBitReader lsbBitReader, int alphabetSize) throws IOException {
boolean simpleLengthCode = lsbBitReader.readBit() == 1;
if (simpleLengthCode) {
int symbolNum = lsbBitReader.readBit() + 1;
boolean first8Bits = lsbBitReader.readBit() == 1;
short symbol1 = (short) lsbBitReader.readBits(first8Bits ? 8 : 1);
if (symbolNum == 2) {
short symbol2 = (short) lsbBitReader.readBits(8);
for (int i = 0; i < (1 << LEVEL1_BITS); i += 2) {
level1[i] = 1 << 16 | symbol1;
level1[i + 1] = 1 << 16 | symbol2;
}
}
else {
Arrays.fill(level1, symbol1);
}
}
else {
/*
code lengths also huffman coded
first read the "first stage" code lengths
In the following this is called the L-Code (for length code)
*/
int numLCodeLengths = (int) (lsbBitReader.readBits(4) + 4);
short[] lCodeLengths = new short[L_CODE_ORDER.length];
int numPosCodeLens = 0;
for (int i = 0; i < numLCodeLengths; i++) {
short len = (short) lsbBitReader.readBits(3);
lCodeLengths[L_CODE_ORDER[i]] = len;
if (len > 0) {
numPosCodeLens++;
}
}
//Use L-Code to read the actual code lengths
short[] codeLengths = readCodeLengths(lsbBitReader, lCodeLengths, alphabetSize, numPosCodeLens);
buildFromLengths(codeLengths);
}
}
/**
* Builds a Huffman table by using already given code lengths to generate the codes from
*
* @param codeLengths the array specifying the bit length of the code for a symbol (i.e. {@code codeLengths[i]}
* is the bit length of the code for the symbol i)
* @param numPosCodeLens the number of positive (i.e. non-zero) codeLengths in the array (allows more efficient
* table generation)
*/
private HuffmanTable(short[] codeLengths, int numPosCodeLens) {
buildFromLengths(codeLengths, numPosCodeLens);
}
/*
Helper methods to allow reusing in different constructors
*/
private void buildFromLengths(short[] codeLengths) {
int numPosCodeLens = 0;
for (short codeLength : codeLengths) {
if (codeLength != 0) {
numPosCodeLens++;
}
}
buildFromLengths(codeLengths, numPosCodeLens);
}
private void buildFromLengths(short[] codeLengths, int numPosCodeLens) {
//Pack code length and corresponding symbols as described above
int[] lengthsAndSymbols = new int[numPosCodeLens];
int index = 0;
for (int i = 0; i < codeLengths.length; i++) {
if (codeLengths[i] != 0) {
lengthsAndSymbols[index++] = codeLengths[i] << 16 | i;
}
}
//Special case: Only 1 code value
if (numPosCodeLens == 1) {
//Length is 0 so mask to clear length bits
Arrays.fill(level1, lengthsAndSymbols[0] & 0xffff);
}
//Due to the layout of the elements this effectively first sorts by length and then symbol.
Arrays.sort(lengthsAndSymbols);
/*
The next code, in the bit order it would appear on the input stream, i.e. it is reversed.
Only the lowest bits (corresponding to the bit length of the code) are considered.
Example: code 0..010 (length 2) would appear as 0..001.
*/
int code = 0;
//Used for level2 lookup
int rootEntry = -1;
int[] currentTable = null;
for (int i = 0; i < lengthsAndSymbols.length; i++) {
int lengthAndSymbol = lengthsAndSymbols[i];
int length = lengthAndSymbol >>> 16;
if (length <= LEVEL1_BITS) {
for (int j = code; j < level1.length; j += 1 << length) {
level1[j] = lengthAndSymbol;
}
}
else {
//Existing level2 table not fitting
if ((code & ((1 << LEVEL1_BITS) - 1)) != rootEntry) {
/*
Figure out needed table size.
Start at current symbol and length.
Every symbol uses 1 slot at the current bit length.
Going up 1 bit in length multiplies the slots by 2.
No more open slots indicate the table size to be big enough.
*/
int maxLength = length;
for (int j = i, openSlots = 1 << (length - LEVEL1_BITS);
j < lengthsAndSymbols.length && openSlots > 0;
j++, openSlots--) {
int innerLength = lengthsAndSymbols[j] >>> 16;
while (innerLength != maxLength) {
maxLength++;
openSlots <<= 1;
}
}
int level2Size = maxLength - LEVEL1_BITS;
currentTable = new int[1 << level2Size];
rootEntry = code & ((1 << LEVEL1_BITS) - 1);
level2.add(currentTable);
//Set root table indirection
level1[rootEntry] = (LEVEL1_BITS + level2Size) << 16 | (level2.size() - 1);
}
//Add to existing (or newly generated) 2nd level table
for (int j = (code >>> LEVEL1_BITS); j < currentTable.length; j += 1 << (length - LEVEL1_BITS)) {
currentTable[j] = (length - LEVEL1_BITS) << 16 | (lengthAndSymbol & 0xffff);
}
}
code = nextCode(code, length);
}
}
/**
* Computes the next code
*
* @param code the current code
* @param length the currently valid length
* @return {@code reverse(reverse(code, length) + 1, length)} where {@code reverse(a, b)} is the lowest b bits of
* a in inverted order
*/
private int nextCode(int code, int length) {
int a = (~code) & ((1 << length) - 1);
//This will result in the highest 0-bit in the lower length bits of code set (by construction of a)
//I.e. the lowest 0-bit in the value code represents
int step = Integer.highestOneBit(a);
//In the represented value this clears the consecutive 1-bits starting at bit 0 and then sets the lowest 0 bit
//This corresponds to adding 1 to the value
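//e.g. length 3, code 0b011 (code 0b110 in reversed bit order): a = 0b100, step = 0b100, result = 0b111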
return (code & (step - 1)) | step;
}
private static short[] readCodeLengths(LSBBitReader lsbBitReader, short[] aCodeLengths, int alphabetSize,
int numPosCodeLens) throws IOException {
HuffmanTable huffmanTable = new HuffmanTable(aCodeLengths, numPosCodeLens);
//Not sure where this comes from. Just adapted from the libwebp implementation
int codedSymbols;
if (lsbBitReader.readBit() == 1) {
int maxSymbolBitLength = (int) (2 + 2 * lsbBitReader.readBits(3));
codedSymbols = (int) (2 + lsbBitReader.readBits(maxSymbolBitLength));
}
else {
codedSymbols = alphabetSize;
}
short[] codeLengths = new short[alphabetSize];
//Default code for repeating
short prevLength = 8;
for (int i = 0; i < alphabetSize && codedSymbols > 0; i++, codedSymbols--) {
short len = huffmanTable.readSymbol(lsbBitReader);
if (len < 16) { //Literal length
codeLengths[i] = len;
if (len != 0) {
prevLength = len;
}
}
else {
short repeatSymbol = 0;
int extraBits;
int repeatOffset;
switch (len) {
case 16: //Repeat previous
repeatSymbol = prevLength;
extraBits = 2;
repeatOffset = 3;
break;
case 17: //Repeat 0 short
extraBits = 3;
repeatOffset = 3;
break;
case 18: //Repeat 0 long
extraBits = 7;
repeatOffset = 11;
break;
default:
throw new IIOException("Huffman: Unreachable: Decoded Code Length > 18.");
}
int repeatCount = (int) (lsbBitReader.readBits(extraBits) + repeatOffset);
if (i + repeatCount > alphabetSize) {
throw new IIOException(
String.format(
"Huffman: Code length repeat count overflows alphabet: Start index: %d, count: " +
"%d, alphabet size: %d", i, repeatCount, alphabetSize)
);
}
Arrays.fill(codeLengths, i, i + repeatCount, repeatSymbol);
i += repeatCount - 1;
}
}
return codeLengths;
}
/**
* Reads the next code symbol from the streaming and decode it using the Huffman table
*
* @param lsbBitReader the reader to read a symbol from (will be advanced accordingly)
* @return the decoded symbol
* @throws IOException when the reader throws one reading a symbol
*/
public short readSymbol(LSBBitReader lsbBitReader) throws IOException {
int index = (int) lsbBitReader.peekBits(LEVEL1_BITS);
int lengthAndSymbol = level1[index];
int length = lengthAndSymbol >>> 16;
if (length > LEVEL1_BITS) {
//Lvl2 lookup
lsbBitReader.readBits(LEVEL1_BITS); //Consume bits of first level
int level2Index = (int) lsbBitReader.peekBits(length - LEVEL1_BITS); //Peek remaining required bits
lengthAndSymbol = level2.get(lengthAndSymbol & 0xffff)[level2Index];
length = lengthAndSymbol >>> 16;
}
lsbBitReader.readBits(length); //Consume bits
return (short) (lengthAndSymbol & 0xffff);
}
}
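To make the level-1 layout described in the class comment concrete, this stand-alone sketch performs the same fill as the constructor's loop for a single 3-bit code (code and symbol values are made up for illustration):

public class Level1LayoutSketch {
    public static void main(String[] args) {
        int LEVEL1_BITS = 8;
        int[] level1 = new int[1 << LEVEL1_BITS];
        int length = 3;
        int symbol = 42;
        int code = 0b011; // the code 110 stored in reversed (LSB-first) bit order
        for (int j = code; j < level1.length; j += 1 << length) {
            level1[j] = length << 16 | symbol; // repeated every 2^3 = 8 slots
        }
        // Any 8 peeked bits whose lowest three bits are 011 resolve to symbol 42:
        System.out.println(level1[0b10101011] & 0xffff); // 42
    }
}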


@ -0,0 +1,43 @@
package com.twelvemonkeys.imageio.plugins.webp.lossless.transform;
import java.awt.image.*;
public class ColorIndexingTransform implements Transform {
private final byte[] colorTable;
private final byte bits;
public ColorIndexingTransform(byte[] colorTable, byte bits) {
this.colorTable = colorTable;
this.bits = bits;
}
@Override
public void applyInverse(WritableRaster raster) {
int width = raster.getWidth();
int height = raster.getHeight();
byte[] rgba = new byte[4];
for (int y = 0; y < height; y++) {
//Reversed so no used elements are overridden (in case of packing)
for (int x = width - 1; x >= 0; x--) {
int componentSize = 8 >> bits;
int packed = 1 << bits;
int xC = x / packed;
int componentOffset = componentSize * (x % packed);
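//e.g. bits = 3: eight 1-bit indices per green sample; x = 13 gives xC = 1 and componentOffset = 5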
int sample = raster.getSample(xC, y, 1);
int index = sample >> componentOffset & ((1 << componentSize) - 1);
//Arraycopy for 4 elements might not be beneficial
System.arraycopy(colorTable, index * 4, rgba, 0, 4);
raster.setDataElements(x, y, rgba);
}
}
}
}


@ -0,0 +1,93 @@
package com.twelvemonkeys.imageio.plugins.webp.lossless.transform;
import java.awt.image.*;
public class ColorTransform implements Transform {
private final Raster data;
private final byte bits;
public ColorTransform(Raster raster, byte bits) {
this.data = raster;
this.bits = bits;
}
@Override
public void applyInverse(WritableRaster raster) {
int width = raster.getWidth();
int height = raster.getHeight();
byte[] rgba = new byte[4];
for (int y = 0; y < height; y++) {
for (int x = 0; x < width; x++) {
data.getDataElements(x >> bits, y >> bits, rgba);
ColorTransformElement trans = new ColorTransformElement(rgba);
raster.getDataElements(x, y, rgba);
trans.inverseTransform(rgba);
raster.setDataElements(x, y, rgba);
}
}
}
// NOTE: For encoding!
private static void colorTransform(final int red, final int blue, final int green,
final ColorTransformElement trans,
final int[] newRedBlue) {
// Transformed values of red and blue components
int tmp_red = red;
int tmp_blue = blue;
// Applying transform is just adding the transform deltas
tmp_red += colorTransformDelta((byte) trans.green_to_red, (byte) green);
tmp_blue += colorTransformDelta((byte) trans.green_to_blue, (byte) green);
tmp_blue += colorTransformDelta((byte) trans.red_to_blue, (byte) red);
// No pointer dereferences in Java...
// TODO: Consider passing an offset too, so we can modify in-place
newRedBlue[0] = tmp_red & 0xff;
newRedBlue[1] = tmp_blue & 0xff;
}
// A conversion from the 8-bit unsigned representation (uint8) to the 8-bit
// signed one (int8) is required before calling ColorTransformDelta(). It
// should be performed using 8-bit two's complement (that is: uint8 range
// [128-255] is mapped to the [-128, -1] range of its converted int8
// value).
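// Example with assumed values: t = 0x20 (32), c = 100 -> (32 * 100) >> 5 = 100;
// t = 0xE0 (-32 as int8), c = 100 -> (-3200) >> 5 = -100.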
private static byte colorTransformDelta(final byte t, final byte c) {
return (byte) ((t * c) >> 5);
}
private static final class ColorTransformElement {
final int green_to_red;
final int green_to_blue;
final int red_to_blue;
ColorTransformElement(final byte[] rgba) {
this.green_to_red = rgba[2];
this.green_to_blue = rgba[1];
this.red_to_blue = rgba[0];
}
private void inverseTransform(final byte[] rgb) {
// Applying inverse transform is just adding (!, different from specification) the
// color transform deltas
// Transformed values of red and blue components
int tmp_red = rgb[0];
int tmp_blue = rgb[2];
tmp_red += colorTransformDelta((byte) this.green_to_red, rgb[1]);
tmp_blue += colorTransformDelta((byte) this.green_to_blue, rgb[1]);
tmp_blue += colorTransformDelta((byte) this.red_to_blue, (byte) tmp_red); // Spec has red & 0xff
rgb[0] = (byte) (tmp_red & 0xff);
rgb[2] = (byte) (tmp_blue & 0xff);
}
}
}


@ -29,7 +29,7 @@
* POSSIBILITY OF SUCH DAMAGE.
*/
package com.twelvemonkeys.imageio.plugins.webp.lossless.transform;
/**
* PredictorMode.


@ -0,0 +1,238 @@
package com.twelvemonkeys.imageio.plugins.webp.lossless.transform;
import java.awt.image.*;
import static java.lang.Math.*;
public class PredictorTransform implements Transform {
private final Raster data;
private final byte bits;
public PredictorTransform(Raster raster, byte bits) {
this.data = raster;
this.bits = bits;
}
@Override
public void applyInverse(WritableRaster raster) {
int width = raster.getWidth();
int height = raster.getHeight();
byte[] rgba = new byte[4];
//Handle top and left border separately
//(0,0) Black (0x000000ff) predict
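//The BLACK predictor is opaque black (alpha 0xff, color components 0), so only rgba[3] changes here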
raster.getDataElements(0, 0, rgba);
rgba[3] += 0xff;
raster.setDataElements(0, 0, rgba);
byte[] predictor = new byte[4];
byte[] predictor2 = new byte[4];
byte[] predictor3 = new byte[4];
//(x,0) L predict
for (int x = 1; x < width; x++) {
raster.getDataElements(x, 0, rgba);
raster.getDataElements(x - 1, 0, predictor);
addPixels(rgba, predictor);
raster.setDataElements(x, 0, rgba);
}
//(0,y) T predict
for (int y = 1; y < height; y++) {
raster.getDataElements(0, y, rgba);
raster.getDataElements(0, y - 1, predictor);
addPixels(rgba, predictor);
raster.setDataElements(0, y, rgba);
}
for (int y = 1; y < height; y++) {
for (int x = 1; x < width; x++) {
int transformType = data.getSample(x >> bits, y >> bits, 1);
raster.getDataElements(x, y, rgba);
int lX = x - 1; //x for left
int tY = y - 1; //y for top
//top right is not (x+1, tY) if last pixel in line instead (0, y)
int trX = x == width - 1 ? 0 : x + 1;
int trY = x == width - 1 ? y : tY;
switch (transformType) {
case PredictorMode.BLACK:
rgba[3] += 0xff;
break;
case PredictorMode.L:
raster.getDataElements(lX, y, predictor);
addPixels(rgba, predictor);
break;
case PredictorMode.T:
raster.getDataElements(x, tY, predictor);
addPixels(rgba, predictor);
break;
case PredictorMode.TR:
raster.getDataElements(trX, trY, predictor);
addPixels(rgba, predictor);
break;
case PredictorMode.TL:
raster.getDataElements(lX, tY, predictor);
addPixels(rgba, predictor);
break;
case PredictorMode.AVG_L_TR_T:
raster.getDataElements(lX, y, predictor);
raster.getDataElements(trX, trY, predictor2);
average2(predictor, predictor2);
raster.getDataElements(x, tY, predictor2);
average2(predictor, predictor2);
addPixels(rgba, predictor);
break;
case PredictorMode.AVG_L_TL:
raster.getDataElements(lX, y, predictor);
raster.getDataElements(lX, tY, predictor2);
average2(predictor, predictor2);
addPixels(rgba, predictor);
break;
case PredictorMode.AVG_L_T:
raster.getDataElements(lX, y, predictor);
raster.getDataElements(x, tY, predictor2);
average2(predictor, predictor2);
addPixels(rgba, predictor);
break;
case PredictorMode.AVG_TL_T:
raster.getDataElements(lX, tY, predictor);
raster.getDataElements(x, tY, predictor2);
average2(predictor, predictor2);
addPixels(rgba, predictor);
break;
case PredictorMode.AVG_T_TR:
raster.getDataElements(x, tY, predictor);
raster.getDataElements(trX, trY, predictor2);
average2(predictor, predictor2);
addPixels(rgba, predictor);
break;
case PredictorMode.AVG_L_TL_T_TR:
raster.getDataElements(lX, y, predictor);
raster.getDataElements(lX, tY, predictor2);
average2(predictor, predictor2);
raster.getDataElements(x, tY, predictor2);
raster.getDataElements(trX, trY, predictor3);
average2(predictor2, predictor3);
average2(predictor, predictor2);
addPixels(rgba, predictor);
break;
case PredictorMode.SELECT:
raster.getDataElements(lX, y, predictor);
raster.getDataElements(x, tY, predictor2);
raster.getDataElements(lX, tY, predictor3);
addPixels(rgba, select(predictor, predictor2, predictor3));
break;
case PredictorMode.CLAMP_ADD_SUB_FULL:
raster.getDataElements(lX, y, predictor);
raster.getDataElements(x, tY, predictor2);
raster.getDataElements(lX, tY, predictor3);
clampAddSubtractFull(predictor, predictor2, predictor3);
addPixels(rgba, predictor);
break;
case PredictorMode.CLAMP_ADD_SUB_HALF:
raster.getDataElements(lX, y, predictor);
raster.getDataElements(x, tY, predictor2);
average2(predictor, predictor2);
raster.getDataElements(lX, tY, predictor2);
clampAddSubtractHalf(predictor, predictor2);
addPixels(rgba, predictor);
break;
}
raster.setDataElements(x, y, rgba);
}
}
}
private static byte[] select(final byte[] l, final byte[] t, final byte[] tl) {
// l = left pixel, t = top pixel, tl = top left pixel.
// ARGB component estimates for prediction.
int pAlpha = addSubtractFull(l[3], t[3], tl[3]);
int pRed = addSubtractFull(l[0], t[0], tl[0]);
int pGreen = addSubtractFull(l[1], t[1], tl[1]);
int pBlue = addSubtractFull(l[2], t[2], tl[2]);
// Manhattan distances to estimates for left and top pixels.
int pL = manhattanDistance(l, pAlpha, pRed, pGreen, pBlue);
int pT = manhattanDistance(t, pAlpha, pRed, pGreen, pBlue);
// Return either left or top, the one closer to the prediction.
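// Worked example (hypothetical values, not from the specification): with l = {2, 10, 4, -1},
// t = {6, 10, 4, -1} and tl = {4, 10, 4, -1} (RGBA bytes), the estimate is pRed = 2 + 6 - 4 = 4
// and the other components are unchanged, so pL == pT == 2 and t is returned (pL < pT is false).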
return pL < pT ? l : t;
}
private static int manhattanDistance(byte[] rgba, int pAlpha, int pRed, int pGreen, int pBlue) {
return abs(pAlpha - (rgba[3] & 0xff)) + abs(pRed - (rgba[0] & 0xff)) +
abs(pGreen - (rgba[1] & 0xff)) + abs(pBlue - (rgba[2] & 0xff));
}
private static void average2(final byte[] rgba1, final byte[] rgba2) {
rgba1[0] = (byte) (((rgba1[0] & 0xff) + (rgba2[0] & 0xff)) / 2);
rgba1[1] = (byte) (((rgba1[1] & 0xff) + (rgba2[1] & 0xff)) / 2);
rgba1[2] = (byte) (((rgba1[2] & 0xff) + (rgba2[2] & 0xff)) / 2);
rgba1[3] = (byte) (((rgba1[3] & 0xff) + (rgba2[3] & 0xff)) / 2);
}
// Clamp the input value between 0 and 255.
private static int clamp(final int a) {
return max(0, min(a, 255));
}
private static void clampAddSubtractFull(final byte[] a, final byte[] b, final byte[] c) {
a[0] = (byte) clamp(addSubtractFull(a[0], b[0], c[0]));
a[1] = (byte) clamp(addSubtractFull(a[1], b[1], c[1]));
a[2] = (byte) clamp(addSubtractFull(a[2], b[2], c[2]));
a[3] = (byte) clamp(addSubtractFull(a[3], b[3], c[3]));
}
private static void clampAddSubtractHalf(final byte[] a, final byte[] b) {
a[0] = (byte) clamp(addSubtractHalf(a[0], b[0]));
a[1] = (byte) clamp(addSubtractHalf(a[1], b[1]));
a[2] = (byte) clamp(addSubtractHalf(a[2], b[2]));
a[3] = (byte) clamp(addSubtractHalf(a[3], b[3]));
}
private static int addSubtractFull(byte a, byte b, byte c) {
return (a & 0xff) + (b & 0xff) - (c & 0xff);
}
private static int addSubtractHalf(byte a, byte b) {
return (a & 0xff) + ((a & 0xff) - (b & 0xff)) / 2;
}
private static void addPixels(byte[] rgba, byte[] predictor) {
rgba[0] += predictor[0];
rgba[1] += predictor[1];
rgba[2] += predictor[2];
rgba[3] += predictor[3];
}
}
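
As a usage illustration only (not part of the patch), the sketch below applies PredictorTransform to a tiny 2x2 raster of residuals. The 4-band interleaved byte layout and the single-block predictor image are assumptions chosen to match how applyInverse calls getDataElements and getSample above, and the example is assumed to live in the same transform package so PredictorTransform and PredictorMode are visible:

import java.awt.Point;
import java.awt.image.DataBuffer;
import java.awt.image.Raster;
import java.awt.image.WritableRaster;

public class PredictorTransformExample {
    public static void main(String[] args) {
        // 2x2 image of RGBA residuals, initially all zero
        WritableRaster residuals = Raster.createInterleavedRaster(
                DataBuffer.TYPE_BYTE, 2, 2, 4, new Point(0, 0));

        // One predictor block covering the whole image (block side 1 << 2 = 4 > 2);
        // the mode is read from band 1 (green), here the L predictor
        WritableRaster predictorImage = Raster.createInterleavedRaster(
                DataBuffer.TYPE_BYTE, 1, 1, 4, new Point(0, 0));
        predictorImage.setSample(0, 0, 1, PredictorMode.L);

        new PredictorTransform(predictorImage, (byte) 2).applyInverse(residuals);

        // Pixel (0, 0) now holds opaque black (BLACK predict); the other pixels
        // were reconstructed from their left/top neighbours
        System.out.println(residuals.getSample(0, 0, 3)); // prints 255
    }
}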

View File

@@ -0,0 +1,29 @@
package com.twelvemonkeys.imageio.plugins.webp.lossless.transform;
import java.awt.image.*;
public class SubtractGreenTransform implements Transform {
private static void addGreenToBlueAndRed(byte[] rgb) {
rgb[0] = (byte) ((rgb[0] + rgb[1]) & 0xff);
rgb[2] = (byte) ((rgb[2] + rgb[1]) & 0xff);
}
@Override
public void applyInverse(WritableRaster raster) {
int width = raster.getWidth();
int height = raster.getHeight();
byte[] rgba = new byte[4];
for (int y = 0; y < height; y++) {
for (int x = 0; x < width; x++) {
raster.getDataElements(x, y, rgba);
addGreenToBlueAndRed(rgba);
raster.setDataElements(x, y, rgba);
}
}
}
}
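
A small worked example (illustrative only, not part of the patch) of the add-green step above for one pixel:

public class SubtractGreenExample {
    public static void main(String[] args) {
        // Residual pixel in RGBA order: R' = 200, G = 100, B' = 250, A = 255
        byte[] rgba = {(byte) 200, (byte) 100, (byte) 250, (byte) 255};

        rgba[0] = (byte) ((rgba[0] + rgba[1]) & 0xff); // red  = (200 + 100) mod 256 = 44
        rgba[2] = (byte) ((rgba[2] + rgba[1]) & 0xff); // blue = (250 + 100) mod 256 = 94

        System.out.println((rgba[0] & 0xff) + ", " + (rgba[2] & 0xff)); // prints 44, 94
    }
}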

View File

@@ -29,27 +29,16 @@
  * POSSIBILITY OF SUCH DAMAGE.
  */
-package com.twelvemonkeys.imageio.plugins.webp.lossless;
+package com.twelvemonkeys.imageio.plugins.webp.lossless.transform;
+
+import java.awt.image.WritableRaster;
 
 /**
  * Transform.
  *
  * @author <a href="mailto:harald.kuhr@gmail.com">Harald Kuhr</a>
  */
-final class Transform {
-    final int type;
-    final Object data;
-
-    Transform(final int type, final Object data) {
-        this.type = type;
-        this.data = data;
-    }
-
-    byte[] getData() {
-        return (byte[]) data;
-    }
-
-    int[] getColorMap() {
-        return (int[]) data;
-    }
+public interface Transform {
+
+    void applyInverse(WritableRaster raster);
 }
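
With Transform reduced to this interface, a decoder can collect the transforms in the order they are read and invert them back to front. The sketch below is illustrative only (the actual VP8LDecoder wiring is not part of this hunk, and the class is assumed to sit in the same transform package):

import java.awt.image.WritableRaster;
import java.util.List;

class TransformChainSketch {
    // Inverse transforms must be applied in the reverse of the order they were read
    static void applyAllInverse(List<Transform> transforms, WritableRaster raster) {
        for (int i = transforms.size() - 1; i >= 0; i--) {
            transforms.get(i).applyInverse(raster);
        }
    }
}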

View File

@@ -29,7 +29,7 @@
  * POSSIBILITY OF SUCH DAMAGE.
  */
-package com.twelvemonkeys.imageio.plugins.webp.lossless;
+package com.twelvemonkeys.imageio.plugins.webp.lossless.transform;
 
 /**
  * TransformType.
@@ -37,7 +37,7 @@
  * @author <a href="mailto:harald.kuhr@gmail.com">Harald Kuhr</a>
  */
 // Hmm.. Why doesn't SUBTRACT_GREEN follow the convention?
-interface TransformType {
+public interface TransformType {
     int PREDICTOR_TRANSFORM = 0;
     int COLOR_TRANSFORM = 1;
     int SUBTRACT_GREEN = 2;
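
A hedged sketch of how these codes might be mapped to Transform implementations. The factory method below is hypothetical, omits the color and color-indexing transforms (their classes are not shown in this hunk), and ignores that the real decoder must first read each transform's data from the bitstream:

import java.awt.image.Raster;

class TransformFactorySketch {
    static Transform forType(int transformType, Raster data, byte bits) {
        switch (transformType) {
            case TransformType.PREDICTOR_TRANSFORM:
                return new PredictorTransform(data, bits);
            case TransformType.SUBTRACT_GREEN:
                return new SubtractGreenTransform();
            default:
                throw new AssertionError("Transform type not covered by this sketch: " + transformType);
        }
    }
}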

View File

@@ -39,14 +39,26 @@ public class WebPImageReaderTest extends ImageReaderAbstractTest<WebPImageReader
             new TestData(getClassLoaderResource("/webp/small_13x1.webp"), new Dimension(13, 1)),
             new TestData(getClassLoaderResource("/webp/small_31x13.webp"), new Dimension(31, 13)),
             new TestData(getClassLoaderResource("/webp/test.webp"), new Dimension(128, 128)),
-            new TestData(getClassLoaderResource("/webp/very_short.webp"), new Dimension(63, 66))
-            // TODO: Support lossless
-            // // Lossless
-            // new TestData(getClassLoaderResource("/webp/1_webp_ll.webp"), new Dimension(400, 301)),
-            // // Extended format: Alpha + VP8
-            // new TestData(getClassLoaderResource("/webp/1_webp_a.webp"), new Dimension(400, 301)),
-            // // Extendad format: Anim
-            // new TestData(getClassLoaderResource("/webp/animated-webp-supported.webp"), new Dimension(400, 400))
+            new TestData(getClassLoaderResource("/webp/very_short.webp"), new Dimension(63, 66)),
+            // Lossless
+            new TestData(getClassLoaderResource("/webp/1_webp_ll.webp"), new Dimension(400, 301)),
+            new TestData(getClassLoaderResource("/webp/2_webp_ll.webp"), new Dimension(386, 395)),
+            new TestData(getClassLoaderResource("/webp/2_webp_ll_alt.webp"), new Dimension(386, 395)),
+            new TestData(getClassLoaderResource("/webp/3_webp_ll.webp"), new Dimension(800, 600)),
+            new TestData(getClassLoaderResource("/webp/4_webp_ll.webp"), new Dimension(421, 163)),
+            new TestData(getClassLoaderResource("/webp/5_webp_ll.webp"), new Dimension(300, 300)),
+            // Extended format: Alpha + VP8
+            new TestData(getClassLoaderResource("/webp/1_webp_a.webp"), new Dimension(400, 301)),
+            new TestData(getClassLoaderResource("/webp/2_webp_a.webp"), new Dimension(386, 395)),
+            new TestData(getClassLoaderResource("/webp/3_webp_a.webp"), new Dimension(800, 600)),
+            new TestData(getClassLoaderResource("/webp/4_webp_a.webp"), new Dimension(421, 163)),
+            new TestData(getClassLoaderResource("/webp/5_webp_a.webp"), new Dimension(300, 300)),
+            // Extended format: Anim
+            new TestData(getClassLoaderResource("/webp/animated-webp-supported.webp"), new Dimension(400, 400),
+                    new Dimension(400, 400), new Dimension(400, 400), new Dimension(400, 394),
+                    new Dimension(371, 394), new Dimension(394, 382), new Dimension(400, 388),
+                    new Dimension(394, 383), new Dimension(394, 394), new Dimension(372, 394),
+                    new Dimension(400, 400), new Dimension(320, 382))
     );
 }

Binary file not shown. (Size: 14 KiB)

Binary file not shown. (Size: 27 KiB)

Binary file not shown. (Size: 27 KiB)

Binary file not shown. (Size: 52 KiB)

Binary file not shown. (Size: 149 KiB)

Binary file not shown. (Size: 18 KiB)

Binary file not shown. (Size: 33 KiB)

Binary file not shown. (Size: 57 KiB)

Binary file not shown. (Size: 97 KiB)