implemented WEBP images.

This commit is contained in:
morkt 2016-05-20 04:01:40 +04:00
parent d20fed194c
commit 5f4c5d127f
6 changed files with 5526 additions and 0 deletions

197
ArcFormats/WebP/Alpha.cs Normal file
View File

@ -0,0 +1,197 @@
//! \file Alpha.cs
//! \date Wed May 18 20:06:15 2016
//! \brief Google WEBP alpha channel processing functions.
/*
Copyright (c) 2010, Google Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
* Neither the name of Google nor the names of its contributors may
be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
//
// C# port by morkt (C) 2016
//
using System;
namespace GameRes.Formats.Google
{
internal class AlphaDecoder
{
    public int width_;              // alpha plane width, in pixels (copied from VP8Io)
    public int height_;             // alpha plane height, in pixels (copied from VP8Io)
    public int method_;             // compression method: NoCompression or LosslessCompression
    public int filter_;             // prediction filter applied to the plane (a WebpFilter constant)
    public int pre_processing_;     // pre-processing mode read from the header byte
    public LosslessDecoder vp8l_dec_;   // lossless decoder, used when method_ == LosslessCompression
    public VP8Io m_io;                  // I/O parameters handed to the lossless decoder
    public bool use_8b_decode_;         // true when the 1-byte-per-pixel decode path is usable

    public const int HeaderLen = 1;             // the alpha chunk header is a single byte
    public const int NoCompression = 0;
    public const int LosslessCompression = 1;
    public const int PreprocessedLevels = 1;    // highest pre_processing_ value accepted

    // Set once the last row of the alpha plane has been decoded.
    public bool DecodeComplete { get; private set; }

    delegate void FilterFunc (byte[] input, int src, int width, int height,
        int stride, byte[] output, int dst);
    delegate void UnfilterFunc (int width, int height, int stride, int row,
        int num_rows, byte[] data, int dst);

    // Dispatch tables indexed by the WebpFilter constants (entry for None is null).
    FilterFunc[] Filters = new FilterFunc[WebpFilter.Last];
    UnfilterFunc[] Unfilters = new UnfilterFunc[WebpFilter.Last];

    /// <summary>
    /// Parses the one-byte alpha header contained in <paramref name="data"/> and
    /// prepares decoding of the alpha plane into <paramref name="output"/>.
    /// Returns false on a malformed header or truncated data.
    /// </summary>
    public bool Init (byte[] data, VP8Io src_io, byte[] output)
    {
        m_io = new VP8Io();
        int alpha_data = HeaderLen;
        int alpha_data_size = data.Length - HeaderLen;
        width_ = src_io.width;
        height_ = src_io.height;
        if (data.Length <= HeaderLen)
            return false;
        // Header byte layout: method (2 bits), filter (2 bits),
        // pre-processing (2 bits), reserved (2 bits, must be zero).
        method_ = (data[0] >> 0) & 3;
        filter_ = (data[0] >> 2) & 3;
        pre_processing_ = (data[0] >> 4) & 3;
        int rsrv = (data[0] >> 6) & 3;
        if (method_ < NoCompression
            || method_ > LosslessCompression
            || filter_ >= WebpFilter.Last
            || pre_processing_ > PreprocessedLevels
            || rsrv != 0)
        {
            return false;
        }
        bool ok = false;
        if (NoCompression == method_)
        {
            // Uncompressed: the chunk must carry the whole plane verbatim.
            int alpha_decoded_size = width_ * height_;
            ok = (alpha_data_size >= alpha_decoded_size);
        }
        else
        {
            ok = DecodeAlphaHeader (data, alpha_data, alpha_data_size, output);
        }
        FiltersInit();
        // Copy the necessary parameters from src_io to io
        // m_io.Init();
        m_io.opaque = output; // output plane
        m_io.width = src_io.width;
        m_io.height = src_io.height;
        return ok;
    }

    // Populates the filter/unfilter dispatch tables.
    void FiltersInit ()
    {
        Unfilters[WebpFilter.None] = null;
        Unfilters[WebpFilter.Horizontal] = WebpFilter.HorizontalUnfilter;
        Unfilters[WebpFilter.Vertical] = WebpFilter.VerticalUnfilter;
        Unfilters[WebpFilter.Gradient] = WebpFilter.GradientUnfilter;
        Filters[WebpFilter.None] = null;
        Filters[WebpFilter.Horizontal] = WebpFilter.HorizontalFilter;
        Filters[WebpFilter.Vertical] = WebpFilter.VerticalFilter;
        Filters[WebpFilter.Gradient] = WebpFilter.GradientFilter;
    }

    /// <summary>
    /// Decodes, unfilters and dequantizes *at least* 'num_rows' rows of alpha starting from row
    /// number 'row'. It assumes that rows up to (row - 1) have already been decoded.
    /// Returns false in case of bitstream error.
    /// </summary>
    public bool Decode (byte[] alpha_data, byte[] alpha_plane, int row, int num_rows)
    {
        var unfilter_func = Unfilters[filter_];
        if (AlphaDecoder.NoCompression == method_)
        {
            // Raw alpha bytes follow the one-byte header directly.
            int offset = row * width_;
            int num_pixels = num_rows * width_;
            Buffer.BlockCopy (alpha_data, AlphaDecoder.HeaderLen + offset, alpha_plane, offset, num_pixels);
        }
        else // method_ == LosslessCompression
        {
            if (!DecodeAlphaImageStream (row + num_rows))
                return false;
        }
        if (unfilter_func != null)
            unfilter_func (width_, height_, width_, row, num_rows, alpha_plane, 0);
        // if (row + num_rows >= alph_dec_.m_io.crop_bottom)
        if (row + num_rows >= height_)
            DecodeComplete = true;
        return true;
    }

    // Initializes the lossless decoder for the alpha bitstream and chooses between
    // the 8-bit and 32-bit internal buffer layouts.  Returns false on bitstream error.
    bool DecodeAlphaHeader (byte[] data, int data_i, int data_size, byte[] output)
    {
        vp8l_dec_ = new LosslessDecoder();
        vp8l_dec_.Init (width_, height_, m_io, data, data_i, data_size, output);
        uint[] decoded = null;
        if (!vp8l_dec_.DecodeImageStream (width_, height_, true, ref decoded, false))
            return false;
        // Special case: if alpha data uses only the color indexing transform and
        // doesn't use color cache (a frequent case), we will use DecodeAlphaData()
        // method that only needs allocation of 1 byte per pixel (alpha channel).
        if (vp8l_dec_.next_transform_ == 1
            && vp8l_dec_.transforms_[0].type_ == VP8LImageTransformType.ColorIndexing
            && vp8l_dec_.Is8bOptimizable())
        {
            use_8b_decode_ = true;
            vp8l_dec_.AllocateInternalBuffers8b();
        }
        else
        {
            // Allocate internal buffers (note that dec->width_ may have changed here).
            use_8b_decode_ = false;
            vp8l_dec_.AllocateInternalBuffers32b (width_);
        }
        return true;
    }

    // Decodes the lossless alpha bitstream up to (but not including) 'last_row'.
    // Returns true immediately when every pixel has already been decoded.
    public bool DecodeAlphaImageStream (int last_row)
    {
        if (vp8l_dec_.last_pixel_ == vp8l_dec_.Width * vp8l_dec_.Height)
            return true; // done
        // Decode (with special row processing).
        return use_8b_decode_ ?
            vp8l_dec_.DecodeAlphaData (vp8l_dec_.Width, vp8l_dec_.Height, last_row) :
            vp8l_dec_.DecodeImageData (vp8l_dec_.Pixels, vp8l_dec_.Width, vp8l_dec_.Height, last_row, LosslessDecoder.ExtractAlphaRows);
    }
}
}

2920
ArcFormats/WebP/Decoder.cs Normal file

File diff suppressed because it is too large Load Diff

238
ArcFormats/WebP/Filters.cs Normal file
View File

@ -0,0 +1,238 @@
//! \file Filters.cs
//! \date Thu May 19 21:26:22 2016
//! \brief Google WEBP filter functions.
/*
Copyright (c) 2010, Google Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
* Neither the name of Google nor the names of its contributors may
be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
//
// C# port by morkt (C) 2016
//
namespace GameRes.Formats.Google
{
internal static class WebpFilter // Filter types.
{
    public const int None = 0;
    public const int Horizontal = 1;
    public const int Vertical = 2;
    public const int Gradient = 3;
    public const int Last = Gradient + 1; // end marker
    public const int Best = Last + 1; // meta-types
    public const int Fast = Best + 1;

    // Computes output[i] = input[i] ± preds[i] over 'length' bytes (modulo 256).
    // 'inverse' selects addition (unfiltering) vs subtraction (filtering).
    static void PredictLine (byte[] input, int src, byte[] preds, int pred,
                             byte[] output, int dst, int length, bool inverse)
    {
        if (inverse)
        {
            for (int i = 0; i < length; ++i)
                output[dst+i] = (byte)(input[src+i] + preds[pred+i]);
        }
        else
        {
            for (int i = 0; i < length; ++i)
                output[dst+i] = (byte)(input[src+i] - preds[pred+i]);
        }
    }

    //------------------------------------------------------------------------------
    // Horizontal filter: each pixel is predicted from its left neighbour
    // (leftmost pixel of a row is predicted from the pixel above it).
    // For inverse (unfilter) operation 'preds' reads from the already
    // reconstructed output, which allows in-place processing.
    static void DoHorizontalFilter (byte[] input, int src, int width, int height,
                                    int stride, int row, int num_rows, bool inverse,
                                    byte[] output, int dst)
    {
        int start_offset = row * stride;
        int last_row = row + num_rows;
        src += start_offset;
        dst += start_offset;
        var preds = inverse ? output : input;
        var pred = inverse ? dst : src;
        if (0 == row)
        {
            // Leftmost pixel is the same as input for topmost scanline.
            output[dst] = input[src];
            PredictLine (input, src + 1, preds, pred, output, dst + 1, width - 1, inverse);
            row = 1;
            pred += stride;
            src += stride;
            dst += stride;
        }
        // Filter line-by-line.
        while (row < last_row)
        {
            // Leftmost pixel is predicted from above.
            PredictLine (input, src, preds, pred - stride, output, dst, 1, inverse);
            PredictLine (input, src + 1, preds, pred, output, dst + 1, width - 1, inverse);
            ++row;
            pred += stride;
            src += stride;
            dst += stride;
        }
    }

    //------------------------------------------------------------------------------
    // Vertical filter: each pixel is predicted from the pixel above it
    // (top scan-line is left-predicted, top-left pixel is copied).
    static void DoVerticalFilter (byte[] input, int src, int width, int height,
                                  int stride, int row, int num_rows, bool inverse,
                                  byte[] output, int dst)
    {
        int start_offset = row * stride;
        int last_row = row + num_rows;
        src += start_offset;
        dst += start_offset;
        var preds = inverse ? output : input;
        int pred = inverse ? dst : src;
        if (0 == row)
        {
            // Very first top-left pixel is copied.
            output[dst] = input[src];
            // Rest of top scan-line is left-predicted.
            PredictLine (input, src + 1, preds, pred, output, dst + 1, width - 1, inverse);
            row = 1;
            src += stride;
            dst += stride;
        }
        else
        {
            // We are starting from in-between. Make sure 'preds' points to prev row.
            pred -= stride;
        }
        // Filter line-by-line.
        while (row < last_row)
        {
            PredictLine (input, src, preds, pred, output, dst, width, inverse);
            ++row;
            pred += stride;
            src += stride;
            dst += stride;
        }
    }

    //------------------------------------------------------------------------------
    // Gradient filter: prediction is clip(left + above - above_left).
    static int GradientPredictor (byte a, byte b, byte c)
    {
        int g = a + b - c;
        return ((g & ~0xFF) == 0) ? g : (g < 0) ? 0 : 255; // clip to 8bit
    }

    static void DoGradientFilter (byte[] input, int src, int width, int height,
                                  int stride, int row, int num_rows, bool inverse,
                                  byte[] output, int dst)
    {
        int start_offset = row * stride;
        int last_row = row + num_rows;
        src += start_offset;
        dst += start_offset;
        var preds = inverse ? output : input;
        int pred = inverse ? dst : src;
        // left prediction for top scan-line
        if (0 == row)
        {
            output[dst] = input[src];
            PredictLine (input, src + 1, preds, pred, output, dst + 1, width - 1, inverse);
            row = 1;
            pred += stride;
            src += stride;
            dst += stride;
        }
        // Filter line-by-line.
        while (row < last_row)
        {
            // leftmost pixel: predict from above.
            PredictLine (input, src, preds, pred - stride, output, dst, 1, inverse);
            for (int w = 1; w < width; ++w)
            {
                int p = GradientPredictor (preds[pred + w - 1], preds[pred + w - stride], preds[pred + w - stride - 1]);
                output[dst+w] = (byte)(input[src+w] + (inverse ? p : -p));
            }
            ++row;
            pred += stride;
            src += stride;
            dst += stride;
        }
    }

    //------------------------------------------------------------------------------
    // Forward filters: transform 'data' into 'filtered_data' over the whole plane.

    public static void HorizontalFilter (byte[] data, int src, int width, int height,
                                         int stride, byte[] filtered_data, int dst)
    {
        DoHorizontalFilter (data, src, width, height, stride, 0, height, false, filtered_data, dst);
    }

    public static void VerticalFilter (byte[] data, int src, int width, int height,
                                       int stride, byte[] filtered_data, int dst)
    {
        DoVerticalFilter (data, src, width, height, stride, 0, height, false, filtered_data, dst);
    }

    public static void GradientFilter (byte[] data, int src, int width, int height,
                                       int stride, byte[] filtered_data, int dst)
    {
        DoGradientFilter (data, src, width, height, stride, 0, height, false, filtered_data, dst);
    }

    //------------------------------------------------------------------------------
    // Inverse filters: reconstruct rows [row, row + num_rows) of 'data' in place.
    // Rows above 'row' must already be reconstructed.
    // NOTE: the unused private 'UnfilterFunc' delegate that used to live here was
    // removed as dead code; AlphaDecoder declares its own delegate type for dispatch.

    public static void VerticalUnfilter (int width, int height, int stride, int row,
                                         int num_rows, byte[] data, int src)
    {
        DoVerticalFilter (data, src, width, height, stride, row, num_rows, true, data, src);
    }

    public static void HorizontalUnfilter (int width, int height, int stride, int row,
                                           int num_rows, byte[] data, int src)
    {
        DoHorizontalFilter (data, src, width, height, stride, row, num_rows, true, data, src);
    }

    public static void GradientUnfilter (int width, int height, int stride, int row,
                                         int num_rows, byte[] data, int src)
    {
        DoGradientFilter (data, src, width, height, stride, row, num_rows, true, data, src);
    }
}
}

272
ArcFormats/WebP/Huffman.cs Normal file
View File

@ -0,0 +1,272 @@
//! \file Huffman.cs
//! \date Wed May 18 22:19:23 2016
//! \brief Google WEBP Huffman compression implementation.
/*
Copyright (c) 2010, Google Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
* Neither the name of Google nor the names of its contributors may
be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
//
// C# port by morkt (C) 2016
//
namespace GameRes.Formats.Google
{
static class Huffman
{
    public const int CodesPerMetaCode = 5;
    public const int PackedBits = 6;
    public const int PackedTableSize = 1 << PackedBits;
    public const int DefaultCodeLength = 8;
    public const int MaxAllowedCodeLength = 15;
    public const int NumLiteralCodes = 256;
    public const int NumLengthCodes = 24;
    public const int NumDistanceCodes = 40;
    public const int CodeLengthCodes = 19;
    public const int MinBits = 2; // min number of Huffman bits
    public const int MaxBits = 9; // max number of Huffman bits
    public const int TableBits = 8;
    public const int TableMask = (1 << TableBits) - 1;
    public const int LengthsTableBits = 7;
    public const int LengthsTableMask = (1 << LengthsTableBits) - 1;

    // Returns the next bit-reversed prefix code after 'key' for codes of length 'len'.
    static uint GetNextKey (uint key, int len)
    {
        uint step = 1u << (len - 1);
        while (0 != (key & step))
            step >>= 1;
        return (key & (step - 1)) + step;
    }

    /// <summary>
    /// Builds a canonical Huffman lookup table from 'code_lengths' into 'root_table'
    /// starting at 'index'.  The root table has 2^root_bits entries; longer codes
    /// spill into second-level tables appended after it.
    /// Returns the total number of table entries used, or 0 on malformed input
    /// (over-subscribed/incomplete code, or a length above MaxAllowedCodeLength).
    /// </summary>
    public static int BuildTable (HuffmanCode[] root_table, int index, int root_bits, int[] code_lengths, int code_lengths_size)
    {
        int table = index; // next available space in table
        int total_size = 1 << root_bits; // total size root table + 2nd level table
        int len; // current code length
        int symbol; // symbol index in original or sorted table
        // number of codes of each length:
        int[] count = new int[MaxAllowedCodeLength + 1];
        // offsets in sorted table for each length:
        int[] offset = new int[MaxAllowedCodeLength + 1];
        // Build histogram of code lengths.
        for (symbol = 0; symbol < code_lengths_size; ++symbol)
        {
            if (code_lengths[symbol] > MaxAllowedCodeLength)
                return 0;
            ++count[code_lengths[symbol]];
        }
        // Error, all code lengths are zeros.
        if (count[0] == code_lengths_size)
            return 0;
        // Generate offsets into sorted symbol table by code length.
        offset[1] = 0;
        for (len = 1; len < MaxAllowedCodeLength; ++len)
        {
            if (count[len] > (1 << len))
                return 0;
            offset[len + 1] = offset[len] + count[len];
        }
        var sorted = new int[code_lengths_size];
        // Sort symbols by length, by symbol order within each length.
        for (symbol = 0; symbol < code_lengths_size; ++symbol)
        {
            int symbol_code_length = code_lengths[symbol];
            if (code_lengths[symbol] > 0)
                sorted[offset[symbol_code_length]++] = symbol;
        }
        // Special case code with only one value.
        if (offset[MaxAllowedCodeLength] == 1)
        {
            HuffmanCode code;
            code.bits = 0;
            code.value = (ushort)sorted[0];
            ReplicateValue (root_table, table, 1, total_size, code);
            return total_size;
        }
        int step; // step size to replicate values in current table
        uint low = uint.MaxValue; // low bits for current root entry
        uint mask = (uint)total_size - 1; // mask for low bits
        uint key = 0; // reversed prefix code
        int num_nodes = 1; // number of Huffman tree nodes
        int num_open = 1; // number of open branches in current tree level
        int table_bits = root_bits; // key length of current table
        int table_size = 1 << table_bits; // size of current table
        symbol = 0;
        // Fill in root table.
        for (len = 1, step = 2; len <= root_bits; ++len, step <<= 1)
        {
            num_open <<= 1;
            num_nodes += num_open;
            num_open -= count[len];
            if (num_open < 0) // over-subscribed code
                return 0;
            for (; count[len] > 0; --count[len])
            {
                HuffmanCode code;
                code.bits = (byte)len;
                code.value = (ushort)sorted[symbol++];
                ReplicateValue (root_table, table + (int)key, step, table_size, code);
                key = GetNextKey (key, len);
            }
        }
        // Fill in 2nd level tables and add pointers to root table.
        for (len = root_bits + 1, step = 2; len <= MaxAllowedCodeLength; ++len, step <<= 1)
        {
            num_open <<= 1;
            num_nodes += num_open;
            num_open -= count[len];
            if (num_open < 0) // over-subscribed code
                return 0;
            for (; count[len] > 0; --count[len])
            {
                HuffmanCode code;
                if ((key & mask) != low)
                {
                    // Start a new 2nd level table and link it from the root entry.
                    table += table_size;
                    table_bits = NextTableBitSize (count, len, root_bits);
                    table_size = 1 << table_bits;
                    total_size += table_size;
                    low = key & mask;
                    root_table[index+low].bits = (byte)(table_bits + root_bits);
                    root_table[index+low].value = (ushort)(table - index - low);
                }
                code.bits = (byte)(len - root_bits);
                code.value = (ushort)sorted[symbol++];
                ReplicateValue (root_table, table + (int)(key >> root_bits), step, table_size, code);
                key = GetNextKey (key, len);
            }
        }
        // Check if tree is full.
        if (num_nodes != 2 * offset[MaxAllowedCodeLength] - 1)
            return 0;
        return total_size;
    }

    // Stores 'code' in every 'step'-th entry of table[offset .. offset+end),
    // walking backwards from the end.
    static void ReplicateValue (HuffmanCode[] table, int offset, int step, int end, HuffmanCode code)
    {
        do
        {
            end -= step;
            table[offset+end] = code;
        }
        while (end > 0);
    }

    // Returns the number of key bits needed for the next second-level table,
    // given the remaining 'count' of codes per length starting at length 'len'.
    static int NextTableBitSize (int[] count, int len, int root_bits)
    {
        int left = 1 << (len - root_bits);
        while (len < MaxAllowedCodeLength)
        {
            left -= count[len];
            if (left <= 0) break;
            ++len;
            left <<= 1;
        }
        return len - root_bits;
    }
}
// One entry of a Huffman lookup table (see Huffman.BuildTable).
internal struct HuffmanCode
{
    public byte bits; // number of bits used for this symbol
    public ushort value; // symbol value or table offset
}
// Wider lookup-table entry that can carry a full 32-bit ARGB literal.
internal struct HuffmanCode32
{
    public int bits; // number of bits used for this symbol,
                     // or an impossible value if not a literal code.
    public uint value; // 32b packed ARGB value if literal,
                       // or non-literal symbol otherwise
}
// A group of Huffman meta-trees.  All groups created by New() share one backing
// array of codes; each group records only the base offset of every meta-tree.
internal class HTreeGroup
{
    HuffmanCode[] tables;                                   // shared code storage
    int[] htrees = new int[Huffman.CodesPerMetaCode];       // base offset per meta-tree

    public bool is_trivial_literal; // True, if huffman trees for Red, Blue & Alpha
                                    // Symbols are trivial (have a single code).
    public uint literal_arb;        // If is_trivial_literal is true, this is the
                                    // ARGB value of the pixel, with Green channel
                                    // being set to zero.
    public bool is_trivial_code;    // true if is_trivial_literal with only one code
    public bool use_packed_table;   // use packed table below for short literal code
    // table mapping input bits to a packed values, or escape case to literal code
    public HuffmanCode32[] packed_table = new HuffmanCode32[Huffman.PackedTableSize];

    // Raw access to the shared code storage.
    public HuffmanCode[] Tables { get { return tables; } }

    // Records the base offset of meta-tree 'meta' within the shared storage.
    public void SetMeta (int meta, int base_index) { htrees[meta] = base_index; }

    // Returns the base offset of meta-tree 'meta'.
    public int GetMeta (int meta) { return htrees[meta]; }

    // Reads entry 'index' of meta-tree 'meta'.
    public HuffmanCode GetCode (int meta, int index) { return tables[htrees[meta] + index]; }

    // Writes entry 'index' of meta-tree 'meta'.
    public void SetCode (int meta, int index, HuffmanCode code) { tables[htrees[meta] + index] = code; }

    // Allocates 'num_htree_groups' groups backed by a single shared code array
    // of num_htree_groups * table_size entries.
    public static HTreeGroup[] New (int num_htree_groups, int table_size)
    {
        var shared_codes = new HuffmanCode[num_htree_groups * table_size];
        var groups = new HTreeGroup[num_htree_groups];
        for (int n = 0; n < groups.Length; ++n)
        {
            var group = new HTreeGroup();
            group.tables = shared_codes;
            groups[n] = group;
        }
        return groups;
    }
}
}

View File

@ -0,0 +1,145 @@
//! \file ImageWEBP.cs
//! \date Wed Apr 06 07:16:39 2016
//! \brief Google WEBP image format.
//
// Copyright (C) 2016 by morkt
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//
using System;
using System.ComponentModel.Composition;
using System.IO;
using System.Windows.Media;
using GameRes.Utility;
namespace GameRes.Formats.Google
{
// Image metadata extended with the WebP chunk locations found by ReadMetaData.
internal class WebPMetaData : ImageMetaData
{
    public WebPFeature Flags;   // feature flags read from the VP8X chunk, if present
    public bool IsLossless;     // true for a 'VP8L' bitstream, false for 'VP8 '
    public long DataOffset;     // stream offset of the VP8/VP8L chunk payload
    public int DataSize;        // size of that payload, in bytes
    public long AlphaOffset;    // stream offset of the 'ALPH' chunk payload (0 when absent)
    public int AlphaSize;       // size of the alpha payload, in bytes
}
// Feature bits stored in the flags field of the VP8X chunk.
[Flags]
internal enum WebPFeature : uint
{
    Fragments = 0x0001,
    Animation = 0x0002,
    Xmp = 0x0004,
    Exif = 0x0008,
    Alpha = 0x0010,
    Iccp = 0x0020,
}
[Export(typeof(ImageFormat))]
public class WebPFormat : ImageFormat
{
    public override string Tag { get { return "WEBP"; } }
    public override string Description { get { return "Google WebP image format"; } }
    // RIFF containers are probed manually in ReadMetaData, hence no fixed signature.
    public override uint Signature { get { return 0; } }

    /// <summary>
    /// Walks the RIFF chunk list of a WebP file, extracting dimensions from the
    /// VP8X chunk (when present) or from the VP8/VP8L bitstream header, and
    /// recording the location of the image data and optional alpha chunk.
    /// Returns null if the stream is not a valid WebP image.
    /// </summary>
    public override ImageMetaData ReadMetaData (Stream stream)
    {
        if (0x46464952 != FormatCatalog.ReadSignature (stream)) // 'RIFF'
            return null;
        var header = new byte[0x10];
        if (8 != stream.Read (header, 0, 8)) // RIFF size + form type
            return null;
        if (!Binary.AsciiEqual (header, 4, "WEBP"))
            return null;
        bool found_vp8x = false;
        var info = new WebPMetaData();
        int chunk_size;
        for (;;)
        {
            if (8 != stream.Read (header, 0, 8)) // chunk FourCC + size
                return null;
            chunk_size = LittleEndian.ToInt32 (header, 4);
            // RIFF chunks are padded to an even length.
            int aligned_size = (chunk_size + 1) & ~1;
            if (!found_vp8x && Binary.AsciiEqual (header, 0, "VP8X"))
            {
                found_vp8x = true;
                if (chunk_size < 10)
                    return null;
                if (chunk_size > header.Length)
                    header = new byte[chunk_size];
                if (chunk_size != stream.Read (header, 0, chunk_size))
                    return null;
                info.Flags = (WebPFeature)LittleEndian.ToUInt32 (header, 0);
                // Canvas dimensions are stored minus one, in 24-bit fields.
                info.Width = 1 + GetUInt24 (header, 4);
                info.Height = 1 + GetUInt24 (header, 7);
                if ((long)info.Width * info.Height >= (1L << 32))
                    return null;
                // Skip the padding byte of an odd-sized chunk so the next
                // chunk header is read from the correct position.
                if (aligned_size > chunk_size)
                    stream.Seek (aligned_size - chunk_size, SeekOrigin.Current);
                continue;
            }
            if (Binary.AsciiEqual (header, 0, "VP8 ") || Binary.AsciiEqual (header, 0, "VP8L"))
            {
                info.IsLossless = header[3] == 'L';
                info.DataOffset = stream.Position;
                info.DataSize = chunk_size;
                if (!found_vp8x)
                {
                    // No VP8X chunk: dimensions come from the bitstream itself.
                    if (chunk_size < 10 || 10 != stream.Read (header, 0, 10))
                        return null;
                    // VP8 start code 9D 01 2A follows the 3-byte frame tag.
                    if (header[3] != 0x9D || header[4] != 1 || header[5] != 0x2A)
                        return null;
                    if (0 != (header[0] & 1)) // not a keyframe
                        return null;
                    info.Width = LittleEndian.ToUInt16 (header, 6) & 0x3FFFu;
                    info.Height = LittleEndian.ToUInt16 (header, 8) & 0x3FFFu;
                }
                break;
            }
            if (Binary.AsciiEqual (header, 0, "ALPH"))
            {
                info.AlphaOffset = stream.Position;
                info.AlphaSize = chunk_size;
            }
            stream.Seek (aligned_size, SeekOrigin.Current);
        }
        if (0 == info.Width || 0 == info.Height)
            return null;
        return info;
    }

    // Reads a 24-bit little-endian unsigned value.
    static uint GetUInt24 (byte[] src, int offset)
    {
        return (uint)(src[offset] | src[offset+1] << 8 | src[offset+2] << 16);
    }

    /// <summary>
    /// Decodes the image using the metadata previously gathered by ReadMetaData.
    /// </summary>
    public override ImageData Read (Stream stream, ImageMetaData info)
    {
        using (var reader = new WebPDecoder (stream, (WebPMetaData)info))
        {
            reader.Decode();
            return ImageData.Create (info, reader.Format, null, reader.Output);
        }
    }

    public override void Write (Stream file, ImageData image)
    {
        throw new NotImplementedException ("WebPFormat.Write not implemented");
    }
}
}

1754
ArcFormats/WebP/Lossless.cs Normal file

File diff suppressed because it is too large Load Diff