ladybird/Userland/Utilities/animation.cpp

/*
 * Copyright (c) 2024, Nico Weber <thakis@chromium.org>
 *
 * SPDX-License-Identifier: BSD-2-Clause
 */

#include <LibCore/ArgsParser.h>
#include <LibCore/File.h>
#include <LibCore/MappedFile.h>
#include <LibGfx/ImageFormats/AnimationWriter.h>
#include <LibGfx/ImageFormats/GIFWriter.h>
#include <LibGfx/ImageFormats/ImageDecoder.h>
#include <LibGfx/ImageFormats/WebPWriter.h>
#include <LibMain/Main.h>

struct Options {
    StringView in_path;
    StringView out_path;
    bool write_full_frames { false };
};

static ErrorOr<Options> parse_options(Main::Arguments arguments)
{
    Options options;
    Core::ArgsParser args_parser;
    args_parser.add_positional_argument(options.in_path, "Path to input image file", "FILE");
    args_parser.add_option(options.out_path, "Path to output image file", "output", 'o', "FILE");
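    // Incremental frames keep the output small, but some clients assume the size of the last
    // VP8L chunk is the size of the image, and extracting an arbitrary frame requires decoding
    // every frame before it. This flag lets users opt out of incremental frames for such cases.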
    args_parser.add_option(options.write_full_frames, "Do not store incremental frames. Produces larger files.", "write-full-frames");
    args_parser.parse(arguments);

    if (options.out_path.is_empty())
        return Error::from_string_literal("-o is required");

    return options;
}

ErrorOr<int> serenity_main(Main::Arguments arguments)
{
    Options options = TRY(parse_options(arguments));

    // FIXME: Allow multiple single frames as input too, and allow manually setting their duration.
    auto file = TRY(Core::MappedFile::map(options.in_path));
    auto decoder = TRY(Gfx::ImageDecoder::try_create_for_raw_bytes(file->bytes()));
    if (!decoder)
        return Error::from_string_literal("Could not find decoder for input file");

    auto output_file = TRY(Core::File::open(options.out_path, Core::File::OpenMode::Write));
    auto output_stream = TRY(Core::OutputBufferedFile::create(move(output_file)));
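
    // Pick an animation writer based on the extension of the output path.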
    auto animation_writer = TRY([&]() -> ErrorOr<NonnullOwnPtr<Gfx::AnimationWriter>> {
        if (options.out_path.ends_with(".webp"sv))
            return Gfx::WebPWriter::start_encoding_animation(*output_stream, decoder->size(), decoder->loop_count());
        if (options.out_path.ends_with(".gif"sv))
            return Gfx::GIFWriter::start_encoding_animation(*output_stream, decoder->size(), decoder->loop_count());
        return Error::from_string_literal("Unable to find an encoder for the requested extension.");
    }());

    RefPtr<Gfx::Bitmap> last_frame;
    for (size_t i = 0; i < decoder->frame_count(); ++i) {
        auto frame = TRY(decoder->frame(i));
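        // By default, each frame after the first is stored relative to the previous one: only the
        // rectangle of pixels that changed is written, which keeps the output file small. With
        // --write-full-frames, every frame is stored in full instead.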
        if (options.write_full_frames) {
            TRY(animation_writer->add_frame(*frame.image, frame.duration));
        } else {
            TRY(animation_writer->add_frame_relative_to_last_frame(*frame.image, frame.duration, last_frame));
            last_frame = frame.image;
        }
    }

    return 0;
}