1 // Copyright 2014 Google Inc. All Rights Reserved.
2 //
3 // Use of this source code is governed by a BSD-style license
4 // that can be found in the COPYING file in the root of the source
5 // tree. An additional intellectual property rights grant can be found
6 // in the file PATENTS. All contributing project authors may
7 // be found in the AUTHORS file in the root of the source tree.
8 // -----------------------------------------------------------------------------
9 //
10 // AnimEncoder implementation.
11 //
12
13 #include <assert.h>
14 #include <limits.h>
15 #include <math.h> // for pow()
16 #include <stdio.h>
17 #include <stdlib.h> // for abs()
18
19 #include "src/mux/animi.h"
20 #include "src/utils/utils.h"
21 #include "src/webp/decode.h"
22 #include "src/webp/encode.h"
23 #include "src/webp/format_constants.h"
24 #include "src/webp/mux.h"
25 #include "src/webp/types.h"
26
27 #if defined(_MSC_VER) && _MSC_VER < 1900
28 #define snprintf _snprintf
29 #endif
30
31 #define ERROR_STR_MAX_LENGTH 100
32
33 //------------------------------------------------------------------------------
34 // Internal structs.
35
36 // Stores frame rectangle dimensions.
37 typedef struct {
38 int x_offset_, y_offset_, width_, height_;
39 } FrameRectangle;
40
41 // Used to store two candidates of encoded data for an animation frame. One of
42 // the two will be chosen later.
43 typedef struct {
44 WebPMuxFrameInfo sub_frame_; // Encoded frame rectangle.
45 WebPMuxFrameInfo key_frame_; // Encoded frame if it is a key-frame.
46 int is_key_frame_; // True if 'key_frame' has been chosen.
47 } EncodedFrame;
48
49 struct WebPAnimEncoder {
50 const int canvas_width_; // Canvas width.
51 const int canvas_height_; // Canvas height.
52 const WebPAnimEncoderOptions options_; // Global encoding options.
53
54 FrameRectangle prev_rect_; // Previous WebP frame rectangle.
55 WebPConfig last_config_; // Cached in case a re-encode is needed.
56 WebPConfig last_config_reversed_; // If 'last_config_' uses lossless, then
57 // this config uses lossy and vice versa;
58 // only valid if 'options_.allow_mixed'
59 // is true.
60
61 WebPPicture* curr_canvas_; // Only pointer; we don't own memory.
62
63 // Canvas buffers.
64 WebPPicture curr_canvas_copy_; // Possibly modified current canvas.
65 int curr_canvas_copy_modified_; // True if pixels in 'curr_canvas_copy_'
66 // differ from those in 'curr_canvas_'.
67
68 WebPPicture prev_canvas_; // Previous canvas.
69 WebPPicture prev_canvas_disposed_; // Previous canvas disposed to background.
70
71 // Encoded data.
72 EncodedFrame* encoded_frames_; // Array of encoded frames.
73 size_t size_; // Number of allocated frames.
74 size_t start_; // Frame start index.
75 size_t count_; // Number of valid frames.
76 size_t flush_count_; // If >0, 'flush_count' frames starting from
77 // 'start' are ready to be added to mux.
78
79 // key-frame related.
80 int64_t best_delta_; // min(canvas size - frame size) over the frames.
81 // Can be negative in certain cases due to
82 // transparent pixels in a frame.
83 int keyframe_; // Index of selected key-frame relative to 'start_'.
84 int count_since_key_frame_; // Frames seen since the last key-frame.
85
86 int first_timestamp_; // Timestamp of the first frame.
87 int prev_timestamp_; // Timestamp of the last added frame.
88 int prev_candidate_undecided_; // True if it's not yet decided if previous
89 // frame would be a sub-frame or a key-frame.
90
91 // Misc.
92 int is_first_frame_; // True if first frame is yet to be added/being added.
93 int got_null_frame_; // True if WebPAnimEncoderAdd() has already been called
94 // with a NULL frame.
95
96 size_t in_frame_count_; // Number of input frames processed so far.
97 size_t out_frame_count_; // Number of frames added to mux so far. This may be
98 // different from 'in_frame_count_' due to merging.
99
100 WebPMux* mux_; // Muxer to assemble the WebP bitstream.
101 char error_str_[ERROR_STR_MAX_LENGTH]; // Error string. Empty if no error.
102 };
103
104 // -----------------------------------------------------------------------------
105 // Life of WebPAnimEncoder object.
106
107 #define DELTA_INFINITY (1ULL << 32)
108 #define KEYFRAME_NONE (-1)
109
110 // Reset the counters in the WebPAnimEncoder.
111 static void ResetCounters(WebPAnimEncoder* const enc) {
112 enc->start_ = 0;
113 enc->count_ = 0;
114 enc->flush_count_ = 0;
115 enc->best_delta_ = DELTA_INFINITY;
116 enc->keyframe_ = KEYFRAME_NONE;
117 }
118
119 static void DisableKeyframes(WebPAnimEncoderOptions* const enc_options) {
120 enc_options->kmax = INT_MAX;
121 enc_options->kmin = enc_options->kmax - 1;
122 }
123
124 #define MAX_CACHED_FRAMES 30
125
126 static void SanitizeEncoderOptions(WebPAnimEncoderOptions* const enc_options) {
127 int print_warning = enc_options->verbose;
128
129 if (enc_options->minimize_size) {
130 DisableKeyframes(enc_options);
131 }
132
133 if (enc_options->kmax == 1) { // All frames will be key-frames.
134 enc_options->kmin = 0;
135 enc_options->kmax = 0;
136 return;
137 } else if (enc_options->kmax <= 0) {
138 DisableKeyframes(enc_options);
139 print_warning = 0;
140 }
141
142 if (enc_options->kmin >= enc_options->kmax) {
143 enc_options->kmin = enc_options->kmax - 1;
144 if (print_warning) {
145 fprintf(stderr, "WARNING: Setting kmin = %d, so that kmin < kmax.\n",
146 enc_options->kmin);
147 }
148 } else {
149 const int kmin_limit = enc_options->kmax / 2 + 1;
150 if (enc_options->kmin < kmin_limit && kmin_limit < enc_options->kmax) {
151 // This ensures that enc.keyframe + kmin >= kmax is always true. So, we
152 // can flush all the frames in the 'count_since_key_frame == kmax' case.
153 enc_options->kmin = kmin_limit;
154 if (print_warning) {
155 fprintf(stderr,
156 "WARNING: Setting kmin = %d, so that kmin >= kmax / 2 + 1.\n",
157 enc_options->kmin);
158 }
159 }
160 }
161 // Limit the max number of frames that are allocated.
162 if (enc_options->kmax - enc_options->kmin > MAX_CACHED_FRAMES) {
163 enc_options->kmin = enc_options->kmax - MAX_CACHED_FRAMES;
164 if (print_warning) {
165 fprintf(stderr,
166 "WARNING: Setting kmin = %d, so that kmax - kmin <= %d.\n",
167 enc_options->kmin, MAX_CACHED_FRAMES);
168 }
169 }
170 assert(enc_options->kmin < enc_options->kmax);
171 }
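
// Illustrative example of the sanitization above (values assumed, not taken
// from any particular caller): with kmin = 1 and kmax = 9, kmin is raised to
// kmax / 2 + 1 = 5, so a key-frame candidate is always cached by the time
// 'count_since_key_frame_' reaches kmax; with kmin = 12 and kmax = 10, kmin
// is lowered to 9 so that kmin < kmax holds.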
172
173 #undef MAX_CACHED_FRAMES
174
175 static void DefaultEncoderOptions(WebPAnimEncoderOptions* const enc_options) {
176 enc_options->anim_params.loop_count = 0;
177 enc_options->anim_params.bgcolor = 0xffffffff; // White.
178 enc_options->minimize_size = 0;
179 DisableKeyframes(enc_options);
180 enc_options->allow_mixed = 0;
181 enc_options->verbose = 0;
182 }
183
184 int WebPAnimEncoderOptionsInitInternal(WebPAnimEncoderOptions* enc_options,
185 int abi_version) {
186 if (enc_options == NULL ||
187 WEBP_ABI_IS_INCOMPATIBLE(abi_version, WEBP_MUX_ABI_VERSION)) {
188 return 0;
189 }
190 DefaultEncoderOptions(enc_options);
191 return 1;
192 }
193
194 // This starting value is better suited to WebPCleanupTransparentAreaLossless().
195 #define TRANSPARENT_COLOR 0x00000000
196
197 static void ClearRectangle(WebPPicture* const picture,
198 int left, int top, int width, int height) {
199 int j;
200 for (j = top; j < top + height; ++j) {
201 uint32_t* const dst = picture->argb + j * picture->argb_stride;
202 int i;
203 for (i = left; i < left + width; ++i) {
204 dst[i] = TRANSPARENT_COLOR;
205 }
206 }
207 }
208
209 static void WebPUtilClearPic(WebPPicture* const picture,
210 const FrameRectangle* const rect) {
211 if (rect != NULL) {
212 ClearRectangle(picture, rect->x_offset_, rect->y_offset_,
213 rect->width_, rect->height_);
214 } else {
215 ClearRectangle(picture, 0, 0, picture->width, picture->height);
216 }
217 }
218
219 static void MarkNoError(WebPAnimEncoder* const enc) {
220 enc->error_str_[0] = '\0'; // Empty string.
221 }
222
223 static void MarkError(WebPAnimEncoder* const enc, const char* str) {
224 if (snprintf(enc->error_str_, ERROR_STR_MAX_LENGTH, "%s.", str) < 0) {
225 assert(0); // FIX ME!
226 }
227 }
228
229 static void MarkError2(WebPAnimEncoder* const enc,
230 const char* str, int error_code) {
231 if (snprintf(enc->error_str_, ERROR_STR_MAX_LENGTH, "%s: %d.", str,
232 error_code) < 0) {
233 assert(0); // FIX ME!
234 }
235 }
236
237 WebPAnimEncoder* WebPAnimEncoderNewInternal(
238 int width, int height, const WebPAnimEncoderOptions* enc_options,
239 int abi_version) {
240 WebPAnimEncoder* enc;
241
242 if (WEBP_ABI_IS_INCOMPATIBLE(abi_version, WEBP_MUX_ABI_VERSION)) {
243 return NULL;
244 }
245 if (width <= 0 || height <= 0 ||
246 (width * (uint64_t)height) >= MAX_IMAGE_AREA) {
247 return NULL;
248 }
249
250 enc = (WebPAnimEncoder*)WebPSafeCalloc(1, sizeof(*enc));
251 if (enc == NULL) return NULL;
252 MarkNoError(enc);
253
254 // Dimensions and options.
255 *(int*)&enc->canvas_width_ = width;
256 *(int*)&enc->canvas_height_ = height;
257 if (enc_options != NULL) {
258 *(WebPAnimEncoderOptions*)&enc->options_ = *enc_options;
259 SanitizeEncoderOptions((WebPAnimEncoderOptions*)&enc->options_);
260 } else {
261 DefaultEncoderOptions((WebPAnimEncoderOptions*)&enc->options_);
262 }
263
264 // Canvas buffers.
265 if (!WebPPictureInit(&enc->curr_canvas_copy_) ||
266 !WebPPictureInit(&enc->prev_canvas_) ||
267 !WebPPictureInit(&enc->prev_canvas_disposed_)) {
268 goto Err;
269 }
270 enc->curr_canvas_copy_.width = width;
271 enc->curr_canvas_copy_.height = height;
272 enc->curr_canvas_copy_.use_argb = 1;
273 if (!WebPPictureAlloc(&enc->curr_canvas_copy_) ||
274 !WebPPictureCopy(&enc->curr_canvas_copy_, &enc->prev_canvas_) ||
275 !WebPPictureCopy(&enc->curr_canvas_copy_, &enc->prev_canvas_disposed_)) {
276 goto Err;
277 }
278 WebPUtilClearPic(&enc->prev_canvas_, NULL);
279 enc->curr_canvas_copy_modified_ = 1;
280
281 // Encoded frames.
282 ResetCounters(enc);
283 // Note: one extra slot is reserved for the previous frame.
284 enc->size_ = enc->options_.kmax - enc->options_.kmin + 1;
285 // We need space for at least 2 frames. But when kmin, kmax are both zero,
286 // enc->size_ will be 1. So we handle that special case below.
287 if (enc->size_ < 2) enc->size_ = 2;
288 enc->encoded_frames_ =
289 (EncodedFrame*)WebPSafeCalloc(enc->size_, sizeof(*enc->encoded_frames_));
290 if (enc->encoded_frames_ == NULL) goto Err;
291
292 enc->mux_ = WebPMuxNew();
293 if (enc->mux_ == NULL) goto Err;
294
295 enc->count_since_key_frame_ = 0;
296 enc->first_timestamp_ = 0;
297 enc->prev_timestamp_ = 0;
298 enc->prev_candidate_undecided_ = 0;
299 enc->is_first_frame_ = 1;
300 enc->got_null_frame_ = 0;
301
302 return enc; // All OK.
303
304 Err:
305 WebPAnimEncoderDelete(enc);
306 return NULL;
307 }
308
309 // Release the data contained by 'encoded_frame'.
310 static void FrameRelease(EncodedFrame* const encoded_frame) {
311 if (encoded_frame != NULL) {
312 WebPDataClear(&encoded_frame->sub_frame_.bitstream);
313 WebPDataClear(&encoded_frame->key_frame_.bitstream);
314 memset(encoded_frame, 0, sizeof(*encoded_frame));
315 }
316 }
317
318 void WebPAnimEncoderDelete(WebPAnimEncoder* enc) {
319 if (enc != NULL) {
320 WebPPictureFree(&enc->curr_canvas_copy_);
321 WebPPictureFree(&enc->prev_canvas_);
322 WebPPictureFree(&enc->prev_canvas_disposed_);
323 if (enc->encoded_frames_ != NULL) {
324 size_t i;
325 for (i = 0; i < enc->size_; ++i) {
326 FrameRelease(&enc->encoded_frames_[i]);
327 }
328 WebPSafeFree(enc->encoded_frames_);
329 }
330 WebPMuxDelete(enc->mux_);
331 WebPSafeFree(enc);
332 }
333 }
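
// Minimal usage sketch of the lifecycle implemented above (client-side code,
// for illustration only; 'frames', 'timestamps', 'kNumFrames' and
// 'final_timestamp' are hypothetical names):
//
//   WebPAnimEncoderOptions opts;
//   WebPAnimEncoderOptionsInit(&opts);
//   WebPAnimEncoder* const enc = WebPAnimEncoderNew(width, height, &opts);
//   for (int i = 0; i < kNumFrames; ++i) {
//     WebPAnimEncoderAdd(enc, &frames[i], timestamps[i], NULL);
//   }
//   WebPAnimEncoderAdd(enc, NULL, final_timestamp, NULL);  // Mark the end.
//   WebPData webp_data;
//   WebPDataInit(&webp_data);
//   WebPAnimEncoderAssemble(enc, &webp_data);
//   // ... use webp_data ...
//   WebPDataClear(&webp_data);
//   WebPAnimEncoderDelete(enc);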
334
335 // -----------------------------------------------------------------------------
336 // Frame addition.
337
338 // Returns cached frame at the given 'position'.
339 static EncodedFrame* GetFrame(const WebPAnimEncoder* const enc,
340 size_t position) {
341 assert(enc->start_ + position < enc->size_);
342 return &enc->encoded_frames_[enc->start_ + position];
343 }
344
345 typedef int (*ComparePixelsFunc)(const uint32_t*, int, const uint32_t*, int,
346 int, int);
347
348 // Returns true if 'length' number of pixels in 'src' and 'dst' are equal,
349 // assuming the given step sizes between pixels.
350 // 'max_allowed_diff' is unused and only there to allow function pointer use.
351 static WEBP_INLINE int ComparePixelsLossless(const uint32_t* src, int src_step,
352 const uint32_t* dst, int dst_step,
353 int length, int max_allowed_diff) {
354 (void)max_allowed_diff;
355 assert(length > 0);
356 while (length-- > 0) {
357 if (*src != *dst) {
358 return 0;
359 }
360 src += src_step;
361 dst += dst_step;
362 }
363 return 1;
364 }
365
366 // Helper to check if each channel in 'src' and 'dst' is at most off by
367 // 'max_allowed_diff'.
368 static WEBP_INLINE int PixelsAreSimilar(uint32_t src, uint32_t dst,
369 int max_allowed_diff) {
370 const int src_a = (src >> 24) & 0xff;
371 const int src_r = (src >> 16) & 0xff;
372 const int src_g = (src >> 8) & 0xff;
373 const int src_b = (src >> 0) & 0xff;
374 const int dst_a = (dst >> 24) & 0xff;
375 const int dst_r = (dst >> 16) & 0xff;
376 const int dst_g = (dst >> 8) & 0xff;
377 const int dst_b = (dst >> 0) & 0xff;
378
379 return (src_a == dst_a) &&
380 (abs(src_r - dst_r) * dst_a <= (max_allowed_diff * 255)) &&
381 (abs(src_g - dst_g) * dst_a <= (max_allowed_diff * 255)) &&
382 (abs(src_b - dst_b) * dst_a <= (max_allowed_diff * 255));
383 }
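
// Example with assumed numbers: for max_allowed_diff == 10 and a fully opaque
// pair (dst_a == 255), a per-channel difference of up to 10 passes, since
// abs(diff) * 255 <= 10 * 255. At dst_a == 128 the same bound allows a
// difference of up to 19, i.e. more transparent pixels are compared more
// leniently; alpha itself must always match exactly.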
384
385 // Returns true if 'length' number of pixels in 'src' and 'dst' are within an
386 // error bound, assuming the given step sizes between pixels.
387 static WEBP_INLINE int ComparePixelsLossy(const uint32_t* src, int src_step,
388 const uint32_t* dst, int dst_step,
389 int length, int max_allowed_diff) {
390 assert(length > 0);
391 while (length-- > 0) {
392 if (!PixelsAreSimilar(*src, *dst, max_allowed_diff)) {
393 return 0;
394 }
395 src += src_step;
396 dst += dst_step;
397 }
398 return 1;
399 }
400
401 static int IsEmptyRect(const FrameRectangle* const rect) {
402 return (rect->width_ == 0) || (rect->height_ == 0);
403 }
404
405 static int QualityToMaxDiff(float quality) {
406 const double val = pow(quality / 100., 0.5);
407 const double max_diff = 31 * (1 - val) + 1 * val;
408 return (int)(max_diff + 0.5);
409 }
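
// For reference (derived from the formula above): quality 100 maps to a
// max_diff of 1, quality 0 maps to 31, and quality 50 maps to about 10
// (sqrt(0.5) ~= 0.71, so 31 * 0.29 + 0.71 ~= 9.8, rounded to 10).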
410
411 // Assumes that an initial valid guess of change rectangle 'rect' is passed.
412 static void MinimizeChangeRectangle(const WebPPicture* const src,
413 const WebPPicture* const dst,
414 FrameRectangle* const rect,
415 int is_lossless, float quality) {
416 int i, j;
417 const ComparePixelsFunc compare_pixels =
418 is_lossless ? ComparePixelsLossless : ComparePixelsLossy;
419 const int max_allowed_diff_lossy = QualityToMaxDiff(quality);
420 const int max_allowed_diff = is_lossless ? 0 : max_allowed_diff_lossy;
421
422 // Assumption/correctness checks.
423 assert(src->width == dst->width && src->height == dst->height);
424 assert(rect->x_offset_ + rect->width_ <= dst->width);
425 assert(rect->y_offset_ + rect->height_ <= dst->height);
426
427 // Left boundary.
428 for (i = rect->x_offset_; i < rect->x_offset_ + rect->width_; ++i) {
429 const uint32_t* const src_argb =
430 &src->argb[rect->y_offset_ * src->argb_stride + i];
431 const uint32_t* const dst_argb =
432 &dst->argb[rect->y_offset_ * dst->argb_stride + i];
433 if (compare_pixels(src_argb, src->argb_stride, dst_argb, dst->argb_stride,
434 rect->height_, max_allowed_diff)) {
435 --rect->width_; // Redundant column.
436 ++rect->x_offset_;
437 } else {
438 break;
439 }
440 }
441 if (rect->width_ == 0) goto NoChange;
442
443 // Right boundary.
444 for (i = rect->x_offset_ + rect->width_ - 1; i >= rect->x_offset_; --i) {
445 const uint32_t* const src_argb =
446 &src->argb[rect->y_offset_ * src->argb_stride + i];
447 const uint32_t* const dst_argb =
448 &dst->argb[rect->y_offset_ * dst->argb_stride + i];
449 if (compare_pixels(src_argb, src->argb_stride, dst_argb, dst->argb_stride,
450 rect->height_, max_allowed_diff)) {
451 --rect->width_; // Redundant column.
452 } else {
453 break;
454 }
455 }
456 if (rect->width_ == 0) goto NoChange;
457
458 // Top boundary.
459 for (j = rect->y_offset_; j < rect->y_offset_ + rect->height_; ++j) {
460 const uint32_t* const src_argb =
461 &src->argb[j * src->argb_stride + rect->x_offset_];
462 const uint32_t* const dst_argb =
463 &dst->argb[j * dst->argb_stride + rect->x_offset_];
464 if (compare_pixels(src_argb, 1, dst_argb, 1, rect->width_,
465 max_allowed_diff)) {
466 --rect->height_; // Redundant row.
467 ++rect->y_offset_;
468 } else {
469 break;
470 }
471 }
472 if (rect->height_ == 0) goto NoChange;
473
474 // Bottom boundary.
475 for (j = rect->y_offset_ + rect->height_ - 1; j >= rect->y_offset_; --j) {
476 const uint32_t* const src_argb =
477 &src->argb[j * src->argb_stride + rect->x_offset_];
478 const uint32_t* const dst_argb =
479 &dst->argb[j * dst->argb_stride + rect->x_offset_];
480 if (compare_pixels(src_argb, 1, dst_argb, 1, rect->width_,
481 max_allowed_diff)) {
482 --rect->height_; // Redundant row.
483 } else {
484 break;
485 }
486 }
487 if (rect->height_ == 0) goto NoChange;
488
489 if (IsEmptyRect(rect)) {
490 NoChange:
491 rect->x_offset_ = 0;
492 rect->y_offset_ = 0;
493 rect->width_ = 0;
494 rect->height_ = 0;
495 }
496 }
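
// In effect (illustrative summary): starting from the initial guess, the four
// loops above peel off unchanged columns from the left and right, then
// unchanged rows from the top and bottom, so that only the bounding box of
// the changed pixels (up to the lossy tolerance) remains.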
497
498 // Snap rectangle to even offsets (and adjust dimensions if needed).
499 static WEBP_INLINE void SnapToEvenOffsets(FrameRectangle* const rect) {
500 rect->width_ += (rect->x_offset_ & 1);
501 rect->height_ += (rect->y_offset_ & 1);
502 rect->x_offset_ &= ~1;
503 rect->y_offset_ &= ~1;
504 }
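
// E.g. a rectangle at (3, 5) of size 10x7 becomes a rectangle at (2, 4) of
// size 11x8: odd offsets are rounded down and the dimensions grow by the same
// amount, so the snapped rectangle still covers the original one.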
505
506 typedef struct {
507 int should_try_; // Should try this set of parameters.
508 int empty_rect_allowed_; // Frame with empty rectangle can be skipped.
509 FrameRectangle rect_ll_; // Frame rectangle for lossless compression.
510 WebPPicture sub_frame_ll_; // Sub-frame pic for lossless compression.
511 FrameRectangle rect_lossy_; // Frame rectangle for lossy compression.
512 // Could be smaller than rect_ll_ as pixels
513 // with small diffs can be ignored.
514 WebPPicture sub_frame_lossy_; // Sub-frame pic for lossy compression.
515 } SubFrameParams;
516
517 static int SubFrameParamsInit(SubFrameParams* const params,
518 int should_try, int empty_rect_allowed) {
519 params->should_try_ = should_try;
520 params->empty_rect_allowed_ = empty_rect_allowed;
521 if (!WebPPictureInit(¶ms->sub_frame_ll_) ||
522 !WebPPictureInit(¶ms->sub_frame_lossy_)) {
523 return 0;
524 }
525 return 1;
526 }
527
528 static void SubFrameParamsFree(SubFrameParams* const params) {
529 WebPPictureFree(¶ms->sub_frame_ll_);
530 WebPPictureFree(¶ms->sub_frame_lossy_);
531 }
532
533 // Given previous and current canvas, picks the optimal rectangle for the
534 // current frame based on 'is_lossless' and other parameters. Assumes that the
535 // initial guess 'rect' is valid.
536 static int GetSubRect(const WebPPicture* const prev_canvas,
537 const WebPPicture* const curr_canvas, int is_key_frame,
538 int is_first_frame, int empty_rect_allowed,
539 int is_lossless, float quality,
540 FrameRectangle* const rect,
541 WebPPicture* const sub_frame) {
542 if (!is_key_frame || is_first_frame) { // Optimize frame rectangle.
543 // Note: This behaves as expected for first frame, as 'prev_canvas' is
544 // initialized to a fully transparent canvas in the beginning.
545 MinimizeChangeRectangle(prev_canvas, curr_canvas, rect,
546 is_lossless, quality);
547 }
548
549 if (IsEmptyRect(rect)) {
550 if (empty_rect_allowed) { // No need to get 'sub_frame'.
551 return 1;
552 } else { // Force a 1x1 rectangle.
553 rect->width_ = 1;
554 rect->height_ = 1;
555 assert(rect->x_offset_ == 0);
556 assert(rect->y_offset_ == 0);
557 }
558 }
559
560 SnapToEvenOffsets(rect);
561 return WebPPictureView(curr_canvas, rect->x_offset_, rect->y_offset_,
562 rect->width_, rect->height_, sub_frame);
563 }
564
565 // Picks optimal frame rectangle for both lossless and lossy compression. The
566 // initial guess for frame rectangles will be the full canvas.
567 static int GetSubRects(const WebPPicture* const prev_canvas,
568 const WebPPicture* const curr_canvas, int is_key_frame,
569 int is_first_frame, float quality,
570 SubFrameParams* const params) {
571 // Lossless frame rectangle.
572 params->rect_ll_.x_offset_ = 0;
573 params->rect_ll_.y_offset_ = 0;
574 params->rect_ll_.width_ = curr_canvas->width;
575 params->rect_ll_.height_ = curr_canvas->height;
576 if (!GetSubRect(prev_canvas, curr_canvas, is_key_frame, is_first_frame,
577 params->empty_rect_allowed_, 1, quality,
578 ¶ms->rect_ll_, ¶ms->sub_frame_ll_)) {
579 return 0;
580 }
581 // Lossy frame rectangle.
582 params->rect_lossy_ = params->rect_ll_; // seed with lossless rect.
583 return GetSubRect(prev_canvas, curr_canvas, is_key_frame, is_first_frame,
584 params->empty_rect_allowed_, 0, quality,
585 ¶ms->rect_lossy_, ¶ms->sub_frame_lossy_);
586 }
587
588 static WEBP_INLINE int clip(int v, int min_v, int max_v) {
589 return (v < min_v) ? min_v : (v > max_v) ? max_v : v;
590 }
591
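
// Helper with external linkage, used outside this translation unit: clips the
// rectangle described by 'x_offset'/'y_offset'/'width'/'height' to the
// canvas, shrinks it to the minimal area that differs between 'prev_canvas'
// and 'curr_canvas', snaps it to even offsets and writes the result back
// through the same parameters. Returns 0 on invalid input (NULL canvases,
// mismatched dimensions or non-ARGB pictures).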
592 int WebPAnimEncoderRefineRect(
593 const WebPPicture* const prev_canvas, const WebPPicture* const curr_canvas,
594 int is_lossless, float quality, int* const x_offset, int* const y_offset,
595 int* const width, int* const height) {
596 FrameRectangle rect;
597 int right, left, bottom, top;
598 if (prev_canvas == NULL || curr_canvas == NULL ||
599 prev_canvas->width != curr_canvas->width ||
600 prev_canvas->height != curr_canvas->height ||
601 !prev_canvas->use_argb || !curr_canvas->use_argb) {
602 return 0;
603 }
604 right = clip(*x_offset + *width, 0, curr_canvas->width);
605 left = clip(*x_offset, 0, curr_canvas->width - 1);
606 bottom = clip(*y_offset + *height, 0, curr_canvas->height);
607 top = clip(*y_offset, 0, curr_canvas->height - 1);
608 rect.x_offset_ = left;
609 rect.y_offset_ = top;
610 rect.width_ = clip(right - left, 0, curr_canvas->width - rect.x_offset_);
611 rect.height_ = clip(bottom - top, 0, curr_canvas->height - rect.y_offset_);
612 MinimizeChangeRectangle(prev_canvas, curr_canvas, &rect, is_lossless,
613 quality);
614 SnapToEvenOffsets(&rect);
615 *x_offset = rect.x_offset_;
616 *y_offset = rect.y_offset_;
617 *width = rect.width_;
618 *height = rect.height_;
619 return 1;
620 }
621
622 static void DisposeFrameRectangle(int dispose_method,
623 const FrameRectangle* const rect,
624 WebPPicture* const curr_canvas) {
625 assert(rect != NULL);
626 if (dispose_method == WEBP_MUX_DISPOSE_BACKGROUND) {
627 WebPUtilClearPic(curr_canvas, rect);
628 }
629 }
630
631 static uint32_t RectArea(const FrameRectangle* const rect) {
632 return (uint32_t)rect->width_ * rect->height_;
633 }
634
635 static int IsLosslessBlendingPossible(const WebPPicture* const src,
636 const WebPPicture* const dst,
637 const FrameRectangle* const rect) {
638 int i, j;
639 assert(src->width == dst->width && src->height == dst->height);
640 assert(rect->x_offset_ + rect->width_ <= dst->width);
641 assert(rect->y_offset_ + rect->height_ <= dst->height);
642 for (j = rect->y_offset_; j < rect->y_offset_ + rect->height_; ++j) {
643 for (i = rect->x_offset_; i < rect->x_offset_ + rect->width_; ++i) {
644 const uint32_t src_pixel = src->argb[j * src->argb_stride + i];
645 const uint32_t dst_pixel = dst->argb[j * dst->argb_stride + i];
646 const uint32_t dst_alpha = dst_pixel >> 24;
647 if (dst_alpha != 0xff && src_pixel != dst_pixel) {
648 // In this case, if we use blending, we can't attain the desired
649 // 'dst_pixel' value for this pixel. So, blending is not possible.
650 return 0;
651 }
652 }
653 }
654 return 1;
655 }
656
657 static int IsLossyBlendingPossible(const WebPPicture* const src,
658 const WebPPicture* const dst,
659 const FrameRectangle* const rect,
660 float quality) {
661 const int max_allowed_diff_lossy = QualityToMaxDiff(quality);
662 int i, j;
663 assert(src->width == dst->width && src->height == dst->height);
664 assert(rect->x_offset_ + rect->width_ <= dst->width);
665 assert(rect->y_offset_ + rect->height_ <= dst->height);
666 for (j = rect->y_offset_; j < rect->y_offset_ + rect->height_; ++j) {
667 for (i = rect->x_offset_; i < rect->x_offset_ + rect->width_; ++i) {
668 const uint32_t src_pixel = src->argb[j * src->argb_stride + i];
669 const uint32_t dst_pixel = dst->argb[j * dst->argb_stride + i];
670 const uint32_t dst_alpha = dst_pixel >> 24;
671 if (dst_alpha != 0xff &&
672 !PixelsAreSimilar(src_pixel, dst_pixel, max_allowed_diff_lossy)) {
673 // In this case, if we use blending, we can't attain the desired
674 // 'dst_pixel' value for this pixel. So, blending is not possible.
675 return 0;
676 }
677 }
678 }
679 return 1;
680 }
681
682 // For pixels in 'rect', replace those pixels in 'dst' that are same as 'src' by
683 // transparent pixels.
684 // Returns true if at least one pixel gets modified.
685 static int IncreaseTransparency(const WebPPicture* const src,
686 const FrameRectangle* const rect,
687 WebPPicture* const dst) {
688 int i, j;
689 int modified = 0;
690 assert(src != NULL && dst != NULL && rect != NULL);
691 assert(src->width == dst->width && src->height == dst->height);
692 for (j = rect->y_offset_; j < rect->y_offset_ + rect->height_; ++j) {
693 const uint32_t* const psrc = src->argb + j * src->argb_stride;
694 uint32_t* const pdst = dst->argb + j * dst->argb_stride;
695 for (i = rect->x_offset_; i < rect->x_offset_ + rect->width_; ++i) {
696 if (psrc[i] == pdst[i] && pdst[i] != TRANSPARENT_COLOR) {
697 pdst[i] = TRANSPARENT_COLOR;
698 modified = 1;
699 }
700 }
701 }
702 return modified;
703 }
704
705 #undef TRANSPARENT_COLOR
706
707 // Replace similar blocks of pixels by a 'see-through' transparent block
708 // with uniform average color.
709 // Assumes lossy compression is being used.
710 // Returns true if at least one pixel gets modified.
711 static int FlattenSimilarBlocks(const WebPPicture* const src,
712 const FrameRectangle* const rect,
713 WebPPicture* const dst, float quality) {
714 const int max_allowed_diff_lossy = QualityToMaxDiff(quality);
715 int i, j;
716 int modified = 0;
717 const int block_size = 8;
718 const int y_start = (rect->y_offset_ + block_size) & ~(block_size - 1);
719 const int y_end = (rect->y_offset_ + rect->height_) & ~(block_size - 1);
720 const int x_start = (rect->x_offset_ + block_size) & ~(block_size - 1);
721 const int x_end = (rect->x_offset_ + rect->width_) & ~(block_size - 1);
722 assert(src != NULL && dst != NULL && rect != NULL);
723 assert(src->width == dst->width && src->height == dst->height);
724 assert((block_size & (block_size - 1)) == 0); // must be a power of 2
725 // Iterate over each block and count similar pixels.
726 for (j = y_start; j < y_end; j += block_size) {
727 for (i = x_start; i < x_end; i += block_size) {
728 int cnt = 0;
729 int avg_r = 0, avg_g = 0, avg_b = 0;
730 int x, y;
731 const uint32_t* const psrc = src->argb + j * src->argb_stride + i;
732 uint32_t* const pdst = dst->argb + j * dst->argb_stride + i;
733 for (y = 0; y < block_size; ++y) {
734 for (x = 0; x < block_size; ++x) {
735 const uint32_t src_pixel = psrc[x + y * src->argb_stride];
736 const int alpha = src_pixel >> 24;
737 if (alpha == 0xff &&
738 PixelsAreSimilar(src_pixel, pdst[x + y * dst->argb_stride],
739 max_allowed_diff_lossy)) {
740 ++cnt;
741 avg_r += (src_pixel >> 16) & 0xff;
742 avg_g += (src_pixel >> 8) & 0xff;
743 avg_b += (src_pixel >> 0) & 0xff;
744 }
745 }
746 }
747 // If we have a fully similar block, we replace it with an
748 // average transparent block. This compresses better in lossy mode.
749 if (cnt == block_size * block_size) {
750 const uint32_t color = (0x00 << 24) |
751 ((avg_r / cnt) << 16) |
752 ((avg_g / cnt) << 8) |
753 ((avg_b / cnt) << 0);
754 for (y = 0; y < block_size; ++y) {
755 for (x = 0; x < block_size; ++x) {
756 pdst[x + y * dst->argb_stride] = color;
757 }
758 }
759 modified = 1;
760 }
761 }
762 }
763 return modified;
764 }
765
766 static int EncodeFrame(const WebPConfig* const config, WebPPicture* const pic,
767 WebPMemoryWriter* const memory) {
768 pic->use_argb = 1;
769 pic->writer = WebPMemoryWrite;
770 pic->custom_ptr = memory;
771 if (!WebPEncode(config, pic)) {
772 return 0;
773 }
774 return 1;
775 }
776
777 // Struct representing a candidate encoded frame including its metadata.
778 typedef struct {
779 WebPMemoryWriter mem_;
780 WebPMuxFrameInfo info_;
781 FrameRectangle rect_;
782 int evaluate_; // True if this candidate should be evaluated.
783 } Candidate;
784
785 // Generates a candidate encoded frame given a picture and metadata.
786 static WebPEncodingError EncodeCandidate(WebPPicture* const sub_frame,
787 const FrameRectangle* const rect,
788 const WebPConfig* const encoder_config,
789 int use_blending,
790 Candidate* const candidate) {
791 WebPConfig config = *encoder_config;
792 WebPEncodingError error_code = VP8_ENC_OK;
793 assert(candidate != NULL);
794 memset(candidate, 0, sizeof(*candidate));
795
796 // Set frame rect and info.
797 candidate->rect_ = *rect;
798 candidate->info_.id = WEBP_CHUNK_ANMF;
799 candidate->info_.x_offset = rect->x_offset_;
800 candidate->info_.y_offset = rect->y_offset_;
801 candidate->info_.dispose_method = WEBP_MUX_DISPOSE_NONE; // Set later.
802 candidate->info_.blend_method =
803 use_blending ? WEBP_MUX_BLEND : WEBP_MUX_NO_BLEND;
804 candidate->info_.duration = 0; // Set in next call to WebPAnimEncoderAdd().
805
806 // Encode picture.
807 WebPMemoryWriterInit(&candidate->mem_);
808
809 if (!config.lossless && use_blending) {
810 // Disable filtering to avoid blockiness in reconstructed frames at the
811 // time of decoding.
812 config.autofilter = 0;
813 config.filter_strength = 0;
814 }
815 if (!EncodeFrame(&config, sub_frame, &candidate->mem_)) {
816 error_code = sub_frame->error_code;
817 goto Err;
818 }
819
820 candidate->evaluate_ = 1;
821 return error_code;
822
823 Err:
824 WebPMemoryWriterClear(&candidate->mem_);
825 return error_code;
826 }
827
828 static void CopyCurrentCanvas(WebPAnimEncoder* const enc) {
829 if (enc->curr_canvas_copy_modified_) {
830 WebPCopyPixels(enc->curr_canvas_, &enc->curr_canvas_copy_);
831 enc->curr_canvas_copy_.progress_hook = enc->curr_canvas_->progress_hook;
832 enc->curr_canvas_copy_.user_data = enc->curr_canvas_->user_data;
833 enc->curr_canvas_copy_modified_ = 0;
834 }
835 }
836
837 enum {
838 LL_DISP_NONE = 0,
839 LL_DISP_BG,
840 LOSSY_DISP_NONE,
841 LOSSY_DISP_BG,
842 CANDIDATE_COUNT
843 };
844
845 #define MIN_COLORS_LOSSY 31 // Don't try lossy below this threshold.
846 #define MAX_COLORS_LOSSLESS 194 // Don't try lossless above this threshold.
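
// Note on the thresholds above: in the mixed-mode heuristic below
// (allow_mixed set and minimize_size unset), a palette of 31..193 colors
// triggers both a lossless and a lossy candidate, and the smaller encoding is
// kept later in PickBestCandidate().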
847
848 // Generates candidates for a given dispose method given pre-filled sub-frame
849 // 'params'.
850 static WebPEncodingError GenerateCandidates(
851 WebPAnimEncoder* const enc, Candidate candidates[CANDIDATE_COUNT],
852 WebPMuxAnimDispose dispose_method, int is_lossless, int is_key_frame,
853 SubFrameParams* const params,
854 const WebPConfig* const config_ll, const WebPConfig* const config_lossy) {
855 WebPEncodingError error_code = VP8_ENC_OK;
856 const int is_dispose_none = (dispose_method == WEBP_MUX_DISPOSE_NONE);
857 Candidate* const candidate_ll =
858 is_dispose_none ? &candidates[LL_DISP_NONE] : &candidates[LL_DISP_BG];
859 Candidate* const candidate_lossy = is_dispose_none
860 ? &candidates[LOSSY_DISP_NONE]
861 : &candidates[LOSSY_DISP_BG];
862 WebPPicture* const curr_canvas = &enc->curr_canvas_copy_;
863 const WebPPicture* const prev_canvas =
864 is_dispose_none ? &enc->prev_canvas_ : &enc->prev_canvas_disposed_;
865 int use_blending_ll, use_blending_lossy;
866 int evaluate_ll, evaluate_lossy;
867
868 CopyCurrentCanvas(enc);
869 use_blending_ll =
870 !is_key_frame &&
871 IsLosslessBlendingPossible(prev_canvas, curr_canvas, ¶ms->rect_ll_);
872 use_blending_lossy =
873 !is_key_frame &&
874 IsLossyBlendingPossible(prev_canvas, curr_canvas, ¶ms->rect_lossy_,
875 config_lossy->quality);
876
877 // Pick candidates to be tried.
878 if (!enc->options_.allow_mixed) {
879 evaluate_ll = is_lossless;
880 evaluate_lossy = !is_lossless;
881 } else if (enc->options_.minimize_size) {
882 evaluate_ll = 1;
883 evaluate_lossy = 1;
884 } else { // Use a heuristic for trying lossless and/or lossy compression.
885 const int num_colors = WebPGetColorPalette(¶ms->sub_frame_ll_, NULL);
886 evaluate_ll = (num_colors < MAX_COLORS_LOSSLESS);
887 evaluate_lossy = (num_colors >= MIN_COLORS_LOSSY);
888 }
889
890 // Generate candidates.
891 if (evaluate_ll) {
892 CopyCurrentCanvas(enc);
893 if (use_blending_ll) {
894 enc->curr_canvas_copy_modified_ =
895 IncreaseTransparency(prev_canvas, ¶ms->rect_ll_, curr_canvas);
896 }
897 error_code = EncodeCandidate(¶ms->sub_frame_ll_, ¶ms->rect_ll_,
898 config_ll, use_blending_ll, candidate_ll);
899 if (error_code != VP8_ENC_OK) return error_code;
900 }
901 if (evaluate_lossy) {
902 CopyCurrentCanvas(enc);
903 if (use_blending_lossy) {
904 enc->curr_canvas_copy_modified_ =
905 FlattenSimilarBlocks(prev_canvas, ¶ms->rect_lossy_, curr_canvas,
906 config_lossy->quality);
907 }
908 error_code =
909 EncodeCandidate(¶ms->sub_frame_lossy_, ¶ms->rect_lossy_,
910 config_lossy, use_blending_lossy, candidate_lossy);
911 if (error_code != VP8_ENC_OK) return error_code;
912 enc->curr_canvas_copy_modified_ = 1;
913 }
914 return error_code;
915 }
916
917 #undef MIN_COLORS_LOSSY
918 #undef MAX_COLORS_LOSSLESS
919
920 static void GetEncodedData(const WebPMemoryWriter* const memory,
921 WebPData* const encoded_data) {
922 encoded_data->bytes = memory->mem;
923 encoded_data->size = memory->size;
924 }
925
926 // Sets dispose method of the previous frame to be 'dispose_method'.
927 static void SetPreviousDisposeMethod(WebPAnimEncoder* const enc,
928 WebPMuxAnimDispose dispose_method) {
929 const size_t position = enc->count_ - 2;
930 EncodedFrame* const prev_enc_frame = GetFrame(enc, position);
931 assert(enc->count_ >= 2); // As current and previous frames are in enc.
932
933 if (enc->prev_candidate_undecided_) {
934 assert(dispose_method == WEBP_MUX_DISPOSE_NONE);
935 prev_enc_frame->sub_frame_.dispose_method = dispose_method;
936 prev_enc_frame->key_frame_.dispose_method = dispose_method;
937 } else {
938 WebPMuxFrameInfo* const prev_info = prev_enc_frame->is_key_frame_
939 ? &prev_enc_frame->key_frame_
940 : &prev_enc_frame->sub_frame_;
941 prev_info->dispose_method = dispose_method;
942 }
943 }
944
945 static int IncreasePreviousDuration(WebPAnimEncoder* const enc, int duration) {
946 const size_t position = enc->count_ - 1;
947 EncodedFrame* const prev_enc_frame = GetFrame(enc, position);
948 int new_duration;
949
950 assert(enc->count_ >= 1);
951 assert(!prev_enc_frame->is_key_frame_ ||
952 prev_enc_frame->sub_frame_.duration ==
953 prev_enc_frame->key_frame_.duration);
954 assert(prev_enc_frame->sub_frame_.duration ==
955 (prev_enc_frame->sub_frame_.duration & (MAX_DURATION - 1)));
956 assert(duration == (duration & (MAX_DURATION - 1)));
957
958 new_duration = prev_enc_frame->sub_frame_.duration + duration;
959 if (new_duration >= MAX_DURATION) { // Special case.
960 // Separate out previous frame from earlier merged frames to avoid overflow.
961 // We add a 1x1 transparent frame for the previous frame, with blending on.
962 const FrameRectangle rect = { 0, 0, 1, 1 };
963 const uint8_t lossless_1x1_bytes[] = {
964 0x52, 0x49, 0x46, 0x46, 0x14, 0x00, 0x00, 0x00, 0x57, 0x45, 0x42, 0x50,
965 0x56, 0x50, 0x38, 0x4c, 0x08, 0x00, 0x00, 0x00, 0x2f, 0x00, 0x00, 0x00,
966 0x10, 0x88, 0x88, 0x08
967 };
968 const WebPData lossless_1x1 = {
969 lossless_1x1_bytes, sizeof(lossless_1x1_bytes)
970 };
971 const uint8_t lossy_1x1_bytes[] = {
972 0x52, 0x49, 0x46, 0x46, 0x40, 0x00, 0x00, 0x00, 0x57, 0x45, 0x42, 0x50,
973 0x56, 0x50, 0x38, 0x58, 0x0a, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
974 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x41, 0x4c, 0x50, 0x48, 0x02, 0x00,
975 0x00, 0x00, 0x00, 0x00, 0x56, 0x50, 0x38, 0x20, 0x18, 0x00, 0x00, 0x00,
976 0x30, 0x01, 0x00, 0x9d, 0x01, 0x2a, 0x01, 0x00, 0x01, 0x00, 0x02, 0x00,
977 0x34, 0x25, 0xa4, 0x00, 0x03, 0x70, 0x00, 0xfe, 0xfb, 0xfd, 0x50, 0x00
978 };
979 const WebPData lossy_1x1 = { lossy_1x1_bytes, sizeof(lossy_1x1_bytes) };
980 const int can_use_lossless =
981 (enc->last_config_.lossless || enc->options_.allow_mixed);
982 EncodedFrame* const curr_enc_frame = GetFrame(enc, enc->count_);
983 curr_enc_frame->is_key_frame_ = 0;
984 curr_enc_frame->sub_frame_.id = WEBP_CHUNK_ANMF;
985 curr_enc_frame->sub_frame_.x_offset = 0;
986 curr_enc_frame->sub_frame_.y_offset = 0;
987 curr_enc_frame->sub_frame_.dispose_method = WEBP_MUX_DISPOSE_NONE;
988 curr_enc_frame->sub_frame_.blend_method = WEBP_MUX_BLEND;
989 curr_enc_frame->sub_frame_.duration = duration;
990 if (!WebPDataCopy(can_use_lossless ? &lossless_1x1 : &lossy_1x1,
991 &curr_enc_frame->sub_frame_.bitstream)) {
992 return 0;
993 }
994 ++enc->count_;
995 ++enc->count_since_key_frame_;
996 enc->flush_count_ = enc->count_ - 1;
997 enc->prev_candidate_undecided_ = 0;
998 enc->prev_rect_ = rect;
999 } else { // Regular case.
1000 // Increase duration of the previous frame by 'duration'.
1001 prev_enc_frame->sub_frame_.duration = new_duration;
1002 prev_enc_frame->key_frame_.duration = new_duration;
1003 }
1004 return 1;
1005 }
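
// Illustrative scenario for the special case above: if the previous frame's
// accumulated duration plus 'duration' would reach MAX_DURATION, a canned 1x1
// transparent frame (blending on, DISPOSE_NONE) is appended to carry the new
// 'duration', so no single stored frame duration ever exceeds its bound.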
1006
1007 // Pick the candidate encoded frame with smallest size and release other
1008 // candidates.
1009 // TODO(later): Perhaps a rough SSIM/PSNR produced by the encoder should
1010 // also be a criterion, in addition to sizes.
1011 static void PickBestCandidate(WebPAnimEncoder* const enc,
1012 Candidate* const candidates, int is_key_frame,
1013 EncodedFrame* const encoded_frame) {
1014 int i;
1015 int best_idx = -1;
1016 size_t best_size = ~0;
1017 for (i = 0; i < CANDIDATE_COUNT; ++i) {
1018 if (candidates[i].evaluate_) {
1019 const size_t candidate_size = candidates[i].mem_.size;
1020 if (candidate_size < best_size) {
1021 best_idx = i;
1022 best_size = candidate_size;
1023 }
1024 }
1025 }
1026 assert(best_idx != -1);
1027 for (i = 0; i < CANDIDATE_COUNT; ++i) {
1028 if (candidates[i].evaluate_) {
1029 if (i == best_idx) {
1030 WebPMuxFrameInfo* const dst = is_key_frame
1031 ? &encoded_frame->key_frame_
1032 : &encoded_frame->sub_frame_;
1033 *dst = candidates[i].info_;
1034 GetEncodedData(&candidates[i].mem_, &dst->bitstream);
1035 if (!is_key_frame) {
1036 // Note: Previous dispose method only matters for non-keyframes.
1037 // Also, we don't want to modify previous dispose method that was
1038 // selected when a non key-frame was assumed.
1039 const WebPMuxAnimDispose prev_dispose_method =
1040 (best_idx == LL_DISP_NONE || best_idx == LOSSY_DISP_NONE)
1041 ? WEBP_MUX_DISPOSE_NONE
1042 : WEBP_MUX_DISPOSE_BACKGROUND;
1043 SetPreviousDisposeMethod(enc, prev_dispose_method);
1044 }
1045 enc->prev_rect_ = candidates[i].rect_; // save for next frame.
1046 } else {
1047 WebPMemoryWriterClear(&candidates[i].mem_);
1048 candidates[i].evaluate_ = 0;
1049 }
1050 }
1051 }
1052 }
1053
1054 // Depending on the configuration, tries different compression types
1055 // (lossy/lossless), dispose methods, blend methods, etc. to encode the
1056 // current frame, and outputs the best one in 'encoded_frame'.
1057 // 'frame_skipped' will be set to true if this frame should actually be skipped.
1058 static WebPEncodingError SetFrame(WebPAnimEncoder* const enc,
1059 const WebPConfig* const config,
1060 int is_key_frame,
1061 EncodedFrame* const encoded_frame,
1062 int* const frame_skipped) {
1063 int i;
1064 WebPEncodingError error_code = VP8_ENC_OK;
1065 const WebPPicture* const curr_canvas = &enc->curr_canvas_copy_;
1066 const WebPPicture* const prev_canvas = &enc->prev_canvas_;
1067 Candidate candidates[CANDIDATE_COUNT];
1068 const int is_lossless = config->lossless;
1069 const int consider_lossless = is_lossless || enc->options_.allow_mixed;
1070 const int consider_lossy = !is_lossless || enc->options_.allow_mixed;
1071 const int is_first_frame = enc->is_first_frame_;
1072
1073 // The first frame cannot be skipped as there is no 'previous frame' to merge
1074 // it into. So, an empty rectangle is not allowed for the first frame.
1075 const int empty_rect_allowed_none = !is_first_frame;
1076
1077 // Even if there is exact pixel match between 'disposed previous canvas' and
1078 // 'current canvas', we can't skip current frame, as there may not be exact
1079 // pixel match between 'previous canvas' and 'current canvas'. So, we don't
1080 // allow empty rectangle in this case.
1081 const int empty_rect_allowed_bg = 0;
1082
1083 // If current frame is a key-frame, dispose method of previous frame doesn't
1084 // matter, so we don't try dispose to background.
1085 // Also, if key-frame insertion is on, and previous frame could be picked as
1086 // either a sub-frame or a key-frame, then we can't be sure about what frame
1087 // rectangle would be disposed. In that case too, we don't try dispose to
1088 // background.
1089 const int dispose_bg_possible =
1090 !is_key_frame && !enc->prev_candidate_undecided_;
1091
1092 SubFrameParams dispose_none_params;
1093 SubFrameParams dispose_bg_params;
1094
1095 WebPConfig config_ll = *config;
1096 WebPConfig config_lossy = *config;
1097 config_ll.lossless = 1;
1098 config_lossy.lossless = 0;
1099 enc->last_config_ = *config;
1100 enc->last_config_reversed_ = config->lossless ? config_lossy : config_ll;
1101 *frame_skipped = 0;
1102
1103 if (!SubFrameParamsInit(&dispose_none_params, 1, empty_rect_allowed_none) ||
1104 !SubFrameParamsInit(&dispose_bg_params, 0, empty_rect_allowed_bg)) {
1105 return VP8_ENC_ERROR_INVALID_CONFIGURATION;
1106 }
1107
1108 memset(candidates, 0, sizeof(candidates));
1109
1110 // Change-rectangle assuming previous frame was DISPOSE_NONE.
1111 if (!GetSubRects(prev_canvas, curr_canvas, is_key_frame, is_first_frame,
1112 config_lossy.quality, &dispose_none_params)) {
1113 error_code = VP8_ENC_ERROR_INVALID_CONFIGURATION;
1114 goto Err;
1115 }
1116
1117 if ((consider_lossless && IsEmptyRect(&dispose_none_params.rect_ll_)) ||
1118 (consider_lossy && IsEmptyRect(&dispose_none_params.rect_lossy_))) {
1119 // Don't encode the frame at all. Instead, the duration of the previous
1120 // frame will be increased later.
1121 assert(empty_rect_allowed_none);
1122 *frame_skipped = 1;
1123 goto End;
1124 }
1125
1126 if (dispose_bg_possible) {
1127 // Change-rectangle assuming previous frame was DISPOSE_BACKGROUND.
1128 WebPPicture* const prev_canvas_disposed = &enc->prev_canvas_disposed_;
1129 WebPCopyPixels(prev_canvas, prev_canvas_disposed);
1130 DisposeFrameRectangle(WEBP_MUX_DISPOSE_BACKGROUND, &enc->prev_rect_,
1131 prev_canvas_disposed);
1132
1133 if (!GetSubRects(prev_canvas_disposed, curr_canvas, is_key_frame,
1134 is_first_frame, config_lossy.quality,
1135 &dispose_bg_params)) {
1136 error_code = VP8_ENC_ERROR_INVALID_CONFIGURATION;
1137 goto Err;
1138 }
1139 assert(!IsEmptyRect(&dispose_bg_params.rect_ll_));
1140 assert(!IsEmptyRect(&dispose_bg_params.rect_lossy_));
1141
1142 if (enc->options_.minimize_size) { // Try both dispose methods.
1143 dispose_bg_params.should_try_ = 1;
1144 dispose_none_params.should_try_ = 1;
1145 } else if ((is_lossless &&
1146 RectArea(&dispose_bg_params.rect_ll_) <
1147 RectArea(&dispose_none_params.rect_ll_)) ||
1148 (!is_lossless &&
1149 RectArea(&dispose_bg_params.rect_lossy_) <
1150 RectArea(&dispose_none_params.rect_lossy_))) {
1151 dispose_bg_params.should_try_ = 1; // Pick DISPOSE_BACKGROUND.
1152 dispose_none_params.should_try_ = 0;
1153 }
1154 }
1155
1156 if (dispose_none_params.should_try_) {
1157 error_code = GenerateCandidates(
1158 enc, candidates, WEBP_MUX_DISPOSE_NONE, is_lossless, is_key_frame,
1159 &dispose_none_params, &config_ll, &config_lossy);
1160 if (error_code != VP8_ENC_OK) goto Err;
1161 }
1162
1163 if (dispose_bg_params.should_try_) {
1164 assert(!enc->is_first_frame_);
1165 assert(dispose_bg_possible);
1166 error_code = GenerateCandidates(
1167 enc, candidates, WEBP_MUX_DISPOSE_BACKGROUND, is_lossless, is_key_frame,
1168 &dispose_bg_params, &config_ll, &config_lossy);
1169 if (error_code != VP8_ENC_OK) goto Err;
1170 }
1171
1172 PickBestCandidate(enc, candidates, is_key_frame, encoded_frame);
1173
1174 goto End;
1175
1176 Err:
1177 for (i = 0; i < CANDIDATE_COUNT; ++i) {
1178 if (candidates[i].evaluate_) {
1179 WebPMemoryWriterClear(&candidates[i].mem_);
1180 }
1181 }
1182
1183 End:
1184 SubFrameParamsFree(&dispose_none_params);
1185 SubFrameParamsFree(&dispose_bg_params);
1186 return error_code;
1187 }
1188
1189 // Calculate the penalty incurred if we encode given frame as a key frame
1190 // instead of a sub-frame.
1191 static int64_t KeyFramePenalty(const EncodedFrame* const encoded_frame) {
1192 return ((int64_t)encoded_frame->key_frame_.bitstream.size -
1193 encoded_frame->sub_frame_.bitstream.size);
1194 }
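
// A negative penalty means the key-frame encoding is actually smaller than
// the sub-frame encoding (e.g. when the sub-frame rectangle covers most of
// the canvas), which makes it a strong key-frame candidate in CacheFrame()
// below.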
1195
1196 static int CacheFrame(WebPAnimEncoder* const enc,
1197 const WebPConfig* const config) {
1198 int ok = 0;
1199 int frame_skipped = 0;
1200 WebPEncodingError error_code = VP8_ENC_OK;
1201 const size_t position = enc->count_;
1202 EncodedFrame* const encoded_frame = GetFrame(enc, position);
1203
1204 ++enc->count_;
1205
1206 if (enc->is_first_frame_) { // Add this as a key-frame.
1207 error_code = SetFrame(enc, config, 1, encoded_frame, &frame_skipped);
1208 if (error_code != VP8_ENC_OK) goto End;
1209 assert(frame_skipped == 0); // First frame can't be skipped, even if empty.
1210 assert(position == 0 && enc->count_ == 1);
1211 encoded_frame->is_key_frame_ = 1;
1212 enc->flush_count_ = 0;
1213 enc->count_since_key_frame_ = 0;
1214 enc->prev_candidate_undecided_ = 0;
1215 } else {
1216 ++enc->count_since_key_frame_;
1217 if (enc->count_since_key_frame_ <= enc->options_.kmin) {
1218 // Add this as a frame rectangle.
1219 error_code = SetFrame(enc, config, 0, encoded_frame, &frame_skipped);
1220 if (error_code != VP8_ENC_OK) goto End;
1221 if (frame_skipped) goto Skip;
1222 encoded_frame->is_key_frame_ = 0;
1223 enc->flush_count_ = enc->count_ - 1;
1224 enc->prev_candidate_undecided_ = 0;
1225 } else {
1226 int64_t curr_delta;
1227 FrameRectangle prev_rect_key, prev_rect_sub;
1228
1229 // Add this as a frame rectangle to enc.
1230 error_code = SetFrame(enc, config, 0, encoded_frame, &frame_skipped);
1231 if (error_code != VP8_ENC_OK) goto End;
1232 if (frame_skipped) goto Skip;
1233 prev_rect_sub = enc->prev_rect_;
1234
1235
1236 // Add this as a key-frame to enc, too.
1237 error_code = SetFrame(enc, config, 1, encoded_frame, &frame_skipped);
1238 if (error_code != VP8_ENC_OK) goto End;
1239 assert(frame_skipped == 0); // Key-frame cannot be an empty rectangle.
1240 prev_rect_key = enc->prev_rect_;
1241
1242 // Analyze size difference of the two variants.
1243 curr_delta = KeyFramePenalty(encoded_frame);
1244 if (curr_delta <= enc->best_delta_) { // Pick this as the key-frame.
1245 if (enc->keyframe_ != KEYFRAME_NONE) {
1246 EncodedFrame* const old_keyframe = GetFrame(enc, enc->keyframe_);
1247 assert(old_keyframe->is_key_frame_);
1248 old_keyframe->is_key_frame_ = 0;
1249 }
1250 encoded_frame->is_key_frame_ = 1;
1251 enc->prev_candidate_undecided_ = 1;
1252 enc->keyframe_ = (int)position;
1253 enc->best_delta_ = curr_delta;
1254 enc->flush_count_ = enc->count_ - 1; // We can flush previous frames.
1255 } else {
1256 encoded_frame->is_key_frame_ = 0;
1257 enc->prev_candidate_undecided_ = 0;
1258 }
1259 // Note: We need '>=' below because when kmin and kmax are both zero,
1260 // count_since_key_frame will always be > kmax.
1261 if (enc->count_since_key_frame_ >= enc->options_.kmax) {
1262 enc->flush_count_ = enc->count_ - 1;
1263 enc->count_since_key_frame_ = 0;
1264 enc->keyframe_ = KEYFRAME_NONE;
1265 enc->best_delta_ = DELTA_INFINITY;
1266 }
1267 if (!enc->prev_candidate_undecided_) {
1268 enc->prev_rect_ =
1269 encoded_frame->is_key_frame_ ? prev_rect_key : prev_rect_sub;
1270 }
1271 }
1272 }
1273
1274 // Update previous to previous and previous canvases for next call.
1275 WebPCopyPixels(enc->curr_canvas_, &enc->prev_canvas_);
1276 enc->is_first_frame_ = 0;
1277
1278 Skip:
1279 ok = 1;
1280 ++enc->in_frame_count_;
1281
1282 End:
1283 if (!ok || frame_skipped) {
1284 FrameRelease(encoded_frame);
1285 // We reset some counters, as the frame addition failed/was skipped.
1286 --enc->count_;
1287 if (!enc->is_first_frame_) --enc->count_since_key_frame_;
1288 if (!ok) {
1289 MarkError2(enc, "ERROR adding frame. WebPEncodingError", error_code);
1290 }
1291 }
1292 enc->curr_canvas_->error_code = error_code; // report error_code
1293 assert(ok || error_code != VP8_ENC_OK);
1294 return ok;
1295 }
1296
1297 static int FlushFrames(WebPAnimEncoder* const enc) {
1298 while (enc->flush_count_ > 0) {
1299 WebPMuxError err;
1300 EncodedFrame* const curr = GetFrame(enc, 0);
1301 const WebPMuxFrameInfo* const info =
1302 curr->is_key_frame_ ? &curr->key_frame_ : &curr->sub_frame_;
1303 assert(enc->mux_ != NULL);
1304 err = WebPMuxPushFrame(enc->mux_, info, 1);
1305 if (err != WEBP_MUX_OK) {
1306 MarkError2(enc, "ERROR adding frame. WebPMuxError", err);
1307 return 0;
1308 }
1309 if (enc->options_.verbose) {
1310 fprintf(stderr, "INFO: Added frame. offset:%d,%d dispose:%d blend:%d\n",
1311 info->x_offset, info->y_offset, info->dispose_method,
1312 info->blend_method);
1313 }
1314 ++enc->out_frame_count_;
1315 FrameRelease(curr);
1316 ++enc->start_;
1317 --enc->flush_count_;
1318 --enc->count_;
1319 if (enc->keyframe_ != KEYFRAME_NONE) --enc->keyframe_;
1320 }
1321
1322 if (enc->count_ == 1 && enc->start_ != 0) {
1323 // Move enc->start to index 0.
1324 const int enc_start_tmp = (int)enc->start_;
1325 EncodedFrame temp = enc->encoded_frames_[0];
1326 enc->encoded_frames_[0] = enc->encoded_frames_[enc_start_tmp];
1327 enc->encoded_frames_[enc_start_tmp] = temp;
1328 FrameRelease(&enc->encoded_frames_[enc_start_tmp]);
1329 enc->start_ = 0;
1330 }
1331 return 1;
1332 }
1333
1334 #undef DELTA_INFINITY
1335 #undef KEYFRAME_NONE
1336
1337 int WebPAnimEncoderAdd(WebPAnimEncoder* enc, WebPPicture* frame, int timestamp,
1338 const WebPConfig* encoder_config) {
1339 WebPConfig config;
1340 int ok;
1341
1342 if (enc == NULL) {
1343 return 0;
1344 }
1345 MarkNoError(enc);
1346
1347 if (!enc->is_first_frame_) {
1348 // Make sure timestamps are non-decreasing (integer wrap-around is OK).
1349 const uint32_t prev_frame_duration =
1350 (uint32_t)timestamp - enc->prev_timestamp_;
1351 if (prev_frame_duration >= MAX_DURATION) {
1352 if (frame != NULL) {
1353 frame->error_code = VP8_ENC_ERROR_INVALID_CONFIGURATION;
1354 }
1355 MarkError(enc, "ERROR adding frame: timestamps must be non-decreasing");
1356 return 0;
1357 }
1358 if (!IncreasePreviousDuration(enc, (int)prev_frame_duration)) {
1359 return 0;
1360 }
1361 // IncreasePreviousDuration() may add a frame to avoid exceeding
1362 // MAX_DURATION, which could cause CacheFrame() to read past the end of
1363 // 'encoded_frames_' before the next flush.
1364 if (enc->count_ == enc->size_ && !FlushFrames(enc)) {
1365 return 0;
1366 }
1367 } else {
1368 enc->first_timestamp_ = timestamp;
1369 }
1370
1371 if (frame == NULL) { // Special: last call.
1372 enc->got_null_frame_ = 1;
1373 enc->prev_timestamp_ = timestamp;
1374 return 1;
1375 }
1376
1377 if (frame->width != enc->canvas_width_ ||
1378 frame->height != enc->canvas_height_) {
1379 frame->error_code = VP8_ENC_ERROR_INVALID_CONFIGURATION;
1380 MarkError(enc, "ERROR adding frame: Invalid frame dimensions");
1381 return 0;
1382 }
1383
1384 if (!frame->use_argb) { // Convert frame from YUV(A) to ARGB.
1385 if (enc->options_.verbose) {
1386 fprintf(stderr, "WARNING: Converting frame from YUV(A) to ARGB format; "
1387 "this incurs a small loss.\n");
1388 }
1389 if (!WebPPictureYUVAToARGB(frame)) {
1390 MarkError(enc, "ERROR converting frame from YUV(A) to ARGB");
1391 return 0;
1392 }
1393 }
1394
1395 if (encoder_config != NULL) {
1396 if (!WebPValidateConfig(encoder_config)) {
1397 MarkError(enc, "ERROR adding frame: Invalid WebPConfig");
1398 return 0;
1399 }
1400 config = *encoder_config;
1401 } else {
1402 if (!WebPConfigInit(&config)) {
1403 MarkError(enc, "Cannot Init config");
1404 return 0;
1405 }
1406 config.lossless = 1;
1407 }
1408 assert(enc->curr_canvas_ == NULL);
1409 enc->curr_canvas_ = frame; // Store reference.
1410 assert(enc->curr_canvas_copy_modified_ == 1);
1411 CopyCurrentCanvas(enc);
1412
1413 ok = CacheFrame(enc, &config) && FlushFrames(enc);
1414
1415 enc->curr_canvas_ = NULL;
1416 enc->curr_canvas_copy_modified_ = 1;
1417 if (ok) {
1418 enc->prev_timestamp_ = timestamp;
1419 }
1420 return ok;
1421 }
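
// Timestamp semantics, illustrated with assumed values: adding frames at
// timestamps 0, 40 and 80 and then calling WebPAnimEncoderAdd(enc, NULL, 120,
// NULL) gives each of the three frames a duration of 40, because a frame's
// duration is only known once the next timestamp (or the final NULL call)
// arrives.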
1422
1423 // -----------------------------------------------------------------------------
1424 // Bitstream assembly.
1425
1426 WEBP_NODISCARD static int DecodeFrameOntoCanvas(
1427 const WebPMuxFrameInfo* const frame, WebPPicture* const canvas) {
1428 const WebPData* const image = &frame->bitstream;
1429 WebPPicture sub_image;
1430 WebPDecoderConfig config;
1431 if (!WebPInitDecoderConfig(&config)) {
1432 return 0;
1433 }
1434 WebPUtilClearPic(canvas, NULL);
1435 if (WebPGetFeatures(image->bytes, image->size, &config.input) !=
1436 VP8_STATUS_OK) {
1437 return 0;
1438 }
1439 if (!WebPPictureView(canvas, frame->x_offset, frame->y_offset,
1440 config.input.width, config.input.height, &sub_image)) {
1441 return 0;
1442 }
1443 config.output.is_external_memory = 1;
1444 config.output.colorspace = MODE_BGRA;
1445 config.output.u.RGBA.rgba = (uint8_t*)sub_image.argb;
1446 config.output.u.RGBA.stride = sub_image.argb_stride * 4;
1447 config.output.u.RGBA.size = config.output.u.RGBA.stride * sub_image.height;
1448
1449 if (WebPDecode(image->bytes, image->size, &config) != VP8_STATUS_OK) {
1450 return 0;
1451 }
1452 return 1;
1453 }
1454
1455 static int FrameToFullCanvas(WebPAnimEncoder* const enc,
1456 const WebPMuxFrameInfo* const frame,
1457 WebPData* const full_image) {
1458 WebPPicture* const canvas_buf = &enc->curr_canvas_copy_;
1459 WebPMemoryWriter mem1, mem2;
1460 WebPMemoryWriterInit(&mem1);
1461 WebPMemoryWriterInit(&mem2);
1462
1463 if (!DecodeFrameOntoCanvas(frame, canvas_buf)) goto Err;
1464 if (!EncodeFrame(&enc->last_config_, canvas_buf, &mem1)) goto Err;
1465 GetEncodedData(&mem1, full_image);
1466
1467 if (enc->options_.allow_mixed) {
1468 if (!EncodeFrame(&enc->last_config_reversed_, canvas_buf, &mem2)) goto Err;
1469 if (mem2.size < mem1.size) {
1470 GetEncodedData(&mem2, full_image);
1471 WebPMemoryWriterClear(&mem1);
1472 } else {
1473 WebPMemoryWriterClear(&mem2);
1474 }
1475 }
1476 return 1;
1477
1478 Err:
1479 WebPMemoryWriterClear(&mem1);
1480 WebPMemoryWriterClear(&mem2);
1481 return 0;
1482 }
1483
1484 // Convert a single-frame animation to a non-animated image if appropriate.
1485 // TODO(urvang): Can we pick one of the two heuristically (based on frame
1486 // rectangle and/or presence of alpha)?
1487 static WebPMuxError OptimizeSingleFrame(WebPAnimEncoder* const enc,
1488 WebPData* const webp_data) {
1489 WebPMuxError err = WEBP_MUX_OK;
1490 int canvas_width, canvas_height;
1491 WebPMuxFrameInfo frame;
1492 WebPData full_image;
1493 WebPData webp_data2;
1494 WebPMux* const mux = WebPMuxCreate(webp_data, 0);
1495 if (mux == NULL) return WEBP_MUX_BAD_DATA;
1496 assert(enc->out_frame_count_ == 1);
1497 WebPDataInit(&frame.bitstream);
1498 WebPDataInit(&full_image);
1499 WebPDataInit(&webp_data2);
1500
1501 err = WebPMuxGetFrame(mux, 1, &frame);
1502 if (err != WEBP_MUX_OK) goto End;
1503 if (frame.id != WEBP_CHUNK_ANMF) goto End; // Non-animation: nothing to do.
1504 err = WebPMuxGetCanvasSize(mux, &canvas_width, &canvas_height);
1505 if (err != WEBP_MUX_OK) goto End;
1506 if (!FrameToFullCanvas(enc, &frame, &full_image)) {
1507 err = WEBP_MUX_BAD_DATA;
1508 goto End;
1509 }
1510 err = WebPMuxSetImage(mux, &full_image, 1);
1511 if (err != WEBP_MUX_OK) goto End;
1512 err = WebPMuxAssemble(mux, &webp_data2);
1513 if (err != WEBP_MUX_OK) goto End;
1514
1515 if (webp_data2.size < webp_data->size) { // Pick 'webp_data2' if smaller.
1516 WebPDataClear(webp_data);
1517 *webp_data = webp_data2;
1518 WebPDataInit(&webp_data2);
1519 }
1520
1521 End:
1522 WebPDataClear(&frame.bitstream);
1523 WebPDataClear(&full_image);
1524 WebPMuxDelete(mux);
1525 WebPDataClear(&webp_data2);
1526 return err;
1527 }
1528
1529 int WebPAnimEncoderAssemble(WebPAnimEncoder* enc, WebPData* webp_data) {
1530 WebPMux* mux;
1531 WebPMuxError err;
1532
1533 if (enc == NULL) {
1534 return 0;
1535 }
1536 MarkNoError(enc);
1537
1538 if (webp_data == NULL) {
1539 MarkError(enc, "ERROR assembling: NULL input");
1540 return 0;
1541 }
1542
1543 if (enc->in_frame_count_ == 0) {
1544 MarkError(enc, "ERROR: No frames to assemble");
1545 return 0;
1546 }
1547
1548 if (!enc->got_null_frame_ && enc->in_frame_count_ > 1 && enc->count_ > 0) {
1549 // Set the duration of the last frame to the average of the previous durations.
1550 const double delta_time =
1551 (uint32_t)enc->prev_timestamp_ - enc->first_timestamp_;
1552 const int average_duration = (int)(delta_time / (enc->in_frame_count_ - 1));
1553 if (!IncreasePreviousDuration(enc, average_duration)) {
1554 return 0;
1555 }
1556 }
1557
1558 // Flush any remaining frames.
1559 enc->flush_count_ = enc->count_;
1560 if (!FlushFrames(enc)) {
1561 return 0;
1562 }
1563
1564 // Set definitive canvas size.
1565 mux = enc->mux_;
1566 err = WebPMuxSetCanvasSize(mux, enc->canvas_width_, enc->canvas_height_);
1567 if (err != WEBP_MUX_OK) goto Err;
1568
1569 err = WebPMuxSetAnimationParams(mux, &enc->options_.anim_params);
1570 if (err != WEBP_MUX_OK) goto Err;
1571
1572 // Assemble into a WebP bitstream.
1573 err = WebPMuxAssemble(mux, webp_data);
1574 if (err != WEBP_MUX_OK) goto Err;
1575
1576 if (enc->out_frame_count_ == 1) {
1577 err = OptimizeSingleFrame(enc, webp_data);
1578 if (err != WEBP_MUX_OK) goto Err;
1579 }
1580 return 1;
1581
1582 Err:
1583 MarkError2(enc, "ERROR assembling WebP", err);
1584 return 0;
1585 }
1586
1587 const char* WebPAnimEncoderGetError(WebPAnimEncoder* enc) {
1588 if (enc == NULL) return NULL;
1589 return enc->error_str_;
1590 }
1591
1592 WebPMuxError WebPAnimEncoderSetChunk(
1593 WebPAnimEncoder* enc, const char fourcc[4], const WebPData* chunk_data,
1594 int copy_data) {
1595 if (enc == NULL) return WEBP_MUX_INVALID_ARGUMENT;
1596 return WebPMuxSetChunk(enc->mux_, fourcc, chunk_data, copy_data);
1597 }
1598
1599 WebPMuxError WebPAnimEncoderGetChunk(
1600 const WebPAnimEncoder* enc, const char fourcc[4], WebPData* chunk_data) {
1601 if (enc == NULL) return WEBP_MUX_INVALID_ARGUMENT;
1602 return WebPMuxGetChunk(enc->mux_, fourcc, chunk_data);
1603 }
1604
1605 WebPMuxError WebPAnimEncoderDeleteChunk(
1606 WebPAnimEncoder* enc, const char fourcc[4]) {
1607 if (enc == NULL) return WEBP_MUX_INVALID_ARGUMENT;
1608 return WebPMuxDeleteChunk(enc->mux_, fourcc);
1609 }
1610
1611 // -----------------------------------------------------------------------------
1612