/*
 * Copyright © 2018, VideoLAN and dav1d authors
 * Copyright © 2018, Two Orioles, LLC
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 *
 * 1. Redistributions of source code must retain the above copyright notice, this
 *    list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
 * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
 * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
 * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

#include "config.h"

#include <errno.h>
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#include "common/intops.h"
#include "common/validate.h"

#include "src/internal.h"
#include "src/log.h"
#include "src/picture.h"
#include "src/ref.h"
#include "src/thread.h"
#include "src/thread_task.h"

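/* Default implementation of Dav1dPicAllocator.alloc_picture_callback. It
 * rounds the picture dimensions up to a multiple of 128, pads strides that
 * would alias in set-associative caches, and carves all planes out of a
 * single buffer taken from the memory pool passed in as the cookie. A
 * user-supplied allocator plugged in via Dav1dSettings.allocator has to
 * honor the contract documented for alloc_picture_callback in dav1d.h
 * (DAV1D_PICTURE_ALIGNMENT-aligned planes, dimensions padded to a multiple
 * of 128 pixels, allocator_data free to use for bookkeeping). */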
int dav1d_default_picture_alloc(Dav1dPicture *const p, void *const cookie) {
    assert(sizeof(Dav1dMemPoolBuffer) <= DAV1D_PICTURE_ALIGNMENT);
    const int hbd = p->p.bpc > 8;
    const int aligned_w = (p->p.w + 127) & ~127;
    const int aligned_h = (p->p.h + 127) & ~127;
    const int has_chroma = p->p.layout != DAV1D_PIXEL_LAYOUT_I400;
    const int ss_ver = p->p.layout == DAV1D_PIXEL_LAYOUT_I420;
    const int ss_hor = p->p.layout != DAV1D_PIXEL_LAYOUT_I444;
    ptrdiff_t y_stride = aligned_w << hbd;
    ptrdiff_t uv_stride = has_chroma ? y_stride >> ss_hor : 0;
    /* Due to how mapping of addresses to sets works in most L1 and L2 cache
     * implementations, strides that are multiples of certain power-of-two
     * numbers may cause multiple rows of the same superblock to map to the
     * same set, evicting previous rows and reducing the cache hit rate.
     * Avoid that by slightly padding the stride when necessary. */
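    /* For example, an 8-bit, 2048-pixel-wide picture would otherwise get a
     * 2048-byte luma stride (a multiple of 1024), so it is bumped by
     * DAV1D_PICTURE_ALIGNMENT bytes below. */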
    if (!(y_stride & 1023))
        y_stride += DAV1D_PICTURE_ALIGNMENT;
    if (!(uv_stride & 1023) && has_chroma)
        uv_stride += DAV1D_PICTURE_ALIGNMENT;
    p->stride[0] = y_stride;
    p->stride[1] = uv_stride;
    const size_t y_sz = y_stride * aligned_h;
    const size_t uv_sz = uv_stride * (aligned_h >> ss_ver);
    const size_t pic_size = y_sz + 2 * uv_sz;

    Dav1dMemPoolBuffer *const buf = dav1d_mem_pool_pop(cookie, pic_size +
                                                       DAV1D_PICTURE_ALIGNMENT -
                                                       sizeof(Dav1dMemPoolBuffer));
    if (!buf) return DAV1D_ERR(ENOMEM);
    p->allocator_data = buf;

    uint8_t *const data = buf->data;
    p->data[0] = data;
    p->data[1] = has_chroma ? data + y_sz : NULL;
    p->data[2] = has_chroma ? data + y_sz + uv_sz : NULL;

    return 0;
}

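/* Default implementation of Dav1dPicAllocator.release_picture_callback:
 * returns the buffer allocated above to the memory pool. */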
void dav1d_default_picture_release(Dav1dPicture *const p, void *const cookie) {
    dav1d_mem_pool_push(cookie, p->allocator_data);
}

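/* Per-picture context stored behind the picture's Dav1dRef. It keeps a copy
 * of the allocator and of the picture itself so the release callback can be
 * invoked once the last reference is dropped; extra_data optionally holds
 * per-picture data such as the frame-threading progress counters. */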
struct pic_ctx_context {
    Dav1dPicAllocator allocator;
    Dav1dPicture pic;
    Dav1dRef ref;
    void *extra_data[];
};

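/* Dav1dRef destructor for pictures: hands the picture back to the allocator
 * that created it, then returns the pic_ctx buffer to the context pool. */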
static void free_buffer(const uint8_t *const data, void *const user_data) {
    Dav1dMemPoolBuffer *buf = (Dav1dMemPoolBuffer *)data;
    struct pic_ctx_context *pic_ctx = buf->data;

    pic_ctx->allocator.release_picture_callback(&pic_ctx->pic,
                                                pic_ctx->allocator.cookie);
    dav1d_mem_pool_push(user_data, buf);
}

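/* Dav1dRef destructor for the ITU-T T.35 metadata array: frees each payload,
 * the array itself and the wrapping context. */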
void dav1d_picture_free_itut_t35(const uint8_t *const data, void *const user_data) {
    struct itut_t35_ctx_context *itut_t35_ctx = user_data;

    for (size_t i = 0; i < itut_t35_ctx->n_itut_t35; i++)
        dav1d_free(itut_t35_ctx->itut_t35[i].payload);
    dav1d_free(itut_t35_ctx->itut_t35);
    dav1d_free(itut_t35_ctx);
}

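/* Internal helper that fills in the picture parameters, invokes the
 * user-provided allocator and wraps the result in a reference-counted
 * pic_ctx_context. On success, *extra_ptr (if requested) points at the
 * extra space co-allocated after the context. */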
static int picture_alloc(Dav1dContext *const c,
                         Dav1dPicture *const p,
                         const int w, const int h,
                         Dav1dSequenceHeader *const seq_hdr, Dav1dRef *const seq_hdr_ref,
                         Dav1dFrameHeader *const frame_hdr, Dav1dRef *const frame_hdr_ref,
                         const int bpc,
                         const Dav1dDataProps *const props,
                         Dav1dPicAllocator *const p_allocator,
                         void **const extra_ptr)
{
    if (p->data[0]) {
        dav1d_log(c, "Picture already allocated!\n");
        return -1;
    }
    assert(bpc > 0 && bpc <= 16);

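    /* With frame threading (n_fc > 1), co-allocate room for the two atomic
     * progress counters right after the picture context. */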
    size_t extra = c->n_fc > 1 ? sizeof(atomic_int) * 2 : 0;
    Dav1dMemPoolBuffer *buf = dav1d_mem_pool_pop(c->pic_ctx_pool,
                                                 extra + sizeof(struct pic_ctx_context));
    if (buf == NULL)
        return DAV1D_ERR(ENOMEM);

    struct pic_ctx_context *pic_ctx = buf->data;

    p->p.w = w;
    p->p.h = h;
    p->seq_hdr = seq_hdr;
    p->frame_hdr = frame_hdr;
    p->p.layout = seq_hdr->layout;
    p->p.bpc = bpc;
    dav1d_data_props_set_defaults(&p->m);
    const int res = p_allocator->alloc_picture_callback(p, p_allocator->cookie);
    if (res < 0) {
        dav1d_mem_pool_push(c->pic_ctx_pool, buf);
        return res;
    }

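    /* Wrap the user-allocated picture in a reference so that free_buffer()
     * can call the allocator's release callback once the last reference to
     * this picture is dropped. */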
    pic_ctx->allocator = *p_allocator;
    pic_ctx->pic = *p;
    p->ref = dav1d_ref_init(&pic_ctx->ref, buf, free_buffer, c->pic_ctx_pool, 0);

    p->seq_hdr_ref = seq_hdr_ref;
    if (seq_hdr_ref) dav1d_ref_inc(seq_hdr_ref);

    p->frame_hdr_ref = frame_hdr_ref;
    if (frame_hdr_ref) dav1d_ref_inc(frame_hdr_ref);

    if (extra && extra_ptr)
        *extra_ptr = &pic_ctx->extra_data;

    return 0;
}

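/* Attaches HDR metadata (content light level, mastering display, ITU-T T.35)
 * and the data props to a picture, taking a reference on each piece of
 * metadata and dropping whatever the picture referenced before. */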
void dav1d_picture_copy_props(Dav1dPicture *const p,
                              Dav1dContentLightLevel *const content_light, Dav1dRef *const content_light_ref,
                              Dav1dMasteringDisplay *const mastering_display, Dav1dRef *const mastering_display_ref,
                              Dav1dITUTT35 *const itut_t35, Dav1dRef *itut_t35_ref, size_t n_itut_t35,
                              const Dav1dDataProps *const props)
{
    dav1d_data_props_copy(&p->m, props);

    dav1d_ref_dec(&p->content_light_ref);
    p->content_light_ref = content_light_ref;
    p->content_light = content_light;
    if (content_light_ref) dav1d_ref_inc(content_light_ref);

    dav1d_ref_dec(&p->mastering_display_ref);
    p->mastering_display_ref = mastering_display_ref;
    p->mastering_display = mastering_display;
    if (mastering_display_ref) dav1d_ref_inc(mastering_display_ref);

    dav1d_ref_dec(&p->itut_t35_ref);
    p->itut_t35_ref = itut_t35_ref;
    p->itut_t35 = itut_t35;
    p->n_itut_t35 = n_itut_t35;
    if (itut_t35_ref) dav1d_ref_inc(itut_t35_ref);
}

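/* Allocates the (possibly superres-upscaled) output picture for a frame,
 * records its visibility flags and, if the frame will be output, moves the
 * pending HDR metadata from the decoding context onto it. */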
int dav1d_thread_picture_alloc(Dav1dContext *const c, Dav1dFrameContext *const f,
                               const int bpc)
{
    Dav1dThreadPicture *const p = &f->sr_cur;

    const int res = picture_alloc(c, &p->p, f->frame_hdr->width[1], f->frame_hdr->height,
                                  f->seq_hdr, f->seq_hdr_ref,
                                  f->frame_hdr, f->frame_hdr_ref,
                                  bpc, &f->tile[0].data.m, &c->allocator,
                                  (void **) &p->progress);
    if (res) return res;

    // Don't clear these flags from c->frame_flags if the frame is not going to
    // be output; this way they carry over to the next visible frame as well.
    const int flags_mask = ((f->frame_hdr->show_frame || c->output_invisible_frames) &&
                            c->max_spatial_id == f->frame_hdr->spatial_id)
                           ? 0 : (PICTURE_FLAG_NEW_SEQUENCE | PICTURE_FLAG_NEW_OP_PARAMS_INFO);
    p->flags = c->frame_flags;
    c->frame_flags &= flags_mask;

    p->visible = f->frame_hdr->show_frame;
    p->showable = f->frame_hdr->showable_frame;

    if (p->visible) {
        // Only attach HDR10+ and T.35 metadata when the show_frame flag is set
        dav1d_picture_copy_props(&p->p, c->content_light, c->content_light_ref,
                                 c->mastering_display, c->mastering_display_ref,
                                 c->itut_t35, c->itut_t35_ref, c->n_itut_t35,
                                 &f->tile[0].data.m);

        // Must be removed from the context after being attached to the frame
        dav1d_ref_dec(&c->itut_t35_ref);
        c->itut_t35 = NULL;
        c->n_itut_t35 = 0;
    } else {
        dav1d_data_props_copy(&p->p.m, &f->tile[0].data.m);
    }

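    /* In frame-threading mode, reset the per-picture progress counters
     * ([0] block data, [1] pixel data) used to signal decode progress
     * between frame threads. */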
    if (c->n_fc > 1) {
        atomic_init(&p->progress[0], 0);
        atomic_init(&p->progress[1], 0);
    }
    return res;
}

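/* Allocates a new picture with the given width but otherwise the same
 * parameters, headers and metadata as src, reusing src's allocator. */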
int dav1d_picture_alloc_copy(Dav1dContext *const c, Dav1dPicture *const dst, const int w,
                             const Dav1dPicture *const src)
{
    Dav1dMemPoolBuffer *const buf = (Dav1dMemPoolBuffer *)src->ref->const_data;
    struct pic_ctx_context *const pic_ctx = buf->data;
    const int res = picture_alloc(c, dst, w, src->p.h,
                                  src->seq_hdr, src->seq_hdr_ref,
                                  src->frame_hdr, src->frame_hdr_ref,
                                  src->p.bpc, &src->m, &pic_ctx->allocator,
                                  NULL);
    if (res) return res;

    dav1d_picture_copy_props(dst, src->content_light, src->content_light_ref,
                             src->mastering_display, src->mastering_display_ref,
                             src->itut_t35, src->itut_t35_ref, src->n_itut_t35,
                             &src->m);

    return 0;
}

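/* Creates a new reference to src in dst by bumping every refcount src holds
 * and copying the struct; dst must be empty (unallocated) on entry. */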
void dav1d_picture_ref(Dav1dPicture *const dst, const Dav1dPicture *const src) {
    assert(dst != NULL);
    assert(dst->data[0] == NULL);
    assert(src != NULL);

    if (src->ref) {
        assert(src->data[0] != NULL);
        dav1d_ref_inc(src->ref);
    }
    if (src->frame_hdr_ref) dav1d_ref_inc(src->frame_hdr_ref);
    if (src->seq_hdr_ref) dav1d_ref_inc(src->seq_hdr_ref);
    if (src->m.user_data.ref) dav1d_ref_inc(src->m.user_data.ref);
    if (src->content_light_ref) dav1d_ref_inc(src->content_light_ref);
    if (src->mastering_display_ref) dav1d_ref_inc(src->mastering_display_ref);
    if (src->itut_t35_ref) dav1d_ref_inc(src->itut_t35_ref);
    *dst = *src;
}

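/* Moves the reference from src to dst without touching any refcounts and
 * leaves src zeroed out. */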
void dav1d_picture_move_ref(Dav1dPicture *const dst, Dav1dPicture *const src) {
    assert(dst != NULL);
    assert(dst->data[0] == NULL);
    assert(src != NULL);

    if (src->ref)
        assert(src->data[0] != NULL);

    *dst = *src;
    memset(src, 0, sizeof(*src));
}

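/* Thread-picture variants of the ref/move helpers above; they additionally
 * carry over the visibility, showable, progress and flags fields. */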
void dav1d_thread_picture_ref(Dav1dThreadPicture *const dst,
                              const Dav1dThreadPicture *const src)
{
    dav1d_picture_ref(&dst->p, &src->p);
    dst->visible = src->visible;
    dst->showable = src->showable;
    dst->progress = src->progress;
    dst->flags = src->flags;
}

void dav1d_thread_picture_move_ref(Dav1dThreadPicture *const dst,
                                   Dav1dThreadPicture *const src)
{
    dav1d_picture_move_ref(&dst->p, &src->p);
    dst->visible = src->visible;
    dst->showable = src->showable;
    dst->progress = src->progress;
    dst->flags = src->flags;
    memset(src, 0, sizeof(*src));
}

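/* Drops every reference a picture holds (data, headers, user data, HDR
 * metadata), zeroes the struct and restores default data props. */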
void dav1d_picture_unref_internal(Dav1dPicture *const p) {
    validate_input(p != NULL);

    if (p->ref) {
        validate_input(p->data[0] != NULL);
        dav1d_ref_dec(&p->ref);
    }
    dav1d_ref_dec(&p->seq_hdr_ref);
    dav1d_ref_dec(&p->frame_hdr_ref);
    dav1d_ref_dec(&p->m.user_data.ref);
    dav1d_ref_dec(&p->content_light_ref);
    dav1d_ref_dec(&p->mastering_display_ref);
    dav1d_ref_dec(&p->itut_t35_ref);
    memset(p, 0, sizeof(*p));
    dav1d_data_props_set_defaults(&p->m);
}

void dav1d_thread_picture_unref(Dav1dThreadPicture *const p) {
    dav1d_picture_unref_internal(&p->p);

    p->progress = NULL;
}

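/* Translates the internal picture flags into the public Dav1dEventFlags
 * reported to the application via dav1d_get_event_flags(). */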
enum Dav1dEventFlags dav1d_picture_get_event_flags(const Dav1dThreadPicture *const p) {
    if (!p->flags)
        return 0;

    enum Dav1dEventFlags flags = 0;
    if (p->flags & PICTURE_FLAG_NEW_SEQUENCE)
        flags |= DAV1D_EVENT_FLAG_NEW_SEQUENCE;
    if (p->flags & PICTURE_FLAG_NEW_OP_PARAMS_INFO)
        flags |= DAV1D_EVENT_FLAG_NEW_OP_PARAMS_INFO;

    return flags;
}