/**************************************************************************
 *
 * Copyright 2009 VMware, Inc.  All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
 * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT.
 * IN NO EVENT SHALL VMWARE AND/OR ITS SUPPLIERS BE LIABLE FOR
 * ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
 * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
 * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

/**
 * @file
 * Surface utility functions.
 *
 * @author Brian Paul
 */


#include "pipe/p_defines.h"
#include "pipe/p_screen.h"
#include "pipe/p_state.h"

#include "util/format/u_format.h"
#include "util/u_inlines.h"
#include "util/u_rect.h"
#include "util/u_surface.h"
#include "util/u_pack_color.h"
#include "util/u_memset.h"

/**
 * Initialize a pipe_surface object.  'surf' is considered to have
 * uninitialized contents.
 */
void
u_surface_default_template(struct pipe_surface *surf,
                           const struct pipe_resource *texture)
{
   memset(surf, 0, sizeof(*surf));

   surf->format = texture->format;
}
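
/*
 * Example (illustrative only; 'tex' is a hypothetical pipe_resource): a
 * typical caller fills in the remaining fields after letting this helper
 * pick the defaults:
 *
 *    struct pipe_surface tmpl;
 *    u_surface_default_template(&tmpl, tex);
 *    tmpl.u.tex.level = 0;
 *    tmpl.u.tex.first_layer = tmpl.u.tex.last_layer = 0;
 *    struct pipe_surface *surf = pipe->create_surface(pipe, tex, &tmpl);
 */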


/**
 * Copy a 3D box of pixels from one place to another.
 * Positions and sizes are in pixels; strides are in bytes.
 */
void
util_copy_box(uint8_t * dst,
              enum pipe_format format,
              unsigned dst_stride, uint64_t dst_slice_stride,
              unsigned dst_x, unsigned dst_y, unsigned dst_z,
              unsigned width, unsigned height, unsigned depth,
              const uint8_t * src,
              int src_stride, uint64_t src_slice_stride,
              unsigned src_x, unsigned src_y, unsigned src_z)
{
   unsigned z;
   dst += dst_z * dst_slice_stride;
   src += src_z * src_slice_stride;
   for (z = 0; z < depth; ++z) {
      util_copy_rect(dst,
                     format,
                     dst_stride,
                     dst_x, dst_y,
                     width, height,
                     src,
                     src_stride,
                     src_x, src_y);

      dst += dst_slice_stride;
      src += src_slice_stride;
   }
}
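
/*
 * Example (illustrative; the map pointers and transfers are assumed to
 * come from earlier pipe->texture_map() calls on same-format resources):
 * copying a 2D sub-rectangle between two mapped textures:
 *
 *    util_copy_box(dst_map, format,
 *                  dst_trans->stride, dst_trans->layer_stride,
 *                  0, 0, 0,                   (dst x, y, z)
 *                  width, height, 1,          (size in pixels)
 *                  src_map,
 *                  src_trans->stride, src_trans->layer_stride,
 *                  0, 0, 0);                  (src x, y, z)
 */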


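/**
 * Fill a rectangular region of a mapped surface with the packed
 * color/block value 'uc'.  Position and size are given in pixels and are
 * converted to blocks internally for block-compressed formats.
 * 'dst_stride' is in bytes.
 */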
void
util_fill_rect(uint8_t * dst,
               enum pipe_format format,
               unsigned dst_stride,
               unsigned dst_x,
               unsigned dst_y,
               unsigned width,
               unsigned height,
               union util_color *uc)
{
   const struct util_format_description *desc = util_format_description(format);
   unsigned i, j;
   unsigned width_size;
   int blocksize = desc->block.bits / 8;
   int blockwidth = desc->block.width;
   int blockheight = desc->block.height;

   assert(blocksize > 0);
   assert(blockwidth > 0);
   assert(blockheight > 0);

   dst_x /= blockwidth;
   dst_y /= blockheight;
   width = (width + blockwidth - 1) / blockwidth;
   height = (height + blockheight - 1) / blockheight;

   dst += dst_x * blocksize;
   dst += (uint64_t)dst_y * dst_stride;
   width_size = width * blocksize;

   switch (blocksize) {
   case 1:
      if (dst_stride == width_size)
         memset(dst, uc->ub, height * width_size);
      else {
         for (i = 0; i < height; i++) {
            memset(dst, uc->ub, width_size);
            dst += dst_stride;
         }
      }
      break;
   case 2:
      for (i = 0; i < height; i++) {
         uint16_t *row = (uint16_t *)dst;
         for (j = 0; j < width; j++)
            *row++ = uc->us;
         dst += dst_stride;
      }
      break;
   case 4:
      for (i = 0; i < height; i++) {
         util_memset32(dst, uc->ui[0], width);
         dst += dst_stride;
      }
      break;
   case 8:
      for (i = 0; i < height; i++) {
         util_memset64(dst, ((uint64_t *)uc)[0], width);
         dst += dst_stride;
      }
      break;
   default:
      for (i = 0; i < height; i++) {
         uint8_t *row = dst;
         for (j = 0; j < width; j++) {
            memcpy(row, uc, blocksize);
            row += blocksize;
         }
         dst += dst_stride;
      }
      break;
   }
}


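/**
 * Fill a 3D box of a mapped resource with the packed color value 'uc'.
 * Layers z through depth-1 are each filled with the same 2D rectangle
 * via util_fill_rect().  'stride' and 'layer_stride' are in bytes.
 */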
void
util_fill_box(uint8_t * dst,
              enum pipe_format format,
              unsigned stride,
              uintptr_t layer_stride,
              unsigned x,
              unsigned y,
              unsigned z,
              unsigned width,
              unsigned height,
              unsigned depth,
              union util_color *uc)
{
   unsigned layer;
   dst += z * layer_stride;
   for (layer = z; layer < depth; layer++) {
      util_fill_rect(dst, format,
                     stride,
                     x, y, width, height, uc);
      dst += layer_stride;
   }
}


/**
 * Fallback function for pipe->resource_copy_region().
 * We support copying between different formats (including compressed /
 * uncompressed) if the bytes per block or pixel match.  If copying
 * compressed -> uncompressed, the dst region is reduced by the src block
 * width and height.  If copying uncompressed -> compressed, the dst region
 * is expanded by the dst block width and height.  See GL_ARB_copy_image.
 * Note: (X,Y)=(0,0) is always the upper-left corner.
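 *
 * For example (assuming equal block sizes, as required above): copying a
 * 64x64-pixel region from a DXT1 texture (4x4-pixel blocks, 8 bytes per
 * block) into an 8-byte-per-pixel uncompressed texture yields a 16x16
 * destination region, and the reverse direction expands 16x16 to 64x64.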
 */
void
util_resource_copy_region(struct pipe_context *pipe,
                          struct pipe_resource *dst,
                          unsigned dst_level,
                          unsigned dst_x, unsigned dst_y, unsigned dst_z,
                          struct pipe_resource *src,
                          unsigned src_level,
                          const struct pipe_box *src_box_in)
{
   struct pipe_transfer *src_trans, *dst_trans;
   uint8_t *dst_map;
   const uint8_t *src_map;
   enum pipe_format src_format;
   enum pipe_format dst_format;
   struct pipe_box src_box, dst_box;
   unsigned src_bs, dst_bs, src_bw, dst_bw, src_bh, dst_bh;

   assert(src && dst);
   if (!src || !dst)
      return;

   assert((src->target == PIPE_BUFFER && dst->target == PIPE_BUFFER) ||
          (src->target != PIPE_BUFFER && dst->target != PIPE_BUFFER));

   src_format = src->format;
   dst_format = dst->format;

   /* init src box */
   src_box = *src_box_in;

   /* init dst box */
   dst_box.x = dst_x;
   dst_box.y = dst_y;
   dst_box.z = dst_z;
   dst_box.width = src_box.width;
   dst_box.height = src_box.height;
   dst_box.depth = src_box.depth;

   src_bs = util_format_get_blocksize(src_format);
   src_bw = util_format_get_blockwidth(src_format);
   src_bh = util_format_get_blockheight(src_format);
   dst_bs = util_format_get_blocksize(dst_format);
   dst_bw = util_format_get_blockwidth(dst_format);
   dst_bh = util_format_get_blockheight(dst_format);

   /* Note: all box positions and sizes are in pixels */
   if (src_bw > 1 && dst_bw == 1) {
      /* Copy from compressed to uncompressed.
       * Shrink dest box by the src block size.
       */
      dst_box.width /= src_bw;
      dst_box.height /= src_bh;
   }
   else if (src_bw == 1 && dst_bw > 1) {
      /* Copy from uncompressed to compressed.
       * Expand dest box by the dest block size.
       */
      dst_box.width *= dst_bw;
      dst_box.height *= dst_bh;
   }
   else {
      /* compressed -> compressed or uncompressed -> uncompressed copy */
      assert(src_bw == dst_bw);
      assert(src_bh == dst_bh);
   }

   assert(src_bs == dst_bs);
   if (src_bs != dst_bs) {
      /* This can happen if we fail to do format checking beforehand.
       * Don't crash below.
       */
      return;
   }

   /* check that region boxes are block aligned */
   assert(src_box.x % src_bw == 0);
   assert(src_box.y % src_bh == 0);
   assert(dst_box.x % dst_bw == 0);
   assert(dst_box.y % dst_bh == 0);

   /* check that region boxes are not out of bounds */
   assert(src_box.x + src_box.width <= (int)u_minify(src->width0, src_level));
   assert(src_box.y + src_box.height <= (int)u_minify(src->height0, src_level));
   assert(dst_box.x + dst_box.width <= (int)u_minify(dst->width0, dst_level));
   assert(dst_box.y + dst_box.height <= (int)u_minify(dst->height0, dst_level));

   /* check that total number of src, dest bytes match */
   assert((src_box.width / src_bw) * (src_box.height / src_bh) * src_bs ==
          (dst_box.width / dst_bw) * (dst_box.height / dst_bh) * dst_bs);

   if (dst->target == PIPE_BUFFER && src->target == PIPE_BUFFER) {
      src_map = pipe->buffer_map(pipe,
                                 src,
                                 src_level,
                                 PIPE_MAP_READ,
                                 &src_box, &src_trans);
      assert(src_map);
      if (!src_map) {
         goto no_src_map_buf;
      }

      dst_map = pipe->buffer_map(pipe,
                                 dst,
                                 dst_level,
                                 PIPE_MAP_WRITE |
                                 PIPE_MAP_DISCARD_RANGE, &dst_box,
                                 &dst_trans);
      assert(dst_map);
      if (!dst_map) {
         goto no_dst_map_buf;
      }

      assert(src_box.height == 1);
      assert(src_box.depth == 1);
      memcpy(dst_map, src_map, src_box.width);

      pipe->buffer_unmap(pipe, dst_trans);
no_dst_map_buf:
      pipe->buffer_unmap(pipe, src_trans);
no_src_map_buf:
      ;
   } else {
      src_map = pipe->texture_map(pipe,
                                  src,
                                  src_level,
                                  PIPE_MAP_READ,
                                  &src_box, &src_trans);
      assert(src_map);
      if (!src_map) {
         goto no_src_map;
      }

      dst_map = pipe->texture_map(pipe,
                                  dst,
                                  dst_level,
                                  PIPE_MAP_WRITE |
                                  PIPE_MAP_DISCARD_RANGE, &dst_box,
                                  &dst_trans);
      assert(dst_map);
      if (!dst_map) {
         goto no_dst_map;
      }

      util_copy_box(dst_map,
                    src_format,
                    dst_trans->stride, dst_trans->layer_stride,
                    0, 0, 0,
                    src_box.width, src_box.height, src_box.depth,
                    src_map,
                    src_trans->stride, src_trans->layer_stride,
                    0, 0, 0);

      pipe->texture_unmap(pipe, dst_trans);
no_dst_map:
      pipe->texture_unmap(pipe, src_trans);
no_src_map:
      ;
   }
}

static void
util_clear_color_texture_helper(struct pipe_transfer *dst_trans,
                                uint8_t *dst_map,
                                enum pipe_format format,
                                const union pipe_color_union *color,
                                unsigned width, unsigned height, unsigned depth)
{
   union util_color uc;

   assert(dst_trans->stride > 0);

   util_pack_color_union(format, &uc, color);

   util_fill_box(dst_map, format,
                 dst_trans->stride, dst_trans->layer_stride,
                 0, 0, 0, width, height, depth, &uc);
}

static void
util_clear_color_texture(struct pipe_context *pipe,
                         struct pipe_resource *texture,
                         enum pipe_format format,
                         const union pipe_color_union *color,
                         unsigned level,
                         unsigned dstx, unsigned dsty, unsigned dstz,
                         unsigned width, unsigned height, unsigned depth)
{
   struct pipe_transfer *dst_trans;
   uint8_t *dst_map;

   dst_map = pipe_texture_map_3d(pipe,
                                 texture,
                                 level,
                                 PIPE_MAP_WRITE,
                                 dstx, dsty, dstz,
                                 width, height, depth,
                                 &dst_trans);
   if (!dst_map)
      return;

   if (dst_trans->stride > 0) {
      util_clear_color_texture_helper(dst_trans, dst_map, format, color,
                                      width, height, depth);
   }
   pipe->texture_unmap(pipe, dst_trans);
}


#define UBYTE_TO_USHORT(B) ((B) | ((B) << 8))


/**
 * Fallback for pipe->clear_render_target() function.
 * XXX this looks too hackish to be really useful.
 * cpp > 4 looks like a gross hack at best...
 * Also, these transfer-based fallbacks can't be used when clearing
 * multisampled surfaces, for instance.
 * Clears all bound layers.
 */
void
util_clear_render_target(struct pipe_context *pipe,
                         struct pipe_surface *dst,
                         const union pipe_color_union *color,
                         unsigned dstx, unsigned dsty,
                         unsigned width, unsigned height)
{
   struct pipe_transfer *dst_trans;
   uint8_t *dst_map;

   assert(dst->texture);
   if (!dst->texture)
      return;

   if (dst->texture->target == PIPE_BUFFER) {
      /*
       * The fill naturally works on the surface format, however
       * the transfer uses the resource format, which is just bytes for buffers.
       */
      unsigned dx, w;
      unsigned pixstride = util_format_get_blocksize(dst->format);
      dx = (dst->u.buf.first_element + dstx) * pixstride;
      w = width * pixstride;
      dst_map = pipe_texture_map(pipe,
                                 dst->texture,
                                 0, 0,
                                 PIPE_MAP_WRITE,
                                 dx, 0, w, 1,
                                 &dst_trans);
      if (dst_map) {
         util_clear_color_texture_helper(dst_trans, dst_map, dst->format,
                                         color, width, height, 1);
         pipe->texture_unmap(pipe, dst_trans);
      }
   }
   else {
      unsigned depth = dst->u.tex.last_layer - dst->u.tex.first_layer + 1;
      util_clear_color_texture(pipe, dst->texture, dst->format, color,
                               dst->u.tex.level, dstx, dsty,
                               dst->u.tex.first_layer, width, height, depth);
   }
}
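
/*
 * Example (illustrative; the driver-side function name is hypothetical):
 * a driver without an accelerated clear path can forward its
 * clear_render_target hook to this helper, ignoring the hook's trailing
 * render-condition flag:
 *
 *    static void xyz_clear_render_target(struct pipe_context *pipe,
 *                                        struct pipe_surface *dst,
 *                                        const union pipe_color_union *color,
 *                                        unsigned dstx, unsigned dsty,
 *                                        unsigned width, unsigned height,
 *                                        bool render_condition_enabled)
 *    {
 *       util_clear_render_target(pipe, dst, color, dstx, dsty, width, height);
 *    }
 */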

static void
util_fill_zs_rect(uint8_t *dst_map,
                  enum pipe_format format,
                  bool need_rmw,
                  unsigned clear_flags,
                  unsigned dst_stride,
                  unsigned width,
                  unsigned height,
                  uint64_t zstencil)
{
   unsigned i, j;
   switch (util_format_get_blocksize(format)) {
   case 1:
      assert(format == PIPE_FORMAT_S8_UINT);
      if (dst_stride == width)
         memset(dst_map, (uint8_t) zstencil, (uint64_t)height * width);
      else {
         for (i = 0; i < height; i++) {
            memset(dst_map, (uint8_t) zstencil, width);
            dst_map += dst_stride;
         }
      }
      break;
   case 2:
      assert(format == PIPE_FORMAT_Z16_UNORM);
      for (i = 0; i < height; i++) {
         uint16_t *row = (uint16_t *)dst_map;
         for (j = 0; j < width; j++)
            *row++ = (uint16_t) zstencil;
         dst_map += dst_stride;
      }
      break;
   case 4:
      if (!need_rmw) {
         for (i = 0; i < height; i++) {
            util_memset32(dst_map, (uint32_t)zstencil, width);
            dst_map += dst_stride;
         }
      }
      else {
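         /* Partial clear of a combined depth/stencil format: build a mask
          * of the destination bits to preserve (dst_mask) so that only the
          * component selected by clear_flags is overwritten below.
          */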
         uint32_t dst_mask;
         if (format == PIPE_FORMAT_Z24_UNORM_S8_UINT)
            dst_mask = 0x00ffffff;
         else {
            assert(format == PIPE_FORMAT_S8_UINT_Z24_UNORM);
            dst_mask = 0xffffff00;
         }
         if (clear_flags & PIPE_CLEAR_DEPTH)
            dst_mask = ~dst_mask;
         for (i = 0; i < height; i++) {
            uint32_t *row = (uint32_t *)dst_map;
            for (j = 0; j < width; j++) {
               uint32_t tmp = *row & dst_mask;
               *row++ = tmp | ((uint32_t) zstencil & ~dst_mask);
            }
            dst_map += dst_stride;
         }
      }
      break;
   case 8:
      if (!need_rmw) {
         for (i = 0; i < height; i++) {
            util_memset64(dst_map, zstencil, width);
            dst_map += dst_stride;
         }
      }
      else {
         uint64_t src_mask;

         if (clear_flags & PIPE_CLEAR_DEPTH)
            src_mask = 0x00000000ffffffffull;
         else
            src_mask = 0x000000ff00000000ull;

         for (i = 0; i < height; i++) {
            uint64_t *row = (uint64_t *)dst_map;
            for (j = 0; j < width; j++) {
               uint64_t tmp = *row & ~src_mask;
               *row++ = tmp | (zstencil & src_mask);
            }
            dst_map += dst_stride;
         }
      }
      break;
   default:
      assert(0);
      break;
   }
}

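/**
 * Fill a 3D box of a mapped depth/stencil resource with the packed
 * z/stencil value 'zstencil'.  If 'need_rmw' is set, only the component
 * selected by 'clear_flags' is written and the other component is
 * preserved with a per-pixel read-modify-write.  'stride' and
 * 'layer_stride' are in bytes.
 */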
void
util_fill_zs_box(uint8_t *dst,
                 enum pipe_format format,
                 bool need_rmw,
                 unsigned clear_flags,
                 unsigned stride,
                 unsigned layer_stride,
                 unsigned width,
                 unsigned height,
                 unsigned depth,
                 uint64_t zstencil)
{
   unsigned layer;

   for (layer = 0; layer < depth; layer++) {
      util_fill_zs_rect(dst, format, need_rmw, clear_flags, stride,
                        width, height, zstencil);
      dst += layer_stride;
   }
}

static void
util_clear_depth_stencil_texture(struct pipe_context *pipe,
                                 struct pipe_resource *texture,
                                 enum pipe_format format,
                                 unsigned clear_flags,
                                 uint64_t zstencil, unsigned level,
                                 unsigned dstx, unsigned dsty, unsigned dstz,
                                 unsigned width, unsigned height, unsigned depth)
{
   struct pipe_transfer *dst_trans;
   uint8_t *dst_map;
   bool need_rmw = false;

   if ((clear_flags & PIPE_CLEAR_DEPTHSTENCIL) &&
       ((clear_flags & PIPE_CLEAR_DEPTHSTENCIL) != PIPE_CLEAR_DEPTHSTENCIL) &&
       util_format_is_depth_and_stencil(format))
      need_rmw = true;

   dst_map = pipe_texture_map_3d(pipe,
                                 texture,
                                 level,
                                 (need_rmw ? PIPE_MAP_READ_WRITE :
                                             PIPE_MAP_WRITE),
                                 dstx, dsty, dstz,
                                 width, height, depth, &dst_trans);
   assert(dst_map);
   if (!dst_map)
      return;

   assert(dst_trans->stride > 0);

   util_fill_zs_box(dst_map, format, need_rmw, clear_flags,
                    dst_trans->stride,
                    dst_trans->layer_stride, width, height,
                    depth, zstencil);

   pipe->texture_unmap(pipe, dst_trans);
}


/* Try to clear the texture as a surface; returns true if successful.
 */
static bool
util_clear_texture_as_surface(struct pipe_context *pipe,
                              struct pipe_resource *res,
                              unsigned level,
                              const struct pipe_box *box,
                              const void *data)
{
   struct pipe_surface tmpl = {{0}}, *sf;

   tmpl.format = res->format;
   tmpl.u.tex.first_layer = box->z;
   tmpl.u.tex.last_layer = box->z + box->depth - 1;
   tmpl.u.tex.level = level;

   if (util_format_is_depth_or_stencil(res->format)) {
      if (!pipe->clear_depth_stencil)
         return false;

      sf = pipe->create_surface(pipe, res, &tmpl);
      if (!sf)
         return false;

      float depth = 0;
      uint8_t stencil = 0;
      unsigned clear = 0;
      const struct util_format_description *desc =
         util_format_description(tmpl.format);

      if (util_format_has_depth(desc)) {
         clear |= PIPE_CLEAR_DEPTH;
         util_format_unpack_z_float(tmpl.format, &depth, data, 1);
      }
      if (util_format_has_stencil(desc)) {
         clear |= PIPE_CLEAR_STENCIL;
         util_format_unpack_s_8uint(tmpl.format, &stencil, data, 1);
      }
      pipe->clear_depth_stencil(pipe, sf, clear, depth, stencil,
                                box->x, box->y, box->width, box->height,
                                false);

      pipe_surface_reference(&sf, NULL);
   } else {
      if (!pipe->clear_render_target)
         return false;

      if (!pipe->screen->is_format_supported(pipe->screen, tmpl.format,
                                             res->target, 0, 0,
                                             PIPE_BIND_RENDER_TARGET)) {
         tmpl.format = util_format_as_renderable(tmpl.format);

         if (tmpl.format == PIPE_FORMAT_NONE)
            return false;

         if (!pipe->screen->is_format_supported(pipe->screen, tmpl.format,
                                                res->target, 0, 0,
                                                PIPE_BIND_RENDER_TARGET))
            return false;
      }

      sf = pipe->create_surface(pipe, res, &tmpl);
      if (!sf)
         return false;

      union pipe_color_union color;
      util_format_unpack_rgba(sf->format, color.ui, data, 1);
      pipe->clear_render_target(pipe, sf, &color, box->x, box->y,
                                box->width, box->height, false);

      pipe_surface_reference(&sf, NULL);
   }

   return true;
}

/* First attempt to clear using HW, then fall back to SW if needed.
 */
void
u_default_clear_texture(struct pipe_context *pipe,
                        struct pipe_resource *tex,
                        unsigned level,
                        const struct pipe_box *box,
                        const void *data)
{
   struct pipe_screen *screen = pipe->screen;
   bool cleared = false;
   assert(data != NULL);

   bool has_layers = screen->get_param(screen, PIPE_CAP_VS_INSTANCEID) &&
                     screen->get_param(screen, PIPE_CAP_VS_LAYER_VIEWPORT);

   if (has_layers) {
      cleared = util_clear_texture_as_surface(pipe, tex, level,
                                              box, data);
   } else {
      struct pipe_box layer = *box;
      layer.depth = 1;
      int l;
      for (l = box->z; l < box->z + box->depth; l++) {
         layer.z = l;
         cleared |= util_clear_texture_as_surface(pipe, tex, level,
                                                  &layer, data);
         if (!cleared) {
            /* If one layer is cleared, all layers should also be clearable.
             * Therefore, if we fail on any layer other than the first, it
             * is a bug somewhere.
             */
            assert(l == box->z);
            break;
         }
      }
   }

   /* Fall back to clearing it in SW if the HW paths failed. */
   if (!cleared)
      util_clear_texture_sw(pipe, tex, level, box, data);
}
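
/*
 * Example (illustrative; 'ctx' is a hypothetical driver context): drivers
 * without dedicated copy/clear paths can install these helpers directly,
 * since they are written to match the corresponding pipe_context hooks:
 *
 *    ctx->base.resource_copy_region = util_resource_copy_region;
 *    ctx->base.clear_texture = u_default_clear_texture;
 */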

void
util_clear_texture_sw(struct pipe_context *pipe,
                      struct pipe_resource *tex,
                      unsigned level,
                      const struct pipe_box *box,
                      const void *data)
{
   const struct util_format_description *desc =
      util_format_description(tex->format);
   assert(data != NULL);

   if (level > tex->last_level)
      return;

   if (util_format_is_depth_or_stencil(tex->format)) {
      unsigned clear = 0;
      float depth = 0.0f;
      uint8_t stencil = 0;
      uint64_t zstencil;

      if (util_format_has_depth(desc)) {
         clear |= PIPE_CLEAR_DEPTH;
         util_format_unpack_z_float(tex->format, &depth, data, 1);
      }

      if (util_format_has_stencil(desc)) {
         clear |= PIPE_CLEAR_STENCIL;
         util_format_unpack_s_8uint(tex->format, &stencil, data, 1);
      }

      zstencil = util_pack64_z_stencil(tex->format, depth, stencil);

      util_clear_depth_stencil_texture(pipe, tex, tex->format, clear, zstencil,
                                       level, box->x, box->y, box->z,
                                       box->width, box->height, box->depth);
   } else {
      union pipe_color_union color;
      util_format_unpack_rgba(tex->format, color.ui, data, 1);

      util_clear_color_texture(pipe, tex, tex->format, &color, level,
                               box->x, box->y, box->z,
                               box->width, box->height, box->depth);
   }
}


/**
 * Fallback for pipe->clear_depth_stencil() function.
 * A SW fallback doesn't look terribly useful here.
 * Also, these transfer-based fallbacks can't be used when clearing
 * multisampled surfaces, for instance.
 * Clears all bound layers.
 */
void
util_clear_depth_stencil(struct pipe_context *pipe,
                         struct pipe_surface *dst,
                         unsigned clear_flags,
                         double depth,
                         unsigned stencil,
                         unsigned dstx, unsigned dsty,
                         unsigned width, unsigned height)
{
   uint64_t zstencil;
   unsigned max_layer;

   assert(dst->texture);
   if (!dst->texture)
      return;

   zstencil = util_pack64_z_stencil(dst->format, depth, stencil);
   max_layer = dst->u.tex.last_layer - dst->u.tex.first_layer;
   util_clear_depth_stencil_texture(pipe, dst->texture, dst->format,
                                    clear_flags, zstencil, dst->u.tex.level,
                                    dstx, dsty, dst->u.tex.first_layer,
                                    width, height, max_layer + 1);
}


/* Return whether the box is totally inside the resource.
 */
static bool
is_box_inside_resource(const struct pipe_resource *res,
                       const struct pipe_box *box,
                       unsigned level)
{
   unsigned width = 1, height = 1, depth = 1;

   switch (res->target) {
   case PIPE_BUFFER:
      width = res->width0;
      height = 1;
      depth = 1;
      break;
   case PIPE_TEXTURE_1D:
      width = u_minify(res->width0, level);
      height = 1;
      depth = 1;
      break;
   case PIPE_TEXTURE_2D:
   case PIPE_TEXTURE_RECT:
      width = u_minify(res->width0, level);
      height = u_minify(res->height0, level);
      depth = 1;
      break;
   case PIPE_TEXTURE_3D:
      width = u_minify(res->width0, level);
      height = u_minify(res->height0, level);
      depth = u_minify(res->depth0, level);
      break;
   case PIPE_TEXTURE_CUBE:
      width = u_minify(res->width0, level);
      height = u_minify(res->height0, level);
      depth = 6;
      break;
   case PIPE_TEXTURE_1D_ARRAY:
      width = u_minify(res->width0, level);
      height = 1;
      depth = res->array_size;
      break;
   case PIPE_TEXTURE_2D_ARRAY:
      width = u_minify(res->width0, level);
      height = u_minify(res->height0, level);
      depth = res->array_size;
      break;
   case PIPE_TEXTURE_CUBE_ARRAY:
      width = u_minify(res->width0, level);
      height = u_minify(res->height0, level);
      depth = res->array_size;
      assert(res->array_size % 6 == 0);
      break;
   case PIPE_MAX_TEXTURE_TYPES:
      break;
   }

   return box->x >= 0 &&
          box->x + box->width <= (int) width &&
          box->y >= 0 &&
          box->y + box->height <= (int) height &&
          box->z >= 0 &&
          box->z + box->depth <= (int) depth;
}

static unsigned
get_sample_count(const struct pipe_resource *res)
{
   return res->nr_samples ? res->nr_samples : 1;
}


/**
 * Check if a blit() command can be implemented with a resource_copy_region().
 * If tight_format_check is true, only allow the resource_copy_region() if
 * the blit src/dst formats are identical, ignoring the resource formats.
 * Otherwise, check for format casting and compatibility.
 */
bool
util_can_blit_via_copy_region(const struct pipe_blit_info *blit,
                              bool tight_format_check,
                              bool render_condition_bound)
{
   const struct util_format_description *src_desc, *dst_desc;

   src_desc = util_format_description(blit->src.resource->format);
   dst_desc = util_format_description(blit->dst.resource->format);

   if (tight_format_check) {
      /* no format conversions allowed */
      if (blit->src.format != blit->dst.format) {
         return false;
      }
   }
   else {
      /* do loose format compatibility checking */
      if ((blit->src.format != blit->dst.format ||
           src_desc != dst_desc) &&
          (blit->src.resource->format != blit->src.format ||
           blit->dst.resource->format != blit->dst.format ||
           !util_is_format_compatible(src_desc, dst_desc))) {
         return false;
      }
   }

   unsigned mask = util_format_get_mask(blit->dst.format);

   /* No masks, no filtering, no scissor, no blending */
   if ((blit->mask & mask) != mask ||
       blit->filter != PIPE_TEX_FILTER_NEAREST ||
       blit->scissor_enable ||
       blit->num_window_rectangles > 0 ||
       blit->alpha_blend ||
       (blit->render_condition_enable && render_condition_bound)) {
      return false;
   }

   /* Only the src box can have negative dims for flipping */
   assert(blit->dst.box.width >= 1);
   assert(blit->dst.box.height >= 1);
   assert(blit->dst.box.depth >= 1);

   /* No scaling or flipping */
   if (blit->src.box.width != blit->dst.box.width ||
       blit->src.box.height != blit->dst.box.height ||
       blit->src.box.depth != blit->dst.box.depth) {
      return false;
   }

   /* No out-of-bounds access. */
   if (!is_box_inside_resource(blit->src.resource, &blit->src.box,
                               blit->src.level) ||
       !is_box_inside_resource(blit->dst.resource, &blit->dst.box,
                               blit->dst.level)) {
      return false;
   }

   /* Sample counts must match. */
   if (get_sample_count(blit->src.resource) !=
       get_sample_count(blit->dst.resource)) {
      return false;
   }

   return true;
}


/**
 * Try to do a blit using resource_copy_region.  The function calls
 * resource_copy_region if the blit description is compatible with it.
 *
 * It returns true if the blit was done using resource_copy_region.
 *
 * It returns false otherwise and the caller must fall back to a more generic
 * codepath for the blit operation (e.g. by using u_blitter).
 */
bool
util_try_blit_via_copy_region(struct pipe_context *ctx,
                              const struct pipe_blit_info *blit,
                              bool render_condition_bound)
{
   if (util_can_blit_via_copy_region(blit, false, render_condition_bound)) {
      ctx->resource_copy_region(ctx, blit->dst.resource, blit->dst.level,
                                blit->dst.box.x, blit->dst.box.y,
                                blit->dst.box.z,
                                blit->src.resource, blit->src.level,
                                &blit->src.box);
      return true;
   }
   else {
      return false;
   }
}
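
/*
 * Example (illustrative; the driver-side function is hypothetical): a
 * driver's blit() implementation typically tries the copy_region path
 * first and only then falls back to a generic path such as u_blitter:
 *
 *    static void xyz_blit(struct pipe_context *ctx,
 *                         const struct pipe_blit_info *info)
 *    {
 *       if (util_try_blit_via_copy_region(ctx, info, false))
 *          return;
 *       ... fall back to u_blitter or another generic path ...
 *    }
 */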