1 /*
2 * Copyright 2008 Corbin Simpson <[email protected]>
3 * SPDX-License-Identifier: MIT
4 */
5
6 #include "draw/draw_context.h"
7
8 #include "util/u_memory.h"
9 #include "util/u_sampler.h"
10 #include "util/u_upload_mgr.h"
11 #include "util/u_debug_cb.h"
12 #include "util/os_time.h"
13 #include "vl/vl_decoder.h"
14 #include "vl/vl_video_buffer.h"
15
16 #include "r300_cb.h"
17 #include "r300_context.h"
18 #include "r300_emit.h"
19 #include "r300_screen.h"
20 #include "r300_screen_buffer.h"
21 #include "compiler/radeon_regalloc.h"
22
23 #include <inttypes.h>
24
/* Drop the last references this context holds on refcounted pipe objects
 * (framebuffer surfaces, sampler views, vertex buffers, BOs) and delete the
 * context-owned DSA CSO. Called only from r300_destroy_context. */
static void r300_release_referenced_objects(struct r300_context *r300)
{
    struct pipe_framebuffer_state *fb =
        (struct pipe_framebuffer_state*)r300->fb_state.state;
    struct r300_textures_state *textures =
        (struct r300_textures_state*)r300->textures_state.state;
    unsigned i;

    /* Framebuffer state. */
    util_unreference_framebuffer_state(fb);

    /* Textures: release every currently bound sampler view. */
    for (i = 0; i < textures->sampler_view_count; i++)
        pipe_sampler_view_reference(
            (struct pipe_sampler_view**)&textures->sampler_views[i], NULL);

    /* The special dummy texture for texkill (only created on r3xx-r4xx). */
    if (r300->texkill_sampler) {
        pipe_sampler_view_reference(
            (struct pipe_sampler_view**)&r300->texkill_sampler,
            NULL);
    }

    /* Manually-created vertex buffers. */
    pipe_vertex_buffer_unreference(&r300->dummy_vb);
    radeon_bo_reference(r300->rws, &r300->vbo, NULL);

    /* This CSO was created by this context, so it must be deleted through
     * the context's own delete callback. */
    r300->context.delete_depth_stencil_alpha_state(&r300->context,
                                                   r300->dsa_decompress_zmask);
}
55
/* pipe_context::destroy implementation. Also used as the error-unwind path
 * of r300_create_context, so every step must tolerate fields that were never
 * initialized (hence the many NULL checks). Teardown order matters: HW
 * features are returned while the CS still exists, and the CS is destroyed
 * before the winsys context. */
static void r300_destroy_context(struct pipe_context* context)
{
    struct r300_context* r300 = r300_context(context);

    /* Give back HyperZ/CMASK access while the command stream is still valid. */
    if (r300->cs.priv && r300->hyperz_enabled) {
        r300->rws->cs_request_feature(&r300->cs, RADEON_FID_R300_HYPERZ_ACCESS, false);
    }
    if (r300->cs.priv && r300->cmask_access) {
        r300->rws->cs_request_feature(&r300->cs, RADEON_FID_R300_CMASK_ACCESS, false);
    }

    if (r300->blitter)
        util_blitter_destroy(r300->blitter);
    if (r300->draw)
        draw_destroy(r300->draw);

    /* Release the user-bound vertex buffers. */
    for (unsigned i = 0; i < r300->nr_vertex_buffers; i++)
        pipe_vertex_buffer_unreference(&r300->vertex_buffer[i]);

    if (r300->uploader)
        u_upload_destroy(r300->uploader);
    if (r300->context.stream_uploader)
        u_upload_destroy(r300->context.stream_uploader);
    if (r300->context.const_uploader)
        u_upload_destroy(r300->context.const_uploader);

    /* XXX: This function assumes r300->query_list was initialized */
    r300_release_referenced_objects(r300);

    r300->rws->cs_destroy(&r300->cs);
    if (r300->ctx)
        r300->rws->ctx_destroy(r300->ctx);

    rc_destroy_regalloc_state(&r300->fs_regalloc_state);
    rc_destroy_regalloc_state(&r300->vs_regalloc_state);

    /* XXX: No way to tell if this was initialized or not? */
    slab_destroy_child(&r300->pool_transfers);

    /* Free the structs allocated in r300_setup_atoms().
     * aa_state is allocated first there, so its state pointer being non-NULL
     * is used as the "atoms were set up" sentinel; FREE(NULL) is a no-op for
     * any later atoms whose allocation never happened. */
    if (r300->aa_state.state) {
        FREE(r300->aa_state.state);
        FREE(r300->blend_color_state.state);
        FREE(r300->clip_state.state);
        FREE(r300->fb_state.state);
        FREE(r300->gpu_flush.state);
        FREE(r300->hyperz_state.state);
        FREE(r300->invariant_state.state);
        FREE(r300->rs_block_state.state);
        FREE(r300->sample_mask.state);
        FREE(r300->scissor_state.state);
        FREE(r300->textures_state.state);
        FREE(r300->vap_invariant_state.state);
        FREE(r300->viewport_state.state);
        FREE(r300->ztop_state.state);
        FREE(r300->fs_constants.state);
        FREE(r300->vs_constants.state);
        /* vertex_stream_state only gets local storage on SW-TCL chips. */
        if (!r300->screen->caps.has_tcl) {
            FREE(r300->vertex_stream_state.state);
        }
    }

    FREE(r300->stencilref_fallback);

    FREE(r300);
}
122
r300_flush_callback(void * data,unsigned flags,struct pipe_fence_handle ** fence)123 static void r300_flush_callback(void *data, unsigned flags,
124 struct pipe_fence_handle **fence)
125 {
126 struct r300_context* const cs_context_copy = data;
127
128 r300_flush(&cs_context_copy->context, flags, fence);
129 }
130
/* Initialize one state atom: record its name (for debugging dumps), its
 * fixed command size in dwords (0 means the size varies and is computed at
 * emit time), and hook up the emit function r300_emit_<atomname>. Atoms
 * start with no local state storage and not dirty. */
#define R300_INIT_ATOM(atomname, atomsize) \
do { \
    r300->atomname.name = #atomname; \
    r300->atomname.state = NULL; \
    r300->atomname.size = atomsize; \
    r300->atomname.emit = r300_emit_##atomname; \
    r300->atomname.dirty = false; \
} while (0)

/* Allocate zeroed local storage of the given struct type for a non-CSO atom.
 * Fails the enclosing function (which must return bool) on OOM. */
#define R300_ALLOC_ATOM(atomname, statetype) \
do { \
    r300->atomname.state = CALLOC_STRUCT(statetype); \
    if (r300->atomname.state == NULL) \
        return false; \
} while (0)
146
r300_setup_atoms(struct r300_context * r300)147 static bool r300_setup_atoms(struct r300_context* r300)
148 {
149 bool is_rv350 = r300->screen->caps.is_rv350;
150 bool is_r500 = r300->screen->caps.is_r500;
151 bool has_tcl = r300->screen->caps.has_tcl;
152
153 /* Create the actual atom list.
154 *
155 * Some atoms never change size, others change every emit - those have
156 * the size of 0 here.
157 *
158 * NOTE: The framebuffer state is split into these atoms:
159 * - gpu_flush (unpipelined regs)
160 * - aa_state (unpipelined regs)
161 * - fb_state (unpipelined regs)
162 * - hyperz_state (unpipelined regs followed by pipelined ones)
163 * - fb_state_pipelined (pipelined regs)
164 * The motivation behind this is to be able to emit a strict
165 * subset of the regs, and to have reasonable register ordering. */
166 /* SC, GB (unpipelined), RB3D (unpipelined), ZB (unpipelined). */
167 R300_INIT_ATOM(gpu_flush, 9);
168 R300_INIT_ATOM(aa_state, 4);
169 R300_INIT_ATOM(fb_state, 0);
170 R300_INIT_ATOM(hyperz_state, is_r500 || is_rv350 ? 10 : 8);
171 /* ZB (unpipelined), SC. */
172 R300_INIT_ATOM(ztop_state, 2);
173 /* ZB, FG. */
174 R300_INIT_ATOM(dsa_state, is_r500 ? 10 : 6);
175 /* RB3D. */
176 R300_INIT_ATOM(blend_state, 8);
177 R300_INIT_ATOM(blend_color_state, is_r500 ? 3 : 2);
178 /* SC. */
179 R300_INIT_ATOM(sample_mask, 2);
180 R300_INIT_ATOM(scissor_state, 3);
181 /* GB, FG, GA, SU, SC, RB3D. */
182 R300_INIT_ATOM(invariant_state, 14 + (is_rv350 ? 4 : 0) + (is_r500 ? 4 : 0));
183 /* VAP. */
184 R300_INIT_ATOM(viewport_state, 9);
185 R300_INIT_ATOM(pvs_flush, 2);
186 R300_INIT_ATOM(vap_invariant_state, is_r500 || !has_tcl ? 11 : 9);
187 R300_INIT_ATOM(vertex_stream_state, 0);
188 R300_INIT_ATOM(vs_state, 0);
189 R300_INIT_ATOM(vs_constants, 0);
190 R300_INIT_ATOM(clip_state, has_tcl ? 3 + (6 * 4) : 0);
191 /* VAP, RS, GA, GB, SU, SC. */
192 R300_INIT_ATOM(rs_block_state, 0);
193 R300_INIT_ATOM(rs_state, 0);
194 /* SC, US. */
195 R300_INIT_ATOM(fb_state_pipelined, 8);
196 /* US. */
197 R300_INIT_ATOM(fs, 0);
198 R300_INIT_ATOM(fs_rc_constant_state, 0);
199 R300_INIT_ATOM(fs_constants, 0);
200 /* TX. */
201 R300_INIT_ATOM(texture_cache_inval, 2);
202 R300_INIT_ATOM(textures_state, 0);
203 /* Clear commands */
204 R300_INIT_ATOM(hiz_clear, r300->screen->caps.hiz_ram > 0 ? 4 : 0);
205 R300_INIT_ATOM(zmask_clear, r300->screen->caps.zmask_ram > 0 ? 4 : 0);
206 R300_INIT_ATOM(cmask_clear, 4);
207 /* ZB (unpipelined), SU. */
208 R300_INIT_ATOM(query_start, 4);
209
210 /* Replace emission functions for r500. */
211 if (is_r500) {
212 r300->fs.emit = r500_emit_fs;
213 r300->fs_rc_constant_state.emit = r500_emit_fs_rc_constant_state;
214 r300->fs_constants.emit = r500_emit_fs_constants;
215 }
216
217 /* Some non-CSO atoms need explicit space to store the state locally. */
218 R300_ALLOC_ATOM(aa_state, r300_aa_state);
219 R300_ALLOC_ATOM(blend_color_state, r300_blend_color_state);
220 R300_ALLOC_ATOM(clip_state, r300_clip_state);
221 R300_ALLOC_ATOM(hyperz_state, r300_hyperz_state);
222 R300_ALLOC_ATOM(invariant_state, r300_invariant_state);
223 R300_ALLOC_ATOM(textures_state, r300_textures_state);
224 R300_ALLOC_ATOM(vap_invariant_state, r300_vap_invariant_state);
225 R300_ALLOC_ATOM(viewport_state, r300_viewport_state);
226 R300_ALLOC_ATOM(ztop_state, r300_ztop_state);
227 R300_ALLOC_ATOM(fb_state, pipe_framebuffer_state);
228 R300_ALLOC_ATOM(gpu_flush, pipe_framebuffer_state);
229 r300->sample_mask.state = malloc(4);
230 R300_ALLOC_ATOM(scissor_state, pipe_scissor_state);
231 R300_ALLOC_ATOM(rs_block_state, r300_rs_block);
232 R300_ALLOC_ATOM(fs_constants, r300_constant_buffer);
233 R300_ALLOC_ATOM(vs_constants, r300_constant_buffer);
234 if (!r300->screen->caps.has_tcl) {
235 R300_ALLOC_ATOM(vertex_stream_state, r300_vertex_stream_state);
236 }
237
238 /* Some non-CSO atoms don't use the state pointer. */
239 r300->fb_state_pipelined.allow_null_state = true;
240 r300->fs_rc_constant_state.allow_null_state = true;
241 r300->pvs_flush.allow_null_state = true;
242 r300->query_start.allow_null_state = true;
243 r300->texture_cache_inval.allow_null_state = true;
244
245 /* Some states must be marked as dirty here to properly set up
246 * hardware in the first command stream. */
247 r300_mark_atom_dirty(r300, &r300->invariant_state);
248 r300_mark_atom_dirty(r300, &r300->pvs_flush);
249 r300_mark_atom_dirty(r300, &r300->vap_invariant_state);
250 r300_mark_atom_dirty(r300, &r300->texture_cache_inval);
251 r300_mark_atom_dirty(r300, &r300->textures_state);
252
253 return true;
254 }
255
256 /* Not every gallium frontend calls every driver function before the first draw
257 * call and we must initialize the command buffers somehow. */
/* Not every gallium frontend calls every driver function before the first draw
 * call and we must initialize the command buffers somehow.
 *
 * Pushes zeroed default state through the normal set_* entry points, then
 * pre-records the static command buffers (GPU flush, VAP/general invariant
 * state, HyperZ) that r300_emit_* copies into each command stream. */
static void r300_init_states(struct pipe_context *pipe)
{
    struct r300_context *r300 = r300_context(pipe);
    struct pipe_blend_color bc = {{0}};
    struct pipe_clip_state cs = {{{0}}};
    struct pipe_scissor_state ss = {0};
    struct r300_gpu_flush *gpuflush =
        (struct r300_gpu_flush*)r300->gpu_flush.state;
    struct r300_vap_invariant_state *vap_invariant =
        (struct r300_vap_invariant_state*)r300->vap_invariant_state.state;
    struct r300_invariant_state *invariant =
        (struct r300_invariant_state*)r300->invariant_state.state;

    CB_LOCALS;

    /* Seed the atoms that mirror frontend state with harmless defaults. */
    pipe->set_blend_color(pipe, &bc);
    pipe->set_clip_state(pipe, &cs);
    pipe->set_scissor_states(pipe, 0, 1, &ss);
    pipe->set_sample_mask(pipe, ~0);

    /* Initialize the GPU flush. */
    {
        BEGIN_CB(gpuflush->cb_flush_clean, 6);

        /* Flush and free renderbuffer caches. */
        OUT_CB_REG(R300_RB3D_DSTCACHE_CTLSTAT,
                   R300_RB3D_DSTCACHE_CTLSTAT_DC_FREE_FREE_3D_TAGS |
                   R300_RB3D_DSTCACHE_CTLSTAT_DC_FLUSH_FLUSH_DIRTY_3D);
        OUT_CB_REG(R300_ZB_ZCACHE_CTLSTAT,
                   R300_ZB_ZCACHE_CTLSTAT_ZC_FLUSH_FLUSH_AND_FREE |
                   R300_ZB_ZCACHE_CTLSTAT_ZC_FREE_FREE);

        /* Wait until the GPU is idle.
         * This fixes random pixels sometimes appearing probably caused
         * by incomplete rendering. */
        OUT_CB_REG(RADEON_WAIT_UNTIL, RADEON_WAIT_3D_IDLECLEAN);
        END_CB;
    }

    /* Initialize the VAP invariant state.
     * The dword count must match the size chosen in r300_setup_atoms
     * (is_r500 || !has_tcl ? 11 : 9), hence the chip-dependent tail. */
    {
        BEGIN_CB(vap_invariant->cb, r300->vap_invariant_state.size);
        OUT_CB_REG(VAP_PVS_VTX_TIMEOUT_REG, 0xffff);
        OUT_CB_REG_SEQ(R300_VAP_GB_VERT_CLIP_ADJ, 4);
        OUT_CB_32F(1.0);
        OUT_CB_32F(1.0);
        OUT_CB_32F(1.0);
        OUT_CB_32F(1.0);
        OUT_CB_REG(R300_VAP_PSC_SGN_NORM_CNTL, R300_SGN_NORM_NO_ZERO);

        if (r300->screen->caps.is_r500) {
            OUT_CB_REG(R500_VAP_TEX_TO_COLOR_CNTL, 0);
        } else if (!r300->screen->caps.has_tcl) {
            /* RSxxx:
             * Static VAP setup since r300_emit_vs_state() is never called.
             */
            OUT_CB_REG(R300_VAP_CNTL, R300_PVS_NUM_SLOTS(10) |
                                      R300_PVS_NUM_CNTLRS(5) |
                                      R300_PVS_NUM_FPUS(2) |
                                      R300_PVS_VF_MAX_VTX_NUM(5));
        }
        END_CB;
    }

    /* Initialize the invariant state.
     * Size must match 14 + (is_rv350 ? 4 : 0) + (is_r500 ? 4 : 0). */
    {
        BEGIN_CB(invariant->cb, r300->invariant_state.size);
        OUT_CB_REG(R300_GB_SELECT, 0);
        OUT_CB_REG(R300_FG_FOG_BLEND, 0);
        OUT_CB_REG(R300_GA_OFFSET, 0);
        OUT_CB_REG(R300_SU_TEX_WRAP, 0);
        OUT_CB_REG(R300_SU_DEPTH_SCALE, 0x4B7FFFFF);
        OUT_CB_REG(R300_SU_DEPTH_OFFSET, 0);
        OUT_CB_REG(R300_SC_EDGERULE, 0x2DA49525);

        if (r300->screen->caps.is_rv350) {
            OUT_CB_REG(R500_RB3D_DISCARD_SRC_PIXEL_LTE_THRESHOLD, 0x01010101);
            OUT_CB_REG(R500_RB3D_DISCARD_SRC_PIXEL_GTE_THRESHOLD, 0xFEFEFEFE);
        }

        if (r300->screen->caps.is_r500) {
            OUT_CB_REG(R500_GA_COLOR_CONTROL_PS3, 0);
            OUT_CB_REG(R500_SU_TEX_WRAP_PS3, 0);
        }
        END_CB;
    }

    /* Initialize the hyperz state.
     * Size must match is_r500 || is_rv350 ? 10 : 8 from r300_setup_atoms. */
    {
        struct r300_hyperz_state *hyperz =
            (struct r300_hyperz_state*)r300->hyperz_state.state;
        BEGIN_CB(&hyperz->cb_flush_begin, r300->hyperz_state.size);
        OUT_CB_REG(R300_ZB_ZCACHE_CTLSTAT,
                   R300_ZB_ZCACHE_CTLSTAT_ZC_FLUSH_FLUSH_AND_FREE);
        OUT_CB_REG(R300_ZB_BW_CNTL, 0);
        OUT_CB_REG(R300_ZB_DEPTHCLEARVALUE, 0);
        OUT_CB_REG(R300_SC_HYPERZ, R300_SC_HYPERZ_ADJ_2);

        if (r300->screen->caps.is_r500 || r300->screen->caps.is_rv350) {
            OUT_CB_REG(R300_GB_Z_PEQ_CONFIG, 0);
        }
        END_CB;
    }
}
362
/* pipe_screen::context_create implementation: allocate and wire up a full
 * r300 context (winsys CS, SW-TCL draw module if needed, state atoms,
 * uploaders, blitter, helper resources). On any failure, unwinds through
 * r300_destroy_context, which tolerates partially-initialized state. */
struct pipe_context* r300_create_context(struct pipe_screen* screen,
                                         void *priv, unsigned flags)
{
    struct r300_context* r300 = CALLOC_STRUCT(r300_context);
    struct r300_screen* r300screen = r300_screen(screen);
    struct radeon_winsys *rws = r300screen->rws;

    if (!r300)
        return NULL;

    r300->rws = rws;
    r300->screen = r300screen;

    r300->context.screen = screen;
    r300->context.priv = priv;
    r300->context.set_debug_callback = u_default_set_debug_callback;

    /* Set destroy first so the fail path below can always use it. */
    r300->context.destroy = r300_destroy_context;

    slab_create_child(&r300->pool_transfers, &r300screen->pool_transfers);

    r300->ctx = rws->ctx_create(rws, RADEON_CTX_PRIORITY_MEDIUM, false);
    if (!r300->ctx)
        goto fail;


    /* r300_flush_callback gets `r300` back as its opaque data pointer. */
    if (!rws->cs_create(&r300->cs, r300->ctx, AMD_IP_GFX, r300_flush_callback, r300))
        goto fail;

    if (!r300screen->caps.has_tcl) {
        /* Create a Draw. This is used for SW TCL. */
        r300->draw = draw_create(&r300->context);
        if (r300->draw == NULL)
            goto fail;
        /* Enable our renderer. */
        draw_set_rasterize_stage(r300->draw, r300_draw_stage(r300));
        /* Disable converting points/lines to triangles. */
        draw_wide_line_threshold(r300->draw, 10000000.f);
        draw_wide_point_threshold(r300->draw, 10000000.f);
        draw_wide_point_sprites(r300->draw, false);
        draw_enable_line_stipple(r300->draw, true);
        draw_enable_point_sprites(r300->draw, false);
    }

    if (!r300_setup_atoms(r300))
        goto fail;

    /* Install the pipe_context vtable entries, then emit initial state. */
    r300_init_blit_functions(r300);
    r300_init_flush_functions(r300);
    r300_init_query_functions(r300);
    r300_init_state_functions(r300);
    r300_init_resource_functions(r300);
    r300_init_render_functions(r300);
    r300_init_states(&r300->context);

    r300->context.create_video_codec = vl_create_decoder;
    r300->context.create_video_buffer = vl_video_buffer_create;

    /* NOTE(review): these three u_upload_create results are not checked for
     * NULL before use later on - presumably acceptable OOM policy; verify. */
    r300->uploader = u_upload_create(&r300->context, 128 * 1024,
                                     PIPE_BIND_CUSTOM, PIPE_USAGE_STREAM, 0);
    r300->context.stream_uploader = u_upload_create(&r300->context, 1024 * 1024,
                                                    0, PIPE_USAGE_STREAM, 0);
    r300->context.const_uploader = u_upload_create(&r300->context, 1024 * 1024,
                                                   PIPE_BIND_CONSTANT_BUFFER,
                                                   PIPE_USAGE_STREAM, 0);

    r300->blitter = util_blitter_create(&r300->context);
    if (r300->blitter == NULL)
        goto fail;
    r300->blitter->draw_rectangle = r300_blitter_draw_rectangle;

    /* The KIL opcode needs the first texture unit to be enabled
     * on r3xx-r4xx. In order to calm down the CS checker, we bind this
     * dummy texture there. */
    if (!r300->screen->caps.is_r500) {
        struct pipe_resource *tex;
        struct pipe_resource rtempl = {0};
        struct pipe_sampler_view vtempl = {0};

        /* 1x1 I8 texture; the sampled value is never used by KIL itself. */
        rtempl.target = PIPE_TEXTURE_2D;
        rtempl.format = PIPE_FORMAT_I8_UNORM;
        rtempl.usage = PIPE_USAGE_IMMUTABLE;
        rtempl.width0 = 1;
        rtempl.height0 = 1;
        rtempl.depth0 = 1;
        tex = screen->resource_create(screen, &rtempl);

        u_sampler_view_default_template(&vtempl, tex, tex->format);

        r300->texkill_sampler = (struct r300_sampler_view*)
            r300->context.create_sampler_view(&r300->context, tex, &vtempl);

        /* The sampler view holds its own reference now. */
        pipe_resource_reference(&tex, NULL);
    }

    if (r300screen->caps.has_tcl) {
        /* Dummy vertex buffer so the CS checker always sees a valid VB
         * binding; released in r300_release_referenced_objects. */
        struct pipe_resource vb;
        memset(&vb, 0, sizeof(vb));
        vb.target = PIPE_BUFFER;
        vb.format = PIPE_FORMAT_R8_UNORM;
        vb.usage = PIPE_USAGE_DEFAULT;
        vb.width0 = sizeof(float) * 16;
        vb.height0 = 1;
        vb.depth0 = 1;

        r300->dummy_vb.buffer.resource = screen->resource_create(screen, &vb);
        util_set_vertex_buffers(&r300->context, 1, false, &r300->dummy_vb);
    }

    {
        /* Depth-write-only DSA state used to decompress the zmask;
         * deleted in r300_release_referenced_objects. */
        struct pipe_depth_stencil_alpha_state dsa;
        memset(&dsa, 0, sizeof(dsa));
        dsa.depth_writemask = 1;

        r300->dsa_decompress_zmask =
            r300->context.create_depth_stencil_alpha_state(&r300->context,
                                                           &dsa);
    }

    r300->hyperz_time_of_last_flush = os_time_get();

    /* Register allocator state */
    rc_init_regalloc_state(&r300->fs_regalloc_state, RC_FRAGMENT_PROGRAM);
    rc_init_regalloc_state(&r300->vs_regalloc_state, RC_VERTEX_PROGRAM);

    /* Print driver info: always on debug builds, opt-in via DBG_INFO
     * on release builds. */
#if MESA_DEBUG
    {
#else
    if (DBG_ON(r300, DBG_INFO)) {
#endif
        fprintf(stderr,
                "r300: DRM version: %d.%d.%d, Name: %s, ID: 0x%04x, GB: %d, Z: %d\n"
                "r300: GART size: %u MB, VRAM size: %u MB\n"
                "r300: AA compression RAM: %s, Z compression RAM: %s, HiZ RAM: %s\n",
                r300->screen->info.drm_major,
                r300->screen->info.drm_minor,
                r300->screen->info.drm_patchlevel,
                screen->get_name(screen),
                r300->screen->info.pci_id,
                r300->screen->info.r300_num_gb_pipes,
                r300->screen->info.r300_num_z_pipes,
                r300->screen->info.gart_size_kb >> 10,
                r300->screen->info.vram_size_kb >> 10,
                "YES", /* XXX really? */
                r300->screen->caps.zmask_ram ? "YES" : "NO",
                r300->screen->caps.hiz_ram ? "YES" : "NO");
    }

    return &r300->context;

 fail:
    r300_destroy_context(&r300->context);
    return NULL;
}
518