Lines Matching 0xf000 in drivers/gpu/drm/radeon/cik_sdma.c

78 	return (rptr & 0x3fffc) >> 2;  in cik_sdma_get_rptr()
99 return (RREG32(reg) & 0x3fffc) >> 2; in cik_sdma_get_wptr()
120 WREG32(reg, (ring->wptr << 2) & 0x3fffc); in cik_sdma_set_wptr()
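
The three pointer helpers above convert between the byte offset the hardware keeps in its ring-buffer registers and the dword index the driver works with: 0x3fffc keeps bits [17:2], which both clamps the offset to the ring aperture and forces dword alignment. A standalone sketch of the same arithmetic (function names are mine, not the driver's):

    #include <stdint.h>

    /* Hardware RB pointers are byte offsets; the driver counts in dwords.
     * 0x3fffc keeps bits [17:2]: inside the aperture, dword aligned. */
    static uint32_t sdma_rptr_dwords(uint32_t hw_rptr)
    {
            return (hw_rptr & 0x3fffc) >> 2;    /* bytes -> dwords */
    }

    static uint32_t sdma_wptr_bytes(uint32_t sw_wptr)
    {
            return (sw_wptr << 2) & 0x3fffc;    /* dwords -> bytes */
    }
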
136 u32 extra_bits = (ib->vm ? ib->vm->ids[ib->ring].id : 0) & 0xf; in cik_sdma_ring_ib_execute()
143 radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_WRITE, SDMA_WRITE_SUB_OPCODE_LINEAR, 0)); in cik_sdma_ring_ib_execute()
144 radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc); in cik_sdma_ring_ib_execute()
152 radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_NOP, 0, 0)); in cik_sdma_ring_ib_execute()
153 radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_INDIRECT_BUFFER, 0, extra_bits)); in cik_sdma_ring_ib_execute()
154 radeon_ring_write(ring, ib->gpu_addr & 0xffffffe0); /* base must be 32 byte aligned */ in cik_sdma_ring_ib_execute()
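
Together these lines trace the shape of an indirect-buffer dispatch: a WRITE packet first updates next_rptr in the writeback page, NOPs pad the ring so the INDIRECT_BUFFER packet starts on an 8-dword boundary, and the IB packet itself carries a 32-byte-aligned base address with the 4-bit VM id in the header's extra field. A minimal sketch of that dword stream, with emit() standing in for radeon_ring_write() and opcode values I believe match cikd.h (verify against your tree):

    #include <stdint.h>

    #define SDMA_PACKET(op, sub_op, e) \
            ((((e) & 0xFFFFu) << 16) | (((sub_op) & 0xFFu) << 8) | ((op) & 0xFFu))
    #define SDMA_OPCODE_WRITE            2  /* assumed values from cikd.h */
    #define SDMA_WRITE_SUB_OPCODE_LINEAR 0
    #define SDMA_OPCODE_INDIRECT_BUFFER  4

    static void emit(uint32_t dw);          /* stand-in for radeon_ring_write() */

    static void sdma_emit_ib(uint64_t next_rptr_addr, uint32_t next_rptr,
                             uint64_t ib_addr, uint32_t ib_len_dw, uint32_t vm_id)
    {
            /* update next_rptr in the writeback page */
            emit(SDMA_PACKET(SDMA_OPCODE_WRITE, SDMA_WRITE_SUB_OPCODE_LINEAR, 0));
            emit((uint32_t)next_rptr_addr & 0xfffffffc); /* dst lo, dword aligned */
            emit((uint32_t)(next_rptr_addr >> 32));      /* dst hi */
            emit(1);                                     /* one dword follows */
            emit(next_rptr);
            /* dispatch the IB; extra bits carry the 4-bit VM id */
            emit(SDMA_PACKET(SDMA_OPCODE_INDIRECT_BUFFER, 0, vm_id & 0xf));
            emit((uint32_t)ib_addr & 0xffffffe0);        /* base, 32-byte aligned */
            emit((uint32_t)(ib_addr >> 32));
            emit(ib_len_dw);
    }
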
181 radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_POLL_REG_MEM, 0, extra_bits)); in cik_sdma_hdp_flush_ring_emit()
186 radeon_ring_write(ring, (0xfff << 16) | 10); /* retry count, poll interval */ in cik_sdma_hdp_flush_ring_emit()
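
The closing dword of a POLL_REG_MEM packet packs the retry count into the upper half and the poll interval into the lower bits, so (0xfff << 16) | 10 reads as "retry up to 0xfff times, polling every 10 intervals". A tiny sketch of that encoding (helper name is hypothetical):

    #include <stdint.h>

    /* Last dword of an SDMA POLL_REG_MEM packet: retry count in the
     * upper half, poll interval in the lower bits. */
    static uint32_t poll_retry_dw(uint32_t retry, uint32_t interval)
    {
            return ((retry & 0xfff) << 16) | (interval & 0xffff);
    }
    /* (0xfff << 16) | 10 == poll_retry_dw(0xfff, 10) */
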
206 radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_FENCE, 0, 0)); in cik_sdma_fence_ring_emit()
211 radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_TRAP, 0, 0)); in cik_sdma_fence_ring_emit()
233 u32 extra_bits = emit_wait ? 0 : SDMA_SEMAPHORE_EXTRA_S; in cik_sdma_semaphore_ring_emit()
235 radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_SEMAPHORE, 0, extra_bits)); in cik_sdma_semaphore_ring_emit()
236 radeon_ring_write(ring, addr & 0xfffffff8); in cik_sdma_semaphore_ring_emit()
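
A fence is a FENCE packet (64-bit address plus sequence number) chased by a TRAP packet so the engine raises an interrupt once the fence value lands. Semaphore packets carry only an address, which must be 8-byte aligned (hence the 0xfffffff8 mask), and the signal-versus-wait choice rides in the extra bits: per the line above, SDMA_SEMAPHORE_EXTRA_S set means signal, clear means wait. A sketch of the semaphore emit under those assumptions, reusing the emit() stub and SDMA_PACKET macro from the earlier sketch:

    #define SDMA_OPCODE_SEMAPHORE 7        /* assumed value from cikd.h */

    /* extra_s_bit is the caller-supplied SDMA_SEMAPHORE_EXTRA_S mask;
     * its exact bit position lives in cikd.h. */
    static void sdma_emit_semaphore(uint64_t sem_addr, int signal,
                                    uint32_t extra_s_bit)
    {
            emit(SDMA_PACKET(SDMA_OPCODE_SEMAPHORE, 0, signal ? extra_s_bit : 0));
            emit((uint32_t)sem_addr & 0xfffffff8);  /* must be 8-byte aligned */
            emit((uint32_t)(sem_addr >> 32));
    }
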
258 for (i = 0; i < 2; i++) { in cik_sdma_gfx_stop()
259 if (i == 0) in cik_sdma_gfx_stop()
266 WREG32(SDMA0_GFX_IB_CNTL + reg_offset, 0); in cik_sdma_gfx_stop()
279 WREG32(SRBM_SOFT_RESET, 0); in cik_sdma_gfx_stop()
308 for (i = 0; i < 2; i++) { in cik_sdma_ctx_switch_enable()
309 if (i == 0) in cik_sdma_ctx_switch_enable()
340 for (i = 0; i < 2; i++) { in cik_sdma_enable()
341 if (i == 0) in cik_sdma_enable()
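
CIK carries two SDMA engines, so the stop, context-switch, and enable paths all run the same two-iteration loop: instance 0 selects SDMA0_REGISTER_OFFSET, instance 1 selects SDMA1_REGISTER_OFFSET, after which the SDMA0_* register names address either engine. A sketch of the pattern, with the offsets passed in rather than hard-coded and wreg32() standing in for WREG32():

    #include <stdint.h>

    static void wreg32(uint32_t reg, uint32_t val);  /* stand-in for WREG32() */

    /* One register layout, two base offsets: the i == 0 / i == 1 tests in
     * the loops above pick which engine's block the SDMA0_* name hits. */
    static void sdma_write_both(uint32_t reg, uint32_t val,
                                uint32_t sdma0_off, uint32_t sdma1_off)
    {
            int i;

            for (i = 0; i < 2; i++) {
                    uint32_t reg_offset = (i == 0) ? sdma0_off : sdma1_off;

                    wreg32(reg + reg_offset, val);
            }
    }
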
362 * Returns 0 for success, error for failure.
372 for (i = 0; i < 2; i++) { in cik_sdma_gfx_resume()
373 if (i == 0) { in cik_sdma_gfx_resume()
383 WREG32(SDMA0_SEM_INCOMPLETE_TIMER_CNTL + reg_offset, 0); in cik_sdma_gfx_resume()
384 WREG32(SDMA0_SEM_WAIT_FAIL_TIMER_CNTL + reg_offset, 0); in cik_sdma_gfx_resume()
395 WREG32(SDMA0_GFX_RB_RPTR + reg_offset, 0); in cik_sdma_gfx_resume()
396 WREG32(SDMA0_GFX_RB_WPTR + reg_offset, 0); in cik_sdma_gfx_resume()
400 upper_32_bits(rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFF); in cik_sdma_gfx_resume()
402 ((rdev->wb.gpu_addr + wb_offset) & 0xFFFFFFFC)); in cik_sdma_gfx_resume()
410 ring->wptr = 0; in cik_sdma_gfx_resume()
436 return 0; in cik_sdma_gfx_resume()
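
Resume resets the ring (the rptr and wptr registers are zeroed, the software wptr too) and then points the hardware at the readback page: the upper half of the writeback address goes to the HI register (the & 0xFFFFFFFF is a no-op truncation) and the lower half is masked with 0xFFFFFFFC so the address stays dword aligned. A sketch of that split (helper name and register parameters are mine):

    #include <stdint.h>

    static void wreg32(uint32_t reg, uint32_t val);  /* stand-in for WREG32() */

    /* Split a 64-bit writeback GPU address across the HI/LO rptr-address
     * registers. */
    static void sdma_set_rptr_writeback(uint64_t wb_addr,
                                        uint32_t addr_hi_reg, uint32_t addr_lo_reg)
    {
            wreg32(addr_hi_reg, (uint32_t)(wb_addr >> 32));
            wreg32(addr_lo_reg, (uint32_t)wb_addr & 0xFFFFFFFC); /* dword aligned */
    }
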
445 * Returns 0 for success, error for failure.
450 return 0; in cik_sdma_rlc_resume()
459 * Returns 0 for success, -EINVAL if the ucode is not available.
483 WREG32(SDMA0_UCODE_ADDR + SDMA0_REGISTER_OFFSET, 0); in cik_sdma_load_microcode()
484 for (i = 0; i < fw_size; i++) in cik_sdma_load_microcode()
492 WREG32(SDMA0_UCODE_ADDR + SDMA1_REGISTER_OFFSET, 0); in cik_sdma_load_microcode()
493 for (i = 0; i < fw_size; i++) in cik_sdma_load_microcode()
501 WREG32(SDMA0_UCODE_ADDR + SDMA0_REGISTER_OFFSET, 0); in cik_sdma_load_microcode()
502 for (i = 0; i < CIK_SDMA_UCODE_SIZE; i++) in cik_sdma_load_microcode()
508 WREG32(SDMA0_UCODE_ADDR + SDMA1_REGISTER_OFFSET, 0); in cik_sdma_load_microcode()
509 for (i = 0; i < CIK_SDMA_UCODE_SIZE; i++) in cik_sdma_load_microcode()
514 WREG32(SDMA0_UCODE_ADDR + SDMA0_REGISTER_OFFSET, 0); in cik_sdma_load_microcode()
515 WREG32(SDMA0_UCODE_ADDR + SDMA1_REGISTER_OFFSET, 0); in cik_sdma_load_microcode()
516 return 0; in cik_sdma_load_microcode()
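
Microcode loading is one auto-incrementing write port per engine: rewind SDMA0_UCODE_ADDR to 0, stream the firmware dwords into SDMA0_UCODE_DATA, rewind the address register again, and repeat for the second engine through SDMA1_REGISTER_OFFSET. The two loop pairs differ only in where the word count comes from: fw_size is read from the new-style firmware header, while the legacy path falls back to the fixed CIK_SDMA_UCODE_SIZE. A sketch of one engine's load (in the driver the data is __le32 and goes through le32_to_cpup()):

    #include <stdint.h>

    static void wreg32(uint32_t reg, uint32_t val);  /* stand-in for WREG32() */

    static void sdma_load_ucode(const uint32_t *fw_data, uint32_t fw_size,
                                uint32_t ucode_addr_reg, uint32_t ucode_data_reg)
    {
            uint32_t i;

            wreg32(ucode_addr_reg, 0);       /* rewind the ucode RAM pointer */
            for (i = 0; i < fw_size; i++)
                    wreg32(ucode_data_reg, fw_data[i]);  /* port auto-increments */
            wreg32(ucode_addr_reg, 0);       /* rewind again when done */
    }
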
525 * Returns 0 for success, error for failure.
546 return 0; in cik_sdma_resume()
589 int r = 0; in cik_copy_dma()
594 num_loops = DIV_ROUND_UP(size_in_bytes, 0x1fffff); in cik_copy_dma()
605 for (i = 0; i < num_loops; i++) { in cik_copy_dma()
607 if (cur_size_in_bytes > 0x1fffff) in cik_copy_dma()
608 cur_size_in_bytes = 0x1fffff; in cik_copy_dma()
610 radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_COPY, SDMA_COPY_SUB_OPCODE_LINEAR, 0)); in cik_copy_dma()
612 radeon_ring_write(ring, 0); /* src/dst endian swap */ in cik_copy_dma()
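
A single COPY LINEAR packet moves at most 0x1fffff bytes (a 21-bit byte count), so the copy is split into DIV_ROUND_UP(size, 0x1fffff) loops, each emitting a 7-dword packet: header, byte count, endian-swap word, then the 64-bit source and destination addresses. A standalone sketch of the chunking, with emit_copy_packet() as a hypothetical stand-in for those seven radeon_ring_write() calls:

    #include <stdint.h>

    #define SDMA_MAX_COPY_BYTES 0x1fffff    /* 21-bit byte count per packet */

    static void emit_copy_packet(uint64_t src, uint64_t dst, uint32_t bytes);

    static void sdma_copy(uint64_t src, uint64_t dst, uint64_t size)
    {
            while (size) {
                    uint32_t cur = size > SDMA_MAX_COPY_BYTES ?
                                   SDMA_MAX_COPY_BYTES : (uint32_t)size;

                    emit_copy_packet(src, dst, cur);
                    src  += cur;
                    dst  += cur;
                    size -= cur;
            }
    }
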
642 * Returns 0 for success, error for failure.
660 tmp = 0xCAFEDEAD; in cik_sdma_ring_test()
668 radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_WRITE, SDMA_WRITE_SUB_OPCODE_LINEAR, 0)); in cik_sdma_ring_test()
672 radeon_ring_write(ring, 0xDEADBEEF); in cik_sdma_ring_test()
675 for (i = 0; i < rdev->usec_timeout; i++) { in cik_sdma_ring_test()
677 if (tmp == 0xDEADBEEF) in cik_sdma_ring_test()
685 DRM_ERROR("radeon: ring %d test failed (0x%08X)\n", in cik_sdma_ring_test()
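
The ring test is a round trip through the engine: seed a scratch location with 0xCAFEDEAD, emit one WRITE packet that stores 0xDEADBEEF there, then busy-poll for up to rdev->usec_timeout microseconds until the value flips; if it never does, the ring is declared dead. The IB test below reuses the same poll after submitting the write through an indirect buffer. A sketch of the poll half, with read_scratch() and udelay() as stand-ins for the driver's scratch readback and the kernel delay:

    #include <errno.h>
    #include <stdint.h>

    static uint32_t read_scratch(void);     /* stand-in for the scratch readback */
    static void udelay(unsigned int usecs); /* stand-in for the kernel delay */

    static int sdma_poll_test(unsigned int usec_timeout)
    {
            unsigned int i;

            for (i = 0; i < usec_timeout; i++) {
                    if (read_scratch() == 0xDEADBEEF)
                            return 0;       /* the engine executed our packet */
                    udelay(1);
            }
            return -EINVAL;                 /* still 0xCAFEDEAD: test failed */
    }
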
699 * Returns 0 on success, error on failure.
707 u32 tmp = 0; in cik_sdma_ib_test()
717 tmp = 0xCAFEDEAD; in cik_sdma_ib_test()
726 ib.ptr[0] = SDMA_PACKET(SDMA_OPCODE_WRITE, SDMA_WRITE_SUB_OPCODE_LINEAR, 0); in cik_sdma_ib_test()
730 ib.ptr[4] = 0xDEADBEEF; in cik_sdma_ib_test()
741 if (r < 0) { in cik_sdma_ib_test()
744 } else if (r == 0) { in cik_sdma_ib_test()
748 r = 0; in cik_sdma_ib_test()
749 for (i = 0; i < rdev->usec_timeout; i++) { in cik_sdma_ib_test()
751 if (tmp == 0xDEADBEEF) in cik_sdma_ib_test()
758 DRM_ERROR("radeon: ib test failed (0x%08X)\n", tmp); in cik_sdma_ib_test()
809 if (bytes > 0x1FFFF8) in cik_sdma_vm_copy_pages()
810 bytes = 0x1FFFF8; in cik_sdma_vm_copy_pages()
813 SDMA_WRITE_SUB_OPCODE_LINEAR, 0); in cik_sdma_vm_copy_pages()
815 ib->ptr[ib->length_dw++] = 0; /* src/dst endian swap */ in cik_sdma_vm_copy_pages()
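
For page-table copies the per-packet cap drops from 0x1FFFFF to 0x1FFFF8 bytes: the count still has to fit the 21-bit field, but it is rounded down to a multiple of 8 so a packet never splits a 64-bit PTE. In other words:

    #include <stdint.h>

    /* 0x1FFFF8 == 0x1FFFFF & ~7: the largest 21-bit byte count that is a
     * whole number of 8-byte page-table entries. */
    static unsigned int ptes_this_packet(unsigned int count)
    {
            unsigned int bytes = count * 8;

            if (bytes > 0x1FFFF8)
                    bytes = 0x1FFFF8;
            return bytes / 8;               /* at most 0x3FFFF PTEs */
    }
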
851 if (ndw > 0xFFFFE) in cik_sdma_vm_write_pages()
852 ndw = 0xFFFFE; in cik_sdma_vm_write_pages()
856 SDMA_WRITE_SUB_OPCODE_LINEAR, 0); in cik_sdma_vm_write_pages()
860 for (; ndw > 0; ndw -= 2, --count, pe += 8) { in cik_sdma_vm_write_pages()
866 value = 0; in cik_sdma_vm_write_pages()
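
The WRITE variant embeds the page-table values in the packet body, two dwords per 64-bit entry, which is why ndw is capped at 0xFFFFE and the loop steps ndw by 2 while advancing pe by 8 bytes per entry. A sketch of that inner loop, with pte_value() as a hypothetical stand-in for the address/flags logic on the elided lines:

    #include <stdint.h>

    static uint64_t pte_value(void);        /* hypothetical: addr/flags logic */

    static void sdma_write_ptes(uint32_t *ib, uint32_t *length_dw,
                                unsigned int ndw)
    {
            for (; ndw > 0; ndw -= 2) {
                    uint64_t value = pte_value();

                    ib[(*length_dw)++] = (uint32_t)value;         /* low dword */
                    ib[(*length_dw)++] = (uint32_t)(value >> 32); /* high dword */
            }
    }
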
900 if (ndw > 0x7FFFF) in cik_sdma_vm_set_pages()
901 ndw = 0x7FFFF; in cik_sdma_vm_set_pages()
906 value = 0; in cik_sdma_vm_set_pages()
909 ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_GENERATE_PTE_PDE, 0, 0); in cik_sdma_vm_set_pages()
913 ib->ptr[ib->length_dw++] = 0; in cik_sdma_vm_set_pages()
917 ib->ptr[ib->length_dw++] = 0; in cik_sdma_vm_set_pages()
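
GENERATE_PTE_PDE has the engine synthesize a run of entries itself: the packet names the destination, a flags mask, the first value, a per-entry increment, and the entry count, which is why ndw is capped at 0x7FFFF here and why two dwords in the stream are plain zeros (unused high halves). A sketch of the 10-dword body as I read it from the surrounding driver code; treat the layout as a reconstruction and verify against your tree:

    #include <stdint.h>

    #define SDMA_PACKET(op, sub_op, e) \
            ((((e) & 0xFFFFu) << 16) | (((sub_op) & 0xFFu) << 8) | ((op) & 0xFFu))
    #define SDMA_OPCODE_GENERATE_PTE_PDE 12 /* assumed value from cikd.h */

    static void sdma_gen_pte_pde(uint32_t *p, uint64_t pe, uint32_t flags,
                                 uint64_t value, uint32_t incr, uint32_t ndw)
    {
            unsigned int n = 0;

            p[n++] = SDMA_PACKET(SDMA_OPCODE_GENERATE_PTE_PDE, 0, 0);
            p[n++] = (uint32_t)pe;            /* destination address, low  */
            p[n++] = (uint32_t)(pe >> 32);    /* destination address, high */
            p[n++] = flags;                   /* flags / write mask        */
            p[n++] = 0;
            p[n++] = (uint32_t)value;         /* first entry's value, low  */
            p[n++] = (uint32_t)(value >> 32); /* first entry's value, high */
            p[n++] = incr;                    /* increment per entry       */
            p[n++] = 0;
            p[n++] = ndw;                     /* number of entries         */
    }
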
934 while (ib->length_dw & 0x7) in cik_sdma_vm_pad_ib()
935 ib->ptr[ib->length_dw++] = SDMA_PACKET(SDMA_OPCODE_NOP, 0, 0); in cik_sdma_vm_pad_ib()
947 u32 extra_bits = (SDMA_POLL_REG_MEM_EXTRA_OP(0) | in cik_dma_vm_flush()
948 SDMA_POLL_REG_MEM_EXTRA_FUNC(0)); /* always */ in cik_dma_vm_flush()
950 radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_SRBM_WRITE, 0, 0xf000)); in cik_dma_vm_flush()
959 radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_SRBM_WRITE, 0, 0xf000)); in cik_dma_vm_flush()
963 radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_SRBM_WRITE, 0, 0xf000)); in cik_dma_vm_flush()
965 radeon_ring_write(ring, 0); in cik_dma_vm_flush()
967 radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_SRBM_WRITE, 0, 0xf000)); in cik_dma_vm_flush()
969 radeon_ring_write(ring, 0); in cik_dma_vm_flush()
971 radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_SRBM_WRITE, 0, 0xf000)); in cik_dma_vm_flush()
975 radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_SRBM_WRITE, 0, 0xf000)); in cik_dma_vm_flush()
977 radeon_ring_write(ring, 0); in cik_dma_vm_flush()
979 radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_SRBM_WRITE, 0, 0xf000)); in cik_dma_vm_flush()
981 radeon_ring_write(ring, VMID(0)); in cik_dma_vm_flush()
987 radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_SRBM_WRITE, 0, 0xf000)); in cik_dma_vm_flush()
991 radeon_ring_write(ring, SDMA_PACKET(SDMA_OPCODE_POLL_REG_MEM, 0, extra_bits)); in cik_dma_vm_flush()
993 radeon_ring_write(ring, 0); in cik_dma_vm_flush()
994 radeon_ring_write(ring, 0); /* reference */ in cik_dma_vm_flush()
995 radeon_ring_write(ring, 0); /* mask */ in cik_dma_vm_flush()
996 radeon_ring_write(ring, (0xfff << 16) | 10); /* retry count, poll interval */ in cik_dma_vm_flush()
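
Every SDMA_PACKET(SDMA_OPCODE_SRBM_WRITE, 0, 0xf000) above is a register write issued from the ring: the 0xf000 in the 16-bit extra field lands in the header's top nibble as the byte-enable (all four bytes), and the two dwords that follow (mostly elided from these matches) are the register offset in dwords and the value to write. The flush then finishes with the POLL_REG_MEM whose compare function is "always" (reference 0, mask 0), i.e. a fixed settle wait rather than a real comparison. A sketch of the SRBM write helper under those assumptions, with emit() again standing in for radeon_ring_write():

    #include <stdint.h>

    #define SDMA_PACKET(op, sub_op, e) \
            ((((e) & 0xFFFFu) << 16) | (((sub_op) & 0xFFu) << 8) | ((op) & 0xFFu))
    #define SDMA_OPCODE_SRBM_WRITE 14       /* assumed value from cikd.h */

    static void emit(uint32_t dw);          /* stand-in for radeon_ring_write() */

    /* Write one register through the ring; 0xf000 in the extra field is
     * the all-bytes byte-enable in the header's top nibble. */
    static void sdma_srbm_write(uint32_t reg_byte_addr, uint32_t val)
    {
            emit(SDMA_PACKET(SDMA_OPCODE_SRBM_WRITE, 0, 0xf000));
            emit(reg_byte_addr >> 2);       /* register offset in dwords */
            emit(val);
    }
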