package xiangshan.mem

import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.cache.{DCacheWordIO, TlbRequestIO, TlbCmd, MemoryOpConstants}
import xiangshan.backend.LSUOpType

// Functional unit for RISC-V "A"-extension instructions (LR/SC and AMOs).
// Each atomic is serialized through a small FSM: address translation via the
// dtlb, draining the store buffer (so the atomic observes all older committed
// stores), a single dcache word request, then writeback. Only one atomic is
// in flight at a time.
class AtomicsUnit extends XSModule with MemoryOpConstants{
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new ExuInput))      // issued atomic uop (src1 = vaddr, src2 = store data)
    val out = Decoupled(new ExuOutput)             // writeback to the backend
    val dcache = new DCacheWordIO                  // word-granularity dcache port for the atomic access
    val dtlb = new TlbRequestIO                    // data TLB port for address translation
    val flush_sbuffer = new SbufferFlushBundle     // request/observe store-buffer drain
    val tlbFeedback = ValidIO(new TlbFeedback)     // feedback to the issue queue (always reported as hit, see below)
    val redirect = Flipped(ValidIO(new Redirect))  // pipeline redirect (flush) notification
  })

  //-------------------------------------------------------
  // Atomics Memory Access FSM
  //-------------------------------------------------------
  val s_invalid :: s_tlb :: s_flush_sbuffer_req :: s_flush_sbuffer_resp :: s_cache_req :: s_cache_resp :: s_finish :: Nil = Enum(7)
  val state = RegInit(s_invalid)
  // latched copy of the incoming uop, held for the whole FSM walk;
  // its exceptionVec is updated in place during translation
  val in = Reg(new ExuInput())
  // set when this unit detected an exception, so xtval is overridden with
  // this unit's vaddr; cleared on redirect
  val atom_override_xtval = RegInit(false.B)
  // paddr after translation
  val paddr = Reg(UInt())
  // dcache response data
  val resp_data = Reg(UInt())
  // latched from dcache resp meta.id — carries LR/SC status (see s_cache_resp)
  val is_lrsc_valid = Reg(Bool())

  // Cross-module wiring: export the (possibly faulting) vaddr and the override
  // flag for xtval reporting.
  // NOTE(review): "ATOM_EXECPTION_VADDR" is misspelled, but the name must match
  // the sink declared elsewhere — do not rename unilaterally.
  ExcitingUtils.addSource(in.src1, "ATOM_EXECPTION_VADDR")
  ExcitingUtils.addSource(atom_override_xtval, "ATOM_OVERRIDE_XTVAL")

  // assign default value to output signals
  // (per-state `when` blocks below selectively override these; Chisel
  // last-connect semantics make the later assignment win)
  io.in.ready := false.B
  io.out.valid := false.B
  io.out.bits := DontCare

  io.dcache.req.valid := false.B
  io.dcache.req.bits := DontCare
  io.dcache.s1_kill := false.B
  io.dcache.resp.ready := false.B

  io.dtlb.req.valid := false.B
  io.dtlb.req.bits := DontCare

  io.flush_sbuffer.valid := false.B

  XSDebug("state: %d\n", state)

  // s_invalid: idle; accept a new atomic uop and start translation
  when (state === s_invalid) {
    io.in.ready := true.B
    when (io.in.fire()) {
      in := io.in.bits
      state := s_tlb
    }
  }

  // Send TLB feedback to store issue queue.
  // We send feedback right after we receive the request;
  // also, we always treat amo as tlb hit,
  // since we will continue polling the tlb all by ourselves.
  io.tlbFeedback.valid := RegNext(io.in.fire())
  io.tlbFeedback.bits.hit := true.B
  io.tlbFeedback.bits.roqIdx := in.uop.roqIdx


  // tlb translation, manipulating signals && deal with exception
  when (state === s_tlb) {
    // send req to dtlb
    // keep firing until tlb hit
    io.dtlb.req.valid := true.B
    io.dtlb.req.bits.vaddr := in.src1
    io.dtlb.req.bits.roqIdx := in.uop.roqIdx
    val is_lr = in.uop.ctrl.fuOpType === LSUOpType.lr_w || in.uop.ctrl.fuOpType === LSUOpType.lr_d
    // LR is checked as a read; SC and all AMOs need write permission
    io.dtlb.req.bits.cmd := Mux(is_lr, TlbCmd.read, TlbCmd.write)
    io.dtlb.req.bits.debug.pc := in.uop.cf.pc
    io.dtlb.req.bits.debug.lsroqIdx := in.uop.lsroqIdx // FIXME: need update

    when(io.dtlb.resp.valid && !io.dtlb.resp.bits.miss){
      // exception handling
      // fuOpType(1,0) encodes the access size: 00=byte, 01=half, 10=word, 11=double
      val addrAligned = LookupTree(in.uop.ctrl.fuOpType(1,0), List(
        "b00".U -> true.B, //b
        "b01".U -> (in.src1(0) === 0.U), //h
        "b10".U -> (in.src1(1,0) === 0.U), //w
        "b11".U -> (in.src1(2,0) === 0.U) //d
      ))
      in.uop.cf.exceptionVec(storeAddrMisaligned) := !addrAligned
      in.uop.cf.exceptionVec(storePageFault) := io.dtlb.resp.bits.excp.pf.st
      in.uop.cf.exceptionVec(loadPageFault) := io.dtlb.resp.bits.excp.pf.ld
      val exception = !addrAligned || io.dtlb.resp.bits.excp.pf.st || io.dtlb.resp.bits.excp.pf.ld
      when (exception) {
        // check for exceptions
        // if there are exceptions, no need to execute it:
        // skip straight to writeback and arm the xtval override
        state := s_finish
        atom_override_xtval := true.B
      } .otherwise {
        paddr := io.dtlb.resp.bits.paddr
        state := s_flush_sbuffer_req
      }
    }
  }


  // s_flush_sbuffer_req: pulse the flush request for one cycle
  when (state === s_flush_sbuffer_req) {
    io.flush_sbuffer.valid := true.B
    state := s_flush_sbuffer_resp
  }

  // s_flush_sbuffer_resp: wait until the store buffer reports empty
  when (state === s_flush_sbuffer_resp) {
    when (io.flush_sbuffer.empty) {
      state := s_cache_req
    }
  }

  // s_cache_req: issue the single atomic request to the dcache
  when (state === s_cache_req) {
    io.dcache.req.valid := true.B
    // map fuOpType to the dcache memory-op command;
    // _w and _d variants of each op share the same M_* command
    io.dcache.req.bits.cmd := LookupTree(in.uop.ctrl.fuOpType, List(
      LSUOpType.lr_w -> M_XLR,
      LSUOpType.sc_w -> M_XSC,
      LSUOpType.amoswap_w -> M_XA_SWAP,
      LSUOpType.amoadd_w -> M_XA_ADD,
      LSUOpType.amoxor_w -> M_XA_XOR,
      LSUOpType.amoand_w -> M_XA_AND,
      LSUOpType.amoor_w -> M_XA_OR,
      LSUOpType.amomin_w -> M_XA_MIN,
      LSUOpType.amomax_w -> M_XA_MAX,
      LSUOpType.amominu_w -> M_XA_MINU,
      LSUOpType.amomaxu_w -> M_XA_MAXU,

      LSUOpType.lr_d -> M_XLR,
      LSUOpType.sc_d -> M_XSC,
      LSUOpType.amoswap_d -> M_XA_SWAP,
      LSUOpType.amoadd_d -> M_XA_ADD,
      LSUOpType.amoxor_d -> M_XA_XOR,
      LSUOpType.amoand_d -> M_XA_AND,
      LSUOpType.amoor_d -> M_XA_OR,
      LSUOpType.amomin_d -> M_XA_MIN,
      LSUOpType.amomax_d -> M_XA_MAX,
      LSUOpType.amominu_d -> M_XA_MINU,
      LSUOpType.amomaxu_d -> M_XA_MAXU
    ))

    io.dcache.req.bits.addr := paddr
    io.dcache.req.bits.data := genWdata(in.src2, in.uop.ctrl.fuOpType(1,0))
    // TODO: atomics do need mask: fix mask
    io.dcache.req.bits.mask := genWmask(paddr, in.uop.ctrl.fuOpType(1,0))
    io.dcache.req.bits.meta.id := DontCare
    io.dcache.req.bits.meta.paddr := paddr
    io.dcache.req.bits.meta.tlb_miss := false.B
    io.dcache.req.bits.meta.replay := false.B

    when(io.dcache.req.fire()){
      state := s_cache_resp
    }
  }

  // s_cache_resp: capture the dcache response, align and sign-extend data
  when (state === s_cache_resp) {
    io.dcache.resp.ready := true.B
    when(io.dcache.resp.fire()) {
      // meta.id is reused by the dcache to report LR/SC status back here
      is_lrsc_valid := io.dcache.resp.bits.meta.id
      val rdata = io.dcache.resp.bits.data
      // shift the addressed bytes down to bit 0 using paddr's low 3 bits
      val rdataSel = LookupTree(paddr(2, 0), List(
        "b000".U -> rdata(63, 0),
        "b001".U -> rdata(63, 8),
        "b010".U -> rdata(63, 16),
        "b011".U -> rdata(63, 24),
        "b100".U -> rdata(63, 32),
        "b101".U -> rdata(63, 40),
        "b110".U -> rdata(63, 48),
        "b111".U -> rdata(63, 56)
      ))

      // _w ops sign-extend the low 32 bits to XLEN (per RV64 AMO semantics);
      // SC writes back the raw success/failure code unshifted
      resp_data := LookupTree(in.uop.ctrl.fuOpType, List(
        LSUOpType.lr_w -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.sc_w -> rdata,
        LSUOpType.amoswap_w -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.amoadd_w -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.amoxor_w -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.amoand_w -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.amoor_w -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.amomin_w -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.amomax_w -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.amominu_w -> SignExt(rdataSel(31, 0), XLEN),
        LSUOpType.amomaxu_w -> SignExt(rdataSel(31, 0), XLEN),

        LSUOpType.lr_d -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.sc_d -> rdata,
        LSUOpType.amoswap_d -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.amoadd_d -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.amoxor_d -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.amoand_d -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.amoor_d -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.amomin_d -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.amomax_d -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.amominu_d -> SignExt(rdataSel(63, 0), XLEN),
        LSUOpType.amomaxu_d -> SignExt(rdataSel(63, 0), XLEN)
      ))

      state := s_finish
    }
  }

  // s_finish: hold the writeback valid until the backend accepts it
  when (state === s_finish) {
    io.out.valid := true.B
    io.out.bits.uop := in.uop
    io.out.bits.uop.diffTestDebugLrScValid := is_lrsc_valid
    io.out.bits.data := resp_data
    io.out.bits.redirectValid := false.B
    io.out.bits.redirect := DontCare
    io.out.bits.brUpdate := DontCare
    io.out.bits.debug.isMMIO := AddressSpace.isMMIO(paddr)
    when (io.out.fire()) {
      XSDebug("atomics writeback: pc %x data %x\n", io.out.bits.uop.cf.pc, io.dcache.resp.bits.data)
      state := s_invalid
    }
  }

  // A redirect only clears the xtval override here; the FSM itself is not
  // flushed mid-flight (an in-progress atomic runs to completion).
  // NOTE(review): presumably in-flight atomics are never on a mispredicted
  // path by the time they issue — confirm against the issue logic.
  when(io.redirect.valid){
    atom_override_xtval := false.B
  }
}