package xiangshan.mem

import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.backend.decode.ImmUnion
import xiangshan.cache._
// import xiangshan.cache.{DCacheWordIO, TlbRequestIO, TlbCmd, MemoryOpConstants, TlbReq, DCacheLoadReq, DCacheWordResp}
import xiangshan.backend.LSUOpType

class LoadToLsqIO extends XSBundle {
  val loadIn = ValidIO(new LsPipelineBundle)
  val ldout = Flipped(DecoupledIO(new ExuOutput))
  val loadDataForwarded = Output(Bool())
  val needReplayFromRS = Output(Bool())
  val forward = new MaskedLoadForwardQueryIO
}

// Load Pipeline Stage 0
// Generate addr, use addr to query DCache and DTLB
class LoadUnit_S0 extends XSModule {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new ExuInput))
    val out = Decoupled(new LsPipelineBundle)
    val dtlbReq = DecoupledIO(new TlbReq)
    val dcacheReq = DecoupledIO(new DCacheWordReq)
    val rsIdx = Input(UInt(log2Up(IssQueSize).W))
  })

  val s0_uop = io.in.bits.uop
  // val s0_vaddr = io.in.bits.src1 + SignExt(s0_uop.ctrl.imm(11,0), VAddrBits)
  // val s0_mask = genWmask(s0_vaddr, s0_uop.ctrl.fuOpType(1,0))
  val imm12 = WireInit(s0_uop.ctrl.imm(11,0))
  val s0_vaddr_lo = io.in.bits.src1(11,0) + Cat(0.U(1.W), imm12)
  val s0_vaddr_hi = Mux(s0_vaddr_lo(12),
    Mux(imm12(11), io.in.bits.src1(VAddrBits-1, 12), io.in.bits.src1(VAddrBits-1, 12)+1.U),
    Mux(imm12(11), io.in.bits.src1(VAddrBits-1, 12)+SignExt(1.U, VAddrBits-12), io.in.bits.src1(VAddrBits-1, 12)),
  )
  val s0_vaddr = Cat(s0_vaddr_hi, s0_vaddr_lo(11,0))
  val s0_mask = genWmask(s0_vaddr_lo, s0_uop.ctrl.fuOpType(1,0))

  // query DTLB
  io.dtlbReq.valid := io.in.valid
  io.dtlbReq.bits.vaddr := s0_vaddr
  io.dtlbReq.bits.cmd := TlbCmd.read
  io.dtlbReq.bits.roqIdx := s0_uop.roqIdx
  io.dtlbReq.bits.debug.pc := s0_uop.cf.pc

  // query DCache
  io.dcacheReq.valid := io.in.valid
  io.dcacheReq.bits.cmd := MemoryOpConstants.M_XRD
  io.dcacheReq.bits.addr := s0_vaddr
  io.dcacheReq.bits.mask := s0_mask
  io.dcacheReq.bits.data := DontCare

  // TODO: update cache meta
  io.dcacheReq.bits.id := DontCare

  val addrAligned = LookupTree(s0_uop.ctrl.fuOpType(1, 0), List(
    "b00".U -> true.B,                   //b
    "b01".U -> (s0_vaddr(0) === 0.U),    //h
    "b10".U -> (s0_vaddr(1, 0) === 0.U), //w
    "b11".U -> (s0_vaddr(2, 0) === 0.U)  //d
  ))

  io.out.valid := io.in.valid && io.dcacheReq.ready

  io.out.bits := DontCare
  io.out.bits.vaddr := s0_vaddr
  io.out.bits.mask := s0_mask
  io.out.bits.uop := s0_uop
  io.out.bits.uop.cf.exceptionVec(loadAddrMisaligned) := !addrAligned
  io.out.bits.rsIdx := io.rsIdx

  io.in.ready := !io.in.valid || (io.out.ready && io.dcacheReq.ready)

  XSDebug(io.dcacheReq.fire(),
    p"[DCACHE LOAD REQ] pc ${Hexadecimal(s0_uop.cf.pc)}, vaddr ${Hexadecimal(s0_vaddr)}\n"
  )
  XSPerf("in", io.in.valid)
  XSPerf("stall_out", io.out.valid && !io.out.ready && io.dcacheReq.ready)
  XSPerf("stall_dcache", io.out.valid && io.out.ready && !io.dcacheReq.ready)
}
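
// A minimal software reference sketch (not part of the hardware; the object
// and method names are illustrative only) of the split address add in
// LoadUnit_S0 above. It shows that the carry-select over {low-add carry-out,
// sign of imm12} reproduces src1 + SignExt(imm12, VAddrBits), which lets the
// high bits be selected instead of waiting for a full-width add.
// Worked example: src1 = 0x1ff8, imm12 = 0x010 -> lo = 0xff8 + 0x010 = 0x1008
// (carry out, imm positive), hi = 0x1 + 1 = 0x2, vaddr = 0x2008 = 0x1ff8 + 0x10.
object SplitVaddrAddRefModel {
  def refVaddr(src1: Long, imm12: Int, vaddrBits: Int = 39): Long = {
    val lo = (src1 & 0xfff) + (imm12 & 0xfff) // 13-bit low sum; carry sits in bit 12
    val carry = ((lo >> 12) & 1) == 1
    val immNeg = ((imm12 >> 11) & 1) == 1     // sign bit of the 12-bit immediate
    val hiIn = src1 >> 12
    val hi =
      if (carry) { if (immNeg) hiIn else hiIn + 1 } // the carry cancels a negative imm's -1
      else       { if (immNeg) hiIn - 1 else hiIn } // a negative imm borrows from the high part
    ((hi << 12) | (lo & 0xfff)) & ((1L << vaddrBits) - 1)
  }
}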

// Load Pipeline Stage 1
// TLB resp (send paddr to dcache)
class LoadUnit_S1 extends XSModule {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new LsPipelineBundle))
    val out = Decoupled(new LsPipelineBundle)
    val dtlbResp = Flipped(DecoupledIO(new TlbResp))
    val dcachePAddr = Output(UInt(PAddrBits.W))
    val dcacheKill = Output(Bool())
    val sbuffer = new LoadForwardQueryIO
    val lsq = new MaskedLoadForwardQueryIO
  })

  val s1_uop = io.in.bits.uop
  val s1_paddr = io.dtlbResp.bits.paddr
  val s1_exception = selectLoad(io.out.bits.uop.cf.exceptionVec, false).asUInt.orR
  val s1_tlb_miss = io.dtlbResp.bits.miss
  val s1_mmio = !s1_tlb_miss && io.dtlbResp.bits.mmio
  val s1_mask = io.in.bits.mask

  io.out.bits := io.in.bits // forwardXX fields will be updated in s1

  io.dtlbResp.ready := true.B

  // TODO: PMA check
  io.dcachePAddr := s1_paddr
  io.dcacheKill := s1_tlb_miss || s1_exception || s1_mmio

  // load forward query datapath
  io.sbuffer.valid := io.in.valid
  io.sbuffer.paddr := s1_paddr
  io.sbuffer.uop := s1_uop
  io.sbuffer.sqIdx := s1_uop.sqIdx
  io.sbuffer.mask := s1_mask
  io.sbuffer.pc := s1_uop.cf.pc // FIXME: remove it

  io.lsq.valid := io.in.valid
  io.lsq.paddr := s1_paddr
  io.lsq.uop := s1_uop
  io.lsq.sqIdx := s1_uop.sqIdx
  io.lsq.sqIdxMask := DontCare // will be overwritten by the sqIdxMask pre-generated in s0
  io.lsq.mask := s1_mask
  io.lsq.pc := s1_uop.cf.pc // FIXME: remove it

  io.out.valid := io.in.valid // && !s1_tlb_miss
  io.out.bits.paddr := s1_paddr
  io.out.bits.mmio := s1_mmio && !s1_exception
  io.out.bits.tlbMiss := s1_tlb_miss
  io.out.bits.uop.cf.exceptionVec(loadPageFault) := io.dtlbResp.bits.excp.pf.ld
  io.out.bits.uop.cf.exceptionVec(loadAccessFault) := io.dtlbResp.bits.excp.af.ld
  io.out.bits.ptwBack := io.dtlbResp.bits.ptwBack
  io.out.bits.rsIdx := io.in.bits.rsIdx

  io.in.ready := !io.in.valid || io.out.ready

  XSPerf("in", io.in.valid)
  XSPerf("tlb_miss", io.in.valid && s1_tlb_miss)
  XSPerf("stall_out", io.out.valid && !io.out.ready)
}
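
// A minimal software sketch (not part of the hardware; names are illustrative)
// of the byte-wise forward merge performed in LoadUnit_S2 below: for each of
// the XLEN/8 bytes, store-queue (lsq) data has priority over sbuffer data,
// and any byte not forwarded at all falls through to the dcache response.
object ForwardMergeRefModel {
  def merge(
    lsqMask: Seq[Boolean], lsqData: Seq[Int],
    sbMask: Seq[Boolean], sbData: Seq[Int],
    dcacheData: Seq[Int]
  ): Seq[Int] =
    dcacheData.indices.map { i =>
      if (lsqMask(i)) lsqData(i)  // lsq has higher priority than sbuffer
      else if (sbMask(i)) sbData(i)
      else dcacheData(i)          // no forward for this byte: use the dcache byte
    }
}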

// Load Pipeline Stage 2
// DCache resp
class LoadUnit_S2 extends XSModule with HasLoadHelper {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new LsPipelineBundle))
    val out = Decoupled(new LsPipelineBundle)
    val tlbFeedback = ValidIO(new TlbFeedback)
    val dcacheResp = Flipped(DecoupledIO(new DCacheWordResp))
    val lsq = new LoadForwardQueryIO
    val sbuffer = new LoadForwardQueryIO
    val dataForwarded = Output(Bool())
    val needReplayFromRS = Output(Bool())
  })

  val s2_uop = io.in.bits.uop
  val s2_mask = io.in.bits.mask
  val s2_paddr = io.in.bits.paddr
  val s2_tlb_miss = io.in.bits.tlbMiss
  val s2_exception = selectLoad(io.in.bits.uop.cf.exceptionVec, false).asUInt.orR
  val s2_mmio = io.in.bits.mmio && !s2_exception
  val s2_cache_miss = io.dcacheResp.bits.miss
  val s2_cache_replay = io.dcacheResp.bits.replay

  io.dcacheResp.ready := true.B
  val dcacheShouldResp = !(s2_tlb_miss || s2_exception || s2_mmio)
  assert(!(io.in.valid && dcacheShouldResp && !io.dcacheResp.valid), "DCache response got lost")

  // feedback tlb result to RS
  io.tlbFeedback.valid := io.in.valid
  io.tlbFeedback.bits.hit := !s2_tlb_miss && (!s2_cache_replay || s2_mmio || s2_exception)
  io.tlbFeedback.bits.rsIdx := io.in.bits.rsIdx
  io.tlbFeedback.bits.flushState := io.in.bits.ptwBack
  io.needReplayFromRS := s2_cache_replay

  // merge forward results
  // lsq has higher priority than sbuffer
  val forwardMask = Wire(Vec(8, Bool()))
  val forwardData = Wire(Vec(8, UInt(8.W)))

  val fullForward = (~forwardMask.asUInt & s2_mask) === 0.U
  io.lsq := DontCare
  io.sbuffer := DontCare

  // generate XLEN/8 Muxes
  for (i <- 0 until XLEN / 8) {
    forwardMask(i) := io.lsq.forwardMask(i) || io.sbuffer.forwardMask(i)
    forwardData(i) := Mux(io.lsq.forwardMask(i), io.lsq.forwardData(i), io.sbuffer.forwardData(i))
  }

  XSDebug(io.out.fire(), "[FWD LOAD RESP] pc %x fwd %x(%b) + %x(%b)\n",
    s2_uop.cf.pc,
    io.lsq.forwardData.asUInt, io.lsq.forwardMask.asUInt,
    io.in.bits.forwardData.asUInt, io.in.bits.forwardMask.asUInt
  )

  // data merge
  val rdataVec = VecInit((0 until XLEN / 8).map(j =>
    Mux(forwardMask(j), forwardData(j), io.dcacheResp.bits.data(8*(j+1)-1, 8*j))))
  val rdata = rdataVec.asUInt
  val rdataSel = LookupTree(s2_paddr(2, 0), List(
    "b000".U -> rdata(63, 0),
    "b001".U -> rdata(63, 8),
    "b010".U -> rdata(63, 16),
    "b011".U -> rdata(63, 24),
    "b100".U -> rdata(63, 32),
    "b101".U -> rdata(63, 40),
    "b110".U -> rdata(63, 48),
    "b111".U -> rdata(63, 56)
  ))
  val rdataPartialLoad = rdataHelper(s2_uop, rdataSel)

  io.out.valid := io.in.valid && !s2_tlb_miss
  // Insts will be canceled in the store queue / lsq,
  // so we do not need to care about flush in the load / store unit's out.valid
  io.out.bits := io.in.bits
  io.out.bits.data := rdataPartialLoad
  // when an exception occurs, mark it as not-miss and let it write back to the roq (via the int port)
  io.out.bits.miss := s2_cache_miss && !s2_exception
  io.out.bits.uop.ctrl.fpWen := io.in.bits.uop.ctrl.fpWen && !s2_exception
  io.out.bits.mmio := s2_mmio

  // For timing reasons, we can not let
  // io.out.bits.miss := s2_cache_miss && !s2_exception && !fullForward
  // We use io.dataForwarded instead. It means the forward logic has prepared all the
  // data needed, and the dcache query is no longer necessary.
  // Such insts will be written back from the load queue.
  io.dataForwarded := s2_cache_miss && fullForward && !s2_exception
  // io.out.bits.forwardX will be sent to lq
  io.out.bits.forwardMask := forwardMask
  // data retrieved from dcache is also included in io.out.bits.forwardData
  io.out.bits.forwardData := rdataVec

  io.in.ready := io.out.ready || !io.in.valid

  XSDebug(io.out.fire(), "[DCACHE LOAD RESP] pc %x rdata %x <- D$ %x + fwd %x(%b)\n",
    s2_uop.cf.pc, rdataPartialLoad, io.dcacheResp.bits.data,
    forwardData.asUInt, forwardMask.asUInt
  )

  XSPerf("in", io.in.valid)
  XSPerf("dcache_miss", io.in.valid && s2_cache_miss)
  XSPerf("full_forward", io.in.valid && fullForward)
  XSPerf("dcache_miss_full_forward", io.in.valid && s2_cache_miss && fullForward)
  XSPerf("replay", io.tlbFeedback.valid && !io.tlbFeedback.bits.hit)
  XSPerf("replay_tlb_miss", io.tlbFeedback.valid && !io.tlbFeedback.bits.hit && s2_tlb_miss)
  XSPerf("replay_cache", io.tlbFeedback.valid && !io.tlbFeedback.bits.hit && !s2_tlb_miss && s2_cache_replay)
  XSPerf("stall_out", io.out.valid && !io.out.ready)
}
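
// A minimal software sketch (illustrative only) of the rdataSel LookupTree in
// LoadUnit_S2 above: selecting rdata(63, 8*k) for paddr(2,0) = k is just a
// right shift of the merged 64-bit word by the byte offset within the
// doubleword, so a load sees its bytes starting at bit 0 before sign/zero
// extension in rdataHelper.
object RdataSelRefModel {
  def refRdataSel(rdata: Long, paddrLow3: Int): Long =
    rdata >>> (paddrLow3 * 8) // logical shift; the hardware instead takes the bit slice rdata(63, 8*k)
}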

class LoadUnit extends XSModule with HasLoadHelper {
  val io = IO(new Bundle() {
    val ldin = Flipped(Decoupled(new ExuInput))
    val ldout = Decoupled(new ExuOutput)
    val redirect = Flipped(ValidIO(new Redirect))
    val flush = Input(Bool())
    val tlbFeedback = ValidIO(new TlbFeedback)
    val rsIdx = Input(UInt(log2Up(IssQueSize).W))
    val dcache = new DCacheLoadIO
    val dtlb = new TlbRequestIO()
    val sbuffer = new LoadForwardQueryIO
    val lsq = new LoadToLsqIO
  })

  val load_s0 = Module(new LoadUnit_S0)
  val load_s1 = Module(new LoadUnit_S1)
  val load_s2 = Module(new LoadUnit_S2)

  load_s0.io.in <> io.ldin
  load_s0.io.dtlbReq <> io.dtlb.req
  load_s0.io.dcacheReq <> io.dcache.req
  load_s0.io.rsIdx := io.rsIdx

  PipelineConnect(load_s0.io.out, load_s1.io.in, true.B, load_s0.io.out.bits.uop.roqIdx.needFlush(io.redirect, io.flush))

  load_s1.io.dtlbResp <> io.dtlb.resp
  io.dcache.s1_paddr <> load_s1.io.dcachePAddr
  io.dcache.s1_kill <> load_s1.io.dcacheKill
  load_s1.io.sbuffer <> io.sbuffer
  load_s1.io.lsq <> io.lsq.forward

  PipelineConnect(load_s1.io.out, load_s2.io.in, true.B, load_s1.io.out.bits.uop.roqIdx.needFlush(io.redirect, io.flush))

  load_s2.io.dcacheResp <> io.dcache.resp
  load_s2.io.lsq.forwardData <> io.lsq.forward.forwardData
  load_s2.io.lsq.forwardMask <> io.lsq.forward.forwardMask
  load_s2.io.sbuffer.forwardData <> io.sbuffer.forwardData
  load_s2.io.sbuffer.forwardMask <> io.sbuffer.forwardMask
  load_s2.io.dataForwarded <> io.lsq.loadDataForwarded
  io.tlbFeedback.bits := RegNext(load_s2.io.tlbFeedback.bits)
  io.tlbFeedback.valid := RegNext(load_s2.io.tlbFeedback.valid && !load_s2.io.out.bits.uop.roqIdx.needFlush(io.redirect, io.flush))
  io.lsq.needReplayFromRS := load_s2.io.needReplayFromRS

  // pre-calculate the sqIdx mask in s0, then send it to lsq in s1 for forwarding
  val sqIdxMaskReg = RegNext(UIntToMask(load_s0.io.in.bits.uop.sqIdx.value, StoreQueueSize))
  io.lsq.forward.sqIdxMask := sqIdxMaskReg

  XSDebug(load_s0.io.out.valid,
    p"S0: pc ${Hexadecimal(load_s0.io.out.bits.uop.cf.pc)}, lId ${Hexadecimal(load_s0.io.out.bits.uop.lqIdx.asUInt)}, " +
    p"vaddr ${Hexadecimal(load_s0.io.out.bits.vaddr)}, mask ${Hexadecimal(load_s0.io.out.bits.mask)}\n")
  XSDebug(load_s1.io.out.valid,
    p"S1: pc ${Hexadecimal(load_s1.io.out.bits.uop.cf.pc)}, lId ${Hexadecimal(load_s1.io.out.bits.uop.lqIdx.asUInt)}, tlb_miss ${io.dtlb.resp.bits.miss}, " +
    p"paddr ${Hexadecimal(load_s1.io.out.bits.paddr)}, mmio ${load_s1.io.out.bits.mmio}\n")

  // writeback to LSQ
  // The current dcache uses MSHRs.
  // The load queue will be updated at s2 for both hit and miss, int and fp loads.
  io.lsq.loadIn.valid := load_s2.io.out.valid
  io.lsq.loadIn.bits := load_s2.io.out.bits

  // write to rob and writeback bus
  val s2_wb_valid = load_s2.io.out.valid && !load_s2.io.out.bits.miss

  // Int loads that hit are written back at s2
  val hitLoadOut = Wire(Valid(new ExuOutput))
  hitLoadOut.valid := s2_wb_valid
  hitLoadOut.bits.uop := load_s2.io.out.bits.uop
  hitLoadOut.bits.data := load_s2.io.out.bits.data
  hitLoadOut.bits.redirectValid := false.B
  hitLoadOut.bits.redirect := DontCare
  hitLoadOut.bits.debug.isMMIO := load_s2.io.out.bits.mmio
  hitLoadOut.bits.debug.isPerfCnt := false.B
  hitLoadOut.bits.debug.paddr := load_s2.io.out.bits.paddr
  hitLoadOut.bits.fflags := DontCare

  load_s2.io.out.ready := true.B

  // the pipeline (hit) path has priority over the load queue path
  io.ldout.bits := Mux(hitLoadOut.valid, hitLoadOut.bits, io.lsq.ldout.bits)
  io.ldout.valid := hitLoadOut.valid || io.lsq.ldout.valid

  io.lsq.ldout.ready := !hitLoadOut.valid

  when(io.ldout.fire()){
    XSDebug("ldout %x\n", io.ldout.bits.uop.cf.pc)
  }
}
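
// A minimal software sketch (illustrative names) of the ldout arbitration in
// LoadUnit above: a load that hits writes back straight from s2 and blocks the
// load-queue path for that cycle (io.lsq.ldout.ready := !hitLoadOut.valid);
// loads that missed write back later through the load queue.
object LdoutArbRefModel {
  def select[T](hit: Option[T], lsq: Option[T]): Option[T] =
    hit.orElse(lsq) // pipeline (hit) result wins; otherwise take the load-queue result
}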