package xiangshan.mem

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.backend.decode.ImmUnion
import xiangshan.cache._

class LoadToLsqIO(implicit p: Parameters) extends XSBundle {
  val loadIn = ValidIO(new LsPipelineBundle)
  val ldout = Flipped(DecoupledIO(new ExuOutput))
  val loadDataForwarded = Output(Bool())
  val needReplayFromRS = Output(Bool())
  val forward = new PipeLoadForwardQueryIO
}

// Load Pipeline Stage 0
// Generate addr, use addr to query DCache and DTLB
class LoadUnit_S0(implicit p: Parameters) extends XSModule {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new ExuInput))
    val out = Decoupled(new LsPipelineBundle)
    val dtlbReq = DecoupledIO(new TlbReq)
    val dcacheReq = DecoupledIO(new DCacheWordReq)
    val rsIdx = Input(UInt(log2Up(IssQueSize).W))
    val isFirstIssue = Input(Bool())
  })

  val s0_uop = io.in.bits.uop
  // val s0_vaddr = io.in.bits.src(0) + SignExt(s0_uop.ctrl.imm(11,0), VAddrBits)
  // val s0_mask = genWmask(s0_vaddr, s0_uop.ctrl.fuOpType(1,0))
  // To shorten the critical path, the full-width add above is split: the low 12 bits are
  // added first, and the upper bits of the vaddr are then selected (base, base + 1, or
  // base - 1) from the carry-out of the low add and the sign of the immediate.
  val imm12 = WireInit(s0_uop.ctrl.imm(11,0))
  val s0_vaddr_lo = io.in.bits.src(0)(11,0) + Cat(0.U(1.W), imm12)
  val s0_vaddr_hi = Mux(s0_vaddr_lo(12),
    Mux(imm12(11), io.in.bits.src(0)(VAddrBits-1, 12), io.in.bits.src(0)(VAddrBits-1, 12) + 1.U),
    Mux(imm12(11), io.in.bits.src(0)(VAddrBits-1, 12) + SignExt(1.U, VAddrBits-12), io.in.bits.src(0)(VAddrBits-1, 12)),
  )
  val s0_vaddr = Cat(s0_vaddr_hi, s0_vaddr_lo(11,0))
  val s0_mask = genWmask(s0_vaddr_lo, s0_uop.ctrl.fuOpType(1,0))

  // query DTLB
  io.dtlbReq.valid := io.in.valid
  io.dtlbReq.bits.vaddr := s0_vaddr
  io.dtlbReq.bits.cmd := TlbCmd.read
  io.dtlbReq.bits.roqIdx := s0_uop.roqIdx
  io.dtlbReq.bits.debug.pc := s0_uop.cf.pc
  io.dtlbReq.bits.debug.isFirstIssue := io.isFirstIssue

  // query DCache
  io.dcacheReq.valid := io.in.valid
  io.dcacheReq.bits.cmd := MemoryOpConstants.M_XRD
  io.dcacheReq.bits.addr := s0_vaddr
  io.dcacheReq.bits.mask := s0_mask
  io.dcacheReq.bits.data := DontCare

  // TODO: update cache meta
  io.dcacheReq.bits.id := DontCare

  val addrAligned = LookupTree(s0_uop.ctrl.fuOpType(1, 0), List(
    "b00".U -> true.B,                   // b
    "b01".U -> (s0_vaddr(0)    === 0.U), // h
    "b10".U -> (s0_vaddr(1, 0) === 0.U), // w
    "b11".U -> (s0_vaddr(2, 0) === 0.U)  // d
  ))

  io.out.valid := io.in.valid && io.dcacheReq.ready

  io.out.bits := DontCare
  io.out.bits.vaddr := s0_vaddr
  io.out.bits.mask := s0_mask
  io.out.bits.uop := s0_uop
  io.out.bits.uop.cf.exceptionVec(loadAddrMisaligned) := !addrAligned
  io.out.bits.rsIdx := io.rsIdx

  io.in.ready := !io.in.valid || (io.out.ready && io.dcacheReq.ready)

  XSDebug(io.dcacheReq.fire(),
    p"[DCACHE LOAD REQ] pc ${Hexadecimal(s0_uop.cf.pc)}, vaddr ${Hexadecimal(s0_vaddr)}\n"
  )
  XSPerfAccumulate("in", io.in.valid)
  XSPerfAccumulate("stall_out", io.out.valid && !io.out.ready && io.dcacheReq.ready)
  XSPerfAccumulate("stall_dcache", io.out.valid && io.out.ready && !io.dcacheReq.ready)
  XSPerfAccumulate("addr_spec_success", io.out.fire() && s0_vaddr(VAddrBits-1, 12) === io.in.bits.src(0)(VAddrBits-1, 12))
  XSPerfAccumulate("addr_spec_failed", io.out.fire() && s0_vaddr(VAddrBits-1, 12) =/= io.in.bits.src(0)(VAddrBits-1, 12))
  XSPerfAccumulate("addr_spec_success_once", io.out.fire() && s0_vaddr(VAddrBits-1, 12) === io.in.bits.src(0)(VAddrBits-1, 12) && io.isFirstIssue)
  XSPerfAccumulate("addr_spec_failed_once", io.out.fire() && s0_vaddr(VAddrBits-1, 12) =/= io.in.bits.src(0)(VAddrBits-1, 12) && io.isFirstIssue)
}
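
// The DCache request is issued with the virtual address in s0; s1 then provides the
// translated physical address (io.dcachePAddr) and may kill the in-flight access
// (io.dcacheKill) on a TLB miss, an exception, or an MMIO access. In parallel, s1 sends
// store-to-load forwarding queries to the sbuffer and the load/store queue, whose
// results are merged in s2.
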
// Load Pipeline Stage 1
// TLB resp (send paddr to dcache)
class LoadUnit_S1(implicit p: Parameters) extends XSModule {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new LsPipelineBundle))
    val out = Decoupled(new LsPipelineBundle)
    val dtlbResp = Flipped(DecoupledIO(new TlbResp))
    val dcachePAddr = Output(UInt(PAddrBits.W))
    val dcacheKill = Output(Bool())
    val sbuffer = new LoadForwardQueryIO
    val lsq = new PipeLoadForwardQueryIO
  })

  val s1_uop = io.in.bits.uop
  val s1_paddr = io.dtlbResp.bits.paddr
  val s1_exception = selectLoad(io.out.bits.uop.cf.exceptionVec, false).asUInt.orR
  val s1_tlb_miss = io.dtlbResp.bits.miss
  val s1_mmio = !s1_tlb_miss && io.dtlbResp.bits.mmio
  val s1_mask = io.in.bits.mask

  io.out.bits := io.in.bits // forwardXX fields will be updated in s1

  io.dtlbResp.ready := true.B

  // TODO: PMA check
  io.dcachePAddr := s1_paddr
  io.dcacheKill := s1_tlb_miss || s1_exception || s1_mmio

  // load forward query datapath
  io.sbuffer.valid := io.in.valid
  io.sbuffer.paddr := s1_paddr
  io.sbuffer.uop := s1_uop
  io.sbuffer.sqIdx := s1_uop.sqIdx
  io.sbuffer.mask := s1_mask
  io.sbuffer.pc := s1_uop.cf.pc // FIXME: remove it

  io.lsq.valid := io.in.valid
  io.lsq.paddr := s1_paddr
  io.lsq.uop := s1_uop
  io.lsq.sqIdx := s1_uop.sqIdx
  io.lsq.sqIdxMask := DontCare // will be overwritten by the sqIdxMask pre-generated in s0
  io.lsq.mask := s1_mask
  io.lsq.pc := s1_uop.cf.pc // FIXME: remove it

  io.out.valid := io.in.valid // && !s1_tlb_miss
  io.out.bits.paddr := s1_paddr
  io.out.bits.mmio := s1_mmio && !s1_exception
  io.out.bits.tlbMiss := s1_tlb_miss
  io.out.bits.uop.cf.exceptionVec(loadPageFault) := io.dtlbResp.bits.excp.pf.ld
  io.out.bits.uop.cf.exceptionVec(loadAccessFault) := io.dtlbResp.bits.excp.af.ld
  io.out.bits.ptwBack := io.dtlbResp.bits.ptwBack
  io.out.bits.rsIdx := io.in.bits.rsIdx

  io.in.ready := !io.in.valid || io.out.ready

  XSPerfAccumulate("in", io.in.valid)
  XSPerfAccumulate("tlb_miss", io.in.valid && s1_tlb_miss)
  XSPerfAccumulate("stall_out", io.out.valid && !io.out.ready)
}
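
// s2 merges data from three sources: the DCache response, bytes forwarded from the
// load/store queue, and bytes forwarded from the sbuffer. Forwarded bytes from the lsq
// take priority over the sbuffer, and any forwarded byte takes priority over the DCache
// data. s2 also reports hit/replay information back to the reservation station.
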
// Load Pipeline Stage 2
// DCache resp
class LoadUnit_S2(implicit p: Parameters) extends XSModule with HasLoadHelper {
  val io = IO(new Bundle() {
    val in = Flipped(Decoupled(new LsPipelineBundle))
    val out = Decoupled(new LsPipelineBundle)
    val rsFeedback = ValidIO(new RSFeedback)
    val dcacheResp = Flipped(DecoupledIO(new DCacheWordResp))
    val lsq = new LoadForwardQueryIO
    val sbuffer = new LoadForwardQueryIO
    val dataForwarded = Output(Bool())
    val needReplayFromRS = Output(Bool())
  })

  val s2_uop = io.in.bits.uop
  val s2_mask = io.in.bits.mask
  val s2_paddr = io.in.bits.paddr
  val s2_tlb_miss = io.in.bits.tlbMiss
  val s2_data_invalid = io.lsq.dataInvalid
  val s2_exception = selectLoad(io.in.bits.uop.cf.exceptionVec, false).asUInt.orR
  val s2_mmio = io.in.bits.mmio && !s2_exception
  val s2_cache_miss = io.dcacheResp.bits.miss
  val s2_cache_replay = io.dcacheResp.bits.replay

  io.dcacheResp.ready := true.B
  val dcacheShouldResp = !(s2_tlb_miss || s2_exception || s2_mmio)
  assert(!(io.in.valid && dcacheShouldResp && !io.dcacheResp.valid), "DCache response got lost")

  // feedback tlb miss / dcache replay / forward-data-invalid result to RS
  io.rsFeedback.valid := io.in.valid
  io.rsFeedback.bits.hit := !s2_tlb_miss && (!s2_cache_replay || s2_mmio || s2_exception) && !s2_data_invalid
  io.rsFeedback.bits.rsIdx := io.in.bits.rsIdx
  io.rsFeedback.bits.flushState := io.in.bits.ptwBack
  io.rsFeedback.bits.sourceType := Mux(s2_tlb_miss, RSFeedbackType.tlbMiss,
    Mux(io.lsq.dataInvalid,
      RSFeedbackType.dataInvalid,
      RSFeedbackType.mshrFull
    )
  )

  // s2_cache_replay is quite slow to generate, so it is sent to the LQ separately
  io.needReplayFromRS := s2_cache_replay

  // merge forward result
  // lsq has higher priority than sbuffer
  val forwardMask = Wire(Vec(8, Bool()))
  val forwardData = Wire(Vec(8, UInt(8.W)))

  val fullForward = (~forwardMask.asUInt & s2_mask) === 0.U && !io.lsq.dataInvalid
  io.lsq := DontCare
  io.sbuffer := DontCare

  // generate XLEN/8 Muxes
  for (i <- 0 until XLEN / 8) {
    forwardMask(i) := io.lsq.forwardMask(i) || io.sbuffer.forwardMask(i)
    forwardData(i) := Mux(io.lsq.forwardMask(i), io.lsq.forwardData(i), io.sbuffer.forwardData(i))
  }

  XSDebug(io.out.fire(), "[FWD LOAD RESP] pc %x fwd %x(%b) + %x(%b)\n",
    s2_uop.cf.pc,
    io.lsq.forwardData.asUInt, io.lsq.forwardMask.asUInt,
    io.in.bits.forwardData.asUInt, io.in.bits.forwardMask.asUInt
  )

  // data merge
  val rdataVec = VecInit((0 until XLEN / 8).map(j =>
    Mux(forwardMask(j), forwardData(j), io.dcacheResp.bits.data(8*(j+1)-1, 8*j))))
  val rdata = rdataVec.asUInt
  val rdataSel = LookupTree(s2_paddr(2, 0), List(
    "b000".U -> rdata(63,  0),
    "b001".U -> rdata(63,  8),
    "b010".U -> rdata(63, 16),
    "b011".U -> rdata(63, 24),
    "b100".U -> rdata(63, 32),
    "b101".U -> rdata(63, 40),
    "b110".U -> rdata(63, 48),
    "b111".U -> rdata(63, 56)
  ))
  val rdataPartialLoad = rdataHelper(s2_uop, rdataSel)
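
  // Loads that missed the TLB or whose forwarded store data is not yet available are not
  // passed on here; they are replayed from the reservation station (see io.rsFeedback above).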
  io.out.valid := io.in.valid && !s2_tlb_miss && !s2_data_invalid
  // Insts will be canceled in the store queue / lsq,
  // so we do not need to care about flush in the load / store unit's out.valid
  io.out.bits := io.in.bits
  io.out.bits.data := rdataPartialLoad
  // when an exception occurs, mark it as not-miss and let it write back to the roq (via the int port)
  io.out.bits.miss := s2_cache_miss && !s2_exception
  io.out.bits.uop.ctrl.fpWen := io.in.bits.uop.ctrl.fpWen && !s2_exception
  io.out.bits.mmio := s2_mmio

  // For timing reasons, we can not let
  //   io.out.bits.miss := s2_cache_miss && !s2_exception && !fullForward
  // We use io.dataForwarded instead. It means the forward logic has prepared all the data
  // needed and the dcache query is no longer necessary.
  // Such instructions will be written back from the load queue.
  io.dataForwarded := s2_cache_miss && fullForward && !s2_exception
  // io.out.bits.forwardX will be sent to the lq
  io.out.bits.forwardMask := forwardMask
  // data retrieved from the dcache is also included in io.out.bits.forwardData
  io.out.bits.forwardData := rdataVec

  io.in.ready := io.out.ready || !io.in.valid

  XSDebug(io.out.fire(), "[DCACHE LOAD RESP] pc %x rdata %x <- D$ %x + fwd %x(%b)\n",
    s2_uop.cf.pc, rdataPartialLoad, io.dcacheResp.bits.data,
    forwardData.asUInt, forwardMask.asUInt
  )

  XSPerfAccumulate("in", io.in.valid)
  XSPerfAccumulate("dcache_miss", io.in.valid && s2_cache_miss)
  XSPerfAccumulate("full_forward", io.in.valid && fullForward)
  XSPerfAccumulate("dcache_miss_full_forward", io.in.valid && s2_cache_miss && fullForward)
  XSPerfAccumulate("replay", io.rsFeedback.valid && !io.rsFeedback.bits.hit)
  XSPerfAccumulate("replay_tlb_miss", io.rsFeedback.valid && !io.rsFeedback.bits.hit && s2_tlb_miss)
  XSPerfAccumulate("replay_cache", io.rsFeedback.valid && !io.rsFeedback.bits.hit && !s2_tlb_miss && s2_cache_replay)
  XSPerfAccumulate("stall_out", io.out.valid && !io.out.ready)
}

class LoadUnit(implicit p: Parameters) extends XSModule with HasLoadHelper {
  val io = IO(new Bundle() {
    val ldin = Flipped(Decoupled(new ExuInput))
    val ldout = Decoupled(new ExuOutput)
    val redirect = Flipped(ValidIO(new Redirect))
    val flush = Input(Bool())
    val rsFeedback = ValidIO(new RSFeedback)
    val rsIdx = Input(UInt(log2Up(IssQueSize).W))
    val isFirstIssue = Input(Bool())
    val dcache = new DCacheLoadIO
    val dtlb = new TlbRequestIO()
    val sbuffer = new LoadForwardQueryIO
    val lsq = new LoadToLsqIO
    val fastUop = ValidIO(new MicroOp) // early wakeup signal generated in load_s1
  })

  val load_s0 = Module(new LoadUnit_S0)
  val load_s1 = Module(new LoadUnit_S1)
  val load_s2 = Module(new LoadUnit_S2)

  load_s0.io.in <> io.ldin
  load_s0.io.dtlbReq <> io.dtlb.req
  load_s0.io.dcacheReq <> io.dcache.req
  load_s0.io.rsIdx := io.rsIdx
  load_s0.io.isFirstIssue := io.isFirstIssue

  PipelineConnect(load_s0.io.out, load_s1.io.in, true.B, load_s0.io.out.bits.uop.roqIdx.needFlush(io.redirect, io.flush))

  load_s1.io.dtlbResp <> io.dtlb.resp
  io.dcache.s1_paddr <> load_s1.io.dcachePAddr
  io.dcache.s1_kill <> load_s1.io.dcacheKill
  load_s1.io.sbuffer <> io.sbuffer
  load_s1.io.lsq <> io.lsq.forward

  PipelineConnect(load_s1.io.out, load_s2.io.in, true.B, load_s1.io.out.bits.uop.roqIdx.needFlush(io.redirect, io.flush))

  load_s2.io.dcacheResp <> io.dcache.resp
  load_s2.io.lsq.forwardData <> io.lsq.forward.forwardData
  load_s2.io.lsq.forwardMask <> io.lsq.forward.forwardMask
  load_s2.io.lsq.dataInvalid <> io.lsq.forward.dataInvalid
  load_s2.io.sbuffer.forwardData <> io.sbuffer.forwardData
  load_s2.io.sbuffer.forwardMask <> io.sbuffer.forwardMask
  load_s2.io.sbuffer.dataInvalid <> io.sbuffer.dataInvalid // always false
  load_s2.io.dataForwarded <> io.lsq.loadDataForwarded
  io.rsFeedback.bits := RegNext(load_s2.io.rsFeedback.bits)
  io.rsFeedback.valid := RegNext(load_s2.io.rsFeedback.valid && !load_s2.io.out.bits.uop.roqIdx.needFlush(io.redirect, io.flush))
  io.lsq.needReplayFromRS := load_s2.io.needReplayFromRS
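
  // The RS feedback above is registered, so it reaches the reservation station one cycle
  // after s2, and it is suppressed when the load is flushed by a redirect: the RS never
  // sees feedback for a squashed load.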

  // pre-calculate the sqIdx mask in s0, then send it to the lsq in s1 for forwarding
  val sqIdxMaskReg = RegNext(UIntToMask(load_s0.io.in.bits.uop.sqIdx.value, StoreQueueSize))
  io.lsq.forward.sqIdxMask := sqIdxMaskReg

  // // use s2_hit_way to select data received in s1
  // load_s2.io.dcacheResp.bits.data := Mux1H(RegNext(io.dcache.s1_hit_way), RegNext(io.dcache.s1_data))
  // assert(load_s2.io.dcacheResp.bits.data === io.dcache.resp.bits.data)

  io.fastUop.valid := io.dcache.s1_hit_way.orR && !io.dcache.s1_disable_fast_wakeup && load_s1.io.in.valid &&
    !load_s1.io.dcacheKill && !io.lsq.forward.dataInvalidFast
  io.fastUop.bits := load_s1.io.out.bits.uop

  XSDebug(load_s0.io.out.valid,
    p"S0: pc ${Hexadecimal(load_s0.io.out.bits.uop.cf.pc)}, lId ${Hexadecimal(load_s0.io.out.bits.uop.lqIdx.asUInt)}, " +
    p"vaddr ${Hexadecimal(load_s0.io.out.bits.vaddr)}, mask ${Hexadecimal(load_s0.io.out.bits.mask)}\n")
  XSDebug(load_s1.io.out.valid,
    p"S1: pc ${Hexadecimal(load_s1.io.out.bits.uop.cf.pc)}, lId ${Hexadecimal(load_s1.io.out.bits.uop.lqIdx.asUInt)}, tlb_miss ${io.dtlb.resp.bits.miss}, " +
    p"paddr ${Hexadecimal(load_s1.io.out.bits.paddr)}, mmio ${load_s1.io.out.bits.mmio}\n")

  // writeback to LSQ
  // The current dcache uses MSHRs
  // Load queue will be updated at s2 for both hit and miss, int and fp loads
  io.lsq.loadIn.valid := load_s2.io.out.valid
  io.lsq.loadIn.bits := load_s2.io.out.bits

  // write to rob and writeback bus
  val s2_wb_valid = load_s2.io.out.valid && !load_s2.io.out.bits.miss && !load_s2.io.out.bits.mmio

  // Int loads that hit are written back at s2
  val hitLoadOut = Wire(Valid(new ExuOutput))
  hitLoadOut.valid := s2_wb_valid
  hitLoadOut.bits.uop := load_s2.io.out.bits.uop
  hitLoadOut.bits.data := load_s2.io.out.bits.data
  hitLoadOut.bits.redirectValid := false.B
  hitLoadOut.bits.redirect := DontCare
  hitLoadOut.bits.debug.isMMIO := load_s2.io.out.bits.mmio
  hitLoadOut.bits.debug.isPerfCnt := false.B
  hitLoadOut.bits.debug.paddr := load_s2.io.out.bits.paddr
  hitLoadOut.bits.fflags := DontCare

  load_s2.io.out.ready := true.B

  io.ldout.bits := Mux(hitLoadOut.valid, hitLoadOut.bits, io.lsq.ldout.bits)
  io.ldout.valid := hitLoadOut.valid || io.lsq.ldout.valid

  io.lsq.ldout.ready := !hitLoadOut.valid

  when(io.ldout.fire()) {
    XSDebug("ldout %x\n", io.ldout.bits.uop.cf.pc)
  }
}