xref: /XiangShan/src/main/scala/xiangshan/mem/pipeline/LoadUnit.scala (revision b81fc38e8510d919ccf70419de6ea10b06295596)
package xiangshan.mem

import chisel3._
import chisel3.util._
import utils._
import xiangshan._
import xiangshan.cache.{DCacheWordIO, TlbRequestIO, TlbCmd, MemoryOpConstants}
import xiangshan.backend.LSUOpType
import xiangshan.backend.fu.fpu.boxF32ToF64

class LoadToLsroqIO extends XSBundle {
  val loadIn = ValidIO(new LsPipelineBundle)
  val ldout = Flipped(DecoupledIO(new ExuOutput))
  val forward = new LoadForwardQueryIO
}

class LoadUnit extends XSModule {
  val io = IO(new Bundle() {
    val ldin = Flipped(Decoupled(new ExuInput))
    val ldout = Decoupled(new ExuOutput)
    val redirect = Flipped(ValidIO(new Redirect))
    val tlbFeedback = ValidIO(new TlbFeedback)
    val dcache = new DCacheWordIO
    val dtlb = new TlbRequestIO()
    val sbuffer = new LoadForwardQueryIO
    val lsroq = new LoadToLsroqIO
  })
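
  // Port roles, as inferred from their use below:
  //   ldin / ldout  - load uop in from issue, result out to the CDB
  //   redirect      - flush (mispredict / exception / replay)
  //   tlbFeedback   - TLB hit/miss feedback so TLB-missed loads can be replayed
  //   dcache, dtlb  - data cache and data TLB request/response channels
  //   sbuffer       - store-to-load forwarding query into the store buffer
  //   lsroq         - forwarding query plus load writeback into the lsroq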

  when(io.ldin.valid){
    XSDebug("load enpipe %x iw %x fw %x\n", io.ldin.bits.uop.cf.pc, io.ldin.bits.uop.ctrl.rfWen, io.ldin.bits.uop.ctrl.fpWen)
  }

  //-------------------------------------------------------
  // Load Pipeline
  //-------------------------------------------------------

  val l2_out = Wire(Decoupled(new LsPipelineBundle))
  val l4_out = Wire(Decoupled(new LsPipelineBundle))
  val l5_in  = Wire(Flipped(Decoupled(new LsPipelineBundle)))
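
  // Naming note: l2_out/l4_out/l5_in mark this unit's stage boundaries;
  // stage 3 exists as the registered l3_* state below. Stages 0/1 are
  // presumably issue and operand read in the backend, so only stages 2-5
  // are implemented here.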

  //-------------------------------------------------------
  // LD Pipeline Stage 2
  // Generate addr, use addr to query DCache Tag and DTLB
  //-------------------------------------------------------

  val l2_dtlb_hit  = Wire(Bool())
  val l2_dtlb_miss = Wire(Bool())
  val l2_dcache = Wire(Bool())
  val l2_mmio = Wire(Bool())
  val isMMIOReq = Wire(Bool())

  // send req to dtlb
  io.dtlb.req.valid := l2_out.valid
  io.dtlb.req.bits.vaddr := l2_out.bits.vaddr
  io.dtlb.req.bits.cmd := TlbCmd.read
  io.dtlb.req.bits.roqIdx := l2_out.bits.uop.roqIdx
  io.dtlb.req.bits.debug.pc := l2_out.bits.uop.cf.pc
  io.dtlb.req.bits.debug.lsroqIdx := l2_out.bits.uop.lsroqIdx // FIXME: need update

  l2_dtlb_hit  := io.dtlb.resp.valid && !io.dtlb.resp.bits.miss
  l2_dtlb_miss := io.dtlb.resp.valid && io.dtlb.resp.bits.miss
  isMMIOReq := AddressSpace.isMMIO(io.dtlb.resp.bits.paddr)
  l2_dcache := l2_dtlb_hit && !isMMIOReq
  l2_mmio   := l2_dtlb_hit && isMMIOReq
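
  // A translated load is steered one of two ways: cacheable addresses go to
  // DCache; MMIO addresses (per AddressSpace.isMMIO, e.g. a device register)
  // skip DCache and are executed later, in program order, by the lsroq.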

  // l2_out is used to generate dcache req
  l2_out.bits := DontCare
  l2_out.bits.vaddr := io.ldin.bits.src1 + io.ldin.bits.uop.ctrl.imm
  l2_out.bits.paddr := io.dtlb.resp.bits.paddr
  l2_out.bits.mask  := genWmask(l2_out.bits.vaddr, io.ldin.bits.uop.ctrl.fuOpType(1,0))
  l2_out.bits.uop   := io.ldin.bits.uop
  l2_out.bits.miss  := false.B
  l2_out.bits.mmio  := l2_mmio
  l2_out.valid := io.ldin.valid && !io.ldin.bits.uop.roqIdx.needFlush(io.redirect)
  // when we are sure it's an MMIO req, we do not need to wait for cache ready
  l2_out.ready := (l2_dcache && io.dcache.req.ready) || l2_mmio || l2_dtlb_miss
  io.ldin.ready := l2_out.ready
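
  // Note that a TLB-missed load is also accepted here: it is dropped in
  // stage 3 and replayed via tlbFeedback instead of being sent to DCache.
  // Mask example (assuming genWmask from utils yields a byte mask shifted by
  // the in-doubleword offset): a word load (fuOpType(1,0) = "b10") at a
  // vaddr ending in 0x4 yields mask "b11110000".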

  // exception check
  val addrAligned = LookupTree(io.ldin.bits.uop.ctrl.fuOpType(1,0), List(
    "b00".U   -> true.B,              //b
    "b01".U   -> (l2_out.bits.vaddr(0) === 0.U),   //h
    "b10".U   -> (l2_out.bits.vaddr(1,0) === 0.U), //w
    "b11".U   -> (l2_out.bits.vaddr(2,0) === 0.U)  //d
  ))
  l2_out.bits.uop.cf.exceptionVec(loadAddrMisaligned) := !addrAligned
  l2_out.bits.uop.cf.exceptionVec(loadPageFault) := io.dtlb.resp.bits.excp.pf.ld
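
  // Worked example: lw has fuOpType(1,0) = "b10", so a vaddr like 0x80001002
  // with vaddr(1,0) = "b10" raises loadAddrMisaligned; the page-fault bit
  // simply mirrors the DTLB's load page-fault exception.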

  // send req to dcache
  // never send TLB-missed or MMIO reqs to dcache
  io.dcache.req.valid     := l2_dcache

  io.dcache.req.bits.cmd  := MemoryOpConstants.M_XRD
  // TODO: vaddr
  io.dcache.req.bits.addr := io.dtlb.resp.bits.paddr
  io.dcache.req.bits.data := DontCare
  io.dcache.req.bits.mask := l2_out.bits.mask

  io.dcache.req.bits.meta.id       := DontCare
  io.dcache.req.bits.meta.vaddr    := l2_out.bits.vaddr
  io.dcache.req.bits.meta.paddr    := io.dtlb.resp.bits.paddr
  io.dcache.req.bits.meta.uop      := l2_out.bits.uop
  io.dcache.req.bits.meta.mmio     := isMMIOReq
  io.dcache.req.bits.meta.tlb_miss := io.dtlb.resp.bits.miss
  io.dcache.req.bits.meta.mask     := l2_out.bits.mask
  io.dcache.req.bits.meta.replay   := false.B

  val l2_tlbFeedback = Wire(new TlbFeedback)
  l2_tlbFeedback.hit := !io.dtlb.resp.bits.miss
  l2_tlbFeedback.roqIdx := l2_out.bits.uop.roqIdx
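
  // The hit/miss verdict is registered and reported one cycle later (stage 3)
  // so the issue logic (presumably the reservation station) can replay the
  // load if translation missed.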

  // dump l2
  XSDebug(l2_out.valid, "L2: pc 0x%x addr 0x%x -> 0x%x op %b data 0x%x mask %x dtlb_miss %b dcache %b mmio %b\n",
    l2_out.bits.uop.cf.pc, l2_out.bits.vaddr, l2_out.bits.paddr,
    l2_out.bits.uop.ctrl.fuOpType, l2_out.bits.data, l2_out.bits.mask,
    l2_dtlb_miss, l2_dcache, l2_mmio)

  XSDebug(l2_out.fire(), "load req: pc 0x%x addr 0x%x -> 0x%x op %b\n",
    l2_out.bits.uop.cf.pc, l2_out.bits.vaddr, l2_out.bits.paddr, l2_out.bits.uop.ctrl.fuOpType)

  XSDebug(io.dcache.req.valid, p"dcache req(${io.dcache.req.valid} ${io.dcache.req.ready}): pc:0x${Hexadecimal(io.dcache.req.bits.meta.uop.cf.pc)} roqIdx:${io.dcache.req.bits.meta.uop.roqIdx} lsroqIdx:${io.dcache.req.bits.meta.uop.lsroqIdx} addr:0x${Hexadecimal(io.dcache.req.bits.addr)} vaddr:0x${Hexadecimal(io.dcache.req.bits.meta.vaddr)} paddr:0x${Hexadecimal(io.dcache.req.bits.meta.paddr)} mmio:${io.dcache.req.bits.meta.mmio} tlb_miss:${io.dcache.req.bits.meta.tlb_miss} mask:${io.dcache.req.bits.meta.mask}\n")

  //-------------------------------------------------------
  // LD Pipeline Stage 3
  // Compare tag, use addr to query DCache Data
  //-------------------------------------------------------

  val l3_valid = RegNext(l2_out.fire(), false.B)
  val l3_dtlb_miss = RegEnable(next = l2_dtlb_miss, enable = l2_out.fire(), init = false.B)
  val l3_dcache = RegEnable(next = l2_dcache, enable = l2_out.fire(), init = false.B)
  val l3_tlbFeedback = RegEnable(next = l2_tlbFeedback, enable = l2_out.fire())
  val l3_bundle = RegEnable(next = l2_out.bits, enable = l2_out.fire())
  val l3_uop = l3_bundle.uop
  // dtlb-missed reqs end here
  val l3_passdown = l3_valid && !l3_dtlb_miss && !l3_uop.roqIdx.needFlush(io.redirect)

  io.tlbFeedback.valid := l3_valid
  io.tlbFeedback.bits := l3_tlbFeedback
  io.dcache.s1_kill := l3_valid && l3_dcache && l3_uop.roqIdx.needFlush(io.redirect)
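
  // If a redirect flushes this load while its request is still in flight,
  // s1_kill tells DCache to drop it in its own stage 1.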

  // dump l3
  XSDebug(l3_valid, "l3: pc 0x%x addr 0x%x -> 0x%x op %b data 0x%x mask %x dtlb_miss %b dcache %b mmio %b\n",
    l3_bundle.uop.cf.pc, l3_bundle.vaddr, l3_bundle.paddr,
    l3_bundle.uop.ctrl.fuOpType, l3_bundle.data, l3_bundle.mask,
    l3_dtlb_miss, l3_dcache, l3_bundle.mmio)

  XSDebug(io.tlbFeedback.valid, "tlbFeedback: hit %b roqIdx %d\n",
    io.tlbFeedback.bits.hit, io.tlbFeedback.bits.roqIdx.asUInt)

  XSDebug(io.dcache.s1_kill, "l3: dcache s1_kill\n")

  // the tag compare itself is done inside DCache

  //-------------------------------------------------------
  // LD Pipeline Stage 4
  // DCache returns result, do tag ECC check and forward check
  //-------------------------------------------------------

  val l4_valid = RegNext(l3_passdown, false.B)
  val l4_dcache = RegNext(l3_dcache, false.B)
  val l4_bundle = RegNext(l3_bundle)

  val fullForward = Wire(Bool())

  assert(!(io.dcache.resp.ready && !io.dcache.resp.valid), "DCache response got lost")
  io.dcache.resp.ready := l4_valid && l4_dcache
  when (io.dcache.resp.fire()) {
    l4_out.bits := DontCare
    l4_out.bits.data  := io.dcache.resp.bits.data
    l4_out.bits.paddr := io.dcache.resp.bits.meta.paddr
    l4_out.bits.uop   := io.dcache.resp.bits.meta.uop
    l4_out.bits.mmio  := io.dcache.resp.bits.meta.mmio
    l4_out.bits.mask  := io.dcache.resp.bits.meta.mask
    // when we can get the data completely from forwarding,
    // we no longer need to access dcache;
    // treat nack as miss
    l4_out.bits.miss  := Mux(fullForward, false.B,
      io.dcache.resp.bits.miss || io.dcache.resp.bits.nack)
    XSDebug(io.dcache.resp.fire(), p"DcacheResp(l4): data:0x${Hexadecimal(io.dcache.resp.bits.data)} paddr:0x${Hexadecimal(io.dcache.resp.bits.meta.paddr)} pc:0x${Hexadecimal(io.dcache.resp.bits.meta.uop.cf.pc)} roqIdx:${io.dcache.resp.bits.meta.uop.roqIdx} lsroqIdx:${io.dcache.resp.bits.meta.uop.lsroqIdx} miss:${io.dcache.resp.bits.miss}\n")
  } .otherwise {
    l4_out.bits := l4_bundle
  }
  l4_out.valid := l4_valid && !l4_out.bits.uop.roqIdx.needFlush(io.redirect)
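
  // When no DCache response fires this cycle (e.g. an MMIO load that never
  // went to DCache), the bundle registered from stage 3 is carried instead.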

  // Store addr forward match
  // If match, get data / fmask from store queue / store buffer

  // io.lsroq.forward := DontCare
  io.lsroq.forward.paddr := l4_out.bits.paddr
  io.lsroq.forward.mask := io.dcache.resp.bits.meta.mask
  io.lsroq.forward.lsroqIdx := l4_out.bits.uop.lsroqIdx
  io.lsroq.forward.sqIdx := l4_out.bits.uop.sqIdx
  io.lsroq.forward.uop := l4_out.bits.uop
  io.lsroq.forward.pc := l4_out.bits.uop.cf.pc
  io.lsroq.forward.valid := io.dcache.resp.valid // TODO: opt timing

  io.sbuffer.paddr := l4_out.bits.paddr
  io.sbuffer.mask := io.dcache.resp.bits.meta.mask
  io.sbuffer.lsroqIdx := l4_out.bits.uop.lsroqIdx
  io.sbuffer.sqIdx := l4_out.bits.uop.sqIdx
  io.sbuffer.uop := DontCare
  io.sbuffer.pc := l4_out.bits.uop.cf.pc
  io.sbuffer.valid := l4_out.valid

  val forwardVec = WireInit(io.sbuffer.forwardData)
  val forwardMask = WireInit(io.sbuffer.forwardMask)
  // generate XLEN/8 Muxes; lsroq entries hold in-flight stores, which are
  // younger than the committed stores in sbuffer, so lsroq data overrides
  // sbuffer data byte by byte
  (0 until XLEN/8).foreach(j => {
    when(io.lsroq.forward.forwardMask(j)) {
      forwardMask(j) := true.B
      forwardVec(j) := io.lsroq.forward.forwardData(j)
    }
  })
  l4_out.bits.forwardMask := forwardMask
  l4_out.bits.forwardData := forwardVec
  fullForward := (~l4_out.bits.forwardMask.asUInt & l4_out.bits.mask) === 0.U
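
  // Example: a word load with mask "b00001111" where stores forward only
  // bytes 0-1 (forwardMask "b00000011") still needs bytes 2-3 from DCache,
  // so fullForward is false and a cache miss cannot be ignored.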

  PipelineConnect(l4_out, l5_in, io.ldout.fire() || (l5_in.bits.miss || l5_in.bits.mmio) && l5_in.valid, false.B)
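  // The l5 slot is freed when the result is written back to the CDB, or
  // immediately for miss/MMIO loads, which are handed off to the lsroq.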

  XSDebug(l4_valid, "l4: out.valid:%d pc 0x%x addr 0x%x -> 0x%x op %b data 0x%x mask %x forwardData: 0x%x forwardMask: %x dcache %b mmio %b miss:%d\n",
    l4_out.valid, l4_out.bits.uop.cf.pc, l4_out.bits.vaddr, l4_out.bits.paddr,
    l4_out.bits.uop.ctrl.fuOpType, l4_out.bits.data, l4_out.bits.mask,
    l4_out.bits.forwardData.asUInt, l4_out.bits.forwardMask.asUInt, l4_dcache, l4_out.bits.mmio, l4_out.bits.miss)

  XSDebug(l5_in.valid, "L5(%d %d): pc 0x%x addr 0x%x -> 0x%x op %b data 0x%x mask %x forwardData: 0x%x forwardMask: %x\n",
    l5_in.valid, l5_in.ready, l5_in.bits.uop.cf.pc, l5_in.bits.vaddr, l5_in.bits.paddr,
    l5_in.bits.uop.ctrl.fuOpType, l5_in.bits.data, l5_in.bits.mask,
    l5_in.bits.forwardData.asUInt, l5_in.bits.forwardMask.asUInt)

  XSDebug(l4_valid, "l4: sbuffer forwardData: 0x%x forwardMask: %x\n",
    io.sbuffer.forwardData.asUInt, io.sbuffer.forwardMask.asUInt)

  XSDebug(l4_valid, "l4: lsroq forwardData: 0x%x forwardMask: %x\n",
    io.lsroq.forward.forwardData.asUInt, io.lsroq.forward.forwardMask.asUInt)

  XSDebug(io.redirect.valid,
    p"Redirect: excp:${io.redirect.bits.isException} flushPipe:${io.redirect.bits.isFlushPipe} misp:${io.redirect.bits.isMisPred} " +
    p"replay:${io.redirect.bits.isReplay} pc:0x${Hexadecimal(io.redirect.bits.pc)} target:0x${Hexadecimal(io.redirect.bits.target)} " +
    p"brTag:${io.redirect.bits.brTag} l2:${io.ldin.bits.uop.roqIdx.needFlush(io.redirect)} l3:${l3_uop.roqIdx.needFlush(io.redirect)} " +
    p"l4:${l4_out.bits.uop.roqIdx.needFlush(io.redirect)}\n"
  )
  //-------------------------------------------------------
  // LD Pipeline Stage 5
  // Do data ECC check, merge result and write back to lsroq
  // If cache hit, return writeback result to CDB
  //-------------------------------------------------------

  val loadWriteBack = l5_in.fire()

  // data merge
  val rdata = VecInit((0 until 8).map(j => {
    Mux(l5_in.bits.forwardMask(j),
      l5_in.bits.forwardData(j),
      l5_in.bits.data(8*(j+1)-1, 8*j)
    )
  })).asUInt
  val func = l5_in.bits.uop.ctrl.fuOpType
  val raddr = l5_in.bits.paddr
  val rdataSel = LookupTree(raddr(2, 0), List(
    "b000".U -> rdata(63, 0),
    "b001".U -> rdata(63, 8),
    "b010".U -> rdata(63, 16),
    "b011".U -> rdata(63, 24),
    "b100".U -> rdata(63, 32),
    "b101".U -> rdata(63, 40),
    "b110".U -> rdata(63, 48),
    "b111".U -> rdata(63, 56)
  ))
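
  // Worked example: a load at paddr offset "b010" selects rdata(63, 16),
  // i.e. shifts the merged doubleword right by two bytes, so the halfword
  // at bytes 2-3 lands in rdataSel(15, 0) for the extension step below.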
  val rdataPartialLoad = LookupTree(func, List(
      LSUOpType.lb   -> SignExt(rdataSel(7, 0), XLEN),
      LSUOpType.lh   -> SignExt(rdataSel(15, 0), XLEN),
      LSUOpType.lw   -> SignExt(rdataSel(31, 0), XLEN),
      LSUOpType.ld   -> SignExt(rdataSel(63, 0), XLEN),
      LSUOpType.lbu  -> ZeroExt(rdataSel(7, 0), XLEN),
      LSUOpType.lhu  -> ZeroExt(rdataSel(15, 0), XLEN),
      LSUOpType.lwu  -> ZeroExt(rdataSel(31, 0), XLEN),
      LSUOpType.flw  -> boxF32ToF64(rdataSel(31, 0))
  ))
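
  // flw goes through boxF32ToF64, which NaN-boxes the 32-bit result
  // (upper 32 bits set to ones) so it is a legal single-precision value
  // in a 64-bit FP register, per the RISC-V F-on-D convention.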

  // ecc check
  // TODO

  // if hit, writeback result to CDB
  // val ldout = Vec(2, Decoupled(new ExuOutput))
  // when io.loadIn(i).fire() && !io.loadIn(i).miss, commit load to cdb
  val hitLoadOut = Wire(Decoupled(new ExuOutput))
  hitLoadOut.bits.uop := l5_in.bits.uop
  hitLoadOut.bits.data := rdataPartialLoad
  hitLoadOut.bits.fflags := DontCare
  hitLoadOut.bits.redirectValid := false.B
  hitLoadOut.bits.redirect := DontCare
  hitLoadOut.bits.brUpdate := DontCare
  hitLoadOut.bits.debug.isMMIO := l5_in.bits.mmio
  hitLoadOut.valid := l5_in.valid && !l5_in.bits.mmio && !l5_in.bits.miss // MMIO will be done in lsroq
  XSDebug(hitLoadOut.fire(), "load writeback: pc %x data %x (%x + %x(%b))\n",
    hitLoadOut.bits.uop.cf.pc, rdataPartialLoad, l5_in.bits.data,
    l5_in.bits.forwardData.asUInt, l5_in.bits.forwardMask.asUInt
  )

  // writeback to lsroq
  // the current dcache uses MSHRs, so missed loads are refilled there and
  // written back via io.lsroq.ldout below
  io.lsroq.loadIn.bits := l5_in.bits
  io.lsroq.loadIn.bits.data := rdataPartialLoad // for debug
  io.lsroq.loadIn.valid := loadWriteBack
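
  // Every load, hit or miss, reports here so the lsroq can track completion
  // and (presumably) check for store-load ordering violations.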

  // pipeline control
  l5_in.ready := io.ldout.ready

  val cdbArb = Module(new Arbiter(new ExuOutput, 2))
  io.ldout <> cdbArb.io.out
  hitLoadOut <> cdbArb.io.in(0)
  io.lsroq.ldout <> cdbArb.io.in(1) // missLoadOut
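
  // Chisel's Arbiter gives in(0) the highest priority, so a hit load beats a
  // concurrent lsroq (miss/MMIO) writeback for the shared CDB port.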

  when(io.ldout.fire()){
    XSDebug("ldout %x iw %x fw %x\n", io.ldout.bits.uop.cf.pc, io.ldout.bits.uop.ctrl.rfWen, io.ldout.bits.uop.ctrl.fpWen)
  }
}