/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.mem

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import utility._
import xiangshan._
import xiangshan.backend.fu.fpu.FPU
import xiangshan.backend.rob.RobLsqIO
import xiangshan.cache._
import xiangshan.frontend.FtqPtr
import xiangshan.ExceptionNO._
import xiangshan.cache.dcache.ReplayCarry
import xiangshan.mem.mdp._
import xiangshan.backend.rob.RobPtr

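// Pointer into the virtual load queue. CircularQueuePtr pairs an index
// (`value`) with a wrap flag (`flag`) so that full and empty can be told
// apart when enqueue and dequeue indices coincide.
//
// Minimal usage sketch (hypothetical values; helper names are assumed to come
// from the utility package's circular-queue-pointer helpers):
//   val deqPtr = LqPtr(f = false.B, v = 0.U)
//   val enqPtr = deqPtr + 4.U           // wrapping add defined by CircularQueuePtr
//   val full   = isFull(enqPtr, deqPtr) // from HasCircularQueuePtrHelper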
class LqPtr(implicit p: Parameters) extends CircularQueuePtr[LqPtr](
  p => p(XSCoreParamsKey).VirtualLoadQueueSize
){
}

object LqPtr {
  def apply(f: Bool, v: UInt)(implicit p: Parameters): LqPtr = {
    val ptr = Wire(new LqPtr)
    ptr.flag := f
    ptr.value := v
    ptr
  }
}

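// Shared helper for formatting load writeback data: it selects the result by
// fuOpType, sign- or zero-extending integer loads and NaN-boxing narrow FP
// loads up to XLEN bits.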
trait HasLoadHelper { this: XSModule =>
  def rdataHelper(uop: MicroOp, rdata: UInt): UInt = {
    val fpWen = uop.ctrl.fpWen
    LookupTree(uop.ctrl.fuOpType, List(
      LSUOpType.lb   -> SignExt(rdata(7, 0) , XLEN),
      LSUOpType.lh   -> SignExt(rdata(15, 0), XLEN),
      /*
          riscv-spec-20191213: 12.2 NaN Boxing of Narrower Values
          Any operation that writes a narrower result to an f register must write
          all 1s to the uppermost FLEN−n bits to yield a legal NaN-boxed value.
      */
      LSUOpType.lw   -> Mux(fpWen, FPU.box(rdata, FPU.S), SignExt(rdata(31, 0), XLEN)),
      LSUOpType.ld   -> Mux(fpWen, FPU.box(rdata, FPU.D), SignExt(rdata(63, 0), XLEN)),
      LSUOpType.lbu  -> ZeroExt(rdata(7, 0) , XLEN),
      LSUOpType.lhu  -> ZeroExt(rdata(15, 0), XLEN),
      LSUOpType.lwu  -> ZeroExt(rdata(31, 0), XLEN),
    ))
  }
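
  // Worked examples (illustrative values, not taken from this file):
  //   lb  with rdata(7, 0) = 0x80 -> 0xffff_ffff_ffff_ff80 (sign-extended)
  //   lbu with rdata(7, 0) = 0x80 -> 0x0000_0000_0000_0080 (zero-extended)
  //   lw  with fpWen set (flw), rdata(31, 0) = 0xdead_beef
  //       -> 0xffff_ffff_dead_beef (NaN-boxed: the upper bits are forced to
  //          all 1s, per the spec excerpt above)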
}

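// Enqueue interface used at dispatch. canAccept alone is not a grant: a
// memory instruction only enqueues when the store queue side also has room
// (sqCanAccept), so both queues allocate in the same cycle.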
class LqEnqIO(implicit p: Parameters) extends XSBundle {
  val canAccept = Output(Bool())
  val sqCanAccept = Input(Bool())
  val needAlloc = Vec(exuParameters.LsExuCnt, Input(Bool()))
  val req = Vec(exuParameters.LsExuCnt, Flipped(ValidIO(new MicroOp)))
  val resp = Vec(exuParameters.LsExuCnt, Output(new LqPtr))
}

class LqTriggerIO(implicit p: Parameters) extends XSBundle {
  val hitLoadAddrTriggerHitVec = Input(Vec(3, Bool()))
  val lqLoadAddrTriggerHitVec = Output(Vec(3, Bool()))
}

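// Keeps the single oldest excepting load in flight so that, when the ROB
// takes the exception, the faulting virtual address can be read back through
// io.exceptionAddr. Younger excepting loads are discarded in favor of older
// ones.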
class LqExceptionBuffer(implicit p: Parameters) extends XSModule with HasCircularQueuePtrHelper {
  val io = IO(new Bundle() {
    val redirect = Flipped(Valid(new Redirect))
    val req = Vec(LoadPipelineWidth, Flipped(Valid(new LqWriteBundle)))
    val exceptionAddr = new ExceptionAddrIO
  })

  val req_valid = RegInit(false.B)
  val req = Reg(new LqWriteBundle)

  // enqueue
  // s1: take the requests straight from the load pipeline
  val s1_req = VecInit(io.req.map(_.bits))
  val s1_valid = VecInit(io.req.map(x => x.valid))

  // s2: delay 1 cycle; a request survives only if it is flushed neither by
  // last cycle's redirect nor by this cycle's
  val s2_req = RegNext(s1_req)
  val s2_valid = (0 until LoadPipelineWidth).map(i =>
    RegNext(s1_valid(i)) &&
    !s2_req(i).uop.robIdx.needFlush(RegNext(io.redirect)) &&
    !s2_req(i).uop.robIdx.needFlush(io.redirect)
  )
  val s2_has_exception = s2_req.map(x => ExceptionNO.selectByFu(x.uop.cf.exceptionVec, lduCfg).asUInt.orR)

  val s2_enqueue = Wire(Vec(LoadPipelineWidth, Bool()))
  for (w <- 0 until LoadPipelineWidth) {
    s2_enqueue(w) := s2_valid(w) && s2_has_exception(w)
  }

  when (req.uop.robIdx.needFlush(io.redirect)) {
    req_valid := false.B
  } .elsewhen (s2_enqueue.asUInt.orR) {
    req_valid := true.B
  }

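  // selectOldest reduces its candidates as a balanced Mux tree: with
  // LoadPipelineWidth == 2 a single two-way compare suffices; wider
  // configurations recurse, halving the candidate list at each level
  // (log2 comparison depth).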
  def selectOldest[T <: LqWriteBundle](valid: Seq[Bool], bits: Seq[T]): (Seq[Bool], Seq[T]) = {
    assert(valid.length == bits.length)
    if (valid.length == 0 || valid.length == 1) {
      (valid, bits)
    } else if (valid.length == 2) {
      val res = Seq.fill(2)(Wire(ValidIO(chiselTypeOf(bits(0)))))
      for (i <- res.indices) {
        res(i).valid := valid(i)
        res(i).bits := bits(i)
      }
      val oldest = Mux(valid(0) && valid(1),
        Mux(isAfter(bits(0).uop.robIdx, bits(1).uop.robIdx), res(1), res(0)),
        Mux(valid(0) && !valid(1), res(0), res(1)))
      (Seq(oldest.valid), Seq(oldest.bits))
    } else {
      val left = selectOldest(valid.take(valid.length / 2), bits.take(bits.length / 2))
      val right = selectOldest(valid.takeRight(valid.length - (valid.length / 2)), bits.takeRight(bits.length - (bits.length / 2)))
      selectOldest(left._1 ++ right._1, left._2 ++ right._2)
    }
  }

  val reqSel = selectOldest(s2_enqueue, s2_req)

  when (req_valid) {
    // keep the currently held request unless the new candidate is older
    req := Mux(reqSel._1(0) && isAfter(req.uop.robIdx, reqSel._2(0).uop.robIdx), reqSel._2(0), req)
  } .elsewhen (s2_enqueue.asUInt.orR) {
    req := reqSel._2(0)
  }

  io.exceptionAddr.vaddr := req.vaddr
  // count rising edges of req_valid, i.e. newly captured exceptions
  XSPerfAccumulate("exception", !RegNext(req_valid) && req_valid)

  // end
}

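// Top-level load queue: a wrapper that instantiates the specialized
// sub-queues (virtual load queue, RAR/RAW violation queues, replay queue,
// exception buffer, and uncache buffer) and wires them behind one interface.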
class LoadQueue(implicit p: Parameters) extends XSModule
  with HasDCacheParameters
  with HasCircularQueuePtrHelper
  with HasLoadHelper
  with HasPerfEvents
{
  val io = IO(new Bundle() {
    val redirect = Flipped(Valid(new Redirect))
    val enq = new LqEnqIO
    val ldu = new Bundle() {
      val storeLoadViolationQuery = Vec(LoadPipelineWidth, Flipped(new LoadViolationQueryIO)) // from load_s2
      val loadLoadViolationQuery = Vec(LoadPipelineWidth, Flipped(new LoadViolationQueryIO)) // from load_s2
      val loadIn = Vec(LoadPipelineWidth, Flipped(Decoupled(new LqWriteBundle))) // from load_s3; sized by the load pipeline, whose width all consumers below assume
    }
    val sta = new Bundle() {
      val storeAddrIn = Vec(StorePipelineWidth, Flipped(Valid(new LsPipelineBundle))) // from store_s1
    }
    val std = new Bundle() {
      val storeDataIn = Vec(StorePipelineWidth, Flipped(Valid(new ExuOutput))) // from store_s0: store data, also sent to the store queue from RS
    }
    val sq = new Bundle() {
      val stAddrReadySqPtr = Input(new SqPtr)
      val stAddrReadyVec = Input(Vec(StoreQueueSize, Bool()))
      val stDataReadySqPtr = Input(new SqPtr)
      val stDataReadyVec = Input(Vec(StoreQueueSize, Bool()))
      val stIssuePtr = Input(new SqPtr)
      val sqEmpty = Input(Bool())
    }
    val loadOut = Vec(LoadPipelineWidth, DecoupledIO(new ExuOutput))
    val ldRawDataOut = Vec(LoadPipelineWidth, Output(new LoadDataFromLQBundle))
    val replay = Vec(LoadPipelineWidth, Decoupled(new LsPipelineBundle))
    val refill = Flipped(ValidIO(new Refill))
    val release = Flipped(Valid(new Release))
    val rollback = Output(Valid(new Redirect))
    val rob = Flipped(new RobLsqIO)
    val uncache = new UncacheWordIO
    val trigger = Vec(LoadPipelineWidth, new LqTriggerIO)
    val exceptionAddr = new ExceptionAddrIO
    val lqFull = Output(Bool())
    val lqDeq = Output(UInt(log2Up(CommitWidth + 1).W))
    val lqCancelCnt = Output(UInt(log2Up(VirtualLoadQueueSize + 1).W))
    val lqReplayFull = Output(Bool())
    val tlbReplayDelayCycleCtrl = Vec(4, Input(UInt(ReSelectLen.W)))
    val l2Hint = Input(Valid(new L2ToL1Hint()))
  })

  val loadQueueRAR = Module(new LoadQueueRAR) // read-after-read violation
  val loadQueueRAW = Module(new LoadQueueRAW) // read-after-write violation
  val loadQueueReplay = Module(new LoadQueueReplay) // enqueue if need replay
  val virtualLoadQueue = Module(new VirtualLoadQueue) // control state
  val exceptionBuffer = Module(new LqExceptionBuffer) // exception buffer
  val uncacheBuffer = Module(new UncacheBuffer) // uncache buffer
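  // VirtualLoadQueue, LoadQueueReplay, the exception buffer, and the uncache
  // buffer all consume the load_s3 writeback (io.ldu.loadIn), wired up section
  // by section below; the RAR/RAW queues are instead driven by the per-stage
  // violation queries.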

  /**
   * LoadQueueRAR
   */
  loadQueueRAR.io.redirect <> io.redirect
  loadQueueRAR.io.release <> io.release
  loadQueueRAR.io.ldWbPtr <> virtualLoadQueue.io.ldWbPtr
  for (w <- 0 until LoadPipelineWidth) {
    loadQueueRAR.io.query(w).req <> io.ldu.loadLoadViolationQuery(w).req // from load_s1
    loadQueueRAR.io.query(w).resp <> io.ldu.loadLoadViolationQuery(w).resp // to load_s2
    loadQueueRAR.io.query(w).preReq := io.ldu.loadLoadViolationQuery(w).preReq // from load_s1
    loadQueueRAR.io.query(w).release := io.ldu.loadLoadViolationQuery(w).release // from load_s3
  }

  /**
   * LoadQueueRAW
   */
  loadQueueRAW.io.redirect <> io.redirect
  loadQueueRAW.io.storeIn <> io.sta.storeAddrIn
  loadQueueRAW.io.stAddrReadySqPtr <> io.sq.stAddrReadySqPtr
  loadQueueRAW.io.stIssuePtr <> io.sq.stIssuePtr
  for (w <- 0 until LoadPipelineWidth) {
    loadQueueRAW.io.query(w).req <> io.ldu.storeLoadViolationQuery(w).req // from load_s1
    loadQueueRAW.io.query(w).resp <> io.ldu.storeLoadViolationQuery(w).resp // to load_s2
    loadQueueRAW.io.query(w).preReq := io.ldu.storeLoadViolationQuery(w).preReq // from load_s1
    loadQueueRAW.io.query(w).release := io.ldu.storeLoadViolationQuery(w).release // from load_s3
  }
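  // Both violation-query interfaces follow the same lifecycle, as the stage
  // comments above indicate: allocate at load_s1 (preReq/req), answer at
  // load_s2 (resp), and free the entry at load_s3 (release).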

  /**
   * VirtualLoadQueue
   */
  virtualLoadQueue.io.redirect <> io.redirect
  virtualLoadQueue.io.enq <> io.enq
  virtualLoadQueue.io.loadIn <> io.ldu.loadIn // from load_s3
  virtualLoadQueue.io.lqFull <> io.lqFull
  virtualLoadQueue.io.lqDeq <> io.lqDeq
  virtualLoadQueue.io.lqCancelCnt <> io.lqCancelCnt

  /**
   * Load queue exception buffer
   */
  exceptionBuffer.io.redirect <> io.redirect
  for ((buff, w) <- exceptionBuffer.io.req.zipWithIndex) {
    buff.valid := io.ldu.loadIn(w).valid // from load_s3
    buff.bits := io.ldu.loadIn(w).bits
  }
  io.exceptionAddr <> exceptionBuffer.io.exceptionAddr

  /**
   * Load uncache buffer
   */
  uncacheBuffer.io.redirect <> io.redirect
  uncacheBuffer.io.loadOut <> io.loadOut
  uncacheBuffer.io.loadRawDataOut <> io.ldRawDataOut
  uncacheBuffer.io.rob <> io.rob
  uncacheBuffer.io.uncache <> io.uncache
  uncacheBuffer.io.trigger <> io.trigger
  for ((buff, w) <- uncacheBuffer.io.req.zipWithIndex) {
    buff.valid := io.ldu.loadIn(w).valid // from load_s3
    buff.bits := io.ldu.loadIn(w).bits // from load_s3
  }

  // rollback: a store-load violation detected by LoadQueueRAW or a rollback
  // request from the uncache buffer redirects the pipeline; when both fire in
  // the same cycle, the older request (by robIdx) is selected below.
  // Same tree reduction as LqExceptionBuffer.selectOldest, but keyed on
  // Redirect.robIdx rather than on a uop's robIdx
  def selectOldest[T <: Redirect](valid: Seq[Bool], bits: Seq[T]): (Seq[Bool], Seq[T]) = {
    assert(valid.length == bits.length)
    if (valid.length == 0 || valid.length == 1) {
      (valid, bits)
    } else if (valid.length == 2) {
      val res = Seq.fill(2)(Wire(ValidIO(chiselTypeOf(bits(0)))))
      for (i <- res.indices) {
        res(i).valid := valid(i)
        res(i).bits := bits(i)
      }
      val oldest = Mux(valid(0) && valid(1),
        Mux(isAfter(bits(0).robIdx, bits(1).robIdx), res(1), res(0)),
        Mux(valid(0) && !valid(1), res(0), res(1)))
      (Seq(oldest.valid), Seq(oldest.bits))
    } else {
      val left = selectOldest(valid.take(valid.length / 2), bits.take(bits.length / 2))
      val right = selectOldest(valid.takeRight(valid.length - (valid.length / 2)), bits.takeRight(bits.length - (bits.length / 2)))
      selectOldest(left._1 ++ right._1, left._2 ++ right._2)
    }
  }

  val (rollbackSelV, rollbackSelBits) = selectOldest(
    Seq(loadQueueRAW.io.rollback.valid, uncacheBuffer.io.rollback.valid),
    Seq(loadQueueRAW.io.rollback.bits, uncacheBuffer.io.rollback.bits)
  )
  io.rollback.valid := rollbackSelV.head
  io.rollback.bits := rollbackSelBits.head
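  // Example: if the RAW queue and the uncache buffer both request a rollback
  // in the same cycle, the redirect carrying the older robIdx wins, so
  // execution always rewinds to the earliest violating load.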

  /* <------- DANGEROUS: Don't change sequence here! -------> */

  /**
   * LoadQueueReplay
   */
  loadQueueReplay.io.redirect <> io.redirect
  loadQueueReplay.io.enq <> io.ldu.loadIn // from load_s3
  loadQueueReplay.io.storeAddrIn <> io.sta.storeAddrIn // from store_s1
  loadQueueReplay.io.storeDataIn <> io.std.storeDataIn // from store_s0
  loadQueueReplay.io.replay <> io.replay
  loadQueueReplay.io.refill <> io.refill
  loadQueueReplay.io.stAddrReadySqPtr <> io.sq.stAddrReadySqPtr
  loadQueueReplay.io.stAddrReadyVec <> io.sq.stAddrReadyVec
  loadQueueReplay.io.stDataReadySqPtr <> io.sq.stDataReadySqPtr
  loadQueueReplay.io.stDataReadyVec <> io.sq.stDataReadyVec
  loadQueueReplay.io.sqEmpty <> io.sq.sqEmpty
  loadQueueReplay.io.lqFull <> io.lqReplayFull
  loadQueueReplay.io.tlbReplayDelayCycleCtrl <> io.tlbReplayDelayCycleCtrl
  loadQueueReplay.io.ldWbPtr := virtualLoadQueue.io.ldWbPtr
  loadQueueReplay.io.rarFull := loadQueueRAR.io.lqFull
  loadQueueReplay.io.rawFull := loadQueueRAW.io.lqFull
  loadQueueReplay.io.l2Hint <> io.l2Hint

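  // full_mask concatenates the three "queue full" flags for profiling:
  // bit 2 = RAR full, bit 1 = RAW full, bit 0 = replay queue full. For
  // example, full_mask === 4.U counts cycles where only the RAR queue is full.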
  val full_mask = Cat(loadQueueRAR.io.lqFull, loadQueueRAW.io.lqFull, loadQueueReplay.io.lqFull)
  XSPerfAccumulate("full_mask_000", full_mask === 0.U)
  XSPerfAccumulate("full_mask_001", full_mask === 1.U)
  XSPerfAccumulate("full_mask_010", full_mask === 2.U)
  XSPerfAccumulate("full_mask_011", full_mask === 3.U)
  XSPerfAccumulate("full_mask_100", full_mask === 4.U)
  XSPerfAccumulate("full_mask_101", full_mask === 5.U)
  XSPerfAccumulate("full_mask_110", full_mask === 6.U)
  XSPerfAccumulate("full_mask_111", full_mask === 7.U)
  XSPerfAccumulate("rollback", io.rollback.valid)

  // perf cnt: the counters above are also exported as HPM perf events
  val perfEvents = Seq(virtualLoadQueue, loadQueueRAR, loadQueueRAW, loadQueueReplay).flatMap(_.getPerfEvents) ++
    Seq(
      ("full_mask_000", full_mask === 0.U),
      ("full_mask_001", full_mask === 1.U),
      ("full_mask_010", full_mask === 2.U),
      ("full_mask_011", full_mask === 3.U),
      ("full_mask_100", full_mask === 4.U),
      ("full_mask_101", full_mask === 5.U),
      ("full_mask_110", full_mask === 6.U),
      ("full_mask_111", full_mask === 7.U),
      ("rollback", io.rollback.valid)
    )
  generatePerfEvent()
  // end
}