xref: /XiangShan/src/main/scala/xiangshan/backend/CtrlBlock.scala (revision f973ab00f36a43196f948e67b30d0eff5b65d055)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.backend

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import difftest._
import utils._
import xiangshan._
import xiangshan.backend.decode.{DecodeStage, ImmUnion}
import xiangshan.backend.dispatch.{Dispatch, DispatchQueue}
import xiangshan.backend.rename.{Rename, RenameTableWrapper}
import xiangshan.backend.rob.{Rob, RobCSRIO, RobLsqIO}
import xiangshan.frontend.{FtqPtr, FtqRead}
import xiangshan.mem.LsqEnqIO

/** Interface from the control block to the Fetch Target Queue (FTQ). */
class CtrlToFtqIO(implicit p: Parameters) extends XSBundle {
  // Commit stream from the ROB, one slot per commit lane.
  val rob_commits = Vec(CommitWidth, Valid(new RobCommitInfo))
  // Early (stage-2) redirect, sent as soon as the oldest redirect is selected.
  val stage2Redirect = Valid(new Redirect)
  // Final (stage-3) redirect, carrying the complete CFI update payload.
  val stage3Redirect = Valid(new Redirect)
  // Flush request raised by the ROB (exception / interrupt / instruction replay).
  val robFlush = Valid(new Bundle {
    // FTQ position of the instruction that caused the flush.
    val ftqIdx = Output(new FtqPtr)
    val ftqOffset = Output(UInt(log2Up(PredictWidth).W))
    val replayInst = Output(Bool()) // not used for now
  })
}
/**
  * Collects redirect requests from the execution units (jump/ALU mispredicts)
  * and from the load queue (memory-violation replay), selects the oldest one
  * by ROB age, and drives the two-stage redirect pipeline towards the
  * frontend. It also produces the memory-dependence-predictor update when the
  * chosen redirect is a load replay.
  */
class RedirectGenerator(implicit p: Parameters) extends XSModule
  with HasCircularQueuePtrHelper {
  // Number of exu redirect sources (jump units + ALUs); load replay is extra.
  val numRedirect = exuParameters.JmpCnt + exuParameters.AluCnt
  val io = IO(new Bundle() {
    // Mispredict reports from the jump/ALU units; head is the jump unit.
    val exuMispredict = Vec(numRedirect, Flipped(ValidIO(new ExuOutput)))
    // Replay redirect caused by a detected load-store order violation.
    val loadReplay = Flipped(ValidIO(new Redirect))
    // ROB flush (delayed one cycle by CtrlBlock); kills in-flight redirects.
    val flush = Input(Bool())
    // One FTQ PC read port per redirect source (+1 for load replay).
    val stage1PcRead = Vec(numRedirect+1, new FtqRead(UInt(VAddrBits.W)))
    // Early redirect, one cycle after source selection.
    val stage2Redirect = ValidIO(new Redirect)
    // Final redirect with the filled-in cfiUpdate, one more cycle later.
    val stage3Redirect = ValidIO(new Redirect)
    val memPredUpdate = Output(new MemPredUpdateReq)
    val memPredPcRead = new FtqRead(UInt(VAddrBits.W)) // read req sent from stage 2
  })
  /*
        LoadQueue  Jump  ALU0  ALU1  ALU2  ALU3   exception    Stage1
          |         |      |    |     |     |         |
          |============= reg & compare =====|         |       ========
                            |                         |
                            |                         |
                            |                         |        Stage2
                            |                         |
                    redirect (flush backend)          |
                    |                                 |
               === reg ===                            |       ========
                    |                                 |
                    |----- mux (exception first) -----|        Stage3
                            |
                redirect (send to frontend)
   */
  // NOTE(review): Wrapper appears unused in this module — candidate for removal.
  private class Wrapper(val n: Int) extends Bundle {
    val redirect = new Redirect
    val valid = Bool()
    val idx = UInt(log2Up(n).W)
  }
  /**
    * Returns a one-hot vector marking the oldest (by robIdx) valid redirect
    * in xs. Entry i wins iff it is valid and every other valid entry is
    * younger than it (older instructions have redirect priority).
    */
  def selectOldestRedirect(xs: Seq[Valid[Redirect]]): Vec[Bool] = {
    // compareVec(i)(j) for j < i: true when xs(j) is younger than (after) xs(i).
    val compareVec = (0 until xs.length).map(i => (0 until i).map(j => isAfter(xs(j).bits.robIdx, xs(i).bits.robIdx)))
    val resultOnehot = VecInit((0 until xs.length).map(i => Cat((0 until xs.length).map(j =>
      (if (j < i) !xs(j).valid || compareVec(i)(j)
      else if (j == i) xs(i).valid
      else !xs(j).valid || !compareVec(j)(i))
    )).andR))
    resultOnehot
  }

  // Issue FTQ PC reads for every potential redirect source this cycle;
  // the data comes back in the next cycle (used as real_pc in stage 1).
  val redirects = io.exuMispredict.map(_.bits.redirect) :+ io.loadReplay.bits
  val stage1FtqReadPcs =
    (io.stage1PcRead zip redirects).map{ case (r, redirect) =>
      r(redirect.ftqIdx, redirect.ftqOffset)
    }

  // Wraps an ExuOutput as a Valid[Redirect]; only actual mispredicts count.
  def getRedirect(exuOut: Valid[ExuOutput]): ValidIO[Redirect] = {
    val redirect = Wire(Valid(new Redirect))
    redirect.valid := exuOut.valid && exuOut.bits.redirect.cfiUpdate.isMisPred
    redirect.bits := exuOut.bits.redirect
    redirect
  }

  // Head of exuMispredict is the jump unit (see the diagram above).
  val jumpOut = io.exuMispredict.head
  val allRedirect = VecInit(io.exuMispredict.map(x => getRedirect(x)) :+ io.loadReplay)
  val oldestOneHot = selectOldestRedirect(allRedirect)
  // A candidate is dropped if an older in-flight redirect or a flush kills it.
  val needFlushVec = VecInit(allRedirect.map(_.bits.robIdx.needFlush(io.stage2Redirect, io.flush)))
  val oldestValid = VecInit(oldestOneHot.zip(needFlushVec).map{ case (v, f) => v && !f }).asUInt.orR
  val oldestExuOutput = Mux1H(io.exuMispredict.indices.map(oldestOneHot), io.exuMispredict)
  val oldestRedirect = Mux1H(oldestOneHot, allRedirect)

  // Stage-1 registers: latch the selected redirect and the data needed to
  // reconstruct its target in the next cycle.
  val s1_jumpTarget = RegEnable(jumpOut.bits.redirect.cfiUpdate.target, jumpOut.valid)
  val s1_imm12_reg = RegNext(oldestExuOutput.bits.uop.ctrl.imm(11, 0))
  val s1_pd = RegNext(oldestExuOutput.bits.uop.cf.pd)
  val s1_redirect_bits_reg = RegNext(oldestRedirect.bits)
  val s1_redirect_valid_reg = RegNext(oldestValid)
  val s1_redirect_onehot = RegNext(oldestOneHot)

  // stage1 -> stage2: early redirect to flush the backend (cfiUpdate not
  // needed yet, filled in at stage 3).
  io.stage2Redirect.valid := s1_redirect_valid_reg && !io.flush
  io.stage2Redirect.bits := s1_redirect_bits_reg
  io.stage2Redirect.bits.cfiUpdate := DontCare

  // Source decoding: last one-hot slot is load replay, first is the jump unit.
  val s1_isReplay = s1_redirect_onehot.last
  val s1_isJump = s1_redirect_onehot.head
  // PC of the redirecting instruction, returned by the FTQ read issued above.
  val real_pc = Mux1H(s1_redirect_onehot, stage1FtqReadPcs)
  // Branch target: pc + sign-extended B-type immediate.
  val brTarget = real_pc + SignExt(ImmUnion.B.toImm32(s1_imm12_reg), XLEN)
  // Sequential next pc (fall-through): +2 for RVC, +4 otherwise.
  val snpc = real_pc + Mux(s1_pd.isRVC, 2.U, 4.U)
  val target = Mux(s1_isReplay,
    real_pc, // replay from itself
    Mux(s1_redirect_bits_reg.cfiUpdate.taken,
      Mux(s1_isJump, s1_jumpTarget, brTarget),
      snpc
    )
  )

  // get pc from ftq
  // valid only if redirect is caused by load violation
  // store_pc is used to update store set
  val store_pc = io.memPredPcRead(s1_redirect_bits_reg.stFtqIdx, s1_redirect_bits_reg.stFtqOffset)

  // update load violation predictor if load violation redirect triggered
  io.memPredUpdate.valid := RegNext(s1_isReplay && s1_redirect_valid_reg, init = false.B)
  // update wait table
  io.memPredUpdate.waddr := RegNext(XORFold(real_pc(VAddrBits-1, 1), MemPredPCWidth))
  io.memPredUpdate.wdata := true.B
  // update store set
  io.memPredUpdate.ldpc := RegNext(XORFold(real_pc(VAddrBits-1, 1), MemPredPCWidth))
  // store pc is ready 1 cycle after s1_isReplay is judged
  io.memPredUpdate.stpc := XORFold(store_pc(VAddrBits-1, 1), MemPredPCWidth)

  // Stage-2 registers: hold the final redirect payload for stage 3.
  val s2_target = RegEnable(target, enable = s1_redirect_valid_reg)
  val s2_pd = RegEnable(s1_pd, enable = s1_redirect_valid_reg)
  val s2_pc = RegEnable(real_pc, enable = s1_redirect_valid_reg)
  val s2_redirect_bits_reg = RegEnable(s1_redirect_bits_reg, enable = s1_redirect_valid_reg)
  val s2_redirect_valid_reg = RegNext(s1_redirect_valid_reg && !io.flush, init = false.B)

  // Stage 3: redirect to the frontend with the complete cfiUpdate filled in.
  io.stage3Redirect.valid := s2_redirect_valid_reg
  io.stage3Redirect.bits := s2_redirect_bits_reg
  val stage3CfiUpdate = io.stage3Redirect.bits.cfiUpdate
  stage3CfiUpdate.pc := s2_pc
  stage3CfiUpdate.pd := s2_pd
  stage3CfiUpdate.predTaken := s2_redirect_bits_reg.cfiUpdate.predTaken
  stage3CfiUpdate.target := s2_target
  stage3CfiUpdate.taken := s2_redirect_bits_reg.cfiUpdate.taken
  stage3CfiUpdate.isMisPred := s2_redirect_bits_reg.cfiUpdate.isMisPred

  // recover runahead checkpoint if redirect (simulation-only difftest hook)
  if (!env.FPGAPlatform) {
    val runahead_redirect = Module(new DifftestRunaheadRedirectEvent)
    runahead_redirect.io.clock := clock
    runahead_redirect.io.coreid := hardId.U
    runahead_redirect.io.valid := io.stage3Redirect.valid
    runahead_redirect.io.pc :=  s2_pc // for debug only
    runahead_redirect.io.target_pc := s2_target // for debug only
    runahead_redirect.io.checkpoint_id := io.stage3Redirect.bits.debug_runahead_checkpoint_id // make sure it is right
  }
}
/**
  * Backend control block: decode -> rename -> dispatch pipeline, dispatch
  * queues, the ROB, and the redirect generator. It receives writebacks and
  * mispredict/violation reports, and sends redirects/commits to the frontend.
  */
class CtrlBlock(implicit p: Parameters) extends XSModule
  with HasCircularQueuePtrHelper {
  val io = IO(new Bundle {
    val frontend = Flipped(new FrontendToCtrlIO)
    // Physical-register state reset requests, one per rename slot.
    val allocPregs = Vec(RenameWidth, Output(new ResetPregStateReq))
    // Dispatched uops to the execution blocks (int ++ ls ++ fp queue outputs).
    val dispatch = Vec(3*dpParams.IntDqDeqWidth, DecoupledIO(new MicroOp))
    // from int block
    val exuRedirect = Vec(exuParameters.AluCnt + exuParameters.JmpCnt, Flipped(ValidIO(new ExuOutput)))
    val stIn = Vec(exuParameters.StuCnt, Flipped(ValidIO(new ExuInput)))
    val stOut = Vec(exuParameters.StuCnt, Flipped(ValidIO(new ExuOutput)))
    // Load-store order violation detected by the memory block.
    val memoryViolation = Flipped(ValidIO(new Redirect))
    val enqLsq = Flipped(new LsqEnqIO)
    // PC / jalr target forwarded to the jump unit (read from the FTQ).
    val jumpPc = Output(UInt(VAddrBits.W))
    val jalr_target = Output(UInt(VAddrBits.W))
    val robio = new Bundle {
      // to int block
      val toCSR = new RobCSRIO
      val exception = ValidIO(new ExceptionInfo)
      // to mem block
      val lsq = new RobLsqIO
    }
    val csrCtrl = Input(new CustomCSRCtrlIO)
    val perfInfo = Output(new Bundle{
      val ctrlInfo = new Bundle {
        val robFull   = Input(Bool())
        val intdqFull = Input(Bool())
        val fpdqFull  = Input(Bool())
        val lsdqFull  = Input(Bool())
      }
    })
    val writeback = Vec(NRIntWritePorts + NRFpWritePorts, Flipped(ValidIO(new ExuOutput)))
    // redirect out
    val redirect = ValidIO(new Redirect)
    val flush = Output(Bool())
    val debug_int_rat = Vec(32, Output(UInt(PhyRegIdxWidth.W)))
    val debug_fp_rat = Vec(32, Output(UInt(PhyRegIdxWidth.W)))
  })

  val decode = Module(new DecodeStage)
  val rat = Module(new RenameTableWrapper)
  val rename = Module(new Rename)
  val dispatch = Module(new Dispatch)
  val intDq = Module(new DispatchQueue(dpParams.IntDqSize, RenameWidth, dpParams.IntDqDeqWidth, "int"))
  val fpDq = Module(new DispatchQueue(dpParams.FpDqSize, RenameWidth, dpParams.FpDqDeqWidth, "fp"))
  val lsDq = Module(new DispatchQueue(dpParams.LsDqSize, RenameWidth, dpParams.LsDqDeqWidth, "ls"))
  val redirectGen = Module(new RedirectGenerator)

  // ROB writeback ports: all int/fp writebacks plus the store-unit outputs.
  val robWbSize = NRIntWritePorts + NRFpWritePorts + exuParameters.StuCnt
  val rob = Module(new Rob(robWbSize))

  val stage2Redirect = redirectGen.io.stage2Redirect
  val stage3Redirect = redirectGen.io.stage3Redirect
  val flush = rob.io.flushOut.valid
  // Flush is used one cycle delayed everywhere below.
  val flushReg = RegNext(flush)

  // Register exu mispredict reports for one cycle, dropping any that an
  // older redirect or a flush has already killed.
  val exuRedirect = io.exuRedirect.map(x => {
    val valid = x.valid && x.bits.redirectValid
    val killedByOlder = x.bits.uop.robIdx.needFlush(stage2Redirect, flushReg)
    val delayed = Wire(Valid(new ExuOutput))
    delayed.valid := RegNext(valid && !killedByOlder, init = false.B)
    delayed.bits := RegEnable(x.bits, x.valid)
    delayed
  })
  // Same one-cycle delay/kill treatment for the memory-violation replay.
  val loadReplay = Wire(Valid(new Redirect))
  loadReplay.valid := RegNext(io.memoryViolation.valid &&
    !io.memoryViolation.bits.robIdx.needFlush(stage2Redirect, flushReg),
    init = false.B
  )
  loadReplay.bits := RegEnable(io.memoryViolation.bits, io.memoryViolation.valid)
  io.frontend.fromFtq.getRedirectPcRead <> redirectGen.io.stage1PcRead
  io.frontend.fromFtq.getMemPredPcRead <> redirectGen.io.memPredPcRead
  redirectGen.io.exuMispredict <> exuRedirect
  redirectGen.io.loadReplay <> loadReplay
  redirectGen.io.flush := flushReg

  // Forward ROB commits to the FTQ (walk commits are not real commits).
  for(i <- 0 until CommitWidth){
    io.frontend.toFtq.rob_commits(i).valid := rob.io.commits.valid(i) && !rob.io.commits.isWalk
    io.frontend.toFtq.rob_commits(i).bits := rob.io.commits.info(i)
  }
  io.frontend.toFtq.stage2Redirect <> stage2Redirect
  io.frontend.toFtq.robFlush <> RegNext(rob.io.flushOut)

  // PC of the flushed instruction, read from the FTQ (data valid next cycle).
  val robPcRead = io.frontend.fromFtq.getRobFlushPcRead
  val flushPC = robPcRead(rob.io.flushOut.bits.ftqIdx, rob.io.flushOut.bits.ftqOffset)

  // Build the redirect caused by a ROB flush (exception / xret / replayInst).
  val flushRedirect = Wire(Valid(new Redirect))
  flushRedirect.valid := flushReg
  flushRedirect.bits := DontCare
  flushRedirect.bits.ftqIdx := RegEnable(rob.io.flushOut.bits.ftqIdx, flush)
  flushRedirect.bits.interrupt := true.B
  // Target: trap vector on exception/xret; otherwise replay the instruction
  // itself (replayInst) or resume at the next instruction (flush pipe).
  flushRedirect.bits.cfiUpdate.target := Mux(io.robio.toCSR.isXRet || rob.io.exception.valid,
    io.robio.toCSR.trapTarget,
    Mux(RegEnable(rob.io.flushOut.bits.replayInst, flush),
      flushPC, // replay inst
      flushPC + 4.U // flush pipe
    )
  )
  when (flushRedirect.valid && RegEnable(rob.io.flushOut.bits.replayInst, flush)) {
    XSDebug("replay inst (%x) from rob\n", flushPC);
  }
  val flushRedirectReg = Wire(Valid(new Redirect))
  flushRedirectReg.valid := RegNext(flushRedirect.valid, init = false.B)
  flushRedirectReg.bits := RegEnable(flushRedirect.bits, enable = flushRedirect.valid)

  // ROB flush takes priority over the exu/load-replay stage-3 redirect.
  io.frontend.toFtq.stage3Redirect := Mux(flushRedirectReg.valid, flushRedirectReg, stage3Redirect)

  decode.io.in <> io.frontend.cfVec
  // currently, we only update wait table when isReplay
  decode.io.memPredUpdate(0) <> RegNext(redirectGen.io.memPredUpdate)
  decode.io.memPredUpdate(1) := DontCare
  decode.io.memPredUpdate(1).valid := false.B
  decode.io.csrCtrl := RegNext(io.csrCtrl)

  // Rename table: int reads use 2 sources + dest, fp reads use 3 sources
  // + dest; read ports are held while rename is stalled.
  rat.io.flush := flushReg
  rat.io.robCommits := rob.io.commits
  for ((r, i) <- rat.io.intReadPorts.zipWithIndex) {
    val raddr = decode.io.out(i).bits.ctrl.lsrc.take(2) :+ decode.io.out(i).bits.ctrl.ldest
    r.map(_.addr).zip(raddr).foreach(x => x._1 := x._2)
    rename.io.intReadPorts(i) := r.map(_.data)
    r.foreach(_.hold := !rename.io.in(i).ready)
  }
  rat.io.intRenamePorts := rename.io.intRenamePorts
  for ((r, i) <- rat.io.fpReadPorts.zipWithIndex) {
    val raddr = decode.io.out(i).bits.ctrl.lsrc.take(3) :+ decode.io.out(i).bits.ctrl.ldest
    r.map(_.addr).zip(raddr).foreach(x => x._1 := x._2)
    rename.io.fpReadPorts(i) := r.map(_.data)
    r.foreach(_.hold := !rename.io.in(i).ready)
  }
  rat.io.fpRenamePorts := rename.io.fpRenamePorts
  rat.io.debug_int_rat <> io.debug_int_rat
  rat.io.debug_fp_rat <> io.debug_fp_rat

  // pipeline between decode and rename
  val redirectValid = stage2Redirect.valid || flushReg
  for (i <- 0 until RenameWidth) {
    PipelineConnect(decode.io.out(i), rename.io.in(i), rename.io.in(i).ready,
      flushReg || io.frontend.toFtq.stage3Redirect.valid)
  }

  rename.io.redirect <> stage2Redirect
  rename.io.flush := flushReg
  rename.io.robCommits <> rob.io.commits

  // pipeline between rename and dispatch
  for (i <- 0 until RenameWidth) {
    PipelineConnect(rename.io.out(i), dispatch.io.fromRename(i), dispatch.io.recv(i), redirectValid)
  }
  dispatch.io.renameBypass := RegEnable(rename.io.renameBypass, rename.io.out(0).fire)
  dispatch.io.preDpInfo := RegEnable(rename.io.dispatchInfo, rename.io.out(0).fire)

  dispatch.io.flush <> flushReg
  dispatch.io.redirect <> stage2Redirect
  dispatch.io.enqRob <> rob.io.enq
  dispatch.io.enqLsq <> io.enqLsq
  dispatch.io.toIntDq <> intDq.io.enq
  dispatch.io.toFpDq <> fpDq.io.enq
  dispatch.io.toLsDq <> lsDq.io.enq
  dispatch.io.allocPregs <> io.allocPregs
  dispatch.io.csrCtrl <> io.csrCtrl
  dispatch.io.storeIssue <> io.stIn
  dispatch.io.singleStep := false.B

  intDq.io.redirect <> stage2Redirect
  intDq.io.flush <> flushReg
  fpDq.io.redirect <> stage2Redirect
  fpDq.io.flush <> flushReg
  lsDq.io.redirect <> stage2Redirect
  lsDq.io.flush <> flushReg

  // NOTE: order must match the io.dispatch consumer (int, ls, fp).
  io.dispatch <> intDq.io.deq ++ lsDq.io.deq ++ fpDq.io.deq

  // The jump unit alternates between dispatch slot 0 and slot 2 (when more
  // than 2 ALUs exist) to balance FTQ read-port pressure.
  val pingpong = RegInit(false.B)
  pingpong := !pingpong
  val jumpInst = Mux(pingpong && (exuParameters.AluCnt > 2).B, io.dispatch(2).bits, io.dispatch(0).bits)
  val jumpPcRead = io.frontend.fromFtq.getJumpPcRead
  io.jumpPc := jumpPcRead(jumpInst.cf.ftqPtr, jumpInst.cf.ftqOffset)
  val jumpTargetRead = io.frontend.fromFtq.target_read
  io.jalr_target := jumpTargetRead(jumpInst.cf.ftqPtr, jumpInst.cf.ftqOffset)

  // Writebacks into the ROB, delayed one cycle and filtered by redirects.
  rob.io.redirect <> stage2Redirect
  val exeWbResults = VecInit(io.writeback ++ io.stOut)
  val timer = GTimer()
  for((rob_wb, wb) <- rob.io.exeWbResults.zip(exeWbResults)) {
    rob_wb.valid := RegNext(wb.valid && !wb.bits.uop.robIdx.needFlush(stage2Redirect, flushReg))
    rob_wb.bits := RegNext(wb.bits)
    rob_wb.bits.uop.debugInfo.writebackTime := timer
  }

  io.redirect <> stage2Redirect
  io.flush <> flushReg

  // rob to int block
  io.robio.toCSR <> rob.io.csr
  io.robio.toCSR.perfinfo.retiredInstr <> RegNext(rob.io.csr.perfinfo.retiredInstr)
  io.robio.exception := rob.io.exception
  io.robio.exception.bits.uop.cf.pc := flushPC

  // rob to mem block
  io.robio.lsq <> rob.io.lsq

  io.perfInfo.ctrlInfo.robFull := RegNext(rob.io.robFull)
  io.perfInfo.ctrlInfo.intdqFull := RegNext(intDq.io.dqFull)
  io.perfInfo.ctrlInfo.fpdqFull := RegNext(fpDq.io.dqFull)
  io.perfInfo.ctrlInfo.lsdqFull := RegNext(lsDq.io.dqFull)
}