// xref: /XiangShan/src/main/scala/xiangshan/frontend/Frontend.scala (revision aeedc8ee24c606b62f87b4a2382c7af1cca1fcd7)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend
import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import utils._
import utility._
import xiangshan._
import xiangshan.backend.fu.{PFEvent, PMP, PMPChecker, PMPReqBundle}
import xiangshan.cache.mmu._
import xiangshan.frontend.icache._

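// Frontend is a LazyModule so that the diplomatic nodes of its InstrUncache and
// ICache children can be negotiated before elaboration; the actual frontend logic
// lives in FrontendImp below.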
class Frontend()(implicit p: Parameters) extends LazyModule with HasXSParameter {
  override def shouldBeInlined: Boolean = false

  val instrUncache  = LazyModule(new InstrUncache())
  val icache        = LazyModule(new ICache())

  lazy val module = new FrontendImp(this)
}

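// FrontendImp wires up the decoupled frontend pipeline:
//   BPU -> FTQ -> IFU + ICache -> IBuffer -> backend decode,
// plus the ITLB/PMP checks and the instruction uncache path for MMIO fetch.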
class FrontendImp (outer: Frontend) extends LazyModuleImp(outer)
  with HasXSParameter
  with HasPerfEvents
{
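  // Top-level IO: control/CSR state from the backend (sfence, tlbCsr, csrCtrl, fencei),
  // a PTW port towards the L2 TLB, software prefetch hints from the load units,
  // the fetch interface to decode (io.backend), and error/perf/debug sideband signals.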
  val io = IO(new Bundle() {
    val hartId = Input(UInt(hartIdLen.W))
    val reset_vector = Input(UInt(PAddrBits.W))
    val fencei = Input(Bool())
    val ptw = new TlbPtwIO()
    val backend = new FrontendToCtrlIO
    val softPrefetch = Vec(backendParams.LduCnt, Flipped(Valid(new SoftIfetchPrefetchBundle)))
    val sfence = Input(new SfenceBundle)
    val tlbCsr = Input(new TlbCsrBundle)
    val csrCtrl = Input(new CustomCSRCtrlIO)
    val error = ValidIO(new L1CacheErrorInfo)
    val frontendInfo = new Bundle {
      val ibufFull = Output(Bool())
      val bpuInfo = new Bundle {
        val bpRight = Output(UInt(XLEN.W))
        val bpWrong = Output(UInt(XLEN.W))
      }
    }
    val debugTopDown = new Bundle {
      val robHeadVaddr = Flipped(Valid(UInt(VAddrBits.W)))
    }
  })

  // decoupled-frontend modules
  val instrUncache = outer.instrUncache.module
  val icache       = outer.icache.module
  val bpu     = Module(new Predictor)
  val ifu     = Module(new NewIFU)
  val ibuffer = Module(new IBuffer)
  val ftq = Module(new Ftq)

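  // Redirect / flush bookkeeping: the backend redirect is registered for one cycle
  // before it flushes the frontend; the per-predictor miss flags below are driven by
  // the FTQ further down and feed the IBuffer's top-down bubble accounting.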
  val needFlush = RegNext(io.backend.toFtq.redirect.valid)
  val FlushControlRedirect = RegNext(io.backend.toFtq.redirect.bits.debugIsCtrl)
  val FlushMemVioRedirect = RegNext(io.backend.toFtq.redirect.bits.debugIsMemVio)
  val FlushControlBTBMiss = Wire(Bool())
  val FlushTAGEMiss = Wire(Bool())
  val FlushSCMiss = Wire(Bool())
  val FlushITTAGEMiss = Wire(Bool())
  val FlushRASMiss = Wire(Bool())

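  // CSR / TLB control and sfence are slow-changing control signals; they are delayed
  // by two register stages on the way in, presumably to relax the long paths from the backend.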
  val tlbCsr = DelayN(io.tlbCsr, 2)
  val csrCtrl = DelayN(io.csrCtrl, 2)
  val sfence = RegNext(RegNext(io.sfence))

  // trigger
  ifu.io.frontendTrigger := csrCtrl.frontend_trigger

  // bpu ctrl
  bpu.io.ctrl := csrCtrl.bp_ctrl
  bpu.io.reset_vector := RegEnable(io.reset_vector, reset.asBool)

  // pmp
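  // One same-cycle PMP/PMA checker per frontend physical-access port: ports
  // 0 until 2 * PortNumber serve the ICache's PMP requests, and the last port
  // serves the IFU's uncached/MMIO fetch path.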
  val PortNumber = ICacheParameters().PortNumber
  val pmp = Module(new PMP())
  val pmp_check = VecInit(Seq.fill(coreParams.ipmpPortNum)(Module(new PMPChecker(3, sameCycle = true)).io))
  pmp.io.distribute_csr := csrCtrl.distribute_csr
  val pmp_req_vec = Wire(Vec(coreParams.ipmpPortNum, Valid(new PMPReqBundle())))
  (0 until 2 * PortNumber).foreach(i => pmp_req_vec(i) <> icache.io.pmp(i).req)
  pmp_req_vec.last <> ifu.io.pmp.req

  for (i <- pmp_check.indices) {
    pmp_check(i).apply(tlbCsr.priv.imode, pmp.io.pmp, pmp.io.pma, pmp_req_vec(i))
  }
  (0 until 2 * PortNumber).foreach(i => icache.io.pmp(i).resp <> pmp_check(i).resp)
  ifu.io.pmp.resp <> pmp_check.last.resp

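  // ITLB: the first PortNumber requestors belong to the ICache; the last (blocking)
  // port belongs to the IFU, whose MMIO fetches may need re-translation. TLB misses
  // reach the shared PTW through a filter stage and a non-blocking repeater stage.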
  val itlb = Module(new TLB(coreParams.itlbPortNum, nRespDups = 1,
    Seq.fill(PortNumber)(false) ++ Seq(true), itlbParams))
  itlb.io.requestor.take(PortNumber) zip icache.io.itlb foreach { case (a, b) => a <> b }
  itlb.io.requestor.last <> ifu.io.iTLBInter // mmio may need re-tlb, blocked
  itlb.io.hartId := io.hartId
  itlb.io.base_connect(sfence, tlbCsr)
  itlb.io.flushPipe.map(_ := needFlush)
  itlb.io.redirect := DontCare // itlb has flushPipe, so it does not need the redirect signal

  val itlb_ptw = Wire(new VectorTlbPtwIO(coreParams.itlbPortNum))
  itlb_ptw.connect(itlb.io.ptw)
  val itlbRepeater1 = PTWFilter(itlbParams.fenceDelay, itlb_ptw, sfence, tlbCsr, l2tlbParams.ifilterSize)
  val itlbRepeater2 = PTWRepeaterNB(passReady = false, itlbParams.fenceDelay, itlbRepeater1.io.ptw, io.ptw, sfence, tlbCsr)

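  // Instruction prefetch requests reach the ICache from two sources: fetch-directed
  // prefetch generated by the FTQ and software prefetch hints forwarded from the
  // backend's load units (io.softPrefetch).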
  icache.io.ftqPrefetch <> ftq.io.toPrefetch
  icache.io.softPrefetch <> io.softPrefetch

  // IFU-Ftq
  ifu.io.ftqInter.fromFtq <> ftq.io.toIfu
  ftq.io.toIfu.req.ready := ifu.io.ftqInter.fromFtq.req.ready && icache.io.fetch.req.ready

  ftq.io.fromIfu          <> ifu.io.ftqInter.toFtq
  bpu.io.ftq_to_bpu       <> ftq.io.toBpu
  ftq.io.fromBpu          <> bpu.io.bpu_to_ftq

  ftq.io.mmioCommitRead   <> ifu.io.mmioCommitRead

  // IFU-ICache
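  // The FTQ issues each fetch request to the IFU and the ICache in parallel; the ready
  // seen by the FTQ is the AND of both consumers, so a request is only handed out when
  // both can accept it in the same cycle.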
  icache.io.fetch.req <> ftq.io.toICache.req
  ftq.io.toICache.req.ready := ifu.io.ftqInter.fromFtq.req.ready && icache.io.fetch.req.ready

  ifu.io.icacheInter.resp <> icache.io.fetch.resp
  ifu.io.icacheInter.icacheReady := icache.io.toIFU
  ifu.io.icacheInter.topdownIcacheMiss := icache.io.fetch.topdownIcacheMiss
  ifu.io.icacheInter.topdownItlbMiss := icache.io.fetch.topdownItlbMiss
  icache.io.stop := ifu.io.icacheStop
  icache.io.flush := ftq.io.icacheFlush

  ifu.io.icachePerfInfo := icache.io.perfInfo

  icache.io.csr_pf_enable     := RegNext(csrCtrl.l1I_pf_enable)
  icache.io.csr_parity_enable := RegNext(csrCtrl.icache_parity_enable)

  icache.io.fencei := RegNext(io.fencei)

  // IFU-Ibuffer
  ifu.io.toIbuffer <> ibuffer.io.in

  ftq.io.fromBackend <> io.backend.toFtq
  io.backend.fromFtq <> ftq.io.toBackend
  io.backend.fromIfu <> ifu.io.toBackend
  io.frontendInfo.bpuInfo <> ftq.io.bpuInfo

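  // Simulation-only consistency checks: checkPcMem shadows the FTQ's PC-memory writes
  // so that the ftqPtr / PC of every instruction leaving the IBuffer can be cross-checked
  // (via XSError assertions) against the FTQ's recorded fetch targets below.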
  val checkPcMem = Reg(Vec(FtqSize, new Ftq_RF_Components))
  when (ftq.io.toBackend.pc_mem_wen) {
    checkPcMem(ftq.io.toBackend.pc_mem_waddr) := ftq.io.toBackend.pc_mem_wdata
  }

  val checkTargetIdx = Wire(Vec(DecodeWidth, UInt(log2Up(FtqSize).W)))
  val checkTarget = Wire(Vec(DecodeWidth, UInt(VAddrBits.W)))

  for (i <- 0 until DecodeWidth) {
    checkTargetIdx(i) := ibuffer.io.out(i).bits.ftqPtr.value
    checkTarget(i) := Mux(ftq.io.toBackend.newest_entry_ptr.value === checkTargetIdx(i),
                        ftq.io.toBackend.newest_entry_target,
                        checkPcMem(checkTargetIdx(i) + 1.U).startAddr)
  }

  // IFU-backend
  io.backend.illBuf := ifu.io.illBuf

  // left disabled below: this br could be the last instruction in the fetch block,
  // in which case the next instruction legally comes from the next FTQ entry
  def checkNotTakenConsecutive = {
    val prevNotTakenValid = RegInit(0.B)
    val prevNotTakenFtqIdx = Reg(UInt(log2Up(FtqSize).W))
    for (i <- 0 until DecodeWidth - 1) {
      // for instrs that are not the last: if a not-taken br, the next instr should have the same ftqPtr
      // for the last instr: record it and check against the next request
      when (ibuffer.io.out(i).fire && ibuffer.io.out(i).bits.pd.isBr) {
        when (ibuffer.io.out(i+1).fire) {
          // not the last br, check now
          XSError(checkTargetIdx(i) =/= checkTargetIdx(i+1), "not-taken br should have same ftqPtr\n")
        } .otherwise {
          // last br, record its info
          prevNotTakenValid := true.B
          prevNotTakenFtqIdx := checkTargetIdx(i)
        }
      }
    }
    when (ibuffer.io.out(DecodeWidth - 1).fire && ibuffer.io.out(DecodeWidth - 1).bits.pd.isBr) {
      // last instr is a br, record its info
      prevNotTakenValid := true.B
      prevNotTakenFtqIdx := checkTargetIdx(DecodeWidth - 1)
    }
    when (prevNotTakenValid && ibuffer.io.out(0).fire) {
      XSError(prevNotTakenFtqIdx =/= checkTargetIdx(0), "not-taken br should have same ftqPtr\n")
      prevNotTakenValid := false.B
    }
    when (needFlush) {
      prevNotTakenValid := false.B
    }
  }

212
213  def checkTakenNotConsecutive = {
214    val prevTakenValid = RegInit(0.B)
215    val prevTakenFtqIdx = Reg(UInt(log2Up(FtqSize).W))
216    for (i <- 0 until DecodeWidth - 1) {
217      // for instrs that is not the last, if a taken br, the next instr should not have the same ftqPtr
218      // for instrs that is the last, record and check next request
219      when (ibuffer.io.out(i).fire && ibuffer.io.out(i).bits.pd.isBr && ibuffer.io.out(i).bits.pred_taken) {
220        when (ibuffer.io.out(i+1).fire) {
221          // not last br, check now
222          XSError(checkTargetIdx(i) + 1.U =/= checkTargetIdx(i+1), "taken br should have consecutive ftqPtr\n")
223        } .otherwise {
224          // last br, record its info
225          prevTakenValid := true.B
226          prevTakenFtqIdx := checkTargetIdx(i)
227        }
228      }
229    }
230    when (ibuffer.io.out(DecodeWidth - 1).fire && ibuffer.io.out(DecodeWidth - 1).bits.pd.isBr && ibuffer.io.out(DecodeWidth - 1).bits.pred_taken) {
231      // last instr is a br, record its info
232      prevTakenValid := true.B
233      prevTakenFtqIdx := checkTargetIdx(DecodeWidth - 1)
234    }
235    when (prevTakenValid && ibuffer.io.out(0).fire) {
236      XSError(prevTakenFtqIdx + 1.U =/= checkTargetIdx(0), "taken br should have consecutive ftqPtr\n")
237      prevTakenValid := false.B
238    }
239    when (needFlush) {
240      prevTakenValid := false.B
241    }
242  }
243
244  def checkNotTakenPC = {
245    val prevNotTakenPC = Reg(UInt(VAddrBits.W))
246    val prevIsRVC = Reg(Bool())
247    val prevNotTakenValid = RegInit(0.B)
248
249    for (i <- 0 until DecodeWidth - 1) {
250      when (ibuffer.io.out(i).fire && ibuffer.io.out(i).bits.pd.isBr && !ibuffer.io.out(i).bits.pred_taken) {
251        when (ibuffer.io.out(i+1).fire) {
252          XSError(ibuffer.io.out(i).bits.pc + Mux(ibuffer.io.out(i).bits.pd.isRVC, 2.U, 4.U) =/= ibuffer.io.out(i+1).bits.pc, "not-taken br should have consecutive pc\n")
253        } .otherwise {
254          prevNotTakenValid := true.B
255          prevIsRVC := ibuffer.io.out(i).bits.pd.isRVC
256          prevNotTakenPC := ibuffer.io.out(i).bits.pc
257        }
258      }
259    }
260    when (ibuffer.io.out(DecodeWidth - 1).fire && ibuffer.io.out(DecodeWidth - 1).bits.pd.isBr && !ibuffer.io.out(DecodeWidth - 1).bits.pred_taken) {
261      prevNotTakenValid := true.B
262      prevIsRVC := ibuffer.io.out(DecodeWidth - 1).bits.pd.isRVC
263      prevNotTakenPC := ibuffer.io.out(DecodeWidth - 1).bits.pc
264    }
265    when (prevNotTakenValid && ibuffer.io.out(0).fire) {
266      XSError(prevNotTakenPC + Mux(prevIsRVC, 2.U, 4.U) =/= ibuffer.io.out(0).bits.pc, "not-taken br should have same pc\n")
267      prevNotTakenValid := false.B
268    }
269    when (needFlush) {
270      prevNotTakenValid := false.B
271    }
272  }
273
274  def checkTakenPC = {
275    val prevTakenFtqIdx = Reg(UInt(log2Up(FtqSize).W))
276    val prevTakenValid = RegInit(0.B)
277    val prevTakenTarget = Wire(UInt(VAddrBits.W))
278    prevTakenTarget := checkPcMem(prevTakenFtqIdx + 1.U).startAddr
279
280    for (i <- 0 until DecodeWidth - 1) {
281      when (ibuffer.io.out(i).fire && !ibuffer.io.out(i).bits.pd.notCFI && ibuffer.io.out(i).bits.pred_taken) {
282        when (ibuffer.io.out(i+1).fire) {
283          XSError(checkTarget(i) =/= ibuffer.io.out(i+1).bits.pc, "taken instr should follow target pc\n")
284        } .otherwise {
285          prevTakenValid := true.B
286          prevTakenFtqIdx := checkTargetIdx(i)
287        }
288      }
289    }
290    when (ibuffer.io.out(DecodeWidth - 1).fire && !ibuffer.io.out(DecodeWidth - 1).bits.pd.notCFI && ibuffer.io.out(DecodeWidth - 1).bits.pred_taken) {
291      prevTakenValid := true.B
292      prevTakenFtqIdx := checkTargetIdx(DecodeWidth - 1)
293    }
294    when (prevTakenValid && ibuffer.io.out(0).fire) {
295      XSError(prevTakenTarget =/= ibuffer.io.out(0).bits.pc, "taken instr should follow target pc\n")
296      prevTakenValid := false.B
297    }
298    when (needFlush) {
299      prevTakenValid := false.B
300    }
301  }
302
  // checkNotTakenConsecutive
  checkTakenNotConsecutive
  checkTakenPC
  checkNotTakenPC

  ifu.io.rob_commits <> io.backend.toFtq.rob_commits

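  // IBuffer flush and top-down bubble attribution: the registered flush causes computed
  // above tell the IBuffer why a flush happened, so empty decode slots can be attributed
  // to the right frontend event in the top-down counters.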
  ibuffer.io.flush := needFlush
  ibuffer.io.ControlRedirect := FlushControlRedirect
  ibuffer.io.MemVioRedirect := FlushMemVioRedirect
  ibuffer.io.ControlBTBMissBubble := FlushControlBTBMiss
  ibuffer.io.TAGEMissBubble := FlushTAGEMiss
  ibuffer.io.SCMissBubble := FlushSCMiss
  ibuffer.io.ITTAGEMissBubble := FlushITTAGEMiss
  ibuffer.io.RASMissBubble := FlushRASMiss
  ibuffer.io.decodeCanAccept := io.backend.canAccept

  FlushControlBTBMiss := ftq.io.ControlBTBMissBubble
  FlushTAGEMiss := ftq.io.TAGEMissBubble
  FlushSCMiss := ftq.io.SCMissBubble
  FlushITTAGEMiss := ftq.io.ITTAGEMissBubble
  FlushRASMiss := ftq.io.RASMissBubble

  io.backend.cfVec <> ibuffer.io.out
  io.backend.stallReason <> ibuffer.io.stallReason

  instrUncache.io.req <> ifu.io.uncacheInter.toUncache
  ifu.io.uncacheInter.fromUncache <> instrUncache.io.resp
  instrUncache.io.flush := false.B
  io.error <> RegNext(RegNext(icache.io.error))

  icache.io.hartId := io.hartId

  itlbRepeater1.io.debugTopDown.robHeadVaddr := io.debugTopDown.robHeadVaddr

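  // frontendBubble counts decode slots that the backend is ready to accept but the
  // IBuffer cannot fill, i.e. cycles of fetch-side starvation.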
  val frontendBubble = PopCount((0 until DecodeWidth).map(i => io.backend.cfVec(i).ready && !ibuffer.io.out(i).valid))
  XSPerfAccumulate("FrontendBubble", frontendBubble)
  io.frontendInfo.ibufFull := RegNext(ibuffer.io.full)

  // PFEvent
  val pfevent = Module(new PFEvent)
  pfevent.io.distribute_csr := io.csrCtrl.distribute_csr
  val csrevents = pfevent.io.hpmevent.take(8)

  val perfFromUnits = Seq(ifu, ibuffer, icache, ftq, bpu).flatMap(_.getPerfEvents)
  val perfFromIO    = Seq()
  val perfBlock     = Seq()
  // let index = 0 be no event
  val allPerfEvents = Seq(("noEvent", 0.U)) ++ perfFromUnits ++ perfFromIO ++ perfBlock

  if (printEventCoding) {
    for (((name, inc), i) <- allPerfEvents.zipWithIndex) {
      println("Frontend perfEvents Set", name, inc, i)
    }
  }

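  // The 8 CSR-programmed hpmevent selectors choose which of the named frontend events
  // HPerfMonitor accumulates; the result is exported through HasPerfEvents to the
  // performance counters.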
  val allPerfInc = allPerfEvents.map(_._2.asTypeOf(new PerfEvent))
  override val perfEvents = HPerfMonitor(csrevents, allPerfInc).getPerfEvents
  generatePerfEvent()
}