// xref: /XiangShan/src/main/scala/xiangshan/frontend/Frontend.scala (revision 71b6c42e3aeca685ce6901f71e45118144cfbd1d)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend
import chisel3._
import chisel3.util._
import freechips.rocketchip.diplomacy.LazyModule
import freechips.rocketchip.diplomacy.LazyModuleImp
import org.chipsalliance.cde.config.Parameters
import utility._
import xiangshan._
import xiangshan.backend.fu.PFEvent
import xiangshan.backend.fu.PMP
import xiangshan.backend.fu.PMPChecker
import xiangshan.backend.fu.PMPReqBundle
import xiangshan.cache.mmu._
import xiangshan.frontend.icache._

class Frontend()(implicit p: Parameters) extends LazyModule with HasXSParameter {
  override def shouldBeInlined: Boolean = false
  val inner       = LazyModule(new FrontendInlined)
  lazy val module = new FrontendImp(this)
}

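// FrontendImp is a thin wrapper around FrontendInlined's module: it clones and forwards the inner
// IO and performance-event ports, and optionally builds a dedicated reset tree when ResetGen is enabled.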
class FrontendImp(wrapper: Frontend)(implicit p: Parameters) extends LazyModuleImp(wrapper) {
  val io      = IO(wrapper.inner.module.io.cloneType)
  val io_perf = IO(wrapper.inner.module.io_perf.cloneType)
  io <> wrapper.inner.module.io
  io_perf <> wrapper.inner.module.io_perf
  if (p(DebugOptionsKey).ResetGen) {
    ResetGen(ResetGenNode(Seq(ModuleNode(wrapper.inner.module))), reset, sim = false)
  }
}

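// FrontendInlined instantiates the frontend's LazyModules: the instruction uncache (MMIO fetch) and the ICache.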
class FrontendInlined()(implicit p: Parameters) extends LazyModule with HasXSParameter {
  override def shouldBeInlined: Boolean = true

  val instrUncache = LazyModule(new InstrUncache())
  val icache       = LazyModule(new ICache())

  lazy val module = new FrontendInlinedImp(this)
}

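// FrontendInlinedImp is the frontend implementation proper: it instantiates and wires up the branch
// predictor (BPU), fetch target queue (FTQ), instruction fetch unit (IFU), ICache, instruction buffer,
// ITLB and PMP checkers, and connects the whole fetch pipeline to the backend.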
class FrontendInlinedImp(outer: FrontendInlined) extends LazyModuleImp(outer)
    with HasXSParameter
    with HasPerfEvents {
  val io = IO(new Bundle() {
    val hartId       = Input(UInt(hartIdLen.W))
    val reset_vector = Input(UInt(PAddrBits.W))
    val fencei       = Input(Bool())
    val ptw          = new TlbPtwIO()
    val backend      = new FrontendToCtrlIO
    val softPrefetch = Vec(backendParams.LduCnt, Flipped(Valid(new SoftIfetchPrefetchBundle)))
    val sfence       = Input(new SfenceBundle)
    val tlbCsr       = Input(new TlbCsrBundle)
    val csrCtrl      = Input(new CustomCSRCtrlIO)
    val error        = ValidIO(new L1CacheErrorInfo)
    val frontendInfo = new Bundle {
      val ibufFull = Output(Bool())
      val bpuInfo = new Bundle {
        val bpRight = Output(UInt(XLEN.W))
        val bpWrong = Output(UInt(XLEN.W))
      }
    }
    val resetInFrontend = Output(Bool())
    val debugTopDown = new Bundle {
      val robHeadVaddr = Flipped(Valid(UInt(VAddrBits.W)))
    }
  })

  // decoupled-frontend modules
  val instrUncache = outer.instrUncache.module
  val icache       = outer.icache.module
  val bpu          = Module(new Predictor)
  val ifu          = Module(new NewIFU)
  val ibuffer      = Module(new IBuffer)
  val ftq          = Module(new Ftq)

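  // Redirect bookkeeping: register the backend redirect by one cycle and keep per-cause flush flags
  // that are later forwarded to the IBuffer for top-down bubble accounting.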
  val needFlush            = RegNext(io.backend.toFtq.redirect.valid)
  val FlushControlRedirect = RegNext(io.backend.toFtq.redirect.bits.debugIsCtrl)
  val FlushMemVioRedirect  = RegNext(io.backend.toFtq.redirect.bits.debugIsMemVio)
  val FlushControlBTBMiss  = Wire(Bool())
  val FlushTAGEMiss        = Wire(Bool())
  val FlushSCMiss          = Wire(Bool())
  val FlushITTAGEMiss      = Wire(Bool())
  val FlushRASMiss         = Wire(Bool())

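  // CSR and sfence control inputs are registered for two cycles before being used inside the frontend.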
  val tlbCsr  = DelayN(io.tlbCsr, 2)
  val csrCtrl = DelayN(io.csrCtrl, 2)
  val sfence  = RegNext(RegNext(io.sfence))

  // trigger
  ifu.io.frontendTrigger := csrCtrl.frontend_trigger

  // RVCDecoder fsIsOff
  ifu.io.csr_fsIsOff := csrCtrl.fsIsOff

  // bpu ctrl
  bpu.io.ctrl         := csrCtrl.bp_ctrl
  bpu.io.reset_vector := io.reset_vector

  // pmp
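  // One PMP/PMA checker per ICache port (2 * PortNumber) plus one for the IFU: the last entry of
  // pmp_req_vec and the last checker response are wired to the IFU.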
  val PortNumber = ICacheParameters().PortNumber
  val pmp        = Module(new PMP())
  val pmp_check  = VecInit(Seq.fill(coreParams.ipmpPortNum)(Module(new PMPChecker(3, sameCycle = true)).io))
  pmp.io.distribute_csr := csrCtrl.distribute_csr
  val pmp_req_vec = Wire(Vec(coreParams.ipmpPortNum, Valid(new PMPReqBundle())))
  (0 until 2 * PortNumber).foreach(i => pmp_req_vec(i) <> icache.io.pmp(i).req)
  pmp_req_vec.last <> ifu.io.pmp.req

  for (i <- pmp_check.indices) {
    pmp_check(i).apply(tlbCsr.priv.imode, pmp.io.pmp, pmp.io.pma, pmp_req_vec(i))
  }
  (0 until 2 * PortNumber).foreach(i => icache.io.pmp(i).resp <> pmp_check(i).resp)
  ifu.io.pmp.resp <> pmp_check.last.resp

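  // ITLB: ICache fetch ports plus a blocking last port for the IFU (MMIO may need re-translation);
  // misses are forwarded to the shared PTW through a PTWFilter and a PTWRepeaterNB stage.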
  val itlb =
    Module(new TLB(coreParams.itlbPortNum, nRespDups = 1, Seq.fill(PortNumber)(false) ++ Seq(true), itlbParams))
  itlb.io.requestor.take(PortNumber) zip icache.io.itlb foreach { case (a, b) => a <> b }
  itlb.io.requestor.last <> ifu.io.iTLBInter // mmio may need re-tlb, blocked
  itlb.io.hartId := io.hartId
  itlb.io.base_connect(sfence, tlbCsr)
  itlb.io.flushPipe.map(_ := needFlush)
  itlb.io.redirect := DontCare // itlb has flushpipe, don't need redirect signal

  val itlb_ptw = Wire(new VectorTlbPtwIO(coreParams.itlbPortNum))
  itlb_ptw.connect(itlb.io.ptw)
  val itlbRepeater1 = PTWFilter(itlbParams.fenceDelay, itlb_ptw, sfence, tlbCsr, l2tlbParams.ifilterSize)
  val itlbRepeater2 =
    PTWRepeaterNB(passReady = false, itlbParams.fenceDelay, itlbRepeater1.io.ptw, io.ptw, sfence, tlbCsr)

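  // Instruction prefetch requests: FTQ-directed prefetch and software prefetch hints from the load units
  // are both sent to the ICache.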
  icache.io.ftqPrefetch <> ftq.io.toPrefetch
  icache.io.softPrefetch <> io.softPrefetch

  // IFU-Ftq
  ifu.io.ftqInter.fromFtq <> ftq.io.toIfu
  ftq.io.toIfu.req.ready := ifu.io.ftqInter.fromFtq.req.ready && icache.io.fetch.req.ready
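  // An FTQ fetch request is accepted only when both the IFU and the ICache are ready, so the two always
  // receive the same request stream (the ICache request port is gated the same way below).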

  ftq.io.fromIfu <> ifu.io.ftqInter.toFtq
  bpu.io.ftq_to_bpu <> ftq.io.toBpu
  ftq.io.fromBpu <> bpu.io.bpu_to_ftq

  ftq.io.mmioCommitRead <> ifu.io.mmioCommitRead

  // IFU-ICache
  icache.io.fetch.req <> ftq.io.toICache.req
  ftq.io.toICache.req.ready := ifu.io.ftqInter.fromFtq.req.ready && icache.io.fetch.req.ready

  ifu.io.icacheInter.resp <> icache.io.fetch.resp
  ifu.io.icacheInter.icacheReady       := icache.io.toIFU
  ifu.io.icacheInter.topdownIcacheMiss := icache.io.fetch.topdownIcacheMiss
  ifu.io.icacheInter.topdownItlbMiss   := icache.io.fetch.topdownItlbMiss
  icache.io.stop                       := ifu.io.icacheStop
  icache.io.flush                      := ftq.io.icacheFlush

  ifu.io.icachePerfInfo := icache.io.perfInfo

  icache.io.csr_pf_enable     := RegNext(csrCtrl.l1I_pf_enable)
  icache.io.csr_parity_enable := RegNext(csrCtrl.icache_parity_enable)

  icache.io.fencei := RegNext(io.fencei)

  // IFU-Ibuffer
  ifu.io.toIbuffer <> ibuffer.io.in

  ftq.io.fromBackend <> io.backend.toFtq
  io.backend.fromFtq := ftq.io.toBackend
  io.backend.fromIfu := ifu.io.toBackend
  io.frontendInfo.bpuInfo <> ftq.io.bpuInfo

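  // Debug checks: mirror the FTQ's PC memory and cross-check the ftqPtr, PC and predicted target of
  // instructions leaving the IBuffer; violations are reported through XSError.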
  val checkPcMem = Reg(Vec(FtqSize, new Ftq_RF_Components))
  when(ftq.io.toBackend.pc_mem_wen) {
    checkPcMem(ftq.io.toBackend.pc_mem_waddr) := ftq.io.toBackend.pc_mem_wdata
  }

  val checkTargetIdx = Wire(Vec(DecodeWidth, UInt(log2Up(FtqSize).W)))
  val checkTarget    = Wire(Vec(DecodeWidth, UInt(VAddrBits.W)))

  for (i <- 0 until DecodeWidth) {
    checkTargetIdx(i) := ibuffer.io.out(i).bits.ftqPtr.value
    checkTarget(i) := Mux(
      ftq.io.toBackend.newest_entry_ptr.value === checkTargetIdx(i),
      ftq.io.toBackend.newest_entry_target,
      checkPcMem(checkTargetIdx(i) + 1.U).startAddr
    )
  }

  // its call below is commented out because the not-taken br could be the last instruction in the fetch block
  def checkNotTakenConsecutive = {
    val prevNotTakenValid  = RegInit(0.B)
    val prevNotTakenFtqIdx = Reg(UInt(log2Up(FtqSize).W))
    for (i <- 0 until DecodeWidth - 1) {
      // for instrs that are not the last, if it is a not-taken br, the next instr should have the same ftqPtr
      // for the last instr, record it and check against the next request
      when(ibuffer.io.out(i).fire && ibuffer.io.out(i).bits.pd.isBr) {
        when(ibuffer.io.out(i + 1).fire) {
          // not last br, check now
          XSError(checkTargetIdx(i) =/= checkTargetIdx(i + 1), "not-taken br should have same ftqPtr\n")
        }.otherwise {
          // last br, record its info
          prevNotTakenValid  := true.B
          prevNotTakenFtqIdx := checkTargetIdx(i)
        }
      }
    }
    when(ibuffer.io.out(DecodeWidth - 1).fire && ibuffer.io.out(DecodeWidth - 1).bits.pd.isBr) {
      // last instr is a br, record its info
      prevNotTakenValid  := true.B
      prevNotTakenFtqIdx := checkTargetIdx(DecodeWidth - 1)
    }
    when(prevNotTakenValid && ibuffer.io.out(0).fire) {
      XSError(prevNotTakenFtqIdx =/= checkTargetIdx(0), "not-taken br should have same ftqPtr\n")
      prevNotTakenValid := false.B
    }
    when(needFlush) {
      prevNotTakenValid := false.B
    }
  }

  def checkTakenNotConsecutive = {
    val prevTakenValid  = RegInit(0.B)
    val prevTakenFtqIdx = Reg(UInt(log2Up(FtqSize).W))
    for (i <- 0 until DecodeWidth - 1) {
      // for instrs that are not the last, if it is a taken br, the next instr should have the next (consecutive) ftqPtr
      // for the last instr, record it and check against the next request
      when(ibuffer.io.out(i).fire && ibuffer.io.out(i).bits.pd.isBr && ibuffer.io.out(i).bits.pred_taken) {
        when(ibuffer.io.out(i + 1).fire) {
          // not last br, check now
          XSError(checkTargetIdx(i) + 1.U =/= checkTargetIdx(i + 1), "taken br should have consecutive ftqPtr\n")
        }.otherwise {
          // last br, record its info
          prevTakenValid  := true.B
          prevTakenFtqIdx := checkTargetIdx(i)
        }
      }
    }
    when(ibuffer.io.out(DecodeWidth - 1).fire && ibuffer.io.out(DecodeWidth - 1).bits.pd.isBr && ibuffer.io.out(
      DecodeWidth - 1
    ).bits.pred_taken) {
      // last instr is a br, record its info
      prevTakenValid  := true.B
      prevTakenFtqIdx := checkTargetIdx(DecodeWidth - 1)
    }
    when(prevTakenValid && ibuffer.io.out(0).fire) {
      XSError(prevTakenFtqIdx + 1.U =/= checkTargetIdx(0), "taken br should have consecutive ftqPtr\n")
      prevTakenValid := false.B
    }
    when(needFlush) {
      prevTakenValid := false.B
    }
  }

  def checkNotTakenPC = {
    val prevNotTakenPC    = Reg(UInt(VAddrBits.W))
    val prevIsRVC         = Reg(Bool())
    val prevNotTakenValid = RegInit(0.B)

    for (i <- 0 until DecodeWidth - 1) {
      when(ibuffer.io.out(i).fire && ibuffer.io.out(i).bits.pd.isBr && !ibuffer.io.out(i).bits.pred_taken) {
        when(ibuffer.io.out(i + 1).fire) {
          XSError(
            ibuffer.io.out(i).bits.pc + Mux(ibuffer.io.out(i).bits.pd.isRVC, 2.U, 4.U) =/= ibuffer.io.out(
              i + 1
            ).bits.pc,
            "not-taken br should have consecutive pc\n"
          )
        }.otherwise {
          prevNotTakenValid := true.B
          prevIsRVC         := ibuffer.io.out(i).bits.pd.isRVC
          prevNotTakenPC    := ibuffer.io.out(i).bits.pc
        }
      }
    }
    when(ibuffer.io.out(DecodeWidth - 1).fire && ibuffer.io.out(DecodeWidth - 1).bits.pd.isBr && !ibuffer.io.out(
      DecodeWidth - 1
    ).bits.pred_taken) {
      prevNotTakenValid := true.B
      prevIsRVC         := ibuffer.io.out(DecodeWidth - 1).bits.pd.isRVC
      prevNotTakenPC    := ibuffer.io.out(DecodeWidth - 1).bits.pc
    }
    when(prevNotTakenValid && ibuffer.io.out(0).fire) {
      XSError(
        prevNotTakenPC + Mux(prevIsRVC, 2.U, 4.U) =/= ibuffer.io.out(0).bits.pc,
        "not-taken br should have consecutive pc\n"
      )
      prevNotTakenValid := false.B
    }
    when(needFlush) {
      prevNotTakenValid := false.B
    }
  }

  def checkTakenPC = {
    val prevTakenFtqIdx = Reg(UInt(log2Up(FtqSize).W))
    val prevTakenValid  = RegInit(0.B)
    val prevTakenTarget = Wire(UInt(VAddrBits.W))
    prevTakenTarget := checkPcMem(prevTakenFtqIdx + 1.U).startAddr

    for (i <- 0 until DecodeWidth - 1) {
      when(ibuffer.io.out(i).fire && !ibuffer.io.out(i).bits.pd.notCFI && ibuffer.io.out(i).bits.pred_taken) {
        when(ibuffer.io.out(i + 1).fire) {
          XSError(checkTarget(i) =/= ibuffer.io.out(i + 1).bits.pc, "taken instr should follow target pc\n")
        }.otherwise {
          prevTakenValid  := true.B
          prevTakenFtqIdx := checkTargetIdx(i)
        }
      }
    }
    when(ibuffer.io.out(DecodeWidth - 1).fire && !ibuffer.io.out(DecodeWidth - 1).bits.pd.notCFI && ibuffer.io.out(
      DecodeWidth - 1
    ).bits.pred_taken) {
      prevTakenValid  := true.B
      prevTakenFtqIdx := checkTargetIdx(DecodeWidth - 1)
    }
    when(prevTakenValid && ibuffer.io.out(0).fire) {
      XSError(prevTakenTarget =/= ibuffer.io.out(0).bits.pc, "taken instr should follow target pc\n")
      prevTakenValid := false.B
    }
    when(needFlush) {
      prevTakenValid := false.B
    }
  }

  // checkNotTakenConsecutive
  checkTakenNotConsecutive
  checkTakenPC
  checkNotTakenPC

  ifu.io.rob_commits <> io.backend.toFtq.rob_commits

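  // Flush and top-down bubble classification: tell the IBuffer why it is being flushed (control redirect,
  // memory violation, or a specific predictor miss reported by the FTQ).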
  ibuffer.io.flush                := needFlush
  ibuffer.io.ControlRedirect      := FlushControlRedirect
  ibuffer.io.MemVioRedirect       := FlushMemVioRedirect
  ibuffer.io.ControlBTBMissBubble := FlushControlBTBMiss
  ibuffer.io.TAGEMissBubble       := FlushTAGEMiss
  ibuffer.io.SCMissBubble         := FlushSCMiss
  ibuffer.io.ITTAGEMissBubble     := FlushITTAGEMiss
  ibuffer.io.RASMissBubble        := FlushRASMiss
  ibuffer.io.decodeCanAccept      := io.backend.canAccept

  FlushControlBTBMiss := ftq.io.ControlBTBMissBubble
  FlushTAGEMiss       := ftq.io.TAGEMissBubble
  FlushSCMiss         := ftq.io.SCMissBubble
  FlushITTAGEMiss     := ftq.io.ITTAGEMissBubble
  FlushRASMiss        := ftq.io.RASMissBubble

  io.backend.cfVec <> ibuffer.io.out
  io.backend.stallReason <> ibuffer.io.stallReason

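  // MMIO / uncacheable instruction fetches from the IFU are serviced by the InstrUncache module.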
  instrUncache.io.req <> ifu.io.uncacheInter.toUncache
  ifu.io.uncacheInter.fromUncache <> instrUncache.io.resp
  instrUncache.io.flush := false.B
  io.error <> RegNext(RegNext(icache.io.error))

  icache.io.hartId := io.hartId

  itlbRepeater1.io.debugTopDown.robHeadVaddr := io.debugTopDown.robHeadVaddr

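  // FrontendBubble counts decode slots left empty by the frontend in cycles where the backend could
  // have accepted instructions.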
  val frontendBubble = Mux(io.backend.canAccept, DecodeWidth.U - PopCount(ibuffer.io.out.map(_.valid)), 0.U)
  XSPerfAccumulate("FrontendBubble", frontendBubble)
  io.frontendInfo.ibufFull := RegNext(ibuffer.io.full)
  io.resetInFrontend       := reset.asBool

  // PFEvent
  val pfevent = Module(new PFEvent)
  pfevent.io.distribute_csr := io.csrCtrl.distribute_csr
  val csrevents = pfevent.io.hpmevent.take(8)

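  // Aggregate performance events from the IFU, IBuffer, ICache, FTQ and BPU and expose them through
  // the HPM counters selected by csrevents.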
  val perfFromUnits = Seq(ifu, ibuffer, icache, ftq, bpu).flatMap(_.getPerfEvents)
  val perfFromIO    = Seq()
  val perfBlock     = Seq()
  // let index = 0 be no event
  val allPerfEvents = Seq(("noEvent", 0.U)) ++ perfFromUnits ++ perfFromIO ++ perfBlock

  if (printEventCoding) {
    for (((name, inc), i) <- allPerfEvents.zipWithIndex) {
      println("Frontend perfEvents Set", name, inc, i)
    }
  }

  val allPerfInc          = allPerfEvents.map(_._2.asTypeOf(new PerfEvent))
  override val perfEvents = HPerfMonitor(csrevents, allPerfInc).getPerfEvents
  generatePerfEvent()
}