/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import xiangshan.cache.mmu._
import xiangshan.frontend.icache._
import utils._
import xiangshan.backend.fu.{PMPReqBundle, PMPRespBundle}

trait HasInstrMMIOConst extends HasXSParameter with HasIFUConst {
  def mmioBusWidth = 64
  def mmioBusBytes = mmioBusWidth / 8
  def maxInstrLen = 32
}

trait HasIFUConst extends HasXSParameter {
  def addrAlign(addr: UInt, bytes: Int, highest: Int): UInt = Cat(addr(highest - 1, log2Ceil(bytes)), 0.U(log2Ceil(bytes).W))
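  // e.g. with 64-byte cachelines, addrAlign(addr, 64, VAddrBits) clears the low
  // log2Ceil(64) = 6 bits of addr: 0x80000012 becomes the line base 0x80000000.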
  def fetchQueueSize = 2
}

class IfuPtr(implicit p: Parameters) extends CircularQueuePtr[IfuPtr](entries = 2) {
  override def cloneType = (new IfuPtr).asInstanceOf[this.type]
}

object IfuPtr {
  def apply(f: Bool, v: UInt)(implicit p: Parameters): IfuPtr = {
    val ptr = Wire(new IfuPtr)
    ptr.flag := f
    ptr.value := v
    ptr
  }
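  // inverse returns the same queue index on the opposite wrap-around lap;
  // for a CircularQueuePtr this is typically used in full/empty checks.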
  def inverse(ptr: IfuPtr)(implicit p: Parameters): IfuPtr = {
    apply(!ptr.flag, ptr.value)
  }
}

class IfuToFtqIO(implicit p: Parameters) extends XSBundle {
  val pdWb = Valid(new PredecodeWritebackBundle)
}

class FtqInterface(implicit p: Parameters) extends XSBundle {
  val fromFtq = Flipped(new FtqToIfuIO)
  val toFtq   = new IfuToFtqIO
}

class UncacheInterface(implicit p: Parameters) extends XSBundle {
  val fromUncache = Flipped(DecoupledIO(new InsUncacheResp))
  val toUncache   = DecoupledIO(new InsUncacheReq)
}

class NewIFUIO(implicit p: Parameters) extends XSBundle {
  val ftqInter         = new FtqInterface
  val icacheInter      = Vec(2, Flipped(new ICacheMainPipeBundle))
  val icacheStop       = Output(Bool())
  val icachePerfInfo   = Input(new ICachePerfInfo)
  val toIbuffer        = Decoupled(new FetchToIBuffer)
  val uncacheInter     = new UncacheInterface
  val frontendTrigger  = Flipped(new FrontendTdataDistributeIO)
  val csrTriggerEnable = Input(Vec(4, Bool()))
  val rob_commits      = Flipped(Vec(CommitWidth, Valid(new RobCommitInfo)))
}

// Records the case where fallThruAddr falls into the middle of an RVI
// instruction: the second half of that instruction is then the first
// parcel of the next fetch block, identified by matchThisBlock.
class LastHalfInfo(implicit p: Parameters) extends XSBundle {
  val valid = Bool()
  val middlePC = UInt(VAddrBits.W)
  def matchThisBlock(startAddr: UInt) = valid && middlePC === startAddr
}

class IfuToPreDecode(implicit p: Parameters) extends XSBundle {
  val data             = if (HasCExtension) Vec(PredictWidth + 1, UInt(16.W)) else Vec(PredictWidth, UInt(32.W))
  val startAddr        = UInt(VAddrBits.W)
  val fallThruAddr     = UInt(VAddrBits.W)
  val fallThruError    = Bool()
  val isDoubleLine     = Bool()
  val ftqOffset        = Valid(UInt(log2Ceil(PredictWidth).W))
  val target           = UInt(VAddrBits.W)
  val pageFault        = Vec(2, Bool())
  val accessFault      = Vec(2, Bool())
  val instValid        = Bool()
  val lastHalfMatch    = Bool()
  val oversize         = Bool()
  val mmio             = Bool()
  val frontendTrigger  = new FrontendTdataDistributeIO
  val csrTriggerEnable = Vec(4, Bool())
}

class NewIFU(implicit p: Parameters) extends XSModule with HasICacheParameters with HasIFUConst
  with HasCircularQueuePtrHelper with HasPerfEvents
{
  println(s"icache ways: ${nWays} sets: ${nSets}")
  val io = IO(new NewIFUIO)
  val (toFtq, fromFtq)         = (io.ftqInter.toFtq, io.ftqInter.fromFtq)
  val (toICache, fromICache)   = (VecInit(io.icacheInter.map(_.req)), VecInit(io.icacheInter.map(_.resp)))
  val (toUncache, fromUncache) = (io.uncacheInter.toUncache, io.uncacheInter.fromUncache)

  def isCrossLineReq(start: UInt, end: UInt): Bool = start(blockOffBits) ^ end(blockOffBits)

  def isLastInCacheline(fallThruAddr: UInt): Bool = fallThruAddr(blockOffBits - 1, 1) === 0.U
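  // isCrossLineReq: the fetch block spans two cachelines when the lowest line-index
  // bit of its start and end addresses differ. isLastInCacheline: the fall-through
  // address sits exactly on a cacheline boundary, i.e. the block ends with the line.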

  class TlbExcept(implicit p: Parameters) extends XSBundle {
    val pageFault = Bool()
    val accessFault = Bool()
    val mmio = Bool()
  }

  //---------------------------------------------
  //  Fetch Stage 0 :
  //  * Send req to ICache Meta/Data
  //  * Check whether a double-line (2-cacheline) fetch is needed
  //---------------------------------------------

  val f0_valid      = fromFtq.req.valid
  val f0_ftq_req    = fromFtq.req.bits
  val f0_situation  = VecInit(Seq(isCrossLineReq(f0_ftq_req.startAddr, f0_ftq_req.fallThruAddr), isLastInCacheline(f0_ftq_req.fallThruAddr)))
  val f0_doubleLine = f0_situation(0) || f0_situation(1)
  val f0_vSetIdx    = VecInit(get_idx(f0_ftq_req.startAddr), get_idx(f0_ftq_req.fallThruAddr))
  val f0_fire       = fromFtq.req.fire()

  val f0_flush, f1_flush, f2_flush, f3_flush = WireInit(false.B)
  val from_bpu_f0_flush, from_bpu_f1_flush, from_bpu_f2_flush, from_bpu_f3_flush = WireInit(false.B)

  from_bpu_f0_flush := fromFtq.flushFromBpu.shouldFlushByStage2(f0_ftq_req.ftqIdx) ||
                       fromFtq.flushFromBpu.shouldFlushByStage3(f0_ftq_req.ftqIdx)

  val f3_redirect = WireInit(false.B)
  f3_flush := fromFtq.redirect.valid
  f2_flush := f3_flush || f3_redirect
  f1_flush := f2_flush || from_bpu_f1_flush
  f0_flush := f1_flush || from_bpu_f0_flush
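  // A flush at any stage also flushes every earlier stage: a backend redirect
  // kills f3 down to f0, a predecode redirect (f3_redirect) kills f2 down to f0,
  // and BPU override flushes kill only the young stages that still match.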

  val f1_ready, f2_ready, f3_ready = WireInit(false.B)

  fromFtq.req.ready := toICache(0).ready && toICache(1).ready && f2_ready && GTimer() > 500.U
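  // NOTE: GTimer() > 500.U holds fetch off for the first 500 cycles after reset,
  // presumably to let reset initialization (e.g. ICache SRAM init) settle.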

  toICache(0).valid       := fromFtq.req.fire() && !f0_flush
  toICache(0).bits.vaddr  := fromFtq.req.bits.startAddr
  toICache(1).valid       := fromFtq.req.fire() && f0_doubleLine && !f0_flush
  toICache(1).bits.vaddr  := fromFtq.req.bits.fallThruAddr
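  // Port 0 always fetches the cacheline holding startAddr; port 1 fetches the
  // following line (addressed by fallThruAddr) only for a double-line block.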

  //---------------------------------------------
  //  Fetch Stage 1 :
  //  * Track the in-flight request while the ICache
  //    pipeline performs its ITLB lookup and meta/data access
  //---------------------------------------------

  val f1_valid      = RegInit(false.B)
  val f1_ftq_req    = RegEnable(next = f0_ftq_req,    enable = f0_fire)
  val f1_situation  = RegEnable(next = f0_situation,  enable = f0_fire)
  val f1_doubleLine = RegEnable(next = f0_doubleLine, enable = f0_fire)
  val f1_vSetIdx    = RegEnable(next = f0_vSetIdx,    enable = f0_fire)
  val f1_fire       = f1_valid && f1_ready

  f1_ready := f2_ready || !f1_valid

  from_bpu_f1_flush := fromFtq.flushFromBpu.shouldFlushByStage3(f1_ftq_req.ftqIdx)

  when(f1_flush)                  {f1_valid := false.B}
  .elsewhen(f0_fire && !f0_flush) {f1_valid := true.B}
  .elsewhen(f1_fire)              {f1_valid := false.B}
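  // f1_valid handshake: set when a request enters from f0, cleared on a flush or
  // when the request advances to f2. The same pattern repeats for f2 and f3.
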
  //---------------------------------------------
  //  Fetch Stage 2 :
  //  * Receive the ICache response (instruction data, paddr,
  //    and ITLB exception / MMIO flags)
  //  * Cut the raw cacheline data into instruction parcels
  //---------------------------------------------
  val icacheRespAllValid = WireInit(false.B)

  val f2_valid      = RegInit(false.B)
  val f2_ftq_req    = RegEnable(next = f1_ftq_req,    enable = f1_fire)
  val f2_situation  = RegEnable(next = f1_situation,  enable = f1_fire)
  val f2_doubleLine = RegEnable(next = f1_doubleLine, enable = f1_fire)
  val f2_vSetIdx    = RegEnable(next = f1_vSetIdx,    enable = f1_fire)
  val f2_fire       = f2_valid && f2_ready

  f2_ready := f3_ready && icacheRespAllValid || !f2_valid
  // TODO: addr compare may be timing critical
  val f2_icache_all_resp_wire = fromICache(0).valid &&
    (fromICache(0).bits.vaddr === f2_ftq_req.startAddr) &&
    ((fromICache(1).valid && (fromICache(1).bits.vaddr === f2_ftq_req.fallThruAddr)) || !f2_doubleLine)
  val f2_icache_all_resp_reg  = RegInit(false.B)

  icacheRespAllValid := f2_icache_all_resp_reg || f2_icache_all_resp_wire
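  // If f3 stalls in the cycle the last ICache response arrives, that fact is
  // latched in f2_icache_all_resp_reg, so the ICache pipeline does not have to
  // keep its response valid while it is frozen by icacheStop.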

  io.icacheStop := !f3_ready

  when(f2_flush)                                              {f2_icache_all_resp_reg := false.B}
  .elsewhen(f2_valid && f2_icache_all_resp_wire && !f3_ready) {f2_icache_all_resp_reg := true.B}
  .elsewhen(f2_fire && f2_icache_all_resp_reg)                {f2_icache_all_resp_reg := false.B}

  when(f2_flush)                  {f2_valid := false.B}
  .elsewhen(f1_fire && !f1_flush) {f2_valid := true.B}
  .elsewhen(f2_fire)              {f2_valid := false.B}

  val f2_cache_response_data = ResultHoldBypass(valid = f2_icache_all_resp_wire, data = VecInit(fromICache.map(_.bits.readData)))

  val f2_datas     = VecInit((0 until PortNumber).map(i => f2_cache_response_data(i)))
  val f2_except_pf = VecInit((0 until PortNumber).map(i => fromICache(i).bits.tlbExcp.pageFault))
  val f2_except_af = VecInit((0 until PortNumber).map(i => fromICache(i).bits.tlbExcp.accessFault))
  val f2_mmio      = fromICache(0).bits.tlbExcp.mmio &&
                     !fromICache(0).bits.tlbExcp.accessFault &&
                     !fromICache(0).bits.tlbExcp.pageFault

  val f2_paddrs    = VecInit((0 until PortNumber).map(i => fromICache(i).bits.paddr))
  val f2_perf_info = io.icachePerfInfo

  def cut(cacheline: UInt, start: UInt): Vec[UInt] = {
    if (HasCExtension) {
      val result   = Wire(Vec(PredictWidth + 1, UInt(16.W)))
      val dataVec  = cacheline.asTypeOf(Vec(blockBytes * 2 / 2, UInt(16.W)))
      val startPtr = Cat(0.U(1.W), start(blockOffBits - 1, 1))
      (0 until PredictWidth + 1).foreach(i =>
        result(i) := dataVec(startPtr + i.U)
      )
      result
    } else {
      val result   = Wire(Vec(PredictWidth, UInt(32.W)))
      val dataVec  = cacheline.asTypeOf(Vec(blockBytes * 2 / 4, UInt(32.W)))
      val startPtr = Cat(0.U(1.W), start(blockOffBits - 1, 2))
      (0 until PredictWidth).foreach(i =>
        result(i) := dataVec(startPtr + i.U)
      )
      result
    }
  }
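  // Example (C extension on, 64-byte lines, PredictWidth = 16): the concatenated
  // two-line data is viewed as 64 16-bit parcels; a startAddr with in-line offset
  // 0x12 gives startPtr = 0x12 >> 1 = 9, so parcels 9..25 are selected. One extra
  // parcel (PredictWidth + 1) is taken so that the first half of an RVI instruction
  // beginning in the last slot is still captured.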

  val preDecoder = Module(new PreDecode)
  val (preDecoderIn, preDecoderOut) = (preDecoder.io.in, preDecoder.io.out)
  val predecodeOutValid = WireInit(false.B)

  val f2_cut_data = cut(Cat(f2_datas.map(cacheline => cacheline.asUInt).reverse).asUInt, f2_ftq_req.startAddr)

  //---------------------------------------------
  //  Fetch Stage 3 :
  //  * receive the cut data and exception flags from stage 2
  //  * send them to PreDecode and deliver the result to the Ibuffer
  //  * check whether the prediction was right (branch target and type,
  //    jump direction and type, jal target)
  //  * fetch MMIO instructions through a wait-for-commit state machine
  //---------------------------------------------
  val f3_valid      = RegInit(false.B)
  val f3_ftq_req    = RegEnable(next = f2_ftq_req,    enable = f2_fire)
  val f3_situation  = RegEnable(next = f2_situation,  enable = f2_fire)
  val f3_doubleLine = RegEnable(next = f2_doubleLine, enable = f2_fire)
  val f3_fire       = io.toIbuffer.fire()

  f3_ready := io.toIbuffer.ready || !f3_valid

  val f3_cut_data  = RegEnable(next = f2_cut_data, enable = f2_fire)

  val f3_except_pf = RegEnable(next = f2_except_pf, enable = f2_fire)
  val f3_except_af = RegEnable(next = f2_except_af, enable = f2_fire)
  val f3_mmio      = RegEnable(next = f2_mmio,      enable = f2_fire)

  val f3_lastHalf      = RegInit(0.U.asTypeOf(new LastHalfInfo))
  val f3_lastHalfMatch = f3_lastHalf.matchThisBlock(f3_ftq_req.startAddr)
  val f3_except        = VecInit((0 until 2).map{i => f3_except_pf(i) || f3_except_af(i)})
  val f3_has_except    = f3_valid && (f3_except_af.reduce(_||_) || f3_except_pf.reduce(_||_))
  val f3_pAddrs        = RegEnable(next = f2_paddrs, enable = f2_fire)

  val f3_mmio_data = Reg(UInt(maxInstrLen.W))

  val f3_data = if (HasCExtension) Wire(Vec(PredictWidth + 1, UInt(16.W))) else Wire(Vec(PredictWidth, UInt(32.W)))
  f3_data := f3_cut_data

  val mmio_idle :: mmio_send_req :: mmio_w_resp :: mmio_resend :: mmio_resend_w_resp :: mmio_w_commit :: Nil = Enum(6)
  val mmio_state = RegInit(mmio_idle)
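  // MMIO fetch flow: an instruction fetch from MMIO space may have side effects,
  // so it is performed one instruction at a time over the uncache bus. The resend
  // states issue a second uncache read at paddr + 2, and the FSM then parks in
  // mmio_w_commit until the ROB has committed the instruction before going idle.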

  val f3_req_is_mmio = f3_mmio && f3_valid
  val mmio_has_committed = VecInit(io.rob_commits.map{commit => commit.valid && commit.bits.ftqIdx === f3_ftq_req.ftqIdx && commit.bits.ftqOffset === 0.U}).asUInt.orR
  val f3_mmio_req_commit = f3_req_is_mmio && mmio_state === mmio_w_commit && mmio_has_committed

  val f3_mmio_to_commit = f3_req_is_mmio && mmio_state === mmio_w_commit
  val f3_mmio_to_commit_next = RegNext(f3_mmio_to_commit)
  val f3_mmio_can_go = f3_mmio_to_commit && !f3_mmio_to_commit_next
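  // f3_mmio_can_go pulses for exactly one cycle on entry to mmio_w_commit
  // (a rising-edge detect), so the MMIO instruction enters the Ibuffer only once.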

  val f3_ftq_flush_self     = fromFtq.redirect.valid && RedirectLevel.flushItself(fromFtq.redirect.bits.level)
  val f3_ftq_flush_by_older = fromFtq.redirect.valid && isBefore(fromFtq.redirect.bits.ftqIdx, f3_ftq_req.ftqIdx)

  val f3_need_not_flush = f3_req_is_mmio && fromFtq.redirect.valid && !f3_ftq_flush_self && !f3_ftq_flush_by_older

  when(f3_flush && !f3_need_not_flush)              {f3_valid := false.B}
  .elsewhen(f2_fire && !f2_flush)                   {f3_valid := true.B}
  .elsewhen(io.toIbuffer.fire() && !f3_req_is_mmio) {f3_valid := false.B}
  .elsewhen(f3_req_is_mmio && f3_mmio_req_commit)   {f3_valid := false.B}

  val f3_mmio_use_seq_pc = RegInit(false.B)

  val (redirect_ftqIdx, redirect_ftqOffset) = (fromFtq.redirect.bits.ftqIdx, fromFtq.redirect.bits.ftqOffset)
  val redirect_mmio_req = fromFtq.redirect.valid && redirect_ftqIdx === f3_ftq_req.ftqIdx && redirect_ftqOffset === 0.U

  when(RegNext(f2_fire && !f2_flush) && f3_req_is_mmio) { f3_mmio_use_seq_pc := true.B  }
  .elsewhen(redirect_mmio_req)                          { f3_mmio_use_seq_pc := false.B }

  f3_ready := Mux(f3_req_is_mmio, io.toIbuffer.ready && f3_mmio_req_commit || !f3_valid, io.toIbuffer.ready || !f3_valid)
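  // This overrides the earlier f3_ready connection (Chisel last-connect wins):
  // an MMIO request holds the stage until the fetched instruction has committed.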

  when(f3_req_is_mmio){
    f3_data(0) := f3_mmio_data(15, 0)
    f3_data(1) := f3_mmio_data(31, 16)
  }

  when(fromUncache.fire()) {f3_mmio_data := fromUncache.bits.data}


  switch(mmio_state){
    is(mmio_idle){
      when(f3_req_is_mmio){
        mmio_state := mmio_send_req
      }
    }

    is(mmio_send_req){
      mmio_state := Mux(toUncache.fire(), mmio_w_resp, mmio_send_req)
    }

    is(mmio_w_resp){
      when(fromUncache.fire()){
        val isRVC = fromUncache.bits.data(1, 0) =/= 3.U
        mmio_state := Mux(isRVC, mmio_resend, mmio_w_commit)
      }
    }

    is(mmio_resend){
      mmio_state := Mux(toUncache.fire(), mmio_resend_w_resp, mmio_resend)
    }

    is(mmio_resend_w_resp){
      when(fromUncache.fire()){
        mmio_state := mmio_w_commit
      }
    }

    is(mmio_w_commit){
      when(mmio_has_committed){
        mmio_state := mmio_idle
      }
    }
  }

  when(f3_ftq_flush_self || f3_ftq_flush_by_older) {
    mmio_state := mmio_idle
    f3_mmio_data := 0.U
  }

  toUncache.valid     := ((mmio_state === mmio_send_req) || (mmio_state === mmio_resend)) && f3_req_is_mmio
  toUncache.bits.addr := Mux(mmio_state === mmio_resend, f3_pAddrs(0) + 2.U, f3_pAddrs(0))
  fromUncache.ready   := true.B

  preDecoderIn.instValid        := f3_valid && !f3_has_except
  preDecoderIn.data             := f3_data
  preDecoderIn.startAddr        := f3_ftq_req.startAddr
  preDecoderIn.fallThruAddr     := f3_ftq_req.fallThruAddr
  preDecoderIn.fallThruError    := f3_ftq_req.fallThruError
  preDecoderIn.isDoubleLine     := f3_doubleLine
  preDecoderIn.ftqOffset        := f3_ftq_req.ftqOffset
  preDecoderIn.target           := f3_ftq_req.target
  preDecoderIn.oversize         := f3_ftq_req.oversize
  preDecoderIn.lastHalfMatch    := f3_lastHalfMatch
  preDecoderIn.pageFault        := f3_except_pf
  preDecoderIn.accessFault      := f3_except_af
  preDecoderIn.mmio             := f3_mmio
  preDecoderIn.frontendTrigger  := io.frontendTrigger
  preDecoderIn.csrTriggerEnable := io.csrTriggerEnable


  // TODO: What if next packet does not match?
  when (f3_flush) {
    f3_lastHalf.valid := false.B
  }.elsewhen (io.toIbuffer.fire()) {
    f3_lastHalf.valid := preDecoderOut.hasLastHalf
    f3_lastHalf.middlePC := preDecoderOut.realEndPC
  }

  val f3_predecode_range = VecInit(preDecoderOut.pd.map(inst => inst.valid)).asUInt
  val f3_mmio_range      = VecInit((0 until PredictWidth).map(i => if (i == 0) true.B else false.B))

  io.toIbuffer.valid          := f3_valid && (!f3_req_is_mmio || f3_mmio_can_go)
  io.toIbuffer.bits.instrs    := preDecoderOut.instrs
  io.toIbuffer.bits.valid     := Mux(f3_req_is_mmio, f3_mmio_range.asUInt, f3_predecode_range & preDecoderOut.instrRange.asUInt)
  io.toIbuffer.bits.pd        := preDecoderOut.pd
  io.toIbuffer.bits.ftqPtr    := f3_ftq_req.ftqIdx
  io.toIbuffer.bits.pc        := preDecoderOut.pc
  io.toIbuffer.bits.ftqOffset.zipWithIndex.foreach{case (a, i) => a.bits := i.U; a.valid := preDecoderOut.takens(i) && !f3_req_is_mmio}
  io.toIbuffer.bits.foldpc    := preDecoderOut.pc.map(i => XORFold(i(VAddrBits - 1, 1), MemPredPCWidth))
  io.toIbuffer.bits.ipf       := preDecoderOut.pageFault
  io.toIbuffer.bits.acf       := preDecoderOut.accessFault
  io.toIbuffer.bits.crossPageIPFFix := preDecoderOut.crossPageIPF
  io.toIbuffer.bits.triggered := preDecoderOut.triggered

  // Write back to FTQ
  val f3_cache_fetch = f3_valid && !(f2_fire && !f2_flush)
  val finishFetchMaskReg = RegNext(f3_cache_fetch)
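  // finishFetchMaskReg is true while f3 keeps holding a block it already held in
  // the previous cycle, so the predecode writeback below fires once per fetch block.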

  val f3_mmio_missOffset = Wire(ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
  f3_mmio_missOffset.valid := f3_req_is_mmio
  f3_mmio_missOffset.bits  := 0.U

  toFtq.pdWb.valid           := (!finishFetchMaskReg && f3_valid && !f3_req_is_mmio) || (f3_mmio_req_commit && f3_mmio_use_seq_pc)
  toFtq.pdWb.bits.pc         := preDecoderOut.pc
  toFtq.pdWb.bits.pd         := preDecoderOut.pd
  toFtq.pdWb.bits.pd.zipWithIndex.foreach{case (instr, i) => instr.valid := Mux(f3_req_is_mmio, f3_mmio_range(i), f3_predecode_range(i))}
  toFtq.pdWb.bits.ftqIdx     := f3_ftq_req.ftqIdx
  toFtq.pdWb.bits.ftqOffset  := f3_ftq_req.ftqOffset.bits
  toFtq.pdWb.bits.misOffset  := Mux(f3_req_is_mmio, f3_mmio_missOffset, preDecoderOut.misOffset)
  toFtq.pdWb.bits.cfiOffset  := preDecoderOut.cfiOffset
  toFtq.pdWb.bits.target     := Mux(f3_req_is_mmio, Mux(f3_mmio_data(1, 0) =/= 3.U, f3_ftq_req.startAddr + 2.U, f3_ftq_req.startAddr + 4.U), preDecoderOut.target)
  toFtq.pdWb.bits.jalTarget  := preDecoderOut.jalTarget
  toFtq.pdWb.bits.instrRange := Mux(f3_req_is_mmio, f3_mmio_range, preDecoderOut.instrRange)

  val predecodeFlush    = preDecoderOut.misOffset.valid && f3_valid
  val predecodeFlushReg = RegNext(predecodeFlush && !(f2_fire && !f2_flush))
  f3_redirect := (!predecodeFlushReg && predecodeFlush && !f3_req_is_mmio) || (f3_mmio_req_commit && f3_mmio_use_seq_pc)
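  // A predecode-detected misprediction redirects the frontend; the RegNext-based
  // edge detect limits the redirect to a single cycle per fetch block. A committed
  // MMIO instruction likewise redirects fetch to its sequential successor.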

  /** performance counter */
  val f3_perf_info = RegEnable(next = f2_perf_info, enable = f2_fire)
  val f3_req_0 = io.toIbuffer.fire()
  val f3_req_1 = io.toIbuffer.fire() && f3_doubleLine
  val f3_hit_0 = io.toIbuffer.fire() && f3_perf_info.bank_hit(0)
  val f3_hit_1 = io.toIbuffer.fire() && f3_doubleLine && f3_perf_info.bank_hit(1)
  val f3_hit   = f3_perf_info.hit
  val perfEvents = Seq(
    ("frontendFlush                ", f3_redirect                                       ),
    ("ifu_req                      ", io.toIbuffer.fire()                               ),
    ("ifu_miss                     ", io.toIbuffer.fire() && !f3_perf_info.hit          ),
    ("ifu_req_cacheline_0          ", f3_req_0                                          ),
    ("ifu_req_cacheline_1          ", f3_req_1                                          ),
    ("ifu_req_cacheline_0_hit      ", f3_hit_0                                          ),
    ("ifu_req_cacheline_1_hit      ", f3_hit_1                                          ),
    ("only_0_hit                   ", f3_perf_info.only_0_hit    && io.toIbuffer.fire() ),
    ("only_0_miss                  ", f3_perf_info.only_0_miss   && io.toIbuffer.fire() ),
    ("hit_0_hit_1                  ", f3_perf_info.hit_0_hit_1   && io.toIbuffer.fire() ),
    ("hit_0_miss_1                 ", f3_perf_info.hit_0_miss_1  && io.toIbuffer.fire() ),
    ("miss_0_hit_1                 ", f3_perf_info.miss_0_hit_1  && io.toIbuffer.fire() ),
    ("miss_0_miss_1                ", f3_perf_info.miss_0_miss_1 && io.toIbuffer.fire() ),
    ("cross_line_block             ", io.toIbuffer.fire() && f3_situation(0)            ),
    ("fall_through_is_cacheline_end", io.toIbuffer.fire() && f3_situation(1)            ),
  )
  generatePerfEvent()

  XSPerfAccumulate("ifu_req", io.toIbuffer.fire())
  XSPerfAccumulate("ifu_miss", io.toIbuffer.fire() && !f3_hit)
  XSPerfAccumulate("ifu_req_cacheline_0", f3_req_0)
  XSPerfAccumulate("ifu_req_cacheline_1", f3_req_1)
  XSPerfAccumulate("ifu_req_cacheline_0_hit", f3_hit_0)
  XSPerfAccumulate("ifu_req_cacheline_1_hit", f3_hit_1)
  XSPerfAccumulate("frontendFlush", f3_redirect)
  XSPerfAccumulate("only_0_hit", f3_perf_info.only_0_hit && io.toIbuffer.fire())
  XSPerfAccumulate("only_0_miss", f3_perf_info.only_0_miss && io.toIbuffer.fire())
  XSPerfAccumulate("hit_0_hit_1", f3_perf_info.hit_0_hit_1 && io.toIbuffer.fire())
  XSPerfAccumulate("hit_0_miss_1", f3_perf_info.hit_0_miss_1 && io.toIbuffer.fire())
  XSPerfAccumulate("miss_0_hit_1", f3_perf_info.miss_0_hit_1 && io.toIbuffer.fire())
  XSPerfAccumulate("miss_0_miss_1", f3_perf_info.miss_0_miss_1 && io.toIbuffer.fire())
  XSPerfAccumulate("cross_line_block", io.toIbuffer.fire() && f3_situation(0))
  XSPerfAccumulate("fall_through_is_cacheline_end", io.toIbuffer.fire() && f3_situation(1))
}