xref: /XiangShan/src/main/scala/xiangshan/frontend/IBuffer.scala (revision 47e7896cdf17e844ff21c8c6aa8aa1a7c13cfdab)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import xiangshan._
import utils._
import utility._
import xiangshan.ExceptionNO._

class IBufPtr(implicit p: Parameters) extends CircularQueuePtr[IBufPtr](
  p => p(XSCoreParamsKey).IBufSize
) {
}

class IBufInBankPtr(implicit p: Parameters) extends CircularQueuePtr[IBufInBankPtr](
  p => p(XSCoreParamsKey).IBufSize / p(XSCoreParamsKey).IBufNBank
) {
}

class IBufBankPtr(implicit p: Parameters) extends CircularQueuePtr[IBufBankPtr](
  p => p(XSCoreParamsKey).IBufNBank
) {
}

class IBufferIO(implicit p: Parameters) extends XSBundle {
  val flush = Input(Bool())
  val ControlRedirect = Input(Bool())
  val ControlBTBMissBubble = Input(Bool())
  val TAGEMissBubble = Input(Bool())
  val SCMissBubble = Input(Bool())
  val ITTAGEMissBubble = Input(Bool())
  val RASMissBubble = Input(Bool())
  val MemVioRedirect = Input(Bool())
  val in = Flipped(DecoupledIO(new FetchToIBuffer))
  val out = Vec(DecodeWidth, DecoupledIO(new CtrlFlow))
  val full = Output(Bool())
  val decodeCanAccept = Input(Bool())
  val stallReason = new StallReasonIO(DecodeWidth)
}

class IBufEntry(implicit p: Parameters) extends XSBundle {
  val inst = UInt(32.W)
  val pc = UInt(VAddrBits.W)
  val foldpc = UInt(MemPredPCWidth.W)
  val pd = new PreDecodeInfo
  val pred_taken = Bool()
  val ftqPtr = new FtqPtr
  val ftqOffset = UInt(log2Ceil(PredictWidth).W)
  val ipf = Bool()
  val acf = Bool()
  val crossPageIPFFix = Bool()
  val triggered = new TriggerCf

  def fromFetch(fetch: FetchToIBuffer, i: Int): IBufEntry = {
    inst   := fetch.instrs(i)
    pc     := fetch.pc(i)
    foldpc := fetch.foldpc(i)
    pd     := fetch.pd(i)
    pred_taken := fetch.ftqOffset(i).valid
    ftqPtr := fetch.ftqPtr
    ftqOffset := fetch.ftqOffset(i).bits
    ipf := fetch.ipf(i)
    acf := fetch.acf(i)
    crossPageIPFFix := fetch.crossPageIPFFix(i)
    triggered := fetch.triggered(i)
    this
  }

  def toCtrlFlow: CtrlFlow = {
    val cf = Wire(new CtrlFlow)
    cf.instr := inst
    cf.pc := pc
    cf.foldpc := foldpc
    cf.exceptionVec := 0.U.asTypeOf(ExceptionVec())
    cf.exceptionVec(instrPageFault) := ipf
    cf.exceptionVec(instrAccessFault) := acf
    cf.trigger := triggered
    cf.pd := pd
    cf.pred_taken := pred_taken
    cf.crossPageIPFFix := crossPageIPFFix
    cf.storeSetHit := DontCare
    cf.waitForRobIdx := DontCare
    cf.loadWaitBit := DontCare
    cf.loadWaitStrict := DontCare
    cf.ssid := DontCare
    cf.ftqPtr := ftqPtr
    cf.ftqOffset := ftqOffset
    cf
  }
}

class IBuffer(implicit p: Parameters) extends XSModule with HasCircularQueuePtrHelper with HasPerfEvents {
  val io = IO(new IBufferIO)

  // io alias
  private val decodeCanAccept = io.decodeCanAccept

  // Parameter Check
  private val bankSize = IBufSize / IBufNBank
  require(IBufSize % IBufNBank == 0, s"IBufNBank should divide IBufSize, IBufNBank: $IBufNBank, IBufSize: $IBufSize")
  require(IBufNBank >= DecodeWidth,
    s"IBufNBank should be equal or larger than DecodeWidth, IBufNBank: $IBufNBank, DecodeWidth: $DecodeWidth")

  // IBuffer is organized as raw registers.
  // Because IBuffer is a large queue, its read & write port logic must be precisely controlled.
  //                             . + + E E E - .
  //                             . + + E E E - .
  //                             . . + E E E - .
  //                             . . + E E E E -
  // As shown above, + means enqueue, - means dequeue, E is current content.
  // On dequeue, the read ports are organized like a banked FIFO:
  // a dequeue reads at most 1 entry from each bank sequentially, which can be exploited to reduce area.
  // Enqueue writes cannot benefit from this characteristic unless an SRAM is used.
  // For details see Enqueue and Dequeue below.
  private val ibuf: Vec[IBufEntry] = RegInit(VecInit.fill(IBufSize)(0.U.asTypeOf(new IBufEntry)))
  private val bankedIBufView: Vec[Vec[IBufEntry]] = VecInit.tabulate(IBufNBank)(
    bankID => VecInit.tabulate(bankSize)(
      inBankOffset => ibuf(bankID + inBankOffset * IBufNBank)
    )
  )
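  // Entry ibuf(i) lives in bank (i % IBufNBank) at in-bank offset (i / IBufNBank);
  // the banked view above is just another indexing of the same registers, it does not duplicate state.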


  // Bypass wire
  private val bypassEntries = WireDefault(VecInit.fill(DecodeWidth)(0.U.asTypeOf(Valid(new IBufEntry))))
  // Normal read wire
  private val deqEntries = WireDefault(VecInit.fill(DecodeWidth)(0.U.asTypeOf(Valid(new IBufEntry))))
  // Output register
  private val outputEntries = RegInit(VecInit.fill(DecodeWidth)(0.U.asTypeOf(Valid(new IBufEntry))))

  // Between Bank
  private val deqBankPtrVec: Vec[IBufBankPtr] = RegInit(VecInit.tabulate(DecodeWidth)(_.U.asTypeOf(new IBufBankPtr)))
  private val deqBankPtr: IBufBankPtr = deqBankPtrVec(0)
  private val deqBankPtrVecNext = Wire(deqBankPtrVec.cloneType)
  // Inside Bank
  private val deqInBankPtr: Vec[IBufInBankPtr] = RegInit(VecInit.fill(IBufNBank)(0.U.asTypeOf(new IBufInBankPtr)))
  private val deqInBankPtrNext = Wire(deqInBankPtr.cloneType)

  val deqPtr = RegInit(0.U.asTypeOf(new IBufPtr))
  val deqPtrNext = Wire(deqPtr.cloneType)

  val enqPtrVec = RegInit(VecInit.tabulate(PredictWidth)(_.U.asTypeOf(new IBufPtr)))
  val enqPtr = enqPtrVec(0)

  val numTryEnq = WireDefault(0.U)
  val numEnq = Mux(io.in.fire, numTryEnq, 0.U)

  val useBypass = enqPtr === deqPtr && decodeCanAccept // empty and decode can accept insts
  // Record whether the insts in the output entries come from the bypass path or from a normal dequeue.
  // Update deqPtr only when they come from a dequeue.
  val currentOutUseBypass = RegInit(false.B)

  // The number of insts accepted by decode.
  // Since decode promises to accept insts in order, a priority encoder is enough to count them.
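  // numOut is the index of the first not-ready output port, or DecodeWidth if all ports are ready.
  // E.g. if DecodeWidth were 4 and ready = (1, 1, 0, 1), numOut would be 2: only the first two insts are accepted.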
  private val numOut: UInt = PriorityMuxDefault(io.out.map(x => !x.ready) zip (0 until DecodeWidth).map(_.U), DecodeWidth.U)
  private val numDeq = Mux(currentOutUseBypass, 0.U, numOut)

  // current number of valid entries
  val numValid = distanceBetween(enqPtr, deqPtr)
  val numValidAfterDeq = numValid - numDeq
  // number of valid entries next cycle
  val numValidNext = numValid + numEnq - numDeq
  val allowEnq = RegInit(true.B)
  val numFromFetch = Mux(io.in.valid, PopCount(io.in.bits.enqEnable), 0.U)
  val numBypass = PopCount(bypassEntries.map(_.valid))

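  // Enqueue is allowed only while at least PredictWidth slots remain free after this cycle's update,
  // so a full fetch packet can always be written in one shot whenever io.in.ready is high.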
  allowEnq := (IBufSize - PredictWidth).U >= numValidNext // Disable when almost full

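  // enqOffset(i) is the number of valid insts before fetch slot i, i.e. slot i's position after compaction.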
  val enqOffset = VecInit.tabulate(PredictWidth)(i => PopCount(io.in.bits.valid.asBools.take(i)))
  val enqData = VecInit.tabulate(PredictWidth)(i => Wire(new IBufEntry).fromFetch(io.in.bits, i))

  // when using bypass, bypassed entries do not enqueue
  when(useBypass) {
    when(numFromFetch >= DecodeWidth.U) {
      numTryEnq := numFromFetch - DecodeWidth.U
    } .otherwise {
      numTryEnq := 0.U
    }
  } .otherwise {
    numTryEnq := numFromFetch
  }

  /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  // Bypass
  /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
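  // Bypass path: when the IBuffer is empty and decode can accept insts, the first DecodeWidth fetched
  // insts are forwarded directly to the output registers instead of going through the queue.
  // bypassEntries(idx) selects the fetched inst whose compacted position (enqOffset) equals idx.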
  bypassEntries.zipWithIndex.foreach {
    case (entry, idx) =>
      // Select
      val validOH = Range(0, PredictWidth).map {
        i =>
          io.in.bits.valid(i) &&
            io.in.bits.enqEnable(i) &&
            enqOffset(i) === idx.asUInt
      } // Should be OneHot
      entry.valid := validOH.reduce(_ || _) && io.in.fire && !io.flush
      entry.bits := Mux1H(validOH, enqData)

      // Debug Assertion
      XSError(PopCount(validOH) > 1.asUInt, "validOH is not OneHot")
  }

  // => Decode Output
  // clean register output
  io.out zip outputEntries foreach {
    case (io, reg) =>
      io.valid := reg.valid
      io.bits := reg.bits.toCtrlFlow
  }
  outputEntries zip bypassEntries zip deqEntries foreach {
    case ((out, bypass), deq) =>
      when(decodeCanAccept) {
        out := deq
        currentOutUseBypass := false.B
        when(useBypass && io.in.valid) {
          out := bypass
          currentOutUseBypass := true.B
        }
      }
  }

  /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  // Enqueue
  /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
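  // Write selection: when the bypass path is used, the first DecodeWidth insts have already been forwarded
  // to the output registers, so inst i is written at enqPtrVec(enqOffset(i) - DecodeWidth) and insts with
  // enqOffset(i) < DecodeWidth skip the queue; otherwise inst i is written at enqPtrVec(enqOffset(i)).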
  io.in.ready := allowEnq
  // Data
  ibuf.zipWithIndex.foreach {
    case (entry, idx) => {
      // Select
      val validOH = Range(0, PredictWidth).map {
        i =>
          val useBypassMatch = enqOffset(i) >= DecodeWidth.U &&
            enqPtrVec(enqOffset(i) - DecodeWidth.U).value === idx.asUInt
          val normalMatch = enqPtrVec(enqOffset(i)).value === idx.asUInt
          val m = Mux(useBypass, useBypassMatch, normalMatch) // when using bypass, bypassed entries do not enqueue

          io.in.bits.valid(i) && io.in.bits.enqEnable(i) && m
      } // Should be OneHot
      val wen = validOH.reduce(_ || _) && io.in.fire && !io.flush

      // Write port
      // Each IBuffer entry has a PredictWidth -> 1 Mux
      val writeEntry = Mux1H(validOH, enqData)
      entry := Mux(wen, writeEntry, entry)

      // Debug Assertion
      XSError(PopCount(validOH) > 1.asUInt, "validOH is not OneHot")
    }
  }
  // Pointer maintenance
  when (io.in.fire && !io.flush) {
    enqPtrVec := VecInit(enqPtrVec.map(_ + numTryEnq))
  }

  /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  // Dequeue
  /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
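  // validVec is a DecodeWidth-bit mask: bit i is set if output slot i can be filled with a valid entry
  // after this cycle's dequeue (i.e. i < numValidAfterDeq).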
  val validVec = Mux(numValidAfterDeq >= DecodeWidth.U,
    ((1 << DecodeWidth) - 1).U,
    UIntToMask(numValidAfterDeq(log2Ceil(DecodeWidth) - 1, 0), DecodeWidth)
  )
  // Data
  // Read port
  // 2-stage, IBufNBank * (bankSize -> 1) + IBufNBank -> 1
  // Should be better than IBufSize -> 1 in area, with no significant latency increase
  private val readStage1: Vec[IBufEntry] = VecInit.tabulate(IBufNBank)(
    bankID => Mux1H(UIntToOH(deqInBankPtrNext(bankID).value), bankedIBufView(bankID))
  )
  for (i <- 0 until DecodeWidth) {
    deqEntries(i).valid := validVec(i)
    deqEntries(i).bits := Mux1H(UIntToOH(deqBankPtrVecNext(i).value), readStage1)
  }
  // Pointer maintenance
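  // All DecodeWidth bank pointers advance together by numDeq (they always cover DecodeWidth consecutive banks),
  // while each bank's in-bank pointer only advances when that bank is actually read out this cycle.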
  deqBankPtrVecNext := VecInit(deqBankPtrVec.map(_ + numDeq))
  deqPtrNext := deqPtr + numDeq
  deqInBankPtrNext.zip(deqInBankPtr).zipWithIndex.foreach {
    case ((ptrNext, ptr), idx) => {
      // validVec[k] == bankValid[deqBankPtr + k]
      // So bankValid[n] == validVec[n - deqBankPtr]
      val validIdx = Mux(idx.asUInt >= deqBankPtr.value,
        idx.asUInt - deqBankPtr.value,
        ((idx + IBufNBank).asUInt - deqBankPtr.value)(log2Ceil(IBufNBank) - 1, 0)
      )(log2Ceil(DecodeWidth) - 1, 0)
      val bankAdvance = Mux(validIdx >= DecodeWidth.U,
        false.B,
        io.out(validIdx).ready // `ready` depends on `valid`, so we need only `ready`, not fire
      ) && !currentOutUseBypass
      ptrNext := Mux(bankAdvance, ptr + 1.U, ptr)
    }
  }

  // Flush
  when (io.flush) {
    allowEnq := true.B
    enqPtrVec := enqPtrVec.indices.map(_.U.asTypeOf(new IBufPtr))
    deqBankPtrVec := deqBankPtrVec.indices.map(_.U.asTypeOf(new IBufBankPtr))
    deqInBankPtr := VecInit.fill(IBufNBank)(0.U.asTypeOf(new IBufInBankPtr))
    deqPtr := 0.U.asTypeOf(new IBufPtr())
    outputEntries.foreach(_.valid := false.B)
  }.otherwise {
    deqPtr := deqPtrNext
    deqInBankPtr := deqInBankPtrNext
    deqBankPtrVec := deqBankPtrVecNext
  }
  io.full := !allowEnq

  /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  // TopDown
  /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
  val topdown_stage = RegInit(0.U.asTypeOf(new FrontendTopDownBundle))
  topdown_stage := io.in.bits.topdown_info
  when(io.flush) {
    when(io.ControlRedirect) {
      when(io.ControlBTBMissBubble) {
        topdown_stage.reasons(TopDownCounters.BTBMissBubble.id) := true.B
      }.elsewhen(io.TAGEMissBubble) {
        topdown_stage.reasons(TopDownCounters.TAGEMissBubble.id) := true.B
      }.elsewhen(io.SCMissBubble) {
        topdown_stage.reasons(TopDownCounters.SCMissBubble.id) := true.B
      }.elsewhen(io.ITTAGEMissBubble) {
        topdown_stage.reasons(TopDownCounters.ITTAGEMissBubble.id) := true.B
      }.elsewhen(io.RASMissBubble) {
        topdown_stage.reasons(TopDownCounters.RASMissBubble.id) := true.B
      }
    }.elsewhen(io.MemVioRedirect) {
      topdown_stage.reasons(TopDownCounters.MemVioRedirectBubble.id) := true.B
    }.otherwise {
      topdown_stage.reasons(TopDownCounters.OtherRedirectBubble.id) := true.B
    }
  }

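  // Stall attribution: wasted decode slots (deqWasteCount of them) are charged to the highest-indexed
  // pending reason in topdown_stage.reasons; when some insts are delivered but no reason is pending,
  // they are counted as FetchFragBubble below, and a valid backend backReason overrides both.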
  val dequeueInsufficient = Wire(Bool())
  val matchBubble = Wire(UInt(log2Up(TopDownCounters.NumStallReasons.id).W))
  val deqValidCount = PopCount(validVec.asBools)
  val deqWasteCount = DecodeWidth.U - deqValidCount
  dequeueInsufficient := deqValidCount < DecodeWidth.U
  matchBubble := (TopDownCounters.NumStallReasons.id - 1).U - PriorityEncoder(topdown_stage.reasons.reverse)

  io.stallReason.reason.map(_ := 0.U)
  for (i <- 0 until DecodeWidth) {
    when(i.U < deqWasteCount) {
      io.stallReason.reason(DecodeWidth - i - 1) := matchBubble
    }
  }

  when(!(deqWasteCount === DecodeWidth.U || topdown_stage.reasons.asUInt.orR)) {
    // should set reason for FetchFragmentationStall
    // topdown_stage.reasons(TopDownCounters.FetchFragmentationStall.id) := true.B
    for (i <- 0 until DecodeWidth) {
      when(i.U < deqWasteCount) {
        io.stallReason.reason(DecodeWidth - i - 1) := TopDownCounters.FetchFragBubble.id.U
      }
    }
  }

  when(io.stallReason.backReason.valid) {
    io.stallReason.reason.map(_ := io.stallReason.backReason.bits)
  }

  // Debug info
  XSError(
    deqPtr.value =/= deqBankPtr.value + deqInBankPtr(deqBankPtr.value).value * IBufNBank.asUInt,
    "Dequeue PTR mismatch"
  )
  XSError(isBefore(enqPtr, deqPtr) && !isFull(enqPtr, deqPtr), "\ndeqPtr is older than enqPtr!\n")

  XSDebug(io.flush, "IBuffer Flushed\n")

  when(io.in.fire) {
    XSDebug("Enqueue:\n")
    XSDebug(p"MASK=${Binary(io.in.bits.valid)}\n")
    for(i <- 0 until PredictWidth){
      XSDebug(p"PC=${Hexadecimal(io.in.bits.pc(i))} ${Hexadecimal(io.in.bits.instrs(i))}\n")
    }
  }

  for (i <- 0 until DecodeWidth) {
    XSDebug(io.out(i).fire,
      p"deq: ${Hexadecimal(io.out(i).bits.instr)} PC=${Hexadecimal(io.out(i).bits.pc)} " +
      p"v=${io.out(i).valid} r=${io.out(i).ready} " +
      p"excpVec=${Binary(io.out(i).bits.exceptionVec.asUInt)} crossPageIPF=${io.out(i).bits.crossPageIPFFix}\n")
  }

  XSDebug(p"numValid: ${numValid}\n")
  XSDebug(p"EnqNum: ${numEnq}\n")
  XSDebug(p"DeqNum: ${numDeq}\n")

  val afterInit = RegInit(false.B)
  val headBubble = RegInit(false.B)
  when (io.in.fire) { afterInit := true.B }
  when (io.flush) {
    headBubble := true.B
  } .elsewhen(numValid =/= 0.U) {
    headBubble := false.B
  }
  val instrHungry = afterInit && (numValid === 0.U) && !headBubble

  QueuePerf(IBufSize, numValid, !allowEnq)
  XSPerfAccumulate("flush", io.flush)
  XSPerfAccumulate("hungry", instrHungry)

  val ibuffer_IDWidth_hvButNotFull = afterInit && (numValid =/= 0.U) && (numValid < DecodeWidth.U) && !headBubble
  XSPerfAccumulate("ibuffer_IDWidth_hvButNotFull", ibuffer_IDWidth_hvButNotFull)
  /*
  XSPerfAccumulate("ICacheMissBubble", Mux(matchBubbleVec(TopDownCounters.ICacheMissBubble.id), deqWasteCount, 0.U))
  XSPerfAccumulate("ITLBMissBubble", Mux(matchBubbleVec(TopDownCounters.ITLBMissBubble.id), deqWasteCount, 0.U))
  XSPerfAccumulate("ControlRedirectBubble", Mux(matchBubbleVec(TopDownCounters.ControlRedirectBubble.id), deqWasteCount, 0.U))
  XSPerfAccumulate("MemVioRedirectBubble", Mux(matchBubbleVec(TopDownCounters.MemVioRedirectBubble.id), deqWasteCount, 0.U))
  XSPerfAccumulate("OtherRedirectBubble", Mux(matchBubbleVec(TopDownCounters.OtherRedirectBubble.id), deqWasteCount, 0.U))
  XSPerfAccumulate("BTBMissBubble", Mux(matchBubbleVec(TopDownCounters.BTBMissBubble.id), deqWasteCount, 0.U))
  XSPerfAccumulate("OverrideBubble", Mux(matchBubbleVec(TopDownCounters.OverrideBubble.id), deqWasteCount, 0.U))
  XSPerfAccumulate("FtqUpdateBubble", Mux(matchBubbleVec(TopDownCounters.FtqUpdateBubble.id), deqWasteCount, 0.U))
  XSPerfAccumulate("FtqFullStall", Mux(matchBubbleVec(TopDownCounters.FtqFullStall.id), deqWasteCount, 0.U))
  XSPerfAccumulate("FetchFragmentBubble",
  Mux(deqWasteCount === DecodeWidth.U || topdown_stage.reasons.asUInt.orR, 0.U, deqWasteCount))
  XSPerfAccumulate("TAGEMissBubble", Mux(matchBubbleVec(TopDownCounters.TAGEMissBubble.id), deqWasteCount, 0.U))
  XSPerfAccumulate("SCMissBubble", Mux(matchBubbleVec(TopDownCounters.SCMissBubble.id), deqWasteCount, 0.U))
  XSPerfAccumulate("ITTAGEMissBubble", Mux(matchBubbleVec(TopDownCounters.ITTAGEMissBubble.id), deqWasteCount, 0.U))
  XSPerfAccumulate("RASMissBubble", Mux(matchBubbleVec(TopDownCounters.RASMissBubble.id), deqWasteCount, 0.U))
  */

  val perfEvents = Seq(
    ("IBuffer_Flushed  ", io.flush                                                                     ),
    ("IBuffer_hungry   ", instrHungry                                                                  ),
    ("IBuffer_1_4_valid", (numValid >  (0*(IBufSize/4)).U) & (numValid < (1*(IBufSize/4)).U)   ),
    ("IBuffer_2_4_valid", (numValid >= (1*(IBufSize/4)).U) & (numValid < (2*(IBufSize/4)).U)   ),
    ("IBuffer_3_4_valid", (numValid >= (2*(IBufSize/4)).U) & (numValid < (3*(IBufSize/4)).U)   ),
    ("IBuffer_4_4_valid", (numValid >= (3*(IBufSize/4)).U) & (numValid < (4*(IBufSize/4)).U)   ),
    ("IBuffer_full     ",  numValid.andR                                                           ),
    ("Front_Bubble     ", PopCount((0 until DecodeWidth).map(i => io.out(i).ready && !io.out(i).valid)))
  )
  generatePerfEvent()
}