xref: /XiangShan/src/main/scala/xiangshan/frontend/FrontendBundle.scala (revision 211d620b07edb797ba35b635d24fef4e7294bae2)
/***************************************************************************************
* Copyright (c) 2024 Beijing Institute of Open Source Chip (BOSC)
* Copyright (c) 2020-2024 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/
package xiangshan.frontend

import chisel3._
import chisel3.util._
import org.chipsalliance.cde.config.Parameters
import utility._
import xiangshan._
import xiangshan.backend.fu.PMPRespBundle
import xiangshan.cache.mmu.TlbResp
import xiangshan.frontend.icache._

class FrontendTopDownBundle(implicit p: Parameters) extends XSBundle {
  val reasons    = Vec(TopDownCounters.NumStallReasons.id, Bool())
  val stallWidth = UInt(log2Ceil(PredictWidth).W)
}

class FetchRequestBundle(implicit p: Parameters) extends XSBundle with HasICacheParameters {

  // fast path: Timing critical
  val startAddr     = UInt(VAddrBits.W)
  val nextlineStart = UInt(VAddrBits.W)
  val nextStartAddr = UInt(VAddrBits.W)
  // slow path
  val ftqIdx    = new FtqPtr
  val ftqOffset = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))

  val topdown_info = new FrontendTopDownBundle

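  // the fetch block crosses a cacheline boundary when its start address falls in the second half of
  // a cacheline, so the access may spill into the next line (nextlineStart)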
  def crossCacheline = startAddr(blockOffBits - 1) === 1.U

  def fromFtqPcBundle(b: Ftq_RF_Components) = {
    this.startAddr     := b.startAddr
    this.nextlineStart := b.nextLineAddr
    // when (b.fallThruError) {
    //   val nextBlockHigherTemp = Mux(startAddr(log2Ceil(PredictWidth)+instOffsetBits), b.nextLineAddr, b.startAddr)
    //   val nextBlockHigher = nextBlockHigherTemp(VAddrBits-1, log2Ceil(PredictWidth)+instOffsetBits+1)
    //   this.nextStartAddr :=
    //     Cat(nextBlockHigher,
    //       startAddr(log2Ceil(PredictWidth)+instOffsetBits) ^ 1.U(1.W),
    //       startAddr(log2Ceil(PredictWidth)+instOffsetBits-1, instOffsetBits),
    //       0.U(instOffsetBits.W)
    //     )
    // }
    this
  }
  override def toPrintable: Printable =
    p"[start] ${Hexadecimal(startAddr)} [next] ${Hexadecimal(nextlineStart)}" +
      p"[tgt] ${Hexadecimal(nextStartAddr)} [ftqIdx] $ftqIdx [jmp] v:${ftqOffset.valid}" +
      p" offset: ${ftqOffset.bits}\n"
}

class FtqICacheInfo(implicit p: Parameters) extends XSBundle with HasICacheParameters {
  val startAddr      = UInt(VAddrBits.W)
  val nextlineStart  = UInt(VAddrBits.W)
  val ftqIdx         = new FtqPtr
  def crossCacheline = startAddr(blockOffBits - 1) === 1.U
  def fromFtqPcBundle(b: Ftq_RF_Components) = {
    this.startAddr     := b.startAddr
    this.nextlineStart := b.nextLineAddr
    this
  }
}

class IFUICacheIO(implicit p: Parameters) extends XSBundle with HasICacheParameters {
  val icacheReady       = Output(Bool())
  val resp              = Vec(PortNumber, ValidIO(new ICacheMainPipeResp))
  val topdownIcacheMiss = Output(Bool())
  val topdownItlbMiss   = Output(Bool())
}

class FtqToICacheRequestBundle(implicit p: Parameters) extends XSBundle with HasICacheParameters {
  val pcMemRead   = Vec(5, new FtqICacheInfo)
  val readValid   = Vec(5, Bool())
  val backendIpf  = Bool()
  val backendIgpf = Bool()
  val backendIaf  = Bool()
}

class PredecodeWritebackBundle(implicit p: Parameters) extends XSBundle {
  val pc         = Vec(PredictWidth, UInt(VAddrBits.W))
  val pd         = Vec(PredictWidth, new PreDecodeInfo) // TODO: redefine Predecode
  val ftqIdx     = new FtqPtr
  val ftqOffset  = UInt(log2Ceil(PredictWidth).W)
  val misOffset  = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val cfiOffset  = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val target     = UInt(VAddrBits.W)
  val jalTarget  = UInt(VAddrBits.W)
  val instrRange = Vec(PredictWidth, Bool())
}

class mmioCommitRead(implicit p: Parameters) extends XSBundle {
  val mmioFtqPtr     = Output(new FtqPtr)
  val mmioLastCommit = Input(Bool())
}

object ExceptionType {
  def none:  UInt = "b00".U
  def pf:    UInt = "b01".U // instruction page fault
  def gpf:   UInt = "b10".U // instruction guest page fault
  def af:    UInt = "b11".U // instruction access fault
  def width: Int  = 2

  def fromOH(has_pf: Bool, has_gpf: Bool, has_af: Bool): UInt = {
    assert(
      PopCount(VecInit(has_pf, has_gpf, has_af)) <= 1.U,
      "ExceptionType.fromOH receives input that is not one-hot: pf=%d, gpf=%d, af=%d",
      has_pf,
      has_gpf,
      has_af
    )
    // input is at-most-one-hot encoded, so we don't worry about priority here.
    MuxCase(
      none,
      Seq(
        has_pf  -> pf,
        has_gpf -> gpf,
        has_af  -> af
      )
    )
  }

  // raise pf/gpf/af according to the ftq (backend) request
  def fromFtq(req: FtqToICacheRequestBundle): UInt =
    fromOH(
      req.backendIpf,
      req.backendIgpf,
      req.backendIaf
    )

  // raise pf/gpf/af according to the itlb response
  def fromTlbResp(resp: TlbResp, useDup: Int = 0): UInt = {
    require(useDup >= 0 && useDup < resp.excp.length)
    // itlb is guaranteed to respond with at most one exception
    fromOH(
      resp.excp(useDup).pf.instr,
      resp.excp(useDup).gpf.instr,
      resp.excp(useDup).af.instr
    )
  }

  // raise af if the pmp check fails
  def fromPMPResp(resp: PMPRespBundle): UInt =
    Mux(resp.instr, af, none)

  // raise af if the meta/data array ecc check fails, or if the l2 cache responds with tilelink corrupt
  /* FIXME: RISC-V Machine ISA v1.13 (draft) introduced a "hardware error" exception, described as:
   * > A Hardware Error exception is a synchronous exception triggered when corrupted or
   * > uncorrectable data is accessed explicitly or implicitly by an instruction. In this context,
   * > "data" encompasses all types of information used within a RISC-V hart. Upon a hardware
   * > error exception, the xepc register is set to the address of the instruction that attempted to
   * > access corrupted data, while the xtval register is set either to 0 or to the virtual address
   * > of an instruction fetch, load, or store that attempted to access corrupted data. The priority
   * > of Hardware Error exception is implementation-defined, but any given occurrence is
   * > generally expected to be recognized at the point in the overall priority order at which the
   * > hardware error is discovered.
   * It might be better to raise a hardware error instead of an access fault when the ECC check fails.
   * But the spec is still a draft, and the XiangShan backend does not implement this exception code yet,
   * so we still raise af here.
   */
  def fromECC(enable: Bool, corrupt: Bool): UInt =
    Mux(enable && corrupt, af, none)

  /** Generates an exception mux tree.
   *
   * Exceptions that are further to the left in the parameter list have higher priority.
   * @example
   * {{{
   *   val itlb_exception = ExceptionType.fromTlbResp(io.itlb.resp.bits)
   *   // likewise for pmp_exception, meta_corrupt
   *   // ExceptionType.merge(itlb_exception, pmp_exception, meta_corrupt) is equivalent to:
   *   Mux(
   *     itlb_exception =/= none,
   *     itlb_exception,
   *     Mux(pmp_exception =/= none, pmp_exception, meta_corrupt)
   *   )
   * }}}
   */
  def merge(exceptions: UInt*): UInt = {
//    // recursively generate mux tree
//    if (exceptions.length == 1) {
//      require(exceptions.head.getWidth == width)
//      exceptions.head
//    } else {
//      Mux(exceptions.head =/= none, exceptions.head, merge(exceptions.tail: _*))
//    }
    // use MuxCase with default
    exceptions.foreach(e => require(e.getWidth == width))
    val mapping = exceptions.init.map(e => (e =/= none) -> e)
    val default = exceptions.last
    MuxCase(default, mapping)
  }

  /** Generates an exception mux tree for multi-port exception vectors.
   *
   * Exceptions that are further to the left in the parameter list have higher priority.
   * @example
   * {{{
   *   val itlb_exception = VecInit((0 until PortNumber).map(i => ExceptionType.fromTlbResp(io.itlb(i).resp.bits)))
   *   // likewise for pmp_exception, meta_corrupt
   *   // ExceptionType.merge(itlb_exception, pmp_exception, meta_corrupt) is equivalent to:
   *   VecInit((0 until PortNumber).map(i => Mux(
   *     itlb_exception(i) =/= none,
   *     itlb_exception(i),
   *     Mux(pmp_exception(i) =/= none, pmp_exception(i), meta_corrupt(i))
   *   ))
   * }}}
   */
  def merge(exceptionVecs: Vec[UInt]*): Vec[UInt] = {
//    // recursively generate mux tree
//    if (exceptionVecs.length == 1) {
//      exceptionVecs.head.foreach(e => require(e.getWidth == width))
//      exceptionVecs.head
//    } else {
//      require(exceptionVecs.head.length == exceptionVecs.last.length)
//      VecInit((exceptionVecs.head zip merge(exceptionVecs.tail: _*)).map{ case (high, low) =>
//        Mux(high =/= none, high, low)
//      })
//    }
    // merge port-by-port
    val length = exceptionVecs.head.length
    exceptionVecs.tail.foreach(vec => require(vec.length == length))
    VecInit((0 until length).map(i => merge(exceptionVecs.map(_(i)): _*)))
  }
}

class FetchToIBuffer(implicit p: Parameters) extends XSBundle {
  val instrs               = Vec(PredictWidth, UInt(32.W))
  val valid                = UInt(PredictWidth.W)
  val enqEnable            = UInt(PredictWidth.W)
  val pd                   = Vec(PredictWidth, new PreDecodeInfo)
  val foldpc               = Vec(PredictWidth, UInt(MemPredPCWidth.W))
  val ftqOffset            = Vec(PredictWidth, ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
  val exceptionFromBackend = Vec(PredictWidth, Bool())
  val exceptionType        = Vec(PredictWidth, UInt(ExceptionType.width.W))
  val crossPageIPFFix      = Vec(PredictWidth, Bool())
  val illegalInstr         = Vec(PredictWidth, Bool())
  val triggered            = Vec(PredictWidth, TriggerAction())
  val isLastInFtqEntry     = Vec(PredictWidth, Bool())

  val pc           = Vec(PredictWidth, UInt(VAddrBits.W))
  val ftqPtr       = new FtqPtr
  val topdown_info = new FrontendTopDownBundle
}

// class BitWiseUInt(val width: Int, val init: UInt) extends Module {
//   val io = IO(new Bundle {
//     val set
//   })
// }
// Moved from BPU
abstract class GlobalHistory(implicit p: Parameters) extends XSBundle with HasBPUConst {
  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): GlobalHistory
}

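// a simple global history: the full HistoryLength-bit history is kept in one register and shifted
// on every update (compare the pointer-based CircularGlobalHistory and the compressed FoldedHistory below)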
class ShiftingGlobalHistory(implicit p: Parameters) extends GlobalHistory {
  val predHist = UInt(HistoryLength.W)

  def update(shift: UInt, taken: Bool, hist: UInt = this.predHist): ShiftingGlobalHistory = {
    val g = Wire(new ShiftingGlobalHistory)
    g.predHist := (hist << shift) | taken
    g
  }

  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): ShiftingGlobalHistory = {
    require(br_valids.length == numBr)
    require(real_taken_mask.length == numBr)
    val last_valid_idx = PriorityMux(
      br_valids.reverse :+ true.B,
      (numBr to 0 by -1).map(_.U(log2Ceil(numBr + 1).W))
    )
    val first_taken_idx = PriorityEncoder(false.B +: real_taken_mask)
    val smaller         = Mux(last_valid_idx < first_taken_idx, last_valid_idx, first_taken_idx)
    val shift           = smaller
    val taken           = real_taken_mask.reduce(_ || _)
    update(shift, taken, this.predHist)
  }

  // static read
  def read(n: Int): Bool = predHist.asBools(n)

  final def ===(that: ShiftingGlobalHistory): Bool =
    predHist === that.predHist

  final def =/=(that: ShiftingGlobalHistory): Bool = !(this === that)
}

// circular global history pointer
class CGHPtr(implicit p: Parameters) extends CircularQueuePtr[CGHPtr](p => p(XSCoreParamsKey).HistoryLength) {}

object CGHPtr {
  def apply(f: Bool, v: UInt)(implicit p: Parameters): CGHPtr = {
    val ptr = Wire(new CGHPtr)
    ptr.flag  := f
    ptr.value := v
    ptr
  }
  def inverse(ptr: CGHPtr)(implicit p: Parameters): CGHPtr =
    apply(!ptr.flag, ptr.value)
}

class CircularGlobalHistory(implicit p: Parameters) extends GlobalHistory {
  val buffer = Vec(HistoryLength, Bool())
  type HistPtr = UInt
  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): CircularGlobalHistory =
    this
}

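// FoldedHistory compresses the most recent len bits of global history into compLen bits by
// XOR-folding, so predictors can index and tag with a short value. update() maintains the folded
// value incrementally: the outgoing (oldest) bits and the newly shifted-in bits are XORed into
// their folded positions, then the whole value is rotated (see circular_shift_left)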
class FoldedHistory(val len: Int, val compLen: Int, val max_update_num: Int)(implicit p: Parameters)
    extends XSBundle with HasBPUConst {
  require(compLen >= 1)
  require(len > 0)
  // require(folded_len <= len)
  require(compLen >= max_update_num)
  val folded_hist = UInt(compLen.W)

  def need_oldest_bits           = len > compLen
  def info                       = (len, compLen)
  def oldest_bit_to_get_from_ghr = (0 until max_update_num).map(len - _ - 1)
  def oldest_bit_pos_in_folded   = oldest_bit_to_get_from_ghr map (_ % compLen)
  def oldest_bit_wrap_around     = oldest_bit_to_get_from_ghr map (_ / compLen > 0)
  def oldest_bit_start           = oldest_bit_pos_in_folded.head

  def get_oldest_bits_from_ghr(ghr: Vec[Bool], histPtr: CGHPtr) =
    // TODO: wrap inc for histPtr value
    oldest_bit_to_get_from_ghr.map(i => ghr((histPtr + (i + 1).U).value))

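  // rotate src left by shamt bits, e.g. an 8-bit value b7..b0 rotated by 3 becomes b4..b0 ## b7..b5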
  def circular_shift_left(src: UInt, shamt: Int) = {
    val srcLen      = src.getWidth
    val src_doubled = Cat(src, src)
    val shifted     = src_doubled(srcLen * 2 - 1 - shamt, srcLen - shamt)
    shifted
  }

  // slow path, read bits from ghr
  def update(ghr: Vec[Bool], histPtr: CGHPtr, num: Int, taken: Bool): FoldedHistory = {
    val oldest_bits = VecInit(get_oldest_bits_from_ghr(ghr, histPtr))
    update(oldest_bits, num, taken)
  }

  // fast path, use pre-read oldest bits
  def update(ob: Vec[Bool], num: Int, taken: Bool): FoldedHistory = {
    // do xors for several bitsets at specified bits
    def bitsets_xor(len: Int, bitsets: Seq[Seq[Tuple2[Int, Bool]]]) = {
      val res = Wire(Vec(len, Bool()))
      // println(f"num bitsets: ${bitsets.length}")
      // println(f"bitsets $bitsets")
      val resArr = Array.fill(len)(List[Bool]())
      for (bs <- bitsets) {
        for ((n, b) <- bs) {
          resArr(n) = b :: resArr(n)
        }
      }
      // println(f"${resArr.mkString}")
      // println(f"histLen: ${this.len}, foldedLen: $folded_len")
      for (i <- 0 until len) {
        // println(f"bit[$i], ${resArr(i).mkString}")
        if (resArr(i).length == 0) {
          println(f"[error] bits $i is not assigned in folded hist update logic! histlen:${this.len}, compLen:$compLen")
        }
        res(i) := resArr(i).foldLeft(false.B)(_ ^ _)
      }
      res.asUInt
    }

    val new_folded_hist = if (need_oldest_bits) {
      val oldest_bits = ob
      require(oldest_bits.length == max_update_num)
      // mask off bits that do not update
      val oldest_bits_masked = oldest_bits.zipWithIndex.map {
        case (ob, i) => ob && (i < num).B
      }
      // if a bit does not wrap around, it should not be xored when it exits
      val oldest_bits_set = (0 until max_update_num).filter(oldest_bit_wrap_around).map(i =>
        (oldest_bit_pos_in_folded(i), oldest_bits_masked(i))
      )

      // println(f"old bits pos ${oldest_bits_set.map(_._1)}")

      // only the last bit could be 1, as we have at most one taken branch at a time
      val newest_bits_masked = VecInit((0 until max_update_num).map(i => taken && ((i + 1) == num).B)).asUInt
      // if a bit does not wrap around, newest bits should not be xored onto it either
      val newest_bits_set = (0 until max_update_num).map(i => (compLen - 1 - i, newest_bits_masked(i)))

      // println(f"new bits set ${newest_bits_set.map(_._1)}")
      //
      val original_bits_masked = VecInit(folded_hist.asBools.zipWithIndex.map {
        case (fb, i) => fb && !(num >= (len - i)).B
      })
      val original_bits_set = (0 until compLen).map(i => (i, original_bits_masked(i)))

      // do xor then shift
      val xored = bitsets_xor(compLen, Seq(original_bits_set, oldest_bits_set, newest_bits_set))
      circular_shift_left(xored, num)
    } else {
      // histLen too short to wrap around
      ((folded_hist << num) | taken)(compLen - 1, 0)
    }

    val fh = WireInit(this)
    fh.folded_hist := new_folded_hist
    fh
  }
}

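// AheadFoldedHistoryOldestBits holds GHR bits pre-read one cycle ahead, so the fast-path
// FoldedHistory.update(ob, ...) above does not need to index the full GHR; getRealOb picks the
// correct window of those bits according to how many branch slots were shifted last cycle (brNumOH)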
class AheadFoldedHistoryOldestBits(val len: Int, val max_update_num: Int)(implicit p: Parameters) extends XSBundle {
  val bits = Vec(max_update_num * 2, Bool())
  // def info = (len, compLen)
  def getRealOb(brNumOH: UInt): Vec[Bool] = {
    val ob = Wire(Vec(max_update_num, Bool()))
    for (i <- 0 until max_update_num) {
      ob(i) := Mux1H(brNumOH, bits.drop(i).take(numBr + 1))
    }
    ob
  }
}

class AllAheadFoldedHistoryOldestBits(val gen: Seq[Tuple2[Int, Int]])(implicit p: Parameters) extends XSBundle
    with HasBPUConst {
  val afhob = MixedVec(gen.filter(t => t._1 > t._2).map(_._1)
    .toSet.toList.map(l => new AheadFoldedHistoryOldestBits(l, numBr))) // remove duplicates
  require(gen.toSet.toList.equals(gen))
  def getObWithInfo(info: Tuple2[Int, Int]) = {
    val selected = afhob.filter(_.len == info._1)
    require(selected.length == 1)
    selected(0)
  }
  def read(ghv: Vec[Bool], ptr: CGHPtr) = {
    val hisLens      = afhob.map(_.len)
    val bitsToRead   = hisLens.flatMap(l => (0 until numBr * 2).map(i => l - i - 1)).toSet // remove duplicates
    val bitsWithInfo = bitsToRead.map(pos => (pos, ghv((ptr + (pos + 1).U).value)))
    for (ob <- afhob) {
      for (i <- 0 until numBr * 2) {
        val pos       = ob.len - i - 1
        val bit_found = bitsWithInfo.filter(_._1 == pos).toList
        require(bit_found.length == 1)
        ob.bits(i) := bit_found(0)._2
      }
    }
  }
}

class AllFoldedHistories(val gen: Seq[Tuple2[Int, Int]])(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val hist = MixedVec(gen.map { case (l, cl) => new FoldedHistory(l, cl, numBr) })
  // println(gen.mkString)
  require(gen.toSet.toList.equals(gen))
  def getHistWithInfo(info: Tuple2[Int, Int]) = {
    val selected = hist.filter(_.info.equals(info))
    require(selected.length == 1)
    selected(0)
  }
  def autoConnectFrom(that: AllFoldedHistories) = {
    require(this.hist.length <= that.hist.length)
    for (h <- this.hist) {
      h := that.getHistWithInfo(h.info)
    }
  }
  def update(ghv: Vec[Bool], ptr: CGHPtr, shift: Int, taken: Bool): AllFoldedHistories = {
    val res = WireInit(this)
    for (i <- 0 until this.hist.length) {
      res.hist(i) := this.hist(i).update(ghv, ptr, shift, taken)
    }
    res
  }
  def update(afhob: AllAheadFoldedHistoryOldestBits, lastBrNumOH: UInt, shift: Int, taken: Bool): AllFoldedHistories = {
    val res = WireInit(this)
    for (i <- 0 until this.hist.length) {
      val fh = this.hist(i)
      if (fh.need_oldest_bits) {
        val info          = fh.info
        val selectedAfhob = afhob.getObWithInfo(info)
        val ob            = selectedAfhob.getRealOb(lastBrNumOH)
        res.hist(i) := this.hist(i).update(ob, shift, taken)
      } else {
        val dumb = Wire(Vec(numBr, Bool())) // not needed
        dumb        := DontCare
        res.hist(i) := this.hist(i).update(dumb, shift, taken)
      }
    }
    res
  }

  def display(cond: Bool) =
    for (h <- hist) {
      XSDebug(cond, p"hist len ${h.len}, folded len ${h.compLen}, value ${Binary(h.folded_hist)}\n")
    }
}

class TableAddr(val idxBits: Int, val banks: Int)(implicit p: Parameters) extends XSBundle {
  def tagBits = VAddrBits - idxBits - instOffsetBits

  val tag    = UInt(tagBits.W)
  val idx    = UInt(idxBits.W)
  val offset = UInt(instOffsetBits.W)

  def fromUInt(x:   UInt) = x.asTypeOf(UInt(VAddrBits.W)).asTypeOf(this)
  def getTag(x:     UInt) = fromUInt(x).tag
  def getIdx(x:     UInt) = fromUInt(x).idx
  def getBank(x:    UInt) = if (banks > 1) getIdx(x)(log2Up(banks) - 1, 0) else 0.U
  def getBankIdx(x: UInt) = if (banks > 1) getIdx(x)(idxBits - 1, log2Up(banks)) else getIdx(x)
}

trait BasicPrediction extends HasXSParameter {
  def cfiIndex: ValidUndirectioned[UInt]
  def target(pc: UInt): UInt
  def lastBrPosOH:    Vec[Bool]
  def brTaken:        Bool
  def shouldShiftVec: Vec[Bool]
  def fallThruError:  Bool
}

// selectByTaken selects some data according to takenMask
// allTargets should be in a Vec, like [taken0, taken1, ..., not taken, not hit]
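// for example (see FullBranchPrediction.allTarget below): the first asserted bit of takenMask picks
// its slot's target when the entry hits; when nothing is taken but the entry hits, the "not taken"
// (fall-through) entry is picked; when the entry misses, the last entry is picked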
object selectByTaken {
  def apply[T <: Data](takenMask: Vec[Bool], hit: Bool, allTargets: Vec[T]): T = {
    val selVecOH =
      takenMask.zipWithIndex.map { case (t, i) =>
        !takenMask.take(i).fold(false.B)(_ || _) && t && hit
      } :+
        (!takenMask.asUInt.orR && hit) :+ !hit
    Mux1H(selVecOH, allTargets)
  }
}

class FullBranchPrediction(val isNotS3: Boolean)(implicit p: Parameters) extends XSBundle with HasBPUConst
    with BasicPrediction {
  val br_taken_mask = Vec(numBr, Bool())

  val slot_valids = Vec(totalSlot, Bool())

  val targets         = Vec(totalSlot, UInt(VAddrBits.W))
  val jalr_target     = UInt(VAddrBits.W) // special path for indirect predictors
  val offsets         = Vec(totalSlot, UInt(log2Ceil(PredictWidth).W))
  val fallThroughAddr = UInt(VAddrBits.W)
  val fallThroughErr  = Bool()
  val multiHit        = Bool()

  val is_jal               = Bool()
  val is_jalr              = Bool()
  val is_call              = Bool()
  val is_ret               = Bool()
  val last_may_be_rvi_call = Bool()
  val is_br_sharing        = Bool()

  // val call_is_rvc = Bool()
  val hit = Bool()

  val predCycle = if (!env.FPGAPlatform) Some(UInt(64.W)) else None

  def br_slot_valids  = slot_valids.init
  def tail_slot_valid = slot_valids.last

  def br_valids =
    VecInit(br_slot_valids :+ (tail_slot_valid && is_br_sharing))

  def taken_mask_on_slot =
    VecInit(
      (br_slot_valids zip br_taken_mask.init).map { case (t, v) => t && v } :+ (
        tail_slot_valid && (
          is_br_sharing && br_taken_mask.last || !is_br_sharing
        )
      )
    )

  def real_slot_taken_mask(): Vec[Bool] =
    VecInit(taken_mask_on_slot.map(_ && hit))

  // len numBr
  def real_br_taken_mask(): Vec[Bool] =
    VecInit(
      taken_mask_on_slot.map(_ && hit).init :+
        (br_taken_mask.last && tail_slot_valid && is_br_sharing && hit)
    )

  // the vec indicating whether ghr should shift for each branch
  def shouldShiftVec =
    VecInit(br_valids.zipWithIndex.map { case (v, i) =>
      v && hit && !real_br_taken_mask().take(i).reduceOption(_ || _).getOrElse(false.B)
    })

  def lastBrPosOH =
    VecInit((!hit || !br_valids.reduce(_ || _)) +: // not hit or no brs in entry
      (0 until numBr).map(i =>
        br_valids(i) &&
          !real_br_taken_mask().take(i).reduceOption(_ || _).getOrElse(false.B) && // no brs taken in front of it
          (real_br_taken_mask()(i) || !br_valids.drop(i + 1).reduceOption(_ || _).getOrElse(
            false.B
          )) && // no brs behind it
          hit
      ))

  def brTaken = (br_valids zip br_taken_mask).map { case (a, b) => a && b && hit }.reduce(_ || _)

  def target(pc: UInt): UInt =
    if (isNotS3) {
      selectByTaken(taken_mask_on_slot, hit, allTarget(pc))
    } else {
      selectByTaken(taken_mask_on_slot, hit && !fallThroughErr, allTarget(pc))
    }

  // allTarget returns a Vec of all possible targets of a BP stage,
  // in the following order: [taken_target0, taken_target1, ..., fallThroughAddr, not hit (pc plus fetch width)]
  //
  // This exposes internal targets for timing optimization,
  // since targets are usually generated more quickly than the taken signals
  def allTarget(pc: UInt): Vec[UInt] =
    VecInit(targets :+ fallThroughAddr :+ (pc + (FetchWidth * 4).U))

  def fallThruError: Bool = hit && fallThroughErr
  def ftbMultiHit:   Bool = hit && multiHit

  def hit_taken_on_jmp =
    !real_slot_taken_mask().init.reduce(_ || _) &&
      real_slot_taken_mask().last && !is_br_sharing
  def hit_taken_on_call = hit_taken_on_jmp && is_call
  def hit_taken_on_ret  = hit_taken_on_jmp && is_ret
  def hit_taken_on_jalr = hit_taken_on_jmp && is_jalr

  def cfiIndex = {
    val cfiIndex = Wire(ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
    cfiIndex.valid := real_slot_taken_mask().asUInt.orR
    // when nothing is taken, set cfiIndex to PredictWidth-1
    cfiIndex.bits :=
      ParallelPriorityMux(real_slot_taken_mask(), offsets) |
        Fill(log2Ceil(PredictWidth), (!real_slot_taken_mask().asUInt.orR).asUInt)
    cfiIndex
  }

  def taken = br_taken_mask.reduce(_ || _) || slot_valids.last // || (is_jal || is_jalr)

  def fromFtbEntry(
      entry:            FTBEntry,
      pc:               UInt,
      last_stage_pc:    Option[Tuple2[UInt, Bool]] = None,
      last_stage_entry: Option[Tuple2[FTBEntry, Bool]] = None
  ) = {
    slot_valids          := entry.brSlots.map(_.valid) :+ entry.tailSlot.valid
    targets              := entry.getTargetVec(pc, last_stage_pc) // Use previous stage pc for better timing
    jalr_target          := targets.last
    offsets              := entry.getOffsetVec
    is_jal               := entry.tailSlot.valid && entry.isJal
    is_jalr              := entry.tailSlot.valid && entry.isJalr
    is_call              := entry.tailSlot.valid && entry.isCall
    is_ret               := entry.tailSlot.valid && entry.isRet
    last_may_be_rvi_call := entry.last_may_be_rvi_call
    is_br_sharing        := entry.tailSlot.valid && entry.tailSlot.sharing
    predCycle.map(_ := GTimer())

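    // sanity-check the fall-through address: it must lie strictly after the start address and within
    // one fetch block (PredictWidth instructions); otherwise the entry is treated as bogus and the
    // fall-through defaults to pc + fetch width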
    val startLower        = Cat(0.U(1.W), pc(instOffsetBits + log2Ceil(PredictWidth) - 1, instOffsetBits))
    val endLowerwithCarry = Cat(entry.carry, entry.pftAddr)
    fallThroughErr  := startLower >= endLowerwithCarry || endLowerwithCarry > (startLower + PredictWidth.U)
    fallThroughAddr := Mux(fallThroughErr, pc + (FetchWidth * 4).U, entry.getFallThrough(pc, last_stage_entry))
  }

  def display(cond: Bool): Unit =
    XSDebug(cond, p"[taken_mask] ${Binary(br_taken_mask.asUInt)} [hit] $hit\n")
}

class SpeculativeInfo(implicit p: Parameters) extends XSBundle
    with HasBPUConst with BPUUtils {
  val histPtr = new CGHPtr
  val ssp     = UInt(log2Up(RasSize).W)
  val sctr    = UInt(RasCtrSize.W)
  val TOSW    = new RASPtr
  val TOSR    = new RASPtr
  val NOS     = new RASPtr
  val topAddr = UInt(VAddrBits.W)
}

//
class BranchPredictionBundle(val isNotS3: Boolean)(implicit p: Parameters) extends XSBundle
    with HasBPUConst with BPUUtils {
  val pc          = Vec(numDup, UInt(VAddrBits.W))
  val valid       = Vec(numDup, Bool())
  val hasRedirect = Vec(numDup, Bool())
  val ftq_idx     = new FtqPtr
  val full_pred   = Vec(numDup, new FullBranchPrediction(isNotS3))

  def target(pc:     UInt)      = VecInit(full_pred.map(_.target(pc)))
  def targets(pc:    Vec[UInt]) = VecInit(pc.zipWithIndex.map { case (pc, idx) => full_pred(idx).target(pc) })
  def allTargets(pc: Vec[UInt]) = VecInit(pc.zipWithIndex.map { case (pc, idx) => full_pred(idx).allTarget(pc) })
  def cfiIndex       = VecInit(full_pred.map(_.cfiIndex))
  def lastBrPosOH    = VecInit(full_pred.map(_.lastBrPosOH))
  def brTaken        = VecInit(full_pred.map(_.brTaken))
  def shouldShiftVec = VecInit(full_pred.map(_.shouldShiftVec))
  def fallThruError  = VecInit(full_pred.map(_.fallThruError))
  def ftbMultiHit    = VecInit(full_pred.map(_.ftbMultiHit))

  def taken = VecInit(cfiIndex.map(_.valid))

  def getTarget     = targets(pc)
  def getAllTargets = allTargets(pc)

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"[pc] ${Hexadecimal(pc(0))}\n")
    full_pred(0).display(cond)
  }
}

class BranchPredictionResp(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val s1 = new BranchPredictionBundle(isNotS3 = true)
  val s2 = new BranchPredictionBundle(isNotS3 = true)
  val s3 = new BranchPredictionBundle(isNotS3 = false)

  val s1_uftbHit         = Bool()
  val s1_uftbHasIndirect = Bool()
  val s1_ftbCloseReq     = Bool()

  val last_stage_meta      = UInt(MaxMetaLength.W)
  val last_stage_spec_info = new Ftq_Redirect_SRAMEntry
  val last_stage_ftb_entry = new FTBEntry

  val topdown_info = new FrontendTopDownBundle

  def selectedResp = {
    val res =
      PriorityMux(Seq(
        (s3.valid(3) && s3.hasRedirect(3)) -> s3,
        (s2.valid(3) && s2.hasRedirect(3)) -> s2,
        s1.valid(3)                        -> s1
      ))
    res
  }
  def selectedRespIdxForFtq =
    PriorityMux(Seq(
      (s3.valid(3) && s3.hasRedirect(3)) -> BP_S3,
      (s2.valid(3) && s2.hasRedirect(3)) -> BP_S2,
      s1.valid(3)                        -> BP_S1
    ))
  def lastStage = s3
}

class BpuToFtqBundle(implicit p: Parameters) extends BranchPredictionResp {}

class BranchPredictionUpdate(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val pc        = UInt(VAddrBits.W)
  val spec_info = new SpeculativeInfo
  val ftb_entry = new FTBEntry()

  val cfi_idx           = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val br_taken_mask     = Vec(numBr, Bool())
  val br_committed      = Vec(numBr, Bool()) // High only when br valid && br committed
  val jmp_taken         = Bool()
  val mispred_mask      = Vec(numBr + 1, Bool())
  val pred_hit          = Bool()
  val false_hit         = Bool()
  val new_br_insert_pos = Vec(numBr, Bool())
  val old_entry         = Bool()
  val meta              = UInt(MaxMetaLength.W)
  val full_target       = UInt(VAddrBits.W)
  val from_stage        = UInt(2.W)
  val ghist             = UInt(HistoryLength.W)

  def is_jal  = ftb_entry.tailSlot.valid && ftb_entry.isJal
  def is_jalr = ftb_entry.tailSlot.valid && ftb_entry.isJalr
  def is_call = ftb_entry.tailSlot.valid && ftb_entry.isCall
  def is_ret  = ftb_entry.tailSlot.valid && ftb_entry.isRet

  def is_call_taken = is_call && jmp_taken && cfi_idx.valid && cfi_idx.bits === ftb_entry.tailSlot.offset
  def is_ret_taken  = is_ret && jmp_taken && cfi_idx.valid && cfi_idx.bits === ftb_entry.tailSlot.offset

  def display(cond: Bool) = {
    XSDebug(cond, p"-----------BranchPredictionUpdate-----------\n")
    XSDebug(cond, p"[mispred_mask] ${Binary(mispred_mask.asUInt)} [false_hit] $false_hit\n")
    XSDebug(cond, p"[new_br_insert_pos] ${Binary(new_br_insert_pos.asUInt)}\n")
    XSDebug(cond, p"--------------------------------------------\n")
  }
}

class BranchPredictionRedirect(implicit p: Parameters) extends Redirect with HasBPUConst {
  // override def toPrintable: Printable = {
  //   p"-----------BranchPredictionRedirect----------- " +
  //     p"-----------cfiUpdate----------- " +
  //     p"[pc] ${Hexadecimal(cfiUpdate.pc)} " +
  //     p"[predTaken] ${cfiUpdate.predTaken}, [taken] ${cfiUpdate.taken}, [isMisPred] ${cfiUpdate.isMisPred} " +
  //     p"[target] ${Hexadecimal(cfiUpdate.target)} " +
  //     p"------------------------------- " +
  //     p"[robPtr] f=${robIdx.flag} v=${robIdx.value} " +
  //     p"[ftqPtr] f=${ftqIdx.flag} v=${ftqIdx.value} " +
  //     p"[ftqOffset] ${ftqOffset} " +
  //     p"[level] ${level}, [interrupt] ${interrupt} " +
  //     p"[stFtqIdx] f=${stFtqIdx.flag} v=${stFtqIdx.value} " +
  //     p"[stFtqOffset] ${stFtqOffset} " +
  //     p"\n"

  // }

  // TODO: backend should pass topdown signals here
  // must not change its parent class, since BPU casts from the parent class with asTypeOf(this type)
  require(isInstanceOf[Redirect])
  val BTBMissBubble         = Bool()
  def ControlRedirectBubble = debugIsCtrl
  // if a mispredicted br is not in the ftb, count it as a BTB miss
  def ControlBTBMissBubble = ControlRedirectBubble && !cfiUpdate.br_hit && !cfiUpdate.jr_hit
  def TAGEMissBubble       = ControlRedirectBubble && cfiUpdate.br_hit && !cfiUpdate.sc_hit
  def SCMissBubble         = ControlRedirectBubble && cfiUpdate.br_hit && cfiUpdate.sc_hit
  def ITTAGEMissBubble     = ControlRedirectBubble && cfiUpdate.jr_hit && !cfiUpdate.pd.isRet
  def RASMissBubble        = ControlRedirectBubble && cfiUpdate.jr_hit && cfiUpdate.pd.isRet
  def MemVioRedirectBubble = debugIsMemVio
  def OtherRedirectBubble  = !debugIsCtrl && !debugIsMemVio

  def connectRedirect(source: Redirect): Unit =
    for ((name, data) <- this.elements) {
      if (source.elements.contains(name)) {
        data := source.elements(name)
      }
    }

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"-----------BranchPredictionRedirect----------- \n")
    XSDebug(cond, p"-----------cfiUpdate----------- \n")
    XSDebug(cond, p"[pc] ${Hexadecimal(cfiUpdate.pc)}\n")
    // XSDebug(cond, p"[hist] ${Binary(cfiUpdate.hist.predHist)}\n")
    XSDebug(cond, p"[br_hit] ${cfiUpdate.br_hit} [isMisPred] ${cfiUpdate.isMisPred}\n")
    XSDebug(
      cond,
      p"[pred_taken] ${cfiUpdate.predTaken} [taken] ${cfiUpdate.taken} [isMisPred] ${cfiUpdate.isMisPred}\n"
    )
    XSDebug(cond, p"[target] ${Hexadecimal(cfiUpdate.target)} \n")
    XSDebug(cond, p"[shift] ${cfiUpdate.shift}\n")
    XSDebug(cond, p"------------------------------- \n")
    XSDebug(cond, p"[robPtr] f=${robIdx.flag} v=${robIdx.value}\n")
    XSDebug(cond, p"[ftqPtr] f=${ftqIdx.flag} v=${ftqIdx.value} \n")
    XSDebug(cond, p"[ftqOffset] ${ftqOffset} \n")
    XSDebug(cond, p"[stFtqIdx] f=${stFtqIdx.flag} v=${stFtqIdx.value}\n")
    XSDebug(cond, p"[stFtqOffset] ${stFtqOffset}\n")
    XSDebug(cond, p"---------------------------------------------- \n")
  }
}
