// xref: /XiangShan/src/main/scala/xiangshan/frontend/FrontendBundle.scala (revision 803124a63779cc65b44dd1b8b1d848bb8407a6ac)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/
package xiangshan.frontend

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import chisel3.experimental.chiselName
import xiangshan._
import xiangshan.frontend.icache._
import utils._
import scala.math._

@chiselName
class FetchRequestBundle(implicit p: Parameters) extends XSBundle with HasICacheParameters {

  // fast path: timing critical
  val startAddr       = UInt(VAddrBits.W)
  val nextlineStart   = UInt(VAddrBits.W)
  val nextStartAddr   = UInt(VAddrBits.W)
  // slow path
  val ftqIdx          = new FtqPtr
  val ftqOffset       = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))

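  // The request crosses a cache-line boundary when startAddr points into the
  // upper half of its cache line (the fetch block is assumed to span at most
  // half a line), i.e. when the top bit of the intra-line offset is set.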
  def crossCacheline = startAddr(blockOffBits - 1) === 1.U

  def fromFtqPcBundle(b: Ftq_RF_Components) = {
    this.startAddr := b.startAddr
    this.nextlineStart := b.nextLineAddr
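    // If the FTB-provided fall-through address was flagged as inconsistent
    // (fallThruError), fall back to the sequential fetch block: keep the
    // in-block offset bits, flip the bit selecting the half of the cache line,
    // and pick the upper bits from startAddr or nextLineAddr depending on that
    // bit, so that nextStartAddr points at the adjacent fetch block.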
    when (b.fallThruError) {
      val nextBlockHigherTemp = Mux(startAddr(log2Ceil(PredictWidth)+instOffsetBits), b.startAddr, b.nextLineAddr)
      val nextBlockHigher = nextBlockHigherTemp(VAddrBits-1, log2Ceil(PredictWidth)+instOffsetBits+1)
      this.nextStartAddr :=
        Cat(nextBlockHigher,
          startAddr(log2Ceil(PredictWidth)+instOffsetBits) ^ 1.U(1.W),
          startAddr(log2Ceil(PredictWidth)+instOffsetBits-1, instOffsetBits),
          0.U(instOffsetBits.W)
        )
    }
    this
  }
  override def toPrintable: Printable = {
    p"[start] ${Hexadecimal(startAddr)} [next] ${Hexadecimal(nextlineStart)}" +
      p"[tgt] ${Hexadecimal(nextStartAddr)} [ftqIdx] $ftqIdx [jmp] v:${ftqOffset.valid}" +
      p" offset: ${ftqOffset.bits}\n"
  }
}

class FtqICacheInfo(implicit p: Parameters) extends XSBundle with HasICacheParameters {
  val startAddr           = UInt(VAddrBits.W)
  val nextlineStart       = UInt(VAddrBits.W)
  def crossCacheline = startAddr(blockOffBits - 1) === 1.U
  def fromFtqPcBundle(b: Ftq_RF_Components) = {
    this.startAddr := b.startAddr
    this.nextlineStart := b.nextLineAddr
    this
  }
}

class IFUICacheIO(implicit p: Parameters) extends XSBundle with HasICacheParameters {
  val icacheReady       = Output(Bool())
  val resp              = Vec(PortNumber, ValidIO(new ICacheMainPipeResp))
}

class FtqToICacheRequestBundle(implicit p: Parameters) extends XSBundle with HasICacheParameters {
  val pcMemRead           = Vec(5, new FtqICacheInfo)
  val readValid           = Vec(5, Bool())
}


class PredecodeWritebackBundle(implicit p:Parameters) extends XSBundle {
  val pc           = Vec(PredictWidth, UInt(VAddrBits.W))
  val pd           = Vec(PredictWidth, new PreDecodeInfo) // TODO: redefine Predecode
  val ftqIdx       = new FtqPtr
  val ftqOffset    = UInt(log2Ceil(PredictWidth).W)
  val misOffset    = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val cfiOffset    = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val target       = UInt(VAddrBits.W)
  val jalTarget    = UInt(VAddrBits.W)
  val instrRange   = Vec(PredictWidth, Bool())
}

// FTQ sends requests to the prefetcher
class PrefetchRequest(implicit p:Parameters) extends XSBundle {
  val target          = UInt(VAddrBits.W)
}

class FtqPrefechBundle(implicit p:Parameters) extends XSBundle {
  val req = DecoupledIO(new PrefetchRequest)
}

class FetchToIBuffer(implicit p: Parameters) extends XSBundle {
  val instrs    = Vec(PredictWidth, UInt(32.W))
  val valid     = UInt(PredictWidth.W)
  val enqEnable = UInt(PredictWidth.W)
  val pd        = Vec(PredictWidth, new PreDecodeInfo)
  val pc        = Vec(PredictWidth, UInt(VAddrBits.W))
  val foldpc    = Vec(PredictWidth, UInt(MemPredPCWidth.W))
  val ftqPtr       = new FtqPtr
  val ftqOffset    = Vec(PredictWidth, ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
  val ipf          = Vec(PredictWidth, Bool())
  val acf          = Vec(PredictWidth, Bool())
  val crossPageIPFFix = Vec(PredictWidth, Bool())
  val triggered    = Vec(PredictWidth, new TriggerCf)
}

// class BitWiseUInt(val width: Int, val init: UInt) extends Module {
//   val io = IO(new Bundle {
//     val set
//   })
// }
// Moved from BPU
abstract class GlobalHistory(implicit p: Parameters) extends XSBundle with HasBPUConst {
  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): GlobalHistory
}

class ShiftingGlobalHistory(implicit p: Parameters) extends GlobalHistory {
  val predHist = UInt(HistoryLength.W)

  def update(shift: UInt, taken: Bool, hist: UInt = this.predHist): ShiftingGlobalHistory = {
    val g = Wire(new ShiftingGlobalHistory)
    g.predHist := (hist << shift) | taken
    g
  }

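  // Shift the history by one bit per conditional-branch slot resolved in this
  // block: by the number of valid branch slots when none is taken, otherwise
  // up to and including the first taken slot; the inserted bit is the reduced
  // taken flag. Illustrative example with numBr = 2: both slots valid and only
  // the second taken gives shift = 2 and inserts a 1.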
  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): ShiftingGlobalHistory = {
    require(br_valids.length == numBr)
    require(real_taken_mask.length == numBr)
    val last_valid_idx = PriorityMux(
      br_valids.reverse :+ true.B,
      (numBr to 0 by -1).map(_.U(log2Ceil(numBr+1).W))
    )
    val first_taken_idx = PriorityEncoder(false.B +: real_taken_mask)
    val smaller = Mux(last_valid_idx < first_taken_idx,
      last_valid_idx,
      first_taken_idx
    )
    val shift = smaller
    val taken = real_taken_mask.reduce(_||_)
    update(shift, taken, this.predHist)
  }

  // static read
  def read(n: Int): Bool = predHist.asBools()(n)

  final def === (that: ShiftingGlobalHistory): Bool = {
    predHist === that.predHist
  }

  final def =/= (that: ShiftingGlobalHistory): Bool = !(this === that)
}

// circular global history pointer
class CGHPtr(implicit p: Parameters) extends CircularQueuePtr[CGHPtr](
  p => p(XSCoreParamsKey).HistoryLength
){
}

object CGHPtr {
  def apply(f: Bool, v: UInt)(implicit p: Parameters): CGHPtr = {
    val ptr = Wire(new CGHPtr)
    ptr.flag := f
    ptr.value := v
    ptr
  }
  def inverse(ptr: CGHPtr)(implicit p: Parameters): CGHPtr = {
    apply(!ptr.flag, ptr.value)
  }
}

class CircularGlobalHistory(implicit p: Parameters) extends GlobalHistory {
  val buffer = Vec(HistoryLength, Bool())
  type HistPtr = UInt
  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): CircularGlobalHistory = {
    this
  }
}

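// FoldedHistory compresses the most recent `len` bits of global history into
// `compLen` bits by XOR-folding: conceptually, history bit i contributes to
// folded bit (i % compLen). Illustrative example: len = 9, compLen = 4 gives
// folded(0) = h(0)^h(4)^h(8), folded(1) = h(1)^h(5), and so on. The value can
// be updated incrementally with up to `max_update_num` new outcomes per cycle
// instead of being recomputed from the full history.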
class FoldedHistory(val len: Int, val compLen: Int, val max_update_num: Int)(implicit p: Parameters)
  extends XSBundle with HasBPUConst {
  require(compLen >= 1)
  require(len > 0)
  // require(folded_len <= len)
  require(compLen >= max_update_num)
  val folded_hist = UInt(compLen.W)

  def need_oldest_bits = len > compLen
  def info = (len, compLen)
  def oldest_bit_to_get_from_ghr = (0 until max_update_num).map(len - _ - 1)
  def oldest_bit_pos_in_folded = oldest_bit_to_get_from_ghr map (_ % compLen)
  def oldest_bit_wrap_around = oldest_bit_to_get_from_ghr map (_ / compLen > 0)
  def oldest_bit_start = oldest_bit_pos_in_folded.head

  def get_oldest_bits_from_ghr(ghr: Vec[Bool], histPtr: CGHPtr) = {
    // TODO: wrap inc for histPtr value
    oldest_bit_to_get_from_ghr.map(i => ghr((histPtr + (i+1).U).value))
  }

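  // Rotate `src` left by the constant `shamt`: bits shifted out on the left
  // re-enter on the right. Illustrative example: an 8-bit value b7..b0 rotated
  // left by 3 becomes b4..b0 ## b7..b5.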
  def circular_shift_left(src: UInt, shamt: Int) = {
    val srcLen = src.getWidth
    val src_doubled = Cat(src, src)
    val shifted = src_doubled(srcLen*2-1-shamt, srcLen-shamt)
    shifted
  }

  // slow path, read bits from ghr
  def update(ghr: Vec[Bool], histPtr: CGHPtr, num: Int, taken: Bool): FoldedHistory = {
    val oldest_bits = VecInit(get_oldest_bits_from_ghr(ghr, histPtr))
    update(oldest_bits, num, taken)
  }


  // fast path, use pre-read oldest bits
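  // Incremental update: XOR out the `num` oldest bits that leave the len-bit
  // window (only those that wrap around the folded width), XOR in the newly
  // inserted taken bit at the position that ends up as bit 0 after shifting,
  // then rotate the folded value left by `num`.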
  def update(ob: Vec[Bool], num: Int, taken: Bool): FoldedHistory = {
    // do xors for several bitsets at specified bits
    def bitsets_xor(len: Int, bitsets: Seq[Seq[Tuple2[Int, Bool]]]) = {
      val res = Wire(Vec(len, Bool()))
      // println(f"num bitsets: ${bitsets.length}")
      // println(f"bitsets $bitsets")
      val resArr = Array.fill(len)(List[Bool]())
      for (bs <- bitsets) {
        for ((n, b) <- bs) {
          resArr(n) = b :: resArr(n)
        }
      }
      // println(f"${resArr.mkString}")
      // println(f"histLen: ${this.len}, foldedLen: $folded_len")
      for (i <- 0 until len) {
        // println(f"bit[$i], ${resArr(i).mkString}")
        if (resArr(i).length > 2) {
          println(f"[warning] update logic of folded history has two or more levels of xor gates! " +
            f"histlen:${this.len}, compLen:$compLen, at bit $i")
        }
        if (resArr(i).length == 0) {
          println(f"[error] bit $i is not assigned in folded hist update logic! histlen:${this.len}, compLen:$compLen")
        }
        res(i) := resArr(i).foldLeft(false.B)(_^_)
      }
      res.asUInt
    }

    val new_folded_hist = if (need_oldest_bits) {
      val oldest_bits = ob
      require(oldest_bits.length == max_update_num)
      // mask off bits that do not update
      val oldest_bits_masked = oldest_bits.zipWithIndex.map{
        case (ob, i) => ob && (i < num).B
      }
      // if a bit does not wrap around, it should not be xored when it exits
      val oldest_bits_set = (0 until max_update_num).filter(oldest_bit_wrap_around).map(i => (oldest_bit_pos_in_folded(i), oldest_bits_masked(i)))

      // println(f"old bits pos ${oldest_bits_set.map(_._1)}")

      // only the last bit could be 1, as we have at most one taken branch at a time
      val newest_bits_masked = VecInit((0 until max_update_num).map(i => taken && ((i+1) == num).B)).asUInt
      // if a bit does not wrap around, newest bits should not be xored onto it either
      val newest_bits_set = (0 until max_update_num).map(i => (compLen-1-i, newest_bits_masked(i)))

      // println(f"new bits set ${newest_bits_set.map(_._1)}")
      //
      val original_bits_masked = VecInit(folded_hist.asBools.zipWithIndex.map{
        case (fb, i) => fb && !(num >= (len-i)).B
      })
      val original_bits_set = (0 until compLen).map(i => (i, original_bits_masked(i)))

      // do xor then shift
      val xored = bitsets_xor(compLen, Seq(original_bits_set, oldest_bits_set, newest_bits_set))
      circular_shift_left(xored, num)
    } else {
      // histLen too short to wrap around
      ((folded_hist << num) | taken)(compLen-1,0)
    }

    val fh = WireInit(this)
    fh.folded_hist := new_folded_hist
    fh
  }
}

class AheadFoldedHistoryOldestBits(val len: Int, val max_update_num: Int)(implicit p: Parameters) extends XSBundle {
  val bits = Vec(max_update_num*2, Bool())
  // def info = (len, compLen)
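  // `bits` holds 2*max_update_num global-history bits read one cycle ahead,
  // covering every position at which the oldest bits may start. Once the
  // one-hot branch count of the previous cycle (brNumOH) is known, getRealOb
  // selects the window containing the true oldest bits.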
  def getRealOb(brNumOH: UInt): Vec[Bool] = {
    val ob = Wire(Vec(max_update_num, Bool()))
    for (i <- 0 until max_update_num) {
      ob(i) := Mux1H(brNumOH, bits.drop(i).take(numBr+1))
    }
    ob
  }
}

class AllAheadFoldedHistoryOldestBits(val gen: Seq[Tuple2[Int, Int]])(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val afhob = MixedVec(gen.filter(t => t._1 > t._2).map{_._1}
    .toSet.toList.map(l => new AheadFoldedHistoryOldestBits(l, numBr))) // remove duplicates
  require(gen.toSet.toList.equals(gen))
  def getObWithInfo(info: Tuple2[Int, Int]) = {
    val selected = afhob.filter(_.len == info._1)
    require(selected.length == 1)
    selected(0)
  }
  def read(ghv: Vec[Bool], ptr: CGHPtr) = {
    val hisLens = afhob.map(_.len)
    val bitsToRead = hisLens.flatMap(l => (0 until numBr*2).map(i => l-i-1)).toSet // remove duplicates
    val bitsWithInfo = bitsToRead.map(pos => (pos, ghv((ptr+(pos+1).U).value)))
    for (ob <- afhob) {
      for (i <- 0 until numBr*2) {
        val pos = ob.len - i - 1
        val bit_found = bitsWithInfo.filter(_._1 == pos).toList
        require(bit_found.length == 1)
        ob.bits(i) := bit_found(0)._2
      }
    }
  }
}

class AllFoldedHistories(val gen: Seq[Tuple2[Int, Int]])(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val hist = MixedVec(gen.map{case (l, cl) => new FoldedHistory(l, cl, numBr)})
  // println(gen.mkString)
  require(gen.toSet.toList.equals(gen))
  def getHistWithInfo(info: Tuple2[Int, Int]) = {
    val selected = hist.filter(_.info.equals(info))
    require(selected.length == 1)
    selected(0)
  }
  def autoConnectFrom(that: AllFoldedHistories) = {
    require(this.hist.length <= that.hist.length)
    for (h <- this.hist) {
      h := that.getHistWithInfo(h.info)
    }
  }
  def update(ghv: Vec[Bool], ptr: CGHPtr, shift: Int, taken: Bool): AllFoldedHistories = {
    val res = WireInit(this)
    for (i <- 0 until this.hist.length) {
      res.hist(i) := this.hist(i).update(ghv, ptr, shift, taken)
    }
    res
  }
  def update(afhob: AllAheadFoldedHistoryOldestBits, lastBrNumOH: UInt, shift: Int, taken: Bool): AllFoldedHistories = {
    val res = WireInit(this)
    for (i <- 0 until this.hist.length) {
      val fh = this.hist(i)
      if (fh.need_oldest_bits) {
        val info = fh.info
        val selectedAfhob = afhob.getObWithInfo(info)
        val ob = selectedAfhob.getRealOb(lastBrNumOH)
        res.hist(i) := this.hist(i).update(ob, shift, taken)
      } else {
        val dumb = Wire(Vec(numBr, Bool())) // not needed
        dumb := DontCare
        res.hist(i) := this.hist(i).update(dumb, shift, taken)
      }
    }
    res
  }

  def display(cond: Bool) = {
    for (h <- hist) {
      XSDebug(cond, p"hist len ${h.len}, folded len ${h.compLen}, value ${Binary(h.folded_hist)}\n")
    }
  }
}

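// Splits a virtual address into { tag | idx | instruction offset } for
// predictor table lookups; with multiple banks, the low idx bits select the
// bank and the remaining idx bits index within that bank.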
class TableAddr(val idxBits: Int, val banks: Int)(implicit p: Parameters) extends XSBundle {
  def tagBits = VAddrBits - idxBits - instOffsetBits

  val tag = UInt(tagBits.W)
  val idx = UInt(idxBits.W)
  val offset = UInt(instOffsetBits.W)

  def fromUInt(x: UInt) = x.asTypeOf(UInt(VAddrBits.W)).asTypeOf(this)
  def getTag(x: UInt) = fromUInt(x).tag
  def getIdx(x: UInt) = fromUInt(x).idx
  def getBank(x: UInt) = if (banks > 1) getIdx(x)(log2Up(banks) - 1, 0) else 0.U
  def getBankIdx(x: UInt) = if (banks > 1) getIdx(x)(idxBits - 1, log2Up(banks)) else getIdx(x)
}

trait BasicPrediction extends HasXSParameter {
  def cfiIndex: ValidUndirectioned[UInt]
  def target(pc: UInt): UInt
  def lastBrPosOH: Vec[Bool]
  def brTaken: Bool
  def shouldShiftVec: Vec[Bool]
  def fallThruError: Bool
}
class MinimalBranchPrediction(implicit p: Parameters) extends NewMicroBTBEntry with BasicPrediction {
  val valid = Bool()
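  // When the uBTB entry is invalid, ORing with a fill of !valid pins the
  // offset at all ones (PredictWidth-1), and cfiIndex.valid is deasserted.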
  def cfiIndex = {
    val res = Wire(ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
    res.valid := taken && valid
    res.bits := cfiOffset | Fill(res.bits.getWidth, !valid)
    res
  }
  def target(pc: UInt) = nextAddr
  def lastBrPosOH: Vec[Bool] = VecInit(brNumOH.asBools())
  def brTaken = takenOnBr
  def shouldShiftVec: Vec[Bool] = VecInit((0 until numBr).map(i => lastBrPosOH.drop(i+1).reduce(_||_)))
  def fallThruError: Bool = false.B // we do this check in later stages

  def fromMicroBTBEntry(valid: Bool, entry: NewMicroBTBEntry, pc: UInt) = {
    this.valid := valid
    this.nextAddr := Mux(valid, entry.nextAddr, pc + (FetchWidth*4).U)
    this.cfiOffset := entry.cfiOffset | Fill(cfiOffset.getWidth, !valid)
    this.taken := entry.taken && valid
    this.takenOnBr := entry.takenOnBr && valid
    this.brNumOH := Mux(valid, entry.brNumOH, 1.U((numBr+1).W))
  }
}
@chiselName
class FullBranchPrediction(implicit p: Parameters) extends XSBundle with HasBPUConst with BasicPrediction {
  val br_taken_mask = Vec(numBr, Bool())

  val slot_valids = Vec(totalSlot, Bool())

  val targets = Vec(totalSlot, UInt(VAddrBits.W))
  val jalr_target = UInt(VAddrBits.W) // special path for indirect predictors
  val offsets = Vec(totalSlot, UInt(log2Ceil(PredictWidth).W))
  val fallThroughAddr = UInt(VAddrBits.W)
  val fallThroughErr = Bool()

  val is_jal = Bool()
  val is_jalr = Bool()
  val is_call = Bool()
  val is_ret = Bool()
  val last_may_be_rvi_call = Bool()
  val is_br_sharing = Bool()

  // val call_is_rvc = Bool()
  val hit = Bool()

  def br_slot_valids = slot_valids.init
  def tail_slot_valid = slot_valids.last

  def br_valids = {
    VecInit(br_slot_valids :+ (tail_slot_valid && is_br_sharing))
  }

  def taken_mask_on_slot = {
    VecInit(
      (br_slot_valids zip br_taken_mask.init).map{ case (t, v) => t && v } :+ (
        tail_slot_valid && (
          is_br_sharing && br_taken_mask.last || !is_br_sharing
        )
      )
    )
  }

  def real_slot_taken_mask(): Vec[Bool] = {
    VecInit(taken_mask_on_slot.map(_ && hit))
  }

  // len numBr
  def real_br_taken_mask(): Vec[Bool] = {
    VecInit(
      taken_mask_on_slot.map(_ && hit).init :+
      (br_taken_mask.last && tail_slot_valid && is_br_sharing && hit)
    )
  }

  // the vec indicating if ghr should shift on each branch
  def shouldShiftVec =
    VecInit(br_valids.zipWithIndex.map{ case (v, i) =>
      v && !real_br_taken_mask.take(i).reduceOption(_||_).getOrElse(false.B)})

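  // One-hot over numBr+1 positions: bit 0 means no branch in this block
  // contributes to the history (miss, or no valid branch slot); bit i+1 means
  // slot i is the last branch whose outcome enters the history this cycle,
  // i.e. the first taken branch slot, or the last valid slot when none is taken.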
  def lastBrPosOH =
    VecInit((!hit || !br_valids.reduce(_||_)) +: // not hit or no brs in entry
      (0 until numBr).map(i =>
        br_valids(i) &&
        !real_br_taken_mask.take(i).reduceOption(_||_).getOrElse(false.B) && // no brs taken in front of it
        (real_br_taken_mask()(i) || !br_valids.drop(i+1).reduceOption(_||_).getOrElse(false.B)) && // taken, or no valid brs behind it
        hit
      )
    )

  def brTaken = (br_valids zip br_taken_mask).map{ case (a, b) => a && b && hit}.reduce(_||_)

  def target(pc: UInt): UInt = {
    val targetVec = targets :+ fallThroughAddr :+ (pc + (FetchWidth * 4).U)
    val tm = taken_mask_on_slot
    val selVecOH =
      tm.zipWithIndex.map{ case (t, i) => !tm.take(i).fold(false.B)(_||_) && t && hit} :+
      (!tm.asUInt.orR && hit) :+ !hit
    Mux1H(selVecOH, targetVec)
  }

  def fallThruError: Bool = hit && fallThroughErr

  def hit_taken_on_jmp =
    !real_slot_taken_mask().init.reduce(_||_) &&
    real_slot_taken_mask().last && !is_br_sharing
  def hit_taken_on_call = hit_taken_on_jmp && is_call
  def hit_taken_on_ret  = hit_taken_on_jmp && is_ret
  def hit_taken_on_jalr = hit_taken_on_jmp && is_jalr

  def cfiIndex = {
    val cfiIndex = Wire(ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
    cfiIndex.valid := real_slot_taken_mask().asUInt.orR
    // when no takens, set cfiIndex to PredictWidth-1
    cfiIndex.bits :=
      ParallelPriorityMux(real_slot_taken_mask(), offsets) |
      Fill(log2Ceil(PredictWidth), (!real_slot_taken_mask().asUInt.orR).asUInt)
    cfiIndex
  }

  def taken = br_taken_mask.reduce(_||_) || slot_valids.last // || (is_jal || is_jalr)

  def fromFtbEntry(entry: FTBEntry, pc: UInt, last_stage: Option[Tuple2[UInt, Bool]] = None) = {
    slot_valids := entry.brSlots.map(_.valid) :+ entry.tailSlot.valid
    targets := entry.getTargetVec(pc)
    jalr_target := targets.last
    offsets := entry.getOffsetVec
    is_jal := entry.tailSlot.valid && entry.isJal
    is_jalr := entry.tailSlot.valid && entry.isJalr
    is_call := entry.tailSlot.valid && entry.isCall
    is_ret := entry.tailSlot.valid && entry.isRet
    last_may_be_rvi_call := entry.last_may_be_rvi_call
    is_br_sharing := entry.tailSlot.valid && entry.tailSlot.sharing

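    // Fall-through sanity check: the carry-extended pftAddr must be strictly
    // greater than the fetch-block offset of pc; otherwise the entry's
    // fall-through is bogus and is replaced by pc plus one full fetch width.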
    val startLower        = Cat(0.U(1.W),    pc(instOffsetBits+log2Ceil(PredictWidth)-1, instOffsetBits))
    val endLowerwithCarry = Cat(entry.carry, entry.pftAddr)
    fallThroughErr := startLower >= endLowerwithCarry
    fallThroughAddr := Mux(fallThroughErr, pc + (FetchWidth * 4).U, entry.getFallThrough(pc))
  }

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"[taken_mask] ${Binary(br_taken_mask.asUInt)} [hit] $hit\n")
  }
}

class SpeculativeInfo(implicit p: Parameters) extends XSBundle
  with HasBPUConst with BPUUtils {
  val folded_hist = new AllFoldedHistories(foldedGHistInfos)
  val afhob = new AllAheadFoldedHistoryOldestBits(foldedGHistInfos)
  val lastBrNumOH = UInt((numBr+1).W)
  val histPtr = new CGHPtr
  val rasSp = UInt(log2Ceil(RasSize).W)
  val rasTop = new RASEntry

  def fromFtqRedirectSram(entry: Ftq_Redirect_SRAMEntry) = {
    folded_hist := entry.folded_hist
    afhob := entry.afhob
    lastBrNumOH := entry.lastBrNumOH
    histPtr := entry.histPtr
    rasSp := entry.rasSp
    rasTop := entry.rasEntry
    this
  }
}

@chiselName
class BranchPredictionBundle(implicit p: Parameters) extends XSBundle
  with HasBPUConst with BPUUtils {
  // def full_pred_info[T <: Data](x: T) = if (is_minimal) None else Some(x)
  val pc = UInt(VAddrBits.W)

  val valid = Bool()

  val hasRedirect = Bool()
  val ftq_idx = new FtqPtr
  // val hit = Bool()
  val is_minimal = Bool()
  val minimal_pred = new MinimalBranchPrediction
  val full_pred = new FullBranchPrediction

  val spec_info = new SpeculativeInfo


  // val specCnt = Vec(numBr, UInt(10.W))
  // val meta = UInt(MaxMetaLength.W)

  val ftb_entry = new FTBEntry()

  def target(pc: UInt) = Mux(is_minimal, minimal_pred.target(pc),     full_pred.target(pc))
  def cfiIndex         = Mux(is_minimal, minimal_pred.cfiIndex,       full_pred.cfiIndex)
  def lastBrPosOH      = Mux(is_minimal, minimal_pred.lastBrPosOH,    full_pred.lastBrPosOH)
  def brTaken          = Mux(is_minimal, minimal_pred.brTaken,        full_pred.brTaken)
  def shouldShiftVec   = Mux(is_minimal, minimal_pred.shouldShiftVec, full_pred.shouldShiftVec)
  def fallThruError    = Mux(is_minimal, minimal_pred.fallThruError,  full_pred.fallThruError)

  def getTarget = target(pc)
  def taken = cfiIndex.valid

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"[pc] ${Hexadecimal(pc)}\n")
    spec_info.folded_hist.display(cond)
    full_pred.display(cond)
    ftb_entry.display(cond)
  }
}

@chiselName
class BranchPredictionResp(implicit p: Parameters) extends XSBundle with HasBPUConst {
  // val valids = Vec(3, Bool())
  val s1 = new BranchPredictionBundle
  val s2 = new BranchPredictionBundle
  val s3 = new BranchPredictionBundle

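  // The prediction consumed downstream comes from the latest stage that is
  // valid and raises a redirect (s3 over s2); if neither redirects, the s1
  // prediction is used.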
  def selectedResp = {
    val res =
      PriorityMux(Seq(
        ((s3.valid && s3.hasRedirect) -> s3),
        ((s2.valid && s2.hasRedirect) -> s2),
        (s1.valid -> s1)
      ))
    // println("is minimal: ", res.is_minimal)
    res
  }
  def selectedRespIdx =
    PriorityMux(Seq(
      ((s3.valid && s3.hasRedirect) -> BP_S3),
      ((s2.valid && s2.hasRedirect) -> BP_S2),
      (s1.valid -> BP_S1)
    ))
  def lastStage = s3
}

class BpuToFtqBundle(implicit p: Parameters) extends BranchPredictionResp with HasBPUConst {
  val meta = UInt(MaxMetaLength.W)
}

object BpuToFtqBundle {
  def apply(resp: BranchPredictionResp)(implicit p: Parameters): BpuToFtqBundle = {
    val e = Wire(new BpuToFtqBundle())
    e.s1 := resp.s1
    e.s2 := resp.s2
    e.s3 := resp.s3

    e.meta := DontCare
    e
  }
}

class BranchPredictionUpdate(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val pc = UInt(VAddrBits.W)
  val spec_info = new SpeculativeInfo
  val ftb_entry = new FTBEntry()

  val cfi_idx = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val br_taken_mask = Vec(numBr, Bool())
  val jmp_taken = Bool()
  val mispred_mask = Vec(numBr+1, Bool())
  val pred_hit = Bool()
  val false_hit = Bool()
  val new_br_insert_pos = Vec(numBr, Bool())
  val old_entry = Bool()
  val meta = UInt(MaxMetaLength.W)
  val full_target = UInt(VAddrBits.W)
  val from_stage = UInt(2.W)
  val ghist = UInt(HistoryLength.W)

  def is_jal = ftb_entry.tailSlot.valid && ftb_entry.isJal
  def is_jalr = ftb_entry.tailSlot.valid && ftb_entry.isJalr
  def is_call = ftb_entry.tailSlot.valid && ftb_entry.isCall
  def is_ret = ftb_entry.tailSlot.valid && ftb_entry.isRet


  def fromFtqRedirectSram(entry: Ftq_Redirect_SRAMEntry) = {
    spec_info.fromFtqRedirectSram(entry)
  }

  def display(cond: Bool) = {
    XSDebug(cond, p"-----------BranchPredictionUpdate-----------\n")
    XSDebug(cond, p"[mispred_mask] ${Binary(mispred_mask.asUInt)} [false_hit] $false_hit\n")
    XSDebug(cond, p"[new_br_insert_pos] ${Binary(new_br_insert_pos.asUInt)}\n")
    XSDebug(cond, p"--------------------------------------------\n")
  }
}

class BranchPredictionRedirect(implicit p: Parameters) extends Redirect with HasBPUConst {
  // override def toPrintable: Printable = {
  //   p"-----------BranchPredictionRedirect----------- " +
  //     p"-----------cfiUpdate----------- " +
  //     p"[pc] ${Hexadecimal(cfiUpdate.pc)} " +
  //     p"[predTaken] ${cfiUpdate.predTaken}, [taken] ${cfiUpdate.taken}, [isMisPred] ${cfiUpdate.isMisPred} " +
  //     p"[target] ${Hexadecimal(cfiUpdate.target)} " +
  //     p"------------------------------- " +
  //     p"[robPtr] f=${robIdx.flag} v=${robIdx.value} " +
  //     p"[ftqPtr] f=${ftqIdx.flag} v=${ftqIdx.value} " +
  //     p"[ftqOffset] ${ftqOffset} " +
  //     p"[level] ${level}, [interrupt] ${interrupt} " +
  //     p"[stFtqIdx] f=${stFtqIdx.flag} v=${stFtqIdx.value} " +
  //     p"[stFtqOffset] ${stFtqOffset} " +
  //     p"\n"

  // }

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"-----------BranchPredictionRedirect----------- \n")
    XSDebug(cond, p"-----------cfiUpdate----------- \n")
    XSDebug(cond, p"[pc] ${Hexadecimal(cfiUpdate.pc)}\n")
    // XSDebug(cond, p"[hist] ${Binary(cfiUpdate.hist.predHist)}\n")
    XSDebug(cond, p"[br_hit] ${cfiUpdate.br_hit} [isMisPred] ${cfiUpdate.isMisPred}\n")
    XSDebug(cond, p"[pred_taken] ${cfiUpdate.predTaken} [taken] ${cfiUpdate.taken} [isMisPred] ${cfiUpdate.isMisPred}\n")
    XSDebug(cond, p"[target] ${Hexadecimal(cfiUpdate.target)} \n")
    XSDebug(cond, p"[shift] ${cfiUpdate.shift}\n")
    XSDebug(cond, p"------------------------------- \n")
    XSDebug(cond, p"[robPtr] f=${robIdx.flag} v=${robIdx.value}\n")
    XSDebug(cond, p"[ftqPtr] f=${ftqIdx.flag} v=${ftqIdx.value} \n")
    XSDebug(cond, p"[ftqOffset] ${ftqOffset} \n")
    XSDebug(cond, p"[stFtqIdx] f=${stFtqIdx.flag} v=${stFtqIdx.value}\n")
    XSDebug(cond, p"[stFtqOffset] ${stFtqOffset}\n")
    XSDebug(cond, p"---------------------------------------------- \n")
  }
}
717