/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/
package xiangshan.frontend

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import chisel3.experimental.chiselName
import xiangshan._
import xiangshan.frontend.icache._
import utils._
import scala.math._

/**
 * Fetch request sent from the FTQ to the IFU.
 *
 * The address fields are on the timing-critical fast path; the FTQ index and
 * the (valid, offset) pair of the predicted control-flow instruction travel on
 * the slow path.
 */
@chiselName
class FetchRequestBundle(implicit p: Parameters) extends XSBundle with HasICacheParameters {

  //fast path: Timing critical
  val startAddr       = UInt(VAddrBits.W)   // start VA of this fetch block
  val nextlineStart   = UInt(VAddrBits.W)   // start VA of the next cache line
  val nextStartAddr   = UInt(VAddrBits.W)   // start VA of the next fetch block (predicted target / fall-through)
  //slow path
  val ftqIdx          = new FtqPtr          // FTQ entry this request came from
  val ftqOffset       = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)) // offset of the predicted CFI, if any

  // A fetch block crosses a cache line when its start sits in the upper half
  // of the line (bit blockOffBits-1 set).
  def crossCacheline = startAddr(blockOffBits - 1) === 1.U

  /**
   * Populates this bundle from an FTQ PC-RAM read.
   *
   * When the stored entry is flagged with a fall-through error, recompute a
   * sane nextStartAddr: keep the upper address bits (selecting between this
   * line and the next depending on which half the block starts in), flip the
   * fetch-block index bit, keep the intra-block bits, and zero the
   * instruction-offset bits — i.e. the start of the next sequential block.
   *
   * NOTE(review): nextStartAddr is only driven inside the when(); presumably
   * the caller assigns it on the other path — confirm at the call sites.
   */
  def fromFtqPcBundle(b: Ftq_RF_Components) = {
    this.startAddr := b.startAddr
    this.nextlineStart := b.nextLineAddr
    when (b.fallThruError) {
      val nextBlockHigherTemp = Mux(startAddr(log2Ceil(PredictWidth)+instOffsetBits), b.startAddr, b.nextLineAddr)
      val nextBlockHigher = nextBlockHigherTemp(VAddrBits-1, log2Ceil(PredictWidth)+instOffsetBits+1)
      this.nextStartAddr :=
        Cat(nextBlockHigher,
          startAddr(log2Ceil(PredictWidth)+instOffsetBits) ^ 1.U(1.W),
          startAddr(log2Ceil(PredictWidth)+instOffsetBits-1, instOffsetBits),
          0.U(instOffsetBits.W)
        )
    }
    this // returned for call chaining
  }
  override def toPrintable: Printable = {
    p"[start] ${Hexadecimal(startAddr)} [next] ${Hexadecimal(nextlineStart)}" +
      p"[tgt] ${Hexadecimal(nextStartAddr)} [ftqIdx] $ftqIdx [jmp] v:${ftqOffset.valid}" +
      p" offset: ${ftqOffset.bits}\n"
  }
}

/** Address pair the FTQ hands to the ICache for one fetch block. */
class FtqICacheInfo(implicit p: Parameters)extends XSBundle with HasICacheParameters{
  val startAddr           = UInt(VAddrBits.W)
  val nextlineStart       = UInt(VAddrBits.W)
  // Same half-line criterion as FetchRequestBundle.crossCacheline.
  def crossCacheline =  startAddr(blockOffBits - 1) === 1.U
  def fromFtqPcBundle(b: Ftq_RF_Components) = {
    this.startAddr := b.startAddr
    this.nextlineStart := b.nextLineAddr
    this
  }
}

/** ICache-to-IFU interface: ready handshake plus one response per port. */
class IFUICacheIO(implicit p: Parameters)extends XSBundle with HasICacheParameters{
  val icacheReady       = Output(Bool())
  val resp              = Vec(PortNumber, ValidIO(new ICacheMainPipeResp))
}

/** FTQ-to-ICache request: 5 PC-RAM read slots with per-slot valids. */
class FtqToICacheRequestBundle(implicit p: Parameters)extends XSBundle with HasICacheParameters{
  val pcMemRead           = Vec(5, new FtqICacheInfo)
  val readValid           = Vec(5, Bool())
}


/**
 * Pre-decode results written back from the IFU to the FTQ: per-slot PCs and
 * pre-decode info, the detected mispredict/CFI offsets, redirect targets, and
 * the valid-instruction range of the fetch block.
 */
class PredecodeWritebackBundle(implicit p:Parameters) extends XSBundle {
  val pc           = Vec(PredictWidth, UInt(VAddrBits.W))
  val pd           = Vec(PredictWidth, new PreDecodeInfo) // TODO: redefine Predecode
  val ftqIdx       = new FtqPtr
  val ftqOffset    = UInt(log2Ceil(PredictWidth).W)
  val misOffset    = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)) // first mispredicted slot, if any
  val cfiOffset    = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)) // taken CFI slot, if any
  val target       = UInt(VAddrBits.W)
  val jalTarget    = UInt(VAddrBits.W)
  val instrRange   = Vec(PredictWidth, Bool())
}

// Ftq send req to Prefetch
class PrefetchRequest(implicit p:Parameters) extends XSBundle {
  val target          = UInt(VAddrBits.W)
}

/** Decoupled prefetch-request channel from the FTQ. */
class FtqPrefechBundle(implicit p:Parameters) extends XSBundle {
  val req = DecoupledIO(new PrefetchRequest)
}

/**
 * MMIO commit query: the IFU presents an FTQ pointer and is told whether that
 * entry is the last committed one.
 */
class mmioCommitRead(implicit p: Parameters) extends XSBundle {
  val mmioFtqPtr = Output(new FtqPtr)
  val mmioLastCommit = Input(Bool())
}

/**
 * Fetch results delivered from the IFU to the instruction buffer: raw
 * instruction bits plus per-slot metadata (pre-decode, PC, folded PC, fault
 * flags, trigger hits).
 */
class FetchToIBuffer(implicit p: Parameters) extends XSBundle {
  val instrs    = Vec(PredictWidth, UInt(32.W))
  val valid     = UInt(PredictWidth.W)       // per-slot valid bits
  val enqEnable = UInt(PredictWidth.W)       // per-slot enqueue enables
  val pd        = Vec(PredictWidth, new PreDecodeInfo)
  val pc        = Vec(PredictWidth, UInt(VAddrBits.W))
  val foldpc    = Vec(PredictWidth, UInt(MemPredPCWidth.W))
  val ftqPtr       = new FtqPtr
  val ftqOffset    = Vec(PredictWidth, ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
  val ipf          = Vec(PredictWidth, Bool())  // instruction page fault
  val acf          = Vec(PredictWidth, Bool())  // access fault
  val crossPageIPFFix = Vec(PredictWidth, Bool())
  val triggered    = Vec(PredictWidth, new TriggerCf)
}

// class BitWiseUInt(val width: Int, val init: UInt) extends Module {
//   val io = IO(new Bundle {
//     val set
//   })
// }
// Move from BPU
/** Common interface for global branch-history representations. */
abstract class GlobalHistory(implicit p: Parameters) extends XSBundle with HasBPUConst {
  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): GlobalHistory
}

/**
 * Global history kept as a flat shift register: each resolved branch shifts
 * its taken/not-taken bit in from the LSB.
 */
class ShiftingGlobalHistory(implicit p: Parameters) extends GlobalHistory {
  val predHist = UInt(HistoryLength.W)

  // Shift `shift` positions and insert `taken` at the bottom.
  def update(shift: UInt, taken: Bool, hist: UInt = this.predHist): ShiftingGlobalHistory = {
    val g = Wire(new ShiftingGlobalHistory)
    g.predHist := (hist << shift) | taken
    g
  }

  /**
   * Update from a fetch block's branch slots: shift by the number of branches
   * that actually execute — i.e. up to the first taken branch, or up to the
   * last valid branch when none is taken — inserting 1 iff any branch is taken.
   */
  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): ShiftingGlobalHistory = {
    require(br_valids.length == numBr)
    require(real_taken_mask.length == numBr)
    // Index one past the last valid branch (numBr..0, scanned from the top).
    val last_valid_idx = PriorityMux(
      br_valids.reverse :+ true.B,
      (numBr to 0 by -1).map(_.U(log2Ceil(numBr+1).W))
    )
    // Index of the first taken branch (0 when none taken, via the false.B guard).
    val first_taken_idx = PriorityEncoder(false.B +: real_taken_mask)
    val smaller = Mux(last_valid_idx < first_taken_idx,
      last_valid_idx,
      first_taken_idx
    )
    val shift = smaller
    val taken = real_taken_mask.reduce(_||_)
    update(shift, taken, this.predHist)
  }

  // static read
  def read(n: Int): Bool = predHist.asBools()(n)

  final def === (that: ShiftingGlobalHistory): Bool = {
    predHist === that.predHist
  }

  final def =/= (that: ShiftingGlobalHistory): Bool = !(this === that)
}

// circular global history pointer
class CGHPtr(implicit p: Parameters) extends CircularQueuePtr[CGHPtr](
  p => p(XSCoreParamsKey).HistoryLength
){
}

object CGHPtr {
  def apply(f: Bool, v: UInt)(implicit p: Parameters): CGHPtr = {
    val ptr = Wire(new CGHPtr)
    ptr.flag := f
    ptr.value := v
    ptr
  }
  // Pointer with the wrap flag inverted, same value.
  def inverse(ptr: CGHPtr)(implicit p: Parameters): CGHPtr = {
    apply(!ptr.flag, ptr.value)
  }
}

/**
 * Global history as a circular bit buffer addressed by CGHPtr.
 * NOTE(review): update() is a no-op here; presumably the buffer is maintained
 * externally and this class only carries the storage — confirm with the BPU.
 */
class CircularGlobalHistory(implicit p: Parameters) extends GlobalHistory {
  val buffer = Vec(HistoryLength, Bool())
  type HistPtr = UInt
  def update(br_valids: Vec[Bool], real_taken_mask: Vec[Bool]): CircularGlobalHistory = {
    this
  }
}

/**
 * A folded (XOR-compressed) view of `len` bits of global history squeezed
 * into `compLen` bits, updatable by up to `max_update_num` branches per cycle.
 *
 * When len > compLen the history wraps around the folded register, so on each
 * update the oldest bit(s) leaving the window must be XORed out and the new
 * taken bit XORed in before a circular shift.  When len <= compLen a plain
 * shift suffices.
 */
class FoldedHistory(val len: Int, val compLen: Int, val max_update_num: Int)(implicit p: Parameters)
  extends XSBundle with HasBPUConst {
  require(compLen >= 1)
  require(len > 0)
  // require(folded_len <= len)
  require(compLen >= max_update_num)
  val folded_hist = UInt(compLen.W)

  // Oldest bits only matter when the raw history is longer than the folded one.
  def need_oldest_bits           = len > compLen
  def info                       = (len, compLen)
  // Positions (in the raw history) of the up-to-max_update_num oldest bits.
  def oldest_bit_to_get_from_ghr = (0 until max_update_num).map(len - _ - 1)
  // Where each of those bits lands inside the folded register.
  def oldest_bit_pos_in_folded   = oldest_bit_to_get_from_ghr map (_ % compLen)
  // Whether each oldest bit has wrapped (and thus was XORed into the fold).
  def oldest_bit_wrap_around     = oldest_bit_to_get_from_ghr map (_ / compLen > 0)
  def oldest_bit_start           = oldest_bit_pos_in_folded.head

  def get_oldest_bits_from_ghr(ghr: Vec[Bool], histPtr: CGHPtr) = {
    // TODO: wrap inc for histPtr value
    oldest_bit_to_get_from_ghr.map(i => ghr((histPtr + (i+1).U).value))
  }

  // Rotate `src` left by a compile-time constant `shamt`.
  def circular_shift_left(src: UInt, shamt: Int) = {
    val srcLen = src.getWidth
    val src_doubled = Cat(src, src)
    val shifted = src_doubled(srcLen*2-1-shamt, srcLen-shamt)
    shifted
  }

  // slow path, read bits from ghr
  def update(ghr: Vec[Bool], histPtr: CGHPtr, num: Int, taken: Bool): FoldedHistory = {
    val oldest_bits = VecInit(get_oldest_bits_from_ghr(ghr, histPtr))
    update(oldest_bits, num, taken)
  }


  // fast path, use pre-read oldest bits
  def update(ob: Vec[Bool], num: Int, taken: Bool): FoldedHistory = {
    // do xors for several bitsets at specified bits
    // Each bitset is a list of (bit-position, value); all values landing on the
    // same position are XOR-reduced.  Elaboration-time checks flag positions
    // with deep XOR trees or no driver at all.
    def bitsets_xor(len: Int, bitsets: Seq[Seq[Tuple2[Int, Bool]]]) = {
      val res = Wire(Vec(len, Bool()))
      // println(f"num bitsets: ${bitsets.length}")
      // println(f"bitsets $bitsets")
      val resArr = Array.fill(len)(List[Bool]())
      for (bs <- bitsets) {
        for ((n, b) <- bs) {
          resArr(n) = b :: resArr(n)
        }
      }
      // println(f"${resArr.mkString}")
      // println(f"histLen: ${this.len}, foldedLen: $folded_len")
      for (i <- 0 until len) {
        // println(f"bit[$i], ${resArr(i).mkString}")
        if (resArr(i).length > 2) {
          println(f"[warning] update logic of foldest history has two or more levels of xor gates! " +
            f"histlen:${this.len}, compLen:$compLen, at bit $i")
        }
        if (resArr(i).length == 0) {
          println(f"[error] bits $i is not assigned in folded hist update logic! histlen:${this.len}, compLen:$compLen")
        }
        res(i) := resArr(i).foldLeft(false.B)(_^_)
      }
      res.asUInt
    }

    val new_folded_hist = if (need_oldest_bits) {
      val oldest_bits = ob
      require(oldest_bits.length == max_update_num)
      // mask off bits that do not update
      val oldest_bits_masked = oldest_bits.zipWithIndex.map{
        case (ob, i) => ob && (i < num).B
      }
      // if a bit does not wrap around, it should not be xored when it exits
      val oldest_bits_set = (0 until max_update_num).filter(oldest_bit_wrap_around).map(i => (oldest_bit_pos_in_folded(i), oldest_bits_masked(i)))

      // println(f"old bits pos ${oldest_bits_set.map(_._1)}")

      // only the last bit could be 1, as we have at most one taken branch at a time
      val newest_bits_masked = VecInit((0 until max_update_num).map(i => taken && ((i+1) == num).B)).asUInt
      // if a bit does not wrap around, newest bits should not be xored onto it either
      val newest_bits_set = (0 until max_update_num).map(i => (compLen-1-i, newest_bits_masked(i)))

      // println(f"new bits set ${newest_bits_set.map(_._1)}")
      //
      // Bits about to be shifted past the end of the raw window are cleared.
      val original_bits_masked = VecInit(folded_hist.asBools.zipWithIndex.map{
        case (fb, i) => fb && !(num >= (len-i)).B
      })
      val original_bits_set = (0 until compLen).map(i => (i, original_bits_masked(i)))

      // do xor then shift
      val xored = bitsets_xor(compLen, Seq(original_bits_set, oldest_bits_set, newest_bits_set))
      circular_shift_left(xored, num)
    } else {
      // histLen too short to wrap around
      ((folded_hist << num) | taken)(compLen-1,0)
    }

    val fh = WireInit(this)
    fh.folded_hist := new_folded_hist
    fh
  }
}

/**
 * Pre-read ("ahead") oldest history bits for one history length: 2*numBr bits
 * so the fast-path update can pick the right window after knowing how many
 * branches the previous block shifted (one-hot brNumOH).
 */
class AheadFoldedHistoryOldestBits(val len: Int, val max_update_num: Int)(implicit p: Parameters) extends XSBundle {
  val bits = Vec(max_update_num*2, Bool())
  // def info = (len, compLen)
  // Select the real oldest-bit window given last cycle's branch count (one-hot).
  def getRealOb(brNumOH: UInt): Vec[Bool] = {
    val ob = Wire(Vec(max_update_num, Bool()))
    for (i <- 0 until max_update_num) {
      ob(i) := Mux1H(brNumOH, bits.drop(i).take(numBr+1))
    }
    ob
  }
}

/**
 * Collection of ahead-read oldest bits, one entry per distinct history length
 * that actually needs oldest bits (len > compLen).
 */
class AllAheadFoldedHistoryOldestBits(val gen: Seq[Tuple2[Int, Int]])(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val afhob = MixedVec(gen.filter(t => t._1 > t._2).map{_._1}
    .toSet.toList.map(l => new AheadFoldedHistoryOldestBits(l, numBr))) // remove duplicates
  require(gen.toSet.toList.equals(gen))
  def getObWithInfo(info: Tuple2[Int, Int]) = {
    val selected = afhob.filter(_.len == info._1)
    require(selected.length == 1)
    selected(0)
  }
  // Read every needed oldest bit from the global history vector, sharing reads
  // between history lengths that want the same position.
  def read(ghv: Vec[Bool], ptr: CGHPtr) = {
    val hisLens = afhob.map(_.len)
    val bitsToRead = hisLens.flatMap(l => (0 until numBr*2).map(i => l-i-1)).toSet // remove duplicates
    val bitsWithInfo = bitsToRead.map(pos => (pos, ghv((ptr+(pos+1).U).value)))
    for (ob <- afhob) {
      for (i <- 0 until numBr*2) {
        val pos = ob.len - i - 1
        val bit_found = bitsWithInfo.filter(_._1 == pos).toList
        require(bit_found.length == 1)
        ob.bits(i) := bit_found(0)._2
      }
    }
  }
}

/** All folded histories used by the predictors, one per (len, compLen) pair. */
class AllFoldedHistories(val gen: Seq[Tuple2[Int, Int]])(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val hist = MixedVec(gen.map{case (l, cl) => new FoldedHistory(l, cl, numBr)})
  // println(gen.mkString)
  require(gen.toSet.toList.equals(gen))
  def getHistWithInfo(info: Tuple2[Int, Int]) = {
    val selected = hist.filter(_.info.equals(info))
    require(selected.length == 1)
    selected(0)
  }
  // Connect each of our histories from the matching one in `that` (which may
  // carry a superset).
  def autoConnectFrom(that: AllFoldedHistories) = {
    require(this.hist.length <= that.hist.length)
    for (h <- this.hist) {
      h := that.getHistWithInfo(h.info)
    }
  }
  // Slow-path update: read oldest bits straight from the global history vector.
  def update(ghv: Vec[Bool], ptr: CGHPtr, shift: Int, taken: Bool): AllFoldedHistories = {
    val res = WireInit(this)
    for (i <- 0 until this.hist.length) {
      res.hist(i) := this.hist(i).update(ghv, ptr, shift, taken)
    }
    res
  }
  // Fast-path update: use the pre-read (ahead) oldest bits where needed.
  def update(afhob: AllAheadFoldedHistoryOldestBits, lastBrNumOH: UInt, shift: Int, taken: Bool): AllFoldedHistories = {
    val res = WireInit(this)
    for (i <- 0 until this.hist.length) {
      val fh = this.hist(i)
      if (fh.need_oldest_bits) {
        val info = fh.info
        val selectedAfhob = afhob.getObWithInfo(info)
        val ob = selectedAfhob.getRealOb(lastBrNumOH)
        res.hist(i) := this.hist(i).update(ob, shift, taken)
      } else {
        val dumb = Wire(Vec(numBr, Bool())) // not needed
        dumb := DontCare
        res.hist(i) := this.hist(i).update(dumb, shift, taken)
      }
    }
    res
  }

  def display(cond: Bool) = {
    for (h <- hist) {
      XSDebug(cond, p"hist len ${h.len}, folded len ${h.compLen}, value ${Binary(h.folded_hist)}\n")
    }
  }
}

/**
 * Decomposition of a virtual address into (tag, idx, offset) for a banked
 * predictor table with 2^idxBits sets.
 */
class TableAddr(val idxBits: Int, val banks: Int)(implicit p: Parameters) extends XSBundle{
  def tagBits = VAddrBits - idxBits - instOffsetBits

  val tag = UInt(tagBits.W)
  val idx = UInt(idxBits.W)
  val offset = UInt(instOffsetBits.W)

  def fromUInt(x: UInt) = x.asTypeOf(UInt(VAddrBits.W)).asTypeOf(this)
  def getTag(x: UInt) = fromUInt(x).tag
  def getIdx(x: UInt) = fromUInt(x).idx
  // Low idx bits select the bank; the rest index within the bank.
  def getBank(x: UInt) = if (banks > 1) getIdx(x)(log2Up(banks) - 1, 0) else 0.U
  def getBankIdx(x: UInt) = if (banks > 1) getIdx(x)(idxBits - 1, log2Up(banks)) else getIdx(x)
}

/** Minimal interface any per-block branch prediction must provide. */
trait BasicPrediction extends HasXSParameter {
  def cfiIndex: ValidUndirectioned[UInt]
  def target(pc: UInt): UInt
  def lastBrPosOH: Vec[Bool]
  def brTaken: Bool
  def shouldShiftVec: Vec[Bool]
  def fallThruError: Bool
}

/**
 * Full prediction for one fetch block from an FTB entry: per-slot taken mask,
 * targets, CFI type flags, fall-through address, and hit status.  The last
 * ("tail") slot can hold either a jump or — when is_br_sharing — an extra
 * conditional branch.
 */
@chiselName
class FullBranchPrediction(implicit p: Parameters) extends XSBundle with HasBPUConst with BasicPrediction {
  val br_taken_mask = Vec(numBr, Bool())

  val slot_valids = Vec(totalSlot, Bool())

  val targets = Vec(totalSlot, UInt(VAddrBits.W))
  val jalr_target = UInt(VAddrBits.W) // special path for indirect predictors
  val offsets = Vec(totalSlot, UInt(log2Ceil(PredictWidth).W))
  val fallThroughAddr = UInt(VAddrBits.W)
  val fallThroughErr = Bool()

  val is_jal = Bool()
  val is_jalr = Bool()
  val is_call = Bool()
  val is_ret = Bool()
  val last_may_be_rvi_call = Bool()
  val is_br_sharing = Bool()

  // val call_is_rvc = Bool()
  val hit = Bool()

  def br_slot_valids = slot_valids.init
  def tail_slot_valid = slot_valids.last

  // Valid conditional branches: all branch slots, plus the tail slot when it
  // is shared as a branch.
  def br_valids = {
    VecInit(br_slot_valids :+ (tail_slot_valid && is_br_sharing))
  }

  // Per-slot taken: branches need their taken bit; an unshared tail slot
  // (unconditional jump) is taken whenever valid.
  def taken_mask_on_slot = {
    VecInit(
      (br_slot_valids zip br_taken_mask.init).map{ case (t, v) => t && v } :+ (
        tail_slot_valid && (
          is_br_sharing && br_taken_mask.last || !is_br_sharing
        )
      )
    )
  }

  def real_slot_taken_mask(): Vec[Bool] = {
    VecInit(taken_mask_on_slot.map(_ && hit))
  }

  // len numBr
  def real_br_taken_mask(): Vec[Bool] = {
    VecInit(
      taken_mask_on_slot.map(_ && hit).init :+
      (br_taken_mask.last && tail_slot_valid && is_br_sharing && hit)
    )
  }

  // the vec indicating if ghr should shift on each branch
  // A branch shifts the GHR unless an earlier branch in the block is taken.
  def shouldShiftVec =
    VecInit(br_valids.zipWithIndex.map{ case (v, i) =>
      v && !real_br_taken_mask.take(i).reduceOption(_||_).getOrElse(false.B)})

  // One-hot position of the last branch that executes: element 0 means
  // "no branch executes" (miss or no branches); element i+1 means branch i is
  // the last one (taken, or no valid branch after it).
  def lastBrPosOH =
    VecInit((!hit || !br_valids.reduce(_||_)) +: // not hit or no brs in entry
      (0 until numBr).map(i =>
        br_valids(i) &&
        !real_br_taken_mask.take(i).reduceOption(_||_).getOrElse(false.B) && // no brs taken in front it
        (real_br_taken_mask()(i) || !br_valids.drop(i+1).reduceOption(_||_).getOrElse(false.B)) && // no brs behind it
        hit
      )
    )

  def brTaken = (br_valids zip br_taken_mask).map{ case (a, b) => a && b && hit}.reduce(_||_)

  // Next-fetch target: the first taken slot's target; else the fall-through
  // address on a hit; else pc + fetch-block size on a miss.
  def target(pc: UInt): UInt = {
    val targetVec = targets :+ fallThroughAddr :+ (pc + (FetchWidth * 4).U)
    val tm = taken_mask_on_slot
    val selVecOH =
      tm.zipWithIndex.map{ case (t, i) => !tm.take(i).fold(false.B)(_||_) && t && hit} :+
      (!tm.asUInt.orR && hit) :+ !hit
    Mux1H(selVecOH, targetVec)
  }

  def fallThruError: Bool = hit && fallThroughErr

  // Taken on the tail slot holding a real jump (not a shared branch), with no
  // earlier taken slot.
  def hit_taken_on_jmp =
    !real_slot_taken_mask().init.reduce(_||_) &&
    real_slot_taken_mask().last && !is_br_sharing
  def hit_taken_on_call = hit_taken_on_jmp && is_call
  def hit_taken_on_ret  = hit_taken_on_jmp && is_ret
  def hit_taken_on_jalr = hit_taken_on_jmp && is_jalr

  def cfiIndex = {
    val cfiIndex = Wire(ValidUndirectioned(UInt(log2Ceil(PredictWidth).W)))
    cfiIndex.valid := real_slot_taken_mask().asUInt.orR
    // when no takens, set cfiIndex to PredictWidth-1
    cfiIndex.bits :=
      ParallelPriorityMux(real_slot_taken_mask(), offsets) |
      Fill(log2Ceil(PredictWidth), (!real_slot_taken_mask().asUInt.orR).asUInt)
    cfiIndex
  }

  def taken = br_taken_mask.reduce(_||_) || slot_valids.last // || (is_jal || is_jalr)

  /**
   * Fill this prediction from an FTB entry.  Also computes the fall-through
   * sanity check: the partial fall-through address must lie strictly above the
   * block start, otherwise fall back to pc + fetch-block size.
   */
  def fromFtbEntry(entry: FTBEntry, pc: UInt, last_stage: Option[Tuple2[UInt, Bool]] = None) = {
    slot_valids := entry.brSlots.map(_.valid) :+ entry.tailSlot.valid
    targets := entry.getTargetVec(pc)
    jalr_target := targets.last
    offsets := entry.getOffsetVec
    is_jal := entry.tailSlot.valid && entry.isJal
    is_jalr := entry.tailSlot.valid && entry.isJalr
    is_call := entry.tailSlot.valid && entry.isCall
    is_ret := entry.tailSlot.valid && entry.isRet
    last_may_be_rvi_call := entry.last_may_be_rvi_call
    is_br_sharing := entry.tailSlot.valid && entry.tailSlot.sharing

    val startLower        = Cat(0.U(1.W), pc(instOffsetBits+log2Ceil(PredictWidth)-1, instOffsetBits))
    val endLowerwithCarry = Cat(entry.carry, entry.pftAddr)
    fallThroughErr := startLower >= endLowerwithCarry
    fallThroughAddr := Mux(fallThroughErr, pc + (FetchWidth * 4).U, entry.getFallThrough(pc))
  }

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"[taken_mask] ${Binary(br_taken_mask.asUInt)} [hit] $hit\n")
  }
}

/**
 * Speculative predictor state snapshotted per prediction, needed to restore
 * history on a redirect: folded histories, ahead-read oldest bits, last branch
 * count (one-hot), history pointer, and RAS top/pointer.
 */
class SpeculativeInfo(implicit p: Parameters) extends XSBundle
  with HasBPUConst with BPUUtils {
  val folded_hist = new AllFoldedHistories(foldedGHistInfos)
  val afhob = new AllAheadFoldedHistoryOldestBits(foldedGHistInfos)
  val lastBrNumOH = UInt((numBr+1).W)
  val histPtr = new CGHPtr
  val rasSp = UInt(log2Ceil(RasSize).W)
  val rasTop = new RASEntry
}

/** One pipeline stage's prediction output; delegates queries to full_pred. */
@chiselName
class BranchPredictionBundle(implicit p: Parameters) extends XSBundle
  with HasBPUConst with BPUUtils {
  val pc = UInt(VAddrBits.W)
  val valid = Bool()
  val hasRedirect = Bool()      // this stage overrides an earlier stage's prediction
  val ftq_idx = new FtqPtr
  val full_pred = new FullBranchPrediction


  def target(pc: UInt) = full_pred.target(pc)
  def cfiIndex         = full_pred.cfiIndex
  def lastBrPosOH      = full_pred.lastBrPosOH
  def brTaken          = full_pred.brTaken
  def shouldShiftVec   = full_pred.shouldShiftVec
  def fallThruError    = full_pred.fallThruError

  def getTarget = target(pc)
  def taken = cfiIndex.valid

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"[pc] ${Hexadecimal(pc)}\n")
    full_pred.display(cond)
  }
}

/**
 * BPU response across the s1/s2/s3 pipeline stages, plus last-stage metadata
 * for later update.  Stage selection prefers the latest stage that redirects.
 */
@chiselName
class BranchPredictionResp(implicit p: Parameters) extends XSBundle with HasBPUConst {
  // val valids = Vec(3, Bool())
  val s1 = new BranchPredictionBundle
  val s2 = new BranchPredictionBundle
  val s3 = new BranchPredictionBundle

  val last_stage_meta = UInt(MaxMetaLength.W)
  val last_stage_spec_info = new SpeculativeInfo
  val last_stage_ftb_entry = new FTBEntry

  def selectedResp ={
    val res =
      PriorityMux(Seq(
        ((s3.valid && s3.hasRedirect) -> s3),
        ((s2.valid && s2.hasRedirect) -> s2),
        (s1.valid -> s1)
      ))
    res
  }
  def selectedRespIdx =
    PriorityMux(Seq(
      ((s3.valid && s3.hasRedirect) -> BP_S3),
      ((s2.valid && s2.hasRedirect) -> BP_S2),
      (s1.valid -> BP_S1)
    ))
  def lastStage = s3
}

/** BPU-to-FTQ payload; identical in shape to BranchPredictionResp. */
class BpuToFtqBundle(implicit p: Parameters) extends BranchPredictionResp {}

/**
 * Training packet sent back to the BPU when a fetch block commits: the
 * resolved outcome, the (possibly regenerated) FTB entry, mispredict
 * information, and the predictor metadata saved at prediction time.
 */
class BranchPredictionUpdate(implicit p: Parameters) extends XSBundle with HasBPUConst {
  val pc = UInt(VAddrBits.W)
  val spec_info = new SpeculativeInfo
  val ftb_entry = new FTBEntry()

  val cfi_idx = ValidUndirectioned(UInt(log2Ceil(PredictWidth).W))
  val br_taken_mask = Vec(numBr, Bool())
  val jmp_taken = Bool()
  val mispred_mask = Vec(numBr+1, Bool())
  val pred_hit = Bool()
  val false_hit = Bool()
  val new_br_insert_pos = Vec(numBr, Bool())
  val old_entry = Bool()        // updating an existing FTB entry rather than allocating
  val meta = UInt(MaxMetaLength.W)
  val full_target = UInt(VAddrBits.W)
  val from_stage = UInt(2.W)
  val ghist = UInt(HistoryLength.W)

  def is_jal = ftb_entry.tailSlot.valid && ftb_entry.isJal
  def is_jalr = ftb_entry.tailSlot.valid && ftb_entry.isJalr
  def is_call = ftb_entry.tailSlot.valid && ftb_entry.isCall
  def is_ret = ftb_entry.tailSlot.valid && ftb_entry.isRet

  def display(cond: Bool) = {
    XSDebug(cond, p"-----------BranchPredictionUpdate-----------\n")
    XSDebug(cond, p"[mispred_mask] ${Binary(mispred_mask.asUInt)} [false_hit] $false_hit\n")
    XSDebug(cond, p"[new_br_insert_pos] ${Binary(new_br_insert_pos.asUInt)}\n")
    XSDebug(cond, p"--------------------------------------------\n")
  }
}

/** Redirect caused by a branch misprediction; adds BPU debug printing. */
class BranchPredictionRedirect(implicit p: Parameters) extends Redirect with HasBPUConst {
  // override def toPrintable: Printable = {
  //   p"-----------BranchPredictionRedirect----------- " +
  //     p"-----------cfiUpdate----------- " +
  //     p"[pc] ${Hexadecimal(cfiUpdate.pc)} " +
  //     p"[predTaken] ${cfiUpdate.predTaken}, [taken] ${cfiUpdate.taken}, [isMisPred] ${cfiUpdate.isMisPred} " +
  //     p"[target] ${Hexadecimal(cfiUpdate.target)} " +
  //     p"------------------------------- " +
  //     p"[robPtr] f=${robIdx.flag} v=${robIdx.value} " +
  //     p"[ftqPtr] f=${ftqIdx.flag} v=${ftqIdx.value} " +
  //     p"[ftqOffset] ${ftqOffset} " +
  //     p"[level] ${level}, [interrupt] ${interrupt} " +
  //     p"[stFtqIdx] f=${stFtqIdx.flag} v=${stFtqIdx.value} " +
  //     p"[stFtqOffset] ${stFtqOffset} " +
  //     p"\n"

  // }

  def display(cond: Bool): Unit = {
    XSDebug(cond, p"-----------BranchPredictionRedirect----------- \n")
    XSDebug(cond, p"-----------cfiUpdate----------- \n")
    XSDebug(cond, p"[pc] ${Hexadecimal(cfiUpdate.pc)}\n")
    // XSDebug(cond, p"[hist] ${Binary(cfiUpdate.hist.predHist)}\n")
    XSDebug(cond, p"[br_hit] ${cfiUpdate.br_hit} [isMisPred] ${cfiUpdate.isMisPred}\n")
    XSDebug(cond, p"[pred_taken] ${cfiUpdate.predTaken} [taken] ${cfiUpdate.taken} [isMisPred] ${cfiUpdate.isMisPred}\n")
    XSDebug(cond, p"[target] ${Hexadecimal(cfiUpdate.target)} \n")
    XSDebug(cond, p"[shift] ${cfiUpdate.shift}\n")
    XSDebug(cond, p"------------------------------- \n")
    XSDebug(cond, p"[robPtr] f=${robIdx.flag} v=${robIdx.value}\n")
    XSDebug(cond, p"[ftqPtr] f=${ftqIdx.flag} v=${ftqIdx.value} \n")
    XSDebug(cond, p"[ftqOffset] ${ftqOffset} \n")
    XSDebug(cond, p"[stFtqIdx] f=${stFtqIdx.flag} v=${stFtqIdx.value}\n")
    XSDebug(cond, p"[stFtqOffset] ${stFtqOffset}\n")
    XSDebug(cond, p"---------------------------------------------- \n")
  }
}