package xiangshan.frontend

import chisel3._
import chisel3.util._
import device.RAMHelper
import xiangshan._
import utils._
import xiangshan.cache._

trait HasIFUConst { this: XSModule =>
  val resetVector = 0x80000000L // TODO: set reset vec
  val groupAlign = log2Up(FetchWidth * 4 * 2)
  def groupPC(pc: UInt): UInt = Cat(pc(VAddrBits-1, groupAlign), 0.U(groupAlign.W))
  // each 1 bit in mask stands for 2 Bytes
  def mask(pc: UInt): UInt = (Fill(PredictWidth * 2, 1.U(1.W)) >> pc(groupAlign - 1, 1))(PredictWidth - 1, 0)
  def snpc(pc: UInt): UInt = pc + (PopCount(mask(pc)) << 1)

  val IFUDebug = true
}

class GlobalHistoryInfo() extends XSBundle {
  val sawNTBr = Bool()
  val takenOnBr = Bool()
  val saveHalfRVI = Bool()
  def shifted = takenOnBr || sawNTBr
  def newPtr(ptr: UInt) = Mux(shifted, ptr - 1.U, ptr)
  implicit val name = "IFU"
  def debug = XSDebug("[GHInfo] sawNTBr=%d, takenOnBr=%d, saveHalfRVI=%d\n", sawNTBr, takenOnBr, saveHalfRVI)
  // override def toString(): String = "histPtr=%d, sawNTBr=%d, takenOnBr=%d, saveHalfRVI=%d".format(histPtr, sawNTBr, takenOnBr, saveHalfRVI)
}

class IFUIO extends XSBundle
{
  val fetchPacket = DecoupledIO(new FetchPacket)
  val redirect = Flipped(ValidIO(new Redirect))
  val outOfOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfo))
  val inOrderBrInfo = Flipped(ValidIO(new BranchUpdateInfo))
  val icacheReq = DecoupledIO(new ICacheReq)
  val icacheResp = Flipped(DecoupledIO(new ICacheResp))
  val icacheFlush = Output(UInt(2.W))
  // val loopBufPar = Flipped(new LoopBufferParameters)
}

class IFU extends XSModule with HasIFUConst
{
  val io = IO(new IFUIO)
  val bpu = BPU(EnableBPU)
  val pd = Module(new PreDecode)
  val loopBuffer = if(EnableLB) { Module(new LoopBuffer) } else { Module(new FakeLoopBuffer) }

  val if2_redirect, if3_redirect, if4_redirect = WireInit(false.B)
  val if1_flush, if2_flush, if3_flush, if4_flush = WireInit(false.B)

  val loopBufPar = loopBuffer.io.loopBufPar
  val inLoop = WireInit(loopBuffer.io.out.valid)
  val icacheResp = WireInit(Mux(inLoop, loopBuffer.io.out.bits, io.icacheResp.bits))

  if4_flush := io.redirect.valid || loopBufPar.LBredirect.valid
  if3_flush := if4_flush || if4_redirect
  if2_flush := if3_flush || if3_redirect
  if1_flush := if2_flush || if2_redirect

  loopBuffer.io.flush := io.redirect.valid

  //********************** IF1 ****************************//
  val if1_valid = !reset.asBool && GTimer() > 500.U
  val if1_npc = WireInit(0.U(VAddrBits.W))
  val if2_ready = WireInit(false.B)
  val if1_fire = if1_valid && (if2_ready || if1_flush) && (inLoop || io.icacheReq.ready)

  val if1_histPtr, if2_histPtr, if3_histPtr, if4_histPtr = Wire(UInt(log2Up(ExtHistoryLength).W))
  val if2_newPtr, if3_newPtr, if4_newPtr = Wire(UInt(log2Up(ExtHistoryLength).W))

  val extHist = RegInit(VecInit(Seq.fill(ExtHistoryLength)(0.U(1.W))))
  val shiftPtr = WireInit(false.B)
  val newPtr = Wire(UInt(log2Up(ExtHistoryLength).W))
  val ptr = Mux(shiftPtr, newPtr, if1_histPtr)
  val hist = Wire(Vec(HistoryLength, UInt(1.W)))
  for (i <- 0 until HistoryLength) {
    hist(i) := extHist(ptr + i.U)
  }

  shiftPtr := false.B
  newPtr := if1_histPtr

  val if1_GHInfo = Wire(new GlobalHistoryInfo())
  if1_GHInfo := 0.U.asTypeOf(new GlobalHistoryInfo)
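  // The global branch history lives in extHist, a circular buffer of single
  // bits indexed by a pointer that is decremented whenever a prediction shifts
  // the history (see GlobalHistoryInfo.newPtr above). hist is the
  // HistoryLength-bit window starting at ptr that is fed to the BPU; roughly,
  //   hist(i) === extHist(ptr + i)
  // so, assuming ExtHistoryLength is a power of two and the pointer arithmetic
  // wraps naturally, moving ptr down by one both prepends the newest outcome
  // (written at the decremented position further down) and drops the oldest
  // bit from the window. The ifX_histPtr / ifX_newPtr wires declared above are
  // the per-stage speculative values of this pointer, chained below in IF4.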

  //********************** IF2 ****************************//
  val if2_valid = RegEnable(next = if1_valid, init = false.B, enable = if1_fire)
  val if3_ready = WireInit(false.B)
  val if2_fire = if2_valid && if3_ready && !if2_flush
  val if2_pc = RegEnable(next = if1_npc, init = resetVector.U, enable = if1_fire)
  val if2_snpc = snpc(if2_pc)
  val if2_GHInfo = RegEnable(if1_GHInfo, if1_fire)
  val if2_predHistPtr = RegEnable(ptr, enable=if1_fire)
  if2_ready := if2_fire || !if2_valid || if2_flush
  when (if2_flush) { if2_valid := if1_fire }
  .elsewhen (if1_fire) { if2_valid := if1_valid }
  .elsewhen (if2_fire) { if2_valid := false.B }

  when (RegNext(reset.asBool) && !reset.asBool) {
    if1_npc := resetVector.U(VAddrBits.W)
  }.elsewhen (if2_fire) {
    if1_npc := if2_snpc
  }.otherwise {
    if1_npc := RegNext(if1_npc)
  }

  val if2_bp = bpu.io.out(0).bits
  // if taken, bp_redirect should be true
  // when taken on half RVI, we suppress this redirect signal
  if2_redirect := if2_fire && bpu.io.out(0).valid && if2_bp.redirect && !if2_bp.saveHalfRVI
  when (if2_redirect) {
    if1_npc := if2_bp.target
  }

  val if2_realGHInfo = Wire(new GlobalHistoryInfo())
  if2_realGHInfo.sawNTBr := if2_bp.hasNotTakenBrs
  if2_realGHInfo.takenOnBr := if2_bp.takenOnBr
  if2_realGHInfo.saveHalfRVI := if2_bp.saveHalfRVI

  when (if2_fire && if2_realGHInfo.shifted) {
    shiftPtr := true.B
    newPtr := if2_newPtr
  }
  when (if2_realGHInfo.shifted && if2_newPtr >= ptr) {
    hist(if2_newPtr-ptr) := if2_realGHInfo.takenOnBr.asUInt
  }

  //********************** IF3 ****************************//
  val if3_valid = RegEnable(next = if2_valid, init = false.B, enable = if2_fire)
  val if4_ready = WireInit(false.B)
  val if3_fire = if3_valid && if4_ready && (inLoop || io.icacheResp.valid) && !if3_flush
  val if3_pc = RegEnable(if2_pc, if2_fire)
  val if3_GHInfo = RegEnable(if2_realGHInfo, if2_fire)
  val if3_predHistPtr = RegEnable(if2_predHistPtr, enable=if2_fire)
  if3_ready := if3_fire || !if3_valid || if3_flush
  when (if3_flush) { if3_valid := false.B }
  .elsewhen (if2_fire) { if3_valid := if2_valid }
  .elsewhen (if3_fire) { if3_valid := false.B }

  val if3_bp = bpu.io.out(1).bits

  val if3_realGHInfo = Wire(new GlobalHistoryInfo())
  if3_realGHInfo.sawNTBr := if3_bp.hasNotTakenBrs
  if3_realGHInfo.takenOnBr := if3_bp.takenOnBr
  if3_realGHInfo.saveHalfRVI := if3_bp.saveHalfRVI

  class PrevHalfInstr extends Bundle {
    val valid = Bool()
    val taken = Bool()
    val ghInfo = new GlobalHistoryInfo()
    val fetchpc = UInt(VAddrBits.W) // only for debug
    val idx = UInt(VAddrBits.W) // only for debug
    val pc = UInt(VAddrBits.W)
    val target = UInt(VAddrBits.W)
    val instr = UInt(16.W)
    val ipf = Bool()
    val newPtr = UInt(log2Up(ExtHistoryLength).W)
  }

  val if3_prevHalfInstr = RegInit(0.U.asTypeOf(new PrevHalfInstr))
  val if4_prevHalfInstr = Wire(new PrevHalfInstr)
  // a 32-bit instr crosses 2 pages, and the upper 16 bits trigger a page fault
  val crossPageIPF = WireInit(false.B)
  when (if4_prevHalfInstr.valid) {
    if3_prevHalfInstr := if4_prevHalfInstr
  }
  val prevHalfInstr = Mux(if4_prevHalfInstr.valid, if4_prevHalfInstr, if3_prevHalfInstr)

  // the previous half of an RVI instruction waits here until it meets its other half
  val if3_hasPrevHalfInstr = prevHalfInstr.valid && (prevHalfInstr.pc + 2.U) === if3_pc
  // set to invalid once consumed or on a redirect from the backend
  val prevHalfConsumed = if3_hasPrevHalfInstr && if3_fire || if4_flush
  when (prevHalfConsumed) {
    if3_prevHalfInstr.valid := false.B
  }
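
  // When a fetch packet ends with only the first 16 bits of a 32-bit (RVI)
  // instruction, IF4 saves that half in if4_prevHalfInstr (the saveHalfRVI
  // case below) and the following packet's IF3 stage stitches it to its own
  // first parcel: if3_hasPrevHalfInstr checks that the next packet really
  // starts at prevHalfInstr.pc + 2, and further down pd.io.prev hands the
  // saved half to the predecoder so the full instruction can be decoded.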

  // when the BPU signals a redirect, we distinguish between taken and not taken
  // if taken and saveHalfRVI is true, we do not redirect to the target
  if3_redirect := if3_fire && bpu.io.out(1).valid && (if3_hasPrevHalfInstr && prevHalfInstr.taken || if3_bp.redirect && (if3_bp.taken && !if3_bp.saveHalfRVI || !if3_bp.taken) )

  when (if3_redirect) {
    when (!(if3_hasPrevHalfInstr && prevHalfInstr.taken)) {
      if1_npc := if3_bp.target
      when (if3_realGHInfo.shifted) {
        shiftPtr := true.B
        newPtr := if3_newPtr
      }
    }
  }

  // when it does not redirect, we still need to modify hist(wire)
  when(if3_realGHInfo.shifted && if3_newPtr >= ptr) {
    hist(if3_newPtr-ptr) := if3_realGHInfo.takenOnBr
  }
  when (if3_hasPrevHalfInstr && prevHalfInstr.ghInfo.shifted && prevHalfInstr.newPtr >= ptr) {
    hist(prevHalfInstr.newPtr-ptr) := prevHalfInstr.ghInfo.takenOnBr
  }

  //********************** IF4 ****************************//
  val if4_pd = RegEnable(pd.io.out, if3_fire)
  val if4_ipf = RegEnable(icacheResp.ipf || if3_hasPrevHalfInstr && prevHalfInstr.ipf, if3_fire)
  val if4_crossPageIPF = RegEnable(crossPageIPF, if3_fire)
  val if4_valid = RegInit(false.B)
  val if4_fire = if4_valid && io.fetchPacket.ready
  val if4_pc = RegEnable(if3_pc, if3_fire)

  val if4_GHInfo = RegEnable(if3_realGHInfo, if3_fire)
  val if4_predHistPtr = RegEnable(if3_predHistPtr, enable=if3_fire)
  if4_ready := (if4_fire || !if4_valid || if4_flush) && GTimer() > 500.U
  when (if4_flush) { if4_valid := false.B }
  .elsewhen (if3_fire) { if4_valid := if3_valid }
  .elsewhen(if4_fire) { if4_valid := false.B }

  val if4_bp = Wire(new BranchPrediction)
  if4_bp := bpu.io.out(2).bits

  val if4_realGHInfo = Wire(new GlobalHistoryInfo())
  if4_realGHInfo.sawNTBr := if4_bp.hasNotTakenBrs
  if4_realGHInfo.takenOnBr := if4_bp.takenOnBr
  if4_realGHInfo.saveHalfRVI := if4_bp.saveHalfRVI

  val if4_cfi_jal = if4_pd.instrs(if4_bp.jmpIdx)
  val if4_cfi_jal_tgt = if4_pd.pc(if4_bp.jmpIdx) + Mux(if4_pd.pd(if4_bp.jmpIdx).isRVC,
    SignExt(Cat(if4_cfi_jal(12), if4_cfi_jal(8), if4_cfi_jal(10, 9), if4_cfi_jal(6), if4_cfi_jal(7), if4_cfi_jal(2), if4_cfi_jal(11), if4_cfi_jal(5, 3), 0.U(1.W)), XLEN),
    SignExt(Cat(if4_cfi_jal(31), if4_cfi_jal(19, 12), if4_cfi_jal(20), if4_cfi_jal(30, 21), 0.U(1.W)), XLEN))
  if4_bp.target := Mux(if4_pd.pd(if4_bp.jmpIdx).isJal && if4_bp.taken, if4_cfi_jal_tgt, bpu.io.out(2).bits.target)
  if4_bp.redirect := bpu.io.out(2).bits.redirect || if4_pd.pd(if4_bp.jmpIdx).isJal && if4_bp.taken && if4_cfi_jal_tgt =/= bpu.io.out(2).bits.target
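
  // For reference, the two SignExt(Cat(...)) expressions above reassemble the
  // scrambled jump offsets defined by the ISA: the C.J/C.JAL immediate is
  // imm[11|4|9:8|10|6|7|3:1|5] in instruction bits [12:2], and the JAL
  // immediate is imm[20|10:1|11|19:12] in bits [31:12]. A standalone sketch of
  // the same decode (hypothetical helpers, not used by this module):
  //   def cjOffset(inst: UInt): UInt = SignExt(Cat(inst(12), inst(8), inst(10, 9),
  //     inst(6), inst(7), inst(2), inst(11), inst(5, 3), 0.U(1.W)), XLEN)
  //   def jalOffset(inst: UInt): UInt = SignExt(Cat(inst(31), inst(19, 12),
  //     inst(20), inst(30, 21), 0.U(1.W)), XLEN)
  // IF4 recomputes the JAL target this way so that a predicted target that
  // disagrees with the decoded instruction can be overridden (if4_bp.redirect).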

  if4_prevHalfInstr := 0.U.asTypeOf(new PrevHalfInstr)
  when (bpu.io.out(2).valid && if4_fire && if4_bp.saveHalfRVI) {
    if4_prevHalfInstr.valid := true.B
    if4_prevHalfInstr.taken := if4_bp.taken
    if4_prevHalfInstr.ghInfo := if4_realGHInfo
    // Make sure shifted can work
    if4_prevHalfInstr.ghInfo.saveHalfRVI := false.B
    if4_prevHalfInstr.newPtr := if4_newPtr
    if4_prevHalfInstr.fetchpc := if4_pc
    if4_prevHalfInstr.idx := PopCount(mask(if4_pc)) - 1.U
    if4_prevHalfInstr.pc := if4_pd.pc(if4_prevHalfInstr.idx)
    if4_prevHalfInstr.target := if4_bp.target
    if4_prevHalfInstr.instr := if4_pd.instrs(if4_prevHalfInstr.idx)(15, 0)
    if4_prevHalfInstr.ipf := if4_ipf
  }

  // Redirect and npc logic for if4
  when (bpu.io.out(2).valid && if4_fire && if4_bp.redirect) {
    if4_redirect := true.B
    when (if4_bp.saveHalfRVI) {
      if1_npc := snpc(if4_pc)
    }.otherwise {
      if1_npc := if4_bp.target
    }
  }
  // }.elsewhen (bpu.io.out(2).valid && if4_fire/* && !if4_bp.redirect*/) {
  //   // We redirect the pipeline to the next fetch packet,
  //   // which contains the last half of the RVI instruction
  //   when (if4_bp.saveHalfRVI && if4_bp.taken) {
  //     if4_redirect := true.B
  //     if1_npc := snpc(if4_pc)
  //   }
  // }

  // This should cover the if4 redirect to snpc when saveHalfRVI
  when (if3_redirect) {
    when (if3_hasPrevHalfInstr && prevHalfInstr.taken) {
      if1_npc := prevHalfInstr.target
    }
  }

  // history logic for if4
  when (bpu.io.out(2).valid && if4_fire && if4_bp.redirect) {
    shiftPtr := true.B
    newPtr := if4_newPtr
  // }.elsewhen (bpu.io.out(2).valid && if4_fire/* && !if4_bp.redirect*/) {
  //   // only if we haven't seen any not-taken branch and
  //   // see a not-taken branch in if4 should we tell
  //   // if3 and if4 to update histptr
  //   // We do not shift the global history pointer unless we have the full
  //   // RVI instruction
  //   when (if4_newSawNTBrs && !if4_bp.takenOnBr) {
  //     shiftPtr := true.B
  //     // newPtr := if4_realGHInfo.newPtr
  //   }
  }

  when (if4_realGHInfo.shifted && if4_newPtr >= ptr) {
    hist(if4_newPtr-ptr) := if4_realGHInfo.takenOnBr
  }

  when (if3_redirect) {
    // when we redirect and if3_hasPrevHalfInstr holds, this prevHalfInstr should only be a taken one
    when (if3_hasPrevHalfInstr && prevHalfInstr.ghInfo.shifted) {
      shiftPtr := true.B
      newPtr := prevHalfInstr.newPtr
      extHist(prevHalfInstr.newPtr) := prevHalfInstr.ghInfo.takenOnBr
    }
  }

  // modify GHR at the end of a prediction lifetime
  when (if4_fire && if4_realGHInfo.shifted) {
    extHist(if4_newPtr) := if4_realGHInfo.takenOnBr
  }

  // This is a histPtr which is only modified when a prediction
  // is sent, so that it can get the final prediction info
  val finalPredHistPtr = RegInit(0.U(log2Up(ExtHistoryLength).W))
  if4_histPtr := finalPredHistPtr
  if4_newPtr := if3_histPtr
  when (if4_fire && if4_realGHInfo.shifted) {
    finalPredHistPtr := if4_newPtr
  }

  if3_histPtr := Mux(if4_realGHInfo.shifted && if4_valid && !if4_flush, if4_histPtr - 1.U, if4_histPtr)
  if3_newPtr := if2_histPtr

  if2_histPtr := Mux(if3_realGHInfo.shifted && if3_valid && !if3_flush, if3_histPtr - 1.U, if3_histPtr)
  if2_newPtr := if1_histPtr

  if1_histPtr := Mux(if2_realGHInfo.shifted && if2_valid && !if2_flush, if2_histPtr - 1.U, if2_histPtr)

  when (io.outOfOrderBrInfo.valid && io.outOfOrderBrInfo.bits.isMisPred) {
    val b = io.outOfOrderBrInfo.bits
    val oldPtr = b.brInfo.histPtr
    shiftPtr := true.B
    when (!b.pd.isBr && !b.brInfo.sawNotTakenBranch) {
      // If the mispredicted cfi is not a branch,
      // and there wasn't any not-taken branch before it,
      // we only recover the pointer to an unshifted state
      newPtr := oldPtr
      finalPredHistPtr := oldPtr
    }.otherwise {
      newPtr := oldPtr - 1.U
      finalPredHistPtr := oldPtr - 1.U
      hist(0) := Mux(b.pd.isBr, b.taken, 0.U)
      extHist(newPtr) := Mux(b.pd.isBr, b.taken, 0.U)
    }
  }

  when (loopBufPar.LBredirect.valid) {
    if1_npc := loopBufPar.LBredirect.bits
  }

  when (io.redirect.valid) {
    if1_npc := io.redirect.bits.target
  }
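
  // Note on priority: if1_npc is assigned in several when blocks (the IF2/IF3/IF4
  // redirects, the loop buffer redirect, and the backend redirect above). Under
  // Chisel's last-connect semantics the assignment written last wins when several
  // conditions are true in the same cycle, so the backend redirect overrides the
  // loop buffer redirect, which in turn overrides the in-pipeline stage redirects.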

  when(inLoop) {
    io.icacheReq.valid := if4_flush
  }.otherwise {
    io.icacheReq.valid := if1_valid && if2_ready
  }
  io.icacheResp.ready := if4_ready
  io.icacheReq.bits.addr := if1_npc

  // when(if4_bp.taken) {
  //   when(if4_bp.saveHalfRVI) {
  //     io.loopBufPar.LBReq := snpc(if4_pc)
  //   }.otherwise {
  //     io.loopBufPar.LBReq := if4_bp.target
  //   }
  // }.otherwise {
  //   io.loopBufPar.LBReq := snpc(if4_pc)
  //   XSDebug(p"snpc(if4_pc)=${Hexadecimal(snpc(if4_pc))}\n")
  // }
  loopBufPar.fetchReq := if3_pc

  io.icacheReq.bits.mask := mask(if1_npc)

  io.icacheFlush := Cat(if3_flush, if2_flush)

  val inOrderBrHist = Wire(Vec(HistoryLength, UInt(1.W)))
  (0 until HistoryLength).foreach(i => inOrderBrHist(i) := extHist(i.U + io.inOrderBrInfo.bits.brInfo.predHistPtr))
  bpu.io.inOrderBrInfo.valid := io.inOrderBrInfo.valid
  bpu.io.inOrderBrInfo.bits := BranchUpdateInfoWithHist(io.inOrderBrInfo.bits, inOrderBrHist.asUInt)
  bpu.io.outOfOrderBrInfo.valid := io.outOfOrderBrInfo.valid
  bpu.io.outOfOrderBrInfo.bits := BranchUpdateInfoWithHist(io.outOfOrderBrInfo.bits, inOrderBrHist.asUInt) // don't care about hist

  // bpu.io.flush := Cat(if4_flush, if3_flush, if2_flush)
  bpu.io.flush := VecInit(if2_flush, if3_flush, if4_flush)
  bpu.io.cacheValid := (inLoop || io.icacheResp.valid)
  bpu.io.in.valid := if1_fire
  bpu.io.in.bits.pc := if1_npc
  bpu.io.in.bits.hist := hist.asUInt
  bpu.io.in.bits.histPtr := ptr
  bpu.io.in.bits.inMask := mask(if1_npc)
  bpu.io.out(0).ready := if2_fire
  bpu.io.out(1).ready := if3_fire
  bpu.io.out(2).ready := if4_fire
  bpu.io.predecode.valid := if4_valid
  bpu.io.predecode.bits.mask := if4_pd.mask
  bpu.io.predecode.bits.pd := if4_pd.pd
  bpu.io.predecode.bits.isFetchpcEqualFirstpc := if4_pc === if4_pd.pc(0)
  bpu.io.branchInfo.ready := if4_fire

  pd.io.in := icacheResp
  when(inLoop) {
    pd.io.in.mask := loopBuffer.io.out.bits.mask & mask(loopBuffer.io.out.bits.pc) // TODO: Maybe this is unnecessary
    // XSDebug("Fetch from LB\n")
    // XSDebug(p"pc=${Hexadecimal(io.loopBufPar.LBResp.pc)}\n")
    // XSDebug(p"data=${Hexadecimal(io.loopBufPar.LBResp.data)}\n")
    // XSDebug(p"mask=${Hexadecimal(io.loopBufPar.LBResp.mask)}\n")
  }

  pd.io.prev.valid := if3_hasPrevHalfInstr
  pd.io.prev.bits := prevHalfInstr.instr
  // if a fetch packet triggers a page fault, set the faulting instructions to NOP
  when (!if3_hasPrevHalfInstr && icacheResp.ipf) {
    val instrs = Wire(Vec(FetchWidth, UInt(32.W)))
    (0 until FetchWidth).foreach(i => instrs(i) := ZeroExt("b0010011".U, 32)) // nop
    pd.io.in.data := instrs.asUInt
  }.elsewhen (if3_hasPrevHalfInstr && (prevHalfInstr.ipf || icacheResp.ipf)) {
    pd.io.prev.bits := ZeroExt("b0010011".U, 16)
    val instrs = Wire(Vec(FetchWidth, UInt(32.W)))
    (0 until FetchWidth).foreach(i => instrs(i) := Cat(ZeroExt("b0010011".U, 16), Fill(16, 0.U(1.W))))
    pd.io.in.data := instrs.asUInt

    when (icacheResp.ipf && !prevHalfInstr.ipf) { crossPageIPF := true.B } // upper 16 bits page fault
  }
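
  // "b0010011".U zero-extended to 32 bits is 0x00000013, i.e. addi x0, x0, 0,
  // the canonical RISC-V NOP, so faulting fetch data above is replaced with NOPs
  // and the page fault itself is reported through if4_ipf / crossPageIPFFix
  // rather than through the instruction bits. crossPageIPF marks the case where
  // only the second (upper) half of a cross-page RVI instruction faulted.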

  // Performance Counter
  // if (!env.FPGAPlatform ) {
  //   ExcitingUtils.addSource(io.fetchPacket.fire && !inLoop, "CntFetchFromICache", Perf)
  //   ExcitingUtils.addSource(io.fetchPacket.fire && inLoop, "CntFetchFromLoopBuffer", Perf)
  // }

  val fetchPacketValid = if4_valid && !io.redirect.valid
  val fetchPacketWire = Wire(new FetchPacket)

  // io.fetchPacket.valid := if4_valid && !io.redirect.valid
  fetchPacketWire.instrs := if4_pd.instrs
  fetchPacketWire.mask := if4_pd.mask & (Fill(PredictWidth, !if4_bp.taken) | (Fill(PredictWidth, 1.U(1.W)) >> (~if4_bp.jmpIdx)))
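  // The second operand above is a lower mask built from the predicted jump
  // position: when the prediction is taken, Fill(PredictWidth, 1.U) >> (~jmpIdx)
  // keeps exactly slots 0..jmpIdx (assuming jmpIdx is log2Up(PredictWidth) bits
  // wide, ~jmpIdx equals PredictWidth-1-jmpIdx). For example, with
  // PredictWidth = 8 and a taken jump at jmpIdx = 3 the mask becomes 0b00001111,
  // dropping the slots after the jump; when not taken, the
  // Fill(PredictWidth, !taken) term is all ones and the packet mask is untouched.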
  loopBufPar.noTakenMask := if4_pd.mask
  fetchPacketWire.pc := if4_pd.pc
  (0 until PredictWidth).foreach(i => fetchPacketWire.pnpc(i) := if4_pd.pc(i) + Mux(if4_pd.pd(i).isRVC, 2.U, 4.U))
  when (if4_bp.taken) {
    fetchPacketWire.pnpc(if4_bp.jmpIdx) := if4_bp.target
  }
  fetchPacketWire.brInfo := bpu.io.branchInfo.bits
  (0 until PredictWidth).foreach(i => fetchPacketWire.brInfo(i).histPtr := finalPredHistPtr)
  (0 until PredictWidth).foreach(i => fetchPacketWire.brInfo(i).predHistPtr := if4_predHistPtr)
  fetchPacketWire.pd := if4_pd.pd
  fetchPacketWire.ipf := if4_ipf
  fetchPacketWire.crossPageIPFFix := if4_crossPageIPF

  // predTaken Vec
  fetchPacketWire.predTaken := if4_bp.taken

  loopBuffer.io.in.bits := fetchPacketWire
  io.fetchPacket.bits := fetchPacketWire
  io.fetchPacket.valid := fetchPacketValid
  loopBuffer.io.in.valid := io.fetchPacket.fire

  // debug info
  if (IFUDebug) {
    XSDebug(RegNext(reset.asBool) && !reset.asBool, "Resetting...\n")
    XSDebug(io.icacheFlush(0).asBool, "Flush icache stage2...\n")
    XSDebug(io.icacheFlush(1).asBool, "Flush icache stage3...\n")
    XSDebug(io.redirect.valid, "Redirect from backend! isExcp=%d isFpp:%d isMisPred=%d isReplay=%d pc=%x\n",
      io.redirect.bits.isException, io.redirect.bits.isFlushPipe, io.redirect.bits.isMisPred, io.redirect.bits.isReplay, io.redirect.bits.pc)
    XSDebug(io.redirect.valid, p"Redirect from backend! target=${Hexadecimal(io.redirect.bits.target)} brTag=${io.redirect.bits.brTag}\n")

    XSDebug("[IF1] v=%d fire=%d flush=%d pc=%x ptr=%d mask=%b\n", if1_valid, if1_fire, if1_flush, if1_npc, ptr, mask(if1_npc))
    XSDebug("[IF2] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x ptr=%d snpc=%x\n", if2_valid, if2_ready, if2_fire, if2_redirect, if2_flush, if2_pc, if2_histPtr, if2_snpc)
    XSDebug("[IF3] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x ptr=%d crossPageIPF=%d sawNTBrs=%d\n", if3_valid, if3_ready, if3_fire, if3_redirect, if3_flush, if3_pc, if3_histPtr, crossPageIPF, if3_realGHInfo.sawNTBr)
    XSDebug("[IF4] v=%d r=%d fire=%d redirect=%d flush=%d pc=%x ptr=%d crossPageIPF=%d sawNTBrs=%d\n", if4_valid, if4_ready, if4_fire, if4_redirect, if4_flush, if4_pc, if4_histPtr, if4_crossPageIPF, if4_realGHInfo.sawNTBr)
    XSDebug("[IF1][icacheReq] v=%d r=%d addr=%x\n", io.icacheReq.valid, io.icacheReq.ready, io.icacheReq.bits.addr)
    XSDebug("[IF1][ghr] headPtr=%d shiftPtr=%d newPtr=%d ptr=%d\n", if1_histPtr, shiftPtr, newPtr, ptr)
    XSDebug("[IF1][ghr] hist=%b\n", hist.asUInt)
    XSDebug("[IF1][ghr] extHist=%b\n\n", extHist.asUInt)

    XSDebug("[IF2][bp] redirect=%d taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n\n", if2_bp.redirect, if2_bp.taken, if2_bp.jmpIdx, if2_bp.hasNotTakenBrs, if2_bp.target, if2_bp.saveHalfRVI)
    // XSDebug("[IF2][GHInfo]: %s\n", if2_realGHInfo)
    if2_realGHInfo.debug

    XSDebug("[IF3][icacheResp] v=%d r=%d pc=%x mask=%b\n", io.icacheResp.valid, io.icacheResp.ready, io.icacheResp.bits.pc, io.icacheResp.bits.mask)
    XSDebug("[IF3][bp] redirect=%d taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if3_bp.redirect, if3_bp.taken, if3_bp.jmpIdx, if3_bp.hasNotTakenBrs, if3_bp.target, if3_bp.saveHalfRVI)
    // XSDebug("[IF3][prevHalfInstr] v=%d redirect=%d fetchpc=%x idx=%d tgt=%x taken=%d instr=%x\n\n",
    //   prev_half_valid, prev_half_redirect, prev_half_fetchpc, prev_half_idx, prev_half_tgt, prev_half_taken, prev_half_instr)
    XSDebug("[IF3][ prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n",
      prevHalfInstr.valid, prevHalfInstr.taken, prevHalfInstr.fetchpc, prevHalfInstr.idx, prevHalfInstr.pc, prevHalfInstr.target, prevHalfInstr.instr, prevHalfInstr.ipf)
    XSDebug("[IF3][if3_prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n\n",
      if3_prevHalfInstr.valid, if3_prevHalfInstr.taken, if3_prevHalfInstr.fetchpc, if3_prevHalfInstr.idx, if3_prevHalfInstr.pc, if3_prevHalfInstr.target, if3_prevHalfInstr.instr, if3_prevHalfInstr.ipf)
    // XSDebug("[IF3][GHInfo]: %s\n", if3_realGHInfo)
    if3_realGHInfo.debug

    XSDebug("[IF4][predecode] mask=%b\n", if4_pd.mask)
    XSDebug("[IF4][bp] redirect=%d taken=%d jmpIdx=%d hasNTBrs=%d target=%x saveHalfRVI=%d\n", if4_bp.redirect, if4_bp.taken, if4_bp.jmpIdx, if4_bp.hasNotTakenBrs, if4_bp.target, if4_bp.saveHalfRVI)
    XSDebug(if4_pd.pd(if4_bp.jmpIdx).isJal && if4_bp.taken, "[IF4] cfi is jal! instr=%x target=%x\n", if4_cfi_jal, if4_cfi_jal_tgt)
    XSDebug("[IF4][if4_prevHalfInstr] v=%d taken=%d fetchpc=%x idx=%d pc=%x tgt=%x instr=%x ipf=%d\n",
      if4_prevHalfInstr.valid, if4_prevHalfInstr.taken, if4_prevHalfInstr.fetchpc, if4_prevHalfInstr.idx, if4_prevHalfInstr.pc, if4_prevHalfInstr.target, if4_prevHalfInstr.instr, if4_prevHalfInstr.ipf)
    // XSDebug("[IF4][GHInfo]: %s\n", if4_realGHInfo)
    if4_realGHInfo.debug
    XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] v=%d r=%d mask=%b ipf=%d crossPageIPF=%d\n",
      io.fetchPacket.valid, io.fetchPacket.ready, io.fetchPacket.bits.mask, io.fetchPacket.bits.ipf, io.fetchPacket.bits.crossPageIPFFix)
    for (i <- 0 until PredictWidth) {
      XSDebug(io.fetchPacket.fire(), "[IF4][fetchPacket] %b %x pc=%x pnpc=%x pd: rvc=%d brType=%b call=%d ret=%d\n",
        io.fetchPacket.bits.mask(i),
        io.fetchPacket.bits.instrs(i),
        io.fetchPacket.bits.pc(i),
        io.fetchPacket.bits.pnpc(i),
        io.fetchPacket.bits.pd(i).isRVC,
        io.fetchPacket.bits.pd(i).brType,
        io.fetchPacket.bits.pd(i).isCall,
        io.fetchPacket.bits.pd(i).isRet
      )
    }
  }
}