package xiangshan.backend.ctrlblock

import org.chipsalliance.cde.config.Parameters
import chisel3.util._
import chisel3._
import utility.{HasCircularQueuePtrHelper, XORFold, GatedValidRegNext}
import xiangshan.frontend.{FtqRead, PreDecodeInfo}
import xiangshan.{MemPredUpdateReq, Redirect, XSBundle, XSModule, AddrTransType}

/**
 * Two-stage redirect generator for the control block.
 *
 * Stage 0 (combinational): picks the oldest redirect (by ROB index, via
 * `Redirect.selectOldestRedirect`) between the already-arbitrated EXU redirect
 * and the load-replay redirect, then masks out any candidate that is already
 * covered by a previously issued flush (`flushAfter`) or by a ROB flush in the
 * same cycle.
 *
 * Stage 1 (registered): drives `stage2Redirect` to the frontend/ROB, and — when
 * the winning redirect was a load replay — kicks off a memory-dependence
 * predictor update (`memPredUpdate`), reading the store PC from the FTQ one
 * cycle after the load PC is known.
 */
class RedirectGenerator(implicit p: Parameters) extends XSModule
  with HasCircularQueuePtrHelper {

  class RedirectGeneratorIO(implicit p: Parameters) extends XSBundle {
    def numRedirect = backendParams.numRedirect

    val hartId = Input(UInt(8.W))
    // Oldest redirect pre-selected among the EXUs (arbitration happens upstream).
    val oldestExuRedirect = Flipped(ValidIO(new Redirect))
    // High when the EXU redirect comes from a CSR instruction; suppresses the
    // instruction-address fault checks below.
    val oldestExuRedirectIsCSR = Input(Bool())
    // Current address-translation mode, used to pre-check the redirect target
    // for access/page/guest-page faults.
    val instrAddrTransType = Input(new AddrTransType)
    val oldestExuOutPredecode = Input(new PreDecodeInfo) // guarded by exuRedirect.valid
    // Redirect raised by a load that must replay (e.g. memory-order violation).
    val loadReplay = Flipped(ValidIO(new Redirect))
    // Full-pipeline flush from the ROB; overrides/kills in-flight redirects.
    val robFlush = Flipped(ValidIO(new Redirect))
    val stage2Redirect = ValidIO(new Redirect)

    // Update request for the memory-dependence predictor (wait table / store set).
    val memPredUpdate = Output(new MemPredUpdateReq)
    val memPredPcRead = new FtqRead(UInt(VAddrBits.W)) // read req sent from stage 2
    // One-hot: which source won stage-1 selection {loadReplay, oldestExuRedirect}.
    val stage2oldestOH = Output(UInt((1 + 1).W))
  }

  val io = IO(new RedirectGeneratorIO)

  val loadRedirect = io.loadReplay
  val robFlush = io.robFlush
  // Local copy of the EXU redirect so selected fields can be overridden below
  // without touching the input port.
  val oldestExuRedirect = Wire(chiselTypeOf(io.oldestExuRedirect))
  oldestExuRedirect := io.oldestExuRedirect
  // Rebuild fullTarget: keep the upper (XLEN - VAddrBits) bits of the incoming
  // fullTarget, replace the low VAddrBits with the cfiUpdate target address.
  oldestExuRedirect.bits.fullTarget := Cat(io.oldestExuRedirect.bits.fullTarget.head(XLEN - VAddrBits), io.oldestExuRedirect.bits.cfiUpdate.target)
  // Pre-check the redirect target for instruction-fetch faults, but only for
  // non-CSR redirects (presumably CSR-sourced targets are validated elsewhere —
  // TODO(review): confirm).
  when(!io.oldestExuRedirectIsCSR){
    oldestExuRedirect.bits.cfiUpdate.backendIAF := io.instrAddrTransType.checkAccessFault(oldestExuRedirect.bits.fullTarget)
    oldestExuRedirect.bits.cfiUpdate.backendIPF := io.instrAddrTransType.checkPageFault(oldestExuRedirect.bits.fullTarget)
    oldestExuRedirect.bits.cfiUpdate.backendIGPF := io.instrAddrTransType.checkGuestPageFault(oldestExuRedirect.bits.fullTarget)
  }
  // Candidate order fixed as {exu, loadReplay}; index 1 (last) is the replay slot.
  val allRedirect: Vec[ValidIO[Redirect]] = VecInit(oldestExuRedirect, loadRedirect)
  val oldestOneHot = Redirect.selectOldestRedirect(allRedirect)
  // Last accepted flush; used to squash younger, already-covered redirects.
  val flushAfter = RegInit(0.U.asTypeOf(ValidIO(new Redirect)))
  // A candidate is dropped if it is already flushed by `flushAfter`, or if the
  // ROB is flushing this very cycle.
  val needFlushVec = VecInit(allRedirect.map(_.bits.robIdx.needFlush(flushAfter) || robFlush.valid))
  // True when the one-hot winner survives the flush mask.
  val oldestValid = VecInit(oldestOneHot.zip(needFlushVec).map { case (v, f) => v && !f }).asUInt.orR
  val oldestExuPredecode = io.oldestExuOutPredecode
  val oldestRedirect = Mux1H(oldestOneHot, allRedirect)
  // Stage-1 pipeline registers (bits only latch when a valid winner exists).
  val s1_redirect_bits_reg = RegEnable(oldestRedirect.bits, oldestValid)
  val s1_redirect_valid_reg = GatedValidRegNext(oldestValid)
  val s1_redirect_onehot = VecInit(oldestOneHot.map(x => GatedValidRegNext(x)))

  if (backendParams.debugEn){
    dontTouch(oldestValid)
    dontTouch(needFlushVec)
  }
  // 3-bit shift counter: holds `flushAfter.valid` asserted for 3 cycles after
  // the most recent flush event before allowing it to clear.
  val flushAfterCounter = Reg(UInt(3.W))
  val robFlushOrExuFlushValid = oldestValid || robFlush.valid
  when(robFlushOrExuFlushValid) {
    flushAfter.valid := true.B
    // ROB flush takes priority over the selected EXU/replay redirect.
    flushAfter.bits := Mux(robFlush.valid, robFlush.bits, oldestRedirect.bits)
  }.elsewhen(!flushAfterCounter(0)) {
    // Only drop validity once the hold-down counter has fully drained.
    flushAfter.valid := false.B
  }
  when(robFlushOrExuFlushValid) {
    flushAfterCounter := "b111".U
  }.elsewhen(flushAfterCounter(0)){
    flushAfterCounter := flushAfterCounter >> 1
  }
  // stage1 -> stage2
  // A same-cycle ROB flush kills the outgoing redirect.
  io.stage2Redirect.valid := s1_redirect_valid_reg && !robFlush.valid
  io.stage2Redirect.bits := s1_redirect_bits_reg
  // Last-connect override: predecode info is piped separately (it is only valid
  // alongside the EXU redirect) and spliced over the bundle assignment above.
  io.stage2Redirect.bits.cfiUpdate.pd := RegEnable(oldestExuPredecode, oldestValid)
  io.stage2oldestOH := s1_redirect_onehot.asUInt

  // Last slot of the one-hot is the load-replay candidate (see allRedirect order).
  val s1_isReplay = s1_redirect_onehot.last

  // get pc from ftq
  // valid only if redirect is caused by load violation
  // store_pc is used to update store set
  val store_pc = io.memPredPcRead(s1_redirect_valid_reg, s1_redirect_bits_reg.stFtqIdx, s1_redirect_bits_reg.stFtqOffset)
  val real_pc = s1_redirect_bits_reg.cfiUpdate.pc
  // update load violation predictor if load violation redirect triggered
  // NOTE(review): s2_redirect_bits_reg is not read anywhere in this file —
  // confirm whether it is dead or consumed by code outside this view.
  val s2_redirect_bits_reg = RegEnable(s1_redirect_bits_reg, s1_redirect_valid_reg)
  io.memPredUpdate.valid := GatedValidRegNext(s1_isReplay && s1_redirect_valid_reg && s1_redirect_bits_reg.flushItself(), init = false.B)
  // update wait table
  // waddr/ldpc: load PC folded down to MemPredPCWidth bits (bit 0 dropped —
  // instructions are at least 2-byte aligned).
  io.memPredUpdate.waddr := RegEnable(XORFold(real_pc(VAddrBits - 1, 1), MemPredPCWidth), s1_isReplay && s1_redirect_valid_reg)
  io.memPredUpdate.wdata := true.B
  // update store set
  io.memPredUpdate.ldpc := RegEnable(XORFold(real_pc(VAddrBits - 1, 1), MemPredPCWidth), s1_isReplay && s1_redirect_valid_reg)
  // store pc is ready 1 cycle after s1_isReplay is judged
  io.memPredUpdate.stpc := RegEnable(XORFold(store_pc(VAddrBits - 1, 1), MemPredPCWidth), s1_isReplay && s1_redirect_valid_reg)

}