package xiangshan.backend.datapath

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import difftest.{DiffArchFpRegState, DiffArchIntRegState, DiffArchVecRegState, DifftestModule}
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import utility._
import utils.SeqUtils._
import xiangshan._
import xiangshan.backend.BackendParams
import xiangshan.backend.Bundles._
import xiangshan.backend.decode.ImmUnion
import xiangshan.backend.datapath.DataConfig._
import xiangshan.backend.datapath.RdConfig._
import xiangshan.backend.issue.{ImmExtractor, IntScheduler, MemScheduler, VfScheduler}
import xiangshan.backend.implicitCast._
import xiangshan.backend.regfile._

class DataPath(params: BackendParams)(implicit p: Parameters) extends LazyModule {
  override def shouldBeInlined: Boolean = false

  private implicit val dpParams: BackendParams = params
  lazy val module = new DataPathImp(this)

  println(s"[DataPath] Preg Params: ")
  println(s"[DataPath] Int R(${params.getRfReadSize(IntData())}), W(${params.getRfWriteSize(IntData())}) ")
  println(s"[DataPath] Vf R(${params.getRfReadSize(VecData())}), W(${params.getRfWriteSize(VecData())}) ")
}

class DataPathImp(override val wrapper: DataPath)(implicit p: Parameters, params: BackendParams)
  extends LazyModuleImp(wrapper) with HasXSParameter {

  private val VCONFIG_PORT = params.vconfigPort
  private val VLD_PORT = params.vldPort

  val io = IO(new DataPathIO())

  private val (fromIntIQ, toIntIQ, toIntExu) = (io.fromIntIQ, io.toIntIQ, io.toIntExu)
  private val (fromMemIQ, toMemIQ, toMemExu) = (io.fromMemIQ, io.toMemIQ, io.toMemExu)
  private val (fromVfIQ , toVfIQ , toVfExu ) = (io.fromVfIQ , io.toVfIQ , io.toFpExu)

  println(s"[DataPath] IntIQ(${fromIntIQ.size}), MemIQ(${fromMemIQ.size})")
  println(s"[DataPath] IntExu(${fromIntIQ.map(_.size).sum}), MemExu(${fromMemIQ.map(_.size).sum})")

  // just references for convenience
  private val fromIQ: Seq[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] = fromIntIQ ++ fromVfIQ ++ fromMemIQ

  private val toIQs = toIntIQ ++ toVfIQ ++ toMemIQ

  private val toExu: Seq[MixedVec[DecoupledIO[ExuInput]]] = toIntExu ++ toVfExu ++ toMemExu

  private val fromFlattenIQ: Seq[DecoupledIO[IssueQueueIssueBundle]] = fromIQ.flatten

  private val toFlattenExu: Seq[DecoupledIO[ExuInput]] = toExu.flatten

  private val intWbBusyArbiter = Module(new IntRFWBCollideChecker(backendParams))
  private val vfWbBusyArbiter = Module(new VfRFWBCollideChecker(backendParams))
  private val intRFReadArbiter = Module(new IntRFReadArbiter(backendParams))
  private val vfRFReadArbiter = Module(new VfRFReadArbiter(backendParams))

  private val og0FailedVec2: MixedVec[Vec[Bool]] = Wire(MixedVec(fromIQ.map(x => Vec(x.size, Bool())).toSeq))
  private val og1FailedVec2: MixedVec[Vec[Bool]] = Wire(MixedVec(fromIQ.map(x => Vec(x.size, Bool())).toSeq))

  // port -> win
  private val intRdArbWinner: Seq2[MixedVec[Bool]] = intRFReadArbiter.io.in.map(_.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq).toSeq
  private val vfRdArbWinner: Seq2[MixedVec[Bool]] = vfRFReadArbiter.io.in.map(_.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq).toSeq
  private val intWbNotBlock: Seq[MixedVec[Bool]] = intWbBusyArbiter.io.in.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq
  private val vfWbNotBlock: Seq[MixedVec[Bool]] = vfWbBusyArbiter.io.in.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq

  private val intRdNotBlock: Seq2[Bool] = intRdArbWinner.map(_.map(_.asUInt.andR))
  private val vfRdNotBlock: Seq2[Bool] = vfRdArbWinner.map(_.map(_.asUInt.andR))
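
  // OG0 port arbitration: each source operand of each issuing uop requests an int/vf regfile
  // read port below, and each uop that writes a regfile requests a writeback slot. An arbiter
  // input's ready acts as the "win" signal; losing any required port blocks issue at OG0.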
  private val intRFReadReq: Seq3[ValidIO[RfReadPortWithConfig]] = fromIQ.map(x => x.map(xx => xx.bits.getIntRfReadValidBundle(xx.valid)).toSeq).toSeq
  private val intDataSources: Seq[Seq[Vec[DataSource]]] = fromIQ.map(x => x.map(xx => xx.bits.common.dataSources))

  intRFReadArbiter.io.in.zip(intRFReadReq).zipWithIndex.foreach { case ((arbInSeq2, inRFReadReqSeq2), iqIdx) =>
    arbInSeq2.zip(inRFReadReqSeq2).zipWithIndex.foreach { case ((arbInSeq, inRFReadReqSeq), exuIdx) =>
      val srcIndices: Seq[Int] = fromIQ(iqIdx)(exuIdx).bits.exuParams.getRfReadSrcIdx(IntData())
      for (srcIdx <- 0 until fromIQ(iqIdx)(exuIdx).bits.exuParams.numRegSrc) {
        if (srcIndices.contains(srcIdx) && inRFReadReqSeq.isDefinedAt(srcIdx)) {
          arbInSeq(srcIdx).valid := inRFReadReqSeq(srcIdx).valid && intDataSources(iqIdx)(exuIdx)(srcIdx).readReg
          arbInSeq(srcIdx).bits.addr := inRFReadReqSeq(srcIdx).bits.addr
        } else {
          arbInSeq(srcIdx).valid := false.B
          arbInSeq(srcIdx).bits.addr := 0.U
        }
      }
    }
  }

  private val vfRFReadReq: Seq3[ValidIO[RfReadPortWithConfig]] = fromIQ.map(x => x.map(xx => xx.bits.getVfRfReadValidBundle(xx.valid)).toSeq).toSeq

  vfRFReadArbiter.io.in.zip(vfRFReadReq).zipWithIndex.foreach { case ((arbInSeq2, inRFReadReqSeq2), iqIdx) =>
    arbInSeq2.zip(inRFReadReqSeq2).zipWithIndex.foreach { case ((arbInSeq, inRFReadReqSeq), exuIdx) =>
      val srcIndices: Seq[Int] = VfRegSrcDataSet.flatMap(data => fromIQ(iqIdx)(exuIdx).bits.exuParams.getRfReadSrcIdx(data)).toSeq.sorted
      for (srcIdx <- 0 until fromIQ(iqIdx)(exuIdx).bits.exuParams.numRegSrc) {
        if (srcIndices.contains(srcIdx) && inRFReadReqSeq.isDefinedAt(srcIdx)) {
          arbInSeq(srcIdx).valid := inRFReadReqSeq(srcIdx).valid
          arbInSeq(srcIdx).bits.addr := inRFReadReqSeq(srcIdx).bits.addr
        } else {
          arbInSeq(srcIdx).valid := false.B
          arbInSeq(srcIdx).bits.addr := 0.U
        }
      }
    }
  }

  private val intRFWriteReq: Seq2[Bool] = fromIQ.map(x => x.map(xx => xx.valid && xx.bits.common.rfWen.getOrElse(false.B)).toSeq).toSeq
  private val vfRFWriteReq: Seq2[Bool] = fromIQ.map(x => x.map(xx => xx.valid && xx.bits.common.getVfWen.getOrElse(false.B)).toSeq).toSeq

  intWbBusyArbiter.io.in.zip(intRFWriteReq).foreach { case (arbInSeq, inRFWriteReqSeq) =>
    arbInSeq.zip(inRFWriteReqSeq).foreach { case (arbIn, inRFWriteReq) =>
      arbIn.valid := inRFWriteReq
    }
  }

  vfWbBusyArbiter.io.in.zip(vfRFWriteReq).foreach { case (arbInSeq, inRFWriteReqSeq) =>
    arbInSeq.zip(inRFWriteReqSeq).foreach { case (arbIn, inRFWriteReq) =>
      arbIn.valid := inRFWriteReq
    }
  }

  private val intSchdParams = params.schdParams(IntScheduler())
  private val vfSchdParams = params.schdParams(VfScheduler())
  private val memSchdParams = params.schdParams(MemScheduler())

  private val numIntRfReadByExu = intSchdParams.numIntRfReadByExu + memSchdParams.numIntRfReadByExu
  private val numVfRfReadByExu = vfSchdParams.numVfRfReadByExu + memSchdParams.numVfRfReadByExu
  // Todo: limit read port
  private val numIntR = numIntRfReadByExu
  private val numVfR = numVfRfReadByExu
  println(s"[DataPath] RegFile read req needed by Exu: Int(${numIntRfReadByExu}), Vf(${numVfRfReadByExu})")
  println(s"[DataPath] RegFile read port: Int(${numIntR}), Vf(${numVfR})")

  private val schdParams = params.allSchdParams
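
  // Register file access wires. The int regfile is XLEN bits wide; the vf regfile is VLEN bits
  // wide and is written as vfRfSplitNum (VLEN / XLEN) slices, each with its own write enables.
  // The debug read ports only exist when difftest (or AlwaysBasicDiff) is enabled.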
  private val intRfRaddr = Wire(Vec(params.numPregRd(IntData()), UInt(intSchdParams.pregIdxWidth.W)))
  private val intRfRdata = Wire(Vec(params.numPregRd(IntData()), UInt(intSchdParams.rfDataWidth.W)))
  private val intRfWen = Wire(Vec(io.fromIntWb.length, Bool()))
  private val intRfWaddr = Wire(Vec(io.fromIntWb.length, UInt(intSchdParams.pregIdxWidth.W)))
  private val intRfWdata = Wire(Vec(io.fromIntWb.length, UInt(intSchdParams.rfDataWidth.W)))

  private val vfRfSplitNum = VLEN / XLEN
  private val vfRfRaddr = Wire(Vec(params.numPregRd(VecData()), UInt(vfSchdParams.pregIdxWidth.W)))
  private val vfRfRdata = Wire(Vec(params.numPregRd(VecData()), UInt(vfSchdParams.rfDataWidth.W)))
  private val vfRfWen = Wire(Vec(vfRfSplitNum, Vec(io.fromVfWb.length, Bool())))
  private val vfRfWaddr = Wire(Vec(io.fromVfWb.length, UInt(vfSchdParams.pregIdxWidth.W)))
  private val vfRfWdata = Wire(Vec(io.fromVfWb.length, UInt(vfSchdParams.rfDataWidth.W)))

  private val intDebugRead: Option[(Vec[UInt], Vec[UInt])] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(Vec(32, UInt(intSchdParams.pregIdxWidth.W))), Wire(Vec(32, UInt(XLEN.W))))
    } else { None }
  private val vfDebugRead: Option[(Vec[UInt], Vec[UInt])] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(Vec(32 + 32 + 1, UInt(vfSchdParams.pregIdxWidth.W))), Wire(Vec(32 + 32 + 1, UInt(VLEN.W))))
    } else { None }

  private val fpDebugReadData: Option[Vec[UInt]] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(Vec(32, UInt(XLEN.W))))
    } else { None }
  private val vecDebugReadData: Option[Vec[UInt]] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(Vec(64, UInt(64.W)))) // v0 = Cat(Vec(1), Vec(0))
    } else { None }
  private val vconfigDebugReadData: Option[UInt] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(UInt(64.W)))
    } else { None }

  fpDebugReadData.foreach(_ := vfDebugRead
    .get._2
    .slice(0, 32)
    .map(_(63, 0))
  ) // fp only uses bits [63:0]
  vecDebugReadData.foreach(_ := vfDebugRead
    .get._2
    .slice(32, 64)
    .map(x => Seq(x(63, 0), x(127, 64))).flatten
  )
  vconfigDebugReadData.foreach(_ := vfDebugRead
    .get._2(64)(63, 0)
  )

  io.debugVconfig.foreach(_ := vconfigDebugReadData.get)

  IntRegFile("IntRegFile", intSchdParams.numPregs, intRfRaddr, intRfRdata, intRfWen, intRfWaddr, intRfWdata,
    debugReadAddr = intDebugRead.map(_._1),
    debugReadData = intDebugRead.map(_._2))
  VfRegFile("VfRegFile", vfSchdParams.numPregs, vfRfSplitNum, vfRfRaddr, vfRfRdata, vfRfWen, vfRfWaddr, vfRfWdata,
    debugReadAddr = vfDebugRead.map(_._1),
    debugReadData = vfDebugRead.map(_._2))

  intRfWaddr := io.fromIntWb.map(_.addr).toSeq
  intRfWdata := io.fromIntWb.map(_.data).toSeq
  intRfWen := io.fromIntWb.map(_.wen).toSeq

  for (portIdx <- intRfRaddr.indices) {
    if (intRFReadArbiter.io.out.isDefinedAt(portIdx))
      intRfRaddr(portIdx) := intRFReadArbiter.io.out(portIdx).bits.addr
    else
      intRfRaddr(portIdx) := 0.U
  }

  vfRfWaddr := io.fromVfWb.map(_.addr).toSeq
  vfRfWdata := io.fromVfWb.map(_.data).toSeq
  vfRfWen.foreach(_.zip(io.fromVfWb.map(_.wen)).foreach { case (wenSink, wenSource) => wenSink := wenSource }) // Todo: support fp multi-write

  for (portIdx <- vfRfRaddr.indices) {
    if (vfRFReadArbiter.io.out.isDefinedAt(portIdx))
      vfRfRaddr(portIdx) := vfRFReadArbiter.io.out(portIdx).bits.addr
    else
      vfRfRaddr(portIdx) := 0.U
  }

  vfRfRaddr(VCONFIG_PORT) := io.vconfigReadPort.addr
  io.vconfigReadPort.data := vfRfRdata(VCONFIG_PORT)
  // vfRfRaddr(VLD_PORT) := io.vldReadPort.addr
  io.vldReadPort.data := DontCare

  intDebugRead.foreach { case (addr, _) =>
    addr := io.debugIntRat.get
  }

  vfDebugRead.foreach { case (addr, _) =>
    addr := io.debugFpRat.get ++ io.debugVecRat.get :+ io.debugVconfigRat.get
  }
  println(s"[DataPath] " +
    s"has intDebugRead: ${intDebugRead.nonEmpty}, " +
    s"has vfDebugRead: ${vfDebugRead.nonEmpty}")
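
  // OG0 -> OG1 pipeline registers. The control payload (uop data, addrOH, srcType) is latched
  // here, while the register-file read data (s1_intPregRData / s1_vfPregRData) is wired
  // combinationally from the regfile read ports and is consumed in the OG1 cycle rather than
  // being latched.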
  val s1_addrOHs = Reg(MixedVec(
    fromIQ.map(x => MixedVec(x.map(_.bits.addrOH.cloneType).toSeq)).toSeq
  ))
  val s1_toExuValid: MixedVec[MixedVec[Bool]] = Reg(MixedVec(
    toExu.map(x => MixedVec(x.map(_.valid.cloneType).toSeq)).toSeq
  ))
  val s1_toExuData: MixedVec[MixedVec[ExuInput]] = Reg(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.cloneType).toSeq)).toSeq))
  val s1_toExuReady = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.ready.cloneType))))) // Todo
  val s1_srcType: MixedVec[MixedVec[Vec[UInt]]] = MixedVecInit(fromIQ.map(x => MixedVecInit(x.map(xx => RegEnable(xx.bits.srcType, xx.fire)))))

  val s1_intPregRData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType)))))
  val s1_vfPregRData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType)))))

  val rfrPortConfigs = schdParams.map(_.issueBlockParams).flatten.map(_.exuBlockParams.map(_.rfrPortConfigs))

  println(s"[DataPath] s1_intPregRData.flatten.flatten.size: ${s1_intPregRData.flatten.flatten.size}, intRfRdata.size: ${intRfRdata.size}")
  s1_intPregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
  s1_intPregRData.zip(rfrPortConfigs).foreach { case (iqRdata, iqCfg) =>
    iqRdata.zip(iqCfg).foreach { case (iuRdata, iuCfg) =>
      val realIuCfg = iuCfg.map(x => if (x.size > 1) x.filter(_.isInstanceOf[IntRD]) else x).flatten
      assert(iuRdata.size == realIuCfg.size, "iuRdata.size != realIuCfg.size")
      iuRdata.zip(realIuCfg)
        .filter { case (_, rfrPortConfig) => rfrPortConfig.isInstanceOf[IntRD] }
        .foreach { case (sink, cfg) => sink := intRfRdata(cfg.port) }
    }
  }

  println(s"[DataPath] s1_vfPregRData.flatten.flatten.size: ${s1_vfPregRData.flatten.flatten.size}, vfRfRdata.size: ${vfRfRdata.size}")
  s1_vfPregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
  s1_vfPregRData.zip(rfrPortConfigs).foreach { case (iqRdata, iqCfg) =>
    iqRdata.zip(iqCfg).foreach { case (iuRdata, iuCfg) =>
      val realIuCfg = iuCfg.map(x => if (x.size > 1) x.filter(_.isInstanceOf[VfRD]) else x).flatten
      assert(iuRdata.size == realIuCfg.size, "iuRdata.size != realIuCfg.size")
      iuRdata.zip(realIuCfg)
        .filter { case (_, rfrPortConfig) => rfrPortConfig.isInstanceOf[VfRD] }
        .foreach { case (sink, cfg) => sink := vfRfRdata(cfg.port) }
    }
  }

  for (i <- fromIQ.indices) {
    for (j <- fromIQ(i).indices) {
      // IQ(s0) --[Ctrl]--> s1Reg ---------- begin
      // refs
      val s1_valid = s1_toExuValid(i)(j)
      val s1_ready = s1_toExuReady(i)(j)
      val s1_data = s1_toExuData(i)(j)
      val s1_addrOH = s1_addrOHs(i)(j)
      val s0 = fromIQ(i)(j) // s0
      val srcNotBlock = s0.bits.common.dataSources.zip(intRdArbWinner(i)(j) zip vfRdArbWinner(i)(j)).map { case (source, win) =>
        !source.readReg || win._1 && win._2
      }.fold(true.B)(_ && _)
      val notBlock = srcNotBlock && intWbNotBlock(i)(j) && vfWbNotBlock(i)(j)
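
      // OG0 -> OG1 latch condition: the incoming uop is captured only when it fires from the IQ,
      // all required read/write ports are won (notBlock), it is not flushed by the current or
      // one-cycle-delayed redirect, and it is not killed by an OG1 failure or a load cancel;
      // otherwise s1_valid is cleared.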
      val s1_flush = s0.bits.common.robIdx.needFlush(Seq(io.flush, RegNextWithEnable(io.flush)))
      val s1_cancel = og1FailedVec2(i)(j)
      val s1_ldCancel = LoadShouldCancel(s0.bits.common.loadDependency, io.ldCancel)
      when (s0.fire && !s1_flush && notBlock && !s1_cancel && !s1_ldCancel) {
        s1_valid := s0.valid
        s1_data.fromIssueBundle(s0.bits) // no src data here
        s1_addrOH := s0.bits.addrOH
      }.otherwise {
        s1_valid := false.B
      }
      s0.ready := (s1_ready || !s1_valid) && notBlock
      // IQ(s0) --[Ctrl]--> s1Reg ---------- end

      // IQ(s0) --[Data]--> s1Reg ---------- begin
      // imm extract
      when (s0.fire && !s1_flush && notBlock) {
        if (s1_data.params.immType.nonEmpty && s1_data.src.size > 1) {
          // rs1 is always int reg, rs2 may be imm
          when(SrcType.isImm(s0.bits.srcType(1))) {
            s1_data.src(1) := ImmExtractor(
              s0.bits.common.imm,
              s0.bits.immType,
              s1_data.params.dataBitsMax,
              s1_data.params.immType.map(_.litValue)
            )
          }
        }
        if (s1_data.params.hasJmpFu) {
          when(SrcType.isPc(s0.bits.srcType(0))) {
            s1_data.src(0) := SignExt(s0.bits.common.pc.get, XLEN)
          }
        } else if (s1_data.params.hasVecFu) {
          // RISC-V vector immediates sit in rs1's position, so they go to src(0) rather than src(1)
          when(SrcType.isImm(s0.bits.srcType(0))) {
            s1_data.src(0) := ImmExtractor(
              s0.bits.common.imm,
              s0.bits.immType,
              s1_data.params.dataBitsMax,
              s1_data.params.immType.map(_.litValue)
            )
          }
        } else if (s1_data.params.hasLoadFu || s1_data.params.hasHyldaFu) {
          // dirty code for fused_lui_load
          when(SrcType.isImm(s0.bits.srcType(0))) {
            s1_data.src(0) := SignExt(ImmUnion.U.toImm32(s0.bits.common.imm(s0.bits.common.imm.getWidth - 1, ImmUnion.I.len)), XLEN)
          }
        }
      }
      // IQ(s0) --[Data]--> s1Reg ---------- end
    }
  }

  private val fromIQFire = fromIQ.map(_.map(_.fire))
  private val toExuFire = toExu.map(_.map(_.fire))
  toIQs.zipWithIndex.foreach {
    case (toIQ, iqIdx) =>
      toIQ.zipWithIndex.foreach {
        case (toIU, iuIdx) =>
          // IU: issue unit
          val og0resp = toIU.og0resp
          og0FailedVec2(iqIdx)(iuIdx) := fromIQ(iqIdx)(iuIdx).valid && (!fromIQFire(iqIdx)(iuIdx))
          og0resp.valid := og0FailedVec2(iqIdx)(iuIdx)
          og0resp.bits.respType := RSFeedbackType.rfArbitFail
          og0resp.bits.dataInvalidSqIdx := DontCare
          og0resp.bits.robIdx := fromIQ(iqIdx)(iuIdx).bits.common.robIdx
          og0resp.bits.uopIdx := fromIQ(iqIdx)(iuIdx).bits.common.vpu.getOrElse(0.U.asTypeOf(new VPUCtrlSignals)).vuopIdx
          og0resp.bits.rfWen := fromIQ(iqIdx)(iuIdx).bits.common.rfWen.getOrElse(false.B)
          og0resp.bits.fuType := fromIQ(iqIdx)(iuIdx).bits.common.fuType

          val og1resp = toIU.og1resp
          og1FailedVec2(iqIdx)(iuIdx) := s1_toExuValid(iqIdx)(iuIdx) && !toExuFire(iqIdx)(iuIdx)
          og1resp.valid := s1_toExuValid(iqIdx)(iuIdx)
          // respType: fuIdle      -> IQ entry clear
          //           fuUncertain -> IQ entry no action
          //           fuBusy      -> IQ entry issued set false, then re-issue
          // Only hyu, lda and sta are fuUncertain at OG1 stage
          og1resp.bits.respType := Mux(
            !og1FailedVec2(iqIdx)(iuIdx),
            if (toIU.issueQueueParams match { case x => x.isHyAddrIQ || x.isLdAddrIQ || x.isStAddrIQ }) RSFeedbackType.fuUncertain else RSFeedbackType.fuIdle,
            RSFeedbackType.fuBusy
          )
          og1resp.bits.dataInvalidSqIdx := DontCare
          og1resp.bits.robIdx := s1_toExuData(iqIdx)(iuIdx).robIdx
          og1resp.bits.uopIdx := s1_toExuData(iqIdx)(iuIdx).vpu.getOrElse(0.U.asTypeOf(new VPUCtrlSignals)).vuopIdx
          og1resp.bits.rfWen := s1_toExuData(iqIdx)(iuIdx).rfWen.getOrElse(false.B)
          og1resp.bits.fuType := s1_toExuData(iqIdx)(iuIdx).fuType
      }
  }
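
  // Cancel broadcasting: og0CancelOH flags uops presented by an IQ this cycle that did not fire
  // at OG0, og1CancelOH flags uops valid at OG1 that did not enter their EXU, and
  // cancelToBusyTable reports the OG0 cancellations together with the destination register
  // (int writes to pdest 0 are excluded).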
  io.og0CancelOH := VecInit(fromFlattenIQ.map(x => x.valid && !x.fire)).asUInt
  io.og1CancelOH := VecInit(toFlattenExu.map(x => x.valid && !x.fire)).asUInt

  io.cancelToBusyTable.zipWithIndex.foreach { case (cancel, i) =>
    cancel.valid := fromFlattenIQ(i).valid && !fromFlattenIQ(i).fire && {
      if (fromFlattenIQ(i).bits.common.rfWen.isDefined)
        fromFlattenIQ(i).bits.common.rfWen.get && fromFlattenIQ(i).bits.common.pdest =/= 0.U
      else
        true.B
    }
    cancel.bits.rfWen := fromFlattenIQ(i).bits.common.rfWen.getOrElse(false.B)
    cancel.bits.fpWen := fromFlattenIQ(i).bits.common.fpWen.getOrElse(false.B)
    cancel.bits.vecWen := fromFlattenIQ(i).bits.common.vecWen.getOrElse(false.B)
    cancel.bits.pdest := fromFlattenIQ(i).bits.common.pdest
  }

  for (i <- toExu.indices) {
    for (j <- toExu(i).indices) {
      // s1Reg --[Ctrl]--> exu(s1) ---------- begin
      // refs
      val sinkData = toExu(i)(j).bits
      // assign
      toExu(i)(j).valid := s1_toExuValid(i)(j)
      s1_toExuReady(i)(j) := toExu(i)(j).ready
      sinkData := s1_toExuData(i)(j)
      // s1Reg --[Ctrl]--> exu(s1) ---------- end

      // s1Reg --[Data]--> exu(s1) ---------- begin
      // data source1: preg read data
      for (k <- sinkData.src.indices) {
        val srcDataTypeSet: Set[DataConfig] = sinkData.params.getSrcDataType(k)

        val readRfMap: Seq[(Bool, UInt)] = (Seq(None) :+
          (if (s1_intPregRData(i)(j).isDefinedAt(k) && srcDataTypeSet.intersect(IntRegSrcDataSet).nonEmpty)
            Some(SrcType.isXp(s1_srcType(i)(j)(k)) -> s1_intPregRData(i)(j)(k))
          else None) :+
          (if (s1_vfPregRData(i)(j).isDefinedAt(k) && srcDataTypeSet.intersect(VfRegSrcDataSet).nonEmpty)
            Some(SrcType.isVfp(s1_srcType(i)(j)(k)) -> s1_vfPregRData(i)(j)(k))
          else None)
        ).filter(_.nonEmpty).map(_.get)
        if (readRfMap.nonEmpty)
          sinkData.src(k) := Mux1H(readRfMap)
      }

      // data source2: extracted imm and pc saved in s1Reg
      if (sinkData.params.immType.nonEmpty && sinkData.src.size > 1) {
        when(SrcType.isImm(s1_srcType(i)(j)(1))) {
          sinkData.src(1) := s1_toExuData(i)(j).src(1)
        }
      }
      if (sinkData.params.hasJmpFu) {
        when(SrcType.isPc(s1_srcType(i)(j)(0))) {
          sinkData.src(0) := s1_toExuData(i)(j).src(0)
        }
      } else if (sinkData.params.hasVecFu) {
        when(SrcType.isImm(s1_srcType(i)(j)(0))) {
          sinkData.src(0) := s1_toExuData(i)(j).src(0)
        }
      } else if (sinkData.params.hasLoadFu || sinkData.params.hasHyldaFu) {
        when(SrcType.isImm(s1_srcType(i)(j)(0))) {
          sinkData.src(0) := s1_toExuData(i)(j).src(0)
        }
      }
      // s1Reg --[Data]--> exu(s1) ---------- end
    }
  }

  if (env.AlwaysBasicDiff || env.EnableDifftest) {
    val delayedCnt = 2
    val difftestArchIntRegState = DifftestModule(new DiffArchIntRegState, delay = delayedCnt)
    difftestArchIntRegState.coreid := io.hartId
    difftestArchIntRegState.value := intDebugRead.get._2

    val difftestArchFpRegState = DifftestModule(new DiffArchFpRegState, delay = delayedCnt)
    difftestArchFpRegState.coreid := io.hartId
    difftestArchFpRegState.value := fpDebugReadData.get

    val difftestArchVecRegState = DifftestModule(new DiffArchVecRegState, delay = delayedCnt)
    difftestArchVecRegState.coreid := io.hartId
    difftestArchVecRegState.value := vecDebugReadData.get
  }
}
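
// IO bundle of DataPath: issue handshakes from the int/vf/mem schedulers, ExuInput ports toward
// the EXUs, regfile writeback inputs, OG0/OG1 cancel and response signals, and the debug read
// paths used by difftest.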
class DataPathIO()(implicit p: Parameters, params: BackendParams) extends XSBundle {
  // params
  private val intSchdParams = params.schdParams(IntScheduler())
  private val vfSchdParams = params.schdParams(VfScheduler())
  private val memSchdParams = params.schdParams(MemScheduler())
  // bundles
  val hartId = Input(UInt(8.W))

  val flush: ValidIO[Redirect] = Flipped(ValidIO(new Redirect))

  // Todo: check if this can be removed
  val vconfigReadPort = new RfReadPort(XLEN, PhyRegIdxWidth)

  val vldReadPort = new RfReadPort(VLEN, PhyRegIdxWidth)

  val wbConfictRead = Input(MixedVec(params.allSchdParams.map(x => MixedVec(x.issueBlockParams.map(x => x.genWbConflictBundle())))))

  val fromIntIQ: MixedVec[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] =
    Flipped(MixedVec(intSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle)))

  val fromMemIQ: MixedVec[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] =
    Flipped(MixedVec(memSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle)))

  val fromVfIQ = Flipped(MixedVec(vfSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle)))

  val toIntIQ = MixedVec(intSchdParams.issueBlockParams.map(_.genOGRespBundle))

  val toMemIQ = MixedVec(memSchdParams.issueBlockParams.map(_.genOGRespBundle))

  val toVfIQ = MixedVec(vfSchdParams.issueBlockParams.map(_.genOGRespBundle))

  val og0CancelOH = Output(ExuOH(backendParams.numExu))

  val og1CancelOH = Output(ExuOH(backendParams.numExu))

  val ldCancel = Vec(backendParams.LduCnt + backendParams.HyuCnt, Flipped(new LoadCancelIO))

  val cancelToBusyTable = Vec(backendParams.numExu, ValidIO(new CancelSignal))

  val toIntExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = intSchdParams.genExuInputBundle

  val toFpExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = MixedVec(vfSchdParams.genExuInputBundle)

  val toMemExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = memSchdParams.genExuInputBundle

  val fromIntWb: MixedVec[RfWritePortWithConfig] = MixedVec(params.genIntWriteBackBundle)

  val fromVfWb: MixedVec[RfWritePortWithConfig] = MixedVec(params.genVfWriteBackBundle)

  val debugIntRat = if (params.debugEn) Some(Input(Vec(32, UInt(intSchdParams.pregIdxWidth.W)))) else None
  val debugFpRat = if (params.debugEn) Some(Input(Vec(32, UInt(vfSchdParams.pregIdxWidth.W)))) else None
  val debugVecRat = if (params.debugEn) Some(Input(Vec(32, UInt(vfSchdParams.pregIdxWidth.W)))) else None
  val debugVconfigRat = if (params.debugEn) Some(Input(UInt(vfSchdParams.pregIdxWidth.W))) else None
  val debugVconfig = if (params.debugEn) Some(Output(UInt(XLEN.W))) else None
}