xref: /XiangShan/src/main/scala/xiangshan/backend/datapath/DataPath.scala (revision 1592abd11eecf7bec0f1453ffe4a7617167f8ba9)
1package xiangshan.backend.datapath
2
3import org.chipsalliance.cde.config.Parameters
4import chisel3._
5import chisel3.util._
6import difftest.{DiffArchFpRegState, DiffArchIntRegState, DiffArchVecRegState, DifftestModule}
7import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
8import utility._
9import utils.SeqUtils._
10import utils._
11import xiangshan._
12import xiangshan.backend.{BackendParams, ExcpModToVprf, PcToDataPathIO, VprfToExcpMod}
13import xiangshan.backend.Bundles._
14import xiangshan.backend.decode.ImmUnion
15import xiangshan.backend.datapath.DataConfig._
16import xiangshan.backend.datapath.RdConfig._
17import xiangshan.backend.issue.{FpScheduler, ImmExtractor, IntScheduler, MemScheduler, VfScheduler}
18import xiangshan.backend.issue.EntryBundles._
19import xiangshan.backend.regfile._
20import xiangshan.backend.regcache._
21import xiangshan.backend.fu.FuConfig
22import xiangshan.backend.fu.FuType.is0latency
23import xiangshan.mem.{LqPtr, SqPtr}
24
/** Diplomacy wrapper for the backend datapath.
  *
  * Holds the [[BackendParams]] implicitly for the implementation module and
  * prints the per-regfile read/write port counts. The `println`s run once at
  * elaboration time, not in hardware.
  */
class DataPath(params: BackendParams)(implicit p: Parameters) extends LazyModule {
  // Keep this module as a separate hierarchy level in the emitted design.
  override def shouldBeInlined: Boolean = false

  private implicit val dpParams: BackendParams = params
  lazy val module = new DataPathImp(this)

  // Elaboration-time report of physical-regfile port configuration.
  println(s"[DataPath] Preg Params: ")
  println(s"[DataPath]   Int R(${params.getRfReadSize(IntData())}), W(${params.getRfWriteSize(IntData())}) ")
  println(s"[DataPath]   Fp R(${params.getRfReadSize(FpData())}), W(${params.getRfWriteSize(FpData())}) ")
  println(s"[DataPath]   Vf R(${params.getRfReadSize(VecData())}), W(${params.getRfWriteSize(VecData())}) ")
  println(s"[DataPath]   V0 R(${params.getRfReadSize(V0Data())}), W(${params.getRfWriteSize(V0Data())}) ")
  println(s"[DataPath]   Vl R(${params.getRfReadSize(VlData())}), W(${params.getRfWriteSize(VlData())}) ")
}
38
39class DataPathImp(override val wrapper: DataPath)(implicit p: Parameters, params: BackendParams)
40  extends LazyModuleImp(wrapper) with HasXSParameter with HasPerfEvents {
41
  val io = IO(new DataPathIO())

  // Shorthand references for each scheduler domain's issue/response/execute channels.
  private val (fromIntIQ, toIntIQ, toIntExu) = (io.fromIntIQ, io.toIntIQ, io.toIntExu)
  private val (fromFpIQ,  toFpIQ,  toFpExu)  = (io.fromFpIQ,  io.toFpIQ,  io.toFpExu)
  private val (fromMemIQ, toMemIQ, toMemExu) = (io.fromMemIQ, io.toMemIQ, io.toMemExu)
  private val (fromVfIQ,  toVfIQ,  toVfExu ) = (io.fromVfIQ,  io.toVfIQ,  io.toVecExu)
  private val (fromVecExcp, toVecExcp)       = (io.fromVecExcpMod, io.toVecExcpMod)

  println(s"[DataPath] IntIQ(${fromIntIQ.size}), FpIQ(${fromFpIQ.size}), VecIQ(${fromVfIQ.size}), MemIQ(${fromMemIQ.size})")
  println(s"[DataPath] IntExu(${fromIntIQ.map(_.size).sum}), FpExu(${fromFpIQ.map(_.size).sum}), VecExu(${fromVfIQ.map(_.size).sum}), MemExu(${fromMemIQ.map(_.size).sum})")

  // just references for convenience; concatenation order (int, fp, vf, mem) is
  // relied upon by all the per-IQ indexing below.
  private val fromIQ: Seq[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] = (fromIntIQ ++ fromFpIQ ++ fromVfIQ ++ fromMemIQ).toSeq

  private val toIQs = toIntIQ ++ toFpIQ ++ toVfIQ ++ toMemIQ

  private val toExu: Seq[MixedVec[DecoupledIO[ExuInput]]] = (toIntExu ++ toFpExu ++ toVfExu ++ toMemExu).toSeq

  private val fromFlattenIQ: Seq[DecoupledIO[IssueQueueIssueBundle]] = fromIQ.flatten

  private val toFlattenExu: Seq[DecoupledIO[ExuInput]] = toExu.flatten

  // Arbiters that detect write-back port collisions, one per regfile type.
  private val intWbBusyArbiter = Module(new IntRFWBCollideChecker(backendParams))
  private val fpWbBusyArbiter = Module(new FpRFWBCollideChecker(backendParams))
  private val vfWbBusyArbiter = Module(new VfRFWBCollideChecker(backendParams))
  private val v0WbBusyArbiter = Module(new V0RFWBCollideChecker(backendParams))
  private val vlWbBusyArbiter = Module(new VlRFWBCollideChecker(backendParams))

  // Arbiters for regfile read ports, one per regfile type.
  private val intRFReadArbiter = Module(new IntRFReadArbiter(backendParams))
  private val fpRFReadArbiter = Module(new FpRFReadArbiter(backendParams))
  private val vfRFReadArbiter = Module(new VfRFReadArbiter(backendParams))
  private val v0RFReadArbiter = Module(new V0RFReadArbiter(backendParams))
  private val vlRFReadArbiter = Module(new VlRFReadArbiter(backendParams))

  // Per-IQ, per-issue-port failure flags for the og0/og1 pipeline stages
  // (driven further down in this file).
  private val og0FailedVec2: MixedVec[Vec[Bool]] = Wire(MixedVec(fromIQ.map(x => Vec(x.size, Bool())).toSeq))
  private val og1FailedVec2: MixedVec[Vec[Bool]] = Wire(MixedVec(fromIQ.map(x => Vec(x.size, Bool())).toSeq))

  // port -> win: for each IQ / issue port / source, whether the read request
  // won its arbiter (the arbiter input's `ready`).
  private val intRdArbWinner: Seq2[MixedVec[Bool]] = intRFReadArbiter.io.in.map(_.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq).toSeq
  private val fpRdArbWinner: Seq2[MixedVec[Bool]] = fpRFReadArbiter.io.in.map(_.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq).toSeq
  private val vfRdArbWinner: Seq2[MixedVec[Bool]] = vfRFReadArbiter.io.in.map(_.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq).toSeq
  private val v0RdArbWinner: Seq2[MixedVec[Bool]] = v0RFReadArbiter.io.in.map(_.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq).toSeq
  private val vlRdArbWinner: Seq2[MixedVec[Bool]] = vlRFReadArbiter.io.in.map(_.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq).toSeq

  // Per-IQ, per-issue-port: write-back port not blocked by a collision.
  private val intWbNotBlock: Seq[MixedVec[Bool]] = intWbBusyArbiter.io.in.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq
  private val fpWbNotBlock: Seq[MixedVec[Bool]] = fpWbBusyArbiter.io.in.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq
  private val vfWbNotBlock: Seq[MixedVec[Bool]] = vfWbBusyArbiter.io.in.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq
  private val v0WbNotBlock: Seq[MixedVec[Bool]] = v0WbBusyArbiter.io.in.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq
  private val vlWbNotBlock: Seq[MixedVec[Bool]] = vlWbBusyArbiter.io.in.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq

  // AND-reduction over all sources: the issue port may proceed only when every
  // one of its read requests won arbitration.
  private val intRdNotBlock: Seq2[Bool] = intRdArbWinner.map(_.map(_.asUInt.andR))
  private val fpRdNotBlock: Seq2[Bool] = fpRdArbWinner.map(_.map(_.asUInt.andR))
  private val vfRdNotBlock: Seq2[Bool] = vfRdArbWinner.map(_.map(_.asUInt.andR))
  private val v0RdNotBlock: Seq2[Bool] = v0RdArbWinner.map(_.map(_.asUInt.andR))
  private val vlRdNotBlock: Seq2[Bool] = vlRdArbWinner.map(_.map(_.asUInt.andR))

  // NOTE: all five of these are the same full view of the IQ read bundles; the
  // per-regfile filtering happens below via each exu's getRfReadSrcIdx indices.
  private val intRFReadReq: Seq3[ValidIO[RfReadPortWithConfig]] = fromIQ.map(x => x.map(xx => xx.bits.getRfReadValidBundle(xx.valid)).toSeq).toSeq
  private val fpRFReadReq: Seq3[ValidIO[RfReadPortWithConfig]] = fromIQ.map(x => x.map(xx => xx.bits.getRfReadValidBundle(xx.valid)).toSeq).toSeq
  private val vfRFReadReq: Seq3[ValidIO[RfReadPortWithConfig]] = fromIQ.map(x => x.map(xx => xx.bits.getRfReadValidBundle(xx.valid)).toSeq).toSeq
  private val v0RFReadReq: Seq3[ValidIO[RfReadPortWithConfig]] = fromIQ.map(x => x.map(xx => xx.bits.getRfReadValidBundle(xx.valid)).toSeq).toSeq
  private val vlRFReadReq: Seq3[ValidIO[RfReadPortWithConfig]] = fromIQ.map(x => x.map(xx => xx.bits.getRfReadValidBundle(xx.valid)).toSeq).toSeq

  // Per-IQ, per-issue-port: data source of each operand, and each exu's source count.
  private val allDataSources: Seq[Seq[Vec[DataSource]]] = fromIQ.map(x => x.map(xx => xx.bits.common.dataSources).toSeq)
  private val allNumRegSrcs: Seq[Seq[Int]] = fromIQ.map(x => x.map(xx => xx.bits.exuParams.numRegSrc).toSeq)
106
  // ---- Regfile read-arbiter input wiring -----------------------------------
  // Five structurally identical loops, one per regfile type. For each
  // (IQ, issue port, source index): if this source reads from this regfile
  // (per the exu's static src-index set), forward valid/addr; otherwise tie
  // the arbiter input off. Valid is further gated by dataSources(...).readReg
  // so bypassed operands do not consume a read port.

  // Integer regfile: indices come directly from getRfReadSrcIdx(IntData()).
  intRFReadArbiter.io.in.zip(intRFReadReq).zipWithIndex.foreach { case ((arbInSeq2, inRFReadReqSeq2), iqIdx) =>
    arbInSeq2.zip(inRFReadReqSeq2).zipWithIndex.foreach { case ((arbInSeq, inRFReadReqSeq), exuIdx) =>
      val srcIndices: Seq[Int] = fromIQ(iqIdx)(exuIdx).bits.exuParams.getRfReadSrcIdx(IntData())
      for (srcIdx <- 0 until fromIQ(iqIdx)(exuIdx).bits.exuParams.numRegSrc) {
        if (srcIndices.contains(srcIdx) && inRFReadReqSeq.isDefinedAt(srcIdx)) {
          arbInSeq(srcIdx).valid := inRFReadReqSeq(srcIdx).valid && allDataSources(iqIdx)(exuIdx)(srcIdx).readReg
          arbInSeq(srcIdx).bits.addr := inRFReadReqSeq(srcIdx).bits.addr
        } else {
          arbInSeq(srcIdx).valid := false.B
          arbInSeq(srcIdx).bits.addr := 0.U
        }
      }
    }
  }
  // Fp regfile: union of src indices over all fp-register data types.
  fpRFReadArbiter.io.in.zip(fpRFReadReq).zipWithIndex.foreach { case ((arbInSeq2, inRFReadReqSeq2), iqIdx) =>
    arbInSeq2.zip(inRFReadReqSeq2).zipWithIndex.foreach { case ((arbInSeq, inRFReadReqSeq), exuIdx) =>
      val srcIndices: Seq[Int] = FpRegSrcDataSet.flatMap(data => fromIQ(iqIdx)(exuIdx).bits.exuParams.getRfReadSrcIdx(data)).toSeq.sorted
      for (srcIdx <- 0 until fromIQ(iqIdx)(exuIdx).bits.exuParams.numRegSrc) {
        if (srcIndices.contains(srcIdx) && inRFReadReqSeq.isDefinedAt(srcIdx)) {
          arbInSeq(srcIdx).valid := inRFReadReqSeq(srcIdx).valid && allDataSources(iqIdx)(exuIdx)(srcIdx).readReg
          arbInSeq(srcIdx).bits.addr := inRFReadReqSeq(srcIdx).bits.addr
        } else {
          arbInSeq(srcIdx).valid := false.B
          arbInSeq(srcIdx).bits.addr := 0.U
        }
      }
    }
  }

  // Vector regfile: union of src indices over all vector-register data types.
  vfRFReadArbiter.io.in.zip(vfRFReadReq).zipWithIndex.foreach { case ((arbInSeq2, inRFReadReqSeq2), iqIdx) =>
    arbInSeq2.zip(inRFReadReqSeq2).zipWithIndex.foreach { case ((arbInSeq, inRFReadReqSeq), exuIdx) =>
      val srcIndices: Seq[Int] = VecRegSrcDataSet.flatMap(data => fromIQ(iqIdx)(exuIdx).bits.exuParams.getRfReadSrcIdx(data)).toSeq.sorted
      for (srcIdx <- 0 until fromIQ(iqIdx)(exuIdx).bits.exuParams.numRegSrc) {
        if (srcIndices.contains(srcIdx) && inRFReadReqSeq.isDefinedAt(srcIdx)) {
          arbInSeq(srcIdx).valid := inRFReadReqSeq(srcIdx).valid && allDataSources(iqIdx)(exuIdx)(srcIdx).readReg
          arbInSeq(srcIdx).bits.addr := inRFReadReqSeq(srcIdx).bits.addr
        } else {
          arbInSeq(srcIdx).valid := false.B
          arbInSeq(srcIdx).bits.addr := 0.U
        }
      }
    }
  }

  // V0 (mask register) regfile.
  v0RFReadArbiter.io.in.zip(v0RFReadReq).zipWithIndex.foreach { case ((arbInSeq2, inRFReadReqSeq2), iqIdx) =>
    arbInSeq2.zip(inRFReadReqSeq2).zipWithIndex.foreach { case ((arbInSeq, inRFReadReqSeq), exuIdx) =>
      val srcIndices: Seq[Int] = V0RegSrcDataSet.flatMap(data => fromIQ(iqIdx)(exuIdx).bits.exuParams.getRfReadSrcIdx(data)).toSeq.sorted
      for (srcIdx <- 0 until fromIQ(iqIdx)(exuIdx).bits.exuParams.numRegSrc) {
        if (srcIndices.contains(srcIdx) && inRFReadReqSeq.isDefinedAt(srcIdx)) {
          arbInSeq(srcIdx).valid := inRFReadReqSeq(srcIdx).valid && allDataSources(iqIdx)(exuIdx)(srcIdx).readReg
          arbInSeq(srcIdx).bits.addr := inRFReadReqSeq(srcIdx).bits.addr
        } else {
          arbInSeq(srcIdx).valid := false.B
          arbInSeq(srcIdx).bits.addr := 0.U
        }
      }
    }
  }

  // Vl (vector length) regfile.
  vlRFReadArbiter.io.in.zip(vlRFReadReq).zipWithIndex.foreach { case ((arbInSeq2, inRFReadReqSeq2), iqIdx) =>
    arbInSeq2.zip(inRFReadReqSeq2).zipWithIndex.foreach { case ((arbInSeq, inRFReadReqSeq), exuIdx) =>
      val srcIndices: Seq[Int] = VlRegSrcDataSet.flatMap(data => fromIQ(iqIdx)(exuIdx).bits.exuParams.getRfReadSrcIdx(data)).toSeq.sorted
      for (srcIdx <- 0 until fromIQ(iqIdx)(exuIdx).bits.exuParams.numRegSrc) {
        if (srcIndices.contains(srcIdx) && inRFReadReqSeq.isDefinedAt(srcIdx)) {
          arbInSeq(srcIdx).valid := inRFReadReqSeq(srcIdx).valid && allDataSources(iqIdx)(exuIdx)(srcIdx).readReg
          arbInSeq(srcIdx).bits.addr := inRFReadReqSeq(srcIdx).bits.addr
        } else {
          arbInSeq(srcIdx).valid := false.B
          arbInSeq(srcIdx).bits.addr := 0.U
        }
      }
    }
  }
180
  // ---- Write-back collision checker wiring ---------------------------------
  // Per (IQ, issue port): does this issuing uop intend to write each regfile?
  // Missing wen fields (exu cannot write that regfile) default to false.
  private val intRFWriteReq: Seq2[Bool] = fromIQ.map(x => x.map(xx => xx.valid && xx.bits.common.rfWen.getOrElse(false.B)).toSeq).toSeq
  private val fpRFWriteReq: Seq2[Bool] = fromIQ.map(x => x.map(xx => xx.valid && xx.bits.common.fpWen.getOrElse(false.B)).toSeq).toSeq
  private val vfRFWriteReq: Seq2[Bool] = fromIQ.map(x => x.map(xx => xx.valid && xx.bits.common.vecWen.getOrElse(false.B)).toSeq).toSeq
  private val v0RFWriteReq: Seq2[Bool] = fromIQ.map(x => x.map(xx => xx.valid && xx.bits.common.v0Wen.getOrElse(false.B)).toSeq).toSeq
  private val vlRFWriteReq: Seq2[Bool] = fromIQ.map(x => x.map(xx => xx.valid && xx.bits.common.vlWen.getOrElse(false.B)).toSeq).toSeq

  // Feed each write intent into the corresponding collision checker; the
  // checker's `ready` (consumed above as *WbNotBlock) reports no collision.
  intWbBusyArbiter.io.in.zip(intRFWriteReq).foreach { case (arbInSeq, inRFWriteReqSeq) =>
    arbInSeq.zip(inRFWriteReqSeq).foreach { case (arbIn, inRFWriteReq) =>
      arbIn.valid := inRFWriteReq
    }
  }

  fpWbBusyArbiter.io.in.zip(fpRFWriteReq).foreach { case (arbInSeq, inRFWriteReqSeq) =>
    arbInSeq.zip(inRFWriteReqSeq).foreach { case (arbIn, inRFWriteReq) =>
      arbIn.valid := inRFWriteReq
    }
  }

  vfWbBusyArbiter.io.in.zip(vfRFWriteReq).foreach { case (arbInSeq, inRFWriteReqSeq) =>
    arbInSeq.zip(inRFWriteReqSeq).foreach { case (arbIn, inRFWriteReq) =>
      arbIn.valid := inRFWriteReq
    }
  }

  v0WbBusyArbiter.io.in.zip(v0RFWriteReq).foreach { case (arbInSeq, inRFWriteReqSeq) =>
    arbInSeq.zip(inRFWriteReqSeq).foreach { case (arbIn, inRFWriteReq) =>
      arbIn.valid := inRFWriteReq
    }
  }

  vlWbBusyArbiter.io.in.zip(vlRFWriteReq).foreach { case (arbInSeq, inRFWriteReqSeq) =>
    arbInSeq.zip(inRFWriteReqSeq).foreach { case (arbIn, inRFWriteReq) =>
      arbIn.valid := inRFWriteReq
    }
  }
216
  // ---- Scheduler parameters and regfile port wires -------------------------
  private val intSchdParams = params.schdParams(IntScheduler())
  private val fpSchdParams = params.schdParams(FpScheduler())
  private val vfSchdParams = params.schdParams(VfScheduler())
  private val memSchdParams = params.schdParams(MemScheduler())

  private val schdParams = params.allSchdParams

  // PC-read request wires mirroring the fromPcTargetMem interface shapes.
  private val pcReadValid = Wire(chiselTypeOf(io.fromPcTargetMem.fromDataPathValid))
  private val pcReadFtqPtr = Wire(chiselTypeOf(io.fromPcTargetMem.fromDataPathFtqPtr))
  private val pcReadFtqOffset = Wire(chiselTypeOf(io.fromPcTargetMem.fromDataPathFtqOffset))
  private val targetPCRdata = io.fromPcTargetMem.toDataPathTargetPC
  private val pcRdata = io.fromPcTargetMem.toDataPathPC
  // Integer regfile read/write port wires.
  private val intRfRaddr = Wire(Vec(params.numPregRd(IntData()), UInt(intSchdParams.pregIdxWidth.W)))
  private val intRfRdata = Wire(Vec(params.numPregRd(IntData()), UInt(intSchdParams.rfDataWidth.W)))
  private val intRfWen = Wire(Vec(io.fromIntWb.length, Bool()))
  private val intRfWaddr = Wire(Vec(io.fromIntWb.length, UInt(intSchdParams.pregIdxWidth.W)))
  private val intRfWdata = Wire(Vec(io.fromIntWb.length, UInt(intSchdParams.rfDataWidth.W)))

  // Fp regfile read/write port wires.
  private val fpRfRaddr = Wire(Vec(params.numPregRd(FpData()), UInt(fpSchdParams.pregIdxWidth.W)))
  private val fpRfRdata = Wire(Vec(params.numPregRd(FpData()), UInt(fpSchdParams.rfDataWidth.W)))
  private val fpRfWen = Wire(Vec(io.fromFpWb.length, Bool()))
  private val fpRfWaddr = Wire(Vec(io.fromFpWb.length, UInt(fpSchdParams.pregIdxWidth.W)))
  private val fpRfWdata = Wire(Vec(io.fromFpWb.length, UInt(fpSchdParams.rfDataWidth.W)))

  // Vector regfile: split into 4 banks, so wen is per-split (2-D).
  private val vfRfSplitNum = 4
  private val vfRfRaddr = Wire(Vec(params.numPregRd(VecData()), UInt(vfSchdParams.pregIdxWidth.W)))
  private val vfRfRdata = Wire(Vec(params.numPregRd(VecData()), UInt(vfSchdParams.rfDataWidth.W)))
  private val vfRfWen = Wire(Vec(vfRfSplitNum, Vec(io.fromVfWb.length, Bool())))
  private val vfRfWaddr = Wire(Vec(io.fromVfWb.length, UInt(vfSchdParams.pregIdxWidth.W)))
  private val vfRfWdata = Wire(Vec(io.fromVfWb.length, UInt(vfSchdParams.rfDataWidth.W)))

  // V0 regfile: one XLEN-wide split per VLEN/XLEN chunk.
  private val v0RfSplitNum = VLEN / XLEN
  private val v0RfRaddr = Wire(Vec(params.numPregRd(V0Data()), UInt(log2Up(V0PhyRegs).W)))
  private val v0RfRdata = Wire(Vec(params.numPregRd(V0Data()), UInt(V0Data().dataWidth.W)))
  private val v0RfWen = Wire(Vec(v0RfSplitNum, Vec(io.fromV0Wb.length, Bool())))
  private val v0RfWaddr = Wire(Vec(io.fromV0Wb.length, UInt(log2Up(V0PhyRegs).W)))
  private val v0RfWdata = Wire(Vec(io.fromV0Wb.length, UInt(V0Data().dataWidth.W)))

  // Vl regfile read/write port wires.
  private val vlRfRaddr = Wire(Vec(params.numPregRd(VlData()), UInt(log2Up(VlPhyRegs).W)))
  private val vlRfRdata = Wire(Vec(params.numPregRd(VlData()), UInt(VlData().dataWidth.W)))
  private val vlRfWen = Wire(Vec(io.fromVlWb.length, Bool()))
  private val vlRfWaddr = Wire(Vec(io.fromVlWb.length, UInt(log2Up(VlPhyRegs).W)))
  private val vlRfWdata = Wire(Vec(io.fromVlWb.length, UInt(VlData().dataWidth.W)))
260
  // ---- PC read requests toward the PC/target memory ------------------------
  // Only int/mem issue ports whose exu needs the PC participate; the count
  // must match the interface width (checked at elaboration).
  val pcReadFtqPtrFormIQ = (fromIntIQ ++ fromMemIQ).flatten.filter(x => x.bits.exuParams.needPc)
  assert(pcReadFtqPtrFormIQ.size == pcReadFtqPtr.size, s"pcReadFtqPtrFormIQ.size ${pcReadFtqPtrFormIQ.size} not equal pcReadFtqPtr.size ${pcReadFtqPtr.size}")
  pcReadValid.zip(pcReadFtqPtrFormIQ.map(_.valid)).map(x => x._1 := x._2)
  pcReadFtqPtr.zip(pcReadFtqPtrFormIQ.map(_.bits.common.ftqIdx.get)).map(x => x._1 := x._2)
  pcReadFtqOffset.zip(pcReadFtqPtrFormIQ.map(_.bits.common.ftqOffset.get)).map(x => x._1 := x._2)
  io.fromPcTargetMem.fromDataPathValid := pcReadValid
  io.fromPcTargetMem.fromDataPathFtqPtr := pcReadFtqPtr
  io.fromPcTargetMem.fromDataPathFtqOffset := pcReadFtqOffset

  // ---- Difftest debug read ports (present only when basicDebugEn) ----------
  // Each pair is (debug read address vector, debug read data vector).
  private val intDiffRead: Option[(Vec[UInt], Vec[UInt])] =
    OptionWrapper(backendParams.basicDebugEn, (Wire(Vec(32, UInt(intSchdParams.pregIdxWidth.W))), Wire(Vec(32, UInt(XLEN.W)))))
  private val fpDiffRead: Option[(Vec[UInt], Vec[UInt])] =
    OptionWrapper(backendParams.basicDebugEn, (Wire(Vec(32, UInt(fpSchdParams.pregIdxWidth.W))), Wire(Vec(32, UInt(XLEN.W)))))
  // 31 entries: v0 lives in its own regfile, read separately below.
  private val vfDiffRead: Option[(Vec[UInt], Vec[UInt])] =
    OptionWrapper(backendParams.basicDebugEn, (Wire(Vec(31, UInt(vfSchdParams.pregIdxWidth.W))), Wire(Vec(31, UInt(VLEN.W)))))
  private val v0DiffRead: Option[(Vec[UInt], Vec[UInt])] =
    OptionWrapper(backendParams.basicDebugEn, (Wire(Vec(1, UInt(log2Up(V0PhyRegs).W))), Wire(Vec(1, UInt(V0Data().dataWidth.W)))))
  private val vlDiffRead: Option[(Vec[UInt], Vec[UInt])] =
    OptionWrapper(backendParams.basicDebugEn, (Wire(Vec(1, UInt(log2Up(VlPhyRegs).W))), Wire(Vec(1, UInt(VlData().dataWidth.W)))))

  private val fpDiffReadData: Option[Vec[UInt]] =
    OptionWrapper(backendParams.basicDebugEn, Wire(Vec(32, UInt(XLEN.W))))
  private val vecDiffReadData: Option[Vec[UInt]] =
    OptionWrapper(backendParams.basicDebugEn, Wire(Vec(64, UInt(64.W)))) // v0 = Cat(Vec(1), Vec(0))
  private val vlDiffReadData: Option[UInt] =
    OptionWrapper(backendParams.basicDebugEn, Wire(UInt(VlData().dataWidth.W)))


  fpDiffReadData.foreach(_ := fpDiffRead
    .get._2
    .slice(0, 32)
    .map(_(63, 0))
  ) // fp only used [63, 0]
  // Concatenate v0 (first) then v1..v31, each VLEN register split into two
  // 64-bit halves (low half first).
  vecDiffReadData.foreach(_ :=
    v0DiffRead
    .get._2
    .slice(0, 1)
    .map(x => Seq(x(63, 0), x(127, 64))).flatten ++
    vfDiffRead
    .get._2
    .slice(0, 31)
    .map(x => Seq(x(63, 0), x(127, 64))).flatten
  )
  vlDiffReadData.foreach(_ := vlDiffRead
    .get._2(0)
  )

  io.diffVl.foreach(_ := vlDiffReadData.get)
309
  // ---- Physical register file instantiations -------------------------------
  // Each factory elaborates a regfile and connects the port wires declared
  // above; debug read ports are attached only when the diff options exist.
  IntRegFileSplit("IntRegFile", intSchdParams.numPregs, splitNum = 4, intRfRaddr, intRfRdata, intRfWen, intRfWaddr, intRfWdata,
    bankNum = 1,
    debugReadAddr = intDiffRead.map(_._1),
    debugReadData = intDiffRead.map(_._2)
  )
  FpRegFileSplit("FpRegFile", fpSchdParams.numPregs, splitNum = 4, fpRfRaddr, fpRfRdata, fpRfWen, fpRfWaddr, fpRfWdata,
    bankNum = 1,
    debugReadAddr = fpDiffRead.map(_._1),
    debugReadData = fpDiffRead.map(_._2)
  )
  VfRegFile("VfRegFile", vfSchdParams.numPregs, vfRfSplitNum, vfRfRaddr, vfRfRdata, vfRfWen, vfRfWaddr, vfRfWdata,
    debugReadAddr = vfDiffRead.map(_._1),
    debugReadData = vfDiffRead.map(_._2)
  )
  // V0 reuses the vector regfile generator with its own split count.
  VfRegFile("V0RegFile", V0PhyRegs, v0RfSplitNum, v0RfRaddr, v0RfRdata, v0RfWen, v0RfWaddr, v0RfWdata,
    debugReadAddr = v0DiffRead.map(_._1),
    debugReadData = v0DiffRead.map(_._2)
  )
  // Vl reuses the fp regfile generator in its dedicated vl mode.
  FpRegFile("VlRegFile", VlPhyRegs, vlRfRaddr, vlRfRdata, vlRfWen, vlRfWaddr, vlRfWdata,
    bankNum = 1,
    isVlRegfile = true,
    debugReadAddr = vlDiffRead.map(_._1),
    debugReadData = vlDiffRead.map(_._2)
  )
334
  // ---- Regfile write/read port hookup --------------------------------------
  // Writes are registered one cycle: addr/data captured with RegEnable(wen),
  // wen delayed with RegNext, so the actual regfile write happens the cycle
  // after the write-back arrives.
  intRfWaddr := io.fromIntWb.map(x => RegEnable(x.addr, x.wen)).toSeq
  intRfWdata := io.fromIntWb.map(x => RegEnable(x.data, x.wen)).toSeq
  intRfWen := RegNext(VecInit(io.fromIntWb.map(_.wen).toSeq))

  // Read addresses come from the arbiter winners; unused ports read addr 0.
  for (portIdx <- intRfRaddr.indices) {
    if (intRFReadArbiter.io.out.isDefinedAt(portIdx))
      intRfRaddr(portIdx) := intRFReadArbiter.io.out(portIdx).bits.addr
    else
      intRfRaddr(portIdx) := 0.U
  }

  fpRfWaddr := io.fromFpWb.map(x => RegEnable(x.addr, x.wen)).toSeq
  fpRfWdata := io.fromFpWb.map(x => RegEnable(x.data, x.wen)).toSeq
  fpRfWen := RegNext(VecInit(io.fromFpWb.map(_.wen).toSeq))

  for (portIdx <- fpRfRaddr.indices) {
    if (fpRFReadArbiter.io.out.isDefinedAt(portIdx))
      fpRfRaddr(portIdx) := fpRFReadArbiter.io.out(portIdx).bits.addr
    else
      fpRfRaddr(portIdx) := 0.U
  }

  vfRfWaddr := io.fromVfWb.map(x => RegEnable(x.addr, x.wen)).toSeq
  vfRfWdata := io.fromVfWb.map(x => RegEnable(x.data, x.wen)).toSeq
  // Same delayed wen broadcast to every split of the banked vector regfile.
  vfRfWen.foreach(_.zip(io.fromVfWb.map(x => RegNext(x.wen))).foreach { case (wenSink, wenSource) => wenSink := wenSource } )

  for (portIdx <- vfRfRaddr.indices) {
    if (vfRFReadArbiter.io.out.isDefinedAt(portIdx))
      vfRfRaddr(portIdx) := vfRFReadArbiter.io.out(portIdx).bits.addr
    else
      vfRfRaddr(portIdx) := 0.U
  }

  v0RfWaddr := io.fromV0Wb.map(x => RegEnable(x.addr, x.wen)).toSeq
  v0RfWdata := io.fromV0Wb.map(x => RegEnable(x.data, x.wen)).toSeq
  v0RfWen.foreach(_.zip(io.fromV0Wb.map(x => RegNext(x.wen))).foreach { case (wenSink, wenSource) => wenSink := wenSource } )

  for (portIdx <- v0RfRaddr.indices) {
    if (v0RFReadArbiter.io.out.isDefinedAt(portIdx))
      v0RfRaddr(portIdx) := v0RFReadArbiter.io.out(portIdx).bits.addr
    else
      v0RfRaddr(portIdx) := 0.U
  }
378
  // ---- Vector-exception module RF port sharing -----------------------------
  // The vector exception handler borrows fixed regfile ports; these lists map
  // its i-th request to the shared port index. The `when` blocks below rely on
  // Chisel last-connect semantics to override the default wiring above.
  private val vecExcpUseVecRdPorts = Seq(6, 7, 8, 9, 10, 11, 0, 1)
  private val vecExcpUseVecWrPorts = Seq(1, 4, 5, 3)
  private val vecExcpUseV0RdPorts = Seq(2, 3)
  private val vecExcpUsev0WrPorts = Seq(4)

  // NOTE(review): declared `var` while v0WrPortsIter is `val`; presumably the
  // read iterator is reassigned later in the file (not visible here) — confirm
  // before changing to `val`.
  private var v0RdPortsIter: Iterator[Int] = vecExcpUseV0RdPorts.iterator
  private val v0WrPortsIter: Iterator[Int] = vecExcpUsev0WrPorts.iterator

  for (i <- fromVecExcp.r.indices) {
    when (fromVecExcp.r(i).valid && !fromVecExcp.r(i).bits.isV0) {
      vfRfRaddr(vecExcpUseVecRdPorts(i)) := fromVecExcp.r(i).bits.addr
    }
    // One v0 read port is consumed per merge group (elaboration-time `if`).
    if (i % maxMergeNumPerCycle == 0) {
      val v0RdPort = v0RdPortsIter.next()
      when (fromVecExcp.r(i).valid && fromVecExcp.r(i).bits.isV0) {
        v0RfRaddr(v0RdPort) := fromVecExcp.r(i).bits.addr
      }
    }
  }

  for (i <- fromVecExcp.w.indices) {
    when (fromVecExcp.w(i).valid && !fromVecExcp.w(i).bits.isV0) {
      val vecWrPort = vecExcpUseVecWrPorts(i)
      vfRfWen.foreach(_(vecWrPort) := true.B)
      vfRfWaddr(vecWrPort) := fromVecExcp.w(i).bits.newVdAddr
      vfRfWdata(vecWrPort) := fromVecExcp.w(i).bits.newVdData
    }
    if (i % maxMergeNumPerCycle == 0) {
      // `next()` sits inside the when block here (vs. outside in the read
      // loop); both run unconditionally at elaboration time, so the effect is
      // the same.
      when(fromVecExcp.w(i).valid && fromVecExcp.w(i).bits.isV0) {
        val v0WrPort = v0WrPortsIter.next()
        v0RfWen.foreach(_(v0WrPort) := true.B)
        v0RfWaddr(v0WrPort) := fromVecExcp.w(i).bits.newVdAddr
        v0RfWdata(v0WrPort) := fromVecExcp.w(i).bits.newVdData
      }
    }
  }

  // Vl regfile write (one-cycle delayed like the others) and read hookup.
  vlRfWaddr := io.fromVlWb.map(x => RegEnable(x.addr, x.wen)).toSeq
  vlRfWdata := io.fromVlWb.map(x => RegEnable(x.data, x.wen)).toSeq
  vlRfWen := io.fromVlWb.map(x => RegNext(x.wen)).toSeq

  for (portIdx <- vlRfRaddr.indices) {
    if (vlRFReadArbiter.io.out.isDefinedAt(portIdx))
      vlRfRaddr(portIdx) := vlRFReadArbiter.io.out(portIdx).bits.addr
    else
      vlRfRaddr(portIdx) := 0.U
  }
426
427
  // ---- Difftest: drive debug read addresses from the committed RATs --------
  intDiffRead.foreach { case (addr, _) =>
    addr := io.diffIntRat.get
  }

  fpDiffRead.foreach { case (addr, _) =>
    addr := io.diffFpRat.get
  }

  vfDiffRead.foreach { case (addr, _) =>
    addr := io.diffVecRat.get
  }
  v0DiffRead.foreach { case (addr, _) =>
    addr := io.diffV0Rat.get
  }
  vlDiffRead.foreach { case (addr, _) =>
    addr := io.diffVlRat.get
  }

  println(s"[DataPath] " +
    s"has intDiffRead: ${intDiffRead.nonEmpty}, " +
    s"has fpDiffRead: ${fpDiffRead.nonEmpty}, " +
    s"has vecDiffRead: ${vfDiffRead.nonEmpty}, " +
    s"has v0DiffRead: ${v0DiffRead.nonEmpty}, " +
    s"has vlDiffRead: ${vlDiffRead.nonEmpty}")

  // regcache: caches recently produced integer values to save RF read ports.
  private val regCache = Module(new RegCache())
455
456  def IssueBundle2RCReadPort(issue: DecoupledIO[IssueQueueIssueBundle]): Vec[RCReadPort] = {
457    val readPorts = Wire(Vec(issue.bits.exuParams.numIntSrc, new RCReadPort(params.intSchdParams.get.rfDataWidth, RegCacheIdxWidth)))
458    readPorts.zipWithIndex.foreach{ case (r, idx) =>
459      r.ren  := issue.valid && issue.bits.common.dataSources(idx).readRegCache
460      r.addr := issue.bits.rcIdx.get(idx)
461      r.data := DontCare
462    }
463    readPorts
464  }
465
  // Reg-cache read requests: int IQs first, then mem IQs, keeping only issue
  // ports whose exu has integer sources. Order must match the cache's ports.
  private val regCacheReadReq = fromIntIQ.flatten.filter(_.bits.exuParams.numIntSrc > 0).flatMap(IssueBundle2RCReadPort(_)) ++
                                fromMemIQ.flatten.filter(_.bits.exuParams.numIntSrc > 0).flatMap(IssueBundle2RCReadPort(_))
  private val regCacheReadData = regCache.io.readPorts.map(_.data)

  println(s"[DataPath] regCache readPorts size: ${regCache.io.readPorts.size}, regCacheReadReq size: ${regCacheReadReq.size}")
  require(regCache.io.readPorts.size == regCacheReadReq.size, "reg cache's readPorts size should be equal to regCacheReadReq")

  regCache.io.readPorts.zip(regCacheReadReq).foreach{ case (r, req) =>
    r.ren := req.ren
    r.addr := req.addr
  }

  // s1 reg-cache read data, laid out per (IQ, exu, src); defaults to 0 and is
  // overwritten for int exus (cache's leading ports) and int-reading mem exus
  // (cache's trailing ports).
  val s1_RCReadData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType).toSeq))))
  s1_RCReadData.foreach(_.foreach(_.foreach(_ := 0.U)))
  s1_RCReadData.zip(toExu).filter(_._2.map(_.bits.params.isIntExeUnit).reduce(_ || _)).flatMap(_._1).flatten
    .zip(regCacheReadData.take(params.getIntExuRCReadSize)).foreach{ case (s1_data, rdata) =>
      s1_data := rdata
    }
  s1_RCReadData.zip(toExu).filter(_._2.map(x => x.bits.params.isMemExeUnit && x.bits.params.readIntRf).reduce(_ || _)).flatMap(_._1).flatten
    .zip(regCacheReadData.takeRight(params.getMemExuRCReadSize)).foreach{ case (s1_data, rdata) =>
      s1_data := rdata
    }

  println(s"[DataPath] s1_RCReadData.int.size: ${s1_RCReadData.zip(toExu).filter(_._2.map(_.bits.params.isIntExeUnit).reduce(_ || _)).flatMap(_._1).flatten.size}, RCRdata.int.size: ${params.getIntExuRCReadSize}")
  println(s"[DataPath] s1_RCReadData.mem.size: ${s1_RCReadData.zip(toExu).filter(_._2.map(x => x.bits.params.isMemExeUnit && x.bits.params.readIntRf).reduce(_ || _)).flatMap(_._1).flatten.size}, RCRdata.mem.size: ${params.getMemExuRCReadSize}")

  io.toWakeupQueueRCIdx := regCache.io.toWakeupQueueRCIdx
  io.toBypassNetworkRCData := s1_RCReadData
  regCache.io.writePorts := io.fromBypassNetwork
494  regCache.io.writePorts := io.fromBypassNetwork
495
496  val s1_addrOHs = Reg(MixedVec(
497    fromIQ.map(x => MixedVec(x.map(_.bits.addrOH.cloneType).toSeq)).toSeq
498  ))
499  val s1_toExuValid: MixedVec[MixedVec[Bool]] = Reg(MixedVec(
500    toExu.map(x => MixedVec(x.map(_.valid.cloneType).toSeq)).toSeq
501  ))
502  val s1_toExuData: MixedVec[MixedVec[ExuInput]] = Reg(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.cloneType).toSeq)).toSeq))
503  val s1_immInfo = Reg(MixedVec(toExu.map(x => MixedVec(x.map(x => new ImmInfo).toSeq)).toSeq))
504  s1_immInfo.zip(fromIQ).map { case (s1Vec, s0Vec) =>
505    s1Vec.zip(s0Vec).map { case (s1, s0) =>
506      s1.imm := Mux(s0.valid, s0.bits.common.imm, s1.imm)
507      s1.immType := Mux(s0.valid, s0.bits.immType, s1.immType)
508    }
509  }
510  io.og1ImmInfo.zip(s1_immInfo.flatten).map{ case(out, reg) =>
511    out := reg
512  }
513  val s1_toExuReady = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.ready.cloneType).toSeq))))
514  val s1_srcType: MixedVec[MixedVec[Vec[UInt]]] = MixedVecInit(fromIQ.map(x => MixedVecInit(x.map(xx => RegEnable(xx.bits.srcType, xx.fire)).toSeq)))
515
516  val s1_intPregRData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType).toSeq))))
517  val s1_fpPregRData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType).toSeq))))
518  val s1_vfPregRData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType).toSeq))))
519  val s1_v0PregRData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType).toSeq))))
520  val s1_vlPregRData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType).toSeq))))
521
  // Static read-port configuration per (IQ, exu, src): which regfile port(s)
  // each source reads from.
  val rfrPortConfigs = schdParams.map(_.issueBlockParams).flatten.map(_.exuBlockParams.map(_.rfrPortConfigs))

  // For each regfile type: default every s1 source to 0, then connect sources
  // whose config contains a read-port of that type to the matching rdata port.
  println(s"[DataPath] s1_intPregRData.flatten.flatten.size: ${s1_intPregRData.flatten.flatten.size}, intRfRdata.size: ${intRfRdata.size}")
  s1_intPregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
  s1_intPregRData.zip(rfrPortConfigs).foreach { case (iqRdata, iqCfg) =>
      iqRdata.zip(iqCfg).foreach { case (iuRdata, iuCfg) =>
        iuRdata.zip(iuCfg)
          .filter { case (_, cfg) => cfg.count(_.isInstanceOf[IntRD]) > 0 }
          .foreach { case (sink, cfg) => sink := intRfRdata(cfg.find(_.isInstanceOf[IntRD]).get.port) }
      }
  }

  println(s"[DataPath] s1_fpPregRData.flatten.flatten.size: ${s1_fpPregRData.flatten.flatten.size}, fpRfRdata.size: ${fpRfRdata.size}")
  s1_fpPregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
  s1_fpPregRData.zip(rfrPortConfigs).foreach { case (iqRdata, iqCfg) =>
      iqRdata.zip(iqCfg).foreach { case (iuRdata, iuCfg) =>
        iuRdata.zip(iuCfg)
          .filter { case (_, cfg) => cfg.count(_.isInstanceOf[FpRD]) > 0 }
          .foreach { case (sink, cfg) => sink := fpRfRdata(cfg.find(_.isInstanceOf[FpRD]).get.port) }
      }
  }

  println(s"[DataPath] s1_vfPregRData.flatten.flatten.size: ${s1_vfPregRData.flatten.flatten.size}, vfRfRdata.size: ${vfRfRdata.size}")
  s1_vfPregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
  s1_vfPregRData.zip(rfrPortConfigs).foreach{ case(iqRdata, iqCfg) =>
      iqRdata.zip(iqCfg).foreach{ case(iuRdata, iuCfg) =>
        iuRdata.zip(iuCfg)
          .filter { case (_, cfg) => cfg.count(_.isInstanceOf[VfRD]) > 0 }
          .foreach { case (sink, cfg) => sink := vfRfRdata(cfg.find(_.isInstanceOf[VfRD]).get.port) }
      }
  }

  println(s"[DataPath] s1_v0PregRData.flatten.flatten.size: ${s1_v0PregRData.flatten.flatten.size}, v0RfRdata.size: ${v0RfRdata.size}")
  s1_v0PregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
  s1_v0PregRData.zip(rfrPortConfigs).foreach{ case(iqRdata, iqCfg) =>
      iqRdata.zip(iqCfg).foreach{ case(iuRdata, iuCfg) =>
        iuRdata.zip(iuCfg)
          .filter { case (_, cfg) => cfg.count(_.isInstanceOf[V0RD]) > 0 }
          .foreach { case (sink, cfg) => sink := v0RfRdata(cfg.find(_.isInstanceOf[V0RD]).get.port) }
      }
  }

  println(s"[DataPath] s1_vlPregRData.flatten.flatten.size: ${s1_vlPregRData.flatten.flatten.size}, vlRfRdata.size: ${vlRfRdata.size}")
  s1_vlPregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
  s1_vlPregRData.zip(rfrPortConfigs).foreach{ case(iqRdata, iqCfg) =>
      iqRdata.zip(iqCfg).foreach{ case(iuRdata, iuCfg) =>
        iuRdata.zip(iuCfg)
          .filter { case (_, cfg) => cfg.count(_.isInstanceOf[VlRD]) > 0 }
          .foreach { case (sink, cfg) => sink := vlRfRdata(cfg.find(_.isInstanceOf[VlRD]).get.port) }
      }
  }
573
  // --- OG0 cancel tracking for 0-latency wakeup sources (loads excluded) ---
  // og0_cancel_no_load: OG0-failure bits of every non-load EXU, in allExuParams order.
  val og0_cancel_no_load = VecInit(og0FailedVec2.flatten.zip(params.allExuParams).filter(!_._2.hasLoadFu).map(_._1).toSeq)
  val exuParamsNoLoad = fromIQ.flatten.zip(params.allExuParams).filter(!_._2.hasLoadFu)
  // is_0latency(k): the uop currently issuing from non-load port k uses a 0-cycle-latency FU.
  val is_0latency = Wire(Vec(og0_cancel_no_load.size, Bool()))
  is_0latency := exuParamsNoLoad.map(x => is0latency(x._1.bits.common.fuType))
  // Register by one cycle: a 0-latency uop that failed OG0 must cancel any consumer
  // that speculatively woke up on its wakeup broadcast in the previous cycle.
  val og0_cancel_delay = RegNext(VecInit(og0_cancel_no_load.zip(is_0latency).map(x => x._1 && x._2)))
  for (i <- fromIQ.indices) {
    for (j <- fromIQ(i).indices) {
      // IQ(s0) --[Ctrl]--> s1Reg ---------- begin
      // refs
      val s1_valid = s1_toExuValid(i)(j)
      val s1_ready = s1_toExuReady(i)(j)
      val s1_data = s1_toExuData(i)(j)
      val s1_addrOH = s1_addrOHs(i)(j)
      val s0 = fromIQ(i)(j) // s0
      PerfCCT.updateInstPos(s0.bits.common.debug_seqNum, PerfCCT.InstPos.AtIssueArb.id.U, s0.valid, clock, reset)
      PerfCCT.updateInstPos(s1_data.debug_seqNum, PerfCCT.InstPos.AtIssueReadReg.id.U, s1_valid, clock, reset)

      // A source does not block issue if it needs no regfile read, or if it won
      // arbitration on every regfile type (int/fp/vf/v0/vl) it could read from.
      val srcNotBlock = Wire(Bool())
      srcNotBlock := s0.bits.common.dataSources.zip(intRdArbWinner(i)(j) zip fpRdArbWinner(i)(j) zip vfRdArbWinner(i)(j) zip v0RdArbWinner(i)(j) zip vlRdArbWinner(i)(j)).map {
        case (source, ((((win_int, win_fp), win_vf), win_v0), win_vl)) =>
        !source.readReg || win_int && win_fp && win_vf && win_v0 && win_vl
      }.fold(true.B)(_ && _)
      // Issue additionally requires clearing every writeback-port conflict check.
      val notBlock = srcNotBlock && intWbNotBlock(i)(j) && fpWbNotBlock(i)(j) && vfWbNotBlock(i)(j) && v0WbNotBlock(i)(j) && vlWbNotBlock(i)(j)
      // Flush against both the current redirect and the registered previous one.
      val s1_flush = s0.bits.common.robIdx.needFlush(Seq(io.flush, RegNextWithEnable(io.flush)))
      val s1_cancel = og1FailedVec2(i)(j)
      val s0_cancel = Wire(Bool())
      if (s0.bits.exuParams.isIQWakeUpSink) {
        // Cancel s0 when any producer it reads via forwarding was a 0-latency uop
        // that failed OG0 last cycle (its wakeup was speculative and is now void).
        val exuOHNoLoad = s0.bits.common.exuSources.get.map(x => x.toExuOH(s0.bits.exuParams).zip(params.allExuParams).filter(!_._2.hasLoadFu).map(_._1))
        s0_cancel := exuOHNoLoad.zip(s0.bits.common.dataSources).map{
          case (exuOH, dataSource) => (VecInit(exuOH).asUInt & og0_cancel_delay.asUInt).orR && dataSource.readForward
        }.reduce(_ || _) && s0.valid
      } else s0_cancel := false.B
      // Load-dependency kill from the memory pipeline.
      val s0_ldCancel = LoadShouldCancel(s0.bits.common.loadDependency, io.ldCancel)
      // s1 holds a valid uop only when s0 fired this cycle and was neither
      // flushed nor killed by a load cancel.
      when (s0.fire && !s1_flush && !s0_ldCancel) {
        s1_valid := true.B
      }.otherwise {
        s1_valid := false.B
      }
      // Payload is captured on s0.valid (not fire): harmless when s1_valid stays
      // low, and avoids the ready term in the register enable.
      when (s0.valid) {
        s1_data.fromIssueBundle(s0.bits) // no src data here
        s1_addrOH := s0.bits.addrOH
      }
      s0.ready := notBlock && !s0_cancel
      // IQ(s0) --[Ctrl]--> s1Reg ---------- end
    }
  }
620
621  private val fromIQFire = fromIQ.map(_.map(_.fire))
622  private val toExuFire = toExu.map(_.map(_.fire))
623  toIQs.zipWithIndex.foreach {
624    case(toIQ, iqIdx) =>
625      toIQ.zipWithIndex.foreach {
626        case (toIU, iuIdx) =>
627          // IU: issue unit
628          val og0resp = toIU.og0resp
629          og0FailedVec2(iqIdx)(iuIdx)   := fromIQ(iqIdx)(iuIdx).valid && !fromIQ(iqIdx)(iuIdx).ready
630          og0resp.valid                 := og0FailedVec2(iqIdx)(iuIdx)
631          og0resp.bits.robIdx           := fromIQ(iqIdx)(iuIdx).bits.common.robIdx
632          og0resp.bits.uopIdx.foreach(_ := fromIQ(iqIdx)(iuIdx).bits.common.vpu.get.vuopIdx)
633          og0resp.bits.sqIdx.foreach(_ := 0.U.asTypeOf(new SqPtr))
634          og0resp.bits.lqIdx.foreach(_ := 0.U.asTypeOf(new LqPtr))
635          og0resp.bits.resp             := RespType.block
636          og0resp.bits.fuType           := fromIQ(iqIdx)(iuIdx).bits.common.fuType
637
638          val og1resp = toIU.og1resp
639          og1FailedVec2(iqIdx)(iuIdx)   := s1_toExuValid(iqIdx)(iuIdx) && !s1_toExuReady(iqIdx)(iuIdx)
640          og1resp.valid                 := s1_toExuValid(iqIdx)(iuIdx)
641          og1resp.bits.robIdx           := s1_toExuData(iqIdx)(iuIdx).robIdx
642          og1resp.bits.uopIdx.foreach(_ := s1_toExuData(iqIdx)(iuIdx).vpu.get.vuopIdx)
643          og1resp.bits.sqIdx.foreach(_ :=  0.U.asTypeOf(new SqPtr))
644          og1resp.bits.lqIdx.foreach(_ :=  0.U.asTypeOf(new LqPtr))
645          // respType:  success    -> IQ entry clear
646          //            uncertain  -> IQ entry no action
647          //            block      -> IQ entry issued set false, then re-issue
648          // hyu, lda and sta are uncertain at OG1 stage
649          // and all vector arith exu should check success in og2 stage
650          og1resp.bits.resp             := Mux(og1FailedVec2(iqIdx)(iuIdx),
651            RespType.block,
652            if (toIU.issueQueueParams match { case x => x.isLdAddrIQ || x.isStAddrIQ || x.isHyAddrIQ || x.isVecLduIQ || x.isVecStuIQ || x.inVfSchd})
653              RespType.uncertain
654            else
655              RespType.success,
656          )
657          og1resp.bits.fuType           := s1_toExuData(iqIdx)(iuIdx).fuType
658      }
659  }
660
661  io.og0Cancel := og0FailedVec2.flatten.zip(params.allExuParams).map{ case (cancel, params) =>
662                    if (params.isIQWakeUpSource && params.latencyCertain && params.wakeUpFuLatancySet.contains(0)) cancel else false.B
663                  }.toSeq
664  io.og1Cancel := toFlattenExu.map(x => x.valid && !x.fire)
665
666
667  if (backendParams.debugEn){
668    dontTouch(og0_cancel_no_load)
669    dontTouch(is_0latency)
670    dontTouch(og0_cancel_delay)
671  }
  // s1Reg --> EXU: forward the s1 control/payload registers to each EXU input
  // and mux the regfile read data into the source operand fields.
  for (i <- toExu.indices) {
    for (j <- toExu(i).indices) {
      // s1Reg --[Ctrl]--> exu(s1) ---------- begin
      // refs
      val sinkData = toExu(i)(j).bits
      // assign
      toExu(i)(j).valid := s1_toExuValid(i)(j)
      s1_toExuReady(i)(j) := toExu(i)(j).ready
      sinkData := s1_toExuData(i)(j)
      // s1Reg --[Ctrl]--> exu(s1) ---------- end

      // s1Reg --[Data]--> exu(s1) ---------- begin
      // data source1: preg read data
      for (k <- sinkData.src.indices) {
        val srcDataTypeSet: Set[DataConfig] = sinkData.params.getSrcDataType(k)
        // Build the (select, data) pairs for a Mux1H over the regfiles this
        // source slot can read. Per the port layout: slot 3 can only be the v0
        // mask operand, slot 4 only vl; other slots may come from the int,
        // vector, or fp regfiles depending on the EXU's declared source types.
        // OptionWrapper yields None (elaboration-time) for entries this slot's
        // read ports cannot provide, which are filtered out below.
        val readRfMap: Seq[(Bool, UInt)] = (
          if (k == 3) {(
            Seq(None)
            :+
            OptionWrapper(s1_v0PregRData(i)(j).isDefinedAt(k) && srcDataTypeSet.intersect(V0RegSrcDataSet).nonEmpty,
              (SrcType.isV0(s1_srcType(i)(j)(k)) -> s1_v0PregRData(i)(j)(k)))
          )}
          else if (k == 4) {(
            Seq(None)
            :+
            OptionWrapper(s1_vlPregRData(i)(j).isDefinedAt(k) && srcDataTypeSet.intersect(VlRegSrcDataSet).nonEmpty,
              (SrcType.isVp(s1_srcType(i)(j)(k)) -> s1_vlPregRData(i)(j)(k)))
          )}
          else {(
            Seq(None)
            :+
            OptionWrapper(s1_intPregRData(i)(j).isDefinedAt(k) && srcDataTypeSet.intersect(IntRegSrcDataSet).nonEmpty,
              (SrcType.isXp(s1_srcType(i)(j)(k)) -> s1_intPregRData(i)(j)(k)))
            :+
            OptionWrapper(s1_vfPregRData(i)(j).isDefinedAt(k) && srcDataTypeSet.intersect(VecRegSrcDataSet).nonEmpty,
              (SrcType.isVp(s1_srcType(i)(j)(k)) -> s1_vfPregRData(i)(j)(k)))
            :+
            OptionWrapper(s1_fpPregRData(i)(j).isDefinedAt(k) && srcDataTypeSet.intersect(FpRegSrcDataSet).nonEmpty,
              (SrcType.isFp(s1_srcType(i)(j)(k)) -> s1_fpPregRData(i)(j)(k)))
          )}
        ).filter(_.nonEmpty).map(_.get)

        // Slots with no possible regfile source keep the value already assigned
        // from s1_toExuData above (e.g. immediates filled elsewhere).
        if (readRfMap.nonEmpty)
          sinkData.src(k) := Mux1H(readRfMap)
      }
      // Jump/load units receive the fetched PC from the shared PC read ports;
      // the port index is looked up by matching this EXU's params.
      if (sinkData.params.hasJmpFu || sinkData.params.hasLoadFu) {
        val index = pcReadFtqPtrFormIQ.map(_.bits.exuParams).indexOf(sinkData.params)
        sinkData.pc.get := pcRdata(index)
      }
      // Branch-resolving units additionally receive the predicted target.
      if (sinkData.params.needTarget) {
        val index = pcReadFtqPtrFormIQ.map(_.bits.exuParams).indexOf(sinkData.params)
        sinkData.predictInfo.get.target := targetPCRdata(index)
      }
    }
  }
727
728  if (env.AlwaysBasicDiff || env.EnableDifftest) {
729    val delayedCnt = 2
730    val difftestArchIntRegState = DifftestModule(new DiffArchIntRegState, delay = delayedCnt)
731    difftestArchIntRegState.coreid := io.hartId
732    difftestArchIntRegState.value := intDiffRead.get._2
733
734    val difftestArchFpRegState = DifftestModule(new DiffArchFpRegState, delay = delayedCnt)
735    difftestArchFpRegState.coreid := io.hartId
736    difftestArchFpRegState.value := fpDiffReadData.get
737
738    val difftestArchVecRegState = DifftestModule(new DiffArchVecRegState, delay = delayedCnt)
739    difftestArchVecRegState.coreid := io.hartId
740    difftestArchVecRegState.value := vecDiffReadData.get
741  }
742
  // Perf-model of a hypothetical 48-entry integer register cache: a circular
  // buffer of the most recently written preg tags. This is not real datapath
  // state — it only feeds the IntRegCache*Hit counters below.
  val int_regcache_size = 48
  val int_regcache_tag = RegInit(VecInit(Seq.fill(int_regcache_size)(0.U(intSchdParams.pregIdxWidth.W))))
  val int_regcache_enqPtr = RegInit(0.U(log2Up(int_regcache_size).W))
  // Advance the enqueue pointer by the number of regfile writes this cycle.
  int_regcache_enqPtr := int_regcache_enqPtr + PopCount(intRfWen)
  for (i <- intRfWen.indices) {
    when (intRfWen(i)) {
      // PopCount of the lower wen bits packs same-cycle writes into consecutive slots.
      int_regcache_tag(int_regcache_enqPtr + PopCount(intRfWen.take(i))) := intRfWaddr(i)
    }
  }
752
753  val vf_regcache_size = 48
754  val vf_regcache_tag = RegInit(VecInit(Seq.fill(vf_regcache_size)(0.U(vfSchdParams.pregIdxWidth.W))))
755  val vf_regcache_enqPtr = RegInit(0.U(log2Up(vf_regcache_size).W))
756  vf_regcache_enqPtr := vf_regcache_enqPtr + PopCount(vfRfWen.head)
757  for (i <- vfRfWen.indices) {
758    when (vfRfWen.head(i)) {
759      vf_regcache_tag(vf_regcache_enqPtr + PopCount(vfRfWen.head.take(i))) := vfRfWaddr(i)
760    }
761  }
762
  // Read-data return path for the vector-exception merge module. The regfile
  // read was launched from fromVecExcp.r one cycle earlier, so valid/select are
  // the registered versions of the request. NOTE: v0RdPortsIter.next() is an
  // elaboration-time side effect — exactly one V0 read port is consumed per
  // merge group (every maxMergeNumPerCycle-th slot); the other slots mux in 0.U
  // and are never selected for v0 data.
  v0RdPortsIter = vecExcpUseV0RdPorts.iterator
  for (i <- toVecExcp.rdata.indices) {
    toVecExcp.rdata(i).valid := RegNext(fromVecExcp.r(i).valid)
    toVecExcp.rdata(i).bits := Mux(
      // registered !isV0 selects vf-regfile data; otherwise the v0 port (or 0)
      RegEnable(!fromVecExcp.r(i).bits.isV0, fromVecExcp.r(i).valid),
      vfRfRdata(vecExcpUseVecRdPorts(i)),
      if (i % maxMergeNumPerCycle == 0) v0RfRdata(v0RdPortsIter.next()) else 0.U,
    )
  }
772
773  XSPerfHistogram(s"IntRegFileRead_hist", PopCount(intRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 20, 1)
774  XSPerfHistogram(s"FpRegFileRead_hist", PopCount(fpRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 20, 1)
775  XSPerfHistogram(s"VfRegFileRead_hist", PopCount(vfRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 20, 1)
776  XSPerfHistogram(s"IntRegFileWrite_hist", PopCount(intRFWriteReq.flatten), true.B, 0, 20, 1)
777  XSPerfHistogram(s"FpRegFileWrite_hist", PopCount(fpRFWriteReq.flatten), true.B, 0, 20, 1)
778  XSPerfHistogram(s"VfRegFileWrite_hist", PopCount(vfRFWriteReq.flatten), true.B, 0, 20, 1)
779
780  val int_regcache_part32 = (1 until 33).map(i => int_regcache_tag(int_regcache_enqPtr - i.U))
781  val int_regcache_part24 = (1 until 24).map(i => int_regcache_tag(int_regcache_enqPtr - i.U))
782  val int_regcache_part16 = (1 until 17).map(i => int_regcache_tag(int_regcache_enqPtr - i.U))
783  val int_regcache_part8 = (1 until 9).map(i => int_regcache_tag(int_regcache_enqPtr - i.U))
784
785  val int_regcache_48_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_tag.map(_ === x.bits.addr).reduce(_ || _))
786  val int_regcache_8_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_part8.map(_ === x.bits.addr).reduce(_ || _))
787  val int_regcache_16_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_part16.map(_ === x.bits.addr).reduce(_ || _))
788  val int_regcache_24_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_part24.map(_ === x.bits.addr).reduce(_ || _))
789  val int_regcache_32_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_part32.map(_ === x.bits.addr).reduce(_ || _))
790  XSPerfAccumulate("IntRegCache48Hit", PopCount(int_regcache_48_hit_vec))
791  XSPerfAccumulate("IntRegCache8Hit", PopCount(int_regcache_8_hit_vec))
792  XSPerfAccumulate("IntRegCache16Hit", PopCount(int_regcache_16_hit_vec))
793  XSPerfAccumulate("IntRegCache24Hit", PopCount(int_regcache_24_hit_vec))
794  XSPerfAccumulate("IntRegCache32Hit", PopCount(int_regcache_32_hit_vec))
795  XSPerfHistogram("IntRegCache48Hit_hist", PopCount(int_regcache_48_hit_vec), true.B, 0, 16, 2)
796
797  XSPerfAccumulate(s"IntRFReadBeforeArb", PopCount(intRFReadArbiter.io.in.flatten.flatten.map(_.valid)))
798  XSPerfAccumulate(s"IntRFReadAfterArb", PopCount(intRFReadArbiter.io.out.map(_.valid)))
799  XSPerfAccumulate(s"FpRFReadBeforeArb", PopCount(fpRFReadArbiter.io.in.flatten.flatten.map(_.valid)))
800  XSPerfAccumulate(s"FpRFReadAfterArb", PopCount(fpRFReadArbiter.io.out.map(_.valid)))
801  XSPerfAccumulate(s"VfRFReadBeforeArb", PopCount(vfRFReadArbiter.io.in.flatten.flatten.map(_.valid)))
802  XSPerfAccumulate(s"VfRFReadAfterArb", PopCount(vfRFReadArbiter.io.out.map(_.valid)))
803  XSPerfAccumulate(s"IntUopBeforeArb", PopCount(fromIntIQ.flatten.map(_.valid)))
804  XSPerfAccumulate(s"IntUopAfterArb", PopCount(fromIntIQ.flatten.map(_.fire)))
805  XSPerfAccumulate(s"MemUopBeforeArb", PopCount(fromMemIQ.flatten.map(_.valid)))
806  XSPerfAccumulate(s"MemUopAfterArb", PopCount(fromMemIQ.flatten.map(_.fire)))
807  XSPerfAccumulate(s"VfUopBeforeArb", PopCount(fromVfIQ.flatten.map(_.valid)))
808  XSPerfAccumulate(s"VfUopAfterArb", PopCount(fromVfIQ.flatten.map(_.fire)))
809
810  XSPerfHistogram(s"IntRFReadBeforeArb_hist", PopCount(intRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 16, 2)
811  XSPerfHistogram(s"IntRFReadAfterArb_hist", PopCount(intRFReadArbiter.io.out.map(_.valid)), true.B, 0, 16, 2)
812  XSPerfHistogram(s"FpRFReadBeforeArb_hist", PopCount(fpRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 16, 2)
813  XSPerfHistogram(s"FpRFReadAfterArb_hist", PopCount(fpRFReadArbiter.io.out.map(_.valid)), true.B, 0, 16, 2)
814  XSPerfHistogram(s"VfRFReadBeforeArb_hist", PopCount(vfRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 16, 2)
815  XSPerfHistogram(s"VfRFReadAfterArb_hist", PopCount(vfRFReadArbiter.io.out.map(_.valid)), true.B, 0, 16, 2)
816  XSPerfHistogram(s"IntUopBeforeArb_hist", PopCount(fromIntIQ.flatten.map(_.valid)), true.B, 0, 8, 2)
817  XSPerfHistogram(s"IntUopAfterArb_hist", PopCount(fromIntIQ.flatten.map(_.fire)), true.B, 0, 8, 2)
818  XSPerfHistogram(s"MemUopBeforeArb_hist", PopCount(fromMemIQ.flatten.map(_.valid)), true.B, 0, 8, 2)
819  XSPerfHistogram(s"MemUopAfterArb_hist", PopCount(fromMemIQ.flatten.map(_.fire)), true.B, 0, 8, 2)
820  XSPerfHistogram(s"VfUopBeforeArb_hist", PopCount(fromVfIQ.flatten.map(_.valid)), true.B, 0, 8, 2)
821  XSPerfHistogram(s"VfUopAfterArb_hist", PopCount(fromVfIQ.flatten.map(_.fire)), true.B, 0, 8, 2)
822
823  // datasource perf counter (after arbiter)
824  fromIQ.foreach(iq => iq.foreach{exu =>
825    val exuParams = exu.bits.exuParams
826    if (exuParams.isIntExeUnit) {
827      for (i <- 0 until 2) {
828        XSPerfAccumulate(s"INT_ExuId${exuParams.exuIdx}_src${i}_dataSource_forward",  exu.fire && exu.bits.common.dataSources(i).readForward)
829        XSPerfAccumulate(s"INT_ExuId${exuParams.exuIdx}_src${i}_dataSource_bypass",   exu.fire && exu.bits.common.dataSources(i).readBypass)
830        XSPerfAccumulate(s"INT_ExuId${exuParams.exuIdx}_src${i}_dataSource_regcache", exu.fire && exu.bits.common.dataSources(i).readRegCache)
831        XSPerfAccumulate(s"INT_ExuId${exuParams.exuIdx}_src${i}_dataSource_reg",      exu.fire && exu.bits.common.dataSources(i).readReg)
832        XSPerfAccumulate(s"INT_ExuId${exuParams.exuIdx}_src${i}_dataSource_zero",     exu.fire && exu.bits.common.dataSources(i).readZero)
833      }
834    }
835    if (exuParams.isMemExeUnit && exuParams.readIntRf) {
836      XSPerfAccumulate(s"MEM_ExuId${exuParams.exuIdx}_src0_dataSource_forward",  exu.fire && exu.bits.common.dataSources(0).readForward)
837      XSPerfAccumulate(s"MEM_ExuId${exuParams.exuIdx}_src0_dataSource_bypass",   exu.fire && exu.bits.common.dataSources(0).readBypass)
838      XSPerfAccumulate(s"MEM_ExuId${exuParams.exuIdx}_src0_dataSource_regcache", exu.fire && exu.bits.common.dataSources(0).readRegCache)
839      XSPerfAccumulate(s"MEM_ExuId${exuParams.exuIdx}_src0_dataSource_reg",      exu.fire && exu.bits.common.dataSources(0).readReg)
840      XSPerfAccumulate(s"MEM_ExuId${exuParams.exuIdx}_src0_dataSource_zero",     exu.fire && exu.bits.common.dataSources(0).readZero)
841    }
842  })
843
  // Top-Down
  // Issue-side top-down (TMA-style) analysis: classify stall cycles by whether
  // the memory subsystem is the bottleneck and, if so, at which cache level.
  def FewUops = 4

  val lqEmpty = io.topDownInfo.lqEmpty
  val sqEmpty = io.topDownInfo.sqEmpty
  val l1Miss = io.topDownInfo.l1Miss
  val l2Miss = io.topDownInfo.l2TopMiss.l2Miss
  val l3Miss = io.topDownInfo.l2TopMiss.l3Miss

  // uopsIssued: at least one uop left any issue queue this cycle.
  val uopsIssued = fromIQ.flatten.map(_.fire).reduce(_ || _)
  val uopsIssuedCnt = PopCount(fromIQ.flatten.map(_.fire))
  // fewUopsIssued: fewer than FewUops (i.e. 0..3) uops issued — counted as an
  // execution stall cycle.
  val fewUopsIssued = (0 until FewUops).map(_.U === uopsIssuedCnt).reduce(_ || _)

  // No uop issued at all: candidate load-bound stall.
  val stallLoad = !uopsIssued

  // No store-address or store-data uop fired from the memory IQs this cycle.
  val noStoreIssued = !fromMemIQ.flatten.filter(memIq => memIq.bits.exuParams.fuConfigs.contains(FuConfig.StaCfg) ||
                                                         memIq.bits.exuParams.fuConfigs.contains(FuConfig.StdCfg)
  ).map(_.fire).reduce(_ || _)
  val stallStore = uopsIssued && noStoreIssued

  // Delayed 2 cycles, presumably to align with the cache-miss status signals
  // sampled below — TODO confirm the latency match.
  val stallLoadReg = DelayN(stallLoad, 2)
  val stallStoreReg = DelayN(stallStore, 2)

  // Attribute the stall to memory only while the corresponding queue is
  // non-empty, then refine by miss level (L1 -> L2 -> L3 chain).
  val memStallAnyLoad = stallLoadReg && !lqEmpty
  val memStallStore = stallStoreReg && !sqEmpty
  val memStallL1Miss = memStallAnyLoad && l1Miss
  val memStallL2Miss = memStallL1Miss && l2Miss
  val memStallL3Miss = memStallL2Miss && l3Miss

  io.topDownInfo.noUopsIssued := stallLoad

  XSPerfAccumulate("exec_stall_cycle",   fewUopsIssued)
  XSPerfAccumulate("mem_stall_store",    memStallStore)
  XSPerfAccumulate("mem_stall_l1miss",   memStallL1Miss)
  XSPerfAccumulate("mem_stall_l2miss",   memStallL2Miss)
  XSPerfAccumulate("mem_stall_l3miss",   memStallL3Miss)

  // Events exported to the HPM counters; order defines the event indices.
  val perfEvents = Seq(
    ("EXEC_STALL_CYCLE",  fewUopsIssued),
    ("MEMSTALL_STORE",    memStallStore),
    ("MEMSTALL_L1MISS",   memStallL1Miss),
    ("MEMSTALL_L2MISS",   memStallL2Miss),
    ("MEMSTALL_L3MISS",   memStallL3Miss),
  )
  generatePerfEvent()
889}
890
/** IO bundle of the DataPath module: issue-queue handshakes in, EXU inputs out,
  * plus regfile writeback ports, cancel/redirect control, and debug taps.
  * NOTE: field declaration order is hardware-significant for a Bundle and is
  * preserved exactly.
  */
class DataPathIO()(implicit p: Parameters, params: BackendParams) extends XSBundle {
  // params
  private val intSchdParams = params.schdParams(IntScheduler())
  private val fpSchdParams = params.schdParams(FpScheduler())
  private val vfSchdParams = params.schdParams(VfScheduler())
  private val memSchdParams = params.schdParams(MemScheduler())
  // bundles
  val hartId = Input(UInt(8.W))

  // Redirect/flush broadcast from the backend.
  val flush: ValidIO[Redirect] = Flipped(ValidIO(new Redirect))

  val wbConfictRead = Input(MixedVec(params.allSchdParams.map(x => MixedVec(x.issueBlockParams.map(x => x.genWbConflictBundle())))))

  // Issue requests from each scheduler domain (decoupled: DataPath may stall them).
  val fromIntIQ: MixedVec[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] =
    Flipped(MixedVec(intSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle)))

  val fromFpIQ: MixedVec[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] =
    Flipped(MixedVec(fpSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle)))

  val fromMemIQ: MixedVec[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] =
    Flipped(MixedVec(memSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle)))

  val fromVfIQ = Flipped(MixedVec(vfSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle)))

  // Regfile read requests from the vector-exception merge module.
  val fromVecExcpMod = Input(new ExcpModToVprf(maxMergeNumPerCycle * 2, maxMergeNumPerCycle))

  // OG0/OG1 responses back to the issue queues (success / uncertain / block).
  val toIntIQ = MixedVec(intSchdParams.issueBlockParams.map(_.genOGRespBundle))

  val toFpIQ = MixedVec(fpSchdParams.issueBlockParams.map(_.genOGRespBundle))

  val toMemIQ = MixedVec(memSchdParams.issueBlockParams.map(_.genOGRespBundle))

  val toVfIQ = MixedVec(vfSchdParams.issueBlockParams.map(_.genOGRespBundle))

  // Read data returned to the vector-exception merge module.
  val toVecExcpMod = Output(new VprfToExcpMod(maxMergeNumPerCycle * 2))

  // Per-EXU cancel vectors for speculative wakeups that failed OG0/OG1.
  val og0Cancel = Output(ExuVec())

  val og1Cancel = Output(ExuVec())

  // Load-dependency cancel signals from the load/hybrid units.
  val ldCancel = Vec(backendParams.LduCnt + backendParams.HyuCnt, Flipped(new LoadCancelIO))

  // Operand-complete uop inputs to the execution units, per scheduler domain.
  val toIntExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = intSchdParams.genExuInputBundle

  val toFpExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = MixedVec(fpSchdParams.genExuInputBundle)

  val toVecExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = MixedVec(vfSchdParams.genExuInputBundle)

  val toMemExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = memSchdParams.genExuInputBundle

  // Immediate info published at OG1, one entry per EXU.
  val og1ImmInfo: Vec[ImmInfo] = Output(Vec(params.allExuParams.size, new ImmInfo))

  // Writeback ports into the int/fp/vf/v0/vl regfiles.
  val fromIntWb: MixedVec[RfWritePortWithConfig] = MixedVec(params.genIntWriteBackBundle)

  val fromFpWb: MixedVec[RfWritePortWithConfig] = MixedVec(params.genFpWriteBackBundle)

  val fromVfWb: MixedVec[RfWritePortWithConfig] = MixedVec(params.genVfWriteBackBundle)

  val fromV0Wb: MixedVec[RfWritePortWithConfig] = MixedVec(params.genV0WriteBackBundle)

  val fromVlWb: MixedVec[RfWritePortWithConfig] = MixedVec(params.genVlWriteBackBundle)

  // PC / target read ports backed by the FTQ target memory.
  val fromPcTargetMem = Flipped(new PcToDataPathIO(params))

  // Register-cache write ports driven by the bypass network.
  val fromBypassNetwork: Vec[RCWritePort] = Vec(params.getIntExuRCWriteSize + params.getMemExuRCWriteSize,
    new RCWritePort(params.intSchdParams.get.rfDataWidth, RegCacheIdxWidth, params.intSchdParams.get.pregIdxWidth, params.debugEn)
  )

  // Register-cache read data forwarded to the bypass network, per EXU source.
  val toBypassNetworkRCData: MixedVec[MixedVec[Vec[UInt]]] = MixedVec(
    Seq(intSchdParams, fpSchdParams, vfSchdParams, memSchdParams).map(schd => schd.issueBlockParams.map(iq =>
      MixedVec(iq.exuBlockParams.map(exu => Output(Vec(exu.numRegSrc, UInt(exu.srcDataBitsMax.W)))))
    )).flatten
  )

  val toWakeupQueueRCIdx: Vec[UInt] = Vec(params.getIntExuRCWriteSize + params.getMemExuRCWriteSize,
    Output(UInt(RegCacheIdxWidth.W))
  )

  // Debug-only architectural RAT snapshots and vl value for difftest.
  val diffIntRat = if (params.basicDebugEn) Some(Input(Vec(32, UInt(intSchdParams.pregIdxWidth.W)))) else None
  val diffFpRat  = if (params.basicDebugEn) Some(Input(Vec(32, UInt(fpSchdParams.pregIdxWidth.W)))) else None
  val diffVecRat = if (params.basicDebugEn) Some(Input(Vec(31, UInt(vfSchdParams.pregIdxWidth.W)))) else None
  val diffV0Rat  = if (params.basicDebugEn) Some(Input(Vec(1, UInt(log2Up(V0PhyRegs).W)))) else None
  val diffVlRat  = if (params.basicDebugEn) Some(Input(Vec(1, UInt(log2Up(VlPhyRegs).W)))) else None
  val diffVl     = if (params.basicDebugEn) Some(Output(UInt(VlData().dataWidth.W))) else None

  // Top-down stall classification interface (inputs from mem, one output).
  val topDownInfo = new TopDownInfo
}
978