xref: /XiangShan/src/main/scala/xiangshan/backend/datapath/DataPath.scala (revision ff3fcdf11874ffacafd64ec81fd1c4893f58150b)
package xiangshan.backend.datapath

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import difftest.{DiffArchFpRegState, DiffArchIntRegState, DiffArchVecRegState, DifftestModule}
import freechips.rocketchip.diplomacy.{LazyModule, LazyModuleImp}
import utility._
import utils.SeqUtils._
import utils.{XSPerfAccumulate, XSPerfHistogram}
import xiangshan._
import xiangshan.backend.BackendParams
import xiangshan.backend.Bundles._
import xiangshan.backend.decode.ImmUnion
import xiangshan.backend.datapath.DataConfig._
import xiangshan.backend.datapath.RdConfig._
import xiangshan.backend.issue.{ImmExtractor, IntScheduler, MemScheduler, VfScheduler}
import xiangshan.backend.issue.EntryBundles._
import xiangshan.backend.regfile._
import xiangshan.backend.PcToDataPathIO

class DataPath(params: BackendParams)(implicit p: Parameters) extends LazyModule {
  override def shouldBeInlined: Boolean = false

  private implicit val dpParams: BackendParams = params
  lazy val module = new DataPathImp(this)

  println(s"[DataPath] Preg Params: ")
  println(s"[DataPath]   Int R(${params.getRfReadSize(IntData())}), W(${params.getRfWriteSize(IntData())}) ")
  println(s"[DataPath]   Vf R(${params.getRfReadSize(VecData())}), W(${params.getRfWriteSize(VecData())}) ")
}

class DataPathImp(override val wrapper: DataPath)(implicit p: Parameters, params: BackendParams)
  extends LazyModuleImp(wrapper) with HasXSParameter {

  private val VCONFIG_PORT = params.vconfigPort
  private val VLD_PORT = params.vldPort

  val io = IO(new DataPathIO())

  private val (fromIntIQ, toIntIQ, toIntExu) = (io.fromIntIQ, io.toIntIQ, io.toIntExu)
  private val (fromMemIQ, toMemIQ, toMemExu) = (io.fromMemIQ, io.toMemIQ, io.toMemExu)
  private val (fromVfIQ , toVfIQ , toVfExu ) = (io.fromVfIQ , io.toVfIQ , io.toFpExu)

  println(s"[DataPath] IntIQ(${fromIntIQ.size}), MemIQ(${fromMemIQ.size})")
  println(s"[DataPath] IntExu(${fromIntIQ.map(_.size).sum}), MemExu(${fromMemIQ.map(_.size).sum})")

  // just references for convenience
  private val fromIQ: Seq[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] = (fromIntIQ ++ fromVfIQ ++ fromMemIQ).toSeq

  private val toIQs = toIntIQ ++ toVfIQ ++ toMemIQ

  private val toExu: Seq[MixedVec[DecoupledIO[ExuInput]]] = (toIntExu ++ toVfExu ++ toMemExu).toSeq

  private val fromFlattenIQ: Seq[DecoupledIO[IssueQueueIssueBundle]] = fromIQ.flatten

  private val toFlattenExu: Seq[DecoupledIO[ExuInput]] = toExu.flatten

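  // Shared-resource checks for issue: the read arbiters decide which issuing uops win the physical
  // regfile read ports, and the WB collide checkers report whether a uop's writeback port is free.
  // Losing either check keeps the uop from advancing past OG0 (see the s0 -> s1 control below).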
  private val intWbBusyArbiter = Module(new IntRFWBCollideChecker(backendParams))
  private val vfWbBusyArbiter = Module(new VfRFWBCollideChecker(backendParams))
  private val intRFReadArbiter = Module(new IntRFReadArbiter(backendParams))
  private val vfRFReadArbiter = Module(new VfRFReadArbiter(backendParams))

  private val og0FailedVec2: MixedVec[Vec[Bool]] = Wire(MixedVec(fromIQ.map(x => Vec(x.size, Bool())).toSeq))
  private val og1FailedVec2: MixedVec[Vec[Bool]] = Wire(MixedVec(fromIQ.map(x => Vec(x.size, Bool())).toSeq))

  // port -> win
  private val intRdArbWinner: Seq2[MixedVec[Bool]] = intRFReadArbiter.io.in.map(_.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq).toSeq
  private val vfRdArbWinner: Seq2[MixedVec[Bool]] = vfRFReadArbiter.io.in.map(_.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq).toSeq
  private val intWbNotBlock: Seq[MixedVec[Bool]] = intWbBusyArbiter.io.in.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq
  private val vfWbNotBlock: Seq[MixedVec[Bool]] = vfWbBusyArbiter.io.in.map(x => MixedVecInit(x.map(_.ready).toSeq)).toSeq

  private val intRdNotBlock: Seq2[Bool] = intRdArbWinner.map(_.map(_.asUInt.andR))
  private val vfRdNotBlock: Seq2[Bool] = vfRdArbWinner.map(_.map(_.asUInt.andR))

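  // Collect per-source integer regfile read requests from every IQ issue port and feed them into the
  // read arbiter. A request is only raised for sources that really need a register read
  // (dataSources(srcIdx).readReg); sources served by forwarding or immediates are masked off.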
  private val intRFReadReq: Seq3[ValidIO[RfReadPortWithConfig]] = fromIQ.map(x => x.map(xx => xx.bits.getIntRfReadValidBundle(xx.valid)).toSeq).toSeq
  private val intDataSources: Seq[Seq[Vec[DataSource]]] = fromIQ.map(x => x.map(xx => xx.bits.common.dataSources).toSeq)

  intRFReadArbiter.io.in.zip(intRFReadReq).zipWithIndex.foreach { case ((arbInSeq2, inRFReadReqSeq2), iqIdx) =>
    arbInSeq2.zip(inRFReadReqSeq2).zipWithIndex.foreach { case ((arbInSeq, inRFReadReqSeq), exuIdx) =>
      val srcIndices: Seq[Int] = fromIQ(iqIdx)(exuIdx).bits.exuParams.getRfReadSrcIdx(IntData())
      for (srcIdx <- 0 until fromIQ(iqIdx)(exuIdx).bits.exuParams.numRegSrc) {
        if (srcIndices.contains(srcIdx) && inRFReadReqSeq.isDefinedAt(srcIdx)) {
          arbInSeq(srcIdx).valid := inRFReadReqSeq(srcIdx).valid && intDataSources(iqIdx)(exuIdx)(srcIdx).readReg
          arbInSeq(srcIdx).bits.addr := inRFReadReqSeq(srcIdx).bits.addr
        } else {
          arbInSeq(srcIdx).valid := false.B
          arbInSeq(srcIdx).bits.addr := 0.U
        }
      }
    }
  }

  private val vfRFReadReq: Seq3[ValidIO[RfReadPortWithConfig]] = fromIQ.map(x => x.map(xx => xx.bits.getVfRfReadValidBundle(xx.valid)).toSeq).toSeq

  vfRFReadArbiter.io.in.zip(vfRFReadReq).zipWithIndex.foreach { case ((arbInSeq2, inRFReadReqSeq2), iqIdx) =>
    arbInSeq2.zip(inRFReadReqSeq2).zipWithIndex.foreach { case ((arbInSeq, inRFReadReqSeq), exuIdx) =>
      val srcIndices: Seq[Int] = VfRegSrcDataSet.flatMap(data => fromIQ(iqIdx)(exuIdx).bits.exuParams.getRfReadSrcIdx(data)).toSeq.sorted
      for (srcIdx <- 0 until fromIQ(iqIdx)(exuIdx).bits.exuParams.numRegSrc) {
        if (srcIndices.contains(srcIdx) && inRFReadReqSeq.isDefinedAt(srcIdx)) {
          arbInSeq(srcIdx).valid := inRFReadReqSeq(srcIdx).valid
          arbInSeq(srcIdx).bits.addr := inRFReadReqSeq(srcIdx).bits.addr
        } else {
          arbInSeq(srcIdx).valid := false.B
          arbInSeq(srcIdx).bits.addr := 0.U
        }
      }
    }
  }

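  // Writeback-port collision check: each issuing uop that writes an int/vf destination raises a
  // request to the corresponding WB collide checker; the returned ready (int/vfWbNotBlock above)
  // indicates whether its writeback slot is free this cycle.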
  private val intRFWriteReq: Seq2[Bool] = fromIQ.map(x => x.map(xx => xx.valid && xx.bits.common.rfWen.getOrElse(false.B)).toSeq).toSeq
  private val vfRFWriteReq: Seq2[Bool] = fromIQ.map(x => x.map(xx => xx.valid && xx.bits.common.getVfWen.getOrElse(false.B)).toSeq).toSeq

  intWbBusyArbiter.io.in.zip(intRFWriteReq).foreach { case (arbInSeq, inRFWriteReqSeq) =>
    arbInSeq.zip(inRFWriteReqSeq).foreach { case (arbIn, inRFWriteReq) =>
      arbIn.valid := inRFWriteReq
    }
  }

  vfWbBusyArbiter.io.in.zip(vfRFWriteReq).foreach { case (arbInSeq, inRFWriteReqSeq) =>
    arbInSeq.zip(inRFWriteReqSeq).foreach { case (arbIn, inRFWriteReq) =>
      arbIn.valid := inRFWriteReq
    }
  }

  private val intSchdParams = params.schdParams(IntScheduler())
  private val vfSchdParams = params.schdParams(VfScheduler())
  private val memSchdParams = params.schdParams(MemScheduler())

  private val numIntRfReadByExu = intSchdParams.numIntRfReadByExu + memSchdParams.numIntRfReadByExu
  private val numVfRfReadByExu = vfSchdParams.numVfRfReadByExu + memSchdParams.numVfRfReadByExu
  // Todo: limit read port
  private val numIntR = numIntRfReadByExu
  private val numVfR = numVfRfReadByExu
  println(s"[DataPath] RegFile read req needed by Exu: Int(${numIntRfReadByExu}), Vf(${numVfRfReadByExu})")
  println(s"[DataPath] RegFile read port: Int(${numIntR}), Vf(${numVfR})")

  private val schdParams = params.allSchdParams

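  // Wires for the physical register files: arbitrated read addresses / read data plus the writeback
  // address / data / enable ports. The vector regfile is organised as VLEN/XLEN banks that share
  // read/write addresses but carry per-bank write enables (vfRfWen).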
  private val pcReadFtqPtr = Wire(chiselTypeOf(io.pcFromPcTargetMem.fromDataPathFtqPtr))
  private val pcReadFtqOffset = Wire(chiselTypeOf(io.pcFromPcTargetMem.fromDataPathFtqOffset))
  private val pcRdata = io.pcFromPcTargetMem.toDataPathPC
  private val intRfRaddr = Wire(Vec(params.numPregRd(IntData()), UInt(intSchdParams.pregIdxWidth.W)))
  private val intRfRdata = Wire(Vec(params.numPregRd(IntData()), UInt(intSchdParams.rfDataWidth.W)))
  private val intRfWen = Wire(Vec(io.fromIntWb.length, Bool()))
  private val intRfWaddr = Wire(Vec(io.fromIntWb.length, UInt(intSchdParams.pregIdxWidth.W)))
  private val intRfWdata = Wire(Vec(io.fromIntWb.length, UInt(intSchdParams.rfDataWidth.W)))

  private val vfRfSplitNum = VLEN / XLEN
  private val vfRfRaddr = Wire(Vec(params.numPregRd(VecData()), UInt(vfSchdParams.pregIdxWidth.W)))
  private val vfRfRdata = Wire(Vec(params.numPregRd(VecData()), UInt(vfSchdParams.rfDataWidth.W)))
  private val vfRfWen = Wire(Vec(vfRfSplitNum, Vec(io.fromVfWb.length, Bool())))
  private val vfRfWaddr = Wire(Vec(io.fromVfWb.length, UInt(vfSchdParams.pregIdxWidth.W)))
  private val vfRfWdata = Wire(Vec(io.fromVfWb.length, UInt(vfSchdParams.rfDataWidth.W)))

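  // PC read path: issue ports whose EXU needs the PC send ftqIdx/ftqOffset to PcTargetMem, which
  // returns the corresponding PC (pcRdata) to be attached to the uop at the EXU input stage.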
  val pcReadFtqPtrFormIQ = fromIntIQ.flatten.filter(x => x.bits.exuParams.needPc)
  assert(pcReadFtqPtrFormIQ.size == pcReadFtqPtr.size, s"pcReadFtqPtrFormIQ.size ${pcReadFtqPtrFormIQ.size} not equal pcReadFtqPtr.size ${pcReadFtqPtr.size}")
  pcReadFtqPtr.zip(pcReadFtqPtrFormIQ.map(_.bits.common.ftqIdx.get)).map(x => x._1 := x._2)
  pcReadFtqOffset.zip(pcReadFtqPtrFormIQ.map(_.bits.common.ftqOffset.get)).map(x => x._1 := x._2)
  io.pcFromPcTargetMem.fromDataPathFtqPtr := pcReadFtqPtr
  io.pcFromPcTargetMem.fromDataPathFtqOffset := pcReadFtqOffset
  private val intDebugRead: Option[(Vec[UInt], Vec[UInt])] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(Vec(32, UInt(intSchdParams.pregIdxWidth.W))), Wire(Vec(32, UInt(XLEN.W))))
    } else { None }
  private val vfDebugRead: Option[(Vec[UInt], Vec[UInt])] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(Vec(32 + 32 + 1, UInt(vfSchdParams.pregIdxWidth.W))), Wire(Vec(32 + 32 + 1, UInt(VLEN.W))))
    } else { None }

  private val fpDebugReadData: Option[Vec[UInt]] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(Vec(32, UInt(XLEN.W))))
    } else { None }
  private val vecDebugReadData: Option[Vec[UInt]] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(Vec(64, UInt(64.W)))) // v0 = Cat(Vec(1), Vec(0))
    } else { None }
  private val vconfigDebugReadData: Option[UInt] =
    if (env.AlwaysBasicDiff || env.EnableDifftest) {
      Some(Wire(UInt(64.W)))
    } else { None }


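  // Debug (difftest) reads of the vf regfile use 32 + 32 + 1 ports laid out as: ports 0-31 for the
  // fp architectural registers, ports 32-63 for the vector architectural registers (each VLEN entry
  // split into two 64-bit halves below), and port 64 for vconfig.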
  fpDebugReadData.foreach(_ := vfDebugRead
    .get._2
    .slice(0, 32)
    .map(_(63, 0))
  ) // fp only uses bits [63:0]
  vecDebugReadData.foreach(_ := vfDebugRead
    .get._2
    .slice(32, 64)
    .map(x => Seq(x(63, 0), x(127, 64))).flatten
  )
  vconfigDebugReadData.foreach(_ := vfDebugRead
    .get._2(64)(63, 0)
  )

  io.debugVconfig.foreach(_ := vconfigDebugReadData.get)

  IntRegFile("IntRegFile", intSchdParams.numPregs, intRfRaddr, intRfRdata, intRfWen, intRfWaddr, intRfWdata,
    debugReadAddr = intDebugRead.map(_._1),
    debugReadData = intDebugRead.map(_._2))
  VfRegFile("VfRegFile", vfSchdParams.numPregs, vfRfSplitNum, vfRfRaddr, vfRfRdata, vfRfWen, vfRfWaddr, vfRfWdata,
    debugReadAddr = vfDebugRead.map(_._1),
    debugReadData = vfDebugRead.map(_._2))

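  // Drive the regfile write ports directly from the writeback bundles, and the read-address ports
  // from the winners chosen by the read arbiters (ports without an arbiter output read address 0).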
  intRfWaddr := io.fromIntWb.map(_.addr).toSeq
  intRfWdata := io.fromIntWb.map(_.data).toSeq
  intRfWen := io.fromIntWb.map(_.wen).toSeq

  for (portIdx <- intRfRaddr.indices) {
    if (intRFReadArbiter.io.out.isDefinedAt(portIdx))
      intRfRaddr(portIdx) := intRFReadArbiter.io.out(portIdx).bits.addr
    else
      intRfRaddr(portIdx) := 0.U
  }

  vfRfWaddr := io.fromVfWb.map(_.addr).toSeq
  vfRfWdata := io.fromVfWb.map(_.data).toSeq
  vfRfWen.foreach(_.zip(io.fromVfWb.map(_.wen)).foreach { case (wenSink, wenSource) => wenSink := wenSource } )// Todo: support fp multi-write

  for (portIdx <- vfRfRaddr.indices) {
    if (vfRFReadArbiter.io.out.isDefinedAt(portIdx))
      vfRfRaddr(portIdx) := vfRFReadArbiter.io.out(portIdx).bits.addr
    else
      vfRfRaddr(portIdx) := 0.U
  }

  vfRfRaddr(VCONFIG_PORT) := io.vconfigReadPort.addr
  io.vconfigReadPort.data := vfRfRdata(VCONFIG_PORT)
  // vfRfRaddr(VLD_PORT) := io.vldReadPort.addr
  io.vldReadPort.data := DontCare

  intDebugRead.foreach { case (addr, _) =>
    addr := io.debugIntRat.get
  }

  vfDebugRead.foreach { case (addr, _) =>
    addr := io.debugFpRat.get ++ io.debugVecRat.get :+ io.debugVconfigRat.get
  }
  println(s"[DataPath] " +
    s"has intDebugRead: ${intDebugRead.nonEmpty}, " +
    s"has vfDebugRead: ${vfDebugRead.nonEmpty}")

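  // s1 pipeline registers between IQ issue (s0 / OG0) and the EXU inputs (s1 / OG1): valid, uop
  // payload, addrOH and latched source types. Operand data read from the regfiles is not stored
  // here; it is merged into the EXU input bundle at s1 further below.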
  val s1_addrOHs = Reg(MixedVec(
    fromIQ.map(x => MixedVec(x.map(_.bits.addrOH.cloneType).toSeq)).toSeq
  ))
  val s1_toExuValid: MixedVec[MixedVec[Bool]] = Reg(MixedVec(
    toExu.map(x => MixedVec(x.map(_.valid.cloneType).toSeq)).toSeq
  ))
  val s1_toExuData: MixedVec[MixedVec[ExuInput]] = Reg(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.cloneType).toSeq)).toSeq))
  val s1_toExuReady = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.ready.cloneType).toSeq))))
  val s1_srcType: MixedVec[MixedVec[Vec[UInt]]] = MixedVecInit(fromIQ.map(x => MixedVecInit(x.map(xx => RegEnable(xx.bits.srcType, xx.fire)).toSeq)))

  val s1_intPregRData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType).toSeq))))
  val s1_vfPregRData: MixedVec[MixedVec[Vec[UInt]]] = Wire(MixedVec(toExu.map(x => MixedVec(x.map(_.bits.src.cloneType).toSeq))))

  val rfrPortConfigs = schdParams.map(_.issueBlockParams).flatten.map(_.exuBlockParams.map(_.rfrPortConfigs))

  println(s"[DataPath] s1_intPregRData.flatten.flatten.size: ${s1_intPregRData.flatten.flatten.size}, intRfRdata.size: ${intRfRdata.size}")
  s1_intPregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
  s1_intPregRData.zip(rfrPortConfigs).foreach { case (iqRdata, iqCfg) =>
      iqRdata.zip(iqCfg).foreach { case (iuRdata, iuCfg) =>
        val realIuCfg = iuCfg.map(x => if(x.size > 1) x.filter(_.isInstanceOf[IntRD]) else x).flatten
        assert(iuRdata.size == realIuCfg.size, "iuRdata.size != realIuCfg.size")
        iuRdata.zip(realIuCfg)
          .filter { case (_, rfrPortConfig) => rfrPortConfig.isInstanceOf[IntRD] }
          .foreach { case (sink, cfg) => sink := intRfRdata(cfg.port) }
      }
  }

  println(s"[DataPath] s1_vfPregRData.flatten.flatten.size: ${s1_vfPregRData.flatten.flatten.size}, vfRfRdata.size: ${vfRfRdata.size}")
  s1_vfPregRData.foreach(_.foreach(_.foreach(_ := 0.U)))
  s1_vfPregRData.zip(rfrPortConfigs).foreach{ case(iqRdata, iqCfg) =>
      iqRdata.zip(iqCfg).foreach{ case(iuRdata, iuCfg) =>
        val realIuCfg = iuCfg.map(x => if(x.size > 1) x.filter(_.isInstanceOf[VfRD]) else x).flatten
        assert(iuRdata.size == realIuCfg.size, "iuRdata.size != realIuCfg.size")
        iuRdata.zip(realIuCfg)
          .filter { case (_, rfrPortConfig) => rfrPortConfig.isInstanceOf[VfRD] }
          .foreach { case (sink, cfg) => sink := vfRfRdata(cfg.port) }
      }
  }

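  // s0 -> s1 enqueue control: a uop leaves its IQ only when every source needing a register read has
  // won its read port (srcNotBlock), its writeback port is free (int/vfWbNotBlock), it is not
  // flushed, not cancelled by an OG1 failure on the same issue port, and not cancelled by a
  // mis-speculated load it depends on.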
  for (i <- fromIQ.indices) {
    for (j <- fromIQ(i).indices) {
      // IQ(s0) --[Ctrl]--> s1Reg ---------- begin
      // refs
      val s1_valid = s1_toExuValid(i)(j)
      val s1_ready = s1_toExuReady(i)(j)
      val s1_data = s1_toExuData(i)(j)
      val s1_addrOH = s1_addrOHs(i)(j)
      val s0 = fromIQ(i)(j) // s0
      val srcNotBlock = s0.bits.common.dataSources.zip(intRdArbWinner(i)(j) zip vfRdArbWinner(i)(j)).map { case (source, win) =>
        !source.readReg || win._1 && win._2
      }.fold(true.B)(_ && _)
      val notBlock = srcNotBlock && intWbNotBlock(i)(j) && vfWbNotBlock(i)(j)
      val s1_flush = s0.bits.common.robIdx.needFlush(Seq(io.flush, RegNextWithEnable(io.flush)))
      val s1_cancel = og1FailedVec2(i)(j)
      val s1_ldCancel = LoadShouldCancel(s0.bits.common.loadDependency, io.ldCancel)
      when (s0.fire && !s1_flush && notBlock && !s1_cancel && !s1_ldCancel) {
        s1_valid := s0.valid
        s1_data.fromIssueBundle(s0.bits) // no src data here
        s1_addrOH := s0.bits.addrOH
      }.otherwise {
        s1_valid := false.B
      }
      s0.ready := (s1_ready || !s1_valid) && notBlock
      // IQ(s0) --[Ctrl]--> s1Reg ---------- end

      // IQ(s0) --[Data]--> s1Reg ---------- begin
      // imm extract
      when (s0.fire && !s1_flush && notBlock) {
        if (s1_data.params.immType.nonEmpty && s1_data.src.size > 1) {
          // rs1 is always int reg, rs2 may be imm
          when(SrcType.isImm(s0.bits.srcType(1))) {
            s1_data.src(1) := ImmExtractor(
              s0.bits.common.imm,
              s0.bits.immType,
              s1_data.params.dataBitsMax,
              s1_data.params.immType.map(_.litValue)
            )
          }
        }
        if (s1_data.params.hasVecFu) {
          // RISC-V vector instructions place the immediate in src(0) rather than src(1)
          when(SrcType.isImm(s0.bits.srcType(0))) {
            s1_data.src(0) := ImmExtractor(
              s0.bits.common.imm,
              s0.bits.immType,
              s1_data.params.dataBitsMax,
              s1_data.params.immType.map(_.litValue)
            )
          }
        } else if (s1_data.params.hasLoadFu || s1_data.params.hasHyldaFu) {
          // dirty code for fused_lui_load
          when(SrcType.isImm(s0.bits.srcType(0))) {
            s1_data.src(0) := SignExt(ImmUnion.U.toImm32(s0.bits.common.imm(s0.bits.common.imm.getWidth - 1, ImmUnion.I.len)), XLEN)
          }
        }
      }
      // IQ(s0) --[Data]--> s1Reg ---------- end
    }
  }

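  // OG0/OG1 responses back to the issue queues: og0resp flags a uop that was valid at s0 but did not
  // fire (blocked), while og1resp reports the s1 outcome so the IQ can clear, keep, or re-arm the
  // corresponding entry.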
  private val fromIQFire = fromIQ.map(_.map(_.fire))
  private val toExuFire = toExu.map(_.map(_.fire))
  toIQs.zipWithIndex.foreach {
    case(toIQ, iqIdx) =>
      toIQ.zipWithIndex.foreach {
        case (toIU, iuIdx) =>
          // IU: issue unit
          val og0resp = toIU.og0resp
          og0FailedVec2(iqIdx)(iuIdx) := fromIQ(iqIdx)(iuIdx).valid && (!fromIQFire(iqIdx)(iuIdx))
          og0resp.valid                 := og0FailedVec2(iqIdx)(iuIdx)
          og0resp.bits.robIdx           := fromIQ(iqIdx)(iuIdx).bits.common.robIdx
          og0resp.bits.uopIdx.foreach(_ := fromIQ(iqIdx)(iuIdx).bits.common.vpu.get.vuopIdx)
          og0resp.bits.resp             := RespType.block
          og0resp.bits.fuType           := fromIQ(iqIdx)(iuIdx).bits.common.fuType

          val og1resp = toIU.og1resp
          og1FailedVec2(iqIdx)(iuIdx)   := s1_toExuValid(iqIdx)(iuIdx) && !toExuFire(iqIdx)(iuIdx)
          og1resp.valid                 := s1_toExuValid(iqIdx)(iuIdx)
          og1resp.bits.robIdx           := s1_toExuData(iqIdx)(iuIdx).robIdx
          og1resp.bits.uopIdx.foreach(_ := s1_toExuData(iqIdx)(iuIdx).vpu.get.vuopIdx)
          // respType:  fuIdle      ->IQ entry clear
          //            fuUncertain ->IQ entry no action
          //            fuBusy      ->IQ entry issued set false, then re-issue
          // Only hyu, lda and sta are fuUncertain at OG1 stage
          og1resp.bits.resp             := Mux(!og1FailedVec2(iqIdx)(iuIdx),
            if (toIU.issueQueueParams.isMemAddrIQ) RespType.uncertain else RespType.success,
            RespType.block
          )
          og1resp.bits.fuType           := s1_toExuData(iqIdx)(iuIdx).fuType
      }
  }

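  // Per-EXU one-hot cancel vectors: og0CancelOH marks uops valid at s0 that failed to fire,
  // og1CancelOH marks uops valid at s1 that failed to enter their EXU. cancelToBusyTable additionally
  // forwards the cancelled uop's destination information, presumably so the busy table can undo the
  // corresponding wakeup.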
  io.og0CancelOH := VecInit(fromFlattenIQ.map(x => x.valid && !x.fire)).asUInt
  io.og1CancelOH := VecInit(toFlattenExu.map(x => x.valid && !x.fire)).asUInt

  io.cancelToBusyTable.zipWithIndex.foreach { case (cancel, i) =>
    cancel.valid := fromFlattenIQ(i).valid && !fromFlattenIQ(i).fire && {
      if (fromFlattenIQ(i).bits.common.rfWen.isDefined)
        fromFlattenIQ(i).bits.common.rfWen.get && fromFlattenIQ(i).bits.common.pdest =/= 0.U
      else
        true.B
    }
    cancel.bits.rfWen := fromFlattenIQ(i).bits.common.rfWen.getOrElse(false.B)
    cancel.bits.fpWen := fromFlattenIQ(i).bits.common.fpWen.getOrElse(false.B)
    cancel.bits.vecWen := fromFlattenIQ(i).bits.common.vecWen.getOrElse(false.B)
    cancel.bits.pdest := fromFlattenIQ(i).bits.common.pdest
  }

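  // s1 -> EXU: drive the EXU input handshake from the s1 registers and merge in the operand data,
  // selecting per source between int and vf regfile read data, the immediate extracted at s0, or the
  // PC looked up from PcTargetMem.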
  for (i <- toExu.indices) {
    for (j <- toExu(i).indices) {
      // s1Reg --[Ctrl]--> exu(s1) ---------- begin
      // refs
      val sinkData = toExu(i)(j).bits
      // assign
      toExu(i)(j).valid := s1_toExuValid(i)(j)
      s1_toExuReady(i)(j) := toExu(i)(j).ready
      sinkData := s1_toExuData(i)(j)
      // s1Reg --[Ctrl]--> exu(s1) ---------- end

      // s1Reg --[Data]--> exu(s1) ---------- begin
      // data source1: preg read data
      for (k <- sinkData.src.indices) {
        val srcDataTypeSet: Set[DataConfig] = sinkData.params.getSrcDataType(k)

        val readRfMap: Seq[(Bool, UInt)] = (Seq(None) :+
          (if (s1_intPregRData(i)(j).isDefinedAt(k) && srcDataTypeSet.intersect(IntRegSrcDataSet).nonEmpty)
            Some(SrcType.isXp(s1_srcType(i)(j)(k)) -> s1_intPregRData(i)(j)(k))
          else None) :+
          (if (s1_vfPregRData(i)(j).isDefinedAt(k) && srcDataTypeSet.intersect(VfRegSrcDataSet).nonEmpty)
            Some(SrcType.isVfp(s1_srcType(i)(j)(k))-> s1_vfPregRData(i)(j)(k))
          else None)
        ).filter(_.nonEmpty).map(_.get)
        if (readRfMap.nonEmpty)
          sinkData.src(k) := Mux1H(readRfMap)
      }

      // data source2: extracted imm and pc saved in s1Reg
      if (sinkData.params.immType.nonEmpty && sinkData.src.size > 1) {
        when(SrcType.isImm(s1_srcType(i)(j)(1))) {
          sinkData.src(1) := s1_toExuData(i)(j).src(1)
        }
      }
      if (sinkData.params.hasJmpFu) {
        val index = pcReadFtqPtrFormIQ.map(_.bits.exuParams).indexOf(sinkData.params)
        sinkData.pc.get := pcRdata(index)
      } else if (sinkData.params.hasVecFu) {
        when(SrcType.isImm(s1_srcType(i)(j)(0))) {
          sinkData.src(0) := s1_toExuData(i)(j).src(0)
        }
      } else if (sinkData.params.hasLoadFu || sinkData.params.hasHyldaFu) {
        when(SrcType.isImm(s1_srcType(i)(j)(0))) {
          sinkData.src(0) := s1_toExuData(i)(j).src(0)
        }
      }
      // s1Reg --[Data]--> exu(s1) ---------- end
    }
  }

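  // Difftest / basic-diff: expose the architectural int, fp and vec register state read through the
  // debug ports above, delayed by delayedCnt cycles.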
  if (env.AlwaysBasicDiff || env.EnableDifftest) {
    val delayedCnt = 2
    val difftestArchIntRegState = DifftestModule(new DiffArchIntRegState, delay = delayedCnt)
    difftestArchIntRegState.coreid := io.hartId
    difftestArchIntRegState.value := intDebugRead.get._2

    val difftestArchFpRegState = DifftestModule(new DiffArchFpRegState, delay = delayedCnt)
    difftestArchFpRegState.coreid := io.hartId
    difftestArchFpRegState.value := fpDebugReadData.get

    val difftestArchVecRegState = DifftestModule(new DiffArchVecRegState, delay = delayedCnt)
    difftestArchVecRegState.coreid := io.hartId
    difftestArchVecRegState.value := vecDebugReadData.get
  }

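  // The int/vf "regcache" structures below appear to model a hypothetical register cache holding the
  // most recently written physical registers; they only feed the XSPerf hit-rate counters further
  // down and do not affect the datapath itself.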
  val int_regcache_size = 48
  val int_regcache_tag = RegInit(VecInit(Seq.fill(int_regcache_size)(0.U(intSchdParams.pregIdxWidth.W))))
  val int_regcache_enqPtr = RegInit(0.U(log2Up(int_regcache_size).W))
  int_regcache_enqPtr := int_regcache_enqPtr + PopCount(intRfWen)
  for (i <- intRfWen.indices) {
    when (intRfWen(i)) {
      int_regcache_tag(int_regcache_enqPtr + PopCount(intRfWen.take(i))) := intRfWaddr(i)
    }
  }

  val vf_regcache_size = 48
  val vf_regcache_tag = RegInit(VecInit(Seq.fill(vf_regcache_size)(0.U(vfSchdParams.pregIdxWidth.W))))
  val vf_regcache_enqPtr = RegInit(0.U(log2Up(vf_regcache_size).W))
  vf_regcache_enqPtr := vf_regcache_enqPtr + PopCount(vfRfWen.head)
  for (i <- vfRfWen.indices) {
    when (vfRfWen.head(i)) {
      vf_regcache_tag(vf_regcache_enqPtr + PopCount(vfRfWen.head.take(i))) := vfRfWaddr(i)
    }
  }

  XSPerfHistogram(s"IntRegFileRead_hist", PopCount(intRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 20, 1)
  XSPerfHistogram(s"VfRegFileRead_hist", PopCount(vfRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 20, 1)
  XSPerfHistogram(s"IntRegFileWrite_hist", PopCount(intRFWriteReq.flatten), true.B, 0, 20, 1)
  XSPerfHistogram(s"VfRegFileWrite_hist", PopCount(vfRFWriteReq.flatten), true.B, 0, 20, 1)

  val int_regcache_part32 = (1 until 33).map(i => int_regcache_tag(int_regcache_enqPtr - i.U))
  val int_regcache_part24 = (1 until 25).map(i => int_regcache_tag(int_regcache_enqPtr - i.U))
  val int_regcache_part16 = (1 until 17).map(i => int_regcache_tag(int_regcache_enqPtr - i.U))
  val int_regcache_part8 = (1 until 9).map(i => int_regcache_tag(int_regcache_enqPtr - i.U))

  val int_regcache_48_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_tag.map(_ === x.bits.addr).reduce(_ || _))
  val int_regcache_8_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_part8.map(_ === x.bits.addr).reduce(_ || _))
  val int_regcache_16_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_part16.map(_ === x.bits.addr).reduce(_ || _))
  val int_regcache_24_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_part24.map(_ === x.bits.addr).reduce(_ || _))
  val int_regcache_32_hit_vec = intRFReadArbiter.io.in.flatten.flatten.map(x => x.valid && int_regcache_part32.map(_ === x.bits.addr).reduce(_ || _))
  XSPerfAccumulate("IntRegCache48Hit", PopCount(int_regcache_48_hit_vec))
  XSPerfAccumulate("IntRegCache8Hit", PopCount(int_regcache_8_hit_vec))
  XSPerfAccumulate("IntRegCache16Hit", PopCount(int_regcache_16_hit_vec))
  XSPerfAccumulate("IntRegCache24Hit", PopCount(int_regcache_24_hit_vec))
  XSPerfAccumulate("IntRegCache32Hit", PopCount(int_regcache_32_hit_vec))
  XSPerfHistogram("IntRegCache48Hit_hist", PopCount(int_regcache_48_hit_vec), true.B, 0, 16, 2)

  XSPerfAccumulate(s"IntRFReadBeforeArb", PopCount(intRFReadArbiter.io.in.flatten.flatten.map(_.valid)))
  XSPerfAccumulate(s"IntRFReadAfterArb", PopCount(intRFReadArbiter.io.out.map(_.valid)))
  XSPerfAccumulate(s"VfRFReadBeforeArb", PopCount(vfRFReadArbiter.io.in.flatten.flatten.map(_.valid)))
  XSPerfAccumulate(s"VfRFReadAfterArb", PopCount(vfRFReadArbiter.io.out.map(_.valid)))
  XSPerfAccumulate(s"IntUopBeforeArb", PopCount(fromIntIQ.flatten.map(_.valid)))
  XSPerfAccumulate(s"IntUopAfterArb", PopCount(fromIntIQ.flatten.map(_.fire)))
  XSPerfAccumulate(s"MemUopBeforeArb", PopCount(fromMemIQ.flatten.map(_.valid)))
  XSPerfAccumulate(s"MemUopAfterArb", PopCount(fromMemIQ.flatten.map(_.fire)))
  XSPerfAccumulate(s"VfUopBeforeArb", PopCount(fromVfIQ.flatten.map(_.valid)))
  XSPerfAccumulate(s"VfUopAfterArb", PopCount(fromVfIQ.flatten.map(_.fire)))

  XSPerfHistogram(s"IntRFReadBeforeArb_hist", PopCount(intRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 16, 2)
  XSPerfHistogram(s"IntRFReadAfterArb_hist", PopCount(intRFReadArbiter.io.out.map(_.valid)), true.B, 0, 16, 2)
  XSPerfHistogram(s"VfRFReadBeforeArb_hist", PopCount(vfRFReadArbiter.io.in.flatten.flatten.map(_.valid)), true.B, 0, 16, 2)
  XSPerfHistogram(s"VfRFReadAfterArb_hist", PopCount(vfRFReadArbiter.io.out.map(_.valid)), true.B, 0, 16, 2)
  XSPerfHistogram(s"IntUopBeforeArb_hist", PopCount(fromIntIQ.flatten.map(_.valid)), true.B, 0, 8, 2)
  XSPerfHistogram(s"IntUopAfterArb_hist", PopCount(fromIntIQ.flatten.map(_.fire)), true.B, 0, 8, 2)
  XSPerfHistogram(s"MemUopBeforeArb_hist", PopCount(fromMemIQ.flatten.map(_.valid)), true.B, 0, 8, 2)
  XSPerfHistogram(s"MemUopAfterArb_hist", PopCount(fromMemIQ.flatten.map(_.fire)), true.B, 0, 8, 2)
  XSPerfHistogram(s"VfUopBeforeArb_hist", PopCount(fromVfIQ.flatten.map(_.valid)), true.B, 0, 8, 2)
  XSPerfHistogram(s"VfUopAfterArb_hist", PopCount(fromVfIQ.flatten.map(_.fire)), true.B, 0, 8, 2)
}

class DataPathIO()(implicit p: Parameters, params: BackendParams) extends XSBundle {
  // params
  private val intSchdParams = params.schdParams(IntScheduler())
  private val vfSchdParams = params.schdParams(VfScheduler())
  private val memSchdParams = params.schdParams(MemScheduler())
  // bundles
  val hartId = Input(UInt(8.W))

  val flush: ValidIO[Redirect] = Flipped(ValidIO(new Redirect))

  // Todo: check if this can be removed
  val vconfigReadPort = new RfReadPort(XLEN, PhyRegIdxWidth)

  val vldReadPort = new RfReadPort(VLEN, PhyRegIdxWidth)

  val wbConfictRead = Input(MixedVec(params.allSchdParams.map(x => MixedVec(x.issueBlockParams.map(x => x.genWbConflictBundle())))))

  val fromIntIQ: MixedVec[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] =
    Flipped(MixedVec(intSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle)))

  val fromMemIQ: MixedVec[MixedVec[DecoupledIO[IssueQueueIssueBundle]]] =
    Flipped(MixedVec(memSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle)))

  val fromVfIQ = Flipped(MixedVec(vfSchdParams.issueBlockParams.map(_.genIssueDecoupledBundle)))

  val toIntIQ = MixedVec(intSchdParams.issueBlockParams.map(_.genOGRespBundle))

  val toMemIQ = MixedVec(memSchdParams.issueBlockParams.map(_.genOGRespBundle))

  val toVfIQ = MixedVec(vfSchdParams.issueBlockParams.map(_.genOGRespBundle))

  val og0CancelOH = Output(ExuOH(backendParams.numExu))

  val og1CancelOH = Output(ExuOH(backendParams.numExu))

  val ldCancel = Vec(backendParams.LduCnt + backendParams.HyuCnt, Flipped(new LoadCancelIO))

  val cancelToBusyTable = Vec(backendParams.numExu, ValidIO(new CancelSignal))

  val toIntExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = intSchdParams.genExuInputBundle

  val toFpExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = MixedVec(vfSchdParams.genExuInputBundle)

  val toMemExu: MixedVec[MixedVec[DecoupledIO[ExuInput]]] = memSchdParams.genExuInputBundle

  val fromIntWb: MixedVec[RfWritePortWithConfig] = MixedVec(params.genIntWriteBackBundle)

  val fromVfWb: MixedVec[RfWritePortWithConfig] = MixedVec(params.genVfWriteBackBundle)

  val pcFromPcTargetMem = Flipped(new PcToDataPathIO(params))

  val debugIntRat     = if (params.debugEn) Some(Input(Vec(32, UInt(intSchdParams.pregIdxWidth.W)))) else None
  val debugFpRat      = if (params.debugEn) Some(Input(Vec(32, UInt(vfSchdParams.pregIdxWidth.W)))) else None
  val debugVecRat     = if (params.debugEn) Some(Input(Vec(32, UInt(vfSchdParams.pregIdxWidth.W)))) else None
  val debugVconfigRat = if (params.debugEn) Some(Input(UInt(vfSchdParams.pregIdxWidth.W))) else None
  val debugVconfig    = if (params.debugEn) Some(Output(UInt(XLEN.W))) else None
}
581