xref: /XiangShan/src/main/scala/xiangshan/mem/pipeline/LoadUnit.scala (revision ef6723f9795e8222d080df5d74a2a307c1e68a86)
/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.mem

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import utils._
import utility._
import xiangshan.ExceptionNO._
import xiangshan._
import xiangshan.backend.fu.PMPRespBundle
import xiangshan.backend.rob.{DebugLsInfoBundle, LsTopdownInfo, RobPtr}
import xiangshan.cache._
import xiangshan.cache.wpu.ReplayCarry
import xiangshan.cache.mmu._
import xiangshan.mem.mdp._

class LoadToLsqReplayIO(implicit p: Parameters) extends XSBundle
  with HasDCacheParameters
  with HasTlbConst
{
  // mshr refill index
  val mshr_id         = UInt(log2Up(cfg.nMissEntries).W)
  // get full data from store queue and sbuffer
  val full_fwd        = Bool()
  // wait for data from store inst's store queue index
  val data_inv_sq_idx = new SqPtr
  // wait for address from store queue index
  val addr_inv_sq_idx = new SqPtr
  // replay carry
  val rep_carry       = new ReplayCarry(nWays)
  // data in last beat
  val last_beat       = Bool()
  // replay cause
  val cause           = Vec(LoadReplayCauses.allCauses, Bool())
  // performance debug information
  val debug           = new PerfDebugInfo
  // tlb hint
  val tlb_id          = UInt(log2Up(loadfiltersize).W)
  val tlb_full        = Bool()

  // alias
  def mem_amb       = cause(LoadReplayCauses.C_MA)
  def tlb_miss      = cause(LoadReplayCauses.C_TM)
  def fwd_fail      = cause(LoadReplayCauses.C_FF)
  def dcache_rep    = cause(LoadReplayCauses.C_DR)
  def dcache_miss   = cause(LoadReplayCauses.C_DM)
  def wpu_fail      = cause(LoadReplayCauses.C_WF)
  def bank_conflict = cause(LoadReplayCauses.C_BC)
  def rar_nack      = cause(LoadReplayCauses.C_RAR)
  def raw_nack      = cause(LoadReplayCauses.C_RAW)
  def nuke          = cause(LoadReplayCauses.C_NK)
  def need_rep      = cause.asUInt.orR
}


class LoadToLsqIO(implicit p: Parameters) extends XSBundle {
  val ldin            = DecoupledIO(new LqWriteBundle)
  val uncache         = Flipped(DecoupledIO(new ExuOutput))
  val ld_raw_data     = Input(new LoadDataFromLQBundle)
  val forward         = new PipeLoadForwardQueryIO
  val stld_nuke_query = new LoadNukeQueryIO
  val ldld_nuke_query = new LoadNukeQueryIO
}

class LoadToLoadIO(implicit p: Parameters) extends XSBundle {
  val valid      = Bool()
  val data       = UInt(XLEN.W) // the load-to-load fast path is limited to ld (64-bit); the result is only used as vaddr src1
  val dly_ld_err = Bool()
}

class LoadUnitTriggerIO(implicit p: Parameters) extends XSBundle {
  val tdata2      = Input(UInt(64.W))
  val matchType   = Input(UInt(2.W))
  val tEnable     = Input(Bool()) // timing is calculated before this
  val addrHit     = Output(Bool())
  val lastDataHit = Output(Bool())
}

class LoadUnit(implicit p: Parameters) extends XSModule
  with HasLoadHelper
  with HasPerfEvents
  with HasDCacheParameters
  with HasCircularQueuePtrHelper
{
  val io = IO(new Bundle() {
    // control
    val redirect      = Flipped(ValidIO(new Redirect))
    val csrCtrl       = Flipped(new CustomCSRCtrlIO)

    // int issue path
    val ldin          = Flipped(Decoupled(new ExuInput))
    val ldout         = Decoupled(new ExuOutput)
    val rsIdx         = Input(UInt())
    val isFirstIssue  = Input(Bool())

    // data path
    val tlb           = new TlbRequestIO(2)
    val pmp           = Flipped(new PMPRespBundle()) // arrives at the same time as the tlb resp now
    val dcache        = new DCacheLoadIO
    val sbuffer       = new LoadForwardQueryIO
    val lsq           = new LoadToLsqIO
    val tl_d_channel  = Input(new DcacheToLduForwardIO)
    val forward_mshr  = Flipped(new LduToMissqueueForwardIO)
   // val refill        = Flipped(ValidIO(new Refill))
    val l2_hint       = Input(Valid(new L2ToL1Hint))
    val tlb_hint      = Flipped(new TlbHintReq)
    // fast wakeup
    val fast_uop = ValidIO(new MicroOp) // early wakeup signal generated in load_s1, sent to RS in load_s2

    // prefetch
    val prefetch_train            = ValidIO(new LdPrefetchTrainBundle()) // provide prefetch info to sms
    val prefetch_train_l1         = ValidIO(new LdPrefetchTrainBundle()) // provide prefetch info to stream & stride
    val prefetch_req              = Flipped(ValidIO(new L1PrefetchReq)) // hardware prefetch to l1 cache req
    val canAcceptLowConfPrefetch  = Output(Bool())
    val canAcceptHighConfPrefetch = Output(Bool())

    // load to load fast path
    val l2l_fwd_in    = Input(new LoadToLoadIO)
    val l2l_fwd_out   = Output(new LoadToLoadIO)

    val ld_fast_match    = Input(Bool())
    val ld_fast_fuOpType = Input(UInt())
    val ld_fast_imm      = Input(UInt(12.W))

    // rs feedback
    val feedback_fast = ValidIO(new RSFeedback) // stage 2
    val feedback_slow = ValidIO(new RSFeedback) // stage 3

    // load ecc error
    val s3_dly_ld_err = Output(Bool()) // Note that io.s3_dly_ld_err and io.lsq.s3_dly_ld_err are different

    // schedule error query
    val stld_nuke_query = Flipped(Vec(StorePipelineWidth, Valid(new StoreNukeQueryIO)))

    // queue-based replay
    val replay       = Flipped(Decoupled(new LsPipelineBundle))
    val lq_rep_full  = Input(Bool())

    // misc
    val s2_ptr_chasing = Output(Bool()) // provide right pc for hw prefetch

    // Load fast replay path
    val fast_rep_in  = Flipped(Decoupled(new LqWriteBundle))
    val fast_rep_out = Decoupled(new LqWriteBundle)

    // Load RAR rollback
    val rollback = Valid(new Redirect)

    // perf
    val debug_ls         = Output(new DebugLsInfoBundle)
    val lsTopdownInfo    = Output(new LsTopdownInfo)
    val correctMissTrain = Input(Bool())
  })

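  // Pipeline handshake convention (as used throughout this file): each stage k
  // has s<k>_valid / s<k>_kill / s<k>_can_go / s<k>_fire, where a stage fires
  // when it is valid and the next stage is ready. The ready signals declared
  // below are driven backwards from the later stages (e.g. s1_ready depends on
  // s2_ready), so a stall in s3 propagates all the way back to s0.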
  val s1_ready, s2_ready, s3_ready = WireInit(false.B)

  // Pipeline
  // --------------------------------------------------------------------------------
  // stage 0
  // --------------------------------------------------------------------------------
  // generate addr, use addr to query DCache and DTLB
  val s0_valid         = Wire(Bool())
  val s0_kill          = Wire(Bool())
  val s0_can_go        = s1_ready
  val s0_fire          = s0_valid && s0_can_go
  val s0_out           = Wire(new LqWriteBundle)

  // flow source bundle
  class FlowSource extends Bundle {
    val vaddr         = UInt(VAddrBits.W)
    val mask          = UInt((VLEN/8).W)
    val uop           = new MicroOp
    val try_l2l       = Bool()
    val has_rob_entry = Bool()
    val rsIdx         = UInt(log2Up(IssQueSize).W)
    val rep_carry     = new ReplayCarry(nWays)
    val mshrid        = UInt(log2Up(cfg.nMissEntries).W)
    val isFirstIssue  = Bool()
    val fast_rep      = Bool()
    val ld_rep        = Bool()
    val l2l_fwd       = Bool()
    val prf           = Bool()
    val prf_rd        = Bool()
    val prf_wr        = Bool()
    val sched_idx     = UInt(log2Up(LoadQueueReplaySize+1).W)
    val hlv           = Bool()
    val hlvx          = Bool()
  }
  val s0_sel_src = Wire(new FlowSource)

  // load flow select/gen
  // src0: super load replayed by LSQ (cache miss replay) (io.replay)
  // src1: fast load replay (io.fast_rep_in)
  // src2: load replayed by LSQ (io.replay)
  // src3: hardware prefetch from prefetcher (high confidence) (io.prefetch_req)
  // src4: int read / software prefetch first issue from RS (io.ldin)
  // src5: vec read first issue from RS (TODO)
  // src6: load tries pointer chasing when there is no issued or replayed load (io.l2l_fwd_in)
  // src7: hardware prefetch from prefetcher (low confidence) (io.prefetch_req)
  // priority: high to low
  val s0_rep_stall           = io.ldin.valid && isAfter(io.replay.bits.uop.robIdx, io.ldin.bits.uop.robIdx)
  val s0_super_ld_rep_valid  = io.replay.valid && io.replay.bits.forward_tlDchannel
  val s0_ld_fast_rep_valid   = io.fast_rep_in.valid
  val s0_ld_rep_valid        = io.replay.valid && !io.replay.bits.forward_tlDchannel && !s0_rep_stall
  val s0_high_conf_prf_valid = io.prefetch_req.valid && io.prefetch_req.bits.confidence > 0.U
  val s0_int_iss_valid       = io.ldin.valid // int flow first issue or software prefetch
  val s0_vec_iss_valid       = WireInit(false.B) // TODO
  val s0_l2l_fwd_valid       = io.l2l_fwd_in.valid
  val s0_low_conf_prf_valid  = io.prefetch_req.valid && io.prefetch_req.bits.confidence === 0.U
  dontTouch(s0_super_ld_rep_valid)
  dontTouch(s0_ld_fast_rep_valid)
  dontTouch(s0_ld_rep_valid)
  dontTouch(s0_high_conf_prf_valid)
  dontTouch(s0_int_iss_valid)
  dontTouch(s0_vec_iss_valid)
  dontTouch(s0_l2l_fwd_valid)
  dontTouch(s0_low_conf_prf_valid)

  // load flow source ready
  val s0_super_ld_rep_ready  = WireInit(true.B)
  val s0_ld_fast_rep_ready   = !s0_super_ld_rep_valid
  val s0_ld_rep_ready        = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid
  val s0_high_conf_prf_ready = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid &&
                               !s0_ld_rep_valid

  val s0_int_iss_ready       = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid &&
                               !s0_ld_rep_valid &&
                               !s0_high_conf_prf_valid

  val s0_vec_iss_ready       = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid &&
                               !s0_ld_rep_valid &&
                               !s0_high_conf_prf_valid &&
                               !s0_int_iss_valid

  val s0_l2l_fwd_ready       = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid &&
                               !s0_ld_rep_valid &&
                               !s0_high_conf_prf_valid &&
                               !s0_int_iss_valid &&
                               !s0_vec_iss_valid

  val s0_low_conf_prf_ready  = !s0_super_ld_rep_valid &&
                               !s0_ld_fast_rep_valid &&
                               !s0_ld_rep_valid &&
                               !s0_high_conf_prf_valid &&
                               !s0_int_iss_valid &&
                               !s0_vec_iss_valid &&
                               !s0_l2l_fwd_valid
  dontTouch(s0_super_ld_rep_ready)
  dontTouch(s0_ld_fast_rep_ready)
  dontTouch(s0_ld_rep_ready)
  dontTouch(s0_high_conf_prf_ready)
  dontTouch(s0_int_iss_ready)
  dontTouch(s0_vec_iss_ready)
  dontTouch(s0_l2l_fwd_ready)
  dontTouch(s0_low_conf_prf_ready)

  // load flow source select (OH)
  val s0_super_ld_rep_select = s0_super_ld_rep_valid && s0_super_ld_rep_ready
  val s0_ld_fast_rep_select  = s0_ld_fast_rep_valid && s0_ld_fast_rep_ready
  val s0_ld_rep_select       = s0_ld_rep_valid && s0_ld_rep_ready
  val s0_hw_prf_select       = s0_high_conf_prf_ready && s0_high_conf_prf_valid ||
                               s0_low_conf_prf_ready && s0_low_conf_prf_valid
  val s0_int_iss_select      = s0_int_iss_ready && s0_int_iss_valid
  val s0_vec_iss_select      = s0_vec_iss_ready && s0_vec_iss_valid
  val s0_l2l_fwd_select      = s0_l2l_fwd_ready && s0_l2l_fwd_valid
  assert(!s0_vec_iss_select) // to be added
  dontTouch(s0_super_ld_rep_select)
  dontTouch(s0_ld_fast_rep_select)
  dontTouch(s0_ld_rep_select)
  dontTouch(s0_hw_prf_select)
  dontTouch(s0_int_iss_select)
  dontTouch(s0_vec_iss_select)
  dontTouch(s0_l2l_fwd_select)

  s0_valid := (s0_super_ld_rep_valid ||
               s0_ld_fast_rep_valid ||
               s0_ld_rep_valid ||
               s0_high_conf_prf_valid ||
               s0_int_iss_valid ||
               s0_vec_iss_valid ||
               s0_l2l_fwd_valid ||
               s0_low_conf_prf_valid) && io.dcache.req.ready && !s0_kill

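  // Load-to-load pointer chasing: when no other source is selected, the result
  // of an in-flight ld (io.l2l_fwd_in.data) is speculatively used as the base
  // address of the next load. Only the low 6 bits (the cacheline offset) are
  // added here; the full address check happens in s1, which cancels the access
  // on a mismatch (see s1_cancel_ptr_chasing below).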
  // pointer chasing is attempted only when S0 can go (S1 is ready) and the dcache is ready
  val s0_try_ptr_chasing      = s0_l2l_fwd_select
  val s0_do_try_ptr_chasing   = s0_try_ptr_chasing && s0_can_go && io.dcache.req.ready
  val s0_ptr_chasing_vaddr    = io.l2l_fwd_in.data(5, 0) +& io.ld_fast_imm(5, 0)
  val s0_ptr_chasing_canceled = WireInit(false.B)
  s0_kill := s0_ptr_chasing_canceled

  // prefetch related ctrl signal
  io.canAcceptLowConfPrefetch  := s0_low_conf_prf_ready
  io.canAcceptHighConfPrefetch := s0_high_conf_prf_ready

  // query DTLB
  io.tlb.req.valid                   := s0_valid
  io.tlb.req.bits.cmd                := Mux(s0_sel_src.prf,
                                         Mux(s0_sel_src.prf_wr, TlbCmd.write, TlbCmd.read),
                                         TlbCmd.read
                                       )
  io.tlb.req.bits.vaddr              := Mux(s0_hw_prf_select, io.prefetch_req.bits.paddr, s0_sel_src.vaddr)
  io.tlb.req.bits.hyperinst          := s0_sel_src.hlv
  io.tlb.req.bits.hlvx               := s0_sel_src.hlvx
  io.tlb.req.bits.size               := LSUOpType.size(s0_sel_src.uop.ctrl.fuOpType)
  io.tlb.req.bits.kill               := s0_kill
  io.tlb.req.bits.memidx.is_ld       := true.B
  io.tlb.req.bits.memidx.is_st       := false.B
  io.tlb.req.bits.memidx.idx         := s0_sel_src.uop.lqIdx.value
  io.tlb.req.bits.debug.robIdx       := s0_sel_src.uop.robIdx
  io.tlb.req.bits.no_translate       := s0_hw_prf_select  // hw prefetch addr does not need to be translated
  io.tlb.req.bits.debug.pc           := s0_sel_src.uop.cf.pc
  io.tlb.req.bits.debug.isFirstIssue := s0_sel_src.isFirstIssue

  // query DCache
  io.dcache.req.valid             := s0_valid
  io.dcache.req.bits.cmd          := Mux(s0_sel_src.prf_rd,
                                      MemoryOpConstants.M_PFR,
                                      Mux(s0_sel_src.prf_wr, MemoryOpConstants.M_PFW, MemoryOpConstants.M_XRD)
                                    )
  io.dcache.req.bits.vaddr        := s0_sel_src.vaddr
  io.dcache.req.bits.mask         := s0_sel_src.mask
  io.dcache.req.bits.data         := DontCare
  io.dcache.req.bits.isFirstIssue := s0_sel_src.isFirstIssue
  io.dcache.req.bits.instrtype    := Mux(s0_sel_src.prf, DCACHE_PREFETCH_SOURCE.U, LOAD_SOURCE.U)
  io.dcache.req.bits.debug_robIdx := s0_sel_src.uop.robIdx.value
  io.dcache.req.bits.replayCarry  := s0_sel_src.rep_carry
  io.dcache.req.bits.id           := DontCare // TODO: update cache meta
  io.dcache.pf_source             := Mux(s0_hw_prf_select, io.prefetch_req.bits.pf_source.value, L1_HW_PREFETCH_NULL)
  io.dcache.req.bits.lqIdx        := s0_sel_src.uop.lqIdx
  // load flow priority mux
  def fromNullSource(): FlowSource = {
    val out = WireInit(0.U.asTypeOf(new FlowSource))
    out
  }

  def fromFastReplaySource(src: LqWriteBundle): FlowSource = {
    val out = WireInit(0.U.asTypeOf(new FlowSource))
    out.vaddr         := src.vaddr
    out.mask          := src.mask
    out.uop           := src.uop
    out.try_l2l       := false.B
    out.has_rob_entry := src.hasROBEntry
    out.rep_carry     := src.rep_info.rep_carry
    out.mshrid        := src.rep_info.mshr_id
    out.rsIdx         := src.rsIdx
    out.isFirstIssue  := false.B
    out.fast_rep      := true.B
    out.ld_rep        := src.isLoadReplay
    out.l2l_fwd       := false.B
    out.prf           := LSUOpType.isPrefetch(src.uop.ctrl.fuOpType)
    out.prf_rd        := src.uop.ctrl.fuOpType === LSUOpType.prefetch_r
    out.prf_wr        := src.uop.ctrl.fuOpType === LSUOpType.prefetch_w
    out.sched_idx     := src.schedIndex
    out.hlv           := LSUOpType.isHlv(src.uop.ctrl.fuOpType)
    out.hlvx          := LSUOpType.isHlvx(src.uop.ctrl.fuOpType)
    out
  }

  def fromNormalReplaySource(src: LsPipelineBundle): FlowSource = {
    val out = WireInit(0.U.asTypeOf(new FlowSource))
    out.vaddr         := src.vaddr
    out.mask          := genVWmask(src.vaddr, src.uop.ctrl.fuOpType(1, 0))
    out.uop           := src.uop
    out.try_l2l       := false.B
    out.has_rob_entry := true.B
    out.rsIdx         := src.rsIdx
    out.rep_carry     := src.replayCarry
    out.mshrid        := src.mshrid
    out.isFirstIssue  := false.B
    out.fast_rep      := false.B
    out.ld_rep        := true.B
    out.l2l_fwd       := false.B
    out.prf           := LSUOpType.isPrefetch(src.uop.ctrl.fuOpType)
    out.prf_rd        := src.uop.ctrl.fuOpType === LSUOpType.prefetch_r
    out.prf_wr        := src.uop.ctrl.fuOpType === LSUOpType.prefetch_w
    out.sched_idx     := src.schedIndex
    out.hlv           := LSUOpType.isHlv(src.uop.ctrl.fuOpType)
    out.hlvx          := LSUOpType.isHlvx(src.uop.ctrl.fuOpType)
    out
  }

  def fromPrefetchSource(src: L1PrefetchReq): FlowSource = {
    val out = WireInit(0.U.asTypeOf(new FlowSource))
    out.vaddr         := src.getVaddr()
    out.mask          := 0.U
    out.uop           := DontCare
    out.try_l2l       := false.B
    out.has_rob_entry := false.B
    out.rsIdx         := 0.U
    out.rep_carry     := 0.U.asTypeOf(out.rep_carry.cloneType)
    out.mshrid        := 0.U
    out.isFirstIssue  := false.B
    out.fast_rep      := false.B
    out.ld_rep        := false.B
    out.l2l_fwd       := false.B
    out.prf           := true.B
    out.prf_rd        := !src.is_store
    out.prf_wr        := src.is_store
    out.sched_idx     := 0.U
    out.hlv           := false.B
    out.hlvx          := false.B
    out
  }

  def fromIntIssueSource(src: ExuInput): FlowSource = {
    val out = WireInit(0.U.asTypeOf(new FlowSource))
    out.vaddr         := src.src(0) + SignExt(src.uop.ctrl.imm(11, 0), VAddrBits)
    out.mask          := genVWmask(out.vaddr, src.uop.ctrl.fuOpType(1,0))
    out.uop           := src.uop
    out.try_l2l       := false.B
    out.has_rob_entry := true.B
    out.rsIdx         := io.rsIdx
    out.rep_carry     := 0.U.asTypeOf(out.rep_carry.cloneType)
    out.mshrid        := 0.U
    out.isFirstIssue  := true.B
    out.fast_rep      := false.B
    out.ld_rep        := false.B
    out.l2l_fwd       := false.B
    out.prf           := LSUOpType.isPrefetch(src.uop.ctrl.fuOpType)
    out.prf_rd        := src.uop.ctrl.fuOpType === LSUOpType.prefetch_r
    out.prf_wr        := src.uop.ctrl.fuOpType === LSUOpType.prefetch_w
    out.sched_idx     := 0.U
    out.hlv           := LSUOpType.isHlv(src.uop.ctrl.fuOpType)
    out.hlvx          := LSUOpType.isHlvx(src.uop.ctrl.fuOpType)
    out
  }

  def fromVecIssueSource(): FlowSource = {
    val out = WireInit(0.U.asTypeOf(new FlowSource))
    out.vaddr         := 0.U
    out.mask          := 0.U
    out.uop           := 0.U.asTypeOf(new MicroOp)
    out.try_l2l       := false.B
    out.has_rob_entry := false.B
    out.rsIdx         := 0.U
    out.rep_carry     := 0.U.asTypeOf(out.rep_carry.cloneType)
    out.mshrid        := 0.U
    out.isFirstIssue  := false.B
    out.fast_rep      := false.B
    out.ld_rep        := false.B
    out.l2l_fwd       := false.B
    out.prf           := false.B
    out.prf_rd        := false.B
    out.prf_wr        := false.B
    out.sched_idx     := 0.U
    out.hlv           := false.B
    out.hlvx          := false.B
    out
  }

  def fromLoadToLoadSource(src: LoadToLoadIO): FlowSource = {
    val out = WireInit(0.U.asTypeOf(new FlowSource))
    out.vaddr              := Cat(src.data(XLEN-1, 6), s0_ptr_chasing_vaddr(5,0))
    out.mask               := genVWmask(0.U, LSUOpType.ld)
    // When there's no valid instruction from RS and LSQ, we try the load-to-load forwarding.
    // Assume the pointer chasing is always ld.
    out.uop.ctrl.fuOpType  := LSUOpType.ld
    out.try_l2l            := true.B
    // we don't care about s0_isFirstIssue, s0_rsIdx and s0_sqIdx in S0 when trying pointer chasing,
    // because these signals will be updated in S1
    out.has_rob_entry      := false.B
    out.rsIdx              := 0.U
    out.mshrid             := 0.U
    out.rep_carry          := 0.U.asTypeOf(out.rep_carry.cloneType)
    out.isFirstIssue       := true.B
    out.fast_rep           := false.B
    out.ld_rep             := false.B
    out.l2l_fwd            := true.B
    out.prf                := false.B
    out.prf_rd             := false.B
    out.prf_wr             := false.B
    out.sched_idx          := 0.U
    out.hlv                := LSUOpType.isHlv(out.uop.ctrl.fuOpType)
    out.hlvx               := LSUOpType.isHlvx(out.uop.ctrl.fuOpType)
    out
  }

  // set default
  val s0_src_selector = Seq(
    s0_super_ld_rep_select,
    s0_ld_fast_rep_select,
    s0_ld_rep_select,
    s0_hw_prf_select,
    s0_int_iss_select,
    s0_vec_iss_select,
    (if (EnableLoadToLoadForward) s0_l2l_fwd_select else true.B)
  )
  val s0_src_format = Seq(
    fromNormalReplaySource(io.replay.bits),
    fromFastReplaySource(io.fast_rep_in.bits),
    fromNormalReplaySource(io.replay.bits),
    fromPrefetchSource(io.prefetch_req.bits),
    fromIntIssueSource(io.ldin.bits),
    fromVecIssueSource(),
    (if (EnableLoadToLoadForward) fromLoadToLoadSource(io.l2l_fwd_in) else fromNullSource())
  )
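  // ParallelPriorityMux picks the first selected entry, so the order of the two
  // sequences above encodes the source priority (super replay highest, l2l
  // lowest); the last selector defaults to true.B so the mux always has a
  // valid leg.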
  s0_sel_src := ParallelPriorityMux(s0_src_selector, s0_src_format)

  // address align check
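  // fuOpType(1, 0) encodes log2 of the access size (b/h/w/d), so an access is
  // aligned when the corresponding number of low vaddr bits are zero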
  val s0_addr_aligned = LookupTree(s0_sel_src.uop.ctrl.fuOpType(1, 0), List(
    "b00".U   -> true.B,                   //b
    "b01".U   -> (s0_sel_src.vaddr(0)    === 0.U), //h
    "b10".U   -> (s0_sel_src.vaddr(1, 0) === 0.U), //w
    "b11".U   -> (s0_sel_src.vaddr(2, 0) === 0.U)  //d
  ))

  // accept load flow if dcache ready (tlb is always ready)
  // TODO: prefetch need writeback to loadQueueFlag
  s0_out               := DontCare
  s0_out.rsIdx         := s0_sel_src.rsIdx
  s0_out.vaddr         := s0_sel_src.vaddr
  s0_out.mask          := s0_sel_src.mask
  s0_out.uop           := s0_sel_src.uop
  s0_out.isFirstIssue  := s0_sel_src.isFirstIssue
  s0_out.hasROBEntry   := s0_sel_src.has_rob_entry
  s0_out.isPrefetch    := s0_sel_src.prf
  s0_out.isHWPrefetch  := s0_hw_prf_select
  s0_out.isFastReplay  := s0_sel_src.fast_rep
  s0_out.isLoadReplay  := s0_sel_src.ld_rep
  s0_out.isFastPath    := s0_sel_src.l2l_fwd
  s0_out.mshrid        := s0_sel_src.mshrid
  s0_out.uop.cf.exceptionVec(loadAddrMisaligned) := !s0_addr_aligned
  s0_out.forward_tlDchannel := s0_super_ld_rep_select
  when(io.tlb.req.valid && s0_sel_src.isFirstIssue) {
    s0_out.uop.debugInfo.tlbFirstReqTime := GTimer()
  }.otherwise{
    s0_out.uop.debugInfo.tlbFirstReqTime := s0_sel_src.uop.debugInfo.tlbFirstReqTime
  }
  s0_out.schedIndex     := s0_sel_src.sched_idx

  // load fast replay
  io.fast_rep_in.ready := (s0_can_go && io.dcache.req.ready && s0_ld_fast_rep_ready)

  // load flow source ready
  // cache missed load has highest priority
  // always accept cache missed load flow from load replay queue
  io.replay.ready := (s0_can_go && io.dcache.req.ready && (s0_ld_rep_ready && !s0_rep_stall || s0_super_ld_rep_select))

  // accept load flow from rs when:
  // 1) there is no lsq-replayed load
  // 2) there is no fast replayed load
  // 3) there is no high confidence prefetch request
  io.ldin.ready := (s0_can_go && io.dcache.req.ready && s0_int_iss_ready)

  // for hw prefetch load flow feedback, to be added later
  // io.prefetch_in.ready := s0_hw_prf_select

  // dcache replacement extra info
  // TODO: should prefetch load update replacement?
  io.dcache.replacementUpdated := Mux(s0_ld_rep_select || s0_super_ld_rep_select, io.replay.bits.replacementUpdated, false.B)

  XSDebug(io.dcache.req.fire,
    p"[DCACHE LOAD REQ] pc ${Hexadecimal(s0_sel_src.uop.cf.pc)}, vaddr ${Hexadecimal(s0_sel_src.vaddr)}\n"
  )
  XSDebug(s0_valid,
    p"S0: pc ${Hexadecimal(s0_out.uop.cf.pc)}, lId ${Hexadecimal(s0_out.uop.lqIdx.asUInt)}, " +
    p"vaddr ${Hexadecimal(s0_out.vaddr)}, mask ${Hexadecimal(s0_out.mask)}\n")

  // Pipeline
  // --------------------------------------------------------------------------------
  // stage 1
  // --------------------------------------------------------------------------------
  // TLB resp (send paddr to dcache)
  val s1_valid      = RegInit(false.B)
  val s1_in         = Wire(new LqWriteBundle)
  val s1_out        = Wire(new LqWriteBundle)
  val s1_kill       = Wire(Bool())
  val s1_can_go     = s2_ready
  val s1_fire       = s1_valid && !s1_kill && s1_can_go

  s1_ready := !s1_valid || s1_kill || s2_ready
  when (s0_fire) { s1_valid := true.B }
  .elsewhen (s1_fire) { s1_valid := false.B }
  .elsewhen (s1_kill) { s1_valid := false.B }
  s1_in   := RegEnable(s0_out, s0_fire)

  val s1_fast_rep_dly_kill = RegNext(io.fast_rep_in.bits.lateKill) && s1_in.isFastReplay
  val s1_fast_rep_dly_err  = RegNext(io.fast_rep_in.bits.delayedLoadError) && s1_in.isFastReplay
  val s1_l2l_fwd_dly_err   = RegNext(io.l2l_fwd_in.dly_ld_err) && s1_in.isFastPath
  val s1_dly_err           = s1_fast_rep_dly_err || s1_l2l_fwd_dly_err
  val s1_vaddr_hi         = Wire(UInt())
  val s1_vaddr_lo         = Wire(UInt())
  val s1_vaddr            = Wire(UInt())
  val s1_paddr_dup_lsu    = Wire(UInt())
  val s1_gpaddr_dup_lsu   = Wire(UInt())
  val s1_paddr_dup_dcache = Wire(UInt())
  val s1_exception        = ExceptionNO.selectByFu(s1_out.uop.cf.exceptionVec, lduCfg).asUInt.orR   // af & pf exception were modified below.
  val s1_tlb_miss         = io.tlb.resp.bits.miss
  val s1_prf              = s1_in.isPrefetch
  val s1_hw_prf           = s1_in.isHWPrefetch
  val s1_sw_prf           = s1_prf && !s1_hw_prf
  val s1_tlb_memidx       = io.tlb.resp.bits.memidx

  s1_vaddr_hi         := s1_in.vaddr(VAddrBits - 1, 6)
  s1_vaddr_lo         := s1_in.vaddr(5, 0)
  s1_vaddr            := Cat(s1_vaddr_hi, s1_vaddr_lo)
  s1_paddr_dup_lsu    := io.tlb.resp.bits.paddr(0)
  s1_paddr_dup_dcache := io.tlb.resp.bits.paddr(1)
  s1_gpaddr_dup_lsu   := io.tlb.resp.bits.gpaddr(0)

  when (s1_tlb_memidx.is_ld && io.tlb.resp.valid && !s1_tlb_miss && s1_tlb_memidx.idx === s1_in.uop.lqIdx.value) {
    // printf("load idx = %d\n", s1_tlb_memidx.idx)
    s1_out.uop.debugInfo.tlbRespTime := GTimer()
  }

  io.tlb.req_kill   := s1_kill || s1_dly_err
  io.tlb.resp.ready := true.B

  io.dcache.s1_paddr_dup_lsu    <> s1_paddr_dup_lsu
  io.dcache.s1_paddr_dup_dcache <> s1_paddr_dup_dcache
  io.dcache.s1_kill             := s1_kill || s1_dly_err || s1_tlb_miss || s1_exception

  // store to load forwarding
  io.sbuffer.valid := s1_valid && !(s1_exception || s1_tlb_miss || s1_kill || s1_dly_err || s1_prf)
  io.sbuffer.vaddr := s1_vaddr
  io.sbuffer.paddr := s1_paddr_dup_lsu
  io.sbuffer.gpaddr:= s1_gpaddr_dup_lsu
  io.sbuffer.uop   := s1_in.uop
  io.sbuffer.sqIdx := s1_in.uop.sqIdx
  io.sbuffer.mask  := s1_in.mask
  io.sbuffer.pc    := s1_in.uop.cf.pc // FIXME: remove it

  io.lsq.forward.valid     := s1_valid && !(s1_exception || s1_tlb_miss || s1_kill || s1_dly_err || s1_prf)
  io.lsq.forward.vaddr     := s1_vaddr
  io.lsq.forward.paddr     := s1_paddr_dup_lsu
  io.lsq.forward.gpaddr    := s1_gpaddr_dup_lsu
  io.lsq.forward.uop       := s1_in.uop
  io.lsq.forward.sqIdx     := s1_in.uop.sqIdx
  io.lsq.forward.sqIdxMask := 0.U
  io.lsq.forward.mask      := s1_in.mask
  io.lsq.forward.pc        := s1_in.uop.cf.pc // FIXME: remove it

  // st-ld violation query
  val s1_nuke = VecInit((0 until StorePipelineWidth).map(w => {
                       io.stld_nuke_query(w).valid && // query valid
                       isAfter(s1_in.uop.robIdx, io.stld_nuke_query(w).bits.robIdx) && // older store
                       // TODO: Fix me when vector instruction
                       (s1_paddr_dup_lsu(PAddrBits-1, 3) === io.stld_nuke_query(w).bits.paddr(PAddrBits-1, 3)) && // paddr match
                       (s1_in.mask & io.stld_nuke_query(w).bits.mask).orR // data mask contain
                      })).asUInt.orR && !s1_tlb_miss

  s1_out                   := s1_in
  s1_out.vaddr             := s1_vaddr
  s1_out.paddr             := s1_paddr_dup_lsu
  s1_out.gpaddr            := s1_gpaddr_dup_lsu
  s1_out.tlbMiss           := s1_tlb_miss
  s1_out.ptwBack           := io.tlb.resp.bits.ptwBack
  s1_out.rsIdx             := s1_in.rsIdx
  s1_out.rep_info.debug    := s1_in.uop.debugInfo
  s1_out.rep_info.nuke     := s1_nuke && !s1_sw_prf
  s1_out.delayedLoadError  := s1_dly_err

  when (!s1_dly_err) {
    // the current ori test will cause the ldest == 0 case; the code below will be modified in the future.
    // af & pf exception were modified
    s1_out.uop.cf.exceptionVec(loadPageFault)   := io.tlb.resp.bits.excp(0).pf.ld && !s1_tlb_miss
    s1_out.uop.cf.exceptionVec(loadGuestPageFault)   := io.tlb.resp.bits.excp(0).gpf.ld && !s1_tlb_miss
    s1_out.uop.cf.exceptionVec(loadAccessFault) := io.tlb.resp.bits.excp(0).af.ld && !s1_tlb_miss
  } .otherwise {
    s1_out.uop.cf.exceptionVec(loadPageFault)      := false.B
    s1_out.uop.cf.exceptionVec(loadGuestPageFault) := false.B
    s1_out.uop.cf.exceptionVec(loadAddrMisaligned) := false.B
    s1_out.uop.cf.exceptionVec(loadAccessFault)    := s1_dly_err
  }

  // pointer chasing
  val s1_try_ptr_chasing       = RegNext(s0_do_try_ptr_chasing, false.B)
  val s1_ptr_chasing_vaddr     = RegEnable(s0_ptr_chasing_vaddr, s0_do_try_ptr_chasing)
  val s1_fu_op_type_not_ld     = WireInit(false.B)
  val s1_not_fast_match        = WireInit(false.B)
  val s1_addr_mismatch         = WireInit(false.B)
  val s1_addr_misaligned       = WireInit(false.B)
  val s1_fast_mismatch         = WireInit(false.B)
  val s1_ptr_chasing_canceled  = WireInit(false.B)
  val s1_cancel_ptr_chasing    = WireInit(false.B)

  s1_kill := s1_fast_rep_dly_kill ||
             s1_cancel_ptr_chasing ||
             s1_in.uop.robIdx.needFlush(io.redirect) ||
            (s1_in.uop.robIdx.needFlush(RegNext(io.redirect)) && !RegNext(s0_try_ptr_chasing)) ||
             RegEnable(s0_kill, false.B, io.ldin.valid || io.replay.valid || io.l2l_fwd_in.valid || io.fast_rep_in.valid)

  if (EnableLoadToLoadForward) {
    // Sometimes, we need to cancel the load-load forwarding.
    // These can be put at S0 if timing is bad at S1.
    // Case 0: CACHE_SET(base + offset) != CACHE_SET(base) (lowest 6-bit addition has an overflow)
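    // (s0_ptr_chasing_vaddr was computed with +&, so its bit 6 is the carry out
    // of the 6-bit line-offset addition; a set carry, or any nonzero imm(11, 6),
    // means the speculative access indexed the wrong cache set)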
    s1_addr_mismatch     := s1_ptr_chasing_vaddr(6) ||
                             RegEnable(io.ld_fast_imm(11, 6).orR, s0_do_try_ptr_chasing)
    // Case 1: the address is not 64-bit aligned or the fuOpType is not LD
    s1_addr_misaligned := s1_ptr_chasing_vaddr(2, 0).orR
    s1_fu_op_type_not_ld := io.ldin.bits.uop.ctrl.fuOpType =/= LSUOpType.ld
    // Case 2: this load-load uop is cancelled
    s1_ptr_chasing_canceled := !io.ldin.valid
    // Case 3: fast mismatch
    s1_fast_mismatch := RegEnable(!io.ld_fast_match, s0_do_try_ptr_chasing)

    when (s1_try_ptr_chasing) {
      s1_cancel_ptr_chasing := s1_addr_mismatch ||
                               s1_addr_misaligned ||
                               s1_fu_op_type_not_ld ||
                               s1_ptr_chasing_canceled ||
                               s1_fast_mismatch

      s1_in.uop           := io.ldin.bits.uop
      s1_in.rsIdx         := io.rsIdx
      s1_in.isFirstIssue  := io.isFirstIssue
      s1_vaddr_lo         := s1_ptr_chasing_vaddr(5, 0)
      s1_paddr_dup_lsu    := Cat(io.tlb.resp.bits.paddr(0)(PAddrBits - 1, 6), s1_vaddr_lo)
      s1_paddr_dup_dcache := Cat(io.tlb.resp.bits.paddr(0)(PAddrBits - 1, 6), s1_vaddr_lo)

      // record tlb time when the data is obtained, to keep the latency calculation correct (it should not really be recorded here, because this path does not use the tlb)
      s1_in.uop.debugInfo.tlbFirstReqTime := GTimer()
      s1_in.uop.debugInfo.tlbRespTime     := GTimer()
    }
    when (!s1_cancel_ptr_chasing) {
      s0_ptr_chasing_canceled := s1_try_ptr_chasing && !io.replay.fire && !io.fast_rep_in.fire
      when (s1_try_ptr_chasing) {
        io.ldin.ready := true.B
      }
    }
  }

  // pre-calculate sqIdx mask in s0, then send it to lsq in s1 for forwarding
  val s1_sqIdx_mask = RegNext(UIntToMask(s0_out.uop.sqIdx.value, StoreQueueSize))
  // to enable load-load, sqIdxMask must be calculated based on ldin.uop
  // If the timing here is not OK, load-load forwarding has to be disabled.
  // Or we calculate sqIdxMask at RS??
  io.lsq.forward.sqIdxMask := s1_sqIdx_mask
  if (EnableLoadToLoadForward) {
    when (s1_try_ptr_chasing) {
      io.lsq.forward.sqIdxMask := UIntToMask(io.ldin.bits.uop.sqIdx.value, StoreQueueSize)
    }
  }

  io.forward_mshr.valid  := s1_valid && s1_out.forward_tlDchannel
  io.forward_mshr.mshrid := s1_out.mshrid
  io.forward_mshr.paddr  := s1_out.paddr

  XSDebug(s1_valid,
    p"S1: pc ${Hexadecimal(s1_out.uop.cf.pc)}, lId ${Hexadecimal(s1_out.uop.lqIdx.asUInt)}, tlb_miss ${io.tlb.resp.bits.miss}, " +
    p"paddr ${Hexadecimal(s1_out.paddr)}, mmio ${s1_out.mmio}\n")

  // Pipeline
  // --------------------------------------------------------------------------------
  // stage 2
  // --------------------------------------------------------------------------------
  // s2: DCache resp
  val s2_valid  = RegInit(false.B)
  val s2_in     = Wire(new LqWriteBundle)
  val s2_out    = Wire(new LqWriteBundle)
  val s2_kill   = Wire(Bool())
  val s2_can_go = s3_ready
  val s2_fire   = s2_valid && !s2_kill && s2_can_go

  s2_kill := s2_in.uop.robIdx.needFlush(io.redirect)
  s2_ready := !s2_valid || s2_kill || s3_ready
  when (s1_fire) { s2_valid := true.B }
  .elsewhen (s2_fire) { s2_valid := false.B }
  .elsewhen (s2_kill) { s2_valid := false.B }
  s2_in := RegEnable(s1_out, s1_fire)

  val s2_pmp = WireInit(io.pmp)

  val s2_prf    = s2_in.isPrefetch
  val s2_hw_prf = s2_in.isHWPrefetch

  // exceptions that may cause the load addr to be invalid / illegal;
  // if such an exception happens, the inst and its exception info
  // will be force-writebacked to the rob
  val s2_exception_vec = WireInit(s2_in.uop.cf.exceptionVec)
  when (!s2_in.delayedLoadError) {
    s2_exception_vec(loadAccessFault) := s2_in.uop.cf.exceptionVec(loadAccessFault) || s2_pmp.ld ||
                                       (io.dcache.resp.bits.tag_error && RegNext(io.csrCtrl.cache_error_enable))
  }

  // soft prefetch will not trigger any exception (but ecc error interrupt may
  // be triggered)
  when (!s2_in.delayedLoadError && (s2_prf || s2_in.tlbMiss)) {
    s2_exception_vec := 0.U.asTypeOf(s2_exception_vec.cloneType)
  }
  val s2_exception = ExceptionNO.selectByFu(s2_exception_vec, lduCfg).asUInt.orR

  val (s2_fwd_frm_d_chan, s2_fwd_data_frm_d_chan) = io.tl_d_channel.forward(s1_valid && s1_out.forward_tlDchannel, s1_out.mshrid, s1_out.paddr)
  val (s2_fwd_data_valid, s2_fwd_frm_mshr, s2_fwd_data_frm_mshr) = io.forward_mshr.forward()
  val s2_fwd_frm_d_chan_or_mshr = s2_fwd_data_valid && (s2_fwd_frm_d_chan || s2_fwd_frm_mshr)

  // writeback access fault caused by ecc error / bus error
  // * ecc data error is slow to generate, so we will not use it until load stage 3
  // * in load stage 3, an extra signal (io.s3_dly_ld_err) is used to report it
  val s2_actually_mmio = s2_pmp.mmio
  val s2_mmio          = !s2_prf &&
                          s2_actually_mmio &&
                         !s2_exception &&
                         !s2_in.tlbMiss

  val s2_full_fwd      = Wire(Bool())
  val s2_mem_amb       = s2_in.uop.cf.storeSetHit &&
                         io.lsq.forward.addrInvalid

  val s2_tlb_miss      = s2_in.tlbMiss
  val s2_fwd_fail      = io.lsq.forward.dataInvalid
  val s2_dcache_miss   = io.dcache.resp.bits.miss &&
                         !s2_fwd_frm_d_chan_or_mshr &&
                         !s2_full_fwd

  val s2_mq_nack       = io.dcache.s2_mq_nack &&
                         !s2_fwd_frm_d_chan_or_mshr &&
                         !s2_full_fwd

  val s2_bank_conflict = io.dcache.s2_bank_conflict &&
                         !s2_fwd_frm_d_chan_or_mshr &&
                         !s2_full_fwd

  val s2_wpu_pred_fail = io.dcache.s2_wpu_pred_fail &&
                         !s2_fwd_frm_d_chan_or_mshr &&
                         !s2_full_fwd

  val s2_rar_nack      = io.lsq.ldld_nuke_query.req.valid &&
                         !io.lsq.ldld_nuke_query.req.ready

  val s2_raw_nack      = io.lsq.stld_nuke_query.req.valid &&
                         !io.lsq.stld_nuke_query.req.ready
  // st-ld violation query
  //  fast recovery is needed when
  //  1. the fast recovery query request is valid,
  //  2. the load instruction is younger than the requestor (store instruction),
  //  3. the physical addresses match, and
  //  4. the data masks overlap.
  val s2_nuke          = VecInit((0 until StorePipelineWidth).map(w => {
                          io.stld_nuke_query(w).valid && // query valid
                          isAfter(s2_in.uop.robIdx, io.stld_nuke_query(w).bits.robIdx) && // older store
                          // TODO: Fix me when vector instruction
                          (s2_in.paddr(PAddrBits-1, 3) === io.stld_nuke_query(w).bits.paddr(PAddrBits-1, 3)) && // paddr match
                          (s2_in.mask & io.stld_nuke_query(w).bits.mask).orR // data mask contain
                        })).asUInt.orR && !s2_tlb_miss || s2_in.rep_info.nuke

  val s2_cache_handled   = io.dcache.resp.bits.handled
  val s2_cache_tag_error = RegNext(io.csrCtrl.cache_error_enable) &&
                           io.dcache.resp.bits.tag_error

  val s2_troublem        = !s2_exception &&
                           !s2_mmio &&
                           !s2_prf &&
                           !s2_in.delayedLoadError

  io.dcache.resp.ready  := true.B
  val s2_dcache_should_resp = !(s2_in.tlbMiss || s2_exception || s2_in.delayedLoadError || s2_mmio || s2_prf)
  assert(!(s2_valid && (s2_dcache_should_resp && !io.dcache.resp.valid)), "DCache response got lost")

  // fast replay request
  val s2_dcache_fast_rep = (s2_mq_nack || !s2_dcache_miss && (s2_bank_conflict || s2_wpu_pred_fail))
  val s2_nuke_fast_rep   = !s2_mq_nack &&
                           !s2_dcache_miss &&
                           !s2_bank_conflict &&
                           !s2_wpu_pred_fail &&
                           !s2_rar_nack &&
                           !s2_raw_nack &&
                           s2_nuke

  val s2_fast_rep = !s2_mem_amb &&
                    !s2_tlb_miss &&
                    !s2_fwd_fail &&
                    (s2_dcache_fast_rep || s2_nuke_fast_rep) &&
                    s2_troublem

  // need to allocate a new entry
  val s2_can_query = !s2_mem_amb &&
                     !s2_tlb_miss &&
                     !s2_fwd_fail &&
                     s2_troublem

  val s2_data_fwded = s2_dcache_miss && (s2_full_fwd || s2_cache_tag_error)

  // ld-ld violation query request
  io.lsq.ldld_nuke_query.req.valid           := s2_valid && s2_can_query
  io.lsq.ldld_nuke_query.req.bits.uop        := s2_in.uop
  io.lsq.ldld_nuke_query.req.bits.mask       := s2_in.mask
  io.lsq.ldld_nuke_query.req.bits.paddr      := s2_in.paddr
  io.lsq.ldld_nuke_query.req.bits.data_valid := Mux(s2_full_fwd || s2_fwd_data_valid, true.B, !s2_dcache_miss)

  // st-ld violation query request
  io.lsq.stld_nuke_query.req.valid           := s2_valid && s2_can_query
  io.lsq.stld_nuke_query.req.bits.uop        := s2_in.uop
  io.lsq.stld_nuke_query.req.bits.mask       := s2_in.mask
  io.lsq.stld_nuke_query.req.bits.paddr      := s2_in.paddr
  io.lsq.stld_nuke_query.req.bits.data_valid := Mux(s2_full_fwd || s2_fwd_data_valid, true.B, !s2_dcache_miss)

  // merge forward result
  // lsq has higher priority than sbuffer
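  // a load is fully forwarded (s2_full_fwd) when every byte required by its
  // mask is covered by a forwarded byte from lsq or sbuffer; such a load can
  // complete even on a dcache miss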
  val s2_fwd_mask = Wire(Vec((VLEN/8), Bool()))
  val s2_fwd_data = Wire(Vec((VLEN/8), UInt(8.W)))
  s2_full_fwd := ((~s2_fwd_mask.asUInt).asUInt & s2_in.mask) === 0.U && !io.lsq.forward.dataInvalid
  // generate VLEN/8 Muxes
  for (i <- 0 until VLEN / 8) {
    s2_fwd_mask(i) := io.lsq.forward.forwardMask(i) || io.sbuffer.forwardMask(i)
    s2_fwd_data(i) := Mux(io.lsq.forward.forwardMask(i), io.lsq.forward.forwardData(i), io.sbuffer.forwardData(i))
  }

  XSDebug(s2_fire, "[FWD LOAD RESP] pc %x fwd %x(%b) + %x(%b)\n",
    s2_in.uop.cf.pc,
    io.lsq.forward.forwardData.asUInt, io.lsq.forward.forwardMask.asUInt,
    s2_in.forwardData.asUInt, s2_in.forwardMask.asUInt
  )

  //
  s2_out                     := s2_in
  s2_out.data                := 0.U // data will be generated in load s3
  s2_out.uop.ctrl.fpWen      := s2_in.uop.ctrl.fpWen && !s2_exception
  s2_out.mmio                := s2_mmio
  s2_out.uop.ctrl.flushPipe  := false.B
  s2_out.uop.cf.exceptionVec := s2_exception_vec
  s2_out.forwardMask         := s2_fwd_mask
  s2_out.forwardData         := s2_fwd_data
  s2_out.handledByMSHR       := s2_cache_handled
  s2_out.miss                := s2_dcache_miss && s2_troublem
  s2_out.feedbacked          := io.feedback_fast.valid

  // Generate replay signal caused by:
  // * st-ld violation check
  // * tlb miss
  // * dcache replay
  // * forward data invalid
  // * dcache miss
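  // each cause below is qualified by s2_troublem; when the load is written
  // back to the replay queue in s3, only the highest-priority cause is kept
  // (see s3_sel_rep_cause below)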
  s2_out.rep_info.mem_amb         := s2_mem_amb && s2_troublem
  s2_out.rep_info.tlb_miss        := s2_tlb_miss && s2_troublem
  s2_out.rep_info.fwd_fail        := s2_fwd_fail && s2_troublem
  s2_out.rep_info.dcache_rep      := s2_mq_nack && s2_troublem
  s2_out.rep_info.dcache_miss     := s2_dcache_miss && s2_troublem
  s2_out.rep_info.bank_conflict   := s2_bank_conflict && s2_troublem
  s2_out.rep_info.wpu_fail        := s2_wpu_pred_fail && s2_troublem
  s2_out.rep_info.rar_nack        := s2_rar_nack && s2_troublem
  s2_out.rep_info.raw_nack        := s2_raw_nack && s2_troublem
  s2_out.rep_info.nuke            := s2_nuke && s2_troublem
  s2_out.rep_info.full_fwd        := s2_data_fwded
  s2_out.rep_info.data_inv_sq_idx := io.lsq.forward.dataInvalidSqIdx
  s2_out.rep_info.addr_inv_sq_idx := io.lsq.forward.addrInvalidSqIdx
  s2_out.rep_info.rep_carry       := io.dcache.resp.bits.replayCarry
  s2_out.rep_info.mshr_id         := io.dcache.resp.bits.mshr_id
  s2_out.rep_info.last_beat       := s2_in.paddr(log2Up(refillBytes))
  s2_out.rep_info.debug           := s2_in.uop.debugInfo
  s2_out.rep_info.tlb_id          := io.tlb_hint.id
  s2_out.rep_info.tlb_full        := io.tlb_hint.full

  // if forward fail, replay this inst from fetch
  val debug_fwd_fail_rep = s2_fwd_fail && !s2_troublem && !s2_in.tlbMiss
  // if ld-ld violation is detected, replay this inst from fetch
  val debug_ldld_nuke_rep = false.B // s2_ldld_violation && !s2_mmio && !s2_is_prefetch && !s2_in.tlbMiss
  // io.out.bits.uop.ctrl.replayInst := false.B

  // to be removed
  io.feedback_fast.valid                 := false.B
  io.feedback_fast.bits.hit              := false.B
  io.feedback_fast.bits.flushState       := s2_in.ptwBack
  io.feedback_fast.bits.rsIdx            := s2_in.rsIdx
  io.feedback_fast.bits.sourceType       := RSFeedbackType.lrqFull
  io.feedback_fast.bits.dataInvalidSqIdx := DontCare

  // fast wakeup
  io.fast_uop.valid := RegNext(
    !io.dcache.s1_disable_fast_wakeup &&
    s1_valid &&
    !s1_kill &&
    !io.tlb.resp.bits.miss &&
    !io.lsq.forward.dataInvalidFast
  ) && (s2_valid && !s2_out.rep_info.need_rep && !s2_mmio)
  io.fast_uop.bits := RegNext(s1_out.uop)

  //
  io.s2_ptr_chasing                    := RegEnable(s1_try_ptr_chasing && !s1_cancel_ptr_chasing, false.B, s1_fire)

  // RegNext prefetch train for better timing
  // ** Now, prefetch train is valid at load s3 **
  io.prefetch_train.valid              := RegNext(s2_valid && !s2_actually_mmio && !s2_in.tlbMiss)
  io.prefetch_train.bits.fromLsPipelineBundle(s2_in, latch = true)
  io.prefetch_train.bits.miss          := RegNext(io.dcache.resp.bits.miss) // TODO: use trace with bank conflict?
  io.prefetch_train.bits.meta_prefetch := RegNext(io.dcache.resp.bits.meta_prefetch)
  io.prefetch_train.bits.meta_access   := RegNext(io.dcache.resp.bits.meta_access)

  io.prefetch_train_l1.valid              := RegNext(s2_valid && !s2_actually_mmio)
  io.prefetch_train_l1.bits.fromLsPipelineBundle(s2_in, latch = true)
  io.prefetch_train_l1.bits.miss          := RegNext(io.dcache.resp.bits.miss)
  io.prefetch_train_l1.bits.meta_prefetch := RegNext(io.dcache.resp.bits.meta_prefetch)
  io.prefetch_train_l1.bits.meta_access   := RegNext(io.dcache.resp.bits.meta_access)
  if (env.FPGAPlatform){
    io.dcache.s0_pc := DontCare
    io.dcache.s1_pc := DontCare
    io.dcache.s2_pc := DontCare
  }else{
    io.dcache.s0_pc := s0_out.uop.cf.pc
    io.dcache.s1_pc := s1_out.uop.cf.pc
    io.dcache.s2_pc := s2_out.uop.cf.pc
  }
  io.dcache.s2_kill := s2_pmp.ld || s2_actually_mmio || s2_kill

  val s1_ld_left_fire = s1_valid && !s1_kill && s2_ready
  val s2_ld_valid_dup = RegInit(0.U(6.W))
  s2_ld_valid_dup := 0x0.U(6.W)
  when (s1_ld_left_fire && !s1_out.isHWPrefetch) { s2_ld_valid_dup := 0x3f.U(6.W) }
  when (s1_kill || s1_out.isHWPrefetch) { s2_ld_valid_dup := 0x0.U(6.W) }
  assert(RegNext((s2_valid === s2_ld_valid_dup(0)) || RegNext(s1_out.isHWPrefetch)))

  // Pipeline
  // --------------------------------------------------------------------------------
  // stage 3
  // --------------------------------------------------------------------------------
  // writeback and update load queue
  val s3_valid        = RegNext(s2_valid && !s2_out.isHWPrefetch && !s2_out.uop.robIdx.needFlush(io.redirect))
  val s3_in           = RegEnable(s2_out, s2_fire)
  val s3_out          = Wire(Valid(new ExuOutput))
  val s3_dcache_rep   = RegEnable(s2_dcache_fast_rep && s2_troublem, false.B, s2_fire)
  val s3_ld_valid_dup = RegEnable(s2_ld_valid_dup, s2_fire)
  val s3_fast_rep     = Wire(Bool())
  val s3_troublem     = RegNext(s2_troublem)
  val s3_kill         = s3_in.uop.robIdx.needFlush(io.redirect)
  s3_ready := !s3_valid || s3_kill || io.ldout.ready

  // forward last beat
  val (s3_fwd_frm_d_chan, s3_fwd_data_frm_d_chan) = io.tl_d_channel.forward(s2_valid && s2_out.forward_tlDchannel, s2_out.mshrid, s2_out.paddr)
  val s3_fwd_data_valid = RegEnable(s2_fwd_data_valid, false.B, s2_valid)
  val s3_fwd_frm_d_chan_valid = (s3_fwd_frm_d_chan && s3_fwd_data_valid)

  val s3_fast_rep_canceled = io.replay.valid && io.replay.bits.forward_tlDchannel || !io.dcache.req.ready
  io.lsq.ldin.valid := s3_valid && (!s3_fast_rep || s3_fast_rep_canceled) && !s3_in.feedbacked
  io.lsq.ldin.bits := s3_in
  io.lsq.ldin.bits.miss := s3_in.miss && !s3_fwd_frm_d_chan_valid

  /* <------- DANGEROUS: Don't change sequence here ! -------> */
  io.lsq.ldin.bits.data_wen_dup := s3_ld_valid_dup.asBools
  io.lsq.ldin.bits.replacementUpdated := io.dcache.resp.bits.replacementUpdated
  io.lsq.ldin.bits.missDbUpdated := RegNext(s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss && !s2_in.missDbUpdated)

  val s3_dly_ld_err =
    if (EnableAccurateLoadError) {
      io.dcache.resp.bits.error_delayed && RegNext(io.csrCtrl.cache_error_enable) && s3_troublem
    } else {
      WireInit(false.B)
    }
  io.s3_dly_ld_err := false.B // s3_dly_ld_err && s3_valid
  io.lsq.ldin.bits.dcacheRequireReplay  := s3_dcache_rep
  io.fast_rep_out.bits.delayedLoadError := s3_dly_ld_err

  val s3_vp_match_fail = RegNext(io.lsq.forward.matchInvalid || io.sbuffer.matchInvalid) && s3_troublem
  val s3_rep_frm_fetch = s3_vp_match_fail
  val s3_ldld_rep_inst =
      io.lsq.ldld_nuke_query.resp.valid &&
      io.lsq.ldld_nuke_query.resp.bits.rep_frm_fetch &&
      RegNext(io.csrCtrl.ldld_vio_check_enable)
  val s3_flushPipe = s3_ldld_rep_inst

  val s3_rep_info = WireInit(s3_in.rep_info)
  s3_rep_info.dcache_miss   := s3_in.rep_info.dcache_miss && !s3_fwd_frm_d_chan_valid
  val s3_sel_rep_cause = PriorityEncoderOH(s3_rep_info.cause.asUInt)

  val s3_exception = ExceptionNO.selectByFu(s3_in.uop.cf.exceptionVec, lduCfg).asUInt.orR
  when (s3_exception || s3_dly_ld_err || s3_rep_frm_fetch) {
    io.lsq.ldin.bits.rep_info.cause := 0.U.asTypeOf(s3_rep_info.cause.cloneType)
  } .otherwise {
    io.lsq.ldin.bits.rep_info.cause := VecInit(s3_sel_rep_cause.asBools)
  }

  // Int load, if hit, will be writebacked at s3
  s3_out.valid                := s3_valid && !io.lsq.ldin.bits.rep_info.need_rep && !s3_in.mmio
  s3_out.bits.uop             := s3_in.uop
  s3_out.bits.uop.cf.exceptionVec(loadAccessFault) := s3_dly_ld_err  || s3_in.uop.cf.exceptionVec(loadAccessFault)
  s3_out.bits.uop.ctrl.flushPipe := false.B
  s3_out.bits.uop.ctrl.replayInst := false.B
  s3_out.bits.data            := s3_in.data
  s3_out.bits.redirectValid   := false.B
  s3_out.bits.redirect        := DontCare
  s3_out.bits.debug.isMMIO    := s3_in.mmio
  s3_out.bits.debug.isPerfCnt := false.B
  s3_out.bits.debug.paddr     := s3_in.paddr
  s3_out.bits.debug.vaddr     := s3_in.vaddr
  s3_out.bits.fflags          := DontCare

  io.rollback.valid := s3_valid && (s3_rep_frm_fetch || s3_flushPipe) && !s3_exception
  io.rollback.bits             := DontCare
  io.rollback.bits.isRVC       := s3_out.bits.uop.cf.pd.isRVC
  io.rollback.bits.robIdx      := s3_out.bits.uop.robIdx
  io.rollback.bits.ftqIdx      := s3_out.bits.uop.cf.ftqPtr
  io.rollback.bits.ftqOffset   := s3_out.bits.uop.cf.ftqOffset
  io.rollback.bits.level       := Mux(s3_rep_frm_fetch, RedirectLevel.flush, RedirectLevel.flushAfter)
  io.rollback.bits.cfiUpdate.target := s3_out.bits.uop.cf.pc
  io.rollback.bits.debug_runahead_checkpoint_id := s3_out.bits.uop.debugInfo.runahead_checkpoint_id
  /* <------- DANGEROUS: Don't change sequence here ! -------> */

  io.lsq.ldin.bits.uop := s3_out.bits.uop

  val s3_revoke = s3_exception || io.lsq.ldin.bits.rep_info.need_rep
  io.lsq.ldld_nuke_query.revoke := s3_revoke
  io.lsq.stld_nuke_query.revoke := s3_revoke

  // feedback slow
  s3_fast_rep := RegNext(s2_fast_rep)

  val s3_fb_no_waiting = !s3_in.isLoadReplay &&
                        (!(s3_fast_rep && !s3_fast_rep_canceled)) &&
                        !s3_in.feedbacked

  //
  io.feedback_slow.valid                 := s3_valid && s3_fb_no_waiting
  io.feedback_slow.bits.hit              := !s3_rep_info.need_rep || io.lsq.ldin.ready
  io.feedback_slow.bits.flushState       := s3_in.ptwBack
  io.feedback_slow.bits.rsIdx            := s3_in.rsIdx
  io.feedback_slow.bits.sourceType       := RSFeedbackType.lrqFull
  io.feedback_slow.bits.dataInvalidSqIdx := DontCare

  val s3_ld_wb_meta = Mux(s3_valid, s3_out.bits, io.lsq.uncache.bits)

  // raw data from the load queue (uncache path)
  val s3_ld_raw_data_frm_uncache = io.lsq.ld_raw_data
  val s3_merged_data_frm_uncache = s3_ld_raw_data_frm_uncache.mergedData()
  val s3_picked_data_frm_uncache = LookupTree(s3_ld_raw_data_frm_uncache.addrOffset, List(
    "b000".U -> s3_merged_data_frm_uncache(63,  0),
    "b001".U -> s3_merged_data_frm_uncache(63,  8),
    "b010".U -> s3_merged_data_frm_uncache(63, 16),
    "b011".U -> s3_merged_data_frm_uncache(63, 24),
    "b100".U -> s3_merged_data_frm_uncache(63, 32),
    "b101".U -> s3_merged_data_frm_uncache(63, 40),
    "b110".U -> s3_merged_data_frm_uncache(63, 48),
    "b111".U -> s3_merged_data_frm_uncache(63, 56)
  ))
  val s3_ld_data_frm_uncache = rdataHelper(s3_ld_raw_data_frm_uncache.uop, s3_picked_data_frm_uncache)

  // data from dcache hit
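  // mergedData() combines the dcache response with the D-channel / mshr
  // forwarded data and the store-forwarded bytes; the LookupTree below then
  // shifts the merged line so the addressed byte sits at bit 0, and
  // rdataHelper finally sign-/zero-extends it according to the load's fuOpType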
1147  val s3_ld_raw_data_frm_cache = Wire(new LoadDataFromDcacheBundle)
1148  s3_ld_raw_data_frm_cache.respDcacheData       := io.dcache.resp.bits.data_delayed
1149  s3_ld_raw_data_frm_cache.forwardMask          := RegEnable(s2_fwd_mask, s2_valid)
1150  s3_ld_raw_data_frm_cache.forwardData          := RegEnable(s2_fwd_data, s2_valid)
1151  s3_ld_raw_data_frm_cache.uop                  := RegEnable(s2_out.uop, s2_valid)
1152  s3_ld_raw_data_frm_cache.addrOffset           := RegEnable(s2_out.paddr(3, 0), s2_valid)
1153  s3_ld_raw_data_frm_cache.forward_D            := RegEnable(s2_fwd_frm_d_chan, false.B, s2_valid) || s3_fwd_frm_d_chan_valid
1154  s3_ld_raw_data_frm_cache.forwardData_D        := Mux(s3_fwd_frm_d_chan_valid, s3_fwd_data_frm_d_chan, RegEnable(s2_fwd_data_frm_d_chan, s2_valid))
1155  s3_ld_raw_data_frm_cache.forward_mshr         := RegEnable(s2_fwd_frm_mshr, false.B, s2_valid)
1156  s3_ld_raw_data_frm_cache.forwardData_mshr     := RegEnable(s2_fwd_data_frm_mshr, s2_valid)
1157  s3_ld_raw_data_frm_cache.forward_result_valid := RegEnable(s2_fwd_data_valid, false.B, s2_valid)
1158
1159  val s3_merged_data_frm_cache = s3_ld_raw_data_frm_cache.mergedData()
1160  val s3_picked_data_frm_cache = LookupTree(s3_ld_raw_data_frm_cache.addrOffset, List(
1161    "b0000".U -> s3_merged_data_frm_cache(63,    0),
1162    "b0001".U -> s3_merged_data_frm_cache(63,    8),
1163    "b0010".U -> s3_merged_data_frm_cache(63,   16),
1164    "b0011".U -> s3_merged_data_frm_cache(63,   24),
1165    "b0100".U -> s3_merged_data_frm_cache(63,   32),
1166    "b0101".U -> s3_merged_data_frm_cache(63,   40),
1167    "b0110".U -> s3_merged_data_frm_cache(63,   48),
1168    "b0111".U -> s3_merged_data_frm_cache(63,   56),
1169    "b1000".U -> s3_merged_data_frm_cache(127,  64),
1170    "b1001".U -> s3_merged_data_frm_cache(127,  72),
1171    "b1010".U -> s3_merged_data_frm_cache(127,  80),
1172    "b1011".U -> s3_merged_data_frm_cache(127,  88),
1173    "b1100".U -> s3_merged_data_frm_cache(127,  96),
1174    "b1101".U -> s3_merged_data_frm_cache(127, 104),
1175    "b1110".U -> s3_merged_data_frm_cache(127, 112),
1176    "b1111".U -> s3_merged_data_frm_cache(127, 120)
1177  ))
1178  val s3_ld_data_frm_cache = rdataHelper(s3_ld_raw_data_frm_cache.uop, s3_picked_data_frm_cache)
1179
1180  // FIXME: add 1 cycle delay?
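      // the writeback port is shared between pipeline loads and uncache (mmio)
      // loads; the pipeline side has priority, so uncache data only drains
      // when s3 holds no in-flight load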
1181  io.lsq.uncache.ready := !s3_valid
1182  io.ldout.bits        := s3_ld_wb_meta
1183  io.ldout.bits.data   := Mux(s3_valid, s3_ld_data_frm_cache, s3_ld_data_frm_uncache)
1184  io.ldout.valid       := s3_out.valid || (io.lsq.uncache.valid && !s3_valid)
1185
1186  // s3 load fast replay
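      // loads that can be retried immediately (e.g. after a bank conflict) are
      // sent straight back to s0 through fast_rep_out instead of going through
      // the replay queue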
1187  io.fast_rep_out.valid := s3_valid && s3_fast_rep
1188  io.fast_rep_out.bits := s3_in
1189  io.fast_rep_out.bits.lateKill := s3_rep_frm_fetch
1190
1191
1192  // fast load to load forward
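      // the 64-bit load result is fed back to s0 as a speculative base address
      // (pointer chasing); it must not fire for mmio loads or loads about to
      // be replayed, and dly_ld_err lets a late error cancel the consumer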
1193  if (EnableLoadToLoadForward) {
1194    io.l2l_fwd_out.valid      := s3_valid && !s3_in.mmio && !s3_rep_info.need_rep
1195    io.l2l_fwd_out.data       := Mux(s3_in.vaddr(3), s3_merged_data_frm_cache(127, 64), s3_merged_data_frm_cache(63, 0))
1196    io.l2l_fwd_out.dly_ld_err := s3_dly_ld_err || // ecc delayed error
1197                                 s3_ldld_rep_inst ||
1198                                 s3_rep_frm_fetch
1199  } else {
1200    io.l2l_fwd_out.valid := false.B
1201    io.l2l_fwd_out.data := DontCare
1202    io.l2l_fwd_out.dly_ld_err := DontCare
1203  }
1204
1205
1206  // FIXME: please move this part to LoadQueueReplay
1207  io.debug_ls := DontCare
1208
1209
1210  // Topdown
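      // export per-stage vaddr/paddr tracking to the ROB for top-down
      // performance analysis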
1211  io.lsTopdownInfo.s1.robIdx          := s1_in.uop.robIdx.value
1212  io.lsTopdownInfo.s1.vaddr_valid     := s1_valid && s1_in.hasROBEntry
1213  io.lsTopdownInfo.s1.vaddr_bits      := s1_vaddr
1214  io.lsTopdownInfo.s2.robIdx          := s2_in.uop.robIdx.value
1215  io.lsTopdownInfo.s2.paddr_valid     := s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss
1216  io.lsTopdownInfo.s2.paddr_bits      := s2_in.paddr
1217  io.lsTopdownInfo.s2.first_real_miss := io.dcache.resp.bits.real_miss
1218  io.lsTopdownInfo.s2.cache_miss_en   := s2_fire && s2_in.hasROBEntry && !s2_in.tlbMiss && !s2_in.missDbUpdated
1219
1220  // perf cnt
1221  XSPerfAccumulate("s0_in_valid",                  io.ldin.valid)
1222  XSPerfAccumulate("s0_in_block",                  io.ldin.valid && !io.ldin.fire)
1223  XSPerfAccumulate("s0_in_fire_first_issue",       s0_valid && s0_sel_src.isFirstIssue)
1224  XSPerfAccumulate("s0_lsq_fire_first_issue",      io.replay.fire)
1225  XSPerfAccumulate("s0_ldu_fire_first_issue",      io.ldin.fire && s0_sel_src.isFirstIssue)
1226  XSPerfAccumulate("s0_fast_replay_issue",         io.fast_rep_in.fire)
1227  XSPerfAccumulate("s0_stall_out",                 s0_valid && !s0_can_go)
1228  XSPerfAccumulate("s0_stall_dcache",              s0_valid && !io.dcache.req.ready)
1229  XSPerfAccumulate("s0_addr_spec_success",         s0_fire && s0_sel_src.vaddr(VAddrBits-1, 12) === io.ldin.bits.src(0)(VAddrBits-1, 12))
1230  XSPerfAccumulate("s0_addr_spec_failed",          s0_fire && s0_sel_src.vaddr(VAddrBits-1, 12) =/= io.ldin.bits.src(0)(VAddrBits-1, 12))
1231  XSPerfAccumulate("s0_addr_spec_success_once",    s0_fire && s0_sel_src.vaddr(VAddrBits-1, 12) === io.ldin.bits.src(0)(VAddrBits-1, 12) && s0_sel_src.isFirstIssue)
1232  XSPerfAccumulate("s0_addr_spec_failed_once",     s0_fire && s0_sel_src.vaddr(VAddrBits-1, 12) =/= io.ldin.bits.src(0)(VAddrBits-1, 12) && s0_sel_src.isFirstIssue)
1233  XSPerfAccumulate("s0_forward_tl_d_channel",      s0_out.forward_tlDchannel)
1234  XSPerfAccumulate("s0_hardware_prefetch_fire",    s0_fire && s0_hw_prf_select)
1235  XSPerfAccumulate("s0_software_prefetch_fire",    s0_fire && s0_sel_src.prf && s0_int_iss_select)
1236  XSPerfAccumulate("s0_hardware_prefetch_blocked", io.prefetch_req.valid && !s0_hw_prf_select)
1237  XSPerfAccumulate("s0_hardware_prefetch_total",   io.prefetch_req.valid)
1238
1239  XSPerfAccumulate("s1_in_valid",                  s1_valid)
1240  XSPerfAccumulate("s1_in_fire",                   s1_fire)
1241  XSPerfAccumulate("s1_in_fire_first_issue",       s1_fire && s1_in.isFirstIssue)
1242  XSPerfAccumulate("s1_tlb_miss",                  s1_fire && s1_tlb_miss)
1243  XSPerfAccumulate("s1_tlb_miss_first_issue",      s1_fire && s1_tlb_miss && s1_in.isFirstIssue)
1244  XSPerfAccumulate("s1_stall_out",                 s1_valid && !s1_can_go)
1245  XSPerfAccumulate("s1_dly_err",                   s1_valid && s1_fast_rep_dly_err)
1246
1247  XSPerfAccumulate("s2_in_valid",                  s2_valid)
1248  XSPerfAccumulate("s2_in_fire",                   s2_fire)
1249  XSPerfAccumulate("s2_in_fire_first_issue",       s2_fire && s2_in.isFirstIssue)
1250  XSPerfAccumulate("s2_dcache_miss",               s2_fire && io.dcache.resp.bits.miss)
1251  XSPerfAccumulate("s2_dcache_miss_first_issue",   s2_fire && io.dcache.resp.bits.miss && s2_in.isFirstIssue)
1252  XSPerfAccumulate("s2_dcache_real_miss_first_issue",   s2_fire && io.dcache.resp.bits.miss && s2_in.isFirstIssue)
1253  XSPerfAccumulate("s2_full_forward",              s2_fire && s2_full_fwd)
1254  XSPerfAccumulate("s2_dcache_miss_full_forward",  s2_fire && s2_dcache_miss)
1255  XSPerfAccumulate("s2_fwd_frm_d_can",             s2_valid && s2_fwd_frm_d_chan)
1256  XSPerfAccumulate("s2_fwd_frm_d_chan_or_mshr",    s2_valid && s2_fwd_frm_d_chan_or_mshr)
1257  XSPerfAccumulate("s2_stall_out",                 s2_fire && !s2_can_go)
1258  XSPerfAccumulate("s2_prefetch",                  s2_fire && s2_prf)
1259  XSPerfAccumulate("s2_prefetch_ignored",          s2_fire && s2_prf && s2_mq_nack) // ignore prefetch for mshr full / miss req port conflict
1260  XSPerfAccumulate("s2_prefetch_miss",             s2_fire && s2_prf && io.dcache.resp.bits.miss) // prefetch req miss in l1
1261  XSPerfAccumulate("s2_prefetch_hit",              s2_fire && s2_prf && !io.dcache.resp.bits.miss) // prefetch req hit in l1
1262  XSPerfAccumulate("s2_prefetch_accept",           s2_fire && s2_prf && io.dcache.resp.bits.miss && !s2_mq_nack) // prefetch a missed line in l1, and l1 accepted it
1263  XSPerfAccumulate("s2_forward_req",               s2_fire && s2_in.forward_tlDchannel)
1264  XSPerfAccumulate("s2_successfully_forward_channel_D", s2_fire && s2_fwd_frm_d_chan && s2_fwd_data_valid)
1265  XSPerfAccumulate("s2_successfully_forward_mshr",      s2_fire && s2_fwd_frm_mshr && s2_fwd_data_valid)
1266
1267  XSPerfAccumulate("s3_fwd_frm_d_chan",            s3_valid && s3_fwd_frm_d_chan_valid)
1268
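      // the counters below decode the pointer-chasing cancel reason with a
      // fixed priority: cancelled > wakeup mismatch > op type > address
      // alignment > set mismatch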
1269  XSPerfAccumulate("load_to_load_forward",                      s1_try_ptr_chasing && !s1_ptr_chasing_canceled)
1270  XSPerfAccumulate("load_to_load_forward_try",                  s1_try_ptr_chasing)
1271  XSPerfAccumulate("load_to_load_forward_fail",                 s1_cancel_ptr_chasing)
1272  XSPerfAccumulate("load_to_load_forward_fail_cancelled",       s1_cancel_ptr_chasing && s1_ptr_chasing_canceled)
1273  XSPerfAccumulate("load_to_load_forward_fail_wakeup_mismatch", s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && s1_not_fast_match)
1274  XSPerfAccumulate("load_to_load_forward_fail_op_not_ld",       s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && s1_fu_op_type_not_ld)
1275  XSPerfAccumulate("load_to_load_forward_fail_addr_align",      s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && !s1_fu_op_type_not_ld && s1_addr_misaligned)
1276  XSPerfAccumulate("load_to_load_forward_fail_set_mismatch",    s1_cancel_ptr_chasing && !s1_ptr_chasing_canceled && !s1_not_fast_match && !s1_fu_op_type_not_ld && !s1_addr_misaligned && s1_addr_mismatch)
1277
1278  // FIXME(lyq): some signals in perfEvents are no longer suitable for the current MemBlock design
1279  // hardware performance counter
1280  val perfEvents = Seq(
1281    ("load_s0_in_fire         ", s0_fire                                                        ),
1282    ("load_to_load_forward    ", s1_fire && s1_try_ptr_chasing && !s1_ptr_chasing_canceled      ),
1283    ("stall_dcache            ", s0_valid && s0_can_go && !io.dcache.req.ready                  ),
1284    ("load_s1_in_fire         ", s0_fire                                                        ),
1285    ("load_s1_tlb_miss        ", s1_fire && io.tlb.resp.bits.miss                               ),
1286    ("load_s2_in_fire         ", s1_fire                                                        ),
1287    ("load_s2_dcache_miss     ", s2_fire && io.dcache.resp.bits.miss                            ),
1288  )
1289  generatePerfEvent()
1290
1291  when(io.ldout.fire) {
1292    XSDebug("ldout %x\n", io.ldout.bits.uop.cf.pc)
1293  }
1294  // end
1295}