xref: /XiangShan/src/main/scala/xiangshan/cache/dcache/storepipe/StorePipe.scala (revision 44f2941b36bd01d0dea9e5e076949f6438c0014d)
/***************************************************************************************
  * Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
  * Copyright (c) 2020-2021 Peng Cheng Laboratory
  *
  * XiangShan is licensed under Mulan PSL v2.
  * You can use this software according to the terms and conditions of the Mulan PSL v2.
  * You may obtain a copy of Mulan PSL v2 at:
  *          http://license.coscl.org.cn/MulanPSL2
  *
  * THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
  * EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
  * MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
  *
  * See the Mulan PSL v2 for more details.
  ***************************************************************************************/

package xiangshan.cache

import org.chipsalliance.cde.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.tilelink.ClientMetadata
import utility.{XSDebug, XSPerfAccumulate, HasPerfEvents}
import xiangshan.L1CacheErrorInfo

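// Store request sent by the STA pipeline to the DCache store pipe in stage 0:
// `cmd` carries the memory op used for the permission check, `vaddr` provides the
// set index for the meta/tag reads, and `instrtype` marks the request source
// (e.g. a normal store vs. a hardware store prefetch).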
class DcacheStoreRequestIO(implicit p: Parameters) extends DCacheBundle {
  val cmd = UInt(M_SZ.W)
  val vaddr = UInt(VAddrBits.W)
  val instrtype   = UInt(sourceTypeWidth.W)
}

class DCacheStoreIO(implicit p: Parameters) extends DCacheBundle {
  // Paddr in STA s1, used for hit check
  val s1_paddr = Output(UInt(PAddrBits.W))
  // TLB miss or Exception in STA s1, kill Dcache req
  val s1_kill = Output(Bool())
  // Access Fault or MMIO in STA s2, kill Dcache req
  val s2_kill = Output(Bool())
  // Debug PC
  val s2_pc = Output(UInt(VAddrBits.W))

  val req = DecoupledIO(new DcacheStoreRequestIO)
  val resp = Flipped(DecoupledIO(new DCacheBundle() {
    // this store misses (for now, not used)
    val miss = Bool()
    // this store needs replay (for now, not used)
    val replay = Bool()
    // tag error TODO: add logic
    val tag_error = Bool()
  }))
}
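// A minimal, illustrative sketch of how an STA pipeline might drive DCacheStoreIO.
// The STA-side signal names (s0_vaddr, s1_tlb_miss, s2_mmio, ...) are hypothetical,
// not taken from this file; a write command is used so the s1 hit check tests for
// write permission:
//
//   val dcache = IO(new DCacheStoreIO)
//   // s0: issue the request when the store address uop enters the pipe
//   dcache.req.valid          := s0_valid
//   dcache.req.bits.cmd       := MemoryOpConstants.M_XWR
//   dcache.req.bits.vaddr     := s0_vaddr
//   dcache.req.bits.instrtype := STORE_SOURCE.U
//   // s1: forward the translated paddr for the hit check; kill on TLB miss / exception
//   dcache.s1_paddr := s1_paddr
//   dcache.s1_kill  := s1_tlb_miss || s1_exception
//   // s2: kill on access fault / MMIO, pass the PC for debugging
//   dcache.s2_kill := s2_exception || s2_mmio
//   dcache.s2_pc   := s2_pc
//   dcache.resp.ready := true.B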
/** Non-Blocking Store Dcache Pipeline
  *
  *  Associated with the STA pipeline
  *  Issue a store write prefetch to dcache if miss (if EnableStorePrefetchAtIssue)
  *  Issue a prefetch train request to sms if miss (if EnableStorePrefetchSMS)
  *  Receive prefetch requests, issue a store write prefetch to dcache if miss (if EnableStorePrefetchAtCommit or EnableStorePrefetchSPB)
  */
class StorePipe(id: Int)(implicit p: Parameters) extends DCacheModule {
  val io = IO(new DCacheBundle {
    // incoming requests
    val lsu = Flipped(new DCacheStoreIO)

    // meta and tag array read ports
    val meta_read = DecoupledIO(new MetaReadReq)
    val meta_resp = Input(Vec(nWays, new Meta))
    // TODO extra_meta_resp: error; prefetch; access (prefetch hit?)
    // val extra_meta_resp = Input(Vec(nWays, new DCacheExtraMeta))

    val tag_read = DecoupledIO(new TagReadReq)
    val tag_resp = Input(Vec(nWays, UInt(encTagBits.W)))

    // send miss request to dcache miss queue
    val miss_req = DecoupledIO(new MissReq)

    // update the state vec of the replacement algorithm; for now, tied to false
    val replace_access = ValidIO(new ReplacementAccessBundle)
    // find the way to be replaced
    val replace_way = new ReplacementWayReqIO

    // ECC error
    val error = Output(ValidIO(new L1CacheErrorInfo))
  })

  // TODO: error
  io.error := 0.U.asTypeOf(ValidIO(new L1CacheErrorInfo))

/** S0:
  *   send tag and meta read req
  */
  val s0_valid = io.lsu.req.valid
  val s0_req = io.lsu.req.bits
  val s0_fire = io.lsu.req.fire

  io.meta_read.valid        := s0_valid
  io.meta_read.bits.idx     := get_idx(io.lsu.req.bits.vaddr)
  io.meta_read.bits.way_en  := ~0.U(nWays.W)

  io.tag_read.valid         := s0_valid
  io.tag_read.bits.idx      := get_idx(io.lsu.req.bits.vaddr)
  io.tag_read.bits.way_en   := ~0.U(nWays.W)

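  // back-pressure: a store request is accepted only when both the meta and tag
  // read ports can take a read in the same cycle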
  io.lsu.req.ready := io.meta_read.ready && io.tag_read.ready

  XSPerfAccumulate("s0_valid", io.lsu.req.valid)
  XSPerfAccumulate("s0_valid_not_ready", io.lsu.req.valid && !io.lsu.req.ready)


/** S1:
  * get tag and meta read resp
  * determine hit or miss
  */
  def wayMap[T <: Data](f: Int => T) = VecInit((0 until nWays).map(f))

  val s1_valid = RegNext(s0_fire)
  val s1_req = RegEnable(s0_req, s0_fire)

  val s1_meta_resp = io.meta_resp
  val s1_tag_resp  = io.tag_resp.map(tag => tag(tagBits - 1, 0))

  val s1_paddr = io.lsu.s1_paddr

  /**
    * get hit meta
    */
  val s1_tag_match = Wire(UInt(nWays.W))
  s1_tag_match := wayMap((wayid: Int) => {s1_tag_resp(wayid) === get_tag(s1_paddr) && s1_meta_resp(wayid).coh.isValid()}).asUInt
  val s1_fake_meta = Wire(new Meta)
  s1_fake_meta.coh := ClientMetadata.onReset
  val s1_hit_meta = Mux(s1_tag_match.orR, Mux1H(s1_tag_match, wayMap((wayid: Int) => {s1_meta_resp(wayid)})), s1_fake_meta)
  val s1_hit_coh = s1_hit_meta.coh

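  // a store hits only if some way matches the tag with a valid coherence state and
  // onAccess grants permission without requiring a coherence upgrade (i.e. the line
  // is already writable for this cmd); otherwise a miss is reported in s2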
  val (s1_has_permission, _, s1_new_hit_coh) = s1_hit_coh.onAccess(s1_req.cmd)
  val s1_hit = s1_has_permission && s1_new_hit_coh === s1_hit_coh && s1_tag_match.orR

  /**
    * Don't choose a replace_way anymore
    */
  io.replace_way.set.valid := false.B
  io.replace_way.set.bits  := get_idx(s1_req.vaddr)
  io.replace_way.dmWay     := get_direct_map_way(s1_req.vaddr)

  val s1_need_replacement = !s1_tag_match.orR

/** S2:
  * miss: send a write hint to DCache
  * hit : update the replacement algorithm so the hit line stays longer
  */
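  // a request killed by the LSU in s1 (e.g. TLB miss or exception) is squashed here,
  // so it neither responds to the LSU nor reaches the miss queue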
  val s2_valid = RegNext(s1_valid) && RegNext(!io.lsu.s1_kill)
  val s2_req = RegEnable(s1_req, s1_valid)

  val s2_hit = RegEnable(s1_hit, s1_valid)
  val s2_paddr = RegEnable(s1_paddr, s1_valid)
  val s2_hit_coh = RegEnable(s1_hit_coh, s1_valid)
  val s2_is_prefetch = RegEnable(s1_req.instrtype === DCACHE_PREFETCH_SOURCE.U, s1_valid)

  io.lsu.resp.valid := s2_valid
  io.lsu.resp.bits.miss := !s2_hit
  io.lsu.resp.bits.replay := false.B
  // TODO: consider tag error
  io.lsu.resp.bits.tag_error := false.B


  /**
    * send req to Dcache MissQueue
    */
  if (EnableStorePrefetchAtIssue) {
    // all miss stores, whether prefetched or normal, send requests directly to mshr
    io.miss_req.valid := s2_valid && !s2_hit
  } else {
    // only prefetched miss stores will send requests directly to mshr
    io.miss_req.valid := s2_valid && !s2_hit && s2_is_prefetch
  }
  io.miss_req.bits := DontCare
  // only send out a prefetch write to Dcache
  io.miss_req.bits.source := DCACHE_PREFETCH_SOURCE.U
  io.miss_req.bits.pf_source := L1_HW_PREFETCH_STORE
  io.miss_req.bits.cmd := MemoryOpConstants.M_PFW
  io.miss_req.bits.addr := get_block_addr(s2_paddr)
  io.miss_req.bits.vaddr := s2_req.vaddr
  io.miss_req.bits.req_coh := s2_hit_coh
  // TODO: consider tag error
  io.miss_req.bits.cancel := io.lsu.s2_kill
  io.miss_req.bits.pc := io.lsu.s2_pc
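  // note: the miss request is encoded as a prefetch write (M_PFW), i.e. the miss queue
  // is asked to acquire the block with write permission ahead of time so that the store
  // write which follows is more likely to hit; an s2 kill from the LSU cancels it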

  /**
    * update replacer, for now, disable this
    */
  io.replace_access.valid := false.B
  io.replace_access.bits  := DontCare

  XSPerfAccumulate("store_fire", s2_valid && !io.lsu.s2_kill)
  XSPerfAccumulate("sta_hit",  s2_valid &&  s2_hit && !io.lsu.s2_kill)
  XSPerfAccumulate("sta_miss", s2_valid && !s2_hit && !io.lsu.s2_kill)
  XSPerfAccumulate("store_miss_prefetch_fire", io.miss_req.fire && !io.miss_req.bits.cancel)
  XSPerfAccumulate("store_miss_prefetch_not_fire", io.miss_req.valid && !io.miss_req.ready && !io.miss_req.bits.cancel)
}
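
// A hedged sketch of how StorePipe instances might be created and wired in a DCache
// wrapper, one per store (STA) pipeline. The surrounding names (StorePipelineWidth,
// metaArray, tagArray, missQueue, io.lsu.sta) are illustrative, not taken from this file:
//
//   val stu = Seq.tabulate(StorePipelineWidth)(i => Module(new StorePipe(i)))
//   stu.zipWithIndex.foreach { case (pipe, i) =>
//     pipe.io.lsu <> io.lsu.sta(i)                 // req / kill / resp from the STA pipeline
//     metaArray.io.read(i) <> pipe.io.meta_read    // per-pipe meta read port
//     pipe.io.meta_resp := metaArray.io.resp(i)
//     tagArray.io.read(i) <> pipe.io.tag_read      // per-pipe tag read port
//     pipe.io.tag_resp := tagArray.io.resp(i)
//     missQueue.io.req(i) <> pipe.io.miss_req      // store write-prefetch on miss
//   }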