/***************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
*          http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
***************************************************************************************/

package xiangshan.frontend.icache

import chipsalliance.rocketchip.config.Parameters
import chisel3._
import chisel3.util._
import freechips.rocketchip.tilelink.ClientStates
import xiangshan._
import xiangshan.cache.mmu._
import utils._
import xiangshan.backend.fu.{PMPReqBundle, PMPRespBundle}

class ICacheMainPipeReq(implicit p: Parameters) extends ICacheBundle
{
  val vaddr  = UInt(VAddrBits.W)
  def vsetIdx = get_idx(vaddr)
}

class ICacheMainPipeResp(implicit p: Parameters) extends ICacheBundle
{
  val vaddr    = UInt(VAddrBits.W)
  val readData = UInt(blockBits.W)
  val paddr    = UInt(PAddrBits.W)
  val tlbExcp  = new Bundle{
    val pageFault   = Bool()
    val accessFault = Bool()
    val mmio        = Bool()
  }
}

class ICacheMainPipeBundle(implicit p: Parameters) extends ICacheBundle
{
  val req  = Flipped(DecoupledIO(new ICacheMainPipeReq))
  val resp = ValidIO(new ICacheMainPipeResp)
}

class ICacheMetaReqBundle(implicit p: Parameters) extends ICacheBundle{
  val toIMeta   = Decoupled(new ICacheReadBundle)
  val fromIMeta = Input(new ICacheMetaRespBundle)
}

class ICacheDataReqBundle(implicit p: Parameters) extends ICacheBundle{
  val toIData   = Decoupled(new ICacheReadBundle)
  val fromIData = Input(new ICacheDataRespBundle)
}

class ICacheMSHRBundle(implicit p: Parameters) extends ICacheBundle{
  val toMSHR   = Decoupled(new ICacheMissReq)
  val fromMSHR = Flipped(ValidIO(new ICacheMissResp))
}

class ICachePMPBundle(implicit p: Parameters) extends ICacheBundle{
  val req  = Valid(new PMPReqBundle())
  val resp = Input(new PMPRespBundle())
}

class ICachePerfInfo(implicit p: Parameters) extends ICacheBundle{
  val only_0_hit    = Bool()
  val only_0_miss   = Bool()
  val hit_0_hit_1   = Bool()
  val hit_0_miss_1  = Bool()
  val miss_0_hit_1  = Bool()
  val miss_0_miss_1 = Bool()
  val bank_hit      = Vec(2, Bool())
  val hit           = Bool()
}

class ICacheMainPipeInterface(implicit p: Parameters) extends ICacheBundle {
  /* internal interface */
  val metaArray = new ICacheMetaReqBundle
  val dataArray = new ICacheDataReqBundle
  val mshr      = Vec(PortNumber, new ICacheMSHRBundle)
  /* outside interface */
  val fetch     = Vec(PortNumber, new ICacheMainPipeBundle)
  val pmp       = Vec(PortNumber, new ICachePMPBundle)
  val itlb      = Vec(PortNumber, new BlockTlbRequestIO)
  val respStall = Input(Bool())
//  val toReleaseUnit = Vec(2, Decoupled(new ReleaseReq))
//  val victimInfor = new Bundle() {
//    val s1 = Vec(2, Output(new ICacheVictimInfor()))
//    val s2 = Vec(2, Output(new ICacheVictimInfor()))
//  }
//  val setInfor = new Bundle(){
//    val s1 = Vec(2, Output(new ICacheSetInfor()))
//    val s2 = Vec(2, Output(new ICacheSetInfor()))
//  }

  val perfInfo = Output(new ICachePerfInfo)
}

class ICacheMainPipe(implicit p: Parameters) extends ICacheModule
{
  val io = IO(new ICacheMainPipeInterface)

  val (fromIFU, toIFU)   = (io.fetch.map(_.req), io.fetch.map(_.resp))
  val (toMeta, toData, metaResp, dataResp) = (io.metaArray.toIMeta, io.dataArray.toIData, io.metaArray.fromIMeta, io.dataArray.fromIData)
  val (toMSHR, fromMSHR) = (io.mshr.map(_.toMSHR), io.mshr.map(_.fromMSHR))
  val (toITLB, fromITLB) = (io.itlb.map(_.req), io.itlb.map(_.resp))
  val (toPMP,  fromPMP)  = (io.pmp.map(_.req), io.pmp.map(_.resp))

  val s0_ready, s1_ready, s2_ready = WireInit(false.B)
  val s0_fire,  s1_fire,  s2_fire  = WireInit(false.B)

  // Stage 0
  val s0_valid       = fromIFU.map(_.valid).reduce(_||_)
  val s0_req_vaddr   = VecInit(fromIFU.map(_.bits.vaddr))
  val s0_req_vsetIdx = VecInit(fromIFU.map(_.bits.vsetIdx))
  val s0_only_first  = fromIFU(0).valid && !fromIFU(1).valid
  val s0_double_line = fromIFU(0).valid && fromIFU(1).valid

  s0_fire := s0_valid && s1_ready

  // fetch: send addr to Meta/TLB and Data simultaneously
  val fetch_req = List(toMeta, toData)
  for(i <- 0 until 2) {
    fetch_req(i).valid             := s0_valid
    fetch_req(i).bits.isDoubleLine := s0_double_line
    fetch_req(i).bits.vSetIdx      := s0_req_vsetIdx
  }
  //TODO: fix GTimer() condition
  fromIFU.map(_.ready := fetch_req(0).ready && fetch_req(1).ready && s1_ready && GTimer() > 500.U)


//  XSPerfAccumulate("ifu_bubble_ftq_not_valid",  !f0_valid )
//  XSPerfAccumulate("ifu_bubble_pipe_stall",     f0_valid && fetch_req(0).ready && fetch_req(1).ready && !s1_ready )
//  XSPerfAccumulate("ifu_bubble_sram_0_busy",    f0_valid && !fetch_req(0).ready )
//  XSPerfAccumulate("ifu_bubble_sram_1_busy",    f0_valid && !fetch_req(1).ready )

  //---------------------------------------------
  //  Fetch Stage 1 :
  //  * Send req to ITLB and get TLB response (physical address)
  //  * ICache response (meta and data)
  //  * Hit check (generate hit signal and hit vector)
  //  * Get victim way
  //---------------------------------------------

  //TODO: handle fetch exceptions

  val tlbRespAllValid = WireInit(false.B)

  val s1_valid = generatePipeControl(lastFire = s0_fire, thisFire = s1_fire, thisFlush = false.B, lastFlush = false.B)

  val s1_req_vaddr   = RegEnable(next = s0_req_vaddr,   enable = s0_fire)
  val s1_req_vsetIdx = RegEnable(next = s0_req_vsetIdx, enable = s0_fire)
  val s1_only_first  = RegEnable(next = s0_only_first,  enable = s0_fire)
  val s1_double_line = RegEnable(next = s0_double_line, enable = s0_fire)

  s1_ready := s2_ready && tlbRespAllValid || !s1_valid
  s1_fire  := s1_valid && tlbRespAllValid && s2_ready

  toITLB(0).valid         := s1_valid
  toITLB(0).bits.size     := 3.U // TODO: fix the size
  toITLB(0).bits.vaddr    := s1_req_vaddr(0)
  toITLB(0).bits.debug.pc := s1_req_vaddr(0)

  toITLB(1).valid         := s1_valid && s1_double_line
  toITLB(1).bits.size     := 3.U // TODO: fix the size
  toITLB(1).bits.vaddr    := s1_req_vaddr(1)
  toITLB(1).bits.debug.pc := s1_req_vaddr(1)

  toITLB.map{port =>
    port.bits.cmd                := TlbCmd.exec
    port.bits.robIdx             := DontCare
    port.bits.debug.isFirstIssue := DontCare
  }

  fromITLB.map(_.ready := true.B)
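
  // ITLB response unpacking (s1): per-port physical address, TLB-miss indication, and
  // instruction page-fault / access-fault exception bits, each qualified by the response
  // valid bit so that a not-yet-valid response cannot raise a spurious exception.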
  val (tlbRespValid, tlbRespPAddr) = (fromITLB.map(_.valid), VecInit(fromITLB.map(_.bits.paddr)))
  val (tlbRespMiss) = fromITLB.map(port => port.bits.miss && port.valid)
  val (tlbExcpPF, tlbExcpAF) = (fromITLB.map(port => port.bits.excp.pf.instr && port.valid),
                                fromITLB.map(port => port.bits.excp.af.instr && port.valid))

  tlbRespAllValid := tlbRespValid(0) && (tlbRespValid(1) || !s1_double_line)

  val s1_req_paddr = tlbRespPAddr
  val s1_req_ptags = VecInit(s1_req_paddr.map(get_phy_tag(_)))

  val s1_meta_ptags     = ResultHoldBypass(data = metaResp.tags,  valid = RegNext(s0_fire))
  val s1_meta_cohs      = ResultHoldBypass(data = metaResp.cohs,  valid = RegNext(s0_fire))
  val s1_data_cacheline = ResultHoldBypass(data = dataResp.datas, valid = RegNext(s0_fire))

  val s1_tag_eq_vec    = VecInit((0 until PortNumber).map( p => VecInit((0 until nWays).map( w => s1_meta_ptags(p)(w) === s1_req_ptags(p) ))))
  val s1_tag_match_vec = VecInit((0 until PortNumber).map( k => VecInit(s1_tag_eq_vec(k).zipWithIndex.map{ case(way_tag_eq, w) => way_tag_eq && s1_meta_cohs(k)(w).isValid()})))
  val s1_tag_match     = VecInit(s1_tag_match_vec.map(vector => ParallelOR(vector)))

  val s1_port_hit  = VecInit(Seq( s1_tag_match(0) && s1_valid && !tlbExcpPF(0) && !tlbExcpAF(0),  s1_tag_match(1) && s1_valid && s1_double_line && !tlbExcpPF(1) && !tlbExcpAF(1) ))
  val s1_bank_miss = VecInit(Seq(!s1_tag_match(0) && s1_valid && !tlbExcpPF(0) && !tlbExcpAF(0), !s1_tag_match(1) && s1_valid && s1_double_line && !tlbExcpPF(1) && !tlbExcpAF(1) ))
  val s1_hit       = (s1_port_hit(0) && s1_port_hit(1)) || (!s1_double_line && s1_port_hit(0))

  /** choose victim cacheline */
  val replacers    = Seq.fill(PortNumber)(ReplacementPolicy.fromString(cacheParams.replacer, nWays, nSets/PortNumber))
  val s1_victim_oh = ResultHoldBypass(data = VecInit(replacers.zipWithIndex.map{case (replacer, i) => UIntToOH(replacer.way(s1_req_vsetIdx(i)))}), valid = RegNext(s0_fire))

  val s1_victim_coh = VecInit(s1_victim_oh.zipWithIndex.map {case(oh, port) => Mux1H(oh, s1_meta_cohs(port))})
//  val s1_victim_tag   = VecInit(s1_victim_oh.zipWithIndex.map {case(oh, port) => Mux1H(oh, s1_meta_ptags(port))})
//  val s1_victim_data  = VecInit(s1_victim_oh.zipWithIndex.map {case(oh, port) => Mux1H(oh, s1_data_cacheline(port))})
//  val s1_need_replace = VecInit(s1_victim_coh.zipWithIndex.map{case(coh, port) => coh.isValid() && s1_bank_miss(port)})
//
//  (0 until PortNumber).map{ i =>
//    io.victimInfor.s1(i).valid := s1_valid && s1_need_replace(i)
//    io.victimInfor.s1(i).ptag  := s1_victim_tag(i)
//    io.victimInfor.s1(i).vidx  := get_idx(s1_req_vaddr(i))
//  }

//  (0 until PortNumber).map{ i =>
//    io.setInfor.s1(i).valid := s1_bank_miss(i)
//    io.setInfor.s1(i).vidx  := s1_req_vsetIdx(i)
//  }

  assert(PopCount(s1_tag_match_vec(0)) <= 1.U && PopCount(s1_tag_match_vec(1)) <= 1.U, "Multiple hit in main pipe")

  val touch_sets = Seq.fill(2)(Wire(Vec(2, UInt(log2Ceil(nSets/2).W))))
  val touch_ways = Seq.fill(2)(Wire(Vec(2, Valid(UInt(log2Ceil(nWays).W)))) )

  ((replacers zip touch_sets) zip touch_ways).map{case ((r, s), w) => r.access(s, w)}

  val s1_hit_data = VecInit(s1_data_cacheline.zipWithIndex.map { case(bank, i) =>
    val port_hit_data = Mux1H(s1_tag_match_vec(i).asUInt, bank)
    port_hit_data
  })
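
  // Per-way distribution counters: for each port, record which way hit and which way the
  // replacer would have victimized on a miss.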
  (0 until nWays).map{ w =>
    XSPerfAccumulate("line_0_hit_way_" + Integer.toString(w, 10),  s1_fire && s1_port_hit(0) && OHToUInt(s1_tag_match_vec(0)) === w.U)
  }

  (0 until nWays).map{ w =>
    XSPerfAccumulate("line_0_victim_way_" + Integer.toString(w, 10),  s1_fire && !s1_port_hit(0) && OHToUInt(s1_victim_oh(0)) === w.U)
  }

  (0 until nWays).map{ w =>
    XSPerfAccumulate("line_1_hit_way_" + Integer.toString(w, 10),  s1_fire && s1_double_line && s1_port_hit(1) && OHToUInt(s1_tag_match_vec(1)) === w.U)
  }

  (0 until nWays).map{ w =>
    XSPerfAccumulate("line_1_victim_way_" + Integer.toString(w, 10),  s1_fire && s1_double_line && !s1_port_hit(1) && OHToUInt(s1_victim_oh(1)) === w.U)
  }

  XSPerfAccumulate("ifu_bubble_s1_tlb_miss",    s1_valid && !tlbRespAllValid )

  //---------------------------------------------
  //  Fetch Stage 2 :
  //  * get data from last stage (hit from s1_hit_data / miss from missQueue response)
  //  * if at least one needed cacheline misses, wait for miss queue response (a wait_state machine) THIS IS TOO UGLY!!!
  //  * cut cacheline(s) and send to PreDecode
  //  * check if prediction is right (branch target and type, jump direction and type, jal target)
  //---------------------------------------------
  val s2_fetch_finish = Wire(Bool())

  val s2_valid          = generatePipeControl(lastFire = s1_fire, thisFire = s2_fire, thisFlush = false.B, lastFlush = false.B)
  val s2_miss_available = Wire(Bool())

  s2_ready := (s2_valid && s2_fetch_finish && !io.respStall) || (!s2_valid && s2_miss_available)
  s2_fire  := s2_valid && s2_fetch_finish && !io.respStall

  val pmpExcpAF = fromPMP.map(port => port.instr)
  val mmio      = fromPMP.map(port => port.mmio) // TODO: handle it

  val (s2_req_paddr, s2_req_vaddr) = (RegEnable(next = s1_req_paddr, enable = s1_fire), RegEnable(next = s1_req_vaddr, enable = s1_fire))
  val s2_req_vsetIdx = RegEnable(next = s1_req_vsetIdx, enable = s1_fire)
  val s2_req_ptags   = RegEnable(next = s1_req_ptags,   enable = s1_fire)
  val s2_only_first  = RegEnable(next = s1_only_first,  enable = s1_fire)
  val s2_double_line = RegEnable(next = s1_double_line, enable = s1_fire)
  val s2_hit         = RegEnable(next = s1_hit,         enable = s1_fire)
  val s2_port_hit    = RegEnable(next = s1_port_hit,    enable = s1_fire)
  val s2_bank_miss   = RegEnable(next = s1_bank_miss,   enable = s1_fire)

  val sec_meet_vec     = Wire(Vec(2, Bool()))
  val s2_fixed_hit_vec = VecInit((0 until 2).map(i => s2_port_hit(i) || sec_meet_vec(i)))
  val s2_fixed_hit     = (s2_valid && s2_fixed_hit_vec(0) && s2_fixed_hit_vec(1) && s2_double_line) || (s2_valid && s2_fixed_hit_vec(0) && !s2_double_line)

  // replacement
  val s2_waymask    = RegEnable(next = s1_victim_oh,  enable = s1_fire)
  val s2_victim_coh = RegEnable(next = s1_victim_coh, enable = s1_fire)
//  val s2_victim_tag   = RegEnable(next = s1_victim_tag,   enable = s1_fire)
//  val s2_victim_data  = RegEnable(next = s1_victim_data,  enable = s1_fire)
//  val s2_need_replace = RegEnable(next = s1_need_replace, enable = s1_fire)
//  val s2_has_replace  = s2_need_replace.asUInt.orR

  /** exception and pmp logic **/
  // exception information
  val s2_except_pf  = RegEnable(next = VecInit(tlbExcpPF), enable = s1_fire)
  val s2_except_af  = VecInit(RegEnable(next = VecInit(tlbExcpAF), enable = s1_fire).zip(pmpExcpAF).map(a => a._1 || DataHoldBypass(a._2, RegNext(s1_fire)).asBool))
  val s2_except     = VecInit((0 until 2).map{i => s2_except_pf(i) || s2_except_af(i)})
  val s2_has_except = s2_valid && (s2_except_af.reduce(_||_) || s2_except_pf.reduce(_||_))
  // MMIO
  val s2_mmio = DataHoldBypass(io.pmp(0).resp.mmio && !s2_except_af(0) && !s2_except_pf(0), RegNext(s1_fire)).asBool()
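
  // PMP check (s2): each port sends its translated physical address to the PMP checker;
  // the resulting access-fault and mmio indications are merged with the ITLB exceptions above.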
  io.pmp.zipWithIndex.map { case (p, i) =>
    p.req.valid     := s2_fire
    p.req.bits.addr := s2_req_paddr(i)
    p.req.bits.size := 3.U // TODO
    p.req.bits.cmd  := TlbCmd.exec
  }

  /*** cacheline miss logic ***/
  val wait_idle :: wait_queue_ready :: wait_send_req :: wait_two_resp :: wait_0_resp :: wait_1_resp :: wait_one_resp :: wait_finish :: Nil = Enum(8)
  val wait_state = RegInit(wait_idle)

  val port_miss_fix = VecInit(Seq(fromMSHR(0).fire() && !s2_port_hit(0),  fromMSHR(1).fire() && s2_double_line && !s2_port_hit(1) ))

  class MissSlot(implicit p: Parameters) extends XSBundle with HasICacheParameters {
    val m_vSetIdx = UInt(idxBits.W)
    val m_pTag    = UInt(tagBits.W)
    val m_data    = UInt(blockBits.W)
  }

  val missSlot = Seq.fill(2)(RegInit(0.U.asTypeOf(new MissSlot)))
  val m_invalid :: m_valid :: m_refilled :: m_flushed :: m_wait_sec_miss :: m_check_final :: Nil = Enum(6)
  val missStateQueue     = RegInit(VecInit(Seq.fill(2)(m_invalid)) )
  val reservedRefillData = Wire(Vec(2, UInt(blockBits.W)))

  s2_miss_available := VecInit(missStateQueue.map(entry => entry === m_invalid || entry === m_wait_sec_miss)).reduce(_&&_)

  val fix_sec_miss    = Wire(Vec(4, Bool()))
  val sec_meet_0_miss = fix_sec_miss(0) || fix_sec_miss(2)
  val sec_meet_1_miss = fix_sec_miss(1) || fix_sec_miss(3)
  sec_meet_vec := VecInit(Seq(sec_meet_0_miss, sec_meet_1_miss ))

  // only raised in the first cycle of s2_valid
  val only_0_miss   = RegNext(s1_fire) && !s2_hit && !s2_double_line && !s2_has_except && !sec_meet_0_miss && !s2_mmio
  val only_0_hit    = RegNext(s1_fire) &&  s2_hit && !s2_double_line && !s2_mmio
  val hit_0_hit_1   = RegNext(s1_fire) &&  s2_hit &&  s2_double_line && !s2_mmio
  val hit_0_miss_1  = RegNext(s1_fire) && !s2_port_hit(1) && !sec_meet_1_miss && (s2_port_hit(0) || sec_meet_0_miss) && s2_double_line && !s2_has_except && !s2_mmio
  val miss_0_hit_1  = RegNext(s1_fire) && !s2_port_hit(0) && !sec_meet_0_miss && (s2_port_hit(1) || sec_meet_1_miss) && s2_double_line && !s2_has_except && !s2_mmio
  val miss_0_miss_1 = RegNext(s1_fire) && !s2_port_hit(0) && !s2_port_hit(1) && !sec_meet_0_miss && !sec_meet_1_miss && s2_double_line && !s2_has_except && !s2_mmio

  val hit_0_except_1  = RegNext(s1_fire) && s2_double_line && !s2_except(0) && s2_except(1) &&  s2_port_hit(0)
  val miss_0_except_1 = RegNext(s1_fire) && s2_double_line && !s2_except(0) && s2_except(1) && !s2_port_hit(0)
  val except_0        = RegNext(s1_fire) && s2_except(0)

  def holdReleaseLatch(valid: Bool, release: Bool, flush: Bool): Bool = {
    val bit = RegInit(false.B)
    when(flush)                  { bit := false.B }
    .elsewhen(valid && !release) { bit := true.B  }
    .elsewhen(release)           { bit := false.B }
    bit || valid
  }
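
  // holdReleaseLatch: the output is asserted combinationally in the cycle `valid` pulses and is
  // then held in a register until `release` fires (or `flush` clears it). The *_latch signals
  // below use it to keep the one-cycle hit/miss/exception classification stable while stage 2
  // waits for MSHR responses.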
  val miss_0_hit_1_latch    = holdReleaseLatch(valid = miss_0_hit_1,    release = s2_fire, flush = false.B)
  val miss_0_miss_1_latch   = holdReleaseLatch(valid = miss_0_miss_1,   release = s2_fire, flush = false.B)
  val only_0_miss_latch     = holdReleaseLatch(valid = only_0_miss,     release = s2_fire, flush = false.B)
  val hit_0_miss_1_latch    = holdReleaseLatch(valid = hit_0_miss_1,    release = s2_fire, flush = false.B)

  val miss_0_except_1_latch = holdReleaseLatch(valid = miss_0_except_1, release = s2_fire, flush = false.B)
  val except_0_latch        = holdReleaseLatch(valid = except_0,        release = s2_fire, flush = false.B)
  val hit_0_except_1_latch  = holdReleaseLatch(valid = hit_0_except_1,  release = s2_fire, flush = false.B)

  val only_0_hit_latch      = holdReleaseLatch(valid = only_0_hit,      release = s2_fire, flush = false.B)
  val hit_0_hit_1_latch     = holdReleaseLatch(valid = hit_0_hit_1,     release = s2_fire, flush = false.B)


  def waitSecondComeIn(missState: UInt): Bool = (missState === m_wait_sec_miss)

  // deal with a secondary miss when the request in s1 enters s2
  def getMissSituat(slotNum: Int, missNum: Int): Bool = {
    RegNext(s1_fire) &&
      (missSlot(slotNum).m_vSetIdx === s2_req_vsetIdx(missNum)) &&
      (missSlot(slotNum).m_pTag    === s2_req_ptags(missNum)) &&
      !s2_port_hit(missNum) &&
      waitSecondComeIn(missStateQueue(slotNum)) &&
      !s2_mmio
  }

  val miss_0_s2_0 = getMissSituat(slotNum = 0, missNum = 0)
  val miss_0_s2_1 = getMissSituat(slotNum = 0, missNum = 1)
  val miss_1_s2_0 = getMissSituat(slotNum = 1, missNum = 0)
  val miss_1_s2_1 = getMissSituat(slotNum = 1, missNum = 1)

  val miss_0_s2_0_latch = holdReleaseLatch(valid = miss_0_s2_0, release = s2_fire, flush = false.B)
  val miss_0_s2_1_latch = holdReleaseLatch(valid = miss_0_s2_1, release = s2_fire, flush = false.B)
  val miss_1_s2_0_latch = holdReleaseLatch(valid = miss_1_s2_0, release = s2_fire, flush = false.B)
  val miss_1_s2_1_latch = holdReleaseLatch(valid = miss_1_s2_1, release = s2_fire, flush = false.B)


  val slot_0_solve = fix_sec_miss(0) || fix_sec_miss(1)
  val slot_1_solve = fix_sec_miss(2) || fix_sec_miss(3)
  val slot_solve   = VecInit(Seq(slot_0_solve, slot_1_solve))

  fix_sec_miss := VecInit(Seq(miss_0_s2_0_latch, miss_0_s2_1_latch, miss_1_s2_0_latch, miss_1_s2_1_latch))

  reservedRefillData(0) := DataHoldBypass(data = missSlot(0).m_data, valid = miss_0_s2_0 || miss_0_s2_1)
  reservedRefillData(1) := DataHoldBypass(data = missSlot(1).m_data, valid = miss_1_s2_0 || miss_1_s2_1)

  switch(wait_state){
    is(wait_idle){
      when(miss_0_except_1_latch){
        wait_state := Mux(toMSHR(0).ready, wait_queue_ready, wait_idle)
      }.elsewhen( only_0_miss_latch || miss_0_hit_1_latch){
        wait_state := Mux(toMSHR(0).ready, wait_queue_ready, wait_idle)
      }.elsewhen(hit_0_miss_1_latch){
        wait_state := Mux(toMSHR(1).ready, wait_queue_ready, wait_idle)
      }.elsewhen( miss_0_miss_1_latch ){
        wait_state := Mux(toMSHR(0).ready && toMSHR(1).ready, wait_queue_ready, wait_idle)
      }
    }

    is(wait_queue_ready){
      wait_state := wait_send_req
    }

    is(wait_send_req) {
      when(miss_0_except_1_latch || only_0_miss_latch || hit_0_miss_1_latch || miss_0_hit_1_latch){
        wait_state := wait_one_resp
      }.elsewhen( miss_0_miss_1_latch ){
        wait_state := wait_two_resp
      }
    }

    is(wait_one_resp) {
      when( (miss_0_except_1_latch || only_0_miss_latch || miss_0_hit_1_latch) && fromMSHR(0).fire()){
        wait_state := wait_finish
      }.elsewhen( hit_0_miss_1_latch && fromMSHR(1).fire()){
        wait_state := wait_finish
      }
    }

    is(wait_two_resp) {
      when(fromMSHR(0).fire() && fromMSHR(1).fire()){
        wait_state := wait_finish
      }.elsewhen( !fromMSHR(0).fire() && fromMSHR(1).fire() ){
        wait_state := wait_0_resp
      }.elsewhen(fromMSHR(0).fire() && !fromMSHR(1).fire()){
        wait_state := wait_1_resp
      }
    }

    is(wait_0_resp) {
      when(fromMSHR(0).fire()){
        wait_state := wait_finish
      }
    }

    is(wait_1_resp) {
      when(fromMSHR(1).fire()){
        wait_state := wait_finish
      }
    }

    is(wait_finish) {
      when(s2_fire) { wait_state := wait_idle }
    }
  }
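
  // Per-slot miss handling: for each of the two miss slots, issue the MSHR request selected by
  // the latched miss situation, record the request's set index and physical tag, capture the
  // refilled data, and then park in m_wait_sec_miss so that a secondary miss to the same cache
  // line can reuse the refilled data (via reservedRefillData) instead of re-requesting it.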
  (0 until 2).map { i =>
    if(i == 1) toMSHR(i).valid := (hit_0_miss_1_latch || miss_0_miss_1_latch) && wait_state === wait_queue_ready && !s2_mmio
    else       toMSHR(i).valid := (only_0_miss_latch || miss_0_hit_1_latch || miss_0_miss_1_latch || miss_0_except_1_latch) && wait_state === wait_queue_ready && !s2_mmio
    toMSHR(i).bits.paddr   := s2_req_paddr(i)
    toMSHR(i).bits.vaddr   := s2_req_vaddr(i)
    toMSHR(i).bits.waymask := s2_waymask(i)
    toMSHR(i).bits.coh     := s2_victim_coh(i)


    when(toMSHR(i).fire() && missStateQueue(i) === m_invalid){
      missStateQueue(i)     := m_valid
      missSlot(i).m_vSetIdx := s2_req_vsetIdx(i)
      missSlot(i).m_pTag    := get_phy_tag(s2_req_paddr(i))
    }

    when(fromMSHR(i).fire() && missStateQueue(i) === m_valid ){
      missStateQueue(i)  := m_refilled
      missSlot(i).m_data := fromMSHR(i).bits.data
    }


    when(s2_fire && missStateQueue(i) === m_refilled){
      missStateQueue(i) := m_wait_sec_miss
    }

    // only the first cycle checks whether the secondary miss is met
    when(missStateQueue(i) === m_wait_sec_miss){
      // the secondary req has been fixed by this slot and the other port also hits || the secondary req is for another cacheline and hits
      when((slot_solve(i) && s2_fire) || (!slot_solve(i) && s2_fire) ) {
        missStateQueue(i) := m_invalid
      }
      // the secondary req has been fixed by this slot but the other port misses / f3 not ready || the secondary req is for another cacheline and misses
      .elsewhen((slot_solve(i) && !s2_fire && s2_valid) || (s2_valid && !slot_solve(i) && !s2_fire) ){
        missStateQueue(i) := m_check_final
      }
    }

    when(missStateQueue(i) === m_check_final && toMSHR(i).fire()){
      missStateQueue(i)     := m_valid
      missSlot(i).m_vSetIdx := s2_req_vsetIdx(i)
      missSlot(i).m_pTag    := get_phy_tag(s2_req_paddr(i))
    }.elsewhen(missStateQueue(i) === m_check_final) {
      missStateQueue(i) := m_invalid
    }
  }


//  val release_idle :: release_wait_fire :: Nil = Enum(2)
//  val release_state = RegInit(VecInit(Seq.fill(2)(release_idle)) )
//  val s2_need_release = VecInit((0 until PortNumber).map(i => s2_valid && s2_need_replace(i) && !s2_mmio && !s2_except_af(i) && !s2_except_pf(i)))
//
//  val toRealseUnit = io.toReleaseUnit


//  (0 until 2).map{ i =>
//    switch(release_state(i)){
//      is(release_idle){
//        when(s2_need_release(i)){
//          release_state(i) := Mux(toRealseUnit(i).fire(), release_wait_fire, release_idle)
//        }
//      }
//
//      is(release_wait_fire){
//        when(s2_fire){ release_state(i) := release_idle }
//      }
//    }

//    toRealseUnit(i).valid          := s2_valid && s2_need_release(i) && (release_state(i) === release_idle)
//    toRealseUnit(i).bits.addr      := get_block_addr(Cat(s2_victim_tag(i), get_untag(s2_req_vaddr(i))) )
//    toRealseUnit(i).bits.param     := s2_victim_coh(i).onCacheControl(M_FLUSH)._2
//    toRealseUnit(i).bits.voluntary := true.B
//    toRealseUnit(i).bits.hasData   := s2_victim_coh(i) === ClientStates.Dirty
//    toRealseUnit(i).bits.dirty     := s2_victim_coh(i) === ClientStates.Dirty
//    toRealseUnit(i).bits.data      := s2_victim_data(i)
//    toRealseUnit(i).bits.waymask   := s2_waymask(i)
//    toRealseUnit(i).bits.vidx      := s2_req_vsetIdx(i)
//  }

//  (0 until PortNumber).map{ i =>
//    io.victimInfor.s2(i).valid := s2_valid && s2_need_release(i)
//    io.victimInfor.s2(i).ptag  := s2_victim_tag(i)
//    io.victimInfor.s2(i).vidx  := get_idx(s2_req_vaddr(i))
//  }
//
//  (0 until PortNumber).map{ i =>
//    io.setInfor.s2(i).valid := s2_bank_miss(i) && s2_valid
//    io.setInfor.s2(i).vidx  := s1_req_vsetIdx(i)
//  }

  val miss_all_fix = wait_state === wait_finish
//  val release_all_fix = VecInit((0 until PortNumber).map(i => !s2_need_release(i) || release_state(i) === release_wait_fire))
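  // Fetch in s2 finishes when every requested line is covered: all ports hit (possibly via a
  // secondary-miss match), all outstanding misses have been refilled (wait_finish), an exception
  // on port 0 (or on port 1 with port 0 hitting) makes the data irrelevant, or the access is MMIO.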
  s2_fetch_finish := ((s2_valid && s2_fixed_hit) || miss_all_fix || hit_0_except_1_latch || except_0_latch || s2_mmio) //&& release_all_fix.reduce(_&&_)

  XSPerfAccumulate("ifu_bubble_s2_miss",    s2_valid && !s2_fetch_finish )

  (touch_ways zip touch_sets).zipWithIndex.map{ case((t_w, t_s), i) =>
    t_s(0)       := s1_req_vsetIdx(i)
    t_w(0).valid := s1_port_hit(i)
    t_w(0).bits  := OHToUInt(s1_tag_match_vec(i))

    t_s(1)       := s2_req_vsetIdx(i)
    t_w(1).valid := s2_valid && !s2_port_hit(i)
    t_w(1).bits  := OHToUInt(s2_waymask(i))
  }

  val s2_hit_datas = RegEnable(next = s1_hit_data, enable = s1_fire)
  val s2_datas     = Wire(Vec(2, UInt(blockBits.W)))

  s2_datas.zipWithIndex.map{case(bank, i) =>
    if(i == 0) bank := Mux(s2_port_hit(i), s2_hit_datas(i), Mux(miss_0_s2_0_latch, reservedRefillData(0), Mux(miss_1_s2_0_latch, reservedRefillData(1), missSlot(0).m_data)))
    else       bank := Mux(s2_port_hit(i), s2_hit_datas(i), Mux(miss_0_s2_1_latch, reservedRefillData(0), Mux(miss_1_s2_1_latch, reservedRefillData(1), missSlot(1).m_data)))
  }


  (0 until PortNumber).map{ i =>
    if(i == 0) toIFU(i).valid := s2_fire
    else       toIFU(i).valid := s2_fire && s2_double_line
    toIFU(i).bits.readData := s2_datas(i)
    toIFU(i).bits.paddr    := s2_req_paddr(i)
    toIFU(i).bits.vaddr    := s2_req_vaddr(i)
    toIFU(i).bits.tlbExcp.pageFault   := s2_except_pf(i)
    toIFU(i).bits.tlbExcp.accessFault := s2_except_af(i)
    toIFU(i).bits.tlbExcp.mmio        := s2_mmio
  }

  io.perfInfo.only_0_hit    := only_0_hit_latch
  io.perfInfo.only_0_miss   := only_0_miss_latch
  io.perfInfo.hit_0_hit_1   := hit_0_hit_1_latch
  io.perfInfo.hit_0_miss_1  := hit_0_miss_1_latch
  io.perfInfo.miss_0_hit_1  := miss_0_hit_1_latch
  io.perfInfo.miss_0_miss_1 := miss_0_miss_1_latch
  io.perfInfo.bank_hit(0)   := only_0_hit_latch || hit_0_hit_1_latch || hit_0_miss_1_latch || hit_0_except_1_latch
  io.perfInfo.bank_hit(1)   := miss_0_hit_1_latch || hit_0_hit_1_latch
  io.perfInfo.hit           := hit_0_hit_1_latch
}