-
Notifications
You must be signed in to change notification settings - Fork 32
Expand file tree
/
Copy pathPrefetcher.scala
More file actions
438 lines (390 loc) · 17 KB
/
Prefetcher.scala
File metadata and controls
438 lines (390 loc) · 17 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
/** *************************************************************************************
* Copyright (c) 2020-2021 Institute of Computing Technology, Chinese Academy of Sciences
* Copyright (c) 2020-2021 Peng Cheng Laboratory
*
* XiangShan is licensed under Mulan PSL v2.
* You can use this software according to the terms and conditions of the Mulan PSL v2.
* You may obtain a copy of Mulan PSL v2 at:
* http://license.coscl.org.cn/MulanPSL2
*
* THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
* EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
* MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
*
* See the Mulan PSL v2 for more details.
* *************************************************************************************
*/
package coupledL2.prefetch
import chisel3._
import chisel3.util._
import utility._
import org.chipsalliance.cde.config.Parameters
import utility.mbist.MbistPipeline
import coupledL2._
/* virtual address */
/** Shared constants and elaboration-time address helpers for the L2 prefetchers.
 *
 *  Mixes in circular-queue pointer helpers and the CoupledL2 parameters, and
 *  derives the bit layouts (region / block / index / tag) used by the train and
 *  request filters, plus the XOR-folding hash used to compress address tags.
 */
trait HasPrefetcherHelper extends HasCircularQueuePtrHelper with HasCoupledL2Parameters {
  // filter sizes
  val TRAIN_FILTER_SIZE = 4  // entries in the train filter
  val REQ_FILTER_SIZE = 16   // entries in the request filter
  val TLB_REPLAY_CNT = 10    // replay budget for requests waiting on the TLB

  // parameters
  val BLK_ADDR_RAW_WIDTH = 10  // low bits kept un-hashed in block/region hash tags
  val REGION_SIZE = 1024       // bytes per region
  val PAGE_OFFSET = pageOffsetBits
  val VADDR_HASH_WIDTH = 5     // width of each of the three slices folded by _vaddr_hash

  // vaddr:
  // |       tag        |     index     |    offset     |
  // |     block addr                   | block offset  |
  // |     region addr  |         region offset         |
  val BLOCK_OFFSET = offsetBits
  val REGION_OFFSET = log2Up(REGION_SIZE)
  val REGION_BLKS = REGION_SIZE / blockBytes      // cache blocks per region
  val INDEX_BITS = log2Up(REGION_BLKS)            // bits selecting a block inside a region
  val TAG_BITS = fullVAddrBits - REGION_OFFSET    // virtual region-tag width
  val PTAG_BITS = fullAddressBits - REGION_OFFSET // physical region-tag width
  val BLOCK_ADDR_BITS = fullVAddrBits - BLOCK_OFFSET

  // hash related: compressed tag = hashed high bits ++ raw low bits
  val HASH_TAG_WIDTH = VADDR_HASH_WIDTH + BLK_ADDR_RAW_WIDTH

  /** Virtual region tag: `vaddr` with the region offset stripped. */
  def get_tag(vaddr: UInt) = {
    require(vaddr.getWidth == fullVAddrBits)
    vaddr(vaddr.getWidth - 1, REGION_OFFSET)
  }

  /** Physical region tag: full physical address with the region offset stripped. */
  def get_ptag(vaddr: UInt) = {
    require(vaddr.getWidth == fullAddressBits)
    vaddr(vaddr.getWidth - 1, REGION_OFFSET)
  }

  /** Block index inside a region (bits between region offset and block offset). */
  def get_index(addr: UInt) = {
    require(addr.getWidth >= REGION_OFFSET)
    addr(REGION_OFFSET - 1, BLOCK_OFFSET)
  }

  /** One-hot encoding of the block index inside a region. */
  def get_index_oh(vaddr: UInt): UInt = {
    UIntToOH(get_index(vaddr))
  }

  /** Block-aligned virtual address: `vaddr` with the block offset stripped. */
  def get_block_vaddr(vaddr: UInt): UInt = {
    vaddr(vaddr.getWidth - 1, BLOCK_OFFSET)
  }

  /** XOR-fold three VADDR_HASH_WIDTH-wide slices of `x` into one slice. */
  def _vaddr_hash(x: UInt): UInt = {
    val width = VADDR_HASH_WIDTH
    val low = x(width - 1, 0)
    val mid = x(2 * width - 1, width)
    val high = x(3 * width - 1, 2 * width)
    low ^ mid ^ high
  }

  /** Compressed tag of a block address: hashed high bits over raw low bits
    * (HASH_TAG_WIDTH bits total). */
  def block_hash_tag(vaddr: UInt): UInt = {
    val blk_addr = get_block_vaddr(vaddr)
    val low = blk_addr(BLK_ADDR_RAW_WIDTH - 1, 0)
    val high = blk_addr(BLK_ADDR_RAW_WIDTH - 1 + 3 * VADDR_HASH_WIDTH, BLK_ADDR_RAW_WIDTH)
    val high_hash = _vaddr_hash(high)
    Cat(high_hash, low)
  }

  /** Compressed tag of a region tag: same raw-low + hashed-high scheme as block_hash_tag. */
  def region_hash_tag(vaddr: UInt): UInt = {
    val region_tag = get_tag(vaddr)
    val low = region_tag(BLK_ADDR_RAW_WIDTH - 1, 0)
    val high = region_tag(BLK_ADDR_RAW_WIDTH - 1 + 3 * VADDR_HASH_WIDTH, BLK_ADDR_RAW_WIDTH)
    val high_hash = _vaddr_hash(high)
    Cat(high_hash, low)
  }

  /** Reassemble a block address from a region tag and an in-region index. */
  def region_to_block_addr(tag: UInt, index: UInt): UInt = {
    Cat(tag, index)
  }

  /** Elaboration-time helper: binary string of `n`.
    * NOTE(review): only meaningful for n >= 0 — negative inputs produce odd strings. */
  def toBinary(n: Int): String = n match {
    case 0 | 1 => s"$n"
    case _ => s"${toBinary(n / 2)}${n % 2}"
  }
}
/** A prefetch request issued towards the L2 slices.
 *
 *  The target physical address is block-aligned and reconstructed as
 *  {tag, set, 0}; `pfSource` identifies which prefetch engine produced it.
 */
class PrefetchReq(implicit p: Parameters) extends PrefetchBundle {
  val tag = UInt(fullTagBits.W)
  val set = UInt(setBits.W)
  // NOTE: the vaddr is the train address for response update, not virtual address of prefetch paddr.
  val vaddr = vaddrBitsOpt.map(_ => UInt(vaddrBitsOpt.get.W))
  val needT = Bool()
  val source = UInt(sourceIdBits.W)
  val pfSource = UInt(MemReqSource.reqSourceBits.W)

  /** Block-aligned physical address rebuilt from tag and set. */
  def addr: UInt = Cat(tag, set, 0.U(offsetBits.W))

  // Origin predicates, one per L2-resident prefetch engine.
  def isBOP: Bool = pfSource === MemReqSource.Prefetch2L2BOP.id.U
  def isPBOP: Bool = pfSource === MemReqSource.Prefetch2L2PBOP.id.U
  def isSMS: Bool = pfSource === MemReqSource.Prefetch2L2SMS.id.U
  def isTP: Bool = pfSource === MemReqSource.Prefetch2L2TP.id.U

  /** Only the two BOP flavours expect an acknowledgement (PrefetchResp). */
  def needAck: Bool = isBOP || isPBOP

  /** True when the request originates from any L2-internal prefetcher. */
  def fromL2: Bool = isBOP || isPBOP || isSMS || isTP
}
/** Response for a completed prefetch, routed back to the engine that issued it
 *  (selected via `pfSource` in the Prefetcher wiring).
 */
class PrefetchResp(implicit p: Parameters) extends PrefetchBundle {
  // val id = UInt(sourceIdBits.W)
  val tag = UInt(fullTagBits.W)
  val set = UInt(setBits.W)
  val vaddr = vaddrBitsOpt.map(_ => UInt(vaddrBitsOpt.get.W))
  val pfSource = UInt(MemReqSource.reqSourceBits.W)

  /** Block-aligned physical address rebuilt from tag and set. */
  def addr = Cat(tag, set, 0.U(offsetBits.W))

  // Origin predicates mirroring PrefetchReq.
  def isBOP: Bool = pfSource === MemReqSource.Prefetch2L2BOP.id.U
  def isPBOP: Bool = pfSource === MemReqSource.Prefetch2L2PBOP.id.U
  def isSMS: Bool = pfSource === MemReqSource.Prefetch2L2SMS.id.U
  def isTP: Bool = pfSource === MemReqSource.Prefetch2L2TP.id.U

  /** True when the response belongs to any L2-internal prefetcher. */
  def fromL2: Bool = isBOP || isPBOP || isSMS || isTP
}
/** Training event sent from the L2 pipeline to the prefetch engines. */
class PrefetchTrain(implicit p: Parameters) extends PrefetchBundle {
  val tag = UInt(fullTagBits.W)
  val set = UInt(setBits.W)
  val needT = Bool()
  val source = UInt(sourceIdBits.W)
  val vaddr = vaddrBitsOpt.map(_ => UInt(vaddrBitsOpt.get.W))
  // presumably: whether the triggering access hit in L2 — confirm against the pipeline
  val hit = Bool()
  // presumably: whether the accessed line had been prefetched earlier — confirm against the pipeline
  val prefetched = Bool()
  // which prefetcher (if any) originally brought the line in
  val pfsource = UInt(PfSource.pfSourceBits.W)
  // requestor class of the triggering access (used to filter L1-data-prefetch trains)
  val reqsource = UInt(MemReqSource.reqSourceBits.W)

  /** Block-aligned physical address of the training access. */
  def addr: UInt = Cat(tag, set, 0.U(offsetBits.W))
}
/** Interface between one prefetch engine and the L2: training events in,
 *  prefetch requests out, responses back, plus a TLB port for engines that
 *  train on virtual addresses.
 */
class L2PrefetchIO(implicit p: Parameters) extends PrefetchBundle {
  val train = Flipped(DecoupledIO(new PrefetchTrain))
  val tlb_req = new L2ToL1TlbIO(nRespDups= 1)
  val req = DecoupledIO(new PrefetchReq)
  val resp = Flipped(DecoupledIO(new PrefetchResp))
}
/** L2PrefetchIO extended with the address channel used by the upper-level
 *  (core/L1-side) prefetch receiver.
 */
class PrefetchIO(implicit p: Parameters) extends L2PrefetchIO {
  val recv_addr = Flipped(ValidIO(new Bundle() {
    val addr = UInt(64.W)
    val pfSource = UInt(MemReqSource.reqSourceBits.W)
  }))
}
/** Circular queue between the prefetch engines and the slices.
 *
 *  Always ready for enqueue: when full, the oldest entry is overwritten, so the
 *  queue always holds the newest requests. When empty and the consumer is ready,
 *  an enqueue flows straight through without being stored.
 */
class PrefetchQueue(implicit p: Parameters) extends PrefetchModule {
  val io = IO(new Bundle {
    val enq = Flipped(DecoupledIO(new PrefetchReq))
    val deq = DecoupledIO(new PrefetchReq)
  })

  /* Here we implement a queue that
   * 1. is pipelined 2. flows
   * 3. always has the latest reqs, which means the queue is always ready for enq and discarding the eldest ones
   */
  val queue = RegInit(VecInit(Seq.fill(inflightEntries)(0.U.asTypeOf(new PrefetchReq))))
  val valids = RegInit(VecInit(Seq.fill(inflightEntries)(false.B)))
  val idxWidth = log2Up(inflightEntries)
  val head = RegInit(0.U(idxWidth.W))
  val tail = RegInit(0.U(idxWidth.W))
  // head === tail is ambiguous (all-empty vs all-full); valids.last disambiguates:
  // NOTE(review): this assumes that whenever head === tail all entries share the same
  // valid state, so checking the last entry suffices — confirm invariant
  val empty = head === tail && !valids.last
  val full = head === tail && valids.last

  // dequeue: free the head entry and advance the head pointer
  when(!empty && io.deq.ready) {
    valids(head) := false.B
    head := head + 1.U
  }

  when(io.enq.valid) {
    queue(tail) := io.enq.bits
    // flow-through case (empty && deq.ready): the request bypasses storage, so the
    // entry is not marked valid and tail does not advance
    valids(tail) := !empty || !io.deq.ready // true.B
    tail := tail + (!empty || !io.deq.ready).asUInt
    // full and not draining: overwrite the oldest entry by pushing head forward
    // (this when-block wins over the dequeue block above via last-connect)
    when(full && !io.deq.ready) {
      head := head + 1.U
    }
  }

  io.enq.ready := true.B
  io.deq.valid := !empty || io.enq.valid
  // bypass storage when empty (flow-through)
  io.deq.bits := Mux(empty, io.enq.bits, queue(head))

  // The reqs that are discarded = enq - deq
  XSPerfAccumulate("prefetch_queue_enq", io.enq.fire)
  XSPerfAccumulate("prefetch_queue_enq_fromBOP", io.enq.fire && io.enq.bits.isBOP)
  XSPerfAccumulate("prefetch_queue_enq_fromPBOP", io.enq.fire && io.enq.bits.isPBOP)
  XSPerfAccumulate("prefetch_queue_enq_fromSMS", io.enq.fire && io.enq.bits.isSMS)
  XSPerfAccumulate("prefetch_queue_enq_fromTP", io.enq.fire && io.enq.bits.isTP)
  XSPerfAccumulate("prefetch_queue_deq", io.deq.fire)
  XSPerfAccumulate("prefetch_queue_deq_fromBOP", io.deq.fire && io.deq.bits.isBOP)
  XSPerfAccumulate("prefetch_queue_deq_fromPBOP", io.deq.fire && io.deq.bits.isPBOP)
  XSPerfAccumulate("prefetch_queue_deq_fromSMS", io.deq.fire && io.deq.bits.isSMS)
  XSPerfAccumulate("prefetch_queue_deq_fromTP", io.deq.fire && io.deq.bits.isTP)
  XSPerfHistogram("prefetch_queue_entry", PopCount(valids.asUInt),
    true.B, 0, inflightEntries, 1)
  XSPerfAccumulate("prefetch_queue_empty", empty)
  XSPerfAccumulate("prefetch_queue_full", full)
}
/** Top-level prefetcher container for the L2.
 *
 *  Instantiates the enabled engines (receiver for core/L1 hints, virtual- and
 *  physical-address Best-Offset, Temporal), arbitrates their requests into a
 *  single PrefetchQueue and pipes the winner out to the slices.
 *  Fixed priority: Receiver > VBOP > PBOP > TP.
 *
 *  Wiring note: this module relies on Chisel last-connect semantics — a bulk
 *  `<>` connect is often followed by an override of `valid`/`ready`; statement
 *  order is therefore semantic.
 */
class Prefetcher(implicit p: Parameters) extends PrefetchModule {
  val io = IO(new PrefetchIO)
  val tpio = IO(new Bundle() {
    // TP metadata side-channel, present only when the TP prefetcher exists
    val tpmeta_port = if (hasTPPrefetcher) Some(new tpmetaPortIO(hartIdLen, fullAddressBits, offsetBits)) else None
  })
  val hartId = IO(Input(UInt(hartIdLen.W)))
  val pfCtrlFromCore = IO(Input(new PrefetchCtrlFromCore))

  // l2 receive need 2 cycles to transmit from core
  val pfRcv_en = RegNextN(pfCtrlFromCore.l2_pf_master_en && pfCtrlFromCore.l2_pf_recv_en, 2, Some(true.B))
  // per-engine enables, all gated by the master enable
  val pbop_en = pfCtrlFromCore.l2_pf_master_en && pfCtrlFromCore.l2_pbop_en
  val vbop_en = pfCtrlFromCore.l2_pf_master_en && pfCtrlFromCore.l2_vbop_en
  val tp_en = pfCtrlFromCore.l2_pf_master_en && pfCtrlFromCore.l2_tp_en

  // =================== Prefetchers =====================
  // TODO: consider separate VBOP and PBOP in prefetch param
  // Physical-address BOP: trains on paddr (virtualTrain = false); small offset list
  // bounded so candidates stay within a page
  val pbop = if (hasBOP) Some(
    Module(new PBestOffsetPrefetch()(p.alterPartial({
      case L2ParamKey => p(L2ParamKey).copy(prefetch = Seq(BOPParameters(
        virtualTrain = false,
        badScore = 1,
        offsetList = Seq(
          -32, -30, -27, -25, -24, -20, -18, -16, -15,
          -12, -10, -9, -8, -6, -5, -4, -3, -2, -1,
          1, 2, 3, 4, 5, 6, 8, 9, 10,
          12, 15, 16, 18, 20, 24, 25, 27, 30
        )
      )))
    })))
  ) else None

  // Virtual-address BOP: wider offset list since vaddr training can cross pages
  val vbop = if (hasBOP) Some(
    Module(new VBestOffsetPrefetch()(p.alterPartial({
      case L2ParamKey => p(L2ParamKey).copy(prefetch = Seq(BOPParameters(
        badScore = 2,
        offsetList = Seq(
          -117, -147, -91, 117, 147, 91,
          -256, -250, -243, -240, -225, -216, -200,
          -192, -180, -162, -160, -150, -144, -135, -128,
          -125, -120, -108, -100, -96, -90, -81, -80,
          -75, -72, -64, -60, -54, -50, -48, -45,
          -40, -36, -32, -30, -27, -25, -24, -20,
          -18, -16, -15, -12, -10, -9, -8, -6,
          -5, -4, -3, -2, -1,
          1, 2, 3, 4, 5, 6, 8,
          9, 10, 12, 15, 16, 18, 20, 24,
          25, 27, 30, 32, 36, 40, 45, 48,
          50, 54, 60, 64, 72, 75, 80, 81,
          90, 96, 100, 108, 120, 125, 128, 135,
          144, 150, 160, 162, 180, 192, 200, 216,
          225, 240, 243, 250 /*, 256*/
        )
      )))
    })))
  ) else None

  val tp = if (hasTPPrefetcher) Some(Module(new TemporalPrefetch())) else None
  // prefetch from upper level
  val pfRcv = if (hasReceiver) Some(Module(new PrefetchReceiver())) else None
  val hasMyPrefetch = prefetchers.exists(_.isInstanceOf[MyPrefetchParameters])
  val myPrefetch = if (hasMyPrefetch) Some(Module(new MyPrefetch())) else None

  // =================== Connection for each Prefetcher =====================
  // Rcv > VBOP > PBOP > TP
  if (hasBOP) {
    vbop.get.io_enable := vbop_en
    // priority gating: VBOP yields to a pending receiver request
    // NOTE(review): req.ready is also driven through arb() below — confirm the
    // intended last-connect winner
    vbop.get.io.req.ready := (if(hasReceiver) !pfRcv.get.io.req.valid else true.B)
    vbop.get.io.train <> io.train
    // drop trains triggered by L1 data prefetches (override after bulk connect)
    vbop.get.io.train.valid := io.train.valid && (io.train.bits.reqsource =/= MemReqSource.L1DataPrefetch.id.U)
    vbop.get.io.resp <> io.resp
    // steer only (V)BOP responses to this engine
    vbop.get.io.resp.valid := io.resp.valid && io.resp.bits.isBOP
    // only VBOP needs the TLB (it trains on virtual addresses)
    vbop.get.io.tlb_req <> io.tlb_req

    pbop.get.io_enable := pbop_en
    // PBOP yields to receiver and VBOP
    pbop.get.io.req.ready :=
      (if(hasReceiver) !pfRcv.get.io.req.valid else true.B) &&
      (if(hasBOP) !vbop.get.io.req.valid else true.B)
    pbop.get.io.train <> io.train
    pbop.get.io.train.valid := io.train.valid && (io.train.bits.reqsource =/= MemReqSource.L1DataPrefetch.id.U)
    pbop.get.io.resp <> io.resp
    pbop.get.io.resp.valid := io.resp.valid && io.resp.bits.isPBOP
    // PBOP works on physical addresses: no TLB traffic
    pbop.get.io.tlb_req <> DontCare
  }
  if (hasReceiver) {
    pfRcv.get.io_enable := pfRcv_en
    pfRcv.get.io.req.ready := true.B
    // delay matches the 2-cycle core-to-L2 transmission of the enable above
    pfRcv.get.io.recv_addr := ValidIODelay(io.recv_addr, 2)
    // the receiver neither trains nor handles responses: tie those ports off
    pfRcv.get.io.train.valid := false.B
    pfRcv.get.io.train.bits := 0.U.asTypeOf(new PrefetchTrain)
    pfRcv.get.io.resp.valid := false.B
    pfRcv.get.io.resp.bits := 0.U.asTypeOf(new PrefetchResp)
    pfRcv.get.io.tlb_req.req.ready := true.B
    pfRcv.get.io.tlb_req.resp.valid := false.B
    pfRcv.get.io.tlb_req.resp.bits := DontCare
    pfRcv.get.io.tlb_req.pmp_resp := DontCare
    // receiver requests must carry one of the upper-level prefetch sources
    assert(!pfRcv.get.io.req.valid ||
      pfRcv.get.io.req.bits.pfSource === MemReqSource.Prefetch2L2SMS.id.U ||
      pfRcv.get.io.req.bits.pfSource === MemReqSource.Prefetch2L2Stream.id.U ||
      pfRcv.get.io.req.bits.pfSource === MemReqSource.Prefetch2L2Stride.id.U
    )
  }
  if (hasTPPrefetcher) {
    tp.get.io.enable := tp_en
    tp.get.io.train <> io.train
    tp.get.io.resp <> io.resp
    tp.get.io.hartid := hartId
    // TP has lowest priority: yields to receiver and both BOPs
    tp.get.io.req.ready := (if(hasReceiver) !pfRcv.get.io.req.valid else true.B) &&
      (if(hasBOP) !vbop.get.io.req.valid && !pbop.get.io.req.valid else true.B)
    tp.get.io.tpmeta_port <> tpio.tpmeta_port.get
  }
  if (hasMyPrefetch) {
    // placeholder engine: everything except train is left unconnected
    myPrefetch.get.io <> DontCare
    myPrefetch.get.io.train <> io.train
  }

  private val mbistPl = MbistPipeline.PlaceMbistPipeline(2, "MbistPipeL2Prefetcher", cacheParams.hasMbist && (hasBOP || hasTPPrefetcher))

  // =================== Connection of all Prefetchers =====================
  /* prefetchers -> pftQueue -> pipe -> Slices.SinkA */
  val pftQueue = Module(new PrefetchQueue)
  val pipe = Module(new Pipeline(io.req.bits.cloneType, 1))
  // the two BOP flavours are merged first, then arbitrated with the other engines
  val bopReq = Wire(DecoupledIO(new PrefetchReq()))
  if (hasBOP) { arb(Seq(vbop.get.io.req, pbop.get.io.req), bopReq) }
  arb(
    in = prefetchers.map {
      case _: PrefetchReceiverParams => pfRcv.get.io.req
      case _: BOPParameters => bopReq
      case _: TPParameters => tp.get.io.req
      case _: MyPrefetchParameters => myPrefetch.get.io.req
    },
    out = pftQueue.io.enq,
    name = Some("pftQueue")
  )
  pipe.io.in <> pftQueue.io.deq
  io.req <> pipe.io.out

  // request-source statistics
  val hasReceiverReq = if (hasReceiver) pfRcv.get.io.req.valid else false.B
  val hasVBOPReq = if (hasBOP) vbop.get.io.req.valid else false.B
  val hasPBOPReq = if (hasBOP) pbop.get.io.req.valid else false.B
  val hasTPReq = if (hasTPPrefetcher) tp.get.io.req.valid else false.B
  XSPerfAccumulate("prefetch_req_fromL1", hasReceiverReq)
  XSPerfAccumulate("prefetch_req_fromVBOP", hasVBOPReq)
  XSPerfAccumulate("prefetch_req_fromPBOP", hasPBOPReq)
  XSPerfAccumulate("prefetch_req_fromBOP", hasVBOPReq || hasPBOPReq)
  XSPerfAccumulate("prefetch_req_fromTP", hasTPReq)
  XSPerfAccumulate("prefetch_req_selectL1", hasReceiverReq)
  XSPerfAccumulate("prefetch_req_selectVBOP", hasVBOPReq && !hasReceiverReq)
  XSPerfAccumulate("prefetch_req_selectPBOP", hasPBOPReq && !hasReceiverReq && !hasVBOPReq)
  XSPerfAccumulate("prefetch_req_selectBOP", (hasPBOPReq || hasVBOPReq) && !hasReceiverReq)
  XSPerfAccumulate("prefetch_req_selectTP", hasTPReq && !hasReceiverReq && !hasVBOPReq && !hasPBOPReq)
  XSPerfAccumulate("prefetch_req_SMS_other_overlapped",
    hasReceiverReq && (hasVBOPReq || hasPBOPReq || hasTPReq))

  // ChiselDB debug logging of train and prefetch traffic
  // NOTE: set basicDB false when debug over
  // TODO: change the enable signal to not target the BOP
  class TrainEntry extends Bundle{
    val paddr = UInt(fullAddressBits.W)
    val vaddr = UInt(fullVAddrBits.W)
    val needT = Bool()
    val hit = Bool()
    val prefetched = Bool()
    val source = UInt(sourceIdBits.W)
    val pfsource = UInt(PfSource.pfSourceBits.W)
    val reqsource = UInt(MemReqSource.reqSourceBits.W)
  }
  val trainTT = ChiselDB.createTable("L2PrefetchTrainTable", new TrainEntry, basicDB = false)
  val e1 = Wire(new TrainEntry)
  e1.paddr := io.train.bits.addr
  // train vaddr is stored block-aligned; shift back to a full virtual address
  e1.vaddr := io.train.bits.vaddr.getOrElse(0.U) << offsetBits
  e1.needT := io.train.bits.needT
  e1.hit := io.train.bits.hit
  e1.prefetched := io.train.bits.prefetched
  e1.source := io.train.bits.source
  e1.pfsource := io.train.bits.pfsource
  e1.reqsource := io.train.bits.reqsource
  trainTT.log(
    data = e1,
    // same filter as the BOP train ports: skip L1-data-prefetch-triggered trains
    en = io.train.valid && (io.train.bits.reqsource =/= MemReqSource.L1DataPrefetch.id.U),
    site = "L2Train_onlyBOP",
    clock, reset
  )

  class PrefetchEntry extends Bundle{
    val paddr = UInt(fullAddressBits.W)
    val needT = Bool()
    val pfsource = UInt(MemReqSource.reqSourceBits.W)
  }
  val pfTT = ChiselDB.createTable("L2PrefetchPrefetchTable", new PrefetchEntry, basicDB = false)
  val e2 = Wire(new PrefetchEntry)
  e2.paddr := io.req.bits.addr
  e2.needT := io.req.bits.needT
  e2.pfsource := io.req.bits.pfSource
  pfTT.log(
    data = e2,
    // only BOP-issued prefetches are logged here
    en = io.req.fire && io.req.bits.pfSource === MemReqSource.Prefetch2L2BOP.id.U,
    site = "L2Prefetch_onlyBOP",
    clock, reset
  )
}