MemBlock: refactor selectOldest of rollback for better timing
Don't select the oldest rollback twice. LoadQueueRAW no longer picks the oldest rollback internally; instead it exposes one rollback port per store pipeline and sends every request to MemBlock, which selects the oldest together with the other rollback sources.
weidingliu authored and Tang-Haojin committed Jul 11, 2024
1 parent 6529dbb commit 16ede6b
Showing 4 changed files with 12 additions and 21 deletions.
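
For context, the "select oldest" that this commit removes from LoadQueueRAW and keeps only in MemBlock is a pairwise one-hot pick of the oldest valid redirect by robIdx (selectOldestRedirect in the diffs below). The following is a minimal, software-only Scala sketch of that selection scheme; RedirectReq, robAge and the Long-based isAfter are illustrative stand-ins for the Chisel Valid[Redirect], robIdx and isAfter used in the real code, not the actual XiangShan types.

object SelectOldestSketch {
  // Stand-in for a Valid(new Redirect): a valid flag plus a program-order age.
  final case class RedirectReq(valid: Boolean, robAge: Long)

  // In the RTL, isAfter(a, b) means "a is younger than b" in program order;
  // here a larger robAge simply means younger.
  def isAfter(a: Long, b: Long): Boolean = a > b

  // Entry i is selected iff it is valid, strictly older than every valid entry
  // before it, and not younger than any valid entry after it. Ties on robAge
  // therefore fall to the lower index, so the order in which allRedirect is
  // assembled doubles as a tie-break priority.
  def selectOldestOneHot(reqs: Seq[RedirectReq]): Seq[Boolean] =
    reqs.indices.map { i =>
      reqs(i).valid && reqs.indices.forall { j =>
        if (j < i)       !reqs(j).valid || isAfter(reqs(j).robAge, reqs(i).robAge)
        else if (j == i) true
        else             !reqs(j).valid || !isAfter(reqs(i).robAge, reqs(j).robAge)
      }
    }

  def main(args: Array[String]): Unit = {
    // Three rollback sources; the second holds the oldest valid request.
    val reqs = Seq(
      RedirectReq(valid = true,  robAge = 7),
      RedirectReq(valid = true,  robAge = 2),
      RedirectReq(valid = false, robAge = 0)
    )
    println(selectOldestOneHot(reqs)) // Vector(false, true, false)
  }
}

In hardware the same per-entry condition is built with Cat(...).andR, and the resulting one-hot vector drives a Mux1H, so the pick is a single flat layer of comparators rather than a priority chain; the timing gain of this commit comes from doing that pick once, in MemBlock, instead of once in LoadQueueRAW and again in MemBlock.
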
2 changes: 1 addition & 1 deletion src/main/scala/xiangshan/backend/MemBlock.scala
@@ -1244,7 +1244,7 @@ class MemBlockImp(outer: MemBlock) extends LazyModuleImp(outer)
)).andR))
resultOnehot
}
- val allRedirect = Seq(lsq.io.nuke_rollback, lsq.io.nack_rollback) ++ loadUnits.map(_.io.rollback) ++ hybridUnits.map(_.io.ldu_io.rollback)
+ val allRedirect = loadUnits.map(_.io.rollback) ++ hybridUnits.map(_.io.ldu_io.rollback) ++ Seq(lsq.io.nack_rollback) ++ lsq.io.nuke_rollback
val oldestOneHot = selectOldestRedirect(allRedirect)
val oldestRedirect = Mux1H(oldestOneHot, allRedirect)
io.mem_to_ooo.memoryViolation := oldestRedirect
2 changes: 1 addition & 1 deletion src/main/scala/xiangshan/mem/lsqueue/LSQWrapper.scala
@@ -85,7 +85,7 @@ class LsqWrapper(implicit p: Parameters) extends XSModule with HasDCacheParamete
val sbufferVecDifftestInfo = Vec(EnsbufferWidth, Decoupled(new DynInst)) // The vector store difftest needs is
val forward = Vec(LoadPipelineWidth, Flipped(new PipeLoadForwardQueryIO))
val rob = Flipped(new RobLsqIO)
- val nuke_rollback = Output(Valid(new Redirect))
+ val nuke_rollback = Vec(StorePipelineWidth, Output(Valid(new Redirect)))
val nack_rollback = Output(Valid(new Redirect))
val release = Flipped(Valid(new Release))
// val refill = Flipped(Valid(new Refill))
6 changes: 3 additions & 3 deletions src/main/scala/xiangshan/mem/lsqueue/LoadQueue.scala
@@ -145,7 +145,7 @@ class LoadQueue(implicit p: Parameters) extends XSModule
// val refill = Flipped(ValidIO(new Refill))
val tl_d_channel = Input(new DcacheToLduForwardIO)
val release = Flipped(Valid(new Release))
- val nuke_rollback = Output(Valid(new Redirect))
+ val nuke_rollback = Vec(StorePipelineWidth, Output(Valid(new Redirect)))
val nack_rollback = Output(Valid(new Redirect))
val rob = Flipped(new RobLsqIO)
val uncache = new UncacheWordIO
@@ -292,7 +292,7 @@ class LoadQueue(implicit p: Parameters) extends XSModule
XSPerfAccumulate("full_mask_101", full_mask === 5.U)
XSPerfAccumulate("full_mask_110", full_mask === 6.U)
XSPerfAccumulate("full_mask_111", full_mask === 7.U)
XSPerfAccumulate("nuke_rollback", io.nuke_rollback.valid)
XSPerfAccumulate("nuke_rollback", io.nuke_rollback.map(_.valid).reduce(_ || _).asUInt)
XSPerfAccumulate("nack_rollabck", io.nack_rollback.valid)

// perf cnt
@@ -306,7 +306,7 @@ class LoadQueue(implicit p: Parameters) extends XSModule
("full_mask_101", full_mask === 5.U),
("full_mask_110", full_mask === 6.U),
("full_mask_111", full_mask === 7.U),
("nuke_rollback", io.nuke_rollback.valid),
("nuke_rollback", io.nuke_rollback.map(_.valid).reduce(_ || _).asUInt),
("nack_rollback", io.nack_rollback.valid)
)
generatePerfEvent()
23 changes: 7 additions & 16 deletions src/main/scala/xiangshan/mem/lsqueue/LoadQueueRAW.scala
@@ -46,7 +46,7 @@ class LoadQueueRAW(implicit p: Parameters) extends XSModule
val storeIn = Vec(StorePipelineWidth, Flipped(Valid(new LsPipelineBundle)))

// global rollback flush
- val rollback = Output(Valid(new Redirect))
+ val rollback = Vec(StorePipelineWidth,Output(Valid(new Redirect)))

// to LoadQueueReplay
val stAddrReadySqPtr = Input(new SqPtr)
@@ -274,7 +274,7 @@ class LoadQueueRAW(implicit p: Parameters) extends XSModule
val (selValid, selBits) = selectPartialOldest(valid, bits)
val selValidNext = RegNext(selValid(0))
val selBitsNext = RegNext(selBits(0))
- (Seq(selValidNext && !selBitsNext.uop.robIdx.needFlush(io.redirect) && !selBitsNext.uop.robIdx.needFlush(RegNext(io.redirect))), Seq(selBitsNext))
+ (Seq(selValidNext && !selBitsNext.uop.robIdx.needFlush(RegNext(io.redirect))), Seq(selBitsNext))
} else {
val select = (0 until numSelectGroups).map(g => {
val (selValid, selBits) = selectPartialOldest(selectValidGroups(g), selectBitsGroups(g))
@@ -341,15 +341,7 @@ class LoadQueueRAW(implicit p: Parameters) extends XSModule
// Thus, here if last cycle's robIdx equals to this cycle's robIdx, it still triggers the redirect.

// select uop in parallel
- def selectOldestRedirect(xs: Seq[Valid[Redirect]]): Vec[Bool] = {
- val compareVec = (0 until xs.length).map(i => (0 until i).map(j => isAfter(xs(j).bits.robIdx, xs(i).bits.robIdx)))
- val resultOnehot = VecInit((0 until xs.length).map(i => Cat((0 until xs.length).map(j =>
- (if (j < i) !xs(j).valid || compareVec(i)(j)
- else if (j == i) xs(i).valid
- else !xs(j).valid || !compareVec(j)(i))
- )).andR))
- resultOnehot
- }

val allRedirect = (0 until StorePipelineWidth).map(i => {
val redirect = Wire(Valid(new Redirect))
redirect.valid := rollbackLqWb(i).valid
@@ -365,21 +357,20 @@ class LoadQueueRAW(implicit p: Parameters) extends XSModule
redirect.bits.debug_runahead_checkpoint_id := rollbackLqWb(i).bits.debugInfo.runahead_checkpoint_id
redirect
})
- val oldestOneHot = selectOldestRedirect(allRedirect)
- val oldestRedirect = Mux1H(oldestOneHot, allRedirect)
- io.rollback := oldestRedirect
+ io.rollback := allRedirect

// perf cnt
val canEnqCount = PopCount(io.query.map(_.req.fire))
val validCount = freeList.io.validCount
val allowEnqueue = validCount <= (LoadQueueRAWSize - LoadPipelineWidth).U
+ val rollbaclValid = io.rollback.map(_.valid).reduce(_ || _).asUInt

QueuePerf(LoadQueueRAWSize, validCount, !allowEnqueue)
XSPerfAccumulate("enqs", canEnqCount)
XSPerfAccumulate("stld_rollback", io.rollback.valid)
XSPerfAccumulate("stld_rollback", rollbaclValid)
val perfEvents: Seq[(String, UInt)] = Seq(
("enq ", canEnqCount),
("stld_rollback", io.rollback.valid),
("stld_rollback", rollbaclValid),
)
generatePerfEvent()
// end