提交 6060732c 编写于 作者: LinJiawei

fix backend bugs

上级 ac067a5c
......@@ -64,10 +64,22 @@ class RedirectGenerator extends XSModule with HasCircularQueuePtrHelper {
redirect (send to frontend)
*/
def selectOlderRedirect(x: Valid[Redirect], y: Valid[Redirect]): Valid[Redirect] = {
Mux(isAfter(x.bits.roqIdx, y.bits.roqIdx) && y.valid, y, x)
Mux(x.valid,
Mux(y.valid,
Mux(isAfter(x.bits.roqIdx, y.bits.roqIdx), y, x),
x
),
y
)
}
def selectOlderExuOut(x: Valid[ExuOutput], y: Valid[ExuOutput]): Valid[ExuOutput] = {
Mux(isAfter(x.bits.redirect.roqIdx, y.bits.redirect.roqIdx) && y.valid, y, x)
Mux(x.valid,
Mux(y.valid,
Mux(isAfter(x.bits.redirect.roqIdx, y.bits.redirect.roqIdx), y, x),
x
),
y
)
}
val jumpOut = io.exuMispredict.head
val oldestAluOut = ParallelOperation(io.exuMispredict.tail, selectOlderExuOut)
......@@ -80,8 +92,8 @@ class RedirectGenerator extends XSModule with HasCircularQueuePtrHelper {
redirect
})
val s1_isJalr = RegEnable(JumpOpType.jumpOpisJalr(jumpOut.bits.uop.ctrl.fuOpType), jumpOut.valid)
val s1_JalrTarget = RegEnable(jumpOut.bits.redirect.cfiUpdate.target, jumpOut.valid)
val s1_isJump = RegNext(jumpOut.valid && !oldestAluOut.valid, init = false.B)
val s1_jumpTarget = RegEnable(jumpOut.bits.redirect.cfiUpdate.target, jumpOut.valid)
val s1_imm12_reg = RegEnable(oldestExuOut.bits.uop.ctrl.imm(11, 0), oldestExuOut.valid)
val s1_pd = RegEnable(oldestExuOut.bits.uop.cf.pd, oldestExuOut.valid)
val s1_redirect_bits_reg = Reg(new Redirect)
......@@ -101,22 +113,23 @@ class RedirectGenerator extends XSModule with HasCircularQueuePtrHelper {
io.stage2FtqRead.ptr := s1_redirect_bits_reg.ftqIdx
// stage3, calculate redirect target
val s2_isJalr = RegEnable(s1_isJalr, s1_redirect_valid_reg)
val s2_JalrTarget = RegEnable(s1_JalrTarget, s1_redirect_valid_reg)
val s2_isJump = RegNext(s1_isJump)
val s2_jumpTarget = RegEnable(s1_jumpTarget, s1_redirect_valid_reg)
val s2_imm12_reg = RegEnable(s1_imm12_reg, s1_redirect_valid_reg)
val s2_pd = RegEnable(s1_pd, s1_redirect_valid_reg)
val s2_redirect_bits_reg = Reg(new Redirect)
val s2_redirect_valid_reg = RegInit(false.B)
val s2_redirect_bits_reg = RegEnable(s1_redirect_bits_reg, enable = s1_redirect_valid_reg)
val s2_redirect_valid_reg = RegNext(s1_redirect_valid_reg, init = false.B)
val ftqRead = io.stage2FtqRead.entry
val pc = GetPcByFtq(ftqRead.ftqPC, s2_redirect_bits_reg.ftqOffset)
val brTarget = pc + SignExt(s2_imm12_reg, XLEN)
val snpc = pc + Mux(s2_pd.isRVC, 2.U, 4.U)
val isReplay = RedirectLevel.flushItself(s2_redirect_bits_reg.level)
val target = Mux(isReplay,
pc, // repaly from itself
Mux(s2_isJalr,
s2_JalrTarget, // jalr already save target
brTarget // branch
Mux(s2_redirect_bits_reg.cfiUpdate.taken,
Mux(s2_isJump, s2_jumpTarget, brTarget),
snpc
)
)
io.stage3Redirect.valid := s2_redirect_valid_reg
......@@ -182,7 +195,7 @@ class CtrlBlock extends XSModule with HasCircularQueuePtrHelper {
ftq.io.enq <> io.frontend.fetchInfo
for(i <- 0 until CommitWidth){
ftq.io.roq_commits(i).valid := roq.io.commits.valid(i)
ftq.io.roq_commits(i).valid := roq.io.commits.valid(i) && !roq.io.commits.isWalk
ftq.io.roq_commits(i).bits := roq.io.commits.info(i)
}
ftq.io.redirect <> backendRedirect
......@@ -200,8 +213,10 @@ class CtrlBlock extends XSModule with HasCircularQueuePtrHelper {
decode.io.in <> io.frontend.cfVec
val jumpInst = dispatch.io.enqIQCtrl(0).bits
val ftqOffsetReg = Reg(UInt(log2Up(PredictWidth).W))
ftqOffsetReg := jumpInst.cf.ftqOffset
ftq.io.ftqRead(0).ptr := jumpInst.cf.ftqPtr // jump
io.toIntBlock.jumpPc := GetPcByFtq(ftq.io.ftqRead(0).entry.ftqPC, jumpInst.cf.ftqOffset)
io.toIntBlock.jumpPc := GetPcByFtq(ftq.io.ftqRead(0).entry.ftqPC, ftqOffsetReg)
io.toIntBlock.jalr_target := ftq.io.ftqRead(0).entry.target
// pipeline between decode and dispatch
......
......@@ -61,8 +61,10 @@ class Ftq extends XSModule with HasCircularQueuePtrHelper {
io.enq.ready := validEntries < FtqSize.U
io.enqPtr := tailPtr
val real_fire = io.enq.fire() && !io.redirect.valid && !io.frontendRedirect.valid
val dataModule = Module(new DataModuleTemplate(new FtqEntry, FtqSize, 4, 1, true))
dataModule.io.wen(0) := io.enq.fire()
dataModule.io.wen(0) := real_fire
dataModule.io.waddr(0) := tailPtr.value
dataModule.io.wdata(0) := io.enq.bits
......@@ -77,7 +79,7 @@ class Ftq extends XSModule with HasCircularQueuePtrHelper {
val commitStateQueue = Reg(Vec(FtqSize, Vec(PredictWidth, Bool())))
when(io.enq.fire()){
when(real_fire){
val enqIdx = tailPtr.value
commitStateQueue(enqIdx) := io.enq.bits.valids
cfiIndex_vec(enqIdx) := io.enq.bits.cfiIndex
......@@ -87,7 +89,7 @@ class Ftq extends XSModule with HasCircularQueuePtrHelper {
mispredict_vec(enqIdx) := WireInit(VecInit(Seq.fill(PredictWidth)(false.B)))
}
tailPtr := tailPtr + io.enq.fire()
tailPtr := tailPtr + real_fire
// exu write back, update some info
for((wb, i) <- io.exuWriteback.zipWithIndex){
......@@ -155,13 +157,15 @@ class Ftq extends XSModule with HasCircularQueuePtrHelper {
assert(headPtr === io.redirect.bits.ftqIdx)
}.otherwise{ // branch misprediction or load replay
val isReplay = RedirectLevel.flushItself(io.redirect.bits.level)
tailPtr := io.redirect.bits.ftqIdx + 1.U
val next = io.redirect.bits.ftqIdx + 1.U
tailPtr := next
val offset = io.redirect.bits.ftqOffset
commitStateQueue(io.redirect.bits.ftqIdx.value).zipWithIndex.foreach({case(v, i) =>
when(i.U > offset || (isReplay && i.U === offset)){ // replay will not commit
v := false.B
}
})
commitStateQueue(next.value).foreach(_ := false.B)
}
}
}
......@@ -59,7 +59,7 @@ class Alu extends FunctionUnit with HasRedirectOut {
val isBranch = ALUOpType.isBranch(func)
val isRVC = uop.cf.pd.isRVC
val taken = isBranch && LookupTree(ALUOpType.getBranchType(func), branchOpTable) ^ ALUOpType.isBranchInvert(func)
val taken = LookupTree(ALUOpType.getBranchType(func), branchOpTable) ^ ALUOpType.isBranchInvert(func)
val target = (pc + offset)(VAddrBits-1,0)
val snpc = Mux(isRVC, pc + 2.U, pc + 4.U)
......@@ -71,7 +71,7 @@ class Alu extends FunctionUnit with HasRedirectOut {
redirectOut.roqIdx := uop.roqIdx
redirectOut.ftqIdx := uop.cf.ftqPtr
redirectOut.ftqOffset := uop.cf.ftqOffset
redirectOut.cfiUpdate.isMisPred := !uop.cf.pred_taken && taken
redirectOut.cfiUpdate.isMisPred := (uop.cf.pred_taken ^ taken) && isBranch
redirectOut.cfiUpdate.taken := taken
redirectOut.cfiUpdate.predTaken := uop.cf.pred_taken
......
......@@ -16,13 +16,13 @@ trait HasRedirectOut { this: RawModule =>
class Jump extends FunctionUnit with HasRedirectOut {
val (src1, immMin, func, pc, uop) = (
val (src1, immMin, func, uop) = (
io.in.bits.src(0),
io.in.bits.uop.ctrl.imm,
io.in.bits.uop.ctrl.fuOpType,
SignExt(io.in.bits.uop.cf.pc, AddrBits),
io.in.bits.uop
)
val pc = src1(VAddrBits - 1, 0)
val jalr_target = io.in.bits.src(1)(VAddrBits - 1, 0)
val isJalr = JumpOpType.jumpOpisJalr(func)
......@@ -40,7 +40,7 @@ class Jump extends FunctionUnit with HasRedirectOut {
val snpc = Mux(isRVC, pc + 2.U, pc + 4.U)
val target = src1 + offset // NOTE: src1 is (pc/rf(rs1)), src2 is (offset)
redirectOutValid := valid
redirectOutValid := valid && !isAuipc
redirectOut := DontCare
redirectOut.cfiUpdate.target := target
redirectOut.level := RedirectLevel.flushAfter
......
......@@ -73,6 +73,7 @@ class RoqDeqPtrWrapper extends XSModule with HasCircularQueuePtrHelper {
val intrBitSetReg = Input(Bool())
val hasNoSpecExec = Input(Bool())
val commitType = Input(CommitType())
val misPredBlock = Input(Bool())
// output: the CommitWidth deqPtr
val out = Vec(CommitWidth, Output(new RoqPtr))
val next_out = Vec(CommitWidth, Output(new RoqPtr))
......@@ -90,7 +91,7 @@ class RoqDeqPtrWrapper extends XSModule with HasCircularQueuePtrHelper {
// for normal commits: only to consider when there're no exceptions
// we don't need to consider whether the first instruction has exceptions since it wil trigger exceptions.
val commitBlocked = VecInit((0 until CommitWidth).map(i => if (i == 0) false.B else possibleException(i).asUInt.orR || io.deq_flushPipe(i)))
val canCommit = VecInit((0 until CommitWidth).map(i => io.deq_v(i) && io.deq_w(i) && !commitBlocked(i)))
val canCommit = VecInit((0 until CommitWidth).map(i => io.deq_v(i) && io.deq_w(i) && !commitBlocked(i) && !io.misPredBlock))
val normalCommitCnt = PriorityEncoder(canCommit.map(c => !c) :+ true.B)
// when io.intrBitSetReg, only one instruction is allowed to commit
val commitCnt = Mux(io.intrBitSetReg, io.deq_v(0) && io.deq_w(0), normalCommitCnt)
......@@ -400,7 +401,7 @@ class Roq(numWbPorts: Int) extends XSModule with HasCircularQueuePtrHelper {
io.exeWbResults(i).bits.redirect.cfiUpdate.isMisPred && io.exeWbResults(i).valid
))).orR()
val misPredBlockCounter = Reg(UInt(2.W))
misPredBlockCounter := Mux(misPredWb,
misPredBlockCounter := Mux(misPredWb,
"b11".U,
misPredBlockCounter >> 1.U
)
......@@ -488,6 +489,8 @@ class Roq(numWbPorts: Int) extends XSModule with HasCircularQueuePtrHelper {
deqPtrGenModule.io.intrBitSetReg := intrBitSetReg
deqPtrGenModule.io.hasNoSpecExec := hasNoSpecExec
deqPtrGenModule.io.commitType := deqDispatchData.commitType
deqPtrGenModule.io.misPredBlock := misPredBlock
deqPtrVec := deqPtrGenModule.io.out
val deqPtrVec_next = deqPtrGenModule.io.next_out
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册