Commit eb9add3c authored by William Wang

Merge remote-tracking branch 'origin/master' into temp-mem-timing-merge

@@ -26,10 +26,11 @@ object XSLog {
     val logTimestamp = WireInit(0.U(64.W))
     val enableDebug = Parameters.get.envParameters.EnableDebug && debugLevel != XSLogLevel.PERF
     val enablePerf = Parameters.get.envParameters.EnablePerfDebug && debugLevel == XSLogLevel.PERF
-    if (enableDebug || enablePerf) {
+    if (enableDebug || enablePerf || debugLevel == XSLogLevel.ERROR) {
       ExcitingUtils.addSink(logEnable, "DISPLAY_LOG_ENABLE")
       ExcitingUtils.addSink(logTimestamp, "logTimestamp")
-      when (cond && logEnable) {
+      val check_cond = (if (debugLevel == XSLogLevel.ERROR) true.B else logEnable) && cond
+      when (check_cond) {
         val commonInfo = p"[$debugLevel][time=$logTimestamp] $MagicStr: "
         printf((if (prefix) commonInfo else p"") + pable)
         if (debugLevel >= XSLogLevel.ERROR) {
......
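Net effect of this hunk: ERROR-level messages fire whenever their hardware condition holds, independent of the runtime logEnable switch that still gates the other levels. A minimal sketch of the gating pattern with standalone names (the real XSLog.apply carries more context around it):

    import chisel3._

    // Sketch: an elaboration-time flag decides whether the runtime enable is
    // consulted at all. ERROR-level logs bypass logEnable entirely.
    class LogGate(isError: Boolean) extends Module {
      val io = IO(new Bundle {
        val cond      = Input(Bool())
        val logEnable = Input(Bool())
        val fire      = Output(Bool())
      })
      // Scala-level if: resolved at elaboration, so the ERROR variant
      // contains no logEnable logic at all.
      io.fire := (if (isError) true.B else io.logEnable) && io.cond
    }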
@@ -186,9 +186,8 @@ class CtrlFlow extends XSBundle {
 class FtqEntry extends XSBundle {
   // fetch pc, pc of each inst could be generated by concatenation
-  val ftqPC = UInt((VAddrBits.W))
-  val hasLastPrev = Bool()
+  val ftqPC = UInt(VAddrBits.W)
+  val lastPacketPC = ValidUndirectioned(UInt(VAddrBits.W))
   // prediction metas
   val hist = new GlobalHistory
   val predHist = new GlobalHistory
@@ -208,9 +207,10 @@ class FtqEntry extends XSBundle {
   val target = UInt(VAddrBits.W)
   def takens = VecInit((0 until PredictWidth).map(i => cfiIndex.valid && cfiIndex.bits === i.U))
+  def hasLastPrev = lastPacketPC.valid
   override def toPrintable: Printable = {
-    p"ftqPC: ${Hexadecimal(ftqPC)} hasLastPrec:$hasLastPrev " +
+    p"ftqPC: ${Hexadecimal(ftqPC)} lastPacketPC: ${Hexadecimal(lastPacketPC.bits)} hasLastPrev:$hasLastPrev " +
       p"rasSp:$rasSp specCnt:$specCnt brmask:${Binary(Cat(br_mask))} rvcmask:${Binary(Cat(rvc_mask))} " +
       p"valids:${Binary(valids.asUInt())} cfi valid: ${cfiIndex.valid} " +
       p"cfi index: ${cfiIndex.bits} isCall:$cfiIsCall isRet:$cfiIsRet isRvc:$cfiIsRVC " +
......
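The single hasLastPrev bit is replaced by the full pc of the previous fetch packet, carried as a valid/bits pair; hasLastPrev survives as a derived accessor. A trimmed sketch of the new shape (ValidUndirectioned is assumed to be XiangShan's plain valid-plus-bits bundle):

    import chisel3._

    // Assumed reconstruction of the valid+bits pair behind ValidUndirectioned.
    class ValidUnd(width: Int) extends Bundle {
      val valid = Bool()
      val bits  = UInt(width.W)
    }

    // Trimmed FtqEntry: the previous packet's pc is stored, not re-derived.
    class FtqEntrySketch(vaddrBits: Int) extends Bundle {
      val ftqPC        = UInt(vaddrBits.W)
      val lastPacketPC = new ValidUnd(vaddrBits)
      def hasLastPrev  = lastPacketPC.valid // kept for existing call sites
    }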
@@ -78,9 +78,9 @@ case class XSCoreParameters
   StoreQueueSize: Int = 48,
   RoqSize: Int = 192,
   dpParams: DispatchParameters = DispatchParameters(
-    IntDqSize = 32,
-    FpDqSize = 32,
-    LsDqSize = 32,
+    IntDqSize = 16,
+    FpDqSize = 16,
+    LsDqSize = 16,
     IntDqDeqWidth = 4,
     FpDqDeqWidth = 4,
     LsDqDeqWidth = 4
......
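This halves each dispatch queue from 32 to 16 entries while keeping the dequeue width at 4. The commit does not state the motivation; given the branch name temp-mem-timing-merge, a smaller queue with shorter select/walk paths is plausibly a timing trade-off. For reference, the configuration shape being set (field list from the diff, other parameters omitted):

    // Sketch of the touched configuration, fields as named in the diff.
    case class DispatchParameters(
      IntDqSize: Int, FpDqSize: Int, LsDqSize: Int,
      IntDqDeqWidth: Int, FpDqDeqWidth: Int, LsDqDeqWidth: Int)

    val dpParams = DispatchParameters(
      IntDqSize = 16, FpDqSize = 16, LsDqSize = 16,
      IntDqDeqWidth = 4, FpDqDeqWidth = 4, LsDqDeqWidth = 4)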
@@ -116,7 +116,7 @@ class RedirectGenerator extends XSModule with HasCircularQueuePtrHelper {
   }.otherwise({
     s1_redirect_valid_reg := false.B
   })
-  io.stage2Redirect.valid := s1_redirect_valid_reg
+  io.stage2Redirect.valid := s1_redirect_valid_reg && !io.flush
   io.stage2Redirect.bits := s1_redirect_bits_reg
   io.stage2Redirect.bits.cfiUpdate := DontCare
   // at stage2, we read ftq to get pc
@@ -131,12 +131,19 @@ class RedirectGenerator extends XSModule with HasCircularQueuePtrHelper {
   val s2_redirect_valid_reg = RegNext(s1_redirect_valid_reg && !io.flush, init = false.B)
   val ftqRead = io.stage2FtqRead.entry
-  val pc = GetPcByFtq(ftqRead.ftqPC, s2_redirect_bits_reg.ftqOffset, ftqRead.hasLastPrev)
-  val brTarget = pc + SignExt(ImmUnion.B.toImm32(s2_imm12_reg), XLEN)
-  val snpc = pc + Mux(s2_pd.isRVC, 2.U, 4.U)
+  val cfiUpdate_pc =
+    Cat(ftqRead.ftqPC.head(VAddrBits - s2_redirect_bits_reg.ftqOffset.getWidth - instOffsetBits),
+      s2_redirect_bits_reg.ftqOffset,
+      0.U(instOffsetBits.W))
+  val real_pc =
+    GetPcByFtq(ftqRead.ftqPC, s2_redirect_bits_reg.ftqOffset,
+      ftqRead.lastPacketPC.valid,
+      ftqRead.lastPacketPC.bits)
+  val brTarget = real_pc + SignExt(ImmUnion.B.toImm32(s2_imm12_reg), XLEN)
+  val snpc = real_pc + Mux(s2_pd.isRVC, 2.U, 4.U)
   val isReplay = RedirectLevel.flushItself(s2_redirect_bits_reg.level)
   val target = Mux(isReplay,
-    pc, // repaly from itself
+    real_pc, // repaly from itself
     Mux(s2_redirect_bits_reg.cfiUpdate.taken,
       Mux(s2_isJump, s2_jumpTarget, brTarget),
       snpc
@@ -145,7 +152,7 @@ class RedirectGenerator extends XSModule with HasCircularQueuePtrHelper {
   io.stage3Redirect.valid := s2_redirect_valid_reg
   io.stage3Redirect.bits := s2_redirect_bits_reg
   val stage3CfiUpdate = io.stage3Redirect.bits.cfiUpdate
-  stage3CfiUpdate.pc := pc
+  stage3CfiUpdate.pc := cfiUpdate_pc
   stage3CfiUpdate.pd := s2_pd
   stage3CfiUpdate.rasSp := ftqRead.rasSp
   stage3CfiUpdate.rasEntry := ftqRead.rasTop
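Stage 2 now distinguishes two pcs: real_pc is the architectural pc of the redirecting instruction (falling back to the recorded lastPacketPC when the instruction started in the previous fetch packet) and feeds all target arithmetic, while cfiUpdate_pc is the plain packet-base concatenation handed back to the predictors at stage 3. A self-contained sketch of the target selection (names follow the diff, widths assumed):

    import chisel3._

    // Sketch of the redirect-target mux: a replay re-fetches the faulting
    // instruction itself; otherwise taken control flow goes to its computed
    // target and a mispredicted-taken branch falls back to sequential-next.
    object RedirectTarget {
      def apply(isReplay: Bool, taken: Bool, isJump: Bool,
                realPc: UInt, jumpTarget: UInt, brTarget: UInt, snpc: UInt): UInt =
        Mux(isReplay,
          realPc,                              // replay from itself
          Mux(taken,
            Mux(isJump, jumpTarget, brTarget), // taken: jump or branch target
            snpc))                             // not taken: fall through
    }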
@@ -213,13 +220,14 @@ class CtrlBlock extends XSModule with HasCircularQueuePtrHelper {
   val backendRedirect = redirectGen.io.stage2Redirect
   val frontendRedirect = redirectGen.io.stage3Redirect
   val flush = roq.io.flushOut.valid
+  val flushReg = RegNext(flush)
   redirectGen.io.exuMispredict.zip(io.fromIntBlock.exuRedirect).map({case (x, y) =>
     x.valid := y.valid && y.bits.redirect.cfiUpdate.isMisPred
     x.bits := y.bits
   })
   redirectGen.io.loadRelay := io.fromLsBlock.replay
-  redirectGen.io.flush := flush
+  redirectGen.io.flush := flushReg
   ftq.io.enq <> io.frontend.fetchInfo
   for(i <- 0 until CommitWidth){
@@ -227,9 +235,9 @@ class CtrlBlock extends XSModule with HasCircularQueuePtrHelper {
     ftq.io.roq_commits(i).bits := roq.io.commits.info(i)
   }
   ftq.io.redirect <> backendRedirect
-  ftq.io.flush := flush
-  ftq.io.flushIdx := roq.io.flushOut.bits.ftqIdx
-  ftq.io.flushOffset := roq.io.flushOut.bits.ftqOffset
+  ftq.io.flush := flushReg
+  ftq.io.flushIdx := RegNext(roq.io.flushOut.bits.ftqIdx)
+  ftq.io.flushOffset := RegNext(roq.io.flushOut.bits.ftqOffset)
   ftq.io.frontendRedirect <> frontendRedirect
   ftq.io.exuWriteback <> io.fromIntBlock.exuRedirect
@@ -238,11 +246,12 @@ class CtrlBlock extends XSModule with HasCircularQueuePtrHelper {
   val flushPC = GetPcByFtq(
     ftq.io.ftqRead(2).entry.ftqPC,
     RegEnable(roq.io.flushOut.bits.ftqOffset, roq.io.flushOut.valid),
-    ftq.io.ftqRead(2).entry.hasLastPrev
+    ftq.io.ftqRead(2).entry.lastPacketPC.valid,
+    ftq.io.ftqRead(2).entry.lastPacketPC.bits
   )
   val flushRedirect = Wire(Valid(new Redirect))
-  flushRedirect.valid := RegNext(flush)
+  flushRedirect.valid := flushReg
   flushRedirect.bits := DontCare
   flushRedirect.bits.ftqIdx := RegEnable(roq.io.flushOut.bits.ftqIdx, flush)
   flushRedirect.bits.interrupt := true.B
@@ -263,24 +272,26 @@ class CtrlBlock extends XSModule with HasCircularQueuePtrHelper {
   ftqOffsetReg := jumpInst.cf.ftqOffset
   ftq.io.ftqRead(0).ptr := jumpInst.cf.ftqPtr // jump
   io.toIntBlock.jumpPc := GetPcByFtq(
-    ftq.io.ftqRead(0).entry.ftqPC, ftqOffsetReg, ftq.io.ftqRead(0).entry.hasLastPrev
+    ftq.io.ftqRead(0).entry.ftqPC, ftqOffsetReg,
+    ftq.io.ftqRead(0).entry.lastPacketPC.valid,
+    ftq.io.ftqRead(0).entry.lastPacketPC.bits
   )
   io.toIntBlock.jalr_target := ftq.io.ftqRead(0).entry.target
   // pipeline between decode and dispatch
   for (i <- 0 until RenameWidth) {
     PipelineConnect(decode.io.out(i), rename.io.in(i), rename.io.in(i).ready,
-      backendRedirect.valid || flush || io.frontend.redirect_cfiUpdate.valid)
+      io.frontend.redirect_cfiUpdate.valid)
   }
   rename.io.redirect <> backendRedirect
-  rename.io.flush := flush
+  rename.io.flush := flushReg
   rename.io.roqCommits <> roq.io.commits
   rename.io.out <> dispatch.io.fromRename
   rename.io.renameBypass <> dispatch.io.renameBypass
   dispatch.io.redirect <> backendRedirect
-  dispatch.io.flush := flush
+  dispatch.io.flush := flushReg
   dispatch.io.enqRoq <> roq.io.enq
   dispatch.io.enqLsq <> io.toLsBlock.enqLsq
   dispatch.io.readIntRf <> io.toIntBlock.readRf
@@ -296,8 +307,8 @@ class CtrlBlock extends XSModule with HasCircularQueuePtrHelper {
   // dispatch.io.enqIQData <> io.toIntBlock.enqIqData ++ io.toFpBlock.enqIqData ++ io.toLsBlock.enqIqData
-  fpBusyTable.io.flush := flush
-  intBusyTable.io.flush := flush
+  fpBusyTable.io.flush := flushReg
+  intBusyTable.io.flush := flushReg
   for((wb, setPhyRegRdy) <- io.fromIntBlock.wbRegs.zip(intBusyTable.io.wbPregs)){
     setPhyRegRdy.valid := wb.valid && wb.bits.uop.ctrl.rfWen
     setPhyRegRdy.bits := wb.bits.uop.pdest
@@ -320,11 +331,11 @@ class CtrlBlock extends XSModule with HasCircularQueuePtrHelper {
   // TODO: is 'backendRedirect' necesscary?
   io.toIntBlock.redirect <> backendRedirect
-  io.toIntBlock.flush <> flush
+  io.toIntBlock.flush <> flushReg
   io.toFpBlock.redirect <> backendRedirect
-  io.toFpBlock.flush <> flush
+  io.toFpBlock.flush <> flushReg
   io.toLsBlock.redirect <> backendRedirect
-  io.toLsBlock.flush <> flush
+  io.toLsBlock.flush <> flushReg
   if (env.DualCoreDifftest) {
     difftestIO.fromRoq <> roq.difftestIO
......
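The recurring edit in CtrlBlock is a one-cycle retiming of the ROB flush: flushReg = RegNext(flush) now drives every consumer (redirectGen, ftq, rename, dispatch, the busy tables and the execution blocks), and the FTQ's flushIdx/flushOffset are registered in the same cycle so the valid and its payload stay aligned. This reads as a timing fix, consistent with the branch name. A minimal sketch of the pattern (widths illustrative):

    import chisel3._

    // Sketch: register a flush pulse together with its payload so every
    // consumer sees a consistent, one-cycle-delayed view of both.
    class FlushRetime extends Module {
      val io = IO(new Bundle {
        val flushIn     = Input(Bool())
        val flushIdxIn  = Input(UInt(6.W)) // width assumed
        val flushOut    = Output(Bool())
        val flushIdxOut = Output(UInt(6.W))
      })
      io.flushOut    := RegNext(io.flushIn, init = false.B)
      io.flushIdxOut := RegNext(io.flushIdxIn) // payload moves in lock-step
    }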
@@ -102,7 +102,7 @@ class Dispatch1 extends XSModule with HasExceptionNO {
     updatedUop(i).ctrl.commitType := updatedCommitType(i)
     // update roqIdx, lqIdx, sqIdx
     // updatedUop(i).roqIdx := io.enqRoq.resp(i)
-    XSError(io.fromRename(i).valid && updatedUop(i).roqIdx.asUInt =/= io.enqRoq.resp(i).asUInt, "they should equal")
+    // XSError(io.fromRename(i).valid && updatedUop(i).roqIdx.asUInt =/= io.enqRoq.resp(i).asUInt, "they should equal")
     updatedUop(i).lqIdx := io.enqLsq.resp(i).lqIdx
     updatedUop(i).sqIdx := io.enqLsq.resp(i).sqIdx
   }
......
@@ -81,7 +81,7 @@ class DispatchQueue(size: Int, enqnum: Int, deqnum: Int) extends XSModule with H
     when (io.deq(i).fire() && !(io.redirect.valid || io.flush)) {
       stateEntries(headPtr(i).value) := s_invalid
-      XSError(stateEntries(headPtr(i).value) =/= s_valid, "state of the dispatch entry is not s_valid\n")
+      // XSError(stateEntries(headPtr(i).value) =/= s_valid, "state of the dispatch entry is not s_valid\n")
     }
   }
@@ -149,8 +149,7 @@ class DispatchQueue(size: Int, enqnum: Int, deqnum: Int) extends XSModule with H
       Mux(isTrueEmpty, headPtr(0), walkedTailPtr),
       tailPtr(0) + numEnq))
   )
-  val lastCycleException = RegNext(io.flush)
-  val lastLastCycleMisprediction = RegNext(lastCycleMisprediction)
+  val lastLastCycleMisprediction = RegNext(lastCycleMisprediction && !io.flush)
   for (i <- 1 until enqnum) {
     tailPtr(i) := Mux(io.flush,
       i.U.asTypeOf(new CircularQueuePtr(size)),
@@ -203,6 +202,6 @@ class DispatchQueue(size: Int, enqnum: Int, deqnum: Int) extends XSModule with H
   }
   XSDebug(false, true.B, "\n")
-  XSError(isAfter(headPtr(0), tailPtr(0)), p"assert greaterOrEqualThan(tailPtr: ${tailPtr(0)}, headPtr: ${headPtr(0)}) failed\n")
+  // XSError(isAfter(headPtr(0), tailPtr(0)), p"assert greaterOrEqualThan(tailPtr: ${tailPtr(0)}, headPtr: ${headPtr(0)}) failed\n")
   XSPerf("utilization", PopCount(stateEntries.map(_ =/= s_invalid)))
 }
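With the flush now reaching the queue a cycle later than the event that caused it, pointers can be transiently inconsistent around a flush; that is presumably why the two assertions above are disabled and why the delayed misprediction flag is now killed by io.flush. A sketch of that qualification (names illustrative):

    import chisel3._

    // Sketch: a flush arriving while a misprediction walk is still settling
    // must win, so the delayed flag is squashed by io.flush.
    class WalkVsFlush extends Module {
      val io = IO(new Bundle {
        val misprediction = Input(Bool())
        val flush         = Input(Bool())
        val useWalkPtr    = Output(Bool())
      })
      val lastCycleMisprediction = RegNext(io.misprediction, init = false.B)
      // Without "&& !io.flush" the stale walk result could override the
      // flush-reset tail pointer one cycle later.
      val lastLastCycleMisprediction =
        RegNext(lastCycleMisprediction && !io.flush, init = false.B)
      io.useWalkPtr := lastLastCycleMisprediction
    }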
@@ -18,12 +18,14 @@ object FtqPtr extends HasXSParameter {
 }
 object GetPcByFtq extends HasXSParameter {
-  def apply(ftqPC: UInt, ftqOffset: UInt, hasLastPrev: Bool) = {
+  def apply(ftqPC: UInt, ftqOffset: UInt, hasLastPrev: Bool, lastPacketPC: UInt) = {
     assert(ftqPC.getWidth == VAddrBits)
+    assert(lastPacketPC.getWidth == VAddrBits)
     assert(ftqOffset.getWidth == log2Up(PredictWidth))
     val idxBits = ftqPC.head(VAddrBits - ftqOffset.getWidth - instOffsetBits)
+    val lastIdxBits = lastPacketPC.head(VAddrBits - ftqOffset.getWidth - instOffsetBits)
     val selLastPacket = hasLastPrev && (ftqOffset === 0.U)
-    val packetIdx = Mux(selLastPacket, idxBits - 1.U, idxBits)
+    val packetIdx = Mux(selLastPacket, lastIdxBits, idxBits)
     Cat(
       packetIdx, // packet pc
       Mux(selLastPacket, Fill(ftqOffset.getWidth, 1.U(1.W)), ftqOffset),
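The substantive fix is in packetIdx: the old code guessed the previous packet's base as idxBits - 1.U, i.e. it assumed the previous fetch packet sat at the numerically preceding address, which breaks when that packet was fetched from a non-contiguous pc (after a redirect, for instance). Passing the recorded lastPacketPC removes the guess. A self-contained sketch of the reconstruction (2-byte instruction alignment assumed):

    import chisel3._
    import chisel3.util._

    // Sketch of GetPcByFtq: rebuild an instruction's pc from its packet base
    // and offset; offset 0 with a pending previous half really belongs to the
    // end of the previous packet, so use that packet's recorded pc instead.
    object GetPcSketch {
      def apply(ftqPC: UInt, ftqOffset: UInt, hasLastPrev: Bool,
                lastPacketPC: UInt, instOffsetBits: Int = 1): UInt = {
        val offsetW     = ftqOffset.getWidth
        val idxW        = ftqPC.getWidth - offsetW - instOffsetBits
        val idxBits     = ftqPC.head(idxW)
        val lastIdxBits = lastPacketPC.head(idxW)
        val selLast     = hasLastPrev && ftqOffset === 0.U
        val packetIdx   = Mux(selLast, lastIdxBits, idxBits)
        Cat(packetIdx,
            Mux(selLast, Fill(offsetW, 1.U(1.W)), ftqOffset),
            0.U(instOffsetBits.W))
      }
    }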
@@ -58,7 +60,7 @@ class FtqNRSRAM[T <: Data](gen: T, numRead: Int) extends XSModule {
 class Ftq_4R_SRAMEntry extends XSBundle {
   val ftqPC = UInt(VAddrBits.W)
-  val hasLastPrev = Bool()
+  val lastPacketPC = ValidUndirectioned(UInt(VAddrBits.W))
 }
 // redirect and commit need read these infos
@@ -120,7 +122,7 @@ class Ftq extends XSModule with HasCircularQueuePtrHelper {
   ftq_4r_sram.io.wen := real_fire
   ftq_4r_sram.io.waddr := tailPtr.value
   ftq_4r_sram.io.wdata.ftqPC := io.enq.bits.ftqPC
-  ftq_4r_sram.io.wdata.hasLastPrev := io.enq.bits.hasLastPrev
+  ftq_4r_sram.io.wdata.lastPacketPC := io.enq.bits.lastPacketPC
   val ftq_2r_sram = Module(new FtqNRSRAM(new Ftq_2R_SRAMEntry, 2))
   ftq_2r_sram.io.wen := real_fire
   ftq_2r_sram.io.waddr := tailPtr.value
@@ -220,7 +222,7 @@ class Ftq extends XSModule with HasCircularQueuePtrHelper {
   })
   // from 4r sram
   commitEntry.ftqPC := ftq_4r_sram.io.rdata(0).ftqPC
-  commitEntry.hasLastPrev := ftq_4r_sram.io.rdata(0).hasLastPrev
+  commitEntry.lastPacketPC := ftq_4r_sram.io.rdata(0).lastPacketPC
   // from 2r sram
   commitEntry.rasSp := ftq_2r_sram.io.rdata(0).rasSp
   commitEntry.rasTop := ftq_2r_sram.io.rdata(0).rasEntry
@@ -249,7 +251,7 @@ class Ftq extends XSModule with HasCircularQueuePtrHelper {
     ftq_4r_sram.io.raddr(1 + i) := req.ptr.value
     ftq_4r_sram.io.ren(1 + i) := true.B
     req.entry.ftqPC := ftq_4r_sram.io.rdata(1 + i).ftqPC
-    req.entry.hasLastPrev := ftq_4r_sram.io.rdata(1 + i).hasLastPrev
+    req.entry.lastPacketPC := ftq_4r_sram.io.rdata(1 + i).lastPacketPC
     if(i == 0){ // jump, read npc
       pred_target_sram.io.raddr(0) := req.ptr.value
       pred_target_sram.io.ren(0) := true.B
......
@@ -746,7 +746,7 @@ class CSR extends FunctionUnit with HasCSRConst
   // val delegS = ((deleg & (1 << (causeNO & 0xf))) != 0) && (priviledgeMode < ModeM);
   val delegS = deleg(causeNO(3,0)) && (priviledgeMode < ModeM)
   val tvalWen = !(hasInstrPageFault || hasLoadPageFault || hasStorePageFault || hasLoadAddrMisaligned || hasStoreAddrMisaligned) || raiseIntr // TODO: need check
-  val isXRet = func === CSROpType.jmp && !isEcall
+  val isXRet = io.in.valid && func === CSROpType.jmp && !isEcall
   // ctrl block use these 2 cycles later
   //  0       1       2
   //  XRet
@@ -894,7 +894,7 @@ class CSR extends FunctionUnit with HasCSRConst
   }
   ExcitingUtils.addSource(difftestIntrNO, "difftestIntrNOfromCSR")
-  ExcitingUtils.addSource(causeNO, "difftestCausefromCSR")
+  ExcitingUtils.addSource(Mux(csrio.exception.valid, causeNO, 0.U), "difftestCausefromCSR")
   ExcitingUtils.addSource(priviledgeMode, "difftestMode", Debug)
   ExcitingUtils.addSource(mstatus, "difftestMstatus", Debug)
   ExcitingUtils.addSource(mstatus & sstatusRmask, "difftestSstatus", Debug)
......
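Both CSR fixes share one idea: qualify a combinational decode with the transaction that owns it. isXRet may only fire while a CSR op is actually presented (io.in.valid), and the difftest cause channel must only carry a nonzero cause while an exception is actually being taken. A minimal sketch of the valid-qualified decode (the opcode is a placeholder, not the real CSROpType encoding):

    import chisel3._

    // Sketch: without the valid qualifier, a stale func value left on the
    // input bus could spuriously decode as xRET and trigger its side effects.
    class ValidQualifiedDecode extends Module {
      val io = IO(new Bundle {
        val valid  = Input(Bool())
        val func   = Input(UInt(7.W)) // width assumed
        val isXRet = Output(Bool())
      })
      val JMP = 0x40.U // placeholder opcode
      io.isXRet := io.valid && io.func === JMP
    }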
@@ -19,7 +19,7 @@ class Jump extends FunctionUnit with HasRedirectOut {
   val (src1, jalr_target, pc, immMin, func, uop) = (
     io.in.bits.src(0),
     io.in.bits.src(1)(VAddrBits - 1, 0),
-    io.in.bits.uop.cf.pc,
+    SignExt(io.in.bits.uop.cf.pc, XLEN),
     io.in.bits.uop.ctrl.imm,
     io.in.bits.uop.ctrl.fuOpType,
     io.in.bits.uop
......
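The frontend pc is only VAddrBits wide, so it must be widened before XLEN arithmetic (link-value formation, target computation); for Sv39-style canonical addresses the upper bits replicate the top address bit, hence sign extension rather than zero extension. A reconstruction of the SignExt helper under that assumed semantics:

    import chisel3._
    import chisel3.util._

    // Assumed reconstruction of utils.SignExt: replicate the msb of x until
    // the value is len bits wide (or truncate if it is already wider).
    object SignExtSketch {
      def apply(x: UInt, len: Int): UInt = {
        val w = x.getWidth
        if (w >= len) x(len - 1, 0)
        else Cat(Fill(len - w, x(w - 1)), x)
      }
    }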
 package xiangshan.backend.fu
 import chisel3._
 import chisel3.stage.{ChiselGeneratorAnnotation, ChiselStage}
 import chisel3.util._
+import utils.SignExt
 import xiangshan.backend.fu.util.CSA3_2
@@ -11,7 +12,7 @@ import xiangshan.backend.fu.util.CSA3_2
   */
 class SRT4Divider(len: Int) extends AbstractDivider(len) {
-  val s_idle :: s_lzd :: s_normlize :: s_recurrence :: s_recovery :: s_finish :: Nil = Enum(6)
+  val s_idle :: s_lzd :: s_normlize :: s_recurrence :: s_recovery_1 :: s_recovery_2 :: s_finish :: Nil = Enum(7)
   val state = RegInit(s_idle)
   val newReq = (state === s_idle) && io.in.fire()
   val cnt_next = Wire(UInt(log2Up((len+3)/2).W))
@@ -48,9 +49,12 @@ class SRT4Divider(len: Int) extends AbstractDivider(len) {
       state := s_recurrence
     }
     is(s_recurrence){ // (ws[j+1], wc[j+1]) = 4(ws[j],wc[j]) - q(j+1)*d
-      when(rec_enough){ state := s_recovery }
+      when(rec_enough){ state := s_recovery_1 }
     }
-    is(s_recovery){ // if rem < 0, rem = rem + d
+    is(s_recovery_1){ // if rem < 0, rem = rem + d
+      state := s_recovery_2
+    }
+    is(s_recovery_2){ // recovery shift
       state := s_finish
     }
     is(s_finish){
@@ -99,7 +103,7 @@ class SRT4Divider(len: Int) extends AbstractDivider(len) {
   val rem_temp = ws + wc
   val rem_fixed = Mux(rem_temp(wLen-1), rem_temp + d, rem_temp)
-  val rem_abs = (rem_fixed << recoveryShift)(2*len, len+1)
+  val rem_abs = (ws << recoveryShift)(2*len, len+1)
   when(newReq){
     ws := Cat(0.U(4.W), Mux(divZero, a, aVal))
@@ -111,7 +115,9 @@ class SRT4Divider(len: Int) extends AbstractDivider(len) {
   }.elsewhen(state === s_recurrence){
     ws := Mux(rec_enough, ws_next, ws_next << 2)
     wc := Mux(rec_enough, wc_next, wc_next << 2)
-  }.elsewhen(state === s_recovery){
+  }.elsewhen(state === s_recovery_1){
+    ws := rem_fixed
+  }.elsewhen(state === s_recovery_2){
     ws := rem_abs
   }
@@ -208,7 +214,7 @@ class SRT4Divider(len: Int) extends AbstractDivider(len) {
       qm := MuxLookup(q_sel, 0.U,
         qmMap.map(m => m._1 -> Cat(m._2._1(len-3, 0), m._2._2.U(2.W)))
       )
-    }.elsewhen(state === s_recovery){
+    }.elsewhen(state === s_recovery_1){
       q := Mux(rem_temp(wLen-1), qm, q)
     }
......
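The recovery step used to do the conditional remainder fix-up (add d back when the carry-save remainder is negative) and the recovery shift in one cycle; the split registers the fixed remainder into ws during s_recovery_1, so s_recovery_2 shifts an already-registered value and the adder and shifter no longer share a single path. That is also why rem_abs now shifts ws rather than rem_fixed. A compact FSM sketch of the split (datapath omitted):

    import chisel3._
    import chisel3.util._

    // Sketch: one recovery state becomes two, one cycle per step.
    class RecoverySplit extends Module {
      val io = IO(new Bundle {
        val start = Input(Bool())
        val done  = Output(Bool())
      })
      val sIdle :: sFix :: sShift :: sFinish :: Nil = Enum(4)
      val state = RegInit(sIdle)
      switch(state) {
        is(sIdle)   { when(io.start) { state := sFix } }
        is(sFix)    { state := sShift }  // rem := rem + d when negative
        is(sShift)  { state := sFinish } // shift the fixed remainder
        is(sFinish) { state := sIdle }
      }
      io.done := state === sFinish
    }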
@@ -152,7 +152,7 @@ class RoqEnqPtrWrapper extends XSModule with HasCircularQueuePtrHelper {
   // enqueue
   val canAccept = io.allowEnqueue && !io.hasBlockBackward
-  val dispatchNum = Mux(canAccept, PopCount(io.enq), 0.U)
+  val dispatchNum = Mux(canAccept && !RegNext(redirectOutValid), PopCount(io.enq), 0.U)
   when (redirectOutValid) {
     enqPtr := 0.U.asTypeOf(new RoqPtr)
@@ -395,7 +395,6 @@ class Roq(numWbPorts: Int) extends XSModule with HasCircularQueuePtrHelper {
   io.exception.valid := RegNext(exceptionHappen)
   io.exception.bits.uop := RegEnable(debug_deqUop, exceptionHappen)
   io.exception.bits.uop.ctrl.commitType := RegEnable(deqDispatchData.commitType, exceptionHappen)
-  io.exception.bits.uop.cf.pc := DontCare // we get pc at ftq, so roq don't save pc
   io.exception.bits.uop.cf.exceptionVec := RegEnable(deqExceptionVec, exceptionHappen)
   io.exception.bits.uop.cf.crossPageIPFFix := RegEnable(deqDispatchData.crossPageIPFFix, exceptionHappen)
   io.exception.bits.isInterrupt := RegEnable(intrEnable, exceptionHappen)
@@ -620,7 +619,7 @@ class Roq(numWbPorts: Int) extends XSModule with HasCircularQueuePtrHelper {
   // enqueue logic writes 6 valid
   for (i <- 0 until RenameWidth) {
-    when (canEnqueue(i) && !io.redirect.valid) {
+    when (canEnqueue(i) && !io.redirect.valid && !RegNext(io.flushOut.valid)) {
       valid(enqPtrVec(i).value) := true.B
     }
   }
@@ -843,8 +842,8 @@ class Roq(numWbPorts: Int) extends XSModule with HasCircularQueuePtrHelper {
     isRVC(i) := uop.cf.pd.isRVC
   }
   val retireCounterFix = Mux(io.exception.valid, 1.U, retireCounter)
-  val retirePCFix = SignExt(Mux(io.exception.valid, debug_deqUop.cf.pc, debug_microOp(firstValidCommit).cf.pc), XLEN)
-  val retireInstFix = Mux(io.exception.valid, debug_deqUop.cf.instr, debug_microOp(firstValidCommit).cf.instr)
+  val retirePCFix = SignExt(Mux(io.exception.valid, io.exception.bits.uop.cf.pc, debug_microOp(firstValidCommit).cf.pc), XLEN)
+  val retireInstFix = Mux(io.exception.valid, io.exception.bits.uop.cf.instr, debug_microOp(firstValidCommit).cf.instr)
   val scFailed = !diffTestDebugLrScValid(0) &&
     debug_deqUop.ctrl.fuType === FuType.mou &&
......
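Two guards appear around the ROB flush: dispatchNum is forced to zero in the cycle right after a flush (while enqPtr is being reset) and the valid bits are likewise not set in that cycle, closing a window where a stale enqueue could land on a freshly reset pointer. Separately, the difftest retire pc/instr now come from io.exception.bits.uop, which is registered by the same exceptionHappen enable as io.exception.valid, instead of the unregistered debug_deqUop, so the reported pc matches the exception actually taken. A sketch of the enqueue gating (port count and widths illustrative):

    import chisel3._

    // Sketch: suppress valid-bit writes in the cycle after a flush, when the
    // enqueue pointer is being reset and incoming requests are stale.
    class GatedEnqValid(n: Int) extends Module {
      val io = IO(new Bundle {
        val canEnq   = Input(Vec(n, Bool()))
        val redirect = Input(Bool())
        val flush    = Input(Bool())
        val setValid = Output(Vec(n, Bool()))
      })
      val flushLastCycle = RegNext(io.flush, init = false.B)
      for (i <- 0 until n) {
        io.setValid(i) := io.canEnq(i) && !io.redirect && !flushLastCycle
      }
    }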
@@ -409,13 +409,15 @@ class IFU extends XSModule with HasIFUConst with HasCircularQueuePtrHelper
   toFtqBuf := DontCare
   toFtqBuf.ftqPC := if4_pc
+  toFtqBuf.lastPacketPC.valid := if4_pendingPrevHalfInstr
+  toFtqBuf.lastPacketPC.bits := if4_prevHalfInstr.bits.pc
   toFtqBuf.hist := final_gh
   toFtqBuf.predHist := if4_predHist.asTypeOf(new GlobalHistory)
   toFtqBuf.rasSp := bpu.io.brInfo.rasSp
   toFtqBuf.rasTop := bpu.io.brInfo.rasTop
   toFtqBuf.specCnt := bpu.io.brInfo.specCnt
   toFtqBuf.metas := bpu.io.brInfo.metas
-  toFtqBuf.hasLastPrev := if4_pendingPrevHalfInstr
   val if4_jmpIdx = WireInit(if4_bp.jmpIdx)
   val if4_taken = WireInit(if4_bp.taken)
......
@@ -237,7 +237,7 @@ class LoadQueue extends XSModule
   })).asUInt() // use uint instead vec to reduce verilog lines
   val evenDeqMask = getEvenBits(deqMask)
   val oddDeqMask = getOddBits(deqMask)
-  // generate lastCycleSelect mask
+  // generate lastCycleSelect mask
   val evenSelectMask = Mux(io.ldout(0).fire(), getEvenBits(UIntToOH(loadWbSel(0))), 0.U)
   val oddSelectMask = Mux(io.ldout(1).fire(), getOddBits(UIntToOH(loadWbSel(1))), 0.U)
   // generate real select vec
@@ -254,7 +254,7 @@ class LoadQueue extends XSModule
   loadWbSelVGen(0) := loadEvenSelVec.asUInt.orR
   loadWbSelGen(1) := Cat(getFirstOne(toVec(loadOddSelVec), oddDeqMask), 1.U(1.W))
   loadWbSelVGen(1) := loadOddSelVec.asUInt.orR
   (0 until LoadPipelineWidth).map(i => {
     loadWbSel(i) := RegNext(loadWbSelGen(i))
     loadWbSelV(i) := RegNext(loadWbSelVGen(i), init = false.B)
@@ -157,8 +157,8 @@ class LoadUnit_S2 extends XSModule with HasLoadHelper {
   val s2_mask = io.in.bits.mask
   val s2_paddr = io.in.bits.paddr
   val s2_tlb_miss = io.in.bits.tlbMiss
-  val s2_mmio = io.in.bits.mmio
   val s2_exception = selectLoad(io.in.bits.uop.cf.exceptionVec, false).asUInt.orR
+  val s2_mmio = io.in.bits.mmio && !s2_exception
   val s2_cache_miss = io.dcacheResp.bits.miss
   val s2_cache_replay = io.dcacheResp.bits.replay
......
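Here, and symmetrically in StoreUnit_S1 below, the mmio flag is squashed when the access already raises an exception: a faulting access must take the trap path rather than be issued to the uncached bus. A minimal sketch of the priority (signal names illustrative):

    import chisel3._

    // Sketch: exception beats mmio. A faulting access must trap, not go out
    // on the uncached path.
    class MmioExcPriority extends Module {
      val io = IO(new Bundle {
        val mmioFromTlb = Input(Bool())
        val exception   = Input(Bool())
        val mmio        = Output(Bool())
      })
      io.mmio := io.mmioFromTlb && !io.exception
    }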
@@ -76,6 +76,8 @@ class StoreUnit_S1 extends XSModule {
   val s1_paddr = io.dtlbResp.bits.paddr
   val s1_tlb_miss = io.dtlbResp.bits.miss
+  val s1_mmio = io.dtlbResp.bits.mmio
+  val s1_exception = selectStore(io.out.bits.uop.cf.exceptionVec, false).asUInt.orR
   io.in.ready := true.B
@@ -98,13 +100,12 @@ class StoreUnit_S1 extends XSModule {
   io.lsq.bits := io.in.bits
   io.lsq.bits.paddr := s1_paddr
   io.lsq.bits.miss := false.B
-  io.lsq.bits.mmio := io.dtlbResp.bits.mmio
+  io.lsq.bits.mmio := s1_mmio && !s1_exception
   io.lsq.bits.uop.cf.exceptionVec(storePageFault) := io.dtlbResp.bits.excp.pf.st
   io.lsq.bits.uop.cf.exceptionVec(storeAccessFault) := io.dtlbResp.bits.excp.af.st
   // mmio inst with exception will be writebacked immediately
-  val hasException = selectStore(io.out.bits.uop.cf.exceptionVec, false).asUInt.orR
-  io.out.valid := io.in.valid && (!io.out.bits.mmio || hasException) && !s1_tlb_miss
+  io.out.valid := io.in.valid && (!io.out.bits.mmio || s1_exception) && !s1_tlb_miss
   io.out.bits := io.lsq.bits
   // encode data for fp store
......
@@ -6,9 +6,9 @@ import chisel3.util._
 import chipsalliance.rocketchip.config
 import chisel3.stage.ChiselGeneratorAnnotation
 import device._
-import freechips.rocketchip.amba.axi4.{AXI4UserYanker, AXI4Xbar, AXI4IdentityNode}
+import freechips.rocketchip.amba.axi4.{AXI4IdIndexer, AXI4IdentityNode, AXI4UserYanker, AXI4Xbar}
 import freechips.rocketchip.diplomacy.{AddressSet, BufferParams, LazyModule, LazyModuleImp}
-import freechips.rocketchip.tilelink.{TLToAXI4}
+import freechips.rocketchip.tilelink.TLToAXI4
 import xiangshan._
 import utils._
 import ExcitingUtils.Debug
@@ -100,7 +100,7 @@ class XSSimSoC(axiSim: Boolean)(implicit p: config.Parameters) extends LazyModul
       startAddr = 0x80000000L,
       nOp = 0,
       beatBytes = L3BusWidth / 8))
-  soc.dma := burst.node
+  soc.dma := AXI4IdIndexer(16) := burst.node
   // AXI MMIO
   // -----------------------------------
......