diff --git a/arch/powerpc/include/asm/exception-64s.h b/arch/powerpc/include/asm/exception-64s.h
index 49b7faf7bbf2aa01a2dfa61c51a961208894fa76..ed3cdcb501985e42b0da216a68d59d363342334d 100644
--- a/arch/powerpc/include/asm/exception-64s.h
+++ b/arch/powerpc/include/asm/exception-64s.h
@@ -47,6 +47,7 @@
 #define EX_R3		64
 #define EX_LR		72
 #define EX_CFAR		80
+#define EX_PPR		88	/* SMT thread status register (priority) */
 
 #ifdef CONFIG_RELOCATABLE
 #define EXCEPTION_RELON_PROLOG_PSERIES_1(label, h)			\
diff --git a/arch/powerpc/include/asm/paca.h b/arch/powerpc/include/asm/paca.h
index e9e7a6999bb8ed4aff7e02da633b69a606b3b264..c47d687ab01beb8e641472b604a8fad4794785d8 100644
--- a/arch/powerpc/include/asm/paca.h
+++ b/arch/powerpc/include/asm/paca.h
@@ -93,9 +93,9 @@ struct paca_struct {
 	 * Now, starting in cacheline 2, the exception save areas
 	 */
 	/* used for most interrupts/exceptions */
-	u64 exgen[11] __attribute__((aligned(0x80)));
-	u64 exmc[11];		/* used for machine checks */
-	u64 exslb[11];		/* used for SLB/segment table misses
+	u64 exgen[12] __attribute__((aligned(0x80)));
+	u64 exmc[12];		/* used for machine checks */
+	u64 exslb[12];		/* used for SLB/segment table misses
 				 * on the linear mapping */
 	/* SLB related definitions */
 	u16 vmalloc_sllp;
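
Context (not part of the patch): each EX_* constant is a byte offset into the paca exception save areas, so the new EX_PPR = 88 lands in u64 slot 88 / 8 = 11, which is why exgen/exmc/exslb grow from 11 to 12 entries. A minimal, hypothetical C sketch of that offset/size relationship, using only the values visible in the hunks above:

	/* Hypothetical sketch, not kernel code: EX_* byte offsets index u64
	 * slots in the paca save areas.  EX_PPR = 88 is slot 88 / 8 = 11,
	 * so a 12-entry array is the smallest that can hold it. */
	#include <assert.h>
	#include <stdint.h>

	#define EX_CFAR	80
	#define EX_PPR	88	/* new slot added by this patch */

	int main(void)
	{
		uint64_t exgen[12];	/* was [11] before EX_PPR was added */

		/* the new slot must fit inside the enlarged save area */
		assert(EX_PPR / sizeof(uint64_t) < sizeof(exgen) / sizeof(exgen[0]));
		return 0;
	}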