2c2
< * Copyright (c) 2010 ARM Limited
---
> * Copyright (c) 2010, 2012-2013 ARM Limited
42a43,44
> * Giacomo Gabrielli
> * Thomas Grocutt
45a48,51
> #include "arch/arm/system.hh"
> #include "arch/arm/utility.hh"
> #include "arch/arm/insts/static_inst.hh"
> #include "base/compiler.hh"
55,56c61,100
< template<> ArmFault::FaultVals ArmFaultVals<Reset>::vals =
< {"reset", 0x00, MODE_SVC, 0, 0, true, true, FaultStat()};
---
> uint8_t ArmFault::shortDescFaultSources[] = {
> 0x01, // AlignmentFault
> 0x04, // InstructionCacheMaintenance
> 0xff, // SynchExtAbtOnTranslTableWalkL0 (INVALID)
> 0x0c, // SynchExtAbtOnTranslTableWalkL1
> 0x0e, // SynchExtAbtOnTranslTableWalkL2
> 0xff, // SynchExtAbtOnTranslTableWalkL3 (INVALID)
> 0xff, // SynchPtyErrOnTranslTableWalkL0 (INVALID)
> 0x1c, // SynchPtyErrOnTranslTableWalkL1
> 0x1e, // SynchPtyErrOnTranslTableWalkL2
> 0xff, // SynchPtyErrOnTranslTableWalkL3 (INVALID)
> 0xff, // TranslationL0 (INVALID)
> 0x05, // TranslationL1
> 0x07, // TranslationL2
> 0xff, // TranslationL3 (INVALID)
> 0xff, // AccessFlagL0 (INVALID)
> 0x03, // AccessFlagL1
> 0x06, // AccessFlagL2
> 0xff, // AccessFlagL3 (INVALID)
> 0xff, // DomainL0 (INVALID)
> 0x09, // DomainL1
> 0x0b, // DomainL2
> 0xff, // DomainL3 (INVALID)
> 0xff, // PermissionL0 (INVALID)
> 0x0d, // PermissionL1
> 0x0f, // PermissionL2
> 0xff, // PermissionL3 (INVALID)
> 0x02, // DebugEvent
> 0x08, // SynchronousExternalAbort
> 0x10, // TLBConflictAbort
> 0x19, // SynchPtyErrOnMemoryAccess
> 0x16, // AsynchronousExternalAbort
> 0x18, // AsynchPtyErrOnMemoryAccess
> 0xff, // AddressSizeL0 (INVALID)
> 0xff, // AddressSizeL1 (INVALID)
> 0xff, // AddressSizeL2 (INVALID)
> 0xff, // AddressSizeL3 (INVALID)
> 0x40, // PrefetchTLBMiss
> 0x80 // PrefetchUncacheable
> };
58,60c102,104
< template<> ArmFault::FaultVals ArmFaultVals<UndefinedInstruction>::vals =
< {"Undefined Instruction", 0x04, MODE_UNDEFINED, 4 ,2, false, false,
< FaultStat()} ;
---
> static_assert(sizeof(ArmFault::shortDescFaultSources) ==
> ArmFault::NumFaultSources,
> "Invalid size of ArmFault::shortDescFaultSources[]");
62,63c106,145
< template<> ArmFault::FaultVals ArmFaultVals<SupervisorCall>::vals =
< {"Supervisor Call", 0x08, MODE_SVC, 4, 2, false, false, FaultStat()};
---
> uint8_t ArmFault::longDescFaultSources[] = {
> 0x21, // AlignmentFault
> 0xff, // InstructionCacheMaintenance (INVALID)
> 0xff, // SynchExtAbtOnTranslTableWalkL0 (INVALID)
> 0x15, // SynchExtAbtOnTranslTableWalkL1
> 0x16, // SynchExtAbtOnTranslTableWalkL2
> 0x17, // SynchExtAbtOnTranslTableWalkL3
> 0xff, // SynchPtyErrOnTranslTableWalkL0 (INVALID)
> 0x1d, // SynchPtyErrOnTranslTableWalkL1
> 0x1e, // SynchPtyErrOnTranslTableWalkL2
> 0x1f, // SynchPtyErrOnTranslTableWalkL3
> 0xff, // TranslationL0 (INVALID)
> 0x05, // TranslationL1
> 0x06, // TranslationL2
> 0x07, // TranslationL3
> 0xff, // AccessFlagL0 (INVALID)
> 0x09, // AccessFlagL1
> 0x0a, // AccessFlagL2
> 0x0b, // AccessFlagL3
> 0xff, // DomainL0 (INVALID)
> 0x3d, // DomainL1
> 0x3e, // DomainL2
> 0xff, // DomainL3 (RESERVED)
> 0xff, // PermissionL0 (INVALID)
> 0x0d, // PermissionL1
> 0x0e, // PermissionL2
> 0x0f, // PermissionL3
> 0x22, // DebugEvent
> 0x10, // SynchronousExternalAbort
> 0x30, // TLBConflictAbort
> 0x18, // SynchPtyErrOnMemoryAccess
> 0x11, // AsynchronousExternalAbort
> 0x19, // AsynchPtyErrOnMemoryAccess
> 0xff, // AddressSizeL0 (INVALID)
> 0xff, // AddressSizeL1 (INVALID)
> 0xff, // AddressSizeL2 (INVALID)
> 0xff, // AddressSizeL3 (INVALID)
> 0x40, // PrefetchTLBMiss
> 0x80 // PrefetchUncacheable
> };
65,66c147,149
< template<> ArmFault::FaultVals ArmFaultVals<PrefetchAbort>::vals =
< {"Prefetch Abort", 0x0C, MODE_ABORT, 4, 4, true, false, FaultStat()};
---
> static_assert(sizeof(ArmFault::longDescFaultSources) ==
> ArmFault::NumFaultSources,
> "Invalid size of ArmFault::longDescFaultSources[]");
68,69c151,191
< template<> ArmFault::FaultVals ArmFaultVals<DataAbort>::vals =
< {"Data Abort", 0x10, MODE_ABORT, 8, 8, true, false, FaultStat()};
---
> uint8_t ArmFault::aarch64FaultSources[] = {
> 0x21, // AlignmentFault
> 0xff, // InstructionCacheMaintenance (INVALID)
> 0x14, // SynchExtAbtOnTranslTableWalkL0
> 0x15, // SynchExtAbtOnTranslTableWalkL1
> 0x16, // SynchExtAbtOnTranslTableWalkL2
> 0x17, // SynchExtAbtOnTranslTableWalkL3
> 0x1c, // SynchPtyErrOnTranslTableWalkL0
> 0x1d, // SynchPtyErrOnTranslTableWalkL1
> 0x1e, // SynchPtyErrOnTranslTableWalkL2
> 0x1f, // SynchPtyErrOnTranslTableWalkL3
> 0x04, // TranslationL0
> 0x05, // TranslationL1
> 0x06, // TranslationL2
> 0x07, // TranslationL3
> 0x08, // AccessFlagL0
> 0x09, // AccessFlagL1
> 0x0a, // AccessFlagL2
> 0x0b, // AccessFlagL3
> // @todo: Section & Page Domain Fault in AArch64?
> 0xff, // DomainL0 (INVALID)
> 0xff, // DomainL1 (INVALID)
> 0xff, // DomainL2 (INVALID)
> 0xff, // DomainL3 (INVALID)
> 0x0c, // PermissionL0
> 0x0d, // PermissionL1
> 0x0e, // PermissionL2
> 0x0f, // PermissionL3
> 0xff, // DebugEvent (INVALID)
> 0x10, // SynchronousExternalAbort
> 0x30, // TLBConflictAbort
> 0x18, // SynchPtyErrOnMemoryAccess
> 0xff, // AsynchronousExternalAbort (INVALID)
> 0xff, // AsynchPtyErrOnMemoryAccess (INVALID)
> 0x00, // AddressSizeL0
> 0x01, // AddressSizeL1
> 0x02, // AddressSizeL2
> 0x03, // AddressSizeL3
> 0x40, // PrefetchTLBMiss
> 0x80 // PrefetchUncacheable
> };
71,72c193,195
< template<> ArmFault::FaultVals ArmFaultVals<Interrupt>::vals =
< {"IRQ", 0x18, MODE_IRQ, 4, 4, true, false, FaultStat()};
---
> static_assert(sizeof(ArmFault::aarch64FaultSources) ==
> ArmFault::NumFaultSources,
> "Invalid size of ArmFault::aarch64FaultSources[]");
74,75c197,294
< template<> ArmFault::FaultVals ArmFaultVals<FastInterrupt>::vals =
< {"FIQ", 0x1C, MODE_FIQ, 4, 4, true, true, FaultStat()};
---
> // Fields: name, offset, cur{ELT,ELH}Offset, lowerEL{64,32}Offset, next mode,
> // {ARM, Thumb, ARM_ELR, Thumb_ELR} PC offset, hyp trap,
> // {A, F} disable, class, stat
> template<> ArmFault::FaultVals ArmFaultVals<Reset>::vals = {
> // Some dummy values (the reset vector has an IMPLEMENTATION DEFINED
> // location in AArch64)
> "Reset", 0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC,
> 0, 0, 0, 0, false, true, true, EC_UNKNOWN, FaultStat()
> };
> template<> ArmFault::FaultVals ArmFaultVals<UndefinedInstruction>::vals = {
> "Undefined Instruction", 0x004, 0x000, 0x200, 0x400, 0x600, MODE_UNDEFINED,
> 4, 2, 0, 0, true, false, false, EC_UNKNOWN, FaultStat()
> };
> template<> ArmFault::FaultVals ArmFaultVals<SupervisorCall>::vals = {
> "Supervisor Call", 0x008, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
> 4, 2, 4, 2, true, false, false, EC_SVC_TO_HYP, FaultStat()
> };
> template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorCall>::vals = {
> "Secure Monitor Call", 0x008, 0x000, 0x200, 0x400, 0x600, MODE_MON,
> 4, 4, 4, 4, false, true, true, EC_SMC_TO_HYP, FaultStat()
> };
> template<> ArmFault::FaultVals ArmFaultVals<HypervisorCall>::vals = {
> "Hypervisor Call", 0x008, 0x000, 0x200, 0x400, 0x600, MODE_HYP,
> 4, 4, 4, 4, true, false, false, EC_HVC, FaultStat()
> };
> template<> ArmFault::FaultVals ArmFaultVals<PrefetchAbort>::vals = {
> "Prefetch Abort", 0x00C, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
> 4, 4, 0, 0, true, true, false, EC_PREFETCH_ABORT_TO_HYP, FaultStat()
> };
> template<> ArmFault::FaultVals ArmFaultVals<DataAbort>::vals = {
> "Data Abort", 0x010, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
> 8, 8, 0, 0, true, true, false, EC_DATA_ABORT_TO_HYP, FaultStat()
> };
> template<> ArmFault::FaultVals ArmFaultVals<VirtualDataAbort>::vals = {
> "Virtual Data Abort", 0x010, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
> 8, 8, 0, 0, true, true, false, EC_INVALID, FaultStat()
> };
> template<> ArmFault::FaultVals ArmFaultVals<HypervisorTrap>::vals = {
> // @todo: double check these values
> "Hypervisor Trap", 0x014, 0x000, 0x200, 0x400, 0x600, MODE_HYP,
> 0, 0, 0, 0, false, false, false, EC_UNKNOWN, FaultStat()
> };
> template<> ArmFault::FaultVals ArmFaultVals<Interrupt>::vals = {
> "IRQ", 0x018, 0x080, 0x280, 0x480, 0x680, MODE_IRQ,
> 4, 4, 0, 0, false, true, false, EC_UNKNOWN, FaultStat()
> };
> template<> ArmFault::FaultVals ArmFaultVals<VirtualInterrupt>::vals = {
> "Virtual IRQ", 0x018, 0x080, 0x280, 0x480, 0x680, MODE_IRQ,
> 4, 4, 0, 0, false, true, false, EC_INVALID, FaultStat()
> };
> template<> ArmFault::FaultVals ArmFaultVals<FastInterrupt>::vals = {
> "FIQ", 0x01C, 0x100, 0x300, 0x500, 0x700, MODE_FIQ,
> 4, 4, 0, 0, false, true, true, EC_UNKNOWN, FaultStat()
> };
> template<> ArmFault::FaultVals ArmFaultVals<VirtualFastInterrupt>::vals = {
> "Virtual FIQ", 0x01C, 0x100, 0x300, 0x500, 0x700, MODE_FIQ,
> 4, 4, 0, 0, false, true, true, EC_INVALID, FaultStat()
> };
> template<> ArmFault::FaultVals ArmFaultVals<SupervisorTrap>::vals = {
> // Some dummy values (SupervisorTrap is AArch64-only)
> "Supervisor Trap", 0x014, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
> 0, 0, 0, 0, false, false, false, EC_UNKNOWN, FaultStat()
> };
> template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorTrap>::vals = {
> // Some dummy values (SecureMonitorTrap is AArch64-only)
> "Secure Monitor Trap", 0x014, 0x000, 0x200, 0x400, 0x600, MODE_MON,
> 0, 0, 0, 0, false, false, false, EC_UNKNOWN, FaultStat()
> };
> template<> ArmFault::FaultVals ArmFaultVals<PCAlignmentFault>::vals = {
> // Some dummy values (PCAlignmentFault is AArch64-only)
> "PC Alignment Fault", 0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
> 0, 0, 0, 0, true, false, false, EC_PC_ALIGNMENT, FaultStat()
> };
> template<> ArmFault::FaultVals ArmFaultVals<SPAlignmentFault>::vals = {
> // Some dummy values (SPAlignmentFault is AArch64-only)
> "SP Alignment Fault", 0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
> 0, 0, 0, 0, true, false, false, EC_STACK_PTR_ALIGNMENT, FaultStat()
> };
> template<> ArmFault::FaultVals ArmFaultVals<SystemError>::vals = {
> // Some dummy values (SError is AArch64-only)
> "SError", 0x000, 0x180, 0x380, 0x580, 0x780, MODE_SVC,
> 0, 0, 0, 0, false, true, true, EC_SERROR, FaultStat()
> };
> template<> ArmFault::FaultVals ArmFaultVals<FlushPipe>::vals = {
> // Some dummy values
> "Pipe Flush", 0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC,
> 0, 0, 0, 0, false, true, true, EC_UNKNOWN, FaultStat()
> };
> template<> ArmFault::FaultVals ArmFaultVals<ArmSev>::vals = {
> // Some dummy values
> "ArmSev Flush", 0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC,
> 0, 0, 0, 0, false, true, true, EC_UNKNOWN, FaultStat()
> };
> template<> ArmFault::FaultVals ArmFaultVals<IllegalInstSetStateFault>::vals = {
>     // Some dummy values (IllegalInstSetStateFault is AArch64-only)
> "Illegal Inst Set State Fault", 0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
> 0, 0, 0, 0, true, false, false, EC_ILLEGAL_INST, FaultStat()
> };
77,82c296
< template<> ArmFault::FaultVals ArmFaultVals<FlushPipe>::vals =
< {"Pipe Flush", 0x00, MODE_SVC, 0, 0, true, true, FaultStat()}; // dummy values
<
< template<> ArmFault::FaultVals ArmFaultVals<ArmSev>::vals =
< {"ArmSev Flush", 0x00, MODE_SVC, 0, 0, true, true, FaultStat()}; // dummy values
< Addr
---
> Addr
85c299
< // ARM ARM B1-3
---
> Addr base;
87c301,302
< SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR);
---
> // ARM ARM issue C B1.8.1
> bool haveSecurity = ArmSystem::haveSecurity(tc);
90a306
> SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR);
91a308,311
> // Check for invalid modes
> CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
> assert(haveSecurity || cpsr.mode != MODE_MON);
> assert(ArmSystem::haveVirtualization(tc) || cpsr.mode != MODE_HYP);
93,95c313,330
< if (!sctlr.v)
< return offset();
< return offset() + HighVecs;
---
> switch (cpsr.mode)
> {
> case MODE_MON:
> base = tc->readMiscReg(MISCREG_MVBAR);
> break;
> case MODE_HYP:
> base = tc->readMiscReg(MISCREG_HVBAR);
> break;
> default:
> if (sctlr.v) {
> base = HighVecs;
> } else {
> base = haveSecurity ? tc->readMiscReg(MISCREG_VBAR) : 0;
> }
> break;
> }
> return base + offset(tc);
> }
96a332,353
> Addr
> ArmFault::getVector64(ThreadContext *tc)
> {
> Addr vbar;
> switch (toEL) {
> case EL3:
> assert(ArmSystem::haveSecurity(tc));
> vbar = tc->readMiscReg(MISCREG_VBAR_EL3);
> break;
> // @todo: uncomment this to enable Virtualization
> // case EL2:
> // assert(ArmSystem::haveVirtualization(tc));
> // vbar = tc->readMiscReg(MISCREG_VBAR_EL2);
> // break;
> case EL1:
> vbar = tc->readMiscReg(MISCREG_VBAR_EL1);
> break;
> default:
> panic("Invalid target exception level");
> break;
> }
> return vbar + offset64();
99c356,428
< void
---
> MiscRegIndex
> ArmFault::getSyndromeReg64() const
> {
> switch (toEL) {
> case EL1:
> return MISCREG_ESR_EL1;
> case EL2:
> return MISCREG_ESR_EL2;
> case EL3:
> return MISCREG_ESR_EL3;
> default:
> panic("Invalid exception level");
> break;
> }
> }
>
> MiscRegIndex
> ArmFault::getFaultAddrReg64() const
> {
> switch (toEL) {
> case EL1:
> return MISCREG_FAR_EL1;
> case EL2:
> return MISCREG_FAR_EL2;
> case EL3:
> return MISCREG_FAR_EL3;
> default:
> panic("Invalid exception level");
> break;
> }
> }
>
> void
> ArmFault::setSyndrome(ThreadContext *tc, MiscRegIndex syndrome_reg)
> {
> uint32_t value;
> uint32_t exc_class = (uint32_t) ec(tc);
> uint32_t issVal = iss();
> assert(!from64 || ArmSystem::highestELIs64(tc));
>
> value = exc_class << 26;
>
> // HSR.IL not valid for Prefetch Aborts (0x20, 0x21) and Data Aborts (0x24,
> // 0x25) for which the ISS information is not valid (ARMv7).
> // @todo: ARMv8 revises AArch32 functionality: when HSR.IL is not
> // valid it is treated as RES1.
> if (to64) {
> value |= 1 << 25;
> } else if ((bits(exc_class, 5, 3) != 4) ||
> (bits(exc_class, 2) && bits(issVal, 24))) {
> if (!machInst.thumb || machInst.bigThumb)
> value |= 1 << 25;
> }
> // Condition code valid for EC[5:4] nonzero
> if (!from64 && ((bits(exc_class, 5, 4) == 0) &&
> (bits(exc_class, 3, 0) != 0))) {
> if (!machInst.thumb) {
> uint32_t cond;
> ConditionCode condCode = (ConditionCode) (uint32_t) machInst.condCode;
>             // If it's an unconditional instruction, report with a cond code
>             // of 0xE, i.e. the unconditional code
> cond = (condCode == COND_UC) ? COND_AL : condCode;
> value |= cond << 20;
> value |= 1 << 24;
> }
> value |= bits(issVal, 19, 0);
> } else {
> value |= issVal;
> }
> tc->setMiscReg(syndrome_reg, value);
> }
>
> void
102c431,460
< // ARM ARM B1.6.3
---
> CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
>
> if (ArmSystem::highestELIs64(tc)) { // ARMv8
> // Determine source exception level and mode
> fromMode = (OperatingMode) (uint8_t) cpsr.mode;
> fromEL = opModeToEL(fromMode);
> if (opModeIs64(fromMode))
> from64 = true;
>
> // Determine target exception level
> if (ArmSystem::haveSecurity(tc) && routeToMonitor(tc))
> toEL = EL3;
> else
> toEL = opModeToEL(nextMode());
> if (fromEL > toEL)
> toEL = fromEL;
>
> if (toEL == ArmSystem::highestEL(tc) || ELIs64(tc, toEL)) {
> // Invoke exception handler in AArch64 state
> to64 = true;
> invoke64(tc, inst);
> return;
> }
> }
>
> // ARMv7 (ARM ARM issue C B1.9)
>
> bool have_security = ArmSystem::haveSecurity(tc);
> bool have_virtualization = ArmSystem::haveVirtualization(tc);
>
109c467
< CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
---
> SCR scr = tc->readMiscReg(MISCREG_SCR);
121c479,526
< cpsr.mode = nextMode();
---
> // if we have a valid instruction then use it to annotate this fault with
> // extra information. This is used to generate the correct fault syndrome
> // information
> if (inst) {
> ArmStaticInst *armInst = reinterpret_cast<ArmStaticInst *>(inst.get());
> armInst->annotateFault(this);
> }
>
> if (have_security && routeToMonitor(tc))
> cpsr.mode = MODE_MON;
> else if (have_virtualization && routeToHyp(tc))
> cpsr.mode = MODE_HYP;
> else
> cpsr.mode = nextMode();
>
> // Ensure Secure state if initially in Monitor mode
> if (have_security && saved_cpsr.mode == MODE_MON) {
> SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
> if (scr.ns) {
> scr.ns = 0;
> tc->setMiscRegNoEffect(MISCREG_SCR, scr);
> }
> }
>
> // some bits are set differently if we have been routed to hyp mode
> if (cpsr.mode == MODE_HYP) {
> SCTLR hsctlr = tc->readMiscReg(MISCREG_HSCTLR);
> cpsr.t = hsctlr.te;
> cpsr.e = hsctlr.ee;
> if (!scr.ea) {cpsr.a = 1;}
> if (!scr.fiq) {cpsr.f = 1;}
> if (!scr.irq) {cpsr.i = 1;}
> } else if (cpsr.mode == MODE_MON) {
> // Special case handling when entering monitor mode
> cpsr.t = sctlr.te;
> cpsr.e = sctlr.ee;
> cpsr.a = 1;
> cpsr.f = 1;
> cpsr.i = 1;
> } else {
> cpsr.t = sctlr.te;
> cpsr.e = sctlr.ee;
>
> // The *Disable functions are virtual and different per fault
> cpsr.a = cpsr.a | abortDisable(tc);
> cpsr.f = cpsr.f | fiqDisable(tc);
> cpsr.i = 1;
> }
124,129d528
<
< cpsr.t = sctlr.te;
< cpsr.a = cpsr.a | abortDisable();
< cpsr.f = cpsr.f | fiqDisable();
< cpsr.i = 1;
< cpsr.e = sctlr.ee;
130a530
>
133,134d532
< tc->setIntReg(INTREG_LR, curPc +
< (saved_cpsr.t ? thumbPcOffset() : armPcOffset()));
136c534,545
< switch (nextMode()) {
---
> // Clear the exclusive monitor
> tc->setMiscReg(MISCREG_LOCKFLAG, 0);
>
> if (cpsr.mode == MODE_HYP) {
> tc->setMiscReg(MISCREG_ELR_HYP, curPc +
> (saved_cpsr.t ? thumbPcOffset(true) : armPcOffset(true)));
> } else {
> tc->setIntReg(INTREG_LR, curPc +
> (saved_cpsr.t ? thumbPcOffset(false) : armPcOffset(false)));
> }
>
> switch (cpsr.mode) {
146,147c555,557
< case MODE_UNDEFINED:
< tc->setMiscReg(MISCREG_SPSR_UND, saved_cpsr);
---
> case MODE_MON:
> assert(have_security);
> tc->setMiscReg(MISCREG_SPSR_MON, saved_cpsr);
151a562,571
> case MODE_UNDEFINED:
> tc->setMiscReg(MISCREG_SPSR_UND, saved_cpsr);
> if (ec(tc) != EC_UNKNOWN)
> setSyndrome(tc, MISCREG_HSR);
> break;
> case MODE_HYP:
> assert(have_virtualization);
> tc->setMiscReg(MISCREG_SPSR_HYP, saved_cpsr);
> setSyndrome(tc, MISCREG_HSR);
> break;
163a584,585
> pc.aarch64(!cpsr.width);
> pc.nextAArch64(!cpsr.width);
167a590,680
> ArmFault::invoke64(ThreadContext *tc, StaticInstPtr inst)
> {
> // Determine actual misc. register indices for ELR_ELx and SPSR_ELx
> MiscRegIndex elr_idx, spsr_idx;
> switch (toEL) {
> case EL1:
> elr_idx = MISCREG_ELR_EL1;
> spsr_idx = MISCREG_SPSR_EL1;
> break;
> // @todo: uncomment this to enable Virtualization
> // case EL2:
> // assert(ArmSystem::haveVirtualization());
> // elr_idx = MISCREG_ELR_EL2;
> // spsr_idx = MISCREG_SPSR_EL2;
> // break;
> case EL3:
> assert(ArmSystem::haveSecurity(tc));
> elr_idx = MISCREG_ELR_EL3;
> spsr_idx = MISCREG_SPSR_EL3;
> break;
> default:
> panic("Invalid target exception level");
> break;
> }
>
> // Save process state into SPSR_ELx
> CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
> CPSR spsr = cpsr;
> spsr.nz = tc->readIntReg(INTREG_CONDCODES_NZ);
> spsr.c = tc->readIntReg(INTREG_CONDCODES_C);
> spsr.v = tc->readIntReg(INTREG_CONDCODES_V);
> if (from64) {
> // Force some bitfields to 0
> spsr.q = 0;
> spsr.it1 = 0;
> spsr.j = 0;
> spsr.res0_23_22 = 0;
> spsr.ge = 0;
> spsr.it2 = 0;
> spsr.t = 0;
> } else {
> spsr.ge = tc->readIntReg(INTREG_CONDCODES_GE);
> ITSTATE it = tc->pcState().itstate();
> spsr.it2 = it.top6;
> spsr.it1 = it.bottom2;
> // Force some bitfields to 0
> spsr.res0_23_22 = 0;
> spsr.ss = 0;
> }
> tc->setMiscReg(spsr_idx, spsr);
>
> // Save preferred return address into ELR_ELx
> Addr curr_pc = tc->pcState().pc();
> Addr ret_addr = curr_pc;
> if (from64)
> ret_addr += armPcElrOffset();
> else
> ret_addr += spsr.t ? thumbPcElrOffset() : armPcElrOffset();
> tc->setMiscReg(elr_idx, ret_addr);
>
> // Update process state
> OperatingMode64 mode = 0;
> mode.spX = 1;
> mode.el = toEL;
> mode.width = 0;
> cpsr.mode = mode;
> cpsr.daif = 0xf;
> cpsr.il = 0;
> cpsr.ss = 0;
> tc->setMiscReg(MISCREG_CPSR, cpsr);
>
> // Set PC to start of exception handler
> Addr new_pc = purifyTaggedAddr(getVector64(tc), tc, toEL);
> DPRINTF(Faults, "Invoking Fault (AArch64 target EL):%s cpsr:%#x PC:%#x "
> "elr:%#x newVec: %#x\n", name(), cpsr, curr_pc, ret_addr, new_pc);
> PCState pc(new_pc);
> pc.aarch64(!cpsr.width);
> pc.nextAArch64(!cpsr.width);
> tc->pcState(pc);
>
> // If we have a valid instruction then use it to annotate this fault with
> // extra information. This is used to generate the correct fault syndrome
> // information
> if (inst)
> reinterpret_cast<ArmStaticInst *>(inst.get())->annotateFault(this);
> // Save exception syndrome
> if ((nextMode() != MODE_IRQ) && (nextMode() != MODE_FIQ))
> setSyndrome(tc, getSyndromeReg64());
> }
>
> void
174c687,705
< ArmFault::invoke(tc, inst);
---
> if (!ArmSystem::highestELIs64(tc)) {
> ArmFault::invoke(tc, inst);
> tc->setMiscReg(MISCREG_VMPIDR,
> getMPIDR(dynamic_cast<ArmSystem*>(tc->getSystemPtr()), tc));
>
> // Unless we have SMC code to get us there, boot in HYP!
> if (ArmSystem::haveVirtualization(tc) &&
> !ArmSystem::haveSecurity(tc)) {
> CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
> cpsr.mode = MODE_HYP;
> tc->setMiscReg(MISCREG_CPSR, cpsr);
> }
> } else {
> // Advance the PC to the IMPLEMENTATION DEFINED reset value
> PCState pc = ArmSystem::resetAddr64(tc);
> pc.aarch64(true);
> pc.nextAArch64(true);
> tc->pcState(pc);
> }
198a730,768
> bool
> UndefinedInstruction::routeToHyp(ThreadContext *tc) const
> {
> bool toHyp;
>
> SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
> HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
> CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
>
> // if in Hyp mode then stay in Hyp mode
> toHyp = scr.ns && (cpsr.mode == MODE_HYP);
> // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
> toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.mode == MODE_USER);
> return toHyp;
> }
>
> uint32_t
> UndefinedInstruction::iss() const
> {
> if (overrideEc == EC_INVALID)
> return issRaw;
>
> uint32_t new_iss = 0;
> uint32_t op0, op1, op2, CRn, CRm, Rt, dir;
>
> dir = bits(machInst, 21, 21);
> op0 = bits(machInst, 20, 19);
> op1 = bits(machInst, 18, 16);
> CRn = bits(machInst, 15, 12);
> CRm = bits(machInst, 11, 8);
> op2 = bits(machInst, 7, 5);
> Rt = bits(machInst, 4, 0);
>
> new_iss = op0 << 20 | op2 << 17 | op1 << 14 | CRn << 10 |
> Rt << 5 | CRm << 1 | dir;
>
> return new_iss;
> }
>
210c780,785
< callNum = tc->readIntReg(INTREG_R7);
---
> CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
> OperatingMode mode = (OperatingMode)(uint8_t)cpsr.mode;
> if (opModeIs64(mode))
> callNum = tc->readIntReg(INTREG_X8);
> else
> callNum = tc->readIntReg(INTREG_R7);
219a795,850
> bool
> SupervisorCall::routeToHyp(ThreadContext *tc) const
> {
> bool toHyp;
>
> SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
> HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
> CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
>
> // if in Hyp mode then stay in Hyp mode
> toHyp = scr.ns && (cpsr.mode == MODE_HYP);
> // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
> toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.mode == MODE_USER);
> return toHyp;
> }
>
> ExceptionClass
> SupervisorCall::ec(ThreadContext *tc) const
> {
> return (overrideEc != EC_INVALID) ? overrideEc :
> (from64 ? EC_SVC_64 : vals.ec);
> }
>
> uint32_t
> SupervisorCall::iss() const
> {
> // Even if we have a 24 bit imm from an arm32 instruction then we only use
> // the bottom 16 bits for the ISS value (it doesn't hurt for AArch64 SVC).
> return issRaw & 0xFFFF;
> }
>
> uint32_t
> SecureMonitorCall::iss() const
> {
> if (from64)
> return bits(machInst, 20, 5);
> return 0;
> }
>
> ExceptionClass
> UndefinedInstruction::ec(ThreadContext *tc) const
> {
> return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
> }
>
>
> HypervisorCall::HypervisorCall(ExtMachInst _machInst, uint32_t _imm) :
> ArmFaultVals<HypervisorCall>(_machInst, _imm)
> {}
>
> ExceptionClass
> HypervisorTrap::ec(ThreadContext *tc) const
> {
> return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
> }
>
220a852,885
> FaultOffset
> ArmFaultVals<T>::offset(ThreadContext *tc)
> {
> bool isHypTrap = false;
>
>     // Normally we just use the exception vector from the table at the top of
>     // this file, however if this exception has caused a transition to hyp
>     // mode, and it's an exception type that would only do this if it has been
>     // trapped then we use the hyp trap vector instead of the normal vector
> if (vals.hypTrappable) {
> CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
> if (cpsr.mode == MODE_HYP) {
> CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
> isHypTrap = spsr.mode != MODE_HYP;
> }
> }
> return isHypTrap ? 0x14 : vals.offset;
> }
>
> // void
> // SupervisorCall::setSyndrome64(ThreadContext *tc, MiscRegIndex esr_idx)
> // {
> // ESR esr = 0;
> // esr.ec = machInst.aarch64 ? SvcAArch64 : SvcAArch32;
> // esr.il = !machInst.thumb;
> // if (machInst.aarch64)
> // esr.imm16 = bits(machInst.instBits, 20, 5);
> // else if (machInst.thumb)
> // esr.imm16 = bits(machInst.instBits, 7, 0);
> // else
> // esr.imm16 = bits(machInst.instBits, 15, 0);
> // tc->setMiscReg(esr_idx, esr);
> // }
>
221a887,915
> SecureMonitorCall::invoke(ThreadContext *tc, StaticInstPtr inst)
> {
> if (FullSystem) {
> ArmFault::invoke(tc, inst);
> return;
> }
> }
>
> ExceptionClass
> SecureMonitorCall::ec(ThreadContext *tc) const
> {
> return (from64 ? EC_SMC_64 : vals.ec);
> }
>
> ExceptionClass
> SupervisorTrap::ec(ThreadContext *tc) const
> {
> return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
> }
>
> ExceptionClass
> SecureMonitorTrap::ec(ThreadContext *tc) const
> {
> return (overrideEc != EC_INVALID) ? overrideEc :
> (from64 ? EC_SMC_64 : vals.ec);
> }
>
> template<class T>
> void
223a918,948
> if (tranMethod == ArmFault::UnknownTran) {
> tranMethod = longDescFormatInUse(tc) ? ArmFault::LpaeTran
> : ArmFault::VmsaTran;
>
> if ((tranMethod == ArmFault::VmsaTran) && this->routeToMonitor(tc)) {
> // See ARM ARM B3-1416
> bool override_LPAE = false;
> TTBCR ttbcr_s = tc->readMiscReg(MISCREG_TTBCR_S);
> TTBCR M5_VAR_USED ttbcr_ns = tc->readMiscReg(MISCREG_TTBCR_NS);
> if (ttbcr_s.eae) {
> override_LPAE = true;
> } else {
>                 // Unimplemented code option, not seen in testing. May need
>                 // extension according to the manual excerpt above.
> DPRINTF(Faults, "Warning: Incomplete translation method "
> "override detected.\n");
> }
> if (override_LPAE)
> tranMethod = ArmFault::LpaeTran;
> }
> }
>
> if (source == ArmFault::AsynchronousExternalAbort) {
> tc->getCpuPtr()->clearInterrupt(INT_ABT, 0);
> }
> // Get effective fault source encoding
> CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
> FSR fsr = getFsr(tc);
>
> // source must be determined BEFORE invoking generic routines which will
> // try to set hsr etc. and are based upon source!
224a950,973
>
> if (cpsr.width) { // AArch32
> if (cpsr.mode == MODE_HYP) {
> tc->setMiscReg(T::HFarIndex, faultAddr);
> } else if (stage2) {
> tc->setMiscReg(MISCREG_HPFAR, (faultAddr >> 8) & ~0xf);
> tc->setMiscReg(T::HFarIndex, OVAddr);
> } else {
> tc->setMiscReg(T::FsrIndex, fsr);
> tc->setMiscReg(T::FarIndex, faultAddr);
> }
> DPRINTF(Faults, "Abort Fault source=%#x fsr=%#x faultAddr=%#x "\
> "tranMethod=%#x\n", source, fsr, faultAddr, tranMethod);
> } else { // AArch64
> // Set the FAR register. Nothing else to do if we are in AArch64 state
> // because the syndrome register has already been set inside invoke64()
> tc->setMiscReg(AbortFault<T>::getFaultAddrReg64(), faultAddr);
> }
> }
>
> template<class T>
> FSR
> AbortFault<T>::getFsr(ThreadContext *tc)
> {
226,232d974
< fsr.fsLow = bits(status, 3, 0);
< fsr.fsHigh = bits(status, 4);
< fsr.domain = domain;
< fsr.wnr = (write ? 1 : 0);
< fsr.ext = 0;
< tc->setMiscReg(T::FsrIndex, fsr);
< tc->setMiscReg(T::FarIndex, faultAddr);
234c976,998
< DPRINTF(Faults, "Abort Fault fsr=%#x faultAddr=%#x\n", fsr, faultAddr);
---
> if (((CPSR) tc->readMiscRegNoEffect(MISCREG_CPSR)).width) {
> // AArch32
> assert(tranMethod != ArmFault::UnknownTran);
> if (tranMethod == ArmFault::LpaeTran) {
> srcEncoded = ArmFault::longDescFaultSources[source];
> fsr.status = srcEncoded;
> fsr.lpae = 1;
> } else {
> srcEncoded = ArmFault::shortDescFaultSources[source];
> fsr.fsLow = bits(srcEncoded, 3, 0);
> fsr.fsHigh = bits(srcEncoded, 4);
> fsr.domain = static_cast<uint8_t>(domain);
> }
> fsr.wnr = (write ? 1 : 0);
> fsr.ext = 0;
> } else {
> // AArch64
> srcEncoded = ArmFault::aarch64FaultSources[source];
> }
> if (srcEncoded == ArmFault::FaultSourceInvalid) {
> panic("Invalid fault source\n");
> }
> return fsr;
236a1001,1012
> template<class T>
> bool
> AbortFault<T>::abortDisable(ThreadContext *tc)
> {
> if (ArmSystem::haveSecurity(tc)) {
> SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
> return (!scr.ns || scr.aw);
> }
> return true;
> }
>
> template<class T>
237a1014,1384
> AbortFault<T>::annotate(ArmFault::AnnotationIDs id, uint64_t val)
> {
> switch (id)
> {
> case ArmFault::S1PTW:
> s1ptw = val;
> break;
> case ArmFault::OVA:
> OVAddr = val;
> break;
>
> // Just ignore unknown ID's
> default:
> break;
> }
> }
>
> template<class T>
> uint32_t
> AbortFault<T>::iss() const
> {
> uint32_t val;
>
> val = srcEncoded & 0x3F;
> val |= write << 6;
> val |= s1ptw << 7;
> return (val);
> }
>
> template<class T>
> bool
> AbortFault<T>::isMMUFault() const
> {
> // NOTE: Not relying on LL information being aligned to lowest bits here
> return
> (source == ArmFault::AlignmentFault) ||
> ((source >= ArmFault::TranslationLL) &&
> (source < ArmFault::TranslationLL + 4)) ||
> ((source >= ArmFault::AccessFlagLL) &&
> (source < ArmFault::AccessFlagLL + 4)) ||
> ((source >= ArmFault::DomainLL) &&
> (source < ArmFault::DomainLL + 4)) ||
> ((source >= ArmFault::PermissionLL) &&
> (source < ArmFault::PermissionLL + 4));
> }
>
> ExceptionClass
> PrefetchAbort::ec(ThreadContext *tc) const
> {
> if (to64) {
> // AArch64
> if (toEL == fromEL)
> return EC_PREFETCH_ABORT_CURR_EL;
> else
> return EC_PREFETCH_ABORT_LOWER_EL;
> } else {
> // AArch32
> // Abort faults have different EC codes depending on whether
> // the fault originated within HYP mode, or not. So override
> // the method and add the extra adjustment of the EC value.
>
> ExceptionClass ec = ArmFaultVals<PrefetchAbort>::vals.ec;
>
> CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
> if (spsr.mode == MODE_HYP) {
> ec = ((ExceptionClass) (((uint32_t) ec) + 1));
> }
> return ec;
> }
> }
>
> bool
> PrefetchAbort::routeToMonitor(ThreadContext *tc) const
> {
> SCR scr = 0;
> if (from64)
> scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
> else
> scr = tc->readMiscRegNoEffect(MISCREG_SCR);
>
> return scr.ea && !isMMUFault();
> }
>
> bool
> PrefetchAbort::routeToHyp(ThreadContext *tc) const
> {
> bool toHyp;
>
> SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
> HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
> CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
> HDCR hdcr = tc->readMiscRegNoEffect(MISCREG_HDCR);
>
> // if in Hyp mode then stay in Hyp mode
> toHyp = scr.ns && (cpsr.mode == MODE_HYP);
> // otherwise, check whether to take to Hyp mode through Hyp Trap vector
> toHyp |= (stage2 ||
> ( (source == DebugEvent) && hdcr.tde && (cpsr.mode != MODE_HYP)) ||
> ( (source == SynchronousExternalAbort) && hcr.tge && (cpsr.mode == MODE_USER))
> ) && !inSecureState(scr, cpsr);
> return toHyp;
> }
>
> ExceptionClass
> DataAbort::ec(ThreadContext *tc) const
> {
> if (to64) {
> // AArch64
> if (source == ArmFault::AsynchronousExternalAbort) {
> panic("Asynchronous External Abort should be handled with \
> SystemErrors (SErrors)!");
> }
> if (toEL == fromEL)
> return EC_DATA_ABORT_CURR_EL;
> else
> return EC_DATA_ABORT_LOWER_EL;
> } else {
> // AArch32
> // Abort faults have different EC codes depending on whether
> // the fault originated within HYP mode, or not. So override
> // the method and add the extra adjustment of the EC value.
>
> ExceptionClass ec = ArmFaultVals<DataAbort>::vals.ec;
>
> CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
> if (spsr.mode == MODE_HYP) {
> ec = ((ExceptionClass) (((uint32_t) ec) + 1));
> }
> return ec;
> }
> }
>
> bool
> DataAbort::routeToMonitor(ThreadContext *tc) const
> {
> SCR scr = 0;
> if (from64)
> scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
> else
> scr = tc->readMiscRegNoEffect(MISCREG_SCR);
>
> return scr.ea && !isMMUFault();
> }
>
> bool
> DataAbort::routeToHyp(ThreadContext *tc) const
> {
> bool toHyp;
>
> SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
> HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
> CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
> HDCR hdcr = tc->readMiscRegNoEffect(MISCREG_HDCR);
>
> // if in Hyp mode then stay in Hyp mode
> toHyp = scr.ns && (cpsr.mode == MODE_HYP);
> // otherwise, check whether to take to Hyp mode through Hyp Trap vector
> toHyp |= (stage2 ||
> ( (cpsr.mode != MODE_HYP) && ( ((source == AsynchronousExternalAbort) && hcr.amo) ||
> ((source == DebugEvent) && hdcr.tde) )
> ) ||
> ( (cpsr.mode == MODE_USER) && hcr.tge &&
> ((source == AlignmentFault) ||
> (source == SynchronousExternalAbort))
> )
> ) && !inSecureState(scr, cpsr);
> return toHyp;
> }
>
> uint32_t
> DataAbort::iss() const
> {
> uint32_t val;
>
> // Add on the data abort specific fields to the generic abort ISS value
> val = AbortFault<DataAbort>::iss();
> // ISS is valid if not caused by a stage 1 page table walk, and when taken
> // to AArch64 only when directed to EL2
> if (!s1ptw && (!to64 || toEL == EL2)) {
> val |= isv << 24;
> if (isv) {
> val |= sas << 22;
> val |= sse << 21;
> val |= srt << 16;
> // AArch64 only. These assignments are safe on AArch32 as well
> // because these vars are initialized to false
> val |= sf << 15;
> val |= ar << 14;
> }
> }
> return (val);
> }
>
> void
> DataAbort::annotate(AnnotationIDs id, uint64_t val)
> {
> AbortFault<DataAbort>::annotate(id, val);
> switch (id)
> {
> case SAS:
> isv = true;
> sas = val;
> break;
> case SSE:
> isv = true;
> sse = val;
> break;
> case SRT:
> isv = true;
> srt = val;
> break;
> case SF:
> isv = true;
> sf = val;
> break;
> case AR:
> isv = true;
> ar = val;
> break;
> // Just ignore unknown ID's
> default:
> break;
> }
> }
>
> void
> VirtualDataAbort::invoke(ThreadContext *tc, StaticInstPtr inst)
> {
> AbortFault<VirtualDataAbort>::invoke(tc, inst);
> HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
> hcr.va = 0;
> tc->setMiscRegNoEffect(MISCREG_HCR, hcr);
> }
>
> bool
> Interrupt::routeToMonitor(ThreadContext *tc) const
> {
> assert(ArmSystem::haveSecurity(tc));
> SCR scr = 0;
> if (from64)
> scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
> else
> scr = tc->readMiscRegNoEffect(MISCREG_SCR);
> return scr.irq;
> }
>
> bool
> Interrupt::routeToHyp(ThreadContext *tc) const
> {
> bool toHyp;
>
> SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
> HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
> CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
> // Determine whether IRQs are routed to Hyp mode.
> toHyp = (!scr.irq && hcr.imo && !inSecureState(scr, cpsr)) ||
> (cpsr.mode == MODE_HYP);
> return toHyp;
> }
>
> bool
> Interrupt::abortDisable(ThreadContext *tc)
> {
> if (ArmSystem::haveSecurity(tc)) {
> SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
> return (!scr.ns || scr.aw);
> }
> return true;
> }
>
> VirtualInterrupt::VirtualInterrupt()
> {}
>
> bool
> FastInterrupt::routeToMonitor(ThreadContext *tc) const
> {
> assert(ArmSystem::haveSecurity(tc));
> SCR scr = 0;
> if (from64)
> scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
> else
> scr = tc->readMiscRegNoEffect(MISCREG_SCR);
> return scr.fiq;
> }
>
> bool
> FastInterrupt::routeToHyp(ThreadContext *tc) const
> {
> bool toHyp;
>
> SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
> HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
> CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
> // Determine whether IRQs are routed to Hyp mode.
> toHyp = (!scr.fiq && hcr.fmo && !inSecureState(scr, cpsr)) ||
> (cpsr.mode == MODE_HYP);
> return toHyp;
> }
>
> bool
> FastInterrupt::abortDisable(ThreadContext *tc)
> {
> if (ArmSystem::haveSecurity(tc)) {
> SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
> return (!scr.ns || scr.aw);
> }
> return true;
> }
>
> bool
> FastInterrupt::fiqDisable(ThreadContext *tc)
> {
> if (ArmSystem::haveVirtualization(tc)) {
> return true;
> } else if (ArmSystem::haveSecurity(tc)) {
> SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
> return (!scr.ns || scr.fw);
> }
> return true;
> }
>
> VirtualFastInterrupt::VirtualFastInterrupt()
> {}
>
> void
> PCAlignmentFault::invoke(ThreadContext *tc, StaticInstPtr inst)
> {
> ArmFaultVals<PCAlignmentFault>::invoke(tc, inst);
> assert(from64);
> // Set the FAR
> tc->setMiscReg(getFaultAddrReg64(), faultPC);
> }
>
> SPAlignmentFault::SPAlignmentFault()
> {}
>
> SystemError::SystemError()
> {}
>
> void
> SystemError::invoke(ThreadContext *tc, StaticInstPtr inst)
> {
> tc->getCpuPtr()->clearInterrupt(INT_ABT, 0);
> ArmFault::invoke(tc, inst);
> }
>
> bool
> SystemError::routeToMonitor(ThreadContext *tc) const
> {
> assert(ArmSystem::haveSecurity(tc));
> assert(from64);
> SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
> return scr.ea;
> }
>
> bool
> SystemError::routeToHyp(ThreadContext *tc) const
> {
> bool toHyp;
> assert(from64);
>
> SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
> HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
> CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
>
> toHyp = (!scr.ea && hcr.amo && !inSecureState(scr, cpsr)) ||
> (!scr.ea && !scr.rw && !hcr.amo && !inSecureState(scr,cpsr));
> return toHyp;
> }
>
> void
250,254d1396
< template void AbortFault<PrefetchAbort>::invoke(ThreadContext *tc,
< StaticInstPtr inst);
< template void AbortFault<DataAbort>::invoke(ThreadContext *tc,
< StaticInstPtr inst);
<
268c1410,1433
< // return via SUBS pc, lr, xxx; rfe, movs, ldm
---
> // Instantiate all the templates to make the linker happy
> template class ArmFaultVals<Reset>;
> template class ArmFaultVals<UndefinedInstruction>;
> template class ArmFaultVals<SupervisorCall>;
> template class ArmFaultVals<SecureMonitorCall>;
> template class ArmFaultVals<HypervisorCall>;
> template class ArmFaultVals<PrefetchAbort>;
> template class ArmFaultVals<DataAbort>;
> template class ArmFaultVals<VirtualDataAbort>;
> template class ArmFaultVals<HypervisorTrap>;
> template class ArmFaultVals<Interrupt>;
> template class ArmFaultVals<VirtualInterrupt>;
> template class ArmFaultVals<FastInterrupt>;
> template class ArmFaultVals<VirtualFastInterrupt>;
> template class ArmFaultVals<SupervisorTrap>;
> template class ArmFaultVals<SecureMonitorTrap>;
> template class ArmFaultVals<PCAlignmentFault>;
> template class ArmFaultVals<SPAlignmentFault>;
> template class ArmFaultVals<SystemError>;
> template class ArmFaultVals<FlushPipe>;
> template class ArmFaultVals<ArmSev>;
> template class AbortFault<PrefetchAbort>;
> template class AbortFault<DataAbort>;
> template class AbortFault<VirtualDataAbort>;
269a1435,1439
>
> IllegalInstSetStateFault::IllegalInstSetStateFault()
> {}
>
>