faults.cc (revision 9551:f867e530f39b, old) | faults.cc (revision 10037:5cac77888310, new) |
---|---|
1/* | 1/* |
2 * Copyright (c) 2010 ARM Limited | 2 * Copyright (c) 2010, 2012-2013 ARM Limited |
3 * All rights reserved 4 * 5 * The license below extends only to copyright in the software and shall 6 * not be construed as granting a license to any other intellectual 7 * property including but not limited to intellectual property relating 8 * to a hardware implementation of the functionality of the software 9 * licensed hereunder. You may use the software subject to the license 10 * terms below provided that you ensure that this notice is replicated --- 24 unchanged lines hidden --- 35 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 36 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 37 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 38 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 39 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 40 * 41 * Authors: Ali Saidi 42 * Gabe Black | 3 * All rights reserved 4 * 5 * The license below extends only to copyright in the software and shall 6 * not be construed as granting a license to any other intellectual 7 * property including but not limited to intellectual property relating 8 * to a hardware implementation of the functionality of the software 9 * licensed hereunder. You may use the software subject to the license 10 * terms below provided that you ensure that this notice is replicated --- 24 unchanged lines hidden --- 35 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 36 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 37 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 38 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 39 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 40 * 41 * Authors: Ali Saidi 42 * Gabe Black |
43 * Giacomo Gabrielli 44 * Thomas Grocutt |
43 */ 44 45#include "arch/arm/faults.hh" | 45 */ 46 47#include "arch/arm/faults.hh" |
48#include "arch/arm/system.hh" 49#include "arch/arm/utility.hh" 50#include "arch/arm/insts/static_inst.hh" 51#include "base/compiler.hh" |
46#include "base/trace.hh" 47#include "cpu/base.hh" 48#include "cpu/thread_context.hh" 49#include "debug/Faults.hh" 50#include "sim/full_system.hh" 51 52namespace ArmISA 53{ 54 | 52#include "base/trace.hh" 53#include "cpu/base.hh" 54#include "cpu/thread_context.hh" 55#include "debug/Faults.hh" 56#include "sim/full_system.hh" 57 58namespace ArmISA 59{ 60 |
55template<> ArmFault::FaultVals ArmFaultVals<Reset>::vals = 56{"reset", 0x00, MODE_SVC, 0, 0, true, true, FaultStat()}; | 61uint8_t ArmFault::shortDescFaultSources[] = { 62 0x01, // AlignmentFault 63 0x04, // InstructionCacheMaintenance 64 0xff, // SynchExtAbtOnTranslTableWalkL0 (INVALID) 65 0x0c, // SynchExtAbtOnTranslTableWalkL1 66 0x0e, // SynchExtAbtOnTranslTableWalkL2 67 0xff, // SynchExtAbtOnTranslTableWalkL3 (INVALID) 68 0xff, // SynchPtyErrOnTranslTableWalkL0 (INVALID) 69 0x1c, // SynchPtyErrOnTranslTableWalkL1 70 0x1e, // SynchPtyErrOnTranslTableWalkL2 71 0xff, // SynchPtyErrOnTranslTableWalkL3 (INVALID) 72 0xff, // TranslationL0 (INVALID) 73 0x05, // TranslationL1 74 0x07, // TranslationL2 75 0xff, // TranslationL3 (INVALID) 76 0xff, // AccessFlagL0 (INVALID) 77 0x03, // AccessFlagL1 78 0x06, // AccessFlagL2 79 0xff, // AccessFlagL3 (INVALID) 80 0xff, // DomainL0 (INVALID) 81 0x09, // DomainL1 82 0x0b, // DomainL2 83 0xff, // DomainL3 (INVALID) 84 0xff, // PermissionL0 (INVALID) 85 0x0d, // PermissionL1 86 0x0f, // PermissionL2 87 0xff, // PermissionL3 (INVALID) 88 0x02, // DebugEvent 89 0x08, // SynchronousExternalAbort 90 0x10, // TLBConflictAbort 91 0x19, // SynchPtyErrOnMemoryAccess 92 0x16, // AsynchronousExternalAbort 93 0x18, // AsynchPtyErrOnMemoryAccess 94 0xff, // AddressSizeL0 (INVALID) 95 0xff, // AddressSizeL1 (INVALID) 96 0xff, // AddressSizeL2 (INVALID) 97 0xff, // AddressSizeL3 (INVALID) 98 0x40, // PrefetchTLBMiss 99 0x80 // PrefetchUncacheable 100}; |
57 | 101 |
58template<> ArmFault::FaultVals ArmFaultVals<UndefinedInstruction>::vals = 59{"Undefined Instruction", 0x04, MODE_UNDEFINED, 4 ,2, false, false, 60 FaultStat()} ; | 102static_assert(sizeof(ArmFault::shortDescFaultSources) == 103 ArmFault::NumFaultSources, 104 "Invalid size of ArmFault::shortDescFaultSources[]"); |
61 | 105 |
62template<> ArmFault::FaultVals ArmFaultVals<SupervisorCall>::vals = 63{"Supervisor Call", 0x08, MODE_SVC, 4, 2, false, false, FaultStat()}; | 106uint8_t ArmFault::longDescFaultSources[] = { 107 0x21, // AlignmentFault 108 0xff, // InstructionCacheMaintenance (INVALID) 109 0xff, // SynchExtAbtOnTranslTableWalkL0 (INVALID) 110 0x15, // SynchExtAbtOnTranslTableWalkL1 111 0x16, // SynchExtAbtOnTranslTableWalkL2 112 0x17, // SynchExtAbtOnTranslTableWalkL3 113 0xff, // SynchPtyErrOnTranslTableWalkL0 (INVALID) 114 0x1d, // SynchPtyErrOnTranslTableWalkL1 115 0x1e, // SynchPtyErrOnTranslTableWalkL2 116 0x1f, // SynchPtyErrOnTranslTableWalkL3 117 0xff, // TranslationL0 (INVALID) 118 0x05, // TranslationL1 119 0x06, // TranslationL2 120 0x07, // TranslationL3 121 0xff, // AccessFlagL0 (INVALID) 122 0x09, // AccessFlagL1 123 0x0a, // AccessFlagL2 124 0x0b, // AccessFlagL3 125 0xff, // DomainL0 (INVALID) 126 0x3d, // DomainL1 127 0x3e, // DomainL2 128 0xff, // DomainL3 (RESERVED) 129 0xff, // PermissionL0 (INVALID) 130 0x0d, // PermissionL1 131 0x0e, // PermissionL2 132 0x0f, // PermissionL3 133 0x22, // DebugEvent 134 0x10, // SynchronousExternalAbort 135 0x30, // TLBConflictAbort 136 0x18, // SynchPtyErrOnMemoryAccess 137 0x11, // AsynchronousExternalAbort 138 0x19, // AsynchPtyErrOnMemoryAccess 139 0xff, // AddressSizeL0 (INVALID) 140 0xff, // AddressSizeL1 (INVALID) 141 0xff, // AddressSizeL2 (INVALID) 142 0xff, // AddressSizeL3 (INVALID) 143 0x40, // PrefetchTLBMiss 144 0x80 // PrefetchUncacheable 145}; |
64 | 146 |
65template<> ArmFault::FaultVals ArmFaultVals<PrefetchAbort>::vals = 66{"Prefetch Abort", 0x0C, MODE_ABORT, 4, 4, true, false, FaultStat()}; | 147static_assert(sizeof(ArmFault::longDescFaultSources) == 148 ArmFault::NumFaultSources, 149 "Invalid size of ArmFault::longDescFaultSources[]"); |
67 | 150 |
68template<> ArmFault::FaultVals ArmFaultVals<DataAbort>::vals = 69{"Data Abort", 0x10, MODE_ABORT, 8, 8, true, false, FaultStat()}; | 151uint8_t ArmFault::aarch64FaultSources[] = { 152 0x21, // AlignmentFault 153 0xff, // InstructionCacheMaintenance (INVALID) 154 0x14, // SynchExtAbtOnTranslTableWalkL0 155 0x15, // SynchExtAbtOnTranslTableWalkL1 156 0x16, // SynchExtAbtOnTranslTableWalkL2 157 0x17, // SynchExtAbtOnTranslTableWalkL3 158 0x1c, // SynchPtyErrOnTranslTableWalkL0 159 0x1d, // SynchPtyErrOnTranslTableWalkL1 160 0x1e, // SynchPtyErrOnTranslTableWalkL2 161 0x1f, // SynchPtyErrOnTranslTableWalkL3 162 0x04, // TranslationL0 163 0x05, // TranslationL1 164 0x06, // TranslationL2 165 0x07, // TranslationL3 166 0x08, // AccessFlagL0 167 0x09, // AccessFlagL1 168 0x0a, // AccessFlagL2 169 0x0b, // AccessFlagL3 170 // @todo: Section & Page Domain Fault in AArch64? 171 0xff, // DomainL0 (INVALID) 172 0xff, // DomainL1 (INVALID) 173 0xff, // DomainL2 (INVALID) 174 0xff, // DomainL3 (INVALID) 175 0x0c, // PermissionL0 176 0x0d, // PermissionL1 177 0x0e, // PermissionL2 178 0x0f, // PermissionL3 179 0xff, // DebugEvent (INVALID) 180 0x10, // SynchronousExternalAbort 181 0x30, // TLBConflictAbort 182 0x18, // SynchPtyErrOnMemoryAccess 183 0xff, // AsynchronousExternalAbort (INVALID) 184 0xff, // AsynchPtyErrOnMemoryAccess (INVALID) 185 0x00, // AddressSizeL0 186 0x01, // AddressSizeL1 187 0x02, // AddressSizeL2 188 0x03, // AddressSizeL3 189 0x40, // PrefetchTLBMiss 190 0x80 // PrefetchUncacheable 191}; |
70 | 192 |
71template<> ArmFault::FaultVals ArmFaultVals<Interrupt>::vals = 72{"IRQ", 0x18, MODE_IRQ, 4, 4, true, false, FaultStat()}; | 193static_assert(sizeof(ArmFault::aarch64FaultSources) == 194 ArmFault::NumFaultSources, 195 "Invalid size of ArmFault::aarch64FaultSources[]"); |
73 | 196 |
74template<> ArmFault::FaultVals ArmFaultVals<FastInterrupt>::vals = 75{"FIQ", 0x1C, MODE_FIQ, 4, 4, true, true, FaultStat()}; | 197// Fields: name, offset, cur{ELT,ELH}Offset, lowerEL{64,32}Offset, next mode, 198// {ARM, Thumb, ARM_ELR, Thumb_ELR} PC offset, hyp trap, 199// {A, F} disable, class, stat 200template<> ArmFault::FaultVals ArmFaultVals<Reset>::vals = { 201 // Some dummy values (the reset vector has an IMPLEMENTATION DEFINED 202 // location in AArch64) 203 "Reset", 0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC, 204 0, 0, 0, 0, false, true, true, EC_UNKNOWN, FaultStat() 205}; 206template<> ArmFault::FaultVals ArmFaultVals<UndefinedInstruction>::vals = { 207 "Undefined Instruction", 0x004, 0x000, 0x200, 0x400, 0x600, MODE_UNDEFINED, 208 4, 2, 0, 0, true, false, false, EC_UNKNOWN, FaultStat() 209}; 210template<> ArmFault::FaultVals ArmFaultVals<SupervisorCall>::vals = { 211 "Supervisor Call", 0x008, 0x000, 0x200, 0x400, 0x600, MODE_SVC, 212 4, 2, 4, 2, true, false, false, EC_SVC_TO_HYP, FaultStat() 213}; 214template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorCall>::vals = { 215 "Secure Monitor Call", 0x008, 0x000, 0x200, 0x400, 0x600, MODE_MON, 216 4, 4, 4, 4, false, true, true, EC_SMC_TO_HYP, FaultStat() 217}; 218template<> ArmFault::FaultVals ArmFaultVals<HypervisorCall>::vals = { 219 "Hypervisor Call", 0x008, 0x000, 0x200, 0x400, 0x600, MODE_HYP, 220 4, 4, 4, 4, true, false, false, EC_HVC, FaultStat() 221}; 222template<> ArmFault::FaultVals ArmFaultVals<PrefetchAbort>::vals = { 223 "Prefetch Abort", 0x00C, 0x000, 0x200, 0x400, 0x600, MODE_ABORT, 224 4, 4, 0, 0, true, true, false, EC_PREFETCH_ABORT_TO_HYP, FaultStat() 225}; 226template<> ArmFault::FaultVals ArmFaultVals<DataAbort>::vals = { 227 "Data Abort", 0x010, 0x000, 0x200, 0x400, 0x600, MODE_ABORT, 228 8, 8, 0, 0, true, true, false, EC_DATA_ABORT_TO_HYP, FaultStat() 229}; 230template<> ArmFault::FaultVals ArmFaultVals<VirtualDataAbort>::vals = { 231 "Virtual Data Abort", 0x010, 0x000, 0x200, 0x400, 0x600, MODE_ABORT, 232 8, 8, 0, 0, true, true, false, EC_INVALID, FaultStat() 233}; 234template<> ArmFault::FaultVals ArmFaultVals<HypervisorTrap>::vals = { 235 // @todo: double check these values 236 "Hypervisor Trap", 0x014, 0x000, 0x200, 0x400, 0x600, MODE_HYP, 237 0, 0, 0, 0, false, false, false, EC_UNKNOWN, FaultStat() 238}; 239template<> ArmFault::FaultVals ArmFaultVals<Interrupt>::vals = { 240 "IRQ", 0x018, 0x080, 0x280, 0x480, 0x680, MODE_IRQ, 241 4, 4, 0, 0, false, true, false, EC_UNKNOWN, FaultStat() 242}; 243template<> ArmFault::FaultVals ArmFaultVals<VirtualInterrupt>::vals = { 244 "Virtual IRQ", 0x018, 0x080, 0x280, 0x480, 0x680, MODE_IRQ, 245 4, 4, 0, 0, false, true, false, EC_INVALID, FaultStat() 246}; 247template<> ArmFault::FaultVals ArmFaultVals<FastInterrupt>::vals = { 248 "FIQ", 0x01C, 0x100, 0x300, 0x500, 0x700, MODE_FIQ, 249 4, 4, 0, 0, false, true, true, EC_UNKNOWN, FaultStat() 250}; 251template<> ArmFault::FaultVals ArmFaultVals<VirtualFastInterrupt>::vals = { 252 "Virtual FIQ", 0x01C, 0x100, 0x300, 0x500, 0x700, MODE_FIQ, 253 4, 4, 0, 0, false, true, true, EC_INVALID, FaultStat() 254}; 255template<> ArmFault::FaultVals ArmFaultVals<SupervisorTrap>::vals = { 256 // Some dummy values (SupervisorTrap is AArch64-only) 257 "Supervisor Trap", 0x014, 0x000, 0x200, 0x400, 0x600, MODE_SVC, 258 0, 0, 0, 0, false, false, false, EC_UNKNOWN, FaultStat() 259}; 260template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorTrap>::vals = { 261 // Some dummy values (SecureMonitorTrap is AArch64-only) 262 "Secure Monitor 
Trap", 0x014, 0x000, 0x200, 0x400, 0x600, MODE_MON, 263 0, 0, 0, 0, false, false, false, EC_UNKNOWN, FaultStat() 264}; 265template<> ArmFault::FaultVals ArmFaultVals<PCAlignmentFault>::vals = { 266 // Some dummy values (PCAlignmentFault is AArch64-only) 267 "PC Alignment Fault", 0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC, 268 0, 0, 0, 0, true, false, false, EC_PC_ALIGNMENT, FaultStat() 269}; 270template<> ArmFault::FaultVals ArmFaultVals<SPAlignmentFault>::vals = { 271 // Some dummy values (SPAlignmentFault is AArch64-only) 272 "SP Alignment Fault", 0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC, 273 0, 0, 0, 0, true, false, false, EC_STACK_PTR_ALIGNMENT, FaultStat() 274}; 275template<> ArmFault::FaultVals ArmFaultVals<SystemError>::vals = { 276 // Some dummy values (SError is AArch64-only) 277 "SError", 0x000, 0x180, 0x380, 0x580, 0x780, MODE_SVC, 278 0, 0, 0, 0, false, true, true, EC_SERROR, FaultStat() 279}; 280template<> ArmFault::FaultVals ArmFaultVals<FlushPipe>::vals = { 281 // Some dummy values 282 "Pipe Flush", 0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC, 283 0, 0, 0, 0, false, true, true, EC_UNKNOWN, FaultStat() 284}; 285template<> ArmFault::FaultVals ArmFaultVals<ArmSev>::vals = { 286 // Some dummy values 287 "ArmSev Flush", 0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC, 288 0, 0, 0, 0, false, true, true, EC_UNKNOWN, FaultStat() 289}; 290template<> ArmFault::FaultVals ArmFaultVals<IllegalInstSetStateFault>::vals = { 291 // Some dummy values (SPAlignmentFault is AArch64-only) 292 "Illegal Inst Set State Fault", 0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC, 293 0, 0, 0, 0, true, false, false, EC_ILLEGAL_INST, FaultStat() 294}; |
76 | 295 |
77template<> ArmFault::FaultVals ArmFaultVals<FlushPipe>::vals = 78{"Pipe Flush", 0x00, MODE_SVC, 0, 0, true, true, FaultStat()}; // dummy values 79 80template<> ArmFault::FaultVals ArmFaultVals<ArmSev>::vals = 81{"ArmSev Flush", 0x00, MODE_SVC, 0, 0, true, true, FaultStat()}; // dummy values 82Addr | 296Addr |
83ArmFault::getVector(ThreadContext *tc) 84{ | 297ArmFault::getVector(ThreadContext *tc) 298{ |
85 // ARM ARM B1-3 | 299 Addr base; |
86 | 300 |
87 SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR); | 301 // ARM ARM issue C B1.8.1 302 bool haveSecurity = ArmSystem::haveSecurity(tc); |
88 89 // panic if SCTLR.VE because I have no idea what to do with vectored 90 // interrupts | 303 304 // panic if SCTLR.VE because I have no idea what to do with vectored 305 // interrupts |
306 SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR); |
91 assert(!sctlr.ve); | 307 assert(!sctlr.ve); |
308 // Check for invalid modes 309 CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR); 310 assert(haveSecurity || cpsr.mode != MODE_MON); 311 assert(ArmSystem::haveVirtualization(tc) || cpsr.mode != MODE_HYP); |
92 | 312 |
93 if (!sctlr.v) 94 return offset(); 95 return offset() + HighVecs; | 313 switch (cpsr.mode) 314 { 315 case MODE_MON: 316 base = tc->readMiscReg(MISCREG_MVBAR); 317 break; 318 case MODE_HYP: 319 base = tc->readMiscReg(MISCREG_HVBAR); 320 break; 321 default: 322 if (sctlr.v) { 323 base = HighVecs; 324 } else { 325 base = haveSecurity ? tc->readMiscReg(MISCREG_VBAR) : 0; 326 } 327 break; 328 } 329 return base + offset(tc); 330} |
96 | 331 |
332Addr 333ArmFault::getVector64(ThreadContext *tc) 334{ 335 Addr vbar; 336 switch (toEL) { 337 case EL3: 338 assert(ArmSystem::haveSecurity(tc)); 339 vbar = tc->readMiscReg(MISCREG_VBAR_EL3); 340 break; 341 // @todo: uncomment this to enable Virtualization 342 // case EL2: 343 // assert(ArmSystem::haveVirtualization(tc)); 344 // vbar = tc->readMiscReg(MISCREG_VBAR_EL2); 345 // break; 346 case EL1: 347 vbar = tc->readMiscReg(MISCREG_VBAR_EL1); 348 break; 349 default: 350 panic("Invalid target exception level"); 351 break; 352 } 353 return vbar + offset64(); |
97} 98 | 354} 355 |
99void | 356MiscRegIndex 357ArmFault::getSyndromeReg64() const 358{ 359 switch (toEL) { 360 case EL1: 361 return MISCREG_ESR_EL1; 362 case EL2: 363 return MISCREG_ESR_EL2; 364 case EL3: 365 return MISCREG_ESR_EL3; 366 default: 367 panic("Invalid exception level"); 368 break; 369 } 370} 371 372MiscRegIndex 373ArmFault::getFaultAddrReg64() const 374{ 375 switch (toEL) { 376 case EL1: 377 return MISCREG_FAR_EL1; 378 case EL2: 379 return MISCREG_FAR_EL2; 380 case EL3: 381 return MISCREG_FAR_EL3; 382 default: 383 panic("Invalid exception level"); 384 break; 385 } 386} 387 388void 389ArmFault::setSyndrome(ThreadContext *tc, MiscRegIndex syndrome_reg) 390{ 391 uint32_t value; 392 uint32_t exc_class = (uint32_t) ec(tc); 393 uint32_t issVal = iss(); 394 assert(!from64 || ArmSystem::highestELIs64(tc)); 395 396 value = exc_class << 26; 397 398 // HSR.IL not valid for Prefetch Aborts (0x20, 0x21) and Data Aborts (0x24, 399 // 0x25) for which the ISS information is not valid (ARMv7). 400 // @todo: ARMv8 revises AArch32 functionality: when HSR.IL is not 401 // valid it is treated as RES1. 402 if (to64) { 403 value |= 1 << 25; 404 } else if ((bits(exc_class, 5, 3) != 4) || 405 (bits(exc_class, 2) && bits(issVal, 24))) { 406 if (!machInst.thumb || machInst.bigThumb) 407 value |= 1 << 25; 408 } 409 // Condition code valid for EC[5:4] nonzero 410 if (!from64 && ((bits(exc_class, 5, 4) == 0) && 411 (bits(exc_class, 3, 0) != 0))) { 412 if (!machInst.thumb) { 413 uint32_t cond; 414 ConditionCode condCode = (ConditionCode) (uint32_t) machInst.condCode; 415 // If its on unconditional instruction report with a cond code of 416 // 0xE, ie the unconditional code 417 cond = (condCode == COND_UC) ? COND_AL : condCode; 418 value |= cond << 20; 419 value |= 1 << 24; 420 } 421 value |= bits(issVal, 19, 0); 422 } else { 423 value |= issVal; 424 } 425 tc->setMiscReg(syndrome_reg, value); 426} 427 428void |
100ArmFault::invoke(ThreadContext *tc, StaticInstPtr inst) 101{ | 429ArmFault::invoke(ThreadContext *tc, StaticInstPtr inst) 430{ |
102 // ARM ARM B1.6.3 | 431 CPSR cpsr = tc->readMiscReg(MISCREG_CPSR); 432 433 if (ArmSystem::highestELIs64(tc)) { // ARMv8 434 // Determine source exception level and mode 435 fromMode = (OperatingMode) (uint8_t) cpsr.mode; 436 fromEL = opModeToEL(fromMode); 437 if (opModeIs64(fromMode)) 438 from64 = true; 439 440 // Determine target exception level 441 if (ArmSystem::haveSecurity(tc) && routeToMonitor(tc)) 442 toEL = EL3; 443 else 444 toEL = opModeToEL(nextMode()); 445 if (fromEL > toEL) 446 toEL = fromEL; 447 448 if (toEL == ArmSystem::highestEL(tc) || ELIs64(tc, toEL)) { 449 // Invoke exception handler in AArch64 state 450 to64 = true; 451 invoke64(tc, inst); 452 return; 453 } 454 } 455 456 // ARMv7 (ARM ARM issue C B1.9) 457 458 bool have_security = ArmSystem::haveSecurity(tc); 459 bool have_virtualization = ArmSystem::haveVirtualization(tc); 460 |
103 FaultBase::invoke(tc); 104 if (!FullSystem) 105 return; 106 countStat()++; 107 108 SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR); | 461 FaultBase::invoke(tc); 462 if (!FullSystem) 463 return; 464 countStat()++; 465 466 SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR); |
109 CPSR cpsr = tc->readMiscReg(MISCREG_CPSR); | 467 SCR scr = tc->readMiscReg(MISCREG_SCR); |
110 CPSR saved_cpsr = tc->readMiscReg(MISCREG_CPSR); 111 saved_cpsr.nz = tc->readIntReg(INTREG_CONDCODES_NZ); 112 saved_cpsr.c = tc->readIntReg(INTREG_CONDCODES_C); 113 saved_cpsr.v = tc->readIntReg(INTREG_CONDCODES_V); 114 saved_cpsr.ge = tc->readIntReg(INTREG_CONDCODES_GE); 115 116 Addr curPc M5_VAR_USED = tc->pcState().pc(); 117 ITSTATE it = tc->pcState().itstate(); 118 saved_cpsr.it2 = it.top6; 119 saved_cpsr.it1 = it.bottom2; 120 | 468 CPSR saved_cpsr = tc->readMiscReg(MISCREG_CPSR); 469 saved_cpsr.nz = tc->readIntReg(INTREG_CONDCODES_NZ); 470 saved_cpsr.c = tc->readIntReg(INTREG_CONDCODES_C); 471 saved_cpsr.v = tc->readIntReg(INTREG_CONDCODES_V); 472 saved_cpsr.ge = tc->readIntReg(INTREG_CONDCODES_GE); 473 474 Addr curPc M5_VAR_USED = tc->pcState().pc(); 475 ITSTATE it = tc->pcState().itstate(); 476 saved_cpsr.it2 = it.top6; 477 saved_cpsr.it1 = it.bottom2; 478 |
121 cpsr.mode = nextMode(); | 479 // if we have a valid instruction then use it to annotate this fault with 480 // extra information. This is used to generate the correct fault syndrome 481 // information 482 if (inst) { 483 ArmStaticInst *armInst = reinterpret_cast<ArmStaticInst *>(inst.get()); 484 armInst->annotateFault(this); 485 } 486 487 if (have_security && routeToMonitor(tc)) 488 cpsr.mode = MODE_MON; 489 else if (have_virtualization && routeToHyp(tc)) 490 cpsr.mode = MODE_HYP; 491 else 492 cpsr.mode = nextMode(); 493 494 // Ensure Secure state if initially in Monitor mode 495 if (have_security && saved_cpsr.mode == MODE_MON) { 496 SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR); 497 if (scr.ns) { 498 scr.ns = 0; 499 tc->setMiscRegNoEffect(MISCREG_SCR, scr); 500 } 501 } 502 503 // some bits are set differently if we have been routed to hyp mode 504 if (cpsr.mode == MODE_HYP) { 505 SCTLR hsctlr = tc->readMiscReg(MISCREG_HSCTLR); 506 cpsr.t = hsctlr.te; 507 cpsr.e = hsctlr.ee; 508 if (!scr.ea) {cpsr.a = 1;} 509 if (!scr.fiq) {cpsr.f = 1;} 510 if (!scr.irq) {cpsr.i = 1;} 511 } else if (cpsr.mode == MODE_MON) { 512 // Special case handling when entering monitor mode 513 cpsr.t = sctlr.te; 514 cpsr.e = sctlr.ee; 515 cpsr.a = 1; 516 cpsr.f = 1; 517 cpsr.i = 1; 518 } else { 519 cpsr.t = sctlr.te; 520 cpsr.e = sctlr.ee; 521 522 // The *Disable functions are virtual and different per fault 523 cpsr.a = cpsr.a | abortDisable(tc); 524 cpsr.f = cpsr.f | fiqDisable(tc); 525 cpsr.i = 1; 526 } |
122 cpsr.it1 = cpsr.it2 = 0; 123 cpsr.j = 0; | 527 cpsr.it1 = cpsr.it2 = 0; 528 cpsr.j = 0; |
124 125 cpsr.t = sctlr.te; 126 cpsr.a = cpsr.a | abortDisable(); 127 cpsr.f = cpsr.f | fiqDisable(); 128 cpsr.i = 1; 129 cpsr.e = sctlr.ee; | |
130 tc->setMiscReg(MISCREG_CPSR, cpsr); | 529 tc->setMiscReg(MISCREG_CPSR, cpsr); |
530 |
131 // Make sure mailbox sets to one always 132 tc->setMiscReg(MISCREG_SEV_MAILBOX, 1); | 531 // Make sure mailbox sets to one always 532 tc->setMiscReg(MISCREG_SEV_MAILBOX, 1); |
133 tc->setIntReg(INTREG_LR, curPc + 134 (saved_cpsr.t ? thumbPcOffset() : armPcOffset())); | |
135 | 533 |
136 switch (nextMode()) { | 534 // Clear the exclusive monitor 535 tc->setMiscReg(MISCREG_LOCKFLAG, 0); 536 537 if (cpsr.mode == MODE_HYP) { 538 tc->setMiscReg(MISCREG_ELR_HYP, curPc + 539 (saved_cpsr.t ? thumbPcOffset(true) : armPcOffset(true))); 540 } else { 541 tc->setIntReg(INTREG_LR, curPc + 542 (saved_cpsr.t ? thumbPcOffset(false) : armPcOffset(false))); 543 } 544 545 switch (cpsr.mode) { |
137 case MODE_FIQ: 138 tc->setMiscReg(MISCREG_SPSR_FIQ, saved_cpsr); 139 break; 140 case MODE_IRQ: 141 tc->setMiscReg(MISCREG_SPSR_IRQ, saved_cpsr); 142 break; 143 case MODE_SVC: 144 tc->setMiscReg(MISCREG_SPSR_SVC, saved_cpsr); 145 break; | 546 case MODE_FIQ: 547 tc->setMiscReg(MISCREG_SPSR_FIQ, saved_cpsr); 548 break; 549 case MODE_IRQ: 550 tc->setMiscReg(MISCREG_SPSR_IRQ, saved_cpsr); 551 break; 552 case MODE_SVC: 553 tc->setMiscReg(MISCREG_SPSR_SVC, saved_cpsr); 554 break; |
146 case MODE_UNDEFINED: 147 tc->setMiscReg(MISCREG_SPSR_UND, saved_cpsr); | 555 case MODE_MON: 556 assert(have_security); 557 tc->setMiscReg(MISCREG_SPSR_MON, saved_cpsr); |
148 break; 149 case MODE_ABORT: 150 tc->setMiscReg(MISCREG_SPSR_ABT, saved_cpsr); 151 break; | 558 break; 559 case MODE_ABORT: 560 tc->setMiscReg(MISCREG_SPSR_ABT, saved_cpsr); 561 break; |
562 case MODE_UNDEFINED: 563 tc->setMiscReg(MISCREG_SPSR_UND, saved_cpsr); 564 if (ec(tc) != EC_UNKNOWN) 565 setSyndrome(tc, MISCREG_HSR); 566 break; 567 case MODE_HYP: 568 assert(have_virtualization); 569 tc->setMiscReg(MISCREG_SPSR_HYP, saved_cpsr); 570 setSyndrome(tc, MISCREG_HSR); 571 break; |
152 default: 153 panic("unknown Mode\n"); 154 } 155 156 Addr newPc = getVector(tc); 157 DPRINTF(Faults, "Invoking Fault:%s cpsr:%#x PC:%#x lr:%#x newVec: %#x\n", 158 name(), cpsr, curPc, tc->readIntReg(INTREG_LR), newPc); 159 PCState pc(newPc); 160 pc.thumb(cpsr.t); 161 pc.nextThumb(pc.thumb()); 162 pc.jazelle(cpsr.j); 163 pc.nextJazelle(pc.jazelle()); | 572 default: 573 panic("unknown Mode\n"); 574 } 575 576 Addr newPc = getVector(tc); 577 DPRINTF(Faults, "Invoking Fault:%s cpsr:%#x PC:%#x lr:%#x newVec: %#x\n", 578 name(), cpsr, curPc, tc->readIntReg(INTREG_LR), newPc); 579 PCState pc(newPc); 580 pc.thumb(cpsr.t); 581 pc.nextThumb(pc.thumb()); 582 pc.jazelle(cpsr.j); 583 pc.nextJazelle(pc.jazelle()); |
584 pc.aarch64(!cpsr.width); 585 pc.nextAArch64(!cpsr.width); |
164 tc->pcState(pc); 165} 166 167void | 586 tc->pcState(pc); 587} 588 589void |
590ArmFault::invoke64(ThreadContext *tc, StaticInstPtr inst) 591{ 592 // Determine actual misc. register indices for ELR_ELx and SPSR_ELx 593 MiscRegIndex elr_idx, spsr_idx; 594 switch (toEL) { 595 case EL1: 596 elr_idx = MISCREG_ELR_EL1; 597 spsr_idx = MISCREG_SPSR_EL1; 598 break; 599 // @todo: uncomment this to enable Virtualization 600 // case EL2: 601 // assert(ArmSystem::haveVirtualization()); 602 // elr_idx = MISCREG_ELR_EL2; 603 // spsr_idx = MISCREG_SPSR_EL2; 604 // break; 605 case EL3: 606 assert(ArmSystem::haveSecurity(tc)); 607 elr_idx = MISCREG_ELR_EL3; 608 spsr_idx = MISCREG_SPSR_EL3; 609 break; 610 default: 611 panic("Invalid target exception level"); 612 break; 613 } 614 615 // Save process state into SPSR_ELx 616 CPSR cpsr = tc->readMiscReg(MISCREG_CPSR); 617 CPSR spsr = cpsr; 618 spsr.nz = tc->readIntReg(INTREG_CONDCODES_NZ); 619 spsr.c = tc->readIntReg(INTREG_CONDCODES_C); 620 spsr.v = tc->readIntReg(INTREG_CONDCODES_V); 621 if (from64) { 622 // Force some bitfields to 0 623 spsr.q = 0; 624 spsr.it1 = 0; 625 spsr.j = 0; 626 spsr.res0_23_22 = 0; 627 spsr.ge = 0; 628 spsr.it2 = 0; 629 spsr.t = 0; 630 } else { 631 spsr.ge = tc->readIntReg(INTREG_CONDCODES_GE); 632 ITSTATE it = tc->pcState().itstate(); 633 spsr.it2 = it.top6; 634 spsr.it1 = it.bottom2; 635 // Force some bitfields to 0 636 spsr.res0_23_22 = 0; 637 spsr.ss = 0; 638 } 639 tc->setMiscReg(spsr_idx, spsr); 640 641 // Save preferred return address into ELR_ELx 642 Addr curr_pc = tc->pcState().pc(); 643 Addr ret_addr = curr_pc; 644 if (from64) 645 ret_addr += armPcElrOffset(); 646 else 647 ret_addr += spsr.t ? thumbPcElrOffset() : armPcElrOffset(); 648 tc->setMiscReg(elr_idx, ret_addr); 649 650 // Update process state 651 OperatingMode64 mode = 0; 652 mode.spX = 1; 653 mode.el = toEL; 654 mode.width = 0; 655 cpsr.mode = mode; 656 cpsr.daif = 0xf; 657 cpsr.il = 0; 658 cpsr.ss = 0; 659 tc->setMiscReg(MISCREG_CPSR, cpsr); 660 661 // Set PC to start of exception handler 662 Addr new_pc = purifyTaggedAddr(getVector64(tc), tc, toEL); 663 DPRINTF(Faults, "Invoking Fault (AArch64 target EL):%s cpsr:%#x PC:%#x " 664 "elr:%#x newVec: %#x\n", name(), cpsr, curr_pc, ret_addr, new_pc); 665 PCState pc(new_pc); 666 pc.aarch64(!cpsr.width); 667 pc.nextAArch64(!cpsr.width); 668 tc->pcState(pc); 669 670 // If we have a valid instruction then use it to annotate this fault with 671 // extra information. This is used to generate the correct fault syndrome 672 // information 673 if (inst) 674 reinterpret_cast<ArmStaticInst *>(inst.get())->annotateFault(this); 675 // Save exception syndrome 676 if ((nextMode() != MODE_IRQ) && (nextMode() != MODE_FIQ)) 677 setSyndrome(tc, getSyndromeReg64()); 678} 679 680void |
168Reset::invoke(ThreadContext *tc, StaticInstPtr inst) 169{ 170 if (FullSystem) { 171 tc->getCpuPtr()->clearInterrupts(); 172 tc->clearArchRegs(); 173 } | 681Reset::invoke(ThreadContext *tc, StaticInstPtr inst) 682{ 683 if (FullSystem) { 684 tc->getCpuPtr()->clearInterrupts(); 685 tc->clearArchRegs(); 686 } |
174 ArmFault::invoke(tc, inst); | 687 if (!ArmSystem::highestELIs64(tc)) { 688 ArmFault::invoke(tc, inst); 689 tc->setMiscReg(MISCREG_VMPIDR, 690 getMPIDR(dynamic_cast<ArmSystem*>(tc->getSystemPtr()), tc)); 691 692 // Unless we have SMC code to get us there, boot in HYP! 693 if (ArmSystem::haveVirtualization(tc) && 694 !ArmSystem::haveSecurity(tc)) { 695 CPSR cpsr = tc->readMiscReg(MISCREG_CPSR); 696 cpsr.mode = MODE_HYP; 697 tc->setMiscReg(MISCREG_CPSR, cpsr); 698 } 699 } else { 700 // Advance the PC to the IMPLEMENTATION DEFINED reset value 701 PCState pc = ArmSystem::resetAddr64(tc); 702 pc.aarch64(true); 703 pc.nextAArch64(true); 704 tc->pcState(pc); 705 } |
175} 176 177void 178UndefinedInstruction::invoke(ThreadContext *tc, StaticInstPtr inst) 179{ 180 if (FullSystem) { 181 ArmFault::invoke(tc, inst); 182 return; --- 8 unchanged lines hidden --- 191 panic("Attempted to execute unknown instruction (inst 0x%08x)", 192 machInst); 193 } else { 194 panic("Attempted to execute unimplemented instruction " 195 "'%s' (inst 0x%08x)", mnemonic, machInst); 196 } 197} 198 | 706} 707 708void 709UndefinedInstruction::invoke(ThreadContext *tc, StaticInstPtr inst) 710{ 711 if (FullSystem) { 712 ArmFault::invoke(tc, inst); 713 return; --- 8 unchanged lines hidden --- 722 panic("Attempted to execute unknown instruction (inst 0x%08x)", 723 machInst); 724 } else { 725 panic("Attempted to execute unimplemented instruction " 726 "'%s' (inst 0x%08x)", mnemonic, machInst); 727 } 728} 729 |
730bool 731UndefinedInstruction::routeToHyp(ThreadContext *tc) const 732{ 733 bool toHyp; 734 735 SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR); 736 HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR); 737 CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR); 738 739 // if in Hyp mode then stay in Hyp mode 740 toHyp = scr.ns && (cpsr.mode == MODE_HYP); 741 // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector 742 toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.mode == MODE_USER); 743 return toHyp; 744} 745 746uint32_t 747UndefinedInstruction::iss() const 748{ 749 if (overrideEc == EC_INVALID) 750 return issRaw; 751 752 uint32_t new_iss = 0; 753 uint32_t op0, op1, op2, CRn, CRm, Rt, dir; 754 755 dir = bits(machInst, 21, 21); 756 op0 = bits(machInst, 20, 19); 757 op1 = bits(machInst, 18, 16); 758 CRn = bits(machInst, 15, 12); 759 CRm = bits(machInst, 11, 8); 760 op2 = bits(machInst, 7, 5); 761 Rt = bits(machInst, 4, 0); 762 763 new_iss = op0 << 20 | op2 << 17 | op1 << 14 | CRn << 10 | 764 Rt << 5 | CRm << 1 | dir; 765 766 return new_iss; 767} 768 |
199void 200SupervisorCall::invoke(ThreadContext *tc, StaticInstPtr inst) 201{ 202 if (FullSystem) { 203 ArmFault::invoke(tc, inst); 204 return; 205 } 206 207 // As of now, there isn't a 32 bit thumb version of this instruction. 208 assert(!machInst.bigThumb); 209 uint32_t callNum; | 769void 770SupervisorCall::invoke(ThreadContext *tc, StaticInstPtr inst) 771{ 772 if (FullSystem) { 773 ArmFault::invoke(tc, inst); 774 return; 775 } 776 777 // As of now, there isn't a 32 bit thumb version of this instruction. 778 assert(!machInst.bigThumb); 779 uint32_t callNum; |
210 callNum = tc->readIntReg(INTREG_R7); | 780 CPSR cpsr = tc->readMiscReg(MISCREG_CPSR); 781 OperatingMode mode = (OperatingMode)(uint8_t)cpsr.mode; 782 if (opModeIs64(mode)) 783 callNum = tc->readIntReg(INTREG_X8); 784 else 785 callNum = tc->readIntReg(INTREG_R7); |
211 tc->syscall(callNum); 212 213 // Advance the PC since that won't happen automatically. 214 PCState pc = tc->pcState(); 215 assert(inst); 216 inst->advancePC(pc); 217 tc->pcState(pc); 218} 219 | 786 tc->syscall(callNum); 787 788 // Advance the PC since that won't happen automatically. 789 PCState pc = tc->pcState(); 790 assert(inst); 791 inst->advancePC(pc); 792 tc->pcState(pc); 793} 794 |
795bool 796SupervisorCall::routeToHyp(ThreadContext *tc) const 797{ 798 bool toHyp; 799 800 SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR); 801 HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR); 802 CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR); 803 804 // if in Hyp mode then stay in Hyp mode 805 toHyp = scr.ns && (cpsr.mode == MODE_HYP); 806 // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector 807 toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.mode == MODE_USER); 808 return toHyp; 809} 810 811ExceptionClass 812SupervisorCall::ec(ThreadContext *tc) const 813{ 814 return (overrideEc != EC_INVALID) ? overrideEc : 815 (from64 ? EC_SVC_64 : vals.ec); 816} 817 818uint32_t 819SupervisorCall::iss() const 820{ 821 // Even if we have a 24 bit imm from an arm32 instruction then we only use 822 // the bottom 16 bits for the ISS value (it doesn't hurt for AArch64 SVC). 823 return issRaw & 0xFFFF; 824} 825 826uint32_t 827SecureMonitorCall::iss() const 828{ 829 if (from64) 830 return bits(machInst, 20, 5); 831 return 0; 832} 833 834ExceptionClass 835UndefinedInstruction::ec(ThreadContext *tc) const 836{ 837 return (overrideEc != EC_INVALID) ? overrideEc : vals.ec; 838} 839 840 841HypervisorCall::HypervisorCall(ExtMachInst _machInst, uint32_t _imm) : 842 ArmFaultVals<HypervisorCall>(_machInst, _imm) 843{} 844 845ExceptionClass 846HypervisorTrap::ec(ThreadContext *tc) const 847{ 848 return (overrideEc != EC_INVALID) ? overrideEc : vals.ec; 849} 850 |
220template<class T> | 851template<class T> |
852FaultOffset 853ArmFaultVals<T>::offset(ThreadContext *tc) 854{ 855 bool isHypTrap = false; 856 857 // Normally we just use the exception vector from the table at the top if 858 // this file, however if this exception has caused a transition to hype 859 // mode, and its an exception type that would only do this if it has been 860 // trapped then we use the hyp trap vector instead of the normal vector 861 if (vals.hypTrappable) { 862 CPSR cpsr = tc->readMiscReg(MISCREG_CPSR); 863 if (cpsr.mode == MODE_HYP) { 864 CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP); 865 isHypTrap = spsr.mode != MODE_HYP; 866 } 867 } 868 return isHypTrap ? 0x14 : vals.offset; 869} 870 871// void 872// SupervisorCall::setSyndrome64(ThreadContext *tc, MiscRegIndex esr_idx) 873// { 874// ESR esr = 0; 875// esr.ec = machInst.aarch64 ? SvcAArch64 : SvcAArch32; 876// esr.il = !machInst.thumb; 877// if (machInst.aarch64) 878// esr.imm16 = bits(machInst.instBits, 20, 5); 879// else if (machInst.thumb) 880// esr.imm16 = bits(machInst.instBits, 7, 0); 881// else 882// esr.imm16 = bits(machInst.instBits, 15, 0); 883// tc->setMiscReg(esr_idx, esr); 884// } 885 |
221void | 886void |
887SecureMonitorCall::invoke(ThreadContext *tc, StaticInstPtr inst) 888{ 889 if (FullSystem) { 890 ArmFault::invoke(tc, inst); 891 return; 892 } 893} 894 895ExceptionClass 896SecureMonitorCall::ec(ThreadContext *tc) const 897{ 898 return (from64 ? EC_SMC_64 : vals.ec); 899} 900 901ExceptionClass 902SupervisorTrap::ec(ThreadContext *tc) const 903{ 904 return (overrideEc != EC_INVALID) ? overrideEc : vals.ec; 905} 906 907ExceptionClass 908SecureMonitorTrap::ec(ThreadContext *tc) const 909{ 910 return (overrideEc != EC_INVALID) ? overrideEc : 911 (from64 ? EC_SMC_64 : vals.ec); 912} 913 914template<class T> 915void |
222AbortFault<T>::invoke(ThreadContext *tc, StaticInstPtr inst) 223{ | 916AbortFault<T>::invoke(ThreadContext *tc, StaticInstPtr inst) 917{ |
918 if (tranMethod == ArmFault::UnknownTran) { 919 tranMethod = longDescFormatInUse(tc) ? ArmFault::LpaeTran 920 : ArmFault::VmsaTran; 921 922 if ((tranMethod == ArmFault::VmsaTran) && this->routeToMonitor(tc)) { 923 // See ARM ARM B3-1416 924 bool override_LPAE = false; 925 TTBCR ttbcr_s = tc->readMiscReg(MISCREG_TTBCR_S); 926 TTBCR M5_VAR_USED ttbcr_ns = tc->readMiscReg(MISCREG_TTBCR_NS); 927 if (ttbcr_s.eae) { 928 override_LPAE = true; 929 } else { 930 // Unimplemented code option, not seen in testing. May need 931 // extension according to the manual exceprt above. 932 DPRINTF(Faults, "Warning: Incomplete translation method " 933 "override detected.\n"); 934 } 935 if (override_LPAE) 936 tranMethod = ArmFault::LpaeTran; 937 } 938 } 939 940 if (source == ArmFault::AsynchronousExternalAbort) { 941 tc->getCpuPtr()->clearInterrupt(INT_ABT, 0); 942 } 943 // Get effective fault source encoding 944 CPSR cpsr = tc->readMiscReg(MISCREG_CPSR); 945 FSR fsr = getFsr(tc); 946 947 // source must be determined BEFORE invoking generic routines which will 948 // try to set hsr etc. and are based upon source! |
224 ArmFaultVals<T>::invoke(tc, inst); | 949 ArmFaultVals<T>::invoke(tc, inst); |
950 951 if (cpsr.width) { // AArch32 952 if (cpsr.mode == MODE_HYP) { 953 tc->setMiscReg(T::HFarIndex, faultAddr); 954 } else if (stage2) { 955 tc->setMiscReg(MISCREG_HPFAR, (faultAddr >> 8) & ~0xf); 956 tc->setMiscReg(T::HFarIndex, OVAddr); 957 } else { 958 tc->setMiscReg(T::FsrIndex, fsr); 959 tc->setMiscReg(T::FarIndex, faultAddr); 960 } 961 DPRINTF(Faults, "Abort Fault source=%#x fsr=%#x faultAddr=%#x "\ 962 "tranMethod=%#x\n", source, fsr, faultAddr, tranMethod); 963 } else { // AArch64 964 // Set the FAR register. Nothing else to do if we are in AArch64 state 965 // because the syndrome register has already been set inside invoke64() 966 tc->setMiscReg(AbortFault<T>::getFaultAddrReg64(), faultAddr); 967 } 968} 969 970template<class T> 971FSR 972AbortFault<T>::getFsr(ThreadContext *tc) 973{ |
225 FSR fsr = 0; | 974 FSR fsr = 0; |
226 fsr.fsLow = bits(status, 3, 0); 227 fsr.fsHigh = bits(status, 4); 228 fsr.domain = domain; 229 fsr.wnr = (write ? 1 : 0); 230 fsr.ext = 0; 231 tc->setMiscReg(T::FsrIndex, fsr); 232 tc->setMiscReg(T::FarIndex, faultAddr); | |
233 | 975 |
234 DPRINTF(Faults, "Abort Fault fsr=%#x faultAddr=%#x\n", fsr, faultAddr); | 976 if (((CPSR) tc->readMiscRegNoEffect(MISCREG_CPSR)).width) { 977 // AArch32 978 assert(tranMethod != ArmFault::UnknownTran); 979 if (tranMethod == ArmFault::LpaeTran) { 980 srcEncoded = ArmFault::longDescFaultSources[source]; 981 fsr.status = srcEncoded; 982 fsr.lpae = 1; 983 } else { 984 srcEncoded = ArmFault::shortDescFaultSources[source]; 985 fsr.fsLow = bits(srcEncoded, 3, 0); 986 fsr.fsHigh = bits(srcEncoded, 4); 987 fsr.domain = static_cast<uint8_t>(domain); 988 } 989 fsr.wnr = (write ? 1 : 0); 990 fsr.ext = 0; 991 } else { 992 // AArch64 993 srcEncoded = ArmFault::aarch64FaultSources[source]; 994 } 995 if (srcEncoded == ArmFault::FaultSourceInvalid) { 996 panic("Invalid fault source\n"); 997 } 998 return fsr; |
235} 236 | 999} 1000 |
1001template<class T> 1002bool 1003AbortFault<T>::abortDisable(ThreadContext *tc) 1004{ 1005 if (ArmSystem::haveSecurity(tc)) { 1006 SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR); 1007 return (!scr.ns || scr.aw); 1008 } 1009 return true; 1010} 1011 1012template<class T> |
237void | 1013void |
1014AbortFault<T>::annotate(ArmFault::AnnotationIDs id, uint64_t val) 1015{ 1016 switch (id) 1017 { 1018 case ArmFault::S1PTW: 1019 s1ptw = val; 1020 break; 1021 case ArmFault::OVA: 1022 OVAddr = val; 1023 break; 1024 1025 // Just ignore unknown ID's 1026 default: 1027 break; 1028 } 1029} 1030 1031template<class T> 1032uint32_t 1033AbortFault<T>::iss() const 1034{ 1035 uint32_t val; 1036 1037 val = srcEncoded & 0x3F; 1038 val |= write << 6; 1039 val |= s1ptw << 7; 1040 return (val); 1041} 1042 1043template<class T> 1044bool 1045AbortFault<T>::isMMUFault() const 1046{ 1047 // NOTE: Not relying on LL information being aligned to lowest bits here 1048 return 1049 (source == ArmFault::AlignmentFault) || 1050 ((source >= ArmFault::TranslationLL) && 1051 (source < ArmFault::TranslationLL + 4)) || 1052 ((source >= ArmFault::AccessFlagLL) && 1053 (source < ArmFault::AccessFlagLL + 4)) || 1054 ((source >= ArmFault::DomainLL) && 1055 (source < ArmFault::DomainLL + 4)) || 1056 ((source >= ArmFault::PermissionLL) && 1057 (source < ArmFault::PermissionLL + 4)); 1058} 1059 1060ExceptionClass 1061PrefetchAbort::ec(ThreadContext *tc) const 1062{ 1063 if (to64) { 1064 // AArch64 1065 if (toEL == fromEL) 1066 return EC_PREFETCH_ABORT_CURR_EL; 1067 else 1068 return EC_PREFETCH_ABORT_LOWER_EL; 1069 } else { 1070 // AArch32 1071 // Abort faults have different EC codes depending on whether 1072 // the fault originated within HYP mode, or not. So override 1073 // the method and add the extra adjustment of the EC value. 1074 1075 ExceptionClass ec = ArmFaultVals<PrefetchAbort>::vals.ec; 1076 1077 CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP); 1078 if (spsr.mode == MODE_HYP) { 1079 ec = ((ExceptionClass) (((uint32_t) ec) + 1)); 1080 } 1081 return ec; 1082 } 1083} 1084 1085bool 1086PrefetchAbort::routeToMonitor(ThreadContext *tc) const 1087{ 1088 SCR scr = 0; 1089 if (from64) 1090 scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3); 1091 else 1092 scr = tc->readMiscRegNoEffect(MISCREG_SCR); 1093 1094 return scr.ea && !isMMUFault(); 1095} 1096 1097bool 1098PrefetchAbort::routeToHyp(ThreadContext *tc) const 1099{ 1100 bool toHyp; 1101 1102 SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR); 1103 HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR); 1104 CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR); 1105 HDCR hdcr = tc->readMiscRegNoEffect(MISCREG_HDCR); 1106 1107 // if in Hyp mode then stay in Hyp mode 1108 toHyp = scr.ns && (cpsr.mode == MODE_HYP); 1109 // otherwise, check whether to take to Hyp mode through Hyp Trap vector 1110 toHyp |= (stage2 || 1111 ( (source == DebugEvent) && hdcr.tde && (cpsr.mode != MODE_HYP)) || 1112 ( (source == SynchronousExternalAbort) && hcr.tge && (cpsr.mode == MODE_USER)) 1113 ) && !inSecureState(scr, cpsr); 1114 return toHyp; 1115} 1116 1117ExceptionClass 1118DataAbort::ec(ThreadContext *tc) const 1119{ 1120 if (to64) { 1121 // AArch64 1122 if (source == ArmFault::AsynchronousExternalAbort) { 1123 panic("Asynchronous External Abort should be handled with \ 1124 SystemErrors (SErrors)!"); 1125 } 1126 if (toEL == fromEL) 1127 return EC_DATA_ABORT_CURR_EL; 1128 else 1129 return EC_DATA_ABORT_LOWER_EL; 1130 } else { 1131 // AArch32 1132 // Abort faults have different EC codes depending on whether 1133 // the fault originated within HYP mode, or not. So override 1134 // the method and add the extra adjustment of the EC value. 
1135 1136 ExceptionClass ec = ArmFaultVals<DataAbort>::vals.ec; 1137 1138 CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP); 1139 if (spsr.mode == MODE_HYP) { 1140 ec = ((ExceptionClass) (((uint32_t) ec) + 1)); 1141 } 1142 return ec; 1143 } 1144} 1145 1146bool 1147DataAbort::routeToMonitor(ThreadContext *tc) const 1148{ 1149 SCR scr = 0; 1150 if (from64) 1151 scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3); 1152 else 1153 scr = tc->readMiscRegNoEffect(MISCREG_SCR); 1154 1155 return scr.ea && !isMMUFault(); 1156} 1157 1158bool 1159DataAbort::routeToHyp(ThreadContext *tc) const 1160{ 1161 bool toHyp; 1162 1163 SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR); 1164 HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR); 1165 CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR); 1166 HDCR hdcr = tc->readMiscRegNoEffect(MISCREG_HDCR); 1167 1168 // if in Hyp mode then stay in Hyp mode 1169 toHyp = scr.ns && (cpsr.mode == MODE_HYP); 1170 // otherwise, check whether to take to Hyp mode through Hyp Trap vector 1171 toHyp |= (stage2 || 1172 ( (cpsr.mode != MODE_HYP) && ( ((source == AsynchronousExternalAbort) && hcr.amo) || 1173 ((source == DebugEvent) && hdcr.tde) ) 1174 ) || 1175 ( (cpsr.mode == MODE_USER) && hcr.tge && 1176 ((source == AlignmentFault) || 1177 (source == SynchronousExternalAbort)) 1178 ) 1179 ) && !inSecureState(scr, cpsr); 1180 return toHyp; 1181} 1182 1183uint32_t 1184DataAbort::iss() const 1185{ 1186 uint32_t val; 1187 1188 // Add on the data abort specific fields to the generic abort ISS value 1189 val = AbortFault<DataAbort>::iss(); 1190 // ISS is valid if not caused by a stage 1 page table walk, and when taken 1191 // to AArch64 only when directed to EL2 1192 if (!s1ptw && (!to64 || toEL == EL2)) { 1193 val |= isv << 24; 1194 if (isv) { 1195 val |= sas << 22; 1196 val |= sse << 21; 1197 val |= srt << 16; 1198 // AArch64 only. These assignments are safe on AArch32 as well 1199 // because these vars are initialized to false 1200 val |= sf << 15; 1201 val |= ar << 14; 1202 } 1203 } 1204 return (val); 1205} 1206 1207void 1208DataAbort::annotate(AnnotationIDs id, uint64_t val) 1209{ 1210 AbortFault<DataAbort>::annotate(id, val); 1211 switch (id) 1212 { 1213 case SAS: 1214 isv = true; 1215 sas = val; 1216 break; 1217 case SSE: 1218 isv = true; 1219 sse = val; 1220 break; 1221 case SRT: 1222 isv = true; 1223 srt = val; 1224 break; 1225 case SF: 1226 isv = true; 1227 sf = val; 1228 break; 1229 case AR: 1230 isv = true; 1231 ar = val; 1232 break; 1233 // Just ignore unknown ID's 1234 default: 1235 break; 1236 } 1237} 1238 1239void 1240VirtualDataAbort::invoke(ThreadContext *tc, StaticInstPtr inst) 1241{ 1242 AbortFault<VirtualDataAbort>::invoke(tc, inst); 1243 HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR); 1244 hcr.va = 0; 1245 tc->setMiscRegNoEffect(MISCREG_HCR, hcr); 1246} 1247 1248bool 1249Interrupt::routeToMonitor(ThreadContext *tc) const 1250{ 1251 assert(ArmSystem::haveSecurity(tc)); 1252 SCR scr = 0; 1253 if (from64) 1254 scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3); 1255 else 1256 scr = tc->readMiscRegNoEffect(MISCREG_SCR); 1257 return scr.irq; 1258} 1259 1260bool 1261Interrupt::routeToHyp(ThreadContext *tc) const 1262{ 1263 bool toHyp; 1264 1265 SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR); 1266 HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR); 1267 CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR); 1268 // Determine whether IRQs are routed to Hyp mode. 
1269 toHyp = (!scr.irq && hcr.imo && !inSecureState(scr, cpsr)) || 1270 (cpsr.mode == MODE_HYP); 1271 return toHyp; 1272} 1273 1274bool 1275Interrupt::abortDisable(ThreadContext *tc) 1276{ 1277 if (ArmSystem::haveSecurity(tc)) { 1278 SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR); 1279 return (!scr.ns || scr.aw); 1280 } 1281 return true; 1282} 1283 1284VirtualInterrupt::VirtualInterrupt() 1285{} 1286 1287bool 1288FastInterrupt::routeToMonitor(ThreadContext *tc) const 1289{ 1290 assert(ArmSystem::haveSecurity(tc)); 1291 SCR scr = 0; 1292 if (from64) 1293 scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3); 1294 else 1295 scr = tc->readMiscRegNoEffect(MISCREG_SCR); 1296 return scr.fiq; 1297} 1298 1299bool 1300FastInterrupt::routeToHyp(ThreadContext *tc) const 1301{ 1302 bool toHyp; 1303 1304 SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR); 1305 HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR); 1306 CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR); 1307 // Determine whether IRQs are routed to Hyp mode. 1308 toHyp = (!scr.fiq && hcr.fmo && !inSecureState(scr, cpsr)) || 1309 (cpsr.mode == MODE_HYP); 1310 return toHyp; 1311} 1312 1313bool 1314FastInterrupt::abortDisable(ThreadContext *tc) 1315{ 1316 if (ArmSystem::haveSecurity(tc)) { 1317 SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR); 1318 return (!scr.ns || scr.aw); 1319 } 1320 return true; 1321} 1322 1323bool 1324FastInterrupt::fiqDisable(ThreadContext *tc) 1325{ 1326 if (ArmSystem::haveVirtualization(tc)) { 1327 return true; 1328 } else if (ArmSystem::haveSecurity(tc)) { 1329 SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR); 1330 return (!scr.ns || scr.fw); 1331 } 1332 return true; 1333} 1334 1335VirtualFastInterrupt::VirtualFastInterrupt() 1336{} 1337 1338void 1339PCAlignmentFault::invoke(ThreadContext *tc, StaticInstPtr inst) 1340{ 1341 ArmFaultVals<PCAlignmentFault>::invoke(tc, inst); 1342 assert(from64); 1343 // Set the FAR 1344 tc->setMiscReg(getFaultAddrReg64(), faultPC); 1345} 1346 1347SPAlignmentFault::SPAlignmentFault() 1348{} 1349 1350SystemError::SystemError() 1351{} 1352 1353void 1354SystemError::invoke(ThreadContext *tc, StaticInstPtr inst) 1355{ 1356 tc->getCpuPtr()->clearInterrupt(INT_ABT, 0); 1357 ArmFault::invoke(tc, inst); 1358} 1359 1360bool 1361SystemError::routeToMonitor(ThreadContext *tc) const 1362{ 1363 assert(ArmSystem::haveSecurity(tc)); 1364 assert(from64); 1365 SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3); 1366 return scr.ea; 1367} 1368 1369bool 1370SystemError::routeToHyp(ThreadContext *tc) const 1371{ 1372 bool toHyp; 1373 assert(from64); 1374 1375 SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3); 1376 HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR); 1377 CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR); 1378 1379 toHyp = (!scr.ea && hcr.amo && !inSecureState(scr, cpsr)) || 1380 (!scr.ea && !scr.rw && !hcr.amo && !inSecureState(scr,cpsr)); 1381 return toHyp; 1382} 1383 1384void |
238FlushPipe::invoke(ThreadContext *tc, StaticInstPtr inst) { 239 DPRINTF(Faults, "Invoking FlushPipe Fault\n"); 240 241 // Set the PC to the next instruction of the faulting instruction. 242 // Net effect is simply squashing all instructions behind and 243 // start refetching from the next instruction. 244 PCState pc = tc->pcState(); 245 assert(inst); 246 inst->advancePC(pc); 247 tc->pcState(pc); 248} 249 | 1385FlushPipe::invoke(ThreadContext *tc, StaticInstPtr inst) { 1386 DPRINTF(Faults, "Invoking FlushPipe Fault\n"); 1387 1388 // Set the PC to the next instruction of the faulting instruction. 1389 // Net effect is simply squashing all instructions behind and 1390 // start refetching from the next instruction. 1391 PCState pc = tc->pcState(); 1392 assert(inst); 1393 inst->advancePC(pc); 1394 tc->pcState(pc); 1395} 1396 |
250template void AbortFault<PrefetchAbort>::invoke(ThreadContext *tc, 251 StaticInstPtr inst); 252template void AbortFault<DataAbort>::invoke(ThreadContext *tc, 253 StaticInstPtr inst); 254 | |
255void 256ArmSev::invoke(ThreadContext *tc, StaticInstPtr inst) { 257 DPRINTF(Faults, "Invoking ArmSev Fault\n"); 258 if (!FullSystem) 259 return; 260 261 // Set sev_mailbox to 1, clear the pending interrupt from remote 262 // SEV execution and let pipeline continue as pcState is still 263 // valid. 264 tc->setMiscReg(MISCREG_SEV_MAILBOX, 1); 265 tc->getCpuPtr()->clearInterrupt(INT_SEV, 0); 266} 267 | 1397void 1398ArmSev::invoke(ThreadContext *tc, StaticInstPtr inst) { 1399 DPRINTF(Faults, "Invoking ArmSev Fault\n"); 1400 if (!FullSystem) 1401 return; 1402 1403 // Set sev_mailbox to 1, clear the pending interrupt from remote 1404 // SEV execution and let pipeline continue as pcState is still 1405 // valid. 1406 tc->setMiscReg(MISCREG_SEV_MAILBOX, 1); 1407 tc->getCpuPtr()->clearInterrupt(INT_SEV, 0); 1408} 1409 |
268// return via SUBS pc, lr, xxx; rfe, movs, ldm | 1410// Instantiate all the templates to make the linker happy 1411template class ArmFaultVals<Reset>; 1412template class ArmFaultVals<UndefinedInstruction>; 1413template class ArmFaultVals<SupervisorCall>; 1414template class ArmFaultVals<SecureMonitorCall>; 1415template class ArmFaultVals<HypervisorCall>; 1416template class ArmFaultVals<PrefetchAbort>; 1417template class ArmFaultVals<DataAbort>; 1418template class ArmFaultVals<VirtualDataAbort>; 1419template class ArmFaultVals<HypervisorTrap>; 1420template class ArmFaultVals<Interrupt>; 1421template class ArmFaultVals<VirtualInterrupt>; 1422template class ArmFaultVals<FastInterrupt>; 1423template class ArmFaultVals<VirtualFastInterrupt>; 1424template class ArmFaultVals<SupervisorTrap>; 1425template class ArmFaultVals<SecureMonitorTrap>; 1426template class ArmFaultVals<PCAlignmentFault>; 1427template class ArmFaultVals<SPAlignmentFault>; 1428template class ArmFaultVals<SystemError>; 1429template class ArmFaultVals<FlushPipe>; 1430template class ArmFaultVals<ArmSev>; 1431template class AbortFault<PrefetchAbort>; 1432template class AbortFault<DataAbort>; 1433template class AbortFault<VirtualDataAbort>; |
269 | 1434 |
1435 1436IllegalInstSetStateFault::IllegalInstSetStateFault() 1437{} 1438 1439 |
270} // namespace ArmISA | 1440} // namespace ArmISA |