faults.cc revision 13394
/*
 * Copyright (c) 2010, 2012-2014, 2016-2018 ARM Limited
 * All rights reserved
 *
 * The license below extends only to copyright in the software and shall
 * not be construed as granting a license to any other intellectual
 * property including but not limited to intellectual property relating
 * to a hardware implementation of the functionality of the software
 * licensed hereunder. You may use the software subject to the license
 * terms below provided that you ensure that this notice is replicated
 * unmodified and in its entirety in all distributions of the software,
 * modified or unmodified, in source code or in binary form.
 *
 * Copyright (c) 2003-2005 The Regents of The University of Michigan
 * Copyright (c) 2007-2008 The Florida State University
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met: redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer;
 * redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution;
 * neither the name of the copyright holders nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * Authors: Ali Saidi
 *          Gabe Black
 *          Giacomo Gabrielli
 *          Thomas Grocutt
 */

#include "arch/arm/faults.hh"

#include "arch/arm/insts/static_inst.hh"
#include "arch/arm/system.hh"
#include "arch/arm/utility.hh"
#include "base/compiler.hh"
#include "base/trace.hh"
#include "cpu/base.hh"
#include "cpu/thread_context.hh"
#include "debug/Faults.hh"
#include "sim/full_system.hh"

namespace ArmISA
{

uint8_t ArmFault::shortDescFaultSources[] = {
    0x01,  // AlignmentFault
    0x04,  // InstructionCacheMaintenance
    0xff,  // SynchExtAbtOnTranslTableWalkL0 (INVALID)
    0x0c,  // SynchExtAbtOnTranslTableWalkL1
    0x0e,  // SynchExtAbtOnTranslTableWalkL2
    0xff,  // SynchExtAbtOnTranslTableWalkL3 (INVALID)
    0xff,  // SynchPtyErrOnTranslTableWalkL0 (INVALID)
    0x1c,  // SynchPtyErrOnTranslTableWalkL1
    0x1e,  // SynchPtyErrOnTranslTableWalkL2
    0xff,  // SynchPtyErrOnTranslTableWalkL3 (INVALID)
    0xff,  // TranslationL0 (INVALID)
    0x05,  // TranslationL1
    0x07,  // TranslationL2
    0xff,  // TranslationL3 (INVALID)
    0xff,  // AccessFlagL0 (INVALID)
    0x03,  // AccessFlagL1
    0x06,  // AccessFlagL2
    0xff,  // AccessFlagL3 (INVALID)
    0xff,  // DomainL0 (INVALID)
    0x09,  // DomainL1
    0x0b,  // DomainL2
    0xff,  // DomainL3 (INVALID)
    0xff,  // PermissionL0 (INVALID)
    0x0d,  // PermissionL1
    0x0f,  // PermissionL2
    0xff,  // PermissionL3 (INVALID)
    0x02,  // DebugEvent
    0x08,  // SynchronousExternalAbort
    0x10,  // TLBConflictAbort
    0x19,  // SynchPtyErrOnMemoryAccess
    0x16,  // AsynchronousExternalAbort
    0x18,  // AsynchPtyErrOnMemoryAccess
    0xff,  // AddressSizeL0 (INVALID)
    0xff,  // AddressSizeL1 (INVALID)
    0xff,  // AddressSizeL2 (INVALID)
    0xff,  // AddressSizeL3 (INVALID)
    0x40,  // PrefetchTLBMiss
    0x80   // PrefetchUncacheable
};

static_assert(sizeof(ArmFault::shortDescFaultSources) ==
              ArmFault::NumFaultSources,
              "Invalid size of ArmFault::shortDescFaultSources[]");

uint8_t ArmFault::longDescFaultSources[] = {
    0x21,  // AlignmentFault
    0xff,  // InstructionCacheMaintenance (INVALID)
    0xff,  // SynchExtAbtOnTranslTableWalkL0 (INVALID)
    0x15,  // SynchExtAbtOnTranslTableWalkL1
    0x16,  // SynchExtAbtOnTranslTableWalkL2
    0x17,  // SynchExtAbtOnTranslTableWalkL3
    0xff,  // SynchPtyErrOnTranslTableWalkL0 (INVALID)
    0x1d,  // SynchPtyErrOnTranslTableWalkL1
    0x1e,  // SynchPtyErrOnTranslTableWalkL2
    0x1f,  // SynchPtyErrOnTranslTableWalkL3
    0xff,  // TranslationL0 (INVALID)
    0x05,  // TranslationL1
    0x06,  // TranslationL2
    0x07,  // TranslationL3
    0xff,  // AccessFlagL0 (INVALID)
    0x09,  // AccessFlagL1
    0x0a,  // AccessFlagL2
    0x0b,  // AccessFlagL3
    0xff,  // DomainL0 (INVALID)
    0x3d,  // DomainL1
    0x3e,  // DomainL2
    0xff,  // DomainL3 (RESERVED)
    0xff,  // PermissionL0 (INVALID)
    0x0d,  // PermissionL1
    0x0e,  // PermissionL2
    0x0f,  // PermissionL3
    0x22,  // DebugEvent
    0x10,  // SynchronousExternalAbort
    0x30,  // TLBConflictAbort
    0x18,  // SynchPtyErrOnMemoryAccess
    0x11,  // AsynchronousExternalAbort
    0x19,  // AsynchPtyErrOnMemoryAccess
    0xff,  // AddressSizeL0 (INVALID)
    0xff,  // AddressSizeL1 (INVALID)
    0xff,  // AddressSizeL2 (INVALID)
    0xff,  // AddressSizeL3 (INVALID)
    0x40,  // PrefetchTLBMiss
    0x80   // PrefetchUncacheable
};

static_assert(sizeof(ArmFault::longDescFaultSources) ==
              ArmFault::NumFaultSources,
              "Invalid size of ArmFault::longDescFaultSources[]");

uint8_t ArmFault::aarch64FaultSources[] = {
    0x21,  // AlignmentFault
    0xff,  // InstructionCacheMaintenance (INVALID)
    0x14,  // SynchExtAbtOnTranslTableWalkL0
    0x15,  // SynchExtAbtOnTranslTableWalkL1
    0x16,  // SynchExtAbtOnTranslTableWalkL2
    0x17,  // SynchExtAbtOnTranslTableWalkL3
    0x1c,  // SynchPtyErrOnTranslTableWalkL0
    0x1d,  // SynchPtyErrOnTranslTableWalkL1
    0x1e,  // SynchPtyErrOnTranslTableWalkL2
    0x1f,  // SynchPtyErrOnTranslTableWalkL3
    0x04,  // TranslationL0
    0x05,  // TranslationL1
    0x06,  // TranslationL2
    0x07,  // TranslationL3
    0x08,  // AccessFlagL0
    0x09,  // AccessFlagL1
    0x0a,  // AccessFlagL2
    0x0b,  // AccessFlagL3
    // @todo: Section & Page Domain Fault in AArch64?
    0xff,  // DomainL0 (INVALID)
    0xff,  // DomainL1 (INVALID)
    0xff,  // DomainL2 (INVALID)
    0xff,  // DomainL3 (INVALID)
    0x0c,  // PermissionL0
    0x0d,  // PermissionL1
    0x0e,  // PermissionL2
    0x0f,  // PermissionL3
    0x22,  // DebugEvent
    0x10,  // SynchronousExternalAbort
    0x30,  // TLBConflictAbort
    0x18,  // SynchPtyErrOnMemoryAccess
    0xff,  // AsynchronousExternalAbort (INVALID)
    0xff,  // AsynchPtyErrOnMemoryAccess (INVALID)
    0x00,  // AddressSizeL0
    0x01,  // AddressSizeL1
    0x02,  // AddressSizeL2
    0x03,  // AddressSizeL3
    0x40,  // PrefetchTLBMiss
    0x80   // PrefetchUncacheable
};

static_assert(sizeof(ArmFault::aarch64FaultSources) ==
              ArmFault::NumFaultSources,
              "Invalid size of ArmFault::aarch64FaultSources[]");
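
// Each of the three tables above is indexed by ArmFault::FaultSource and
// gives the fault status code (FSC) reported in the corresponding syndrome
// format; 0xff marks sources that have no encoding in that format. For
// example, a level-1 translation fault (TranslationL1) is reported as 0x05
// in all three formats, whereas a level-0 translation fault only has an
// encoding in the AArch64 table (0x04).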

// Fields: name, offset, cur{ELT,ELH}Offset, lowerEL{64,32}Offset, next mode,
//         {ARM, Thumb, ARM_ELR, Thumb_ELR} PC offset, hyp trap,
//         {A, F} disable, class, stat
template<> ArmFault::FaultVals ArmFaultVals<Reset>::vals(
    // Some dummy values (the reset vector has an IMPLEMENTATION DEFINED
    // location in AArch64)
    "Reset", 0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC,
    0, 0, 0, 0, false, true, true, EC_UNKNOWN
);
template<> ArmFault::FaultVals ArmFaultVals<UndefinedInstruction>::vals(
    "Undefined Instruction", 0x004, 0x000, 0x200, 0x400, 0x600, MODE_UNDEFINED,
    4, 2, 0, 0, true, false, false, EC_UNKNOWN
);
template<> ArmFault::FaultVals ArmFaultVals<SupervisorCall>::vals(
    "Supervisor Call", 0x008, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    4, 2, 4, 2, true, false, false, EC_SVC_TO_HYP
);
template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorCall>::vals(
    "Secure Monitor Call", 0x008, 0x000, 0x200, 0x400, 0x600, MODE_MON,
    4, 4, 4, 4, false, true, true, EC_SMC_TO_HYP
);
template<> ArmFault::FaultVals ArmFaultVals<HypervisorCall>::vals(
    "Hypervisor Call", 0x008, 0x000, 0x200, 0x400, 0x600, MODE_HYP,
    4, 4, 4, 4, true, false, false, EC_HVC
);
template<> ArmFault::FaultVals ArmFaultVals<PrefetchAbort>::vals(
    "Prefetch Abort", 0x00C, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
    4, 4, 0, 0, true, true, false, EC_PREFETCH_ABORT_TO_HYP
);
template<> ArmFault::FaultVals ArmFaultVals<DataAbort>::vals(
    "Data Abort", 0x010, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
    8, 8, 0, 0, true, true, false, EC_DATA_ABORT_TO_HYP
);
template<> ArmFault::FaultVals ArmFaultVals<VirtualDataAbort>::vals(
    "Virtual Data Abort", 0x010, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
    8, 8, 0, 0, true, true, false, EC_INVALID
);
template<> ArmFault::FaultVals ArmFaultVals<HypervisorTrap>::vals(
    // @todo: double check these values
    "Hypervisor Trap", 0x014, 0x000, 0x200, 0x400, 0x600, MODE_HYP,
    0, 0, 0, 0, false, false, false, EC_UNKNOWN
);
template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorTrap>::vals(
    "Secure Monitor Trap", 0x004, 0x000, 0x200, 0x400, 0x600, MODE_MON,
    4, 2, 0, 0, false, false, false, EC_UNKNOWN
);
template<> ArmFault::FaultVals ArmFaultVals<Interrupt>::vals(
    "IRQ", 0x018, 0x080, 0x280, 0x480, 0x680, MODE_IRQ,
    4, 4, 0, 0, false, true, false, EC_UNKNOWN
);
template<> ArmFault::FaultVals ArmFaultVals<VirtualInterrupt>::vals(
    "Virtual IRQ", 0x018, 0x080, 0x280, 0x480, 0x680, MODE_IRQ,
    4, 4, 0, 0, false, true, false, EC_INVALID
);
template<> ArmFault::FaultVals ArmFaultVals<FastInterrupt>::vals(
    "FIQ", 0x01C, 0x100, 0x300, 0x500, 0x700, MODE_FIQ,
    4, 4, 0, 0, false, true, true, EC_UNKNOWN
);
template<> ArmFault::FaultVals ArmFaultVals<VirtualFastInterrupt>::vals(
    "Virtual FIQ", 0x01C, 0x100, 0x300, 0x500, 0x700, MODE_FIQ,
    4, 4, 0, 0, false, true, true, EC_INVALID
);
template<> ArmFault::FaultVals ArmFaultVals<IllegalInstSetStateFault>::vals(
    "Illegal Inst Set State Fault", 0x004, 0x000, 0x200, 0x400, 0x600,
    MODE_UNDEFINED, 4, 2, 0, 0, true, false, false, EC_ILLEGAL_INST
);
template<> ArmFault::FaultVals ArmFaultVals<SupervisorTrap>::vals(
    // Some dummy values (SupervisorTrap is AArch64-only)
    "Supervisor Trap", 0x014, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, false, false, false, EC_UNKNOWN
);
template<> ArmFault::FaultVals ArmFaultVals<PCAlignmentFault>::vals(
    // Some dummy values (PCAlignmentFault is AArch64-only)
    "PC Alignment Fault", 0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, true, false, false, EC_PC_ALIGNMENT
);
template<> ArmFault::FaultVals ArmFaultVals<SPAlignmentFault>::vals(
    // Some dummy values (SPAlignmentFault is AArch64-only)
    "SP Alignment Fault", 0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, true, false, false, EC_STACK_PTR_ALIGNMENT
);
template<> ArmFault::FaultVals ArmFaultVals<SystemError>::vals(
    // Some dummy values (SError is AArch64-only)
    "SError", 0x000, 0x180, 0x380, 0x580, 0x780, MODE_SVC,
    0, 0, 0, 0, false, true, true, EC_SERROR
);
template<> ArmFault::FaultVals ArmFaultVals<SoftwareBreakpoint>::vals(
    // Some dummy values (SoftwareBreakpoint is AArch64-only)
    "Software Breakpoint", 0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, true, false, false, EC_SOFTWARE_BREAKPOINT
);
template<> ArmFault::FaultVals ArmFaultVals<ArmSev>::vals(
    // Some dummy values
    "ArmSev Flush", 0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC,
    0, 0, 0, 0, false, true, true, EC_UNKNOWN
);
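
// Note on getVector() below: for AArch32 the vector base is chosen from the
// mode being entered. Monitor and Hyp mode have dedicated base registers
// (MVBAR and HVBAR); all other modes use the high vectors (HighVecs) when
// SCTLR.V is set, and otherwise VBAR if the Security Extensions are
// implemented, or address 0 if they are not. The per-fault offset from the
// FaultVals table above is then added to that base.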

Addr
ArmFault::getVector(ThreadContext *tc)
{
    Addr base;

    // ARM ARM issue C B1.8.1
    bool haveSecurity = ArmSystem::haveSecurity(tc);

    // Check for invalid modes
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    assert(haveSecurity || cpsr.mode != MODE_MON);
    assert(ArmSystem::haveVirtualization(tc) || cpsr.mode != MODE_HYP);

    switch (cpsr.mode)
    {
      case MODE_MON:
        base = tc->readMiscReg(MISCREG_MVBAR);
        break;
      case MODE_HYP:
        base = tc->readMiscReg(MISCREG_HVBAR);
        break;
      default:
        SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR);
        if (sctlr.v) {
            base = HighVecs;
        } else {
            base = haveSecurity ? tc->readMiscReg(MISCREG_VBAR) : 0;
        }
        break;
    }

    return base + offset(tc);
}

Addr
ArmFault::getVector64(ThreadContext *tc)
{
    Addr vbar;
    switch (toEL) {
      case EL3:
        assert(ArmSystem::haveSecurity(tc));
        vbar = tc->readMiscReg(MISCREG_VBAR_EL3);
        break;
      case EL2:
        assert(ArmSystem::haveVirtualization(tc));
        vbar = tc->readMiscReg(MISCREG_VBAR_EL2);
        break;
      case EL1:
        vbar = tc->readMiscReg(MISCREG_VBAR_EL1);
        break;
      default:
        panic("Invalid target exception level");
        break;
    }
    return vbar + offset64(tc);
}

MiscRegIndex
ArmFault::getSyndromeReg64() const
{
    switch (toEL) {
      case EL1:
        return MISCREG_ESR_EL1;
      case EL2:
        return MISCREG_ESR_EL2;
      case EL3:
        return MISCREG_ESR_EL3;
      default:
        panic("Invalid exception level");
        break;
    }
}

MiscRegIndex
ArmFault::getFaultAddrReg64() const
{
    switch (toEL) {
      case EL1:
        return MISCREG_FAR_EL1;
      case EL2:
        return MISCREG_FAR_EL2;
      case EL3:
        return MISCREG_FAR_EL3;
      default:
        panic("Invalid exception level");
        break;
    }
}

void
ArmFault::setSyndrome(ThreadContext *tc, MiscRegIndex syndrome_reg)
{
    uint32_t value;
    uint32_t exc_class = (uint32_t) ec(tc);
    uint32_t issVal = iss();

    assert(!from64 || ArmSystem::highestELIs64(tc));

    value = exc_class << 26;

    // HSR.IL is not valid for Prefetch Aborts (0x20, 0x21) and Data Aborts
    // (0x24, 0x25) for which the ISS information is not valid (ARMv7).
    // @todo: ARMv8 revises AArch32 functionality: when HSR.IL is not
    // valid it is treated as RES1.
    if (to64) {
        value |= 1 << 25;
    } else if ((bits(exc_class, 5, 3) != 4) ||
               (bits(exc_class, 2) && bits(issVal, 24))) {
        if (!machInst.thumb || machInst.bigThumb)
            value |= 1 << 25;
    }
    // The condition code field is only valid when EC[5:4] is zero and
    // EC[3:0] is nonzero
    if (!from64 && ((bits(exc_class, 5, 4) == 0) &&
                    (bits(exc_class, 3, 0) != 0))) {
        if (!machInst.thumb) {
            uint32_t cond;
            ConditionCode condCode = (ConditionCode) (uint32_t) machInst.condCode;
            // If it's an unconditional instruction, report it with a cond
            // code of 0xE, i.e. the always-execute encoding
            cond = (condCode == COND_UC) ? COND_AL : condCode;
            value |= cond << 20;
            value |= 1 << 24;
        }
        value |= bits(issVal, 19, 0);
    } else {
        value |= issVal;
    }
    tc->setMiscReg(syndrome_reg, value);
}

void
ArmFault::update(ThreadContext *tc)
{
    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);

    // Determine source exception level and mode
    fromMode = (OperatingMode) (uint8_t) cpsr.mode;
    fromEL = opModeToEL(fromMode);
    if (opModeIs64(fromMode))
        from64 = true;

    // Determine target exception level (AArch64) or target execution
    // mode (AArch32).
    if (ArmSystem::haveSecurity(tc) && routeToMonitor(tc)) {
        toMode = MODE_MON;
        toEL = EL3;
    } else if (ArmSystem::haveVirtualization(tc) && routeToHyp(tc)) {
        toMode = MODE_HYP;
        toEL = EL2;
        hypRouted = true;
    } else {
        toMode = nextMode();
        toEL = opModeToEL(toMode);
    }

    if (fromEL > toEL)
        toEL = fromEL;

    to64 = ELIs64(tc, toEL);

    // The fault-specific information has been updated; it can now be
    // used inside the fault.
    faultUpdated = true;
}

void
ArmFault::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    // Update the fault state information, such as the starting mode
    // (AArch32) or EL (AArch64) and the target mode or EL. update() also
    // determines whether the fault must be handled in AArch64 state (to64).
    update(tc);

    if (to64) {
        // Invoke exception handler in AArch64 state
        invoke64(tc, inst);
        return;
    }

    // ARMv7 (ARM ARM issue C B1.9)

    bool have_security = ArmSystem::haveSecurity(tc);

    FaultBase::invoke(tc);
    if (!FullSystem)
        return;
    countStat()++;

    SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR);
    SCR scr = tc->readMiscReg(MISCREG_SCR);
    CPSR saved_cpsr = tc->readMiscReg(MISCREG_CPSR);
    saved_cpsr.nz = tc->readCCReg(CCREG_NZ);
    saved_cpsr.c = tc->readCCReg(CCREG_C);
    saved_cpsr.v = tc->readCCReg(CCREG_V);
    saved_cpsr.ge = tc->readCCReg(CCREG_GE);

    Addr curPc M5_VAR_USED = tc->pcState().pc();
    ITSTATE it = tc->pcState().itstate();
    saved_cpsr.it2 = it.top6;
    saved_cpsr.it1 = it.bottom2;

    // If we have a valid instruction then use it to annotate this fault with
    // extra information. This is used to generate the correct fault syndrome
    // information
    if (inst) {
        ArmStaticInst *armInst = static_cast<ArmStaticInst *>(inst.get());
        armInst->annotateFault(this);
    }

    // Ensure Secure state if initially in Monitor mode
    if (have_security && saved_cpsr.mode == MODE_MON) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        if (scr.ns) {
            scr.ns = 0;
            tc->setMiscRegNoEffect(MISCREG_SCR, scr);
        }
    }

    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
    cpsr.mode = toMode;

    // Some bits are set differently if we have been routed to Hyp mode
    if (cpsr.mode == MODE_HYP) {
        SCTLR hsctlr = tc->readMiscReg(MISCREG_HSCTLR);
        cpsr.t = hsctlr.te;
        cpsr.e = hsctlr.ee;
        if (!scr.ea)  {cpsr.a = 1;}
        if (!scr.fiq) {cpsr.f = 1;}
        if (!scr.irq) {cpsr.i = 1;}
    } else if (cpsr.mode == MODE_MON) {
        // Special case handling when entering monitor mode
        cpsr.t = sctlr.te;
        cpsr.e = sctlr.ee;
        cpsr.a = 1;
        cpsr.f = 1;
        cpsr.i = 1;
    } else {
        cpsr.t = sctlr.te;
        cpsr.e = sctlr.ee;

        // The *Disable functions are virtual and different per fault
        cpsr.a = cpsr.a | abortDisable(tc);
        cpsr.f = cpsr.f | fiqDisable(tc);
        cpsr.i = 1;
    }
    cpsr.it1 = cpsr.it2 = 0;
    cpsr.j = 0;
    tc->setMiscReg(MISCREG_CPSR, cpsr);

    // Make sure the SEV mailbox is always set to one
    tc->setMiscReg(MISCREG_SEV_MAILBOX, 1);

    // Clear the exclusive monitor
    tc->setMiscReg(MISCREG_LOCKFLAG, 0);
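
    // The preferred return address is the current PC plus a fault-specific
    // adjustment (armPcOffset/thumbPcOffset, taken from the FaultVals table).
    // It is written to ELR_hyp when the exception is taken to Hyp mode, and
    // to the banked LR of the target mode otherwise.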
    if (cpsr.mode == MODE_HYP) {
        tc->setMiscReg(MISCREG_ELR_HYP, curPc +
                (saved_cpsr.t ? thumbPcOffset(true) : armPcOffset(true)));
    } else {
        tc->setIntReg(INTREG_LR, curPc +
                (saved_cpsr.t ? thumbPcOffset(false) : armPcOffset(false)));
    }

    switch (cpsr.mode) {
      case MODE_FIQ:
        tc->setMiscReg(MISCREG_SPSR_FIQ, saved_cpsr);
        break;
      case MODE_IRQ:
        tc->setMiscReg(MISCREG_SPSR_IRQ, saved_cpsr);
        break;
      case MODE_SVC:
        tc->setMiscReg(MISCREG_SPSR_SVC, saved_cpsr);
        break;
      case MODE_MON:
        assert(have_security);
        tc->setMiscReg(MISCREG_SPSR_MON, saved_cpsr);
        break;
      case MODE_ABORT:
        tc->setMiscReg(MISCREG_SPSR_ABT, saved_cpsr);
        break;
      case MODE_UNDEFINED:
        tc->setMiscReg(MISCREG_SPSR_UND, saved_cpsr);
        if (ec(tc) != EC_UNKNOWN)
            setSyndrome(tc, MISCREG_HSR);
        break;
      case MODE_HYP:
        assert(ArmSystem::haveVirtualization(tc));
        tc->setMiscReg(MISCREG_SPSR_HYP, saved_cpsr);
        setSyndrome(tc, MISCREG_HSR);
        break;
      default:
        panic("unknown Mode\n");
    }

    Addr newPc = getVector(tc);
    DPRINTF(Faults, "Invoking Fault:%s cpsr:%#x PC:%#x lr:%#x newVec: %#x\n",
            name(), cpsr, curPc, tc->readIntReg(INTREG_LR), newPc);
    PCState pc(newPc);
    pc.thumb(cpsr.t);
    pc.nextThumb(pc.thumb());
    pc.jazelle(cpsr.j);
    pc.nextJazelle(pc.jazelle());
    pc.aarch64(!cpsr.width);
    pc.nextAArch64(!cpsr.width);
    pc.illegalExec(false);
    tc->pcState(pc);
}

void
ArmFault::invoke64(ThreadContext *tc, const StaticInstPtr &inst)
{
    // Determine actual misc. register indices for ELR_ELx and SPSR_ELx
    MiscRegIndex elr_idx, spsr_idx;
    switch (toEL) {
      case EL1:
        elr_idx = MISCREG_ELR_EL1;
        spsr_idx = MISCREG_SPSR_EL1;
        break;
      case EL2:
        assert(ArmSystem::haveVirtualization(tc));
        elr_idx = MISCREG_ELR_EL2;
        spsr_idx = MISCREG_SPSR_EL2;
        break;
      case EL3:
        assert(ArmSystem::haveSecurity(tc));
        elr_idx = MISCREG_ELR_EL3;
        spsr_idx = MISCREG_SPSR_EL3;
        break;
      default:
        panic("Invalid target exception level");
        break;
    }

    // Save process state into SPSR_ELx
    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
    CPSR spsr = cpsr;
    spsr.nz = tc->readCCReg(CCREG_NZ);
    spsr.c = tc->readCCReg(CCREG_C);
    spsr.v = tc->readCCReg(CCREG_V);
    if (from64) {
        // Force some bitfields to 0
        spsr.q = 0;
        spsr.it1 = 0;
        spsr.j = 0;
        spsr.res0_23_22 = 0;
        spsr.ge = 0;
        spsr.it2 = 0;
        spsr.t = 0;
    } else {
        spsr.ge = tc->readCCReg(CCREG_GE);
        ITSTATE it = tc->pcState().itstate();
        spsr.it2 = it.top6;
        spsr.it1 = it.bottom2;
        // Force some bitfields to 0
        spsr.res0_23_22 = 0;
        spsr.ss = 0;
    }
    tc->setMiscReg(spsr_idx, spsr);

    // Save preferred return address into ELR_ELx
    Addr curr_pc = tc->pcState().pc();
    Addr ret_addr = curr_pc;
    if (from64)
        ret_addr += armPcElrOffset();
    else
        ret_addr += spsr.t ? thumbPcElrOffset() : armPcElrOffset();
    tc->setMiscReg(elr_idx, ret_addr);

    Addr vec_address = getVector64(tc);

    // Update process state
    OperatingMode64 mode = 0;
    mode.spX = 1;
    mode.el = toEL;
    mode.width = 0;
    cpsr.mode = mode;
    cpsr.daif = 0xf;
    cpsr.il = 0;
    cpsr.ss = 0;
    tc->setMiscReg(MISCREG_CPSR, cpsr);

    // Set PC to start of exception handler
    Addr new_pc = purifyTaggedAddr(vec_address, tc, toEL);
    DPRINTF(Faults, "Invoking Fault (AArch64 target EL):%s cpsr:%#x PC:%#x "
            "elr:%#x newVec: %#x\n", name(), cpsr, curr_pc, ret_addr, new_pc);
    PCState pc(new_pc);
    pc.aarch64(!cpsr.width);
    pc.nextAArch64(!cpsr.width);
    pc.illegalExec(false);
    tc->pcState(pc);

    // If we have a valid instruction then use it to annotate this fault with
    // extra information. This is used to generate the correct fault syndrome
    // information
    if (inst)
        static_cast<ArmStaticInst *>(inst.get())->annotateFault(this);
    // Save exception syndrome
    if ((nextMode() != MODE_IRQ) && (nextMode() != MODE_FIQ))
        setSyndrome(tc, getSyndromeReg64());
}

void
Reset::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (FullSystem) {
        tc->getCpuPtr()->clearInterrupts(tc->threadId());
        tc->clearArchRegs();
    }
    if (!ArmSystem::highestELIs64(tc)) {
        ArmFault::invoke(tc, inst);
        tc->setMiscReg(MISCREG_VMPIDR,
                       getMPIDR(dynamic_cast<ArmSystem*>(tc->getSystemPtr()), tc));

        // Unless we have SMC code to get us there, boot in HYP!
        if (ArmSystem::haveVirtualization(tc) &&
            !ArmSystem::haveSecurity(tc)) {
            CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
            cpsr.mode = MODE_HYP;
            tc->setMiscReg(MISCREG_CPSR, cpsr);
        }
    } else {
        // Advance the PC to the IMPLEMENTATION DEFINED reset value
        PCState pc = ArmSystem::resetAddr64(tc);
        pc.aarch64(true);
        pc.nextAArch64(true);
        tc->pcState(pc);
    }
}

void
UndefinedInstruction::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (FullSystem) {
        ArmFault::invoke(tc, inst);
        return;
    }

    // If the mnemonic isn't defined this has to be an unknown instruction.
    assert(unknown || mnemonic != NULL);
    if (disabled) {
        panic("Attempted to execute disabled instruction "
              "'%s' (inst 0x%08x)", mnemonic, machInst);
    } else if (unknown) {
        panic("Attempted to execute unknown instruction (inst 0x%08x)",
              machInst);
    } else {
        panic("Attempted to execute unimplemented instruction "
              "'%s' (inst 0x%08x)", mnemonic, machInst);
    }
}

bool
UndefinedInstruction::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);

    // if in Hyp mode then stay in Hyp mode
    toHyp = scr.ns && (cpsr.mode == MODE_HYP);
    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.mode == MODE_USER);
    return toHyp;
}
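
// When the EC is overridden (i.e. the undefined instruction is reported as a
// trapped system/coprocessor register access), iss() below repacks the
// op0/op1/op2, CRn/CRm, Rt and direction fields extracted from the
// instruction into the syndrome layout used for such traps; the shift
// amounts in the code give the exact bit positions.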
uint32_t
UndefinedInstruction::iss() const
{
    // If UndefinedInstruction is routed to hypervisor, iss field is 0.
    if (hypRouted) {
        return 0;
    }

    if (overrideEc == EC_INVALID)
        return issRaw;

    uint32_t new_iss = 0;
    uint32_t op0, op1, op2, CRn, CRm, Rt, dir;

    dir = bits(machInst, 21, 21);
    op0 = bits(machInst, 20, 19);
    op1 = bits(machInst, 18, 16);
    CRn = bits(machInst, 15, 12);
    CRm = bits(machInst, 11, 8);
    op2 = bits(machInst, 7, 5);
    Rt = bits(machInst, 4, 0);

    new_iss = op0 << 20 | op2 << 17 | op1 << 14 | CRn << 10 |
              Rt << 5 | CRm << 1 | dir;

    return new_iss;
}

void
SupervisorCall::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (FullSystem) {
        ArmFault::invoke(tc, inst);
        return;
    }

    // As of now, there isn't a 32 bit thumb version of this instruction.
    assert(!machInst.bigThumb);
    uint32_t callNum;
    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
    OperatingMode mode = (OperatingMode)(uint8_t)cpsr.mode;
    if (opModeIs64(mode))
        callNum = tc->readIntReg(INTREG_X8);
    else
        callNum = tc->readIntReg(INTREG_R7);
    Fault fault;
    tc->syscall(callNum, &fault);

    // Advance the PC since that won't happen automatically.
    PCState pc = tc->pcState();
    assert(inst);
    inst->advancePC(pc);
    tc->pcState(pc);
}

bool
SupervisorCall::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);

    // if in Hyp mode then stay in Hyp mode
    toHyp = scr.ns && (cpsr.mode == MODE_HYP);
    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.mode == MODE_USER);
    return toHyp;
}

ExceptionClass
SupervisorCall::ec(ThreadContext *tc) const
{
    return (overrideEc != EC_INVALID) ? overrideEc :
        (from64 ? EC_SVC_64 : vals.ec);
}

uint32_t
SupervisorCall::iss() const
{
    // Even if we have a 24-bit immediate from an ARM32 instruction, we only
    // use the bottom 16 bits for the ISS value (it doesn't hurt for AArch64
    // SVC).
    return issRaw & 0xFFFF;
}

uint32_t
SecureMonitorCall::iss() const
{
    if (from64)
        return bits(machInst, 20, 5);
    return 0;
}

ExceptionClass
UndefinedInstruction::ec(ThreadContext *tc) const
{
    // If UndefinedInstruction is routed to hypervisor,
    // HSR.EC field is 0.
    if (hypRouted)
        return EC_UNKNOWN;
    else
        return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
}


HypervisorCall::HypervisorCall(ExtMachInst _machInst, uint32_t _imm) :
    ArmFaultVals<HypervisorCall>(_machInst, _imm)
{}

ExceptionClass
HypervisorCall::ec(ThreadContext *tc) const
{
    return from64 ? EC_HVC_64 : vals.ec;
}

ExceptionClass
HypervisorTrap::ec(ThreadContext *tc) const
{
    return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
}

template<class T>
FaultOffset
ArmFaultVals<T>::offset(ThreadContext *tc)
{
    bool isHypTrap = false;

    // Normally we just use the exception vector from the table at the top of
    // this file. However, if this exception has caused a transition to Hyp
    // mode, and it's an exception type that would only do this if it has
    // been trapped, then we use the Hyp trap vector instead of the normal
    // vector.
    if (vals.hypTrappable) {
        CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
        if (cpsr.mode == MODE_HYP) {
            CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
            isHypTrap = spsr.mode != MODE_HYP;
        }
    }
    return isHypTrap ? 0x14 : vals.offset;
}

template<class T>
FaultOffset
ArmFaultVals<T>::offset64(ThreadContext *tc)
{
    if (toEL == fromEL) {
        if (opModeIsT(fromMode))
            return vals.currELTOffset;
        return vals.currELHOffset;
    } else {
        bool lower_32 = false;
        if (toEL == EL3) {
            if (!inSecureState(tc) && ArmSystem::haveEL(tc, EL2))
                lower_32 = ELIs32(tc, EL2);
            else
                lower_32 = ELIs32(tc, EL1);
        } else {
            lower_32 = ELIs32(tc, static_cast<ExceptionLevel>(toEL - 1));
        }

        if (lower_32)
            return vals.lowerEL32Offset;
        return vals.lowerEL64Offset;
    }
}

// void
// SupervisorCall::setSyndrome64(ThreadContext *tc, MiscRegIndex esr_idx)
// {
//     ESR esr = 0;
//     esr.ec = machInst.aarch64 ? SvcAArch64 : SvcAArch32;
//     esr.il = !machInst.thumb;
//     if (machInst.aarch64)
//         esr.imm16 = bits(machInst.instBits, 20, 5);
//     else if (machInst.thumb)
//         esr.imm16 = bits(machInst.instBits, 7, 0);
//     else
//         esr.imm16 = bits(machInst.instBits, 15, 0);
//     tc->setMiscReg(esr_idx, esr);
// }

void
SecureMonitorCall::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (FullSystem) {
        ArmFault::invoke(tc, inst);
        return;
    }
}

ExceptionClass
SecureMonitorCall::ec(ThreadContext *tc) const
{
    return (from64 ? EC_SMC_64 : vals.ec);
}

bool
SupervisorTrap::routeToHyp(ThreadContext *tc) const
{
    bool toHyp = false;

    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);

    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.el == EL0);
    return toHyp;
}

uint32_t
SupervisorTrap::iss() const
{
    // If SupervisorTrap is routed to hypervisor, iss field is 0.
    if (hypRouted) {
        return 0;
    }
    return issRaw;
}

ExceptionClass
SupervisorTrap::ec(ThreadContext *tc) const
{
    if (hypRouted)
        return EC_UNKNOWN;
    else
        return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
}

ExceptionClass
SecureMonitorTrap::ec(ThreadContext *tc) const
{
    return (overrideEc != EC_INVALID) ? overrideEc :
        (from64 ? EC_SMC_64 : vals.ec);
}
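
// AbortFault<T>::invoke() below first pins down which translation regime the
// abort came from: if the caller did not say, it assumes the long-descriptor
// (LPAE) format when that format is in use and the short-descriptor (VMSA)
// format otherwise. The choice matters because it selects which fault-source
// table supplies the FSC and how the FSR/HSR is laid out further down.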

template<class T>
void
AbortFault<T>::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (tranMethod == ArmFault::UnknownTran) {
        tranMethod = longDescFormatInUse(tc) ? ArmFault::LpaeTran
                                             : ArmFault::VmsaTran;

        if ((tranMethod == ArmFault::VmsaTran) && this->routeToMonitor(tc)) {
            // See ARM ARM B3-1416
            bool override_LPAE = false;
            TTBCR ttbcr_s = tc->readMiscReg(MISCREG_TTBCR_S);
            TTBCR M5_VAR_USED ttbcr_ns = tc->readMiscReg(MISCREG_TTBCR_NS);
            if (ttbcr_s.eae) {
                override_LPAE = true;
            } else {
                // Unimplemented code option, not seen in testing. May need
                // extension according to the manual excerpt above.
                DPRINTF(Faults, "Warning: Incomplete translation method "
                        "override detected.\n");
            }
            if (override_LPAE)
                tranMethod = ArmFault::LpaeTran;
        }
    }

    if (source == ArmFault::AsynchronousExternalAbort) {
        tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_ABT, 0);
    }
    // Get effective fault source encoding
    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);

    // source must be determined BEFORE invoking generic routines which will
    // try to set hsr etc. and are based upon source!
    ArmFaultVals<T>::invoke(tc, inst);

    if (!this->to64) {  // AArch32
        FSR fsr = getFsr(tc);
        if (cpsr.mode == MODE_HYP) {
            tc->setMiscReg(T::HFarIndex, faultAddr);
        } else if (stage2) {
            tc->setMiscReg(MISCREG_HPFAR, (faultAddr >> 8) & ~0xf);
            tc->setMiscReg(T::HFarIndex,  OVAddr);
        } else {
            tc->setMiscReg(T::FsrIndex, fsr);
            tc->setMiscReg(T::FarIndex, faultAddr);
        }
        DPRINTF(Faults, "Abort Fault source=%#x fsr=%#x faultAddr=%#x "
                "tranMethod=%#x\n", source, fsr, faultAddr, tranMethod);
    } else {  // AArch64
        // Set the FAR register. Nothing else to do if we are in AArch64
        // state because the syndrome register has already been set inside
        // invoke64()
        if (stage2) {
            // stage 2 fault, set HPFAR_EL2 to the faulting IPA
            // and FAR_EL2 to the original VA
            tc->setMiscReg(AbortFault<T>::getFaultAddrReg64(), OVAddr);
            tc->setMiscReg(MISCREG_HPFAR_EL2, bits(faultAddr, 47, 12) << 4);

            DPRINTF(Faults, "Abort Fault (Stage 2) VA: 0x%x IPA: 0x%x\n",
                    OVAddr, faultAddr);
        } else {
            tc->setMiscReg(AbortFault<T>::getFaultAddrReg64(), faultAddr);
        }
    }
}

template<class T>
void
AbortFault<T>::setSyndrome(ThreadContext *tc, MiscRegIndex syndrome_reg)
{
    srcEncoded = getFaultStatusCode(tc);
    if (srcEncoded == ArmFault::FaultSourceInvalid) {
        panic("Invalid fault source\n");
    }
    ArmFault::setSyndrome(tc, syndrome_reg);
}

template<class T>
uint8_t
AbortFault<T>::getFaultStatusCode(ThreadContext *tc) const
{
    panic_if(!this->faultUpdated,
             "Trying to use un-updated ArmFault internal variables\n");

    uint8_t fsc = 0;

    if (!this->to64) {
        // AArch32
        assert(tranMethod != ArmFault::UnknownTran);
        if (tranMethod == ArmFault::LpaeTran) {
            fsc = ArmFault::longDescFaultSources[source];
        } else {
            fsc = ArmFault::shortDescFaultSources[source];
        }
    } else {
        // AArch64
        fsc = ArmFault::aarch64FaultSources[source];
    }

    return fsc;
}

template<class T>
FSR
AbortFault<T>::getFsr(ThreadContext *tc) const
{
    FSR fsr = 0;

    auto fsc = getFaultStatusCode(tc);

    // AArch32
    assert(tranMethod != ArmFault::UnknownTran);
    if (tranMethod == ArmFault::LpaeTran) {
        fsr.status = fsc;
        fsr.lpae   = 1;
    } else {
        fsr.fsLow  = bits(fsc, 3, 0);
        fsr.fsHigh = bits(fsc, 4);
        fsr.domain = static_cast<uint8_t>(domain);
    }

    fsr.wnr = (write ? 1 : 0);
    fsr.ext = 0;

    return fsr;
}

template<class T>
bool
AbortFault<T>::abortDisable(ThreadContext *tc)
{
    if (ArmSystem::haveSecurity(tc)) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        return (!scr.ns || scr.aw);
    }
    return true;
}

template<class T>
void
AbortFault<T>::annotate(ArmFault::AnnotationIDs id, uint64_t val)
{
    switch (id)
    {
      case ArmFault::S1PTW:
        s1ptw = val;
        break;
      case ArmFault::OVA:
        OVAddr = val;
        break;

      // Just ignore unknown IDs
      default:
        break;
    }
}

template<class T>
uint32_t
AbortFault<T>::iss() const
{
    uint32_t val;

    val  = srcEncoded & 0x3F;
    val |= write << 6;
    val |= s1ptw << 7;
    return (val);
}

template<class T>
bool
AbortFault<T>::isMMUFault() const
{
    // NOTE: Not relying on LL information being aligned to lowest bits here
    return
         (source == ArmFault::AlignmentFault)      ||
        ((source >= ArmFault::TranslationLL) &&
         (source <  ArmFault::TranslationLL + 4))  ||
        ((source >= ArmFault::AccessFlagLL) &&
         (source <  ArmFault::AccessFlagLL + 4))   ||
        ((source >= ArmFault::DomainLL) &&
         (source <  ArmFault::DomainLL + 4))       ||
        ((source >= ArmFault::PermissionLL) &&
         (source <  ArmFault::PermissionLL + 4));
}
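
// For AArch32, the HSR EC values for prefetch and data aborts come in pairs:
// the base value from the FaultVals table encodes an abort taken from a mode
// other than Hyp, and the next value up encodes an abort taken from Hyp mode
// itself (compare the 0x20/0x21 and 0x24/0x25 pairs mentioned in
// ArmFault::setSyndrome()). The ec() overrides below implement that +1
// adjustment.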
ExceptionClass
PrefetchAbort::ec(ThreadContext *tc) const
{
    if (to64) {
        // AArch64
        if (toEL == fromEL)
            return EC_PREFETCH_ABORT_CURR_EL;
        else
            return EC_PREFETCH_ABORT_LOWER_EL;
    } else {
        // AArch32
        // Abort faults have different EC codes depending on whether
        // the fault originated within HYP mode, or not. So override
        // the method and add the extra adjustment of the EC value.

        ExceptionClass ec = ArmFaultVals<PrefetchAbort>::vals.ec;

        CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
        if (spsr.mode == MODE_HYP) {
            ec = ((ExceptionClass) (((uint32_t) ec) + 1));
        }
        return ec;
    }
}

bool
PrefetchAbort::routeToMonitor(ThreadContext *tc) const
{
    SCR scr = 0;
    if (from64)
        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    else
        scr = tc->readMiscRegNoEffect(MISCREG_SCR);

    return scr.ea && !isMMUFault();
}

bool
PrefetchAbort::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    HDCR hdcr = tc->readMiscRegNoEffect(MISCREG_HDCR);

    // if in Hyp mode then stay in Hyp mode
    toHyp = scr.ns && (cpsr.mode == MODE_HYP);
    // otherwise, check whether to take to Hyp mode through Hyp Trap vector
    toHyp |= (stage2 ||
              ((source == DebugEvent) && hdcr.tde &&
               (cpsr.mode != MODE_HYP)) ||
              ((source == SynchronousExternalAbort) && hcr.tge &&
               (cpsr.mode == MODE_USER))
             ) && !inSecureState(tc);
    return toHyp;
}

ExceptionClass
DataAbort::ec(ThreadContext *tc) const
{
    if (to64) {
        // AArch64
        if (source == ArmFault::AsynchronousExternalAbort) {
            panic("Asynchronous External Abort should be handled with "
                  "SystemErrors (SErrors)!");
        }
        if (toEL == fromEL)
            return EC_DATA_ABORT_CURR_EL;
        else
            return EC_DATA_ABORT_LOWER_EL;
    } else {
        // AArch32
        // Abort faults have different EC codes depending on whether
        // the fault originated within HYP mode, or not. So override
        // the method and add the extra adjustment of the EC value.

        ExceptionClass ec = ArmFaultVals<DataAbort>::vals.ec;

        CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
        if (spsr.mode == MODE_HYP) {
            ec = ((ExceptionClass) (((uint32_t) ec) + 1));
        }
        return ec;
    }
}

bool
DataAbort::routeToMonitor(ThreadContext *tc) const
{
    SCR scr = 0;
    if (from64)
        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    else
        scr = tc->readMiscRegNoEffect(MISCREG_SCR);

    return scr.ea && !isMMUFault();
}

bool
DataAbort::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    HDCR hdcr = tc->readMiscRegNoEffect(MISCREG_HDCR);

    // if in Hyp mode then stay in Hyp mode
    toHyp = scr.ns && (cpsr.mode == MODE_HYP);
    // otherwise, check whether to take to Hyp mode through Hyp Trap vector
    toHyp |= (stage2 ||
              ((cpsr.mode != MODE_HYP) &&
               (((source == AsynchronousExternalAbort) && hcr.amo) ||
                ((source == DebugEvent) && hdcr.tde))) ||
              ((cpsr.mode == MODE_USER) && hcr.tge &&
               ((source == AlignmentFault) ||
                (source == SynchronousExternalAbort)))
             ) && !inSecureState(tc);
    return toHyp;
}

uint32_t
DataAbort::iss() const
{
    uint32_t val;

    // Add on the data abort specific fields to the generic abort ISS value
    val  = AbortFault<DataAbort>::iss();
    // The ISS is valid if the abort was not caused by a stage 1 page table
    // walk and, when taken to AArch64, only when it is directed to EL2
    if (!s1ptw && (!to64 || toEL == EL2)) {
        val |= isv << 24;
        if (isv) {
            val |= sas << 22;
            val |= sse << 21;
            val |= srt << 16;
            // AArch64 only. These assignments are safe on AArch32 as well
            // because these variables are initialized to false
            val |= sf << 15;
            val |= ar << 14;
        }
    }
    return (val);
}

void
DataAbort::annotate(AnnotationIDs id, uint64_t val)
{
    AbortFault<DataAbort>::annotate(id, val);
    switch (id)
    {
      case SAS:
        isv = true;
        sas = val;
        break;
      case SSE:
        isv = true;
        sse = val;
        break;
      case SRT:
        isv = true;
        srt = val;
        break;
      case SF:
        isv = true;
        sf  = val;
        break;
      case AR:
        isv = true;
        ar  = val;
        break;
      // Just ignore unknown IDs
      default:
        break;
    }
}

void
VirtualDataAbort::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    AbortFault<VirtualDataAbort>::invoke(tc, inst);
    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
    hcr.va = 0;
    tc->setMiscRegNoEffect(MISCREG_HCR, hcr);
}

bool
Interrupt::routeToMonitor(ThreadContext *tc) const
{
    assert(ArmSystem::haveSecurity(tc));
    SCR scr = 0;
    if (from64)
        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    else
        scr = tc->readMiscRegNoEffect(MISCREG_SCR);
    return scr.irq;
}
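
// Physical IRQs are taken to Hyp mode when the processor is already in Hyp
// mode, or when HCR.IMO routes IRQs to Hyp while in Non-secure state and
// SCR.IRQ has not already claimed the interrupt for Monitor mode.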
bool
Interrupt::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    // Determine whether IRQs are routed to Hyp mode.
    toHyp = (!scr.irq && hcr.imo && !inSecureState(tc)) ||
            (cpsr.mode == MODE_HYP);
    return toHyp;
}

bool
Interrupt::abortDisable(ThreadContext *tc)
{
    if (ArmSystem::haveSecurity(tc)) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        return (!scr.ns || scr.aw);
    }
    return true;
}

VirtualInterrupt::VirtualInterrupt()
{}

bool
FastInterrupt::routeToMonitor(ThreadContext *tc) const
{
    assert(ArmSystem::haveSecurity(tc));
    SCR scr = 0;
    if (from64)
        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    else
        scr = tc->readMiscRegNoEffect(MISCREG_SCR);
    return scr.fiq;
}

bool
FastInterrupt::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    // Determine whether FIQs are routed to Hyp mode.
    toHyp = (!scr.fiq && hcr.fmo && !inSecureState(tc)) ||
            (cpsr.mode == MODE_HYP);
    return toHyp;
}

bool
FastInterrupt::abortDisable(ThreadContext *tc)
{
    if (ArmSystem::haveSecurity(tc)) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        return (!scr.ns || scr.aw);
    }
    return true;
}

bool
FastInterrupt::fiqDisable(ThreadContext *tc)
{
    if (ArmSystem::haveVirtualization(tc)) {
        return true;
    } else if (ArmSystem::haveSecurity(tc)) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        return (!scr.ns || scr.fw);
    }
    return true;
}

VirtualFastInterrupt::VirtualFastInterrupt()
{}

void
PCAlignmentFault::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    ArmFaultVals<PCAlignmentFault>::invoke(tc, inst);
    assert(from64);
    // Set the FAR
    tc->setMiscReg(getFaultAddrReg64(), faultPC);
}

bool
PCAlignmentFault::routeToHyp(ThreadContext *tc) const
{
    bool toHyp = false;

    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);

    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.el == EL0);
    return toHyp;
}

SPAlignmentFault::SPAlignmentFault()
{}

SystemError::SystemError()
{}

void
SystemError::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_ABT, 0);
    ArmFault::invoke(tc, inst);
}

bool
SystemError::routeToMonitor(ThreadContext *tc) const
{
    assert(ArmSystem::haveSecurity(tc));
    assert(from64);
    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    return scr.ea;
}

bool
SystemError::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;
    assert(from64);

    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);

    toHyp = (!scr.ea && hcr.amo && !inSecureState(tc)) ||
            (!scr.ea && !scr.rw && !hcr.amo && !inSecureState(tc));
    return toHyp;
}


SoftwareBreakpoint::SoftwareBreakpoint(ExtMachInst _mach_inst, uint32_t _iss)
    : ArmFaultVals<SoftwareBreakpoint>(_mach_inst, _iss)
{}
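
// Software breakpoints taken from EL0 or EL1 in Non-secure state are routed
// to EL2 when EL2 is implemented and either HCR_EL2.TGE or MDCR_EL2.TDE is
// set, mirroring the debug-exception routing rules.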
bool
SoftwareBreakpoint::routeToHyp(ThreadContext *tc) const
{
    const bool have_el2 = ArmSystem::haveVirtualization(tc);

    const HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
    const HDCR mdcr = tc->readMiscRegNoEffect(MISCREG_MDCR_EL2);

    return have_el2 && !inSecureState(tc) && fromEL <= EL1 &&
        (hcr.tge || mdcr.tde);
}

ExceptionClass
SoftwareBreakpoint::ec(ThreadContext *tc) const
{
    return from64 ? EC_SOFTWARE_BREAKPOINT_64 : vals.ec;
}

void
ArmSev::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    DPRINTF(Faults, "Invoking ArmSev Fault\n");
    if (!FullSystem)
        return;

    // Set sev_mailbox to 1, clear the pending interrupt from remote
    // SEV execution and let pipeline continue as pcState is still
    // valid.
    tc->setMiscReg(MISCREG_SEV_MAILBOX, 1);
    tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_SEV, 0);
}

// Instantiate all the templates to make the linker happy
template class ArmFaultVals<Reset>;
template class ArmFaultVals<UndefinedInstruction>;
template class ArmFaultVals<SupervisorCall>;
template class ArmFaultVals<SecureMonitorCall>;
template class ArmFaultVals<HypervisorCall>;
template class ArmFaultVals<PrefetchAbort>;
template class ArmFaultVals<DataAbort>;
template class ArmFaultVals<VirtualDataAbort>;
template class ArmFaultVals<HypervisorTrap>;
template class ArmFaultVals<Interrupt>;
template class ArmFaultVals<VirtualInterrupt>;
template class ArmFaultVals<FastInterrupt>;
template class ArmFaultVals<VirtualFastInterrupt>;
template class ArmFaultVals<SupervisorTrap>;
template class ArmFaultVals<SecureMonitorTrap>;
template class ArmFaultVals<PCAlignmentFault>;
template class ArmFaultVals<SPAlignmentFault>;
template class ArmFaultVals<SystemError>;
template class ArmFaultVals<SoftwareBreakpoint>;
template class ArmFaultVals<ArmSev>;
template class AbortFault<PrefetchAbort>;
template class AbortFault<DataAbort>;
template class AbortFault<VirtualDataAbort>;


IllegalInstSetStateFault::IllegalInstSetStateFault()
{}


} // namespace ArmISA