faults.cc revision 12511
/*
 * Copyright (c) 2010, 2012-2014, 2016-2018 ARM Limited
 * All rights reserved
 *
 * The license below extends only to copyright in the software and shall
 * not be construed as granting a license to any other intellectual
 * property including but not limited to intellectual property relating
 * to a hardware implementation of the functionality of the software
 * licensed hereunder. You may use the software subject to the license
 * terms below provided that you ensure that this notice is replicated
 * unmodified and in its entirety in all distributions of the software,
 * modified or unmodified, in source code or in binary form.
 *
 * Copyright (c) 2003-2005 The Regents of The University of Michigan
 * Copyright (c) 2007-2008 The Florida State University
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met: redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer;
 * redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution;
 * neither the name of the copyright holders nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * Authors: Ali Saidi
 *          Gabe Black
 *          Giacomo Gabrielli
 *          Thomas Grocutt
 */

#include "arch/arm/faults.hh"

#include "arch/arm/insts/static_inst.hh"
#include "arch/arm/system.hh"
#include "arch/arm/utility.hh"
#include "base/compiler.hh"
#include "base/trace.hh"
#include "cpu/base.hh"
#include "cpu/thread_context.hh"
#include "debug/Faults.hh"
#include "sim/full_system.hh"

namespace ArmISA
{

uint8_t ArmFault::shortDescFaultSources[] = {
    0x01,  // AlignmentFault
    0x04,  // InstructionCacheMaintenance
    0xff,  // SynchExtAbtOnTranslTableWalkL0 (INVALID)
    0x0c,  // SynchExtAbtOnTranslTableWalkL1
    0x0e,  // SynchExtAbtOnTranslTableWalkL2
    0xff,  // SynchExtAbtOnTranslTableWalkL3 (INVALID)
    0xff,  // SynchPtyErrOnTranslTableWalkL0 (INVALID)
    0x1c,  // SynchPtyErrOnTranslTableWalkL1
    0x1e,  // SynchPtyErrOnTranslTableWalkL2
    0xff,  // SynchPtyErrOnTranslTableWalkL3 (INVALID)
    0xff,  // TranslationL0 (INVALID)
    0x05,  // TranslationL1
    0x07,  // TranslationL2
    0xff,  // TranslationL3 (INVALID)
    0xff,  // AccessFlagL0 (INVALID)
    0x03,  // AccessFlagL1
    0x06,  // AccessFlagL2
    0xff,  // AccessFlagL3 (INVALID)
    0xff,  // DomainL0 (INVALID)
    0x09,  // DomainL1
    0x0b,  // DomainL2
    0xff,  // DomainL3 (INVALID)
    0xff,  // PermissionL0 (INVALID)
    0x0d,  // PermissionL1
    0x0f,  // PermissionL2
    0xff,  // PermissionL3 (INVALID)
    0x02,  // DebugEvent
    0x08,  // SynchronousExternalAbort
    0x10,  // TLBConflictAbort
    0x19,  // SynchPtyErrOnMemoryAccess
    0x16,  // AsynchronousExternalAbort
    0x18,  // AsynchPtyErrOnMemoryAccess
    0xff,  // AddressSizeL0 (INVALID)
    0xff,  // AddressSizeL1 (INVALID)
    0xff,  // AddressSizeL2 (INVALID)
    0xff,  // AddressSizeL3 (INVALID)
    0x40,  // PrefetchTLBMiss
    0x80   // PrefetchUncacheable
};

static_assert(sizeof(ArmFault::shortDescFaultSources) ==
              ArmFault::NumFaultSources,
              "Invalid size of ArmFault::shortDescFaultSources[]");

uint8_t ArmFault::longDescFaultSources[] = {
    0x21,  // AlignmentFault
    0xff,  // InstructionCacheMaintenance (INVALID)
    0xff,  // SynchExtAbtOnTranslTableWalkL0 (INVALID)
    0x15,  // SynchExtAbtOnTranslTableWalkL1
    0x16,  // SynchExtAbtOnTranslTableWalkL2
    0x17,  // SynchExtAbtOnTranslTableWalkL3
    0xff,  // SynchPtyErrOnTranslTableWalkL0 (INVALID)
    0x1d,  // SynchPtyErrOnTranslTableWalkL1
    0x1e,  // SynchPtyErrOnTranslTableWalkL2
    0x1f,  // SynchPtyErrOnTranslTableWalkL3
    0xff,  // TranslationL0 (INVALID)
    0x05,  // TranslationL1
    0x06,  // TranslationL2
    0x07,  // TranslationL3
    0xff,  // AccessFlagL0 (INVALID)
    0x09,  // AccessFlagL1
    0x0a,  // AccessFlagL2
    0x0b,  // AccessFlagL3
    0xff,  // DomainL0 (INVALID)
    0x3d,  // DomainL1
    0x3e,  // DomainL2
    0xff,  // DomainL3 (RESERVED)
    0xff,  // PermissionL0 (INVALID)
    0x0d,  // PermissionL1
    0x0e,  // PermissionL2
    0x0f,  // PermissionL3
    0x22,  // DebugEvent
    0x10,  // SynchronousExternalAbort
    0x30,  // TLBConflictAbort
    0x18,  // SynchPtyErrOnMemoryAccess
    0x11,  // AsynchronousExternalAbort
    0x19,  // AsynchPtyErrOnMemoryAccess
    0xff,  // AddressSizeL0 (INVALID)
    0xff,  // AddressSizeL1 (INVALID)
    0xff,  // AddressSizeL2 (INVALID)
    0xff,  // AddressSizeL3 (INVALID)
    0x40,  // PrefetchTLBMiss
    0x80   // PrefetchUncacheable
};

static_assert(sizeof(ArmFault::longDescFaultSources) ==
              ArmFault::NumFaultSources,
              "Invalid size of ArmFault::longDescFaultSources[]");
uint8_t ArmFault::aarch64FaultSources[] = {
    0x21,  // AlignmentFault
    0xff,  // InstructionCacheMaintenance (INVALID)
    0x14,  // SynchExtAbtOnTranslTableWalkL0
    0x15,  // SynchExtAbtOnTranslTableWalkL1
    0x16,  // SynchExtAbtOnTranslTableWalkL2
    0x17,  // SynchExtAbtOnTranslTableWalkL3
    0x1c,  // SynchPtyErrOnTranslTableWalkL0
    0x1d,  // SynchPtyErrOnTranslTableWalkL1
    0x1e,  // SynchPtyErrOnTranslTableWalkL2
    0x1f,  // SynchPtyErrOnTranslTableWalkL3
    0x04,  // TranslationL0
    0x05,  // TranslationL1
    0x06,  // TranslationL2
    0x07,  // TranslationL3
    0x08,  // AccessFlagL0
    0x09,  // AccessFlagL1
    0x0a,  // AccessFlagL2
    0x0b,  // AccessFlagL3
    // @todo: Section & Page Domain Fault in AArch64?
    0xff,  // DomainL0 (INVALID)
    0xff,  // DomainL1 (INVALID)
    0xff,  // DomainL2 (INVALID)
    0xff,  // DomainL3 (INVALID)
    0x0c,  // PermissionL0
    0x0d,  // PermissionL1
    0x0e,  // PermissionL2
    0x0f,  // PermissionL3
    0xff,  // DebugEvent (INVALID)
    0x10,  // SynchronousExternalAbort
    0x30,  // TLBConflictAbort
    0x18,  // SynchPtyErrOnMemoryAccess
    0xff,  // AsynchronousExternalAbort (INVALID)
    0xff,  // AsynchPtyErrOnMemoryAccess (INVALID)
    0x00,  // AddressSizeL0
    0x01,  // AddressSizeL1
    0x02,  // AddressSizeL2
    0x03,  // AddressSizeL3
    0x40,  // PrefetchTLBMiss
    0x80   // PrefetchUncacheable
};

static_assert(sizeof(ArmFault::aarch64FaultSources) ==
              ArmFault::NumFaultSources,
              "Invalid size of ArmFault::aarch64FaultSources[]");
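
// The three tables above translate the ArmFault::FaultSource enum into the
// architectural fault status encodings that getFsr() (further down in this
// file) writes into FSR (short-descriptor format), FSR.STATUS (long
// descriptor/LPAE format) or the ISS field of ESR_ELx (AArch64); 0xff marks
// combinations with no valid encoding. Illustrative lookup (a sketch only,
// not part of the original source; TranslationLL is the level-0 enumerator,
// as used by AbortFault<T>::isMMUFault() below):
//
//     // level-1 translation fault, short-descriptor format -> FS = 0x05
//     uint8_t enc =
//         ArmFault::shortDescFaultSources[ArmFault::TranslationLL + 1];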
// Fields: name, offset, cur{ELT,ELH}Offset, lowerEL{64,32}Offset, next mode,
//         {ARM, Thumb, ARM_ELR, Thumb_ELR} PC offset, hyp trap,
//         {A, F} disable, class, stat
template<> ArmFault::FaultVals ArmFaultVals<Reset>::vals = {
    // Some dummy values (the reset vector has an IMPLEMENTATION DEFINED
    // location in AArch64)
    "Reset", 0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC,
    0, 0, 0, 0, false, true, true, EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<UndefinedInstruction>::vals = {
    "Undefined Instruction", 0x004, 0x000, 0x200, 0x400, 0x600, MODE_UNDEFINED,
    4, 2, 0, 0, true, false, false, EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SupervisorCall>::vals = {
    "Supervisor Call", 0x008, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    4, 2, 4, 2, true, false, false, EC_SVC_TO_HYP, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorCall>::vals = {
    "Secure Monitor Call", 0x008, 0x000, 0x200, 0x400, 0x600, MODE_MON,
    4, 4, 4, 4, false, true, true, EC_SMC_TO_HYP, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<HypervisorCall>::vals = {
    "Hypervisor Call", 0x008, 0x000, 0x200, 0x400, 0x600, MODE_HYP,
    4, 4, 4, 4, true, false, false, EC_HVC, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<PrefetchAbort>::vals = {
    "Prefetch Abort", 0x00C, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
    4, 4, 0, 0, true, true, false, EC_PREFETCH_ABORT_TO_HYP, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<DataAbort>::vals = {
    "Data Abort", 0x010, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
    8, 8, 0, 0, true, true, false, EC_DATA_ABORT_TO_HYP, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<VirtualDataAbort>::vals = {
    "Virtual Data Abort", 0x010, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
    8, 8, 0, 0, true, true, false, EC_INVALID, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<HypervisorTrap>::vals = {
    // @todo: double check these values
    "Hypervisor Trap", 0x014, 0x000, 0x200, 0x400, 0x600, MODE_HYP,
    0, 0, 0, 0, false, false, false, EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<Interrupt>::vals = {
    "IRQ", 0x018, 0x080, 0x280, 0x480, 0x680, MODE_IRQ,
    4, 4, 0, 0, false, true, false, EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<VirtualInterrupt>::vals = {
    "Virtual IRQ", 0x018, 0x080, 0x280, 0x480, 0x680, MODE_IRQ,
    4, 4, 0, 0, false, true, false, EC_INVALID, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<FastInterrupt>::vals = {
    "FIQ", 0x01C, 0x100, 0x300, 0x500, 0x700, MODE_FIQ,
    4, 4, 0, 0, false, true, true, EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<VirtualFastInterrupt>::vals = {
    "Virtual FIQ", 0x01C, 0x100, 0x300, 0x500, 0x700, MODE_FIQ,
    4, 4, 0, 0, false, true, true, EC_INVALID, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SupervisorTrap>::vals = {
    // Some dummy values (SupervisorTrap is AArch64-only)
    "Supervisor Trap", 0x014, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, false, false, false, EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorTrap>::vals = {
    // Some dummy values (SecureMonitorTrap is AArch64-only)
    "Secure Monitor Trap", 0x014, 0x000, 0x200, 0x400, 0x600, MODE_MON,
    0, 0, 0, 0, false, false, false, EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<PCAlignmentFault>::vals = {
    // Some dummy values (PCAlignmentFault is AArch64-only)
    "PC Alignment Fault", 0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, true, false, false, EC_PC_ALIGNMENT, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SPAlignmentFault>::vals = {
    // Some dummy values (SPAlignmentFault is AArch64-only)
    "SP Alignment Fault", 0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, true, false, false, EC_STACK_PTR_ALIGNMENT, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SystemError>::vals = {
    // Some dummy values (SError is AArch64-only)
    "SError", 0x000, 0x180, 0x380, 0x580, 0x780, MODE_SVC,
    0, 0, 0, 0, false, true, true, EC_SERROR, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SoftwareBreakpoint>::vals = {
    // Some dummy values (SoftwareBreakpoint is AArch64-only)
    "Software Breakpoint", 0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, true, false, false, EC_SOFTWARE_BREAKPOINT, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<ArmSev>::vals = {
    // Some dummy values
    "ArmSev Flush", 0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC,
    0, 0, 0, 0, false, true, true, EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<IllegalInstSetStateFault>::vals = {
    // Some dummy values (IllegalInstSetStateFault is AArch64-only)
    "Illegal Inst Set State Fault", 0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, true, false, false, EC_ILLEGAL_INST, FaultStat()
};
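
// Reading one row against the field list above (a worked example): for
// "Undefined Instruction" the AArch32 vector offset is 0x004; the AArch64
// vector offsets are 0x000/0x200 when taken from the current EL (using
// SP_EL0/SP_ELx) and 0x400/0x600 when taken from a lower EL running
// AArch64/AArch32; the next mode is MODE_UNDEFINED; the saved return address
// is PC+4 (ARM) or PC+2 (Thumb) with ELR offsets of 0; the fault can be
// trapped to Hyp; it does not set CPSR.A or CPSR.F; and its default
// exception class is EC_UNKNOWN. invoke(), invoke64(), offset() and
// offset64() below consume these values.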
Addr
ArmFault::getVector(ThreadContext *tc)
{
    Addr base;

    // ARM ARM issue C B1.8.1
    bool haveSecurity = ArmSystem::haveSecurity(tc);

    // panic if SCTLR.VE because I have no idea what to do with vectored
    // interrupts
    SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR);
    assert(!sctlr.ve);
    // Check for invalid modes
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    assert(haveSecurity || cpsr.mode != MODE_MON);
    assert(ArmSystem::haveVirtualization(tc) || cpsr.mode != MODE_HYP);

    switch (cpsr.mode)
    {
      case MODE_MON:
        base = tc->readMiscReg(MISCREG_MVBAR);
        break;
      case MODE_HYP:
        base = tc->readMiscReg(MISCREG_HVBAR);
        break;
      default:
        if (sctlr.v) {
            base = HighVecs;
        } else {
            base = haveSecurity ? tc->readMiscReg(MISCREG_VBAR) : 0;
        }
        break;
    }
    return base + offset(tc);
}

Addr
ArmFault::getVector64(ThreadContext *tc)
{
    Addr vbar;
    switch (toEL) {
      case EL3:
        assert(ArmSystem::haveSecurity(tc));
        vbar = tc->readMiscReg(MISCREG_VBAR_EL3);
        break;
      case EL2:
        assert(ArmSystem::haveVirtualization(tc));
        vbar = tc->readMiscReg(MISCREG_VBAR_EL2);
        break;
      case EL1:
        vbar = tc->readMiscReg(MISCREG_VBAR_EL1);
        break;
      default:
        panic("Invalid target exception level");
        break;
    }
    return vbar + offset64(tc);
}

MiscRegIndex
ArmFault::getSyndromeReg64() const
{
    switch (toEL) {
      case EL1:
        return MISCREG_ESR_EL1;
      case EL2:
        return MISCREG_ESR_EL2;
      case EL3:
        return MISCREG_ESR_EL3;
      default:
        panic("Invalid exception level");
        break;
    }
}

MiscRegIndex
ArmFault::getFaultAddrReg64() const
{
    switch (toEL) {
      case EL1:
        return MISCREG_FAR_EL1;
      case EL2:
        return MISCREG_FAR_EL2;
      case EL3:
        return MISCREG_FAR_EL3;
      default:
        panic("Invalid exception level");
        break;
    }
}
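
// setSyndrome() below assembles the HSR/ESR_ELx image by hand: the exception
// class goes into bits [31:26], the instruction-length (IL) bit into bit
// [25], and the fault-specific ISS (returned by the virtual iss() methods)
// into bits [24:0]. For AArch32 traps whose EC carries a condition code, the
// ISS is truncated to bits [19:0] and CV/COND are packed into bits [24:20].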
void
ArmFault::setSyndrome(ThreadContext *tc, MiscRegIndex syndrome_reg)
{
    uint32_t value;
    uint32_t exc_class = (uint32_t) ec(tc);
    uint32_t issVal = iss();

    assert(!from64 || ArmSystem::highestELIs64(tc));

    value = exc_class << 26;

    // HSR.IL not valid for Prefetch Aborts (0x20, 0x21) and Data Aborts (0x24,
    // 0x25) for which the ISS information is not valid (ARMv7).
    // @todo: ARMv8 revises AArch32 functionality: when HSR.IL is not
    // valid it is treated as RES1.
    if (to64) {
        value |= 1 << 25;
    } else if ((bits(exc_class, 5, 3) != 4) ||
               (bits(exc_class, 2) && bits(issVal, 24))) {
        if (!machInst.thumb || machInst.bigThumb)
            value |= 1 << 25;
    }
    // Condition code valid for EC[5:4] nonzero
    if (!from64 && ((bits(exc_class, 5, 4) == 0) &&
                    (bits(exc_class, 3, 0) != 0))) {
        if (!machInst.thumb) {
            uint32_t cond;
            ConditionCode condCode = (ConditionCode) (uint32_t) machInst.condCode;
            // If it's an unconditional instruction, report it with a condition
            // code of 0xE, i.e. the unconditional code
            cond = (condCode == COND_UC) ? COND_AL : condCode;
            value |= cond << 20;
            value |= 1 << 24;
        }
        value |= bits(issVal, 19, 0);
    } else {
        value |= issVal;
    }
    tc->setMiscReg(syndrome_reg, value);
}

void
ArmFault::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);

    if (ArmSystem::highestELIs64(tc)) {  // ARMv8
        // Determine source exception level and mode
        fromMode = (OperatingMode) (uint8_t) cpsr.mode;
        fromEL = opModeToEL(fromMode);
        if (opModeIs64(fromMode))
            from64 = true;

        // Determine target exception level
        if (ArmSystem::haveSecurity(tc) && routeToMonitor(tc)) {
            toEL = EL3;
        } else if (ArmSystem::haveVirtualization(tc) && routeToHyp(tc)) {
            toEL = EL2;
            hypRouted = true;
        } else {
            toEL = opModeToEL(nextMode());
        }

        if (fromEL > toEL)
            toEL = fromEL;

        if (toEL == ArmSystem::highestEL(tc) || ELIs64(tc, toEL)) {
            // Invoke exception handler in AArch64 state
            to64 = true;
            invoke64(tc, inst);
            return;
        }
    }

    // ARMv7 (ARM ARM issue C B1.9)

    bool have_security = ArmSystem::haveSecurity(tc);
    bool have_virtualization = ArmSystem::haveVirtualization(tc);

    FaultBase::invoke(tc);
    if (!FullSystem)
        return;
    countStat()++;

    SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR);
    SCR scr = tc->readMiscReg(MISCREG_SCR);
    CPSR saved_cpsr = tc->readMiscReg(MISCREG_CPSR);
    saved_cpsr.nz = tc->readCCReg(CCREG_NZ);
    saved_cpsr.c = tc->readCCReg(CCREG_C);
    saved_cpsr.v = tc->readCCReg(CCREG_V);
    saved_cpsr.ge = tc->readCCReg(CCREG_GE);

    Addr curPc M5_VAR_USED = tc->pcState().pc();
    ITSTATE it = tc->pcState().itstate();
    saved_cpsr.it2 = it.top6;
    saved_cpsr.it1 = it.bottom2;

    // If we have a valid instruction then use it to annotate this fault with
    // extra information. This is used to generate the correct fault syndrome
    // information
    if (inst) {
        ArmStaticInst *armInst = static_cast<ArmStaticInst *>(inst.get());
        armInst->annotateFault(this);
    }

    if (have_security && routeToMonitor(tc)) {
        cpsr.mode = MODE_MON;
    } else if (have_virtualization && routeToHyp(tc)) {
        cpsr.mode = MODE_HYP;
        hypRouted = true;
    } else {
        cpsr.mode = nextMode();
    }

    // Ensure Secure state if initially in Monitor mode
    if (have_security && saved_cpsr.mode == MODE_MON) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        if (scr.ns) {
            scr.ns = 0;
            tc->setMiscRegNoEffect(MISCREG_SCR, scr);
        }
    }

    // some bits are set differently if we have been routed to hyp mode
    if (cpsr.mode == MODE_HYP) {
        SCTLR hsctlr = tc->readMiscReg(MISCREG_HSCTLR);
        cpsr.t = hsctlr.te;
        cpsr.e = hsctlr.ee;
        if (!scr.ea)  {cpsr.a = 1;}
        if (!scr.fiq) {cpsr.f = 1;}
        if (!scr.irq) {cpsr.i = 1;}
    } else if (cpsr.mode == MODE_MON) {
        // Special case handling when entering monitor mode
        cpsr.t = sctlr.te;
        cpsr.e = sctlr.ee;
        cpsr.a = 1;
        cpsr.f = 1;
        cpsr.i = 1;
    } else {
        cpsr.t = sctlr.te;
        cpsr.e = sctlr.ee;

        // The *Disable functions are virtual and different per fault
        cpsr.a = cpsr.a | abortDisable(tc);
        cpsr.f = cpsr.f | fiqDisable(tc);
        cpsr.i = 1;
    }
    cpsr.it1 = cpsr.it2 = 0;
    cpsr.j = 0;
    tc->setMiscReg(MISCREG_CPSR, cpsr);

    // Make sure the SEV mailbox is always set to one
    tc->setMiscReg(MISCREG_SEV_MAILBOX, 1);

    // Clear the exclusive monitor
    tc->setMiscReg(MISCREG_LOCKFLAG, 0);

    if (cpsr.mode == MODE_HYP) {
        tc->setMiscReg(MISCREG_ELR_HYP, curPc +
                (saved_cpsr.t ? thumbPcOffset(true)  : armPcOffset(true)));
    } else {
        tc->setIntReg(INTREG_LR, curPc +
                (saved_cpsr.t ? thumbPcOffset(false) : armPcOffset(false)));
    }

    switch (cpsr.mode) {
      case MODE_FIQ:
        tc->setMiscReg(MISCREG_SPSR_FIQ, saved_cpsr);
        break;
      case MODE_IRQ:
        tc->setMiscReg(MISCREG_SPSR_IRQ, saved_cpsr);
        break;
      case MODE_SVC:
        tc->setMiscReg(MISCREG_SPSR_SVC, saved_cpsr);
        break;
      case MODE_MON:
        assert(have_security);
        tc->setMiscReg(MISCREG_SPSR_MON, saved_cpsr);
        break;
      case MODE_ABORT:
        tc->setMiscReg(MISCREG_SPSR_ABT, saved_cpsr);
        break;
      case MODE_UNDEFINED:
        tc->setMiscReg(MISCREG_SPSR_UND, saved_cpsr);
        if (ec(tc) != EC_UNKNOWN)
            setSyndrome(tc, MISCREG_HSR);
        break;
      case MODE_HYP:
        assert(have_virtualization);
        tc->setMiscReg(MISCREG_SPSR_HYP, saved_cpsr);
        setSyndrome(tc, MISCREG_HSR);
        break;
      default:
        panic("unknown Mode\n");
    }

    Addr newPc = getVector(tc);
    DPRINTF(Faults, "Invoking Fault:%s cpsr:%#x PC:%#x lr:%#x newVec: %#x\n",
            name(), cpsr, curPc, tc->readIntReg(INTREG_LR), newPc);
    PCState pc(newPc);
    pc.thumb(cpsr.t);
    pc.nextThumb(pc.thumb());
    pc.jazelle(cpsr.j);
    pc.nextJazelle(pc.jazelle());
    pc.aarch64(!cpsr.width);
    pc.nextAArch64(!cpsr.width);
    tc->pcState(pc);
}
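
// invoke64() below performs the AArch64 exception entry sequence: it saves
// the current PSTATE into SPSR_ELx and the preferred return address into
// ELR_ELx for the target EL, masks DAIF and clears IL/SS in the new PSTATE,
// and branches to VBAR_ELx plus the offset selected by offset64(). The
// syndrome is written afterwards via setSyndrome(), except for IRQ/FIQ.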
void
ArmFault::invoke64(ThreadContext *tc, const StaticInstPtr &inst)
{
    // Determine actual misc. register indices for ELR_ELx and SPSR_ELx
    MiscRegIndex elr_idx, spsr_idx;
    switch (toEL) {
      case EL1:
        elr_idx = MISCREG_ELR_EL1;
        spsr_idx = MISCREG_SPSR_EL1;
        break;
      case EL2:
        assert(ArmSystem::haveVirtualization(tc));
        elr_idx = MISCREG_ELR_EL2;
        spsr_idx = MISCREG_SPSR_EL2;
        break;
      case EL3:
        assert(ArmSystem::haveSecurity(tc));
        elr_idx = MISCREG_ELR_EL3;
        spsr_idx = MISCREG_SPSR_EL3;
        break;
      default:
        panic("Invalid target exception level");
        break;
    }

    // Save process state into SPSR_ELx
    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
    CPSR spsr = cpsr;
    spsr.nz = tc->readCCReg(CCREG_NZ);
    spsr.c = tc->readCCReg(CCREG_C);
    spsr.v = tc->readCCReg(CCREG_V);
    if (from64) {
        // Force some bitfields to 0
        spsr.q = 0;
        spsr.it1 = 0;
        spsr.j = 0;
        spsr.res0_23_22 = 0;
        spsr.ge = 0;
        spsr.it2 = 0;
        spsr.t = 0;
    } else {
        spsr.ge = tc->readCCReg(CCREG_GE);
        ITSTATE it = tc->pcState().itstate();
        spsr.it2 = it.top6;
        spsr.it1 = it.bottom2;
        // Force some bitfields to 0
        spsr.res0_23_22 = 0;
        spsr.ss = 0;
    }
    tc->setMiscReg(spsr_idx, spsr);

    // Save preferred return address into ELR_ELx
    Addr curr_pc = tc->pcState().pc();
    Addr ret_addr = curr_pc;
    if (from64)
        ret_addr += armPcElrOffset();
    else
        ret_addr += spsr.t ? thumbPcElrOffset() : armPcElrOffset();
    tc->setMiscReg(elr_idx, ret_addr);

    Addr vec_address = getVector64(tc);

    // Update process state
    OperatingMode64 mode = 0;
    mode.spX = 1;
    mode.el = toEL;
    mode.width = 0;
    cpsr.mode = mode;
    cpsr.daif = 0xf;
    cpsr.il = 0;
    cpsr.ss = 0;
    tc->setMiscReg(MISCREG_CPSR, cpsr);

    // Set PC to start of exception handler
    Addr new_pc = purifyTaggedAddr(vec_address, tc, toEL);
    DPRINTF(Faults, "Invoking Fault (AArch64 target EL):%s cpsr:%#x PC:%#x "
            "elr:%#x newVec: %#x\n", name(), cpsr, curr_pc, ret_addr, new_pc);
    PCState pc(new_pc);
    pc.aarch64(!cpsr.width);
    pc.nextAArch64(!cpsr.width);
    tc->pcState(pc);

    // If we have a valid instruction then use it to annotate this fault with
    // extra information. This is used to generate the correct fault syndrome
    // information
    if (inst)
        static_cast<ArmStaticInst *>(inst.get())->annotateFault(this);
    // Save exception syndrome
    if ((nextMode() != MODE_IRQ) && (nextMode() != MODE_FIQ))
        setSyndrome(tc, getSyndromeReg64());
}

void
Reset::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (FullSystem) {
        tc->getCpuPtr()->clearInterrupts(tc->threadId());
        tc->clearArchRegs();
    }
    if (!ArmSystem::highestELIs64(tc)) {
        ArmFault::invoke(tc, inst);
        tc->setMiscReg(MISCREG_VMPIDR,
                       getMPIDR(dynamic_cast<ArmSystem*>(tc->getSystemPtr()), tc));

        // Unless we have SMC code to get us there, boot in HYP!
        if (ArmSystem::haveVirtualization(tc) &&
            !ArmSystem::haveSecurity(tc)) {
            CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
            cpsr.mode = MODE_HYP;
            tc->setMiscReg(MISCREG_CPSR, cpsr);
        }
    } else {
        // Advance the PC to the IMPLEMENTATION DEFINED reset value
        PCState pc = ArmSystem::resetAddr64(tc);
        pc.aarch64(true);
        pc.nextAArch64(true);
        tc->pcState(pc);
    }
}

void
UndefinedInstruction::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (FullSystem) {
        ArmFault::invoke(tc, inst);
        return;
    }

    // If the mnemonic isn't defined this has to be an unknown instruction.
    assert(unknown || mnemonic != NULL);
    if (disabled) {
        panic("Attempted to execute disabled instruction "
              "'%s' (inst 0x%08x)", mnemonic, machInst);
    } else if (unknown) {
        panic("Attempted to execute unknown instruction (inst 0x%08x)",
              machInst);
    } else {
        panic("Attempted to execute unimplemented instruction "
              "'%s' (inst 0x%08x)", mnemonic, machInst);
    }
}

bool
UndefinedInstruction::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);

    // if in Hyp mode then stay in Hyp mode
    toHyp = scr.ns && (cpsr.mode == MODE_HYP);
    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.mode == MODE_USER);
    return toHyp;
}

uint32_t
UndefinedInstruction::iss() const
{
    // If UndefinedInstruction is routed to hypervisor, iss field is 0.
    if (hypRouted) {
        return 0;
    }

    if (overrideEc == EC_INVALID)
        return issRaw;

    uint32_t new_iss = 0;
    uint32_t op0, op1, op2, CRn, CRm, Rt, dir;

    dir = bits(machInst, 21, 21);
    op0 = bits(machInst, 20, 19);
    op1 = bits(machInst, 18, 16);
    CRn = bits(machInst, 15, 12);
    CRm = bits(machInst, 11, 8);
    op2 = bits(machInst, 7, 5);
    Rt = bits(machInst, 4, 0);

    new_iss = op0 << 20 | op2 << 17 | op1 << 14 | CRn << 10 |
              Rt << 5 | CRm << 1 | dir;

    return new_iss;
}
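
// When an overriding exception class is set, UndefinedInstruction::iss()
// above re-encodes the trapped system/coprocessor register access into an
// ISS layout built from the fields extracted from machInst: the access
// direction in bit [0], CRm in [4:1], Rt in [9:5], CRn in [13:10], op1 in
// [16:14], op2 in [19:17] and op0 in [21:20].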
void
SupervisorCall::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (FullSystem) {
        ArmFault::invoke(tc, inst);
        return;
    }

    // As of now, there isn't a 32 bit thumb version of this instruction.
    assert(!machInst.bigThumb);
    uint32_t callNum;
    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
    OperatingMode mode = (OperatingMode)(uint8_t)cpsr.mode;
    if (opModeIs64(mode))
        callNum = tc->readIntReg(INTREG_X8);
    else
        callNum = tc->readIntReg(INTREG_R7);
    Fault fault;
    tc->syscall(callNum, &fault);

    // Advance the PC since that won't happen automatically.
    PCState pc = tc->pcState();
    assert(inst);
    inst->advancePC(pc);
    tc->pcState(pc);
}

bool
SupervisorCall::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);

    // if in Hyp mode then stay in Hyp mode
    toHyp = scr.ns && (cpsr.mode == MODE_HYP);
    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.mode == MODE_USER);
    return toHyp;
}

ExceptionClass
SupervisorCall::ec(ThreadContext *tc) const
{
    return (overrideEc != EC_INVALID) ? overrideEc :
        (from64 ? EC_SVC_64 : vals.ec);
}

uint32_t
SupervisorCall::iss() const
{
    // Even if we have a 24-bit imm from an ARM32 instruction, we only use
    // the bottom 16 bits for the ISS value (it doesn't hurt for AArch64 SVC).
    return issRaw & 0xFFFF;
}

uint32_t
SecureMonitorCall::iss() const
{
    if (from64)
        return bits(machInst, 20, 5);
    return 0;
}

ExceptionClass
UndefinedInstruction::ec(ThreadContext *tc) const
{
    // If UndefinedInstruction is routed to hypervisor,
    // HSR.EC field is 0.
    if (hypRouted)
        return EC_UNKNOWN;
    else
        return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
}


HypervisorCall::HypervisorCall(ExtMachInst _machInst, uint32_t _imm) :
    ArmFaultVals<HypervisorCall>(_machInst, _imm)
{}

ExceptionClass
HypervisorCall::ec(ThreadContext *tc) const
{
    return from64 ? EC_HVC_64 : vals.ec;
}

ExceptionClass
HypervisorTrap::ec(ThreadContext *tc) const
{
    return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
}

template<class T>
FaultOffset
ArmFaultVals<T>::offset(ThreadContext *tc)
{
    bool isHypTrap = false;

    // Normally we just use the exception vector from the table at the top of
    // this file; however, if this exception has caused a transition to hyp
    // mode, and it's an exception type that would only do this if it has been
    // trapped, then we use the hyp trap vector instead of the normal vector
    if (vals.hypTrappable) {
        CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
        if (cpsr.mode == MODE_HYP) {
            CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
            isHypTrap = spsr.mode != MODE_HYP;
        }
    }
    return isHypTrap ? 0x14 : vals.offset;
}

template<class T>
FaultOffset
ArmFaultVals<T>::offset64(ThreadContext *tc)
{
    if (toEL == fromEL) {
        if (opModeIsT(fromMode))
            return vals.currELTOffset;
        return vals.currELHOffset;
    } else {
        bool lower_32 = false;
        if (toEL == EL3) {
            if (!inSecureState(tc) && ArmSystem::haveEL(tc, EL2))
                lower_32 = ELIs32(tc, EL2);
            else
                lower_32 = ELIs32(tc, EL1);
        } else {
            lower_32 = ELIs32(tc, static_cast<ExceptionLevel>(toEL - 1));
        }

        if (lower_32)
            return vals.lowerEL32Offset;
        return vals.lowerEL64Offset;
    }
}
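
// The four offsets selected by offset64() above correspond to the quadrants
// of the AArch64 vector table: 0x000 for an exception taken from the current
// EL using SP_EL0, 0x200 for the current EL using SP_ELx, and 0x400/0x600
// for exceptions from a lower EL running AArch64/AArch32 respectively (see
// the cur{ELT,ELH}Offset and lowerEL{64,32}Offset columns in the FaultVals
// table above; the IRQ/FIQ/SError entries carry the additional
// 0x080/0x100/0x180 slot offsets within each quadrant). getVector64() adds
// the result to VBAR_ELx of the target EL.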
// void
// SupervisorCall::setSyndrome64(ThreadContext *tc, MiscRegIndex esr_idx)
// {
//     ESR esr = 0;
//     esr.ec = machInst.aarch64 ? SvcAArch64 : SvcAArch32;
//     esr.il = !machInst.thumb;
//     if (machInst.aarch64)
//         esr.imm16 = bits(machInst.instBits, 20, 5);
//     else if (machInst.thumb)
//         esr.imm16 = bits(machInst.instBits, 7, 0);
//     else
//         esr.imm16 = bits(machInst.instBits, 15, 0);
//     tc->setMiscReg(esr_idx, esr);
// }

void
SecureMonitorCall::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (FullSystem) {
        ArmFault::invoke(tc, inst);
        return;
    }
}

ExceptionClass
SecureMonitorCall::ec(ThreadContext *tc) const
{
    return (from64 ? EC_SMC_64 : vals.ec);
}

bool
SupervisorTrap::routeToHyp(ThreadContext *tc) const
{
    bool toHyp = false;

    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);

    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.el == EL0);
    return toHyp;
}

uint32_t
SupervisorTrap::iss() const
{
    // If SupervisorTrap is routed to hypervisor, iss field is 0.
    if (hypRouted) {
        return 0;
    }
    return issRaw;
}

ExceptionClass
SupervisorTrap::ec(ThreadContext *tc) const
{
    if (hypRouted)
        return EC_UNKNOWN;
    else
        return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
}

ExceptionClass
SecureMonitorTrap::ec(ThreadContext *tc) const
{
    return (overrideEc != EC_INVALID) ? overrideEc :
        (from64 ? EC_SMC_64 : vals.ec);
}

template<class T>
void
AbortFault<T>::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (tranMethod == ArmFault::UnknownTran) {
        tranMethod = longDescFormatInUse(tc) ? ArmFault::LpaeTran
                                             : ArmFault::VmsaTran;

        if ((tranMethod == ArmFault::VmsaTran) && this->routeToMonitor(tc)) {
            // See ARM ARM B3-1416
            bool override_LPAE = false;
            TTBCR ttbcr_s = tc->readMiscReg(MISCREG_TTBCR_S);
            TTBCR M5_VAR_USED ttbcr_ns = tc->readMiscReg(MISCREG_TTBCR_NS);
            if (ttbcr_s.eae) {
                override_LPAE = true;
            } else {
                // Unimplemented code option, not seen in testing. May need
                // extension according to the manual excerpt above.
                DPRINTF(Faults, "Warning: Incomplete translation method "
                        "override detected.\n");
            }
            if (override_LPAE)
                tranMethod = ArmFault::LpaeTran;
        }
    }

    if (source == ArmFault::AsynchronousExternalAbort) {
        tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_ABT, 0);
    }
    // Get effective fault source encoding
    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
    FSR fsr = getFsr(tc);

    // source must be determined BEFORE invoking generic routines which will
    // try to set hsr etc. and are based upon source!
    ArmFaultVals<T>::invoke(tc, inst);

    if (!this->to64) {  // AArch32
        if (cpsr.mode == MODE_HYP) {
            tc->setMiscReg(T::HFarIndex, faultAddr);
        } else if (stage2) {
            tc->setMiscReg(MISCREG_HPFAR, (faultAddr >> 8) & ~0xf);
            tc->setMiscReg(T::HFarIndex,  OVAddr);
        } else {
            tc->setMiscReg(T::FsrIndex, fsr);
            tc->setMiscReg(T::FarIndex, faultAddr);
        }
        DPRINTF(Faults, "Abort Fault source=%#x fsr=%#x faultAddr=%#x "
                "tranMethod=%#x\n", source, fsr, faultAddr, tranMethod);
    } else {  // AArch64
        // Set the FAR register. Nothing else to do if we are in AArch64 state
        // because the syndrome register has already been set inside invoke64()
        if (stage2) {
            // stage 2 fault, set HPFAR_EL2 to the faulting IPA
            // and FAR_EL2 to the Original VA
            tc->setMiscReg(AbortFault<T>::getFaultAddrReg64(), OVAddr);
            tc->setMiscReg(MISCREG_HPFAR_EL2, bits(faultAddr, 47, 12) << 4);

            DPRINTF(Faults, "Abort Fault (Stage 2) VA: 0x%x IPA: 0x%x\n",
                    OVAddr, faultAddr);
        } else {
            tc->setMiscReg(AbortFault<T>::getFaultAddrReg64(), faultAddr);
        }
    }
}

template<class T>
FSR
AbortFault<T>::getFsr(ThreadContext *tc)
{
    FSR fsr = 0;

    if (((CPSR) tc->readMiscRegNoEffect(MISCREG_CPSR)).width) {
        // AArch32
        assert(tranMethod != ArmFault::UnknownTran);
        if (tranMethod == ArmFault::LpaeTran) {
            srcEncoded = ArmFault::longDescFaultSources[source];
            fsr.status = srcEncoded;
            fsr.lpae   = 1;
        } else {
            srcEncoded = ArmFault::shortDescFaultSources[source];
            fsr.fsLow  = bits(srcEncoded, 3, 0);
            fsr.fsHigh = bits(srcEncoded, 4);
            fsr.domain = static_cast<uint8_t>(domain);
        }
        fsr.wnr = (write ? 1 : 0);
        fsr.ext = 0;
    } else {
        // AArch64
        srcEncoded = ArmFault::aarch64FaultSources[source];
    }
    if (srcEncoded == ArmFault::FaultSourceInvalid) {
        panic("Invalid fault source\n");
    }
    return fsr;
}

template<class T>
bool
AbortFault<T>::abortDisable(ThreadContext *tc)
{
    if (ArmSystem::haveSecurity(tc)) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        return (!scr.ns || scr.aw);
    }
    return true;
}

template<class T>
void
AbortFault<T>::annotate(ArmFault::AnnotationIDs id, uint64_t val)
{
    switch (id)
    {
      case ArmFault::S1PTW:
        s1ptw = val;
        break;
      case ArmFault::OVA:
        OVAddr = val;
        break;

      // Just ignore unknown IDs
      default:
        break;
    }
}

template<class T>
uint32_t
AbortFault<T>::iss() const
{
    uint32_t val;

    val  = srcEncoded & 0x3F;
    val |= write << 6;
    val |= s1ptw << 7;
    return (val);
}

template<class T>
bool
AbortFault<T>::isMMUFault() const
{
    // NOTE: Not relying on LL information being aligned to lowest bits here
    return
        (source == ArmFault::AlignmentFault) ||
        ((source >= ArmFault::TranslationLL) &&
         (source <  ArmFault::TranslationLL + 4)) ||
        ((source >= ArmFault::AccessFlagLL) &&
         (source <  ArmFault::AccessFlagLL + 4)) ||
        ((source >= ArmFault::DomainLL) &&
         (source <  ArmFault::DomainLL + 4)) ||
        ((source >= ArmFault::PermissionLL) &&
         (source <  ArmFault::PermissionLL + 4));
}

ExceptionClass
PrefetchAbort::ec(ThreadContext *tc) const
{
    if (to64) {
        // AArch64
        if (toEL == fromEL)
            return EC_PREFETCH_ABORT_CURR_EL;
        else
            return EC_PREFETCH_ABORT_LOWER_EL;
    } else {
        // AArch32
        // Abort faults have different EC codes depending on whether
        // the fault originated within HYP mode, or not. So override
        // the method and add the extra adjustment of the EC value.
        ExceptionClass ec = ArmFaultVals<PrefetchAbort>::vals.ec;

        CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
        if (spsr.mode == MODE_HYP) {
            ec = ((ExceptionClass) (((uint32_t) ec) + 1));
        }
        return ec;
    }
}

bool
PrefetchAbort::routeToMonitor(ThreadContext *tc) const
{
    SCR scr = 0;
    if (from64)
        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    else
        scr = tc->readMiscRegNoEffect(MISCREG_SCR);

    return scr.ea && !isMMUFault();
}

bool
PrefetchAbort::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    HDCR hdcr = tc->readMiscRegNoEffect(MISCREG_HDCR);

    // if in Hyp mode then stay in Hyp mode
    toHyp = scr.ns && (cpsr.mode == MODE_HYP);
    // otherwise, check whether to take to Hyp mode through Hyp Trap vector
    toHyp |= (stage2 ||
              ((source == DebugEvent) && hdcr.tde && (cpsr.mode != MODE_HYP)) ||
              ((source == SynchronousExternalAbort) && hcr.tge &&
               (cpsr.mode == MODE_USER))
             ) && !inSecureState(tc);
    return toHyp;
}

ExceptionClass
DataAbort::ec(ThreadContext *tc) const
{
    if (to64) {
        // AArch64
        if (source == ArmFault::AsynchronousExternalAbort) {
            panic("Asynchronous External Abort should be handled with "
                  "SystemErrors (SErrors)!");
        }
        if (toEL == fromEL)
            return EC_DATA_ABORT_CURR_EL;
        else
            return EC_DATA_ABORT_LOWER_EL;
    } else {
        // AArch32
        // Abort faults have different EC codes depending on whether
        // the fault originated within HYP mode, or not. So override
        // the method and add the extra adjustment of the EC value.
        ExceptionClass ec = ArmFaultVals<DataAbort>::vals.ec;

        CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
        if (spsr.mode == MODE_HYP) {
            ec = ((ExceptionClass) (((uint32_t) ec) + 1));
        }
        return ec;
    }
}

bool
DataAbort::routeToMonitor(ThreadContext *tc) const
{
    SCR scr = 0;
    if (from64)
        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    else
        scr = tc->readMiscRegNoEffect(MISCREG_SCR);

    return scr.ea && !isMMUFault();
}

bool
DataAbort::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    HDCR hdcr = tc->readMiscRegNoEffect(MISCREG_HDCR);

    // if in Hyp mode then stay in Hyp mode
    toHyp = scr.ns && (cpsr.mode == MODE_HYP);
    // otherwise, check whether to take to Hyp mode through Hyp Trap vector
    toHyp |= (stage2 ||
              ((cpsr.mode != MODE_HYP) &&
               (((source == AsynchronousExternalAbort) && hcr.amo) ||
                ((source == DebugEvent) && hdcr.tde))) ||
              ((cpsr.mode == MODE_USER) && hcr.tge &&
               ((source == AlignmentFault) ||
                (source == SynchronousExternalAbort)))
             ) && !inSecureState(tc);
    return toHyp;
}

uint32_t
DataAbort::iss() const
{
    uint32_t val;

    // Add on the data abort specific fields to the generic abort ISS value
    val = AbortFault<DataAbort>::iss();
    // ISS is valid if not caused by a stage 1 page table walk, and when taken
    // to AArch64 only when directed to EL2
    if (!s1ptw && (!to64 || toEL == EL2)) {
        val |= isv << 24;
        if (isv) {
            val |= sas << 22;
            val |= sse << 21;
            val |= srt << 16;
            // AArch64 only. These assignments are safe on AArch32 as well
            // because these vars are initialized to false
            val |= sf << 15;
            val |= ar << 14;
        }
    }
    return (val);
}

void
DataAbort::annotate(AnnotationIDs id, uint64_t val)
{
    AbortFault<DataAbort>::annotate(id, val);
    switch (id)
    {
      case SAS:
        isv = true;
        sas = val;
        break;
      case SSE:
        isv = true;
        sse = val;
        break;
      case SRT:
        isv = true;
        srt = val;
        break;
      case SF:
        isv = true;
        sf = val;
        break;
      case AR:
        isv = true;
        ar = val;
        break;
      // Just ignore unknown IDs
      default:
        break;
    }
}
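
// Taken together, AbortFault<T>::iss() and DataAbort::iss() above build the
// data abort ISS as: fault status code in bits [5:0], WnR (write) in [6],
// S1PTW in [7], and, when the register-transfer syndrome is valid, AR in
// [14], SF in [15], SRT in [20:16], SSE in [21], SAS in [23:22] and ISV in
// [24]. The annotate() hook above is how the translating instruction
// supplies those register-transfer fields.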
void
VirtualDataAbort::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    AbortFault<VirtualDataAbort>::invoke(tc, inst);
    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
    hcr.va = 0;
    tc->setMiscRegNoEffect(MISCREG_HCR, hcr);
}

bool
Interrupt::routeToMonitor(ThreadContext *tc) const
{
    assert(ArmSystem::haveSecurity(tc));
    SCR scr = 0;
    if (from64)
        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    else
        scr = tc->readMiscRegNoEffect(MISCREG_SCR);
    return scr.irq;
}

bool
Interrupt::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    // Determine whether IRQs are routed to Hyp mode.
    toHyp = (!scr.irq && hcr.imo && !inSecureState(tc)) ||
            (cpsr.mode == MODE_HYP);
    return toHyp;
}

bool
Interrupt::abortDisable(ThreadContext *tc)
{
    if (ArmSystem::haveSecurity(tc)) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        return (!scr.ns || scr.aw);
    }
    return true;
}

VirtualInterrupt::VirtualInterrupt()
{}

bool
FastInterrupt::routeToMonitor(ThreadContext *tc) const
{
    assert(ArmSystem::haveSecurity(tc));
    SCR scr = 0;
    if (from64)
        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    else
        scr = tc->readMiscRegNoEffect(MISCREG_SCR);
    return scr.fiq;
}

bool
FastInterrupt::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    // Determine whether FIQs are routed to Hyp mode.
    toHyp = (!scr.fiq && hcr.fmo && !inSecureState(tc)) ||
            (cpsr.mode == MODE_HYP);
    return toHyp;
}

bool
FastInterrupt::abortDisable(ThreadContext *tc)
{
    if (ArmSystem::haveSecurity(tc)) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        return (!scr.ns || scr.aw);
    }
    return true;
}

bool
FastInterrupt::fiqDisable(ThreadContext *tc)
{
    if (ArmSystem::haveVirtualization(tc)) {
        return true;
    } else if (ArmSystem::haveSecurity(tc)) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        return (!scr.ns || scr.fw);
    }
    return true;
}

VirtualFastInterrupt::VirtualFastInterrupt()
{}

void
PCAlignmentFault::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    ArmFaultVals<PCAlignmentFault>::invoke(tc, inst);
    assert(from64);
    // Set the FAR
    tc->setMiscReg(getFaultAddrReg64(), faultPC);
}

SPAlignmentFault::SPAlignmentFault()
{}

SystemError::SystemError()
{}

void
SystemError::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_ABT, 0);
    ArmFault::invoke(tc, inst);
}

bool
SystemError::routeToMonitor(ThreadContext *tc) const
{
    assert(ArmSystem::haveSecurity(tc));
    assert(from64);
    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    return scr.ea;
}

bool
SystemError::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;
    assert(from64);

    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);

    toHyp = (!scr.ea && hcr.amo && !inSecureState(tc)) ||
            (!scr.ea && !scr.rw && !hcr.amo && !inSecureState(tc));
    return toHyp;
}


SoftwareBreakpoint::SoftwareBreakpoint(ExtMachInst _mach_inst, uint32_t _iss)
    : ArmFaultVals<SoftwareBreakpoint>(_mach_inst, _iss)
{}

bool
SoftwareBreakpoint::routeToHyp(ThreadContext *tc) const
{
    assert(from64);

    const bool have_el2 = ArmSystem::haveVirtualization(tc);

    const HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
    const HDCR mdcr = tc->readMiscRegNoEffect(MISCREG_MDCR_EL2);

    return have_el2 && !inSecureState(tc) && fromEL <= EL1 &&
        (hcr.tge || mdcr.tde);
}

void
ArmSev::invoke(ThreadContext *tc, const StaticInstPtr &inst) {
    DPRINTF(Faults, "Invoking ArmSev Fault\n");
    if (!FullSystem)
        return;

    // Set sev_mailbox to 1, clear the pending interrupt from remote
    // SEV execution and let pipeline continue as pcState is still
    // valid.
    tc->setMiscReg(MISCREG_SEV_MAILBOX, 1);
    tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_SEV, 0);
}

// Instantiate all the templates to make the linker happy
template class ArmFaultVals<Reset>;
template class ArmFaultVals<UndefinedInstruction>;
template class ArmFaultVals<SupervisorCall>;
template class ArmFaultVals<SecureMonitorCall>;
template class ArmFaultVals<HypervisorCall>;
template class ArmFaultVals<PrefetchAbort>;
template class ArmFaultVals<DataAbort>;
template class ArmFaultVals<VirtualDataAbort>;
template class ArmFaultVals<HypervisorTrap>;
template class ArmFaultVals<Interrupt>;
template class ArmFaultVals<VirtualInterrupt>;
template class ArmFaultVals<FastInterrupt>;
template class ArmFaultVals<VirtualFastInterrupt>;
template class ArmFaultVals<SupervisorTrap>;
template class ArmFaultVals<SecureMonitorTrap>;
template class ArmFaultVals<PCAlignmentFault>;
template class ArmFaultVals<SPAlignmentFault>;
template class ArmFaultVals<SystemError>;
template class ArmFaultVals<SoftwareBreakpoint>;
template class ArmFaultVals<ArmSev>;
template class AbortFault<PrefetchAbort>;
template class AbortFault<DataAbort>;
template class AbortFault<VirtualDataAbort>;


IllegalInstSetStateFault::IllegalInstSetStateFault()
{}


} // namespace ArmISA