1/*
2 * Copyright (c) 2010, 2012-2014, 2016-2019 ARM Limited
3 * All rights reserved
4 *
5 * The license below extends only to copyright in the software and shall
6 * not be construed as granting a license to any other intellectual
7 * property including but not limited to intellectual property relating
8 * to a hardware implementation of the functionality of the software
9 * licensed hereunder.  You may use the software subject to the license
10 * terms below provided that you ensure that this notice is replicated
11 * unmodified and in its entirety in all distributions of the software,
12 * modified or unmodified, in source code or in binary form.
13 *
14 * Copyright (c) 2003-2005 The Regents of The University of Michigan
15 * Copyright (c) 2007-2008 The Florida State University
16 * All rights reserved.
17 *
18 * Redistribution and use in source and binary forms, with or without
19 * modification, are permitted provided that the following conditions are
20 * met: redistributions of source code must retain the above copyright
21 * notice, this list of conditions and the following disclaimer;
22 * redistributions in binary form must reproduce the above copyright
23 * notice, this list of conditions and the following disclaimer in the
24 * documentation and/or other materials provided with the distribution;
25 * neither the name of the copyright holders nor the names of its
26 * contributors may be used to endorse or promote products derived from
27 * this software without specific prior written permission.
28 *
29 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
30 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
31 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
32 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
33 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
34 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
35 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
36 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
37 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
38 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
39 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
40 *
41 * Authors: Ali Saidi
42 *          Gabe Black
43 *          Giacomo Gabrielli
44 *          Thomas Grocutt
45 */
46
47#include "arch/arm/faults.hh"
48
49#include "arch/arm/insts/static_inst.hh"
50#include "arch/arm/system.hh"
51#include "arch/arm/utility.hh"
52#include "base/compiler.hh"
53#include "base/trace.hh"
54#include "cpu/base.hh"
55#include "cpu/thread_context.hh"
56#include "debug/Faults.hh"
57#include "sim/full_system.hh"
58
59namespace ArmISA
60{
61
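// The following three tables translate gem5's internal ArmFault::FaultSource
// values into the architectural fault status encodings: the short-descriptor
// FSR.FS code, the long-descriptor (LPAE) FSR.STATUS code, and the AArch64
// ESR_ELx fault status (IFSC/DFSC) code, respectively. Entries marked
// INVALID (0xff) have no encoding in that format. For example, a level 1
// translation fault is reported as 0x05 in all three formats.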
62uint8_t ArmFault::shortDescFaultSources[] = {
63    0x01,  // AlignmentFault
64    0x04,  // InstructionCacheMaintenance
65    0xff,  // SynchExtAbtOnTranslTableWalkL0 (INVALID)
66    0x0c,  // SynchExtAbtOnTranslTableWalkL1
67    0x0e,  // SynchExtAbtOnTranslTableWalkL2
68    0xff,  // SynchExtAbtOnTranslTableWalkL3 (INVALID)
69    0xff,  // SynchPtyErrOnTranslTableWalkL0 (INVALID)
70    0x1c,  // SynchPtyErrOnTranslTableWalkL1
71    0x1e,  // SynchPtyErrOnTranslTableWalkL2
72    0xff,  // SynchPtyErrOnTranslTableWalkL3 (INVALID)
73    0xff,  // TranslationL0 (INVALID)
74    0x05,  // TranslationL1
75    0x07,  // TranslationL2
76    0xff,  // TranslationL3 (INVALID)
77    0xff,  // AccessFlagL0 (INVALID)
78    0x03,  // AccessFlagL1
79    0x06,  // AccessFlagL2
80    0xff,  // AccessFlagL3 (INVALID)
81    0xff,  // DomainL0 (INVALID)
82    0x09,  // DomainL1
83    0x0b,  // DomainL2
84    0xff,  // DomainL3 (INVALID)
85    0xff,  // PermissionL0 (INVALID)
86    0x0d,  // PermissionL1
87    0x0f,  // PermissionL2
88    0xff,  // PermissionL3 (INVALID)
89    0x02,  // DebugEvent
90    0x08,  // SynchronousExternalAbort
91    0x10,  // TLBConflictAbort
92    0x19,  // SynchPtyErrOnMemoryAccess
93    0x16,  // AsynchronousExternalAbort
94    0x18,  // AsynchPtyErrOnMemoryAccess
95    0xff,  // AddressSizeL0 (INVALID)
96    0xff,  // AddressSizeL1 (INVALID)
97    0xff,  // AddressSizeL2 (INVALID)
98    0xff,  // AddressSizeL3 (INVALID)
99    0x40,  // PrefetchTLBMiss
100    0x80   // PrefetchUncacheable
101};
102
103static_assert(sizeof(ArmFault::shortDescFaultSources) ==
104              ArmFault::NumFaultSources,
105              "Invalid size of ArmFault::shortDescFaultSources[]");
106
107uint8_t ArmFault::longDescFaultSources[] = {
108    0x21,  // AlignmentFault
109    0xff,  // InstructionCacheMaintenance (INVALID)
110    0xff,  // SynchExtAbtOnTranslTableWalkL0 (INVALID)
111    0x15,  // SynchExtAbtOnTranslTableWalkL1
112    0x16,  // SynchExtAbtOnTranslTableWalkL2
113    0x17,  // SynchExtAbtOnTranslTableWalkL3
114    0xff,  // SynchPtyErrOnTranslTableWalkL0 (INVALID)
115    0x1d,  // SynchPtyErrOnTranslTableWalkL1
116    0x1e,  // SynchPtyErrOnTranslTableWalkL2
117    0x1f,  // SynchPtyErrOnTranslTableWalkL3
118    0xff,  // TranslationL0 (INVALID)
119    0x05,  // TranslationL1
120    0x06,  // TranslationL2
121    0x07,  // TranslationL3
122    0xff,  // AccessFlagL0 (INVALID)
123    0x09,  // AccessFlagL1
124    0x0a,  // AccessFlagL2
125    0x0b,  // AccessFlagL3
126    0xff,  // DomainL0 (INVALID)
127    0x3d,  // DomainL1
128    0x3e,  // DomainL2
129    0xff,  // DomainL3 (RESERVED)
130    0xff,  // PermissionL0 (INVALID)
131    0x0d,  // PermissionL1
132    0x0e,  // PermissionL2
133    0x0f,  // PermissionL3
134    0x22,  // DebugEvent
135    0x10,  // SynchronousExternalAbort
136    0x30,  // TLBConflictAbort
137    0x18,  // SynchPtyErrOnMemoryAccess
138    0x11,  // AsynchronousExternalAbort
139    0x19,  // AsynchPtyErrOnMemoryAccess
140    0xff,  // AddressSizeL0 (INVALID)
141    0xff,  // AddressSizeL1 (INVALID)
142    0xff,  // AddressSizeL2 (INVALID)
143    0xff,  // AddressSizeL3 (INVALID)
144    0x40,  // PrefetchTLBMiss
145    0x80   // PrefetchUncacheable
146};
147
148static_assert(sizeof(ArmFault::longDescFaultSources) ==
149              ArmFault::NumFaultSources,
150              "Invalid size of ArmFault::longDescFaultSources[]");
151
152uint8_t ArmFault::aarch64FaultSources[] = {
153    0x21,  // AlignmentFault
154    0xff,  // InstructionCacheMaintenance (INVALID)
155    0x14,  // SynchExtAbtOnTranslTableWalkL0
156    0x15,  // SynchExtAbtOnTranslTableWalkL1
157    0x16,  // SynchExtAbtOnTranslTableWalkL2
158    0x17,  // SynchExtAbtOnTranslTableWalkL3
159    0x1c,  // SynchPtyErrOnTranslTableWalkL0
160    0x1d,  // SynchPtyErrOnTranslTableWalkL1
161    0x1e,  // SynchPtyErrOnTranslTableWalkL2
162    0x1f,  // SynchPtyErrOnTranslTableWalkL3
163    0x04,  // TranslationL0
164    0x05,  // TranslationL1
165    0x06,  // TranslationL2
166    0x07,  // TranslationL3
167    0x08,  // AccessFlagL0
168    0x09,  // AccessFlagL1
169    0x0a,  // AccessFlagL2
170    0x0b,  // AccessFlagL3
171    // @todo: Section & Page Domain Fault in AArch64?
172    0xff,  // DomainL0 (INVALID)
173    0xff,  // DomainL1 (INVALID)
174    0xff,  // DomainL2 (INVALID)
175    0xff,  // DomainL3 (INVALID)
176    0x0c,  // PermissionL0
177    0x0d,  // PermissionL1
178    0x0e,  // PermissionL2
179    0x0f,  // PermissionL3
180    0x22,  // DebugEvent
181    0x10,  // SynchronousExternalAbort
182    0x30,  // TLBConflictAbort
183    0x18,  // SynchPtyErrOnMemoryAccess
184    0xff,  // AsynchronousExternalAbort (INVALID)
185    0xff,  // AsynchPtyErrOnMemoryAccess (INVALID)
186    0x00,  // AddressSizeL0
187    0x01,  // AddressSizeL1
188    0x02,  // AddressSizeL2
189    0x03,  // AddressSizeL3
190    0x40,  // PrefetchTLBMiss
191    0x80   // PrefetchUncacheable
192};
193
194static_assert(sizeof(ArmFault::aarch64FaultSources) ==
195              ArmFault::NumFaultSources,
196              "Invalid size of ArmFault::aarch64FaultSources[]");
197
198// Fields: name, offset, cur{ELT,ELH}Offset, lowerEL{64,32}Offset, next mode,
199//         {ARM, Thumb, ARM_ELR, Thumb_ELR} PC offset, hyp trap,
200//         {A, F} disable, class, stat
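// For example, the UndefinedInstruction entry below reads: AArch32 vector
// offset 0x004; AArch64 vector offsets 0x000 (current EL with SP_EL0), 0x200
// (current EL with SP_ELx), 0x400 (lower EL using AArch64) and 0x600 (lower
// EL using AArch32); next mode MODE_UNDEFINED; PC offsets of 4 (ARM) and 2
// (Thumb) with ELR offsets of 0; trappable to Hyp; A and F interrupts not
// disabled; exception class EC_UNKNOWN.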
201template<> ArmFault::FaultVals ArmFaultVals<Reset>::vals(
202    // Some dummy values (the reset vector has an IMPLEMENTATION DEFINED
203    // location in AArch64)
204    "Reset",                 0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC,
205    0, 0, 0, 0, false, true,  true,  EC_UNKNOWN
206);
207template<> ArmFault::FaultVals ArmFaultVals<UndefinedInstruction>::vals(
208    "Undefined Instruction", 0x004, 0x000, 0x200, 0x400, 0x600, MODE_UNDEFINED,
209    4, 2, 0, 0, true,  false, false, EC_UNKNOWN
210);
211template<> ArmFault::FaultVals ArmFaultVals<SupervisorCall>::vals(
212    "Supervisor Call",       0x008, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
213    4, 2, 4, 2, true,  false, false, EC_SVC_TO_HYP
214);
215template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorCall>::vals(
216    "Secure Monitor Call",   0x008, 0x000, 0x200, 0x400, 0x600, MODE_MON,
217    4, 4, 4, 4, false, true,  true,  EC_SMC_TO_HYP
218);
219template<> ArmFault::FaultVals ArmFaultVals<HypervisorCall>::vals(
220    "Hypervisor Call",       0x008, 0x000, 0x200, 0x400, 0x600, MODE_HYP,
221    4, 4, 4, 4, true,  false, false, EC_HVC
222);
223template<> ArmFault::FaultVals ArmFaultVals<PrefetchAbort>::vals(
224    "Prefetch Abort",        0x00C, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
225    4, 4, 0, 0, true,  true,  false, EC_PREFETCH_ABORT_TO_HYP
226);
227template<> ArmFault::FaultVals ArmFaultVals<DataAbort>::vals(
228    "Data Abort",            0x010, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
229    8, 8, 0, 0, true,  true,  false, EC_DATA_ABORT_TO_HYP
230);
231template<> ArmFault::FaultVals ArmFaultVals<VirtualDataAbort>::vals(
232    "Virtual Data Abort",    0x010, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
233    8, 8, 0, 0, true,  true,  false, EC_INVALID
234);
235template<> ArmFault::FaultVals ArmFaultVals<HypervisorTrap>::vals(
236    // @todo: double check these values
237    "Hypervisor Trap",       0x014, 0x000, 0x200, 0x400, 0x600, MODE_HYP,
238    0, 0, 0, 0, false, false, false, EC_UNKNOWN
239);
240template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorTrap>::vals(
241    "Secure Monitor Trap",   0x004, 0x000, 0x200, 0x400, 0x600, MODE_MON,
242    4, 2, 0, 0, false, false, false, EC_UNKNOWN
243);
244template<> ArmFault::FaultVals ArmFaultVals<Interrupt>::vals(
245    "IRQ",                   0x018, 0x080, 0x280, 0x480, 0x680, MODE_IRQ,
246    4, 4, 0, 0, false, true,  false, EC_UNKNOWN
247);
248template<> ArmFault::FaultVals ArmFaultVals<VirtualInterrupt>::vals(
249    "Virtual IRQ",           0x018, 0x080, 0x280, 0x480, 0x680, MODE_IRQ,
250    4, 4, 0, 0, false, true,  false, EC_INVALID
251);
252template<> ArmFault::FaultVals ArmFaultVals<FastInterrupt>::vals(
253    "FIQ",                   0x01C, 0x100, 0x300, 0x500, 0x700, MODE_FIQ,
254    4, 4, 0, 0, false, true,  true,  EC_UNKNOWN
255);
256template<> ArmFault::FaultVals ArmFaultVals<VirtualFastInterrupt>::vals(
257    "Virtual FIQ",           0x01C, 0x100, 0x300, 0x500, 0x700, MODE_FIQ,
258    4, 4, 0, 0, false, true,  true,  EC_INVALID
259);
260template<> ArmFault::FaultVals ArmFaultVals<IllegalInstSetStateFault>::vals(
261    "Illegal Inst Set State Fault",   0x004, 0x000, 0x200, 0x400, 0x600, MODE_UNDEFINED,
262    4, 2, 0, 0, true, false, false, EC_ILLEGAL_INST
263);
264template<> ArmFault::FaultVals ArmFaultVals<SupervisorTrap>::vals(
265    // Some dummy values (SupervisorTrap is AArch64-only)
266    "Supervisor Trap",   0x014, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
267    0, 0, 0, 0, false, false, false, EC_UNKNOWN
268);
269template<> ArmFault::FaultVals ArmFaultVals<PCAlignmentFault>::vals(
270    // Some dummy values (PCAlignmentFault is AArch64-only)
271    "PC Alignment Fault",   0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
272    0, 0, 0, 0, true, false, false, EC_PC_ALIGNMENT
273);
274template<> ArmFault::FaultVals ArmFaultVals<SPAlignmentFault>::vals(
275    // Some dummy values (SPAlignmentFault is AArch64-only)
276    "SP Alignment Fault",   0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
277    0, 0, 0, 0, true, false, false, EC_STACK_PTR_ALIGNMENT
278);
279template<> ArmFault::FaultVals ArmFaultVals<SystemError>::vals(
280    // Some dummy values (SError is AArch64-only)
281    "SError",                0x000, 0x180, 0x380, 0x580, 0x780, MODE_SVC,
282    0, 0, 0, 0, false, true,  true,  EC_SERROR
283);
284template<> ArmFault::FaultVals ArmFaultVals<SoftwareBreakpoint>::vals(
285    // Some dummy values (SoftwareBreakpoint is AArch64-only)
286    "Software Breakpoint",   0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
287    0, 0, 0, 0, true, false, false,  EC_SOFTWARE_BREAKPOINT
288);
289template<> ArmFault::FaultVals ArmFaultVals<ArmSev>::vals(
290    // Some dummy values
291    "ArmSev Flush",          0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC,
292    0, 0, 0, 0, false, true,  true,  EC_UNKNOWN
293);
294
295Addr
296ArmFault::getVector(ThreadContext *tc)
297{
298    Addr base;
299
300    // Check for invalid modes
301    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
302    assert(ArmSystem::haveSecurity(tc) || cpsr.mode != MODE_MON);
303    assert(ArmSystem::haveVirtualization(tc) || cpsr.mode != MODE_HYP);
304
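    // The AArch32 vector base is MVBAR in Monitor mode and HVBAR in Hyp
    // mode; otherwise it is the high vectors (0xFFFF0000) when SCTLR.V is
    // set, VBAR when the Security Extensions are implemented, and
    // 0x00000000 otherwise.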
305    switch (cpsr.mode)
306    {
307      case MODE_MON:
308        base = tc->readMiscReg(MISCREG_MVBAR);
309        break;
310      case MODE_HYP:
311        base = tc->readMiscReg(MISCREG_HVBAR);
312        break;
313      default:
314        SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR);
315        if (sctlr.v) {
316            base = HighVecs;
317        } else {
318            base = ArmSystem::haveSecurity(tc) ?
319                tc->readMiscReg(MISCREG_VBAR) : 0;
320        }
321        break;
322    }
323
324    return base + offset(tc);
325}
326
327Addr
328ArmFault::getVector64(ThreadContext *tc)
329{
330    Addr vbar;
331    switch (toEL) {
332      case EL3:
333        assert(ArmSystem::haveSecurity(tc));
334        vbar = tc->readMiscReg(MISCREG_VBAR_EL3);
335        break;
336      case EL2:
337        assert(ArmSystem::haveVirtualization(tc));
338        vbar = tc->readMiscReg(MISCREG_VBAR_EL2);
339        break;
340      case EL1:
341        vbar = tc->readMiscReg(MISCREG_VBAR_EL1);
342        break;
343      default:
344        panic("Invalid target exception level");
345        break;
346    }
347    return vbar + offset64(tc);
348}
349
350MiscRegIndex
351ArmFault::getSyndromeReg64() const
352{
353    switch (toEL) {
354      case EL1:
355        return MISCREG_ESR_EL1;
356      case EL2:
357        return MISCREG_ESR_EL2;
358      case EL3:
359        return MISCREG_ESR_EL3;
360      default:
361        panic("Invalid exception level");
362        break;
363    }
364}
365
366MiscRegIndex
367ArmFault::getFaultAddrReg64() const
368{
369    switch (toEL) {
370      case EL1:
371        return MISCREG_FAR_EL1;
372      case EL2:
373        return MISCREG_FAR_EL2;
374      case EL3:
375        return MISCREG_FAR_EL3;
376      default:
377        panic("Invalid exception level");
378        break;
379    }
380}
381
382void
383ArmFault::setSyndrome(ThreadContext *tc, MiscRegIndex syndrome_reg)
384{
385    uint32_t value;
386    uint32_t exc_class = (uint32_t) ec(tc);
387    uint32_t issVal = iss();
388
389    assert(!from64 || ArmSystem::highestELIs64(tc));
390
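    // The syndrome (HSR / ESR_ELx) is assembled as: EC in bits [31:26], the
    // instruction length (IL) bit in [25] and the ISS in [24:0]. For the
    // AArch32 exception classes handled below, the CV bit and condition code
    // occupy ISS[24:20], leaving ISS[19:0] for the value returned by iss().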
391    value = exc_class << 26;
392
393    // HSR.IL not valid for Prefetch Aborts (0x20, 0x21) and Data Aborts (0x24,
394    // 0x25) for which the ISS information is not valid (ARMv7).
395    // @todo: ARMv8 revises AArch32 functionality: when HSR.IL is not
396    // valid it is treated as RES1.
397    if (to64) {
398        value |= 1 << 25;
399    } else if ((bits(exc_class, 5, 3) != 4) ||
400               (bits(exc_class, 2) && bits(issVal, 24))) {
401        if (!machInst.thumb || machInst.bigThumb)
402            value |= 1 << 25;
403    }
404    // Condition code is only valid when EC[5:4] == 0 and EC[3:0] != 0
405    if (!from64 && ((bits(exc_class, 5, 4) == 0) &&
406                    (bits(exc_class, 3, 0) != 0))) {
407        if (!machInst.thumb) {
408            uint32_t      cond;
409            ConditionCode condCode = (ConditionCode) (uint32_t) machInst.condCode;
410            // If it's an unconditional instruction, report it with a cond code
411            // of 0xE, i.e. the always (unconditional) code
412            cond  = (condCode == COND_UC) ? COND_AL : condCode;
413            value |= cond << 20;
414            value |= 1    << 24;
415        }
416        value |= bits(issVal, 19, 0);
417    } else {
418        value |= issVal;
419    }
420    tc->setMiscReg(syndrome_reg, value);
421}
422
423void
424ArmFault::update(ThreadContext *tc)
425{
426    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
427
428    // Determine source exception level and mode
429    fromMode = (OperatingMode) (uint8_t) cpsr.mode;
430    fromEL = opModeToEL(fromMode);
431    if (opModeIs64(fromMode))
432        from64 = true;
433
434    // Determine target exception level (aarch64) or target execution
435    // mode (aarch32).
436    if (ArmSystem::haveSecurity(tc) && routeToMonitor(tc)) {
437        toMode = MODE_MON;
438        toEL = EL3;
439    } else if (ArmSystem::haveVirtualization(tc) && routeToHyp(tc)) {
440        toMode = MODE_HYP;
441        toEL = EL2;
442        hypRouted = true;
443    } else {
444        toMode = nextMode();
445        toEL = opModeToEL(toMode);
446    }
447
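    // An exception is never taken to an exception level lower than the one
    // it was taken from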
448    if (fromEL > toEL)
449        toEL = fromEL;
450
451    // Check the Set Privileged Access Never (SPAN) bit, if PAN is supported
452    AA64MMFR1 mmfr1 = tc->readMiscReg(MISCREG_ID_AA64MMFR1_EL1);
453    if (mmfr1.pan) {
454        if (toEL == EL1) {
455            const SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR_EL1);
456            span = !sctlr.span;
457        }
458
459        const HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
460        if (toEL == EL2 && hcr.e2h && hcr.tge) {
461            const SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR_EL2);
462            span = !sctlr.span;
463        }
464    }
465
466    to64 = ELIs64(tc, toEL);
467
468    // The fault-specific information has been updated; it is
469    // now possible to use it inside the fault.
470    faultUpdated = true;
471}
472
473void
474ArmFault::invoke(ThreadContext *tc, const StaticInstPtr &inst)
475{
476
477    // Update the fault state information, such as the starting mode (AArch32)
478    // or EL (AArch64) and the ending mode or EL.
479    // The update function also determines whether the fault must be
480    // handled in AArch64 state (to64).
481    update(tc);
482
483    if (to64) {
484        // Invoke exception handler in AArch64 state
485        invoke64(tc, inst);
486        return;
487    }
488
489    // ARMv7 (ARM ARM issue C B1.9)
490
491    bool have_security       = ArmSystem::haveSecurity(tc);
492
493    FaultBase::invoke(tc);
494    if (!FullSystem)
495        return;
496    countStat()++;
497
498    SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR);
499    SCR scr = tc->readMiscReg(MISCREG_SCR);
500    CPSR saved_cpsr = tc->readMiscReg(MISCREG_CPSR);
501    saved_cpsr.nz = tc->readCCReg(CCREG_NZ);
502    saved_cpsr.c = tc->readCCReg(CCREG_C);
503    saved_cpsr.v = tc->readCCReg(CCREG_V);
504    saved_cpsr.ge = tc->readCCReg(CCREG_GE);
505
506    Addr curPc M5_VAR_USED = tc->pcState().pc();
507    ITSTATE it = tc->pcState().itstate();
508    saved_cpsr.it2 = it.top6;
509    saved_cpsr.it1 = it.bottom2;
510
511    // if we have a valid instruction then use it to annotate this fault with
512    // extra information. This is used to generate the correct fault syndrome
513    // information
514    ArmStaticInst *arm_inst M5_VAR_USED = instrAnnotate(inst);
515
516    // Ensure Secure state if initially in Monitor mode
517    if (have_security && saved_cpsr.mode == MODE_MON) {
518        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
519        if (scr.ns) {
520            scr.ns = 0;
521            tc->setMiscRegNoEffect(MISCREG_SCR, scr);
522        }
523    }
524
525    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
526    cpsr.mode = toMode;
527
528    // some bits are set differently if we have been routed to hyp mode
529    if (cpsr.mode == MODE_HYP) {
530        SCTLR hsctlr = tc->readMiscReg(MISCREG_HSCTLR);
531        cpsr.t = hsctlr.te;
532        cpsr.e = hsctlr.ee;
533        if (!scr.ea)  {cpsr.a = 1;}
534        if (!scr.fiq) {cpsr.f = 1;}
535        if (!scr.irq) {cpsr.i = 1;}
536    } else if (cpsr.mode == MODE_MON) {
537        // Special case handling when entering monitor mode
538        cpsr.t = sctlr.te;
539        cpsr.e = sctlr.ee;
540        cpsr.a = 1;
541        cpsr.f = 1;
542        cpsr.i = 1;
543    } else {
544        cpsr.t = sctlr.te;
545        cpsr.e = sctlr.ee;
546
547        // The *Disable functions are virtual and different per fault
548        cpsr.a = cpsr.a | abortDisable(tc);
549        cpsr.f = cpsr.f | fiqDisable(tc);
550        cpsr.i = 1;
551    }
552    cpsr.it1 = cpsr.it2 = 0;
553    cpsr.j = 0;
554    cpsr.pan = span ? 1 : saved_cpsr.pan;
555    tc->setMiscReg(MISCREG_CPSR, cpsr);
556
557    // Make sure the SEV mailbox is always set to one
558    tc->setMiscReg(MISCREG_SEV_MAILBOX, 1);
559
560    // Clear the exclusive monitor
561    tc->setMiscReg(MISCREG_LOCKFLAG, 0);
562
563    if (cpsr.mode == MODE_HYP) {
564        tc->setMiscReg(MISCREG_ELR_HYP, curPc +
565                (saved_cpsr.t ? thumbPcOffset(true)  : armPcOffset(true)));
566    } else {
567        tc->setIntReg(INTREG_LR, curPc +
568                (saved_cpsr.t ? thumbPcOffset(false) : armPcOffset(false)));
569    }
570
571    switch (cpsr.mode) {
572      case MODE_FIQ:
573        tc->setMiscReg(MISCREG_SPSR_FIQ, saved_cpsr);
574        break;
575      case MODE_IRQ:
576        tc->setMiscReg(MISCREG_SPSR_IRQ, saved_cpsr);
577        break;
578      case MODE_SVC:
579        tc->setMiscReg(MISCREG_SPSR_SVC, saved_cpsr);
580        break;
581      case MODE_MON:
582        assert(have_security);
583        tc->setMiscReg(MISCREG_SPSR_MON, saved_cpsr);
584        break;
585      case MODE_ABORT:
586        tc->setMiscReg(MISCREG_SPSR_ABT, saved_cpsr);
587        break;
588      case MODE_UNDEFINED:
589        tc->setMiscReg(MISCREG_SPSR_UND, saved_cpsr);
590        if (ec(tc) != EC_UNKNOWN)
591            setSyndrome(tc, MISCREG_HSR);
592        break;
593      case MODE_HYP:
594        assert(ArmSystem::haveVirtualization(tc));
595        tc->setMiscReg(MISCREG_SPSR_HYP, saved_cpsr);
596        setSyndrome(tc, MISCREG_HSR);
597        break;
598      default:
599        panic("unknown Mode\n");
600    }
601
602    Addr newPc = getVector(tc);
603    DPRINTF(Faults, "Invoking Fault:%s cpsr:%#x PC:%#x lr:%#x newVec: %#x "
604            "%s\n", name(), cpsr, curPc, tc->readIntReg(INTREG_LR),
605            newPc, arm_inst ? csprintf("inst: %#x", arm_inst->encoding()) :
606            std::string());
607    PCState pc(newPc);
608    pc.thumb(cpsr.t);
609    pc.nextThumb(pc.thumb());
610    pc.jazelle(cpsr.j);
611    pc.nextJazelle(pc.jazelle());
612    pc.aarch64(!cpsr.width);
613    pc.nextAArch64(!cpsr.width);
614    pc.illegalExec(false);
615    tc->pcState(pc);
616}
617
618void
619ArmFault::invoke64(ThreadContext *tc, const StaticInstPtr &inst)
620{
621    // Determine actual misc. register indices for ELR_ELx and SPSR_ELx
622    MiscRegIndex elr_idx, spsr_idx;
623    switch (toEL) {
624      case EL1:
625        elr_idx = MISCREG_ELR_EL1;
626        spsr_idx = MISCREG_SPSR_EL1;
627        break;
628      case EL2:
629        assert(ArmSystem::haveVirtualization(tc));
630        elr_idx = MISCREG_ELR_EL2;
631        spsr_idx = MISCREG_SPSR_EL2;
632        break;
633      case EL3:
634        assert(ArmSystem::haveSecurity(tc));
635        elr_idx = MISCREG_ELR_EL3;
636        spsr_idx = MISCREG_SPSR_EL3;
637        break;
638      default:
639        panic("Invalid target exception level");
640        break;
641    }
642
643    // Save process state into SPSR_ELx
644    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
645    CPSR spsr = cpsr;
646    spsr.nz = tc->readCCReg(CCREG_NZ);
647    spsr.c = tc->readCCReg(CCREG_C);
648    spsr.v = tc->readCCReg(CCREG_V);
649    if (from64) {
650        // Force some bitfields to 0
651        spsr.q = 0;
652        spsr.it1 = 0;
653        spsr.j = 0;
654        spsr.ge = 0;
655        spsr.it2 = 0;
656        spsr.t = 0;
657    } else {
658        spsr.ge = tc->readCCReg(CCREG_GE);
659        ITSTATE it = tc->pcState().itstate();
660        spsr.it2 = it.top6;
661        spsr.it1 = it.bottom2;
662        // Force some bitfields to 0
663        spsr.ss = 0;
664    }
665    tc->setMiscReg(spsr_idx, spsr);
666
667    // Save preferred return address into ELR_ELx
668    Addr curr_pc = tc->pcState().pc();
669    Addr ret_addr = curr_pc;
670    if (from64)
671        ret_addr += armPcElrOffset();
672    else
673        ret_addr += spsr.t ? thumbPcElrOffset() : armPcElrOffset();
674    tc->setMiscReg(elr_idx, ret_addr);
675
676    Addr vec_address = getVector64(tc);
677
678    // Update process state
679    OperatingMode64 mode = 0;
680    mode.spX = 1;
681    mode.el = toEL;
682    mode.width = 0;
683    cpsr.mode = mode;
684    cpsr.daif = 0xf;
685    cpsr.il = 0;
686    cpsr.ss = 0;
687    cpsr.pan = span ? 1 : spsr.pan;
688    tc->setMiscReg(MISCREG_CPSR, cpsr);
689
690    // If we have a valid instruction then use it to annotate this fault with
691    // extra information. This is used to generate the correct fault syndrome
692    // information
693    ArmStaticInst *arm_inst M5_VAR_USED = instrAnnotate(inst);
694
695    // Set PC to start of exception handler
696    Addr new_pc = purifyTaggedAddr(vec_address, tc, toEL);
697    DPRINTF(Faults, "Invoking Fault (AArch64 target EL):%s cpsr:%#x PC:%#x "
698            "elr:%#x newVec: %#x %s\n", name(), cpsr, curr_pc, ret_addr,
699            new_pc, arm_inst ? csprintf("inst: %#x", arm_inst->encoding()) :
700            std::string());
701    PCState pc(new_pc);
702    pc.aarch64(!cpsr.width);
703    pc.nextAArch64(!cpsr.width);
704    pc.illegalExec(false);
705    tc->pcState(pc);
706
707    // Save exception syndrome
708    if ((nextMode() != MODE_IRQ) && (nextMode() != MODE_FIQ))
709        setSyndrome(tc, getSyndromeReg64());
710}
711
712ArmStaticInst *
713ArmFault::instrAnnotate(const StaticInstPtr &inst)
714{
715    if (inst) {
716        auto arm_inst = static_cast<ArmStaticInst *>(inst.get());
717        arm_inst->annotateFault(this);
718        return arm_inst;
719    } else {
720        return nullptr;
721    }
722}
723
724Addr
725Reset::getVector(ThreadContext *tc)
726{
727    Addr base;
728
729    // Check for invalid modes
730    CPSR M5_VAR_USED cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
731    assert(ArmSystem::haveSecurity(tc) || cpsr.mode != MODE_MON);
732    assert(ArmSystem::haveVirtualization(tc) || cpsr.mode != MODE_HYP);
733
734    // RVBAR is aliased (implemented as) MVBAR in gem5, since the two
735    // are mutually exclusive; there is no need to check here for
736    // which register to use since they hold the same value
737    base = tc->readMiscReg(MISCREG_MVBAR);
738
739    return base + offset(tc);
740}
741
742void
743Reset::invoke(ThreadContext *tc, const StaticInstPtr &inst)
744{
745    if (FullSystem) {
746        tc->getCpuPtr()->clearInterrupts(tc->threadId());
747        tc->clearArchRegs();
748    }
749    if (!ArmSystem::highestELIs64(tc)) {
750        ArmFault::invoke(tc, inst);
751        tc->setMiscReg(MISCREG_VMPIDR,
752                       getMPIDR(dynamic_cast<ArmSystem*>(tc->getSystemPtr()), tc));
753
754        // Unless we have SMC code to get us there, boot in HYP!
755        if (ArmSystem::haveVirtualization(tc) &&
756            !ArmSystem::haveSecurity(tc)) {
757            CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
758            cpsr.mode = MODE_HYP;
759            tc->setMiscReg(MISCREG_CPSR, cpsr);
760        }
761    } else {
762        // Advance the PC to the IMPLEMENTATION DEFINED reset value
763        PCState pc = ArmSystem::resetAddr(tc);
764        pc.aarch64(true);
765        pc.nextAArch64(true);
766        tc->pcState(pc);
767    }
768}
769
770void
771UndefinedInstruction::invoke(ThreadContext *tc, const StaticInstPtr &inst)
772{
773    if (FullSystem) {
774        ArmFault::invoke(tc, inst);
775        return;
776    }
777
778    // If the mnemonic isn't defined this has to be an unknown instruction.
779    assert(unknown || mnemonic != NULL);
780    auto arm_inst = static_cast<ArmStaticInst *>(inst.get());
781    if (disabled) {
782        panic("Attempted to execute disabled instruction "
783                "'%s' (inst 0x%08x)", mnemonic, arm_inst->encoding());
784    } else if (unknown) {
785        panic("Attempted to execute unknown instruction (inst 0x%08x)",
786              arm_inst->encoding());
787    } else {
788        panic("Attempted to execute unimplemented instruction "
789                "'%s' (inst 0x%08x)", mnemonic, arm_inst->encoding());
790    }
791}
792
793bool
794UndefinedInstruction::routeToHyp(ThreadContext *tc) const
795{
796    bool toHyp;
797
798    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
799    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
800    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
801
802    // if in Hyp mode then stay in Hyp mode
803    toHyp  = scr.ns && (cpsr.mode == MODE_HYP);
804    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
805    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.mode == MODE_USER);
806    return toHyp;
807}
808
809uint32_t
810UndefinedInstruction::iss() const
811{
812
813    // If UndefinedInstruction is routed to hypervisor, iss field is 0.
814    if (hypRouted) {
815        return 0;
816    }
817
818    if (overrideEc == EC_INVALID)
819        return issRaw;
820
821    uint32_t new_iss = 0;
822    uint32_t op0, op1, op2, CRn, CRm, Rt, dir;
823
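    // Build the ISS reported for a trapped system register access: Op0 in
    // ISS[21:20], Op2 in [19:17], Op1 in [16:14], CRn in [13:10], Rt in
    // [9:5], CRm in [4:1] and the direction (read/write) bit in [0].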
824    dir = bits(machInst, 21, 21);
825    op0 = bits(machInst, 20, 19);
826    op1 = bits(machInst, 18, 16);
827    CRn = bits(machInst, 15, 12);
828    CRm = bits(machInst, 11, 8);
829    op2 = bits(machInst, 7, 5);
830    Rt = bits(machInst, 4, 0);
831
832    new_iss = op0 << 20 | op2 << 17 | op1 << 14 | CRn << 10 |
833            Rt << 5 | CRm << 1 | dir;
834
835    return new_iss;
836}
837
838void
839SupervisorCall::invoke(ThreadContext *tc, const StaticInstPtr &inst)
840{
841    if (FullSystem) {
842        ArmFault::invoke(tc, inst);
843        return;
844    }
845
846    // As of now, there isn't a 32-bit Thumb version of this instruction.
847    assert(!machInst.bigThumb);
848    uint32_t callNum;
849    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
850    OperatingMode mode = (OperatingMode)(uint8_t)cpsr.mode;
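    // The syscall number is taken from x8 for AArch64 and r7 for AArch32,
    // matching the Linux calling convention assumed by SE-mode emulation.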
851    if (opModeIs64(mode))
852        callNum = tc->readIntReg(INTREG_X8);
853    else
854        callNum = tc->readIntReg(INTREG_R7);
855    Fault fault;
856    tc->syscall(callNum, &fault);
857
858    // Advance the PC since that won't happen automatically.
859    PCState pc = tc->pcState();
860    assert(inst);
861    inst->advancePC(pc);
862    tc->pcState(pc);
863}
864
865bool
866SupervisorCall::routeToHyp(ThreadContext *tc) const
867{
868    bool toHyp;
869
870    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
871    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
872    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
873
874    // if in Hyp mode then stay in Hyp mode
875    toHyp  = scr.ns && (cpsr.mode == MODE_HYP);
876    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
877    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.mode == MODE_USER);
878    return toHyp;
879}
880
881ExceptionClass
882SupervisorCall::ec(ThreadContext *tc) const
883{
884    return (overrideEc != EC_INVALID) ? overrideEc :
885        (from64 ? EC_SVC_64 : vals.ec);
886}
887
888uint32_t
889SupervisorCall::iss() const
890{
891    // Even if we have a 24-bit immediate from an A32 instruction, we only use
892    // the bottom 16 bits for the ISS value (this doesn't hurt for AArch64 SVC).
893    return issRaw & 0xFFFF;
894}
895
896uint32_t
897SecureMonitorCall::iss() const
898{
899    if (from64)
900        return bits(machInst, 20, 5);
901    return 0;
902}
903
904ExceptionClass
905UndefinedInstruction::ec(ThreadContext *tc) const
906{
907    // If UndefinedInstruction is routed to hypervisor,
908    // HSR.EC field is 0.
909    if (hypRouted)
910        return EC_UNKNOWN;
911    else
912        return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
913}
914
915
916HypervisorCall::HypervisorCall(ExtMachInst _machInst, uint32_t _imm) :
917        ArmFaultVals<HypervisorCall>(_machInst, _imm)
918{}
919
920ExceptionClass
921HypervisorCall::ec(ThreadContext *tc) const
922{
923    return from64 ? EC_HVC_64 : vals.ec;
924}
925
926ExceptionClass
927HypervisorTrap::ec(ThreadContext *tc) const
928{
929    return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
930}
931
932template<class T>
933FaultOffset
934ArmFaultVals<T>::offset(ThreadContext *tc)
935{
936    bool isHypTrap = false;
937
938    // Normally we just use the exception vector from the table at the top of
939    // this file. However, if this exception has caused a transition to Hyp
940    // mode, and it's an exception type that would only do this if it has been
941    // trapped, then we use the Hyp trap vector instead of the normal vector.
942    if (vals.hypTrappable) {
943        CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
944        if (cpsr.mode == MODE_HYP) {
945            CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
946            isHypTrap = spsr.mode != MODE_HYP;
947        }
948    }
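    // 0x14 is the offset of the Hyp Trap vector entry within the Hyp vector
    // table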
949    return isHypTrap ? 0x14 : vals.offset;
950}
951
952template<class T>
953FaultOffset
954ArmFaultVals<T>::offset64(ThreadContext *tc)
955{
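    // The AArch64 vector table is laid out as: 0x000 current EL with SP_EL0,
    // 0x200 current EL with SP_ELx, 0x400 lower EL using AArch64, and 0x600
    // lower EL using AArch32; the FaultVals offsets above follow this layout.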
956    if (toEL == fromEL) {
957        if (opModeIsT(fromMode))
958            return vals.currELTOffset;
959        return vals.currELHOffset;
960    } else {
961        bool lower_32 = false;
962        if (toEL == EL3) {
963            if (!inSecureState(tc) && ArmSystem::haveEL(tc, EL2))
964                lower_32 = ELIs32(tc, EL2);
965            else
966                lower_32 = ELIs32(tc, EL1);
967        } else {
968            lower_32 = ELIs32(tc, static_cast<ExceptionLevel>(toEL - 1));
969        }
970
971        if (lower_32)
972            return vals.lowerEL32Offset;
973        return vals.lowerEL64Offset;
974    }
975}
976
977// void
978// SupervisorCall::setSyndrome64(ThreadContext *tc, MiscRegIndex esr_idx)
979// {
980//     ESR esr = 0;
981//     esr.ec = machInst.aarch64 ? SvcAArch64 : SvcAArch32;
982//     esr.il = !machInst.thumb;
983//     if (machInst.aarch64)
984//         esr.imm16 = bits(machInst.instBits, 20, 5);
985//     else if (machInst.thumb)
986//         esr.imm16 = bits(machInst.instBits, 7, 0);
987//     else
988//         esr.imm16 = bits(machInst.instBits, 15, 0);
989//     tc->setMiscReg(esr_idx, esr);
990// }
991
992void
993SecureMonitorCall::invoke(ThreadContext *tc, const StaticInstPtr &inst)
994{
995    if (FullSystem) {
996        ArmFault::invoke(tc, inst);
997        return;
998    }
999}
1000
1001ExceptionClass
1002SecureMonitorCall::ec(ThreadContext *tc) const
1003{
1004    return (from64 ? EC_SMC_64 : vals.ec);
1005}
1006
1007bool
1008SupervisorTrap::routeToHyp(ThreadContext *tc) const
1009{
1010    bool toHyp = false;
1011
1012    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1013    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
1014    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
1015
1016    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
1017    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (currEL(tc) == EL0);
1018    return toHyp;
1019}
1020
1021uint32_t
1022SupervisorTrap::iss() const
1023{
1024    // If SupervisorTrap is routed to hypervisor, iss field is 0.
1025    if (hypRouted) {
1026        return 0;
1027    }
1028    return issRaw;
1029}
1030
1031ExceptionClass
1032SupervisorTrap::ec(ThreadContext *tc) const
1033{
1034    if (hypRouted)
1035        return EC_UNKNOWN;
1036    else
1037        return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
1038}
1039
1040ExceptionClass
1041SecureMonitorTrap::ec(ThreadContext *tc) const
1042{
1043    return (overrideEc != EC_INVALID) ? overrideEc :
1044        (from64 ? EC_SMC_64 : vals.ec);
1045}
1046
1047template<class T>
1048void
1049AbortFault<T>::invoke(ThreadContext *tc, const StaticInstPtr &inst)
1050{
1051    if (tranMethod == ArmFault::UnknownTran) {
1052        tranMethod = longDescFormatInUse(tc) ? ArmFault::LpaeTran
1053                                             : ArmFault::VmsaTran;
1054
1055        if ((tranMethod == ArmFault::VmsaTran) && this->routeToMonitor(tc)) {
1056            // See ARM ARM B3-1416
1057            bool override_LPAE = false;
1058            TTBCR ttbcr_s = tc->readMiscReg(MISCREG_TTBCR_S);
1059            TTBCR M5_VAR_USED ttbcr_ns = tc->readMiscReg(MISCREG_TTBCR_NS);
1060            if (ttbcr_s.eae) {
1061                override_LPAE = true;
1062            } else {
1063                // Unimplemented code option, not seen in testing.  May need
1064                // extension according to the manual excerpt above.
1065                DPRINTF(Faults, "Warning: Incomplete translation method "
1066                        "override detected.\n");
1067            }
1068            if (override_LPAE)
1069                tranMethod = ArmFault::LpaeTran;
1070        }
1071    }
1072
1073    if (source == ArmFault::AsynchronousExternalAbort) {
1074        tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_ABT, 0);
1075    }
1076    // Get effective fault source encoding
1077    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
1078
1079    // source must be determined BEFORE invoking generic routines which will
1080    // try to set hsr etc. and are based upon source!
1081    ArmFaultVals<T>::invoke(tc, inst);
1082
1083    if (!this->to64) {  // AArch32
1084        FSR  fsr  = getFsr(tc);
1085        if (cpsr.mode == MODE_HYP) {
1086            tc->setMiscReg(T::HFarIndex, faultAddr);
1087        } else if (stage2) {
1088            tc->setMiscReg(MISCREG_HPFAR, (faultAddr >> 8) & ~0xf);
1089            tc->setMiscReg(T::HFarIndex,  OVAddr);
1090        } else {
1091            tc->setMiscReg(T::FsrIndex, fsr);
1092            tc->setMiscReg(T::FarIndex, faultAddr);
1093        }
1094        DPRINTF(Faults, "Abort Fault source=%#x fsr=%#x faultAddr=%#x "\
1095                "tranMethod=%#x\n", source, fsr, faultAddr, tranMethod);
1096    } else {  // AArch64
1097        // Set the FAR register.  Nothing else to do if we are in AArch64 state
1098        // because the syndrome register has already been set inside invoke64()
1099        if (stage2) {
1100            // stage 2 fault, set HPFAR_EL2 to the faulting IPA
1101            // and FAR_EL2 to the Original VA
1102            tc->setMiscReg(AbortFault<T>::getFaultAddrReg64(), OVAddr);
1103            tc->setMiscReg(MISCREG_HPFAR_EL2, bits(faultAddr, 47, 12) << 4);
1104
1105            DPRINTF(Faults, "Abort Fault (Stage 2) VA: 0x%x IPA: 0x%x\n",
1106                    OVAddr, faultAddr);
1107        } else {
1108            tc->setMiscReg(AbortFault<T>::getFaultAddrReg64(), faultAddr);
1109        }
1110    }
1111}
1112
1113template<class T>
1114void
1115AbortFault<T>::setSyndrome(ThreadContext *tc, MiscRegIndex syndrome_reg)
1116{
1117    srcEncoded = getFaultStatusCode(tc);
1118    if (srcEncoded == ArmFault::FaultSourceInvalid) {
1119        panic("Invalid fault source\n");
1120    }
1121    ArmFault::setSyndrome(tc, syndrome_reg);
1122}
1123
1124template<class T>
1125uint8_t
1126AbortFault<T>::getFaultStatusCode(ThreadContext *tc) const
1127{
1128
1129    panic_if(!this->faultUpdated,
1130             "Trying to use un-updated ArmFault internal variables\n");
1131
1132    uint8_t fsc = 0;
1133
1134    if (!this->to64) {
1135        // AArch32
1136        assert(tranMethod != ArmFault::UnknownTran);
1137        if (tranMethod == ArmFault::LpaeTran) {
1138            fsc = ArmFault::longDescFaultSources[source];
1139        } else {
1140            fsc = ArmFault::shortDescFaultSources[source];
1141        }
1142    } else {
1143        // AArch64
1144        fsc = ArmFault::aarch64FaultSources[source];
1145    }
1146
1147    return fsc;
1148}
1149
1150template<class T>
1151FSR
1152AbortFault<T>::getFsr(ThreadContext *tc) const
1153{
1154    FSR fsr = 0;
1155
1156    auto fsc = getFaultStatusCode(tc);
1157
1158    // AArch32
1159    assert(tranMethod != ArmFault::UnknownTran);
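    // In the long-descriptor (LPAE) format the full 6-bit status code is
    // reported in FSR.STATUS and the LPAE bit is set; in the short-descriptor
    // format the 5-bit code is split across FS[3:0] and FS[4] and the domain
    // is also reported.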
1160    if (tranMethod == ArmFault::LpaeTran) {
1161        fsr.status = fsc;
1162        fsr.lpae   = 1;
1163    } else {
1164        fsr.fsLow  = bits(fsc, 3, 0);
1165        fsr.fsHigh = bits(fsc, 4);
1166        fsr.domain = static_cast<uint8_t>(domain);
1167    }
1168
1169    fsr.wnr = (write ? 1 : 0);
1170    fsr.ext = 0;
1171
1172    return fsr;
1173}
1174
1175template<class T>
1176bool
1177AbortFault<T>::abortDisable(ThreadContext *tc)
1178{
1179    if (ArmSystem::haveSecurity(tc)) {
1180        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1181        return (!scr.ns || scr.aw);
1182    }
1183    return true;
1184}
1185
1186template<class T>
1187void
1188AbortFault<T>::annotate(ArmFault::AnnotationIDs id, uint64_t val)
1189{
1190    switch (id)
1191    {
1192      case ArmFault::S1PTW:
1193        s1ptw = val;
1194        break;
1195      case ArmFault::OVA:
1196        OVAddr = val;
1197        break;
1198
1199      // Just ignore unknown IDs
1200      default:
1201        break;
1202    }
1203}
1204
1205template<class T>
1206uint32_t
1207AbortFault<T>::iss() const
1208{
1209    uint32_t val;
1210
1211    val  = srcEncoded & 0x3F;
1212    val |= write << 6;
1213    val |= s1ptw << 7;
1214    return (val);
1215}
1216
1217template<class T>
1218bool
1219AbortFault<T>::isMMUFault() const
1220{
1221    // NOTE: Not relying on LL information being aligned to lowest bits here
1222    return
1223         (source == ArmFault::AlignmentFault)     ||
1224        ((source >= ArmFault::TranslationLL) &&
1225         (source <  ArmFault::TranslationLL + 4)) ||
1226        ((source >= ArmFault::AccessFlagLL) &&
1227         (source <  ArmFault::AccessFlagLL + 4))  ||
1228        ((source >= ArmFault::DomainLL) &&
1229         (source <  ArmFault::DomainLL + 4))      ||
1230        ((source >= ArmFault::PermissionLL) &&
1231         (source <  ArmFault::PermissionLL + 4));
1232}
1233
1234template<class T>
1235bool
1236AbortFault<T>::getFaultVAddr(Addr &va) const
1237{
1238    va = (stage2 ?  OVAddr : faultAddr);
1239    return true;
1240}
1241
1242ExceptionClass
1243PrefetchAbort::ec(ThreadContext *tc) const
1244{
1245    if (to64) {
1246        // AArch64
1247        if (toEL == fromEL)
1248            return EC_PREFETCH_ABORT_CURR_EL;
1249        else
1250            return EC_PREFETCH_ABORT_LOWER_EL;
1251    } else {
1252        // AArch32
1253        // Abort faults have different EC codes depending on whether
1254        // the fault originated within HYP mode, or not. So override
1255        // the method and add the extra adjustment of the EC value.
1256
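        // e.g. the prefetch abort EC 0x20 is bumped to 0x21 when the abort
        // was taken from Hyp mode itself (cf. the 0x20/0x21 and 0x24/0x25
        // pairs noted in ArmFault::setSyndrome())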
1257        ExceptionClass ec = ArmFaultVals<PrefetchAbort>::vals.ec;
1258
1259        CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
1260        if (spsr.mode == MODE_HYP) {
1261            ec = ((ExceptionClass) (((uint32_t) ec) + 1));
1262        }
1263        return ec;
1264    }
1265}
1266
1267bool
1268PrefetchAbort::routeToMonitor(ThreadContext *tc) const
1269{
1270    SCR scr = 0;
1271    if (from64)
1272        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1273    else
1274        scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1275
1276    return scr.ea && !isMMUFault();
1277}
1278
1279bool
1280PrefetchAbort::routeToHyp(ThreadContext *tc) const
1281{
1282    bool toHyp;
1283
1284    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
1285    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
1286    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
1287    HDCR hdcr = tc->readMiscRegNoEffect(MISCREG_HDCR);
1288
1289    // if in Hyp mode then stay in Hyp mode
1290    toHyp  = scr.ns && (cpsr.mode == MODE_HYP);
1291    // otherwise, check whether to take to Hyp mode through Hyp Trap vector
1292    toHyp |= (stage2 ||
1293                ( (source ==               DebugEvent) && hdcr.tde && (cpsr.mode !=  MODE_HYP)) ||
1294                ( (source == SynchronousExternalAbort) && hcr.tge  && (cpsr.mode == MODE_USER))
1295             ) && !inSecureState(tc);
1296    return toHyp;
1297}
1298
1299ExceptionClass
1300DataAbort::ec(ThreadContext *tc) const
1301{
1302    if (to64) {
1303        // AArch64
1304        if (source == ArmFault::AsynchronousExternalAbort) {
1305            panic("Asynchronous External Abort should be handled with "
1306                    "SystemErrors (SErrors)!");
1307        }
1308        if (toEL == fromEL)
1309            return EC_DATA_ABORT_CURR_EL;
1310        else
1311            return EC_DATA_ABORT_LOWER_EL;
1312    } else {
1313        // AArch32
1314        // Abort faults have different EC codes depending on whether
1315        // the fault originated within HYP mode, or not. So override
1316        // the method and add the extra adjustment of the EC value.
1317
1318        ExceptionClass ec = ArmFaultVals<DataAbort>::vals.ec;
1319
1320        CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
1321        if (spsr.mode == MODE_HYP) {
1322            ec = ((ExceptionClass) (((uint32_t) ec) + 1));
1323        }
1324        return ec;
1325    }
1326}
1327
1328bool
1329DataAbort::routeToMonitor(ThreadContext *tc) const
1330{
1331    SCR scr = 0;
1332    if (from64)
1333        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1334    else
1335        scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1336
1337    return scr.ea && !isMMUFault();
1338}
1339
1340bool
1341DataAbort::routeToHyp(ThreadContext *tc) const
1342{
1343    bool toHyp;
1344
1345    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
1346    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
1347    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
1348    HDCR hdcr = tc->readMiscRegNoEffect(MISCREG_HDCR);
1349
1350    // if in Hyp mode then stay in Hyp mode
1351    toHyp  = scr.ns && (cpsr.mode == MODE_HYP);
1352    // otherwise, check whether to take to Hyp mode through Hyp Trap vector
1353    toHyp |= (stage2 ||
1354                ( (cpsr.mode != MODE_HYP) && ( ((source == AsynchronousExternalAbort) && hcr.amo) ||
1355                                               ((source == DebugEvent) && hdcr.tde) )
1356                ) ||
1357                ( (cpsr.mode == MODE_USER) && hcr.tge &&
1358                  ((source == AlignmentFault)            ||
1359                   (source == SynchronousExternalAbort))
1360                )
1361             ) && !inSecureState(tc);
1362    return toHyp;
1363}
1364
1365uint32_t
1366DataAbort::iss() const
1367{
1368    uint32_t val;
1369
1370    // Add on the data abort specific fields to the generic abort ISS value
1371    val  = AbortFault<DataAbort>::iss();
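    // Data abort specific ISS fields set here: ISV[24], SAS[23:22], SSE[21],
    // SRT[20:16], SF[15] and AR[14]; the generic value above already holds
    // the fault status code in [5:0], WnR in [6] and S1PTW in [7].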
1372    // The ISS is valid only for stage 2 faults not caused by a stage 1 page
1373    // table walk and, when taken to AArch64, only when directed to EL2
1374    if (!s1ptw && stage2 && (!to64 || toEL == EL2)) {
1375        val |= isv << 24;
1376        if (isv) {
1377            val |= sas << 22;
1378            val |= sse << 21;
1379            val |= srt << 16;
1380            // AArch64 only. These assignments are safe on AArch32 as well
1381            // because these vars are initialized to false
1382            val |= sf << 15;
1383            val |= ar << 14;
1384        }
1385    }
1386    return (val);
1387}
1388
1389void
1390DataAbort::annotate(AnnotationIDs id, uint64_t val)
1391{
1392    AbortFault<DataAbort>::annotate(id, val);
1393    switch (id)
1394    {
1395      case SAS:
1396        isv = true;
1397        sas = val;
1398        break;
1399      case SSE:
1400        isv = true;
1401        sse = val;
1402        break;
1403      case SRT:
1404        isv = true;
1405        srt = val;
1406        break;
1407      case SF:
1408        isv = true;
1409        sf  = val;
1410        break;
1411      case AR:
1412        isv = true;
1413        ar  = val;
1414        break;
1415      // Just ignore unknown IDs
1416      default:
1417        break;
1418    }
1419}
1420
1421void
1422VirtualDataAbort::invoke(ThreadContext *tc, const StaticInstPtr &inst)
1423{
1424    AbortFault<VirtualDataAbort>::invoke(tc, inst);
1425    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
1426    hcr.va = 0;
1427    tc->setMiscRegNoEffect(MISCREG_HCR, hcr);
1428}
1429
1430bool
1431Interrupt::routeToMonitor(ThreadContext *tc) const
1432{
1433    assert(ArmSystem::haveSecurity(tc));
1434    SCR scr = 0;
1435    if (from64)
1436        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1437    else
1438        scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1439    return scr.irq;
1440}
1441
1442bool
1443Interrupt::routeToHyp(ThreadContext *tc) const
1444{
1445    bool toHyp;
1446
1447    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
1448    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
1449    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
1450    // Determine whether IRQs are routed to Hyp mode.
1451    toHyp = (!scr.irq && hcr.imo && !inSecureState(tc)) ||
1452            (cpsr.mode == MODE_HYP);
1453    return toHyp;
1454}
1455
1456bool
1457Interrupt::abortDisable(ThreadContext *tc)
1458{
1459    if (ArmSystem::haveSecurity(tc)) {
1460        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1461        return (!scr.ns || scr.aw);
1462    }
1463    return true;
1464}
1465
1466VirtualInterrupt::VirtualInterrupt()
1467{}
1468
1469bool
1470FastInterrupt::routeToMonitor(ThreadContext *tc) const
1471{
1472    assert(ArmSystem::haveSecurity(tc));
1473    SCR scr = 0;
1474    if (from64)
1475        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1476    else
1477        scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1478    return scr.fiq;
1479}
1480
1481bool
1482FastInterrupt::routeToHyp(ThreadContext *tc) const
1483{
1484    bool toHyp;
1485
1486    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
1487    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
1488    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
1489    // Determine whether FIQs are routed to Hyp mode.
1490    toHyp = (!scr.fiq && hcr.fmo && !inSecureState(tc)) ||
1491            (cpsr.mode == MODE_HYP);
1492    return toHyp;
1493}
1494
1495bool
1496FastInterrupt::abortDisable(ThreadContext *tc)
1497{
1498    if (ArmSystem::haveSecurity(tc)) {
1499        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1500        return (!scr.ns || scr.aw);
1501    }
1502    return true;
1503}
1504
1505bool
1506FastInterrupt::fiqDisable(ThreadContext *tc)
1507{
1508    if (ArmSystem::haveVirtualization(tc)) {
1509        return true;
1510    } else if (ArmSystem::haveSecurity(tc)) {
1511        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1512        return (!scr.ns || scr.fw);
1513    }
1514    return true;
1515}
1516
1517VirtualFastInterrupt::VirtualFastInterrupt()
1518{}
1519
1520void
1521PCAlignmentFault::invoke(ThreadContext *tc, const StaticInstPtr &inst)
1522{
1523    ArmFaultVals<PCAlignmentFault>::invoke(tc, inst);
1524    assert(from64);
1525    // Set the FAR
1526    tc->setMiscReg(getFaultAddrReg64(), faultPC);
1527}
1528
1529bool
1530PCAlignmentFault::routeToHyp(ThreadContext *tc) const
1531{
1532    bool toHyp = false;
1533
1534    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1535    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
1536    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
1537
1538    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
1539    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (currEL(tc) == EL0);
1540    return toHyp;
1541}
1542
1543SPAlignmentFault::SPAlignmentFault()
1544{}
1545
1546SystemError::SystemError()
1547{}
1548
1549void
1550SystemError::invoke(ThreadContext *tc, const StaticInstPtr &inst)
1551{
1552    tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_ABT, 0);
1553    ArmFault::invoke(tc, inst);
1554}
1555
1556bool
1557SystemError::routeToMonitor(ThreadContext *tc) const
1558{
1559    assert(ArmSystem::haveSecurity(tc));
1560    assert(from64);
1561    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1562    return scr.ea;
1563}
1564
1565bool
1566SystemError::routeToHyp(ThreadContext *tc) const
1567{
1568    bool toHyp;
1569    assert(from64);
1570
1571    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1572    HCR hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
1573
1574    toHyp = (!scr.ea && hcr.amo && !inSecureState(tc)) ||
1575            (!scr.ea && !scr.rw && !hcr.amo && !inSecureState(tc));
1576    return toHyp;
1577}
1578
1579
1580SoftwareBreakpoint::SoftwareBreakpoint(ExtMachInst _mach_inst, uint32_t _iss)
1581    : ArmFaultVals<SoftwareBreakpoint>(_mach_inst, _iss)
1582{}
1583
1584bool
1585SoftwareBreakpoint::routeToHyp(ThreadContext *tc) const
1586{
1587    const bool have_el2 = ArmSystem::haveVirtualization(tc);
1588
1589    const HCR hcr  = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
1590    const HDCR mdcr  = tc->readMiscRegNoEffect(MISCREG_MDCR_EL2);
1591
1592    return have_el2 && !inSecureState(tc) && fromEL <= EL1 &&
1593        (hcr.tge || mdcr.tde);
1594}
1595
1596ExceptionClass
1597SoftwareBreakpoint::ec(ThreadContext *tc) const
1598{
1599    return from64 ? EC_SOFTWARE_BREAKPOINT_64 : vals.ec;
1600}
1601
1602void
1603ArmSev::invoke(ThreadContext *tc, const StaticInstPtr &inst) {
1604    DPRINTF(Faults, "Invoking ArmSev Fault\n");
1605    if (!FullSystem)
1606        return;
1607
1608    // Set sev_mailbox to 1, clear the pending interrupt from remote
1609    // SEV execution and let pipeline continue as pcState is still
1610    // valid.
1611    tc->setMiscReg(MISCREG_SEV_MAILBOX, 1);
1612    tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_SEV, 0);
1613}
1614
1615// Instantiate all the templates to make the linker happy
1616template class ArmFaultVals<Reset>;
1617template class ArmFaultVals<UndefinedInstruction>;
1618template class ArmFaultVals<SupervisorCall>;
1619template class ArmFaultVals<SecureMonitorCall>;
1620template class ArmFaultVals<HypervisorCall>;
1621template class ArmFaultVals<PrefetchAbort>;
1622template class ArmFaultVals<DataAbort>;
1623template class ArmFaultVals<VirtualDataAbort>;
1624template class ArmFaultVals<HypervisorTrap>;
1625template class ArmFaultVals<Interrupt>;
1626template class ArmFaultVals<VirtualInterrupt>;
1627template class ArmFaultVals<FastInterrupt>;
1628template class ArmFaultVals<VirtualFastInterrupt>;
1629template class ArmFaultVals<SupervisorTrap>;
1630template class ArmFaultVals<SecureMonitorTrap>;
1631template class ArmFaultVals<PCAlignmentFault>;
1632template class ArmFaultVals<SPAlignmentFault>;
1633template class ArmFaultVals<SystemError>;
1634template class ArmFaultVals<SoftwareBreakpoint>;
1635template class ArmFaultVals<ArmSev>;
1636template class AbortFault<PrefetchAbort>;
1637template class AbortFault<DataAbort>;
1638template class AbortFault<VirtualDataAbort>;
1639
1640
1641IllegalInstSetStateFault::IllegalInstSetStateFault()
1642{}
1643
1644bool
1645getFaultVAddr(Fault fault, Addr &va)
1646{
1647    auto arm_fault = dynamic_cast<ArmFault *>(fault.get());
1648
1649    if (arm_fault) {
1650        return arm_fault->getFaultVAddr(va);
1651    } else {
1652        auto pgt_fault = dynamic_cast<GenericPageTableFault *>(fault.get());
1653        if (pgt_fault) {
1654            va = pgt_fault->getFaultVAddr();
1655            return true;
1656        }
1657
1658        auto align_fault = dynamic_cast<GenericAlignmentFault *>(fault.get());
1659        if (align_fault) {
1660            va = align_fault->getFaultVAddr();
1661            return true;
1662        }
1663
1664        // Return false since it's not an address triggered exception
1665        return false;
1666    }
1667}
1668
1669} // namespace ArmISA
1670