faults.cc revision 12259:f787f664d57a
/*
 * Copyright (c) 2010, 2012-2014, 2016-2017 ARM Limited
 * All rights reserved
 *
 * The license below extends only to copyright in the software and shall
 * not be construed as granting a license to any other intellectual
 * property including but not limited to intellectual property relating
 * to a hardware implementation of the functionality of the software
 * licensed hereunder.  You may use the software subject to the license
 * terms below provided that you ensure that this notice is replicated
 * unmodified and in its entirety in all distributions of the software,
 * modified or unmodified, in source code or in binary form.
 *
 * Copyright (c) 2003-2005 The Regents of The University of Michigan
 * Copyright (c) 2007-2008 The Florida State University
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met: redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer;
 * redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution;
 * neither the name of the copyright holders nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * Authors: Ali Saidi
 *          Gabe Black
 *          Giacomo Gabrielli
 *          Thomas Grocutt
 */

#include "arch/arm/faults.hh"

#include "arch/arm/insts/static_inst.hh"
#include "arch/arm/system.hh"
#include "arch/arm/utility.hh"
#include "base/compiler.hh"
#include "base/trace.hh"
#include "cpu/base.hh"
#include "cpu/thread_context.hh"
#include "debug/Faults.hh"
#include "sim/full_system.hh"

namespace ArmISA
{

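// Note: the three tables below map each ArmFault::FaultSource value to the
// architectural fault status encoding used by the short-descriptor
// DFSR/IFSR format, the long-descriptor (LPAE) format and the AArch64 ESR,
// respectively.  Entries marked INVALID (0xff) have no encoding in that
// format and will trigger a panic if they are ever looked up (see
// AbortFault<T>::getFsr()).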
uint8_t ArmFault::shortDescFaultSources[] = {
    0x01,  // AlignmentFault
    0x04,  // InstructionCacheMaintenance
    0xff,  // SynchExtAbtOnTranslTableWalkL0 (INVALID)
    0x0c,  // SynchExtAbtOnTranslTableWalkL1
    0x0e,  // SynchExtAbtOnTranslTableWalkL2
    0xff,  // SynchExtAbtOnTranslTableWalkL3 (INVALID)
    0xff,  // SynchPtyErrOnTranslTableWalkL0 (INVALID)
    0x1c,  // SynchPtyErrOnTranslTableWalkL1
    0x1e,  // SynchPtyErrOnTranslTableWalkL2
    0xff,  // SynchPtyErrOnTranslTableWalkL3 (INVALID)
    0xff,  // TranslationL0 (INVALID)
    0x05,  // TranslationL1
    0x07,  // TranslationL2
    0xff,  // TranslationL3 (INVALID)
    0xff,  // AccessFlagL0 (INVALID)
    0x03,  // AccessFlagL1
    0x06,  // AccessFlagL2
    0xff,  // AccessFlagL3 (INVALID)
    0xff,  // DomainL0 (INVALID)
    0x09,  // DomainL1
    0x0b,  // DomainL2
    0xff,  // DomainL3 (INVALID)
    0xff,  // PermissionL0 (INVALID)
    0x0d,  // PermissionL1
    0x0f,  // PermissionL2
    0xff,  // PermissionL3 (INVALID)
    0x02,  // DebugEvent
    0x08,  // SynchronousExternalAbort
    0x10,  // TLBConflictAbort
    0x19,  // SynchPtyErrOnMemoryAccess
    0x16,  // AsynchronousExternalAbort
    0x18,  // AsynchPtyErrOnMemoryAccess
    0xff,  // AddressSizeL0 (INVALID)
    0xff,  // AddressSizeL1 (INVALID)
    0xff,  // AddressSizeL2 (INVALID)
    0xff,  // AddressSizeL3 (INVALID)
    0x40,  // PrefetchTLBMiss
    0x80   // PrefetchUncacheable
};

static_assert(sizeof(ArmFault::shortDescFaultSources) ==
              ArmFault::NumFaultSources,
              "Invalid size of ArmFault::shortDescFaultSources[]");

uint8_t ArmFault::longDescFaultSources[] = {
    0x21,  // AlignmentFault
    0xff,  // InstructionCacheMaintenance (INVALID)
    0xff,  // SynchExtAbtOnTranslTableWalkL0 (INVALID)
    0x15,  // SynchExtAbtOnTranslTableWalkL1
    0x16,  // SynchExtAbtOnTranslTableWalkL2
    0x17,  // SynchExtAbtOnTranslTableWalkL3
    0xff,  // SynchPtyErrOnTranslTableWalkL0 (INVALID)
    0x1d,  // SynchPtyErrOnTranslTableWalkL1
    0x1e,  // SynchPtyErrOnTranslTableWalkL2
    0x1f,  // SynchPtyErrOnTranslTableWalkL3
    0xff,  // TranslationL0 (INVALID)
    0x05,  // TranslationL1
    0x06,  // TranslationL2
    0x07,  // TranslationL3
    0xff,  // AccessFlagL0 (INVALID)
    0x09,  // AccessFlagL1
    0x0a,  // AccessFlagL2
    0x0b,  // AccessFlagL3
    0xff,  // DomainL0 (INVALID)
    0x3d,  // DomainL1
    0x3e,  // DomainL2
    0xff,  // DomainL3 (RESERVED)
    0xff,  // PermissionL0 (INVALID)
    0x0d,  // PermissionL1
    0x0e,  // PermissionL2
    0x0f,  // PermissionL3
    0x22,  // DebugEvent
    0x10,  // SynchronousExternalAbort
    0x30,  // TLBConflictAbort
    0x18,  // SynchPtyErrOnMemoryAccess
    0x11,  // AsynchronousExternalAbort
    0x19,  // AsynchPtyErrOnMemoryAccess
    0xff,  // AddressSizeL0 (INVALID)
    0xff,  // AddressSizeL1 (INVALID)
    0xff,  // AddressSizeL2 (INVALID)
    0xff,  // AddressSizeL3 (INVALID)
    0x40,  // PrefetchTLBMiss
    0x80   // PrefetchUncacheable
};

static_assert(sizeof(ArmFault::longDescFaultSources) ==
              ArmFault::NumFaultSources,
              "Invalid size of ArmFault::longDescFaultSources[]");

uint8_t ArmFault::aarch64FaultSources[] = {
    0x21,  // AlignmentFault
    0xff,  // InstructionCacheMaintenance (INVALID)
    0x14,  // SynchExtAbtOnTranslTableWalkL0
    0x15,  // SynchExtAbtOnTranslTableWalkL1
    0x16,  // SynchExtAbtOnTranslTableWalkL2
    0x17,  // SynchExtAbtOnTranslTableWalkL3
    0x1c,  // SynchPtyErrOnTranslTableWalkL0
    0x1d,  // SynchPtyErrOnTranslTableWalkL1
    0x1e,  // SynchPtyErrOnTranslTableWalkL2
    0x1f,  // SynchPtyErrOnTranslTableWalkL3
    0x04,  // TranslationL0
    0x05,  // TranslationL1
    0x06,  // TranslationL2
    0x07,  // TranslationL3
    0x08,  // AccessFlagL0
    0x09,  // AccessFlagL1
    0x0a,  // AccessFlagL2
    0x0b,  // AccessFlagL3
    // @todo: Section & Page Domain Fault in AArch64?
    0xff,  // DomainL0 (INVALID)
    0xff,  // DomainL1 (INVALID)
    0xff,  // DomainL2 (INVALID)
    0xff,  // DomainL3 (INVALID)
    0x0c,  // PermissionL0
    0x0d,  // PermissionL1
    0x0e,  // PermissionL2
    0x0f,  // PermissionL3
    0xff,  // DebugEvent (INVALID)
    0x10,  // SynchronousExternalAbort
    0x30,  // TLBConflictAbort
    0x18,  // SynchPtyErrOnMemoryAccess
    0xff,  // AsynchronousExternalAbort (INVALID)
    0xff,  // AsynchPtyErrOnMemoryAccess (INVALID)
    0x00,  // AddressSizeL0
    0x01,  // AddressSizeL1
    0x02,  // AddressSizeL2
    0x03,  // AddressSizeL3
    0x40,  // PrefetchTLBMiss
    0x80   // PrefetchUncacheable
};

static_assert(sizeof(ArmFault::aarch64FaultSources) ==
              ArmFault::NumFaultSources,
              "Invalid size of ArmFault::aarch64FaultSources[]");

// Fields: name, offset, cur{ELT,ELH}Offset, lowerEL{64,32}Offset, next mode,
//         {ARM, Thumb, ARM_ELR, Thumb_ELR} PC offset, hyp trap,
//         {A, F} disable, class, stat
template<> ArmFault::FaultVals ArmFaultVals<Reset>::vals = {
    // Some dummy values (the reset vector has an IMPLEMENTATION DEFINED
    // location in AArch64)
    "Reset",                 0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC,
    0, 0, 0, 0, false, true,  true,  EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<UndefinedInstruction>::vals = {
    "Undefined Instruction", 0x004, 0x000, 0x200, 0x400, 0x600, MODE_UNDEFINED,
    4, 2, 0, 0, true,  false, false, EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SupervisorCall>::vals = {
    "Supervisor Call",       0x008, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    4, 2, 4, 2, true,  false, false, EC_SVC_TO_HYP, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorCall>::vals = {
    "Secure Monitor Call",   0x008, 0x000, 0x200, 0x400, 0x600, MODE_MON,
    4, 4, 4, 4, false, true,  true,  EC_SMC_TO_HYP, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<HypervisorCall>::vals = {
    "Hypervisor Call",       0x008, 0x000, 0x200, 0x400, 0x600, MODE_HYP,
    4, 4, 4, 4, true,  false, false, EC_HVC, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<PrefetchAbort>::vals = {
    "Prefetch Abort",        0x00C, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
    4, 4, 0, 0, true,  true,  false, EC_PREFETCH_ABORT_TO_HYP, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<DataAbort>::vals = {
    "Data Abort",            0x010, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
    8, 8, 0, 0, true,  true,  false, EC_DATA_ABORT_TO_HYP, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<VirtualDataAbort>::vals = {
    "Virtual Data Abort",    0x010, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
    8, 8, 0, 0, true,  true,  false, EC_INVALID, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<HypervisorTrap>::vals = {
    // @todo: double check these values
    "Hypervisor Trap",       0x014, 0x000, 0x200, 0x400, 0x600, MODE_HYP,
    0, 0, 0, 0, false, false, false, EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<Interrupt>::vals = {
    "IRQ",                   0x018, 0x080, 0x280, 0x480, 0x680, MODE_IRQ,
    4, 4, 0, 0, false, true,  false, EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<VirtualInterrupt>::vals = {
    "Virtual IRQ",           0x018, 0x080, 0x280, 0x480, 0x680, MODE_IRQ,
    4, 4, 0, 0, false, true,  false, EC_INVALID, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<FastInterrupt>::vals = {
    "FIQ",                   0x01C, 0x100, 0x300, 0x500, 0x700, MODE_FIQ,
    4, 4, 0, 0, false, true,  true,  EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<VirtualFastInterrupt>::vals = {
    "Virtual FIQ",           0x01C, 0x100, 0x300, 0x500, 0x700, MODE_FIQ,
    4, 4, 0, 0, false, true,  true,  EC_INVALID, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SupervisorTrap>::vals = {
    // Some dummy values (SupervisorTrap is AArch64-only)
    "Supervisor Trap",   0x014, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, false, false, false, EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorTrap>::vals = {
    // Some dummy values (SecureMonitorTrap is AArch64-only)
    "Secure Monitor Trap",   0x014, 0x000, 0x200, 0x400, 0x600, MODE_MON,
    0, 0, 0, 0, false, false, false, EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<PCAlignmentFault>::vals = {
    // Some dummy values (PCAlignmentFault is AArch64-only)
    "PC Alignment Fault",   0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, true, false, false, EC_PC_ALIGNMENT, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SPAlignmentFault>::vals = {
    // Some dummy values (SPAlignmentFault is AArch64-only)
    "SP Alignment Fault",   0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, true, false, false, EC_STACK_PTR_ALIGNMENT, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SystemError>::vals = {
    // Some dummy values (SError is AArch64-only)
    "SError",                0x000, 0x180, 0x380, 0x580, 0x780, MODE_SVC,
    0, 0, 0, 0, false, true,  true,  EC_SERROR, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<ArmSev>::vals = {
    // Some dummy values
    "ArmSev Flush",          0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC,
    0, 0, 0, 0, false, true,  true,  EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<IllegalInstSetStateFault>::vals = {
    // Some dummy values (IllegalInstSetStateFault is AArch64-only)
288    "Illegal Inst Set State Fault",   0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
289    0, 0, 0, 0, true, false, false, EC_ILLEGAL_INST, FaultStat()
290};
291
292Addr
293ArmFault::getVector(ThreadContext *tc)
294{
295    Addr base;
296
297    // ARM ARM issue C B1.8.1
298    bool haveSecurity = ArmSystem::haveSecurity(tc);
299
    // Vectored interrupts (SCTLR.VE) are not supported, so assert that the
    // bit is not set
    SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR);
    assert(!sctlr.ve);
    // Check for invalid modes
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    assert(haveSecurity                      || cpsr.mode != MODE_MON);
    assert(ArmSystem::haveVirtualization(tc) || cpsr.mode != MODE_HYP);

    switch (cpsr.mode)
    {
      case MODE_MON:
        base = tc->readMiscReg(MISCREG_MVBAR);
        break;
      case MODE_HYP:
        base = tc->readMiscReg(MISCREG_HVBAR);
        break;
      default:
        if (sctlr.v) {
            base = HighVecs;
        } else {
            base = haveSecurity ? tc->readMiscReg(MISCREG_VBAR) : 0;
        }
        break;
    }
    return base + offset(tc);
}

Addr
ArmFault::getVector64(ThreadContext *tc)
{
    Addr vbar;
    switch (toEL) {
      case EL3:
        assert(ArmSystem::haveSecurity(tc));
        vbar = tc->readMiscReg(MISCREG_VBAR_EL3);
        break;
      case EL2:
        assert(ArmSystem::haveVirtualization(tc));
        vbar = tc->readMiscReg(MISCREG_VBAR_EL2);
        break;
      case EL1:
        vbar = tc->readMiscReg(MISCREG_VBAR_EL1);
        break;
      default:
        panic("Invalid target exception level");
        break;
    }
    return vbar + offset64();
}

MiscRegIndex
ArmFault::getSyndromeReg64() const
{
    switch (toEL) {
      case EL1:
        return MISCREG_ESR_EL1;
      case EL2:
        return MISCREG_ESR_EL2;
      case EL3:
        return MISCREG_ESR_EL3;
      default:
        panic("Invalid exception level");
        break;
    }
}

MiscRegIndex
ArmFault::getFaultAddrReg64() const
{
    switch (toEL) {
      case EL1:
        return MISCREG_FAR_EL1;
      case EL2:
        return MISCREG_FAR_EL2;
      case EL3:
        return MISCREG_FAR_EL3;
      default:
        panic("Invalid exception level");
        break;
    }
}

void
ArmFault::setSyndrome(ThreadContext *tc, MiscRegIndex syndrome_reg)
{
    uint32_t value;
    uint32_t exc_class = (uint32_t) ec(tc);
    uint32_t issVal = iss();
    assert(!from64 || ArmSystem::highestELIs64(tc));

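    // Syndrome register layout used below: the exception class (EC) occupies
    // bits [31:26], the IL bit is bit [25] and the ISS fills bits [24:0].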
    value = exc_class << 26;

    // HSR.IL not valid for Prefetch Aborts (0x20, 0x21) and Data Aborts (0x24,
    // 0x25) for which the ISS information is not valid (ARMv7).
    // @todo: ARMv8 revises AArch32 functionality: when HSR.IL is not
    // valid it is treated as RES1.
    if (to64) {
        value |= 1 << 25;
    } else if ((bits(exc_class, 5, 3) != 4) ||
               (bits(exc_class, 2) && bits(issVal, 24))) {
        if (!machInst.thumb || machInst.bigThumb)
            value |= 1 << 25;
    }
    // Condition code valid for EC[5:4] nonzero
    if (!from64 && ((bits(exc_class, 5, 4) == 0) &&
                    (bits(exc_class, 3, 0) != 0))) {
        if (!machInst.thumb) {
            uint32_t      cond;
            ConditionCode condCode = (ConditionCode) (uint32_t) machInst.condCode;
            // If it's an unconditional instruction, report it with a
            // condition code of 0xE, i.e. the always (unconditional) code
            cond  = (condCode == COND_UC) ? COND_AL : condCode;
            value |= cond << 20;
            value |= 1    << 24;
        }
        value |= bits(issVal, 19, 0);
    } else {
        value |= issVal;
    }
    tc->setMiscReg(syndrome_reg, value);
}

void
ArmFault::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);

    if (ArmSystem::highestELIs64(tc)) {  // ARMv8
        // Determine source exception level and mode
        fromMode = (OperatingMode) (uint8_t) cpsr.mode;
        fromEL = opModeToEL(fromMode);
        if (opModeIs64(fromMode))
            from64 = true;

        // Determine target exception level
        if (ArmSystem::haveSecurity(tc) && routeToMonitor(tc))
            toEL = EL3;
        else if (ArmSystem::haveVirtualization(tc) && routeToHyp(tc))
            toEL = EL2;
        else
            toEL = opModeToEL(nextMode());
        if (fromEL > toEL)
            toEL = fromEL;

        if (toEL == ArmSystem::highestEL(tc) || ELIs64(tc, toEL)) {
            // Invoke exception handler in AArch64 state
            to64 = true;
            invoke64(tc, inst);
            return;
        }
    }

    // ARMv7 (ARM ARM issue C B1.9)

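    // AArch32 exception entry, as implemented below: save the current CPSR
    // into the banked SPSR of the target mode, update CPSR (mode, ISA state
    // and the A/I/F masks), set the banked LR (or ELR_hyp) to the preferred
    // return address, and finally branch to the exception vector.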
    bool have_security       = ArmSystem::haveSecurity(tc);
    bool have_virtualization = ArmSystem::haveVirtualization(tc);

    FaultBase::invoke(tc);
    if (!FullSystem)
        return;
    countStat()++;

    SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR);
    SCR scr = tc->readMiscReg(MISCREG_SCR);
    CPSR saved_cpsr = tc->readMiscReg(MISCREG_CPSR);
    saved_cpsr.nz = tc->readCCReg(CCREG_NZ);
    saved_cpsr.c = tc->readCCReg(CCREG_C);
    saved_cpsr.v = tc->readCCReg(CCREG_V);
    saved_cpsr.ge = tc->readCCReg(CCREG_GE);

    Addr curPc M5_VAR_USED = tc->pcState().pc();
    ITSTATE it = tc->pcState().itstate();
    saved_cpsr.it2 = it.top6;
    saved_cpsr.it1 = it.bottom2;

    // if we have a valid instruction then use it to annotate this fault with
    // extra information. This is used to generate the correct fault syndrome
    // information
    if (inst) {
        ArmStaticInst *armInst = reinterpret_cast<ArmStaticInst *>(inst.get());
        armInst->annotateFault(this);
    }

    if (have_security && routeToMonitor(tc))
        cpsr.mode = MODE_MON;
    else if (have_virtualization && routeToHyp(tc))
        cpsr.mode = MODE_HYP;
    else
        cpsr.mode = nextMode();

    // Ensure Secure state if initially in Monitor mode
    if (have_security && saved_cpsr.mode == MODE_MON) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        if (scr.ns) {
            scr.ns = 0;
            tc->setMiscRegNoEffect(MISCREG_SCR, scr);
        }
    }

    // some bits are set differently if we have been routed to hyp mode
    if (cpsr.mode == MODE_HYP) {
        SCTLR hsctlr = tc->readMiscReg(MISCREG_HSCTLR);
        cpsr.t = hsctlr.te;
        cpsr.e = hsctlr.ee;
        if (!scr.ea)  {cpsr.a = 1;}
        if (!scr.fiq) {cpsr.f = 1;}
        if (!scr.irq) {cpsr.i = 1;}
    } else if (cpsr.mode == MODE_MON) {
        // Special case handling when entering monitor mode
        cpsr.t = sctlr.te;
        cpsr.e = sctlr.ee;
        cpsr.a = 1;
        cpsr.f = 1;
        cpsr.i = 1;
    } else {
        cpsr.t = sctlr.te;
        cpsr.e = sctlr.ee;

        // The *Disable functions are virtual and different per fault
        cpsr.a = cpsr.a | abortDisable(tc);
        cpsr.f = cpsr.f | fiqDisable(tc);
        cpsr.i = 1;
    }
    cpsr.it1 = cpsr.it2 = 0;
    cpsr.j = 0;
    tc->setMiscReg(MISCREG_CPSR, cpsr);

    // Make sure the SEV mailbox is always set to one
    tc->setMiscReg(MISCREG_SEV_MAILBOX, 1);

    // Clear the exclusive monitor
    tc->setMiscReg(MISCREG_LOCKFLAG, 0);

    if (cpsr.mode == MODE_HYP) {
        tc->setMiscReg(MISCREG_ELR_HYP, curPc +
                (saved_cpsr.t ? thumbPcOffset(true)  : armPcOffset(true)));
    } else {
        tc->setIntReg(INTREG_LR, curPc +
                (saved_cpsr.t ? thumbPcOffset(false) : armPcOffset(false)));
    }

    switch (cpsr.mode) {
      case MODE_FIQ:
        tc->setMiscReg(MISCREG_SPSR_FIQ, saved_cpsr);
        break;
      case MODE_IRQ:
        tc->setMiscReg(MISCREG_SPSR_IRQ, saved_cpsr);
        break;
      case MODE_SVC:
        tc->setMiscReg(MISCREG_SPSR_SVC, saved_cpsr);
        break;
      case MODE_MON:
        assert(have_security);
        tc->setMiscReg(MISCREG_SPSR_MON, saved_cpsr);
        break;
      case MODE_ABORT:
        tc->setMiscReg(MISCREG_SPSR_ABT, saved_cpsr);
        break;
      case MODE_UNDEFINED:
        tc->setMiscReg(MISCREG_SPSR_UND, saved_cpsr);
        if (ec(tc) != EC_UNKNOWN)
            setSyndrome(tc, MISCREG_HSR);
        break;
      case MODE_HYP:
        assert(have_virtualization);
        tc->setMiscReg(MISCREG_SPSR_HYP, saved_cpsr);
        setSyndrome(tc, MISCREG_HSR);
        break;
      default:
        panic("unknown Mode\n");
    }

    Addr newPc = getVector(tc);
    DPRINTF(Faults, "Invoking Fault:%s cpsr:%#x PC:%#x lr:%#x newVec: %#x\n",
            name(), cpsr, curPc, tc->readIntReg(INTREG_LR), newPc);
    PCState pc(newPc);
    pc.thumb(cpsr.t);
    pc.nextThumb(pc.thumb());
    pc.jazelle(cpsr.j);
    pc.nextJazelle(pc.jazelle());
    pc.aarch64(!cpsr.width);
    pc.nextAArch64(!cpsr.width);
    tc->pcState(pc);
}

void
ArmFault::invoke64(ThreadContext *tc, const StaticInstPtr &inst)
{
    // Determine actual misc. register indices for ELR_ELx and SPSR_ELx
    MiscRegIndex elr_idx, spsr_idx;
    switch (toEL) {
      case EL1:
        elr_idx = MISCREG_ELR_EL1;
        spsr_idx = MISCREG_SPSR_EL1;
        break;
      case EL2:
        assert(ArmSystem::haveVirtualization(tc));
        elr_idx = MISCREG_ELR_EL2;
        spsr_idx = MISCREG_SPSR_EL2;
        break;
      case EL3:
        assert(ArmSystem::haveSecurity(tc));
        elr_idx = MISCREG_ELR_EL3;
        spsr_idx = MISCREG_SPSR_EL3;
        break;
      default:
        panic("Invalid target exception level");
        break;
    }

    // Save process state into SPSR_ELx
    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
    CPSR spsr = cpsr;
    spsr.nz = tc->readCCReg(CCREG_NZ);
    spsr.c = tc->readCCReg(CCREG_C);
    spsr.v = tc->readCCReg(CCREG_V);
    if (from64) {
        // Force some bitfields to 0
        spsr.q = 0;
        spsr.it1 = 0;
        spsr.j = 0;
        spsr.res0_23_22 = 0;
        spsr.ge = 0;
        spsr.it2 = 0;
        spsr.t = 0;
    } else {
        spsr.ge = tc->readCCReg(CCREG_GE);
        ITSTATE it = tc->pcState().itstate();
        spsr.it2 = it.top6;
        spsr.it1 = it.bottom2;
        // Force some bitfields to 0
        spsr.res0_23_22 = 0;
        spsr.ss = 0;
    }
    tc->setMiscReg(spsr_idx, spsr);

    // Save preferred return address into ELR_ELx
    Addr curr_pc = tc->pcState().pc();
    Addr ret_addr = curr_pc;
    if (from64)
        ret_addr += armPcElrOffset();
    else
        ret_addr += spsr.t ? thumbPcElrOffset() : armPcElrOffset();
    tc->setMiscReg(elr_idx, ret_addr);

    // Update process state
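    // Exception entry to AArch64 selects the target EL's dedicated stack
    // pointer (mode.spX, i.e. SPSel = 1) and masks all of D, A, I and F
    // (cpsr.daif = 0xf).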
    OperatingMode64 mode = 0;
    mode.spX = 1;
    mode.el = toEL;
    mode.width = 0;
    cpsr.mode = mode;
    cpsr.daif = 0xf;
    cpsr.il = 0;
    cpsr.ss = 0;
    tc->setMiscReg(MISCREG_CPSR, cpsr);

    // Set PC to start of exception handler
    Addr new_pc = purifyTaggedAddr(getVector64(tc), tc, toEL);
    DPRINTF(Faults, "Invoking Fault (AArch64 target EL):%s cpsr:%#x PC:%#x "
            "elr:%#x newVec: %#x\n", name(), cpsr, curr_pc, ret_addr, new_pc);
    PCState pc(new_pc);
    pc.aarch64(!cpsr.width);
    pc.nextAArch64(!cpsr.width);
    tc->pcState(pc);

    // If we have a valid instruction then use it to annotate this fault with
    // extra information. This is used to generate the correct fault syndrome
    // information
    if (inst)
        reinterpret_cast<ArmStaticInst *>(inst.get())->annotateFault(this);
    // Save exception syndrome
    if ((nextMode() != MODE_IRQ) && (nextMode() != MODE_FIQ))
        setSyndrome(tc, getSyndromeReg64());
}

void
Reset::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (FullSystem) {
        tc->getCpuPtr()->clearInterrupts(tc->threadId());
        tc->clearArchRegs();
    }
    if (!ArmSystem::highestELIs64(tc)) {
        ArmFault::invoke(tc, inst);
        tc->setMiscReg(MISCREG_VMPIDR,
                       getMPIDR(dynamic_cast<ArmSystem*>(tc->getSystemPtr()), tc));

        // Unless we have SMC code to get us there, boot in HYP!
        if (ArmSystem::haveVirtualization(tc) &&
            !ArmSystem::haveSecurity(tc)) {
            CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
            cpsr.mode = MODE_HYP;
            tc->setMiscReg(MISCREG_CPSR, cpsr);
        }
    } else {
        // Advance the PC to the IMPLEMENTATION DEFINED reset value
        PCState pc = ArmSystem::resetAddr64(tc);
        pc.aarch64(true);
        pc.nextAArch64(true);
        tc->pcState(pc);
    }
}

void
UndefinedInstruction::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (FullSystem) {
        ArmFault::invoke(tc, inst);
        return;
    }

    // If the mnemonic isn't defined this has to be an unknown instruction.
    assert(unknown || mnemonic != NULL);
    if (disabled) {
        panic("Attempted to execute disabled instruction "
                "'%s' (inst 0x%08x)", mnemonic, machInst);
    } else if (unknown) {
        panic("Attempted to execute unknown instruction (inst 0x%08x)",
              machInst);
    } else {
        panic("Attempted to execute unimplemented instruction "
                "'%s' (inst 0x%08x)", mnemonic, machInst);
    }
}

bool
UndefinedInstruction::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);

    // if in Hyp mode then stay in Hyp mode
    toHyp  = scr.ns && (cpsr.mode == MODE_HYP);
    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.mode == MODE_USER);
    return toHyp;
}

uint32_t
UndefinedInstruction::iss() const
{
    if (overrideEc == EC_INVALID)
        return issRaw;

    uint32_t new_iss = 0;
    uint32_t op0, op1, op2, CRn, CRm, Rt, dir;

    dir = bits(machInst, 21, 21);
    op0 = bits(machInst, 20, 19);
    op1 = bits(machInst, 18, 16);
    CRn = bits(machInst, 15, 12);
    CRm = bits(machInst, 11, 8);
    op2 = bits(machInst, 7, 5);
    Rt = bits(machInst, 4, 0);

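    // Pack the fields into the ISS format used for trapped system/coprocessor
    // register accesses: op0 in bits [21:20], op2 in [19:17], op1 in [16:14],
    // CRn in [13:10], Rt in [9:5], CRm in [4:1] and the direction bit in [0].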
    new_iss = op0 << 20 | op2 << 17 | op1 << 14 | CRn << 10 |
            Rt << 5 | CRm << 1 | dir;

    return new_iss;
}

void
SupervisorCall::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (FullSystem) {
        ArmFault::invoke(tc, inst);
        return;
    }

    // As of now, there isn't a 32 bit thumb version of this instruction.
    assert(!machInst.bigThumb);
    uint32_t callNum;
    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
    OperatingMode mode = (OperatingMode)(uint8_t)cpsr.mode;
    if (opModeIs64(mode))
        callNum = tc->readIntReg(INTREG_X8);
    else
        callNum = tc->readIntReg(INTREG_R7);
    Fault fault;
    tc->syscall(callNum, &fault);

    // Advance the PC since that won't happen automatically.
    PCState pc = tc->pcState();
    assert(inst);
    inst->advancePC(pc);
    tc->pcState(pc);
}

bool
SupervisorCall::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);

    // if in Hyp mode then stay in Hyp mode
    toHyp  = scr.ns && (cpsr.mode == MODE_HYP);
    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.mode == MODE_USER);
    return toHyp;
}

ExceptionClass
SupervisorCall::ec(ThreadContext *tc) const
{
    return (overrideEc != EC_INVALID) ? overrideEc :
        (from64 ? EC_SVC_64 : vals.ec);
}

uint32_t
SupervisorCall::iss() const
{
    // Even if we have a 24 bit immediate from an arm32 instruction, only the
    // bottom 16 bits are used for the ISS value (this doesn't hurt for
    // AArch64 SVC).
    return issRaw & 0xFFFF;
}

uint32_t
SecureMonitorCall::iss() const
{
    if (from64)
        return bits(machInst, 20, 5);
    return 0;
}

ExceptionClass
UndefinedInstruction::ec(ThreadContext *tc) const
{
    return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
}


HypervisorCall::HypervisorCall(ExtMachInst _machInst, uint32_t _imm) :
        ArmFaultVals<HypervisorCall>(_machInst, _imm)
{}

ExceptionClass
HypervisorCall::ec(ThreadContext *tc) const
{
    return from64 ? EC_HVC_64 : vals.ec;
}

ExceptionClass
HypervisorTrap::ec(ThreadContext *tc) const
{
    return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
}

template<class T>
FaultOffset
ArmFaultVals<T>::offset(ThreadContext *tc)
{
    bool isHypTrap = false;

861    // this file, however if this exception has caused a transition to hype
862    // mode, and its an exception type that would only do this if it has been
863    // trapped then we use the hyp trap vector instead of the normal vector
864    if (vals.hypTrappable) {
865        CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
866        if (cpsr.mode == MODE_HYP) {
867            CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
868            isHypTrap = spsr.mode != MODE_HYP;
869        }
870    }
871    return isHypTrap ? 0x14 : vals.offset;
872}
873
874// void
875// SupervisorCall::setSyndrome64(ThreadContext *tc, MiscRegIndex esr_idx)
876// {
877//     ESR esr = 0;
878//     esr.ec = machInst.aarch64 ? SvcAArch64 : SvcAArch32;
879//     esr.il = !machInst.thumb;
880//     if (machInst.aarch64)
881//         esr.imm16 = bits(machInst.instBits, 20, 5);
882//     else if (machInst.thumb)
883//         esr.imm16 = bits(machInst.instBits, 7, 0);
884//     else
885//         esr.imm16 = bits(machInst.instBits, 15, 0);
886//     tc->setMiscReg(esr_idx, esr);
887// }
888
889void
890SecureMonitorCall::invoke(ThreadContext *tc, const StaticInstPtr &inst)
891{
892    if (FullSystem) {
893        ArmFault::invoke(tc, inst);
894        return;
895    }
896}
897
898ExceptionClass
899SecureMonitorCall::ec(ThreadContext *tc) const
900{
901    return (from64 ? EC_SMC_64 : vals.ec);
902}
903
904ExceptionClass
905SupervisorTrap::ec(ThreadContext *tc) const
906{
907    return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
908}
909
910ExceptionClass
911SecureMonitorTrap::ec(ThreadContext *tc) const
912{
913    return (overrideEc != EC_INVALID) ? overrideEc :
914        (from64 ? EC_SMC_64 : vals.ec);
915}
916
917template<class T>
918void
919AbortFault<T>::invoke(ThreadContext *tc, const StaticInstPtr &inst)
920{
921    if (tranMethod == ArmFault::UnknownTran) {
922        tranMethod = longDescFormatInUse(tc) ? ArmFault::LpaeTran
923                                             : ArmFault::VmsaTran;
924
925        if ((tranMethod == ArmFault::VmsaTran) && this->routeToMonitor(tc)) {
926            // See ARM ARM B3-1416
927            bool override_LPAE = false;
928            TTBCR ttbcr_s = tc->readMiscReg(MISCREG_TTBCR_S);
929            TTBCR M5_VAR_USED ttbcr_ns = tc->readMiscReg(MISCREG_TTBCR_NS);
930            if (ttbcr_s.eae) {
931                override_LPAE = true;
932            } else {
                // Unimplemented code option, not seen in testing.  May need
                // extension according to the manual excerpt above.
935                DPRINTF(Faults, "Warning: Incomplete translation method "
936                        "override detected.\n");
937            }
938            if (override_LPAE)
939                tranMethod = ArmFault::LpaeTran;
940        }
941    }
942
943    if (source == ArmFault::AsynchronousExternalAbort) {
944        tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_ABT, 0);
945    }
946    // Get effective fault source encoding
947    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
948    FSR  fsr  = getFsr(tc);
949
950    // source must be determined BEFORE invoking generic routines which will
951    // try to set hsr etc. and are based upon source!
952    ArmFaultVals<T>::invoke(tc, inst);
953
954    if (!this->to64) {  // AArch32
955        if (cpsr.mode == MODE_HYP) {
956            tc->setMiscReg(T::HFarIndex, faultAddr);
957        } else if (stage2) {
958            tc->setMiscReg(MISCREG_HPFAR, (faultAddr >> 8) & ~0xf);
959            tc->setMiscReg(T::HFarIndex,  OVAddr);
960        } else {
961            tc->setMiscReg(T::FsrIndex, fsr);
962            tc->setMiscReg(T::FarIndex, faultAddr);
963        }
964        DPRINTF(Faults, "Abort Fault source=%#x fsr=%#x faultAddr=%#x "\
965                "tranMethod=%#x\n", source, fsr, faultAddr, tranMethod);
966    } else {  // AArch64
967        // Set the FAR register.  Nothing else to do if we are in AArch64 state
968        // because the syndrome register has already been set inside invoke64()
969        if (stage2) {
970            // stage 2 fault, set HPFAR_EL2 to the faulting IPA
971            // and FAR_EL2 to the Original VA
972            tc->setMiscReg(AbortFault<T>::getFaultAddrReg64(), OVAddr);
973            tc->setMiscReg(MISCREG_HPFAR_EL2, bits(faultAddr, 47, 12) << 4);
974
975            DPRINTF(Faults, "Abort Fault (Stage 2) VA: 0x%x IPA: 0x%x\n",
976                    OVAddr, faultAddr);
977        } else {
978            tc->setMiscReg(AbortFault<T>::getFaultAddrReg64(), faultAddr);
979        }
980    }
981}
982
983template<class T>
984FSR
985AbortFault<T>::getFsr(ThreadContext *tc)
986{
987    FSR fsr = 0;
988
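    // The long-descriptor (LPAE) FSR format carries a six bit status field
    // with the LPAE bit set, while the short-descriptor format splits the
    // fault status across FS[4] and FS[3:0] and also records the faulting
    // domain.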
    if (((CPSR) tc->readMiscRegNoEffect(MISCREG_CPSR)).width) {
        // AArch32
        assert(tranMethod != ArmFault::UnknownTran);
        if (tranMethod == ArmFault::LpaeTran) {
            srcEncoded = ArmFault::longDescFaultSources[source];
            fsr.status = srcEncoded;
            fsr.lpae   = 1;
        } else {
            srcEncoded = ArmFault::shortDescFaultSources[source];
            fsr.fsLow  = bits(srcEncoded, 3, 0);
            fsr.fsHigh = bits(srcEncoded, 4);
            fsr.domain = static_cast<uint8_t>(domain);
        }
        fsr.wnr = (write ? 1 : 0);
        fsr.ext = 0;
    } else {
        // AArch64
        srcEncoded = ArmFault::aarch64FaultSources[source];
    }
    if (srcEncoded == ArmFault::FaultSourceInvalid) {
        panic("Invalid fault source\n");
    }
    return fsr;
}

template<class T>
bool
AbortFault<T>::abortDisable(ThreadContext *tc)
{
    if (ArmSystem::haveSecurity(tc)) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        return (!scr.ns || scr.aw);
    }
    return true;
}

template<class T>
void
AbortFault<T>::annotate(ArmFault::AnnotationIDs id, uint64_t val)
{
    switch (id)
    {
      case ArmFault::S1PTW:
        s1ptw = val;
        break;
      case ArmFault::OVA:
        OVAddr = val;
        break;

      // Just ignore unknown IDs
      default:
        break;
    }
}

template<class T>
uint32_t
AbortFault<T>::iss() const
{
    uint32_t val;

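    // ISS layout for aborts: the fault status code goes in bits [5:0], the
    // WnR (write-not-read) flag in bit [6] and S1PTW in bit [7].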
    val  = srcEncoded & 0x3F;
    val |= write << 6;
    val |= s1ptw << 7;
    return (val);
}

template<class T>
bool
AbortFault<T>::isMMUFault() const
{
    // NOTE: Not relying on LL information being aligned to lowest bits here
    return
         (source == ArmFault::AlignmentFault)     ||
        ((source >= ArmFault::TranslationLL) &&
         (source <  ArmFault::TranslationLL + 4)) ||
        ((source >= ArmFault::AccessFlagLL) &&
         (source <  ArmFault::AccessFlagLL + 4))  ||
        ((source >= ArmFault::DomainLL) &&
         (source <  ArmFault::DomainLL + 4))      ||
        ((source >= ArmFault::PermissionLL) &&
         (source <  ArmFault::PermissionLL + 4));
}

ExceptionClass
PrefetchAbort::ec(ThreadContext *tc) const
{
    if (to64) {
        // AArch64
        if (toEL == fromEL)
            return EC_PREFETCH_ABORT_CURR_EL;
        else
            return EC_PREFETCH_ABORT_LOWER_EL;
    } else {
        // AArch32
        // Abort faults have different EC codes depending on whether
        // the fault originated within HYP mode, or not. So override
        // the method and add the extra adjustment of the EC value.

        ExceptionClass ec = ArmFaultVals<PrefetchAbort>::vals.ec;

        CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
        if (spsr.mode == MODE_HYP) {
            ec = ((ExceptionClass) (((uint32_t) ec) + 1));
        }
        return ec;
    }
}

bool
PrefetchAbort::routeToMonitor(ThreadContext *tc) const
{
    SCR scr = 0;
    if (from64)
        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    else
        scr = tc->readMiscRegNoEffect(MISCREG_SCR);

    return scr.ea && !isMMUFault();
}

bool
PrefetchAbort::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    HDCR hdcr = tc->readMiscRegNoEffect(MISCREG_HDCR);

    // if in Hyp mode then stay in Hyp mode
    toHyp  = scr.ns && (cpsr.mode == MODE_HYP);
    // otherwise, check whether to take to Hyp mode through Hyp Trap vector
    toHyp |= (stage2 ||
                ( (source ==               DebugEvent) && hdcr.tde && (cpsr.mode !=  MODE_HYP)) ||
                ( (source == SynchronousExternalAbort) && hcr.tge  && (cpsr.mode == MODE_USER))
             ) && !inSecureState(tc);
    return toHyp;
}

ExceptionClass
DataAbort::ec(ThreadContext *tc) const
{
    if (to64) {
        // AArch64
        if (source == ArmFault::AsynchronousExternalAbort) {
            panic("Asynchronous External Abort should be handled with "
                    "SystemErrors (SErrors)!");
        }
        if (toEL == fromEL)
            return EC_DATA_ABORT_CURR_EL;
        else
            return EC_DATA_ABORT_LOWER_EL;
    } else {
        // AArch32
        // Abort faults have different EC codes depending on whether
        // the fault originated within HYP mode, or not. So override
        // the method and add the extra adjustment of the EC value.

        ExceptionClass ec = ArmFaultVals<DataAbort>::vals.ec;

        CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
        if (spsr.mode == MODE_HYP) {
            ec = ((ExceptionClass) (((uint32_t) ec) + 1));
        }
        return ec;
    }
}

bool
DataAbort::routeToMonitor(ThreadContext *tc) const
{
    SCR scr = 0;
    if (from64)
        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    else
        scr = tc->readMiscRegNoEffect(MISCREG_SCR);

    return scr.ea && !isMMUFault();
}

bool
DataAbort::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    HDCR hdcr = tc->readMiscRegNoEffect(MISCREG_HDCR);

    // if in Hyp mode then stay in Hyp mode
    toHyp  = scr.ns && (cpsr.mode == MODE_HYP);
    // otherwise, check whether to take to Hyp mode through Hyp Trap vector
    toHyp |= (stage2 ||
                ( (cpsr.mode != MODE_HYP) && ( ((source == AsynchronousExternalAbort) && hcr.amo) ||
                                               ((source == DebugEvent) && hdcr.tde) )
                ) ||
                ( (cpsr.mode == MODE_USER) && hcr.tge &&
                  ((source == AlignmentFault)            ||
                   (source == SynchronousExternalAbort))
                )
             ) && !inSecureState(tc);
    return toHyp;
}

uint32_t
DataAbort::iss() const
{
    uint32_t val;

    // Add on the data abort specific fields to the generic abort ISS value
    val  = AbortFault<DataAbort>::iss();
    // ISS is valid if not caused by a stage 1 page table walk, and when taken
    // to AArch64 only when directed to EL2
    if (!s1ptw && (!to64 || toEL == EL2)) {
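        // ISV is bit [24]; when it is set, SAS occupies bits [23:22], SSE
        // bit [21], SRT bits [20:16], SF bit [15] and AR bit [14].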
        val |= isv << 24;
        if (isv) {
            val |= sas << 22;
            val |= sse << 21;
            val |= srt << 16;
            // AArch64 only. These assignments are safe on AArch32 as well
            // because these vars are initialized to false
            val |= sf << 15;
            val |= ar << 14;
        }
    }
    return (val);
}

void
DataAbort::annotate(AnnotationIDs id, uint64_t val)
{
    AbortFault<DataAbort>::annotate(id, val);
    switch (id)
    {
      case SAS:
        isv = true;
        sas = val;
        break;
      case SSE:
        isv = true;
        sse = val;
        break;
      case SRT:
        isv = true;
        srt = val;
        break;
      case SF:
        isv = true;
        sf  = val;
        break;
      case AR:
        isv = true;
        ar  = val;
        break;
      // Just ignore unknown IDs
      default:
        break;
    }
}

void
VirtualDataAbort::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    AbortFault<VirtualDataAbort>::invoke(tc, inst);
    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
    hcr.va = 0;
    tc->setMiscRegNoEffect(MISCREG_HCR, hcr);
}

bool
Interrupt::routeToMonitor(ThreadContext *tc) const
{
    assert(ArmSystem::haveSecurity(tc));
    SCR scr = 0;
    if (from64)
        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    else
        scr = tc->readMiscRegNoEffect(MISCREG_SCR);
    return scr.irq;
}

bool
Interrupt::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    // Determine whether IRQs are routed to Hyp mode.
    toHyp = (!scr.irq && hcr.imo && !inSecureState(tc)) ||
            (cpsr.mode == MODE_HYP);
    return toHyp;
}

bool
Interrupt::abortDisable(ThreadContext *tc)
{
    if (ArmSystem::haveSecurity(tc)) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        return (!scr.ns || scr.aw);
    }
    return true;
}

VirtualInterrupt::VirtualInterrupt()
{}

bool
FastInterrupt::routeToMonitor(ThreadContext *tc) const
{
    assert(ArmSystem::haveSecurity(tc));
    SCR scr = 0;
    if (from64)
        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    else
        scr = tc->readMiscRegNoEffect(MISCREG_SCR);
    return scr.fiq;
}

bool
FastInterrupt::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    // Determine whether FIQs are routed to Hyp mode.
    toHyp = (!scr.fiq && hcr.fmo && !inSecureState(tc)) ||
            (cpsr.mode == MODE_HYP);
    return toHyp;
}

bool
FastInterrupt::abortDisable(ThreadContext *tc)
{
    if (ArmSystem::haveSecurity(tc)) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        return (!scr.ns || scr.aw);
    }
    return true;
}

bool
FastInterrupt::fiqDisable(ThreadContext *tc)
{
    if (ArmSystem::haveVirtualization(tc)) {
        return true;
    } else if (ArmSystem::haveSecurity(tc)) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        return (!scr.ns || scr.fw);
    }
    return true;
}

VirtualFastInterrupt::VirtualFastInterrupt()
{}

void
PCAlignmentFault::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    ArmFaultVals<PCAlignmentFault>::invoke(tc, inst);
    assert(from64);
    // Set the FAR
    tc->setMiscReg(getFaultAddrReg64(), faultPC);
}

SPAlignmentFault::SPAlignmentFault()
{}

SystemError::SystemError()
{}

void
SystemError::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_ABT, 0);
    ArmFault::invoke(tc, inst);
}

bool
SystemError::routeToMonitor(ThreadContext *tc) const
{
    assert(ArmSystem::haveSecurity(tc));
    assert(from64);
    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    return scr.ea;
}

bool
SystemError::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;
    assert(from64);

    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    HCR hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);

    toHyp = (!scr.ea && hcr.amo && !inSecureState(tc)) ||
            (!scr.ea && !scr.rw && !hcr.amo && !inSecureState(tc));
    return toHyp;
}

void
ArmSev::invoke(ThreadContext *tc, const StaticInstPtr &inst) {
    DPRINTF(Faults, "Invoking ArmSev Fault\n");
    if (!FullSystem)
        return;

    // Set sev_mailbox to 1, clear the pending interrupt from remote
    // SEV execution and let pipeline continue as pcState is still
    // valid.
    tc->setMiscReg(MISCREG_SEV_MAILBOX, 1);
    tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_SEV, 0);
}

// Instantiate all the templates to make the linker happy
template class ArmFaultVals<Reset>;
template class ArmFaultVals<UndefinedInstruction>;
template class ArmFaultVals<SupervisorCall>;
template class ArmFaultVals<SecureMonitorCall>;
template class ArmFaultVals<HypervisorCall>;
template class ArmFaultVals<PrefetchAbort>;
template class ArmFaultVals<DataAbort>;
template class ArmFaultVals<VirtualDataAbort>;
template class ArmFaultVals<HypervisorTrap>;
template class ArmFaultVals<Interrupt>;
template class ArmFaultVals<VirtualInterrupt>;
template class ArmFaultVals<FastInterrupt>;
template class ArmFaultVals<VirtualFastInterrupt>;
template class ArmFaultVals<SupervisorTrap>;
template class ArmFaultVals<SecureMonitorTrap>;
template class ArmFaultVals<PCAlignmentFault>;
template class ArmFaultVals<SPAlignmentFault>;
template class ArmFaultVals<SystemError>;
template class ArmFaultVals<ArmSev>;
template class AbortFault<PrefetchAbort>;
template class AbortFault<DataAbort>;
template class AbortFault<VirtualDataAbort>;


IllegalInstSetStateFault::IllegalInstSetStateFault()
{}


} // namespace ArmISA