faults.cc revision 12512:cb5ebe30bb9e
/*
 * Copyright (c) 2010, 2012-2014, 2016-2018 ARM Limited
 * All rights reserved
 *
 * The license below extends only to copyright in the software and shall
 * not be construed as granting a license to any other intellectual
 * property including but not limited to intellectual property relating
 * to a hardware implementation of the functionality of the software
 * licensed hereunder.  You may use the software subject to the license
 * terms below provided that you ensure that this notice is replicated
 * unmodified and in its entirety in all distributions of the software,
 * modified or unmodified, in source code or in binary form.
 *
 * Copyright (c) 2003-2005 The Regents of The University of Michigan
 * Copyright (c) 2007-2008 The Florida State University
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met: redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer;
 * redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution;
 * neither the name of the copyright holders nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * Authors: Ali Saidi
 *          Gabe Black
 *          Giacomo Gabrielli
 *          Thomas Grocutt
 */

#include "arch/arm/faults.hh"

#include "arch/arm/insts/static_inst.hh"
#include "arch/arm/system.hh"
#include "arch/arm/utility.hh"
#include "base/compiler.hh"
#include "base/trace.hh"
#include "cpu/base.hh"
#include "cpu/thread_context.hh"
#include "debug/Faults.hh"
#include "sim/full_system.hh"

namespace ArmISA
{

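// The three tables below map the ArmFault::FaultSource enum onto the
// architectural fault status encodings reported to software.  This first
// table holds the short-descriptor (VMSAv7) DFSR/IFSR FS field values; the
// encoding is split as {FS[4], FS[3:0]} when it is written back in getFsr().
// Entries of 0xff (FaultSourceInvalid) mark source/format combinations with
// no valid encoding and are caught by the check in getFsr().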
uint8_t ArmFault::shortDescFaultSources[] = {
    0x01,  // AlignmentFault
    0x04,  // InstructionCacheMaintenance
    0xff,  // SynchExtAbtOnTranslTableWalkL0 (INVALID)
    0x0c,  // SynchExtAbtOnTranslTableWalkL1
    0x0e,  // SynchExtAbtOnTranslTableWalkL2
    0xff,  // SynchExtAbtOnTranslTableWalkL3 (INVALID)
    0xff,  // SynchPtyErrOnTranslTableWalkL0 (INVALID)
    0x1c,  // SynchPtyErrOnTranslTableWalkL1
    0x1e,  // SynchPtyErrOnTranslTableWalkL2
    0xff,  // SynchPtyErrOnTranslTableWalkL3 (INVALID)
    0xff,  // TranslationL0 (INVALID)
    0x05,  // TranslationL1
    0x07,  // TranslationL2
    0xff,  // TranslationL3 (INVALID)
    0xff,  // AccessFlagL0 (INVALID)
    0x03,  // AccessFlagL1
    0x06,  // AccessFlagL2
    0xff,  // AccessFlagL3 (INVALID)
    0xff,  // DomainL0 (INVALID)
    0x09,  // DomainL1
    0x0b,  // DomainL2
    0xff,  // DomainL3 (INVALID)
    0xff,  // PermissionL0 (INVALID)
    0x0d,  // PermissionL1
    0x0f,  // PermissionL2
    0xff,  // PermissionL3 (INVALID)
    0x02,  // DebugEvent
    0x08,  // SynchronousExternalAbort
    0x10,  // TLBConflictAbort
    0x19,  // SynchPtyErrOnMemoryAccess
    0x16,  // AsynchronousExternalAbort
    0x18,  // AsynchPtyErrOnMemoryAccess
    0xff,  // AddressSizeL0 (INVALID)
    0xff,  // AddressSizeL1 (INVALID)
    0xff,  // AddressSizeL2 (INVALID)
    0xff,  // AddressSizeL3 (INVALID)
    0x40,  // PrefetchTLBMiss
    0x80   // PrefetchUncacheable
};

static_assert(sizeof(ArmFault::shortDescFaultSources) ==
              ArmFault::NumFaultSources,
              "Invalid size of ArmFault::shortDescFaultSources[]");

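// Long-descriptor (LPAE) translation format: 6-bit STATUS field encodings
// written into the DFSR/IFSR when the long-descriptor format is in use
// (LpaeTran, see getFsr() below).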
uint8_t ArmFault::longDescFaultSources[] = {
    0x21,  // AlignmentFault
    0xff,  // InstructionCacheMaintenance (INVALID)
    0xff,  // SynchExtAbtOnTranslTableWalkL0 (INVALID)
    0x15,  // SynchExtAbtOnTranslTableWalkL1
    0x16,  // SynchExtAbtOnTranslTableWalkL2
    0x17,  // SynchExtAbtOnTranslTableWalkL3
    0xff,  // SynchPtyErrOnTranslTableWalkL0 (INVALID)
    0x1d,  // SynchPtyErrOnTranslTableWalkL1
    0x1e,  // SynchPtyErrOnTranslTableWalkL2
    0x1f,  // SynchPtyErrOnTranslTableWalkL3
    0xff,  // TranslationL0 (INVALID)
    0x05,  // TranslationL1
    0x06,  // TranslationL2
    0x07,  // TranslationL3
    0xff,  // AccessFlagL0 (INVALID)
    0x09,  // AccessFlagL1
    0x0a,  // AccessFlagL2
    0x0b,  // AccessFlagL3
    0xff,  // DomainL0 (INVALID)
    0x3d,  // DomainL1
    0x3e,  // DomainL2
    0xff,  // DomainL3 (RESERVED)
    0xff,  // PermissionL0 (INVALID)
    0x0d,  // PermissionL1
    0x0e,  // PermissionL2
    0x0f,  // PermissionL3
    0x22,  // DebugEvent
    0x10,  // SynchronousExternalAbort
    0x30,  // TLBConflictAbort
    0x18,  // SynchPtyErrOnMemoryAccess
    0x11,  // AsynchronousExternalAbort
    0x19,  // AsynchPtyErrOnMemoryAccess
    0xff,  // AddressSizeL0 (INVALID)
    0xff,  // AddressSizeL1 (INVALID)
    0xff,  // AddressSizeL2 (INVALID)
    0xff,  // AddressSizeL3 (INVALID)
    0x40,  // PrefetchTLBMiss
    0x80   // PrefetchUncacheable
};

static_assert(sizeof(ArmFault::longDescFaultSources) ==
              ArmFault::NumFaultSources,
              "Invalid size of ArmFault::longDescFaultSources[]");

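// AArch64: 6-bit fault status codes (IFSC/DFSC) placed in the ISS field of
// ESR_ELx for aborts taken to an AArch64 exception level.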
uint8_t ArmFault::aarch64FaultSources[] = {
    0x21,  // AlignmentFault
    0xff,  // InstructionCacheMaintenance (INVALID)
    0x14,  // SynchExtAbtOnTranslTableWalkL0
    0x15,  // SynchExtAbtOnTranslTableWalkL1
    0x16,  // SynchExtAbtOnTranslTableWalkL2
    0x17,  // SynchExtAbtOnTranslTableWalkL3
    0x1c,  // SynchPtyErrOnTranslTableWalkL0
    0x1d,  // SynchPtyErrOnTranslTableWalkL1
    0x1e,  // SynchPtyErrOnTranslTableWalkL2
    0x1f,  // SynchPtyErrOnTranslTableWalkL3
    0x04,  // TranslationL0
    0x05,  // TranslationL1
    0x06,  // TranslationL2
    0x07,  // TranslationL3
    0x08,  // AccessFlagL0
    0x09,  // AccessFlagL1
    0x0a,  // AccessFlagL2
    0x0b,  // AccessFlagL3
    // @todo: Section & Page Domain Fault in AArch64?
    0xff,  // DomainL0 (INVALID)
    0xff,  // DomainL1 (INVALID)
    0xff,  // DomainL2 (INVALID)
    0xff,  // DomainL3 (INVALID)
    0x0c,  // PermissionL0
    0x0d,  // PermissionL1
    0x0e,  // PermissionL2
    0x0f,  // PermissionL3
    0xff,  // DebugEvent (INVALID)
    0x10,  // SynchronousExternalAbort
    0x30,  // TLBConflictAbort
    0x18,  // SynchPtyErrOnMemoryAccess
    0xff,  // AsynchronousExternalAbort (INVALID)
    0xff,  // AsynchPtyErrOnMemoryAccess (INVALID)
    0x00,  // AddressSizeL0
    0x01,  // AddressSizeL1
    0x02,  // AddressSizeL2
    0x03,  // AddressSizeL3
    0x40,  // PrefetchTLBMiss
    0x80   // PrefetchUncacheable
};

static_assert(sizeof(ArmFault::aarch64FaultSources) ==
              ArmFault::NumFaultSources,
              "Invalid size of ArmFault::aarch64FaultSources[]");

// Fields: name, offset, cur{ELT,ELH}Offset, lowerEL{64,32}Offset, next mode,
//         {ARM, Thumb, ARM_ELR, Thumb_ELR} PC offset, hyp trap,
//         {A, F} disable, class, stat
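//
// The first offset is the AArch32 vector offset; the cur{ELT,ELH} and
// lowerEL{64,32} offsets follow the AArch64 vector table layout (0x000
// current EL with SP_EL0, 0x200 current EL with SP_ELx, 0x400 lower EL
// using AArch64, 0x600 lower EL using AArch32), with the per-type
// displacement (+0x000 synchronous, +0x080 IRQ, +0x100 FIQ, +0x180 SError)
// already folded into the values below.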
template<> ArmFault::FaultVals ArmFaultVals<Reset>::vals = {
    // Some dummy values (the reset vector has an IMPLEMENTATION DEFINED
    // location in AArch64)
    "Reset",                 0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC,
    0, 0, 0, 0, false, true,  true,  EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<UndefinedInstruction>::vals = {
    "Undefined Instruction", 0x004, 0x000, 0x200, 0x400, 0x600, MODE_UNDEFINED,
    4, 2, 0, 0, true,  false, false, EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SupervisorCall>::vals = {
    "Supervisor Call",       0x008, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    4, 2, 4, 2, true,  false, false, EC_SVC_TO_HYP, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorCall>::vals = {
    "Secure Monitor Call",   0x008, 0x000, 0x200, 0x400, 0x600, MODE_MON,
    4, 4, 4, 4, false, true,  true,  EC_SMC_TO_HYP, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<HypervisorCall>::vals = {
    "Hypervisor Call",       0x008, 0x000, 0x200, 0x400, 0x600, MODE_HYP,
    4, 4, 4, 4, true,  false, false, EC_HVC, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<PrefetchAbort>::vals = {
    "Prefetch Abort",        0x00C, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
    4, 4, 0, 0, true,  true,  false, EC_PREFETCH_ABORT_TO_HYP, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<DataAbort>::vals = {
    "Data Abort",            0x010, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
    8, 8, 0, 0, true,  true,  false, EC_DATA_ABORT_TO_HYP, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<VirtualDataAbort>::vals = {
    "Virtual Data Abort",    0x010, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
    8, 8, 0, 0, true,  true,  false, EC_INVALID, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<HypervisorTrap>::vals = {
    // @todo: double check these values
    "Hypervisor Trap",       0x014, 0x000, 0x200, 0x400, 0x600, MODE_HYP,
    0, 0, 0, 0, false, false, false, EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorTrap>::vals = {
    "Secure Monitor Trap",   0x004, 0x000, 0x200, 0x400, 0x600, MODE_MON,
    4, 2, 0, 0, false, false, false, EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<Interrupt>::vals = {
    "IRQ",                   0x018, 0x080, 0x280, 0x480, 0x680, MODE_IRQ,
    4, 4, 0, 0, false, true,  false, EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<VirtualInterrupt>::vals = {
    "Virtual IRQ",           0x018, 0x080, 0x280, 0x480, 0x680, MODE_IRQ,
    4, 4, 0, 0, false, true,  false, EC_INVALID, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<FastInterrupt>::vals = {
    "FIQ",                   0x01C, 0x100, 0x300, 0x500, 0x700, MODE_FIQ,
    4, 4, 0, 0, false, true,  true,  EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<VirtualFastInterrupt>::vals = {
    "Virtual FIQ",           0x01C, 0x100, 0x300, 0x500, 0x700, MODE_FIQ,
    4, 4, 0, 0, false, true,  true,  EC_INVALID, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SupervisorTrap>::vals = {
    // Some dummy values (SupervisorTrap is AArch64-only)
    "Supervisor Trap",   0x014, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, false, false, false, EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<PCAlignmentFault>::vals = {
    // Some dummy values (PCAlignmentFault is AArch64-only)
    "PC Alignment Fault",   0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, true, false, false, EC_PC_ALIGNMENT, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SPAlignmentFault>::vals = {
    // Some dummy values (SPAlignmentFault is AArch64-only)
    "SP Alignment Fault",   0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, true, false, false, EC_STACK_PTR_ALIGNMENT, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SystemError>::vals = {
    // Some dummy values (SError is AArch64-only)
    "SError",                0x000, 0x180, 0x380, 0x580, 0x780, MODE_SVC,
    0, 0, 0, 0, false, true,  true,  EC_SERROR, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SoftwareBreakpoint>::vals = {
    // Some dummy values (SoftwareBreakpoint is AArch64-only)
    "Software Breakpoint",   0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, true, false, false,  EC_SOFTWARE_BREAKPOINT, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<ArmSev>::vals = {
    // Some dummy values
    "ArmSev Flush",          0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC,
    0, 0, 0, 0, false, true,  true,  EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<IllegalInstSetStateFault>::vals = {
    // Some dummy values (IllegalInstSetStateFault is AArch64-only)
    "Illegal Inst Set State Fault",   0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, true, false, false, EC_ILLEGAL_INST, FaultStat()
};

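// AArch32 exception vector base selection: Monitor mode exceptions use
// MVBAR, Hyp mode exceptions use HVBAR, and everything else uses either the
// high vectors (HighVecs, 0xffff0000) when SCTLR.V is set or VBAR (zero
// without the Security Extensions).  The per-fault offset from the table
// above is then added to the chosen base.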
Addr
ArmFault::getVector(ThreadContext *tc)
{
    Addr base;

    // ARM ARM issue C B1.8.1
    bool haveSecurity = ArmSystem::haveSecurity(tc);

    // panic if SCTLR.VE because I have no idea what to do with vectored
    // interrupts
    SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR);
    assert(!sctlr.ve);
    // Check for invalid modes
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    assert(haveSecurity                      || cpsr.mode != MODE_MON);
    assert(ArmSystem::haveVirtualization(tc) || cpsr.mode != MODE_HYP);

    switch (cpsr.mode)
    {
      case MODE_MON:
        base = tc->readMiscReg(MISCREG_MVBAR);
        break;
      case MODE_HYP:
        base = tc->readMiscReg(MISCREG_HVBAR);
        break;
      default:
        if (sctlr.v) {
            base = HighVecs;
        } else {
            base = haveSecurity ? tc->readMiscReg(MISCREG_VBAR) : 0;
        }
        break;
    }
    return base + offset(tc);
}

Addr
ArmFault::getVector64(ThreadContext *tc)
{
    Addr vbar;
    switch (toEL) {
      case EL3:
        assert(ArmSystem::haveSecurity(tc));
        vbar = tc->readMiscReg(MISCREG_VBAR_EL3);
        break;
      case EL2:
        assert(ArmSystem::haveVirtualization(tc));
        vbar = tc->readMiscReg(MISCREG_VBAR_EL2);
        break;
      case EL1:
        vbar = tc->readMiscReg(MISCREG_VBAR_EL1);
        break;
      default:
        panic("Invalid target exception level");
        break;
    }
    return vbar + offset64(tc);
}

MiscRegIndex
ArmFault::getSyndromeReg64() const
{
    switch (toEL) {
      case EL1:
        return MISCREG_ESR_EL1;
      case EL2:
        return MISCREG_ESR_EL2;
      case EL3:
        return MISCREG_ESR_EL3;
      default:
        panic("Invalid exception level");
        break;
    }
}

MiscRegIndex
ArmFault::getFaultAddrReg64() const
{
    switch (toEL) {
      case EL1:
        return MISCREG_FAR_EL1;
      case EL2:
        return MISCREG_FAR_EL2;
      case EL3:
        return MISCREG_FAR_EL3;
      default:
        panic("Invalid exception level");
        break;
    }
}

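// Build the syndrome value written into HSR/ESR_ELx: EC goes in bits
// [31:26], IL (32-bit instruction length) in bit [25], and the
// fault-specific ISS in bits [24:0].  For conditional AArch32 instructions
// whose EC allows it, CV (bit [24]) and COND (bits [23:20]) are also
// recorded, leaving ISS[19:0] for the remaining syndrome bits.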
void
ArmFault::setSyndrome(ThreadContext *tc, MiscRegIndex syndrome_reg)
{
    uint32_t value;
    uint32_t exc_class = (uint32_t) ec(tc);
    uint32_t issVal = iss();

    assert(!from64 || ArmSystem::highestELIs64(tc));

    value = exc_class << 26;

    // HSR.IL is not valid for Prefetch Aborts (0x20, 0x21) and Data Aborts
    // (0x24, 0x25) for which the ISS information is not valid (ARMv7).
    // @todo: ARMv8 revises AArch32 functionality: when HSR.IL is not
    // valid it is treated as RES1.
    if (to64) {
        value |= 1 << 25;
    } else if ((bits(exc_class, 5, 3) != 4) ||
               (bits(exc_class, 2) && bits(issVal, 24))) {
        if (!machInst.thumb || machInst.bigThumb)
            value |= 1 << 25;
    }
    // The condition code is only valid when EC[5:4] is zero and EC[3:0] is
    // nonzero
    if (!from64 && ((bits(exc_class, 5, 4) == 0) &&
                    (bits(exc_class, 3, 0) != 0))) {
        if (!machInst.thumb) {
            uint32_t      cond;
            ConditionCode condCode = (ConditionCode) (uint32_t) machInst.condCode;
            // If it is an unconditional instruction, report it with a
            // condition code of 0xE, i.e. the always (AL) encoding
            cond  = (condCode == COND_UC) ? COND_AL : condCode;
            value |= cond << 20;
            value |= 1    << 24;
        }
        value |= bits(issVal, 19, 0);
    } else {
        value |= issVal;
    }
    tc->setMiscReg(syndrome_reg, value);
}

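// Exception entry.  On an ARMv8 system the target exception level is
// resolved first and invoke64() is used when the handler runs in AArch64;
// otherwise the AArch32 (ARMv7-style) sequence below banks the CPSR into
// the target mode's SPSR, updates the CPSR mode and mask bits, writes the
// preferred return address to LR (or ELR_hyp), and redirects the PC to the
// selected vector.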
void
ArmFault::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);

    if (ArmSystem::highestELIs64(tc)) {  // ARMv8
        // Determine source exception level and mode
        fromMode = (OperatingMode) (uint8_t) cpsr.mode;
        fromEL = opModeToEL(fromMode);
        if (opModeIs64(fromMode))
            from64 = true;

        // Determine target exception level
        if (ArmSystem::haveSecurity(tc) && routeToMonitor(tc)) {
            toEL = EL3;
        } else if (ArmSystem::haveVirtualization(tc) && routeToHyp(tc)) {
            toEL = EL2;
            hypRouted = true;
        } else {
            toEL = opModeToEL(nextMode());
        }

        if (fromEL > toEL)
            toEL = fromEL;

        if (toEL == ArmSystem::highestEL(tc) || ELIs64(tc, toEL)) {
            // Invoke exception handler in AArch64 state
            to64 = true;
            invoke64(tc, inst);
            return;
        }
    }

    // ARMv7 (ARM ARM issue C B1.9)

    bool have_security       = ArmSystem::haveSecurity(tc);
    bool have_virtualization = ArmSystem::haveVirtualization(tc);

    FaultBase::invoke(tc);
    if (!FullSystem)
        return;
    countStat()++;

    SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR);
    SCR scr = tc->readMiscReg(MISCREG_SCR);
    CPSR saved_cpsr = tc->readMiscReg(MISCREG_CPSR);
    saved_cpsr.nz = tc->readCCReg(CCREG_NZ);
    saved_cpsr.c = tc->readCCReg(CCREG_C);
    saved_cpsr.v = tc->readCCReg(CCREG_V);
    saved_cpsr.ge = tc->readCCReg(CCREG_GE);

    Addr curPc M5_VAR_USED = tc->pcState().pc();
    ITSTATE it = tc->pcState().itstate();
    saved_cpsr.it2 = it.top6;
    saved_cpsr.it1 = it.bottom2;

    // If we have a valid instruction then use it to annotate this fault with
    // extra information. This is used to generate the correct fault syndrome
    // information.
    if (inst) {
        ArmStaticInst *armInst = static_cast<ArmStaticInst *>(inst.get());
        armInst->annotateFault(this);
    }

    if (have_security && routeToMonitor(tc)) {
        cpsr.mode = MODE_MON;
    } else if (have_virtualization && routeToHyp(tc)) {
        cpsr.mode = MODE_HYP;
        hypRouted = true;
    } else {
        cpsr.mode = nextMode();
    }

    // Ensure Secure state if initially in Monitor mode
    if (have_security && saved_cpsr.mode == MODE_MON) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        if (scr.ns) {
            scr.ns = 0;
            tc->setMiscRegNoEffect(MISCREG_SCR, scr);
        }
    }

    // Some bits are set differently if we have been routed to Hyp mode
    if (cpsr.mode == MODE_HYP) {
        SCTLR hsctlr = tc->readMiscReg(MISCREG_HSCTLR);
        cpsr.t = hsctlr.te;
        cpsr.e = hsctlr.ee;
        if (!scr.ea)  {cpsr.a = 1;}
        if (!scr.fiq) {cpsr.f = 1;}
        if (!scr.irq) {cpsr.i = 1;}
    } else if (cpsr.mode == MODE_MON) {
        // Special case handling when entering monitor mode
        cpsr.t = sctlr.te;
        cpsr.e = sctlr.ee;
        cpsr.a = 1;
        cpsr.f = 1;
        cpsr.i = 1;
    } else {
        cpsr.t = sctlr.te;
        cpsr.e = sctlr.ee;

        // The *Disable functions are virtual and different per fault
        cpsr.a = cpsr.a | abortDisable(tc);
        cpsr.f = cpsr.f | fiqDisable(tc);
        cpsr.i = 1;
    }
    cpsr.it1 = cpsr.it2 = 0;
    cpsr.j = 0;
    tc->setMiscReg(MISCREG_CPSR, cpsr);

    // Make sure the SEV mailbox is always set to one
    tc->setMiscReg(MISCREG_SEV_MAILBOX, 1);

    // Clear the exclusive monitor
    tc->setMiscReg(MISCREG_LOCKFLAG, 0);

    if (cpsr.mode == MODE_HYP) {
        tc->setMiscReg(MISCREG_ELR_HYP, curPc +
                (saved_cpsr.t ? thumbPcOffset(true)  : armPcOffset(true)));
    } else {
        tc->setIntReg(INTREG_LR, curPc +
                (saved_cpsr.t ? thumbPcOffset(false) : armPcOffset(false)));
    }

    switch (cpsr.mode) {
      case MODE_FIQ:
        tc->setMiscReg(MISCREG_SPSR_FIQ, saved_cpsr);
        break;
      case MODE_IRQ:
        tc->setMiscReg(MISCREG_SPSR_IRQ, saved_cpsr);
        break;
      case MODE_SVC:
        tc->setMiscReg(MISCREG_SPSR_SVC, saved_cpsr);
        break;
      case MODE_MON:
        assert(have_security);
        tc->setMiscReg(MISCREG_SPSR_MON, saved_cpsr);
        break;
      case MODE_ABORT:
        tc->setMiscReg(MISCREG_SPSR_ABT, saved_cpsr);
        break;
      case MODE_UNDEFINED:
        tc->setMiscReg(MISCREG_SPSR_UND, saved_cpsr);
        if (ec(tc) != EC_UNKNOWN)
            setSyndrome(tc, MISCREG_HSR);
        break;
      case MODE_HYP:
        assert(have_virtualization);
        tc->setMiscReg(MISCREG_SPSR_HYP, saved_cpsr);
        setSyndrome(tc, MISCREG_HSR);
        break;
      default:
        panic("unknown Mode\n");
    }

    Addr newPc = getVector(tc);
    DPRINTF(Faults, "Invoking Fault:%s cpsr:%#x PC:%#x lr:%#x newVec: %#x\n",
            name(), cpsr, curPc, tc->readIntReg(INTREG_LR), newPc);
    PCState pc(newPc);
    pc.thumb(cpsr.t);
    pc.nextThumb(pc.thumb());
    pc.jazelle(cpsr.j);
    pc.nextJazelle(pc.jazelle());
    pc.aarch64(!cpsr.width);
    pc.nextAArch64(!cpsr.width);
    tc->pcState(pc);
}

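// AArch64 exception entry: save PSTATE into SPSR_ELx and the preferred
// return address into ELR_ELx, mask DAIF, clear PSTATE.IL/SS, and set the
// PC to VBAR_ELx plus the appropriate vector offset.  The syndrome is
// written for all but IRQ/FIQ, which do not report one.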
void
ArmFault::invoke64(ThreadContext *tc, const StaticInstPtr &inst)
{
    // Determine actual misc. register indices for ELR_ELx and SPSR_ELx
    MiscRegIndex elr_idx, spsr_idx;
    switch (toEL) {
      case EL1:
        elr_idx = MISCREG_ELR_EL1;
        spsr_idx = MISCREG_SPSR_EL1;
        break;
      case EL2:
        assert(ArmSystem::haveVirtualization(tc));
        elr_idx = MISCREG_ELR_EL2;
        spsr_idx = MISCREG_SPSR_EL2;
        break;
      case EL3:
        assert(ArmSystem::haveSecurity(tc));
        elr_idx = MISCREG_ELR_EL3;
        spsr_idx = MISCREG_SPSR_EL3;
        break;
      default:
        panic("Invalid target exception level");
        break;
    }

    // Save process state into SPSR_ELx
    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
    CPSR spsr = cpsr;
    spsr.nz = tc->readCCReg(CCREG_NZ);
    spsr.c = tc->readCCReg(CCREG_C);
    spsr.v = tc->readCCReg(CCREG_V);
    if (from64) {
        // Force some bitfields to 0
        spsr.q = 0;
        spsr.it1 = 0;
        spsr.j = 0;
        spsr.res0_23_22 = 0;
        spsr.ge = 0;
        spsr.it2 = 0;
        spsr.t = 0;
    } else {
        spsr.ge = tc->readCCReg(CCREG_GE);
        ITSTATE it = tc->pcState().itstate();
        spsr.it2 = it.top6;
        spsr.it1 = it.bottom2;
        // Force some bitfields to 0
        spsr.res0_23_22 = 0;
        spsr.ss = 0;
    }
    tc->setMiscReg(spsr_idx, spsr);

    // Save preferred return address into ELR_ELx
    Addr curr_pc = tc->pcState().pc();
    Addr ret_addr = curr_pc;
    if (from64)
        ret_addr += armPcElrOffset();
    else
        ret_addr += spsr.t ? thumbPcElrOffset() : armPcElrOffset();
    tc->setMiscReg(elr_idx, ret_addr);

    Addr vec_address = getVector64(tc);

    // Update process state
    OperatingMode64 mode = 0;
    mode.spX = 1;
    mode.el = toEL;
    mode.width = 0;
    cpsr.mode = mode;
    cpsr.daif = 0xf;
    cpsr.il = 0;
    cpsr.ss = 0;
    tc->setMiscReg(MISCREG_CPSR, cpsr);

    // Set PC to start of exception handler
    Addr new_pc = purifyTaggedAddr(vec_address, tc, toEL);
    DPRINTF(Faults, "Invoking Fault (AArch64 target EL):%s cpsr:%#x PC:%#x "
            "elr:%#x newVec: %#x\n", name(), cpsr, curr_pc, ret_addr, new_pc);
    PCState pc(new_pc);
    pc.aarch64(!cpsr.width);
    pc.nextAArch64(!cpsr.width);
    tc->pcState(pc);

    // If we have a valid instruction then use it to annotate this fault with
    // extra information. This is used to generate the correct fault syndrome
    // information
    if (inst)
        static_cast<ArmStaticInst *>(inst.get())->annotateFault(this);
    // Save exception syndrome
    if ((nextMode() != MODE_IRQ) && (nextMode() != MODE_FIQ))
        setSyndrome(tc, getSyndromeReg64());
}

void
Reset::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (FullSystem) {
        tc->getCpuPtr()->clearInterrupts(tc->threadId());
        tc->clearArchRegs();
    }
    if (!ArmSystem::highestELIs64(tc)) {
        ArmFault::invoke(tc, inst);
        tc->setMiscReg(MISCREG_VMPIDR,
                       getMPIDR(dynamic_cast<ArmSystem*>(tc->getSystemPtr()), tc));

        // Unless we have SMC code to get us there, boot in HYP!
        if (ArmSystem::haveVirtualization(tc) &&
            !ArmSystem::haveSecurity(tc)) {
            CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
            cpsr.mode = MODE_HYP;
            tc->setMiscReg(MISCREG_CPSR, cpsr);
        }
    } else {
        // Advance the PC to the IMPLEMENTATION DEFINED reset value
        PCState pc = ArmSystem::resetAddr64(tc);
        pc.aarch64(true);
        pc.nextAArch64(true);
        tc->pcState(pc);
    }
}

void
UndefinedInstruction::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (FullSystem) {
        ArmFault::invoke(tc, inst);
        return;
    }

    // If the mnemonic isn't defined this has to be an unknown instruction.
    assert(unknown || mnemonic != NULL);
    if (disabled) {
        panic("Attempted to execute disabled instruction "
                "'%s' (inst 0x%08x)", mnemonic, machInst);
    } else if (unknown) {
        panic("Attempted to execute unknown instruction (inst 0x%08x)",
              machInst);
    } else {
        panic("Attempted to execute unimplemented instruction "
                "'%s' (inst 0x%08x)", mnemonic, machInst);
    }
}

bool
UndefinedInstruction::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);

    // if in Hyp mode then stay in Hyp mode
    toHyp  = scr.ns && (cpsr.mode == MODE_HYP);
    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.mode == MODE_USER);
    return toHyp;
}

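// When an EC override is set (a trapped register access), the ISS is rebuilt
// from the access fields recorded in machInst: Op0[21:20], Op2[19:17],
// Op1[16:14], CRn[13:10], Rt[9:5], CRm[4:1] and the read/write direction in
// bit [0], matching the ESR/HSR ISS layout for trapped system register and
// coprocessor accesses.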
uint32_t
UndefinedInstruction::iss() const
{

    // If UndefinedInstruction is routed to hypervisor, iss field is 0.
    if (hypRouted) {
        return 0;
    }

    if (overrideEc == EC_INVALID)
        return issRaw;

    uint32_t new_iss = 0;
    uint32_t op0, op1, op2, CRn, CRm, Rt, dir;

    dir = bits(machInst, 21, 21);
    op0 = bits(machInst, 20, 19);
    op1 = bits(machInst, 18, 16);
    CRn = bits(machInst, 15, 12);
    CRm = bits(machInst, 11, 8);
    op2 = bits(machInst, 7, 5);
    Rt = bits(machInst, 4, 0);

    new_iss = op0 << 20 | op2 << 17 | op1 << 14 | CRn << 10 |
            Rt << 5 | CRm << 1 | dir;

    return new_iss;
}

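// In syscall-emulation (SE) mode an SVC is turned directly into a simulator
// system call: the call number is taken from X8 in AArch64 or R7 in AArch32,
// and the PC is advanced past the SVC since no exception is actually taken.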
void
SupervisorCall::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (FullSystem) {
        ArmFault::invoke(tc, inst);
        return;
    }

    // As of now, there isn't a 32 bit thumb version of this instruction.
    assert(!machInst.bigThumb);
    uint32_t callNum;
    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
    OperatingMode mode = (OperatingMode)(uint8_t)cpsr.mode;
    if (opModeIs64(mode))
        callNum = tc->readIntReg(INTREG_X8);
    else
        callNum = tc->readIntReg(INTREG_R7);
    Fault fault;
    tc->syscall(callNum, &fault);

    // Advance the PC since that won't happen automatically.
    PCState pc = tc->pcState();
    assert(inst);
    inst->advancePC(pc);
    tc->pcState(pc);
}

bool
SupervisorCall::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);

    // if in Hyp mode then stay in Hyp mode
    toHyp  = scr.ns && (cpsr.mode == MODE_HYP);
    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.mode == MODE_USER);
    return toHyp;
}

ExceptionClass
SupervisorCall::ec(ThreadContext *tc) const
{
    return (overrideEc != EC_INVALID) ? overrideEc :
        (from64 ? EC_SVC_64 : vals.ec);
}

uint32_t
SupervisorCall::iss() const
{
    // Even if we have a 24-bit immediate from an ARM32 instruction, we only
    // use the bottom 16 bits for the ISS value (it doesn't hurt for AArch64
    // SVC).
    return issRaw & 0xFFFF;
}

uint32_t
SecureMonitorCall::iss() const
{
    if (from64)
        return bits(machInst, 20, 5);
    return 0;
}

ExceptionClass
UndefinedInstruction::ec(ThreadContext *tc) const
{
    // If UndefinedInstruction is routed to the hypervisor,
    // the HSR.EC field is 0.
    if (hypRouted)
        return EC_UNKNOWN;
    else
        return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
}


HypervisorCall::HypervisorCall(ExtMachInst _machInst, uint32_t _imm) :
        ArmFaultVals<HypervisorCall>(_machInst, _imm)
{}

ExceptionClass
HypervisorCall::ec(ThreadContext *tc) const
{
    return from64 ? EC_HVC_64 : vals.ec;
}

ExceptionClass
HypervisorTrap::ec(ThreadContext *tc) const
{
    return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
}

template<class T>
FaultOffset
ArmFaultVals<T>::offset(ThreadContext *tc)
{
    bool isHypTrap = false;

    // Normally we just use the exception vector from the table at the top of
    // this file. However, if this exception has caused a transition to Hyp
    // mode, and it is an exception type that would only do so because it has
    // been trapped, then we use the Hyp trap vector instead of the normal
    // vector.
    if (vals.hypTrappable) {
        CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
        if (cpsr.mode == MODE_HYP) {
            CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
            isHypTrap = spsr.mode != MODE_HYP;
        }
    }
    return isHypTrap ? 0x14 : vals.offset;
}

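// Select the AArch64 vector offset: exceptions taken to the same EL use the
// SP_EL0 (ELxt) or SP_ELx (ELxh) entry depending on the source mode's stack
// pointer selection, while exceptions from a lower EL use the AArch64 or
// AArch32 entry depending on the register width of that lower EL.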
template<class T>
FaultOffset
ArmFaultVals<T>::offset64(ThreadContext *tc)
{
    if (toEL == fromEL) {
        if (opModeIsT(fromMode))
            return vals.currELTOffset;
        return vals.currELHOffset;
    } else {
        bool lower_32 = false;
        if (toEL == EL3) {
            if (!inSecureState(tc) && ArmSystem::haveEL(tc, EL2))
                lower_32 = ELIs32(tc, EL2);
            else
                lower_32 = ELIs32(tc, EL1);
        } else {
            lower_32 = ELIs32(tc, static_cast<ExceptionLevel>(toEL - 1));
        }

        if (lower_32)
            return vals.lowerEL32Offset;
        return vals.lowerEL64Offset;
    }
}

// void
// SupervisorCall::setSyndrome64(ThreadContext *tc, MiscRegIndex esr_idx)
// {
//     ESR esr = 0;
//     esr.ec = machInst.aarch64 ? SvcAArch64 : SvcAArch32;
//     esr.il = !machInst.thumb;
//     if (machInst.aarch64)
//         esr.imm16 = bits(machInst.instBits, 20, 5);
//     else if (machInst.thumb)
//         esr.imm16 = bits(machInst.instBits, 7, 0);
//     else
//         esr.imm16 = bits(machInst.instBits, 15, 0);
//     tc->setMiscReg(esr_idx, esr);
// }

void
SecureMonitorCall::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (FullSystem) {
        ArmFault::invoke(tc, inst);
        return;
    }
}

ExceptionClass
SecureMonitorCall::ec(ThreadContext *tc) const
{
    return (from64 ? EC_SMC_64 : vals.ec);
}

bool
SupervisorTrap::routeToHyp(ThreadContext *tc) const
{
    bool toHyp = false;

    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);

    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.el == EL0);
    return toHyp;
}

uint32_t
SupervisorTrap::iss() const
{
    // If SupervisorTrap is routed to hypervisor, iss field is 0.
    if (hypRouted) {
        return 0;
    }
    return issRaw;
}

ExceptionClass
SupervisorTrap::ec(ThreadContext *tc) const
{
    if (hypRouted)
        return EC_UNKNOWN;
    else
        return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
}

ExceptionClass
SecureMonitorTrap::ec(ThreadContext *tc) const
{
    return (overrideEc != EC_INVALID) ? overrideEc :
        (from64 ? EC_SMC_64 : vals.ec);
}

template<class T>
void
AbortFault<T>::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (tranMethod == ArmFault::UnknownTran) {
        tranMethod = longDescFormatInUse(tc) ? ArmFault::LpaeTran
                                             : ArmFault::VmsaTran;

        if ((tranMethod == ArmFault::VmsaTran) && this->routeToMonitor(tc)) {
            // See ARM ARM B3-1416
            bool override_LPAE = false;
            TTBCR ttbcr_s = tc->readMiscReg(MISCREG_TTBCR_S);
            TTBCR M5_VAR_USED ttbcr_ns = tc->readMiscReg(MISCREG_TTBCR_NS);
            if (ttbcr_s.eae) {
                override_LPAE = true;
            } else {
                // Unimplemented code option, not seen in testing.  May need
                // extension according to the manual excerpt above.
                DPRINTF(Faults, "Warning: Incomplete translation method "
                        "override detected.\n");
            }
            if (override_LPAE)
                tranMethod = ArmFault::LpaeTran;
        }
    }

    if (source == ArmFault::AsynchronousExternalAbort) {
        tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_ABT, 0);
    }
    // Get effective fault source encoding
    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
    FSR  fsr  = getFsr(tc);

    // The source must be determined BEFORE invoking the generic routines,
    // which will try to set the HSR etc. based upon it.
    ArmFaultVals<T>::invoke(tc, inst);

    if (!this->to64) {  // AArch32
        if (cpsr.mode == MODE_HYP) {
            tc->setMiscReg(T::HFarIndex, faultAddr);
        } else if (stage2) {
            tc->setMiscReg(MISCREG_HPFAR, (faultAddr >> 8) & ~0xf);
            tc->setMiscReg(T::HFarIndex,  OVAddr);
        } else {
            tc->setMiscReg(T::FsrIndex, fsr);
            tc->setMiscReg(T::FarIndex, faultAddr);
        }
        DPRINTF(Faults, "Abort Fault source=%#x fsr=%#x faultAddr=%#x "\
                "tranMethod=%#x\n", source, fsr, faultAddr, tranMethod);
    } else {  // AArch64
        // Set the FAR register.  Nothing else to do if we are in AArch64 state
        // because the syndrome register has already been set inside invoke64()
        if (stage2) {
            // stage 2 fault, set HPFAR_EL2 to the faulting IPA
            // and FAR_EL2 to the Original VA
            tc->setMiscReg(AbortFault<T>::getFaultAddrReg64(), OVAddr);
            tc->setMiscReg(MISCREG_HPFAR_EL2, bits(faultAddr, 47, 12) << 4);

            DPRINTF(Faults, "Abort Fault (Stage 2) VA: 0x%x IPA: 0x%x\n",
                    OVAddr, faultAddr);
        } else {
            tc->setMiscReg(AbortFault<T>::getFaultAddrReg64(), faultAddr);
        }
    }
}

template<class T>
FSR
AbortFault<T>::getFsr(ThreadContext *tc)
{
    FSR fsr = 0;

    if (((CPSR) tc->readMiscRegNoEffect(MISCREG_CPSR)).width) {
        // AArch32
        assert(tranMethod != ArmFault::UnknownTran);
        if (tranMethod == ArmFault::LpaeTran) {
            srcEncoded = ArmFault::longDescFaultSources[source];
            fsr.status = srcEncoded;
            fsr.lpae   = 1;
        } else {
            srcEncoded = ArmFault::shortDescFaultSources[source];
            fsr.fsLow  = bits(srcEncoded, 3, 0);
            fsr.fsHigh = bits(srcEncoded, 4);
            fsr.domain = static_cast<uint8_t>(domain);
        }
        fsr.wnr = (write ? 1 : 0);
        fsr.ext = 0;
    } else {
        // AArch64
        srcEncoded = ArmFault::aarch64FaultSources[source];
    }
    if (srcEncoded == ArmFault::FaultSourceInvalid) {
        panic("Invalid fault source\n");
    }
    return fsr;
}

template<class T>
bool
AbortFault<T>::abortDisable(ThreadContext *tc)
{
    if (ArmSystem::haveSecurity(tc)) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        return (!scr.ns || scr.aw);
    }
    return true;
}

template<class T>
void
AbortFault<T>::annotate(ArmFault::AnnotationIDs id, uint64_t val)
{
    switch (id)
    {
      case ArmFault::S1PTW:
        s1ptw = val;
        break;
      case ArmFault::OVA:
        OVAddr = val;
        break;

      // Just ignore unknown IDs
      default:
        break;
    }
}

template<class T>
uint32_t
AbortFault<T>::iss() const
{
    uint32_t val;

    val  = srcEncoded & 0x3F;
    val |= write << 6;
    val |= s1ptw << 7;
    return (val);
}

template<class T>
bool
AbortFault<T>::isMMUFault() const
{
    // NOTE: Not relying on LL information being aligned to lowest bits here
    return
         (source == ArmFault::AlignmentFault)     ||
        ((source >= ArmFault::TranslationLL) &&
         (source <  ArmFault::TranslationLL + 4)) ||
        ((source >= ArmFault::AccessFlagLL) &&
         (source <  ArmFault::AccessFlagLL + 4))  ||
        ((source >= ArmFault::DomainLL) &&
         (source <  ArmFault::DomainLL + 4))      ||
        ((source >= ArmFault::PermissionLL) &&
         (source <  ArmFault::PermissionLL + 4));
}

ExceptionClass
PrefetchAbort::ec(ThreadContext *tc) const
{
    if (to64) {
        // AArch64
        if (toEL == fromEL)
            return EC_PREFETCH_ABORT_CURR_EL;
        else
            return EC_PREFETCH_ABORT_LOWER_EL;
    } else {
        // AArch32
        // Abort faults have different EC codes depending on whether
        // the fault originated within HYP mode, or not. So override
        // the method and add the extra adjustment of the EC value.

        ExceptionClass ec = ArmFaultVals<PrefetchAbort>::vals.ec;

        CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
        if (spsr.mode == MODE_HYP) {
            ec = ((ExceptionClass) (((uint32_t) ec) + 1));
        }
        return ec;
    }
}

bool
PrefetchAbort::routeToMonitor(ThreadContext *tc) const
{
    SCR scr = 0;
    if (from64)
        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    else
        scr = tc->readMiscRegNoEffect(MISCREG_SCR);

    return scr.ea && !isMMUFault();
}

bool
PrefetchAbort::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    HDCR hdcr = tc->readMiscRegNoEffect(MISCREG_HDCR);

    // if in Hyp mode then stay in Hyp mode
    toHyp  = scr.ns && (cpsr.mode == MODE_HYP);
    // otherwise, check whether to take to Hyp mode through Hyp Trap vector
    toHyp |= (stage2 ||
                ( (source ==               DebugEvent) && hdcr.tde && (cpsr.mode !=  MODE_HYP)) ||
                ( (source == SynchronousExternalAbort) && hcr.tge  && (cpsr.mode == MODE_USER))
             ) && !inSecureState(tc);
    return toHyp;
}

ExceptionClass
DataAbort::ec(ThreadContext *tc) const
{
    if (to64) {
        // AArch64
        if (source == ArmFault::AsynchronousExternalAbort) {
            panic("Asynchronous External Abort should be handled with "
                    "SystemErrors (SErrors)!");
        }
        if (toEL == fromEL)
            return EC_DATA_ABORT_CURR_EL;
        else
            return EC_DATA_ABORT_LOWER_EL;
    } else {
        // AArch32
        // Abort faults have different EC codes depending on whether
        // the fault originated within HYP mode, or not. So override
        // the method and add the extra adjustment of the EC value.

        ExceptionClass ec = ArmFaultVals<DataAbort>::vals.ec;

        CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
        if (spsr.mode == MODE_HYP) {
            ec = ((ExceptionClass) (((uint32_t) ec) + 1));
        }
        return ec;
    }
}

bool
DataAbort::routeToMonitor(ThreadContext *tc) const
{
    SCR scr = 0;
    if (from64)
        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    else
        scr = tc->readMiscRegNoEffect(MISCREG_SCR);

    return scr.ea && !isMMUFault();
}

bool
DataAbort::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    HDCR hdcr = tc->readMiscRegNoEffect(MISCREG_HDCR);

    // if in Hyp mode then stay in Hyp mode
    toHyp  = scr.ns && (cpsr.mode == MODE_HYP);
    // otherwise, check whether to take to Hyp mode through Hyp Trap vector
    toHyp |= (stage2 ||
                ( (cpsr.mode != MODE_HYP) && ( ((source == AsynchronousExternalAbort) && hcr.amo) ||
                                               ((source == DebugEvent) && hdcr.tde) )
                ) ||
                ( (cpsr.mode == MODE_USER) && hcr.tge &&
                  ((source == AlignmentFault)            ||
                   (source == SynchronousExternalAbort))
                )
             ) && !inSecureState(tc);
    return toHyp;
}

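// Data abort ISS layout on top of the generic abort bits (xFSC[5:0], WnR[6],
// S1PTW[7]): ISV[24] flags a valid instruction syndrome, with SAS[23:22],
// SSE[21], SRT[20:16], SF[15] and AR[14] describing the faulting access when
// it is set.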
uint32_t
DataAbort::iss() const
{
    uint32_t val;

    // Add on the data abort specific fields to the generic abort ISS value
    val  = AbortFault<DataAbort>::iss();
    // The ISS is valid if the fault was not caused by a stage 1 page table
    // walk and, when taken to AArch64, only when directed to EL2
    if (!s1ptw && (!to64 || toEL == EL2)) {
        val |= isv << 24;
        if (isv) {
            val |= sas << 22;
            val |= sse << 21;
            val |= srt << 16;
            // AArch64 only. These assignments are safe on AArch32 as well
            // because these vars are initialized to false
            val |= sf << 15;
            val |= ar << 14;
        }
    }
    return (val);
}

void
DataAbort::annotate(AnnotationIDs id, uint64_t val)
{
    AbortFault<DataAbort>::annotate(id, val);
    switch (id)
    {
      case SAS:
        isv = true;
        sas = val;
        break;
      case SSE:
        isv = true;
        sse = val;
        break;
      case SRT:
        isv = true;
        srt = val;
        break;
      case SF:
        isv = true;
        sf  = val;
        break;
      case AR:
        isv = true;
        ar  = val;
        break;
      // Just ignore unknown IDs
      default:
        break;
    }
}

void
VirtualDataAbort::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    AbortFault<VirtualDataAbort>::invoke(tc, inst);
    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
    hcr.va = 0;
    tc->setMiscRegNoEffect(MISCREG_HCR, hcr);
}

bool
Interrupt::routeToMonitor(ThreadContext *tc) const
{
    assert(ArmSystem::haveSecurity(tc));
    SCR scr = 0;
    if (from64)
        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    else
        scr = tc->readMiscRegNoEffect(MISCREG_SCR);
    return scr.irq;
}

bool
Interrupt::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    // Determine whether IRQs are routed to Hyp mode.
    toHyp = (!scr.irq && hcr.imo && !inSecureState(tc)) ||
            (cpsr.mode == MODE_HYP);
    return toHyp;
}

bool
Interrupt::abortDisable(ThreadContext *tc)
{
    if (ArmSystem::haveSecurity(tc)) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        return (!scr.ns || scr.aw);
    }
    return true;
}

VirtualInterrupt::VirtualInterrupt()
{}

bool
FastInterrupt::routeToMonitor(ThreadContext *tc) const
{
    assert(ArmSystem::haveSecurity(tc));
    SCR scr = 0;
    if (from64)
        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    else
        scr = tc->readMiscRegNoEffect(MISCREG_SCR);
    return scr.fiq;
}

bool
FastInterrupt::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    // Determine whether FIQs are routed to Hyp mode.
    toHyp = (!scr.fiq && hcr.fmo && !inSecureState(tc)) ||
            (cpsr.mode == MODE_HYP);
    return toHyp;
}

bool
FastInterrupt::abortDisable(ThreadContext *tc)
{
    if (ArmSystem::haveSecurity(tc)) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        return (!scr.ns || scr.aw);
    }
    return true;
}

bool
FastInterrupt::fiqDisable(ThreadContext *tc)
{
    if (ArmSystem::haveVirtualization(tc)) {
        return true;
    } else if (ArmSystem::haveSecurity(tc)) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        return (!scr.ns || scr.fw);
    }
    return true;
}

VirtualFastInterrupt::VirtualFastInterrupt()
{}

void
PCAlignmentFault::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    ArmFaultVals<PCAlignmentFault>::invoke(tc, inst);
    assert(from64);
    // Set the FAR
    tc->setMiscReg(getFaultAddrReg64(), faultPC);
}

SPAlignmentFault::SPAlignmentFault()
{}

SystemError::SystemError()
{}

void
SystemError::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_ABT, 0);
    ArmFault::invoke(tc, inst);
}

bool
SystemError::routeToMonitor(ThreadContext *tc) const
{
    assert(ArmSystem::haveSecurity(tc));
    assert(from64);
    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    return scr.ea;
}

bool
SystemError::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;
    assert(from64);

    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    HCR hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);

    toHyp = (!scr.ea && hcr.amo && !inSecureState(tc)) ||
            (!scr.ea && !scr.rw && !hcr.amo && !inSecureState(tc));
    return toHyp;
}


SoftwareBreakpoint::SoftwareBreakpoint(ExtMachInst _mach_inst, uint32_t _iss)
    : ArmFaultVals<SoftwareBreakpoint>(_mach_inst, _iss)
{}

bool
SoftwareBreakpoint::routeToHyp(ThreadContext *tc) const
{
    assert(from64);

    const bool have_el2 = ArmSystem::haveVirtualization(tc);

    const HCR hcr  = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
    const HDCR mdcr  = tc->readMiscRegNoEffect(MISCREG_MDCR_EL2);

    return have_el2 && !inSecureState(tc) && fromEL <= EL1 &&
        (hcr.tge || mdcr.tde);
}

void
ArmSev::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    DPRINTF(Faults, "Invoking ArmSev Fault\n");
    if (!FullSystem)
        return;

    // Set sev_mailbox to 1, clear the pending interrupt from remote
    // SEV execution and let pipeline continue as pcState is still
    // valid.
    tc->setMiscReg(MISCREG_SEV_MAILBOX, 1);
    tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_SEV, 0);
}

// Instantiate all the templates to make the linker happy
template class ArmFaultVals<Reset>;
template class ArmFaultVals<UndefinedInstruction>;
template class ArmFaultVals<SupervisorCall>;
template class ArmFaultVals<SecureMonitorCall>;
template class ArmFaultVals<HypervisorCall>;
template class ArmFaultVals<PrefetchAbort>;
template class ArmFaultVals<DataAbort>;
template class ArmFaultVals<VirtualDataAbort>;
template class ArmFaultVals<HypervisorTrap>;
template class ArmFaultVals<Interrupt>;
template class ArmFaultVals<VirtualInterrupt>;
template class ArmFaultVals<FastInterrupt>;
template class ArmFaultVals<VirtualFastInterrupt>;
template class ArmFaultVals<SupervisorTrap>;
template class ArmFaultVals<SecureMonitorTrap>;
template class ArmFaultVals<PCAlignmentFault>;
template class ArmFaultVals<SPAlignmentFault>;
template class ArmFaultVals<SystemError>;
template class ArmFaultVals<SoftwareBreakpoint>;
template class ArmFaultVals<ArmSev>;
template class AbortFault<PrefetchAbort>;
template class AbortFault<DataAbort>;
template class AbortFault<VirtualDataAbort>;


IllegalInstSetStateFault::IllegalInstSetStateFault()
{}


} // namespace ArmISA