faults.cc revision 11585:83784c48fb73
/*
 * Copyright (c) 2010, 2012-2014, 2016 ARM Limited
 * All rights reserved
 *
 * The license below extends only to copyright in the software and shall
 * not be construed as granting a license to any other intellectual
 * property including but not limited to intellectual property relating
 * to a hardware implementation of the functionality of the software
 * licensed hereunder.  You may use the software subject to the license
 * terms below provided that you ensure that this notice is replicated
 * unmodified and in its entirety in all distributions of the software,
 * modified or unmodified, in source code or in binary form.
 *
 * Copyright (c) 2003-2005 The Regents of The University of Michigan
 * Copyright (c) 2007-2008 The Florida State University
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met: redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer;
 * redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution;
 * neither the name of the copyright holders nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * Authors: Ali Saidi
 *          Gabe Black
 *          Giacomo Gabrielli
 *          Thomas Grocutt
 */

#include "arch/arm/faults.hh"
#include "arch/arm/system.hh"
#include "arch/arm/utility.hh"
#include "arch/arm/insts/static_inst.hh"
#include "base/compiler.hh"
#include "base/trace.hh"
#include "cpu/base.hh"
#include "cpu/thread_context.hh"
#include "debug/Faults.hh"
#include "sim/full_system.hh"

namespace ArmISA
{

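// The following tables map each ArmFault::FaultSource value to the
// architectural fault status encoding used by, respectively, the VMSA
// short-descriptor format (DFSR/IFSR), the long-descriptor (LPAE) format,
// and the AArch64 ESR_ELx ISS field.  An entry of 0xff marks a source that
// has no valid encoding in that format (see the FaultSourceInvalid check in
// AbortFault<T>::getFsr()).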
uint8_t ArmFault::shortDescFaultSources[] = {
    0x01,  // AlignmentFault
    0x04,  // InstructionCacheMaintenance
    0xff,  // SynchExtAbtOnTranslTableWalkL0 (INVALID)
    0x0c,  // SynchExtAbtOnTranslTableWalkL1
    0x0e,  // SynchExtAbtOnTranslTableWalkL2
    0xff,  // SynchExtAbtOnTranslTableWalkL3 (INVALID)
    0xff,  // SynchPtyErrOnTranslTableWalkL0 (INVALID)
    0x1c,  // SynchPtyErrOnTranslTableWalkL1
    0x1e,  // SynchPtyErrOnTranslTableWalkL2
    0xff,  // SynchPtyErrOnTranslTableWalkL3 (INVALID)
    0xff,  // TranslationL0 (INVALID)
    0x05,  // TranslationL1
    0x07,  // TranslationL2
    0xff,  // TranslationL3 (INVALID)
    0xff,  // AccessFlagL0 (INVALID)
    0x03,  // AccessFlagL1
    0x06,  // AccessFlagL2
    0xff,  // AccessFlagL3 (INVALID)
    0xff,  // DomainL0 (INVALID)
    0x09,  // DomainL1
    0x0b,  // DomainL2
    0xff,  // DomainL3 (INVALID)
    0xff,  // PermissionL0 (INVALID)
    0x0d,  // PermissionL1
    0x0f,  // PermissionL2
    0xff,  // PermissionL3 (INVALID)
    0x02,  // DebugEvent
    0x08,  // SynchronousExternalAbort
    0x10,  // TLBConflictAbort
    0x19,  // SynchPtyErrOnMemoryAccess
    0x16,  // AsynchronousExternalAbort
    0x18,  // AsynchPtyErrOnMemoryAccess
    0xff,  // AddressSizeL0 (INVALID)
    0xff,  // AddressSizeL1 (INVALID)
    0xff,  // AddressSizeL2 (INVALID)
    0xff,  // AddressSizeL3 (INVALID)
    0x40,  // PrefetchTLBMiss
    0x80   // PrefetchUncacheable
};

static_assert(sizeof(ArmFault::shortDescFaultSources) ==
              ArmFault::NumFaultSources,
              "Invalid size of ArmFault::shortDescFaultSources[]");

uint8_t ArmFault::longDescFaultSources[] = {
    0x21,  // AlignmentFault
    0xff,  // InstructionCacheMaintenance (INVALID)
    0xff,  // SynchExtAbtOnTranslTableWalkL0 (INVALID)
    0x15,  // SynchExtAbtOnTranslTableWalkL1
    0x16,  // SynchExtAbtOnTranslTableWalkL2
    0x17,  // SynchExtAbtOnTranslTableWalkL3
    0xff,  // SynchPtyErrOnTranslTableWalkL0 (INVALID)
    0x1d,  // SynchPtyErrOnTranslTableWalkL1
    0x1e,  // SynchPtyErrOnTranslTableWalkL2
    0x1f,  // SynchPtyErrOnTranslTableWalkL3
    0xff,  // TranslationL0 (INVALID)
    0x05,  // TranslationL1
    0x06,  // TranslationL2
    0x07,  // TranslationL3
    0xff,  // AccessFlagL0 (INVALID)
    0x09,  // AccessFlagL1
    0x0a,  // AccessFlagL2
    0x0b,  // AccessFlagL3
    0xff,  // DomainL0 (INVALID)
    0x3d,  // DomainL1
    0x3e,  // DomainL2
    0xff,  // DomainL3 (RESERVED)
    0xff,  // PermissionL0 (INVALID)
    0x0d,  // PermissionL1
    0x0e,  // PermissionL2
    0x0f,  // PermissionL3
    0x22,  // DebugEvent
    0x10,  // SynchronousExternalAbort
    0x30,  // TLBConflictAbort
    0x18,  // SynchPtyErrOnMemoryAccess
    0x11,  // AsynchronousExternalAbort
    0x19,  // AsynchPtyErrOnMemoryAccess
    0xff,  // AddressSizeL0 (INVALID)
    0xff,  // AddressSizeL1 (INVALID)
    0xff,  // AddressSizeL2 (INVALID)
    0xff,  // AddressSizeL3 (INVALID)
    0x40,  // PrefetchTLBMiss
    0x80   // PrefetchUncacheable
};

static_assert(sizeof(ArmFault::longDescFaultSources) ==
              ArmFault::NumFaultSources,
              "Invalid size of ArmFault::longDescFaultSources[]");

uint8_t ArmFault::aarch64FaultSources[] = {
    0x21,  // AlignmentFault
    0xff,  // InstructionCacheMaintenance (INVALID)
    0x14,  // SynchExtAbtOnTranslTableWalkL0
    0x15,  // SynchExtAbtOnTranslTableWalkL1
    0x16,  // SynchExtAbtOnTranslTableWalkL2
    0x17,  // SynchExtAbtOnTranslTableWalkL3
    0x1c,  // SynchPtyErrOnTranslTableWalkL0
    0x1d,  // SynchPtyErrOnTranslTableWalkL1
    0x1e,  // SynchPtyErrOnTranslTableWalkL2
    0x1f,  // SynchPtyErrOnTranslTableWalkL3
    0x04,  // TranslationL0
    0x05,  // TranslationL1
    0x06,  // TranslationL2
    0x07,  // TranslationL3
    0x08,  // AccessFlagL0
    0x09,  // AccessFlagL1
    0x0a,  // AccessFlagL2
    0x0b,  // AccessFlagL3
    // @todo: Section & Page Domain Fault in AArch64?
    0xff,  // DomainL0 (INVALID)
    0xff,  // DomainL1 (INVALID)
    0xff,  // DomainL2 (INVALID)
    0xff,  // DomainL3 (INVALID)
    0x0c,  // PermissionL0
    0x0d,  // PermissionL1
    0x0e,  // PermissionL2
    0x0f,  // PermissionL3
    0xff,  // DebugEvent (INVALID)
    0x10,  // SynchronousExternalAbort
    0x30,  // TLBConflictAbort
    0x18,  // SynchPtyErrOnMemoryAccess
    0xff,  // AsynchronousExternalAbort (INVALID)
    0xff,  // AsynchPtyErrOnMemoryAccess (INVALID)
    0x00,  // AddressSizeL0
    0x01,  // AddressSizeL1
    0x02,  // AddressSizeL2
    0x03,  // AddressSizeL3
    0x40,  // PrefetchTLBMiss
    0x80   // PrefetchUncacheable
};

static_assert(sizeof(ArmFault::aarch64FaultSources) ==
              ArmFault::NumFaultSources,
              "Invalid size of ArmFault::aarch64FaultSources[]");

// Fields: name, offset, cur{ELT,ELH}Offset, lowerEL{64,32}Offset, next mode,
//         {ARM, Thumb, ARM_ELR, Thumb_ELR} PC offset, hyp trap,
//         {A, F} disable, class, stat
template<> ArmFault::FaultVals ArmFaultVals<Reset>::vals = {
    // Some dummy values (the reset vector has an IMPLEMENTATION DEFINED
    // location in AArch64)
    "Reset",                 0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC,
    0, 0, 0, 0, false, true,  true,  EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<UndefinedInstruction>::vals = {
    "Undefined Instruction", 0x004, 0x000, 0x200, 0x400, 0x600, MODE_UNDEFINED,
    4, 2, 0, 0, true,  false, false, EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SupervisorCall>::vals = {
    "Supervisor Call",       0x008, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    4, 2, 4, 2, true,  false, false, EC_SVC_TO_HYP, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorCall>::vals = {
    "Secure Monitor Call",   0x008, 0x000, 0x200, 0x400, 0x600, MODE_MON,
    4, 4, 4, 4, false, true,  true,  EC_SMC_TO_HYP, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<HypervisorCall>::vals = {
    "Hypervisor Call",       0x008, 0x000, 0x200, 0x400, 0x600, MODE_HYP,
    4, 4, 4, 4, true,  false, false, EC_HVC, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<PrefetchAbort>::vals = {
    "Prefetch Abort",        0x00C, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
    4, 4, 0, 0, true,  true,  false, EC_PREFETCH_ABORT_TO_HYP, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<DataAbort>::vals = {
    "Data Abort",            0x010, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
    8, 8, 0, 0, true,  true,  false, EC_DATA_ABORT_TO_HYP, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<VirtualDataAbort>::vals = {
    "Virtual Data Abort",    0x010, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
    8, 8, 0, 0, true,  true,  false, EC_INVALID, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<HypervisorTrap>::vals = {
    // @todo: double check these values
    "Hypervisor Trap",       0x014, 0x000, 0x200, 0x400, 0x600, MODE_HYP,
    0, 0, 0, 0, false, false, false, EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<Interrupt>::vals = {
    "IRQ",                   0x018, 0x080, 0x280, 0x480, 0x680, MODE_IRQ,
    4, 4, 0, 0, false, true,  false, EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<VirtualInterrupt>::vals = {
    "Virtual IRQ",           0x018, 0x080, 0x280, 0x480, 0x680, MODE_IRQ,
    4, 4, 0, 0, false, true,  false, EC_INVALID, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<FastInterrupt>::vals = {
    "FIQ",                   0x01C, 0x100, 0x300, 0x500, 0x700, MODE_FIQ,
    4, 4, 0, 0, false, true,  true,  EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<VirtualFastInterrupt>::vals = {
    "Virtual FIQ",           0x01C, 0x100, 0x300, 0x500, 0x700, MODE_FIQ,
    4, 4, 0, 0, false, true,  true,  EC_INVALID, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SupervisorTrap>::vals = {
    // Some dummy values (SupervisorTrap is AArch64-only)
    "Supervisor Trap",   0x014, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, false, false, false, EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorTrap>::vals = {
    // Some dummy values (SecureMonitorTrap is AArch64-only)
    "Secure Monitor Trap",   0x014, 0x000, 0x200, 0x400, 0x600, MODE_MON,
    0, 0, 0, 0, false, false, false, EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<PCAlignmentFault>::vals = {
    // Some dummy values (PCAlignmentFault is AArch64-only)
    "PC Alignment Fault",   0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, true, false, false, EC_PC_ALIGNMENT, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SPAlignmentFault>::vals = {
    // Some dummy values (SPAlignmentFault is AArch64-only)
    "SP Alignment Fault",   0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, true, false, false, EC_STACK_PTR_ALIGNMENT, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<SystemError>::vals = {
    // Some dummy values (SError is AArch64-only)
    "SError",                0x000, 0x180, 0x380, 0x580, 0x780, MODE_SVC,
    0, 0, 0, 0, false, true,  true,  EC_SERROR, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<FlushPipe>::vals = {
    // Some dummy values
    "Pipe Flush",            0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC,
    0, 0, 0, 0, false, true,  true,  EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<ArmSev>::vals = {
    // Some dummy values
    "ArmSev Flush",          0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC,
    0, 0, 0, 0, false, true,  true,  EC_UNKNOWN, FaultStat()
};
template<> ArmFault::FaultVals ArmFaultVals<IllegalInstSetStateFault>::vals = {
    // Some dummy values (IllegalInstSetStateFault is AArch64-only)
    "Illegal Inst Set State Fault",   0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
    0, 0, 0, 0, true, false, false, EC_ILLEGAL_INST, FaultStat()
};

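// Compute the AArch32 exception vector address for this fault: MVBAR in
// Monitor mode, HVBAR in Hyp mode, otherwise either the high vectors
// (0xffff0000) when SCTLR.V is set or VBAR (0 without the Security
// Extensions), plus the per-fault vector offset.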
Addr
ArmFault::getVector(ThreadContext *tc)
{
    Addr base;

    // ARM ARM issue C B1.8.1
    bool haveSecurity = ArmSystem::haveSecurity(tc);

    // Vectored interrupts (SCTLR.VE) are not supported, so make sure the
    // bit is not set
    SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR);
    assert(!sctlr.ve);
    // Check for invalid modes
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    assert(haveSecurity                      || cpsr.mode != MODE_MON);
    assert(ArmSystem::haveVirtualization(tc) || cpsr.mode != MODE_HYP);

    switch (cpsr.mode)
    {
      case MODE_MON:
        base = tc->readMiscReg(MISCREG_MVBAR);
        break;
      case MODE_HYP:
        base = tc->readMiscReg(MISCREG_HVBAR);
        break;
      default:
        if (sctlr.v) {
            base = HighVecs;
        } else {
            base = haveSecurity ? tc->readMiscReg(MISCREG_VBAR) : 0;
        }
        break;
    }
    return base + offset(tc);
}

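// Compute the AArch64 exception vector address: VBAR_ELx of the target
// exception level plus the per-fault offset64(), which picks among the
// cur{ELT,ELH}/lowerEL{64,32} offsets listed in the FaultVals table above.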
Addr
ArmFault::getVector64(ThreadContext *tc)
{
    Addr vbar;
    switch (toEL) {
      case EL3:
        assert(ArmSystem::haveSecurity(tc));
        vbar = tc->readMiscReg(MISCREG_VBAR_EL3);
        break;
      case EL2:
        assert(ArmSystem::haveVirtualization(tc));
        vbar = tc->readMiscReg(MISCREG_VBAR_EL2);
        break;
      case EL1:
        vbar = tc->readMiscReg(MISCREG_VBAR_EL1);
        break;
      default:
        panic("Invalid target exception level");
        break;
    }
    return vbar + offset64();
}

MiscRegIndex
ArmFault::getSyndromeReg64() const
{
    switch (toEL) {
      case EL1:
        return MISCREG_ESR_EL1;
      case EL2:
        return MISCREG_ESR_EL2;
      case EL3:
        return MISCREG_ESR_EL3;
      default:
        panic("Invalid exception level");
        break;
    }
}

MiscRegIndex
ArmFault::getFaultAddrReg64() const
{
    switch (toEL) {
      case EL1:
        return MISCREG_FAR_EL1;
      case EL2:
        return MISCREG_FAR_EL2;
      case EL3:
        return MISCREG_FAR_EL3;
      default:
        panic("Invalid exception level");
        break;
    }
}

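// Assemble the syndrome value written to HSR (AArch32) or ESR_ELx (AArch64):
// the exception class goes in bits [31:26], the instruction length (IL) bit
// in [25], and the ISS in [24:0].  For AArch32 trap classes the condition
// code of the trapped instruction is also reported in the ISS.  For example,
// an AArch64 SVC taken to EL1 ends up as roughly
// ESR_EL1 = (EC_SVC_64 << 26) | (1 << 25) | imm16 (see SupervisorCall::iss()).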
void
ArmFault::setSyndrome(ThreadContext *tc, MiscRegIndex syndrome_reg)
{
    uint32_t value;
    uint32_t exc_class = (uint32_t) ec(tc);
    uint32_t issVal = iss();
    assert(!from64 || ArmSystem::highestELIs64(tc));

    value = exc_class << 26;

    // HSR.IL not valid for Prefetch Aborts (0x20, 0x21) and Data Aborts (0x24,
    // 0x25) for which the ISS information is not valid (ARMv7).
    // @todo: ARMv8 revises AArch32 functionality: when HSR.IL is not
    // valid it is treated as RES1.
    if (to64) {
        value |= 1 << 25;
    } else if ((bits(exc_class, 5, 3) != 4) ||
               (bits(exc_class, 2) && bits(issVal, 24))) {
        if (!machInst.thumb || machInst.bigThumb)
            value |= 1 << 25;
    }
    // Condition code valid for EC[5:4] == 0 and EC[3:0] != 0
    if (!from64 && ((bits(exc_class, 5, 4) == 0) &&
                    (bits(exc_class, 3, 0) != 0))) {
        if (!machInst.thumb) {
            uint32_t      cond;
            ConditionCode condCode = (ConditionCode) (uint32_t) machInst.condCode;
            // If it's an unconditional instruction, report it with a condition
            // code of 0xE, i.e. the always (AL) condition
            cond  = (condCode == COND_UC) ? COND_AL : condCode;
            value |= cond << 20;
            value |= 1    << 24;
        }
        value |= bits(issVal, 19, 0);
    } else {
        value |= issVal;
    }
    tc->setMiscReg(syndrome_reg, value);
}

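// Generic exception entry.  Work out the source and target exception levels;
// if the target EL uses AArch64, hand over to invoke64().  Otherwise perform
// AArch32 exception entry: save the CPSR into the banked SPSR of the target
// mode, update the mode and the A/I/F masks, write the preferred return
// address to LR (or ELR_hyp), set the syndrome where required, and branch to
// the exception vector.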
void
ArmFault::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);

    if (ArmSystem::highestELIs64(tc)) {  // ARMv8
        // Determine source exception level and mode
        fromMode = (OperatingMode) (uint8_t) cpsr.mode;
        fromEL = opModeToEL(fromMode);
        if (opModeIs64(fromMode))
            from64 = true;

        // Determine target exception level
        if (ArmSystem::haveSecurity(tc) && routeToMonitor(tc))
            toEL = EL3;
        else if (ArmSystem::haveVirtualization(tc) && routeToHyp(tc))
            toEL = EL2;
        else
            toEL = opModeToEL(nextMode());
        if (fromEL > toEL)
            toEL = fromEL;

        if (toEL == ArmSystem::highestEL(tc) || ELIs64(tc, toEL)) {
            // Invoke exception handler in AArch64 state
            to64 = true;
            invoke64(tc, inst);
            return;
        }
    }

    // ARMv7 (ARM ARM issue C B1.9)

    bool have_security       = ArmSystem::haveSecurity(tc);
    bool have_virtualization = ArmSystem::haveVirtualization(tc);

    FaultBase::invoke(tc);
    if (!FullSystem)
        return;
    countStat()++;

    SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR);
    SCR scr = tc->readMiscReg(MISCREG_SCR);
    CPSR saved_cpsr = tc->readMiscReg(MISCREG_CPSR);
    saved_cpsr.nz = tc->readCCReg(CCREG_NZ);
    saved_cpsr.c = tc->readCCReg(CCREG_C);
    saved_cpsr.v = tc->readCCReg(CCREG_V);
    saved_cpsr.ge = tc->readCCReg(CCREG_GE);

    Addr curPc M5_VAR_USED = tc->pcState().pc();
    ITSTATE it = tc->pcState().itstate();
    saved_cpsr.it2 = it.top6;
    saved_cpsr.it1 = it.bottom2;

    // If we have a valid instruction then use it to annotate this fault with
    // extra information. This is used to generate the correct fault syndrome
    // information.
    if (inst) {
        ArmStaticInst *armInst = reinterpret_cast<ArmStaticInst *>(inst.get());
        armInst->annotateFault(this);
    }

    if (have_security && routeToMonitor(tc))
        cpsr.mode = MODE_MON;
    else if (have_virtualization && routeToHyp(tc))
        cpsr.mode = MODE_HYP;
    else
        cpsr.mode = nextMode();

    // Ensure Secure state if initially in Monitor mode
    if (have_security && saved_cpsr.mode == MODE_MON) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        if (scr.ns) {
            scr.ns = 0;
            tc->setMiscRegNoEffect(MISCREG_SCR, scr);
        }
    }

    // Some bits are set differently if we have been routed to Hyp mode
    if (cpsr.mode == MODE_HYP) {
        SCTLR hsctlr = tc->readMiscReg(MISCREG_HSCTLR);
        cpsr.t = hsctlr.te;
        cpsr.e = hsctlr.ee;
        if (!scr.ea)  {cpsr.a = 1;}
        if (!scr.fiq) {cpsr.f = 1;}
        if (!scr.irq) {cpsr.i = 1;}
    } else if (cpsr.mode == MODE_MON) {
        // Special case handling when entering monitor mode
        cpsr.t = sctlr.te;
        cpsr.e = sctlr.ee;
        cpsr.a = 1;
        cpsr.f = 1;
        cpsr.i = 1;
    } else {
        cpsr.t = sctlr.te;
        cpsr.e = sctlr.ee;

        // The *Disable functions are virtual and different per fault
        cpsr.a = cpsr.a | abortDisable(tc);
        cpsr.f = cpsr.f | fiqDisable(tc);
        cpsr.i = 1;
    }
    cpsr.it1 = cpsr.it2 = 0;
    cpsr.j = 0;
    tc->setMiscReg(MISCREG_CPSR, cpsr);

    // Make sure the SEV mailbox is always set to one
    tc->setMiscReg(MISCREG_SEV_MAILBOX, 1);

    // Clear the exclusive monitor
    tc->setMiscReg(MISCREG_LOCKFLAG, 0);

    if (cpsr.mode == MODE_HYP) {
        tc->setMiscReg(MISCREG_ELR_HYP, curPc +
                (saved_cpsr.t ? thumbPcOffset(true)  : armPcOffset(true)));
    } else {
        tc->setIntReg(INTREG_LR, curPc +
                (saved_cpsr.t ? thumbPcOffset(false) : armPcOffset(false)));
    }

    switch (cpsr.mode) {
      case MODE_FIQ:
        tc->setMiscReg(MISCREG_SPSR_FIQ, saved_cpsr);
        break;
      case MODE_IRQ:
        tc->setMiscReg(MISCREG_SPSR_IRQ, saved_cpsr);
        break;
      case MODE_SVC:
        tc->setMiscReg(MISCREG_SPSR_SVC, saved_cpsr);
        break;
      case MODE_MON:
        assert(have_security);
        tc->setMiscReg(MISCREG_SPSR_MON, saved_cpsr);
        break;
      case MODE_ABORT:
        tc->setMiscReg(MISCREG_SPSR_ABT, saved_cpsr);
        break;
      case MODE_UNDEFINED:
        tc->setMiscReg(MISCREG_SPSR_UND, saved_cpsr);
        if (ec(tc) != EC_UNKNOWN)
            setSyndrome(tc, MISCREG_HSR);
        break;
      case MODE_HYP:
        assert(have_virtualization);
        tc->setMiscReg(MISCREG_SPSR_HYP, saved_cpsr);
        setSyndrome(tc, MISCREG_HSR);
        break;
      default:
        panic("unknown Mode\n");
    }

    Addr newPc = getVector(tc);
    DPRINTF(Faults, "Invoking Fault:%s cpsr:%#x PC:%#x lr:%#x newVec: %#x\n",
            name(), cpsr, curPc, tc->readIntReg(INTREG_LR), newPc);
    PCState pc(newPc);
    pc.thumb(cpsr.t);
    pc.nextThumb(pc.thumb());
    pc.jazelle(cpsr.j);
    pc.nextJazelle(pc.jazelle());
    pc.aarch64(!cpsr.width);
    pc.nextAArch64(!cpsr.width);
    tc->pcState(pc);
}

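// AArch64 exception entry: save the current process state into SPSR_ELx and
// the preferred return address into ELR_ELx of the target EL, mask DAIF,
// switch to the target EL (using SP_ELx), and branch to the exception vector.
// The exception syndrome is written for all exceptions except IRQ/FIQ.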
void
ArmFault::invoke64(ThreadContext *tc, const StaticInstPtr &inst)
{
    // Determine actual misc. register indices for ELR_ELx and SPSR_ELx
    MiscRegIndex elr_idx, spsr_idx;
    switch (toEL) {
      case EL1:
        elr_idx = MISCREG_ELR_EL1;
        spsr_idx = MISCREG_SPSR_EL1;
        break;
      case EL2:
        assert(ArmSystem::haveVirtualization(tc));
        elr_idx = MISCREG_ELR_EL2;
        spsr_idx = MISCREG_SPSR_EL2;
        break;
      case EL3:
        assert(ArmSystem::haveSecurity(tc));
        elr_idx = MISCREG_ELR_EL3;
        spsr_idx = MISCREG_SPSR_EL3;
        break;
      default:
        panic("Invalid target exception level");
        break;
    }

    // Save process state into SPSR_ELx
    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
    CPSR spsr = cpsr;
    spsr.nz = tc->readCCReg(CCREG_NZ);
    spsr.c = tc->readCCReg(CCREG_C);
    spsr.v = tc->readCCReg(CCREG_V);
    if (from64) {
        // Force some bitfields to 0
        spsr.q = 0;
        spsr.it1 = 0;
        spsr.j = 0;
        spsr.res0_23_22 = 0;
        spsr.ge = 0;
        spsr.it2 = 0;
        spsr.t = 0;
    } else {
        spsr.ge = tc->readCCReg(CCREG_GE);
        ITSTATE it = tc->pcState().itstate();
        spsr.it2 = it.top6;
        spsr.it1 = it.bottom2;
        // Force some bitfields to 0
        spsr.res0_23_22 = 0;
        spsr.ss = 0;
    }
    tc->setMiscReg(spsr_idx, spsr);

    // Save preferred return address into ELR_ELx
    Addr curr_pc = tc->pcState().pc();
    Addr ret_addr = curr_pc;
    if (from64)
        ret_addr += armPcElrOffset();
    else
        ret_addr += spsr.t ? thumbPcElrOffset() : armPcElrOffset();
    tc->setMiscReg(elr_idx, ret_addr);

    // Update process state
    OperatingMode64 mode = 0;
    mode.spX = 1;
    mode.el = toEL;
    mode.width = 0;
    cpsr.mode = mode;
    cpsr.daif = 0xf;
    cpsr.il = 0;
    cpsr.ss = 0;
    tc->setMiscReg(MISCREG_CPSR, cpsr);

    // Set PC to start of exception handler
    Addr new_pc = purifyTaggedAddr(getVector64(tc), tc, toEL);
    DPRINTF(Faults, "Invoking Fault (AArch64 target EL):%s cpsr:%#x PC:%#x "
            "elr:%#x newVec: %#x\n", name(), cpsr, curr_pc, ret_addr, new_pc);
    PCState pc(new_pc);
    pc.aarch64(!cpsr.width);
    pc.nextAArch64(!cpsr.width);
    tc->pcState(pc);

    // If we have a valid instruction then use it to annotate this fault with
    // extra information. This is used to generate the correct fault syndrome
    // information
    if (inst)
        reinterpret_cast<ArmStaticInst *>(inst.get())->annotateFault(this);
    // Save exception syndrome
    if ((nextMode() != MODE_IRQ) && (nextMode() != MODE_FIQ))
        setSyndrome(tc, getSyndromeReg64());
}

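// On reset (in full-system mode) clear any pending interrupts and the
// architectural registers.  If the highest implemented EL is AArch32, go
// through normal exception entry to the reset vector; otherwise jump straight
// to the IMPLEMENTATION DEFINED AArch64 reset address.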
void
Reset::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (FullSystem) {
        tc->getCpuPtr()->clearInterrupts(tc->threadId());
        tc->clearArchRegs();
    }
    if (!ArmSystem::highestELIs64(tc)) {
        ArmFault::invoke(tc, inst);
        tc->setMiscReg(MISCREG_VMPIDR,
                       getMPIDR(dynamic_cast<ArmSystem*>(tc->getSystemPtr()), tc));

        // Unless we have SMC code to get us there, boot in HYP!
        if (ArmSystem::haveVirtualization(tc) &&
            !ArmSystem::haveSecurity(tc)) {
            CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
            cpsr.mode = MODE_HYP;
            tc->setMiscReg(MISCREG_CPSR, cpsr);
        }
    } else {
        // Advance the PC to the IMPLEMENTATION DEFINED reset value
        PCState pc = ArmSystem::resetAddr64(tc);
        pc.aarch64(true);
        pc.nextAArch64(true);
        tc->pcState(pc);
    }
}

void
UndefinedInstruction::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (FullSystem) {
        ArmFault::invoke(tc, inst);
        return;
    }

    // If the mnemonic isn't defined this has to be an unknown instruction.
    assert(unknown || mnemonic != NULL);
    if (disabled) {
        panic("Attempted to execute disabled instruction "
                "'%s' (inst 0x%08x)", mnemonic, machInst);
    } else if (unknown) {
        panic("Attempted to execute unknown instruction (inst 0x%08x)",
              machInst);
    } else {
        panic("Attempted to execute unimplemented instruction "
                "'%s' (inst 0x%08x)", mnemonic, machInst);
    }
}

bool
UndefinedInstruction::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);

    // if in Hyp mode then stay in Hyp mode
    toHyp  = scr.ns && (cpsr.mode == MODE_HYP);
    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.mode == MODE_USER);
    return toHyp;
}

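// When the exception class has been overridden (e.g. for a trapped system
// register or coprocessor access) rebuild the ISS in the trapped MSR/MRS
// format (op0, op1, op2, CRn, CRm, Rt and the read/write direction) from the
// fields of the encoded instruction; otherwise report the raw ISS.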
uint32_t
UndefinedInstruction::iss() const
{
    if (overrideEc == EC_INVALID)
        return issRaw;

    uint32_t new_iss = 0;
    uint32_t op0, op1, op2, CRn, CRm, Rt, dir;

    dir = bits(machInst, 21, 21);
    op0 = bits(machInst, 20, 19);
    op1 = bits(machInst, 18, 16);
    CRn = bits(machInst, 15, 12);
    CRm = bits(machInst, 11, 8);
    op2 = bits(machInst, 7, 5);
    Rt = bits(machInst, 4, 0);

    new_iss = op0 << 20 | op2 << 17 | op1 << 14 | CRn << 10 |
            Rt << 5 | CRm << 1 | dir;

    return new_iss;
}

void
SupervisorCall::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (FullSystem) {
        ArmFault::invoke(tc, inst);
        return;
    }

    // As of now, there isn't a 32 bit thumb version of this instruction.
    assert(!machInst.bigThumb);
    uint32_t callNum;
    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
    OperatingMode mode = (OperatingMode)(uint8_t)cpsr.mode;
    if (opModeIs64(mode))
        callNum = tc->readIntReg(INTREG_X8);
    else
        callNum = tc->readIntReg(INTREG_R7);
    tc->syscall(callNum);

    // Advance the PC since that won't happen automatically.
    PCState pc = tc->pcState();
    assert(inst);
    inst->advancePC(pc);
    tc->pcState(pc);
}

bool
SupervisorCall::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);

    // if in Hyp mode then stay in Hyp mode
    toHyp  = scr.ns && (cpsr.mode == MODE_HYP);
    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.mode == MODE_USER);
    return toHyp;
}

ExceptionClass
SupervisorCall::ec(ThreadContext *tc) const
{
    return (overrideEc != EC_INVALID) ? overrideEc :
        (from64 ? EC_SVC_64 : vals.ec);
}

uint32_t
SupervisorCall::iss() const
{
    // Even if we have a 24-bit immediate from an ARM32 instruction, only the
    // bottom 16 bits are used for the ISS value (this doesn't hurt for
    // AArch64 SVC).
    return issRaw & 0xFFFF;
}

uint32_t
SecureMonitorCall::iss() const
{
    if (from64)
        return bits(machInst, 20, 5);
    return 0;
}

ExceptionClass
UndefinedInstruction::ec(ThreadContext *tc) const
{
    return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
}


HypervisorCall::HypervisorCall(ExtMachInst _machInst, uint32_t _imm) :
        ArmFaultVals<HypervisorCall>(_machInst, _imm)
{}

ExceptionClass
HypervisorCall::ec(ThreadContext *tc) const
{
    return from64 ? EC_HVC_64 : vals.ec;
}

ExceptionClass
HypervisorTrap::ec(ThreadContext *tc) const
{
    return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
}

template<class T>
FaultOffset
ArmFaultVals<T>::offset(ThreadContext *tc)
{
    bool isHypTrap = false;

    // Normally we just use the exception vector from the table at the top of
    // this file.  However, if this exception has caused a transition to Hyp
    // mode, and it is an exception type that would only do so because it has
    // been trapped, then we use the Hyp trap vector instead of the normal
    // vector.
    if (vals.hypTrappable) {
        CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
        if (cpsr.mode == MODE_HYP) {
            CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
            isHypTrap = spsr.mode != MODE_HYP;
        }
    }
    return isHypTrap ? 0x14 : vals.offset;
}

// void
// SupervisorCall::setSyndrome64(ThreadContext *tc, MiscRegIndex esr_idx)
// {
//     ESR esr = 0;
//     esr.ec = machInst.aarch64 ? SvcAArch64 : SvcAArch32;
//     esr.il = !machInst.thumb;
//     if (machInst.aarch64)
//         esr.imm16 = bits(machInst.instBits, 20, 5);
//     else if (machInst.thumb)
//         esr.imm16 = bits(machInst.instBits, 7, 0);
//     else
//         esr.imm16 = bits(machInst.instBits, 15, 0);
//     tc->setMiscReg(esr_idx, esr);
// }

void
SecureMonitorCall::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (FullSystem) {
        ArmFault::invoke(tc, inst);
        return;
    }
}

ExceptionClass
SecureMonitorCall::ec(ThreadContext *tc) const
{
    return (from64 ? EC_SMC_64 : vals.ec);
}

ExceptionClass
SupervisorTrap::ec(ThreadContext *tc) const
{
    return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
}

ExceptionClass
SecureMonitorTrap::ec(ThreadContext *tc) const
{
    return (overrideEc != EC_INVALID) ? overrideEc :
        (from64 ? EC_SMC_64 : vals.ec);
}

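// Common prefetch/data abort handling: work out which translation method
// (VMSA short-descriptor vs. LPAE) produced the fault, perform generic
// exception entry, and then record the fault status and faulting address in
// the banked FSR/FAR registers (AArch32) or in FAR_ELx / HPFAR_EL2 (AArch64).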
template<class T>
void
AbortFault<T>::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    if (tranMethod == ArmFault::UnknownTran) {
        tranMethod = longDescFormatInUse(tc) ? ArmFault::LpaeTran
                                             : ArmFault::VmsaTran;

        if ((tranMethod == ArmFault::VmsaTran) && this->routeToMonitor(tc)) {
            // See ARM ARM B3-1416
            bool override_LPAE = false;
            TTBCR ttbcr_s = tc->readMiscReg(MISCREG_TTBCR_S);
            TTBCR M5_VAR_USED ttbcr_ns = tc->readMiscReg(MISCREG_TTBCR_NS);
            if (ttbcr_s.eae) {
                override_LPAE = true;
            } else {
                // Unimplemented code option, not seen in testing.  May need
                // extension according to the manual excerpt referenced above.
                DPRINTF(Faults, "Warning: Incomplete translation method "
                        "override detected.\n");
            }
            if (override_LPAE)
                tranMethod = ArmFault::LpaeTran;
        }
    }

    if (source == ArmFault::AsynchronousExternalAbort) {
        tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_ABT, 0);
    }
    // Get effective fault source encoding
    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
    FSR  fsr  = getFsr(tc);

    // The source must be determined BEFORE invoking the generic routines,
    // which will try to set the HSR etc. based upon it!
    ArmFaultVals<T>::invoke(tc, inst);

    if (!this->to64) {  // AArch32
        if (cpsr.mode == MODE_HYP) {
            tc->setMiscReg(T::HFarIndex, faultAddr);
        } else if (stage2) {
            tc->setMiscReg(MISCREG_HPFAR, (faultAddr >> 8) & ~0xf);
            tc->setMiscReg(T::HFarIndex,  OVAddr);
        } else {
            tc->setMiscReg(T::FsrIndex, fsr);
            tc->setMiscReg(T::FarIndex, faultAddr);
        }
        DPRINTF(Faults, "Abort Fault source=%#x fsr=%#x faultAddr=%#x "\
                "tranMethod=%#x\n", source, fsr, faultAddr, tranMethod);
    } else {  // AArch64
        // Set the FAR register.  Nothing else to do if we are in AArch64
        // state because the syndrome register has already been set inside
        // invoke64().
        if (stage2) {
            // stage 2 fault, set HPFAR_EL2 to the faulting IPA
            // and FAR_EL2 to the Original VA
            tc->setMiscReg(AbortFault<T>::getFaultAddrReg64(), OVAddr);
            tc->setMiscReg(MISCREG_HPFAR_EL2, bits(faultAddr, 47, 12) << 4);

            DPRINTF(Faults, "Abort Fault (Stage 2) VA: 0x%x IPA: 0x%x\n",
                    OVAddr, faultAddr);
        } else {
            tc->setMiscReg(AbortFault<T>::getFaultAddrReg64(), faultAddr);
        }
    }
}

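// Build the FSR value for AArch32 targets and remember the encoded fault
// source.  The encoding table used depends on the translation format (short
// vs. long descriptor); for AArch64 only the aarch64FaultSources encoding is
// recorded and the returned FSR value itself is not meaningful.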
template<class T>
FSR
AbortFault<T>::getFsr(ThreadContext *tc)
{
    FSR fsr = 0;

    if (((CPSR) tc->readMiscRegNoEffect(MISCREG_CPSR)).width) {
        // AArch32
        assert(tranMethod != ArmFault::UnknownTran);
        if (tranMethod == ArmFault::LpaeTran) {
            srcEncoded = ArmFault::longDescFaultSources[source];
            fsr.status = srcEncoded;
            fsr.lpae   = 1;
        } else {
            srcEncoded = ArmFault::shortDescFaultSources[source];
            fsr.fsLow  = bits(srcEncoded, 3, 0);
            fsr.fsHigh = bits(srcEncoded, 4);
            fsr.domain = static_cast<uint8_t>(domain);
        }
        fsr.wnr = (write ? 1 : 0);
        fsr.ext = 0;
    } else {
        // AArch64
        srcEncoded = ArmFault::aarch64FaultSources[source];
    }
    if (srcEncoded == ArmFault::FaultSourceInvalid) {
        panic("Invalid fault source\n");
    }
    return fsr;
}

template<class T>
bool
AbortFault<T>::abortDisable(ThreadContext *tc)
{
    if (ArmSystem::haveSecurity(tc)) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        return (!scr.ns || scr.aw);
    }
    return true;
}

template<class T>
void
AbortFault<T>::annotate(ArmFault::AnnotationIDs id, uint64_t val)
{
    switch (id)
    {
      case ArmFault::S1PTW:
        s1ptw = val;
        break;
      case ArmFault::OVA:
        OVAddr = val;
        break;

      // Just ignore unknown IDs
      default:
        break;
    }
}

template<class T>
uint32_t
AbortFault<T>::iss() const
{
    uint32_t val;

    val  = srcEncoded & 0x3F;
    val |= write << 6;
    val |= s1ptw << 7;
    return (val);
}

template<class T>
bool
AbortFault<T>::isMMUFault() const
{
    // NOTE: Not relying on LL information being aligned to lowest bits here
    return
         (source == ArmFault::AlignmentFault)     ||
        ((source >= ArmFault::TranslationLL) &&
         (source <  ArmFault::TranslationLL + 4)) ||
        ((source >= ArmFault::AccessFlagLL) &&
         (source <  ArmFault::AccessFlagLL + 4))  ||
        ((source >= ArmFault::DomainLL) &&
         (source <  ArmFault::DomainLL + 4))      ||
        ((source >= ArmFault::PermissionLL) &&
         (source <  ArmFault::PermissionLL + 4));
}

ExceptionClass
PrefetchAbort::ec(ThreadContext *tc) const
{
    if (to64) {
        // AArch64
        if (toEL == fromEL)
            return EC_PREFETCH_ABORT_CURR_EL;
        else
            return EC_PREFETCH_ABORT_LOWER_EL;
    } else {
        // AArch32
        // Abort faults have different EC codes depending on whether
        // the fault originated within HYP mode, or not. So override
        // the method and add the extra adjustment of the EC value.

        ExceptionClass ec = ArmFaultVals<PrefetchAbort>::vals.ec;

        CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
        if (spsr.mode == MODE_HYP) {
            ec = ((ExceptionClass) (((uint32_t) ec) + 1));
        }
        return ec;
    }
}

bool
PrefetchAbort::routeToMonitor(ThreadContext *tc) const
{
    SCR scr = 0;
    if (from64)
        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    else
        scr = tc->readMiscRegNoEffect(MISCREG_SCR);

    return scr.ea && !isMMUFault();
}

bool
PrefetchAbort::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    HDCR hdcr = tc->readMiscRegNoEffect(MISCREG_HDCR);

    // if in Hyp mode then stay in Hyp mode
    toHyp  = scr.ns && (cpsr.mode == MODE_HYP);
    // otherwise, check whether to take to Hyp mode through Hyp Trap vector
    toHyp |= (stage2 ||
                ( (source ==               DebugEvent) && hdcr.tde && (cpsr.mode !=  MODE_HYP)) ||
                ( (source == SynchronousExternalAbort) && hcr.tge  && (cpsr.mode == MODE_USER))
             ) && !inSecureState(tc);
    return toHyp;
}

ExceptionClass
DataAbort::ec(ThreadContext *tc) const
{
    if (to64) {
        // AArch64
        if (source == ArmFault::AsynchronousExternalAbort) {
            panic("Asynchronous External Abort should be handled with "
                    "SystemErrors (SErrors)!");
        }
        if (toEL == fromEL)
            return EC_DATA_ABORT_CURR_EL;
        else
            return EC_DATA_ABORT_LOWER_EL;
    } else {
        // AArch32
        // Abort faults have different EC codes depending on whether
        // the fault originated within HYP mode, or not. So override
        // the method and add the extra adjustment of the EC value.

        ExceptionClass ec = ArmFaultVals<DataAbort>::vals.ec;

        CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
        if (spsr.mode == MODE_HYP) {
            ec = ((ExceptionClass) (((uint32_t) ec) + 1));
        }
        return ec;
    }
}

bool
DataAbort::routeToMonitor(ThreadContext *tc) const
{
    SCR scr = 0;
    if (from64)
        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    else
        scr = tc->readMiscRegNoEffect(MISCREG_SCR);

    return scr.ea && !isMMUFault();
}

bool
DataAbort::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    HDCR hdcr = tc->readMiscRegNoEffect(MISCREG_HDCR);

    // if in Hyp mode then stay in Hyp mode
    toHyp  = scr.ns && (cpsr.mode == MODE_HYP);
    // otherwise, check whether to take to Hyp mode through Hyp Trap vector
    toHyp |= (stage2 ||
                ( (cpsr.mode != MODE_HYP) && ( ((source == AsynchronousExternalAbort) && hcr.amo) ||
                                               ((source == DebugEvent) && hdcr.tde) )
                ) ||
                ( (cpsr.mode == MODE_USER) && hcr.tge &&
                  ((source == AlignmentFault)            ||
                   (source == SynchronousExternalAbort))
                )
             ) && !inSecureState(tc);
    return toHyp;
}

uint32_t
DataAbort::iss() const
{
    uint32_t val;

    // Add on the data abort specific fields to the generic abort ISS value
    val  = AbortFault<DataAbort>::iss();
    // ISS is valid if not caused by a stage 1 page table walk, and when taken
    // to AArch64 only when directed to EL2
    if (!s1ptw && (!to64 || toEL == EL2)) {
        val |= isv << 24;
        if (isv) {
            val |= sas << 22;
            val |= sse << 21;
            val |= srt << 16;
            // AArch64 only. These assignments are safe on AArch32 as well
            // because these vars are initialized to false
            val |= sf << 15;
            val |= ar << 14;
        }
    }
    return (val);
}

void
DataAbort::annotate(AnnotationIDs id, uint64_t val)
{
    AbortFault<DataAbort>::annotate(id, val);
    switch (id)
    {
      case SAS:
        isv = true;
        sas = val;
        break;
      case SSE:
        isv = true;
        sse = val;
        break;
      case SRT:
        isv = true;
        srt = val;
        break;
      case SF:
        isv = true;
        sf  = val;
        break;
      case AR:
        isv = true;
        ar  = val;
        break;
      // Just ignore unknown IDs
      default:
        break;
    }
}

void
VirtualDataAbort::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    AbortFault<VirtualDataAbort>::invoke(tc, inst);
    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
    hcr.va = 0;
    tc->setMiscRegNoEffect(MISCREG_HCR, hcr);
}

bool
Interrupt::routeToMonitor(ThreadContext *tc) const
{
    assert(ArmSystem::haveSecurity(tc));
    SCR scr = 0;
    if (from64)
        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    else
        scr = tc->readMiscRegNoEffect(MISCREG_SCR);
    return scr.irq;
}

bool
Interrupt::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    // Determine whether IRQs are routed to Hyp mode.
    toHyp = (!scr.irq && hcr.imo && !inSecureState(tc)) ||
            (cpsr.mode == MODE_HYP);
    return toHyp;
}

bool
Interrupt::abortDisable(ThreadContext *tc)
{
    if (ArmSystem::haveSecurity(tc)) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        return (!scr.ns || scr.aw);
    }
    return true;
}

VirtualInterrupt::VirtualInterrupt()
{}

bool
FastInterrupt::routeToMonitor(ThreadContext *tc) const
{
    assert(ArmSystem::haveSecurity(tc));
    SCR scr = 0;
    if (from64)
        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    else
        scr = tc->readMiscRegNoEffect(MISCREG_SCR);
    return scr.fiq;
}

bool
FastInterrupt::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;

    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
    // Determine whether FIQs are routed to Hyp mode.
    toHyp = (!scr.fiq && hcr.fmo && !inSecureState(tc)) ||
            (cpsr.mode == MODE_HYP);
    return toHyp;
}

bool
FastInterrupt::abortDisable(ThreadContext *tc)
{
    if (ArmSystem::haveSecurity(tc)) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        return (!scr.ns || scr.aw);
    }
    return true;
}

bool
FastInterrupt::fiqDisable(ThreadContext *tc)
{
    if (ArmSystem::haveVirtualization(tc)) {
        return true;
    } else if (ArmSystem::haveSecurity(tc)) {
        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
        return (!scr.ns || scr.fw);
    }
    return true;
}

VirtualFastInterrupt::VirtualFastInterrupt()
{}

void
PCAlignmentFault::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    ArmFaultVals<PCAlignmentFault>::invoke(tc, inst);
    assert(from64);
    // Set the FAR
    tc->setMiscReg(getFaultAddrReg64(), faultPC);
}

SPAlignmentFault::SPAlignmentFault()
{}

SystemError::SystemError()
{}

void
SystemError::invoke(ThreadContext *tc, const StaticInstPtr &inst)
{
    tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_ABT, 0);
    ArmFault::invoke(tc, inst);
}

bool
SystemError::routeToMonitor(ThreadContext *tc) const
{
    assert(ArmSystem::haveSecurity(tc));
    assert(from64);
    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    return scr.ea;
}

bool
SystemError::routeToHyp(ThreadContext *tc) const
{
    bool toHyp;
    assert(from64);

    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
    HCR hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);

    toHyp = (!scr.ea && hcr.amo && !inSecureState(tc)) ||
            (!scr.ea && !scr.rw && !hcr.amo && !inSecureState(tc));
    return toHyp;
}

void
FlushPipe::invoke(ThreadContext *tc, const StaticInstPtr &inst) {
    DPRINTF(Faults, "Invoking FlushPipe Fault\n");

    // Set the PC to the instruction after the faulting one.  The net effect
    // is to squash all younger instructions and restart fetching from the
    // next instruction.
    PCState pc = tc->pcState();
    assert(inst);
    inst->advancePC(pc);
    tc->pcState(pc);
}

void
ArmSev::invoke(ThreadContext *tc, const StaticInstPtr &inst) {
    DPRINTF(Faults, "Invoking ArmSev Fault\n");
    if (!FullSystem)
        return;

    // Set sev_mailbox to 1, clear the pending interrupt from remote
    // SEV execution and let the pipeline continue, as pcState is still
    // valid.
    tc->setMiscReg(MISCREG_SEV_MAILBOX, 1);
    tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_SEV, 0);
}

// Instantiate all the templates to make the linker happy
template class ArmFaultVals<Reset>;
template class ArmFaultVals<UndefinedInstruction>;
template class ArmFaultVals<SupervisorCall>;
template class ArmFaultVals<SecureMonitorCall>;
template class ArmFaultVals<HypervisorCall>;
template class ArmFaultVals<PrefetchAbort>;
template class ArmFaultVals<DataAbort>;
template class ArmFaultVals<VirtualDataAbort>;
template class ArmFaultVals<HypervisorTrap>;
template class ArmFaultVals<Interrupt>;
template class ArmFaultVals<VirtualInterrupt>;
template class ArmFaultVals<FastInterrupt>;
template class ArmFaultVals<VirtualFastInterrupt>;
template class ArmFaultVals<SupervisorTrap>;
template class ArmFaultVals<SecureMonitorTrap>;
template class ArmFaultVals<PCAlignmentFault>;
template class ArmFaultVals<SPAlignmentFault>;
template class ArmFaultVals<SystemError>;
template class ArmFaultVals<FlushPipe>;
template class ArmFaultVals<ArmSev>;
template class AbortFault<PrefetchAbort>;
template class AbortFault<DataAbort>;
template class AbortFault<VirtualDataAbort>;


IllegalInstSetStateFault::IllegalInstSetStateFault()
{}


} // namespace ArmISA
