faults.cc revision 12763:37c243ed1112
1/*
2 * Copyright (c) 2010, 2012-2014, 2016-2018 ARM Limited
3 * All rights reserved
4 *
5 * The license below extends only to copyright in the software and shall
6 * not be construed as granting a license to any other intellectual
7 * property including but not limited to intellectual property relating
8 * to a hardware implementation of the functionality of the software
9 * licensed hereunder.  You may use the software subject to the license
10 * terms below provided that you ensure that this notice is replicated
11 * unmodified and in its entirety in all distributions of the software,
12 * modified or unmodified, in source code or in binary form.
13 *
14 * Copyright (c) 2003-2005 The Regents of The University of Michigan
15 * Copyright (c) 2007-2008 The Florida State University
16 * All rights reserved.
17 *
18 * Redistribution and use in source and binary forms, with or without
19 * modification, are permitted provided that the following conditions are
20 * met: redistributions of source code must retain the above copyright
21 * notice, this list of conditions and the following disclaimer;
22 * redistributions in binary form must reproduce the above copyright
23 * notice, this list of conditions and the following disclaimer in the
24 * documentation and/or other materials provided with the distribution;
25 * neither the name of the copyright holders nor the names of its
26 * contributors may be used to endorse or promote products derived from
27 * this software without specific prior written permission.
28 *
29 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
30 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
31 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
32 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
33 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
34 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
35 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
36 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
37 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
38 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
39 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
40 *
41 * Authors: Ali Saidi
42 *          Gabe Black
43 *          Giacomo Gabrielli
44 *          Thomas Grocutt
45 */
46
47#include "arch/arm/faults.hh"
48
49#include "arch/arm/insts/static_inst.hh"
50#include "arch/arm/system.hh"
51#include "arch/arm/utility.hh"
52#include "base/compiler.hh"
53#include "base/trace.hh"
54#include "cpu/base.hh"
55#include "cpu/thread_context.hh"
56#include "debug/Faults.hh"
57#include "sim/full_system.hh"
58
59namespace ArmISA
60{
61
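// The three tables below map each ArmFault::FaultSource to the fault status
// code reported to software: shortDescFaultSources for the AArch32
// short-descriptor DFSR/IFSR format, longDescFaultSources for the AArch32
// long-descriptor (LPAE) format, and aarch64FaultSources for the fault status
// field reported in the AArch64 syndrome. Entries of 0xff mark sources that
// have no encoding in the given format.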
62uint8_t ArmFault::shortDescFaultSources[] = {
63    0x01,  // AlignmentFault
64    0x04,  // InstructionCacheMaintenance
65    0xff,  // SynchExtAbtOnTranslTableWalkL0 (INVALID)
66    0x0c,  // SynchExtAbtOnTranslTableWalkL1
67    0x0e,  // SynchExtAbtOnTranslTableWalkL2
68    0xff,  // SynchExtAbtOnTranslTableWalkL3 (INVALID)
69    0xff,  // SynchPtyErrOnTranslTableWalkL0 (INVALID)
70    0x1c,  // SynchPtyErrOnTranslTableWalkL1
71    0x1e,  // SynchPtyErrOnTranslTableWalkL2
72    0xff,  // SynchPtyErrOnTranslTableWalkL3 (INVALID)
73    0xff,  // TranslationL0 (INVALID)
74    0x05,  // TranslationL1
75    0x07,  // TranslationL2
76    0xff,  // TranslationL3 (INVALID)
77    0xff,  // AccessFlagL0 (INVALID)
78    0x03,  // AccessFlagL1
79    0x06,  // AccessFlagL2
80    0xff,  // AccessFlagL3 (INVALID)
81    0xff,  // DomainL0 (INVALID)
82    0x09,  // DomainL1
83    0x0b,  // DomainL2
84    0xff,  // DomainL3 (INVALID)
85    0xff,  // PermissionL0 (INVALID)
86    0x0d,  // PermissionL1
87    0x0f,  // PermissionL2
88    0xff,  // PermissionL3 (INVALID)
89    0x02,  // DebugEvent
90    0x08,  // SynchronousExternalAbort
91    0x10,  // TLBConflictAbort
92    0x19,  // SynchPtyErrOnMemoryAccess
93    0x16,  // AsynchronousExternalAbort
94    0x18,  // AsynchPtyErrOnMemoryAccess
95    0xff,  // AddressSizeL0 (INVALID)
96    0xff,  // AddressSizeL1 (INVALID)
97    0xff,  // AddressSizeL2 (INVALID)
98    0xff,  // AddressSizeL3 (INVALID)
99    0x40,  // PrefetchTLBMiss
100    0x80   // PrefetchUncacheable
101};
102
103static_assert(sizeof(ArmFault::shortDescFaultSources) ==
104              ArmFault::NumFaultSources,
105              "Invalid size of ArmFault::shortDescFaultSources[]");
106
107uint8_t ArmFault::longDescFaultSources[] = {
108    0x21,  // AlignmentFault
109    0xff,  // InstructionCacheMaintenance (INVALID)
110    0xff,  // SynchExtAbtOnTranslTableWalkL0 (INVALID)
111    0x15,  // SynchExtAbtOnTranslTableWalkL1
112    0x16,  // SynchExtAbtOnTranslTableWalkL2
113    0x17,  // SynchExtAbtOnTranslTableWalkL3
114    0xff,  // SynchPtyErrOnTranslTableWalkL0 (INVALID)
115    0x1d,  // SynchPtyErrOnTranslTableWalkL1
116    0x1e,  // SynchPtyErrOnTranslTableWalkL2
117    0x1f,  // SynchPtyErrOnTranslTableWalkL3
118    0xff,  // TranslationL0 (INVALID)
119    0x05,  // TranslationL1
120    0x06,  // TranslationL2
121    0x07,  // TranslationL3
122    0xff,  // AccessFlagL0 (INVALID)
123    0x09,  // AccessFlagL1
124    0x0a,  // AccessFlagL2
125    0x0b,  // AccessFlagL3
126    0xff,  // DomainL0 (INVALID)
127    0x3d,  // DomainL1
128    0x3e,  // DomainL2
129    0xff,  // DomainL3 (RESERVED)
130    0xff,  // PermissionL0 (INVALID)
131    0x0d,  // PermissionL1
132    0x0e,  // PermissionL2
133    0x0f,  // PermissionL3
134    0x22,  // DebugEvent
135    0x10,  // SynchronousExternalAbort
136    0x30,  // TLBConflictAbort
137    0x18,  // SynchPtyErrOnMemoryAccess
138    0x11,  // AsynchronousExternalAbort
139    0x19,  // AsynchPtyErrOnMemoryAccess
140    0xff,  // AddressSizeL0 (INVALID)
141    0xff,  // AddressSizeL1 (INVALID)
142    0xff,  // AddressSizeL2 (INVALID)
143    0xff,  // AddressSizeL3 (INVALID)
144    0x40,  // PrefetchTLBMiss
145    0x80   // PrefetchUncacheable
146};
147
148static_assert(sizeof(ArmFault::longDescFaultSources) ==
149              ArmFault::NumFaultSources,
150              "Invalid size of ArmFault::longDescFaultSources[]");
151
152uint8_t ArmFault::aarch64FaultSources[] = {
153    0x21,  // AlignmentFault
154    0xff,  // InstructionCacheMaintenance (INVALID)
155    0x14,  // SynchExtAbtOnTranslTableWalkL0
156    0x15,  // SynchExtAbtOnTranslTableWalkL1
157    0x16,  // SynchExtAbtOnTranslTableWalkL2
158    0x17,  // SynchExtAbtOnTranslTableWalkL3
159    0x1c,  // SynchPtyErrOnTranslTableWalkL0
160    0x1d,  // SynchPtyErrOnTranslTableWalkL1
161    0x1e,  // SynchPtyErrOnTranslTableWalkL2
162    0x1f,  // SynchPtyErrOnTranslTableWalkL3
163    0x04,  // TranslationL0
164    0x05,  // TranslationL1
165    0x06,  // TranslationL2
166    0x07,  // TranslationL3
167    0x08,  // AccessFlagL0
168    0x09,  // AccessFlagL1
169    0x0a,  // AccessFlagL2
170    0x0b,  // AccessFlagL3
171    // @todo: Section & Page Domain Fault in AArch64?
172    0xff,  // DomainL0 (INVALID)
173    0xff,  // DomainL1 (INVALID)
174    0xff,  // DomainL2 (INVALID)
175    0xff,  // DomainL3 (INVALID)
176    0x0c,  // PermissionL0
177    0x0d,  // PermissionL1
178    0x0e,  // PermissionL2
179    0x0f,  // PermissionL3
180    0x22,  // DebugEvent
181    0x10,  // SynchronousExternalAbort
182    0x30,  // TLBConflictAbort
183    0x18,  // SynchPtyErrOnMemoryAccess
184    0xff,  // AsynchronousExternalAbort (INVALID)
185    0xff,  // AsynchPtyErrOnMemoryAccess (INVALID)
186    0x00,  // AddressSizeL0
187    0x01,  // AddressSizeL1
188    0x02,  // AddressSizeL2
189    0x03,  // AddressSizeL3
190    0x40,  // PrefetchTLBMiss
191    0x80   // PrefetchUncacheable
192};
193
194static_assert(sizeof(ArmFault::aarch64FaultSources) ==
195              ArmFault::NumFaultSources,
196              "Invalid size of ArmFault::aarch64FaultSources[]");
197
198// Fields: name, offset, cur{ELT,ELH}Offset, lowerEL{64,32}Offset, next mode,
199//         {ARM, Thumb, ARM_ELR, Thumb_ELR} PC offset, hyp trap,
200//         {A, F} disable, class, stat
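//
// As an example, reading the UndefinedInstruction entry below against that
// field list: AArch32 vector offset 0x004, current-EL (ELT/ELH) offsets
// 0x000/0x200, lower-EL AArch64/AArch32 offsets 0x400/0x600, next mode
// MODE_UNDEFINED, ARM/Thumb PC offsets 4/2, ARM/Thumb ELR offsets 0/0,
// trappable to Hyp, A/F not disabled, and exception class EC_UNKNOWN.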
201template<> ArmFault::FaultVals ArmFaultVals<Reset>::vals(
202    // Some dummy values (the reset vector has an IMPLEMENTATION DEFINED
203    // location in AArch64)
204    "Reset",                 0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC,
205    0, 0, 0, 0, false, true,  true,  EC_UNKNOWN
206);
207template<> ArmFault::FaultVals ArmFaultVals<UndefinedInstruction>::vals(
208    "Undefined Instruction", 0x004, 0x000, 0x200, 0x400, 0x600, MODE_UNDEFINED,
209    4, 2, 0, 0, true,  false, false, EC_UNKNOWN
210);
211template<> ArmFault::FaultVals ArmFaultVals<SupervisorCall>::vals(
212    "Supervisor Call",       0x008, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
213    4, 2, 4, 2, true,  false, false, EC_SVC_TO_HYP
214);
215template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorCall>::vals(
216    "Secure Monitor Call",   0x008, 0x000, 0x200, 0x400, 0x600, MODE_MON,
217    4, 4, 4, 4, false, true,  true,  EC_SMC_TO_HYP
218);
219template<> ArmFault::FaultVals ArmFaultVals<HypervisorCall>::vals(
220    "Hypervisor Call",       0x008, 0x000, 0x200, 0x400, 0x600, MODE_HYP,
221    4, 4, 4, 4, true,  false, false, EC_HVC
222);
223template<> ArmFault::FaultVals ArmFaultVals<PrefetchAbort>::vals(
224    "Prefetch Abort",        0x00C, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
225    4, 4, 0, 0, true,  true,  false, EC_PREFETCH_ABORT_TO_HYP
226);
227template<> ArmFault::FaultVals ArmFaultVals<DataAbort>::vals(
228    "Data Abort",            0x010, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
229    8, 8, 0, 0, true,  true,  false, EC_DATA_ABORT_TO_HYP
230);
231template<> ArmFault::FaultVals ArmFaultVals<VirtualDataAbort>::vals(
232    "Virtual Data Abort",    0x010, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
233    8, 8, 0, 0, true,  true,  false, EC_INVALID
234);
235template<> ArmFault::FaultVals ArmFaultVals<HypervisorTrap>::vals(
236    // @todo: double check these values
237    "Hypervisor Trap",       0x014, 0x000, 0x200, 0x400, 0x600, MODE_HYP,
238    0, 0, 0, 0, false, false, false, EC_UNKNOWN
239);
240template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorTrap>::vals(
241    "Secure Monitor Trap",   0x004, 0x000, 0x200, 0x400, 0x600, MODE_MON,
242    4, 2, 0, 0, false, false, false, EC_UNKNOWN
243);
244template<> ArmFault::FaultVals ArmFaultVals<Interrupt>::vals(
245    "IRQ",                   0x018, 0x080, 0x280, 0x480, 0x680, MODE_IRQ,
246    4, 4, 0, 0, false, true,  false, EC_UNKNOWN
247);
248template<> ArmFault::FaultVals ArmFaultVals<VirtualInterrupt>::vals(
249    "Virtual IRQ",           0x018, 0x080, 0x280, 0x480, 0x680, MODE_IRQ,
250    4, 4, 0, 0, false, true,  false, EC_INVALID
251);
252template<> ArmFault::FaultVals ArmFaultVals<FastInterrupt>::vals(
253    "FIQ",                   0x01C, 0x100, 0x300, 0x500, 0x700, MODE_FIQ,
254    4, 4, 0, 0, false, true,  true,  EC_UNKNOWN
255);
256template<> ArmFault::FaultVals ArmFaultVals<VirtualFastInterrupt>::vals(
257    "Virtual FIQ",           0x01C, 0x100, 0x300, 0x500, 0x700, MODE_FIQ,
258    4, 4, 0, 0, false, true,  true,  EC_INVALID
259);
260template<> ArmFault::FaultVals ArmFaultVals<SupervisorTrap>::vals(
261    // Some dummy values (SupervisorTrap is AArch64-only)
262    "Supervisor Trap",   0x014, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
263    0, 0, 0, 0, false, false, false, EC_UNKNOWN
264);
265template<> ArmFault::FaultVals ArmFaultVals<PCAlignmentFault>::vals(
266    // Some dummy values (PCAlignmentFault is AArch64-only)
267    "PC Alignment Fault",   0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
268    0, 0, 0, 0, true, false, false, EC_PC_ALIGNMENT
269);
270template<> ArmFault::FaultVals ArmFaultVals<SPAlignmentFault>::vals(
271    // Some dummy values (SPAlignmentFault is AArch64-only)
272    "SP Alignment Fault",   0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
273    0, 0, 0, 0, true, false, false, EC_STACK_PTR_ALIGNMENT
274);
275template<> ArmFault::FaultVals ArmFaultVals<SystemError>::vals(
276    // Some dummy values (SError is AArch64-only)
277    "SError",                0x000, 0x180, 0x380, 0x580, 0x780, MODE_SVC,
278    0, 0, 0, 0, false, true,  true,  EC_SERROR
279);
280template<> ArmFault::FaultVals ArmFaultVals<SoftwareBreakpoint>::vals(
281    // Some dummy values (SoftwareBreakpoint is AArch64-only)
282    "Software Breakpoint",   0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
283    0, 0, 0, 0, true, false, false,  EC_SOFTWARE_BREAKPOINT
284);
285template<> ArmFault::FaultVals ArmFaultVals<ArmSev>::vals(
286    // Some dummy values
287    "ArmSev Flush",          0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC,
288    0, 0, 0, 0, false, true,  true,  EC_UNKNOWN
289);
290template<> ArmFault::FaultVals ArmFaultVals<IllegalInstSetStateFault>::vals(
291    // Some dummy values (IllegalInstSetStateFault is AArch64-only)
292    "Illegal Inst Set State Fault",   0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
293    0, 0, 0, 0, true, false, false, EC_ILLEGAL_INST
294);
295
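// Compute the AArch32 exception vector address: the vector base is MVBAR in
// Monitor mode, HVBAR in Hyp mode, the high exception vectors when SCTLR.V is
// set, and otherwise VBAR (or 0 without the Security Extensions); the fault's
// mode-specific offset is then added to that base.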
296Addr
297ArmFault::getVector(ThreadContext *tc)
298{
299    Addr base;
300
301    // ARM ARM issue C B1.8.1
302    bool haveSecurity = ArmSystem::haveSecurity(tc);
303
304    // SCTLR.VE (vectored interrupts) is not supported, so assert that it
305    // is clear
306    SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR);
307    assert(!sctlr.ve);
308    // Check for invalid modes
309    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
310    assert(haveSecurity                      || cpsr.mode != MODE_MON);
311    assert(ArmSystem::haveVirtualization(tc) || cpsr.mode != MODE_HYP);
312
313    switch (cpsr.mode)
314    {
315      case MODE_MON:
316        base = tc->readMiscReg(MISCREG_MVBAR);
317        break;
318      case MODE_HYP:
319        base = tc->readMiscReg(MISCREG_HVBAR);
320        break;
321      default:
322        if (sctlr.v) {
323            base = HighVecs;
324        } else {
325            base = haveSecurity ? tc->readMiscReg(MISCREG_VBAR) : 0;
326        }
327        break;
328    }
329    return base + offset(tc);
330}
331
332Addr
333ArmFault::getVector64(ThreadContext *tc)
334{
335    Addr vbar;
336    switch (toEL) {
337      case EL3:
338        assert(ArmSystem::haveSecurity(tc));
339        vbar = tc->readMiscReg(MISCREG_VBAR_EL3);
340        break;
341      case EL2:
342        assert(ArmSystem::haveVirtualization(tc));
343        vbar = tc->readMiscReg(MISCREG_VBAR_EL2);
344        break;
345      case EL1:
346        vbar = tc->readMiscReg(MISCREG_VBAR_EL1);
347        break;
348      default:
349        panic("Invalid target exception level");
350        break;
351    }
352    return vbar + offset64(tc);
353}
354
355MiscRegIndex
356ArmFault::getSyndromeReg64() const
357{
358    switch (toEL) {
359      case EL1:
360        return MISCREG_ESR_EL1;
361      case EL2:
362        return MISCREG_ESR_EL2;
363      case EL3:
364        return MISCREG_ESR_EL3;
365      default:
366        panic("Invalid exception level");
367        break;
368    }
369}
370
371MiscRegIndex
372ArmFault::getFaultAddrReg64() const
373{
374    switch (toEL) {
375      case EL1:
376        return MISCREG_FAR_EL1;
377      case EL2:
378        return MISCREG_FAR_EL2;
379      case EL3:
380        return MISCREG_FAR_EL3;
381      default:
382        panic("Invalid exception level");
383        break;
384    }
385}
386
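// Assemble the syndrome value written to HSR (AArch32 target) or ESR_ELx
// (AArch64 target). The layout built below is:
//   [31:26] EC    exception class
//   [25]    IL    32-bit instruction length (forced to 1 for AArch64 targets)
//   [24]    CV    condition code valid (AArch32 trapped instructions only)
//   [23:20] COND  condition code of the trapped instruction
//   [19:0]  ISS   instruction specific syndrome (bits [24:0] when CV/COND
//                 are not reported)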
387void
388ArmFault::setSyndrome(ThreadContext *tc, MiscRegIndex syndrome_reg)
389{
390    uint32_t value;
391    uint32_t exc_class = (uint32_t) ec(tc);
392    uint32_t issVal = iss();
393
394    assert(!from64 || ArmSystem::highestELIs64(tc));
395
396    value = exc_class << 26;
397
398    // HSR.IL not valid for Prefetch Aborts (0x20, 0x21) and Data Aborts (0x24,
399    // 0x25) for which the ISS information is not valid (ARMv7).
400    // @todo: ARMv8 revises AArch32 functionality: when HSR.IL is not
401    // valid it is treated as RES1.
402    if (to64) {
403        value |= 1 << 25;
404    } else if ((bits(exc_class, 5, 3) != 4) ||
405               (bits(exc_class, 2) && bits(issVal, 24))) {
406        if (!machInst.thumb || machInst.bigThumb)
407            value |= 1 << 25;
408    }
409    // The condition code is only reported when EC[5:4] == 0 and EC[3:0] != 0
410    if (!from64 && ((bits(exc_class, 5, 4) == 0) &&
411                    (bits(exc_class, 3, 0) != 0))) {
412        if (!machInst.thumb) {
413            uint32_t      cond;
414            ConditionCode condCode = (ConditionCode) (uint32_t) machInst.condCode;
415            // If it's an unconditional instruction, report a condition
416            // code of 0xE, i.e. the always (AL) encoding
417            cond  = (condCode == COND_UC) ? COND_AL : condCode;
418            value |= cond << 20;
419            value |= 1    << 24;
420        }
421        value |= bits(issVal, 19, 0);
422    } else {
423        value |= issVal;
424    }
425    tc->setMiscReg(syndrome_reg, value);
426}
427
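// Work out the source and target state of the fault: fromMode/fromEL/from64
// describe the state the fault was taken from, while toMode/toEL/to64 describe
// where it will be handled (Monitor, Hyp, or the fault's nextMode()), never
// dropping below the source EL.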
428void
429ArmFault::update(ThreadContext *tc)
430{
431    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
432
433    // Determine source exception level and mode
434    fromMode = (OperatingMode) (uint8_t) cpsr.mode;
435    fromEL = opModeToEL(fromMode);
436    if (opModeIs64(fromMode))
437        from64 = true;
438
439    // Determine target exception level (aarch64) or target execution
440    // mode (aarch32).
441    if (ArmSystem::haveSecurity(tc) && routeToMonitor(tc)) {
442        toMode = MODE_MON;
443        toEL = EL3;
444    } else if (ArmSystem::haveVirtualization(tc) && routeToHyp(tc)) {
445        toMode = MODE_HYP;
446        toEL = EL2;
447        hypRouted = true;
448    } else {
449        toMode = nextMode();
450        toEL = opModeToEL(toMode);
451    }
452
453    if (fromEL > toEL)
454        toEL = fromEL;
455
456    to64 = ELIs64(tc, toEL);
457
458    // The fault-specific information has been updated; it can now be
459    // used inside the fault.
460    faultUpdated = true;
461}
462
463void
464ArmFault::invoke(ThreadContext *tc, const StaticInstPtr &inst)
465{
466
467    // Update fault state information, such as the starting mode (AArch32)
468    // or EL (AArch64) and the ending mode or EL.
469    // The update function also determines whether the fault must be
470    // handled in AArch64 state (to64).
471    update(tc);
472
473    if (to64) {
474        // Invoke exception handler in AArch64 state
475        invoke64(tc, inst);
476        return;
477    }
478
479    // ARMv7 (ARM ARM issue C B1.9)
480
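    // AArch32 exception entry: save the CPSR into the banked SPSR of the
    // target mode, switch the CPSR mode and interrupt masks, write the return
    // address into the banked LR (or ELR_hyp), and branch to the vector
    // computed by getVector().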
481    bool have_security       = ArmSystem::haveSecurity(tc);
482
483    FaultBase::invoke(tc);
484    if (!FullSystem)
485        return;
486    countStat()++;
487
488    SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR);
489    SCR scr = tc->readMiscReg(MISCREG_SCR);
490    CPSR saved_cpsr = tc->readMiscReg(MISCREG_CPSR);
491    saved_cpsr.nz = tc->readCCReg(CCREG_NZ);
492    saved_cpsr.c = tc->readCCReg(CCREG_C);
493    saved_cpsr.v = tc->readCCReg(CCREG_V);
494    saved_cpsr.ge = tc->readCCReg(CCREG_GE);
495
496    Addr curPc M5_VAR_USED = tc->pcState().pc();
497    ITSTATE it = tc->pcState().itstate();
498    saved_cpsr.it2 = it.top6;
499    saved_cpsr.it1 = it.bottom2;
500
501    // If we have a valid instruction, use it to annotate this fault with
502    // extra information. This is used to generate the correct fault syndrome
503    // information.
504    if (inst) {
505        ArmStaticInst *armInst = static_cast<ArmStaticInst *>(inst.get());
506        armInst->annotateFault(this);
507    }
508
509    // Ensure Secure state if initially in Monitor mode
510    if (have_security && saved_cpsr.mode == MODE_MON) {
511        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
512        if (scr.ns) {
513            scr.ns = 0;
514            tc->setMiscRegNoEffect(MISCREG_SCR, scr);
515        }
516    }
517
518    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
519    cpsr.mode = toMode;
520
521    // some bits are set differently if we have been routed to hyp mode
522    if (cpsr.mode == MODE_HYP) {
523        SCTLR hsctlr = tc->readMiscReg(MISCREG_HSCTLR);
524        cpsr.t = hsctlr.te;
525        cpsr.e = hsctlr.ee;
526        if (!scr.ea)  {cpsr.a = 1;}
527        if (!scr.fiq) {cpsr.f = 1;}
528        if (!scr.irq) {cpsr.i = 1;}
529    } else if (cpsr.mode == MODE_MON) {
530        // Special case handling when entering monitor mode
531        cpsr.t = sctlr.te;
532        cpsr.e = sctlr.ee;
533        cpsr.a = 1;
534        cpsr.f = 1;
535        cpsr.i = 1;
536    } else {
537        cpsr.t = sctlr.te;
538        cpsr.e = sctlr.ee;
539
540        // The *Disable functions are virtual and different per fault
541        cpsr.a = cpsr.a | abortDisable(tc);
542        cpsr.f = cpsr.f | fiqDisable(tc);
543        cpsr.i = 1;
544    }
545    cpsr.it1 = cpsr.it2 = 0;
546    cpsr.j = 0;
547    tc->setMiscReg(MISCREG_CPSR, cpsr);
548
549    // Make sure the SEV mailbox is always set to one
550    tc->setMiscReg(MISCREG_SEV_MAILBOX, 1);
551
552    // Clear the exclusive monitor
553    tc->setMiscReg(MISCREG_LOCKFLAG, 0);
554
555    if (cpsr.mode == MODE_HYP) {
556        tc->setMiscReg(MISCREG_ELR_HYP, curPc +
557                (saved_cpsr.t ? thumbPcOffset(true)  : armPcOffset(true)));
558    } else {
559        tc->setIntReg(INTREG_LR, curPc +
560                (saved_cpsr.t ? thumbPcOffset(false) : armPcOffset(false)));
561    }
562
563    switch (cpsr.mode) {
564      case MODE_FIQ:
565        tc->setMiscReg(MISCREG_SPSR_FIQ, saved_cpsr);
566        break;
567      case MODE_IRQ:
568        tc->setMiscReg(MISCREG_SPSR_IRQ, saved_cpsr);
569        break;
570      case MODE_SVC:
571        tc->setMiscReg(MISCREG_SPSR_SVC, saved_cpsr);
572        break;
573      case MODE_MON:
574        assert(have_security);
575        tc->setMiscReg(MISCREG_SPSR_MON, saved_cpsr);
576        break;
577      case MODE_ABORT:
578        tc->setMiscReg(MISCREG_SPSR_ABT, saved_cpsr);
579        break;
580      case MODE_UNDEFINED:
581        tc->setMiscReg(MISCREG_SPSR_UND, saved_cpsr);
582        if (ec(tc) != EC_UNKNOWN)
583            setSyndrome(tc, MISCREG_HSR);
584        break;
585      case MODE_HYP:
586        assert(ArmSystem::haveVirtualization(tc));
587        tc->setMiscReg(MISCREG_SPSR_HYP, saved_cpsr);
588        setSyndrome(tc, MISCREG_HSR);
589        break;
590      default:
591        panic("unknown Mode\n");
592    }
593
594    Addr newPc = getVector(tc);
595    DPRINTF(Faults, "Invoking Fault:%s cpsr:%#x PC:%#x lr:%#x newVec: %#x\n",
596            name(), cpsr, curPc, tc->readIntReg(INTREG_LR), newPc);
597    PCState pc(newPc);
598    pc.thumb(cpsr.t);
599    pc.nextThumb(pc.thumb());
600    pc.jazelle(cpsr.j);
601    pc.nextJazelle(pc.jazelle());
602    pc.aarch64(!cpsr.width);
603    pc.nextAArch64(!cpsr.width);
604    pc.illegalExec(false);
605    tc->pcState(pc);
606}
607
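// AArch64 exception entry: save the current PSTATE into SPSR_ELx and the
// preferred return address into ELR_ELx for the target EL, switch the CPSR to
// that EL with all DAIF exceptions masked, jump to the vector fetched from
// VBAR_ELx plus offset64(), and record the syndrome in ESR_ELx for
// synchronous exceptions.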
608void
609ArmFault::invoke64(ThreadContext *tc, const StaticInstPtr &inst)
610{
611    // Determine actual misc. register indices for ELR_ELx and SPSR_ELx
612    MiscRegIndex elr_idx, spsr_idx;
613    switch (toEL) {
614      case EL1:
615        elr_idx = MISCREG_ELR_EL1;
616        spsr_idx = MISCREG_SPSR_EL1;
617        break;
618      case EL2:
619        assert(ArmSystem::haveVirtualization(tc));
620        elr_idx = MISCREG_ELR_EL2;
621        spsr_idx = MISCREG_SPSR_EL2;
622        break;
623      case EL3:
624        assert(ArmSystem::haveSecurity(tc));
625        elr_idx = MISCREG_ELR_EL3;
626        spsr_idx = MISCREG_SPSR_EL3;
627        break;
628      default:
629        panic("Invalid target exception level");
630        break;
631    }
632
633    // Save process state into SPSR_ELx
634    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
635    CPSR spsr = cpsr;
636    spsr.nz = tc->readCCReg(CCREG_NZ);
637    spsr.c = tc->readCCReg(CCREG_C);
638    spsr.v = tc->readCCReg(CCREG_V);
639    if (from64) {
640        // Force some bitfields to 0
641        spsr.q = 0;
642        spsr.it1 = 0;
643        spsr.j = 0;
644        spsr.res0_23_22 = 0;
645        spsr.ge = 0;
646        spsr.it2 = 0;
647        spsr.t = 0;
648    } else {
649        spsr.ge = tc->readCCReg(CCREG_GE);
650        ITSTATE it = tc->pcState().itstate();
651        spsr.it2 = it.top6;
652        spsr.it1 = it.bottom2;
653        // Force some bitfields to 0
654        spsr.res0_23_22 = 0;
655        spsr.ss = 0;
656    }
657    tc->setMiscReg(spsr_idx, spsr);
658
659    // Save preferred return address into ELR_ELx
660    Addr curr_pc = tc->pcState().pc();
661    Addr ret_addr = curr_pc;
662    if (from64)
663        ret_addr += armPcElrOffset();
664    else
665        ret_addr += spsr.t ? thumbPcElrOffset() : armPcElrOffset();
666    tc->setMiscReg(elr_idx, ret_addr);
667
668    Addr vec_address = getVector64(tc);
669
670    // Update process state
671    OperatingMode64 mode = 0;
672    mode.spX = 1;
673    mode.el = toEL;
674    mode.width = 0;
675    cpsr.mode = mode;
676    cpsr.daif = 0xf;
677    cpsr.il = 0;
678    cpsr.ss = 0;
679    tc->setMiscReg(MISCREG_CPSR, cpsr);
680
681    // Set PC to start of exception handler
682    Addr new_pc = purifyTaggedAddr(vec_address, tc, toEL);
683    DPRINTF(Faults, "Invoking Fault (AArch64 target EL):%s cpsr:%#x PC:%#x "
684            "elr:%#x newVec: %#x\n", name(), cpsr, curr_pc, ret_addr, new_pc);
685    PCState pc(new_pc);
686    pc.aarch64(!cpsr.width);
687    pc.nextAArch64(!cpsr.width);
688    pc.illegalExec(false);
689    tc->pcState(pc);
690
691    // If we have a valid instruction then use it to annotate this fault with
692    // extra information. This is used to generate the correct fault syndrome
693    // information
694    if (inst)
695        static_cast<ArmStaticInst *>(inst.get())->annotateFault(this);
696    // Save exception syndrome
697    if ((nextMode() != MODE_IRQ) && (nextMode() != MODE_FIQ))
698        setSyndrome(tc, getSyndromeReg64());
699}
700
701void
702Reset::invoke(ThreadContext *tc, const StaticInstPtr &inst)
703{
704    if (FullSystem) {
705        tc->getCpuPtr()->clearInterrupts(tc->threadId());
706        tc->clearArchRegs();
707    }
708    if (!ArmSystem::highestELIs64(tc)) {
709        ArmFault::invoke(tc, inst);
710        tc->setMiscReg(MISCREG_VMPIDR,
711                       getMPIDR(dynamic_cast<ArmSystem*>(tc->getSystemPtr()), tc));
712
713        // Unless we have SMC code to get us there, boot in HYP!
714        if (ArmSystem::haveVirtualization(tc) &&
715            !ArmSystem::haveSecurity(tc)) {
716            CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
717            cpsr.mode = MODE_HYP;
718            tc->setMiscReg(MISCREG_CPSR, cpsr);
719        }
720    } else {
721        // Advance the PC to the IMPLEMENTATION DEFINED reset value
722        PCState pc = ArmSystem::resetAddr64(tc);
723        pc.aarch64(true);
724        pc.nextAArch64(true);
725        tc->pcState(pc);
726    }
727}
728
729void
730UndefinedInstruction::invoke(ThreadContext *tc, const StaticInstPtr &inst)
731{
732    if (FullSystem) {
733        ArmFault::invoke(tc, inst);
734        return;
735    }
736
737    // If the mnemonic isn't defined this has to be an unknown instruction.
738    assert(unknown || mnemonic != NULL);
739    if (disabled) {
740        panic("Attempted to execute disabled instruction "
741                "'%s' (inst 0x%08x)", mnemonic, machInst);
742    } else if (unknown) {
743        panic("Attempted to execute unknown instruction (inst 0x%08x)",
744              machInst);
745    } else {
746        panic("Attempted to execute unimplemented instruction "
747                "'%s' (inst 0x%08x)", mnemonic, machInst);
748    }
749}
750
751bool
752UndefinedInstruction::routeToHyp(ThreadContext *tc) const
753{
754    bool toHyp;
755
756    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
757    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
758    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
759
760    // if in Hyp mode then stay in Hyp mode
761    toHyp  = scr.ns && (cpsr.mode == MODE_HYP);
762    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
763    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.mode == MODE_USER);
764    return toHyp;
765}
766
767uint32_t
768UndefinedInstruction::iss() const
769{
770
771    // If UndefinedInstruction is routed to the hypervisor, the ISS field is 0.
772    if (hypRouted) {
773        return 0;
774    }
775
776    if (overrideEc == EC_INVALID)
777        return issRaw;
778
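    // Otherwise re-encode the trapped system/coprocessor register access:
    // pack op0, op1, op2, CRn, CRm, the target register Rt and the read/write
    // direction bit into the ISS layout used for register access traps.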
779    uint32_t new_iss = 0;
780    uint32_t op0, op1, op2, CRn, CRm, Rt, dir;
781
782    dir = bits(machInst, 21, 21);
783    op0 = bits(machInst, 20, 19);
784    op1 = bits(machInst, 18, 16);
785    CRn = bits(machInst, 15, 12);
786    CRm = bits(machInst, 11, 8);
787    op2 = bits(machInst, 7, 5);
788    Rt = bits(machInst, 4, 0);
789
790    new_iss = op0 << 20 | op2 << 17 | op1 << 14 | CRn << 10 |
791            Rt << 5 | CRm << 1 | dir;
792
793    return new_iss;
794}
795
796void
797SupervisorCall::invoke(ThreadContext *tc, const StaticInstPtr &inst)
798{
799    if (FullSystem) {
800        ArmFault::invoke(tc, inst);
801        return;
802    }
803
804    // As of now, there isn't a 32 bit thumb version of this instruction.
805    assert(!machInst.bigThumb);
806    uint32_t callNum;
807    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
808    OperatingMode mode = (OperatingMode)(uint8_t)cpsr.mode;
809    if (opModeIs64(mode))
810        callNum = tc->readIntReg(INTREG_X8);
811    else
812        callNum = tc->readIntReg(INTREG_R7);
813    Fault fault;
814    tc->syscall(callNum, &fault);
815
816    // Advance the PC since that won't happen automatically.
817    PCState pc = tc->pcState();
818    assert(inst);
819    inst->advancePC(pc);
820    tc->pcState(pc);
821}
822
823bool
824SupervisorCall::routeToHyp(ThreadContext *tc) const
825{
826    bool toHyp;
827
828    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
829    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
830    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
831
832    // if in Hyp mode then stay in Hyp mode
833    toHyp  = scr.ns && (cpsr.mode == MODE_HYP);
834    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
835    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.mode == MODE_USER);
836    return toHyp;
837}
838
839ExceptionClass
840SupervisorCall::ec(ThreadContext *tc) const
841{
842    return (overrideEc != EC_INVALID) ? overrideEc :
843        (from64 ? EC_SVC_64 : vals.ec);
844}
845
846uint32_t
847SupervisorCall::iss() const
848{
849    // Even if we have a 24-bit immediate from an ARM32 instruction, we only
850    // use the bottom 16 bits for the ISS value (this doesn't hurt AArch64 SVC).
851    return issRaw & 0xFFFF;
852}
853
854uint32_t
855SecureMonitorCall::iss() const
856{
857    if (from64)
858        return bits(machInst, 20, 5);
859    return 0;
860}
861
862ExceptionClass
863UndefinedInstruction::ec(ThreadContext *tc) const
864{
865    // If UndefinedInstruction is routed to hypervisor,
866    // HSR.EC field is 0.
867    if (hypRouted)
868        return EC_UNKNOWN;
869    else
870        return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
871}
872
873
874HypervisorCall::HypervisorCall(ExtMachInst _machInst, uint32_t _imm) :
875        ArmFaultVals<HypervisorCall>(_machInst, _imm)
876{}
877
878ExceptionClass
879HypervisorCall::ec(ThreadContext *tc) const
880{
881    return from64 ? EC_HVC_64 : vals.ec;
882}
883
884ExceptionClass
885HypervisorTrap::ec(ThreadContext *tc) const
886{
887    return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
888}
889
890template<class T>
891FaultOffset
892ArmFaultVals<T>::offset(ThreadContext *tc)
893{
894    bool isHypTrap = false;
895
896    // Normally we just use the exception vector from the table at the top of
897    // this file. However, if this exception has caused a transition to Hyp
898    // mode, and it's an exception type that would only do so if it had been
899    // trapped, then we use the hyp trap vector instead of the normal vector.
900    if (vals.hypTrappable) {
901        CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
902        if (cpsr.mode == MODE_HYP) {
903            CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
904            isHypTrap = spsr.mode != MODE_HYP;
905        }
906    }
907    return isHypTrap ? 0x14 : vals.offset;
908}
909
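// Select the AArch64 vector offset added to VBAR_ELx: exceptions taken to the
// same EL use the SP_EL0 (…T mode) or SP_ELx (…H mode) vectors depending on
// the source stack pointer selection, while exceptions from a lower EL use the
// AArch64 or AArch32 vectors depending on the register width of that EL.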
910template<class T>
911FaultOffset
912ArmFaultVals<T>::offset64(ThreadContext *tc)
913{
914    if (toEL == fromEL) {
915        if (opModeIsT(fromMode))
916            return vals.currELTOffset;
917        return vals.currELHOffset;
918    } else {
919        bool lower_32 = false;
920        if (toEL == EL3) {
921            if (!inSecureState(tc) && ArmSystem::haveEL(tc, EL2))
922                lower_32 = ELIs32(tc, EL2);
923            else
924                lower_32 = ELIs32(tc, EL1);
925        } else {
926            lower_32 = ELIs32(tc, static_cast<ExceptionLevel>(toEL - 1));
927        }
928
929        if (lower_32)
930            return vals.lowerEL32Offset;
931        return vals.lowerEL64Offset;
932    }
933}
934
935// void
936// SupervisorCall::setSyndrome64(ThreadContext *tc, MiscRegIndex esr_idx)
937// {
938//     ESR esr = 0;
939//     esr.ec = machInst.aarch64 ? SvcAArch64 : SvcAArch32;
940//     esr.il = !machInst.thumb;
941//     if (machInst.aarch64)
942//         esr.imm16 = bits(machInst.instBits, 20, 5);
943//     else if (machInst.thumb)
944//         esr.imm16 = bits(machInst.instBits, 7, 0);
945//     else
946//         esr.imm16 = bits(machInst.instBits, 15, 0);
947//     tc->setMiscReg(esr_idx, esr);
948// }
949
950void
951SecureMonitorCall::invoke(ThreadContext *tc, const StaticInstPtr &inst)
952{
953    if (FullSystem) {
954        ArmFault::invoke(tc, inst);
955        return;
956    }
957}
958
959ExceptionClass
960SecureMonitorCall::ec(ThreadContext *tc) const
961{
962    return (from64 ? EC_SMC_64 : vals.ec);
963}
964
965bool
966SupervisorTrap::routeToHyp(ThreadContext *tc) const
967{
968    bool toHyp = false;
969
970    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
971    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
972    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
973
974    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
975    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.el == EL0);
976    return toHyp;
977}
978
979uint32_t
980SupervisorTrap::iss() const
981{
982    // If SupervisorTrap is routed to the hypervisor, the ISS field is 0.
983    if (hypRouted) {
984        return 0;
985    }
986    return issRaw;
987}
988
989ExceptionClass
990SupervisorTrap::ec(ThreadContext *tc) const
991{
992    if (hypRouted)
993        return EC_UNKNOWN;
994    else
995        return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
996}
997
998ExceptionClass
999SecureMonitorTrap::ec(ThreadContext *tc) const
1000{
1001    return (overrideEc != EC_INVALID) ? overrideEc :
1002        (from64 ? EC_SMC_64 : vals.ec);
1003}
1004
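// Abort entry: determine the translation method (short-descriptor VMSA or
// long-descriptor LPAE) used to report the fault, perform the generic fault
// entry, then record the fault address and status: FSR/FAR (or HFAR/HPFAR for
// Hyp and stage-2 faults) when staying in AArch32, or FAR_ELx/HPFAR_EL2 when
// the target is AArch64.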
1005template<class T>
1006void
1007AbortFault<T>::invoke(ThreadContext *tc, const StaticInstPtr &inst)
1008{
1009    if (tranMethod == ArmFault::UnknownTran) {
1010        tranMethod = longDescFormatInUse(tc) ? ArmFault::LpaeTran
1011                                             : ArmFault::VmsaTran;
1012
1013        if ((tranMethod == ArmFault::VmsaTran) && this->routeToMonitor(tc)) {
1014            // See ARM ARM B3-1416
1015            bool override_LPAE = false;
1016            TTBCR ttbcr_s = tc->readMiscReg(MISCREG_TTBCR_S);
1017            TTBCR M5_VAR_USED ttbcr_ns = tc->readMiscReg(MISCREG_TTBCR_NS);
1018            if (ttbcr_s.eae) {
1019                override_LPAE = true;
1020            } else {
1021                // Unimplemented code option, not seen in testing.  May need
1022                // extension according to the manual section referenced above.
1023                DPRINTF(Faults, "Warning: Incomplete translation method "
1024                        "override detected.\n");
1025            }
1026            if (override_LPAE)
1027                tranMethod = ArmFault::LpaeTran;
1028        }
1029    }
1030
1031    if (source == ArmFault::AsynchronousExternalAbort) {
1032        tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_ABT, 0);
1033    }
1034    // Get effective fault source encoding
1035    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
1036
1037    // source must be determined BEFORE invoking generic routines which will
1038    // try to set hsr etc. and are based upon source!
1039    ArmFaultVals<T>::invoke(tc, inst);
1040
1041    if (!this->to64) {  // AArch32
1042        FSR  fsr  = getFsr(tc);
1043        if (cpsr.mode == MODE_HYP) {
1044            tc->setMiscReg(T::HFarIndex, faultAddr);
1045        } else if (stage2) {
1046            tc->setMiscReg(MISCREG_HPFAR, (faultAddr >> 8) & ~0xf);
1047            tc->setMiscReg(T::HFarIndex,  OVAddr);
1048        } else {
1049            tc->setMiscReg(T::FsrIndex, fsr);
1050            tc->setMiscReg(T::FarIndex, faultAddr);
1051        }
1052        DPRINTF(Faults, "Abort Fault source=%#x fsr=%#x faultAddr=%#x "\
1053                "tranMethod=%#x\n", source, fsr, faultAddr, tranMethod);
1054    } else {  // AArch64
1055        // Set the FAR register.  Nothing else to do if we are in AArch64 state
1056        // because the syndrome register has already been set inside invoke64()
1057        if (stage2) {
1058            // stage 2 fault, set HPFAR_EL2 to the faulting IPA
1059            // and FAR_EL2 to the Original VA
1060            tc->setMiscReg(AbortFault<T>::getFaultAddrReg64(), OVAddr);
1061            tc->setMiscReg(MISCREG_HPFAR_EL2, bits(faultAddr, 47, 12) << 4);
1062
1063            DPRINTF(Faults, "Abort Fault (Stage 2) VA: 0x%x IPA: 0x%x\n",
1064                    OVAddr, faultAddr);
1065        } else {
1066            tc->setMiscReg(AbortFault<T>::getFaultAddrReg64(), faultAddr);
1067        }
1068    }
1069}
1070
1071template<class T>
1072void
1073AbortFault<T>::setSyndrome(ThreadContext *tc, MiscRegIndex syndrome_reg)
1074{
1075    srcEncoded = getFaultStatusCode(tc);
1076    if (srcEncoded == ArmFault::FaultSourceInvalid) {
1077        panic("Invalid fault source\n");
1078    }
1079    ArmFault::setSyndrome(tc, syndrome_reg);
1080}
1081
1082template<class T>
1083uint8_t
1084AbortFault<T>::getFaultStatusCode(ThreadContext *tc) const
1085{
1086
1087    panic_if(!this->faultUpdated,
1088             "Trying to use un-updated ArmFault internal variables\n");
1089
1090    uint8_t fsc = 0;
1091
1092    if (!this->to64) {
1093        // AArch32
1094        assert(tranMethod != ArmFault::UnknownTran);
1095        if (tranMethod == ArmFault::LpaeTran) {
1096            fsc = ArmFault::longDescFaultSources[source];
1097        } else {
1098            fsc = ArmFault::shortDescFaultSources[source];
1099        }
1100    } else {
1101        // AArch64
1102        fsc = ArmFault::aarch64FaultSources[source];
1103    }
1104
1105    return fsc;
1106}
1107
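// Build the AArch32 FSR value for this abort: in the LPAE format the 6-bit
// status code goes in STATUS with the LPAE bit set, while in the
// short-descriptor format it is split across FS[3:0]/FS[4] together with the
// domain; WnR records whether the access was a write.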
1108template<class T>
1109FSR
1110AbortFault<T>::getFsr(ThreadContext *tc) const
1111{
1112    FSR fsr = 0;
1113
1114    auto fsc = getFaultStatusCode(tc);
1115
1116    // AArch32
1117    assert(tranMethod != ArmFault::UnknownTran);
1118    if (tranMethod == ArmFault::LpaeTran) {
1119        fsr.status = fsc;
1120        fsr.lpae   = 1;
1121    } else {
1122        fsr.fsLow  = bits(fsc, 3, 0);
1123        fsr.fsHigh = bits(fsc, 4);
1124        fsr.domain = static_cast<uint8_t>(domain);
1125    }
1126
1127    fsr.wnr = (write ? 1 : 0);
1128    fsr.ext = 0;
1129
1130    return fsr;
1131}
1132
1133template<class T>
1134bool
1135AbortFault<T>::abortDisable(ThreadContext *tc)
1136{
1137    if (ArmSystem::haveSecurity(tc)) {
1138        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1139        return (!scr.ns || scr.aw);
1140    }
1141    return true;
1142}
1143
1144template<class T>
1145void
1146AbortFault<T>::annotate(ArmFault::AnnotationIDs id, uint64_t val)
1147{
1148    switch (id)
1149    {
1150      case ArmFault::S1PTW:
1151        s1ptw = val;
1152        break;
1153      case ArmFault::OVA:
1154        OVAddr = val;
1155        break;
1156
1157      // Just ignore unknown IDs
1158      default:
1159        break;
1160    }
1161}
1162
1163template<class T>
1164uint32_t
1165AbortFault<T>::iss() const
1166{
1167    uint32_t val;
1168
1169    val  = srcEncoded & 0x3F;
1170    val |= write << 6;
1171    val |= s1ptw << 7;
1172    return (val);
1173}
1174
1175template<class T>
1176bool
1177AbortFault<T>::isMMUFault() const
1178{
1179    // NOTE: Not relying on LL information being aligned to lowest bits here
1180    return
1181         (source == ArmFault::AlignmentFault)     ||
1182        ((source >= ArmFault::TranslationLL) &&
1183         (source <  ArmFault::TranslationLL + 4)) ||
1184        ((source >= ArmFault::AccessFlagLL) &&
1185         (source <  ArmFault::AccessFlagLL + 4))  ||
1186        ((source >= ArmFault::DomainLL) &&
1187         (source <  ArmFault::DomainLL + 4))      ||
1188        ((source >= ArmFault::PermissionLL) &&
1189         (source <  ArmFault::PermissionLL + 4));
1190}
1191
1192ExceptionClass
1193PrefetchAbort::ec(ThreadContext *tc) const
1194{
1195    if (to64) {
1196        // AArch64
1197        if (toEL == fromEL)
1198            return EC_PREFETCH_ABORT_CURR_EL;
1199        else
1200            return EC_PREFETCH_ABORT_LOWER_EL;
1201    } else {
1202        // AArch32
1203        // Abort faults have different EC codes depending on whether
1204        // the fault originated within HYP mode, or not. So override
1205        // the method and add the extra adjustment of the EC value.
1206
1207        ExceptionClass ec = ArmFaultVals<PrefetchAbort>::vals.ec;
1208
1209        CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
1210        if (spsr.mode == MODE_HYP) {
1211            ec = ((ExceptionClass) (((uint32_t) ec) + 1));
1212        }
1213        return ec;
1214    }
1215}
1216
1217bool
1218PrefetchAbort::routeToMonitor(ThreadContext *tc) const
1219{
1220    SCR scr = 0;
1221    if (from64)
1222        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1223    else
1224        scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1225
1226    return scr.ea && !isMMUFault();
1227}
1228
1229bool
1230PrefetchAbort::routeToHyp(ThreadContext *tc) const
1231{
1232    bool toHyp;
1233
1234    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
1235    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
1236    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
1237    HDCR hdcr = tc->readMiscRegNoEffect(MISCREG_HDCR);
1238
1239    // if in Hyp mode then stay in Hyp mode
1240    toHyp  = scr.ns && (cpsr.mode == MODE_HYP);
1241    // otherwise, check whether to take to Hyp mode through Hyp Trap vector
1242    toHyp |= (stage2 ||
1243                ( (source ==               DebugEvent) && hdcr.tde && (cpsr.mode !=  MODE_HYP)) ||
1244                ( (source == SynchronousExternalAbort) && hcr.tge  && (cpsr.mode == MODE_USER))
1245             ) && !inSecureState(tc);
1246    return toHyp;
1247}
1248
1249ExceptionClass
1250DataAbort::ec(ThreadContext *tc) const
1251{
1252    if (to64) {
1253        // AArch64
1254        if (source == ArmFault::AsynchronousExternalAbort) {
1255            panic("Asynchronous External Abort should be handled with "
1256                    "SystemErrors (SErrors)!");
1257        }
1258        if (toEL == fromEL)
1259            return EC_DATA_ABORT_CURR_EL;
1260        else
1261            return EC_DATA_ABORT_LOWER_EL;
1262    } else {
1263        // AArch32
1264        // Abort faults have different EC codes depending on whether
1265        // the fault originated within HYP mode, or not. So override
1266        // the method and add the extra adjustment of the EC value.
1267
1268        ExceptionClass ec = ArmFaultVals<DataAbort>::vals.ec;
1269
1270        CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
1271        if (spsr.mode == MODE_HYP) {
1272            ec = ((ExceptionClass) (((uint32_t) ec) + 1));
1273        }
1274        return ec;
1275    }
1276}
1277
1278bool
1279DataAbort::routeToMonitor(ThreadContext *tc) const
1280{
1281    SCR scr = 0;
1282    if (from64)
1283        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1284    else
1285        scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1286
1287    return scr.ea && !isMMUFault();
1288}
1289
1290bool
1291DataAbort::routeToHyp(ThreadContext *tc) const
1292{
1293    bool toHyp;
1294
1295    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
1296    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
1297    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
1298    HDCR hdcr = tc->readMiscRegNoEffect(MISCREG_HDCR);
1299
1300    // if in Hyp mode then stay in Hyp mode
1301    toHyp  = scr.ns && (cpsr.mode == MODE_HYP);
1302    // otherwise, check whether to take to Hyp mode through Hyp Trap vector
1303    toHyp |= (stage2 ||
1304                ( (cpsr.mode != MODE_HYP) && ( ((source == AsynchronousExternalAbort) && hcr.amo) ||
1305                                               ((source == DebugEvent) && hdcr.tde) )
1306                ) ||
1307                ( (cpsr.mode == MODE_USER) && hcr.tge &&
1308                  ((source == AlignmentFault)            ||
1309                   (source == SynchronousExternalAbort))
1310                )
1311             ) && !inSecureState(tc);
1312    return toHyp;
1313}
1314
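// Data abort ISS: on top of the generic abort ISS ([7] S1PTW, [6] WnR,
// [5:0] fault status code) the fields below add [24] ISV, [23:22] SAS,
// [21] SSE, [20:16] SRT, [15] SF and [14] AR, but only when the syndrome is
// not for a stage 1 walk and, for AArch64, only when taken to EL2.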
1315uint32_t
1316DataAbort::iss() const
1317{
1318    uint32_t val;
1319
1320    // Add on the data abort specific fields to the generic abort ISS value
1321    val  = AbortFault<DataAbort>::iss();
1322    // ISS is valid if not caused by a stage 1 page table walk, and when taken
1323    // to AArch64 only when directed to EL2
1324    if (!s1ptw && (!to64 || toEL == EL2)) {
1325        val |= isv << 24;
1326        if (isv) {
1327            val |= sas << 22;
1328            val |= sse << 21;
1329            val |= srt << 16;
1330            // AArch64 only. These assignments are safe on AArch32 as well
1331            // because these vars are initialized to false
1332            val |= sf << 15;
1333            val |= ar << 14;
1334        }
1335    }
1336    return (val);
1337}
1338
1339void
1340DataAbort::annotate(AnnotationIDs id, uint64_t val)
1341{
1342    AbortFault<DataAbort>::annotate(id, val);
1343    switch (id)
1344    {
1345      case SAS:
1346        isv = true;
1347        sas = val;
1348        break;
1349      case SSE:
1350        isv = true;
1351        sse = val;
1352        break;
1353      case SRT:
1354        isv = true;
1355        srt = val;
1356        break;
1357      case SF:
1358        isv = true;
1359        sf  = val;
1360        break;
1361      case AR:
1362        isv = true;
1363        ar  = val;
1364        break;
1365      // Just ignore unknown IDs
1366      default:
1367        break;
1368    }
1369}
1370
1371void
1372VirtualDataAbort::invoke(ThreadContext *tc, const StaticInstPtr &inst)
1373{
1374    AbortFault<VirtualDataAbort>::invoke(tc, inst);
1375    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
1376    hcr.va = 0;
1377    tc->setMiscRegNoEffect(MISCREG_HCR, hcr);
1378}
1379
1380bool
1381Interrupt::routeToMonitor(ThreadContext *tc) const
1382{
1383    assert(ArmSystem::haveSecurity(tc));
1384    SCR scr = 0;
1385    if (from64)
1386        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1387    else
1388        scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1389    return scr.irq;
1390}
1391
1392bool
1393Interrupt::routeToHyp(ThreadContext *tc) const
1394{
1395    bool toHyp;
1396
1397    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
1398    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
1399    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
1400    // Determine whether IRQs are routed to Hyp mode.
1401    toHyp = (!scr.irq && hcr.imo && !inSecureState(tc)) ||
1402            (cpsr.mode == MODE_HYP);
1403    return toHyp;
1404}
1405
1406bool
1407Interrupt::abortDisable(ThreadContext *tc)
1408{
1409    if (ArmSystem::haveSecurity(tc)) {
1410        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1411        return (!scr.ns || scr.aw);
1412    }
1413    return true;
1414}
1415
1416VirtualInterrupt::VirtualInterrupt()
1417{}
1418
1419bool
1420FastInterrupt::routeToMonitor(ThreadContext *tc) const
1421{
1422    assert(ArmSystem::haveSecurity(tc));
1423    SCR scr = 0;
1424    if (from64)
1425        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1426    else
1427        scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1428    return scr.fiq;
1429}
1430
1431bool
1432FastInterrupt::routeToHyp(ThreadContext *tc) const
1433{
1434    bool toHyp;
1435
1436    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
1437    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
1438    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
1439    // Determine whether FIQs are routed to Hyp mode.
1440    toHyp = (!scr.fiq && hcr.fmo && !inSecureState(tc)) ||
1441            (cpsr.mode == MODE_HYP);
1442    return toHyp;
1443}
1444
1445bool
1446FastInterrupt::abortDisable(ThreadContext *tc)
1447{
1448    if (ArmSystem::haveSecurity(tc)) {
1449        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1450        return (!scr.ns || scr.aw);
1451    }
1452    return true;
1453}
1454
1455bool
1456FastInterrupt::fiqDisable(ThreadContext *tc)
1457{
1458    if (ArmSystem::haveVirtualization(tc)) {
1459        return true;
1460    } else if (ArmSystem::haveSecurity(tc)) {
1461        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1462        return (!scr.ns || scr.fw);
1463    }
1464    return true;
1465}
1466
1467VirtualFastInterrupt::VirtualFastInterrupt()
1468{}
1469
1470void
1471PCAlignmentFault::invoke(ThreadContext *tc, const StaticInstPtr &inst)
1472{
1473    ArmFaultVals<PCAlignmentFault>::invoke(tc, inst);
1474    assert(from64);
1475    // Set the FAR
1476    tc->setMiscReg(getFaultAddrReg64(), faultPC);
1477}
1478
1479bool
1480PCAlignmentFault::routeToHyp(ThreadContext *tc) const
1481{
1482    bool toHyp = false;
1483
1484    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1485    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
1486    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
1487
1488    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
1489    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.el == EL0);
1490    return toHyp;
1491}
1492
1493SPAlignmentFault::SPAlignmentFault()
1494{}
1495
1496SystemError::SystemError()
1497{}
1498
1499void
1500SystemError::invoke(ThreadContext *tc, const StaticInstPtr &inst)
1501{
1502    tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_ABT, 0);
1503    ArmFault::invoke(tc, inst);
1504}
1505
1506bool
1507SystemError::routeToMonitor(ThreadContext *tc) const
1508{
1509    assert(ArmSystem::haveSecurity(tc));
1510    assert(from64);
1511    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1512    return scr.ea;
1513}
1514
1515bool
1516SystemError::routeToHyp(ThreadContext *tc) const
1517{
1518    bool toHyp;
1519    assert(from64);
1520
1521    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1522    HCR hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
1523
1524    toHyp = (!scr.ea && hcr.amo && !inSecureState(tc)) ||
1525            (!scr.ea && !scr.rw && !hcr.amo && !inSecureState(tc));
1526    return toHyp;
1527}
1528
1529
1530SoftwareBreakpoint::SoftwareBreakpoint(ExtMachInst _mach_inst, uint32_t _iss)
1531    : ArmFaultVals<SoftwareBreakpoint>(_mach_inst, _iss)
1532{}
1533
1534bool
1535SoftwareBreakpoint::routeToHyp(ThreadContext *tc) const
1536{
1537    assert(from64);
1538
1539    const bool have_el2 = ArmSystem::haveVirtualization(tc);
1540
1541    const HCR hcr  = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
1542    const HDCR mdcr  = tc->readMiscRegNoEffect(MISCREG_MDCR_EL2);
1543
1544    return have_el2 && !inSecureState(tc) && fromEL <= EL1 &&
1545        (hcr.tge || mdcr.tde);
1546}
1547
1548ExceptionClass
1549SoftwareBreakpoint::ec(ThreadContext *tc) const
1550{
1551    return from64 ? EC_SOFTWARE_BREAKPOINT_64 : vals.ec;
1552}
1553
1554void
1555ArmSev::invoke(ThreadContext *tc, const StaticInstPtr &inst) {
1556    DPRINTF(Faults, "Invoking ArmSev Fault\n");
1557    if (!FullSystem)
1558        return;
1559
1560    // Set sev_mailbox to 1, clear the pending interrupt from remote
1561    // SEV execution and let pipeline continue as pcState is still
1562    // valid.
1563    tc->setMiscReg(MISCREG_SEV_MAILBOX, 1);
1564    tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_SEV, 0);
1565}
1566
1567// Instantiate all the templates to make the linker happy
1568template class ArmFaultVals<Reset>;
1569template class ArmFaultVals<UndefinedInstruction>;
1570template class ArmFaultVals<SupervisorCall>;
1571template class ArmFaultVals<SecureMonitorCall>;
1572template class ArmFaultVals<HypervisorCall>;
1573template class ArmFaultVals<PrefetchAbort>;
1574template class ArmFaultVals<DataAbort>;
1575template class ArmFaultVals<VirtualDataAbort>;
1576template class ArmFaultVals<HypervisorTrap>;
1577template class ArmFaultVals<Interrupt>;
1578template class ArmFaultVals<VirtualInterrupt>;
1579template class ArmFaultVals<FastInterrupt>;
1580template class ArmFaultVals<VirtualFastInterrupt>;
1581template class ArmFaultVals<SupervisorTrap>;
1582template class ArmFaultVals<SecureMonitorTrap>;
1583template class ArmFaultVals<PCAlignmentFault>;
1584template class ArmFaultVals<SPAlignmentFault>;
1585template class ArmFaultVals<SystemError>;
1586template class ArmFaultVals<SoftwareBreakpoint>;
1587template class ArmFaultVals<ArmSev>;
1588template class AbortFault<PrefetchAbort>;
1589template class AbortFault<DataAbort>;
1590template class AbortFault<VirtualDataAbort>;
1591
1592
1593IllegalInstSetStateFault::IllegalInstSetStateFault()
1594{}
1595
1596
1597} // namespace ArmISA
1598