faults.cc revision 12764:5f812c4e0701
1/*
2 * Copyright (c) 2010, 2012-2014, 2016-2018 ARM Limited
3 * All rights reserved
4 *
5 * The license below extends only to copyright in the software and shall
6 * not be construed as granting a license to any other intellectual
7 * property including but not limited to intellectual property relating
8 * to a hardware implementation of the functionality of the software
9 * licensed hereunder.  You may use the software subject to the license
10 * terms below provided that you ensure that this notice is replicated
11 * unmodified and in its entirety in all distributions of the software,
12 * modified or unmodified, in source code or in binary form.
13 *
14 * Copyright (c) 2003-2005 The Regents of The University of Michigan
15 * Copyright (c) 2007-2008 The Florida State University
16 * All rights reserved.
17 *
18 * Redistribution and use in source and binary forms, with or without
19 * modification, are permitted provided that the following conditions are
20 * met: redistributions of source code must retain the above copyright
21 * notice, this list of conditions and the following disclaimer;
22 * redistributions in binary form must reproduce the above copyright
23 * notice, this list of conditions and the following disclaimer in the
24 * documentation and/or other materials provided with the distribution;
25 * neither the name of the copyright holders nor the names of its
26 * contributors may be used to endorse or promote products derived from
27 * this software without specific prior written permission.
28 *
29 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
30 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
31 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
32 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
33 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
34 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
35 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
36 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
37 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
38 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
39 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
40 *
41 * Authors: Ali Saidi
42 *          Gabe Black
43 *          Giacomo Gabrielli
44 *          Thomas Grocutt
45 */
46
47#include "arch/arm/faults.hh"
48
49#include "arch/arm/insts/static_inst.hh"
50#include "arch/arm/system.hh"
51#include "arch/arm/utility.hh"
52#include "base/compiler.hh"
53#include "base/trace.hh"
54#include "cpu/base.hh"
55#include "cpu/thread_context.hh"
56#include "debug/Faults.hh"
57#include "sim/full_system.hh"
58
59namespace ArmISA
60{
61
62uint8_t ArmFault::shortDescFaultSources[] = {
63    0x01,  // AlignmentFault
64    0x04,  // InstructionCacheMaintenance
65    0xff,  // SynchExtAbtOnTranslTableWalkL0 (INVALID)
66    0x0c,  // SynchExtAbtOnTranslTableWalkL1
67    0x0e,  // SynchExtAbtOnTranslTableWalkL2
68    0xff,  // SynchExtAbtOnTranslTableWalkL3 (INVALID)
69    0xff,  // SynchPtyErrOnTranslTableWalkL0 (INVALID)
70    0x1c,  // SynchPtyErrOnTranslTableWalkL1
71    0x1e,  // SynchPtyErrOnTranslTableWalkL2
72    0xff,  // SynchPtyErrOnTranslTableWalkL3 (INVALID)
73    0xff,  // TranslationL0 (INVALID)
74    0x05,  // TranslationL1
75    0x07,  // TranslationL2
76    0xff,  // TranslationL3 (INVALID)
77    0xff,  // AccessFlagL0 (INVALID)
78    0x03,  // AccessFlagL1
79    0x06,  // AccessFlagL2
80    0xff,  // AccessFlagL3 (INVALID)
81    0xff,  // DomainL0 (INVALID)
82    0x09,  // DomainL1
83    0x0b,  // DomainL2
84    0xff,  // DomainL3 (INVALID)
85    0xff,  // PermissionL0 (INVALID)
86    0x0d,  // PermissionL1
87    0x0f,  // PermissionL2
88    0xff,  // PermissionL3 (INVALID)
89    0x02,  // DebugEvent
90    0x08,  // SynchronousExternalAbort
91    0x10,  // TLBConflictAbort
92    0x19,  // SynchPtyErrOnMemoryAccess
93    0x16,  // AsynchronousExternalAbort
94    0x18,  // AsynchPtyErrOnMemoryAccess
95    0xff,  // AddressSizeL0 (INVALID)
96    0xff,  // AddressSizeL1 (INVALID)
97    0xff,  // AddressSizeL2 (INVALID)
98    0xff,  // AddressSizeL3 (INVALID)
99    0x40,  // PrefetchTLBMiss
100    0x80   // PrefetchUncacheable
101};
102
103static_assert(sizeof(ArmFault::shortDescFaultSources) ==
104              ArmFault::NumFaultSources,
105              "Invalid size of ArmFault::shortDescFaultSources[]");
106
107uint8_t ArmFault::longDescFaultSources[] = {
108    0x21,  // AlignmentFault
109    0xff,  // InstructionCacheMaintenance (INVALID)
110    0xff,  // SynchExtAbtOnTranslTableWalkL0 (INVALID)
111    0x15,  // SynchExtAbtOnTranslTableWalkL1
112    0x16,  // SynchExtAbtOnTranslTableWalkL2
113    0x17,  // SynchExtAbtOnTranslTableWalkL3
114    0xff,  // SynchPtyErrOnTranslTableWalkL0 (INVALID)
115    0x1d,  // SynchPtyErrOnTranslTableWalkL1
116    0x1e,  // SynchPtyErrOnTranslTableWalkL2
117    0x1f,  // SynchPtyErrOnTranslTableWalkL3
118    0xff,  // TranslationL0 (INVALID)
119    0x05,  // TranslationL1
120    0x06,  // TranslationL2
121    0x07,  // TranslationL3
122    0xff,  // AccessFlagL0 (INVALID)
123    0x09,  // AccessFlagL1
124    0x0a,  // AccessFlagL2
125    0x0b,  // AccessFlagL3
126    0xff,  // DomainL0 (INVALID)
127    0x3d,  // DomainL1
128    0x3e,  // DomainL2
129    0xff,  // DomainL3 (RESERVED)
130    0xff,  // PermissionL0 (INVALID)
131    0x0d,  // PermissionL1
132    0x0e,  // PermissionL2
133    0x0f,  // PermissionL3
134    0x22,  // DebugEvent
135    0x10,  // SynchronousExternalAbort
136    0x30,  // TLBConflictAbort
137    0x18,  // SynchPtyErrOnMemoryAccess
138    0x11,  // AsynchronousExternalAbort
139    0x19,  // AsynchPtyErrOnMemoryAccess
140    0xff,  // AddressSizeL0 (INVALID)
141    0xff,  // AddressSizeL1 (INVALID)
142    0xff,  // AddressSizeL2 (INVALID)
143    0xff,  // AddressSizeL3 (INVALID)
144    0x40,  // PrefetchTLBMiss
145    0x80   // PrefetchUncacheable
146};
147
148static_assert(sizeof(ArmFault::longDescFaultSources) ==
149              ArmFault::NumFaultSources,
150              "Invalid size of ArmFault::longDescFaultSources[]");
151
152uint8_t ArmFault::aarch64FaultSources[] = {
153    0x21,  // AlignmentFault
154    0xff,  // InstructionCacheMaintenance (INVALID)
155    0x14,  // SynchExtAbtOnTranslTableWalkL0
156    0x15,  // SynchExtAbtOnTranslTableWalkL1
157    0x16,  // SynchExtAbtOnTranslTableWalkL2
158    0x17,  // SynchExtAbtOnTranslTableWalkL3
159    0x1c,  // SynchPtyErrOnTranslTableWalkL0
160    0x1d,  // SynchPtyErrOnTranslTableWalkL1
161    0x1e,  // SynchPtyErrOnTranslTableWalkL2
162    0x1f,  // SynchPtyErrOnTranslTableWalkL3
163    0x04,  // TranslationL0
164    0x05,  // TranslationL1
165    0x06,  // TranslationL2
166    0x07,  // TranslationL3
167    0x08,  // AccessFlagL0
168    0x09,  // AccessFlagL1
169    0x0a,  // AccessFlagL2
170    0x0b,  // AccessFlagL3
171    // @todo: Section & Page Domain Fault in AArch64?
172    0xff,  // DomainL0 (INVALID)
173    0xff,  // DomainL1 (INVALID)
174    0xff,  // DomainL2 (INVALID)
175    0xff,  // DomainL3 (INVALID)
176    0x0c,  // PermissionL0
177    0x0d,  // PermissionL1
178    0x0e,  // PermissionL2
179    0x0f,  // PermissionL3
180    0x22,  // DebugEvent
181    0x10,  // SynchronousExternalAbort
182    0x30,  // TLBConflictAbort
183    0x18,  // SynchPtyErrOnMemoryAccess
184    0xff,  // AsynchronousExternalAbort (INVALID)
185    0xff,  // AsynchPtyErrOnMemoryAccess (INVALID)
186    0x00,  // AddressSizeL0
187    0x01,  // AddressSizeL1
188    0x02,  // AddressSizeL2
189    0x03,  // AddressSizeL3
190    0x40,  // PrefetchTLBMiss
191    0x80   // PrefetchUncacheable
192};
193
194static_assert(sizeof(ArmFault::aarch64FaultSources) ==
195              ArmFault::NumFaultSources,
196              "Invalid size of ArmFault::aarch64FaultSources[]");
197
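// The three tables above map the ArmFault::FaultSource enumeration onto the
// architectural fault status codes reported to software: shortDescFaultSources
// feeds the AArch32 short-descriptor DFSR/IFSR encoding, longDescFaultSources
// the AArch32 long-descriptor (LPAE) encoding, and aarch64FaultSources the
// DFSC/IFSC field of the AArch64 ESR_ELx syndrome. A 0xff entry marks a
// source that has no encoding in that format. As a rough usage sketch (see
// AbortFault<T>::getFaultStatusCode() further down):
//
//     uint8_t fsc = ArmFault::aarch64FaultSources[ArmFault::TranslationLL + 1];
//     // fsc == 0x05, i.e. a level 1 translation fault
//
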
198// Fields: name, offset, cur{ELT,ELH}Offset, lowerEL{64,32}Offset, next mode,
199//         {ARM, Thumb, ARM_ELR, Thumb_ELR} PC offset, hyp trap,
200//         {A, F} disable, class, stat
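//
// Reading the UndefinedInstruction entry below against this legend: AArch32
// vector offset 0x004; AArch64 vector offsets 0x000/0x200 (current EL using
// SP_EL0/SP_ELx) and 0x400/0x600 (lower EL using AArch64/AArch32); target
// AArch32 mode MODE_UNDEFINED; saved LR offsets of 4 (ARM) and 2 (Thumb) and
// ELR offsets of 0; trappable to Hyp mode; does not mask aborts or FIQs;
// default exception class EC_UNKNOWN.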
201template<> ArmFault::FaultVals ArmFaultVals<Reset>::vals(
202    // Some dummy values (the reset vector has an IMPLEMENTATION DEFINED
203    // location in AArch64)
204    "Reset",                 0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC,
205    0, 0, 0, 0, false, true,  true,  EC_UNKNOWN
206);
207template<> ArmFault::FaultVals ArmFaultVals<UndefinedInstruction>::vals(
208    "Undefined Instruction", 0x004, 0x000, 0x200, 0x400, 0x600, MODE_UNDEFINED,
209    4, 2, 0, 0, true,  false, false, EC_UNKNOWN
210);
211template<> ArmFault::FaultVals ArmFaultVals<SupervisorCall>::vals(
212    "Supervisor Call",       0x008, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
213    4, 2, 4, 2, true,  false, false, EC_SVC_TO_HYP
214);
215template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorCall>::vals(
216    "Secure Monitor Call",   0x008, 0x000, 0x200, 0x400, 0x600, MODE_MON,
217    4, 4, 4, 4, false, true,  true,  EC_SMC_TO_HYP
218);
219template<> ArmFault::FaultVals ArmFaultVals<HypervisorCall>::vals(
220    "Hypervisor Call",       0x008, 0x000, 0x200, 0x400, 0x600, MODE_HYP,
221    4, 4, 4, 4, true,  false, false, EC_HVC
222);
223template<> ArmFault::FaultVals ArmFaultVals<PrefetchAbort>::vals(
224    "Prefetch Abort",        0x00C, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
225    4, 4, 0, 0, true,  true,  false, EC_PREFETCH_ABORT_TO_HYP
226);
227template<> ArmFault::FaultVals ArmFaultVals<DataAbort>::vals(
228    "Data Abort",            0x010, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
229    8, 8, 0, 0, true,  true,  false, EC_DATA_ABORT_TO_HYP
230);
231template<> ArmFault::FaultVals ArmFaultVals<VirtualDataAbort>::vals(
232    "Virtual Data Abort",    0x010, 0x000, 0x200, 0x400, 0x600, MODE_ABORT,
233    8, 8, 0, 0, true,  true,  false, EC_INVALID
234);
235template<> ArmFault::FaultVals ArmFaultVals<HypervisorTrap>::vals(
236    // @todo: double check these values
237    "Hypervisor Trap",       0x014, 0x000, 0x200, 0x400, 0x600, MODE_HYP,
238    0, 0, 0, 0, false, false, false, EC_UNKNOWN
239);
240template<> ArmFault::FaultVals ArmFaultVals<SecureMonitorTrap>::vals(
241    "Secure Monitor Trap",   0x004, 0x000, 0x200, 0x400, 0x600, MODE_MON,
242    4, 2, 0, 0, false, false, false, EC_UNKNOWN
243);
244template<> ArmFault::FaultVals ArmFaultVals<Interrupt>::vals(
245    "IRQ",                   0x018, 0x080, 0x280, 0x480, 0x680, MODE_IRQ,
246    4, 4, 0, 0, false, true,  false, EC_UNKNOWN
247);
248template<> ArmFault::FaultVals ArmFaultVals<VirtualInterrupt>::vals(
249    "Virtual IRQ",           0x018, 0x080, 0x280, 0x480, 0x680, MODE_IRQ,
250    4, 4, 0, 0, false, true,  false, EC_INVALID
251);
252template<> ArmFault::FaultVals ArmFaultVals<FastInterrupt>::vals(
253    "FIQ",                   0x01C, 0x100, 0x300, 0x500, 0x700, MODE_FIQ,
254    4, 4, 0, 0, false, true,  true,  EC_UNKNOWN
255);
256template<> ArmFault::FaultVals ArmFaultVals<VirtualFastInterrupt>::vals(
257    "Virtual FIQ",           0x01C, 0x100, 0x300, 0x500, 0x700, MODE_FIQ,
258    4, 4, 0, 0, false, true,  true,  EC_INVALID
259);
260template<> ArmFault::FaultVals ArmFaultVals<IllegalInstSetStateFault>::vals(
261    "Illegal Inst Set State Fault",   0x004, 0x000, 0x200, 0x400, 0x600, MODE_UNDEFINED,
262    4, 2, 0, 0, true, false, false, EC_ILLEGAL_INST
263);
264template<> ArmFault::FaultVals ArmFaultVals<SupervisorTrap>::vals(
265    // Some dummy values (SupervisorTrap is AArch64-only)
266    "Supervisor Trap",   0x014, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
267    0, 0, 0, 0, false, false, false, EC_UNKNOWN
268);
269template<> ArmFault::FaultVals ArmFaultVals<PCAlignmentFault>::vals(
270    // Some dummy values (PCAlignmentFault is AArch64-only)
271    "PC Alignment Fault",   0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
272    0, 0, 0, 0, true, false, false, EC_PC_ALIGNMENT
273);
274template<> ArmFault::FaultVals ArmFaultVals<SPAlignmentFault>::vals(
275    // Some dummy values (SPAlignmentFault is AArch64-only)
276    "SP Alignment Fault",   0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
277    0, 0, 0, 0, true, false, false, EC_STACK_PTR_ALIGNMENT
278);
279template<> ArmFault::FaultVals ArmFaultVals<SystemError>::vals(
280    // Some dummy values (SError is AArch64-only)
281    "SError",                0x000, 0x180, 0x380, 0x580, 0x780, MODE_SVC,
282    0, 0, 0, 0, false, true,  true,  EC_SERROR
283);
284template<> ArmFault::FaultVals ArmFaultVals<SoftwareBreakpoint>::vals(
285    // Some dummy values (SoftwareBreakpoint is AArch64-only)
286    "Software Breakpoint",   0x000, 0x000, 0x200, 0x400, 0x600, MODE_SVC,
287    0, 0, 0, 0, true, false, false,  EC_SOFTWARE_BREAKPOINT
288);
289template<> ArmFault::FaultVals ArmFaultVals<ArmSev>::vals(
290    // Some dummy values
291    "ArmSev Flush",          0x000, 0x000, 0x000, 0x000, 0x000, MODE_SVC,
292    0, 0, 0, 0, false, true,  true,  EC_UNKNOWN
293);
294
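// getVector() computes the AArch32 exception vector address. The base is
// MVBAR when the exception is taken to Monitor mode, HVBAR when taken to Hyp
// mode, and otherwise either the high vectors (HighVecs) when SCTLR.V is set
// or VBAR (0 without the Security Extensions). The per-fault offset from the
// table above is then added; for example, a Data Abort taken to MODE_ABORT
// with SCTLR.V clear lands at VBAR + 0x010.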
295Addr
296ArmFault::getVector(ThreadContext *tc)
297{
298    Addr base;
299
300    // ARM ARM issue C B1.8.1
301    bool haveSecurity = ArmSystem::haveSecurity(tc);
302
303    // Assert that SCTLR.VE is clear: there is no support here for vectored
304    // interrupts
305    SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR);
306    assert(!sctlr.ve);
307    // Check for invalid modes
308    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
309    assert(haveSecurity                      || cpsr.mode != MODE_MON);
310    assert(ArmSystem::haveVirtualization(tc) || cpsr.mode != MODE_HYP);
311
312    switch (cpsr.mode)
313    {
314      case MODE_MON:
315        base = tc->readMiscReg(MISCREG_MVBAR);
316        break;
317      case MODE_HYP:
318        base = tc->readMiscReg(MISCREG_HVBAR);
319        break;
320      default:
321        if (sctlr.v) {
322            base = HighVecs;
323        } else {
324            base = haveSecurity ? tc->readMiscReg(MISCREG_VBAR) : 0;
325        }
326        break;
327    }
328    return base + offset(tc);
329}
330
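// getVector64() returns the AArch64 exception vector address: VBAR_ELx of the
// target EL plus the offset chosen by offset64() below. For instance, an IRQ
// taken to EL1 from a lower EL running AArch64 would vector to
// VBAR_EL1 + 0x480 (the lowerEL64Offset of the Interrupt entry above).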
331Addr
332ArmFault::getVector64(ThreadContext *tc)
333{
334    Addr vbar;
335    switch (toEL) {
336      case EL3:
337        assert(ArmSystem::haveSecurity(tc));
338        vbar = tc->readMiscReg(MISCREG_VBAR_EL3);
339        break;
340      case EL2:
341        assert(ArmSystem::haveVirtualization(tc));
342        vbar = tc->readMiscReg(MISCREG_VBAR_EL2);
343        break;
344      case EL1:
345        vbar = tc->readMiscReg(MISCREG_VBAR_EL1);
346        break;
347      default:
348        panic("Invalid target exception level");
349        break;
350    }
351    return vbar + offset64(tc);
352}
353
354MiscRegIndex
355ArmFault::getSyndromeReg64() const
356{
357    switch (toEL) {
358      case EL1:
359        return MISCREG_ESR_EL1;
360      case EL2:
361        return MISCREG_ESR_EL2;
362      case EL3:
363        return MISCREG_ESR_EL3;
364      default:
365        panic("Invalid exception level");
366        break;
367    }
368}
369
370MiscRegIndex
371ArmFault::getFaultAddrReg64() const
372{
373    switch (toEL) {
374      case EL1:
375        return MISCREG_FAR_EL1;
376      case EL2:
377        return MISCREG_FAR_EL2;
378      case EL3:
379        return MISCREG_FAR_EL3;
380      default:
381        panic("Invalid exception level");
382        break;
383    }
384}
385
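// setSyndrome() assembles an ESR_ELx/HSR value with, roughly, the following
// layout:
//
//    31     26 25 24                            0
//   +---------+--+-------------------------------+
//   |   EC    |IL|              ISS              |
//   +---------+--+-------------------------------+
//
// For AArch32-sourced faults whose EC carries a condition code, ISS[24] is
// the CV bit, ISS[23:20] is the COND field, and only issRaw[19:0] is copied
// into the ISS.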
386void
387ArmFault::setSyndrome(ThreadContext *tc, MiscRegIndex syndrome_reg)
388{
389    uint32_t value;
390    uint32_t exc_class = (uint32_t) ec(tc);
391    uint32_t issVal = iss();
392
393    assert(!from64 || ArmSystem::highestELIs64(tc));
394
395    value = exc_class << 26;
396
397    // HSR.IL not valid for Prefetch Aborts (0x20, 0x21) and Data Aborts (0x24,
398    // 0x25) for which the ISS information is not valid (ARMv7).
399    // @todo: ARMv8 revises AArch32 functionality: when HSR.IL is not
400    // valid it is treated as RES1.
401    if (to64) {
402        value |= 1 << 25;
403    } else if ((bits(exc_class, 5, 3) != 4) ||
404               (bits(exc_class, 2) && bits(issVal, 24))) {
405        if (!machInst.thumb || machInst.bigThumb)
406            value |= 1 << 25;
407    }
408    // The condition code is only valid when EC[5:4] is zero and EC[3:0] is nonzero
409    if (!from64 && ((bits(exc_class, 5, 4) == 0) &&
410                    (bits(exc_class, 3, 0) != 0))) {
411        if (!machInst.thumb) {
412            uint32_t      cond;
413            ConditionCode condCode = (ConditionCode) (uint32_t) machInst.condCode;
414            // If it's an unconditional instruction, report it with a cond code of
415            // 0xE, i.e. the always (AL) code
416            cond  = (condCode == COND_UC) ? COND_AL : condCode;
417            value |= cond << 20;
418            value |= 1    << 24;
419        }
420        value |= bits(issVal, 19, 0);
421    } else {
422        value |= issVal;
423    }
424    tc->setMiscReg(syndrome_reg, value);
425}
426
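// update() fills in the bookkeeping used by the invoke routines: the source
// mode/EL (fromMode, fromEL, from64), the target mode/EL (toMode, toEL,
// hypRouted) based on routeToMonitor()/routeToHyp(), and whether the target
// EL is using AArch64 (to64). The target EL is never allowed to be lower
// than the source EL.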
427void
428ArmFault::update(ThreadContext *tc)
429{
430    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
431
432    // Determine source exception level and mode
433    fromMode = (OperatingMode) (uint8_t) cpsr.mode;
434    fromEL = opModeToEL(fromMode);
435    if (opModeIs64(fromMode))
436        from64 = true;
437
438    // Determine target exception level (aarch64) or target execution
439    // mode (aarch32).
440    if (ArmSystem::haveSecurity(tc) && routeToMonitor(tc)) {
441        toMode = MODE_MON;
442        toEL = EL3;
443    } else if (ArmSystem::haveVirtualization(tc) && routeToHyp(tc)) {
444        toMode = MODE_HYP;
445        toEL = EL2;
446        hypRouted = true;
447    } else {
448        toMode = nextMode();
449        toEL = opModeToEL(toMode);
450    }
451
452    if (fromEL > toEL)
453        toEL = fromEL;
454
455    to64 = ELIs64(tc, toEL);
456
457    // The fault-specific information has been updated; it is
458    // now possible to use it inside the fault.
459    faultUpdated = true;
460}
461
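// invoke() performs the actual exception entry. For an AArch64 target it
// defers to invoke64(); for an AArch32 target it roughly follows the ARMv7
// exception entry sequence: save the CPSR into SPSR_<mode> of the target
// mode, save the return address into LR (or ELR_hyp), update the CPSR (mode,
// instruction set state, A/I/F masks) and branch to the vector obtained from
// getVector().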
462void
463ArmFault::invoke(ThreadContext *tc, const StaticInstPtr &inst)
464{
465
466    // Update fault state information, such as the starting mode (aarch32)
467    // or EL (aarch64) and the ending mode or EL.
468    // The update function also evaluates whether the fault must
469    // be handled in AArch64 state (to64).
470    update(tc);
471
472    if (to64) {
473        // Invoke exception handler in AArch64 state
474        invoke64(tc, inst);
475        return;
476    }
477
478    // ARMv7 (ARM ARM issue C B1.9)
479
480    bool have_security       = ArmSystem::haveSecurity(tc);
481
482    FaultBase::invoke(tc);
483    if (!FullSystem)
484        return;
485    countStat()++;
486
487    SCTLR sctlr = tc->readMiscReg(MISCREG_SCTLR);
488    SCR scr = tc->readMiscReg(MISCREG_SCR);
489    CPSR saved_cpsr = tc->readMiscReg(MISCREG_CPSR);
490    saved_cpsr.nz = tc->readCCReg(CCREG_NZ);
491    saved_cpsr.c = tc->readCCReg(CCREG_C);
492    saved_cpsr.v = tc->readCCReg(CCREG_V);
493    saved_cpsr.ge = tc->readCCReg(CCREG_GE);
494
495    Addr curPc M5_VAR_USED = tc->pcState().pc();
496    ITSTATE it = tc->pcState().itstate();
497    saved_cpsr.it2 = it.top6;
498    saved_cpsr.it1 = it.bottom2;
499
500    // if we have a valid instruction then use it to annotate this fault with
501    // extra information. This is used to generate the correct fault syndrome
502    // information
503    if (inst) {
504        ArmStaticInst *armInst = static_cast<ArmStaticInst *>(inst.get());
505        armInst->annotateFault(this);
506    }
507
508    // Ensure Secure state if initially in Monitor mode
509    if (have_security && saved_cpsr.mode == MODE_MON) {
510        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
511        if (scr.ns) {
512            scr.ns = 0;
513            tc->setMiscRegNoEffect(MISCREG_SCR, scr);
514        }
515    }
516
517    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
518    cpsr.mode = toMode;
519
520    // some bits are set differently if we have been routed to hyp mode
521    if (cpsr.mode == MODE_HYP) {
522        SCTLR hsctlr = tc->readMiscReg(MISCREG_HSCTLR);
523        cpsr.t = hsctlr.te;
524        cpsr.e = hsctlr.ee;
525        if (!scr.ea)  {cpsr.a = 1;}
526        if (!scr.fiq) {cpsr.f = 1;}
527        if (!scr.irq) {cpsr.i = 1;}
528    } else if (cpsr.mode == MODE_MON) {
529        // Special case handling when entering monitor mode
530        cpsr.t = sctlr.te;
531        cpsr.e = sctlr.ee;
532        cpsr.a = 1;
533        cpsr.f = 1;
534        cpsr.i = 1;
535    } else {
536        cpsr.t = sctlr.te;
537        cpsr.e = sctlr.ee;
538
539        // The *Disable functions are virtual and different per fault
540        cpsr.a = cpsr.a | abortDisable(tc);
541        cpsr.f = cpsr.f | fiqDisable(tc);
542        cpsr.i = 1;
543    }
544    cpsr.it1 = cpsr.it2 = 0;
545    cpsr.j = 0;
546    tc->setMiscReg(MISCREG_CPSR, cpsr);
547
548    // Make sure the SEV mailbox is always set to one
549    tc->setMiscReg(MISCREG_SEV_MAILBOX, 1);
550
551    // Clear the exclusive monitor
552    tc->setMiscReg(MISCREG_LOCKFLAG, 0);
553
554    if (cpsr.mode == MODE_HYP) {
555        tc->setMiscReg(MISCREG_ELR_HYP, curPc +
556                (saved_cpsr.t ? thumbPcOffset(true)  : armPcOffset(true)));
557    } else {
558        tc->setIntReg(INTREG_LR, curPc +
559                (saved_cpsr.t ? thumbPcOffset(false) : armPcOffset(false)));
560    }
561
562    switch (cpsr.mode) {
563      case MODE_FIQ:
564        tc->setMiscReg(MISCREG_SPSR_FIQ, saved_cpsr);
565        break;
566      case MODE_IRQ:
567        tc->setMiscReg(MISCREG_SPSR_IRQ, saved_cpsr);
568        break;
569      case MODE_SVC:
570        tc->setMiscReg(MISCREG_SPSR_SVC, saved_cpsr);
571        break;
572      case MODE_MON:
573        assert(have_security);
574        tc->setMiscReg(MISCREG_SPSR_MON, saved_cpsr);
575        break;
576      case MODE_ABORT:
577        tc->setMiscReg(MISCREG_SPSR_ABT, saved_cpsr);
578        break;
579      case MODE_UNDEFINED:
580        tc->setMiscReg(MISCREG_SPSR_UND, saved_cpsr);
581        if (ec(tc) != EC_UNKNOWN)
582            setSyndrome(tc, MISCREG_HSR);
583        break;
584      case MODE_HYP:
585        assert(ArmSystem::haveVirtualization(tc));
586        tc->setMiscReg(MISCREG_SPSR_HYP, saved_cpsr);
587        setSyndrome(tc, MISCREG_HSR);
588        break;
589      default:
590        panic("unknown Mode\n");
591    }
592
593    Addr newPc = getVector(tc);
594    DPRINTF(Faults, "Invoking Fault:%s cpsr:%#x PC:%#x lr:%#x newVec: %#x\n",
595            name(), cpsr, curPc, tc->readIntReg(INTREG_LR), newPc);
596    PCState pc(newPc);
597    pc.thumb(cpsr.t);
598    pc.nextThumb(pc.thumb());
599    pc.jazelle(cpsr.j);
600    pc.nextJazelle(pc.jazelle());
601    pc.aarch64(!cpsr.width);
602    pc.nextAArch64(!cpsr.width);
603    pc.illegalExec(false);
604    tc->pcState(pc);
605}
606
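// invoke64() handles exception entry when the target EL is using AArch64:
// the current PSTATE is saved into SPSR_ELx, the preferred return address
// into ELR_ELx, PSTATE.{D,A,I,F} are masked and the PC is set to the vector
// from getVector64(). As a sketch, an SVC taken from AArch64 EL0 to EL1
// leaves SPSR_EL1 holding the caller's PSTATE and ELR_EL1 pointing at the
// instruction after the SVC (armPcElrOffset() is 4 for SupervisorCall).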
607void
608ArmFault::invoke64(ThreadContext *tc, const StaticInstPtr &inst)
609{
610    // Determine actual misc. register indices for ELR_ELx and SPSR_ELx
611    MiscRegIndex elr_idx, spsr_idx;
612    switch (toEL) {
613      case EL1:
614        elr_idx = MISCREG_ELR_EL1;
615        spsr_idx = MISCREG_SPSR_EL1;
616        break;
617      case EL2:
618        assert(ArmSystem::haveVirtualization(tc));
619        elr_idx = MISCREG_ELR_EL2;
620        spsr_idx = MISCREG_SPSR_EL2;
621        break;
622      case EL3:
623        assert(ArmSystem::haveSecurity(tc));
624        elr_idx = MISCREG_ELR_EL3;
625        spsr_idx = MISCREG_SPSR_EL3;
626        break;
627      default:
628        panic("Invalid target exception level");
629        break;
630    }
631
632    // Save process state into SPSR_ELx
633    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
634    CPSR spsr = cpsr;
635    spsr.nz = tc->readCCReg(CCREG_NZ);
636    spsr.c = tc->readCCReg(CCREG_C);
637    spsr.v = tc->readCCReg(CCREG_V);
638    if (from64) {
639        // Force some bitfields to 0
640        spsr.q = 0;
641        spsr.it1 = 0;
642        spsr.j = 0;
643        spsr.res0_23_22 = 0;
644        spsr.ge = 0;
645        spsr.it2 = 0;
646        spsr.t = 0;
647    } else {
648        spsr.ge = tc->readCCReg(CCREG_GE);
649        ITSTATE it = tc->pcState().itstate();
650        spsr.it2 = it.top6;
651        spsr.it1 = it.bottom2;
652        // Force some bitfields to 0
653        spsr.res0_23_22 = 0;
654        spsr.ss = 0;
655    }
656    tc->setMiscReg(spsr_idx, spsr);
657
658    // Save preferred return address into ELR_ELx
659    Addr curr_pc = tc->pcState().pc();
660    Addr ret_addr = curr_pc;
661    if (from64)
662        ret_addr += armPcElrOffset();
663    else
664        ret_addr += spsr.t ? thumbPcElrOffset() : armPcElrOffset();
665    tc->setMiscReg(elr_idx, ret_addr);
666
667    Addr vec_address = getVector64(tc);
668
669    // Update process state
670    OperatingMode64 mode = 0;
671    mode.spX = 1;
672    mode.el = toEL;
673    mode.width = 0;
674    cpsr.mode = mode;
675    cpsr.daif = 0xf;
676    cpsr.il = 0;
677    cpsr.ss = 0;
678    tc->setMiscReg(MISCREG_CPSR, cpsr);
679
680    // Set PC to start of exception handler
681    Addr new_pc = purifyTaggedAddr(vec_address, tc, toEL);
682    DPRINTF(Faults, "Invoking Fault (AArch64 target EL):%s cpsr:%#x PC:%#x "
683            "elr:%#x newVec: %#x\n", name(), cpsr, curr_pc, ret_addr, new_pc);
684    PCState pc(new_pc);
685    pc.aarch64(!cpsr.width);
686    pc.nextAArch64(!cpsr.width);
687    pc.illegalExec(false);
688    tc->pcState(pc);
689
690    // If we have a valid instruction then use it to annotate this fault with
691    // extra information. This is used to generate the correct fault syndrome
692    // information
693    if (inst)
694        static_cast<ArmStaticInst *>(inst.get())->annotateFault(this);
695    // Save exception syndrome
696    if ((nextMode() != MODE_IRQ) && (nextMode() != MODE_FIQ))
697        setSyndrome(tc, getSyndromeReg64());
698}
699
700void
701Reset::invoke(ThreadContext *tc, const StaticInstPtr &inst)
702{
703    if (FullSystem) {
704        tc->getCpuPtr()->clearInterrupts(tc->threadId());
705        tc->clearArchRegs();
706    }
707    if (!ArmSystem::highestELIs64(tc)) {
708        ArmFault::invoke(tc, inst);
709        tc->setMiscReg(MISCREG_VMPIDR,
710                       getMPIDR(dynamic_cast<ArmSystem*>(tc->getSystemPtr()), tc));
711
712        // Unless we have SMC code to get us there, boot in HYP!
713        if (ArmSystem::haveVirtualization(tc) &&
714            !ArmSystem::haveSecurity(tc)) {
715            CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
716            cpsr.mode = MODE_HYP;
717            tc->setMiscReg(MISCREG_CPSR, cpsr);
718        }
719    } else {
720        // Advance the PC to the IMPLEMENTATION DEFINED reset value
721        PCState pc = ArmSystem::resetAddr64(tc);
722        pc.aarch64(true);
723        pc.nextAArch64(true);
724        tc->pcState(pc);
725    }
726}
727
728void
729UndefinedInstruction::invoke(ThreadContext *tc, const StaticInstPtr &inst)
730{
731    if (FullSystem) {
732        ArmFault::invoke(tc, inst);
733        return;
734    }
735
736    // If the mnemonic isn't defined this has to be an unknown instruction.
737    assert(unknown || mnemonic != NULL);
738    if (disabled) {
739        panic("Attempted to execute disabled instruction "
740                "'%s' (inst 0x%08x)", mnemonic, machInst);
741    } else if (unknown) {
742        panic("Attempted to execute unknown instruction (inst 0x%08x)",
743              machInst);
744    } else {
745        panic("Attempted to execute unimplemented instruction "
746                "'%s' (inst 0x%08x)", mnemonic, machInst);
747    }
748}
749
750bool
751UndefinedInstruction::routeToHyp(ThreadContext *tc) const
752{
753    bool toHyp;
754
755    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
756    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
757    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
758
759    // if in Hyp mode then stay in Hyp mode
760    toHyp  = scr.ns && (cpsr.mode == MODE_HYP);
761    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
762    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.mode == MODE_USER);
763    return toHyp;
764}
765
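// When the exception class of an UndefinedInstruction has been overridden
// (e.g. for a trapped system register access), the ISS is rebuilt below from
// fields of the instruction encoding (op0, op1, op2, CRn, CRm, Rt and the
// direction bit) in the layout used for trapped MSR/MRS/system instruction
// accesses, instead of using the raw ISS value.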
766uint32_t
767UndefinedInstruction::iss() const
768{
769
770    // If UndefinedInstruction is routed to hypervisor, iss field is 0.
771    if (hypRouted) {
772        return 0;
773    }
774
775    if (overrideEc == EC_INVALID)
776        return issRaw;
777
778    uint32_t new_iss = 0;
779    uint32_t op0, op1, op2, CRn, CRm, Rt, dir;
780
781    dir = bits(machInst, 21, 21);
782    op0 = bits(machInst, 20, 19);
783    op1 = bits(machInst, 18, 16);
784    CRn = bits(machInst, 15, 12);
785    CRm = bits(machInst, 11, 8);
786    op2 = bits(machInst, 7, 5);
787    Rt = bits(machInst, 4, 0);
788
789    new_iss = op0 << 20 | op2 << 17 | op1 << 14 | CRn << 10 |
790            Rt << 5 | CRm << 1 | dir;
791
792    return new_iss;
793}
794
795void
796SupervisorCall::invoke(ThreadContext *tc, const StaticInstPtr &inst)
797{
798    if (FullSystem) {
799        ArmFault::invoke(tc, inst);
800        return;
801    }
802
803    // As of now, there isn't a 32 bit thumb version of this instruction.
804    assert(!machInst.bigThumb);
805    uint32_t callNum;
806    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
807    OperatingMode mode = (OperatingMode)(uint8_t)cpsr.mode;
808    if (opModeIs64(mode))
809        callNum = tc->readIntReg(INTREG_X8);
810    else
811        callNum = tc->readIntReg(INTREG_R7);
812    Fault fault;
813    tc->syscall(callNum, &fault);
814
815    // Advance the PC since that won't happen automatically.
816    PCState pc = tc->pcState();
817    assert(inst);
818    inst->advancePC(pc);
819    tc->pcState(pc);
820}
821
822bool
823SupervisorCall::routeToHyp(ThreadContext *tc) const
824{
825    bool toHyp;
826
827    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
828    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
829    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
830
831    // if in Hyp mode then stay in Hyp mode
832    toHyp  = scr.ns && (cpsr.mode == MODE_HYP);
833    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
834    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.mode == MODE_USER);
835    return toHyp;
836}
837
838ExceptionClass
839SupervisorCall::ec(ThreadContext *tc) const
840{
841    return (overrideEc != EC_INVALID) ? overrideEc :
842        (from64 ? EC_SVC_64 : vals.ec);
843}
844
845uint32_t
846SupervisorCall::iss() const
847{
848    // Even if we have a 24 bit imm from an arm32 instruction then we only use
849    // Even if we have a 24-bit immediate from an arm32 instruction, we only use
850    // the bottom 16 bits for the ISS value (this is harmless for AArch64 SVC).
851}
852
853uint32_t
854SecureMonitorCall::iss() const
855{
856    if (from64)
857        return bits(machInst, 20, 5);
858    return 0;
859}
860
861ExceptionClass
862UndefinedInstruction::ec(ThreadContext *tc) const
863{
864    // If UndefinedInstruction is routed to hypervisor,
865    // HSR.EC field is 0.
866    if (hypRouted)
867        return EC_UNKNOWN;
868    else
869        return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
870}
871
872
873HypervisorCall::HypervisorCall(ExtMachInst _machInst, uint32_t _imm) :
874        ArmFaultVals<HypervisorCall>(_machInst, _imm)
875{}
876
877ExceptionClass
878HypervisorCall::ec(ThreadContext *tc) const
879{
880    return from64 ? EC_HVC_64 : vals.ec;
881}
882
883ExceptionClass
884HypervisorTrap::ec(ThreadContext *tc) const
885{
886    return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
887}
888
889template<class T>
890FaultOffset
891ArmFaultVals<T>::offset(ThreadContext *tc)
892{
893    bool isHypTrap = false;
894
895    // Normally we just use the exception vector from the table at the top of
896    // this file. However, if this exception has caused a transition to Hyp
897    // mode, and it's an exception type that would only do so if it has been
898    // trapped, then we use the hyp trap vector instead of the normal vector.
899    if (vals.hypTrappable) {
900        CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
901        if (cpsr.mode == MODE_HYP) {
902            CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
903            isHypTrap = spsr.mode != MODE_HYP;
904        }
905    }
906    return isHypTrap ? 0x14 : vals.offset;
907}
908
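// offset64() selects one of the four columns of the AArch64 vector table:
//   same EL, SP_EL0 in use   -> currELTOffset
//   same EL, SP_ELx in use   -> currELHOffset
//   lower EL using AArch64   -> lowerEL64Offset
//   lower EL using AArch32   -> lowerEL32Offset
// When the target is EL3, whether the lower EL was using AArch32 is judged
// from EL2 if it is implemented and applicable, otherwise from EL1.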
909template<class T>
910FaultOffset
911ArmFaultVals<T>::offset64(ThreadContext *tc)
912{
913    if (toEL == fromEL) {
914        if (opModeIsT(fromMode))
915            return vals.currELTOffset;
916        return vals.currELHOffset;
917    } else {
918        bool lower_32 = false;
919        if (toEL == EL3) {
920            if (!inSecureState(tc) && ArmSystem::haveEL(tc, EL2))
921                lower_32 = ELIs32(tc, EL2);
922            else
923                lower_32 = ELIs32(tc, EL1);
924        } else {
925            lower_32 = ELIs32(tc, static_cast<ExceptionLevel>(toEL - 1));
926        }
927
928        if (lower_32)
929            return vals.lowerEL32Offset;
930        return vals.lowerEL64Offset;
931    }
932}
933
934// void
935// SupervisorCall::setSyndrome64(ThreadContext *tc, MiscRegIndex esr_idx)
936// {
937//     ESR esr = 0;
938//     esr.ec = machInst.aarch64 ? SvcAArch64 : SvcAArch32;
939//     esr.il = !machInst.thumb;
940//     if (machInst.aarch64)
941//         esr.imm16 = bits(machInst.instBits, 20, 5);
942//     else if (machInst.thumb)
943//         esr.imm16 = bits(machInst.instBits, 7, 0);
944//     else
945//         esr.imm16 = bits(machInst.instBits, 15, 0);
946//     tc->setMiscReg(esr_idx, esr);
947// }
948
949void
950SecureMonitorCall::invoke(ThreadContext *tc, const StaticInstPtr &inst)
951{
952    if (FullSystem) {
953        ArmFault::invoke(tc, inst);
954        return;
955    }
956}
957
958ExceptionClass
959SecureMonitorCall::ec(ThreadContext *tc) const
960{
961    return (from64 ? EC_SMC_64 : vals.ec);
962}
963
964bool
965SupervisorTrap::routeToHyp(ThreadContext *tc) const
966{
967    bool toHyp = false;
968
969    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
970    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
971    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
972
973    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
974    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.el == EL0);
975    return toHyp;
976}
977
978uint32_t
979SupervisorTrap::iss() const
980{
981    // If SupervisorTrap is routed to hypervisor, iss field is 0.
982    if (hypRouted) {
983        return 0;
984    }
985    return issRaw;
986}
987
988ExceptionClass
989SupervisorTrap::ec(ThreadContext *tc) const
990{
991    if (hypRouted)
992        return EC_UNKNOWN;
993    else
994        return (overrideEc != EC_INVALID) ? overrideEc : vals.ec;
995}
996
997ExceptionClass
998SecureMonitorTrap::ec(ThreadContext *tc) const
999{
1000    return (overrideEc != EC_INVALID) ? overrideEc :
1001        (from64 ? EC_SMC_64 : vals.ec);
1002}
1003
1004template<class T>
1005void
1006AbortFault<T>::invoke(ThreadContext *tc, const StaticInstPtr &inst)
1007{
1008    if (tranMethod == ArmFault::UnknownTran) {
1009        tranMethod = longDescFormatInUse(tc) ? ArmFault::LpaeTran
1010                                             : ArmFault::VmsaTran;
1011
1012        if ((tranMethod == ArmFault::VmsaTran) && this->routeToMonitor(tc)) {
1013            // See ARM ARM B3-1416
1014            bool override_LPAE = false;
1015            TTBCR ttbcr_s = tc->readMiscReg(MISCREG_TTBCR_S);
1016            TTBCR M5_VAR_USED ttbcr_ns = tc->readMiscReg(MISCREG_TTBCR_NS);
1017            if (ttbcr_s.eae) {
1018                override_LPAE = true;
1019            } else {
1020                // Unimplemented code option, not seen in testing.  May need
1021                // extension according to the manual excerpt above.
1022                DPRINTF(Faults, "Warning: Incomplete translation method "
1023                        "override detected.\n");
1024            }
1025            if (override_LPAE)
1026                tranMethod = ArmFault::LpaeTran;
1027        }
1028    }
1029
1030    if (source == ArmFault::AsynchronousExternalAbort) {
1031        tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_ABT, 0);
1032    }
1033    // Get effective fault source encoding
1034    CPSR cpsr = tc->readMiscReg(MISCREG_CPSR);
1035
1036    // The source must be determined BEFORE invoking the generic routines,
1037    // which will try to set the HSR etc. based upon the source!
1038    ArmFaultVals<T>::invoke(tc, inst);
1039
1040    if (!this->to64) {  // AArch32
1041        FSR  fsr  = getFsr(tc);
1042        if (cpsr.mode == MODE_HYP) {
1043            tc->setMiscReg(T::HFarIndex, faultAddr);
1044        } else if (stage2) {
1045            tc->setMiscReg(MISCREG_HPFAR, (faultAddr >> 8) & ~0xf);
1046            tc->setMiscReg(T::HFarIndex,  OVAddr);
1047        } else {
1048            tc->setMiscReg(T::FsrIndex, fsr);
1049            tc->setMiscReg(T::FarIndex, faultAddr);
1050        }
1051        DPRINTF(Faults, "Abort Fault source=%#x fsr=%#x faultAddr=%#x "\
1052                "tranMethod=%#x\n", source, fsr, faultAddr, tranMethod);
1053    } else {  // AArch64
1054        // Set the FAR register.  Nothing else to do if we are in AArch64 state
1055        // because the syndrome register has already been set inside invoke64()
1056        if (stage2) {
1057            // stage 2 fault, set HPFAR_EL2 to the faulting IPA
1058            // and FAR_EL2 to the Original VA
1059            tc->setMiscReg(AbortFault<T>::getFaultAddrReg64(), OVAddr);
1060            tc->setMiscReg(MISCREG_HPFAR_EL2, bits(faultAddr, 47, 12) << 4);
1061
1062            DPRINTF(Faults, "Abort Fault (Stage 2) VA: 0x%x IPA: 0x%x\n",
1063                    OVAddr, faultAddr);
1064        } else {
1065            tc->setMiscReg(AbortFault<T>::getFaultAddrReg64(), faultAddr);
1066        }
1067    }
1068}
1069
1070template<class T>
1071void
1072AbortFault<T>::setSyndrome(ThreadContext *tc, MiscRegIndex syndrome_reg)
1073{
1074    srcEncoded = getFaultStatusCode(tc);
1075    if (srcEncoded == ArmFault::FaultSourceInvalid) {
1076        panic("Invalid fault source\n");
1077    }
1078    ArmFault::setSyndrome(tc, syndrome_reg);
1079}
1080
1081template<class T>
1082uint8_t
1083AbortFault<T>::getFaultStatusCode(ThreadContext *tc) const
1084{
1085
1086    panic_if(!this->faultUpdated,
1087             "Trying to use un-updated ArmFault internal variables\n");
1088
1089    uint8_t fsc = 0;
1090
1091    if (!this->to64) {
1092        // AArch32
1093        assert(tranMethod != ArmFault::UnknownTran);
1094        if (tranMethod == ArmFault::LpaeTran) {
1095            fsc = ArmFault::longDescFaultSources[source];
1096        } else {
1097            fsc = ArmFault::shortDescFaultSources[source];
1098        }
1099    } else {
1100        // AArch64
1101        fsc = ArmFault::aarch64FaultSources[source];
1102    }
1103
1104    return fsc;
1105}
1106
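// getFsr() packs the fault status code into an AArch32 FSR. With the
// long-descriptor (LPAE) format the 6-bit code goes into FSR.STATUS and
// FSR.LPAE is set; with the short-descriptor format the code is split across
// FS[3:0] and FS[4] and the faulting domain is also reported. WnR records
// whether the faulting access was a write.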
1107template<class T>
1108FSR
1109AbortFault<T>::getFsr(ThreadContext *tc) const
1110{
1111    FSR fsr = 0;
1112
1113    auto fsc = getFaultStatusCode(tc);
1114
1115    // AArch32
1116    assert(tranMethod != ArmFault::UnknownTran);
1117    if (tranMethod == ArmFault::LpaeTran) {
1118        fsr.status = fsc;
1119        fsr.lpae   = 1;
1120    } else {
1121        fsr.fsLow  = bits(fsc, 3, 0);
1122        fsr.fsHigh = bits(fsc, 4);
1123        fsr.domain = static_cast<uint8_t>(domain);
1124    }
1125
1126    fsr.wnr = (write ? 1 : 0);
1127    fsr.ext = 0;
1128
1129    return fsr;
1130}
1131
1132template<class T>
1133bool
1134AbortFault<T>::abortDisable(ThreadContext *tc)
1135{
1136    if (ArmSystem::haveSecurity(tc)) {
1137        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1138        return (!scr.ns || scr.aw);
1139    }
1140    return true;
1141}
1142
1143template<class T>
1144void
1145AbortFault<T>::annotate(ArmFault::AnnotationIDs id, uint64_t val)
1146{
1147    switch (id)
1148    {
1149      case ArmFault::S1PTW:
1150        s1ptw = val;
1151        break;
1152      case ArmFault::OVA:
1153        OVAddr = val;
1154        break;
1155
1156      // Just ignore unknown IDs
1157      default:
1158        break;
1159    }
1160}
1161
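// The generic abort ISS assembled below carries the encoded fault status code
// in bits [5:0], the write/not-read (WnR) flag in bit 6 and the stage 1
// page-table-walk flag (S1PTW) in bit 7; DataAbort::iss() adds further fields
// on top of this.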
1162template<class T>
1163uint32_t
1164AbortFault<T>::iss() const
1165{
1166    uint32_t val;
1167
1168    val  = srcEncoded & 0x3F;
1169    val |= write << 6;
1170    val |= s1ptw << 7;
1171    return (val);
1172}
1173
1174template<class T>
1175bool
1176AbortFault<T>::isMMUFault() const
1177{
1178    // NOTE: Not relying on LL information being aligned to lowest bits here
1179    return
1180         (source == ArmFault::AlignmentFault)     ||
1181        ((source >= ArmFault::TranslationLL) &&
1182         (source <  ArmFault::TranslationLL + 4)) ||
1183        ((source >= ArmFault::AccessFlagLL) &&
1184         (source <  ArmFault::AccessFlagLL + 4))  ||
1185        ((source >= ArmFault::DomainLL) &&
1186         (source <  ArmFault::DomainLL + 4))      ||
1187        ((source >= ArmFault::PermissionLL) &&
1188         (source <  ArmFault::PermissionLL + 4));
1189}
1190
1191ExceptionClass
1192PrefetchAbort::ec(ThreadContext *tc) const
1193{
1194    if (to64) {
1195        // AArch64
1196        if (toEL == fromEL)
1197            return EC_PREFETCH_ABORT_CURR_EL;
1198        else
1199            return EC_PREFETCH_ABORT_LOWER_EL;
1200    } else {
1201        // AArch32
1202        // Abort faults have different EC codes depending on whether
1203        // the fault originated within HYP mode, or not. So override
1204        // the method and add the extra adjustment of the EC value.
1205
1206        ExceptionClass ec = ArmFaultVals<PrefetchAbort>::vals.ec;
1207
1208        CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
1209        if (spsr.mode == MODE_HYP) {
1210            ec = ((ExceptionClass) (((uint32_t) ec) + 1));
1211        }
1212        return ec;
1213    }
1214}
1215
1216bool
1217PrefetchAbort::routeToMonitor(ThreadContext *tc) const
1218{
1219    SCR scr = 0;
1220    if (from64)
1221        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1222    else
1223        scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1224
1225    return scr.ea && !isMMUFault();
1226}
1227
1228bool
1229PrefetchAbort::routeToHyp(ThreadContext *tc) const
1230{
1231    bool toHyp;
1232
1233    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
1234    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
1235    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
1236    HDCR hdcr = tc->readMiscRegNoEffect(MISCREG_HDCR);
1237
1238    // if in Hyp mode then stay in Hyp mode
1239    toHyp  = scr.ns && (cpsr.mode == MODE_HYP);
1240    // otherwise, check whether to take to Hyp mode through Hyp Trap vector
1241    toHyp |= (stage2 ||
1242                ( (source ==               DebugEvent) && hdcr.tde && (cpsr.mode !=  MODE_HYP)) ||
1243                ( (source == SynchronousExternalAbort) && hcr.tge  && (cpsr.mode == MODE_USER))
1244             ) && !inSecureState(tc);
1245    return toHyp;
1246}
1247
1248ExceptionClass
1249DataAbort::ec(ThreadContext *tc) const
1250{
1251    if (to64) {
1252        // AArch64
1253        if (source == ArmFault::AsynchronousExternalAbort) {
1254            panic("Asynchronous External Abort should be handled with "
1255                    "SystemErrors (SErrors)!");
1256        }
1257        if (toEL == fromEL)
1258            return EC_DATA_ABORT_CURR_EL;
1259        else
1260            return EC_DATA_ABORT_LOWER_EL;
1261    } else {
1262        // AArch32
1263        // Abort faults have different EC codes depending on whether
1264        // the fault originated within HYP mode, or not. So override
1265        // the method and add the extra adjustment of the EC value.
1266
1267        ExceptionClass ec = ArmFaultVals<DataAbort>::vals.ec;
1268
1269        CPSR spsr = tc->readMiscReg(MISCREG_SPSR_HYP);
1270        if (spsr.mode == MODE_HYP) {
1271            ec = ((ExceptionClass) (((uint32_t) ec) + 1));
1272        }
1273        return ec;
1274    }
1275}
1276
1277bool
1278DataAbort::routeToMonitor(ThreadContext *tc) const
1279{
1280    SCR scr = 0;
1281    if (from64)
1282        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1283    else
1284        scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1285
1286    return scr.ea && !isMMUFault();
1287}
1288
1289bool
1290DataAbort::routeToHyp(ThreadContext *tc) const
1291{
1292    bool toHyp;
1293
1294    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
1295    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
1296    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
1297    HDCR hdcr = tc->readMiscRegNoEffect(MISCREG_HDCR);
1298
1299    // if in Hyp mode then stay in Hyp mode
1300    toHyp  = scr.ns && (cpsr.mode == MODE_HYP);
1301    // otherwise, check whether to take to Hyp mode through Hyp Trap vector
1302    toHyp |= (stage2 ||
1303                ( (cpsr.mode != MODE_HYP) && ( ((source == AsynchronousExternalAbort) && hcr.amo) ||
1304                                               ((source == DebugEvent) && hdcr.tde) )
1305                ) ||
1306                ( (cpsr.mode == MODE_USER) && hcr.tge &&
1307                  ((source == AlignmentFault)            ||
1308                   (source == SynchronousExternalAbort))
1309                )
1310             ) && !inSecureState(tc);
1311    return toHyp;
1312}
1313
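// DataAbort::iss() extends the generic abort ISS with the data-abort-specific
// syndrome fields, roughly:
//   ISV  bit 24  - instruction syndrome (SAS/SSE/SRT/SF/AR) valid
//   SAS  [23:22] - access size
//   SSE  bit 21  - sign extension
//   SRT  [20:16] - transfer register
//   SF   bit 15  - 64-bit register (AArch64 only)
//   AR   bit 14  - acquire/release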
1314uint32_t
1315DataAbort::iss() const
1316{
1317    uint32_t val;
1318
1319    // Add on the data abort specific fields to the generic abort ISS value
1320    val  = AbortFault<DataAbort>::iss();
1321    // The ISS is valid if the abort was not caused by a stage 1 page table
1322    // walk and, when taken to AArch64, only if it is directed to EL2
1323    if (!s1ptw && (!to64 || toEL == EL2)) {
1324        val |= isv << 24;
1325        if (isv) {
1326            val |= sas << 22;
1327            val |= sse << 21;
1328            val |= srt << 16;
1329            // AArch64 only. These assignments are safe on AArch32 as well
1330            // because these vars are initialized to false
1331            val |= sf << 15;
1332            val |= ar << 14;
1333        }
1334    }
1335    return (val);
1336}
1337
1338void
1339DataAbort::annotate(AnnotationIDs id, uint64_t val)
1340{
1341    AbortFault<DataAbort>::annotate(id, val);
1342    switch (id)
1343    {
1344      case SAS:
1345        isv = true;
1346        sas = val;
1347        break;
1348      case SSE:
1349        isv = true;
1350        sse = val;
1351        break;
1352      case SRT:
1353        isv = true;
1354        srt = val;
1355        break;
1356      case SF:
1357        isv = true;
1358        sf  = val;
1359        break;
1360      case AR:
1361        isv = true;
1362        ar  = val;
1363        break;
1364      // Just ignore unknown IDs
1365      default:
1366        break;
1367    }
1368}
1369
1370void
1371VirtualDataAbort::invoke(ThreadContext *tc, const StaticInstPtr &inst)
1372{
1373    AbortFault<VirtualDataAbort>::invoke(tc, inst);
1374    HCR hcr = tc->readMiscRegNoEffect(MISCREG_HCR);
1375    hcr.va = 0;
1376    tc->setMiscRegNoEffect(MISCREG_HCR, hcr);
1377}
1378
1379bool
1380Interrupt::routeToMonitor(ThreadContext *tc) const
1381{
1382    assert(ArmSystem::haveSecurity(tc));
1383    SCR scr = 0;
1384    if (from64)
1385        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1386    else
1387        scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1388    return scr.irq;
1389}
1390
1391bool
1392Interrupt::routeToHyp(ThreadContext *tc) const
1393{
1394    bool toHyp;
1395
1396    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
1397    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
1398    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
1399    // Determine whether IRQs are routed to Hyp mode.
1400    toHyp = (!scr.irq && hcr.imo && !inSecureState(tc)) ||
1401            (cpsr.mode == MODE_HYP);
1402    return toHyp;
1403}
1404
1405bool
1406Interrupt::abortDisable(ThreadContext *tc)
1407{
1408    if (ArmSystem::haveSecurity(tc)) {
1409        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1410        return (!scr.ns || scr.aw);
1411    }
1412    return true;
1413}
1414
1415VirtualInterrupt::VirtualInterrupt()
1416{}
1417
1418bool
1419FastInterrupt::routeToMonitor(ThreadContext *tc) const
1420{
1421    assert(ArmSystem::haveSecurity(tc));
1422    SCR scr = 0;
1423    if (from64)
1424        scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1425    else
1426        scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1427    return scr.fiq;
1428}
1429
1430bool
1431FastInterrupt::routeToHyp(ThreadContext *tc) const
1432{
1433    bool toHyp;
1434
1435    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR);
1436    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
1437    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
1438    // Determine whether FIQs are routed to Hyp mode.
1439    toHyp = (!scr.fiq && hcr.fmo && !inSecureState(tc)) ||
1440            (cpsr.mode == MODE_HYP);
1441    return toHyp;
1442}
1443
1444bool
1445FastInterrupt::abortDisable(ThreadContext *tc)
1446{
1447    if (ArmSystem::haveSecurity(tc)) {
1448        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1449        return (!scr.ns || scr.aw);
1450    }
1451    return true;
1452}
1453
1454bool
1455FastInterrupt::fiqDisable(ThreadContext *tc)
1456{
1457    if (ArmSystem::haveVirtualization(tc)) {
1458        return true;
1459    } else if (ArmSystem::haveSecurity(tc)) {
1460        SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR);
1461        return (!scr.ns || scr.fw);
1462    }
1463    return true;
1464}
1465
1466VirtualFastInterrupt::VirtualFastInterrupt()
1467{}
1468
1469void
1470PCAlignmentFault::invoke(ThreadContext *tc, const StaticInstPtr &inst)
1471{
1472    ArmFaultVals<PCAlignmentFault>::invoke(tc, inst);
1473    assert(from64);
1474    // Set the FAR
1475    tc->setMiscReg(getFaultAddrReg64(), faultPC);
1476}
1477
1478bool
1479PCAlignmentFault::routeToHyp(ThreadContext *tc) const
1480{
1481    bool toHyp = false;
1482
1483    SCR  scr  = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1484    HCR  hcr  = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
1485    CPSR cpsr = tc->readMiscRegNoEffect(MISCREG_CPSR);
1486
1487    // if HCR.TGE is set to 1, take to Hyp mode through Hyp Trap vector
1488    toHyp |= !inSecureState(scr, cpsr) && hcr.tge && (cpsr.el == EL0);
1489    return toHyp;
1490}
1491
1492SPAlignmentFault::SPAlignmentFault()
1493{}
1494
1495SystemError::SystemError()
1496{}
1497
1498void
1499SystemError::invoke(ThreadContext *tc, const StaticInstPtr &inst)
1500{
1501    tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_ABT, 0);
1502    ArmFault::invoke(tc, inst);
1503}
1504
1505bool
1506SystemError::routeToMonitor(ThreadContext *tc) const
1507{
1508    assert(ArmSystem::haveSecurity(tc));
1509    assert(from64);
1510    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1511    return scr.ea;
1512}
1513
1514bool
1515SystemError::routeToHyp(ThreadContext *tc) const
1516{
1517    bool toHyp;
1518    assert(from64);
1519
1520    SCR scr = tc->readMiscRegNoEffect(MISCREG_SCR_EL3);
1521    HCR hcr  = tc->readMiscRegNoEffect(MISCREG_HCR);
1522
1523    toHyp = (!scr.ea && hcr.amo && !inSecureState(tc)) ||
1524            (!scr.ea && !scr.rw && !hcr.amo && !inSecureState(tc));
1525    return toHyp;
1526}
1527
1528
1529SoftwareBreakpoint::SoftwareBreakpoint(ExtMachInst _mach_inst, uint32_t _iss)
1530    : ArmFaultVals<SoftwareBreakpoint>(_mach_inst, _iss)
1531{}
1532
1533bool
1534SoftwareBreakpoint::routeToHyp(ThreadContext *tc) const
1535{
1536    assert(from64);
1537
1538    const bool have_el2 = ArmSystem::haveVirtualization(tc);
1539
1540    const HCR hcr  = tc->readMiscRegNoEffect(MISCREG_HCR_EL2);
1541    const HDCR mdcr  = tc->readMiscRegNoEffect(MISCREG_MDCR_EL2);
1542
1543    return have_el2 && !inSecureState(tc) && fromEL <= EL1 &&
1544        (hcr.tge || mdcr.tde);
1545}
1546
1547ExceptionClass
1548SoftwareBreakpoint::ec(ThreadContext *tc) const
1549{
1550    return from64 ? EC_SOFTWARE_BREAKPOINT_64 : vals.ec;
1551}
1552
1553void
1554ArmSev::invoke(ThreadContext *tc, const StaticInstPtr &inst) {
1555    DPRINTF(Faults, "Invoking ArmSev Fault\n");
1556    if (!FullSystem)
1557        return;
1558
1559    // Set sev_mailbox to 1, clear the pending interrupt from remote
1560    // SEV execution, and let the pipeline continue as pcState is still
1561    // valid.
1562    tc->setMiscReg(MISCREG_SEV_MAILBOX, 1);
1563    tc->getCpuPtr()->clearInterrupt(tc->threadId(), INT_SEV, 0);
1564}
1565
1566// Instantiate all the templates to make the linker happy
1567template class ArmFaultVals<Reset>;
1568template class ArmFaultVals<UndefinedInstruction>;
1569template class ArmFaultVals<SupervisorCall>;
1570template class ArmFaultVals<SecureMonitorCall>;
1571template class ArmFaultVals<HypervisorCall>;
1572template class ArmFaultVals<PrefetchAbort>;
1573template class ArmFaultVals<DataAbort>;
1574template class ArmFaultVals<VirtualDataAbort>;
1575template class ArmFaultVals<HypervisorTrap>;
1576template class ArmFaultVals<Interrupt>;
1577template class ArmFaultVals<VirtualInterrupt>;
1578template class ArmFaultVals<FastInterrupt>;
1579template class ArmFaultVals<VirtualFastInterrupt>;
1580template class ArmFaultVals<SupervisorTrap>;
1581template class ArmFaultVals<SecureMonitorTrap>;
1582template class ArmFaultVals<PCAlignmentFault>;
1583template class ArmFaultVals<SPAlignmentFault>;
1584template class ArmFaultVals<SystemError>;
1585template class ArmFaultVals<SoftwareBreakpoint>;
1586template class ArmFaultVals<ArmSev>;
1587template class AbortFault<PrefetchAbort>;
1588template class AbortFault<DataAbort>;
1589template class AbortFault<VirtualDataAbort>;
1590
1591
1592IllegalInstSetStateFault::IllegalInstSetStateFault()
1593{}
1594
1595
1596} // namespace ArmISA
1597