/*
 * Copyright (c) 2010-2013, 2016 ARM Limited
 * All rights reserved
 *
 * The license below extends only to copyright in the software and shall
 * not be construed as granting a license to any other intellectual
 * property including but not limited to intellectual property relating
 * to a hardware implementation of the functionality of the software
 * licensed hereunder.  You may use the software subject to the license
 * terms below provided that you ensure that this notice is replicated
 * unmodified and in its entirety in all distributions of the software,
 * modified or unmodified, in source code or in binary form.
 *
 * Copyright (c) 2007-2008 The Florida State University
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met: redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer;
 * redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution;
 * neither the name of the copyright holders nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * Authors: Stephen Hines
 */
#ifndef __ARCH_ARM_INSTS_STATICINST_HH__
#define __ARCH_ARM_INSTS_STATICINST_HH__

#include <memory>

#include "arch/arm/faults.hh"
#include "arch/arm/system.hh"
#include "arch/arm/utility.hh"
#include "base/trace.hh"
#include "cpu/exec_context.hh"
#include "cpu/static_inst.hh"
#include "sim/byteswap.hh"
#include "sim/full_system.hh"

namespace ArmISA
{

class ArmStaticInst : public StaticInst
{
  protected:
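    // Decode-time state cached from the machine instruction: whether this
    // is an AArch64 instruction, and the integer operand width (32 or 64
    // bits) it operates on.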
    bool aarch64;
    uint8_t intWidth;

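    // Shift and extend helpers for data-processing operands. The
    // shift_rm_imm/shift_rm_rs and shift_carry_* variants handle AArch32
    // immediate- and register-specified shifts (and the resulting carry
    // out), while shiftReg64/extendReg64 handle the AArch64 shifted- and
    // extended-register forms.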
    int32_t shift_rm_imm(uint32_t base, uint32_t shamt,
                         uint32_t type, uint32_t cfval) const;
    int32_t shift_rm_rs(uint32_t base, uint32_t shamt,
                        uint32_t type, uint32_t cfval) const;

    bool shift_carry_imm(uint32_t base, uint32_t shamt,
                         uint32_t type, uint32_t cfval) const;
    bool shift_carry_rs(uint32_t base, uint32_t shamt,
                        uint32_t type, uint32_t cfval) const;

    int64_t shiftReg64(uint64_t base, uint64_t shiftAmt,
                       ArmShiftType type, uint8_t width) const;
    int64_t extendReg64(uint64_t base, ArmExtendType type,
                        uint64_t shiftAmt, uint8_t width) const;

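    /**
     * Signed saturating add/subtract of op1 and op2 into a signed
     * width-bit result. On overflow, res is clamped to the signed
     * width-bit range and true is returned; otherwise res is the exact
     * sum/difference and false is returned.
     *
     * For example, saturateOp<8>(res, 100, 100) clamps res to 127 and
     * returns true.
     */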
    template<int width>
    static inline bool
    saturateOp(int32_t &res, int64_t op1, int64_t op2, bool sub=false)
    {
        int64_t midRes = sub ? (op1 - op2) : (op1 + op2);
        if (bits(midRes, width) != bits(midRes, width - 1)) {
            if (midRes > 0)
                res = (LL(1) << (width - 1)) - 1;
            else
                res = -(LL(1) << (width - 1));
            return true;
        } else {
            res = midRes;
            return false;
        }
    }

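    /**
     * Saturate a full-width signed value op into a signed width-bit
     * range, returning true if clamping was necessary.
     */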
    static inline bool
    satInt(int32_t &res, int64_t op, int width)
    {
        width--;
        if (op >= (LL(1) << width)) {
            res = (LL(1) << width) - 1;
            return true;
        } else if (op < -(LL(1) << width)) {
            res = -(LL(1) << width);
            return true;
        } else {
            res = op;
            return false;
        }
    }

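    /**
     * Unsigned saturating add/subtract of op1 and op2 into an unsigned
     * width-bit result: res is clamped to [0, 2^width - 1] and true is
     * returned if saturation occurred.
     */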
    template<int width>
    static inline bool
    uSaturateOp(uint32_t &res, int64_t op1, int64_t op2, bool sub=false)
    {
        int64_t midRes = sub ? (op1 - op2) : (op1 + op2);
        if (midRes >= (LL(1) << width)) {
            res = (LL(1) << width) - 1;
            return true;
        } else if (midRes < 0) {
            res = 0;
            return true;
        } else {
            res = midRes;
            return false;
        }
    }

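    /**
     * Saturate a signed value op into an unsigned width-bit range,
     * returning true if clamping was necessary.
     */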
    static inline bool
    uSatInt(int32_t &res, int64_t op, int width)
    {
        if (op >= (LL(1) << width)) {
            res = (LL(1) << width) - 1;
            return true;
        } else if (op < 0) {
            res = 0;
            return true;
        } else {
            res = op;
            return false;
        }
    }

    // Constructor
    ArmStaticInst(const char *mnem, ExtMachInst _machInst,
                  OpClass __opClass)
        : StaticInst(mnem, _machInst, __opClass)
    {
        aarch64 = machInst.aarch64;
        if (bits(machInst, 28, 24) == 0x10)
            intWidth = 64;  // Force 64-bit width for ADR/ADRP
        else
            intWidth = (aarch64 && bits(machInst, 31)) ? 64 : 32;
    }

    /// Print a register name for disassembly given the unique
    /// dependence tag number (FP or int).
    void printIntReg(std::ostream &os, RegIndex reg_idx) const;
    void printFloatReg(std::ostream &os, RegIndex reg_idx) const;
    void printVecReg(std::ostream &os, RegIndex reg_idx) const;
    void printCCReg(std::ostream &os, RegIndex reg_idx) const;
    void printMiscReg(std::ostream &os, RegIndex reg_idx) const;
    void printMnemonic(std::ostream &os,
                       const std::string &suffix = "",
                       bool withPred = true,
                       bool withCond64 = false,
                       ConditionCode cond64 = COND_UC) const;
    void printTarget(std::ostream &os, Addr target,
                     const SymbolTable *symtab) const;
    void printCondition(std::ostream &os, unsigned code,
                        bool noImplicit=false) const;
    void printMemSymbol(std::ostream &os, const SymbolTable *symtab,
                        const std::string &prefix, const Addr addr,
                        const std::string &suffix) const;
    void printShiftOperand(std::ostream &os, IntRegIndex rm,
                           bool immShift, uint32_t shiftAmt,
                           IntRegIndex rs, ArmShiftType type) const;
    void printExtendOperand(bool firstOperand, std::ostream &os,
                            IntRegIndex rm, ArmExtendType type,
                            int64_t shiftAmt) const;


    void printDataInst(std::ostream &os, bool withImm) const;
    void printDataInst(std::ostream &os, bool withImm, bool immShift, bool s,
                       IntRegIndex rd, IntRegIndex rn, IntRegIndex rm,
                       IntRegIndex rs, uint32_t shiftAmt, ArmShiftType type,
                       uint64_t imm) const;

    void
    advancePC(PCState &pcState) const
    {
        pcState.advance();
    }

    std::string generateDisassembly(Addr pc, const SymbolTable *symtab) const;

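    /**
     * Compute the new CPSR value produced by an MSR-style write of val
     * under byteMask, taking the current security state, virtualization
     * support, and mode-change legality rules into account. Illegal mode
     * changes are ignored with a warning rather than applied.
     */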
    static inline uint32_t
    cpsrWriteByInstr(CPSR cpsr, uint32_t val, SCR scr, NSACR nsacr,
            uint8_t byteMask, bool affectState, bool nmfi, ThreadContext *tc)
    {
        bool privileged   = (cpsr.mode != MODE_USER);
        bool haveVirt     = ArmSystem::haveVirtualization(tc);
        bool haveSecurity = ArmSystem::haveSecurity(tc);
        bool isSecure     = inSecureState(scr, cpsr) || !haveSecurity;

        uint32_t bitMask = 0;

        if (bits(byteMask, 3)) {
            unsigned lowIdx = affectState ? 24 : 27;
            bitMask = bitMask | mask(31, lowIdx);
        }
        if (bits(byteMask, 2)) {
            bitMask = bitMask | mask(19, 16);
        }
        if (bits(byteMask, 1)) {
            unsigned highIdx = affectState ? 15 : 9;
            unsigned lowIdx = (privileged && (isSecure || scr.aw || haveVirt))
                            ? 8 : 9;
            bitMask = bitMask | mask(highIdx, lowIdx);
        }
        if (bits(byteMask, 0)) {
            if (privileged) {
                bitMask |= 1 << 7;
                if ( (!nmfi || !((val >> 6) & 0x1)) &&
                     (isSecure || scr.fw || haveVirt) ) {
                    bitMask |= 1 << 6;
                }
                // Now check the new mode is allowed
                OperatingMode newMode = (OperatingMode) (val & mask(5));
                OperatingMode oldMode = (OperatingMode)(uint32_t)cpsr.mode;
                if (!badMode(newMode)) {
                    bool validModeChange = true;
                    // Check for attempts to enter modes only permitted in
                    // Secure state from Non-secure state. These are Monitor
                    // mode ('10110'), and FIQ mode ('10001') if the Security
                    // Extensions have reserved it.
                    if (!isSecure && newMode == MODE_MON)
                        validModeChange = false;
                    if (!isSecure && newMode == MODE_FIQ && nsacr.rfr == 1)
                        validModeChange = false;
                    // There is no Hyp mode ('11010') in Secure state, so that
                    // is UNPREDICTABLE
                    if (scr.ns == 0 && newMode == MODE_HYP)
                        validModeChange = false;
                    // Cannot move into Hyp mode directly from a Non-secure
                    // PL1 mode
                    if (!isSecure && oldMode != MODE_HYP &&
                        newMode == MODE_HYP)
                        validModeChange = false;
                    // Cannot move out of Hyp mode with this function except
                    // on an exception return
                    if (oldMode == MODE_HYP && newMode != MODE_HYP &&
                        !affectState)
                        validModeChange = false;
                    // Must not change to 64-bit mode when running in 32-bit
                    // mode
                    if (!opModeIs64(oldMode) && opModeIs64(newMode))
                        validModeChange = false;

                    // If we passed all of the above then set the bit mask to
                    // copy the mode across
                    if (validModeChange) {
                        bitMask = bitMask | mask(5);
                    } else {
                        warn_once("Illegal change to CPSR mode attempted\n");
                    }
                } else {
                    warn_once("Ignoring write of bad mode to CPSR.\n");
                }
            }
            if (affectState)
                bitMask = bitMask | (1 << 5);
        }

        return ((uint32_t)cpsr & ~bitMask) | (val & bitMask);
    }

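    /**
     * Compute the new SPSR value produced by writing val under byteMask.
     * Unlike CPSR writes, this simply merges the selected bytes with no
     * mode or security checks.
     */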
    static inline uint32_t
    spsrWriteByInstr(uint32_t spsr, uint32_t val,
            uint8_t byteMask, bool affectState)
    {
        uint32_t bitMask = 0;

        if (bits(byteMask, 3))
            bitMask = bitMask | mask(31, 24);
        if (bits(byteMask, 2))
            bitMask = bitMask | mask(19, 16);
        if (bits(byteMask, 1))
            bitMask = bitMask | mask(15, 8);
        if (bits(byteMask, 0))
            bitMask = bitMask | mask(7, 0);

        return ((spsr & ~bitMask) | (val & bitMask));
    }

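    // Read the PC of the current instruction from the execution context.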
    static inline Addr
    readPC(ExecContext *xc)
    {
        return xc->pcState().instPC();
    }

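    // Set the next PC as a plain branch target, without any interworking
    // state change.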
    static inline void
    setNextPC(ExecContext *xc, Addr val)
    {
        PCState pc = xc->pcState();
        pc.instNPC(val);
        xc->pcState(pc);
    }

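    // Conditionally byte-swap a value: convert from guest byte order to
    // big- or little-endian depending on the requested endianness.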
    template<class T>
    static inline T
    cSwap(T val, bool big)
    {
        if (big) {
            return gtobe(val);
        } else {
            return gtole(val);
        }
    }

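    // Element-wise variant of cSwap: treat val as an array of elements of
    // type E and byte-swap each element individually (used, for example,
    // for data made up of multiple smaller lanes).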
    template<class T, class E>
    static inline T
    cSwap(T val, bool big)
    {
        const unsigned count = sizeof(T) / sizeof(E);
        union {
            T tVal;
            E eVals[count];
        } conv;
        conv.tVal = htog(val);
        if (big) {
            for (unsigned i = 0; i < count; i++) {
                conv.eVals[i] = gtobe(conv.eVals[i]);
            }
        } else {
            for (unsigned i = 0; i < count; i++) {
                conv.eVals[i] = gtole(conv.eVals[i]);
            }
        }
        return gtoh(conv.tVal);
    }

    // Perform an interworking branch.
    static inline void
    setIWNextPC(ExecContext *xc, Addr val)
    {
        PCState pc = xc->pcState();
        pc.instIWNPC(val);
        xc->pcState(pc);
    }

    // Perform an interworking branch in ARM mode, a regular branch
    // otherwise.
    static inline void
    setAIWNextPC(ExecContext *xc, Addr val)
    {
        PCState pc = xc->pcState();
        pc.instAIWNPC(val);
        xc->pcState(pc);
    }

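    /**
     * Return an UndefinedInstruction fault marked as disabled, used when
     * an otherwise valid instruction is not available in the current
     * configuration or state.
     */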
    inline Fault
    disabledFault() const
    {
        return std::make_shared<UndefinedInstruction>(machInst, false,
                                                      mnemonic, true);
    }

    /**
     * Trap an access to Advanced SIMD or FP registers due to access
     * control bits.
     *
     * See aarch64/exceptions/traps/AArch64.AdvSIMDFPAccessTrap in the
     * ARM ARM pseudocode library.
     *
     * @param el Target EL for the trap
     */
    Fault advSIMDFPAccessTrap64(ExceptionLevel el) const;


    /**
     * Check an Advanced SIMD access against CPTR_EL2 and CPTR_EL3.
     *
     * See aarch64/exceptions/traps/AArch64.CheckFPAdvSIMDTrap in the
     * ARM ARM pseudocode library.
     */
    Fault checkFPAdvSIMDTrap64(ThreadContext *tc, CPSR cpsr) const;

    /**
     * Check an Advanced SIMD access against CPACR_EL1, CPTR_EL2, and
     * CPTR_EL3.
     *
     * See aarch64/exceptions/traps/AArch64.CheckFPAdvSIMDEnabled in the
     * ARM ARM pseudocode library.
     */
    Fault checkFPAdvSIMDEnabled64(ThreadContext *tc,
                                  CPSR cpsr, CPACR cpacr) const;

    /**
     * Check if a VFP/SIMD access from aarch32 should be allowed.
     *
     * See aarch32/exceptions/traps/AArch32.CheckAdvSIMDOrFPEnabled in the
     * ARM ARM pseudocode library.
     */
    Fault checkAdvSIMDOrFPEnabled32(ThreadContext *tc,
                                    CPSR cpsr, CPACR cpacr,
                                    NSACR nsacr, FPEXC fpexc,
                                    bool fpexc_check, bool advsimd) const;

    /**
     * Get the new PSTATE from an SPSR register in preparation for an
     * exception return.
     *
     * See shared/functions/system/SetPSTATEFromPSR in the ARM ARM
     * pseudocode library.
     */
    CPSR getPSTATEFromPSR(ThreadContext *tc, CPSR cpsr, CPSR spsr) const;

  public:
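    /**
     * Hook allowing an instruction to attach extra, instruction-specific
     * information to a fault it generates; the default does nothing.
     */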
    virtual void
    annotateFault(ArmFault *fault) {}
};
}

#endif //__ARCH_ARM_INSTS_STATICINST_HH__