vfp.hh revision 7378:de704acd042f
/*
 * Copyright (c) 2010 ARM Limited
 * All rights reserved
 *
 * The license below extends only to copyright in the software and shall
 * not be construed as granting a license to any other intellectual
 * property including but not limited to intellectual property relating
 * to a hardware implementation of the functionality of the software
 * licensed hereunder.  You may use the software subject to the license
 * terms below provided that you ensure that this notice is replicated
 * unmodified and in its entirety in all distributions of the software,
 * modified or unmodified, in source code or in binary form.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met: redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer;
 * redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution;
 * neither the name of the copyright holders nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * Authors: Gabe Black
 */

#ifndef __ARCH_ARM_INSTS_VFP_HH__
#define __ARCH_ARM_INSTS_VFP_HH__

#include <fenv.h>

#include "arch/arm/insts/misc.hh"
#include "arch/arm/miscregs.hh"

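// Whether a VFP instruction is a micro-op and, if so, where it falls in
// the macro-op that contains it.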
enum VfpMicroMode {
    VfpNotAMicroop,
    VfpMicroop,
    VfpFirstMicroop,
    VfpLastMicroop
};

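// Translate a VfpMicroMode into the corresponding StaticInst flags.
// Every micro-op except the last one in a sequence is additionally
// marked as delayed-commit.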
template<class T>
static inline void
setVfpMicroFlags(VfpMicroMode mode, T &flags)
{
    switch (mode) {
      case VfpMicroop:
        flags[StaticInst::IsMicroop] = true;
        break;
      case VfpFirstMicroop:
        flags[StaticInst::IsMicroop] =
            flags[StaticInst::IsFirstMicroop] = true;
        break;
      case VfpLastMicroop:
        flags[StaticInst::IsMicroop] =
            flags[StaticInst::IsLastMicroop] = true;
        break;
      case VfpNotAMicroop:
        break;
    }
    if (mode == VfpMicroop || mode == VfpFirstMicroop) {
        flags[StaticInst::IsDelayedCommit] = true;
    }
}

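// Give the host's <fenv.h> exception and rounding mode constants enum
// types of their own.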
enum FeExceptionBit
{
    FeDivByZero = FE_DIVBYZERO,
    FeInexact = FE_INEXACT,
    FeInvalid = FE_INVALID,
    FeOverflow = FE_OVERFLOW,
    FeUnderflow = FE_UNDERFLOW,
    FeAllExceptions = FE_ALL_EXCEPT
};

enum FeRoundingMode
{
    FeRoundDown = FE_DOWNWARD,
    FeRoundNearest = FE_TONEAREST,
    FeRoundZero = FE_TOWARDZERO,
    FeRoundUpward = FE_UPWARD
};

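// Rounding modes as encoded in the FPSCR RMode field: round to nearest,
// round towards plus infinity, round towards minus infinity, and round
// towards zero.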
enum VfpRoundingMode
{
    VfpRoundNearest = 0,
    VfpRoundUpward = 1,
    VfpRoundDown = 2,
    VfpRoundZero = 3
};

// Host floating point state (currently just the rounding mode) saved
// around a simulated VFP operation.
typedef int VfpSavedState;

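/*
 * prepVfpFpscr and setVfpFpscr below bracket each simulated VFP
 * operation. A sketch of the intended call pattern, where FpDest, FpOp1,
 * and FpOp2 are placeholder operand names (the actual callers are the
 * VFP instruction implementations generated from the ISA description):
 *
 *     VfpSavedState state = prepVfpFpscr(Fpscr);
 *     FpDest = FpOp1 + FpOp2;
 *     Fpscr = setVfpFpscr(Fpscr, state);
 */

// Save the host's rounding mode, clear any accumulated host exceptions,
// and install the rounding mode requested by the guest's FPSCR so the
// host FPU computes the upcoming operation the way the guest expects.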
static inline VfpSavedState
prepVfpFpscr(FPSCR fpscr)
{
    int roundingMode = fegetround();
    feclearexcept(FeAllExceptions);
    switch (fpscr.rMode) {
      case VfpRoundNearest:
        fesetround(FeRoundNearest);
        break;
      case VfpRoundUpward:
        fesetround(FeRoundUpward);
        break;
      case VfpRoundDown:
        fesetround(FeRoundDown);
        break;
      case VfpRoundZero:
        fesetround(FeRoundZero);
        break;
    }
    return roundingMode;
}

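// Fold the exceptions the host FPU raised since prepVfpFpscr into the
// guest FPSCR's cumulative exception flags, then restore the host's
// saved rounding mode.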
static inline FPSCR
setVfpFpscr(FPSCR fpscr, VfpSavedState state)
{
    int exceptions = fetestexcept(FeAllExceptions);
    if (exceptions & FeInvalid) {
        fpscr.ioc = 1;
    }
    if (exceptions & FeDivByZero) {
        fpscr.dzc = 1;
    }
    if (exceptions & FeOverflow) {
        fpscr.ofc = 1;
    }
    if (exceptions & FeUnderflow) {
        fpscr.ufc = 1;
    }
    if (exceptions & FeInexact) {
        fpscr.ixc = 1;
    }
    fesetround(state);
    return fpscr;
}

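// Base class for the macro-ops that expand a VFP short vector operation
// into one micro-op per element.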
class VfpMacroOp : public PredMacroOp
{
  public:
    // Operands in the first bank of eight registers are scalars for
    // short vector operations; they don't advance with the vector.
    static bool
    inScalarBank(IntRegIndex idx)
    {
        return (idx % 32) < 8;
    }

  protected:
    // When true, strides are doubled: each double-precision operand
    // occupies a pair of single-precision registers.
    bool wide;

    VfpMacroOp(const char *mnem, ExtMachInst _machInst,
            OpClass __opClass, bool _wide) :
        PredMacroOp(mnem, _machInst, __opClass), wide(_wide)
    {}

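    // Step a register index forward by the vector stride, wrapping
    // around within its bank of eight registers rather than spilling
    // into the next bank.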
    IntRegIndex
    addStride(IntRegIndex idx, unsigned stride)
    {
        if (wide) {
            stride *= 2;
        }
        unsigned offset = idx % 8;
        idx = (IntRegIndex)(idx - offset);
        offset += stride;
        idx = (IntRegIndex)(idx + (offset % 8));
        return idx;
    }

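    // Advance the operand indices to the registers for the next element
    // of a short vector operation. The stride comes from the FPSCR; the
    // destination must be a vector operand, while source operands in the
    // scalar bank stay fixed.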
    void
    nextIdxs(IntRegIndex &dest, IntRegIndex &op1, IntRegIndex &op2)
    {
        unsigned stride = (machInst.fpscrStride == 0) ? 1 : 2;
        assert(!inScalarBank(dest));
        dest = addStride(dest, stride);
        op1 = addStride(op1, stride);
        if (!inScalarBank(op2)) {
            op2 = addStride(op2, stride);
        }
    }

    void
    nextIdxs(IntRegIndex &dest, IntRegIndex &op1)
    {
        unsigned stride = (machInst.fpscrStride == 0) ? 1 : 2;
        assert(!inScalarBank(dest));
        dest = addStride(dest, stride);
        if (!inScalarBank(op1)) {
            op1 = addStride(op1, stride);
        }
    }

    void
    nextIdxs(IntRegIndex &dest)
    {
        unsigned stride = (machInst.fpscrStride == 0) ? 1 : 2;
        assert(!inScalarBank(dest));
        dest = addStride(dest, stride);
    }
};

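// Base classes for VFP instructions in the various operand formats.
// Beyond their generic Reg*Op counterparts, they only apply the
// micro-op flags for the given VfpMicroMode.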
class VfpRegRegOp : public RegRegOp
{
  protected:
    VfpRegRegOp(const char *mnem, ExtMachInst _machInst, OpClass __opClass,
                IntRegIndex _dest, IntRegIndex _op1,
                VfpMicroMode mode = VfpNotAMicroop) :
        RegRegOp(mnem, _machInst, __opClass, _dest, _op1)
    {
        setVfpMicroFlags(mode, flags);
    }
};

class VfpRegImmOp : public RegImmOp
{
  protected:
    VfpRegImmOp(const char *mnem, ExtMachInst _machInst, OpClass __opClass,
                IntRegIndex _dest, uint64_t _imm,
                VfpMicroMode mode = VfpNotAMicroop) :
        RegImmOp(mnem, _machInst, __opClass, _dest, _imm)
    {
        setVfpMicroFlags(mode, flags);
    }
};

class VfpRegRegImmOp : public RegRegImmOp
{
  protected:
    VfpRegRegImmOp(const char *mnem, ExtMachInst _machInst, OpClass __opClass,
                   IntRegIndex _dest, IntRegIndex _op1,
                   uint64_t _imm, VfpMicroMode mode = VfpNotAMicroop) :
        RegRegImmOp(mnem, _machInst, __opClass, _dest, _op1, _imm)
    {
        setVfpMicroFlags(mode, flags);
    }
};

class VfpRegRegRegOp : public RegRegRegOp
{
  protected:
    VfpRegRegRegOp(const char *mnem, ExtMachInst _machInst, OpClass __opClass,
                   IntRegIndex _dest, IntRegIndex _op1, IntRegIndex _op2,
                   VfpMicroMode mode = VfpNotAMicroop) :
        RegRegRegOp(mnem, _machInst, __opClass, _dest, _op1, _op2)
    {
        setVfpMicroFlags(mode, flags);
    }
};

#endif //__ARCH_ARM_INSTS_VFP_HH__