sve_macromem.hh — revision 14091:090449e74135 vs. 14106:293e3f4b1321
/*
 * Copyright (c) 2018 ARM Limited
 * All rights reserved
 *
 * The license below extends only to copyright in the software and shall
 * not be construed as granting a license to any other intellectual
 * property including but not limited to intellectual property relating
 * to a hardware implementation of the functionality of the software
 * licensed hereunder. You may use the software subject to the license
 * terms below provided that you ensure that this notice is replicated
 * unmodified and in its entirety in all distributions of the software,
 * modified or unmodified, in source code or in binary form.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met: redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer;
 * redistributions in binary form must reproduce the above copyright
 * notice, this list of conditions and the following disclaimer in the
 * documentation and/or other materials provided with the distribution;
 * neither the name of the copyright holders nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * Authors: Giacomo Gabrielli
 */

#ifndef __ARCH_ARM_SVE_MACROMEM_HH__
#define __ARCH_ARM_SVE_MACROMEM_HH__

#include "arch/arm/generated/decoder.hh"
#include "arch/arm/insts/pred_inst.hh"

namespace ArmISA {

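/**
 * A macroop for SVE structure loads using a scalar-plus-scalar addressing
 * mode. It decomposes into numregs memory microops, which read the
 * interleaved data into the auxiliary interleave registers (INTRLVREG0
 * onwards), followed by numregs de-interleave microops, which write the
 * architectural destination vector registers.
 */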
template <typename Element,
          template <typename> class MicroopLdMemType,
          template <typename> class MicroopDeIntrlvType>
class SveLdStructSS : public PredMacroOp
{
  protected:
    IntRegIndex dest;
    IntRegIndex gp;
    IntRegIndex base;
    IntRegIndex offset;
    uint8_t numregs;

  public:
    SveLdStructSS(const char* mnem, ExtMachInst machInst, OpClass __opClass,
            IntRegIndex _dest, IntRegIndex _gp, IntRegIndex _base,
            IntRegIndex _offset, uint8_t _numregs)
        : PredMacroOp(mnem, machInst, __opClass),
          dest(_dest), gp(_gp), base(_base), offset(_offset), numregs(_numregs)
    {
        numMicroops = numregs * 2;

        microOps = new StaticInstPtr[numMicroops];

        for (int i = 0; i < numregs; ++i) {
            microOps[i] = new MicroopLdMemType<Element>(
                mnem, machInst, static_cast<IntRegIndex>(INTRLVREG0 + i),
                _gp, _base, _offset, _numregs, i);
        }
        for (int i = 0; i < numregs; ++i) {
            microOps[i + numregs] = new MicroopDeIntrlvType<Element>(
                mnem, machInst, static_cast<IntRegIndex>((_dest + i) % 32),
                _numregs, i, this);
        }

        microOps[0]->setFirstMicroop();
        microOps[numMicroops - 1]->setLastMicroop();

        for (StaticInstPtr *uop = microOps; !(*uop)->isLastMicroop(); uop++) {
            (*uop)->setDelayedCommit();
        }
    }

    Fault
    execute(ExecContext *, Trace::InstRecord *) const
    {
        panic("Execute method called when it shouldn't!");
        return NoFault;
    }

    std::string
    generateDisassembly(Addr pc, const SymbolTable *symtab) const
    {
        std::stringstream ss;
        printMnemonic(ss, "", false);
        ccprintf(ss, "{");
        for (int i = 0; i < numregs; ++i) {
            printVecReg(ss, (dest + i) % 32, true);
            if (i < numregs - 1)
                ccprintf(ss, ", ");
        }
        ccprintf(ss, "}, ");
        printVecPredReg(ss, gp);
        ccprintf(ss, "/z, [");
        printIntReg(ss, base);
        ccprintf(ss, ", ");
        printIntReg(ss, offset);
        ccprintf(ss, "]");
        return ss.str();
    }
};

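/**
 * A macroop for SVE structure stores using a scalar-plus-scalar addressing
 * mode. It decomposes into numregs interleave microops, which gather the
 * elements to be stored into the auxiliary interleave registers, followed
 * by numregs memory microops, which write them out to memory.
 */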
template <typename Element,
          template <typename> class MicroopStMemType,
          template <typename> class MicroopIntrlvType>
class SveStStructSS : public PredMacroOp
{
  protected:
    IntRegIndex dest;
    IntRegIndex gp;
    IntRegIndex base;
    IntRegIndex offset;
    uint8_t numregs;

  public:
    SveStStructSS(const char* mnem, ExtMachInst machInst, OpClass __opClass,
            IntRegIndex _dest, IntRegIndex _gp, IntRegIndex _base,
            IntRegIndex _offset, uint8_t _numregs)
        : PredMacroOp(mnem, machInst, __opClass),
          dest(_dest), gp(_gp), base(_base), offset(_offset), numregs(_numregs)
    {
        numMicroops = numregs * 2;

        microOps = new StaticInstPtr[numMicroops];

        for (int i = 0; i < numregs; ++i) {
            microOps[i] = new MicroopIntrlvType<Element>(
                mnem, machInst, static_cast<IntRegIndex>(INTRLVREG0 + i),
                _dest, _numregs, i, this);
        }

        for (int i = 0; i < numregs; ++i) {
            microOps[i + numregs] = new MicroopStMemType<Element>(
                mnem, machInst, static_cast<IntRegIndex>(INTRLVREG0 + i),
                _gp, _base, _offset, _numregs, i);
        }

        microOps[0]->setFirstMicroop();
        microOps[numMicroops - 1]->setLastMicroop();

        for (StaticInstPtr *uop = microOps; !(*uop)->isLastMicroop(); uop++) {
            (*uop)->setDelayedCommit();
        }
    }

    Fault
    execute(ExecContext *, Trace::InstRecord *) const
    {
        panic("Execute method called when it shouldn't!");
        return NoFault;
    }

    std::string
    generateDisassembly(Addr pc, const SymbolTable *symtab) const
    {
        std::stringstream ss;
        printMnemonic(ss, "", false);
        ccprintf(ss, "{");
        for (int i = 0; i < numregs; ++i) {
            printVecReg(ss, (dest + i) % 32, true);
            if (i < numregs - 1)
                ccprintf(ss, ", ");
        }
        ccprintf(ss, "}, ");
        printVecPredReg(ss, gp);
        ccprintf(ss, ", [");
        printIntReg(ss, base);
        ccprintf(ss, ", ");
        printIntReg(ss, offset);
        ccprintf(ss, "]");
        return ss.str();
    }
};

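/**
 * A macroop for SVE structure loads using a scalar-plus-immediate addressing
 * mode; the immediate is a vector-length multiple (disassembled as
 * "#imm, MUL VL"). The microop decomposition mirrors SveLdStructSS.
 */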
template <typename Element,
          template <typename> class MicroopLdMemType,
          template <typename> class MicroopDeIntrlvType>
class SveLdStructSI : public PredMacroOp
{
  protected:
    IntRegIndex dest;
    IntRegIndex gp;
    IntRegIndex base;
    int64_t imm;
    uint8_t numregs;

  public:
    SveLdStructSI(const char* mnem, ExtMachInst machInst, OpClass __opClass,
            IntRegIndex _dest, IntRegIndex _gp, IntRegIndex _base,
            int64_t _imm, uint8_t _numregs)
        : PredMacroOp(mnem, machInst, __opClass),
          dest(_dest), gp(_gp), base(_base), imm(_imm), numregs(_numregs)
    {
        numMicroops = numregs * 2;

        microOps = new StaticInstPtr[numMicroops];

        for (int i = 0; i < numregs; ++i) {
            microOps[i] = new MicroopLdMemType<Element>(
                mnem, machInst, static_cast<IntRegIndex>(INTRLVREG0 + i),
                _gp, _base, _imm, _numregs, i);
        }
        for (int i = 0; i < numregs; ++i) {
            microOps[i + numregs] = new MicroopDeIntrlvType<Element>(
                mnem, machInst, static_cast<IntRegIndex>((_dest + i) % 32),
                _numregs, i, this);
        }

        microOps[0]->setFirstMicroop();
        microOps[numMicroops - 1]->setLastMicroop();

        for (StaticInstPtr *uop = microOps; !(*uop)->isLastMicroop(); uop++) {
            (*uop)->setDelayedCommit();
        }
    }

    Fault
    execute(ExecContext *, Trace::InstRecord *) const
    {
        panic("Execute method called when it shouldn't!");
        return NoFault;
    }

    std::string
    generateDisassembly(Addr pc, const SymbolTable *symtab) const
    {
        std::stringstream ss;
        printMnemonic(ss, "", false);
        ccprintf(ss, "{");
        for (int i = 0; i < numregs; ++i) {
            printVecReg(ss, (dest + i) % 32, true);
            if (i < numregs - 1)
                ccprintf(ss, ", ");
        }
        ccprintf(ss, "}, ");
        printVecPredReg(ss, gp);
        ccprintf(ss, "/z, [");
        printIntReg(ss, base);
        if (imm != 0) {
            ccprintf(ss, ", #%d, MUL VL", imm);
        }
        ccprintf(ss, "]");
        return ss.str();
    }
};

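/**
 * A macroop for SVE structure stores using a scalar-plus-immediate
 * addressing mode. The microop decomposition mirrors SveStStructSS.
 */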
template <typename Element,
          template <typename> class MicroopStMemType,
          template <typename> class MicroopIntrlvType>
class SveStStructSI : public PredMacroOp
{
  protected:
    IntRegIndex dest;
    IntRegIndex gp;
    IntRegIndex base;
    int64_t imm;
    uint8_t numregs;

  public:
    SveStStructSI(const char* mnem, ExtMachInst machInst, OpClass __opClass,
            IntRegIndex _dest, IntRegIndex _gp, IntRegIndex _base,
            int64_t _imm, uint8_t _numregs)
        : PredMacroOp(mnem, machInst, __opClass),
          dest(_dest), gp(_gp), base(_base), imm(_imm), numregs(_numregs)
    {
        numMicroops = numregs * 2;

        microOps = new StaticInstPtr[numMicroops];

        for (int i = 0; i < numregs; ++i) {
            microOps[i] = new MicroopIntrlvType<Element>(
                mnem, machInst, static_cast<IntRegIndex>(INTRLVREG0 + i),
                _dest, _numregs, i, this);
        }

        for (int i = 0; i < numregs; ++i) {
            microOps[i + numregs] = new MicroopStMemType<Element>(
                mnem, machInst, static_cast<IntRegIndex>(INTRLVREG0 + i),
                _gp, _base, _imm, _numregs, i);
        }

        microOps[0]->setFirstMicroop();
        microOps[numMicroops - 1]->setLastMicroop();

        for (StaticInstPtr *uop = microOps; !(*uop)->isLastMicroop(); uop++) {
            (*uop)->setDelayedCommit();
        }
    }

    Fault
    execute(ExecContext *, Trace::InstRecord *) const
    {
        panic("Execute method called when it shouldn't!");
        return NoFault;
    }

    std::string
    generateDisassembly(Addr pc, const SymbolTable *symtab) const
    {
        std::stringstream ss;
        printMnemonic(ss, "", false);
        ccprintf(ss, "{");
        for (int i = 0; i < numregs; ++i) {
            printVecReg(ss, (dest + i) % 32, true);
            if (i < numregs - 1)
                ccprintf(ss, ", ");
        }
        ccprintf(ss, "}, ");
        printVecPredReg(ss, gp);
        ccprintf(ss, ", [");
        printIntReg(ss, base);
        if (imm != 0) {
            ccprintf(ss, ", #%d, MUL VL", imm);
        }
        ccprintf(ss, "]");
        return ss.str();
    }
};

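/**
 * A macroop for SVE gather loads and scatter stores that use a vector base
 * plus an immediate offset. It decomposes into one memory microop per
 * element; gather loads additionally get a leading microop that copies the
 * base vector register to an auxiliary register, and first-faulting loads
 * get a trailing first-fault writeback microop.
 */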
template <typename RegElemType, typename MemElemType,
          template <typename, typename> class MicroopType,
          template <typename> class FirstFaultWritebackMicroopType>
class SveIndexedMemVI : public PredMacroOp
{
  protected:
    IntRegIndex dest;
    IntRegIndex gp;
    IntRegIndex base;
    uint64_t imm;

  public:
    SveIndexedMemVI(const char *mnem, ExtMachInst machInst, OpClass __opClass,
                    IntRegIndex _dest, IntRegIndex _gp, IntRegIndex _base,
                    uint64_t _imm, bool firstFault)
        : PredMacroOp(mnem, machInst, __opClass),
          dest(_dest), gp(_gp), base(_base), imm(_imm)
    {
        bool isLoad = (__opClass == MemReadOp);
        assert(!firstFault || isLoad);

        int num_elems = ((machInst.sveLen + 1) * 16) / sizeof(RegElemType);

        numMicroops = num_elems;
        if (isLoad) {
            if (firstFault) {
                numMicroops += 2;
            } else {
                numMicroops++;
            }
        }

        microOps = new StaticInstPtr[numMicroops];

        StaticInstPtr *uop = microOps;

        if (isLoad) {
            // The first microop of a gather load copies the source vector
            // register used for address calculation to an auxiliary register,
            // with all subsequent microops reading from the latter. This is
            // needed to properly handle cases where the source vector
            // register is the same as the destination register
            *uop = new ArmISAInst::SveGatherLoadCpySrcVecMicroop(
                mnem, machInst, _base, this);
            uop++;
        }

        for (int i = 0; i < num_elems; i++, uop++) {
            *uop = new MicroopType<RegElemType, MemElemType>(
                mnem, machInst, __opClass, _dest, _gp,
                isLoad ? (IntRegIndex) VECREG_UREG0 : _base, _imm, i,
                num_elems, firstFault);
        }

        if (firstFault) {
            *uop = new FirstFaultWritebackMicroopType<RegElemType>(
                mnem, machInst, __opClass, num_elems, this);
        } else {
            --uop;
        }

        (*uop)->setLastMicroop();
        microOps[0]->setFirstMicroop();

        for (StaticInstPtr *uop = microOps; !(*uop)->isLastMicroop(); uop++) {
            (*uop)->setDelayedCommit();
        }
    }

    Fault
    execute(ExecContext *, Trace::InstRecord *) const
    {
        panic("Execute method called when it shouldn't!");
        return NoFault;
    }

    std::string
    generateDisassembly(Addr pc, const SymbolTable *symtab) const
    {
        // TODO: add suffix to transfer and base registers
        std::stringstream ss;
        printMnemonic(ss, "", false);
        ccprintf(ss, "{");
        printVecReg(ss, dest, true);
        ccprintf(ss, "}, ");
        printVecPredReg(ss, gp);
        ccprintf(ss, "/z, [");
        printVecReg(ss, base, true);
        if (imm != 0) {
            ccprintf(ss, ", #%d", imm * sizeof(MemElemType));
        }
        ccprintf(ss, "]");
        return ss.str();
    }
};

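/**
 * A macroop for SVE gather loads and scatter stores that use a scalar base
 * plus a vector of offsets. The offsets may be 32- or 64-bit, signed or
 * unsigned, and optionally scaled; the microop decomposition mirrors
 * SveIndexedMemVI, with the leading copy microop duplicating the offset
 * vector instead of the base.
 */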
template <typename RegElemType, typename MemElemType,
          template <typename, typename> class MicroopType,
          template <typename> class FirstFaultWritebackMicroopType>
class SveIndexedMemSV : public PredMacroOp
{
  protected:
    IntRegIndex dest;
    IntRegIndex gp;
    IntRegIndex base;
    IntRegIndex offset;

    bool offsetIs32;
    bool offsetIsSigned;
    bool offsetIsScaled;

  public:
    SveIndexedMemSV(const char *mnem, ExtMachInst machInst, OpClass __opClass,
                    IntRegIndex _dest, IntRegIndex _gp, IntRegIndex _base,
                    IntRegIndex _offset, bool _offsetIs32,
                    bool _offsetIsSigned, bool _offsetIsScaled,
                    bool firstFault)
        : PredMacroOp(mnem, machInst, __opClass),
          dest(_dest), gp(_gp), base(_base), offset(_offset),
          offsetIs32(_offsetIs32), offsetIsSigned(_offsetIsSigned),
          offsetIsScaled(_offsetIsScaled)
    {
        bool isLoad = (__opClass == MemReadOp);
        assert(!firstFault || isLoad);

        int num_elems = ((machInst.sveLen + 1) * 16) / sizeof(RegElemType);

        numMicroops = num_elems;
        if (isLoad) {
            if (firstFault) {
                numMicroops += 2;
            } else {
                numMicroops++;
            }
        }

        microOps = new StaticInstPtr[numMicroops];

        StaticInstPtr *uop = microOps;

        if (isLoad) {
            // The first microop of a gather load copies the source vector
            // register used for address calculation to an auxiliary register,
            // with all subsequent microops reading from the latter. This is
            // needed to properly handle cases where the source vector
            // register is the same as the destination register
            *uop = new ArmISAInst::SveGatherLoadCpySrcVecMicroop(
                mnem, machInst, _offset, this);
            uop++;
        }

        for (int i = 0; i < num_elems; i++, uop++) {
            *uop = new MicroopType<RegElemType, MemElemType>(
                mnem, machInst, __opClass, _dest, _gp, _base,
                isLoad ? (IntRegIndex) VECREG_UREG0 : _offset, _offsetIs32,
                _offsetIsSigned, _offsetIsScaled, i, num_elems, firstFault);
        }

        if (firstFault) {
            *uop = new FirstFaultWritebackMicroopType<RegElemType>(
                mnem, machInst, __opClass, num_elems, this);
        } else {
            --uop;
        }

        (*uop)->setLastMicroop();
        microOps[0]->setFirstMicroop();

        for (StaticInstPtr *uop = microOps; !(*uop)->isLastMicroop(); uop++) {
            (*uop)->setDelayedCommit();
        }
    }

    Fault
    execute(ExecContext *, Trace::InstRecord *) const
    {
        panic("Execute method called when it shouldn't!");
        return NoFault;
    }

    std::string
    generateDisassembly(Addr pc, const SymbolTable *symtab) const
    {
        // TODO: add suffix to transfer and base registers
        std::stringstream ss;
        printMnemonic(ss, "", false);
        ccprintf(ss, "{");
        printVecReg(ss, dest, true);
        ccprintf(ss, "}, ");
        printVecPredReg(ss, gp);
        ccprintf(ss, "/z, [");
        printIntReg(ss, base);
        ccprintf(ss, ", ");
        printVecReg(ss, offset, true);
        ccprintf(ss, "]");
        return ss.str();
    }
};

} // namespace ArmISA

#endif // __ARCH_ARM_SVE_MACROMEM_HH__