// sve_mem.isa — side-by-side diff residue of two revisions: 14028:44edf7dbe672 vs 14091:090449e74135
1// Copyright (c) 2017-2018 ARM Limited
2// All rights reserved
3//
4// The license below extends only to copyright in the software and shall
5// not be construed as granting a license to any other intellectual
6// property including but not limited to intellectual property relating
7// to a hardware implementation of the functionality of the software
8// licensed hereunder. You may use the software subject to the license
9// terms below provided that you ensure that this notice is replicated
10// unmodified and in its entirety in all distributions of the software,
11// modified or unmodified, in source code or in binary form.
12//
13// Redistribution and use in source and binary forms, with or without
14// modification, are permitted provided that the following conditions are
15// met: redistributions of source code must retain the above copyright
16// notice, this list of conditions and the following disclaimer;
17// redistributions in binary form must reproduce the above copyright
18// notice, this list of conditions and the following disclaimer in the
19// documentation and/or other materials provided with the distribution;
20// neither the name of the copyright holders nor the names of its
21// contributors may be used to endorse or promote products derived from
22// this software without specific prior written permission.
23//
24// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
25// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
26// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
27// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
28// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
29// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
30// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
31// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
32// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
33// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
34// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
35//
36// Authors: Giacomo Gabrielli
37
// Class-declaration template for the SVE register fill/spill instructions
// (dest register, scalar base register, immediate offset).  All element
// typedefs are uint8_t: the transfer is handled as raw bytes.
38def template SveMemFillSpillOpDeclare {{
39 class %(class_name)s : public %(base_class)s
40 {
41 protected:
42 typedef uint8_t TPElem;
43 typedef uint8_t RegElemType;
44 typedef uint8_t MemElemType;
45
46 public:
47 %(class_name)s(ExtMachInst machInst,
48 IntRegIndex _dest, IntRegIndex _base, uint64_t _imm)
49 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
50 _dest, _base, _imm)
51 {
52 %(constructor)s;
53 }
54
// Atomic/timing memory-access entry points; bodies are generated by the
// corresponding execute/initiateAcc/completeAcc templates.
55 Fault execute(ExecContext *, Trace::InstRecord *) const;
56 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
57 Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;
58
// Instruction-specific fault-annotation hook (filled by fa_code).
59 virtual void
60 annotateFault(ArmFault *fault) {
61 %(fa_code)s
62 }
63 };
64}};
65
// Class-declaration template for SVE contiguous memory accesses with
// scalar-plus-scalar addressing (base register + offset register).
// _gp is presumably the governing predicate register — confirm in base class.
66def template SveContigMemSSOpDeclare {{
67 %(tpl_header)s
68 class %(class_name)s : public %(base_class)s
69 {
70 protected:
71 typedef RegElemType TPElem;
72
73 public:
74 %(class_name)s(const char* mnem, ExtMachInst machInst,
75 IntRegIndex _dest, IntRegIndex _gp, IntRegIndex _base,
76 IntRegIndex _offset)
77 : %(base_class)s(mnem, machInst, %(op_class)s,
78 _dest, _gp, _base, _offset)
79 {
80 %(constructor)s;
81 }
82
// Atomic/timing memory-access entry points generated elsewhere.
83 Fault execute(ExecContext *, Trace::InstRecord *) const;
84 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
85 Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;
86
87 virtual void
88 annotateFault(ArmFault *fault) {
89 %(fa_code)s
90 }
91 };
92}};
93
// Class-declaration template for SVE contiguous memory accesses with
// scalar-plus-immediate addressing (base register + uint64_t immediate).
// Mirrors SveContigMemSSOpDeclare with the offset register replaced by _imm.
94def template SveContigMemSIOpDeclare {{
95 %(tpl_header)s
96 class %(class_name)s : public %(base_class)s
97 {
98 protected:
99 typedef RegElemType TPElem;
100
101 public:
102 %(class_name)s(const char* mnem, ExtMachInst machInst,
103 IntRegIndex _dest, IntRegIndex _gp, IntRegIndex _base,
104 uint64_t _imm)
105 : %(base_class)s(mnem, machInst, %(op_class)s,
106 _dest, _gp, _base, _imm)
107 {
108 %(constructor)s;
109 }
110
// Atomic/timing memory-access entry points generated elsewhere.
111 Fault execute(ExecContext *, Trace::InstRecord *) const;
112 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
113 Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;
114
115 virtual void
116 annotateFault(ArmFault *fault) {
117 %(fa_code)s
118 }
119 };
120}};
121
// Emits explicit template instantiations of the three memory-access
// methods for one concrete (class_name, tpl_args) combination, so the
// generated definitions are linked into the binary.
122def template SveContigMemExecDeclare {{
123 template
124 Fault %(class_name)s%(tpl_args)s::execute(ExecContext *,
125 Trace::InstRecord *) const;
126
127 template
128 Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *,
129 Trace::InstRecord *) const;
130
131 template
132 Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr,
133 ExecContext *, Trace::InstRecord *) const;
134}};
135
136def template SveContigLoadExecute {{
137 %(tpl_header)s
138 Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
139 Trace::InstRecord *traceData) const
140 {
141 Addr EA;
142 Fault fault = NoFault;
143 bool aarch64 M5_VAR_USED = true;
144 unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
145 xc->tcBase());
146
147 %(op_decl)s;
148 %(op_rd)s;
149 %(ea_code)s;
150
151 TheISA::VecRegContainer memData;
152 auto memDataView = memData.as<MemElemType>();
153
1// Copyright (c) 2017-2018 ARM Limited
2// All rights reserved
3//
4// The license below extends only to copyright in the software and shall
5// not be construed as granting a license to any other intellectual
6// property including but not limited to intellectual property relating
7// to a hardware implementation of the functionality of the software
8// licensed hereunder. You may use the software subject to the license
9// terms below provided that you ensure that this notice is replicated
10// unmodified and in its entirety in all distributions of the software,
11// modified or unmodified, in source code or in binary form.
12//
13// Redistribution and use in source and binary forms, with or without
14// modification, are permitted provided that the following conditions are
15// met: redistributions of source code must retain the above copyright
16// notice, this list of conditions and the following disclaimer;
17// redistributions in binary form must reproduce the above copyright
18// notice, this list of conditions and the following disclaimer in the
19// documentation and/or other materials provided with the distribution;
20// neither the name of the copyright holders nor the names of its
21// contributors may be used to endorse or promote products derived from
22// this software without specific prior written permission.
23//
24// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
25// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
26// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
27// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
28// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
29// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
30// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
31// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
32// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
33// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
34// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
35//
36// Authors: Giacomo Gabrielli
37
// (Second revision's copy.)  Class-declaration template for the SVE
// register fill/spill instructions; element typedefs fixed to uint8_t,
// i.e. raw byte transfer.
38def template SveMemFillSpillOpDeclare {{
39 class %(class_name)s : public %(base_class)s
40 {
41 protected:
42 typedef uint8_t TPElem;
43 typedef uint8_t RegElemType;
44 typedef uint8_t MemElemType;
45
46 public:
47 %(class_name)s(ExtMachInst machInst,
48 IntRegIndex _dest, IntRegIndex _base, uint64_t _imm)
49 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
50 _dest, _base, _imm)
51 {
52 %(constructor)s;
53 }
54
// Atomic/timing memory-access entry points generated elsewhere.
55 Fault execute(ExecContext *, Trace::InstRecord *) const;
56 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
57 Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;
58
59 virtual void
60 annotateFault(ArmFault *fault) {
61 %(fa_code)s
62 }
63 };
64}};
65
// (Second revision's copy.)  Declaration template for SVE contiguous
// accesses with scalar-plus-scalar addressing (base + offset registers);
// _gp is presumably the governing predicate register — confirm in base class.
66def template SveContigMemSSOpDeclare {{
67 %(tpl_header)s
68 class %(class_name)s : public %(base_class)s
69 {
70 protected:
71 typedef RegElemType TPElem;
72
73 public:
74 %(class_name)s(const char* mnem, ExtMachInst machInst,
75 IntRegIndex _dest, IntRegIndex _gp, IntRegIndex _base,
76 IntRegIndex _offset)
77 : %(base_class)s(mnem, machInst, %(op_class)s,
78 _dest, _gp, _base, _offset)
79 {
80 %(constructor)s;
81 }
82
83 Fault execute(ExecContext *, Trace::InstRecord *) const;
84 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
85 Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;
86
87 virtual void
88 annotateFault(ArmFault *fault) {
89 %(fa_code)s
90 }
91 };
92}};
93
// (Second revision's copy.)  Declaration template for SVE contiguous
// accesses with scalar-plus-immediate addressing (base register + _imm).
94def template SveContigMemSIOpDeclare {{
95 %(tpl_header)s
96 class %(class_name)s : public %(base_class)s
97 {
98 protected:
99 typedef RegElemType TPElem;
100
101 public:
102 %(class_name)s(const char* mnem, ExtMachInst machInst,
103 IntRegIndex _dest, IntRegIndex _gp, IntRegIndex _base,
104 uint64_t _imm)
105 : %(base_class)s(mnem, machInst, %(op_class)s,
106 _dest, _gp, _base, _imm)
107 {
108 %(constructor)s;
109 }
110
111 Fault execute(ExecContext *, Trace::InstRecord *) const;
112 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
113 Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;
114
115 virtual void
116 annotateFault(ArmFault *fault) {
117 %(fa_code)s
118 }
119 };
120}};
121
// (Second revision's copy.)  Explicit template instantiations of the
// execute / initiateAcc / completeAcc methods for one concrete class.
122def template SveContigMemExecDeclare {{
123 template
124 Fault %(class_name)s%(tpl_args)s::execute(ExecContext *,
125 Trace::InstRecord *) const;
126
127 template
128 Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *,
129 Trace::InstRecord *) const;
130
131 template
132 Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr,
133 ExecContext *, Trace::InstRecord *) const;
134}};
135
136def template SveContigLoadExecute {{
137 %(tpl_header)s
138 Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
139 Trace::InstRecord *traceData) const
140 {
141 Addr EA;
142 Fault fault = NoFault;
143 bool aarch64 M5_VAR_USED = true;
144 unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
145 xc->tcBase());
146
147 %(op_decl)s;
148 %(op_rd)s;
149 %(ea_code)s;
150
151 TheISA::VecRegContainer memData;
152 auto memDataView = memData.as<MemElemType>();
153
154 if (fault == NoFault) {
155 fault = xc->readMem(EA, memData.raw_ptr<uint8_t>(), memAccessSize,
156 this->memAccessFlags);
157 %(memacc_code)s;
158 }
154 %(rden_code)s;
159
155
156 fault = xc->readMem(EA, memData.raw_ptr<uint8_t>(), memAccessSize,
157 this->memAccessFlags, rdEn);
158
159 %(fault_code)s;
160
160 if (fault == NoFault) {
161 if (fault == NoFault) {
162 %(memacc_code)s;
161 %(op_wb)s;
162 }
163
164 return fault;
165 }
166}};
167
168def template SveContigLoadInitiateAcc {{
169 %(tpl_header)s
170 Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
171 Trace::InstRecord *traceData) const
172 {
173 Addr EA;
174 Fault fault = NoFault;
175 bool aarch64 M5_VAR_USED = true;
176 unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
177 xc->tcBase());
178
179 %(op_src_decl)s;
180 %(op_rd)s;
163 %(op_wb)s;
164 }
165
166 return fault;
167 }
168}};
169
170def template SveContigLoadInitiateAcc {{
171 %(tpl_header)s
172 Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
173 Trace::InstRecord *traceData) const
174 {
175 Addr EA;
176 Fault fault = NoFault;
177 bool aarch64 M5_VAR_USED = true;
178 unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
179 xc->tcBase());
180
181 %(op_src_decl)s;
182 %(op_rd)s;
181
182 %(ea_code)s;
183
183 %(ea_code)s;
184
184 if (fault == NoFault) {
185 fault = xc->initiateMemRead(EA, memAccessSize,
186 this->memAccessFlags);
187 }
185 %(rden_code)s;
188
186
187 fault = xc->initiateMemRead(EA, memAccessSize, this->memAccessFlags,
188 rdEn);
189
190 %(fault_code)s;
191
189 return fault;
190 }
191}};
192
193def template SveContigLoadCompleteAcc {{
194 %(tpl_header)s
195 Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
196 ExecContext *xc, Trace::InstRecord *traceData) const
197 {
192 return fault;
193 }
194}};
195
196def template SveContigLoadCompleteAcc {{
197 %(tpl_header)s
198 Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
199 ExecContext *xc, Trace::InstRecord *traceData) const
200 {
198 Fault fault = NoFault;
199 bool aarch64 M5_VAR_USED = true;
200 unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
201 xc->tcBase());
202
203 %(op_decl)s;
204 %(op_rd)s;
205
206 TheISA::VecRegContainer memData;
207 auto memDataView = memData.as<MemElemType>();
208
201 bool aarch64 M5_VAR_USED = true;
202 unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
203 xc->tcBase());
204
205 %(op_decl)s;
206 %(op_rd)s;
207
208 TheISA::VecRegContainer memData;
209 auto memDataView = memData.as<MemElemType>();
210
209 memcpy(memData.raw_ptr<uint8_t>(), pkt->getPtr<uint8_t>(),
210 pkt->getSize());
211
212 if (fault == NoFault) {
213 %(memacc_code)s;
211 if (xc->readMemAccPredicate()) {
212 memcpy(memData.raw_ptr<uint8_t>(), pkt->getPtr<uint8_t>(),
213 pkt->getSize());
214 }
215
214 }
215
216 if (fault == NoFault) {
217 %(op_wb)s;
218 }
216 %(memacc_code)s;
217 %(op_wb)s;
219
218
220 return fault;
219 return NoFault;
221 }
222}};
223
// Atomic-mode execute() for SVE contiguous stores: computes the effective
// address, stages the vector store data in a full-width VecRegContainer
// (filled by memacc_code), then issues one writeMem guarded by the wrEn
// byte/element enable produced by wren_code.
224def template SveContigStoreExecute {{
225 %(tpl_header)s
226 Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
227 Trace::InstRecord *traceData) const
228 {
229 Addr EA;
230 Fault fault = NoFault;
231 bool aarch64 M5_VAR_USED = true;
// Current SVE vector length in elements, read from the thread context.
232 unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
233 xc->tcBase());
234
235 %(op_decl)s;
236 %(op_rd)s;
237 %(ea_code)s;
238
// Staging buffer viewed as MemElemType elements for memacc_code.
239 TheISA::VecRegContainer memData;
240 auto memDataView = memData.as<MemElemType>();
241
242 %(wren_code)s;
243
244 if (fault == NoFault) {
245 %(memacc_code)s;
246 }
247
248 if (fault == NoFault) {
249 fault = xc->writeMem(memData.raw_ptr<uint8_t>(), memAccessSize, EA,
250 this->memAccessFlags, NULL, wrEn);
251 }
252
// Writeback only after a fault-free store.
253 if (fault == NoFault) {
254 %(op_wb)s;
255 }
256
257 return fault;
258 }
259}};
260
// Timing-mode initiateAcc() for SVE contiguous stores: same staging as
// execute() — EA computation, wren_code enable mask, memacc_code fills the
// buffer — then launches the write via writeMem.  No op_wb here; any
// writeback happens at completion.
261def template SveContigStoreInitiateAcc {{
262 %(tpl_header)s
263 Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
264 Trace::InstRecord *traceData) const
265 {
266 Addr EA;
267 Fault fault = NoFault;
268 bool aarch64 M5_VAR_USED = true;
269 unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
270 xc->tcBase());
271
272 %(op_decl)s;
273 %(op_rd)s;
274 %(ea_code)s;
275
276 TheISA::VecRegContainer memData;
277 auto memDataView = memData.as<MemElemType>();
278
279 %(wren_code)s;
280
281 if (fault == NoFault) {
282 %(memacc_code)s;
283 }
284
285 if (fault == NoFault) {
286 fault = xc->writeMem(memData.raw_ptr<uint8_t>(), memAccessSize, EA,
287 this->memAccessFlags, NULL, wrEn);
288 }
289
290 return fault;
291 }
292}};
293
// Store completion is a no-op: all work was done in initiateAcc().
294def template SveContigStoreCompleteAcc {{
295 %(tpl_header)s
296 Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
297 ExecContext *xc, Trace::InstRecord *traceData) const
298 {
299 return NoFault;
300 }
301}};
302
// Atomic-mode execute() for SVE load-and-replicate instructions: reads a
// single MemElemType element; memacc_code presumably broadcasts it across
// the destination vector (per the template's name) — confirm against the
// instruction definitions that instantiate this template.
303def template SveLoadAndReplExecute {{
304 %(tpl_header)s
305 Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
306 Trace::InstRecord *traceData) const
307 {
308 Addr EA;
309 Fault fault = NoFault;
310 bool aarch64 M5_VAR_USED = true;
311 unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
312 xc->tcBase());
313
314 %(op_decl)s;
315 %(op_rd)s;
316 %(ea_code)s;
317
318 MemElemType memData;
319
// NOTE(review): memacc_code runs even if readMemAtomic faulted; only the
// writeback below is fault-guarded.
320 if (fault == NoFault) {
321 fault = readMemAtomic(xc, traceData, EA, memData,
322 this->memAccessFlags);
323 %(memacc_code)s;
324 }
325
326 if (fault == NoFault) {
327 %(op_wb)s;
328 }
329
330 return fault;
331 }
332}};
333
// Timing-mode initiateAcc() for load-and-replicate: computes the EA and
// starts a single-element read; replication/writeback happen at completion.
334def template SveLoadAndReplInitiateAcc {{
335 %(tpl_header)s
336 Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
337 Trace::InstRecord *traceData) const
338 {
339 Addr EA;
340 Fault fault = NoFault;
341 bool aarch64 M5_VAR_USED = true;
342
343 %(op_src_decl)s;
344 %(op_rd)s;
345
346 %(ea_code)s;
347
348 MemElemType memData;
349
350 if (fault == NoFault) {
351 fault = initiateMemRead(xc, traceData, EA, memData,
352 this->memAccessFlags);
353 }
354
355 return fault;
356 }
357}};
358
// Timing-mode completeAcc() for load-and-replicate: extracts the loaded
// element from the response packet via getMem(), then memacc_code and
// op_wb finish the destination update.
359def template SveLoadAndReplCompleteAcc {{
360 %(tpl_header)s
361 Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
362 ExecContext *xc, Trace::InstRecord *traceData) const
363 {
364 Fault fault = NoFault;
365 bool aarch64 M5_VAR_USED = true;
366 unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
367 xc->tcBase());
368
369 %(op_decl)s;
370 %(op_rd)s;
371
372 MemElemType memData;
373 getMem(pkt, memData, traceData);
374
375 if (fault == NoFault) {
376 %(memacc_code)s;
377 }
378
379 if (fault == NoFault) {
380 %(op_wb)s;
381 }
382
383 return fault;
384 }
385}};
386
387def template SveIndexedMemVIMicroopDeclare {{
388 %(tpl_header)s
389 class %(class_name)s : public %(base_class)s
390 {
391 protected:
392 typedef RegElemType TPElem;
393
394 IntRegIndex dest;
395 IntRegIndex gp;
396 IntRegIndex base;
397 uint64_t imm;
398
399 int elemIndex;
400 int numElems;
220 }
221}};
222
// (First revision's copy.)  Atomic-mode execute() for SVE contiguous
// stores: compute EA, stage data in a VecRegContainer via memacc_code,
// then one writeMem guarded by the wrEn enable from wren_code.
223def template SveContigStoreExecute {{
224 %(tpl_header)s
225 Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
226 Trace::InstRecord *traceData) const
227 {
228 Addr EA;
229 Fault fault = NoFault;
230 bool aarch64 M5_VAR_USED = true;
231 unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
232 xc->tcBase());
233
234 %(op_decl)s;
235 %(op_rd)s;
236 %(ea_code)s;
237
238 TheISA::VecRegContainer memData;
239 auto memDataView = memData.as<MemElemType>();
240
241 %(wren_code)s;
242
243 if (fault == NoFault) {
244 %(memacc_code)s;
245 }
246
247 if (fault == NoFault) {
248 fault = xc->writeMem(memData.raw_ptr<uint8_t>(), memAccessSize, EA,
249 this->memAccessFlags, NULL, wrEn);
250 }
251
252 if (fault == NoFault) {
253 %(op_wb)s;
254 }
255
256 return fault;
257 }
258}};
259
// (First revision's copy.)  Timing-mode initiateAcc() for contiguous
// stores: identical staging to execute() but without op_wb.
260def template SveContigStoreInitiateAcc {{
261 %(tpl_header)s
262 Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
263 Trace::InstRecord *traceData) const
264 {
265 Addr EA;
266 Fault fault = NoFault;
267 bool aarch64 M5_VAR_USED = true;
268 unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
269 xc->tcBase());
270
271 %(op_decl)s;
272 %(op_rd)s;
273 %(ea_code)s;
274
275 TheISA::VecRegContainer memData;
276 auto memDataView = memData.as<MemElemType>();
277
278 %(wren_code)s;
279
280 if (fault == NoFault) {
281 %(memacc_code)s;
282 }
283
284 if (fault == NoFault) {
285 fault = xc->writeMem(memData.raw_ptr<uint8_t>(), memAccessSize, EA,
286 this->memAccessFlags, NULL, wrEn);
287 }
288
289 return fault;
290 }
291}};
292
// (First revision's copy.)  Store completion is a no-op.
293def template SveContigStoreCompleteAcc {{
294 %(tpl_header)s
295 Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
296 ExecContext *xc, Trace::InstRecord *traceData) const
297 {
298 return NoFault;
299 }
300}};
301
// (First revision's copy.)  Atomic-mode execute() for load-and-replicate:
// single-element read; memacc_code presumably performs the broadcast —
// confirm in the instantiating instruction definitions.
302def template SveLoadAndReplExecute {{
303 %(tpl_header)s
304 Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
305 Trace::InstRecord *traceData) const
306 {
307 Addr EA;
308 Fault fault = NoFault;
309 bool aarch64 M5_VAR_USED = true;
310 unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
311 xc->tcBase());
312
313 %(op_decl)s;
314 %(op_rd)s;
315 %(ea_code)s;
316
317 MemElemType memData;
318
319 if (fault == NoFault) {
320 fault = readMemAtomic(xc, traceData, EA, memData,
321 this->memAccessFlags);
322 %(memacc_code)s;
323 }
324
325 if (fault == NoFault) {
326 %(op_wb)s;
327 }
328
329 return fault;
330 }
331}};
332
// (First revision's copy.)  Timing-mode initiateAcc() for
// load-and-replicate: starts the single-element read only.
333def template SveLoadAndReplInitiateAcc {{
334 %(tpl_header)s
335 Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
336 Trace::InstRecord *traceData) const
337 {
338 Addr EA;
339 Fault fault = NoFault;
340 bool aarch64 M5_VAR_USED = true;
341
342 %(op_src_decl)s;
343 %(op_rd)s;
344
345 %(ea_code)s;
346
347 MemElemType memData;
348
349 if (fault == NoFault) {
350 fault = initiateMemRead(xc, traceData, EA, memData,
351 this->memAccessFlags);
352 }
353
354 return fault;
355 }
356}};
357
// (First revision's copy.)  Timing-mode completeAcc() for
// load-and-replicate: pull the element from the packet with getMem(),
// then memacc_code and the fault-guarded op_wb finish the update.
358def template SveLoadAndReplCompleteAcc {{
359 %(tpl_header)s
360 Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
361 ExecContext *xc, Trace::InstRecord *traceData) const
362 {
363 Fault fault = NoFault;
364 bool aarch64 M5_VAR_USED = true;
365 unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
366 xc->tcBase());
367
368 %(op_decl)s;
369 %(op_rd)s;
370
371 MemElemType memData;
372 getMem(pkt, memData, traceData);
373
374 if (fault == NoFault) {
375 %(memacc_code)s;
376 }
377
378 if (fault == NoFault) {
379 %(op_wb)s;
380 }
381
382 return fault;
383 }
384}};
385
386def template SveIndexedMemVIMicroopDeclare {{
387 %(tpl_header)s
388 class %(class_name)s : public %(base_class)s
389 {
390 protected:
391 typedef RegElemType TPElem;
392
393 IntRegIndex dest;
394 IntRegIndex gp;
395 IntRegIndex base;
396 uint64_t imm;
397
398 int elemIndex;
399 int numElems;
400 bool firstFault;
401
402 unsigned memAccessFlags;
403
404 public:
405 %(class_name)s(const char* mnem, ExtMachInst machInst,
406 OpClass __opClass, IntRegIndex _dest, IntRegIndex _gp,
401
402 unsigned memAccessFlags;
403
404 public:
405 %(class_name)s(const char* mnem, ExtMachInst machInst,
406 OpClass __opClass, IntRegIndex _dest, IntRegIndex _gp,
407 IntRegIndex _base, uint64_t _imm, int _elemIndex, int _numElems)
407 IntRegIndex _base, uint64_t _imm, int _elemIndex, int _numElems,
408 bool _firstFault)
408 : %(base_class)s(mnem, machInst, %(op_class)s),
409 dest(_dest), gp(_gp), base(_base), imm(_imm),
410 elemIndex(_elemIndex), numElems(_numElems),
409 : %(base_class)s(mnem, machInst, %(op_class)s),
410 dest(_dest), gp(_gp), base(_base), imm(_imm),
411 elemIndex(_elemIndex), numElems(_numElems),
412 firstFault(_firstFault),
411 memAccessFlags(ArmISA::TLB::AllowUnaligned |
412 ArmISA::TLB::MustBeOne)
413 {
414 %(constructor)s;
415 if (_opClass == MemReadOp && elemIndex == 0) {
416 // The first micro-op is responsible for pinning the
413 memAccessFlags(ArmISA::TLB::AllowUnaligned |
414 ArmISA::TLB::MustBeOne)
415 {
416 %(constructor)s;
417 if (_opClass == MemReadOp && elemIndex == 0) {
418 // The first micro-op is responsible for pinning the
417 // destination register
418 _destRegIdx[0].setNumPinnedWrites(numElems - 1);
419 // destination and the fault status registers
420 assert(_numDestRegs == 2);
421 _destRegIdx[0].setNumPinnedWrites(numElems - 1);
422 _destRegIdx[1].setNumPinnedWrites(numElems - 1);
419 }
420 }
421
422 Fault execute(ExecContext *, Trace::InstRecord *) const;
423 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
424 Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;
425
426 virtual void
427 annotateFault(ArmFault *fault)
428 {
429 %(fa_code)s
430 }
431
432 std::string
433 generateDisassembly(Addr pc, const SymbolTable *symtab) const
434 {
435 // TODO: add suffix to transfer register
436 std::stringstream ss;
437 printMnemonic(ss, "", false);
438 ccprintf(ss, "{");
439 printVecReg(ss, dest, true);
440 ccprintf(ss, "}, ");
441 printVecPredReg(ss, gp);
442 if (_opClass == MemReadOp) {
443 ccprintf(ss, "/z");
444 }
445 ccprintf(ss, ", [");
446 printVecReg(ss, base, true);
447 if (imm != 0) {
448 ccprintf(ss, ", #%d", imm * sizeof(MemElemType));
449 }
450 ccprintf(ss, "] (uop elem %d tfer)", elemIndex);
451 return ss.str();
452 }
453 };
454}};
455
456def template SveIndexedMemSVMicroopDeclare {{
457 %(tpl_header)s
458 class %(class_name)s : public %(base_class)s
459 {
460 protected:
461 typedef RegElemType TPElem;
462
463 IntRegIndex dest;
464 IntRegIndex gp;
465 IntRegIndex base;
466 IntRegIndex offset;
467
468 bool offsetIs32;
469 bool offsetIsSigned;
470 bool offsetIsScaled;
471
472 int elemIndex;
473 int numElems;
423 }
424 }
425
426 Fault execute(ExecContext *, Trace::InstRecord *) const;
427 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
428 Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;
429
430 virtual void
431 annotateFault(ArmFault *fault)
432 {
433 %(fa_code)s
434 }
435
436 std::string
437 generateDisassembly(Addr pc, const SymbolTable *symtab) const
438 {
439 // TODO: add suffix to transfer register
440 std::stringstream ss;
441 printMnemonic(ss, "", false);
442 ccprintf(ss, "{");
443 printVecReg(ss, dest, true);
444 ccprintf(ss, "}, ");
445 printVecPredReg(ss, gp);
446 if (_opClass == MemReadOp) {
447 ccprintf(ss, "/z");
448 }
449 ccprintf(ss, ", [");
450 printVecReg(ss, base, true);
451 if (imm != 0) {
452 ccprintf(ss, ", #%d", imm * sizeof(MemElemType));
453 }
454 ccprintf(ss, "] (uop elem %d tfer)", elemIndex);
455 return ss.str();
456 }
457 };
458}};
459
460def template SveIndexedMemSVMicroopDeclare {{
461 %(tpl_header)s
462 class %(class_name)s : public %(base_class)s
463 {
464 protected:
465 typedef RegElemType TPElem;
466
467 IntRegIndex dest;
468 IntRegIndex gp;
469 IntRegIndex base;
470 IntRegIndex offset;
471
472 bool offsetIs32;
473 bool offsetIsSigned;
474 bool offsetIsScaled;
475
476 int elemIndex;
477 int numElems;
478 bool firstFault;
474
475 unsigned memAccessFlags;
476
477 public:
478 %(class_name)s(const char* mnem, ExtMachInst machInst,
479 OpClass __opClass, IntRegIndex _dest, IntRegIndex _gp,
480 IntRegIndex _base, IntRegIndex _offset, bool _offsetIs32,
481 bool _offsetIsSigned, bool _offsetIsScaled, int _elemIndex,
479
480 unsigned memAccessFlags;
481
482 public:
483 %(class_name)s(const char* mnem, ExtMachInst machInst,
484 OpClass __opClass, IntRegIndex _dest, IntRegIndex _gp,
485 IntRegIndex _base, IntRegIndex _offset, bool _offsetIs32,
486 bool _offsetIsSigned, bool _offsetIsScaled, int _elemIndex,
482 int _numElems)
487 int _numElems, bool _firstFault)
483 : %(base_class)s(mnem, machInst, %(op_class)s),
484 dest(_dest), gp(_gp), base(_base), offset(_offset),
485 offsetIs32(_offsetIs32), offsetIsSigned(_offsetIsSigned),
486 offsetIsScaled(_offsetIsScaled), elemIndex(_elemIndex),
488 : %(base_class)s(mnem, machInst, %(op_class)s),
489 dest(_dest), gp(_gp), base(_base), offset(_offset),
490 offsetIs32(_offsetIs32), offsetIsSigned(_offsetIsSigned),
491 offsetIsScaled(_offsetIsScaled), elemIndex(_elemIndex),
487 numElems(_numElems),
492 numElems(_numElems), firstFault(_firstFault),
488 memAccessFlags(ArmISA::TLB::AllowUnaligned |
489 ArmISA::TLB::MustBeOne)
490 {
491 %(constructor)s;
492 if (_opClass == MemReadOp && elemIndex == 0) {
493 // The first micro-op is responsible for pinning the
493 memAccessFlags(ArmISA::TLB::AllowUnaligned |
494 ArmISA::TLB::MustBeOne)
495 {
496 %(constructor)s;
497 if (_opClass == MemReadOp && elemIndex == 0) {
498 // The first micro-op is responsible for pinning the
494 // destination register
495 _destRegIdx[0].setNumPinnedWrites(numElems - 1);
499 // destination and the fault status registers
500 assert(_numDestRegs == 2);
501 _destRegIdx[0].setNumPinnedWrites(numElems - 1);
502 _destRegIdx[1].setNumPinnedWrites(numElems - 1);
496 }
497 }
498
499 Fault execute(ExecContext *, Trace::InstRecord *) const;
500 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
501 Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;
502
503 virtual void
504 annotateFault(ArmFault *fault)
505 {
506 %(fa_code)s
507 }
508
509 std::string
510 generateDisassembly(Addr pc, const SymbolTable *symtab) const
511 {
512 // TODO: add suffix to transfer and base registers
513 std::stringstream ss;
514 printMnemonic(ss, "", false);
515 ccprintf(ss, "{");
516 printVecReg(ss, dest, true);
517 ccprintf(ss, "}, ");
518 printVecPredReg(ss, gp);
519 if (_opClass == MemReadOp) {
520 ccprintf(ss, "/z");
521 }
522 ccprintf(ss, ", [");
523 printIntReg(ss, base);
524 ccprintf(ss, ", ");
525 printVecReg(ss, offset, true);
526 ccprintf(ss, "] (uop elem %d tfer)", elemIndex);
527 return ss.str();
528 }
529 };
530}};
531
532def template SveGatherLoadMicroopExecute {{
533 %(tpl_header)s
534 Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
535 Trace::InstRecord *traceData) const
536 {
537 Addr EA;
538 Fault fault = NoFault;
539 bool aarch64 M5_VAR_USED = true;
540
541 %(op_decl)s;
542 %(op_rd)s;
543 %(ea_code)s;
544
503 }
504 }
505
506 Fault execute(ExecContext *, Trace::InstRecord *) const;
507 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
508 Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;
509
510 virtual void
511 annotateFault(ArmFault *fault)
512 {
513 %(fa_code)s
514 }
515
516 std::string
517 generateDisassembly(Addr pc, const SymbolTable *symtab) const
518 {
519 // TODO: add suffix to transfer and base registers
520 std::stringstream ss;
521 printMnemonic(ss, "", false);
522 ccprintf(ss, "{");
523 printVecReg(ss, dest, true);
524 ccprintf(ss, "}, ");
525 printVecPredReg(ss, gp);
526 if (_opClass == MemReadOp) {
527 ccprintf(ss, "/z");
528 }
529 ccprintf(ss, ", [");
530 printIntReg(ss, base);
531 ccprintf(ss, ", ");
532 printVecReg(ss, offset, true);
533 ccprintf(ss, "] (uop elem %d tfer)", elemIndex);
534 return ss.str();
535 }
536 };
537}};
538
539def template SveGatherLoadMicroopExecute {{
540 %(tpl_header)s
541 Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
542 Trace::InstRecord *traceData) const
543 {
544 Addr EA;
545 Fault fault = NoFault;
546 bool aarch64 M5_VAR_USED = true;
547
548 %(op_decl)s;
549 %(op_rd)s;
550 %(ea_code)s;
551
545 MemElemType memData;
552 MemElemType memData = 0;
546
553
554 int index = elemIndex;
547 if (%(pred_check_code)s) {
548 fault = readMemAtomic(xc, traceData, EA, memData,
549 this->memAccessFlags);
550 }
551
552 if (fault == NoFault) {
555 if (%(pred_check_code)s) {
556 fault = readMemAtomic(xc, traceData, EA, memData,
557 this->memAccessFlags);
558 }
559
560 if (fault == NoFault) {
561 %(fault_status_reset_code)s;
553 %(memacc_code)s;
554 %(op_wb)s;
562 %(memacc_code)s;
563 %(op_wb)s;
555 }
564 } else {
565 %(fault_status_set_code)s;
566 if (firstFault) {
567 for (index = 0;
568 index < numElems && !(%(pred_check_code)s);
569 index++);
556
570
571 if (index < elemIndex) {
572 fault = NoFault;
573 memData = 0;
574 %(memacc_code)s;
575 %(op_wb)s;
576 }
577 }
578 }
557 return fault;
558 }
559}};
560
561def template SveGatherLoadMicroopInitiateAcc {{
562 %(tpl_header)s
563 Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
564 Trace::InstRecord *traceData) const
565 {
566 Addr EA;
567 Fault fault = NoFault;
568 bool aarch64 M5_VAR_USED = true;
569
570 %(op_src_decl)s;
571 %(op_rd)s;
572 %(ea_code)s;
573
574 MemElemType memData;
575
579 return fault;
580 }
581}};
582
583def template SveGatherLoadMicroopInitiateAcc {{
584 %(tpl_header)s
585 Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
586 Trace::InstRecord *traceData) const
587 {
588 Addr EA;
589 Fault fault = NoFault;
590 bool aarch64 M5_VAR_USED = true;
591
592 %(op_src_decl)s;
593 %(op_rd)s;
594 %(ea_code)s;
595
596 MemElemType memData;
597
598 int index = elemIndex;
576 if (%(pred_check_code)s) {
577 fault = initiateMemRead(xc, traceData, EA, memData,
578 this->memAccessFlags);
599 if (%(pred_check_code)s) {
600 fault = initiateMemRead(xc, traceData, EA, memData,
601 this->memAccessFlags);
602 if (fault != NoFault) {
603 %(fault_status_set_code)s;
604 if (firstFault) {
605 for (index = 0;
606 index < numElems && !(%(pred_check_code)s);
607 index++);
608 if (index < elemIndex) {
609 fault = NoFault;
610 xc->setMemAccPredicate(false);
611 }
612 }
613 } else {
614 %(fault_status_reset_code)s;
615 }
579 } else {
580 xc->setMemAccPredicate(false);
616 } else {
617 xc->setMemAccPredicate(false);
618 %(fault_status_reset_code)s;
581 }
582
583 return fault;
584 }
585}};
586
587def template SveGatherLoadMicroopCompleteAcc {{
588 %(tpl_header)s
589 Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
590 ExecContext *xc, Trace::InstRecord *traceData) const
591 {
619 }
620
621 return fault;
622 }
623}};
624
625def template SveGatherLoadMicroopCompleteAcc {{
626 %(tpl_header)s
627 Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
628 ExecContext *xc, Trace::InstRecord *traceData) const
629 {
592 Fault fault = NoFault;
593 bool aarch64 M5_VAR_USED = true;
594
595 %(op_decl)s;
596 %(op_rd)s;
597
598 MemElemType memData = 0;
630 bool aarch64 M5_VAR_USED = true;
631
632 %(op_decl)s;
633 %(op_rd)s;
634
635 MemElemType memData = 0;
599 if (%(pred_check_code)s) {
636 if (xc->readMemAccPredicate()) {
600 getMem(pkt, memData, traceData);
601 }
602
637 getMem(pkt, memData, traceData);
638 }
639
603 if (fault == NoFault) {
604 %(memacc_code)s;
605 }
640 %(memacc_code)s;
641 %(op_wb)s;
606
642
607 if (fault == NoFault) {
608 %(op_wb)s;
609 }
610
611 return fault;
643 return NoFault;
612 }
613}};
614
615def template SveScatterStoreMicroopExecute {{
616 %(tpl_header)s
617 Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
618 Trace::InstRecord *traceData) const
619 {
620 Addr EA;
621 Fault fault = NoFault;
622 bool aarch64 M5_VAR_USED = true;
623
624 %(op_decl)s;
625 %(op_rd)s;
626 %(ea_code)s;
627
628 MemElemType memData;
629 %(memacc_code)s;
630
644 }
645}};
646
647def template SveScatterStoreMicroopExecute {{
648 %(tpl_header)s
649 Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
650 Trace::InstRecord *traceData) const
651 {
652 Addr EA;
653 Fault fault = NoFault;
654 bool aarch64 M5_VAR_USED = true;
655
656 %(op_decl)s;
657 %(op_rd)s;
658 %(ea_code)s;
659
660 MemElemType memData;
661 %(memacc_code)s;
662
663 int index = elemIndex;
631 if (%(pred_check_code)s) {
632 fault = writeMemAtomic(xc, traceData, memData, EA,
633 this->memAccessFlags, NULL);
634 }
635
636 if (fault == NoFault) {
637 %(op_wb)s;
638 }
639
640 return fault;
641 }
642}};
643
644def template SveScatterStoreMicroopInitiateAcc {{
645 %(tpl_header)s
646 Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
647 Trace::InstRecord *traceData) const
648 {
649 Addr EA;
650 Fault fault = NoFault;
651 bool aarch64 M5_VAR_USED = true;
652
653 %(op_decl)s;
654 %(op_rd)s;
655 %(ea_code)s;
656
657 MemElemType memData;
658 %(memacc_code)s;
659
664 if (%(pred_check_code)s) {
665 fault = writeMemAtomic(xc, traceData, memData, EA,
666 this->memAccessFlags, NULL);
667 }
668
669 if (fault == NoFault) {
670 %(op_wb)s;
671 }
672
673 return fault;
674 }
675}};
676
677def template SveScatterStoreMicroopInitiateAcc {{
678 %(tpl_header)s
679 Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
680 Trace::InstRecord *traceData) const
681 {
682 Addr EA;
683 Fault fault = NoFault;
684 bool aarch64 M5_VAR_USED = true;
685
686 %(op_decl)s;
687 %(op_rd)s;
688 %(ea_code)s;
689
690 MemElemType memData;
691 %(memacc_code)s;
692
693 int index = elemIndex;
660 if (%(pred_check_code)s) {
661 fault = writeMemTiming(xc, traceData, memData, EA,
662 this->memAccessFlags, NULL);
663 } else {
664 xc->setPredicate(false);
665 }
666
667 return fault;
668 }
669}};
670
671def template SveScatterStoreMicroopCompleteAcc {{
672 %(tpl_header)s
673 Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
674 ExecContext *xc, Trace::InstRecord *traceData) const
675 {
676 return NoFault;
677 }
678}};
679
694 if (%(pred_check_code)s) {
695 fault = writeMemTiming(xc, traceData, memData, EA,
696 this->memAccessFlags, NULL);
697 } else {
698 xc->setPredicate(false);
699 }
700
701 return fault;
702 }
703}};
704
705def template SveScatterStoreMicroopCompleteAcc {{
706 %(tpl_header)s
707 Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
708 ExecContext *xc, Trace::InstRecord *traceData) const
709 {
710 return NoFault;
711 }
712}};
713
714def template SveFirstFaultWritebackMicroopDeclare {{
715 %(tpl_header)s
716 class SveFirstFaultWritebackMicroop : public MicroOp
717 {
718 protected:
719 typedef RegElemType TPElem;
720
721 int numElems;
722 StaticInst *macroOp;
723
724 public:
725 SveFirstFaultWritebackMicroop(const char* mnem, ExtMachInst machInst,
726 OpClass __opClass, int _numElems, StaticInst *_macroOp)
727 : MicroOp(mnem, machInst, __opClass),
728 numElems(_numElems), macroOp(_macroOp)
729 {
730 %(constructor)s;
731 }
732
733 Fault execute(ExecContext *, Trace::InstRecord *) const;
734
735 std::string
736 generateDisassembly(Addr pc, const SymbolTable *symtab) const
737 {
738 std::stringstream ss;
739 ccprintf(ss, "%s", macroOp->disassemble(pc, symtab));
740 ccprintf(ss, " (uop%d)", numElems);
741 return ss.str();
742 }
743 };
744}};
745
746def template SveFirstFaultWritebackMicroopExecute {{
747 %(tpl_header)s
748 Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
749 Trace::InstRecord *traceData) const
750 {
751 bool aarch64 M5_VAR_USED = true;
752
753 %(op_decl)s;
754 %(op_rd)s;
755
756 int index, firstFaultIndex;
757 for (index = 0;
758 index < numElems && !%(fault_status_check_code)s;
759 index++);
760 firstFaultIndex = index;
761 for (index = 0; index < numElems; index++) {
762 if (index < firstFaultIndex) {
763 %(first_fault_forward_code)s;
764 } else {
765 %(first_fault_reset_code)s;
766 }
767 }
768 return NoFault;
769 }
770}};
771
680def template SveGatherLoadCpySrcVecMicroopDeclare {{
681 class SveGatherLoadCpySrcVecMicroop : public MicroOp
682 {
683 protected:
684 IntRegIndex op1;
685
686 StaticInst *macroOp;
687
688 public:
689 SveGatherLoadCpySrcVecMicroop(const char* mnem, ExtMachInst machInst,
690 IntRegIndex _op1, StaticInst *_macroOp)
691 : MicroOp(mnem, machInst, SimdAluOp), op1(_op1), macroOp(_macroOp)
692 {
693 %(constructor)s;
694 }
695
696 Fault execute(ExecContext *, Trace::InstRecord *) const;
697
698 std::string
699 generateDisassembly(Addr pc, const SymbolTable *symtab) const
700 {
701 std::stringstream ss;
702 ccprintf(ss, "%s", macroOp->disassemble(pc, symtab));
703 ccprintf(ss, " (uop src vec cpy)");
704 return ss.str();
705 }
706 };
707}};
708
709def template SveGatherLoadCpySrcVecMicroopExecute {{
710 Fault SveGatherLoadCpySrcVecMicroop::execute(ExecContext *xc,
711 Trace::InstRecord *traceData) const
712 {
713 Fault fault = NoFault;
714 %(op_decl)s;
715 %(op_rd)s;
716
717 %(code)s;
718 if (fault == NoFault)
719 {
720 %(op_wb)s;
721 }
722
723 return fault;
724 }
725}};
772def template SveGatherLoadCpySrcVecMicroopDeclare {{
773 class SveGatherLoadCpySrcVecMicroop : public MicroOp
774 {
775 protected:
776 IntRegIndex op1;
777
778 StaticInst *macroOp;
779
780 public:
781 SveGatherLoadCpySrcVecMicroop(const char* mnem, ExtMachInst machInst,
782 IntRegIndex _op1, StaticInst *_macroOp)
783 : MicroOp(mnem, machInst, SimdAluOp), op1(_op1), macroOp(_macroOp)
784 {
785 %(constructor)s;
786 }
787
788 Fault execute(ExecContext *, Trace::InstRecord *) const;
789
790 std::string
791 generateDisassembly(Addr pc, const SymbolTable *symtab) const
792 {
793 std::stringstream ss;
794 ccprintf(ss, "%s", macroOp->disassemble(pc, symtab));
795 ccprintf(ss, " (uop src vec cpy)");
796 return ss.str();
797 }
798 };
799}};
800
801def template SveGatherLoadCpySrcVecMicroopExecute {{
802 Fault SveGatherLoadCpySrcVecMicroop::execute(ExecContext *xc,
803 Trace::InstRecord *traceData) const
804 {
805 Fault fault = NoFault;
806 %(op_decl)s;
807 %(op_rd)s;
808
809 %(code)s;
810 if (fault == NoFault)
811 {
812 %(op_wb)s;
813 }
814
815 return fault;
816 }
817}};