mem64.isa (12236:126ac9da6050 → 12359:8fb4630c444f)
// -*- mode:c++ -*-

// Copyright (c) 2011-2014, 2017 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder. You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Gabe Black

let {{
    SPAlignmentCheckCode = '''
        if (baseIsSP && bits(XBase, 3, 0) &&
            SPAlignmentCheckEnabled(xc->tcBase())) {
            return std::make_shared<SPAlignmentFault>();
        }
    '''
}};
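
// The code above is spliced into the effective-address computation of the
// load/store templates below: when the stack pointer is used as the base
// register, bits(XBase, 3, 0) tests the low four address bits, so an SP
// that is not 16-byte aligned faults whenever SPAlignmentCheckEnabled()
// reports that the check applies.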

def template Load64Execute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
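
// Each access comes in up to three flavors: execute() performs the whole
// access in one shot for atomic-mode CPUs, while initiateAcc() and
// completeAcc() split it into request and response halves for timing-mode
// CPUs. The %(...)s placeholders are filled in by the ISA parser from each
// instruction's operand and code definitions. As a rough illustration only
// (the class name and operand code here are invented, not taken from the
// generated output), Load64Execute for an immediate-offset load might
// expand to something like:
//
//     Fault LDRX64_IMM::execute(ExecContext *xc,
//                               Trace::InstRecord *traceData) const
//     {
//         Addr EA;
//         Fault fault = NoFault;
//
//         uint64_t Mem = 0, XDest = 0;      // %(op_decl)s
//         uint64_t XBase = ...;             // %(op_rd)s
//         EA = XBase + imm;                 // %(ea_code)s
//
//         if (fault == NoFault) {
//             fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
//             XDest = Mem;                  // %(memacc_code)s
//         }
//
//         if (fault == NoFault) {
//             ...                           // %(op_wb)s writes XDest back
//         }
//
//         return fault;
//     }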

def template Store64Execute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA,
                                   memAccessFlags, NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template Store64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                                   NULL);
        }

        return fault;
    }
}};

def template StoreEx64Execute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        uint64_t writeResult = 0;
        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                                   &writeResult);
        }

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
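
// Store-exclusives differ from plain stores in two ways: writeMemAtomic()
// reports whether the exclusive access succeeded through writeResult, and
// %(postacc_code)s then translates that status into the architectural
// result register. In timing mode the status instead arrives with the
// response packet; see StoreEx64CompleteAcc below.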

def template StoreEx64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                                   NULL);
        }

        return fault;
    }
}};

def template Load64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
        }

        return fault;
    }
}};

def template Load64CompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        // ARM instructions will not have a pkt if the predicate is false
        getMem(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template Store64CompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

def template StoreEx64CompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        uint64_t writeResult = pkt->req->getExtraData();
        %(postacc_code)s;

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
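
// In timing mode the exclusive-store status travels back on the response
// packet, which is why completeAcc() reads it from pkt->req->getExtraData()
// instead of a local writeResult as in the atomic-mode template above.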

def template DCStore64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _base,
                       MiscRegIndex _dest, uint64_t _imm);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template DCStore64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst, IntRegIndex _base,
                                   MiscRegIndex _dest, uint64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _base, _dest, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
    }
}};

def template DCStore64Execute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template DCStore64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
        }

        return fault;
    }
}};
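
// The DCStore64 templates presumably back data-cache maintenance operations
// that are encoded as stores, such as DC ZVA. Instead of writing a Mem
// operand they call xc->writeMem() with a NULL data pointer and an op_size
// supplied by the instruction definition, leaving it to the CPU model to
// synthesize the written bytes (for DC ZVA, a zeroed block).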

def template LoadStoreImm64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, int64_t _imm);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreImmU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreImmDU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
                int64_t _imm = 0, bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template StoreImmDEx64Declare {{
    /**
     * Static instruction class for "%(mnemonic)s".
     */
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
                IntRegIndex _base, int64_t _imm = 0);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;
    };
}};
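
// StoreImmDEx64 covers the paired store-exclusive form (STXP-style): it
// takes two data registers (_dest, _dest2) plus a _result register that
// receives the exclusive-access status. Unlike its neighbours, this
// declaration provides no annotateFault() override.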

def template LoadStoreReg64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
                ArmExtendType _type, uint32_t _shiftAmt);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreRegU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
                ArmExtendType _type, uint32_t _shiftAmt,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreRaw64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       IntRegIndex _base);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreEx64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       IntRegIndex _base, IntRegIndex _result);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreLit64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreLitU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreImm64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, (IntRegIndex)_base, _imm)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _base, _imm);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};
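
// When %(use_uops)d is set (writeback addressing modes), the instruction
// splits into two microops: uops[0] performs the memory access and is
// marked delayed-commit, while uops[1], built from %(wb_decl)s, writes the
// updated address back to the base register and closes the sequence via
// setLastMicroop(). The same pattern recurs in the Reg64 and Lit64
// constructors below.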

def template LoadStoreImmU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};

def template LoadStoreImmDU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
            int64_t _imm, bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _dest2, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};

def template StoreImmDEx64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
            IntRegIndex _base, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _result, _dest, _dest2, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
    }
}};

def template LoadStoreReg64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
            ArmExtendType _type, uint32_t _shiftAmt)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _offset, _type, _shiftAmt)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _base, _offset,
                                   _type, _shiftAmt);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};

def template LoadStoreRegU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
            ArmExtendType _type, uint32_t _shiftAmt,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _offset, _type, _shiftAmt)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};

def template LoadStoreRaw64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _dest, _base)
    {
        %(constructor)s;
    }
}};

def template LoadStoreEx64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _result)
    {
        %(constructor)s;
    }
}};

def template LoadStoreLit64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, _imm)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _imm);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};

def template LoadStoreLitU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, int64_t _imm,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};