// mem64.isa — concatenated view of two revisions: 14058:a17b827fbf5e and 14150:1391e94a7b95
1// -*- mode:c++ -*-
2
3// Copyright (c) 2011-2014, 2017, 2019 ARM Limited
4// All rights reserved
5//
6// The license below extends only to copyright in the software and shall
7// not be construed as granting a license to any other intellectual
8// property including but not limited to intellectual property relating
9// to a hardware implementation of the functionality of the software
10// licensed hereunder. You may use the software subject to the license
11// terms below provided that you ensure that this notice is replicated
12// unmodified and in its entirety in all distributions of the software,
13// modified or unmodified, in source code or in binary form.
14//
15// Redistribution and use in source and binary forms, with or without
16// modification, are permitted provided that the following conditions are
17// met: redistributions of source code must retain the above copyright
18// notice, this list of conditions and the following disclaimer;
19// redistributions in binary form must reproduce the above copyright
20// notice, this list of conditions and the following disclaimer in the
21// documentation and/or other materials provided with the distribution;
22// neither the name of the copyright holders nor the names of its
23// contributors may be used to endorse or promote products derived from
24// this software without specific prior written permission.
25//
26// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
27// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
28// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
29// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
30// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
31// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
32// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
33// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
34// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
35// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
36// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
37//
38// Authors: Gabe Black
39
40let {{
    # Reusable C++ snippet spliced into load/store EA code: when the base
    # register is the stack pointer (baseIsSP), the SP must be 16-byte
    # aligned (low 4 bits zero) whenever SP alignment checking is enabled;
    # otherwise the access raises an SP-alignment fault.
41    SPAlignmentCheckCode = '''
42    if (baseIsSP && bits(XBase, 3, 0) &&
43        SPAlignmentCheckEnabled(xc->tcBase())) {
44        return std::make_shared<SPAlignmentFault>();
45    }
46    '''
47}};
48
// Atomic/functional-mode execute() body for integer loads: declare and read
// operands, compute the effective address, perform the read atomically, run
// the per-instruction access code, and write back on success.
49def template Load64Execute {{
50    Fault %(class_name)s::execute(ExecContext *xc,
51                                  Trace::InstRecord *traceData) const
52    {
53        Addr EA;
54        Fault fault = NoFault;
55
56        %(op_decl)s;
57        %(op_rd)s;
58        %(ea_code)s;
59
60        if (fault == NoFault) {
            // %(memacc_code)s runs even if the read just faulted; the
            // subsequent fault check gates the register writeback.
61            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
62            %(memacc_code)s;
63        }
64
65        if (fault == NoFault) {
66            %(op_wb)s;
67        }
68
69        return fault;
70    }
71}};
72
// Variant of Load64Execute for FP/SIMD loads: the access code is only run
// after the read is known fault-free, so FP state is untouched on a fault.
73def template Load64FpExecute {{
74    Fault %(class_name)s::execute(ExecContext *xc,
75                                  Trace::InstRecord *traceData) const
76    {
77        Addr EA;
78        Fault fault = NoFault;
79
80        %(op_decl)s;
81        %(op_rd)s;
82        %(ea_code)s;
83
84        if (fault == NoFault) {
85            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
86        }
87
88        if (fault == NoFault) {
89            %(memacc_code)s;
90            %(op_wb)s;
91        }
92
93        return fault;
94    }
95}};
96
// Atomic/functional-mode execute() body for stores: the access code fills
// Mem from the source operands, then the write is performed atomically
// (no write result requested — last argument NULL).
97def template Store64Execute {{
98    Fault %(class_name)s::execute(ExecContext *xc,
99                                  Trace::InstRecord *traceData) const
100    {
101        Addr EA;
102        Fault fault = NoFault;
103
104        %(op_decl)s;
105        %(op_rd)s;
106        %(ea_code)s;
107
108        if (fault == NoFault) {
109            %(memacc_code)s;
110        }
111
112        if (fault == NoFault) {
113            fault = writeMemAtomic(xc, traceData, Mem, EA,
114                                   memAccessFlags, NULL);
115        }
116
117        if (fault == NoFault) {
118            %(op_wb)s;
119        }
120
121        return fault;
122    }
123}};
124
// Timing-mode initiateAcc() for stores: same EA/access-code sequence as
// Store64Execute, but issues the write with writeMemTiming and performs no
// writeback here (completion is handled separately).
125def template Store64InitiateAcc {{
126    Fault %(class_name)s::initiateAcc(ExecContext *xc,
127                                      Trace::InstRecord *traceData) const
128    {
129        Addr EA;
130        Fault fault = NoFault;
131
132        %(op_decl)s;
133        %(op_rd)s;
134        %(ea_code)s;
135
136        if (fault == NoFault) {
137            %(memacc_code)s;
138        }
139
140        if (fault == NoFault) {
141            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
142                                   NULL);
143        }
144
145        return fault;
146    }
147}};
148
// Atomic-mode execute() for store-exclusive: like Store64Execute, but the
// write requests a result (writeResult) — the exclusive-success status —
// which %(postacc_code)s consumes (e.g. to set the result register) before
// writeback.
149def template StoreEx64Execute {{
150    Fault %(class_name)s::execute(ExecContext *xc,
151                                  Trace::InstRecord *traceData) const
152    {
153        Addr EA;
154        Fault fault = NoFault;
155
156        %(op_decl)s;
157        %(op_rd)s;
158        %(ea_code)s;
159
160        if (fault == NoFault) {
161            %(memacc_code)s;
162        }
163
164        uint64_t writeResult = 0;
165        if (fault == NoFault) {
166            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
167                                   &writeResult);
168        }
169
170        if (fault == NoFault) {
171            %(postacc_code)s;
172        }
173
174        if (fault == NoFault) {
175            %(op_wb)s;
176        }
177
178        return fault;
179    }
180}};
181
// Timing-mode initiateAcc() for store-exclusive: issues the write only; the
// exclusive result is picked up in StoreEx64CompleteAcc from the packet.
182def template StoreEx64InitiateAcc {{
183    Fault %(class_name)s::initiateAcc(ExecContext *xc,
184                                      Trace::InstRecord *traceData) const
185    {
186        Addr EA;
187        Fault fault = NoFault;
188
189        %(op_decl)s;
190        %(op_rd)s;
191        %(ea_code)s;
192
193        if (fault == NoFault) {
194            %(memacc_code)s;
195        }
196
197        if (fault == NoFault) {
198            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
199                                   NULL);
200        }
201
202        return fault;
203    }
204}};
205
// Timing-mode initiateAcc() for loads: only source operands are declared
// (%(op_src_decl)s — destinations are written in completeAcc), the EA is
// computed, and the read is initiated asynchronously.
206def template Load64InitiateAcc {{
207    Fault %(class_name)s::initiateAcc(ExecContext *xc,
208                                      Trace::InstRecord *traceData) const
209    {
210        Addr EA;
211        Fault fault = NoFault;
212
213        %(op_src_decl)s;
214        %(op_rd)s;
215        %(ea_code)s;
216
217        if (fault == NoFault) {
218            fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
219        }
220
221        return fault;
222    }
223}};
224
// Timing-mode completeAcc() for loads: unpack the returned packet into Mem,
// run the per-instruction access code, and write back the destination(s).
225def template Load64CompleteAcc {{
226    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
227                                      Trace::InstRecord *traceData) const
228    {
229        Fault fault = NoFault;
230
231        %(op_decl)s;
232        %(op_rd)s;
233
234        // ARM instructions will not have a pkt if the predicate is false
235        getMem(pkt, Mem, traceData);
236
237        if (fault == NoFault) {
238            %(memacc_code)s;
239        }
240
241        if (fault == NoFault) {
242            %(op_wb)s;
243        }
244
245        return fault;
246    }
247}};
248
// Timing-mode completeAcc() for plain stores: nothing left to do once the
// write has been sent — always succeeds.
249def template Store64CompleteAcc {{
250    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
251                                      Trace::InstRecord *traceData) const
252    {
253        return NoFault;
254    }
255}};
256
// Timing-mode completeAcc() for store-exclusive: the exclusive-success
// status travels back as the request's extra data; %(postacc_code)s turns
// it into the architectural result before writeback.
257def template StoreEx64CompleteAcc {{
258    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
259                                      Trace::InstRecord *traceData) const
260    {
261        Fault fault = NoFault;
262
263        %(op_decl)s;
264        %(op_rd)s;
265
266        uint64_t writeResult = pkt->req->getExtraData();
267        %(postacc_code)s;
268
269        if (fault == NoFault) {
270            %(op_wb)s;
271        }
272
273        return fault;
274    }
275}};
276
// Class declaration for DC (data-cache maintenance) store-style ops:
// base register + misc-reg "destination" + immediate.
277def template DCStore64Declare {{
278    class %(class_name)s : public %(base_class)s
279    {
280      public:
281
282        /// Constructor.
283        %(class_name)s(ExtMachInst machInst, IntRegIndex _base,
284                       MiscRegIndex _dest, uint64_t _imm);
285
286        Fault execute(ExecContext *, Trace::InstRecord *) const override;
287        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
288        Fault completeAcc(PacketPtr, ExecContext *,
289                          Trace::InstRecord *) const override;
290
291        void
292        annotateFault(ArmFault *fault) override
293        {
294            %(fa_code)s
295        }
296    };
297}};
298
// Constructor for DC ops; these are never split into microops.
299def template DCStore64Constructor {{
300    %(class_name)s::%(class_name)s(ExtMachInst machInst, IntRegIndex _base,
301                                   MiscRegIndex _dest, uint64_t _imm)
302        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
303                        _base, _dest, _imm)
304    {
305        %(constructor)s;
306        assert(!%(use_uops)d);
307    }
308}};
309
// Atomic-mode execute() for DC ops: issues writeMem with a NULL data
// pointer and op_size — presumably modelling the maintenance operation as
// an address-only write request; confirm against ExecContext::writeMem's
// handling of a NULL buffer.
310def template DCStore64Execute {{
311    Fault %(class_name)s::execute(ExecContext *xc,
312                                  Trace::InstRecord *traceData) const
313    {
314        Addr EA;
315        Fault fault = NoFault;
316
317        %(op_decl)s;
318        %(op_rd)s;
319        %(ea_code)s;
320
321
322        if (fault == NoFault) {
323            %(memacc_code)s;
324        }
325
326        if (fault == NoFault) {
327            fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
328        }
329
330        if (fault == NoFault) {
331            %(op_wb)s;
332        }
333
334        return fault;
335    }
336}};
337
// Timing-mode initiateAcc() for DC ops: same NULL-data write, no writeback.
338def template DCStore64InitiateAcc {{
339    Fault %(class_name)s::initiateAcc(ExecContext *xc,
340                                      Trace::InstRecord *traceData) const
341    {
342        Addr EA;
343        Fault fault = NoFault;
344
345        %(op_decl)s;
346        %(op_rd)s;
347        %(ea_code)s;
348
349        if (fault == NoFault) {
350            %(memacc_code)s;
351        }
352
353        if (fault == NoFault) {
354            fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
355        }
356
357        return fault;
358    }
359}};
360
361
// Class declaration for register + immediate-offset loads/stores.
362def template LoadStoreImm64Declare {{
363    class %(class_name)s : public %(base_class)s
364    {
365      public:
366
367        /// Constructor.
368        %(class_name)s(ExtMachInst machInst,
369                IntRegIndex _dest, IntRegIndex _base, int64_t _imm);
370
371        Fault execute(ExecContext *, Trace::InstRecord *) const override;
372        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
373        Fault completeAcc(PacketPtr, ExecContext *,
374                          Trace::InstRecord *) const override;
375
376        void
377        annotateFault(ArmFault *fault) override
378        {
379            %(fa_code)s
380        }
381    };
382}};
383
// As LoadStoreImm64Declare, plus flags for the no-allocate-hint /
// exclusive / acquire-release variants.
384def template LoadStoreImmU64Declare {{
385    class %(class_name)s : public %(base_class)s
386    {
387      public:
388
389        /// Constructor.
390        %(class_name)s(ExtMachInst machInst,
391                IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
392                bool noAlloc = false, bool exclusive = false,
393                bool acrel = false);
394
395        Fault execute(ExecContext *, Trace::InstRecord *) const override;
396        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
397        Fault completeAcc(PacketPtr, ExecContext *,
398                          Trace::InstRecord *) const override;
399
400        void
401        annotateFault(ArmFault *fault) override
402        {
403            %(fa_code)s
404        }
405    };
406}};
407
// Dual-destination (register-pair) immediate-offset variant, with the same
// noAlloc/exclusive/acrel flags.
408def template LoadStoreImmDU64Declare {{
409    class %(class_name)s : public %(base_class)s
410    {
411      public:
412
413        /// Constructor.
414        %(class_name)s(ExtMachInst machInst,
415                IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
416                int64_t _imm = 0, bool noAlloc = false, bool exclusive = false,
417                bool acrel = false);
418
419        Fault execute(ExecContext *, Trace::InstRecord *) const override;
420        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
421        Fault completeAcc(PacketPtr, ExecContext *,
422                          Trace::InstRecord *) const override;
423
424        void
425        annotateFault(ArmFault *fault) override
426        {
427            %(fa_code)s
428        }
429    };
430}};
431
// Pair store-exclusive declaration: adds a _result register for the
// exclusive status. Note: no annotateFault override here, unlike the
// other declare templates.
432def template StoreImmDEx64Declare {{
433    /**
434     * Static instruction class for "%(mnemonic)s".
435     */
436    class %(class_name)s : public %(base_class)s
437    {
438      public:
439
440        /// Constructor.
441        %(class_name)s(ExtMachInst machInst,
442                IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
443                IntRegIndex _base, int64_t _imm = 0);
444
445        Fault execute(ExecContext *, Trace::InstRecord *) const override;
446        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
447        Fault completeAcc(PacketPtr, ExecContext *,
448                          Trace::InstRecord *) const override;
449    };
450}};
451
452
// Class declaration for register + (extended, shifted) register-offset
// loads/stores.
453def template LoadStoreReg64Declare {{
454    class %(class_name)s : public %(base_class)s
455    {
456      public:
457
458        /// Constructor.
459        %(class_name)s(ExtMachInst machInst,
460                IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
461                ArmExtendType _type, uint32_t _shiftAmt);
462
463        Fault execute(ExecContext *, Trace::InstRecord *) const override;
464        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
465        Fault completeAcc(PacketPtr, ExecContext *,
466                          Trace::InstRecord *) const override;
467
468        void
469        annotateFault(ArmFault *fault) override
470        {
471            %(fa_code)s
472        }
473    };
474}};
475
// Register-offset variant with noAlloc/exclusive/acrel flags.
476def template LoadStoreRegU64Declare {{
477    class %(class_name)s : public %(base_class)s
478    {
479      public:
480
481        /// Constructor.
482        %(class_name)s(ExtMachInst machInst,
483                IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
484                ArmExtendType _type, uint32_t _shiftAmt,
485                bool noAlloc = false, bool exclusive = false,
486                bool acrel = false);
487
488        Fault execute(ExecContext *, Trace::InstRecord *) const override;
489        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
490        Fault completeAcc(PacketPtr, ExecContext *,
491                          Trace::InstRecord *) const override;
492
493        void
494        annotateFault(ArmFault *fault) override
495        {
496            %(fa_code)s
497        }
498    };
499}};
500
// Raw dest + base form (no offset/immediate), e.g. for exclusive loads.
501def template LoadStoreRaw64Declare {{
502    class %(class_name)s : public %(base_class)s
503    {
504      public:
505
506        /// Constructor.
507        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
508                       IntRegIndex _base);
509
510        Fault execute(ExecContext *, Trace::InstRecord *) const override;
511        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
512        Fault completeAcc(PacketPtr, ExecContext *,
513                          Trace::InstRecord *) const override;
514
515        void
516        annotateFault(ArmFault *fault) override
517        {
518            %(fa_code)s
519        }
520    };
521}};
522
// Exclusive-access form: dest + base + result (exclusive status) register.
523def template LoadStoreEx64Declare {{
524    class %(class_name)s : public %(base_class)s
525    {
526      public:
527
528        /// Constructor.
529        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
530                       IntRegIndex _base, IntRegIndex _result);
531
532        Fault execute(ExecContext *, Trace::InstRecord *) const override;
533        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
534        Fault completeAcc(PacketPtr, ExecContext *,
535                          Trace::InstRecord *) const override;
536
537        void
538        annotateFault(ArmFault *fault) override
539        {
540            %(fa_code)s
541        }
542    };
543}};
544
// PC-relative (literal) load form: dest + signed immediate only.
545def template LoadStoreLit64Declare {{
546    class %(class_name)s : public %(base_class)s
547    {
548      public:
549
550        /// Constructor.
551        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm);
552
553        Fault execute(ExecContext *, Trace::InstRecord *) const override;
554        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
555        Fault completeAcc(PacketPtr, ExecContext *,
556                          Trace::InstRecord *) const override;
557
558        void
559        annotateFault(ArmFault *fault) override
560        {
561            %(fa_code)s
562        }
563    };
564}};
565
// Literal form with noAlloc/exclusive/acrel flags.
566def template LoadStoreLitU64Declare {{
567    class %(class_name)s : public %(base_class)s
568    {
569      public:
570
571        /// Constructor.
572        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm,
573                bool noAlloc = false, bool exclusive = false,
574                bool acrel = false);
575
576        Fault execute(ExecContext *, Trace::InstRecord *) const override;
577        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
578        Fault completeAcc(PacketPtr, ExecContext *,
579                          Trace::InstRecord *) const override;
580
581        void
582        annotateFault(ArmFault *fault) override
583        {
584            %(fa_code)s
585        }
586    };
587}};
588
// Constructor for immediate-offset loads/stores. When the class is
// generated with use_uops set (writeback-addressing variants), the
// instruction is split into an access microop (%(acc_name)s, delayed
// commit) followed by a base-register writeback microop (%(wb_decl)s).
589def template LoadStoreImm64Constructor {{
590    %(class_name)s::%(class_name)s(ExtMachInst machInst,
591            IntRegIndex _dest, IntRegIndex _base, int64_t _imm)
592        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
593                         (IntRegIndex)_dest, (IntRegIndex)_base, _imm)
594    {
595        %(constructor)s;
596#if %(use_uops)d
597        assert(numMicroops >= 2);
598        uops = new StaticInstPtr[numMicroops];
599        uops[0] = new %(acc_name)s(machInst, _dest, _base, _imm);
600        uops[0]->setDelayedCommit();
601        uops[0]->setFirstMicroop();
602        uops[1] = new %(wb_decl)s;
603        uops[1]->setLastMicroop();
604#endif
605    }
606}};
607
// Flagged immediate-offset constructor: never microcoded; records the
// exclusive / acquire-release properties. (noAlloc is accepted but not
// used here — presumably consumed by the decoder/base class; confirm.)
608def template LoadStoreImmU64Constructor {{
609    %(class_name)s::%(class_name)s(ExtMachInst machInst,
610            IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
611            bool noAlloc, bool exclusive, bool acrel)
612        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
613                         _dest, _base, _imm)
614    {
615        %(constructor)s;
616        assert(!%(use_uops)d);
617        setExcAcRel(exclusive, acrel);
618    }
619}};
620
// Dual-destination flagged constructor: never microcoded.
621def template LoadStoreImmDU64Constructor {{
622    %(class_name)s::%(class_name)s(ExtMachInst machInst,
623            IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
624            int64_t _imm, bool noAlloc, bool exclusive, bool acrel)
625        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
626                         _dest, _dest2, _base, _imm)
627    {
628        %(constructor)s;
629        assert(!%(use_uops)d);
630        setExcAcRel(exclusive, acrel);
631    }
632}};
633
// Pair store-exclusive constructor: never microcoded.
634def template StoreImmDEx64Constructor {{
635    %(class_name)s::%(class_name)s(ExtMachInst machInst,
636            IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
637            IntRegIndex _base, int64_t _imm)
638        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
639                         _result, _dest, _dest2, _base, _imm)
640    {
641        %(constructor)s;
642        assert(!%(use_uops)d);
643    }
644}};
645
646
// Constructor for register-offset loads/stores; same optional access +
// writeback microop split as LoadStoreImm64Constructor.
647def template LoadStoreReg64Constructor {{
648    %(class_name)s::%(class_name)s(ExtMachInst machInst,
649            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
650            ArmExtendType _type, uint32_t _shiftAmt)
651        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
652                         _dest, _base, _offset, _type, _shiftAmt)
653    {
654        %(constructor)s;
655#if %(use_uops)d
656        assert(numMicroops >= 2);
657        uops = new StaticInstPtr[numMicroops];
658        uops[0] = new %(acc_name)s(machInst, _dest, _base, _offset,
659                                   _type, _shiftAmt);
660        uops[0]->setDelayedCommit();
661        uops[0]->setFirstMicroop();
662        uops[1] = new %(wb_decl)s;
663        uops[1]->setLastMicroop();
664#endif
665    }
666}};
667
// Flagged register-offset constructor: never microcoded; records
// exclusive / acquire-release properties.
668def template LoadStoreRegU64Constructor {{
669    %(class_name)s::%(class_name)s(ExtMachInst machInst,
670            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
671            ArmExtendType _type, uint32_t _shiftAmt,
672            bool noAlloc, bool exclusive, bool acrel)
673        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
674                         _dest, _base, _offset, _type, _shiftAmt)
675    {
676        %(constructor)s;
677        assert(!%(use_uops)d);
678        setExcAcRel(exclusive, acrel);
679    }
680}};
681
// Raw dest + base constructor (no offset).
682def template LoadStoreRaw64Constructor {{
683    %(class_name)s::%(class_name)s(ExtMachInst machInst,
684            IntRegIndex _dest, IntRegIndex _base)
685        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _dest, _base)
686    {
687        %(constructor)s;
688    }
689}};
690
// Exclusive-access constructor (dest, base, result).
691def template LoadStoreEx64Constructor {{
692    %(class_name)s::%(class_name)s(ExtMachInst machInst,
693            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
694        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
695                         _dest, _base, _result)
696    {
697        %(constructor)s;
698    }
699}};
700
// Literal (PC-relative) constructor; optional microop split as above.
701def template LoadStoreLit64Constructor {{
702    %(class_name)s::%(class_name)s(ExtMachInst machInst,
703            IntRegIndex _dest, int64_t _imm)
704        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
705                         (IntRegIndex)_dest, _imm)
706    {
707        %(constructor)s;
708#if %(use_uops)d
709        assert(numMicroops >= 2);
710        uops = new StaticInstPtr[numMicroops];
711        uops[0] = new %(acc_name)s(machInst, _dest, _imm);
712        uops[0]->setDelayedCommit();
713        uops[0]->setFirstMicroop();
714        uops[1] = new %(wb_decl)s;
715        uops[1]->setLastMicroop();
716#endif
717    }
718}};
719
// Flagged literal constructor: never microcoded; records exclusive /
// acquire-release properties.
720def template LoadStoreLitU64Constructor {{
721    %(class_name)s::%(class_name)s(ExtMachInst machInst,
722            IntRegIndex _dest, int64_t _imm,
723            bool noAlloc, bool exclusive, bool acrel)
724        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
725                         (IntRegIndex)_dest, _imm)
726    {
727        %(constructor)s;
728        assert(!%(use_uops)d);
729        setExcAcRel(exclusive, acrel);
730    }
731}};
1// -*- mode:c++ -*-
2
3// Copyright (c) 2011-2014, 2017, 2019 ARM Limited
4// All rights reserved
5//
6// The license below extends only to copyright in the software and shall
7// not be construed as granting a license to any other intellectual
8// property including but not limited to intellectual property relating
9// to a hardware implementation of the functionality of the software
10// licensed hereunder. You may use the software subject to the license
11// terms below provided that you ensure that this notice is replicated
12// unmodified and in its entirety in all distributions of the software,
13// modified or unmodified, in source code or in binary form.
14//
15// Redistribution and use in source and binary forms, with or without
16// modification, are permitted provided that the following conditions are
17// met: redistributions of source code must retain the above copyright
18// notice, this list of conditions and the following disclaimer;
19// redistributions in binary form must reproduce the above copyright
20// notice, this list of conditions and the following disclaimer in the
21// documentation and/or other materials provided with the distribution;
22// neither the name of the copyright holders nor the names of its
23// contributors may be used to endorse or promote products derived from
24// this software without specific prior written permission.
25//
26// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
27// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
28// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
29// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
30// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
31// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
32// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
33// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
34// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
35// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
36// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
37//
38// Authors: Gabe Black
39
40let {{
41 SPAlignmentCheckCode = '''
42 if (baseIsSP && bits(XBase, 3, 0) &&
43 SPAlignmentCheckEnabled(xc->tcBase())) {
44 return std::make_shared<SPAlignmentFault>();
45 }
46 '''
47}};
48
49def template Load64Execute {{
50 Fault %(class_name)s::execute(ExecContext *xc,
51 Trace::InstRecord *traceData) const
52 {
53 Addr EA;
54 Fault fault = NoFault;
55
56 %(op_decl)s;
57 %(op_rd)s;
58 %(ea_code)s;
59
60 if (fault == NoFault) {
61 fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
62 %(memacc_code)s;
63 }
64
65 if (fault == NoFault) {
66 %(op_wb)s;
67 }
68
69 return fault;
70 }
71}};
72
73def template Load64FpExecute {{
74 Fault %(class_name)s::execute(ExecContext *xc,
75 Trace::InstRecord *traceData) const
76 {
77 Addr EA;
78 Fault fault = NoFault;
79
80 %(op_decl)s;
81 %(op_rd)s;
82 %(ea_code)s;
83
84 if (fault == NoFault) {
85 fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
86 }
87
88 if (fault == NoFault) {
89 %(memacc_code)s;
90 %(op_wb)s;
91 }
92
93 return fault;
94 }
95}};
96
97def template Store64Execute {{
98 Fault %(class_name)s::execute(ExecContext *xc,
99 Trace::InstRecord *traceData) const
100 {
101 Addr EA;
102 Fault fault = NoFault;
103
104 %(op_decl)s;
105 %(op_rd)s;
106 %(ea_code)s;
107
108 if (fault == NoFault) {
109 %(memacc_code)s;
110 }
111
112 if (fault == NoFault) {
113 fault = writeMemAtomic(xc, traceData, Mem, EA,
114 memAccessFlags, NULL);
115 }
116
117 if (fault == NoFault) {
118 %(op_wb)s;
119 }
120
121 return fault;
122 }
123}};
124
125def template Store64InitiateAcc {{
126 Fault %(class_name)s::initiateAcc(ExecContext *xc,
127 Trace::InstRecord *traceData) const
128 {
129 Addr EA;
130 Fault fault = NoFault;
131
132 %(op_decl)s;
133 %(op_rd)s;
134 %(ea_code)s;
135
136 if (fault == NoFault) {
137 %(memacc_code)s;
138 }
139
140 if (fault == NoFault) {
141 fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
142 NULL);
143 }
144
145 return fault;
146 }
147}};
148
149def template StoreEx64Execute {{
150 Fault %(class_name)s::execute(ExecContext *xc,
151 Trace::InstRecord *traceData) const
152 {
153 Addr EA;
154 Fault fault = NoFault;
155
156 %(op_decl)s;
157 %(op_rd)s;
158 %(ea_code)s;
159
160 if (fault == NoFault) {
161 %(memacc_code)s;
162 }
163
164 uint64_t writeResult = 0;
165 if (fault == NoFault) {
166 fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
167 &writeResult);
168 }
169
170 if (fault == NoFault) {
171 %(postacc_code)s;
172 }
173
174 if (fault == NoFault) {
175 %(op_wb)s;
176 }
177
178 return fault;
179 }
180}};
181
182def template StoreEx64InitiateAcc {{
183 Fault %(class_name)s::initiateAcc(ExecContext *xc,
184 Trace::InstRecord *traceData) const
185 {
186 Addr EA;
187 Fault fault = NoFault;
188
189 %(op_decl)s;
190 %(op_rd)s;
191 %(ea_code)s;
192
193 if (fault == NoFault) {
194 %(memacc_code)s;
195 }
196
197 if (fault == NoFault) {
198 fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
199 NULL);
200 }
201
202 return fault;
203 }
204}};
205
206def template Load64InitiateAcc {{
207 Fault %(class_name)s::initiateAcc(ExecContext *xc,
208 Trace::InstRecord *traceData) const
209 {
210 Addr EA;
211 Fault fault = NoFault;
212
213 %(op_src_decl)s;
214 %(op_rd)s;
215 %(ea_code)s;
216
217 if (fault == NoFault) {
218 fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
219 }
220
221 return fault;
222 }
223}};
224
225def template Load64CompleteAcc {{
226 Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
227 Trace::InstRecord *traceData) const
228 {
229 Fault fault = NoFault;
230
231 %(op_decl)s;
232 %(op_rd)s;
233
234 // ARM instructions will not have a pkt if the predicate is false
235 getMem(pkt, Mem, traceData);
236
237 if (fault == NoFault) {
238 %(memacc_code)s;
239 }
240
241 if (fault == NoFault) {
242 %(op_wb)s;
243 }
244
245 return fault;
246 }
247}};
248
249def template Store64CompleteAcc {{
250 Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
251 Trace::InstRecord *traceData) const
252 {
253 return NoFault;
254 }
255}};
256
257def template StoreEx64CompleteAcc {{
258 Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
259 Trace::InstRecord *traceData) const
260 {
261 Fault fault = NoFault;
262
263 %(op_decl)s;
264 %(op_rd)s;
265
266 uint64_t writeResult = pkt->req->getExtraData();
267 %(postacc_code)s;
268
269 if (fault == NoFault) {
270 %(op_wb)s;
271 }
272
273 return fault;
274 }
275}};
276
277def template DCStore64Declare {{
278 class %(class_name)s : public %(base_class)s
279 {
280 public:
281
282 /// Constructor.
283 %(class_name)s(ExtMachInst machInst, IntRegIndex _base,
284 MiscRegIndex _dest, uint64_t _imm);
285
286 Fault execute(ExecContext *, Trace::InstRecord *) const override;
287 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
288 Fault completeAcc(PacketPtr, ExecContext *,
289 Trace::InstRecord *) const override;
290
291 void
292 annotateFault(ArmFault *fault) override
293 {
294 %(fa_code)s
295 }
296 };
297}};
298
299def template DCStore64Constructor {{
300 %(class_name)s::%(class_name)s(ExtMachInst machInst, IntRegIndex _base,
301 MiscRegIndex _dest, uint64_t _imm)
302 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
303 _base, _dest, _imm)
304 {
305 %(constructor)s;
306 assert(!%(use_uops)d);
307 }
308}};
309
310def template DCStore64Execute {{
311 Fault %(class_name)s::execute(ExecContext *xc,
312 Trace::InstRecord *traceData) const
313 {
314 Addr EA;
315 Fault fault = NoFault;
316
317 %(op_decl)s;
318 %(op_rd)s;
319 %(ea_code)s;
320
321
322 if (fault == NoFault) {
323 %(memacc_code)s;
324 }
325
326 if (fault == NoFault) {
327 fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
328 }
329
330 if (fault == NoFault) {
331 %(op_wb)s;
332 }
333
334 return fault;
335 }
336}};
337
338def template DCStore64InitiateAcc {{
339 Fault %(class_name)s::initiateAcc(ExecContext *xc,
340 Trace::InstRecord *traceData) const
341 {
342 Addr EA;
343 Fault fault = NoFault;
344
345 %(op_decl)s;
346 %(op_rd)s;
347 %(ea_code)s;
348
349 if (fault == NoFault) {
350 %(memacc_code)s;
351 }
352
353 if (fault == NoFault) {
354 fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
355 }
356
357 return fault;
358 }
359}};
360
361
362def template LoadStoreImm64Declare {{
363 class %(class_name)s : public %(base_class)s
364 {
365 public:
366
367 /// Constructor.
368 %(class_name)s(ExtMachInst machInst,
369 IntRegIndex _dest, IntRegIndex _base, int64_t _imm);
370
371 Fault execute(ExecContext *, Trace::InstRecord *) const override;
372 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
373 Fault completeAcc(PacketPtr, ExecContext *,
374 Trace::InstRecord *) const override;
375
376 void
377 annotateFault(ArmFault *fault) override
378 {
379 %(fa_code)s
380 }
381 };
382}};
383
384def template LoadStoreImmU64Declare {{
385 class %(class_name)s : public %(base_class)s
386 {
387 public:
388
389 /// Constructor.
390 %(class_name)s(ExtMachInst machInst,
391 IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
392 bool noAlloc = false, bool exclusive = false,
393 bool acrel = false);
394
395 Fault execute(ExecContext *, Trace::InstRecord *) const override;
396 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
397 Fault completeAcc(PacketPtr, ExecContext *,
398 Trace::InstRecord *) const override;
399
400 void
401 annotateFault(ArmFault *fault) override
402 {
403 %(fa_code)s
404 }
405 };
406}};
407
408def template LoadStoreImmDU64Declare {{
409 class %(class_name)s : public %(base_class)s
410 {
411 public:
412
413 /// Constructor.
414 %(class_name)s(ExtMachInst machInst,
415 IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
416 int64_t _imm = 0, bool noAlloc = false, bool exclusive = false,
417 bool acrel = false);
418
419 Fault execute(ExecContext *, Trace::InstRecord *) const override;
420 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
421 Fault completeAcc(PacketPtr, ExecContext *,
422 Trace::InstRecord *) const override;
423
424 void
425 annotateFault(ArmFault *fault) override
426 {
427 %(fa_code)s
428 }
429 };
430}};
431
432def template StoreImmDEx64Declare {{
433 /**
434 * Static instruction class for "%(mnemonic)s".
435 */
436 class %(class_name)s : public %(base_class)s
437 {
438 public:
439
440 /// Constructor.
441 %(class_name)s(ExtMachInst machInst,
442 IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
443 IntRegIndex _base, int64_t _imm = 0);
444
445 Fault execute(ExecContext *, Trace::InstRecord *) const override;
446 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
447 Fault completeAcc(PacketPtr, ExecContext *,
448 Trace::InstRecord *) const override;
449 };
450}};
451
452
453def template LoadStoreReg64Declare {{
454 class %(class_name)s : public %(base_class)s
455 {
456 public:
457
458 /// Constructor.
459 %(class_name)s(ExtMachInst machInst,
460 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
461 ArmExtendType _type, uint32_t _shiftAmt);
462
463 Fault execute(ExecContext *, Trace::InstRecord *) const override;
464 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
465 Fault completeAcc(PacketPtr, ExecContext *,
466 Trace::InstRecord *) const override;
467
468 void
469 annotateFault(ArmFault *fault) override
470 {
471 %(fa_code)s
472 }
473 };
474}};
475
476def template LoadStoreRegU64Declare {{
477 class %(class_name)s : public %(base_class)s
478 {
479 public:
480
481 /// Constructor.
482 %(class_name)s(ExtMachInst machInst,
483 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
484 ArmExtendType _type, uint32_t _shiftAmt,
485 bool noAlloc = false, bool exclusive = false,
486 bool acrel = false);
487
488 Fault execute(ExecContext *, Trace::InstRecord *) const override;
489 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
490 Fault completeAcc(PacketPtr, ExecContext *,
491 Trace::InstRecord *) const override;
492
493 void
494 annotateFault(ArmFault *fault) override
495 {
496 %(fa_code)s
497 }
498 };
499}};
500
501def template LoadStoreRaw64Declare {{
502 class %(class_name)s : public %(base_class)s
503 {
504 public:
505
506 /// Constructor.
507 %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
508 IntRegIndex _base);
509
510 Fault execute(ExecContext *, Trace::InstRecord *) const override;
511 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
512 Fault completeAcc(PacketPtr, ExecContext *,
513 Trace::InstRecord *) const override;
514
515 void
516 annotateFault(ArmFault *fault) override
517 {
518 %(fa_code)s
519 }
520 };
521}};
522
// Declaration for an exclusive load/store: in addition to dest and
// base it carries a _result register (e.g. for the store-exclusive
// status value).
def template LoadStoreEx64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                IntRegIndex _base, IntRegIndex _result);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        // Run the per-instruction fault-annotation code (fa_code).
        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};
544
// Declaration for a PC-relative (literal) load/store: the address is
// formed from a signed immediate rather than a base register.
def template LoadStoreLit64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        // Run the per-instruction fault-annotation code (fa_code).
        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};
565
// Literal load/store declaration with the extra attribute flags
// (noAlloc / exclusive / acrel), all defaulting to false.
def template LoadStoreLitU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        // Run the per-instruction fault-annotation code (fa_code).
        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};
588
// Constructor for immediate-offset loads/stores.  When the template is
// instantiated with use_uops set, the instruction is split into two
// micro-ops: the memory access (acc_name) and a second micro-op built
// from wb_decl (writeback, per its name).
def template LoadStoreImm64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, (IntRegIndex)_base, _imm)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        // First micro-op: the access itself, marked delayed-commit.
        uops[0] = new %(acc_name)s(machInst, _dest, _base, _imm);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        // Second micro-op comes from the wb_decl template parameter.
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};
607
// Constructor for immediate-offset loads/stores carrying the extra
// attribute flags.  This form never uses micro-op splitting.
def template LoadStoreImmU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        // Record the exclusive and acquire/release attributes.
        // NOTE(review): noAlloc is accepted but not consumed anywhere in
        // this constructor — confirm whether it should be applied here.
        setExcAcRel(exclusive, acrel);
    }
}};
620
// Constructor for the paired-destination (two dest registers)
// immediate-offset form with attribute flags; no micro-op splitting.
def template LoadStoreImmDU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
            int64_t _imm, bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _dest2, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        // Record the exclusive and acquire/release attributes.
        // NOTE(review): noAlloc is accepted but unused here — confirm.
        setExcAcRel(exclusive, acrel);
    }
}};
633
// Constructor for the paired store-exclusive form: a _result register
// plus two data (dest) registers, a base register and an immediate.
def template StoreImmDEx64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
            IntRegIndex _base, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _result, _dest, _dest2, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
    }
}};
645
646
// Constructor for register-offset loads/stores.  When use_uops is set
// the instruction is split into an access micro-op (acc_name) and a
// second micro-op built from wb_decl.
def template LoadStoreReg64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
            ArmExtendType _type, uint32_t _shiftAmt)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _offset, _type, _shiftAmt)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        // First micro-op: the access itself, marked delayed-commit.
        uops[0] = new %(acc_name)s(machInst, _dest, _base, _offset,
                                   _type, _shiftAmt);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        // Second micro-op comes from the wb_decl template parameter.
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};
667
// Constructor for register-offset loads/stores with attribute flags;
// no micro-op splitting for this form.
def template LoadStoreRegU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
            ArmExtendType _type, uint32_t _shiftAmt,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _offset, _type, _shiftAmt)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        // Record the exclusive and acquire/release attributes.
        // NOTE(review): noAlloc is accepted but unused here — confirm.
        setExcAcRel(exclusive, acrel);
    }
}};
681
// Constructor for the dest+base (no offset) load/store form.
def template LoadStoreRaw64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _dest, _base)
    {
        %(constructor)s;
    }
}};
690
// Constructor for exclusive loads/stores (dest, base, result).
def template LoadStoreEx64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _result)
    {
        %(constructor)s;
    }
}};
700
// Constructor for literal (PC-relative) loads.  When use_uops is set
// the instruction is split into an access micro-op (acc_name) and a
// second micro-op built from wb_decl.
def template LoadStoreLit64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, _imm)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        // First micro-op: the access itself, marked delayed-commit.
        uops[0] = new %(acc_name)s(machInst, _dest, _imm);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        // Second micro-op comes from the wb_decl template parameter.
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};
719
// Constructor for literal loads with attribute flags; no micro-op
// splitting for this form.
def template LoadStoreLitU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, int64_t _imm,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        // Record the exclusive and acquire/release attributes.
        // NOTE(review): noAlloc is accepted but unused here — confirm.
        setExcAcRel(exclusive, acrel);
    }
}};
732
733// Atomic operations in memory
734
// Single-shot execute() for an atomic memory operation: compute the
// effective address, run the per-instruction amo_code (which must
// define amo_op), then perform the atomic access via amoMemAtomic.
def template AmoOpExecute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        %(usrDecl)s;
        if (fault == NoFault) {
            %(memacc_code)s;
        }

        // amo_code must leave amo_op non-null.
        // NOTE(review): no trailing ';' here, unlike the same expansion
        // in AmoOpInitiateAcc — amo_code presumably supplies its own.
        %(amo_code)s
        assert(amo_op);

        if (fault == NoFault) {
            fault = amoMemAtomic(xc, traceData, Mem, EA,
                                 memAccessFlags, amo_op);
        }

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
770
// Timing-path initiateAcc() for an atomic memory operation: compute
// the effective address, build amo_op via amo_code, and kick off the
// access with initiateMemAMO.  Completion happens in completeAcc().
def template AmoOpInitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        %(usrDecl)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        // amo_code must leave amo_op non-null.
        %(amo_code)s;

        assert(amo_op);
        if (fault == NoFault) {
            fault = initiateMemAMO(xc, traceData, EA, Mem, memAccessFlags,
                                   amo_op);
        }

        return fault;
    }
}};
798
// completeAcc() for an atomic memory operation: pull the loaded value
// out of the response packet, run any post-access code, and write back
// the results.
def template AmoOpCompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        // ARM instructions will not have a pkt if the predicate is false
        getMem(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }

}};
823
// Declaration for an atomic-memory-operation instruction
// (dest, base, result registers).
def template AmoOpDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       IntRegIndex _base, IntRegIndex _result);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        // Run the per-instruction fault-annotation code (fa_code).
        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};
845
846
// Constructor for a plain (non-paired) atomic memory operation.
def template AmoOpConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
        IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _result)
    {
        %(constructor)s;

    }
}};
857
// Declaration for a paired atomic memory operation.  The d2_src /
// r2_src / r2_dst members record the positions (set in the
// constructor) of the extra pair registers inside the instruction's
// source/destination operand arrays.
def template AmoPairOpDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        // Operand-array slot of the second dest register (as a source).
        uint32_t d2_src ;
        // Operand-array slot of the second result register (as a source).
        uint32_t r2_src ;
        // Operand-array slot of the second result register (as a dest).
        uint32_t r2_dst ;
        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       IntRegIndex _base, IntRegIndex _result);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        // Run the per-instruction fault-annotation code (fa_code).
        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};
881
882
// Constructor for a paired atomic memory operation.  Beyond the base
// class setup it manually appends the second register of each pair
// (register index + 1) to the instruction's source and destination
// operand arrays, recording their slots in d2_src/r2_src/r2_dst.
def template AmoPairOpConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _result)
    {
        %(constructor)s;

        // Second register of each pair is the next consecutive index.
        uint32_t d2 = RegId(IntRegClass, dest).index() + 1 ;
        uint32_t r2 = RegId(IntRegClass, result).index() + 1 ;

        // Append the extra operands and remember their positions —
        // presumably read back by the execute templates; confirm
        // against the code that uses d2_src/r2_src/r2_dst.
        // NOTE(review): writes _srcRegIdx/_destRegIdx directly; assumes
        // the arrays have room for the extra entries.
        d2_src = _numSrcRegs ;
        _srcRegIdx[_numSrcRegs++] = RegId(IntRegClass, d2);
        r2_src = _numSrcRegs ;
        _srcRegIdx[_numSrcRegs++] = RegId(IntRegClass, r2);
        r2_dst = _numDestRegs ;
        _destRegIdx[_numDestRegs++] = RegId(IntRegClass, r2);

    }
}};
903
// Declaration for an arithmetic atomic memory operation.  isXZR is set
// by the constructor when the destination register index is 31.
def template AmoArithmeticOpDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        // True when the destination is register index 31 (see ctor).
        bool isXZR ;
        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       IntRegIndex _base, IntRegIndex _result);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        // Run the per-instruction fault-annotation code (fa_code).
        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};
925
// Constructor for an arithmetic atomic memory operation.  If the
// destination register index is 31 (the zero register in AArch64
// integer encodings — TODO confirm intent), the IsReadBarrier flag is
// cleared and isXZR is set.
def template AmoArithmeticOpConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _result)
    {
        %(constructor)s;
        isXZR = false;
        uint32_t r2 = RegId(IntRegClass, dest).index() ;
        // Destination index 31: drop the read-barrier flag.
        if (r2 == 31){
            flags[IsReadBarrier] = false;
            isXZR = true;
        }
    }
}};