// Copyright (c) 2017-2018 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder. You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Giacomo Gabrielli

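// SveMemFillSpillOpDeclare covers the SVE register fill/spill forms
// (LDR/STR of an entire Z or P register plus an immediate offset). The
// transfer is a raw byte copy, hence the uint8_t element typedefs below.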
def template SveMemFillSpillOpDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef uint8_t TPElem;
        typedef uint8_t RegElemType;
        typedef uint8_t MemElemType;

      public:
        %(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, uint64_t _imm)
            : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                _dest, _base, _imm)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

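// Declaration templates for predicated contiguous loads/stores. The SS
// variant takes a scalar base plus a scalar (register) offset, the SI
// variant a scalar base plus an immediate; both carry a governing
// predicate register (_gp).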
def template SveContigMemSSOpDeclare {{
    %(tpl_header)s
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef RegElemType TPElem;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _gp, IntRegIndex _base,
            IntRegIndex _offset)
            : %(base_class)s(mnem, machInst, %(op_class)s,
                _dest, _gp, _base, _offset)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template SveContigMemSIOpDeclare {{
    %(tpl_header)s
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef RegElemType TPElem;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _gp, IntRegIndex _base,
            uint64_t _imm)
            : %(base_class)s(mnem, machInst, %(op_class)s,
                _dest, _gp, _base, _imm)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template SveContigMemExecDeclare {{
    template
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *,
            Trace::InstRecord *) const;

    template
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *,
            Trace::InstRecord *) const;

    template
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr,
            ExecContext *, Trace::InstRecord *) const;
}};

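// Contiguous, predicated vector load. execute() performs the whole access
// inline (atomic/functional CPU models), while initiateAcc()/completeAcc()
// split it into request issue and data writeback for timing models. rdEn,
// produced by rden_code, holds the per-byte read enables derived from the
// governing predicate.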
def template SveContigLoadExecute {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
            Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<MemElemType>();

        %(rden_code)s;

        fault = xc->readMem(EA, memData.raw_ptr<uint8_t>(), memAccessSize,
                this->memAccessFlags, rdEn);

        %(fault_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveContigLoadInitiateAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
            Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        %(rden_code)s;

        fault = xc->initiateMemRead(EA, memAccessSize, this->memAccessFlags,
                rdEn);

        %(fault_code)s;

        return fault;
    }
}};

def template SveContigLoadCompleteAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
            ExecContext *xc, Trace::InstRecord *traceData) const
    {
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;

        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<MemElemType>();

        if (xc->readMemAccPredicate()) {
            memcpy(memData.raw_ptr<uint8_t>(), pkt->getPtr<uint8_t>(),
                pkt->getSize());
        }

        %(memacc_code)s;
        %(op_wb)s;

        return NoFault;
    }
}};

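// Contiguous, predicated vector store. The wrEn vector produced by
// wren_code carries the per-byte write enables derived from the governing
// predicate; disabled bytes are not written to memory.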
def template SveContigStoreExecute {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
            Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<MemElemType>();

        %(wren_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(memData.raw_ptr<uint8_t>(), memAccessSize, EA,
                    this->memAccessFlags, NULL, wrEn);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveContigStoreInitiateAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
            Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<MemElemType>();

        %(wren_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(memData.raw_ptr<uint8_t>(), memAccessSize, EA,
                    this->memAccessFlags, NULL, wrEn);
        }

        return fault;
    }
}};

def template SveContigStoreCompleteAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
            ExecContext *xc, Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

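// Load-and-replicate: a single element is read from memory and memacc_code
// broadcasts it across the destination vector (LD1RB/LD1RH/LD1RW/LD1RD-style
// forms).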
def template SveLoadAndReplExecute {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
            Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData;

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, memData,
                this->memAccessFlags);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveLoadAndReplInitiateAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
            Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;

        %(op_src_decl)s;
        %(op_rd)s;

        %(ea_code)s;

        MemElemType memData;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, memData,
                this->memAccessFlags);
        }

        return fault;
    }
}};

def template SveLoadAndReplCompleteAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
            ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;

        MemElemType memData;
        getMem(pkt, memData, traceData);

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

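// Gather/scatter accesses are broken into one micro-op per element. The VI
// variant below uses a vector base plus an immediate offset, the SV variant
// a scalar base plus a vector offset; elemIndex/numElems identify the
// element handled by each micro-op.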
def template SveIndexedMemVIMicroopDeclare {{
    %(tpl_header)s
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef RegElemType TPElem;

        IntRegIndex dest;
        IntRegIndex gp;
        IntRegIndex base;
        uint64_t imm;

        int elemIndex;
        int numElems;
        bool firstFault;

        unsigned memAccessFlags;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
            OpClass __opClass, IntRegIndex _dest, IntRegIndex _gp,
            IntRegIndex _base, uint64_t _imm, int _elemIndex, int _numElems,
            bool _firstFault)
            : %(base_class)s(mnem, machInst, %(op_class)s),
            dest(_dest), gp(_gp), base(_base), imm(_imm),
            elemIndex(_elemIndex), numElems(_numElems),
            firstFault(_firstFault),
            memAccessFlags(ArmISA::TLB::AllowUnaligned |
                ArmISA::TLB::MustBeOne)
        {
            %(constructor)s;
            if (_opClass == MemReadOp && elemIndex == 0) {
                // The first micro-op is responsible for pinning the
                // destination and the fault status registers
                assert(_numDestRegs == 2);
                _destRegIdx[0].setNumPinnedWrites(numElems - 1);
                _destRegIdx[1].setNumPinnedWrites(numElems - 1);
            }
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault)
        {
            %(fa_code)s
        }

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const
        {
            // TODO: add suffix to transfer register
            std::stringstream ss;
            printMnemonic(ss, "", false);
            ccprintf(ss, "{");
            printVecReg(ss, dest, true);
            ccprintf(ss, "}, ");
            printVecPredReg(ss, gp);
            if (_opClass == MemReadOp) {
                ccprintf(ss, "/z");
            }
            ccprintf(ss, ", [");
            printVecReg(ss, base, true);
            if (imm != 0) {
                ccprintf(ss, ", #%d", imm * sizeof(MemElemType));
            }
            ccprintf(ss, "] (uop elem %d tfer)", elemIndex);
            return ss.str();
        }
    };
}};

def template SveIndexedMemSVMicroopDeclare {{
    %(tpl_header)s
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef RegElemType TPElem;

        IntRegIndex dest;
        IntRegIndex gp;
        IntRegIndex base;
        IntRegIndex offset;

        bool offsetIs32;
        bool offsetIsSigned;
        bool offsetIsScaled;

        int elemIndex;
        int numElems;
        bool firstFault;

        unsigned memAccessFlags;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
            OpClass __opClass, IntRegIndex _dest, IntRegIndex _gp,
            IntRegIndex _base, IntRegIndex _offset, bool _offsetIs32,
            bool _offsetIsSigned, bool _offsetIsScaled, int _elemIndex,
            int _numElems, bool _firstFault)
            : %(base_class)s(mnem, machInst, %(op_class)s),
            dest(_dest), gp(_gp), base(_base), offset(_offset),
            offsetIs32(_offsetIs32), offsetIsSigned(_offsetIsSigned),
            offsetIsScaled(_offsetIsScaled), elemIndex(_elemIndex),
            numElems(_numElems), firstFault(_firstFault),
            memAccessFlags(ArmISA::TLB::AllowUnaligned |
                ArmISA::TLB::MustBeOne)
        {
            %(constructor)s;
            if (_opClass == MemReadOp && elemIndex == 0) {
                // The first micro-op is responsible for pinning the
                // destination and the fault status registers
                assert(_numDestRegs == 2);
                _destRegIdx[0].setNumPinnedWrites(numElems - 1);
                _destRegIdx[1].setNumPinnedWrites(numElems - 1);
            }
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault)
        {
            %(fa_code)s
        }

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const
        {
            // TODO: add suffix to transfer and base registers
            std::stringstream ss;
            printMnemonic(ss, "", false);
            ccprintf(ss, "{");
            printVecReg(ss, dest, true);
            ccprintf(ss, "}, ");
            printVecPredReg(ss, gp);
            if (_opClass == MemReadOp) {
                ccprintf(ss, "/z");
            }
            ccprintf(ss, ", [");
            printIntReg(ss, base);
            ccprintf(ss, ", ");
            printVecReg(ss, offset, true);
            ccprintf(ss, "] (uop elem %d tfer)", elemIndex);
            return ss.str();
        }
    };
}};

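// Per-element gather load micro-op. On a fault the per-element fault status
// is recorded; for first-fault (LDFF1-style) forms the fault is suppressed
// and zero is written instead whenever an earlier element in the vector is
// already active, leaving the first-fault writeback micro-op to trim the
// predicate.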
def template SveGatherLoadMicroopExecute {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
            Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData = 0;

        int index = elemIndex;
        if (%(pred_check_code)s) {
            fault = readMemAtomic(xc, traceData, EA, memData,
                this->memAccessFlags);
        }

        if (fault == NoFault) {
            %(fault_status_reset_code)s;
            %(memacc_code)s;
            %(op_wb)s;
        } else {
            %(fault_status_set_code)s;
            if (firstFault) {
                for (index = 0;
                     index < numElems && !(%(pred_check_code)s);
                     index++);

                if (index < elemIndex) {
                    fault = NoFault;
                    memData = 0;
                    %(memacc_code)s;
                    %(op_wb)s;
                }
            }
        }
        return fault;
    }
}};

def template SveGatherLoadMicroopInitiateAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
            Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData;

        int index = elemIndex;
        if (%(pred_check_code)s) {
            fault = initiateMemRead(xc, traceData, EA, memData,
                this->memAccessFlags);
            if (fault != NoFault) {
                %(fault_status_set_code)s;
                if (firstFault) {
                    for (index = 0;
                         index < numElems && !(%(pred_check_code)s);
                         index++);
                    if (index < elemIndex) {
                        fault = NoFault;
                        xc->setMemAccPredicate(false);
                    }
                }
            } else {
                %(fault_status_reset_code)s;
            }
        } else {
            xc->setMemAccPredicate(false);
            %(fault_status_reset_code)s;
        }

        return fault;
    }
}};

def template SveGatherLoadMicroopCompleteAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
            ExecContext *xc, Trace::InstRecord *traceData) const
    {
        bool aarch64 M5_VAR_USED = true;

        %(op_decl)s;
        %(op_rd)s;

        MemElemType memData = 0;
        if (xc->readMemAccPredicate()) {
            getMem(pkt, memData, traceData);
        }

        %(memacc_code)s;
        %(op_wb)s;

        return NoFault;
    }
}};

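// Per-element scatter store micro-op: the element is written only if its
// predicate bit is active; in timing mode an inactive element simply clears
// the predicate so no request is sent.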
def template SveScatterStoreMicroopExecute {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
            Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData;
        %(memacc_code)s;

        int index = elemIndex;
        if (%(pred_check_code)s) {
            fault = writeMemAtomic(xc, traceData, memData, EA,
                this->memAccessFlags, NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveScatterStoreMicroopInitiateAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
            Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData;
        %(memacc_code)s;

        int index = elemIndex;
        if (%(pred_check_code)s) {
            fault = writeMemTiming(xc, traceData, memData, EA,
                this->memAccessFlags, NULL);
        } else {
            xc->setPredicate(false);
        }

        return fault;
    }
}};

def template SveScatterStoreMicroopCompleteAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
            ExecContext *xc, Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

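// Final micro-op of first-fault gathers: it scans the per-element fault
// status, finds the first faulting element, and updates the first-fault
// predicate so that elements before that point are kept and the rest are
// cleared.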
def template SveFirstFaultWritebackMicroopDeclare {{
    %(tpl_header)s
    class SveFirstFaultWritebackMicroop : public MicroOp
    {
      protected:
        typedef RegElemType TPElem;

        int numElems;
        StaticInst *macroOp;

      public:
        SveFirstFaultWritebackMicroop(const char* mnem, ExtMachInst machInst,
            OpClass __opClass, int _numElems, StaticInst *_macroOp)
            : MicroOp(mnem, machInst, __opClass),
            numElems(_numElems), macroOp(_macroOp)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const
        {
            std::stringstream ss;
            ccprintf(ss, "%s", macroOp->disassemble(pc, symtab));
            ccprintf(ss, " (uop%d)", numElems);
            return ss.str();
        }
    };
}};

def template SveFirstFaultWritebackMicroopExecute {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
            Trace::InstRecord *traceData) const
    {
        bool aarch64 M5_VAR_USED = true;

        %(op_decl)s;
        %(op_rd)s;

        int index, firstFaultIndex;
        for (index = 0;
             index < numElems && !%(fault_status_check_code)s;
             index++);
        firstFaultIndex = index;
        for (index = 0; index < numElems; index++) {
            if (index < firstFaultIndex) {
                %(first_fault_forward_code)s;
            } else {
                %(first_fault_reset_code)s;
            }
        }
        return NoFault;
    }
}};

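// Helper micro-op emitted before the per-element gather micro-ops: it copies
// the gather's source (offset) vector into a scratch register, presumably so
// the element transfers still see the original offsets if the destination
// register overlaps them.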
def template SveGatherLoadCpySrcVecMicroopDeclare {{
    class SveGatherLoadCpySrcVecMicroop : public MicroOp
    {
      protected:
        IntRegIndex op1;

        StaticInst *macroOp;

      public:
        SveGatherLoadCpySrcVecMicroop(const char* mnem, ExtMachInst machInst,
            IntRegIndex _op1, StaticInst *_macroOp)
            : MicroOp(mnem, machInst, SimdAluOp), op1(_op1), macroOp(_macroOp)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const
        {
            std::stringstream ss;
            ccprintf(ss, "%s", macroOp->disassemble(pc, symtab));
            ccprintf(ss, " (uop src vec cpy)");
            return ss.str();
        }
    };
}};

def template SveGatherLoadCpySrcVecMicroopExecute {{
    Fault SveGatherLoadCpySrcVecMicroop::execute(ExecContext *xc,
            Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        %(op_decl)s;
        %(op_rd)s;

        %(code)s;
        if (fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};

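// Micro-ops for structure (multi-register) loads and stores such as the
// LD2/LD3/LD4 and ST2/ST3/ST4 families. Data is staged through the internal
// interleave registers (INTRLVREG0-3); numRegs/regIndex identify the
// register handled by each transfer micro-op.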
def template SveStructMemSIMicroopDeclare {{
    template<class _Element>
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef _Element Element;
        typedef _Element TPElem;

        IntRegIndex dest;
        IntRegIndex gp;
        IntRegIndex base;
        int64_t imm;

        uint8_t numRegs;
        int regIndex;

        unsigned memAccessFlags;

        bool baseIsSP;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _gp, IntRegIndex _base,
            int64_t _imm, uint8_t _numRegs, int _regIndex)
            : %(base_class)s(mnem, machInst, %(op_class)s),
            dest(_dest), gp(_gp), base(_base), imm(_imm),
            numRegs(_numRegs), regIndex(_regIndex),
            memAccessFlags(ArmISA::TLB::AllowUnaligned |
                ArmISA::TLB::MustBeOne)
        {
            %(constructor)s;
            baseIsSP = isSP(_base);
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault)
        {
            %(fa_code)s
        }

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const
        {
            std::stringstream ss;
            printMnemonic(ss, "", false);
            ccprintf(ss, "{");
            switch (dest) {
              case INTRLVREG0:
                ccprintf(ss, "INTRLV0");
                break;
              case INTRLVREG1:
                ccprintf(ss, "INTRLV1");
                break;
              case INTRLVREG2:
                ccprintf(ss, "INTRLV2");
                break;
              case INTRLVREG3:
                ccprintf(ss, "INTRLV3");
                break;
              default:
                printVecReg(ss, dest, true);
                break;
            }
            ccprintf(ss, "}, ");
            printVecPredReg(ss, gp);
            if (_opClass == MemReadOp) {
                ccprintf(ss, "/z");
            }
            ccprintf(ss, ", [");
            printVecReg(ss, base, true);
            if (imm != 0) {
                ccprintf(ss, ", #%d", imm * sizeof(Element));
            }
            ccprintf(ss, "] (uop reg %d tfer)", regIndex);
            return ss.str();
        }
    };
}};

def template SveStructMemExecDeclare {{
    template
    Fault %(class_name)s<%(targs)s>::execute(ExecContext *,
            Trace::InstRecord *) const;

    template
    Fault %(class_name)s<%(targs)s>::initiateAcc(ExecContext *,
            Trace::InstRecord *) const;

    template
    Fault %(class_name)s<%(targs)s>::completeAcc(PacketPtr,
            ExecContext *, Trace::InstRecord *) const;
}};

def template SveStructLoadExecute {{
    template <class Element>
    Fault %(class_name)s<Element>::execute(ExecContext *xc,
            Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<Element>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<Element>();

        if (fault == NoFault) {
            fault = xc->readMem(EA, memData.raw_ptr<uint8_t>(), memAccessSize,
                this->memAccessFlags);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveStructLoadInitiateAcc {{
    template <class Element>
    Fault %(class_name)s<Element>::initiateAcc(ExecContext *xc,
            Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<Element>(
            xc->tcBase());

        %(op_src_decl)s;
        %(op_rd)s;

        %(ea_code)s;

        if (fault == NoFault) {
            fault = xc->initiateMemRead(EA, memAccessSize,
                this->memAccessFlags);
        }

        return fault;
    }
}};

def template SveStructLoadCompleteAcc {{
    template <class Element>
    Fault %(class_name)s<Element>::completeAcc(PacketPtr pkt,
            ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<Element>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;

        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<Element>();

        memcpy(memData.raw_ptr<uint8_t>(), pkt->getPtr<uint8_t>(),
            pkt->getSize());

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveStructStoreExecute {{
    template <class Element>
    Fault %(class_name)s<Element>::execute(ExecContext *xc,
            Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<Element>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<Element>();

        %(wren_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(memData.raw_ptr<uint8_t>(), memAccessSize, EA,
                    this->memAccessFlags, NULL, wrEn);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveStructStoreInitiateAcc {{
    template <class Element>
    Fault %(class_name)s<Element>::initiateAcc(ExecContext *xc,
            Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<Element>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<Element>();

        %(wren_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(memData.raw_ptr<uint8_t>(), memAccessSize, EA,
                    this->memAccessFlags, NULL, wrEn);
        }

        return fault;
    }
}};

def template SveStructStoreCompleteAcc {{
    template <class Element>
    Fault %(class_name)s<Element>::completeAcc(PacketPtr pkt,
            ExecContext *xc, Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

def template SveStructMemSSMicroopDeclare {{
    template <class _Element>
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef _Element Element;
        typedef _Element TPElem;

        IntRegIndex dest;
        IntRegIndex gp;
        IntRegIndex base;
        IntRegIndex offset;

        uint8_t numRegs;
        int regIndex;

        unsigned memAccessFlags;

        bool baseIsSP;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _gp, IntRegIndex _base,
            IntRegIndex _offset, uint8_t _numRegs, int _regIndex)
            : %(base_class)s(mnem, machInst, %(op_class)s),
            dest(_dest), gp(_gp), base(_base), offset(_offset),
            numRegs(_numRegs), regIndex(_regIndex),
            memAccessFlags(ArmISA::TLB::AllowUnaligned |
                ArmISA::TLB::MustBeOne)
        {
            %(constructor)s;
            baseIsSP = isSP(_base);
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault)
        {
            %(fa_code)s
        }

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const
        {
            std::stringstream ss;
            printMnemonic(ss, "", false);
            ccprintf(ss, "{");
            switch (dest) {
              case INTRLVREG0:
                ccprintf(ss, "INTRLV0");
                break;
              case INTRLVREG1:
                ccprintf(ss, "INTRLV1");
                break;
              case INTRLVREG2:
                ccprintf(ss, "INTRLV2");
                break;
              case INTRLVREG3:
                ccprintf(ss, "INTRLV3");
                break;
              default:
                printVecReg(ss, dest, true);
                break;
            }
            ccprintf(ss, "}, ");
            printVecPredReg(ss, gp);
            if (_opClass == MemReadOp) {
                ccprintf(ss, "/z");
            }
            ccprintf(ss, ", [");
            printIntReg(ss, base);
            ccprintf(ss, ", ");
            printVecReg(ss, offset, true);
            ccprintf(ss, "] (uop reg %d tfer)", regIndex);
            return ss.str();
        }
    };
}};

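// Interleave/de-interleave micro-ops shuffle element data between the
// internal interleave registers and the architectural Z registers, packing
// or unpacking the per-structure layout used by the multi-register loads
// and stores above.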
def template SveIntrlvMicroopDeclare {{
    template <class _Element>
    class %(class_name)s: public %(base_class)s
    {
      protected:
        typedef _Element Element;
        typedef _Element TPElem;
        IntRegIndex dest;
        IntRegIndex op1;
        uint8_t numRegs;
        int regIndex;

        StaticInst *macroOp;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _op1,
            uint8_t _numRegs, int _regIndex, StaticInst *_macroOp)
            : MicroOp(mnem, machInst, SimdAluOp),
            dest(_dest), op1(_op1), numRegs(_numRegs), regIndex(_regIndex),
            macroOp(_macroOp)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const
        {
            std::stringstream ss;
            ccprintf(ss, "%s", macroOp->disassemble(pc, symtab));
            ccprintf(ss, " (uop interleave)");
            return ss.str();
        }
    };
}};

def template SveDeIntrlvMicroopDeclare {{
    template <class _Element>
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef _Element Element;
        typedef _Element TPElem;
        IntRegIndex dest;
        uint8_t numRegs;
        int regIndex;

        StaticInst *macroOp;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
            IntRegIndex _dest, uint8_t _numRegs, int _regIndex,
            StaticInst *_macroOp)
            : MicroOp(mnem, machInst, SimdAluOp),
            dest(_dest), numRegs(_numRegs), regIndex(_regIndex),
            macroOp(_macroOp)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const
        {
            std::stringstream ss;
            ccprintf(ss, "%s", macroOp->disassemble(pc, symtab));
            ccprintf(ss, " (uop deinterleave)");
            return ss.str();
        }
    };
}};

def template SveIntrlvMicroopExecDeclare {{
    template
    Fault %(class_name)s<%(targs)s>::execute(
        ExecContext *, Trace::InstRecord *) const;
}};

def template SveIntrlvMicroopExecute {{
    template <class Element>
    Fault %(class_name)s<Element>::execute(ExecContext *xc,
            Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        %(op_decl)s;
        %(op_rd)s;

        %(code)s;
        if (fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};