// Copyright (c) 2017-2018 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder. You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Giacomo Gabrielli

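// Instruction templates for SVE memory accesses: register fill/spill,
// predicated contiguous loads and stores, load-and-replicate, and the
// per-element micro-ops used to implement gather loads and scatter stores.

// Declaration template for the SVE register fill/spill accesses (the LDR/STR
// whole-register forms): a scalar base register plus an immediate offset,
// with the transferred data handled as raw bytes (uint8_t elements).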
def template SveMemFillSpillOpDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef uint8_t TPElem;
        typedef uint8_t RegElemType;
        typedef uint8_t MemElemType;

      public:
        %(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, uint64_t _imm)
            : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                _dest, _base, _imm)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

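// Declaration template for predicated contiguous accesses using
// scalar-plus-scalar addressing (scalar base register plus scalar register
// offset), governed by the predicate register _gp.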
def template SveContigMemSSOpDeclare {{
    %(tpl_header)s
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef RegElemType TPElem;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _gp, IntRegIndex _base,
            IntRegIndex _offset)
            : %(base_class)s(mnem, machInst, %(op_class)s,
                _dest, _gp, _base, _offset)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

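// Declaration template for predicated contiguous accesses using
// scalar-plus-immediate addressing (scalar base register plus an immediate
// offset).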
def template SveContigMemSIOpDeclare {{
    %(tpl_header)s
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef RegElemType TPElem;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _gp, IntRegIndex _base,
            uint64_t _imm)
            : %(base_class)s(mnem, machInst, %(op_class)s,
                _dest, _gp, _base, _imm)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

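// Explicit template instantiations of execute(), initiateAcc() and
// completeAcc() for one element-type specialization of a contiguous memory
// instruction.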
def template SveContigMemExecDeclare {{
    template
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *,
        Trace::InstRecord *) const;

    template
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *,
        Trace::InstRecord *) const;

    template
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr,
        ExecContext *, Trace::InstRecord *) const;
}};

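// Contiguous load templates: execute() performs the whole access in one go
// (atomic/functional memory modes), while initiateAcc()/completeAcc() split
// it into a request and a response half for timing mode. The data is staged
// in a VecRegContainer and moved into the destination by %(memacc_code)s.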
def template SveContigLoadExecute {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<MemElemType>();

        if (fault == NoFault) {
            fault = xc->readMem(EA, memData.raw_ptr<uint8_t>(), memAccessSize,
                this->memAccessFlags);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveContigLoadInitiateAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_src_decl)s;
        %(op_rd)s;

        %(ea_code)s;

        if (fault == NoFault) {
            fault = xc->initiateMemRead(EA, memAccessSize,
                this->memAccessFlags);
        }

        return fault;
    }
}};

def template SveContigLoadCompleteAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;

        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<MemElemType>();

        memcpy(memData.raw_ptr<uint8_t>(), pkt->getPtr<uint8_t>(),
            pkt->getSize());

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

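// Contiguous store templates. %(wren_code)s derives a write-enable mask
// (wrEn) from the governing predicate, which is passed to writeMem() so that
// bytes belonging to inactive elements are left untouched in memory.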
def template SveContigStoreExecute {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<MemElemType>();

        %(wren_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(memData.raw_ptr<uint8_t>(), memAccessSize, EA,
                this->memAccessFlags, NULL, wrEn);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveContigStoreInitiateAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<MemElemType>();

        %(wren_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(memData.raw_ptr<uint8_t>(), memAccessSize, EA,
                this->memAccessFlags, NULL, wrEn);
        }

        return fault;
    }
}};

def template SveContigStoreCompleteAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

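// Load-and-replicate templates (the LD1R family): a single element is read
// from memory and broadcast to all active elements of the destination vector
// by %(memacc_code)s.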
def template SveLoadAndReplExecute {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData;

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, memData,
                this->memAccessFlags);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveLoadAndReplInitiateAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;

        %(op_src_decl)s;
        %(op_rd)s;

        %(ea_code)s;

        MemElemType memData;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, memData,
                this->memAccessFlags);
        }

        return fault;
    }
}};

def template SveLoadAndReplCompleteAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;

        MemElemType memData;
        getMem(pkt, memData, traceData);

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

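// Declaration template for the per-element micro-ops of gather loads and
// scatter stores with vector-plus-immediate addressing (vector of addresses
// in 'base' plus an immediate offset). For a gather load, the first micro-op
// pins the destination register writes so that every element is merged into
// the same physical register.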
def template SveIndexedMemVIMicroopDeclare {{
    %(tpl_header)s
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef RegElemType TPElem;

        IntRegIndex dest;
        IntRegIndex gp;
        IntRegIndex base;
        uint64_t imm;

        int elemIndex;
        int numElems;

        unsigned memAccessFlags;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
            OpClass __opClass, IntRegIndex _dest, IntRegIndex _gp,
            IntRegIndex _base, uint64_t _imm, int _elemIndex, int _numElems)
            : %(base_class)s(mnem, machInst, %(op_class)s),
              dest(_dest), gp(_gp), base(_base), imm(_imm),
              elemIndex(_elemIndex), numElems(_numElems),
              memAccessFlags(ArmISA::TLB::AllowUnaligned |
                             ArmISA::TLB::MustBeOne)
        {
            %(constructor)s;
            if (_opClass == MemReadOp && elemIndex == 0) {
                // The first micro-op is responsible for pinning the
                // destination register
                _destRegIdx[0].setNumPinnedWrites(numElems - 1);
            }
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault)
        {
            %(fa_code)s
        }

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const
        {
            // TODO: add suffix to transfer register
            std::stringstream ss;
            printMnemonic(ss, "", false);
            ccprintf(ss, "{");
            printVecReg(ss, dest, true);
            ccprintf(ss, "}, ");
            printVecPredReg(ss, gp);
            if (_opClass == MemReadOp) {
                ccprintf(ss, "/z");
            }
            ccprintf(ss, ", [");
            printVecReg(ss, base, true);
            if (imm != 0) {
                ccprintf(ss, ", #%d", imm * sizeof(MemElemType));
            }
            ccprintf(ss, "] (uop elem %d tfer)", elemIndex);
            return ss.str();
        }
    };
}};

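// Declaration template for the per-element micro-ops of gather loads and
// scatter stores with scalar-plus-vector addressing (scalar base register
// plus a vector of offsets, which may be 32- or 64-bit, signed or unsigned,
// scaled or unscaled).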
def template SveIndexedMemSVMicroopDeclare {{
    %(tpl_header)s
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef RegElemType TPElem;

        IntRegIndex dest;
        IntRegIndex gp;
        IntRegIndex base;
        IntRegIndex offset;

        bool offsetIs32;
        bool offsetIsSigned;
        bool offsetIsScaled;

        int elemIndex;
        int numElems;

        unsigned memAccessFlags;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
            OpClass __opClass, IntRegIndex _dest, IntRegIndex _gp,
            IntRegIndex _base, IntRegIndex _offset, bool _offsetIs32,
            bool _offsetIsSigned, bool _offsetIsScaled, int _elemIndex,
            int _numElems)
            : %(base_class)s(mnem, machInst, %(op_class)s),
              dest(_dest), gp(_gp), base(_base), offset(_offset),
              offsetIs32(_offsetIs32), offsetIsSigned(_offsetIsSigned),
              offsetIsScaled(_offsetIsScaled), elemIndex(_elemIndex),
              numElems(_numElems),
              memAccessFlags(ArmISA::TLB::AllowUnaligned |
                             ArmISA::TLB::MustBeOne)
        {
            %(constructor)s;
            if (_opClass == MemReadOp && elemIndex == 0) {
                // The first micro-op is responsible for pinning the
                // destination register
                _destRegIdx[0].setNumPinnedWrites(numElems - 1);
            }
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault)
        {
            %(fa_code)s
        }

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const
        {
            // TODO: add suffix to transfer and base registers
            std::stringstream ss;
            printMnemonic(ss, "", false);
            ccprintf(ss, "{");
            printVecReg(ss, dest, true);
            ccprintf(ss, "}, ");
            printVecPredReg(ss, gp);
            if (_opClass == MemReadOp) {
                ccprintf(ss, "/z");
            }
            ccprintf(ss, ", [");
            printIntReg(ss, base);
            ccprintf(ss, ", ");
            printVecReg(ss, offset, true);
            ccprintf(ss, "] (uop elem %d tfer)", elemIndex);
            return ss.str();
        }
    };
}};

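// Gather load micro-op templates: each micro-op transfers one element.
// %(pred_check_code)s tests the governing predicate bit for this element;
// inactive elements skip the memory access (and, in timing mode, mark the
// access as predicated off via setMemAccPredicate(false)).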
def template SveGatherLoadMicroopExecute {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData;

        if (%(pred_check_code)s) {
            fault = readMemAtomic(xc, traceData, EA, memData,
                this->memAccessFlags);
        }

        if (fault == NoFault) {
            %(memacc_code)s;
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveGatherLoadMicroopInitiateAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData;

        if (%(pred_check_code)s) {
            fault = initiateMemRead(xc, traceData, EA, memData,
                this->memAccessFlags);
        } else {
            xc->setMemAccPredicate(false);
        }

        return fault;
    }
}};

def template SveGatherLoadMicroopCompleteAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;

        %(op_decl)s;
        %(op_rd)s;

        MemElemType memData = 0;
        if (%(pred_check_code)s) {
            getMem(pkt, memData, traceData);
        }

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

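// Scatter store micro-op templates: each micro-op writes one element, and
// inactive elements skip the memory access altogether.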
def template SveScatterStoreMicroopExecute {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData;
        %(memacc_code)s;

        if (%(pred_check_code)s) {
            fault = writeMemAtomic(xc, traceData, memData, EA,
                this->memAccessFlags, NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveScatterStoreMicroopInitiateAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData;
        %(memacc_code)s;

        if (%(pred_check_code)s) {
            fault = writeMemTiming(xc, traceData, memData, EA,
                this->memAccessFlags, NULL);
        } else {
            xc->setPredicate(false);
        }

        return fault;
    }
}};

def template SveScatterStoreMicroopCompleteAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

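// Helper micro-op issued at the start of a gather load to copy the source
// (offset) vector; presumably this keeps the per-element micro-ops reading
// the original offsets even when the destination register overlaps the
// offset register.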
def template SveGatherLoadCpySrcVecMicroopDeclare {{
    class SveGatherLoadCpySrcVecMicroop : public MicroOp
    {
      protected:
        IntRegIndex op1;

        StaticInst *macroOp;

      public:
        SveGatherLoadCpySrcVecMicroop(const char* mnem, ExtMachInst machInst,
            IntRegIndex _op1, StaticInst *_macroOp)
            : MicroOp(mnem, machInst, SimdAluOp), op1(_op1), macroOp(_macroOp)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const
        {
            std::stringstream ss;
            ccprintf(ss, "%s", macroOp->disassemble(pc, symtab));
            ccprintf(ss, " (uop src vec cpy)");
            return ss.str();
        }
    };
}};

def template SveGatherLoadCpySrcVecMicroopExecute {{
    Fault SveGatherLoadCpySrcVecMicroop::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        %(op_decl)s;
        %(op_rd)s;

        %(code)s;
        if (fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};