1// Copyright (c) 2017-2018 ARM Limited 2// All rights reserved 3// 4// The license below extends only to copyright in the software and shall 5// not be construed as granting a license to any other intellectual 6// property including but not limited to intellectual property relating 7// to a hardware implementation of the functionality of the software 8// licensed hereunder. You may use the software subject to the license --- 137 unchanged lines hidden (view full) --- 146 147 %(op_decl)s; 148 %(op_rd)s; 149 %(ea_code)s; 150 151 TheISA::VecRegContainer memData; 152 auto memDataView = memData.as<MemElemType>(); 153 |
        // rden_code populates rdEn, the read-enable information that is
        // forwarded to readMem below (derived from the governing
        // predicate -- TODO confirm against the rden_code definition).
        %(rden_code)s;

        fault = xc->readMem(EA, memData.raw_ptr<uint8_t>(), memAccessSize,
                            this->memAccessFlags, rdEn);

        %(fault_code)s;

        // Only merge the loaded data and write back on a fault-free access.
        if (fault == NoFault) {
            %(memacc_code)s;
163 %(op_wb)s; 164 } 165 166 return fault; 167 } 168}}; 169 170def template SveContigLoadInitiateAcc {{ --- 4 unchanged lines hidden (view full) --- 175 Addr EA; 176 Fault fault = NoFault; 177 bool aarch64 M5_VAR_USED = true; 178 unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>( 179 xc->tcBase()); 180 181 %(op_src_decl)s; 182 %(op_rd)s; |
183 %(ea_code)s; 184 |
        // Build rdEn (read enables) for the access initiated below.
        %(rden_code)s;

        // Timing mode: only start the read here; data is consumed later
        // in completeAcc().
        fault = xc->initiateMemRead(EA, memAccessSize, this->memAccessFlags,
                                    rdEn);

        %(fault_code)s;
        return fault;
    }
}};

def template SveContigLoadCompleteAcc {{
    %(tpl_header)s
    // Completion phase of a timing-mode SVE contiguous load: copies the
    // data returned in the packet into the destination vector register(s).
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        bool aarch64 M5_VAR_USED = true;
        // Current SVE vector length, in elements of the register type.
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;

        // Staging buffer, viewed as an array of memory-format elements.
        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<MemElemType>();

        // Copy the payload only if the access was actually performed
        // (i.e. not squashed via the memory-access predicate).
        if (xc->readMemAccPredicate()) {
            memcpy(memData.raw_ptr<uint8_t>(), pkt->getPtr<uint8_t>(),
                   pkt->getSize());
        }

        %(memacc_code)s;
        %(op_wb)s;

        return NoFault;
220 } 221}}; 222 223def template SveContigStoreExecute {{ 224 %(tpl_header)s 225 Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc, 226 Trace::InstRecord *traceData) const 227 { --- 164 unchanged lines hidden (view full) --- 392 393 IntRegIndex dest; 394 IntRegIndex gp; 395 IntRegIndex base; 396 uint64_t imm; 397 398 int elemIndex; 399 int numElems; |
400 bool firstFault; |
401 402 unsigned memAccessFlags; 403 404 public: 405 %(class_name)s(const char* mnem, ExtMachInst machInst, 406 OpClass __opClass, IntRegIndex _dest, IntRegIndex _gp, |
407 IntRegIndex _base, uint64_t _imm, int _elemIndex, int _numElems, 408 bool _firstFault) |
409 : %(base_class)s(mnem, machInst, %(op_class)s), 410 dest(_dest), gp(_gp), base(_base), imm(_imm), 411 elemIndex(_elemIndex), numElems(_numElems), |
412 firstFault(_firstFault), |
413 memAccessFlags(ArmISA::TLB::AllowUnaligned | 414 ArmISA::TLB::MustBeOne) 415 { 416 %(constructor)s; 417 if (_opClass == MemReadOp && elemIndex == 0) { 418 // The first micro-op is responsible for pinning the |
419 // destination and the fault status registers 420 assert(_numDestRegs == 2); 421 _destRegIdx[0].setNumPinnedWrites(numElems - 1); 422 _destRegIdx[1].setNumPinnedWrites(numElems - 1); |
423 } 424 } 425 426 Fault execute(ExecContext *, Trace::InstRecord *) const; 427 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const; 428 Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const; 429 430 virtual void --- 39 unchanged lines hidden (view full) --- 470 IntRegIndex offset; 471 472 bool offsetIs32; 473 bool offsetIsSigned; 474 bool offsetIsScaled; 475 476 int elemIndex; 477 int numElems; |
478 bool firstFault; |
479 480 unsigned memAccessFlags; 481 482 public: 483 %(class_name)s(const char* mnem, ExtMachInst machInst, 484 OpClass __opClass, IntRegIndex _dest, IntRegIndex _gp, 485 IntRegIndex _base, IntRegIndex _offset, bool _offsetIs32, 486 bool _offsetIsSigned, bool _offsetIsScaled, int _elemIndex, |
487 int _numElems, bool _firstFault) |
488 : %(base_class)s(mnem, machInst, %(op_class)s), 489 dest(_dest), gp(_gp), base(_base), offset(_offset), 490 offsetIs32(_offsetIs32), offsetIsSigned(_offsetIsSigned), 491 offsetIsScaled(_offsetIsScaled), elemIndex(_elemIndex), |
492 numElems(_numElems), firstFault(_firstFault), |
493 memAccessFlags(ArmISA::TLB::AllowUnaligned | 494 ArmISA::TLB::MustBeOne) 495 { 496 %(constructor)s; 497 if (_opClass == MemReadOp && elemIndex == 0) { 498 // The first micro-op is responsible for pinning the |
499 // destination and the fault status registers 500 assert(_numDestRegs == 2); 501 _destRegIdx[0].setNumPinnedWrites(numElems - 1); 502 _destRegIdx[1].setNumPinnedWrites(numElems - 1); |
503 } 504 } 505 506 Fault execute(ExecContext *, Trace::InstRecord *) const; 507 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const; 508 Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const; 509 510 virtual void --- 33 unchanged lines hidden (view full) --- 544 Addr EA; 545 Fault fault = NoFault; 546 bool aarch64 M5_VAR_USED = true; 547 548 %(op_decl)s; 549 %(op_rd)s; 550 %(ea_code)s; 551 |
        // Value loaded for this element; stays zero if the access faults
        // or the element is inactive.
        MemElemType memData = 0;

        int index = elemIndex;
        // Perform the read only if this element is active under the
        // governing predicate.
        if (%(pred_check_code)s) {
            fault = readMemAtomic(xc, traceData, EA, memData,
                                  this->memAccessFlags);
        }

        if (fault == NoFault) {
            %(fault_status_reset_code)s;
            %(memacc_code)s;
            %(op_wb)s;
        } else {
            // Record the per-element fault status.
            %(fault_status_set_code)s;
            if (firstFault) {
                // First-fault load: scan for the first active element.
                // A fault on any element other than the first active one
                // is suppressed and the element is written back as zero.
                for (index = 0;
                     index < numElems && !(%(pred_check_code)s);
                     index++);

                if (index < elemIndex) {
                    fault = NoFault;
                    memData = 0;
                    %(memacc_code)s;
                    %(op_wb)s;
                }
            }
        }
        return fault;
    }
}};

def template SveGatherLoadMicroopInitiateAcc {{
    %(tpl_header)s
    // Timing-mode initiation of a single gather-load element access.
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData;

        int index = elemIndex;
        if (%(pred_check_code)s) {
            // Element is active: start the read.
            fault = initiateMemRead(xc, traceData, EA, memData,
                                    this->memAccessFlags);
            if (fault != NoFault) {
                %(fault_status_set_code)s;
                if (firstFault) {
                    // First-fault load: locate the first active element.
                    // If this micro-op is not that element, the fault is
                    // suppressed and the access squashed instead.
                    for (index = 0;
                         index < numElems && !(%(pred_check_code)s);
                         index++);
                    if (index < elemIndex) {
                        fault = NoFault;
                        xc->setMemAccPredicate(false);
                    }
                }
            } else {
                %(fault_status_reset_code)s;
            }
        } else {
            // Inactive element: no memory access is performed.
            xc->setMemAccPredicate(false);
            %(fault_status_reset_code)s;
619 } 620 621 return fault; 622 } 623}}; 624 625def template SveGatherLoadMicroopCompleteAcc {{ 626 %(tpl_header)s 627 Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt, 628 ExecContext *xc, Trace::InstRecord *traceData) const 629 { |
        bool aarch64 M5_VAR_USED = true;

        %(op_decl)s;
        %(op_rd)s;

        // Element value; remains zero if the access was squashed.
        MemElemType memData = 0;
        if (xc->readMemAccPredicate()) {
            // Access completed: extract the loaded value from the packet.
            getMem(pkt, memData, traceData);
        }

        %(memacc_code)s;
        %(op_wb)s;

        return NoFault;
644 } 645}}; 646 647def template SveScatterStoreMicroopExecute {{ 648 %(tpl_header)s 649 Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc, 650 Trace::InstRecord *traceData) const 651 { 652 Addr EA; 653 Fault fault = NoFault; 654 bool aarch64 M5_VAR_USED = true; 655 656 %(op_decl)s; 657 %(op_rd)s; 658 %(ea_code)s; 659 660 MemElemType memData; 661 %(memacc_code)s; 662 |
663 int index = elemIndex; |
664 if (%(pred_check_code)s) { 665 fault = writeMemAtomic(xc, traceData, memData, EA, 666 this->memAccessFlags, NULL); 667 } 668 669 if (fault == NoFault) { 670 %(op_wb)s; 671 } --- 13 unchanged lines hidden (view full) --- 685 686 %(op_decl)s; 687 %(op_rd)s; 688 %(ea_code)s; 689 690 MemElemType memData; 691 %(memacc_code)s; 692 |
693 int index = elemIndex; |
694 if (%(pred_check_code)s) { 695 fault = writeMemTiming(xc, traceData, memData, EA, 696 this->memAccessFlags, NULL); 697 } else { 698 xc->setPredicate(false); 699 } 700 701 return fault; --- 4 unchanged lines hidden (view full) --- 706 %(tpl_header)s 707 Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt, 708 ExecContext *xc, Trace::InstRecord *traceData) const 709 { 710 return NoFault; 711 } 712}}; 713 |
def template SveFirstFaultWritebackMicroopDeclare {{
    %(tpl_header)s
    // Writeback micro-op appended to first-fault gather loads: it
    // consolidates the per-element fault status produced by the
    // preceding load micro-ops (see the Execute template).
    class SveFirstFaultWritebackMicroop : public MicroOp
    {
      protected:
        typedef RegElemType TPElem;

        // Number of elements (one load micro-op each) in the macro-op.
        int numElems;
        // Parent macro-op, kept only for disassembly.
        StaticInst *macroOp;

      public:
        SveFirstFaultWritebackMicroop(const char* mnem, ExtMachInst machInst,
            OpClass __opClass, int _numElems, StaticInst *_macroOp)
            : MicroOp(mnem, machInst, __opClass),
              numElems(_numElems), macroOp(_macroOp)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;

        // Disassembles as the parent macro-op plus a micro-op marker.
        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const
        {
            std::stringstream ss;
            ccprintf(ss, "%s", macroOp->disassemble(pc, symtab));
            ccprintf(ss, " (uop%d)", numElems);
            return ss.str();
        }
    };
}};

def template SveFirstFaultWritebackMicroopExecute {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        bool aarch64 M5_VAR_USED = true;

        %(op_decl)s;
        %(op_rd)s;

        int index, firstFaultIndex;
        // Scan for the first element whose fault status check fires
        // (firstFaultIndex == numElems if none did).
        for (index = 0;
             index < numElems && !%(fault_status_check_code)s;
             index++);
        firstFaultIndex = index;
        // Elements before the first fault forward their status; the
        // faulting element and all subsequent ones are reset.
        for (index = 0; index < numElems; index++) {
            if (index < firstFaultIndex) {
                %(first_fault_forward_code)s;
            } else {
                %(first_fault_reset_code)s;
            }
        }
        return NoFault;
    }
}};

772def template SveGatherLoadCpySrcVecMicroopDeclare {{ 773 class SveGatherLoadCpySrcVecMicroop : public MicroOp 774 { 775 protected: 776 IntRegIndex op1; 777 778 StaticInst *macroOp; 779 --- 38 unchanged lines hidden --- |