// sve_mem.isa revision 14091:090449e74135
// Copyright (c) 2017-2018 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder. You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT 28// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 29// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 30// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 31// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 32// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 33// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 34// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 35// 36// Authors: Giacomo Gabrielli 37 38def template SveMemFillSpillOpDeclare {{ 39 class %(class_name)s : public %(base_class)s 40 { 41 protected: 42 typedef uint8_t TPElem; 43 typedef uint8_t RegElemType; 44 typedef uint8_t MemElemType; 45 46 public: 47 %(class_name)s(ExtMachInst machInst, 48 IntRegIndex _dest, IntRegIndex _base, uint64_t _imm) 49 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, 50 _dest, _base, _imm) 51 { 52 %(constructor)s; 53 } 54 55 Fault execute(ExecContext *, Trace::InstRecord *) const; 56 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const; 57 Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const; 58 59 virtual void 60 annotateFault(ArmFault *fault) { 61 %(fa_code)s 62 } 63 }; 64}}; 65 66def template SveContigMemSSOpDeclare {{ 67 %(tpl_header)s 68 class %(class_name)s : public %(base_class)s 69 { 70 protected: 71 typedef RegElemType TPElem; 72 73 public: 74 %(class_name)s(const char* mnem, ExtMachInst machInst, 75 IntRegIndex _dest, IntRegIndex _gp, IntRegIndex _base, 76 IntRegIndex _offset) 77 : %(base_class)s(mnem, machInst, %(op_class)s, 78 _dest, _gp, _base, _offset) 79 { 80 %(constructor)s; 81 } 82 83 Fault execute(ExecContext *, Trace::InstRecord *) const; 84 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const; 85 Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const; 86 87 virtual void 88 annotateFault(ArmFault *fault) { 89 %(fa_code)s 
90 } 91 }; 92}}; 93 94def template SveContigMemSIOpDeclare {{ 95 %(tpl_header)s 96 class %(class_name)s : public %(base_class)s 97 { 98 protected: 99 typedef RegElemType TPElem; 100 101 public: 102 %(class_name)s(const char* mnem, ExtMachInst machInst, 103 IntRegIndex _dest, IntRegIndex _gp, IntRegIndex _base, 104 uint64_t _imm) 105 : %(base_class)s(mnem, machInst, %(op_class)s, 106 _dest, _gp, _base, _imm) 107 { 108 %(constructor)s; 109 } 110 111 Fault execute(ExecContext *, Trace::InstRecord *) const; 112 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const; 113 Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const; 114 115 virtual void 116 annotateFault(ArmFault *fault) { 117 %(fa_code)s 118 } 119 }; 120}}; 121 122def template SveContigMemExecDeclare {{ 123 template 124 Fault %(class_name)s%(tpl_args)s::execute(ExecContext *, 125 Trace::InstRecord *) const; 126 127 template 128 Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *, 129 Trace::InstRecord *) const; 130 131 template 132 Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr, 133 ExecContext *, Trace::InstRecord *) const; 134}}; 135 136def template SveContigLoadExecute {{ 137 %(tpl_header)s 138 Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc, 139 Trace::InstRecord *traceData) const 140 { 141 Addr EA; 142 Fault fault = NoFault; 143 bool aarch64 M5_VAR_USED = true; 144 unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>( 145 xc->tcBase()); 146 147 %(op_decl)s; 148 %(op_rd)s; 149 %(ea_code)s; 150 151 TheISA::VecRegContainer memData; 152 auto memDataView = memData.as<MemElemType>(); 153 154 %(rden_code)s; 155 156 fault = xc->readMem(EA, memData.raw_ptr<uint8_t>(), memAccessSize, 157 this->memAccessFlags, rdEn); 158 159 %(fault_code)s; 160 161 if (fault == NoFault) { 162 %(memacc_code)s; 163 %(op_wb)s; 164 } 165 166 return fault; 167 } 168}}; 169 170def template SveContigLoadInitiateAcc {{ 171 %(tpl_header)s 172 Fault 
%(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc, 173 Trace::InstRecord *traceData) const 174 { 175 Addr EA; 176 Fault fault = NoFault; 177 bool aarch64 M5_VAR_USED = true; 178 unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>( 179 xc->tcBase()); 180 181 %(op_src_decl)s; 182 %(op_rd)s; 183 %(ea_code)s; 184 185 %(rden_code)s; 186 187 fault = xc->initiateMemRead(EA, memAccessSize, this->memAccessFlags, 188 rdEn); 189 190 %(fault_code)s; 191 192 return fault; 193 } 194}}; 195 196def template SveContigLoadCompleteAcc {{ 197 %(tpl_header)s 198 Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt, 199 ExecContext *xc, Trace::InstRecord *traceData) const 200 { 201 bool aarch64 M5_VAR_USED = true; 202 unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>( 203 xc->tcBase()); 204 205 %(op_decl)s; 206 %(op_rd)s; 207 208 TheISA::VecRegContainer memData; 209 auto memDataView = memData.as<MemElemType>(); 210 211 if (xc->readMemAccPredicate()) { 212 memcpy(memData.raw_ptr<uint8_t>(), pkt->getPtr<uint8_t>(), 213 pkt->getSize()); 214 } 215 216 %(memacc_code)s; 217 %(op_wb)s; 218 219 return NoFault; 220 } 221}}; 222 223def template SveContigStoreExecute {{ 224 %(tpl_header)s 225 Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc, 226 Trace::InstRecord *traceData) const 227 { 228 Addr EA; 229 Fault fault = NoFault; 230 bool aarch64 M5_VAR_USED = true; 231 unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>( 232 xc->tcBase()); 233 234 %(op_decl)s; 235 %(op_rd)s; 236 %(ea_code)s; 237 238 TheISA::VecRegContainer memData; 239 auto memDataView = memData.as<MemElemType>(); 240 241 %(wren_code)s; 242 243 if (fault == NoFault) { 244 %(memacc_code)s; 245 } 246 247 if (fault == NoFault) { 248 fault = xc->writeMem(memData.raw_ptr<uint8_t>(), memAccessSize, EA, 249 this->memAccessFlags, NULL, wrEn); 250 } 251 252 if (fault == NoFault) { 253 %(op_wb)s; 254 } 255 256 return fault; 257 } 258}}; 259 260def template SveContigStoreInitiateAcc {{ 261 
%(tpl_header)s 262 Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc, 263 Trace::InstRecord *traceData) const 264 { 265 Addr EA; 266 Fault fault = NoFault; 267 bool aarch64 M5_VAR_USED = true; 268 unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>( 269 xc->tcBase()); 270 271 %(op_decl)s; 272 %(op_rd)s; 273 %(ea_code)s; 274 275 TheISA::VecRegContainer memData; 276 auto memDataView = memData.as<MemElemType>(); 277 278 %(wren_code)s; 279 280 if (fault == NoFault) { 281 %(memacc_code)s; 282 } 283 284 if (fault == NoFault) { 285 fault = xc->writeMem(memData.raw_ptr<uint8_t>(), memAccessSize, EA, 286 this->memAccessFlags, NULL, wrEn); 287 } 288 289 return fault; 290 } 291}}; 292 293def template SveContigStoreCompleteAcc {{ 294 %(tpl_header)s 295 Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt, 296 ExecContext *xc, Trace::InstRecord *traceData) const 297 { 298 return NoFault; 299 } 300}}; 301 302def template SveLoadAndReplExecute {{ 303 %(tpl_header)s 304 Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc, 305 Trace::InstRecord *traceData) const 306 { 307 Addr EA; 308 Fault fault = NoFault; 309 bool aarch64 M5_VAR_USED = true; 310 unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>( 311 xc->tcBase()); 312 313 %(op_decl)s; 314 %(op_rd)s; 315 %(ea_code)s; 316 317 MemElemType memData; 318 319 if (fault == NoFault) { 320 fault = readMemAtomic(xc, traceData, EA, memData, 321 this->memAccessFlags); 322 %(memacc_code)s; 323 } 324 325 if (fault == NoFault) { 326 %(op_wb)s; 327 } 328 329 return fault; 330 } 331}}; 332 333def template SveLoadAndReplInitiateAcc {{ 334 %(tpl_header)s 335 Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc, 336 Trace::InstRecord *traceData) const 337 { 338 Addr EA; 339 Fault fault = NoFault; 340 bool aarch64 M5_VAR_USED = true; 341 342 %(op_src_decl)s; 343 %(op_rd)s; 344 345 %(ea_code)s; 346 347 MemElemType memData; 348 349 if (fault == NoFault) { 350 fault = initiateMemRead(xc, traceData, 
EA, memData, 351 this->memAccessFlags); 352 } 353 354 return fault; 355 } 356}}; 357 358def template SveLoadAndReplCompleteAcc {{ 359 %(tpl_header)s 360 Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt, 361 ExecContext *xc, Trace::InstRecord *traceData) const 362 { 363 Fault fault = NoFault; 364 bool aarch64 M5_VAR_USED = true; 365 unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>( 366 xc->tcBase()); 367 368 %(op_decl)s; 369 %(op_rd)s; 370 371 MemElemType memData; 372 getMem(pkt, memData, traceData); 373 374 if (fault == NoFault) { 375 %(memacc_code)s; 376 } 377 378 if (fault == NoFault) { 379 %(op_wb)s; 380 } 381 382 return fault; 383 } 384}}; 385 386def template SveIndexedMemVIMicroopDeclare {{ 387 %(tpl_header)s 388 class %(class_name)s : public %(base_class)s 389 { 390 protected: 391 typedef RegElemType TPElem; 392 393 IntRegIndex dest; 394 IntRegIndex gp; 395 IntRegIndex base; 396 uint64_t imm; 397 398 int elemIndex; 399 int numElems; 400 bool firstFault; 401 402 unsigned memAccessFlags; 403 404 public: 405 %(class_name)s(const char* mnem, ExtMachInst machInst, 406 OpClass __opClass, IntRegIndex _dest, IntRegIndex _gp, 407 IntRegIndex _base, uint64_t _imm, int _elemIndex, int _numElems, 408 bool _firstFault) 409 : %(base_class)s(mnem, machInst, %(op_class)s), 410 dest(_dest), gp(_gp), base(_base), imm(_imm), 411 elemIndex(_elemIndex), numElems(_numElems), 412 firstFault(_firstFault), 413 memAccessFlags(ArmISA::TLB::AllowUnaligned | 414 ArmISA::TLB::MustBeOne) 415 { 416 %(constructor)s; 417 if (_opClass == MemReadOp && elemIndex == 0) { 418 // The first micro-op is responsible for pinning the 419 // destination and the fault status registers 420 assert(_numDestRegs == 2); 421 _destRegIdx[0].setNumPinnedWrites(numElems - 1); 422 _destRegIdx[1].setNumPinnedWrites(numElems - 1); 423 } 424 } 425 426 Fault execute(ExecContext *, Trace::InstRecord *) const; 427 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const; 428 Fault 
completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const; 429 430 virtual void 431 annotateFault(ArmFault *fault) 432 { 433 %(fa_code)s 434 } 435 436 std::string 437 generateDisassembly(Addr pc, const SymbolTable *symtab) const 438 { 439 // TODO: add suffix to transfer register 440 std::stringstream ss; 441 printMnemonic(ss, "", false); 442 ccprintf(ss, "{"); 443 printVecReg(ss, dest, true); 444 ccprintf(ss, "}, "); 445 printVecPredReg(ss, gp); 446 if (_opClass == MemReadOp) { 447 ccprintf(ss, "/z"); 448 } 449 ccprintf(ss, ", ["); 450 printVecReg(ss, base, true); 451 if (imm != 0) { 452 ccprintf(ss, ", #%d", imm * sizeof(MemElemType)); 453 } 454 ccprintf(ss, "] (uop elem %d tfer)", elemIndex); 455 return ss.str(); 456 } 457 }; 458}}; 459 460def template SveIndexedMemSVMicroopDeclare {{ 461 %(tpl_header)s 462 class %(class_name)s : public %(base_class)s 463 { 464 protected: 465 typedef RegElemType TPElem; 466 467 IntRegIndex dest; 468 IntRegIndex gp; 469 IntRegIndex base; 470 IntRegIndex offset; 471 472 bool offsetIs32; 473 bool offsetIsSigned; 474 bool offsetIsScaled; 475 476 int elemIndex; 477 int numElems; 478 bool firstFault; 479 480 unsigned memAccessFlags; 481 482 public: 483 %(class_name)s(const char* mnem, ExtMachInst machInst, 484 OpClass __opClass, IntRegIndex _dest, IntRegIndex _gp, 485 IntRegIndex _base, IntRegIndex _offset, bool _offsetIs32, 486 bool _offsetIsSigned, bool _offsetIsScaled, int _elemIndex, 487 int _numElems, bool _firstFault) 488 : %(base_class)s(mnem, machInst, %(op_class)s), 489 dest(_dest), gp(_gp), base(_base), offset(_offset), 490 offsetIs32(_offsetIs32), offsetIsSigned(_offsetIsSigned), 491 offsetIsScaled(_offsetIsScaled), elemIndex(_elemIndex), 492 numElems(_numElems), firstFault(_firstFault), 493 memAccessFlags(ArmISA::TLB::AllowUnaligned | 494 ArmISA::TLB::MustBeOne) 495 { 496 %(constructor)s; 497 if (_opClass == MemReadOp && elemIndex == 0) { 498 // The first micro-op is responsible for pinning the 499 // destination and 
the fault status registers 500 assert(_numDestRegs == 2); 501 _destRegIdx[0].setNumPinnedWrites(numElems - 1); 502 _destRegIdx[1].setNumPinnedWrites(numElems - 1); 503 } 504 } 505 506 Fault execute(ExecContext *, Trace::InstRecord *) const; 507 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const; 508 Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const; 509 510 virtual void 511 annotateFault(ArmFault *fault) 512 { 513 %(fa_code)s 514 } 515 516 std::string 517 generateDisassembly(Addr pc, const SymbolTable *symtab) const 518 { 519 // TODO: add suffix to transfer and base registers 520 std::stringstream ss; 521 printMnemonic(ss, "", false); 522 ccprintf(ss, "{"); 523 printVecReg(ss, dest, true); 524 ccprintf(ss, "}, "); 525 printVecPredReg(ss, gp); 526 if (_opClass == MemReadOp) { 527 ccprintf(ss, "/z"); 528 } 529 ccprintf(ss, ", ["); 530 printIntReg(ss, base); 531 ccprintf(ss, ", "); 532 printVecReg(ss, offset, true); 533 ccprintf(ss, "] (uop elem %d tfer)", elemIndex); 534 return ss.str(); 535 } 536 }; 537}}; 538 539def template SveGatherLoadMicroopExecute {{ 540 %(tpl_header)s 541 Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc, 542 Trace::InstRecord *traceData) const 543 { 544 Addr EA; 545 Fault fault = NoFault; 546 bool aarch64 M5_VAR_USED = true; 547 548 %(op_decl)s; 549 %(op_rd)s; 550 %(ea_code)s; 551 552 MemElemType memData = 0; 553 554 int index = elemIndex; 555 if (%(pred_check_code)s) { 556 fault = readMemAtomic(xc, traceData, EA, memData, 557 this->memAccessFlags); 558 } 559 560 if (fault == NoFault) { 561 %(fault_status_reset_code)s; 562 %(memacc_code)s; 563 %(op_wb)s; 564 } else { 565 %(fault_status_set_code)s; 566 if (firstFault) { 567 for (index = 0; 568 index < numElems && !(%(pred_check_code)s); 569 index++); 570 571 if (index < elemIndex) { 572 fault = NoFault; 573 memData = 0; 574 %(memacc_code)s; 575 %(op_wb)s; 576 } 577 } 578 } 579 return fault; 580 } 581}}; 582 583def template SveGatherLoadMicroopInitiateAcc 
{{ 584 %(tpl_header)s 585 Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc, 586 Trace::InstRecord *traceData) const 587 { 588 Addr EA; 589 Fault fault = NoFault; 590 bool aarch64 M5_VAR_USED = true; 591 592 %(op_src_decl)s; 593 %(op_rd)s; 594 %(ea_code)s; 595 596 MemElemType memData; 597 598 int index = elemIndex; 599 if (%(pred_check_code)s) { 600 fault = initiateMemRead(xc, traceData, EA, memData, 601 this->memAccessFlags); 602 if (fault != NoFault) { 603 %(fault_status_set_code)s; 604 if (firstFault) { 605 for (index = 0; 606 index < numElems && !(%(pred_check_code)s); 607 index++); 608 if (index < elemIndex) { 609 fault = NoFault; 610 xc->setMemAccPredicate(false); 611 } 612 } 613 } else { 614 %(fault_status_reset_code)s; 615 } 616 } else { 617 xc->setMemAccPredicate(false); 618 %(fault_status_reset_code)s; 619 } 620 621 return fault; 622 } 623}}; 624 625def template SveGatherLoadMicroopCompleteAcc {{ 626 %(tpl_header)s 627 Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt, 628 ExecContext *xc, Trace::InstRecord *traceData) const 629 { 630 bool aarch64 M5_VAR_USED = true; 631 632 %(op_decl)s; 633 %(op_rd)s; 634 635 MemElemType memData = 0; 636 if (xc->readMemAccPredicate()) { 637 getMem(pkt, memData, traceData); 638 } 639 640 %(memacc_code)s; 641 %(op_wb)s; 642 643 return NoFault; 644 } 645}}; 646 647def template SveScatterStoreMicroopExecute {{ 648 %(tpl_header)s 649 Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc, 650 Trace::InstRecord *traceData) const 651 { 652 Addr EA; 653 Fault fault = NoFault; 654 bool aarch64 M5_VAR_USED = true; 655 656 %(op_decl)s; 657 %(op_rd)s; 658 %(ea_code)s; 659 660 MemElemType memData; 661 %(memacc_code)s; 662 663 int index = elemIndex; 664 if (%(pred_check_code)s) { 665 fault = writeMemAtomic(xc, traceData, memData, EA, 666 this->memAccessFlags, NULL); 667 } 668 669 if (fault == NoFault) { 670 %(op_wb)s; 671 } 672 673 return fault; 674 } 675}}; 676 677def template 
SveScatterStoreMicroopInitiateAcc {{ 678 %(tpl_header)s 679 Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc, 680 Trace::InstRecord *traceData) const 681 { 682 Addr EA; 683 Fault fault = NoFault; 684 bool aarch64 M5_VAR_USED = true; 685 686 %(op_decl)s; 687 %(op_rd)s; 688 %(ea_code)s; 689 690 MemElemType memData; 691 %(memacc_code)s; 692 693 int index = elemIndex; 694 if (%(pred_check_code)s) { 695 fault = writeMemTiming(xc, traceData, memData, EA, 696 this->memAccessFlags, NULL); 697 } else { 698 xc->setPredicate(false); 699 } 700 701 return fault; 702 } 703}}; 704 705def template SveScatterStoreMicroopCompleteAcc {{ 706 %(tpl_header)s 707 Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt, 708 ExecContext *xc, Trace::InstRecord *traceData) const 709 { 710 return NoFault; 711 } 712}}; 713 714def template SveFirstFaultWritebackMicroopDeclare {{ 715 %(tpl_header)s 716 class SveFirstFaultWritebackMicroop : public MicroOp 717 { 718 protected: 719 typedef RegElemType TPElem; 720 721 int numElems; 722 StaticInst *macroOp; 723 724 public: 725 SveFirstFaultWritebackMicroop(const char* mnem, ExtMachInst machInst, 726 OpClass __opClass, int _numElems, StaticInst *_macroOp) 727 : MicroOp(mnem, machInst, __opClass), 728 numElems(_numElems), macroOp(_macroOp) 729 { 730 %(constructor)s; 731 } 732 733 Fault execute(ExecContext *, Trace::InstRecord *) const; 734 735 std::string 736 generateDisassembly(Addr pc, const SymbolTable *symtab) const 737 { 738 std::stringstream ss; 739 ccprintf(ss, "%s", macroOp->disassemble(pc, symtab)); 740 ccprintf(ss, " (uop%d)", numElems); 741 return ss.str(); 742 } 743 }; 744}}; 745 746def template SveFirstFaultWritebackMicroopExecute {{ 747 %(tpl_header)s 748 Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc, 749 Trace::InstRecord *traceData) const 750 { 751 bool aarch64 M5_VAR_USED = true; 752 753 %(op_decl)s; 754 %(op_rd)s; 755 756 int index, firstFaultIndex; 757 for (index = 0; 758 index < numElems && 
!%(fault_status_check_code)s; 759 index++); 760 firstFaultIndex = index; 761 for (index = 0; index < numElems; index++) { 762 if (index < firstFaultIndex) { 763 %(first_fault_forward_code)s; 764 } else { 765 %(first_fault_reset_code)s; 766 } 767 } 768 return NoFault; 769 } 770}}; 771 772def template SveGatherLoadCpySrcVecMicroopDeclare {{ 773 class SveGatherLoadCpySrcVecMicroop : public MicroOp 774 { 775 protected: 776 IntRegIndex op1; 777 778 StaticInst *macroOp; 779 780 public: 781 SveGatherLoadCpySrcVecMicroop(const char* mnem, ExtMachInst machInst, 782 IntRegIndex _op1, StaticInst *_macroOp) 783 : MicroOp(mnem, machInst, SimdAluOp), op1(_op1), macroOp(_macroOp) 784 { 785 %(constructor)s; 786 } 787 788 Fault execute(ExecContext *, Trace::InstRecord *) const; 789 790 std::string 791 generateDisassembly(Addr pc, const SymbolTable *symtab) const 792 { 793 std::stringstream ss; 794 ccprintf(ss, "%s", macroOp->disassemble(pc, symtab)); 795 ccprintf(ss, " (uop src vec cpy)"); 796 return ss.str(); 797 } 798 }; 799}}; 800 801def template SveGatherLoadCpySrcVecMicroopExecute {{ 802 Fault SveGatherLoadCpySrcVecMicroop::execute(ExecContext *xc, 803 Trace::InstRecord *traceData) const 804 { 805 Fault fault = NoFault; 806 %(op_decl)s; 807 %(op_rd)s; 808 809 %(code)s; 810 if (fault == NoFault) 811 { 812 %(op_wb)s; 813 } 814 815 return fault; 816 } 817}}; 818