// Copyright (c) 2017-2019 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder. You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Giacomo Gabrielli

def template SveMemFillSpillOpDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef uint8_t TPElem;
        typedef uint8_t RegElemType;
        typedef uint8_t MemElemType;

      public:
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, uint64_t _imm)
            : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                _dest, _base, _imm)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template SveContigMemSSOpDeclare {{
    %(tpl_header)s
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef RegElemType TPElem;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _gp, IntRegIndex _base,
                IntRegIndex _offset)
            : %(base_class)s(mnem, machInst, %(op_class)s,
                _dest, _gp, _base, _offset)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template SveContigMemSIOpDeclare {{
    %(tpl_header)s
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef RegElemType TPElem;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _gp, IntRegIndex _base,
                uint64_t _imm)
            : %(base_class)s(mnem, machInst, %(op_class)s,
                _dest, _gp, _base, _imm)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template SveContigMemExecDeclare {{
    template
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *,
        Trace::InstRecord *) const;

    template
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *,
        Trace::InstRecord *) const;

    template
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr,
        ExecContext *, Trace::InstRecord *) const;
}};

def template SveContigLoadExecute {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<MemElemType>();

        %(rden_code)s;

        fault = xc->readMem(EA, memData.raw_ptr<uint8_t>(), memAccessSize,
                            this->memAccessFlags, rdEn);

        %(fault_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveContigLoadInitiateAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        %(rden_code)s;

        fault = xc->initiateMemRead(EA, memAccessSize, this->memAccessFlags,
                                    rdEn);

        %(fault_code)s;

        return fault;
    }
}};

def template SveContigLoadCompleteAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;

        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<MemElemType>();

        if (xc->readMemAccPredicate()) {
            memcpy(memData.raw_ptr<uint8_t>(), pkt->getPtr<uint8_t>(),
                   pkt->getSize());
        }

        %(memacc_code)s;
        %(op_wb)s;

        return NoFault;
    }
}};

def template SveContigStoreExecute {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<MemElemType>();

        %(wren_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(memData.raw_ptr<uint8_t>(), memAccessSize, EA,
                                 this->memAccessFlags, NULL, wrEn);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveContigStoreInitiateAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<MemElemType>();

        %(wren_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(memData.raw_ptr<uint8_t>(), memAccessSize, EA,
                                 this->memAccessFlags, NULL, wrEn);
        }

        return fault;
    }
}};

def template SveContigStoreCompleteAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

def template SveLoadAndReplExecute {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData;

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, memData,
                                  this->memAccessFlags);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveLoadAndReplInitiateAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;

        %(op_src_decl)s;
        %(op_rd)s;

        %(ea_code)s;

        MemElemType memData;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, memData,
                                    this->memAccessFlags);
        }

        return fault;
    }
}};

def template SveLoadAndReplCompleteAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<RegElemType>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;

        MemElemType memData;
        getMem(pkt, memData, traceData);

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveIndexedMemVIMicroopDeclare {{
    %(tpl_header)s
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef RegElemType TPElem;

        IntRegIndex dest;
        IntRegIndex gp;
        IntRegIndex base;
        uint64_t imm;

        int elemIndex;
        int numElems;
        bool firstFault;

        unsigned memAccessFlags;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
                OpClass __opClass, IntRegIndex _dest, IntRegIndex _gp,
                IntRegIndex _base, uint64_t _imm, int _elemIndex,
                int _numElems, bool _firstFault)
            : %(base_class)s(mnem, machInst, %(op_class)s),
              dest(_dest), gp(_gp), base(_base), imm(_imm),
              elemIndex(_elemIndex), numElems(_numElems),
              firstFault(_firstFault),
              memAccessFlags(ArmISA::TLB::AllowUnaligned |
                             ArmISA::TLB::MustBeOne)
        {
            %(constructor)s;
            if (_opClass == MemReadOp && elemIndex == 0) {
                // The first micro-op is responsible for pinning the
                // destination and the fault status registers
                assert(_numDestRegs == 2);
                _destRegIdx[0].setNumPinnedWrites(numElems - 1);
                _destRegIdx[1].setNumPinnedWrites(numElems - 1);
            }
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault)
        {
            %(fa_code)s
        }

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const
        {
            // TODO: add suffix to transfer register
            std::stringstream ss;
            printMnemonic(ss, "", false);
            ccprintf(ss, "{");
            printVecReg(ss, dest, true);
            ccprintf(ss, "}, ");
            printVecPredReg(ss, gp);
            if (_opClass == MemReadOp) {
                ccprintf(ss, "/z");
            }
            ccprintf(ss, ", [");
            printVecReg(ss, base, true);
            if (imm != 0) {
                ccprintf(ss, ", #%d", imm * sizeof(MemElemType));
            }
            ccprintf(ss, "] (uop elem %d tfer)", elemIndex);
            return ss.str();
        }
    };
}};

def template SveIndexedMemSVMicroopDeclare {{
    %(tpl_header)s
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef RegElemType TPElem;

        IntRegIndex dest;
        IntRegIndex gp;
        IntRegIndex base;
        IntRegIndex offset;

        bool offsetIs32;
        bool offsetIsSigned;
        bool offsetIsScaled;

        int elemIndex;
        int numElems;
        bool firstFault;

        unsigned memAccessFlags;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
                OpClass __opClass, IntRegIndex _dest, IntRegIndex _gp,
                IntRegIndex _base, IntRegIndex _offset, bool _offsetIs32,
                bool _offsetIsSigned, bool _offsetIsScaled, int _elemIndex,
                int _numElems, bool _firstFault)
            : %(base_class)s(mnem, machInst, %(op_class)s),
              dest(_dest), gp(_gp), base(_base), offset(_offset),
              offsetIs32(_offsetIs32), offsetIsSigned(_offsetIsSigned),
              offsetIsScaled(_offsetIsScaled), elemIndex(_elemIndex),
              numElems(_numElems), firstFault(_firstFault),
              memAccessFlags(ArmISA::TLB::AllowUnaligned |
                             ArmISA::TLB::MustBeOne)
        {
            %(constructor)s;
            if (_opClass == MemReadOp && elemIndex == 0) {
                // The first micro-op is responsible for pinning the
                // destination and the fault status registers
                assert(_numDestRegs == 2);
                _destRegIdx[0].setNumPinnedWrites(numElems - 1);
                _destRegIdx[1].setNumPinnedWrites(numElems - 1);
            }
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault)
        {
            %(fa_code)s
        }

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const
        {
            // TODO: add suffix to transfer and base registers
            std::stringstream ss;
            printMnemonic(ss, "", false);
            ccprintf(ss, "{");
            printVecReg(ss, dest, true);
            ccprintf(ss, "}, ");
            printVecPredReg(ss, gp);
            if (_opClass == MemReadOp) {
                ccprintf(ss, "/z");
            }
            ccprintf(ss, ", [");
            printIntReg(ss, base);
            ccprintf(ss, ", ");
            printVecReg(ss, offset, true);
            ccprintf(ss, "] (uop elem %d tfer)", elemIndex);
            return ss.str();
        }
    };
}};

def template SveGatherLoadMicroopExecute {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData = 0;

        int index = elemIndex;
        if (%(pred_check_code)s) {
            fault = readMemAtomic(xc, traceData, EA, memData,
                                  this->memAccessFlags);
        }

        if (fault == NoFault) {
            %(fault_status_reset_code)s;
            %(memacc_code)s;
            if (traceData) {
                traceData->setData(memData);
            }
        } else {
            %(fault_status_set_code)s;
            if (firstFault) {
                for (index = 0;
                     index < numElems && !(%(pred_check_code)s);
                     index++);

                if (index < elemIndex) {
                    fault = NoFault;
                    memData = 0;
                    %(memacc_code)s;
                    if (traceData) {
                        traceData->setData(memData);
                    }
                }
            }
        }
        return fault;
    }
}};

def template SveGatherLoadMicroopInitiateAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData;

        int index = elemIndex;
        if (%(pred_check_code)s) {
            fault = initiateMemRead(xc, traceData, EA, memData,
                                    this->memAccessFlags);
            if (fault != NoFault) {
                %(fault_status_set_code)s;
                if (firstFault) {
                    for (index = 0;
                         index < numElems && !(%(pred_check_code)s);
                         index++);
                    if (index < elemIndex) {
                        fault = NoFault;
                        xc->setMemAccPredicate(false);
                    }
                }
            } else {
                %(fault_status_reset_code)s;
            }
        } else {
            xc->setMemAccPredicate(false);
            %(fault_status_reset_code)s;
        }

        return fault;
    }
}};

def template SveGatherLoadMicroopCompleteAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        bool aarch64 M5_VAR_USED = true;

        %(op_decl)s;
        %(op_rd)s;

        MemElemType memData = 0;
        if (xc->readMemAccPredicate()) {
            getMem(pkt, memData, traceData);
        }

        %(memacc_code)s;
        if (traceData) {
            traceData->setData(memData);
        }

        return NoFault;
    }
}};

def template SveScatterStoreMicroopExecute {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData;
        %(memacc_code)s;

        int index = elemIndex;
        if (%(pred_check_code)s) {
            fault = writeMemAtomic(xc, traceData, memData, EA,
                                   this->memAccessFlags, NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveScatterStoreMicroopInitiateAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        MemElemType memData;
        %(memacc_code)s;

        int index = elemIndex;
        if (%(pred_check_code)s) {
            fault = writeMemTiming(xc, traceData, memData, EA,
                                   this->memAccessFlags, NULL);
        } else {
            xc->setPredicate(false);
        }

        return fault;
    }
}};

def template SveScatterStoreMicroopCompleteAcc {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

def template SveFirstFaultWritebackMicroopDeclare {{
    %(tpl_header)s
    class SveFirstFaultWritebackMicroop : public MicroOp
    {
      protected:
        typedef RegElemType TPElem;

        int numElems;
        StaticInst *macroOp;

      public:
        SveFirstFaultWritebackMicroop(const char* mnem, ExtMachInst machInst,
                OpClass __opClass, int _numElems, StaticInst *_macroOp)
            : MicroOp(mnem, machInst, __opClass),
              numElems(_numElems), macroOp(_macroOp)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const
        {
            std::stringstream ss;
            ccprintf(ss, "%s", macroOp->disassemble(pc, symtab));
            ccprintf(ss, " (uop%d)", numElems);
            return ss.str();
        }
    };
}};

def template SveFirstFaultWritebackMicroopExecute {{
    %(tpl_header)s
    Fault %(class_name)s%(tpl_args)s::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        bool aarch64 M5_VAR_USED = true;

        %(op_decl)s;
        %(op_rd)s;

        int index, firstFaultIndex;
        for (index = 0;
             index < numElems && !%(fault_status_check_code)s;
             index++);
        firstFaultIndex = index;
        for (index = 0; index < numElems; index++) {
            if (index < firstFaultIndex) {
                %(first_fault_forward_code)s;
            } else {
                %(first_fault_reset_code)s;
            }
        }
        return NoFault;
    }
}};

def template SveGatherLoadCpySrcVecMicroopDeclare {{
    class SveGatherLoadCpySrcVecMicroop : public MicroOp
    {
      protected:
        IntRegIndex op1;

        StaticInst *macroOp;

      public:
        SveGatherLoadCpySrcVecMicroop(const char* mnem, ExtMachInst machInst,
                IntRegIndex _op1, StaticInst *_macroOp)
            : MicroOp(mnem, machInst, SimdAluOp), op1(_op1), macroOp(_macroOp)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const
        {
            std::stringstream ss;
            ccprintf(ss, "%s", macroOp->disassemble(pc, symtab));
            ccprintf(ss, " (uop src vec cpy)");
            return ss.str();
        }
    };
}};

def template SveGatherLoadCpySrcVecMicroopExecute {{
    Fault SveGatherLoadCpySrcVecMicroop::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        %(op_decl)s;
        %(op_rd)s;

        %(code)s;
        if (fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveStructMemSIMicroopDeclare {{
    template <class _Element>
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef _Element Element;
        typedef _Element TPElem;

        IntRegIndex dest;
        IntRegIndex gp;
        IntRegIndex base;
        int64_t imm;

        uint8_t numRegs;
        int regIndex;

        unsigned memAccessFlags;

        bool baseIsSP;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _gp, IntRegIndex _base,
                int64_t _imm, uint8_t _numRegs, int _regIndex)
            : %(base_class)s(mnem, machInst, %(op_class)s),
              dest(_dest), gp(_gp), base(_base), imm(_imm),
              numRegs(_numRegs), regIndex(_regIndex),
              memAccessFlags(ArmISA::TLB::AllowUnaligned |
                             ArmISA::TLB::MustBeOne)
        {
            %(constructor)s;
            baseIsSP = isSP(_base);
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault)
        {
            %(fa_code)s
        }

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const
        {
            std::stringstream ss;
            printMnemonic(ss, "", false);
            ccprintf(ss, "{");
            switch (dest) {
              case INTRLVREG0:
                ccprintf(ss, "INTRLV0");
                break;
              case INTRLVREG1:
                ccprintf(ss, "INTRLV1");
                break;
              case INTRLVREG2:
                ccprintf(ss, "INTRLV2");
                break;
              case INTRLVREG3:
                ccprintf(ss, "INTRLV3");
                break;
              default:
                printVecReg(ss, dest, true);
                break;
            }
            ccprintf(ss, "}, ");
            printVecPredReg(ss, gp);
            if (_opClass == MemReadOp) {
                ccprintf(ss, "/z");
            }
            ccprintf(ss, ", [");
            printVecReg(ss, base, true);
            if (imm != 0) {
                ccprintf(ss, ", #%d", imm * sizeof(Element));
            }
            ccprintf(ss, "] (uop reg %d tfer)", regIndex);
            return ss.str();
        }
    };
}};

def template SveStructMemExecDeclare {{
    template
    Fault %(class_name)s<%(targs)s>::execute(ExecContext *,
        Trace::InstRecord *) const;

    template
    Fault %(class_name)s<%(targs)s>::initiateAcc(ExecContext *,
        Trace::InstRecord *) const;

    template
    Fault %(class_name)s<%(targs)s>::completeAcc(PacketPtr,
        ExecContext *, Trace::InstRecord *) const;
}};
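
// The templates below provide the execute/initiateAcc/completeAcc bodies for
// SVE structure load/store microops, which transfer one register's worth of
// data per microop and stage it through the interleave registers
// (INTRLVREG0..3) referenced in the declarations above.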

def template SveStructLoadExecute {{
    template <class Element>
    Fault %(class_name)s<Element>::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<Element>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<Element>();

        if (fault == NoFault) {
            fault = xc->readMem(EA, memData.raw_ptr<uint8_t>(), memAccessSize,
                                this->memAccessFlags);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveStructLoadInitiateAcc {{
    template <class Element>
    Fault %(class_name)s<Element>::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<Element>(
            xc->tcBase());

        %(op_src_decl)s;
        %(op_rd)s;

        %(ea_code)s;

        if (fault == NoFault) {
            fault = xc->initiateMemRead(EA, memAccessSize,
                                        this->memAccessFlags);
        }

        return fault;
    }
}};

def template SveStructLoadCompleteAcc {{
    template <class Element>
    Fault %(class_name)s<Element>::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<Element>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;

        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<Element>();

        memcpy(memData.raw_ptr<uint8_t>(), pkt->getPtr<uint8_t>(),
               pkt->getSize());

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveStructStoreExecute {{
    template <class Element>
    Fault %(class_name)s<Element>::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<Element>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<Element>();

        %(wren_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(memData.raw_ptr<uint8_t>(), memAccessSize, EA,
                                 this->memAccessFlags, NULL, wrEn);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template SveStructStoreInitiateAcc {{
    template <class Element>
    Fault %(class_name)s<Element>::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        bool aarch64 M5_VAR_USED = true;
        unsigned eCount = ArmStaticInst::getCurSveVecLen<Element>(
            xc->tcBase());

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        TheISA::VecRegContainer memData;
        auto memDataView = memData.as<Element>();

        %(wren_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(memData.raw_ptr<uint8_t>(), memAccessSize, EA,
                                 this->memAccessFlags, NULL, wrEn);
        }

        return fault;
    }
}};

def template SveStructStoreCompleteAcc {{
    template <class Element>
    Fault %(class_name)s<Element>::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

def template SveStructMemSSMicroopDeclare {{
    template <class _Element>
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef _Element Element;
        typedef _Element TPElem;

        IntRegIndex dest;
        IntRegIndex gp;
        IntRegIndex base;
        IntRegIndex offset;

        uint8_t numRegs;
        int regIndex;

        unsigned memAccessFlags;

        bool baseIsSP;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _gp, IntRegIndex _base,
                IntRegIndex _offset, uint8_t _numRegs, int _regIndex)
            : %(base_class)s(mnem, machInst, %(op_class)s),
              dest(_dest), gp(_gp), base(_base), offset(_offset),
              numRegs(_numRegs), regIndex(_regIndex),
              memAccessFlags(ArmISA::TLB::AllowUnaligned |
                             ArmISA::TLB::MustBeOne)
        {
            %(constructor)s;
            baseIsSP = isSP(_base);
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault)
        {
            %(fa_code)s
        }

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const
        {
            std::stringstream ss;
            printMnemonic(ss, "", false);
            ccprintf(ss, "{");
            switch (dest) {
              case INTRLVREG0:
                ccprintf(ss, "INTRLV0");
                break;
              case INTRLVREG1:
                ccprintf(ss, "INTRLV1");
                break;
              case INTRLVREG2:
                ccprintf(ss, "INTRLV2");
                break;
              case INTRLVREG3:
                ccprintf(ss, "INTRLV3");
                break;
              default:
                printVecReg(ss, dest, true);
                break;
            }
            ccprintf(ss, "}, ");
            printVecPredReg(ss, gp);
            if (_opClass == MemReadOp) {
                ccprintf(ss, "/z");
            }
            ccprintf(ss, ", [");
            printIntReg(ss, base);
            ccprintf(ss, ", ");
            printVecReg(ss, offset, true);
            ccprintf(ss, "] (uop reg %d tfer)", regIndex);
            return ss.str();
        }
    };
}};

def template SveIntrlvMicroopDeclare {{
    template <class _Element>
    class %(class_name)s: public %(base_class)s
    {
      protected:
        typedef _Element Element;
        typedef _Element TPElem;
        IntRegIndex dest;
        IntRegIndex op1;
        uint8_t numRegs;
        int regIndex;

        StaticInst *macroOp;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _op1,
                uint8_t _numRegs, int _regIndex, StaticInst *_macroOp)
            : MicroOp(mnem, machInst, SimdAluOp),
              dest(_dest), op1(_op1), numRegs(_numRegs), regIndex(_regIndex),
              macroOp(_macroOp)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const
        {
            std::stringstream ss;
            ccprintf(ss, "%s", macroOp->disassemble(pc, symtab));
            ccprintf(ss, " (uop interleave)");
            return ss.str();
        }
    };
}};

def template SveDeIntrlvMicroopDeclare {{
    template <class _Element>
    class %(class_name)s : public %(base_class)s
    {
      protected:
        typedef _Element Element;
        typedef _Element TPElem;
        IntRegIndex dest;
        uint8_t numRegs;
        int regIndex;

        StaticInst *macroOp;

      public:
        %(class_name)s(const char* mnem, ExtMachInst machInst,
                IntRegIndex _dest, uint8_t _numRegs, int _regIndex,
                StaticInst *_macroOp)
            : MicroOp(mnem, machInst, SimdAluOp),
              dest(_dest), numRegs(_numRegs), regIndex(_regIndex),
              macroOp(_macroOp)
        {
            %(constructor)s;
        }

        Fault execute(ExecContext *, Trace::InstRecord *) const;

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const
        {
            std::stringstream ss;
            ccprintf(ss, "%s", macroOp->disassemble(pc, symtab));
            ccprintf(ss, " (uop deinterleave)");
            return ss.str();
        }
    };
}};

def template SveIntrlvMicroopExecDeclare {{
    template
    Fault %(class_name)s<%(targs)s>::execute(
        ExecContext *, Trace::InstRecord *) const;
}};

def template SveIntrlvMicroopExecute {{
    template <class Element>
    Fault %(class_name)s<Element>::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        %(op_decl)s;
        %(op_rd)s;

        %(code)s;
        if (fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};