// mem64.isa revision 12616:4b463b4dc098
// -*- mode:c++ -*-

// Copyright (c) 2011-2014, 2017 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder.  You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Gabe Black

let {{
    # Reusable snippet spliced into EA code of SP-relative accesses: when
    # the base register is the stack pointer and it is not 16-byte aligned
    # (low four bits nonzero), raise an SP alignment fault if checking is
    # enabled for the current context.
    SPAlignmentCheckCode = '''
        if (baseIsSP && bits(XBase, 3, 0) &&
            SPAlignmentCheckEnabled(xc->tcBase())) {
            return std::make_shared<SPAlignmentFault>();
        }
    '''
}};

// Atomic-mode execute() for 64-bit loads: compute EA, read memory into
// Mem, run the access code, then write back destination registers.
def template Load64Execute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Atomic-mode execute() for 64-bit stores: compute EA, build the store
// data in Mem, write it to memory, then write back (e.g. base writeback).
def template Store64Execute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA,
                                   memAccessFlags, NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Timing-mode first half of a 64-bit store: issue the write; completion
// is handled by the matching CompleteAcc template.
def template Store64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                                   NULL);
        }

        return fault;
    }
}};

// Atomic-mode execute() for store-exclusive: like Store64Execute but the
// write returns a result (success/failure) consumed by postacc_code
// before register writeback.
def template StoreEx64Execute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        uint64_t writeResult = 0;
        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                                   &writeResult);
        }

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Timing-mode first half of a store-exclusive; the write result is
// picked up later in StoreEx64CompleteAcc via the packet's extra data.
def template StoreEx64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                                   NULL);
        }

        return fault;
    }
}};

// Timing-mode first half of a 64-bit load: compute EA and launch the
// read. Only source operands are declared; results arrive in CompleteAcc.
def template Load64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
        }

        return fault;
    }
}};

// Timing-mode second half of a 64-bit load: unpack the returned packet
// into Mem, run the access code, and write back destinations.
def template Load64CompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        // ARM instructions will not have a pkt if the predicate is false
        getMem(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Plain stores have nothing left to do once the write has been sent.
def template Store64CompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

// Timing-mode second half of a store-exclusive: recover the exclusive
// write result from the request's extra data, then run postacc_code and
// write back the result register.
def template StoreEx64CompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        uint64_t writeResult = pkt->req->getExtraData();
        %(postacc_code)s;

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Class declaration for DC (data cache maintenance) "store" ops, which
// take a misc-reg destination and an immediate.
def template DCStore64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _base,
                       MiscRegIndex _dest, uint64_t _imm);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};

// Constructor for DC store ops; these are never microcoded.
def template DCStore64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst, IntRegIndex _base,
                                   MiscRegIndex _dest, uint64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _base, _dest, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
    }
}};

// Atomic-mode execute() for DC store ops: the write carries no data
// (NULL buffer) — only the address and flags matter.
def template DCStore64Execute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Timing-mode initiateAcc for DC store ops; same NULL-data write.
def template DCStore64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
        }

        return fault;
    }
}};

// Declaration for immediate-offset load/store instructions.
def template LoadStoreImm64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                       IntRegIndex _dest, IntRegIndex _base, int64_t _imm);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};

// Declaration for immediate-offset load/store with user-mode /
// exclusive / acquire-release variants.
def template LoadStoreImmU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                       IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
                       bool noAlloc = false, bool exclusive = false,
                       bool acrel = false);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};

// Declaration for paired (two destination registers) immediate-offset
// load/store with exclusive / acquire-release variants.
def template LoadStoreImmDU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                       IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
                       int64_t _imm = 0, bool noAlloc = false,
                       bool exclusive = false, bool acrel = false);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};

def template StoreImmDEx64Declare {{
    /**
     * Static instruction class for "%(mnemonic)s".
     */
    class %(class_name)s : public %(base_class)s
    {
      public:
        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                       IntRegIndex _result, IntRegIndex _dest,
                       IntRegIndex _dest2, IntRegIndex _base,
                       int64_t _imm = 0);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;
    };
}};

// Declaration for register-offset (extended/shifted index) load/store.
def template LoadStoreReg64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                       IntRegIndex _dest, IntRegIndex _base,
                       IntRegIndex _offset, ArmExtendType _type,
                       uint32_t _shiftAmt);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};

// Register-offset declaration with user-mode / exclusive /
// acquire-release variants.
def template LoadStoreRegU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                       IntRegIndex _dest, IntRegIndex _base,
                       IntRegIndex _offset, ArmExtendType _type,
                       uint32_t _shiftAmt,
                       bool noAlloc = false, bool exclusive = false,
                       bool acrel = false);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};

// Declaration for raw (base-register only, no offset) load/store.
def template LoadStoreRaw64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       IntRegIndex _base);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};

// Declaration for exclusive load/store with a result register.
def template LoadStoreEx64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       IntRegIndex _base, IntRegIndex _result);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};

// Declaration for PC-relative (literal) loads.
def template LoadStoreLit64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};

// Literal-load declaration with user-mode / exclusive / acquire-release
// variants.
def template LoadStoreLitU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm,
                       bool noAlloc = false, bool exclusive = false,
                       bool acrel = false);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};

// Constructor for immediate-offset load/store. When the instruction is
// microcoded (writeback forms), build the access micro-op followed by
// the writeback micro-op.
def template LoadStoreImm64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, (IntRegIndex)_base, _imm)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _base, _imm);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};

// Constructor for the U-variant: never microcoded; records the
// exclusive and acquire-release attributes.
def template LoadStoreImmU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};

// Constructor for the paired U-variant (two destination registers).
def template LoadStoreImmDU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
            int64_t _imm, bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _dest2, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};

// Constructor for paired store-exclusive (has an extra result register).
def template StoreImmDEx64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
            IntRegIndex _base, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _result, _dest, _dest2, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
    }
}};

// Constructor for register-offset load/store; microcoded forms get an
// access micro-op plus a writeback micro-op.
def template LoadStoreReg64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
            ArmExtendType _type, uint32_t _shiftAmt)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _offset, _type, _shiftAmt)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _base, _offset,
                                   _type, _shiftAmt);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};

// Constructor for the register-offset U-variant: never microcoded;
// records exclusive / acquire-release attributes.
def template LoadStoreRegU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
            ArmExtendType _type, uint32_t _shiftAmt,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _offset, _type, _shiftAmt)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};
// Constructor for raw (base-only) load/store instructions.
def template LoadStoreRaw64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _dest, _base)
    {
        %(constructor)s;
    }
}};

// Constructor for exclusive load/store with a result register.
def template LoadStoreEx64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _result)
    {
        %(constructor)s;
    }
}};

// Constructor for PC-relative (literal) loads; microcoded forms get an
// access micro-op plus a writeback micro-op.
def template LoadStoreLit64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, _imm)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _imm);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};

// Constructor for the literal-load U-variant: never microcoded; records
// exclusive / acquire-release attributes.
def template LoadStoreLitU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, int64_t _imm,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};