mem64.isa revision 12236:126ac9da6050
// -*- mode:c++ -*-

// Copyright (c) 2011-2014 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder. You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Gabe Black

let {{
    SPAlignmentCheckCode = '''
        if (baseIsSP && bits(XBase, 3, 0) &&
            SPAlignmentCheckEnabled(xc->tcBase())) {
            return std::make_shared<SPAlignmentFault>();
        }
    '''
}};

def template Load64Execute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template Store64Execute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA,
                                   memAccessFlags, NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template Store64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                                   NULL);
        }

        return fault;
    }
}};

def template StoreEx64Execute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        uint64_t writeResult = 0;
        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                                   &writeResult);
        }

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template StoreEx64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                                   NULL);
        }

        return fault;
    }
}};

def template Load64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
        }

        return fault;
    }
}};

def template Load64CompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        // ARM instructions will not have a pkt if the predicate is false
        getMem(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template Store64CompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

def template StoreEx64CompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        uint64_t writeResult = pkt->req->getExtraData();
        %(postacc_code)s;

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template DCStore64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _base,
                       IntRegIndex _dest, uint64_t _imm);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template DCStore64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst, IntRegIndex _base,
                                   IntRegIndex _dest, uint64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_base, _dest, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
    }
}};

def template DCStore64Execute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;


        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template DCStore64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
        }

        return fault;
    }
}};


def template LoadStoreImm64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, int64_t _imm);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreImmU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreImmDU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
                int64_t _imm = 0, bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template StoreImmDEx64Declare {{
    /**
     * Static instruction class for "%(mnemonic)s".
     */
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
                IntRegIndex _base, int64_t _imm = 0);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;
    };
}};


def template LoadStoreReg64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
                ArmExtendType _type, uint32_t _shiftAmt);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreRegU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
                ArmExtendType _type, uint32_t _shiftAmt,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreRaw64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       IntRegIndex _base);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreEx64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       IntRegIndex _base, IntRegIndex _result);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreLit64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreLitU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreImm64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, (IntRegIndex)_base, _imm)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _base, _imm);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};

def template LoadStoreImmU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};

def template LoadStoreImmDU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
            int64_t _imm, bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _dest2, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};

def template StoreImmDEx64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
            IntRegIndex _base, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _result, _dest, _dest2, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
    }
}};


def template LoadStoreReg64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
            ArmExtendType _type, uint32_t _shiftAmt)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _offset, _type, _shiftAmt)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _base, _offset,
                                   _type, _shiftAmt);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};

def template LoadStoreRegU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
            ArmExtendType _type, uint32_t _shiftAmt,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _offset, _type, _shiftAmt)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};

def template LoadStoreRaw64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _dest, _base)
    {
        %(constructor)s;
    }
}};

def template LoadStoreEx64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _result)
    {
        %(constructor)s;
    }
}};

def template LoadStoreLit64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, _imm)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _imm);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};

def template LoadStoreLitU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, int64_t _imm,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};
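
A minimal sketch, for orientation only, of what the Load64Execute template above might read like once the ISA parser substitutes its keys. The class name LDRX64_IMM, the imm member, the operand indices, and the readIntRegOperand/setIntRegOperand calls are hypothetical stand-ins for whatever the instruction definitions actually generate; only the control flow and the readMemAtomic call are taken from the template itself.

    // Hypothetical expansion of Load64Execute (illustrative, not generated code).
    Fault LDRX64_IMM::execute(ExecContext *xc,
                              Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        uint64_t XBase = 0, XDest = 0, Mem = 0;   // stands in for %(op_decl)s
        XBase = xc->readIntRegOperand(this, 1);   // stands in for %(op_rd)s
        EA = XBase + imm;                         // stands in for %(ea_code)s

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
            XDest = Mem;                          // stands in for %(memacc_code)s
        }

        if (fault == NoFault) {
            xc->setIntRegOperand(this, 0, XDest); // stands in for %(op_wb)s
        }

        return fault;
    }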