mem64.isa revision 11303:f694764d656d
// -*- mode:c++ -*-

// Copyright (c) 2011-2014 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder. You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Gabe Black

let {{
    # C++ snippet shared by the 64-bit load/store templates in this file:
    # when the base register is the stack pointer (baseIsSP) and its low
    # four bits are non-zero (SP not 16-byte aligned) and the SP alignment
    # check is enabled, return an SP alignment fault instead of accessing
    # memory.
    SPAlignmentCheckCode = '''
        if (baseIsSP && bits(XBase, 3, 0) &&
            SPAlignmentCheckEnabled(xc->tcBase())) {
            return std::make_shared<SPAlignmentFault>();
        }
    '''
}};

// execute() for 64-bit loads (atomic mode): compute the effective
// address, read memory, run the access code, then write back registers.
def template Load64Execute {{
    Fault %(class_name)s::execute(CPU_EXEC_CONTEXT *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// execute() for 64-bit stores (atomic mode): build the store data in Mem
// via %(memacc_code)s, then write it out; no write result is requested
// (NULL res pointer).
def template Store64Execute {{
    Fault %(class_name)s::execute(CPU_EXEC_CONTEXT *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA,
                                   memAccessFlags, NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// initiateAcc() for 64-bit stores (timing mode): kick off the write;
// completion is handled by the matching CompleteAcc template.
def template Store64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(CPU_EXEC_CONTEXT *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                                   NULL);
        }

        return fault;
    }
}};

// execute() for store-exclusive style stores (atomic mode): the write
// result (e.g. exclusive pass/fail) is captured in writeResult and
// consumed by %(postacc_code)s before write-back.
def template StoreEx64Execute {{
    Fault %(class_name)s::execute(CPU_EXEC_CONTEXT *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        uint64_t writeResult = 0;
        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                                   &writeResult);
        }

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// initiateAcc() for store-exclusive style stores (timing mode); the
// write result is picked up later in StoreEx64CompleteAcc.
def template StoreEx64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(CPU_EXEC_CONTEXT *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                                   NULL);
        }

        return fault;
    }
}};

// initiateAcc() for 64-bit loads (timing mode): only source operands are
// declared/read here; the destination write-back happens in completeAcc.
def template Load64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(CPU_EXEC_CONTEXT *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
        }

        return fault;
    }
}};

// completeAcc() for 64-bit loads (timing mode): extract the loaded data
// from the response packet, run the access code, and write back.
def template Load64CompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt,
                                      CPU_EXEC_CONTEXT *xc,
                                      Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        // ARM instructions will not have a pkt if the predicate is false
        getMem(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// completeAcc() for plain stores: nothing to do once the write response
// arrives.
def template Store64CompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt,
                                      CPU_EXEC_CONTEXT *xc,
                                      Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

// completeAcc() for store-exclusive style stores: the write result is
// returned in the request's extra data and fed to %(postacc_code)s
// (e.g. to set the status register) before write-back.
def template StoreEx64CompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt,
                                      CPU_EXEC_CONTEXT *xc,
                                      Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        uint64_t writeResult = pkt->req->getExtraData();
        %(postacc_code)s;

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Class declaration for DC-style (data-cache maintenance) store ops.
def template DCStore64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _base, IntRegIndex _dest, uint64_t _imm);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

// Constructor for DC-style store ops; these are never microcoded
// (asserted below).
def template DCStore64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst, IntRegIndex _base, IntRegIndex _dest, uint64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_base, _dest, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
    }
}};

// execute() for DC-style store ops (atomic mode): issues a write of
// op_size bytes with a NULL data pointer — only the address/size reach
// the memory system.
def template DCStore64Execute {{
    Fault %(class_name)s::execute(CPU_EXEC_CONTEXT *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// initiateAcc() for DC-style store ops (timing mode); same NULL-data
// write as the atomic version.
def template DCStore64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(CPU_EXEC_CONTEXT *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
        }

        return fault;
    }
}};

// Class declaration for base+immediate addressed loads/stores.
def template LoadStoreImm64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, int64_t _imm);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

// Base+immediate variant carrying the extra noAlloc/exclusive/acrel
// (acquire-release) attributes.
def template LoadStoreImmU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

// Base+immediate variant with two destination registers (register
// pairs) plus the noAlloc/exclusive/acrel attributes.
def template LoadStoreImmDU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
                int64_t _imm = 0, bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

// Store-exclusive pair variant: carries a result register in addition
// to the two source/dest registers.
def template StoreImmDEx64Declare {{
    /**
     * Static instruction class for "%(mnemonic)s".
     */
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
                IntRegIndex _base, int64_t _imm = 0);

        %(BasicExecDeclare)s

        %(InitiateAccDeclare)s

        %(CompleteAccDeclare)s
    };
}};

// Class declaration for base+register-offset addressed loads/stores
// (offset optionally extended/shifted).
def template LoadStoreReg64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
                ArmExtendType _type, uint32_t _shiftAmt);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

// Register-offset variant with the noAlloc/exclusive/acrel attributes.
def template LoadStoreRegU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
                ArmExtendType _type, uint32_t _shiftAmt,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

// Class declaration for raw base-register addressed accesses (no
// offset or immediate).
def template LoadStoreRaw64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       IntRegIndex _base);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

// Class declaration for exclusive accesses with a result register.
def template LoadStoreEx64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       IntRegIndex _base, IntRegIndex _result);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

// Class declaration for PC-relative (literal) loads.
def template LoadStoreLit64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

// Literal variant with the noAlloc/exclusive/acrel attributes.
def template LoadStoreLitU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

// Constructor for base+immediate loads/stores. When the instruction is
// microcoded (%(use_uops)d), uop[0] performs the access (delayed
// commit) and uop[1] is the write-back microop.
def template LoadStoreImm64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, (IntRegIndex)_base, _imm)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _base, _imm);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};

// Constructor for the attribute-carrying base+immediate variant; never
// microcoded. noAlloc is currently unused here; exclusive/acrel are
// recorded via setExcAcRel().
def template LoadStoreImmU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};

// Constructor for the register-pair base+immediate variant; never
// microcoded.
def template LoadStoreImmDU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
            int64_t _imm, bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _dest2, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};

// Constructor for store-exclusive pair instructions; never microcoded.
def template StoreImmDEx64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
            IntRegIndex _base, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _result, _dest, _dest2, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
    }
}};

// Constructor for base+register-offset loads/stores; same microop
// split as LoadStoreImm64Constructor when microcoded.
def template LoadStoreReg64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
            ArmExtendType _type, uint32_t _shiftAmt)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _offset, _type, _shiftAmt)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _base, _offset,
                                   _type, _shiftAmt);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};

// Constructor for the attribute-carrying register-offset variant;
// never microcoded.
def template LoadStoreRegU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
            ArmExtendType _type, uint32_t _shiftAmt,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _offset, _type, _shiftAmt)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};

// Constructor for raw base-register addressed accesses.
def template LoadStoreRaw64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _dest, _base)
    {
        %(constructor)s;
    }
}};

// Constructor for exclusive accesses with a result register.
def template LoadStoreEx64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _result)
    {
        %(constructor)s;
    }
}};

// Constructor for literal (PC-relative) loads; same microop split as
// LoadStoreImm64Constructor when microcoded.
def template LoadStoreLit64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, _imm)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _imm);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};

// Constructor for the attribute-carrying literal variant; never
// microcoded.
def template LoadStoreLitU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, int64_t _imm,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};