// ldstop.isa revision 12234:78ece221f9f5
// Copyright (c) 2007-2008 The Hewlett-Packard Development Company
// Copyright (c) 2015 Advanced Micro Devices, Inc.
// All rights reserved.
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder. You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Copyright (c) 2008 The Regents of The University of Michigan
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT 32// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 33// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 34// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 35// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 36// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 37// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 38// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 39// 40// Authors: Gabe Black 41 42////////////////////////////////////////////////////////////////////////// 43// 44// LdStOp Microop templates 45// 46////////////////////////////////////////////////////////////////////////// 47 48// LEA template 49 50def template MicroLeaExecute {{ 51 Fault %(class_name)s::execute(ExecContext *xc, 52 Trace::InstRecord *traceData) const 53 { 54 Fault fault = NoFault; 55 Addr EA; 56 57 %(op_decl)s; 58 %(op_rd)s; 59 %(ea_code)s; 60 DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA); 61 62 %(code)s; 63 if(fault == NoFault) 64 { 65 %(op_wb)s; 66 } 67 68 return fault; 69 } 70}}; 71 72def template MicroLeaDeclare {{ 73 class %(class_name)s : public %(base_class)s 74 { 75 public: 76 %(class_name)s(ExtMachInst _machInst, 77 const char * instMnem, uint64_t setFlags, 78 uint8_t _scale, InstRegIndex _index, InstRegIndex _base, 79 uint64_t _disp, InstRegIndex _segment, 80 InstRegIndex _data, 81 uint8_t _dataSize, uint8_t _addressSize, 82 Request::FlagsType _memFlags); 83 84 %(BasicExecDeclare)s 85 }; 86}}; 87 88// Load templates 89 90def template MicroLoadExecute {{ 91 Fault %(class_name)s::execute(ExecContext *xc, 92 Trace::InstRecord *traceData) const 93 { 94 Fault fault = NoFault; 95 Addr EA; 96 97 %(op_decl)s; 98 %(op_rd)s; 99 %(ea_code)s; 100 DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA); 101 102 fault = readMemAtomic(xc, traceData, EA, Mem, 103 
%(memDataSize)s, memFlags); 104 105 if (fault == NoFault) { 106 %(code)s; 107 } else if (memFlags & Request::PREFETCH) { 108 // For prefetches, ignore any faults/exceptions. 109 return NoFault; 110 } 111 if(fault == NoFault) 112 { 113 %(op_wb)s; 114 } 115 116 return fault; 117 } 118}}; 119 120def template MicroLoadInitiateAcc {{ 121 Fault %(class_name)s::initiateAcc(ExecContext * xc, 122 Trace::InstRecord * traceData) const 123 { 124 Fault fault = NoFault; 125 Addr EA; 126 127 %(op_decl)s; 128 %(op_rd)s; 129 %(ea_code)s; 130 DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA); 131 132 fault = initiateMemRead(xc, traceData, EA, 133 %(memDataSize)s, memFlags); 134 135 return fault; 136 } 137}}; 138 139def template MicroLoadCompleteAcc {{ 140 Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext * xc, 141 Trace::InstRecord * traceData) const 142 { 143 Fault fault = NoFault; 144 145 %(op_decl)s; 146 %(op_rd)s; 147 148 getMem(pkt, Mem, %(memDataSize)s, traceData); 149 150 %(code)s; 151 152 if(fault == NoFault) 153 { 154 %(op_wb)s; 155 } 156 157 return fault; 158 } 159}}; 160 161// Store templates 162 163def template MicroStoreExecute {{ 164 Fault %(class_name)s::execute(ExecContext * xc, 165 Trace::InstRecord *traceData) const 166 { 167 Fault fault = NoFault; 168 169 Addr EA; 170 %(op_decl)s; 171 %(op_rd)s; 172 %(ea_code)s; 173 DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA); 174 175 %(code)s; 176 177 if(fault == NoFault) 178 { 179 fault = writeMemAtomic(xc, traceData, Mem, %(memDataSize)s, EA, 180 memFlags, NULL); 181 if(fault == NoFault) 182 { 183 %(op_wb)s; 184 } 185 } 186 187 return fault; 188 } 189}}; 190 191def template MicroStoreInitiateAcc {{ 192 Fault %(class_name)s::initiateAcc(ExecContext * xc, 193 Trace::InstRecord * traceData) const 194 { 195 Fault fault = NoFault; 196 197 Addr EA; 198 %(op_decl)s; 199 %(op_rd)s; 200 %(ea_code)s; 201 DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA); 202 203 
%(code)s; 204 205 if(fault == NoFault) 206 { 207 fault = writeMemTiming(xc, traceData, Mem, %(memDataSize)s, EA, 208 memFlags, NULL); 209 } 210 return fault; 211 } 212}}; 213 214def template MicroStoreCompleteAcc {{ 215 Fault %(class_name)s::completeAcc(PacketPtr pkt, 216 ExecContext * xc, Trace::InstRecord * traceData) const 217 { 218 %(op_decl)s; 219 %(op_rd)s; 220 %(complete_code)s; 221 %(op_wb)s; 222 return NoFault; 223 } 224}}; 225 226// Common templates 227 228//This delcares the initiateAcc function in memory operations 229def template InitiateAccDeclare {{ 230 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const; 231}}; 232 233//This declares the completeAcc function in memory operations 234def template CompleteAccDeclare {{ 235 Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const; 236}}; 237 238def template MicroLdStOpDeclare {{ 239 class %(class_name)s : public %(base_class)s 240 { 241 public: 242 %(class_name)s(ExtMachInst _machInst, 243 const char * instMnem, uint64_t setFlags, 244 uint8_t _scale, InstRegIndex _index, InstRegIndex _base, 245 uint64_t _disp, InstRegIndex _segment, 246 InstRegIndex _data, 247 uint8_t _dataSize, uint8_t _addressSize, 248 Request::FlagsType _memFlags); 249 250 %(BasicExecDeclare)s 251 252 %(InitiateAccDeclare)s 253 254 %(CompleteAccDeclare)s 255 }; 256}}; 257 258// LdStSplitOp is a load or store that uses a pair of regs as the 259// source or destination. Used for cmpxchg{8,16}b. 
260def template MicroLdStSplitOpDeclare {{ 261 class %(class_name)s : public %(base_class)s 262 { 263 public: 264 %(class_name)s(ExtMachInst _machInst, 265 const char * instMnem, uint64_t setFlags, 266 uint8_t _scale, InstRegIndex _index, InstRegIndex _base, 267 uint64_t _disp, InstRegIndex _segment, 268 InstRegIndex _dataLow, InstRegIndex _dataHi, 269 uint8_t _dataSize, uint8_t _addressSize, 270 Request::FlagsType _memFlags); 271 272 %(BasicExecDeclare)s 273 274 %(InitiateAccDeclare)s 275 276 %(CompleteAccDeclare)s 277 }; 278}}; 279 280def template MicroLdStOpConstructor {{ 281 %(class_name)s::%(class_name)s( 282 ExtMachInst machInst, const char * instMnem, uint64_t setFlags, 283 uint8_t _scale, InstRegIndex _index, InstRegIndex _base, 284 uint64_t _disp, InstRegIndex _segment, 285 InstRegIndex _data, 286 uint8_t _dataSize, uint8_t _addressSize, 287 Request::FlagsType _memFlags) : 288 %(base_class)s(machInst, "%(mnemonic)s", instMnem, setFlags, 289 _scale, _index, _base, 290 _disp, _segment, _data, 291 _dataSize, _addressSize, _memFlags, %(op_class)s) 292 { 293 %(constructor)s; 294 } 295}}; 296 297def template MicroLdStSplitOpConstructor {{ 298 %(class_name)s::%(class_name)s( 299 ExtMachInst machInst, const char * instMnem, uint64_t setFlags, 300 uint8_t _scale, InstRegIndex _index, InstRegIndex _base, 301 uint64_t _disp, InstRegIndex _segment, 302 InstRegIndex _dataLow, InstRegIndex _dataHi, 303 uint8_t _dataSize, uint8_t _addressSize, 304 Request::FlagsType _memFlags) : 305 %(base_class)s(machInst, "%(mnemonic)s", instMnem, setFlags, 306 _scale, _index, _base, 307 _disp, _segment, _dataLow, _dataHi, 308 _dataSize, _addressSize, _memFlags, %(op_class)s) 309 { 310 %(constructor)s; 311 } 312}}; 313 314let {{ 315 class LdStOp(X86Microop): 316 def __init__(self, data, segment, addr, disp, 317 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec, 318 implicitStack): 319 self.data = data 320 [self.scale, self.index, self.base] = addr 321 self.disp = disp 322 
self.segment = segment 323 self.dataSize = dataSize 324 self.addressSize = addressSize 325 self.memFlags = baseFlags 326 if atCPL0: 327 self.memFlags += " | (CPL0FlagBit << FlagShift)" 328 self.instFlags = "" 329 if prefetch: 330 self.memFlags += " | Request::PREFETCH" 331 self.instFlags += " | (1ULL << StaticInst::IsDataPrefetch)" 332 if nonSpec: 333 self.instFlags += " | (1ULL << StaticInst::IsNonSpeculative)" 334 # For implicit stack operations, we should use *not* use the 335 # alternative addressing mode for loads/stores if the prefix is set 336 if not implicitStack: 337 self.memFlags += " | (machInst.legacy.addr ? " + \ 338 "(AddrSizeFlagBit << FlagShift) : 0)" 339 340 def getAllocator(self, microFlags): 341 allocator = '''new %(class_name)s(machInst, macrocodeBlock, 342 %(flags)s, %(scale)s, %(index)s, %(base)s, 343 %(disp)s, %(segment)s, %(data)s, 344 %(dataSize)s, %(addressSize)s, %(memFlags)s)''' % { 345 "class_name" : self.className, 346 "flags" : self.microFlagsText(microFlags) + self.instFlags, 347 "scale" : self.scale, "index" : self.index, 348 "base" : self.base, 349 "disp" : self.disp, 350 "segment" : self.segment, "data" : self.data, 351 "dataSize" : self.dataSize, "addressSize" : self.addressSize, 352 "memFlags" : self.memFlags} 353 return allocator 354 355 class BigLdStOp(X86Microop): 356 def __init__(self, data, segment, addr, disp, 357 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec, 358 implicitStack): 359 self.data = data 360 [self.scale, self.index, self.base] = addr 361 self.disp = disp 362 self.segment = segment 363 self.dataSize = dataSize 364 self.addressSize = addressSize 365 self.memFlags = baseFlags 366 if atCPL0: 367 self.memFlags += " | (CPL0FlagBit << FlagShift)" 368 self.instFlags = "" 369 if prefetch: 370 self.memFlags += " | Request::PREFETCH" 371 self.instFlags += " | (1ULL << StaticInst::IsDataPrefetch)" 372 if nonSpec: 373 self.instFlags += " | (1ULL << StaticInst::IsNonSpeculative)" 374 # For implicit stack 
operations, we should use *not* use the 375 # alternative addressing mode for loads/stores if the prefix is set 376 if not implicitStack: 377 self.memFlags += " | (machInst.legacy.addr ? " + \ 378 "(AddrSizeFlagBit << FlagShift) : 0)" 379 380 def getAllocator(self, microFlags): 381 allocString = ''' 382 (%(dataSize)s >= 4) ? 383 (StaticInstPtr)(new %(class_name)sBig(machInst, 384 macrocodeBlock, %(flags)s, %(scale)s, %(index)s, 385 %(base)s, %(disp)s, %(segment)s, %(data)s, 386 %(dataSize)s, %(addressSize)s, %(memFlags)s)) : 387 (StaticInstPtr)(new %(class_name)s(machInst, 388 macrocodeBlock, %(flags)s, %(scale)s, %(index)s, 389 %(base)s, %(disp)s, %(segment)s, %(data)s, 390 %(dataSize)s, %(addressSize)s, %(memFlags)s)) 391 ''' 392 allocator = allocString % { 393 "class_name" : self.className, 394 "flags" : self.microFlagsText(microFlags) + self.instFlags, 395 "scale" : self.scale, "index" : self.index, 396 "base" : self.base, 397 "disp" : self.disp, 398 "segment" : self.segment, "data" : self.data, 399 "dataSize" : self.dataSize, "addressSize" : self.addressSize, 400 "memFlags" : self.memFlags} 401 return allocator 402 403 class LdStSplitOp(LdStOp): 404 def __init__(self, data, segment, addr, disp, 405 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec, 406 implicitStack): 407 super(LdStSplitOp, self).__init__(0, segment, addr, disp, 408 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec, 409 implicitStack) 410 (self.dataLow, self.dataHi) = data 411 412 def getAllocator(self, microFlags): 413 allocString = '''(StaticInstPtr)(new %(class_name)s(machInst, 414 macrocodeBlock, %(flags)s, %(scale)s, %(index)s, 415 %(base)s, %(disp)s, %(segment)s, 416 %(dataLow)s, %(dataHi)s, 417 %(dataSize)s, %(addressSize)s, %(memFlags)s)) 418 ''' 419 allocator = allocString % { 420 "class_name" : self.className, 421 "flags" : self.microFlagsText(microFlags) + self.instFlags, 422 "scale" : self.scale, "index" : self.index, 423 "base" : self.base, 424 "disp" : 
self.disp, 425 "segment" : self.segment, 426 "dataLow" : self.dataLow, "dataHi" : self.dataHi, 427 "dataSize" : self.dataSize, "addressSize" : self.addressSize, 428 "memFlags" : self.memFlags} 429 return allocator 430 431}}; 432 433let {{ 434 435 # Make these empty strings so that concatenating onto 436 # them will always work. 437 header_output = "" 438 decoder_output = "" 439 exec_output = "" 440 441 segmentEAExpr = \ 442 'bits(scale * Index + Base + disp, addressSize * 8 - 1, 0);' 443 444 calculateEA = 'EA = SegBase + ' + segmentEAExpr 445 446 def defineMicroLoadOp(mnemonic, code, bigCode='', 447 mem_flags="0", big=True, nonSpec=False, 448 implicitStack=False): 449 global header_output 450 global decoder_output 451 global exec_output 452 global microopClasses 453 Name = mnemonic 454 name = mnemonic.lower() 455 456 # Build up the all register version of this micro op 457 iops = [InstObjParams(name, Name, 'X86ISA::LdStOp', 458 { "code": code, 459 "ea_code": calculateEA, 460 "memDataSize": "dataSize" })] 461 if big: 462 iops += [InstObjParams(name, Name + "Big", 'X86ISA::LdStOp', 463 { "code": bigCode, 464 "ea_code": calculateEA, 465 "memDataSize": "dataSize" })] 466 for iop in iops: 467 header_output += MicroLdStOpDeclare.subst(iop) 468 decoder_output += MicroLdStOpConstructor.subst(iop) 469 exec_output += MicroLoadExecute.subst(iop) 470 exec_output += MicroLoadInitiateAcc.subst(iop) 471 exec_output += MicroLoadCompleteAcc.subst(iop) 472 473 if implicitStack: 474 # For instructions that implicitly access the stack, the address 475 # size is the same as the stack segment pointer size, not the 476 # address size if specified by the instruction prefix 477 addressSize = "env.stackSize" 478 else: 479 addressSize = "env.addressSize" 480 481 base = LdStOp 482 if big: 483 base = BigLdStOp 484 class LoadOp(base): 485 def __init__(self, data, segment, addr, disp = 0, 486 dataSize="env.dataSize", 487 addressSize=addressSize, 488 atCPL0=False, prefetch=False, nonSpec=nonSpec, 
489 implicitStack=implicitStack): 490 super(LoadOp, self).__init__(data, segment, addr, 491 disp, dataSize, addressSize, mem_flags, 492 atCPL0, prefetch, nonSpec, implicitStack) 493 self.className = Name 494 self.mnemonic = name 495 496 microopClasses[name] = LoadOp 497 498 defineMicroLoadOp('Ld', 'Data = merge(Data, Mem, dataSize);', 499 'Data = Mem & mask(dataSize * 8);') 500 defineMicroLoadOp('Ldis', 'Data = merge(Data, Mem, dataSize);', 501 'Data = Mem & mask(dataSize * 8);', 502 implicitStack=True) 503 defineMicroLoadOp('Ldst', 'Data = merge(Data, Mem, dataSize);', 504 'Data = Mem & mask(dataSize * 8);', 505 '(StoreCheck << FlagShift)') 506 defineMicroLoadOp('Ldstl', 'Data = merge(Data, Mem, dataSize);', 507 'Data = Mem & mask(dataSize * 8);', 508 '(StoreCheck << FlagShift) | Request::LOCKED_RMW', 509 nonSpec=True) 510 511 defineMicroLoadOp('Ldfp', code='FpData_uqw = Mem', big = False) 512 513 defineMicroLoadOp('Ldfp87', code=''' 514 switch (dataSize) 515 { 516 case 4: 517 FpData_df = *(float *)&Mem; 518 break; 519 case 8: 520 FpData_df = *(double *)&Mem; 521 break; 522 default: 523 panic("Unhandled data size in LdFp87.\\n"); 524 } 525 ''', big = False) 526 527 # Load integer from memory into x87 top-of-stack register. 528 # Used to implement fild instruction. 
529 defineMicroLoadOp('Ldifp87', code=''' 530 switch (dataSize) 531 { 532 case 2: 533 FpData_df = (int64_t)sext<16>(Mem); 534 break; 535 case 4: 536 FpData_df = (int64_t)sext<32>(Mem); 537 break; 538 case 8: 539 FpData_df = (int64_t)Mem; 540 break; 541 default: 542 panic("Unhandled data size in LdIFp87.\\n"); 543 } 544 ''', big = False) 545 546 def defineMicroLoadSplitOp(mnemonic, code, mem_flags="0", nonSpec=False): 547 global header_output 548 global decoder_output 549 global exec_output 550 global microopClasses 551 Name = mnemonic 552 name = mnemonic.lower() 553 554 iop = InstObjParams(name, Name, 'X86ISA::LdStSplitOp', 555 { "code": code, 556 "ea_code": calculateEA, 557 "memDataSize": "2 * dataSize" }) 558 559 header_output += MicroLdStSplitOpDeclare.subst(iop) 560 decoder_output += MicroLdStSplitOpConstructor.subst(iop) 561 exec_output += MicroLoadExecute.subst(iop) 562 exec_output += MicroLoadInitiateAcc.subst(iop) 563 exec_output += MicroLoadCompleteAcc.subst(iop) 564 565 class LoadOp(LdStSplitOp): 566 def __init__(self, data, segment, addr, disp = 0, 567 dataSize="env.dataSize", 568 addressSize="env.addressSize", 569 atCPL0=False, prefetch=False, nonSpec=nonSpec, 570 implicitStack=False): 571 super(LoadOp, self).__init__(data, segment, addr, 572 disp, dataSize, addressSize, mem_flags, 573 atCPL0, prefetch, nonSpec, implicitStack) 574 self.className = Name 575 self.mnemonic = name 576 577 microopClasses[name] = LoadOp 578 579 code = ''' 580 switch (dataSize) { 581 case 4: 582 DataLow = bits(Mem_u2qw[0], 31, 0); 583 DataHi = bits(Mem_u2qw[0], 63, 32); 584 break; 585 case 8: 586 DataLow = Mem_u2qw[0]; 587 DataHi = Mem_u2qw[1]; 588 break; 589 default: 590 panic("Unhandled data size %d in LdSplit.\\n", dataSize); 591 }''' 592 593 defineMicroLoadSplitOp('LdSplit', code, 594 '(StoreCheck << FlagShift)') 595 596 defineMicroLoadSplitOp('LdSplitl', code, 597 '(StoreCheck << FlagShift) | Request::LOCKED_RMW', 598 nonSpec=True) 599 600 def defineMicroStoreOp(mnemonic, 
code, completeCode="", mem_flags="0", 601 implicitStack=False): 602 global header_output 603 global decoder_output 604 global exec_output 605 global microopClasses 606 Name = mnemonic 607 name = mnemonic.lower() 608 609 # Build up the all register version of this micro op 610 iop = InstObjParams(name, Name, 'X86ISA::LdStOp', 611 { "code": code, 612 "complete_code": completeCode, 613 "ea_code": calculateEA, 614 "memDataSize": "dataSize" }) 615 header_output += MicroLdStOpDeclare.subst(iop) 616 decoder_output += MicroLdStOpConstructor.subst(iop) 617 exec_output += MicroStoreExecute.subst(iop) 618 exec_output += MicroStoreInitiateAcc.subst(iop) 619 exec_output += MicroStoreCompleteAcc.subst(iop) 620 621 if implicitStack: 622 # For instructions that implicitly access the stack, the address 623 # size is the same as the stack segment pointer size, not the 624 # address size if specified by the instruction prefix 625 addressSize = "env.stackSize" 626 else: 627 addressSize = "env.addressSize" 628 629 class StoreOp(LdStOp): 630 def __init__(self, data, segment, addr, disp = 0, 631 dataSize="env.dataSize", 632 addressSize=addressSize, 633 atCPL0=False, nonSpec=False, implicitStack=implicitStack): 634 super(StoreOp, self).__init__(data, segment, addr, disp, 635 dataSize, addressSize, mem_flags, atCPL0, False, 636 nonSpec, implicitStack) 637 self.className = Name 638 self.mnemonic = name 639 640 microopClasses[name] = StoreOp 641 642 defineMicroStoreOp('St', 'Mem = pick(Data, 2, dataSize);') 643 defineMicroStoreOp('Stis', 'Mem = pick(Data, 2, dataSize);', 644 implicitStack=True) 645 defineMicroStoreOp('Stul', 'Mem = pick(Data, 2, dataSize);', 646 mem_flags="Request::LOCKED_RMW") 647 648 defineMicroStoreOp('Stfp', code='Mem = FpData_uqw;') 649 650 defineMicroStoreOp('Stfp87', code=''' 651 switch (dataSize) 652 { 653 case 4: { 654 float single(FpData_df); 655 Mem = *(uint32_t *)&single; 656 } break; 657 case 8: 658 Mem = *(uint64_t *)&FpData_df; 659 break; 660 default: 661 
panic("Unhandled data size in StFp87.\\n"); 662 } 663 ''') 664 665 defineMicroStoreOp('Cda', 'Mem = 0;', mem_flags="Request::NO_ACCESS") 666 667 def defineMicroStoreSplitOp(mnemonic, code, 668 completeCode="", mem_flags="0"): 669 global header_output 670 global decoder_output 671 global exec_output 672 global microopClasses 673 Name = mnemonic 674 name = mnemonic.lower() 675 676 iop = InstObjParams(name, Name, 'X86ISA::LdStSplitOp', 677 { "code": code, 678 "complete_code": completeCode, 679 "ea_code": calculateEA, 680 "memDataSize": "2 * dataSize" }) 681 682 header_output += MicroLdStSplitOpDeclare.subst(iop) 683 decoder_output += MicroLdStSplitOpConstructor.subst(iop) 684 exec_output += MicroStoreExecute.subst(iop) 685 exec_output += MicroStoreInitiateAcc.subst(iop) 686 exec_output += MicroStoreCompleteAcc.subst(iop) 687 688 class StoreOp(LdStSplitOp): 689 def __init__(self, data, segment, addr, disp = 0, 690 dataSize="env.dataSize", 691 addressSize="env.addressSize", 692 atCPL0=False, nonSpec=False, implicitStack=False): 693 super(StoreOp, self).__init__(data, segment, addr, disp, 694 dataSize, addressSize, mem_flags, atCPL0, False, 695 nonSpec, implicitStack) 696 self.className = Name 697 self.mnemonic = name 698 699 microopClasses[name] = StoreOp 700 701 code = ''' 702 switch (dataSize) { 703 case 4: 704 Mem_u2qw[0] = (DataHi << 32) | DataLow; 705 break; 706 case 8: 707 Mem_u2qw[0] = DataLow; 708 Mem_u2qw[1] = DataHi; 709 break; 710 default: 711 panic("Unhandled data size %d in StSplit.\\n", dataSize); 712 }''' 713 714 defineMicroStoreSplitOp('StSplit', code); 715 716 defineMicroStoreSplitOp('StSplitul', code, 717 mem_flags='Request::LOCKED_RMW') 718 719 iop = InstObjParams("lea", "Lea", 'X86ISA::LdStOp', 720 { "code": "Data = merge(Data, EA, dataSize);", 721 "ea_code": "EA = " + segmentEAExpr, 722 "memDataSize": "dataSize" }) 723 header_output += MicroLeaDeclare.subst(iop) 724 decoder_output += MicroLdStOpConstructor.subst(iop) 725 exec_output += 
MicroLeaExecute.subst(iop) 726 727 class LeaOp(LdStOp): 728 def __init__(self, data, segment, addr, disp = 0, 729 dataSize="env.dataSize", addressSize="env.addressSize"): 730 super(LeaOp, self).__init__(data, segment, addr, disp, 731 dataSize, addressSize, "0", False, False, False, False) 732 self.className = "Lea" 733 self.mnemonic = "lea" 734 735 microopClasses["lea"] = LeaOp 736 737 738 iop = InstObjParams("tia", "Tia", 'X86ISA::LdStOp', 739 { "code": "xc->demapPage(EA, 0);", 740 "ea_code": calculateEA, 741 "memDataSize": "dataSize" }) 742 header_output += MicroLeaDeclare.subst(iop) 743 decoder_output += MicroLdStOpConstructor.subst(iop) 744 exec_output += MicroLeaExecute.subst(iop) 745 746 class TiaOp(LdStOp): 747 def __init__(self, segment, addr, disp = 0, 748 dataSize="env.dataSize", 749 addressSize="env.addressSize"): 750 super(TiaOp, self).__init__("InstRegIndex(NUM_INTREGS)", segment, 751 addr, disp, dataSize, addressSize, "0", False, False, 752 False, False) 753 self.className = "Tia" 754 self.mnemonic = "tia" 755 756 microopClasses["tia"] = TiaOp 757 758 class CdaOp(LdStOp): 759 def __init__(self, segment, addr, disp = 0, 760 dataSize="env.dataSize", 761 addressSize="env.addressSize", atCPL0=False): 762 super(CdaOp, self).__init__("InstRegIndex(NUM_INTREGS)", segment, 763 addr, disp, dataSize, addressSize, "Request::NO_ACCESS", 764 atCPL0, False, False, False) 765 self.className = "Cda" 766 self.mnemonic = "cda" 767 768 microopClasses["cda"] = CdaOp 769}}; 770