// ldstop.isa revision 12236:126ac9da6050
// Copyright (c) 2007-2008 The Hewlett-Packard Development Company
// Copyright (c) 2015 Advanced Micro Devices, Inc.
// All rights reserved.
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder. You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Copyright (c) 2008 The Regents of The University of Michigan
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED.
// IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Gabe Black

//////////////////////////////////////////////////////////////////////////
//
// LdStOp Microop templates
//
// Each "def template" below is a text template instantiated by the ISA
// parser. The %(...)s placeholders (class_name, op_decl, op_rd, op_wb,
// ea_code, code, memDataSize, ...) are filled in per-microop from the
// InstObjParams built in the let-blocks later in this file.
//
//////////////////////////////////////////////////////////////////////////

// LEA template

// Execute method for microops that only compute an effective address
// (no memory access): run ea_code, then the microop's code, then write
// back results if no fault occurred.
def template MicroLeaExecute {{
    Fault %(class_name)s::execute(ExecContext *xc,
          Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        Addr EA;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        %(code)s;
        if(fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Class declaration for address-computation microops; only execute()
// is declared since there is no memory access to split into
// initiateAcc/completeAcc.
def template MicroLeaDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        %(class_name)s(ExtMachInst _machInst,
                const char * instMnem, uint64_t setFlags,
                uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
                uint64_t _disp, InstRegIndex _segment,
                InstRegIndex _data,
                uint8_t _dataSize, uint8_t _addressSize,
                Request::FlagsType _memFlags);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
    };
}};

// Load templates

// Atomic-mode execute for loads: compute EA, read memory, then run the
// microop's code on the loaded value. Faulting prefetches are silently
// dropped (prefetches must not raise exceptions).
def template MicroLoadExecute {{
    Fault %(class_name)s::execute(ExecContext *xc,
          Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        Addr EA;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        fault = readMemAtomic(xc, traceData, EA, Mem,
                              %(memDataSize)s, memFlags);

        if (fault == NoFault) {
            %(code)s;
        } else if (memFlags & Request::PREFETCH) {
            // For prefetches, ignore any faults/exceptions.
            return NoFault;
        }
        if(fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Timing-mode first half for loads: compute EA and issue the memory
// read. The result is consumed later in completeAcc.
def template MicroLoadInitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext * xc,
            Trace::InstRecord * traceData) const
    {
        Fault fault = NoFault;
        Addr EA;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        fault = initiateMemRead(xc, traceData, EA,
                                %(memDataSize)s, memFlags);

        return fault;
    }
}};

// Timing-mode second half for loads: extract the loaded value from the
// response packet, run the microop's code, and write back.
def template MicroLoadCompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext * xc,
            Trace::InstRecord * traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        getMem(pkt, Mem, %(memDataSize)s, traceData);

        %(code)s;

        if(fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Store templates

// Atomic-mode execute for stores: run the microop's code to produce
// Mem, then write it to memory; register results are only written back
// if the store itself succeeded.
def template MicroStoreExecute {{
    Fault %(class_name)s::execute(ExecContext * xc,
            Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        Addr EA;
        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        %(code)s;

        if(fault == NoFault)
        {
            fault = writeMemAtomic(xc, traceData, Mem, %(memDataSize)s, EA,
                    memFlags, NULL);
            if(fault == NoFault)
            {
                %(op_wb)s;
            }
        }

        return fault;
    }
}};

// Timing-mode first half for stores: compute the value and issue the
// timing write. Write-back happens in completeAcc.
def template MicroStoreInitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext * xc,
            Trace::InstRecord * traceData) const
    {
        Fault fault = NoFault;

        Addr EA;
        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n",
                instMnem, mnemonic, EA);

        %(code)s;

        if(fault == NoFault)
        {
            fault = writeMemTiming(xc, traceData, Mem, %(memDataSize)s, EA,
                    memFlags, NULL);
        }
        return fault;
    }
}};

// Timing-mode second half for stores: run any completion code and
// write back register results. Cannot fault.
def template MicroStoreCompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt,
            ExecContext * xc, Trace::InstRecord * traceData) const
    {
        %(op_decl)s;
        %(op_rd)s;
        %(complete_code)s;
        %(op_wb)s;
        return NoFault;
    }
}};

// Class declaration for load/store microops with a single data
// register: declares the full execute/initiateAcc/completeAcc triple.
def template MicroLdStOpDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        %(class_name)s(ExtMachInst _machInst,
                const char * instMnem, uint64_t setFlags,
                uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
                uint64_t _disp, InstRegIndex _segment,
                InstRegIndex _data,
                uint8_t _dataSize, uint8_t _addressSize,
                Request::FlagsType _memFlags);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;
    };
}};

// LdStSplitOp is a load or store that uses a pair of regs as the
// source or destination. Used for cmpxchg{8,16}b.
246def template MicroLdStSplitOpDeclare {{ 247 class %(class_name)s : public %(base_class)s 248 { 249 public: 250 %(class_name)s(ExtMachInst _machInst, 251 const char * instMnem, uint64_t setFlags, 252 uint8_t _scale, InstRegIndex _index, InstRegIndex _base, 253 uint64_t _disp, InstRegIndex _segment, 254 InstRegIndex _dataLow, InstRegIndex _dataHi, 255 uint8_t _dataSize, uint8_t _addressSize, 256 Request::FlagsType _memFlags); 257 258 Fault execute(ExecContext *, Trace::InstRecord *) const; 259 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const; 260 Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const; 261 }; 262}}; 263 264def template MicroLdStOpConstructor {{ 265 %(class_name)s::%(class_name)s( 266 ExtMachInst machInst, const char * instMnem, uint64_t setFlags, 267 uint8_t _scale, InstRegIndex _index, InstRegIndex _base, 268 uint64_t _disp, InstRegIndex _segment, 269 InstRegIndex _data, 270 uint8_t _dataSize, uint8_t _addressSize, 271 Request::FlagsType _memFlags) : 272 %(base_class)s(machInst, "%(mnemonic)s", instMnem, setFlags, 273 _scale, _index, _base, 274 _disp, _segment, _data, 275 _dataSize, _addressSize, _memFlags, %(op_class)s) 276 { 277 %(constructor)s; 278 } 279}}; 280 281def template MicroLdStSplitOpConstructor {{ 282 %(class_name)s::%(class_name)s( 283 ExtMachInst machInst, const char * instMnem, uint64_t setFlags, 284 uint8_t _scale, InstRegIndex _index, InstRegIndex _base, 285 uint64_t _disp, InstRegIndex _segment, 286 InstRegIndex _dataLow, InstRegIndex _dataHi, 287 uint8_t _dataSize, uint8_t _addressSize, 288 Request::FlagsType _memFlags) : 289 %(base_class)s(machInst, "%(mnemonic)s", instMnem, setFlags, 290 _scale, _index, _base, 291 _disp, _segment, _dataLow, _dataHi, 292 _dataSize, _addressSize, _memFlags, %(op_class)s) 293 { 294 %(constructor)s; 295 } 296}}; 297 298let {{ 299 class LdStOp(X86Microop): 300 def __init__(self, data, segment, addr, disp, 301 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec, 
302 implicitStack): 303 self.data = data 304 [self.scale, self.index, self.base] = addr 305 self.disp = disp 306 self.segment = segment 307 self.dataSize = dataSize 308 self.addressSize = addressSize 309 self.memFlags = baseFlags 310 if atCPL0: 311 self.memFlags += " | (CPL0FlagBit << FlagShift)" 312 self.instFlags = "" 313 if prefetch: 314 self.memFlags += " | Request::PREFETCH" 315 self.instFlags += " | (1ULL << StaticInst::IsDataPrefetch)" 316 if nonSpec: 317 self.instFlags += " | (1ULL << StaticInst::IsNonSpeculative)" 318 # For implicit stack operations, we should use *not* use the 319 # alternative addressing mode for loads/stores if the prefix is set 320 if not implicitStack: 321 self.memFlags += " | (machInst.legacy.addr ? " + \ 322 "(AddrSizeFlagBit << FlagShift) : 0)" 323 324 def getAllocator(self, microFlags): 325 allocator = '''new %(class_name)s(machInst, macrocodeBlock, 326 %(flags)s, %(scale)s, %(index)s, %(base)s, 327 %(disp)s, %(segment)s, %(data)s, 328 %(dataSize)s, %(addressSize)s, %(memFlags)s)''' % { 329 "class_name" : self.className, 330 "flags" : self.microFlagsText(microFlags) + self.instFlags, 331 "scale" : self.scale, "index" : self.index, 332 "base" : self.base, 333 "disp" : self.disp, 334 "segment" : self.segment, "data" : self.data, 335 "dataSize" : self.dataSize, "addressSize" : self.addressSize, 336 "memFlags" : self.memFlags} 337 return allocator 338 339 class BigLdStOp(X86Microop): 340 def __init__(self, data, segment, addr, disp, 341 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec, 342 implicitStack): 343 self.data = data 344 [self.scale, self.index, self.base] = addr 345 self.disp = disp 346 self.segment = segment 347 self.dataSize = dataSize 348 self.addressSize = addressSize 349 self.memFlags = baseFlags 350 if atCPL0: 351 self.memFlags += " | (CPL0FlagBit << FlagShift)" 352 self.instFlags = "" 353 if prefetch: 354 self.memFlags += " | Request::PREFETCH" 355 self.instFlags += " | (1ULL << StaticInst::IsDataPrefetch)" 
356 if nonSpec: 357 self.instFlags += " | (1ULL << StaticInst::IsNonSpeculative)" 358 # For implicit stack operations, we should use *not* use the 359 # alternative addressing mode for loads/stores if the prefix is set 360 if not implicitStack: 361 self.memFlags += " | (machInst.legacy.addr ? " + \ 362 "(AddrSizeFlagBit << FlagShift) : 0)" 363 364 def getAllocator(self, microFlags): 365 allocString = ''' 366 (%(dataSize)s >= 4) ? 367 (StaticInstPtr)(new %(class_name)sBig(machInst, 368 macrocodeBlock, %(flags)s, %(scale)s, %(index)s, 369 %(base)s, %(disp)s, %(segment)s, %(data)s, 370 %(dataSize)s, %(addressSize)s, %(memFlags)s)) : 371 (StaticInstPtr)(new %(class_name)s(machInst, 372 macrocodeBlock, %(flags)s, %(scale)s, %(index)s, 373 %(base)s, %(disp)s, %(segment)s, %(data)s, 374 %(dataSize)s, %(addressSize)s, %(memFlags)s)) 375 ''' 376 allocator = allocString % { 377 "class_name" : self.className, 378 "flags" : self.microFlagsText(microFlags) + self.instFlags, 379 "scale" : self.scale, "index" : self.index, 380 "base" : self.base, 381 "disp" : self.disp, 382 "segment" : self.segment, "data" : self.data, 383 "dataSize" : self.dataSize, "addressSize" : self.addressSize, 384 "memFlags" : self.memFlags} 385 return allocator 386 387 class LdStSplitOp(LdStOp): 388 def __init__(self, data, segment, addr, disp, 389 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec, 390 implicitStack): 391 super(LdStSplitOp, self).__init__(0, segment, addr, disp, 392 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec, 393 implicitStack) 394 (self.dataLow, self.dataHi) = data 395 396 def getAllocator(self, microFlags): 397 allocString = '''(StaticInstPtr)(new %(class_name)s(machInst, 398 macrocodeBlock, %(flags)s, %(scale)s, %(index)s, 399 %(base)s, %(disp)s, %(segment)s, 400 %(dataLow)s, %(dataHi)s, 401 %(dataSize)s, %(addressSize)s, %(memFlags)s)) 402 ''' 403 allocator = allocString % { 404 "class_name" : self.className, 405 "flags" : self.microFlagsText(microFlags) + 
self.instFlags, 406 "scale" : self.scale, "index" : self.index, 407 "base" : self.base, 408 "disp" : self.disp, 409 "segment" : self.segment, 410 "dataLow" : self.dataLow, "dataHi" : self.dataHi, 411 "dataSize" : self.dataSize, "addressSize" : self.addressSize, 412 "memFlags" : self.memFlags} 413 return allocator 414 415}}; 416 417let {{ 418 419 # Make these empty strings so that concatenating onto 420 # them will always work. 421 header_output = "" 422 decoder_output = "" 423 exec_output = "" 424 425 segmentEAExpr = \ 426 'bits(scale * Index + Base + disp, addressSize * 8 - 1, 0);' 427 428 calculateEA = 'EA = SegBase + ' + segmentEAExpr 429 430 def defineMicroLoadOp(mnemonic, code, bigCode='', 431 mem_flags="0", big=True, nonSpec=False, 432 implicitStack=False): 433 global header_output 434 global decoder_output 435 global exec_output 436 global microopClasses 437 Name = mnemonic 438 name = mnemonic.lower() 439 440 # Build up the all register version of this micro op 441 iops = [InstObjParams(name, Name, 'X86ISA::LdStOp', 442 { "code": code, 443 "ea_code": calculateEA, 444 "memDataSize": "dataSize" })] 445 if big: 446 iops += [InstObjParams(name, Name + "Big", 'X86ISA::LdStOp', 447 { "code": bigCode, 448 "ea_code": calculateEA, 449 "memDataSize": "dataSize" })] 450 for iop in iops: 451 header_output += MicroLdStOpDeclare.subst(iop) 452 decoder_output += MicroLdStOpConstructor.subst(iop) 453 exec_output += MicroLoadExecute.subst(iop) 454 exec_output += MicroLoadInitiateAcc.subst(iop) 455 exec_output += MicroLoadCompleteAcc.subst(iop) 456 457 if implicitStack: 458 # For instructions that implicitly access the stack, the address 459 # size is the same as the stack segment pointer size, not the 460 # address size if specified by the instruction prefix 461 addressSize = "env.stackSize" 462 else: 463 addressSize = "env.addressSize" 464 465 base = LdStOp 466 if big: 467 base = BigLdStOp 468 class LoadOp(base): 469 def __init__(self, data, segment, addr, disp = 0, 470 
dataSize="env.dataSize", 471 addressSize=addressSize, 472 atCPL0=False, prefetch=False, nonSpec=nonSpec, 473 implicitStack=implicitStack): 474 super(LoadOp, self).__init__(data, segment, addr, 475 disp, dataSize, addressSize, mem_flags, 476 atCPL0, prefetch, nonSpec, implicitStack) 477 self.className = Name 478 self.mnemonic = name 479 480 microopClasses[name] = LoadOp 481 482 defineMicroLoadOp('Ld', 'Data = merge(Data, Mem, dataSize);', 483 'Data = Mem & mask(dataSize * 8);') 484 defineMicroLoadOp('Ldis', 'Data = merge(Data, Mem, dataSize);', 485 'Data = Mem & mask(dataSize * 8);', 486 implicitStack=True) 487 defineMicroLoadOp('Ldst', 'Data = merge(Data, Mem, dataSize);', 488 'Data = Mem & mask(dataSize * 8);', 489 '(StoreCheck << FlagShift)') 490 defineMicroLoadOp('Ldstl', 'Data = merge(Data, Mem, dataSize);', 491 'Data = Mem & mask(dataSize * 8);', 492 '(StoreCheck << FlagShift) | Request::LOCKED_RMW', 493 nonSpec=True) 494 495 defineMicroLoadOp('Ldfp', code='FpData_uqw = Mem', big = False) 496 497 defineMicroLoadOp('Ldfp87', code=''' 498 switch (dataSize) 499 { 500 case 4: 501 FpData_df = *(float *)&Mem; 502 break; 503 case 8: 504 FpData_df = *(double *)&Mem; 505 break; 506 default: 507 panic("Unhandled data size in LdFp87.\\n"); 508 } 509 ''', big = False) 510 511 # Load integer from memory into x87 top-of-stack register. 512 # Used to implement fild instruction. 
513 defineMicroLoadOp('Ldifp87', code=''' 514 switch (dataSize) 515 { 516 case 2: 517 FpData_df = (int64_t)sext<16>(Mem); 518 break; 519 case 4: 520 FpData_df = (int64_t)sext<32>(Mem); 521 break; 522 case 8: 523 FpData_df = (int64_t)Mem; 524 break; 525 default: 526 panic("Unhandled data size in LdIFp87.\\n"); 527 } 528 ''', big = False) 529 530 def defineMicroLoadSplitOp(mnemonic, code, mem_flags="0", nonSpec=False): 531 global header_output 532 global decoder_output 533 global exec_output 534 global microopClasses 535 Name = mnemonic 536 name = mnemonic.lower() 537 538 iop = InstObjParams(name, Name, 'X86ISA::LdStSplitOp', 539 { "code": code, 540 "ea_code": calculateEA, 541 "memDataSize": "2 * dataSize" }) 542 543 header_output += MicroLdStSplitOpDeclare.subst(iop) 544 decoder_output += MicroLdStSplitOpConstructor.subst(iop) 545 exec_output += MicroLoadExecute.subst(iop) 546 exec_output += MicroLoadInitiateAcc.subst(iop) 547 exec_output += MicroLoadCompleteAcc.subst(iop) 548 549 class LoadOp(LdStSplitOp): 550 def __init__(self, data, segment, addr, disp = 0, 551 dataSize="env.dataSize", 552 addressSize="env.addressSize", 553 atCPL0=False, prefetch=False, nonSpec=nonSpec, 554 implicitStack=False): 555 super(LoadOp, self).__init__(data, segment, addr, 556 disp, dataSize, addressSize, mem_flags, 557 atCPL0, prefetch, nonSpec, implicitStack) 558 self.className = Name 559 self.mnemonic = name 560 561 microopClasses[name] = LoadOp 562 563 code = ''' 564 switch (dataSize) { 565 case 4: 566 DataLow = bits(Mem_u2qw[0], 31, 0); 567 DataHi = bits(Mem_u2qw[0], 63, 32); 568 break; 569 case 8: 570 DataLow = Mem_u2qw[0]; 571 DataHi = Mem_u2qw[1]; 572 break; 573 default: 574 panic("Unhandled data size %d in LdSplit.\\n", dataSize); 575 }''' 576 577 defineMicroLoadSplitOp('LdSplit', code, 578 '(StoreCheck << FlagShift)') 579 580 defineMicroLoadSplitOp('LdSplitl', code, 581 '(StoreCheck << FlagShift) | Request::LOCKED_RMW', 582 nonSpec=True) 583 584 def defineMicroStoreOp(mnemonic, 
code, completeCode="", mem_flags="0", 585 implicitStack=False): 586 global header_output 587 global decoder_output 588 global exec_output 589 global microopClasses 590 Name = mnemonic 591 name = mnemonic.lower() 592 593 # Build up the all register version of this micro op 594 iop = InstObjParams(name, Name, 'X86ISA::LdStOp', 595 { "code": code, 596 "complete_code": completeCode, 597 "ea_code": calculateEA, 598 "memDataSize": "dataSize" }) 599 header_output += MicroLdStOpDeclare.subst(iop) 600 decoder_output += MicroLdStOpConstructor.subst(iop) 601 exec_output += MicroStoreExecute.subst(iop) 602 exec_output += MicroStoreInitiateAcc.subst(iop) 603 exec_output += MicroStoreCompleteAcc.subst(iop) 604 605 if implicitStack: 606 # For instructions that implicitly access the stack, the address 607 # size is the same as the stack segment pointer size, not the 608 # address size if specified by the instruction prefix 609 addressSize = "env.stackSize" 610 else: 611 addressSize = "env.addressSize" 612 613 class StoreOp(LdStOp): 614 def __init__(self, data, segment, addr, disp = 0, 615 dataSize="env.dataSize", 616 addressSize=addressSize, 617 atCPL0=False, nonSpec=False, implicitStack=implicitStack): 618 super(StoreOp, self).__init__(data, segment, addr, disp, 619 dataSize, addressSize, mem_flags, atCPL0, False, 620 nonSpec, implicitStack) 621 self.className = Name 622 self.mnemonic = name 623 624 microopClasses[name] = StoreOp 625 626 defineMicroStoreOp('St', 'Mem = pick(Data, 2, dataSize);') 627 defineMicroStoreOp('Stis', 'Mem = pick(Data, 2, dataSize);', 628 implicitStack=True) 629 defineMicroStoreOp('Stul', 'Mem = pick(Data, 2, dataSize);', 630 mem_flags="Request::LOCKED_RMW") 631 632 defineMicroStoreOp('Stfp', code='Mem = FpData_uqw;') 633 634 defineMicroStoreOp('Stfp87', code=''' 635 switch (dataSize) 636 { 637 case 4: { 638 float single(FpData_df); 639 Mem = *(uint32_t *)&single; 640 } break; 641 case 8: 642 Mem = *(uint64_t *)&FpData_df; 643 break; 644 default: 645 
panic("Unhandled data size in StFp87.\\n"); 646 } 647 ''') 648 649 defineMicroStoreOp('Cda', 'Mem = 0;', mem_flags="Request::NO_ACCESS") 650 651 def defineMicroStoreSplitOp(mnemonic, code, 652 completeCode="", mem_flags="0"): 653 global header_output 654 global decoder_output 655 global exec_output 656 global microopClasses 657 Name = mnemonic 658 name = mnemonic.lower() 659 660 iop = InstObjParams(name, Name, 'X86ISA::LdStSplitOp', 661 { "code": code, 662 "complete_code": completeCode, 663 "ea_code": calculateEA, 664 "memDataSize": "2 * dataSize" }) 665 666 header_output += MicroLdStSplitOpDeclare.subst(iop) 667 decoder_output += MicroLdStSplitOpConstructor.subst(iop) 668 exec_output += MicroStoreExecute.subst(iop) 669 exec_output += MicroStoreInitiateAcc.subst(iop) 670 exec_output += MicroStoreCompleteAcc.subst(iop) 671 672 class StoreOp(LdStSplitOp): 673 def __init__(self, data, segment, addr, disp = 0, 674 dataSize="env.dataSize", 675 addressSize="env.addressSize", 676 atCPL0=False, nonSpec=False, implicitStack=False): 677 super(StoreOp, self).__init__(data, segment, addr, disp, 678 dataSize, addressSize, mem_flags, atCPL0, False, 679 nonSpec, implicitStack) 680 self.className = Name 681 self.mnemonic = name 682 683 microopClasses[name] = StoreOp 684 685 code = ''' 686 switch (dataSize) { 687 case 4: 688 Mem_u2qw[0] = (DataHi << 32) | DataLow; 689 break; 690 case 8: 691 Mem_u2qw[0] = DataLow; 692 Mem_u2qw[1] = DataHi; 693 break; 694 default: 695 panic("Unhandled data size %d in StSplit.\\n", dataSize); 696 }''' 697 698 defineMicroStoreSplitOp('StSplit', code); 699 700 defineMicroStoreSplitOp('StSplitul', code, 701 mem_flags='Request::LOCKED_RMW') 702 703 iop = InstObjParams("lea", "Lea", 'X86ISA::LdStOp', 704 { "code": "Data = merge(Data, EA, dataSize);", 705 "ea_code": "EA = " + segmentEAExpr, 706 "memDataSize": "dataSize" }) 707 header_output += MicroLeaDeclare.subst(iop) 708 decoder_output += MicroLdStOpConstructor.subst(iop) 709 exec_output += 
MicroLeaExecute.subst(iop) 710 711 class LeaOp(LdStOp): 712 def __init__(self, data, segment, addr, disp = 0, 713 dataSize="env.dataSize", addressSize="env.addressSize"): 714 super(LeaOp, self).__init__(data, segment, addr, disp, 715 dataSize, addressSize, "0", False, False, False, False) 716 self.className = "Lea" 717 self.mnemonic = "lea" 718 719 microopClasses["lea"] = LeaOp 720 721 722 iop = InstObjParams("tia", "Tia", 'X86ISA::LdStOp', 723 { "code": "xc->demapPage(EA, 0);", 724 "ea_code": calculateEA, 725 "memDataSize": "dataSize" }) 726 header_output += MicroLeaDeclare.subst(iop) 727 decoder_output += MicroLdStOpConstructor.subst(iop) 728 exec_output += MicroLeaExecute.subst(iop) 729 730 class TiaOp(LdStOp): 731 def __init__(self, segment, addr, disp = 0, 732 dataSize="env.dataSize", 733 addressSize="env.addressSize"): 734 super(TiaOp, self).__init__("InstRegIndex(NUM_INTREGS)", segment, 735 addr, disp, dataSize, addressSize, "0", False, False, 736 False, False) 737 self.className = "Tia" 738 self.mnemonic = "tia" 739 740 microopClasses["tia"] = TiaOp 741 742 class CdaOp(LdStOp): 743 def __init__(self, segment, addr, disp = 0, 744 dataSize="env.dataSize", 745 addressSize="env.addressSize", atCPL0=False): 746 super(CdaOp, self).__init__("InstRegIndex(NUM_INTREGS)", segment, 747 addr, disp, dataSize, addressSize, "Request::NO_ACCESS", 748 atCPL0, False, False, False) 749 self.className = "Cda" 750 self.mnemonic = "cda" 751 752 microopClasses["cda"] = CdaOp 753}}; 754