// ldstop.isa revision 12384:481add71d2e4
// Copyright (c) 2007-2008 The Hewlett-Packard Development Company
// Copyright (c) 2015 Advanced Micro Devices, Inc.
// All rights reserved.
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder.  You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Copyright (c) 2008 The Regents of The University of Michigan
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT 32// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 33// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 34// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 35// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 36// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 37// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 38// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 39// 40// Authors: Gabe Black 41 42////////////////////////////////////////////////////////////////////////// 43// 44// LdStOp Microop templates 45// 46////////////////////////////////////////////////////////////////////////// 47 48// LEA template 49 50def template MicroLeaExecute {{ 51 Fault %(class_name)s::execute(ExecContext *xc, 52 Trace::InstRecord *traceData) const 53 { 54 Fault fault = NoFault; 55 Addr EA; 56 57 %(op_decl)s; 58 %(op_rd)s; 59 %(ea_code)s; 60 DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA); 61 62 %(code)s; 63 if(fault == NoFault) 64 { 65 %(op_wb)s; 66 } 67 68 return fault; 69 } 70}}; 71 72def template MicroLeaDeclare {{ 73 class %(class_name)s : public %(base_class)s 74 { 75 public: 76 %(class_name)s(ExtMachInst _machInst, 77 const char * instMnem, uint64_t setFlags, 78 uint8_t _scale, InstRegIndex _index, InstRegIndex _base, 79 uint64_t _disp, InstRegIndex _segment, 80 InstRegIndex _data, 81 uint8_t _dataSize, uint8_t _addressSize, 82 Request::FlagsType _memFlags); 83 84 Fault execute(ExecContext *, Trace::InstRecord *) const; 85 }; 86}}; 87 88// Load templates 89 90def template MicroLoadExecute {{ 91 Fault %(class_name)s::execute(ExecContext *xc, 92 Trace::InstRecord *traceData) const 93 { 94 Fault fault = NoFault; 95 Addr EA; 96 97 %(op_decl)s; 98 %(op_rd)s; 99 %(ea_code)s; 100 DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA); 101 102 fault = readMemAtomic(xc, 
traceData, EA, Mem, dataSize, memFlags); 103 104 if (fault == NoFault) { 105 %(code)s; 106 } else if (memFlags & Request::PREFETCH) { 107 // For prefetches, ignore any faults/exceptions. 108 return NoFault; 109 } 110 if(fault == NoFault) 111 { 112 %(op_wb)s; 113 } 114 115 return fault; 116 } 117}}; 118 119def template MicroLoadInitiateAcc {{ 120 Fault %(class_name)s::initiateAcc(ExecContext * xc, 121 Trace::InstRecord * traceData) const 122 { 123 Fault fault = NoFault; 124 Addr EA; 125 126 %(op_decl)s; 127 %(op_rd)s; 128 %(ea_code)s; 129 DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA); 130 131 fault = initiateMemRead(xc, traceData, EA, 132 %(memDataSize)s, memFlags); 133 134 return fault; 135 } 136}}; 137 138def template MicroLoadCompleteAcc {{ 139 Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext * xc, 140 Trace::InstRecord * traceData) const 141 { 142 Fault fault = NoFault; 143 144 %(op_decl)s; 145 %(op_rd)s; 146 147 getMem(pkt, Mem, dataSize, traceData); 148 149 %(code)s; 150 151 if(fault == NoFault) 152 { 153 %(op_wb)s; 154 } 155 156 return fault; 157 } 158}}; 159 160// Store templates 161 162def template MicroStoreExecute {{ 163 Fault %(class_name)s::execute(ExecContext * xc, 164 Trace::InstRecord *traceData) const 165 { 166 Fault fault = NoFault; 167 168 Addr EA; 169 %(op_decl)s; 170 %(op_rd)s; 171 %(ea_code)s; 172 DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA); 173 174 %(code)s; 175 176 if (fault == NoFault) { 177 fault = writeMemAtomic(xc, traceData, Mem, dataSize, EA, 178 memFlags, NULL); 179 if (fault == NoFault) { 180 %(op_wb)s; 181 } 182 } 183 184 return fault; 185 } 186}}; 187 188def template MicroStoreInitiateAcc {{ 189 Fault %(class_name)s::initiateAcc(ExecContext * xc, 190 Trace::InstRecord * traceData) const 191 { 192 Fault fault = NoFault; 193 194 Addr EA; 195 %(op_decl)s; 196 %(op_rd)s; 197 %(ea_code)s; 198 DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA); 199 200 
%(code)s; 201 202 if (fault == NoFault) { 203 fault = writeMemTiming(xc, traceData, Mem, dataSize, EA, 204 memFlags, NULL); 205 } 206 return fault; 207 } 208}}; 209 210def template MicroStoreCompleteAcc {{ 211 Fault %(class_name)s::completeAcc(PacketPtr pkt, 212 ExecContext * xc, Trace::InstRecord * traceData) const 213 { 214 %(op_decl)s; 215 %(op_rd)s; 216 %(complete_code)s; 217 %(op_wb)s; 218 return NoFault; 219 } 220}}; 221 222def template MicroLdStOpDeclare {{ 223 class %(class_name)s : public %(base_class)s 224 { 225 public: 226 %(class_name)s(ExtMachInst _machInst, 227 const char * instMnem, uint64_t setFlags, 228 uint8_t _scale, InstRegIndex _index, InstRegIndex _base, 229 uint64_t _disp, InstRegIndex _segment, 230 InstRegIndex _data, 231 uint8_t _dataSize, uint8_t _addressSize, 232 Request::FlagsType _memFlags); 233 234 Fault execute(ExecContext *, Trace::InstRecord *) const; 235 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const; 236 Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const; 237 }; 238}}; 239 240// LdStSplitOp is a load or store that uses a pair of regs as the 241// source or destination. Used for cmpxchg{8,16}b. 
242def template MicroLdStSplitOpDeclare {{ 243 class %(class_name)s : public %(base_class)s 244 { 245 public: 246 %(class_name)s(ExtMachInst _machInst, 247 const char * instMnem, uint64_t setFlags, 248 uint8_t _scale, InstRegIndex _index, InstRegIndex _base, 249 uint64_t _disp, InstRegIndex _segment, 250 InstRegIndex _dataLow, InstRegIndex _dataHi, 251 uint8_t _dataSize, uint8_t _addressSize, 252 Request::FlagsType _memFlags); 253 254 Fault execute(ExecContext *, Trace::InstRecord *) const; 255 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const; 256 Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const; 257 }; 258}}; 259 260def template MicroLdStOpConstructor {{ 261 %(class_name)s::%(class_name)s( 262 ExtMachInst machInst, const char * instMnem, uint64_t setFlags, 263 uint8_t _scale, InstRegIndex _index, InstRegIndex _base, 264 uint64_t _disp, InstRegIndex _segment, 265 InstRegIndex _data, 266 uint8_t _dataSize, uint8_t _addressSize, 267 Request::FlagsType _memFlags) : 268 %(base_class)s(machInst, "%(mnemonic)s", instMnem, setFlags, 269 _scale, _index, _base, 270 _disp, _segment, _data, 271 _dataSize, _addressSize, _memFlags, %(op_class)s) 272 { 273 %(constructor)s; 274 } 275}}; 276 277def template MicroLdStSplitOpConstructor {{ 278 %(class_name)s::%(class_name)s( 279 ExtMachInst machInst, const char * instMnem, uint64_t setFlags, 280 uint8_t _scale, InstRegIndex _index, InstRegIndex _base, 281 uint64_t _disp, InstRegIndex _segment, 282 InstRegIndex _dataLow, InstRegIndex _dataHi, 283 uint8_t _dataSize, uint8_t _addressSize, 284 Request::FlagsType _memFlags) : 285 %(base_class)s(machInst, "%(mnemonic)s", instMnem, setFlags, 286 _scale, _index, _base, 287 _disp, _segment, _dataLow, _dataHi, 288 _dataSize, _addressSize, _memFlags, %(op_class)s) 289 { 290 %(constructor)s; 291 } 292}}; 293 294let {{ 295 class LdStOp(X86Microop): 296 def __init__(self, data, segment, addr, disp, 297 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec, 
298 implicitStack): 299 self.data = data 300 [self.scale, self.index, self.base] = addr 301 self.disp = disp 302 self.segment = segment 303 self.dataSize = dataSize 304 self.addressSize = addressSize 305 self.memFlags = baseFlags 306 if atCPL0: 307 self.memFlags += " | (CPL0FlagBit << FlagShift)" 308 self.instFlags = "" 309 if prefetch: 310 self.memFlags += " | Request::PREFETCH" 311 self.instFlags += " | (1ULL << StaticInst::IsDataPrefetch)" 312 if nonSpec: 313 self.instFlags += " | (1ULL << StaticInst::IsNonSpeculative)" 314 # For implicit stack operations, we should use *not* use the 315 # alternative addressing mode for loads/stores if the prefix is set 316 if not implicitStack: 317 self.memFlags += " | (machInst.legacy.addr ? " + \ 318 "(AddrSizeFlagBit << FlagShift) : 0)" 319 320 def getAllocator(self, microFlags): 321 allocator = '''new %(class_name)s(machInst, macrocodeBlock, 322 %(flags)s, %(scale)s, %(index)s, %(base)s, 323 %(disp)s, %(segment)s, %(data)s, 324 %(dataSize)s, %(addressSize)s, %(memFlags)s)''' % { 325 "class_name" : self.className, 326 "flags" : self.microFlagsText(microFlags) + self.instFlags, 327 "scale" : self.scale, "index" : self.index, 328 "base" : self.base, 329 "disp" : self.disp, 330 "segment" : self.segment, "data" : self.data, 331 "dataSize" : self.dataSize, "addressSize" : self.addressSize, 332 "memFlags" : self.memFlags} 333 return allocator 334 335 class BigLdStOp(X86Microop): 336 def __init__(self, data, segment, addr, disp, 337 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec, 338 implicitStack): 339 self.data = data 340 [self.scale, self.index, self.base] = addr 341 self.disp = disp 342 self.segment = segment 343 self.dataSize = dataSize 344 self.addressSize = addressSize 345 self.memFlags = baseFlags 346 if atCPL0: 347 self.memFlags += " | (CPL0FlagBit << FlagShift)" 348 self.instFlags = "" 349 if prefetch: 350 self.memFlags += " | Request::PREFETCH" 351 self.instFlags += " | (1ULL << StaticInst::IsDataPrefetch)" 
352 if nonSpec: 353 self.instFlags += " | (1ULL << StaticInst::IsNonSpeculative)" 354 # For implicit stack operations, we should use *not* use the 355 # alternative addressing mode for loads/stores if the prefix is set 356 if not implicitStack: 357 self.memFlags += " | (machInst.legacy.addr ? " + \ 358 "(AddrSizeFlagBit << FlagShift) : 0)" 359 360 def getAllocator(self, microFlags): 361 allocString = ''' 362 (%(dataSize)s >= 4) ? 363 (StaticInstPtr)(new %(class_name)sBig(machInst, 364 macrocodeBlock, %(flags)s, %(scale)s, %(index)s, 365 %(base)s, %(disp)s, %(segment)s, %(data)s, 366 %(dataSize)s, %(addressSize)s, %(memFlags)s)) : 367 (StaticInstPtr)(new %(class_name)s(machInst, 368 macrocodeBlock, %(flags)s, %(scale)s, %(index)s, 369 %(base)s, %(disp)s, %(segment)s, %(data)s, 370 %(dataSize)s, %(addressSize)s, %(memFlags)s)) 371 ''' 372 allocator = allocString % { 373 "class_name" : self.className, 374 "flags" : self.microFlagsText(microFlags) + self.instFlags, 375 "scale" : self.scale, "index" : self.index, 376 "base" : self.base, 377 "disp" : self.disp, 378 "segment" : self.segment, "data" : self.data, 379 "dataSize" : self.dataSize, "addressSize" : self.addressSize, 380 "memFlags" : self.memFlags} 381 return allocator 382 383 class LdStSplitOp(LdStOp): 384 def __init__(self, data, segment, addr, disp, 385 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec, 386 implicitStack): 387 super(LdStSplitOp, self).__init__(0, segment, addr, disp, 388 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec, 389 implicitStack) 390 (self.dataLow, self.dataHi) = data 391 392 def getAllocator(self, microFlags): 393 allocString = '''(StaticInstPtr)(new %(class_name)s(machInst, 394 macrocodeBlock, %(flags)s, %(scale)s, %(index)s, 395 %(base)s, %(disp)s, %(segment)s, 396 %(dataLow)s, %(dataHi)s, 397 %(dataSize)s, %(addressSize)s, %(memFlags)s)) 398 ''' 399 allocator = allocString % { 400 "class_name" : self.className, 401 "flags" : self.microFlagsText(microFlags) + 
self.instFlags, 402 "scale" : self.scale, "index" : self.index, 403 "base" : self.base, 404 "disp" : self.disp, 405 "segment" : self.segment, 406 "dataLow" : self.dataLow, "dataHi" : self.dataHi, 407 "dataSize" : self.dataSize, "addressSize" : self.addressSize, 408 "memFlags" : self.memFlags} 409 return allocator 410 411}}; 412 413let {{ 414 415 # Make these empty strings so that concatenating onto 416 # them will always work. 417 header_output = "" 418 decoder_output = "" 419 exec_output = "" 420 421 segmentEAExpr = \ 422 'bits(scale * Index + Base + disp, addressSize * 8 - 1, 0);' 423 424 calculateEA = 'EA = SegBase + ' + segmentEAExpr 425 426 def defineMicroLoadOp(mnemonic, code, bigCode='', 427 mem_flags="0", big=True, nonSpec=False, 428 implicitStack=False): 429 global header_output 430 global decoder_output 431 global exec_output 432 global microopClasses 433 Name = mnemonic 434 name = mnemonic.lower() 435 436 # Build up the all register version of this micro op 437 iops = [InstObjParams(name, Name, 'X86ISA::LdStOp', 438 { "code": code, 439 "ea_code": calculateEA, 440 "memDataSize": "dataSize" })] 441 if big: 442 iops += [InstObjParams(name, Name + "Big", 'X86ISA::LdStOp', 443 { "code": bigCode, 444 "ea_code": calculateEA, 445 "memDataSize": "dataSize" })] 446 for iop in iops: 447 header_output += MicroLdStOpDeclare.subst(iop) 448 decoder_output += MicroLdStOpConstructor.subst(iop) 449 exec_output += MicroLoadExecute.subst(iop) 450 exec_output += MicroLoadInitiateAcc.subst(iop) 451 exec_output += MicroLoadCompleteAcc.subst(iop) 452 453 if implicitStack: 454 # For instructions that implicitly access the stack, the address 455 # size is the same as the stack segment pointer size, not the 456 # address size if specified by the instruction prefix 457 addressSize = "env.stackSize" 458 else: 459 addressSize = "env.addressSize" 460 461 base = LdStOp 462 if big: 463 base = BigLdStOp 464 class LoadOp(base): 465 def __init__(self, data, segment, addr, disp = 0, 466 
dataSize="env.dataSize", 467 addressSize=addressSize, 468 atCPL0=False, prefetch=False, nonSpec=nonSpec, 469 implicitStack=implicitStack): 470 super(LoadOp, self).__init__(data, segment, addr, 471 disp, dataSize, addressSize, mem_flags, 472 atCPL0, prefetch, nonSpec, implicitStack) 473 self.className = Name 474 self.mnemonic = name 475 476 microopClasses[name] = LoadOp 477 478 defineMicroLoadOp('Ld', 'Data = merge(Data, Mem, dataSize);', 479 'Data = Mem & mask(dataSize * 8);') 480 defineMicroLoadOp('Ldis', 'Data = merge(Data, Mem, dataSize);', 481 'Data = Mem & mask(dataSize * 8);', 482 implicitStack=True) 483 defineMicroLoadOp('Ldst', 'Data = merge(Data, Mem, dataSize);', 484 'Data = Mem & mask(dataSize * 8);', 485 '(StoreCheck << FlagShift)') 486 defineMicroLoadOp('Ldstl', 'Data = merge(Data, Mem, dataSize);', 487 'Data = Mem & mask(dataSize * 8);', 488 '(StoreCheck << FlagShift) | Request::LOCKED_RMW', 489 nonSpec=True) 490 491 defineMicroLoadOp('Ldfp', code='FpData_uqw = Mem', big = False) 492 493 defineMicroLoadOp('Ldfp87', code=''' 494 switch (dataSize) 495 { 496 case 4: 497 FpData_df = *(float *)&Mem; 498 break; 499 case 8: 500 FpData_df = *(double *)&Mem; 501 break; 502 default: 503 panic("Unhandled data size in LdFp87.\\n"); 504 } 505 ''', big = False) 506 507 # Load integer from memory into x87 top-of-stack register. 508 # Used to implement fild instruction. 
509 defineMicroLoadOp('Ldifp87', code=''' 510 switch (dataSize) 511 { 512 case 2: 513 FpData_df = (int64_t)sext<16>(Mem); 514 break; 515 case 4: 516 FpData_df = (int64_t)sext<32>(Mem); 517 break; 518 case 8: 519 FpData_df = (int64_t)Mem; 520 break; 521 default: 522 panic("Unhandled data size in LdIFp87.\\n"); 523 } 524 ''', big = False) 525 526 def defineMicroLoadSplitOp(mnemonic, code, mem_flags="0", nonSpec=False): 527 global header_output 528 global decoder_output 529 global exec_output 530 global microopClasses 531 Name = mnemonic 532 name = mnemonic.lower() 533 534 iop = InstObjParams(name, Name, 'X86ISA::LdStSplitOp', 535 { "code": code, 536 "ea_code": calculateEA, 537 "memDataSize": "2 * dataSize" }) 538 539 header_output += MicroLdStSplitOpDeclare.subst(iop) 540 decoder_output += MicroLdStSplitOpConstructor.subst(iop) 541 exec_output += MicroLoadExecute.subst(iop) 542 exec_output += MicroLoadInitiateAcc.subst(iop) 543 exec_output += MicroLoadCompleteAcc.subst(iop) 544 545 class LoadOp(LdStSplitOp): 546 def __init__(self, data, segment, addr, disp = 0, 547 dataSize="env.dataSize", 548 addressSize="env.addressSize", 549 atCPL0=False, prefetch=False, nonSpec=nonSpec, 550 implicitStack=False): 551 super(LoadOp, self).__init__(data, segment, addr, 552 disp, dataSize, addressSize, mem_flags, 553 atCPL0, prefetch, nonSpec, implicitStack) 554 self.className = Name 555 self.mnemonic = name 556 557 microopClasses[name] = LoadOp 558 559 code = ''' 560 DataLow = Mem_u2qw[0]; 561 DataHi = Mem_u2qw[1]; 562 ''' 563 564 defineMicroLoadSplitOp('LdSplit', code, 565 '(StoreCheck << FlagShift)') 566 567 defineMicroLoadSplitOp('LdSplitl', code, 568 '(StoreCheck << FlagShift) | Request::LOCKED_RMW', 569 nonSpec=True) 570 571 def defineMicroStoreOp(mnemonic, code, completeCode="", mem_flags="0", 572 implicitStack=False): 573 global header_output 574 global decoder_output 575 global exec_output 576 global microopClasses 577 Name = mnemonic 578 name = mnemonic.lower() 579 580 # 
Build up the all register version of this micro op 581 iop = InstObjParams(name, Name, 'X86ISA::LdStOp', 582 { "code": code, 583 "complete_code": completeCode, 584 "ea_code": calculateEA, 585 "memDataSize": "dataSize" }) 586 header_output += MicroLdStOpDeclare.subst(iop) 587 decoder_output += MicroLdStOpConstructor.subst(iop) 588 exec_output += MicroStoreExecute.subst(iop) 589 exec_output += MicroStoreInitiateAcc.subst(iop) 590 exec_output += MicroStoreCompleteAcc.subst(iop) 591 592 if implicitStack: 593 # For instructions that implicitly access the stack, the address 594 # size is the same as the stack segment pointer size, not the 595 # address size if specified by the instruction prefix 596 addressSize = "env.stackSize" 597 else: 598 addressSize = "env.addressSize" 599 600 class StoreOp(LdStOp): 601 def __init__(self, data, segment, addr, disp = 0, 602 dataSize="env.dataSize", 603 addressSize=addressSize, 604 atCPL0=False, nonSpec=False, implicitStack=implicitStack): 605 super(StoreOp, self).__init__(data, segment, addr, disp, 606 dataSize, addressSize, mem_flags, atCPL0, False, 607 nonSpec, implicitStack) 608 self.className = Name 609 self.mnemonic = name 610 611 microopClasses[name] = StoreOp 612 613 defineMicroStoreOp('St', 'Mem = pick(Data, 2, dataSize);') 614 defineMicroStoreOp('Stis', 'Mem = pick(Data, 2, dataSize);', 615 implicitStack=True) 616 defineMicroStoreOp('Stul', 'Mem = pick(Data, 2, dataSize);', 617 mem_flags="Request::LOCKED_RMW") 618 619 defineMicroStoreOp('Stfp', code='Mem = FpData_uqw;') 620 621 defineMicroStoreOp('Stfp87', code=''' 622 switch (dataSize) 623 { 624 case 4: { 625 float single(FpData_df); 626 Mem = *(uint32_t *)&single; 627 } break; 628 case 8: 629 Mem = *(uint64_t *)&FpData_df; 630 break; 631 default: 632 panic("Unhandled data size in StFp87.\\n"); 633 } 634 ''') 635 636 defineMicroStoreOp('Cda', 'Mem = 0;', mem_flags="Request::NO_ACCESS") 637 638 def defineMicroStoreSplitOp(mnemonic, code, 639 completeCode="", mem_flags="0"): 
640 global header_output 641 global decoder_output 642 global exec_output 643 global microopClasses 644 Name = mnemonic 645 name = mnemonic.lower() 646 647 iop = InstObjParams(name, Name, 'X86ISA::LdStSplitOp', 648 { "code": code, 649 "complete_code": completeCode, 650 "ea_code": calculateEA, 651 "memDataSize": "2 * dataSize" }) 652 653 header_output += MicroLdStSplitOpDeclare.subst(iop) 654 decoder_output += MicroLdStSplitOpConstructor.subst(iop) 655 exec_output += MicroStoreExecute.subst(iop) 656 exec_output += MicroStoreInitiateAcc.subst(iop) 657 exec_output += MicroStoreCompleteAcc.subst(iop) 658 659 class StoreOp(LdStSplitOp): 660 def __init__(self, data, segment, addr, disp = 0, 661 dataSize="env.dataSize", 662 addressSize="env.addressSize", 663 atCPL0=False, nonSpec=False, implicitStack=False): 664 super(StoreOp, self).__init__(data, segment, addr, disp, 665 dataSize, addressSize, mem_flags, atCPL0, False, 666 nonSpec, implicitStack) 667 self.className = Name 668 self.mnemonic = name 669 670 microopClasses[name] = StoreOp 671 672 code = ''' 673 Mem_u2qw[0] = DataLow; 674 Mem_u2qw[1] = DataHi; 675 ''' 676 677 defineMicroStoreSplitOp('StSplit', code); 678 679 defineMicroStoreSplitOp('StSplitul', code, 680 mem_flags='Request::LOCKED_RMW') 681 682 iop = InstObjParams("lea", "Lea", 'X86ISA::LdStOp', 683 { "code": "Data = merge(Data, EA, dataSize);", 684 "ea_code": "EA = " + segmentEAExpr, 685 "memDataSize": "dataSize" }) 686 header_output += MicroLeaDeclare.subst(iop) 687 decoder_output += MicroLdStOpConstructor.subst(iop) 688 exec_output += MicroLeaExecute.subst(iop) 689 690 class LeaOp(LdStOp): 691 def __init__(self, data, segment, addr, disp = 0, 692 dataSize="env.dataSize", addressSize="env.addressSize"): 693 super(LeaOp, self).__init__(data, segment, addr, disp, 694 dataSize, addressSize, "0", False, False, False, False) 695 self.className = "Lea" 696 self.mnemonic = "lea" 697 698 microopClasses["lea"] = LeaOp 699 700 701 iop = InstObjParams("tia", "Tia", 
'X86ISA::LdStOp', 702 { "code": "xc->demapPage(EA, 0);", 703 "ea_code": calculateEA, 704 "memDataSize": "dataSize" }) 705 header_output += MicroLeaDeclare.subst(iop) 706 decoder_output += MicroLdStOpConstructor.subst(iop) 707 exec_output += MicroLeaExecute.subst(iop) 708 709 class TiaOp(LdStOp): 710 def __init__(self, segment, addr, disp = 0, 711 dataSize="env.dataSize", 712 addressSize="env.addressSize"): 713 super(TiaOp, self).__init__("InstRegIndex(NUM_INTREGS)", segment, 714 addr, disp, dataSize, addressSize, "0", False, False, 715 False, False) 716 self.className = "Tia" 717 self.mnemonic = "tia" 718 719 microopClasses["tia"] = TiaOp 720 721 class CdaOp(LdStOp): 722 def __init__(self, segment, addr, disp = 0, 723 dataSize="env.dataSize", 724 addressSize="env.addressSize", atCPL0=False): 725 super(CdaOp, self).__init__("InstRegIndex(NUM_INTREGS)", segment, 726 addr, disp, dataSize, addressSize, "Request::NO_ACCESS", 727 atCPL0, False, False, False) 728 self.className = "Cda" 729 self.mnemonic = "cda" 730 731 microopClasses["cda"] = CdaOp 732}}; 733