1// Copyright (c) 2007-2008 The Hewlett-Packard Development Company 2// Copyright (c) 2015 Advanced Micro Devices, Inc. 3// All rights reserved. 4// 5// The license below extends only to copyright in the software and shall 6// not be construed as granting a license to any other intellectual 7// property including but not limited to intellectual property relating 8// to a hardware implementation of the functionality of the software 9// licensed hereunder. You may use the software subject to the license 10// terms below provided that you ensure that this notice is replicated 11// unmodified and in its entirety in all distributions of the software, 12// modified or unmodified, in source code or in binary form. 13// 14// Copyright (c) 2008 The Regents of The University of Michigan 15// All rights reserved. 16// 17// Redistribution and use in source and binary forms, with or without 18// modification, are permitted provided that the following conditions are 19// met: redistributions of source code must retain the above copyright 20// notice, this list of conditions and the following disclaimer; 21// redistributions in binary form must reproduce the above copyright 22// notice, this list of conditions and the following disclaimer in the 23// documentation and/or other materials provided with the distribution; 24// neither the name of the copyright holders nor the names of its 25// contributors may be used to endorse or promote products derived from 26// this software without specific prior written permission. 27// 28// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 29// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 30// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 31// A PARTICULAR PURPOSE ARE DISCLAIMED. 
// IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Gabe Black

//////////////////////////////////////////////////////////////////////////
//
// LdStOp Microop templates
//
// Each "def template" below is a C++ code skeleton; the Python code in
// the let blocks later in this file substitutes the %(...)s holes
// (class name, effective-address code, per-op semantics, operand
// declarations/reads/writebacks) to generate one concrete class per
// load/store microop.
//
//////////////////////////////////////////////////////////////////////////

// LEA template

// Atomic/functional execute for address-only microops (lea, tia): the
// effective address is computed and consumed by %(code)s, but no memory
// access is performed.
def template MicroLeaExecute {{
    Fault %(class_name)s::execute(ExecContext *xc,
          Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        Addr EA;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        %(code)s;
        if(fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Class declaration for address-only microops; note there is no
// initiateAcc/completeAcc pair since no memory request is issued.
def template MicroLeaDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        %(class_name)s(ExtMachInst _machInst,
                const char * instMnem, uint64_t setFlags,
                uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
                uint64_t _disp, InstRegIndex _segment,
                InstRegIndex _data,
                uint8_t _dataSize, uint8_t _addressSize,
                Request::FlagsType _memFlags);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
    };
}};

// Load templates

// Atomic-mode execute for loads: compute EA, read memory, then run the
// microop semantics on the loaded value.
def template MicroLoadExecute {{
    Fault %(class_name)s::execute(ExecContext *xc,
          Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        Addr EA;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        fault = readMemAtomic(xc, traceData, EA, Mem, dataSize, memFlags);

        if (fault == NoFault) {
            %(code)s;
        } else if (memFlags & Request::PREFETCH) {
            // For prefetches, ignore any faults/exceptions.
            return NoFault;
        }
        if(fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Timing-mode first half for loads: compute EA and issue the memory
// read; the result is consumed later in completeAcc.
def template MicroLoadInitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext * xc,
            Trace::InstRecord * traceData) const
    {
        Fault fault = NoFault;
        Addr EA;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        fault = initiateMemRead(xc, traceData, EA,
                %(memDataSize)s, memFlags);

        return fault;
    }
}};

// Timing-mode second half for loads: unpack the returned packet into
// Mem, then run the microop semantics and write back.
def template MicroLoadCompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext * xc,
            Trace::InstRecord * traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        getMem(pkt, Mem, dataSize, traceData);

        %(code)s;

        if(fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Store templates

// Atomic-mode execute for stores: run the semantics to produce Mem,
// then write it out; operands are written back only if the memory
// write did not fault.
def template MicroStoreExecute {{
    Fault %(class_name)s::execute(ExecContext * xc,
            Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        Addr EA;
        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        %(code)s;

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, dataSize, EA,
                    memFlags, NULL);
            if (fault == NoFault) {
                %(op_wb)s;
            }
        }

        return fault;
    }
}};

// Timing-mode first half for stores: produce the store data and send
// the timing write request.
def template MicroStoreInitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext * xc,
            Trace::InstRecord * traceData) const
    {
        Fault fault = NoFault;

        Addr EA;
        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        %(code)s;

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, dataSize, EA,
                    memFlags, NULL);
        }
        return fault;
    }
}};

// Timing-mode second half for stores: the write has completed, so only
// %(complete_code)s (if any) and the operand writeback remain.
def template MicroStoreCompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt,
            ExecContext * xc, Trace::InstRecord * traceData) const
    {
        %(op_decl)s;
        %(op_rd)s;
        %(complete_code)s;
        %(op_wb)s;
        return NoFault;
    }
}};

// Class declaration for regular (single data register) load/store
// microops, with the full execute/initiateAcc/completeAcc interface.
def template MicroLdStOpDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        %(class_name)s(ExtMachInst _machInst,
                const char * instMnem, uint64_t setFlags,
                uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
                uint64_t _disp, InstRegIndex _segment,
                InstRegIndex _data,
                uint8_t _dataSize, uint8_t _addressSize,
                Request::FlagsType _memFlags);

        Fault execute(ExecContext *, Trace::InstRecord *) const;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;
    };
}};

// LdStSplitOp is a load or store that uses a pair of regs as the
// source or destination. Used for cmpxchg{8,16}b.
242def template MicroLdStSplitOpDeclare {{ 243 class %(class_name)s : public %(base_class)s 244 { 245 public: 246 %(class_name)s(ExtMachInst _machInst, 247 const char * instMnem, uint64_t setFlags, 248 uint8_t _scale, InstRegIndex _index, InstRegIndex _base, 249 uint64_t _disp, InstRegIndex _segment, 250 InstRegIndex _dataLow, InstRegIndex _dataHi, 251 uint8_t _dataSize, uint8_t _addressSize, 252 Request::FlagsType _memFlags); 253 254 Fault execute(ExecContext *, Trace::InstRecord *) const; 255 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const; 256 Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const; 257 }; 258}}; 259 260def template MicroLdStOpConstructor {{ 261 %(class_name)s::%(class_name)s( 262 ExtMachInst machInst, const char * instMnem, uint64_t setFlags, 263 uint8_t _scale, InstRegIndex _index, InstRegIndex _base, 264 uint64_t _disp, InstRegIndex _segment, 265 InstRegIndex _data, 266 uint8_t _dataSize, uint8_t _addressSize, 267 Request::FlagsType _memFlags) : 268 %(base_class)s(machInst, "%(mnemonic)s", instMnem, setFlags, 269 _scale, _index, _base, 270 _disp, _segment, _data, 271 _dataSize, _addressSize, _memFlags, %(op_class)s) 272 { 273 %(constructor)s; 274 } 275}}; 276 277def template MicroLdStSplitOpConstructor {{ 278 %(class_name)s::%(class_name)s( 279 ExtMachInst machInst, const char * instMnem, uint64_t setFlags, 280 uint8_t _scale, InstRegIndex _index, InstRegIndex _base, 281 uint64_t _disp, InstRegIndex _segment, 282 InstRegIndex _dataLow, InstRegIndex _dataHi, 283 uint8_t _dataSize, uint8_t _addressSize, 284 Request::FlagsType _memFlags) : 285 %(base_class)s(machInst, "%(mnemonic)s", instMnem, setFlags, 286 _scale, _index, _base, 287 _disp, _segment, _dataLow, _dataHi, 288 _dataSize, _addressSize, _memFlags, %(op_class)s) 289 { 290 %(constructor)s; 291 } 292}}; 293 294let {{ 295 class LdStOp(X86Microop): 296 def __init__(self, data, segment, addr, disp, 297 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec, 
298 implicitStack, uncacheable): 299 self.data = data 300 [self.scale, self.index, self.base] = addr 301 self.disp = disp 302 self.segment = segment 303 self.dataSize = dataSize 304 self.addressSize = addressSize 305 self.memFlags = baseFlags 306 if atCPL0: 307 self.memFlags += " | (CPL0FlagBit << FlagShift)" 308 self.instFlags = "" 309 if prefetch: 310 self.memFlags += " | Request::PREFETCH" 311 self.instFlags += " | (1ULL << StaticInst::IsDataPrefetch)" 312 if nonSpec: 313 self.instFlags += " | (1ULL << StaticInst::IsNonSpeculative)" 314 if uncacheable: 315 self.instFlags += " | (Request::UNCACHEABLE)" 316 # For implicit stack operations, we should use *not* use the 317 # alternative addressing mode for loads/stores if the prefix is set 318 if not implicitStack: 319 self.memFlags += " | (machInst.legacy.addr ? " + \ 320 "(AddrSizeFlagBit << FlagShift) : 0)" 321 322 def getAllocator(self, microFlags): 323 allocator = '''new %(class_name)s(machInst, macrocodeBlock, 324 %(flags)s, %(scale)s, %(index)s, %(base)s, 325 %(disp)s, %(segment)s, %(data)s, 326 %(dataSize)s, %(addressSize)s, %(memFlags)s)''' % { 327 "class_name" : self.className, 328 "flags" : self.microFlagsText(microFlags) + self.instFlags, 329 "scale" : self.scale, "index" : self.index, 330 "base" : self.base, 331 "disp" : self.disp, 332 "segment" : self.segment, "data" : self.data, 333 "dataSize" : self.dataSize, "addressSize" : self.addressSize, 334 "memFlags" : self.memFlags} 335 return allocator 336 337 class BigLdStOp(X86Microop): 338 def __init__(self, data, segment, addr, disp, 339 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec, 340 implicitStack, uncacheable): 341 self.data = data 342 [self.scale, self.index, self.base] = addr 343 self.disp = disp 344 self.segment = segment 345 self.dataSize = dataSize 346 self.addressSize = addressSize 347 self.memFlags = baseFlags 348 if atCPL0: 349 self.memFlags += " | (CPL0FlagBit << FlagShift)" 350 self.instFlags = "" 351 if prefetch: 352 
self.memFlags += " | Request::PREFETCH" 353 self.instFlags += " | (1ULL << StaticInst::IsDataPrefetch)" 354 if nonSpec: 355 self.instFlags += " | (1ULL << StaticInst::IsNonSpeculative)" 356 if uncacheable: 357 self.instFlags += " | (Request::UNCACHEABLE)" 358 # For implicit stack operations, we should use *not* use the 359 # alternative addressing mode for loads/stores if the prefix is set 360 if not implicitStack: 361 self.memFlags += " | (machInst.legacy.addr ? " + \ 362 "(AddrSizeFlagBit << FlagShift) : 0)" 363 364 def getAllocator(self, microFlags): 365 allocString = ''' 366 (%(dataSize)s >= 4) ? 367 (StaticInstPtr)(new %(class_name)sBig(machInst, 368 macrocodeBlock, %(flags)s, %(scale)s, %(index)s, 369 %(base)s, %(disp)s, %(segment)s, %(data)s, 370 %(dataSize)s, %(addressSize)s, %(memFlags)s)) : 371 (StaticInstPtr)(new %(class_name)s(machInst, 372 macrocodeBlock, %(flags)s, %(scale)s, %(index)s, 373 %(base)s, %(disp)s, %(segment)s, %(data)s, 374 %(dataSize)s, %(addressSize)s, %(memFlags)s)) 375 ''' 376 allocator = allocString % { 377 "class_name" : self.className, 378 "flags" : self.microFlagsText(microFlags) + self.instFlags, 379 "scale" : self.scale, "index" : self.index, 380 "base" : self.base, 381 "disp" : self.disp, 382 "segment" : self.segment, "data" : self.data, 383 "dataSize" : self.dataSize, "addressSize" : self.addressSize, 384 "memFlags" : self.memFlags} 385 return allocator 386 387 class LdStSplitOp(LdStOp): 388 def __init__(self, data, segment, addr, disp, 389 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec, 390 implicitStack, uncacheable): 391 super(LdStSplitOp, self).__init__(0, segment, addr, disp, 392 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec, 393 implicitStack, uncacheable) 394 (self.dataLow, self.dataHi) = data 395 396 def getAllocator(self, microFlags): 397 allocString = '''(StaticInstPtr)(new %(class_name)s(machInst, 398 macrocodeBlock, %(flags)s, %(scale)s, %(index)s, 399 %(base)s, %(disp)s, %(segment)s, 400 
%(dataLow)s, %(dataHi)s, 401 %(dataSize)s, %(addressSize)s, %(memFlags)s)) 402 ''' 403 allocator = allocString % { 404 "class_name" : self.className, 405 "flags" : self.microFlagsText(microFlags) + self.instFlags, 406 "scale" : self.scale, "index" : self.index, 407 "base" : self.base, 408 "disp" : self.disp, 409 "segment" : self.segment, 410 "dataLow" : self.dataLow, "dataHi" : self.dataHi, 411 "dataSize" : self.dataSize, "addressSize" : self.addressSize, 412 "memFlags" : self.memFlags} 413 return allocator 414 415}}; 416 417let {{ 418 419 # Make these empty strings so that concatenating onto 420 # them will always work. 421 header_output = "" 422 decoder_output = "" 423 exec_output = "" 424 425 segmentEAExpr = \ 426 'bits(scale * Index + Base + disp, addressSize * 8 - 1, 0);' 427 428 calculateEA = 'EA = SegBase + ' + segmentEAExpr 429 430 def defineMicroLoadOp(mnemonic, code, bigCode='', 431 mem_flags="0", big=True, nonSpec=False, 432 implicitStack=False): 433 global header_output 434 global decoder_output 435 global exec_output 436 global microopClasses 437 Name = mnemonic 438 name = mnemonic.lower() 439 440 # Build up the all register version of this micro op 441 iops = [InstObjParams(name, Name, 'X86ISA::LdStOp', 442 { "code": code, 443 "ea_code": calculateEA, 444 "memDataSize": "dataSize" })] 445 if big: 446 iops += [InstObjParams(name, Name + "Big", 'X86ISA::LdStOp', 447 { "code": bigCode, 448 "ea_code": calculateEA, 449 "memDataSize": "dataSize" })] 450 for iop in iops: 451 header_output += MicroLdStOpDeclare.subst(iop) 452 decoder_output += MicroLdStOpConstructor.subst(iop) 453 exec_output += MicroLoadExecute.subst(iop) 454 exec_output += MicroLoadInitiateAcc.subst(iop) 455 exec_output += MicroLoadCompleteAcc.subst(iop) 456 457 if implicitStack: 458 # For instructions that implicitly access the stack, the address 459 # size is the same as the stack segment pointer size, not the 460 # address size if specified by the instruction prefix 461 addressSize = 
"env.stackSize" 462 else: 463 addressSize = "env.addressSize" 464 465 base = LdStOp 466 if big: 467 base = BigLdStOp 468 class LoadOp(base): 469 def __init__(self, data, segment, addr, disp = 0, 470 dataSize="env.dataSize", 471 addressSize=addressSize, 472 atCPL0=False, prefetch=False, nonSpec=nonSpec, 473 implicitStack=implicitStack, uncacheable=False): 474 super(LoadOp, self).__init__(data, segment, addr, 475 disp, dataSize, addressSize, mem_flags, 476 atCPL0, prefetch, nonSpec, implicitStack, uncacheable) 477 self.className = Name 478 self.mnemonic = name 479 480 microopClasses[name] = LoadOp 481 482 defineMicroLoadOp('Ld', 'Data = merge(Data, Mem, dataSize);', 483 'Data = Mem & mask(dataSize * 8);') 484 defineMicroLoadOp('Ldis', 'Data = merge(Data, Mem, dataSize);', 485 'Data = Mem & mask(dataSize * 8);', 486 implicitStack=True) 487 defineMicroLoadOp('Ldst', 'Data = merge(Data, Mem, dataSize);', 488 'Data = Mem & mask(dataSize * 8);', 489 '(StoreCheck << FlagShift)') 490 defineMicroLoadOp('Ldstl', 'Data = merge(Data, Mem, dataSize);', 491 'Data = Mem & mask(dataSize * 8);', 492 '(StoreCheck << FlagShift) | Request::LOCKED_RMW', 493 nonSpec=True) 494 495 defineMicroLoadOp('Ldfp', code='FpData_uqw = Mem', big = False) 496 497 defineMicroLoadOp('Ldfp87', code=''' 498 switch (dataSize) 499 { 500 case 4: 501 FpData_df = *(float *)&Mem; 502 break; 503 case 8: 504 FpData_df = *(double *)&Mem; 505 break; 506 default: 507 panic("Unhandled data size in LdFp87.\\n"); 508 } 509 ''', big = False) 510 511 # Load integer from memory into x87 top-of-stack register. 512 # Used to implement fild instruction. 
513 defineMicroLoadOp('Ldifp87', code=''' 514 switch (dataSize) 515 { 516 case 2: 517 FpData_df = (int64_t)sext<16>(Mem); 518 break; 519 case 4: 520 FpData_df = (int64_t)sext<32>(Mem); 521 break; 522 case 8: 523 FpData_df = (int64_t)Mem; 524 break; 525 default: 526 panic("Unhandled data size in LdIFp87.\\n"); 527 } 528 ''', big = False) 529 530 def defineMicroLoadSplitOp(mnemonic, code, mem_flags="0", nonSpec=False): 531 global header_output 532 global decoder_output 533 global exec_output 534 global microopClasses 535 Name = mnemonic 536 name = mnemonic.lower() 537 538 iop = InstObjParams(name, Name, 'X86ISA::LdStSplitOp', 539 { "code": code, 540 "ea_code": calculateEA, 541 "memDataSize": "2 * dataSize" }) 542 543 header_output += MicroLdStSplitOpDeclare.subst(iop) 544 decoder_output += MicroLdStSplitOpConstructor.subst(iop) 545 exec_output += MicroLoadExecute.subst(iop) 546 exec_output += MicroLoadInitiateAcc.subst(iop) 547 exec_output += MicroLoadCompleteAcc.subst(iop) 548 549 class LoadOp(LdStSplitOp): 550 def __init__(self, data, segment, addr, disp = 0, 551 dataSize="env.dataSize", 552 addressSize="env.addressSize", 553 atCPL0=False, prefetch=False, nonSpec=nonSpec, 554 implicitStack=False, uncacheable=False): 555 super(LoadOp, self).__init__(data, segment, addr, 556 disp, dataSize, addressSize, mem_flags, 557 atCPL0, prefetch, nonSpec, implicitStack, uncacheable) 558 self.className = Name 559 self.mnemonic = name 560 561 microopClasses[name] = LoadOp 562 563 code = ''' 564 DataLow = Mem_u2qw[0]; 565 DataHi = Mem_u2qw[1]; 566 ''' 567 568 defineMicroLoadSplitOp('LdSplit', code, 569 '(StoreCheck << FlagShift)') 570 571 defineMicroLoadSplitOp('LdSplitl', code, 572 '(StoreCheck << FlagShift) | Request::LOCKED_RMW', 573 nonSpec=True) 574 575 def defineMicroStoreOp(mnemonic, code, completeCode="", mem_flags="0", 576 implicitStack=False): 577 global header_output 578 global decoder_output 579 global exec_output 580 global microopClasses 581 Name = mnemonic 582 name 
= mnemonic.lower() 583 584 # Build up the all register version of this micro op 585 iop = InstObjParams(name, Name, 'X86ISA::LdStOp', 586 { "code": code, 587 "complete_code": completeCode, 588 "ea_code": calculateEA, 589 "memDataSize": "dataSize" }) 590 header_output += MicroLdStOpDeclare.subst(iop) 591 decoder_output += MicroLdStOpConstructor.subst(iop) 592 exec_output += MicroStoreExecute.subst(iop) 593 exec_output += MicroStoreInitiateAcc.subst(iop) 594 exec_output += MicroStoreCompleteAcc.subst(iop) 595 596 if implicitStack: 597 # For instructions that implicitly access the stack, the address 598 # size is the same as the stack segment pointer size, not the 599 # address size if specified by the instruction prefix 600 addressSize = "env.stackSize" 601 else: 602 addressSize = "env.addressSize" 603 604 class StoreOp(LdStOp): 605 def __init__(self, data, segment, addr, disp = 0, 606 dataSize="env.dataSize", 607 addressSize=addressSize, 608 atCPL0=False, nonSpec=False, implicitStack=implicitStack, 609 uncacheable=False): 610 super(StoreOp, self).__init__(data, segment, addr, disp, 611 dataSize, addressSize, mem_flags, atCPL0, False, 612 nonSpec, implicitStack, uncacheable) 613 self.className = Name 614 self.mnemonic = name 615 616 microopClasses[name] = StoreOp 617 618 defineMicroStoreOp('St', 'Mem = pick(Data, 2, dataSize);') 619 defineMicroStoreOp('Stis', 'Mem = pick(Data, 2, dataSize);', 620 implicitStack=True) 621 defineMicroStoreOp('Stul', 'Mem = pick(Data, 2, dataSize);', 622 mem_flags="Request::LOCKED_RMW") 623 624 defineMicroStoreOp('Stfp', code='Mem = FpData_uqw;') 625 626 defineMicroStoreOp('Stfp87', code=''' 627 switch (dataSize) 628 { 629 case 4: { 630 float single(FpData_df); 631 Mem = *(uint32_t *)&single; 632 } break; 633 case 8: 634 Mem = *(uint64_t *)&FpData_df; 635 break; 636 default: 637 panic("Unhandled data size in StFp87.\\n"); 638 } 639 ''') 640 641 defineMicroStoreOp('Cda', 'Mem = 0;', mem_flags="Request::NO_ACCESS") 642 
defineMicroStoreOp('Clflushopt', 'Mem = 0;', 643 mem_flags="Request::CLEAN | Request::INVALIDATE" + 644 " | Request::DST_POC") 645 defineMicroStoreOp('Clwb', 'Mem = 0;', 646 mem_flags="Request::CLEAN | Request::DST_POC") 647 648 def defineMicroStoreSplitOp(mnemonic, code, 649 completeCode="", mem_flags="0"): 650 global header_output 651 global decoder_output 652 global exec_output 653 global microopClasses 654 Name = mnemonic 655 name = mnemonic.lower() 656 657 iop = InstObjParams(name, Name, 'X86ISA::LdStSplitOp', 658 { "code": code, 659 "complete_code": completeCode, 660 "ea_code": calculateEA, 661 "memDataSize": "2 * dataSize" }) 662 663 header_output += MicroLdStSplitOpDeclare.subst(iop) 664 decoder_output += MicroLdStSplitOpConstructor.subst(iop) 665 exec_output += MicroStoreExecute.subst(iop) 666 exec_output += MicroStoreInitiateAcc.subst(iop) 667 exec_output += MicroStoreCompleteAcc.subst(iop) 668 669 class StoreOp(LdStSplitOp): 670 def __init__(self, data, segment, addr, disp = 0, 671 dataSize="env.dataSize", 672 addressSize="env.addressSize", 673 atCPL0=False, nonSpec=False, implicitStack=False, 674 uncacheable=False): 675 super(StoreOp, self).__init__(data, segment, addr, disp, 676 dataSize, addressSize, mem_flags, atCPL0, False, 677 nonSpec, implicitStack, uncacheable) 678 self.className = Name 679 self.mnemonic = name 680 681 microopClasses[name] = StoreOp 682 683 code = ''' 684 Mem_u2qw[0] = DataLow; 685 Mem_u2qw[1] = DataHi; 686 ''' 687 688 defineMicroStoreSplitOp('StSplit', code); 689 690 defineMicroStoreSplitOp('StSplitul', code, 691 mem_flags='Request::LOCKED_RMW') 692 693 iop = InstObjParams("lea", "Lea", 'X86ISA::LdStOp', 694 { "code": "Data = merge(Data, EA, dataSize);", 695 "ea_code": "EA = " + segmentEAExpr, 696 "memDataSize": "dataSize" }) 697 header_output += MicroLeaDeclare.subst(iop) 698 decoder_output += MicroLdStOpConstructor.subst(iop) 699 exec_output += MicroLeaExecute.subst(iop) 700 701 class LeaOp(LdStOp): 702 def __init__(self, 
data, segment, addr, disp = 0, 703 dataSize="env.dataSize", addressSize="env.addressSize"): 704 super(LeaOp, self).__init__(data, segment, addr, disp, 705 dataSize, addressSize, "0", 706 False, False, False, False, False) 707 self.className = "Lea" 708 self.mnemonic = "lea" 709 710 microopClasses["lea"] = LeaOp 711 712 713 iop = InstObjParams("tia", "Tia", 'X86ISA::LdStOp', 714 { "code": "xc->demapPage(EA, 0);", 715 "ea_code": calculateEA, 716 "memDataSize": "dataSize" }) 717 header_output += MicroLeaDeclare.subst(iop) 718 decoder_output += MicroLdStOpConstructor.subst(iop) 719 exec_output += MicroLeaExecute.subst(iop) 720 721 class TiaOp(LdStOp): 722 def __init__(self, segment, addr, disp = 0, 723 dataSize="env.dataSize", 724 addressSize="env.addressSize"): 725 super(TiaOp, self).__init__("InstRegIndex(NUM_INTREGS)", segment, 726 addr, disp, dataSize, addressSize, "0", False, False, 727 False, False, False) 728 self.className = "Tia" 729 self.mnemonic = "tia" 730 731 microopClasses["tia"] = TiaOp 732 733 class CdaOp(LdStOp): 734 def __init__(self, segment, addr, disp = 0, 735 dataSize="env.dataSize", 736 addressSize="env.addressSize", atCPL0=False): 737 super(CdaOp, self).__init__("InstRegIndex(NUM_INTREGS)", segment, 738 addr, disp, dataSize, addressSize, "Request::NO_ACCESS", 739 atCPL0, False, False, False, False) 740 self.className = "Cda" 741 self.mnemonic = "cda" 742 743 microopClasses["cda"] = CdaOp 744}}; 745