// ldstop.isa (revision 11829)
// Copyright (c) 2007-2008 The Hewlett-Packard Development Company
// Copyright (c) 2015 Advanced Micro Devices, Inc.
// All rights reserved.
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder.  You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Copyright (c) 2008 The Regents of The University of Michigan
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED.
// IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Gabe Black

//////////////////////////////////////////////////////////////////////////
//
// LdStOp Microop templates
//
//////////////////////////////////////////////////////////////////////////

// LEA template

// Execute method for LEA-style microops: computes the effective address
// and runs the microop code on it, but performs no memory access.
def template MicroLeaExecute {{
    Fault %(class_name)s::execute(CPU_EXEC_CONTEXT *xc,
          Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        Addr EA;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        %(code)s;
        if (fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Class declaration for LEA-style microops. Only execute() is declared
// since these microops never access memory.
def template MicroLeaDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        %(class_name)s(ExtMachInst _machInst,
                const char * instMnem, uint64_t setFlags,
                uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
                uint64_t _disp, InstRegIndex _segment,
                InstRegIndex _data,
                uint8_t _dataSize, uint8_t _addressSize,
                Request::FlagsType _memFlags);

        %(BasicExecDeclare)s
    };
}};

// Load templates

// Atomic-mode load: compute the EA, read memory, then run the microop
// code on the loaded value.
def template MicroLoadExecute {{
    Fault %(class_name)s::execute(CPU_EXEC_CONTEXT *xc,
          Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        Addr EA;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        fault = readMemAtomic(xc, traceData, EA, Mem,
                              %(memDataSize)s, memFlags);

        if (fault == NoFault) {
            %(code)s;
        } else if (memFlags & Request::PREFETCH) {
            // For prefetches, ignore any faults/exceptions.
            return NoFault;
        }
        if (fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Timing-mode load, first half: compute the EA and issue the read.
def template MicroLoadInitiateAcc {{
    Fault %(class_name)s::initiateAcc(CPU_EXEC_CONTEXT * xc,
            Trace::InstRecord * traceData) const
    {
        Fault fault = NoFault;
        Addr EA;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        fault = initiateMemRead(xc, traceData, EA,
                                %(memDataSize)s, memFlags);

        return fault;
    }
}};

// Timing-mode load, second half: consume the response packet and run
// the microop code on the returned data.
def template MicroLoadCompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt,
            CPU_EXEC_CONTEXT * xc,
            Trace::InstRecord * traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        getMem(pkt, Mem, %(memDataSize)s, traceData);

        %(code)s;

        if (fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Store templates

// Atomic-mode store: run the microop code to produce the value, then
// write it to memory; registers are written back only if the store
// succeeded.
def template MicroStoreExecute {{
    Fault %(class_name)s::execute(CPU_EXEC_CONTEXT * xc,
            Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        Addr EA;
        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        %(code)s;

        if (fault == NoFault)
        {
            fault = writeMemAtomic(xc, traceData, Mem, %(memDataSize)s, EA,
                    memFlags, NULL);
            if (fault == NoFault)
            {
                %(op_wb)s;
            }
        }

        return fault;
    }
}};

// Timing-mode store, first half: produce the value and issue the write.
def template MicroStoreInitiateAcc {{
    Fault %(class_name)s::initiateAcc(CPU_EXEC_CONTEXT * xc,
            Trace::InstRecord * traceData) const
    {
        Fault fault = NoFault;

        Addr EA;
        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        %(code)s;

        if (fault == NoFault)
        {
            fault = writeMemTiming(xc, traceData, Mem, %(memDataSize)s, EA,
                    memFlags, NULL);
        }

        return fault;
    }
}};

// Timing-mode store, second half: run any completion code and write
// back registers once the write has been acknowledged.
def template MicroStoreCompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt,
            CPU_EXEC_CONTEXT * xc, Trace::InstRecord * traceData) const
    {
        %(op_decl)s;
        %(op_rd)s;
        %(complete_code)s;
        %(op_wb)s;
        return NoFault;
    }
}};

// Common templates

//This declares the initiateAcc function in memory operations
def template InitiateAccDeclare {{
    Fault initiateAcc(%(CPU_exec_context)s *, Trace::InstRecord *) const;
}};

//This declares the completeAcc function in memory operations
def template CompleteAccDeclare {{
    Fault completeAcc(PacketPtr, %(CPU_exec_context)s *, Trace::InstRecord *) const;
}};

// Class declaration for load/store microops with a single data register.
def template MicroLdStOpDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        %(class_name)s(ExtMachInst _machInst,
                const char * instMnem, uint64_t setFlags,
                uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
                uint64_t _disp, InstRegIndex _segment,
                InstRegIndex _data,
                uint8_t _dataSize, uint8_t _addressSize,
                Request::FlagsType _memFlags);

        %(BasicExecDeclare)s

        %(InitiateAccDeclare)s

        %(CompleteAccDeclare)s
    };
}};

// LdStSplitOp is a load or store that uses a pair of regs as the
// source or destination. Used for cmpxchg{8,16}b.
// Class declaration for load/store microops that use a pair of data
// registers (low/high halves of the memory value).
def template MicroLdStSplitOpDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        %(class_name)s(ExtMachInst _machInst,
                const char * instMnem, uint64_t setFlags,
                uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
                uint64_t _disp, InstRegIndex _segment,
                InstRegIndex _dataLow, InstRegIndex _dataHi,
                uint8_t _dataSize, uint8_t _addressSize,
                Request::FlagsType _memFlags);

        %(BasicExecDeclare)s

        %(InitiateAccDeclare)s

        %(CompleteAccDeclare)s
    };
}};

def template MicroLdStOpConstructor {{
    %(class_name)s::%(class_name)s(
            ExtMachInst machInst, const char * instMnem, uint64_t setFlags,
            uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
            uint64_t _disp, InstRegIndex _segment,
            InstRegIndex _data,
            uint8_t _dataSize, uint8_t _addressSize,
            Request::FlagsType _memFlags) :
        %(base_class)s(machInst, "%(mnemonic)s", instMnem, setFlags,
                _scale, _index, _base,
                _disp, _segment, _data,
                _dataSize, _addressSize, _memFlags, %(op_class)s)
    {
        %(constructor)s;
    }
}};

def template MicroLdStSplitOpConstructor {{
    %(class_name)s::%(class_name)s(
            ExtMachInst machInst, const char * instMnem, uint64_t setFlags,
            uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
            uint64_t _disp, InstRegIndex _segment,
            InstRegIndex _dataLow, InstRegIndex _dataHi,
            uint8_t _dataSize, uint8_t _addressSize,
            Request::FlagsType _memFlags) :
        %(base_class)s(machInst, "%(mnemonic)s", instMnem, setFlags,
                _scale, _index, _base,
                _disp, _segment, _dataLow, _dataHi,
                _dataSize, _addressSize, _memFlags, %(op_class)s)
    {
        %(constructor)s;
    }
}};

let {{
    # Python-side descriptor for a load/store microop with a single data
    # register. getAllocator() produces the C++ "new ..." expression used
    # by the decoder.
    class LdStOp(X86Microop):
        def __init__(self, data, segment, addr, disp,
                dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec,
                implicitStack):
            self.data = data
            [self.scale, self.index, self.base] = addr
            self.disp = disp
            self.segment = segment
            self.dataSize = dataSize
            self.addressSize = addressSize
            self.memFlags = baseFlags
            if atCPL0:
                self.memFlags += " | (CPL0FlagBit << FlagShift)"
            self.instFlags = ""
            if prefetch:
                self.memFlags += " | Request::PREFETCH"
                self.instFlags += " | (1ULL << StaticInst::IsDataPrefetch)"
            if nonSpec:
                self.instFlags += " | (1ULL << StaticInst::IsNonSpeculative)"
            # For implicit stack operations, we should *not* use the
            # alternative addressing mode for loads/stores if the prefix
            # is set.
            if not implicitStack:
                self.memFlags += " | (machInst.legacy.addr ? " + \
                                 "(AddrSizeFlagBit << FlagShift) : 0)"

        def getAllocator(self, microFlags):
            allocator = '''new %(class_name)s(machInst, macrocodeBlock,
                    %(flags)s, %(scale)s, %(index)s, %(base)s,
                    %(disp)s, %(segment)s, %(data)s,
                    %(dataSize)s, %(addressSize)s, %(memFlags)s)''' % {
                "class_name" : self.className,
                "flags" : self.microFlagsText(microFlags) + self.instFlags,
                "scale" : self.scale, "index" : self.index,
                "base" : self.base,
                "disp" : self.disp,
                "segment" : self.segment, "data" : self.data,
                "dataSize" : self.dataSize, "addressSize" : self.addressSize,
                "memFlags" : self.memFlags}
            return allocator

    # Variant that picks between a regular and a "Big" implementation at
    # decode time based on the operand size. Inherits LdStOp's __init__
    # (the original duplicated it verbatim) and only overrides the
    # allocator expression.
    class BigLdStOp(LdStOp):
        def getAllocator(self, microFlags):
            allocString = '''
                (%(dataSize)s >= 4) ?
                    (StaticInstPtr)(new %(class_name)sBig(machInst,
                        macrocodeBlock, %(flags)s, %(scale)s, %(index)s,
                        %(base)s, %(disp)s, %(segment)s, %(data)s,
                        %(dataSize)s, %(addressSize)s, %(memFlags)s)) :
                    (StaticInstPtr)(new %(class_name)s(machInst,
                        macrocodeBlock, %(flags)s, %(scale)s, %(index)s,
                        %(base)s, %(disp)s, %(segment)s, %(data)s,
                        %(dataSize)s, %(addressSize)s, %(memFlags)s))
                '''
            allocator = allocString % {
                "class_name" : self.className,
                "flags" : self.microFlagsText(microFlags) + self.instFlags,
                "scale" : self.scale, "index" : self.index,
                "base" : self.base,
                "disp" : self.disp,
                "segment" : self.segment, "data" : self.data,
                "dataSize" : self.dataSize, "addressSize" : self.addressSize,
                "memFlags" : self.memFlags}
            return allocator

    # Descriptor for split-register loads/stores (register pair holds the
    # low/high halves of the memory value). "data" is a (low, high) tuple.
    class LdStSplitOp(LdStOp):
        def __init__(self, data, segment, addr, disp,
                dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec,
                implicitStack):
            super(LdStSplitOp, self).__init__(0, segment, addr, disp,
                    dataSize, addressSize, baseFlags, atCPL0, prefetch,
                    nonSpec, implicitStack)
            (self.dataLow, self.dataHi) = data

        def getAllocator(self, microFlags):
            allocString = '''(StaticInstPtr)(new %(class_name)s(machInst,
                    macrocodeBlock, %(flags)s, %(scale)s, %(index)s,
                    %(base)s, %(disp)s, %(segment)s,
                    %(dataLow)s, %(dataHi)s,
                    %(dataSize)s, %(addressSize)s, %(memFlags)s))
                '''
            allocator = allocString % {
                "class_name" : self.className,
                "flags" : self.microFlagsText(microFlags) + self.instFlags,
                "scale" : self.scale, "index" : self.index,
                "base" : self.base,
                "disp" : self.disp,
                "segment" : self.segment,
                "dataLow" : self.dataLow, "dataHi" : self.dataHi,
                "dataSize" : self.dataSize, "addressSize" : self.addressSize,
                "memFlags" : self.memFlags}
            return allocator

}};

let {{

    # Make these empty strings so that concatenating onto
    # them will always work.
    header_output = ""
    decoder_output = ""
    exec_output = ""

    # Effective address within the segment: scale * index + base + disp,
    # truncated to the address size.
    segmentEAExpr = \
        'bits(scale * Index + Base + disp, addressSize * 8 - 1, 0);'

    calculateEA = 'EA = SegBase + ' + segmentEAExpr

    # Generate the C++ classes and Python allocator class for one load
    # microop, optionally with a "Big" (>= 32 bit) variant.
    def defineMicroLoadOp(mnemonic, code, bigCode='',
                          mem_flags="0", big=True, nonSpec=False,
                          implicitStack=False):
        global header_output
        global decoder_output
        global exec_output
        global microopClasses
        Name = mnemonic
        name = mnemonic.lower()

        # Build up the all register version of this micro op
        iops = [InstObjParams(name, Name, 'X86ISA::LdStOp',
                              { "code": code,
                                "ea_code": calculateEA,
                                "memDataSize": "dataSize" })]
        if big:
            iops += [InstObjParams(name, Name + "Big", 'X86ISA::LdStOp',
                                   { "code": bigCode,
                                     "ea_code": calculateEA,
                                     "memDataSize": "dataSize" })]
        for iop in iops:
            header_output += MicroLdStOpDeclare.subst(iop)
            decoder_output += MicroLdStOpConstructor.subst(iop)
            exec_output += MicroLoadExecute.subst(iop)
            exec_output += MicroLoadInitiateAcc.subst(iop)
            exec_output += MicroLoadCompleteAcc.subst(iop)

        if implicitStack:
            # For instructions that implicitly access the stack, the address
            # size is the same as the stack segment pointer size, not the
            # address size if specified by the instruction prefix
            addressSize = "env.stackSize"
        else:
            addressSize = "env.addressSize"

        base = LdStOp
        if big:
            base = BigLdStOp
        class LoadOp(base):
            def __init__(self, data, segment, addr, disp = 0,
                    dataSize="env.dataSize",
                    addressSize=addressSize,
                    atCPL0=False, prefetch=False, nonSpec=nonSpec,
                    implicitStack=implicitStack):
                super(LoadOp, self).__init__(data, segment, addr,
                        disp, dataSize, addressSize, mem_flags,
                        atCPL0, prefetch, nonSpec, implicitStack)
                self.className = Name
                self.mnemonic = name

        microopClasses[name] = LoadOp

    defineMicroLoadOp('Ld', 'Data = merge(Data, Mem, dataSize);',
                            'Data = Mem & mask(dataSize * 8);')
    defineMicroLoadOp('Ldis', 'Data = merge(Data, Mem, dataSize);',
                              'Data = Mem & mask(dataSize * 8);',
                      implicitStack=True)
    defineMicroLoadOp('Ldst', 'Data = merge(Data, Mem, dataSize);',
                              'Data = Mem & mask(dataSize * 8);',
                      '(StoreCheck << FlagShift)')
    defineMicroLoadOp('Ldstl', 'Data = merge(Data, Mem, dataSize);',
                               'Data = Mem & mask(dataSize * 8);',
                      '(StoreCheck << FlagShift) | Request::LOCKED_RMW',
                      nonSpec=True)

    defineMicroLoadOp('Ldfp', code='FpData_uqw = Mem', big = False)

    defineMicroLoadOp('Ldfp87', code='''
        switch (dataSize)
        {
          case 4:
            FpData_df = *(float *)&Mem;
            break;
          case 8:
            FpData_df = *(double *)&Mem;
            break;
          default:
            panic("Unhandled data size in LdFp87.\\n");
        }
        ''', big = False)

    # Load integer from memory into x87 top-of-stack register.
    # Used to implement fild instruction.
    defineMicroLoadOp('Ldifp87', code='''
        switch (dataSize)
        {
          case 2:
            FpData_df = (int64_t)sext<16>(Mem);
            break;
          case 4:
            FpData_df = (int64_t)sext<32>(Mem);
            break;
          case 8:
            FpData_df = (int64_t)Mem;
            break;
          default:
            panic("Unhandled data size in LdIFp87.\\n");
        }
        ''', big = False)

    # Generate the classes for one split-register load microop.
    def defineMicroLoadSplitOp(mnemonic, code, mem_flags="0", nonSpec=False):
        global header_output
        global decoder_output
        global exec_output
        global microopClasses
        Name = mnemonic
        name = mnemonic.lower()

        iop = InstObjParams(name, Name, 'X86ISA::LdStSplitOp',
                            { "code": code,
                              "ea_code": calculateEA,
                              "memDataSize": "2 * dataSize" })

        header_output += MicroLdStSplitOpDeclare.subst(iop)
        decoder_output += MicroLdStSplitOpConstructor.subst(iop)
        exec_output += MicroLoadExecute.subst(iop)
        exec_output += MicroLoadInitiateAcc.subst(iop)
        exec_output += MicroLoadCompleteAcc.subst(iop)

        class LoadOp(LdStSplitOp):
            def __init__(self, data, segment, addr, disp = 0,
                    dataSize="env.dataSize",
                    addressSize="env.addressSize",
                    atCPL0=False, prefetch=False, nonSpec=nonSpec,
                    implicitStack=False):
                super(LoadOp, self).__init__(data, segment, addr,
                        disp, dataSize, addressSize, mem_flags,
                        atCPL0, prefetch, nonSpec, implicitStack)
                self.className = Name
                self.mnemonic = name

        microopClasses[name] = LoadOp

    code = '''
        switch (dataSize) {
          case 4:
            DataLow = bits(Mem_u2qw[0], 31, 0);
            DataHi = bits(Mem_u2qw[0], 63, 32);
            break;
          case 8:
            DataLow = Mem_u2qw[0];
            DataHi = Mem_u2qw[1];
            break;
          default:
            panic("Unhandled data size %d in LdSplit.\\n", dataSize);
        }'''

    defineMicroLoadSplitOp('LdSplit', code,
                           '(StoreCheck << FlagShift)')

    defineMicroLoadSplitOp('LdSplitl', code,
                           '(StoreCheck << FlagShift) | Request::LOCKED_RMW',
                           nonSpec=True)

    # Generate the C++ classes and Python allocator class for one store
    # microop.
    def defineMicroStoreOp(mnemonic, code, completeCode="", mem_flags="0",
                           implicitStack=False):
        global header_output
        global decoder_output
        global exec_output
        global microopClasses
        Name = mnemonic
        name = mnemonic.lower()

        # Build up the all register version of this micro op
        iop = InstObjParams(name, Name, 'X86ISA::LdStOp',
                            { "code": code,
                              "complete_code": completeCode,
                              "ea_code": calculateEA,
                              "memDataSize": "dataSize" })
        header_output += MicroLdStOpDeclare.subst(iop)
        decoder_output += MicroLdStOpConstructor.subst(iop)
        exec_output += MicroStoreExecute.subst(iop)
        exec_output += MicroStoreInitiateAcc.subst(iop)
        exec_output += MicroStoreCompleteAcc.subst(iop)

        if implicitStack:
            # For instructions that implicitly access the stack, the address
            # size is the same as the stack segment pointer size, not the
            # address size if specified by the instruction prefix
            addressSize = "env.stackSize"
        else:
            addressSize = "env.addressSize"

        class StoreOp(LdStOp):
            def __init__(self, data, segment, addr, disp = 0,
                    dataSize="env.dataSize",
                    addressSize=addressSize,
                    atCPL0=False, nonSpec=False,
                    implicitStack=implicitStack):
                super(StoreOp, self).__init__(data, segment, addr, disp,
                        dataSize, addressSize, mem_flags, atCPL0, False,
                        nonSpec, implicitStack)
                self.className = Name
                self.mnemonic = name

        microopClasses[name] = StoreOp

    defineMicroStoreOp('St', 'Mem = pick(Data, 2, dataSize);')
    defineMicroStoreOp('Stis', 'Mem = pick(Data, 2, dataSize);',
                       implicitStack=True)
    defineMicroStoreOp('Stul', 'Mem = pick(Data, 2, dataSize);',
                       mem_flags="Request::LOCKED_RMW")

    defineMicroStoreOp('Stfp', code='Mem = FpData_uqw;')

    defineMicroStoreOp('Stfp87', code='''
        switch (dataSize)
        {
          case 4: {
            float single(FpData_df);
            Mem = *(uint32_t *)&single;
          } break;
          case 8:
            Mem = *(uint64_t *)&FpData_df;
            break;
          default:
            panic("Unhandled data size in StFp87.\\n");
        }
        ''')

    defineMicroStoreOp('Cda', 'Mem = 0;', mem_flags="Request::NO_ACCESS")

    # Generate the classes for one split-register store microop.
    def defineMicroStoreSplitOp(mnemonic, code,
                                completeCode="", mem_flags="0"):
        global header_output
        global decoder_output
        global exec_output
        global microopClasses
        Name = mnemonic
        name = mnemonic.lower()

        iop = InstObjParams(name, Name, 'X86ISA::LdStSplitOp',
                            { "code": code,
                              "complete_code": completeCode,
                              "ea_code": calculateEA,
                              "memDataSize": "2 * dataSize" })

        header_output += MicroLdStSplitOpDeclare.subst(iop)
        decoder_output += MicroLdStSplitOpConstructor.subst(iop)
        exec_output += MicroStoreExecute.subst(iop)
        exec_output += MicroStoreInitiateAcc.subst(iop)
        exec_output += MicroStoreCompleteAcc.subst(iop)

        class StoreOp(LdStSplitOp):
            def __init__(self, data, segment, addr, disp = 0,
                    dataSize="env.dataSize",
                    addressSize="env.addressSize",
                    atCPL0=False, nonSpec=False, implicitStack=False):
                super(StoreOp, self).__init__(data, segment, addr, disp,
                        dataSize, addressSize, mem_flags, atCPL0, False,
                        nonSpec, implicitStack)
                self.className = Name
                self.mnemonic = name

        microopClasses[name] = StoreOp

    code = '''
        switch (dataSize) {
          case 4:
            Mem_u2qw[0] = (DataHi << 32) | DataLow;
            break;
          case 8:
            Mem_u2qw[0] = DataLow;
            Mem_u2qw[1] = DataHi;
            break;
          default:
            panic("Unhandled data size %d in StSplit.\\n", dataSize);
        }'''

    defineMicroStoreSplitOp('StSplit', code)

    defineMicroStoreSplitOp('StSplitul', code,
                            mem_flags='Request::LOCKED_RMW')

    # lea: compute the (segment-relative) effective address into a
    # register; no memory access.
    iop = InstObjParams("lea", "Lea", 'X86ISA::LdStOp',
                        { "code": "Data = merge(Data, EA, dataSize);",
                          "ea_code": "EA = " + segmentEAExpr,
                          "memDataSize": "dataSize" })
    header_output += MicroLeaDeclare.subst(iop)
    decoder_output += MicroLdStOpConstructor.subst(iop)
    exec_output += MicroLeaExecute.subst(iop)

    class LeaOp(LdStOp):
        def __init__(self, data, segment, addr, disp = 0,
                dataSize="env.dataSize", addressSize="env.addressSize"):
            super(LeaOp, self).__init__(data, segment, addr, disp,
                    dataSize, addressSize, "0", False, False, False, False)
            self.className = "Lea"
            self.mnemonic = "lea"

    microopClasses["lea"] = LeaOp


    # tia: TLB invalidate address — demaps the page containing the EA.
    iop = InstObjParams("tia", "Tia", 'X86ISA::LdStOp',
                        { "code": "xc->demapPage(EA, 0);",
                          "ea_code": calculateEA,
                          "memDataSize": "dataSize" })
    header_output += MicroLeaDeclare.subst(iop)
    decoder_output += MicroLdStOpConstructor.subst(iop)
    exec_output += MicroLeaExecute.subst(iop)

    class TiaOp(LdStOp):
        def __init__(self, segment, addr, disp = 0,
                dataSize="env.dataSize",
                addressSize="env.addressSize"):
            super(TiaOp, self).__init__("InstRegIndex(NUM_INTREGS)", segment,
                    addr, disp, dataSize, addressSize, "0", False, False,
                    False, False)
            self.className = "Tia"
            self.mnemonic = "tia"

    microopClasses["tia"] = TiaOp

    # cda: check data address — performs the access checks of a store
    # (Request::NO_ACCESS) without touching memory.
    class CdaOp(LdStOp):
        def __init__(self, segment, addr, disp = 0,
                dataSize="env.dataSize",
                addressSize="env.addressSize", atCPL0=False):
            super(CdaOp, self).__init__("InstRegIndex(NUM_INTREGS)", segment,
                    addr, disp, dataSize, addressSize, "Request::NO_ACCESS",
                    atCPL0, False, False, False)
            self.className = "Cda"
            self.mnemonic = "cda"

    microopClasses["cda"] = CdaOp
}};