1// -*- mode:c++ -*- 2 3// Copyright (c) 2011-2013, 2016-2019 ARM Limited 4// All rights reserved 5// 6// The license below extends only to copyright in the software and shall 7// not be construed as granting a license to any other intellectual 8// property including but not limited to intellectual property relating 9// to a hardware implementation of the functionality of the software 10// licensed hereunder. You may use the software subject to the license 11// terms below provided that you ensure that this notice is replicated 12// unmodified and in its entirety in all distributions of the software, 13// modified or unmodified, in source code or in binary form. 14// 15// Redistribution and use in source and binary forms, with or without 16// modification, are permitted provided that the following conditions are 17// met: redistributions of source code must retain the above copyright 18// notice, this list of conditions and the following disclaimer; 19// redistributions in binary form must reproduce the above copyright 20// notice, this list of conditions and the following disclaimer in the 21// documentation and/or other materials provided with the distribution; 22// neither the name of the copyright holders nor the names of its 23// contributors may be used to endorse or promote products derived from 24// this software without specific prior written permission. 25// 26// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 27// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 28// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 29// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 30// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 31// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 32// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 33// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 34// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 35// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 36// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 37// 38// Authors: Gabe Black 39 40let {{ 41 42 header_output = "" 43 decoder_output = "" 44 exec_output = "" 45 46 def createCcCode64(carry, overflow): 47 code = "" 48 code += ''' 49 uint16_t _iz, _in; 50 _in = bits(resTemp, intWidth - 1); 51 _iz = ((resTemp & mask(intWidth)) == 0); 52 CondCodesNZ = (_in << 1) | _iz; 53 DPRINTF(Arm, "(in, iz) = (%d, %d)\\n", _in, _iz); 54 ''' 55 if overflow and overflow != "none": 56 code += ''' 57 uint16_t _iv; 58 _iv = %s & 1; 59 CondCodesV = _iv; 60 DPRINTF(Arm, "(iv) = (%%d)\\n", _iv); 61 ''' % overflow 62 if carry and carry != "none": 63 code += ''' 64 uint16_t _ic; 65 _ic = %s & 1; 66 CondCodesC = _ic; 67 DPRINTF(Arm, "(ic) = (%%d)\\n", _ic); 68 ''' % carry 69 return code 70 71 oldC = 'CondCodesC' 72 oldV = 'CondCodesV' 73 # Dicts of ways to set the carry flag. 74 carryCode64 = { 75 "none": "none", 76 "add": 'findCarry(intWidth, resTemp, Op164, secOp)', 77 "sub": 'findCarry(intWidth, resTemp, Op164, ~secOp)', 78 "logic": '0' 79 } 80 # Dict of ways to set the overflow flag. 81 overflowCode64 = { 82 "none": "none", 83 "add": 'findOverflow(intWidth, resTemp, Op164, secOp)', 84 "sub": 'findOverflow(intWidth, resTemp, Op164, ~secOp)', 85 "logic": '0' 86 } 87 88 immOp2 = "uint64_t secOp M5_VAR_USED = imm;" 89 sRegOp2 = "uint64_t secOp M5_VAR_USED = " + \ 90 "shiftReg64(Op264, shiftAmt, shiftType, intWidth);" 91 eRegOp2 = "uint64_t secOp M5_VAR_USED = " + \ 92 "extendReg64(Op264, extendType, shiftAmt, intWidth);" 93 94 def buildDataWork(mnem, code, flagType, suffix, buildCc, buildNonCc, 95 base, templateBase): 96 code = ''' 97 uint64_t resTemp M5_VAR_USED = 0; 98 ''' + code 99 ccCode = createCcCode64(carryCode64[flagType], overflowCode64[flagType]) 100 Name = mnem.capitalize() + suffix 101 iop = InstObjParams(mnem, Name, base, code) 102 iopCc = InstObjParams(mnem + "s", Name + "Cc", base, code + ccCode) 103 104 def subst(iop): 105 global header_output, decoder_output, exec_output 106 header_output += eval(templateBase + "Declare").subst(iop) 107 decoder_output += eval(templateBase + "Constructor").subst(iop) 108 exec_output += BasicExecute.subst(iop) 109 110 if buildNonCc: 111 subst(iop) 112 if buildCc: 113 subst(iopCc) 114 115 def buildXImmDataInst(mnem, code, flagType = "logic", \ 116 buildCc = True, buildNonCc = True, \ 117 suffix = "XImm"): 118 buildDataWork(mnem, immOp2 + code, flagType, suffix, 119 buildCc, buildNonCc, "DataXImmOp", "DataXImm") 120 121 def buildXSRegDataInst(mnem, code, flagType = "logic", \ 122 buildCc = True, buildNonCc = True, \ 123 suffix = "XSReg"): 124 buildDataWork(mnem, sRegOp2 + code, flagType, suffix, 125 buildCc, buildNonCc, "DataXSRegOp", "DataXSReg") 126 127 def buildXERegDataInst(mnem, code, flagType = "logic", \ 128 buildCc = True, buildNonCc = True, \ 129 suffix = "XEReg"): 130 buildDataWork(mnem, eRegOp2 + code, flagType, suffix, 131 buildCc, buildNonCc, "DataXERegOp", "DataXEReg") 132 133 def buildDataInst(mnem, code, flagType = "logic", 134 buildCc = True, buildNonCc = True): 135 buildXImmDataInst(mnem, code, flagType, buildCc, buildNonCc) 136 buildXSRegDataInst(mnem, code, flagType, buildCc, buildNonCc) 137 buildXERegDataInst(mnem, code, flagType, buildCc, buildNonCc) 138 139 buildXImmDataInst("adr", "Dest64 = RawPC + imm", buildCc = False); 140 buildXImmDataInst("adrp", "Dest64 = (RawPC & ~mask(12)) + imm", 141 buildCc = False); 142 buildDataInst("and", "Dest64 = resTemp = Op164 & secOp;") 143 buildDataInst("eor", "Dest64 = Op164 ^ secOp;", buildCc = False) 144 buildXSRegDataInst("eon", "Dest64 = Op164 ^ ~secOp;", buildCc = False) 145 buildDataInst("sub", "Dest64 = resTemp = Op164 - secOp;", "sub") 146 buildDataInst("add", "Dest64 = resTemp = Op164 + secOp;", "add") 147 buildXSRegDataInst("adc", 148 "Dest64 = resTemp = Op164 + secOp + %s;" % oldC, "add") 149 buildXSRegDataInst("sbc", 150 "Dest64 = resTemp = Op164 - secOp - !%s;" % oldC, "sub") 151 buildDataInst("orr", "Dest64 = Op164 | secOp;", buildCc = False) 152 buildXSRegDataInst("orn", "Dest64 = Op164 | ~secOp;", buildCc = False) 153 buildXSRegDataInst("bic", "Dest64 = resTemp = Op164 & ~secOp;") 154 155 def buildDataXImmInst(mnem, code, optArgs = []): 156 global header_output, decoder_output, exec_output 157 classNamePrefix = mnem[0].upper() + mnem[1:] 158 templateBase = "DataXImm" 159 iop = InstObjParams(mnem, classNamePrefix + "64", 160 templateBase + "Op", code, optArgs) 161 header_output += eval(templateBase + "Declare").subst(iop) 162 decoder_output += eval(templateBase + "Constructor").subst(iop) 163 exec_output += BasicExecute.subst(iop) 164 165 def buildDataXRegInst(mnem, regOps, code, optArgs = [], 166 overrideOpClass=None): 167 global header_output, decoder_output, exec_output 168 templateBase = "DataX%dReg" % regOps 169 classNamePrefix = mnem[0].upper() + mnem[1:] 170 if overrideOpClass: 171 iop = InstObjParams(mnem, classNamePrefix + "64", 172 templateBase + "Op", 173 { 'code': code, 'op_class': overrideOpClass}, 174 optArgs) 175 else: 176 iop = InstObjParams(mnem, classNamePrefix + "64", 177 templateBase + "Op", code, optArgs) 178 header_output += eval(templateBase + "Declare").subst(iop) 179 decoder_output += eval(templateBase + "Constructor").subst(iop) 180 exec_output += BasicExecute.subst(iop) 181 182 buildDataXRegInst("madd", 3, "Dest64 = Op164 + Op264 * Op364", 183 overrideOpClass="IntMultOp") 184 buildDataXRegInst("msub", 3, "Dest64 = Op164 - Op264 * Op364", 185 overrideOpClass="IntMultOp") 186 buildDataXRegInst("smaddl", 3, 187 "XDest = XOp1 + sext<32>(WOp2) * sext<32>(WOp3)", 188 overrideOpClass="IntMultOp") 189 buildDataXRegInst("smsubl", 3, 190 "XDest = XOp1 - sext<32>(WOp2) * sext<32>(WOp3)", 191 overrideOpClass="IntMultOp") 192 buildDataXRegInst("smulh", 2, ''' 193 uint64_t op1H = (int32_t)(XOp1 >> 32); 194 uint64_t op1L = (uint32_t)XOp1; 195 uint64_t op2H = (int32_t)(XOp2 >> 32); 196 uint64_t op2L = (uint32_t)XOp2; 197 uint64_t mid1 = ((op1L * op2L) >> 32) + op1H * op2L; 198 uint64_t mid2 = op1L * op2H; 199 uint64_t result = ((uint64_t)(uint32_t)mid1 + (uint32_t)mid2) >> 32; 200 result += shiftReg64(mid1, 32, ASR, intWidth); 201 result += shiftReg64(mid2, 32, ASR, intWidth); 202 XDest = result + op1H * op2H; 203 ''', overrideOpClass="IntMultOp") 204 buildDataXRegInst("umaddl", 3, "XDest = XOp1 + WOp2 * WOp3", 205 overrideOpClass="IntMultOp") 206 buildDataXRegInst("umsubl", 3, "XDest = XOp1 - WOp2 * WOp3", 207 overrideOpClass="IntMultOp") 208 buildDataXRegInst("umulh", 2, ''' 209 uint64_t op1H = (uint32_t)(XOp1 >> 32); 210 uint64_t op1L = (uint32_t)XOp1; 211 uint64_t op2H = (uint32_t)(XOp2 >> 32); 212 uint64_t op2L = (uint32_t)XOp2; 213 uint64_t mid1 = ((op1L * op2L) >> 32) + op1H * op2L; 214 uint64_t mid2 = op1L * op2H; 215 uint64_t result = ((uint64_t)(uint32_t)mid1 + (uint32_t)mid2) >> 32; 216 result += mid1 >> 32; 217 result += mid2 >> 32; 218 XDest = result + op1H * op2H; 219 ''', overrideOpClass="IntMultOp") 220 221 buildDataXRegInst("asrv", 2, 222 "Dest64 = shiftReg64(Op164, Op264, ASR, intWidth)") 223 buildDataXRegInst("lslv", 2, 224 "Dest64 = shiftReg64(Op164, Op264, LSL, intWidth)") 225 buildDataXRegInst("lsrv", 2, 226 "Dest64 = shiftReg64(Op164, Op264, LSR, intWidth)") 227 buildDataXRegInst("rorv", 2, 228 "Dest64 = shiftReg64(Op164, Op264, ROR, intWidth)") 229 230 crcCode = ''' 231 constexpr uint8_t size_bytes = %(sz)d; 232 constexpr uint32_t poly = %(polynom)s; 233 234 // Initial value is often a previously evaluated 235 // crc value hence is always 32bit in CRC32 236 uint32_t initial_crc = Op164 & 0xFFFFFFFF; 237 238 uint64_t data = htole(Op264); 239 auto data_buffer = reinterpret_cast<uint8_t*>(&data); 240 241 Dest = crc32<poly>( 242 data_buffer, /* Message register */ 243 initial_crc, /* Initial value of the CRC */ 244 size_bytes /* Size of the original Message */ 245 ); 246 ''' 247 buildDataXRegInst("crc32b", 2, 248 crcCode % {"sz": 1, "polynom": "0x04C11DB7"}) 249 buildDataXRegInst("crc32h", 2, 250 crcCode % {"sz": 2, "polynom": "0x04C11DB7"}) 251 buildDataXRegInst("crc32w", 2, 252 crcCode % {"sz": 4, "polynom": "0x04C11DB7"}) 253 buildDataXRegInst("crc32x", 2, 254 crcCode % {"sz": 8, "polynom": "0x04C11DB7"}) 255 256 buildDataXRegInst("crc32cb", 2, 257 crcCode % {"sz": 1, "polynom": "0x1EDC6F41"}) 258 buildDataXRegInst("crc32ch", 2, 259 crcCode % {"sz": 2, "polynom": "0x1EDC6F41"}) 260 buildDataXRegInst("crc32cw", 2, 261 crcCode % {"sz": 4, "polynom": "0x1EDC6F41"}) 262 buildDataXRegInst("crc32cx", 2, 263 crcCode % {"sz": 8, "polynom": "0x1EDC6F41"}) 264 265 buildDataXRegInst("sdiv", 2, ''' 266 int64_t op1 = Op164; 267 int64_t op2 = Op264; 268 if (intWidth == 32) { 269 op1 = sext<32>(op1); 270 op2 = sext<32>(op2); 271 } 272 Dest64 = op2 == -1 ? -op1 : op2 ? op1 / op2 : 0; 273 ''', overrideOpClass="IntDivOp") 274 buildDataXRegInst("udiv", 2, "Dest64 = Op264 ? Op164 / Op264 : 0", 275 overrideOpClass="IntDivOp") 276 277 buildDataXRegInst("cls", 1, ''' 278 uint64_t op1 = Op164; 279 if (bits(op1, intWidth - 1)) 280 op1 ^= mask(intWidth); 281 Dest64 = (op1 == 0) ? intWidth - 1 : (intWidth - 2 - findMsbSet(op1)); 282 ''') 283 buildDataXRegInst("clz", 1, ''' 284 Dest64 = (Op164 == 0) ? intWidth : (intWidth - 1 - findMsbSet(Op164)); 285 ''') 286 buildDataXRegInst("rbit", 1, ''' 287 Dest64 = reverseBits(Op164, intWidth/8); 288 ''') 289 buildDataXRegInst("rev", 1, ''' 290 if (intWidth == 32) 291 Dest64 = betole<uint32_t>(Op164); 292 else 293 Dest64 = betole<uint64_t>(Op164); 294 ''') 295 buildDataXRegInst("rev16", 1, ''' 296 int count = intWidth / 16; 297 uint64_t result = 0; 298 for (unsigned i = 0; i < count; i++) { 299 uint16_t hw = Op164 >> (i * 16); 300 result |= (uint64_t)betole<uint16_t>(hw) << (i * 16); 301 } 302 Dest64 = result; 303 ''') 304 buildDataXRegInst("rev32", 1, ''' 305 int count = intWidth / 32; 306 uint64_t result = 0; 307 for (unsigned i = 0; i < count; i++) { 308 uint32_t hw = Op164 >> (i * 32); 309 result |= (uint64_t)betole<uint32_t>(hw) << (i * 32); 310 } 311 Dest64 = result; 312 ''') 313 314 msrMrs64EnabledCheckCode = ''' 315 // Check for read/write access right 316 if (!can%sAArch64SysReg(flat_idx, Scr64, cpsr, xc->tcBase())) { 317 if (flat_idx == MISCREG_DAIF || 318 flat_idx == MISCREG_DC_ZVA_Xt || 319 flat_idx == MISCREG_DC_CVAC_Xt || 320 flat_idx == MISCREG_DC_CIVAC_Xt 321 ) 322 return std::make_shared<UndefinedInstruction>( 323 machInst, 0, EC_TRAPPED_MSR_MRS_64, 324 mnemonic); 325 return std::make_shared<UndefinedInstruction>(machInst, false, 326 mnemonic); 327 } 328 329 fault = this->trap(xc->tcBase(), flat_idx, el, imm); 330 if (fault != NoFault) return fault; 331 ''' 332 333 msr_check_code = ''' 334 auto pre_flat = (MiscRegIndex)snsBankedIndex64(dest, xc->tcBase()); 335 MiscRegIndex flat_idx = (MiscRegIndex) xc->tcBase()-> 336 flattenRegId(RegId(MiscRegClass, pre_flat)).index(); 337 CPSR cpsr = Cpsr; 338 ExceptionLevel el = (ExceptionLevel) (uint8_t) cpsr.el; 339 %s 340 ''' % (msrMrs64EnabledCheckCode % ('Write'),) 341 342 mrs_check_code = ''' 343 auto pre_flat = (MiscRegIndex)snsBankedIndex64(op1, xc->tcBase()); 344 MiscRegIndex flat_idx = (MiscRegIndex) xc->tcBase()-> 345 flattenRegId(RegId(MiscRegClass, pre_flat)).index(); 346 CPSR cpsr = Cpsr; 347 ExceptionLevel el = (ExceptionLevel) (uint8_t) cpsr.el; 348 %s 349 ''' % (msrMrs64EnabledCheckCode % ('Read'),) 350 351 352 mrsCode = mrs_check_code + ''' 353 XDest = MiscOp1_ud; 354 ''' 355 mrsIop = InstObjParams("mrs", "Mrs64", "RegMiscRegImmOp64", 356 mrsCode, 357 ["IsSerializeBefore"]) 358 header_output += RegMiscRegOp64Declare.subst(mrsIop) 359 decoder_output += RegMiscRegOp64Constructor.subst(mrsIop) 360 exec_output += BasicExecute.subst(mrsIop) 361 362 buildDataXRegInst("mrsNZCV", 1, ''' 363 CPSR cpsr = 0; 364 cpsr.nz = CondCodesNZ; 365 cpsr.c = CondCodesC; 366 cpsr.v = CondCodesV; 367 XDest = cpsr; 368 ''') 369 370 msrCode = msr_check_code + ''' 371 MiscDest_ud = XOp1; 372 ''' 373 msrIop = InstObjParams("msr", "Msr64", "MiscRegRegImmOp64", 374 msrCode, 375 ["IsSerializeAfter", "IsNonSpeculative"]) 376 header_output += MiscRegRegOp64Declare.subst(msrIop) 377 decoder_output += MiscRegRegOp64Constructor.subst(msrIop) 378 exec_output += BasicExecute.subst(msrIop) 379 380 381 buildDataXRegInst("msrNZCV", 1, ''' 382 CPSR cpsr = XOp1; 383 CondCodesNZ = cpsr.nz; 384 CondCodesC = cpsr.c; 385 CondCodesV = cpsr.v; 386 ''') 387 388 389 msrdczva_ea_code = msr_check_code 390 msrdczva_ea_code += ''' 391 Request::Flags memAccessFlags = Request::CACHE_BLOCK_ZERO | 392 ArmISA::TLB::MustBeOne; 393 EA = XBase; 394 assert(!(Dczid & 0x10)); 395 uint64_t op_size = power(2, Dczid + 2); 396 EA &= ~(op_size - 1); 397 398 ''' 399 400 msrDCZVAIop = InstObjParams("dc zva", "Dczva", "SysDC64", 401 { "ea_code" : msrdczva_ea_code, 402 "memacc_code" : ';', 403 "use_uops" : 0, 404 "op_wb" : ";", 405 "fa_code" : ";"}, 406 ['IsStore', 'IsMemRef']); 407 header_output += DCStore64Declare.subst(msrDCZVAIop); 408 decoder_output += DCStore64Constructor.subst(msrDCZVAIop); 409 exec_output += DCStore64Execute.subst(msrDCZVAIop); 410 exec_output += DCStore64InitiateAcc.subst(msrDCZVAIop); 411 exec_output += Store64CompleteAcc.subst(msrDCZVAIop); 412 413 414 msrdccvau_ea_code = msr_check_code 415 msrdccvau_ea_code += ''' 416 Request::Flags memAccessFlags = Request::CLEAN | Request::DST_POU | 417 ArmISA::TLB::MustBeOne; 418 EA = XBase; 419 System *sys = xc->tcBase()->getSystemPtr(); 420 Addr op_size = sys->cacheLineSize(); 421 EA &= ~(op_size - 1); 422 ''' 423 424 msrDCCVAUIop = InstObjParams("dc cvau", "Dccvau", "SysDC64", 425 { "ea_code" : msrdccvau_ea_code, 426 "memacc_code" : ';', 427 "use_uops" : 0, 428 "op_wb" : ";", "fa_code" : ";"}, 429 ['IsStore', 'IsMemRef']); 430 header_output += DCStore64Declare.subst(msrDCCVAUIop); 431 decoder_output += DCStore64Constructor.subst(msrDCCVAUIop); 432 exec_output += DCStore64Execute.subst(msrDCCVAUIop); 433 exec_output += DCStore64InitiateAcc.subst(msrDCCVAUIop); 434 exec_output += Store64CompleteAcc.subst(msrDCCVAUIop); 435 436 437 msrdccvac_ea_code = msr_check_code 438 msrdccvac_ea_code += ''' 439 Request::Flags memAccessFlags = Request::CLEAN | Request::DST_POC | 440 ArmISA::TLB::MustBeOne; 441 EA = XBase; 442 System *sys = xc->tcBase()->getSystemPtr(); 443 Addr op_size = sys->cacheLineSize(); 444 EA &= ~(op_size - 1); 445 ''' 446 447 msrDCCVACIop = InstObjParams("dc cvac", "Dccvac", "SysDC64", 448 { "ea_code" : msrdccvac_ea_code, 449 "memacc_code" : ';', 450 "use_uops" : 0, 451 "op_wb" : ";", "fa_code" : ";"}, 452 ['IsStore', 'IsMemRef']); 453 header_output += DCStore64Declare.subst(msrDCCVACIop); 454 decoder_output += DCStore64Constructor.subst(msrDCCVACIop); 455 exec_output += DCStore64Execute.subst(msrDCCVACIop); 456 exec_output += DCStore64InitiateAcc.subst(msrDCCVACIop); 457 exec_output += Store64CompleteAcc.subst(msrDCCVACIop); 458 459 460 msrdccivac_ea_code = msr_check_code 461 msrdccivac_ea_code += ''' 462 Request::Flags memAccessFlags = Request::CLEAN | 463 Request::INVALIDATE | Request::DST_POC | ArmISA::TLB::MustBeOne; 464 EA = XBase; 465 System *sys = xc->tcBase()->getSystemPtr(); 466 Addr op_size = sys->cacheLineSize(); 467 EA &= ~(op_size - 1); 468 ''' 469 470 msrDCCIVACIop = InstObjParams("dc civac", "Dccivac", "SysDC64", 471 { "ea_code" : msrdccivac_ea_code, 472 "memacc_code" : ';', 473 "use_uops" : 0, 474 "op_wb" : ";", "fa_code" : ";"}, 475 ['IsStore', 'IsMemRef']); 476 header_output += DCStore64Declare.subst(msrDCCIVACIop); 477 decoder_output += DCStore64Constructor.subst(msrDCCIVACIop); 478 exec_output += DCStore64Execute.subst(msrDCCIVACIop); 479 exec_output += DCStore64InitiateAcc.subst(msrDCCIVACIop); 480 exec_output += Store64CompleteAcc.subst(msrDCCIVACIop); 481 482 483 msrdcivac_ea_code = msr_check_code 484 msrdcivac_ea_code += ''' 485 Request::Flags memAccessFlags = Request::INVALIDATE | 486 Request::DST_POC | ArmISA::TLB::MustBeOne; 487 EA = XBase; 488 HCR hcr = Hcr64; 489 SCR scr = Scr64; 490 if (el == EL1 && ArmSystem::haveVirtualization(xc->tcBase()) && 491 hcr.vm && (scr.ns || !ArmSystem::haveSecurity(xc->tcBase()))) { 492 memAccessFlags = memAccessFlags | Request::CLEAN; 493 } 494 System *sys = xc->tcBase()->getSystemPtr(); 495 Addr op_size = sys->cacheLineSize(); 496 EA &= ~(op_size - 1); 497 ''' 498 499 msrDCIVACIop = InstObjParams("dc ivac", "Dcivac", "SysDC64", 500 { "ea_code" : msrdcivac_ea_code, 501 "memacc_code" : ';', 502 "use_uops" : 0, 503 "op_wb" : ";", "fa_code" : ";"}, 504 ['IsStore', 'IsMemRef']); 505 header_output += DCStore64Declare.subst(msrDCIVACIop); 506 decoder_output += DCStore64Constructor.subst(msrDCIVACIop); 507 exec_output += DCStore64Execute.subst(msrDCIVACIop); 508 exec_output += DCStore64InitiateAcc.subst(msrDCIVACIop); 509 exec_output += Store64CompleteAcc.subst(msrDCIVACIop); 510 511 def buildMsrImmInst(mnem, inst_name, code): 512 global header_output, decoder_output, exec_output 513 msrImmPermission = ''' 514 auto pre_flat = 515 (MiscRegIndex)snsBankedIndex64(dest, xc->tcBase()); 516 MiscRegIndex misc_index = (MiscRegIndex) xc->tcBase()-> 517 flattenRegId(RegId(MiscRegClass, pre_flat)).index(); 518 519 if (!miscRegInfo[misc_index][MISCREG_IMPLEMENTED]) { 520 return std::make_shared<UndefinedInstruction>( 521 machInst, false, 522 mnemonic); 523 } 524 525 if (!canWriteAArch64SysReg(misc_index, 526 Scr64, Cpsr, xc->tcBase())) { 527 528 return std::make_shared<UndefinedInstruction>( 529 machInst, 0, EC_TRAPPED_MSR_MRS_64, 530 mnemonic); 531 } 532 533 ''' 534 msrIop = InstObjParams("msr", inst_name, "MiscRegImmOp64", 535 msrImmPermission + code, 536 ["IsSerializeAfter", "IsNonSpeculative"]) 537 header_output += MiscRegOp64Declare.subst(msrIop) 538 decoder_output += MiscRegOp64Constructor.subst(msrIop) 539 exec_output += BasicExecute.subst(msrIop) 540 541 buildMsrImmInst("msr", "MsrImm64", ''' 542 // Mask and shift immediate (depending on PSTATE field) 543 // before assignment 544 MiscDest_ud = miscRegImm(); 545 ''') 546 547 buildMsrImmInst("msr", "MsrImmDAIFSet64", ''' 548 CPSR cpsr = Cpsr; 549 cpsr.daif = cpsr.daif | imm; 550 Cpsr = cpsr; 551 ''') 552 553 buildMsrImmInst("msr", "MsrImmDAIFClr64", ''' 554 CPSR cpsr = Cpsr; 555 cpsr.daif = cpsr.daif & ~imm; 556 Cpsr = cpsr; 557 ''') 558 559 def buildDataXCompInst(mnem, instType, suffix, code): 560 global header_output, decoder_output, exec_output 561 templateBase = "DataXCond%s" % instType 562 iop = InstObjParams(mnem, mnem.capitalize() + suffix + "64", 563 templateBase + "Op", code) 564 header_output += eval(templateBase + "Declare").subst(iop) 565 decoder_output += eval(templateBase + "Constructor").subst(iop) 566 exec_output += BasicExecute.subst(iop) 567 568 def buildDataXCondImmInst(mnem, code): 569 buildDataXCompInst(mnem, "CompImm", "Imm", code) 570 def buildDataXCondRegInst(mnem, code): 571 buildDataXCompInst(mnem, "CompReg", "Reg", code) 572 def buildDataXCondSelInst(mnem, code): 573 buildDataXCompInst(mnem, "Sel", "", code) 574 575 def condCompCode(flagType, op, imm): 576 ccCode = createCcCode64(carryCode64[flagType], overflowCode64[flagType]) 577 opDecl = "uint64_t secOp M5_VAR_USED = imm;" 578 if not imm: 579 opDecl = "uint64_t secOp M5_VAR_USED = Op264;" 580 return opDecl + ''' 581 if (testPredicate(CondCodesNZ, CondCodesC, CondCodesV, condCode)) { 582 uint64_t resTemp = Op164 ''' + op + ''' secOp; 583 ''' + ccCode + ''' 584 } else { 585 CondCodesNZ = (defCc >> 2) & 0x3; 586 CondCodesC = (defCc >> 1) & 0x1; 587 CondCodesV = defCc & 0x1; 588 } 589 ''' 590 591 buildDataXCondImmInst("ccmn", condCompCode("add", "+", True)) 592 buildDataXCondImmInst("ccmp", condCompCode("sub", "-", True)) 593 buildDataXCondRegInst("ccmn", condCompCode("add", "+", False)) 594 buildDataXCondRegInst("ccmp", condCompCode("sub", "-", False)) 595 596 condSelCode = ''' 597 if (testPredicate(CondCodesNZ, CondCodesC, CondCodesV, condCode)) { 598 Dest64 = Op164; 599 } else { 600 Dest64 = %(altVal)s; 601 } 602 ''' 603 buildDataXCondSelInst("csel", condSelCode % {"altVal" : "Op264"}) 604 buildDataXCondSelInst("csinc", condSelCode % {"altVal" : "Op264 + 1"}) 605 buildDataXCondSelInst("csinv", condSelCode % {"altVal" : "~Op264"}) 606 buildDataXCondSelInst("csneg", condSelCode % {"altVal" : "-Op264"}) 607}}; 608