1// -*- mode:c++ -*- 2 3// Copyright (c) 2010-2011,2019 ARM Limited 4// All rights reserved 5// 6// The license below extends only to copyright in the software and shall 7// not be construed as granting a license to any other intellectual 8// property including but not limited to intellectual property relating 9// to a hardware implementation of the functionality of the software 10// licensed hereunder. You may use the software subject to the license 11// terms below provided that you ensure that this notice is replicated 12// unmodified and in its entirety in all distributions of the software, 13// modified or unmodified, in source code or in binary form. 14// 15// Redistribution and use in source and binary forms, with or without 16// modification, are permitted provided that the following conditions are 17// met: redistributions of source code must retain the above copyright 18// notice, this list of conditions and the following disclaimer; 19// redistributions in binary form must reproduce the above copyright 20// notice, this list of conditions and the following disclaimer in the 21// documentation and/or other materials provided with the distribution; 22// neither the name of the copyright holders nor the names of its 23// contributors may be used to endorse or promote products derived from 24// this software without specific prior written permission. 25// 26// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 27// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 28// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 29// A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT 30// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 31// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 32// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 33// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 34// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 35// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 36// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 37// 38// Authors: Gabe Black 39 40let {{ 41 import math 42 43 header_output = "" 44 decoder_output = "" 45 exec_output = "" 46 47 class LoadInst(LoadStoreInst): 48 execBase = 'Load' 49 50 def __init__(self, mnem, post, add, writeback, 51 size=4, sign=False, user=False, flavor="normal"): 52 super(LoadInst, self).__init__() 53 54 self.name = mnem 55 self.post = post 56 self.add = add 57 self.writeback = writeback 58 self.size = size 59 self.sign = sign 60 self.user = user 61 self.flavor = flavor 62 self.rasPop = False 63 64 if self.add: 65 self.op = " +" 66 else: 67 self.op = " -" 68 69 self.memFlags = ["ArmISA::TLB::MustBeOne"] 70 self.codeBlobs = {"postacc_code" : ""} 71 72 def emitHelper(self, base = 'Memory', wbDecl = None, instFlags = [], pcDecl = None): 73 74 global header_output, decoder_output, exec_output 75 76 codeBlobs = self.codeBlobs 77 codeBlobs["predicate_test"] = pickPredicate(codeBlobs) 78 (newHeader, 79 newDecoder, 80 newExec) = self.fillTemplates(self.name, self.Name, codeBlobs, 81 self.memFlags, instFlags, base, 82 wbDecl, pcDecl, self.rasPop, 83 self.size, self.sign) 84 85 header_output += newHeader 86 decoder_output += newDecoder 87 exec_output += newExec 88 89 class RfeInst(LoadInst): 90 decConstBase = 'Rfe' 91 92 def __init__(self, mnem, post, add, writeback): 93 super(RfeInst, self).__init__(mnem, post, add, writeback) 94 self.Name = "RFE_" + loadImmClassName(post, add, writeback, 8) 95 96 
self.memFlags.append("ArmISA::TLB::AlignWord") 97 98 def emit(self): 99 offset = 0 100 if self.post != self.add: 101 offset += 4 102 if not self.add: 103 offset -= 8 104 self.codeBlobs["ea_code"] = "EA = Base + %d;" % offset 105 106 wbDiff = -8 107 if self.add: 108 wbDiff = 8 109 accCode = ''' 110 CPSR cpsr = Cpsr; 111 cpsr.nz = CondCodesNZ; 112 cpsr.c = CondCodesC; 113 cpsr.v = CondCodesV; 114 cpsr.ge = CondCodesGE; 115 URc = cpsr; 116 URa = cSwap<uint32_t>(Mem_ud, cpsr.e); 117 URb = cSwap<uint32_t>(Mem_ud >> 32, cpsr.e); 118 ''' 119 self.codeBlobs["memacc_code"] = accCode 120 121 wbDecl = None 122 pcDecl = "MicroUopSetPCCPSR(machInst, INTREG_UREG0, INTREG_UREG1, INTREG_UREG2);" 123 124 if self.writeback: 125 wbDecl = "MicroAddiUop(machInst, base, base, %d);" % wbDiff 126 self.emitHelper('RfeOp', wbDecl, ["IsSerializeAfter", "IsNonSpeculative"], pcDecl) 127 128 class LoadImmInst(LoadInst): 129 def __init__(self, *args, **kargs): 130 super(LoadImmInst, self).__init__(*args, **kargs) 131 self.offset = self.op + " imm" 132 133 if self.add: 134 self.wbDecl = "MicroAddiUop(machInst, base, base, imm);" 135 else: 136 self.wbDecl = "MicroSubiUop(machInst, base, base, imm);" 137 138 if self.add and self.post and self.writeback and not self.sign and \ 139 not self.user and self.size == 4: 140 self.rasPop = True 141 142 class LoadRegInst(LoadInst): 143 def __init__(self, *args, **kargs): 144 super(LoadRegInst, self).__init__(*args, **kargs) 145 self.offset = self.op + " shift_rm_imm(Index, shiftAmt," + \ 146 " shiftType, OptShiftRmCondCodesC)" 147 if self.add: 148 self.wbDecl = ''' 149 MicroAddUop(machInst, base, base, wbIndexReg, shiftAmt, shiftType); 150 ''' 151 else: 152 self.wbDecl = ''' 153 MicroSubUop(machInst, base, base, wbIndexReg, shiftAmt, shiftType); 154 ''' 155 156 class LoadSingle(LoadInst): 157 def __init__(self, *args, **kargs): 158 super(LoadSingle, self).__init__(*args, **kargs) 159 160 # Build the default class name 161 self.Name = self.nameFunc(self.post, 
self.add, self.writeback, 162 self.size, self.sign, self.user) 163 164 # Add memory request flags where necessary 165 self.memFlags.append("%d" % int(math.log(self.size, 2))) 166 if self.user: 167 self.memFlags.append("ArmISA::TLB::UserMode") 168 169 self.instFlags = [] 170 if self.flavor == "dprefetch": 171 self.memFlags.append("Request::PREFETCH") 172 self.instFlags = ['IsDataPrefetch'] 173 elif self.flavor == "iprefetch": 174 self.memFlags.append("Request::PREFETCH") 175 self.instFlags = ['IsInstPrefetch'] 176 elif self.flavor == "normal": 177 self.memFlags.append("ArmISA::TLB::AllowUnaligned") 178 179 if self.flavor in ("exclusive", "acex"): 180 self.memFlags.append("Request::LLSC") 181 182 if self.flavor in ("acquire", "acex"): 183 self.instFlags.extend(["IsMemBarrier", 184 "IsWriteBarrier", 185 "IsReadBarrier"]) 186 187 # Disambiguate the class name for different flavors of loads 188 if self.flavor != "normal": 189 self.Name = "%s_%s" % (self.name.upper(), self.Name) 190 191 def emit(self): 192 # Address compuation code 193 eaCode = "EA = Base" 194 if not self.post: 195 eaCode += self.offset 196 eaCode += ";" 197 198 if self.flavor == "fp": 199 eaCode += vfpEnabledCheckCode 200 201 self.codeBlobs["ea_code"] = eaCode 202 203 # Code that actually handles the access 204 if self.flavor == "dprefetch" or self.flavor == "iprefetch": 205 accCode = 'uint64_t temp = Mem%s; temp = temp;' 206 elif self.flavor == "fp": 207 accCode = "FpDest_uw = cSwap(Mem%s, ((CPSR)Cpsr).e);\n" 208 else: 209 accCode = "IWDest = cSwap(Mem%s, ((CPSR)Cpsr).e);" 210 accCode = accCode % buildMemSuffix(self.sign, self.size) 211 212 self.codeBlobs["memacc_code"] = accCode 213 214 # Push it out to the output files 215 base = buildMemBase(self.basePrefix, self.post, self.writeback) 216 wbDecl = None 217 if self.writeback: 218 wbDecl = self.wbDecl 219 self.emitHelper(base, wbDecl, self.instFlags) 220 221 def loadImmClassName(post, add, writeback, size=4, sign=False, user=False): 222 return 
memClassName("LOAD_IMM", post, add, writeback, size, sign, user) 223 224 class LoadImm(LoadImmInst, LoadSingle): 225 decConstBase = 'LoadImm' 226 basePrefix = 'MemoryImm' 227 nameFunc = staticmethod(loadImmClassName) 228 229 def loadRegClassName(post, add, writeback, size=4, sign=False, user=False): 230 return memClassName("LOAD_REG", post, add, writeback, size, sign, user) 231 232 class LoadReg(LoadRegInst, LoadSingle): 233 decConstBase = 'LoadReg' 234 basePrefix = 'MemoryReg' 235 nameFunc = staticmethod(loadRegClassName) 236 237 class LoadDouble(LoadInst): 238 def __init__(self, *args, **kargs): 239 super(LoadDouble, self).__init__(*args, **kargs) 240 241 # Build the default class name 242 self.Name = self.nameFunc(self.post, self.add, self.writeback) 243 244 self.instFlags = [] 245 # Add memory request flags where necessary 246 if self.flavor in ("exclusive", "acex"): 247 self.memFlags.append("Request::LLSC") 248 self.memFlags.append("ArmISA::TLB::AlignDoubleWord") 249 else: 250 self.memFlags.append("ArmISA::TLB::AlignWord") 251 252 # Disambiguate the class name for different flavors of loads 253 if self.flavor != "normal": 254 self.Name = "%s_%s" % (self.name.upper(), self.Name) 255 256 if self.flavor in ("acquire", "acex"): 257 self.instFlags.extend(["IsMemBarrier", 258 "IsWriteBarrier", 259 "IsReadBarrier"]) 260 261 def emit(self): 262 # Address computation code 263 eaCode = "EA = Base" 264 if not self.post: 265 eaCode += self.offset 266 eaCode += ";" 267 268 if self.flavor == "fp": 269 eaCode += vfpEnabledCheckCode 270 271 self.codeBlobs["ea_code"] = eaCode 272 273 # Code that actually handles the access 274 if self.flavor != "fp": 275 accCode = ''' 276 CPSR cpsr = Cpsr; 277 Dest = cSwap<uint32_t>(Mem_ud, cpsr.e); 278 Dest2 = cSwap<uint32_t>(Mem_ud >> 32, cpsr.e); 279 ''' 280 else: 281 accCode = ''' 282 uint64_t swappedMem = cSwap(Mem_ud, ((CPSR)Cpsr).e); 283 FpDest_uw = (uint32_t)swappedMem; 284 FpDest2_uw = (uint32_t)(swappedMem >> 32); 285 ''' 286 287 
self.codeBlobs["memacc_code"] = accCode 288 289 # Push it out to the output files 290 base = buildMemBase(self.basePrefix, self.post, self.writeback) 291 wbDecl = None 292 if self.writeback: 293 wbDecl = self.wbDecl 294 self.emitHelper(base, wbDecl, self.instFlags) 295 296 def loadDoubleImmClassName(post, add, writeback): 297 return memClassName("LOAD_IMMD", post, add, writeback, 4, False, False) 298 299 class LoadDoubleImm(LoadImmInst, LoadDouble): 300 decConstBase = 'LoadStoreDImm' 301 basePrefix = 'MemoryDImm' 302 nameFunc = staticmethod(loadDoubleImmClassName) 303 304 def loadDoubleRegClassName(post, add, writeback): 305 return memClassName("LOAD_REGD", post, add, writeback, 4, False, False) 306 307 class LoadDoubleReg(LoadRegInst, LoadDouble): 308 decConstBase = 'LoadDReg' 309 basePrefix = 'MemoryDReg' 310 nameFunc = staticmethod(loadDoubleRegClassName) 311 312 def buildLoads(mnem, size=4, sign=False, user=False): 313 LoadImm(mnem, True, True, True, size, sign, user).emit() 314 LoadReg(mnem, True, True, True, size, sign, user).emit() 315 LoadImm(mnem, True, False, True, size, sign, user).emit() 316 LoadReg(mnem, True, False, True, size, sign, user).emit() 317 LoadImm(mnem, False, True, True, size, sign, user).emit() 318 LoadReg(mnem, False, True, True, size, sign, user).emit() 319 LoadImm(mnem, False, False, True, size, sign, user).emit() 320 LoadReg(mnem, False, False, True, size, sign, user).emit() 321 LoadImm(mnem, False, True, False, size, sign, user).emit() 322 LoadReg(mnem, False, True, False, size, sign, user).emit() 323 LoadImm(mnem, False, False, False, size, sign, user).emit() 324 LoadReg(mnem, False, False, False, size, sign, user).emit() 325 326 def buildDoubleLoads(mnem): 327 LoadDoubleImm(mnem, True, True, True).emit() 328 LoadDoubleReg(mnem, True, True, True).emit() 329 LoadDoubleImm(mnem, True, False, True).emit() 330 LoadDoubleReg(mnem, True, False, True).emit() 331 LoadDoubleImm(mnem, False, True, True).emit() 332 LoadDoubleReg(mnem, False, 
True, True).emit() 333 LoadDoubleImm(mnem, False, False, True).emit() 334 LoadDoubleReg(mnem, False, False, True).emit() 335 LoadDoubleImm(mnem, False, True, False).emit() 336 LoadDoubleReg(mnem, False, True, False).emit() 337 LoadDoubleImm(mnem, False, False, False).emit() 338 LoadDoubleReg(mnem, False, False, False).emit() 339 340 def buildRfeLoads(mnem): 341 RfeInst(mnem, True, True, True).emit() 342 RfeInst(mnem, True, True, False).emit() 343 RfeInst(mnem, True, False, True).emit() 344 RfeInst(mnem, True, False, False).emit() 345 RfeInst(mnem, False, True, True).emit() 346 RfeInst(mnem, False, True, False).emit() 347 RfeInst(mnem, False, False, True).emit() 348 RfeInst(mnem, False, False, False).emit() 349 350 def buildPrefetches(mnem, type): 351 LoadReg(mnem, False, False, False, size=1, flavor=type).emit() 352 LoadImm(mnem, False, False, False, size=1, flavor=type).emit() 353 LoadReg(mnem, False, True, False, size=1, flavor=type).emit() 354 LoadImm(mnem, False, True, False, size=1, flavor=type).emit() 355 356 buildLoads("ldr") 357 buildLoads("ldrt", user=True) 358 buildLoads("ldrb", size=1) 359 buildLoads("ldrbt", size=1, user=True) 360 buildLoads("ldrsb", size=1, sign=True) 361 buildLoads("ldrsbt", size=1, sign=True, user=True) 362 buildLoads("ldrh", size=2) 363 buildLoads("ldrht", size=2, user=True) 364 buildLoads("ldrsh", size=2, sign=True) 365 buildLoads("ldrsht", size=2, sign=True, user=True) 366 367 buildDoubleLoads("ldrd") 368 369 buildRfeLoads("rfe") 370 371 buildPrefetches("pld", "dprefetch") 372 buildPrefetches("pldw", "dprefetch") 373 buildPrefetches("pli", "iprefetch") 374 375 LoadImm("ldrex", False, True, False, size=4, flavor="exclusive").emit() 376 LoadImm("ldrexh", False, True, False, size=2, flavor="exclusive").emit() 377 LoadImm("ldrexb", False, True, False, size=1, flavor="exclusive").emit() 378 LoadDoubleImm("ldrexd", False, True, False, flavor="exclusive").emit() 379 380 LoadImm("lda", False, True, False, size=4, flavor="acquire").emit() 
    # Remaining load-acquire variants (halfword, byte).
    LoadImm("ldah", False, True, False, size=2, flavor="acquire").emit()
    LoadImm("ldab", False, True, False, size=1, flavor="acquire").emit()

    # Load-acquire-exclusive variants: acquire semantics plus LLSC
    # reservation (see the "acex" flavor handling above).
    LoadImm("ldaex", False, True, False, size=4, flavor="acex").emit()
    LoadImm("ldaexh", False, True, False, size=2, flavor="acex").emit()
    LoadImm("ldaexb", False, True, False, size=1, flavor="acex").emit()
    LoadDoubleImm("ldaexd", False, True, False, flavor="acex").emit()

    # VFP loads: single-word and doubleword, immediate offset, both
    # add and subtract forms, never with writeback.
    LoadImm("vldr", False, True, False, size=4, flavor="fp").emit()
    LoadImm("vldr", False, False, False, size=4, flavor="fp").emit()
    LoadDoubleImm("vldr", False, True, False, flavor="fp").emit()
    LoadDoubleImm("vldr", False, False, False, flavor="fp").emit()
}};