// -*- mode:c++ -*-

// Copyright (c) 2011-2013,2017 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder. You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Gabe Black

let {{

    header_output = ""
    decoder_output = ""
    exec_output = ""

    class StoreInst64(LoadStoreInst):
        execBase = 'Store64'
        micro = False

        def __init__(self, mnem, Name, size=4, user=False, flavor="normal",
                     top = False):
            super(StoreInst64, self).__init__()

            self.name = mnem
            self.Name = Name
            self.size = size
            self.user = user
            self.flavor = flavor
            self.top = top

            self.memFlags = ["ArmISA::TLB::MustBeOne"]
            self.instFlags = []
            self.codeBlobs = { "postacc_code" : "" }

            # Add memory request flags where necessary
            if self.user:
                self.memFlags.append("ArmISA::TLB::UserMode")

            if self.flavor in ("relexp", "exp"):
                # For exclusive pair ops the alignment check is based on
                # the total size
                self.memFlags.append("%d" % int(math.log(self.size, 2) + 1))
            elif not (self.size == 16 and self.top):
                # Only the first microop should perform alignment checking.
                self.memFlags.append("%d" % int(math.log(self.size, 2)))

            if self.flavor not in ("release", "relex", "exclusive",
                                   "relexp", "exp"):
                self.memFlags.append("ArmISA::TLB::AllowUnaligned")

            if self.micro:
                self.instFlags.append("IsMicroop")

            if self.flavor in ("release", "relex", "relexp"):
                self.instFlags.extend(["IsMemBarrier",
                                       "IsWriteBarrier",
                                       "IsReadBarrier"])
            if self.flavor in ("relex", "exclusive", "exp", "relexp"):
                self.instFlags.append("IsStoreConditional")
                self.memFlags.append("Request::LLSC")

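        # Fill in the templates for this instruction and append the
        # generated declaration, decoder, and execute code to the
        # module-level output strings.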
        def emitHelper(self, base = 'Memory64', wbDecl = None):
            global header_output, decoder_output, exec_output

            # If this is a microop itself, don't allow anything that would
            # require further microcoding.
            if self.micro:
                assert not wbDecl

            fa_code = None
            if not self.micro and self.flavor in ("normal", "release"):
                fa_code = '''
                    fault->annotate(ArmFault::SAS, %s);
                    fault->annotate(ArmFault::SSE, false);
                    fault->annotate(ArmFault::SRT, dest);
                    fault->annotate(ArmFault::SF, %s);
                    fault->annotate(ArmFault::AR, %s);
                ''' % ("0" if self.size == 1 else
                       "1" if self.size == 2 else
                       "2" if self.size == 4 else "3",
                       "true" if self.size == 8 else "false",
                       "true" if self.flavor == "release" else "false")

            (newHeader, newDecoder, newExec) = \
                self.fillTemplates(self.name, self.Name, self.codeBlobs,
                                   self.memFlags, self.instFlags,
                                   base, wbDecl, faCode=fa_code)

            header_output += newHeader
            decoder_output += newDecoder
            exec_output += newExec

        def buildEACode(self):
            # Address computation
            eaCode = ""
            if self.flavor == "fp":
                eaCode += vfp64EnabledCheckCode

            eaCode += SPAlignmentCheckCode + "EA = XBase"
            if self.size == 16:
                if self.top:
                    eaCode += " + (isBigEndian64(xc->tcBase()) ? 0 : 8)"
                else:
                    eaCode += " + (isBigEndian64(xc->tcBase()) ? 8 : 0)"
            if not self.post:
                eaCode += self.offset
            eaCode += ";"

            self.codeBlobs["ea_code"] = eaCode


    class StoreImmInst64(StoreInst64):
        def __init__(self, *args, **kargs):
            super(StoreImmInst64, self).__init__(*args, **kargs)
            self.offset = "+ imm"

            self.wbDecl = "MicroAddXiUop(machInst, base, base, imm);"

    class StoreRegInst64(StoreInst64):
        def __init__(self, *args, **kargs):
            super(StoreRegInst64, self).__init__(*args, **kargs)
            self.offset = "+ extendReg64(XOffset, type, shiftAmt, 64)"

            self.wbDecl = \
                "MicroAddXERegUop(machInst, base, base, " + \
                " offset, type, shiftAmt);"

    class StoreRawRegInst64(StoreInst64):
        def __init__(self, *args, **kargs):
            super(StoreRawRegInst64, self).__init__(*args, **kargs)
            self.offset = ""

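    # Store of a single general purpose or FP/SIMD register. emit() builds
    # the effective address and memory access code for the requested size
    # and flavor, then hands them to emitHelper().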
    class StoreSingle64(StoreInst64):
        def emit(self):
            self.buildEACode()

            # Code that actually handles the access
            if self.flavor == "fp":
                if self.size in (1, 2, 4):
                    accCode = '''
                        Mem%(suffix)s =
                            cSwap(AA64FpDestP0%(suffix)s, isBigEndian64(xc->tcBase()));
                    '''
                elif self.size == 8 or (self.size == 16 and not self.top):
                    accCode = '''
                        uint64_t data = AA64FpDestP1_uw;
                        data = (data << 32) | AA64FpDestP0_uw;
                        Mem%(suffix)s = cSwap(data, isBigEndian64(xc->tcBase()));
                    '''
                elif self.size == 16 and self.top:
                    accCode = '''
                        uint64_t data = AA64FpDestP3_uw;
                        data = (data << 32) | AA64FpDestP2_uw;
                        Mem%(suffix)s = cSwap(data, isBigEndian64(xc->tcBase()));
                    '''
            else:
                accCode = \
                    'Mem%(suffix)s = cSwap(XDest%(suffix)s, isBigEndian64(xc->tcBase()));'
            if self.size == 16:
                accCode = accCode % \
                    { "suffix" : buildMemSuffix(False, 8) }
            else:
                accCode = accCode % \
                    { "suffix" : buildMemSuffix(False, self.size) }

            self.codeBlobs["memacc_code"] = accCode

            if self.flavor in ("relex", "exclusive"):
                self.instFlags.append("IsStoreConditional")
                self.memFlags.append("Request::LLSC")

            # Push it out to the output files
            wbDecl = None
            if self.writeback and not self.micro:
                wbDecl = self.wbDecl
            self.emitHelper(self.base, wbDecl)

    class StoreDouble64(StoreInst64):
        def emit(self):
            self.buildEACode()

            # Code that actually handles the access
            if self.flavor == "fp":
                accCode = '''
                    uint64_t data = AA64FpDest2P0_uw;
                    data = isBigEndian64(xc->tcBase())
                            ? ((uint64_t(AA64FpDestP0_uw) << 32) | data)
                            : ((data << 32) | AA64FpDestP0_uw);
                    Mem_ud = cSwap(data, isBigEndian64(xc->tcBase()));
                '''
            else:
                if self.size == 4:
                    accCode = '''
                        uint64_t data = XDest2_uw;
                        data = isBigEndian64(xc->tcBase())
                                ? ((uint64_t(XDest_uw) << 32) | data)
                                : ((data << 32) | XDest_uw);
                        Mem_ud = cSwap(data, isBigEndian64(xc->tcBase()));
                    '''
                elif self.size == 8:
                    accCode = '''
                        // This temporary needs to be here so that the parser
                        // will correctly identify this instruction as a store.
                        std::array<uint64_t, 2> temp;
                        temp[0] = cSwap(XDest_ud, isBigEndian64(xc->tcBase()));
                        temp[1] = cSwap(XDest2_ud, isBigEndian64(xc->tcBase()));
                        Mem_tud = temp;
                    '''
            self.codeBlobs["memacc_code"] = accCode

            # Push it out to the output files
            wbDecl = None
            if self.writeback and not self.micro:
                wbDecl = self.wbDecl
            self.emitHelper(self.base, wbDecl)

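    # Concrete store variants. Each class pairs an offset/writeback mix-in
    # with StoreSingle64 and picks its decode constructor and memory base
    # class through the decConstBase and base attributes.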
    class StoreImm64(StoreImmInst64, StoreSingle64):
        decConstBase = 'LoadStoreImm64'
        base = 'ArmISA::MemoryImm64'
        writeback = False
        post = False

    class StorePre64(StoreImmInst64, StoreSingle64):
        decConstBase = 'LoadStoreImm64'
        base = 'ArmISA::MemoryPreIndex64'
        writeback = True
        post = False

    class StorePost64(StoreImmInst64, StoreSingle64):
        decConstBase = 'LoadStoreImm64'
        base = 'ArmISA::MemoryPostIndex64'
        writeback = True
        post = True

    class StoreReg64(StoreRegInst64, StoreSingle64):
        decConstBase = 'LoadStoreReg64'
        base = 'ArmISA::MemoryReg64'
        writeback = False
        post = False

    class StoreRaw64(StoreRawRegInst64, StoreSingle64):
        decConstBase = 'LoadStoreRaw64'
        base = 'ArmISA::MemoryRaw64'
        writeback = False
        post = False

    class StoreEx64(StoreRawRegInst64, StoreSingle64):
        decConstBase = 'LoadStoreEx64'
        base = 'ArmISA::MemoryEx64'
        writeback = False
        post = False
        execBase = 'StoreEx64'

        def __init__(self, *args, **kargs):
            super(StoreEx64, self).__init__(*args, **kargs)
            self.codeBlobs["postacc_code"] = \
                "XResult = !writeResult; SevMailbox = 1; LLSCLock = 0;"

    def buildStores64(mnem, NameBase, size, flavor="normal"):
        StoreImm64(mnem, NameBase + "_IMM", size, flavor=flavor).emit()
        StorePre64(mnem, NameBase + "_PRE", size, flavor=flavor).emit()
        StorePost64(mnem, NameBase + "_POST", size, flavor=flavor).emit()
        StoreReg64(mnem, NameBase + "_REG", size, flavor=flavor).emit()

    buildStores64("strb", "STRB64", 1)
    buildStores64("strh", "STRH64", 2)
    buildStores64("str", "STRW64", 4)
    buildStores64("str", "STRX64", 8)
    buildStores64("str", "STRBFP64", 1, flavor="fp")
    buildStores64("str", "STRHFP64", 2, flavor="fp")
    buildStores64("str", "STRSFP64", 4, flavor="fp")
    buildStores64("str", "STRDFP64", 8, flavor="fp")

    StoreImm64("sturb", "STURB64_IMM", 1).emit()
    StoreImm64("sturh", "STURH64_IMM", 2).emit()
    StoreImm64("stur", "STURW64_IMM", 4).emit()
    StoreImm64("stur", "STURX64_IMM", 8).emit()
    StoreImm64("stur", "STURBFP64_IMM", 1, flavor="fp").emit()
    StoreImm64("stur", "STURHFP64_IMM", 2, flavor="fp").emit()
    StoreImm64("stur", "STURSFP64_IMM", 4, flavor="fp").emit()
    StoreImm64("stur", "STURDFP64_IMM", 8, flavor="fp").emit()

    StoreImm64("sttrb", "STTRB64_IMM", 1, user=True).emit()
    StoreImm64("sttrh", "STTRH64_IMM", 2, user=True).emit()
    StoreImm64("sttr", "STTRW64_IMM", 4, user=True).emit()
    StoreImm64("sttr", "STTRX64_IMM", 8, user=True).emit()

    StoreRaw64("stlr", "STLRX64", 8, flavor="release").emit()
    StoreRaw64("stlr", "STLRW64", 4, flavor="release").emit()
    StoreRaw64("stlrh", "STLRH64", 2, flavor="release").emit()
    StoreRaw64("stlrb", "STLRB64", 1, flavor="release").emit()

    StoreEx64("stlxr", "STLXRX64", 8, flavor="relex").emit()
    StoreEx64("stlxr", "STLXRW64", 4, flavor="relex").emit()
    StoreEx64("stlxrh", "STLXRH64", 2, flavor="relex").emit()
    StoreEx64("stlxrb", "STLXRB64", 1, flavor="relex").emit()

    StoreEx64("stxr", "STXRX64", 8, flavor="exclusive").emit()
    StoreEx64("stxr", "STXRW64", 4, flavor="exclusive").emit()
    StoreEx64("stxrh", "STXRH64", 2, flavor="exclusive").emit()
    StoreEx64("stxrb", "STXRB64", 1, flavor="exclusive").emit()

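    # Micro-op versions of the stores above (marked IsMicroop) and the
    # store-exclusive-pair variant used for stlxp/stxp below.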
    class StoreImmU64(StoreImm64):
        decConstBase = 'LoadStoreImmU64'
        micro = True

    class StoreImmDU64(StoreImmInst64, StoreDouble64):
        decConstBase = 'LoadStoreImmDU64'
        base = 'ArmISA::MemoryDImm64'
        micro = True
        post = False
        writeback = False

    class StoreImmDEx64(StoreImmInst64, StoreDouble64):
        execBase = 'StoreEx64'
        decConstBase = 'StoreImmDEx64'
        base = 'ArmISA::MemoryDImmEx64'
        micro = False
        post = False
        writeback = False

        def __init__(self, *args, **kargs):
            super(StoreImmDEx64, self).__init__(*args, **kargs)
            self.codeBlobs["postacc_code"] = \
                "XResult = !writeResult; SevMailbox = 1; LLSCLock = 0;"

    class StoreRegU64(StoreReg64):
        decConstBase = 'LoadStoreRegU64'
        micro = True

    StoreImmDEx64("stlxp", "STLXPW64", 4, flavor="relexp").emit()
    StoreImmDEx64("stlxp", "STLXPX64", 8, flavor="relexp").emit()
    StoreImmDEx64("stxp", "STXPW64", 4, flavor="exp").emit()
    StoreImmDEx64("stxp", "STXPX64", 8, flavor="exp").emit()

    StoreImmU64("strxi_uop", "MicroStrXImmUop", 8).emit()
    StoreRegU64("strxr_uop", "MicroStrXRegUop", 8).emit()
    StoreImmU64("strfpxi_uop", "MicroStrFpXImmUop", 8, flavor="fp").emit()
    StoreRegU64("strfpxr_uop", "MicroStrFpXRegUop", 8, flavor="fp").emit()
    StoreImmU64("strqbfpxi_uop", "MicroStrQBFpXImmUop",
                16, flavor="fp", top=False).emit()
    StoreRegU64("strqbfpxr_uop", "MicroStrQBFpXRegUop",
                16, flavor="fp", top=False).emit()
    StoreImmU64("strqtfpxi_uop", "MicroStrQTFpXImmUop",
                16, flavor="fp", top=True).emit()
    StoreRegU64("strqtfpxr_uop", "MicroStrQTFpXRegUop",
                16, flavor="fp", top=True).emit()
    StoreImmDU64("strdxi_uop", "MicroStrDXImmUop", 4).emit()
    StoreImmDU64("strdfpxi_uop", "MicroStrDFpXImmUop", 4, flavor="fp").emit()

}};