// -*- mode:c++ -*-

// Copyright (c) 2011-2013,2017 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder. You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Gabe Black
let {{

    header_output = ""
    decoder_output = ""
    exec_output = ""

    class StoreInst64(LoadStoreInst):
        execBase = 'Store64'
        micro = False

        def __init__(self, mnem, Name, size=4, user=False, flavor="normal",
                     top = False):
            super(StoreInst64, self).__init__()

            self.name = mnem
            self.Name = Name
            self.size = size
            self.user = user
            self.flavor = flavor
            self.top = top

            self.memFlags = ["ArmISA::TLB::MustBeOne"]
            self.instFlags = []
            self.codeBlobs = { "postacc_code" : "" }

            # Add memory request flags where necessary
            if self.user:
                self.memFlags.append("ArmISA::TLB::UserMode")

            if self.flavor in ("relexp", "exp"):
                # For exclusive pair ops, the alignment check is based on
                # the total size of the pair.
                self.memFlags.append("%d" % int(math.log(self.size, 2) + 1))
            elif not (self.size == 16 and self.top):
                # Only the first microop should perform alignment checking.
                self.memFlags.append("%d" % int(math.log(self.size, 2)))
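            # Example: an ordinary 8-byte store gets alignment flag
            # log2(8) = 3 (8-byte alignment), while an 8-byte exclusive
            # pair covers 16 bytes in total and gets log2(8) + 1 = 4
            # (16-byte alignment).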

            if self.flavor not in ("release", "relex", "exclusive",
                                   "relexp", "exp"):
                self.memFlags.append("ArmISA::TLB::AllowUnaligned")

            if self.micro:
                self.instFlags.append("IsMicroop")

            if self.flavor in ("release", "relex", "relexp"):
                self.instFlags.extend(["IsMemBarrier",
                                       "IsWriteBarrier",
                                       "IsReadBarrier"])
            if self.flavor in ("relex", "exclusive", "exp", "relexp"):
                self.instFlags.append("IsStoreConditional")
                self.memFlags.append("Request::LLSC")
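                # Request::LLSC tags the request as the store half of a
                # load-linked/store-conditional pair, so the memory system
                # checks (and clears) the reservation before writing.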

        def emitHelper(self, base = 'Memory64', wbDecl = None):
            global header_output, decoder_output, exec_output

            # If this is a microop itself, don't allow anything that would
            # require further microcoding.
            if self.micro:
                assert not wbDecl

            fa_code = None
            if not self.micro and self.flavor in ("normal", "release"):
                fa_code = '''
                    fault->annotate(ArmFault::SAS, %s);
                    fault->annotate(ArmFault::SSE, false);
                    fault->annotate(ArmFault::SRT, dest);
                    fault->annotate(ArmFault::SF, %s);
                    fault->annotate(ArmFault::AR, %s);
                ''' % ("0" if self.size == 1 else
                       "1" if self.size == 2 else
                       "2" if self.size == 4 else "3",
                       "true" if self.size == 8 else "false",
                       "true" if self.flavor == "release" else "false")

            (newHeader, newDecoder, newExec) = \
                self.fillTemplates(self.name, self.Name, self.codeBlobs,
                                   self.memFlags, self.instFlags,
                                   base, wbDecl, faCode=fa_code)

            header_output += newHeader
            decoder_output += newDecoder
            exec_output += newExec

        def buildEACode(self):
            # Address computation
            eaCode = ""
            if self.flavor == "fp":
                eaCode += vfp64EnabledCheckCode

            eaCode += SPAlignmentCheckCode + "EA = XBase"
            if self.size == 16:
                if self.top:
                    eaCode += " + (isBigEndian64(xc->tcBase()) ? 0 : 8)"
                else:
                    eaCode += " + (isBigEndian64(xc->tcBase()) ? 8 : 0)"
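            # A 16-byte store is split into two 8-byte microops: on a
            # little-endian guest the bottom half is written at EA + 0
            # and the top half at EA + 8, while a big-endian guest swaps
            # the two halves.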
            if not self.post:
                eaCode += self.offset
            eaCode += ";"

            self.codeBlobs["ea_code"] = eaCode


    class StoreImmInst64(StoreInst64):
        def __init__(self, *args, **kargs):
            super(StoreImmInst64, self).__init__(*args, **kargs)
            self.offset = "+ imm"

            self.wbDecl = "MicroAddXiUop(machInst, base, base, imm);"
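            # Base-register writeback is handled by a separate
            # add-immediate microop rather than being folded into the
            # store itself.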

    class StoreRegInst64(StoreInst64):
        def __init__(self, *args, **kargs):
            super(StoreRegInst64, self).__init__(*args, **kargs)
            self.offset = "+ extendReg64(XOffset, type, shiftAmt, 64)"

            self.wbDecl = \
                "MicroAddXERegUop(machInst, base, base, " + \
                " offset, type, shiftAmt);"

    class StoreRawRegInst64(StoreInst64):
        def __init__(self, *args, **kargs):
            super(StoreRawRegInst64, self).__init__(*args, **kargs)
            self.offset = ""

    class StoreSingle64(StoreInst64):
        def emit(self):
            self.buildEACode()

            # Code that actually handles the access
            if self.flavor == "fp":
                if self.size in (1, 2, 4):
                    accCode = '''
                        Mem%(suffix)s =
                            cSwap(AA64FpDestP0%(suffix)s, isBigEndian64(xc->tcBase()));
                    '''
                elif self.size == 8 or (self.size == 16 and not self.top):
                    accCode = '''
                        uint64_t data = AA64FpDestP1_uw;
                        data = (data << 32) | AA64FpDestP0_uw;
                        Mem%(suffix)s = cSwap(data, isBigEndian64(xc->tcBase()));
                    '''
                elif self.size == 16 and self.top:
                    accCode = '''
                        uint64_t data = AA64FpDestP3_uw;
                        data = (data << 32) | AA64FpDestP2_uw;
                        Mem%(suffix)s = cSwap(data, isBigEndian64(xc->tcBase()));
                    '''
            else:
                accCode = \
                    'Mem%(suffix)s = cSwap(XDest%(suffix)s, isBigEndian64(xc->tcBase()));'
            if self.size == 16:
                accCode = accCode % \
                    { "suffix" : buildMemSuffix(False, 8) }
            else:
                accCode = accCode % \
                    { "suffix" : buildMemSuffix(False, self.size) }

            self.codeBlobs["memacc_code"] = accCode

            # The exclusive flavors ("relex", "exclusive") already picked
            # up IsStoreConditional and Request::LLSC in __init__, so no
            # extra flags are needed here.

            # Push it out to the output files
            wbDecl = None
            if self.writeback and not self.micro:
                wbDecl = self.wbDecl
            self.emitHelper(self.base, wbDecl)

    class StoreDouble64(StoreInst64):
        def emit(self):
            self.buildEACode()

            # Code that actually handles the access
            if self.flavor == "fp":
                accCode = '''
                    uint64_t data = AA64FpDest2P0_uw;
                    data = (data << 32) | AA64FpDestP0_uw;
                    Mem_ud = cSwap(data, isBigEndian64(xc->tcBase()));
                '''
            else:
                if self.size == 4:
                    accCode = '''
                        uint64_t data = XDest2_uw;
                        data = (data << 32) | XDest_uw;
                        Mem_ud = cSwap(data, isBigEndian64(xc->tcBase()));
                    '''
                elif self.size == 8:
                    accCode = '''
                        // This temporary needs to be here so that the parser
                        // will correctly identify this instruction as a store.
                        Twin64_t temp;
                        temp.a = XDest_ud;
                        temp.b = XDest2_ud;
                        Mem_tud = temp;
                    '''
            self.codeBlobs["memacc_code"] = accCode

            # Push it out to the output files
            wbDecl = None
            if self.writeback and not self.micro:
                wbDecl = self.wbDecl
            self.emitHelper(self.base, wbDecl)

    class StoreImm64(StoreImmInst64, StoreSingle64):
        decConstBase = 'LoadStoreImm64'
        base = 'ArmISA::MemoryImm64'
        writeback = False
        post = False

    class StorePre64(StoreImmInst64, StoreSingle64):
        decConstBase = 'LoadStoreImm64'
        base = 'ArmISA::MemoryPreIndex64'
        writeback = True
        post = False

    class StorePost64(StoreImmInst64, StoreSingle64):
        decConstBase = 'LoadStoreImm64'
        base = 'ArmISA::MemoryPostIndex64'
        writeback = True
        post = True

    class StoreReg64(StoreRegInst64, StoreSingle64):
        decConstBase = 'LoadStoreReg64'
        base = 'ArmISA::MemoryReg64'
        writeback = False
        post = False

    class StoreRaw64(StoreRawRegInst64, StoreSingle64):
        decConstBase = 'LoadStoreRaw64'
        base = 'ArmISA::MemoryRaw64'
        writeback = False
        post = False

    class StoreEx64(StoreRawRegInst64, StoreSingle64):
        decConstBase = 'LoadStoreEx64'
        base = 'ArmISA::MemoryEx64'
        writeback = False
        post = False
        execBase = 'StoreEx64'
        def __init__(self, *args, **kargs):
            super(StoreEx64, self).__init__(*args, **kargs)
            self.codeBlobs["postacc_code"] = \
                "XResult = !writeResult; SevMailbox = 1; LLSCLock = 0;"

    def buildStores64(mnem, NameBase, size, flavor="normal"):
        StoreImm64(mnem, NameBase + "_IMM", size, flavor=flavor).emit()
        StorePre64(mnem, NameBase + "_PRE", size, flavor=flavor).emit()
        StorePost64(mnem, NameBase + "_POST", size, flavor=flavor).emit()
        StoreReg64(mnem, NameBase + "_REG", size, flavor=flavor).emit()
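    # Emits the immediate, pre-indexed, post-indexed, and register-offset
    # variants of one store mnemonic in a single call.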

    buildStores64("strb", "STRB64", 1)
    buildStores64("strh", "STRH64", 2)
    buildStores64("str", "STRW64", 4)
    buildStores64("str", "STRX64", 8)
    buildStores64("str", "STRBFP64", 1, flavor="fp")
    buildStores64("str", "STRHFP64", 2, flavor="fp")
    buildStores64("str", "STRSFP64", 4, flavor="fp")
    buildStores64("str", "STRDFP64", 8, flavor="fp")

    StoreImm64("sturb", "STURB64_IMM", 1).emit()
    StoreImm64("sturh", "STURH64_IMM", 2).emit()
    StoreImm64("stur", "STURW64_IMM", 4).emit()
    StoreImm64("stur", "STURX64_IMM", 8).emit()
    StoreImm64("stur", "STURBFP64_IMM", 1, flavor="fp").emit()
    StoreImm64("stur", "STURHFP64_IMM", 2, flavor="fp").emit()
    StoreImm64("stur", "STURSFP64_IMM", 4, flavor="fp").emit()
    StoreImm64("stur", "STURDFP64_IMM", 8, flavor="fp").emit()

    StoreImm64("sttrb", "STTRB64_IMM", 1, user=True).emit()
    StoreImm64("sttrh", "STTRH64_IMM", 2, user=True).emit()
    StoreImm64("sttr", "STTRW64_IMM", 4, user=True).emit()
    StoreImm64("sttr", "STTRX64_IMM", 8, user=True).emit()

    StoreRaw64("stlr", "STLRX64", 8, flavor="release").emit()
    StoreRaw64("stlr", "STLRW64", 4, flavor="release").emit()
    StoreRaw64("stlrh", "STLRH64", 2, flavor="release").emit()
    StoreRaw64("stlrb", "STLRB64", 1, flavor="release").emit()

    StoreEx64("stlxr", "STLXRX64", 8, flavor="relex").emit()
    StoreEx64("stlxr", "STLXRW64", 4, flavor="relex").emit()
    StoreEx64("stlxrh", "STLXRH64", 2, flavor="relex").emit()
    StoreEx64("stlxrb", "STLXRB64", 1, flavor="relex").emit()

    StoreEx64("stxr", "STXRX64", 8, flavor="exclusive").emit()
    StoreEx64("stxr", "STXRW64", 4, flavor="exclusive").emit()
    StoreEx64("stxrh", "STXRH64", 2, flavor="exclusive").emit()
    StoreEx64("stxrb", "STXRB64", 1, flavor="exclusive").emit()

    class StoreImmU64(StoreImm64):
        decConstBase = 'LoadStoreImmU64'
        micro = True

    class StoreImmDU64(StoreImmInst64, StoreDouble64):
        decConstBase = 'LoadStoreImmDU64'
        base = 'ArmISA::MemoryDImm64'
        micro = True
        post = False
        writeback = False

    class StoreImmDEx64(StoreImmInst64, StoreDouble64):
        execBase = 'StoreEx64'
        decConstBase = 'StoreImmDEx64'
        base = 'ArmISA::MemoryDImmEx64'
        micro = False
        post = False
        writeback = False
        def __init__(self, *args, **kargs):
            super(StoreImmDEx64, self).__init__(*args, **kargs)
            self.codeBlobs["postacc_code"] = \
                "XResult = !writeResult; SevMailbox = 1; LLSCLock = 0;"

    class StoreRegU64(StoreReg64):
        decConstBase = 'LoadStoreRegU64'
        micro = True

    StoreImmDEx64("stlxp", "STLXPW64", 4, flavor="relexp").emit()
    StoreImmDEx64("stlxp", "STLXPX64", 8, flavor="relexp").emit()
    StoreImmDEx64("stxp", "STXPW64", 4, flavor="exp").emit()
    StoreImmDEx64("stxp", "STXPX64", 8, flavor="exp").emit()

    StoreImmU64("strxi_uop", "MicroStrXImmUop", 8).emit()
    StoreRegU64("strxr_uop", "MicroStrXRegUop", 8).emit()
    StoreImmU64("strfpxi_uop", "MicroStrFpXImmUop", 8, flavor="fp").emit()
    StoreRegU64("strfpxr_uop", "MicroStrFpXRegUop", 8, flavor="fp").emit()
    StoreImmU64("strqbfpxi_uop", "MicroStrQBFpXImmUop",
                16, flavor="fp", top=False).emit()
    StoreRegU64("strqbfpxr_uop", "MicroStrQBFpXRegUop",
                16, flavor="fp", top=False).emit()
    StoreImmU64("strqtfpxi_uop", "MicroStrQTFpXImmUop",
                16, flavor="fp", top=True).emit()
    StoreRegU64("strqtfpxr_uop", "MicroStrQTFpXRegUop",
                16, flavor="fp", top=True).emit()
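    # The QB/QT microops store the bottom (top=False) and top (top=True)
    # 8 bytes of a 128-bit value; buildEACode picks the byte offset for
    # each half based on guest endianness.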
    StoreImmDU64("strdxi_uop", "MicroStrDXImmUop", 4).emit()
    StoreImmDU64("strdfpxi_uop", "MicroStrDFpXImmUop", 4, flavor="fp").emit()

}};