1// -*- mode:c++ -*-
2
3// Copyright (c) 2010 ARM Limited
4// All rights reserved
5//
6// The license below extends only to copyright in the software and shall
7// not be construed as granting a license to any other intellectual
8// property including but not limited to intellectual property relating
9// to a hardware implementation of the functionality of the software
10// licensed hereunder. You may use the software subject to the license
11// terms below provided that you ensure that this notice is replicated
12// unmodified and in its entirety in all distributions of the software,
13// modified or unmodified, in source code or in binary form.
14//
15// Redistribution and use in source and binary forms, with or without
16// modification, are permitted provided that the following conditions are
17// met: redistributions of source code must retain the above copyright
18// notice, this list of conditions and the following disclaimer;
19// redistributions in binary form must reproduce the above copyright
20// notice, this list of conditions and the following disclaimer in the
21// documentation and/or other materials provided with the distribution;
22// neither the name of the copyright holders nor the names of its
23// contributors may be used to endorse or promote products derived from
24// this software without specific prior written permission.
25//
26// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
27// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
28// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
29// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
30// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
31// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
32// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
33// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
34// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
35// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
36// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
37//
38// Authors: Gabe Black
39
40let {{
41
    # Accumulators for the generated C++.  The ISA parser splices these
    # strings into the decoder header, decoder source, and exec source
    # respectively; every emitted instruction appends to them.
    header_output = ""
    decoder_output = ""
    exec_output = ""
45
46 class LoadInst(LoadStoreInst):
47 execBase = 'Load'
48
49 def __init__(self, mnem, post, add, writeback,
50 size=4, sign=False, user=False, flavor="normal"):
51 super(LoadInst, self).__init__()
52
53 self.name = mnem
54 self.post = post
55 self.add = add
56 self.writeback = writeback
57 self.size = size
58 self.sign = sign
59 self.user = user
60 self.flavor = flavor
61
62 if self.add:
63 self.op = " +"
64 else:
65 self.op = " -"
66
67 self.memFlags = ["ArmISA::TLB::MustBeOne"]
68 self.codeBlobs = {"postacc_code" : ""}
69
70 def emitHelper(self, base = 'Memory', wbDecl = None, instFlags = []):
70 def emitHelper(self, base = 'Memory', wbDecl = None, instFlags = [], pcDecl = None):
71
72 global header_output, decoder_output, exec_output
73
74 codeBlobs = self.codeBlobs
75 codeBlobs["predicate_test"] = pickPredicate(codeBlobs)
76 (newHeader,
77 newDecoder,
78 newExec) = self.fillTemplates(self.name, self.Name, codeBlobs,
79 self.memFlags, instFlags, base, wbDecl)
79 self.memFlags, instFlags, base,
80 wbDecl, pcDecl)
81
82 header_output += newHeader
83 decoder_output += newDecoder
84 exec_output += newExec
85
86 class RfeInst(LoadInst):
87 decConstBase = 'Rfe'
88
89 def __init__(self, mnem, post, add, writeback):
90 super(RfeInst, self).__init__(mnem, post, add, writeback)
91 self.Name = "RFE_" + loadImmClassName(post, add, writeback, 8)
92
93 self.memFlags.append("ArmISA::TLB::AlignWord")
94
95 def emit(self):
96 offset = 0
97 if self.post != self.add:
98 offset += 4
99 if not self.add:
100 offset -= 8
101 self.codeBlobs["ea_code"] = "EA = Base + %d;" % offset
102
103 wbDiff = -8
104 if self.add:
105 wbDiff = 8
106 accCode = '''
107 CPSR cpsr = Cpsr;
107 SCTLR sctlr = Sctlr;
108 // Use the version of NPC that gets set before NextThumb
109 pNPC = cSwap<uint32_t>(Mem.ud, cpsr.e);
110 uint32_t tempSpsr = cSwap<uint32_t>(Mem.ud >> 32, cpsr.e);
111 uint32_t newCpsr =
112 cpsrWriteByInstr(cpsr | CondCodes, tempSpsr,
113 0xF, true, sctlr.nmfi);
114 Cpsr = ~CondCodesMask & newCpsr;
115 NextThumb = ((CPSR)newCpsr).t;
116 NextJazelle = ((CPSR)newCpsr).j;
117 ForcedItState = ((((CPSR)tempSpsr).it2 << 2) & 0xFC)
118 | (((CPSR)tempSpsr).it1 & 0x3);
119 CondCodes = CondCodesMask & newCpsr;
108 URc = cpsr | CondCodes;
109 URa = cSwap<uint32_t>(Mem.ud, cpsr.e);
110 URb = cSwap<uint32_t>(Mem.ud >> 32, cpsr.e);
111 '''
112 self.codeBlobs["memacc_code"] = accCode
113
114 wbDecl = None
115 pcDecl = "MicroUopSetPCCPSR(machInst, INTREG_UREG0, INTREG_UREG1, INTREG_UREG2);"
116
117 if self.writeback:
118 wbDecl = "MicroAddiUop(machInst, base, base, %d);" % wbDiff
126 self.emitHelper('RfeOp', wbDecl, ["IsSerializeAfter", "IsNonSpeculative"])
119 self.emitHelper('RfeOp', wbDecl, ["IsSerializeAfter", "IsNonSpeculative"], pcDecl)
120
121 class LoadImmInst(LoadInst):
122 def __init__(self, *args, **kargs):
123 super(LoadImmInst, self).__init__(*args, **kargs)
124 self.offset = self.op + " imm"
125
126 if self.add:
127 self.wbDecl = "MicroAddiUop(machInst, base, base, imm);"
128 else:
129 self.wbDecl = "MicroSubiUop(machInst, base, base, imm);"
130
131 class LoadRegInst(LoadInst):
132 def __init__(self, *args, **kargs):
133 super(LoadRegInst, self).__init__(*args, **kargs)
134 self.offset = self.op + " shift_rm_imm(Index, shiftAmt," + \
135 " shiftType, CondCodes<29:>)"
136 if self.add:
137 self.wbDecl = '''
138 MicroAddUop(machInst, base, base, wbIndexReg, shiftAmt, shiftType);
139 '''
140 else:
141 self.wbDecl = '''
142 MicroSubUop(machInst, base, base, wbIndexReg, shiftAmt, shiftType);
143 '''
144
145 class LoadSingle(LoadInst):
146 def __init__(self, *args, **kargs):
147 super(LoadSingle, self).__init__(*args, **kargs)
148
149 # Build the default class name
150 self.Name = self.nameFunc(self.post, self.add, self.writeback,
151 self.size, self.sign, self.user)
152
153 # Add memory request flags where necessary
154 self.memFlags.append("%d" % (self.size - 1))
155 if self.user:
156 self.memFlags.append("ArmISA::TLB::UserMode")
157
158 self.instFlags = []
159 if self.flavor == "dprefetch":
160 self.memFlags.append("Request::PREFETCH")
161 self.instFlags = ['IsDataPrefetch']
162 elif self.flavor == "iprefetch":
163 self.memFlags.append("Request::PREFETCH")
164 self.instFlags = ['IsInstPrefetch']
165 elif self.flavor == "exclusive":
166 self.memFlags.append("Request::LLSC")
167 elif self.flavor == "normal":
168 self.memFlags.append("ArmISA::TLB::AllowUnaligned")
169
170 # Disambiguate the class name for different flavors of loads
171 if self.flavor != "normal":
172 self.Name = "%s_%s" % (self.name.upper(), self.Name)
173
174 def emit(self):
175 # Address compuation code
176 eaCode = "EA = Base"
177 if not self.post:
178 eaCode += self.offset
179 eaCode += ";"
180
181 if self.flavor == "fp":
182 eaCode += vfpEnabledCheckCode
183
184 self.codeBlobs["ea_code"] = eaCode
185
186 # Code that actually handles the access
187 if self.flavor == "dprefetch" or self.flavor == "iprefetch":
188 accCode = 'uint64_t temp = Mem%s; temp = temp;'
189 elif self.flavor == "fp":
190 accCode = "FpDest.uw = cSwap(Mem%s, ((CPSR)Cpsr).e);\n"
191 else:
192 accCode = "IWDest = cSwap(Mem%s, ((CPSR)Cpsr).e);"
193 accCode = accCode % buildMemSuffix(self.sign, self.size)
194
195 self.codeBlobs["memacc_code"] = accCode
196
197 # Push it out to the output files
198 base = buildMemBase(self.basePrefix, self.post, self.writeback)
199 wbDecl = None
200 if self.writeback:
201 wbDecl = self.wbDecl
202 self.emitHelper(base, wbDecl, self.instFlags)
203
204 def loadImmClassName(post, add, writeback, size=4, sign=False, user=False):
205 return memClassName("LOAD_IMM", post, add, writeback, size, sign, user)
206
    # Concrete immediate-offset single-register load.
    class LoadImm(LoadImmInst, LoadSingle):
        decConstBase = 'LoadImm'    # decode-constructor template base
        basePrefix = 'MemoryImm'    # generated C++ base class prefix
        nameFunc = staticmethod(loadImmClassName)  # builds self.Name
211
212 def loadRegClassName(post, add, writeback, size=4, sign=False, user=False):
213 return memClassName("LOAD_REG", post, add, writeback, size, sign, user)
214
    # Concrete register-offset single-register load.
    class LoadReg(LoadRegInst, LoadSingle):
        decConstBase = 'LoadReg'    # decode-constructor template base
        basePrefix = 'MemoryReg'    # generated C++ base class prefix
        nameFunc = staticmethod(loadRegClassName)  # builds self.Name
219
220 class LoadDouble(LoadInst):
221 def __init__(self, *args, **kargs):
222 super(LoadDouble, self).__init__(*args, **kargs)
223
224 # Build the default class name
225 self.Name = self.nameFunc(self.post, self.add, self.writeback)
226
227 # Add memory request flags where necessary
228 if self.flavor == "exclusive":
229 self.memFlags.append("Request::LLSC")
230 self.memFlags.append("ArmISA::TLB::AlignDoubleWord")
231 else:
232 self.memFlags.append("ArmISA::TLB::AlignWord")
233
234 # Disambiguate the class name for different flavors of loads
235 if self.flavor != "normal":
236 self.Name = "%s_%s" % (self.name.upper(), self.Name)
237
238 def emit(self):
239 # Address computation code
240 eaCode = "EA = Base"
241 if not self.post:
242 eaCode += self.offset
243 eaCode += ";"
244
245 if self.flavor == "fp":
246 eaCode += vfpEnabledCheckCode
247
248 self.codeBlobs["ea_code"] = eaCode
249
250 # Code that actually handles the access
251 if self.flavor != "fp":
252 accCode = '''
253 CPSR cpsr = Cpsr;
254 Dest = cSwap<uint32_t>(Mem.ud, cpsr.e);
255 Dest2 = cSwap<uint32_t>(Mem.ud >> 32, cpsr.e);
256 '''
257 else:
258 accCode = '''
259 uint64_t swappedMem = cSwap(Mem.ud, ((CPSR)Cpsr).e);
260 FpDest.uw = (uint32_t)swappedMem;
261 FpDest2.uw = (uint32_t)(swappedMem >> 32);
262 '''
263
264 self.codeBlobs["memacc_code"] = accCode
265
266 # Push it out to the output files
267 base = buildMemBase(self.basePrefix, self.post, self.writeback)
268 wbDecl = None
269 if self.writeback:
270 wbDecl = self.wbDecl
271 self.emitHelper(base, wbDecl)
272
273 def loadDoubleImmClassName(post, add, writeback):
274 return memClassName("LOAD_IMMD", post, add, writeback, 4, False, False)
275
    # Concrete immediate-offset doubleword load.
    class LoadDoubleImm(LoadImmInst, LoadDouble):
        decConstBase = 'LoadStoreDImm'  # decode-constructor template base
        basePrefix = 'MemoryDImm'       # generated C++ base class prefix
        nameFunc = staticmethod(loadDoubleImmClassName)  # builds self.Name
280
281 def loadDoubleRegClassName(post, add, writeback):
282 return memClassName("LOAD_REGD", post, add, writeback, 4, False, False)
283
    # Concrete register-offset doubleword load.
    class LoadDoubleReg(LoadRegInst, LoadDouble):
        decConstBase = 'LoadDReg'   # decode-constructor template base
        basePrefix = 'MemoryDReg'   # generated C++ base class prefix
        nameFunc = staticmethod(loadDoubleRegClassName)  # builds self.Name
288
289 def buildLoads(mnem, size=4, sign=False, user=False):
290 LoadImm(mnem, True, True, True, size, sign, user).emit()
291 LoadReg(mnem, True, True, True, size, sign, user).emit()
292 LoadImm(mnem, True, False, True, size, sign, user).emit()
293 LoadReg(mnem, True, False, True, size, sign, user).emit()
294 LoadImm(mnem, False, True, True, size, sign, user).emit()
295 LoadReg(mnem, False, True, True, size, sign, user).emit()
296 LoadImm(mnem, False, False, True, size, sign, user).emit()
297 LoadReg(mnem, False, False, True, size, sign, user).emit()
298 LoadImm(mnem, False, True, False, size, sign, user).emit()
299 LoadReg(mnem, False, True, False, size, sign, user).emit()
300 LoadImm(mnem, False, False, False, size, sign, user).emit()
301 LoadReg(mnem, False, False, False, size, sign, user).emit()
302
303 def buildDoubleLoads(mnem):
304 LoadDoubleImm(mnem, True, True, True).emit()
305 LoadDoubleReg(mnem, True, True, True).emit()
306 LoadDoubleImm(mnem, True, False, True).emit()
307 LoadDoubleReg(mnem, True, False, True).emit()
308 LoadDoubleImm(mnem, False, True, True).emit()
309 LoadDoubleReg(mnem, False, True, True).emit()
310 LoadDoubleImm(mnem, False, False, True).emit()
311 LoadDoubleReg(mnem, False, False, True).emit()
312 LoadDoubleImm(mnem, False, True, False).emit()
313 LoadDoubleReg(mnem, False, True, False).emit()
314 LoadDoubleImm(mnem, False, False, False).emit()
315 LoadDoubleReg(mnem, False, False, False).emit()
316
317 def buildRfeLoads(mnem):
318 RfeInst(mnem, True, True, True).emit()
319 RfeInst(mnem, True, True, False).emit()
320 RfeInst(mnem, True, False, True).emit()
321 RfeInst(mnem, True, False, False).emit()
322 RfeInst(mnem, False, True, True).emit()
323 RfeInst(mnem, False, True, False).emit()
324 RfeInst(mnem, False, False, True).emit()
325 RfeInst(mnem, False, False, False).emit()
326
327 def buildPrefetches(mnem, type):
328 LoadReg(mnem, False, False, False, size=1, flavor=type).emit()
329 LoadImm(mnem, False, False, False, size=1, flavor=type).emit()
330 LoadReg(mnem, False, True, False, size=1, flavor=type).emit()
331 LoadImm(mnem, False, True, False, size=1, flavor=type).emit()
332
333 buildLoads("ldr")
334 buildLoads("ldrt", user=True)
335 buildLoads("ldrb", size=1)
336 buildLoads("ldrbt", size=1, user=True)
337 buildLoads("ldrsb", size=1, sign=True)
338 buildLoads("ldrsbt", size=1, sign=True, user=True)
339 buildLoads("ldrh", size=2)
340 buildLoads("ldrht", size=2, user=True)
341 buildLoads("hdrsh", size=2, sign=True)
342 buildLoads("ldrsht", size=2, sign=True, user=True)
343
344 buildDoubleLoads("ldrd")
345
346 buildRfeLoads("rfe")
347
348 buildPrefetches("pld", "dprefetch")
349 buildPrefetches("pldw", "dprefetch")
350 buildPrefetches("pli", "iprefetch")
351
352 LoadImm("ldrex", False, True, False, size=4, flavor="exclusive").emit()
353 LoadImm("ldrexh", False, True, False, size=2, flavor="exclusive").emit()
354 LoadImm("ldrexb", False, True, False, size=1, flavor="exclusive").emit()
355 LoadDoubleImm("ldrexd", False, True, False, flavor="exclusive").emit()
356
357 LoadImm("vldr", False, True, False, size=4, flavor="fp").emit()
358 LoadImm("vldr", False, False, False, size=4, flavor="fp").emit()
359 LoadDoubleImm("vldr", False, True, False, flavor="fp").emit()
360 LoadDoubleImm("vldr", False, False, False, flavor="fp").emit()
361}};