mem64.isa (12359:8fb4630c444f) mem64.isa (12616:4b463b4dc098)
1// -*- mode:c++ -*-
2
3// Copyright (c) 2011-2014, 2017 ARM Limited
4// All rights reserved
5//
6// The license below extends only to copyright in the software and shall
7// not be construed as granting a license to any other intellectual
8// property including but not limited to intellectual property relating
9// to a hardware implementation of the functionality of the software
10// licensed hereunder. You may use the software subject to the license
11// terms below provided that you ensure that this notice is replicated
12// unmodified and in its entirety in all distributions of the software,
13// modified or unmodified, in source code or in binary form.
14//
15// Redistribution and use in source and binary forms, with or without
16// modification, are permitted provided that the following conditions are
17// met: redistributions of source code must retain the above copyright
18// notice, this list of conditions and the following disclaimer;
19// redistributions in binary form must reproduce the above copyright
20// notice, this list of conditions and the following disclaimer in the
21// documentation and/or other materials provided with the distribution;
22// neither the name of the copyright holders nor the names of its
23// contributors may be used to endorse or promote products derived from
24// this software without specific prior written permission.
25//
26// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
27// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
28// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
29// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
30// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
31// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
32// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
33// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
34// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
35// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
36// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
37//
38// Authors: Gabe Black
39
let {{
    # C++ fragment spliced into the EA code of SP-relative accesses: if the
    # base register is SP and its low four bits are non-zero (i.e. SP is not
    # 16-byte aligned) while SP alignment checking is enabled for the current
    # context, the access faults instead of executing.
    SPAlignmentCheckCode = '''
        if (baseIsSP && bits(XBase, 3, 0) &&
            SPAlignmentCheckEnabled(xc->tcBase())) {
            return std::make_shared<SPAlignmentFault>();
        }
    '''
}};
48
def template Load64Execute {{
    // Atomic-mode execute for 64-bit loads: compute the effective address,
    // read memory in one shot, run the access code, then write results back.
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;   // may set fault (e.g. SP alignment check)

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
72
def template Store64Execute {{
    // Atomic-mode execute for 64-bit stores: compute the EA, build the store
    // data, perform the write, then do any register writeback.
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;   // fill Mem with the data to store
        }

        if (fault == NoFault) {
            // NULL result pointer: no store-exclusive status is needed here.
            fault = writeMemAtomic(xc, traceData, Mem, EA,
                                   memAccessFlags, NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
100
def template Store64InitiateAcc {{
    // Timing-mode first half of a store: computes the EA and issues the
    // write; completion is handled by the matching CompleteAcc template.
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                                   NULL);
        }

        return fault;
    }
}};
124
def template StoreEx64Execute {{
    // Atomic-mode execute for store-exclusive: like Store64Execute, but the
    // write returns a result (success/failure) consumed by postacc_code
    // before the final writeback.
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        uint64_t writeResult = 0;
        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                                   &writeResult);
        }

        if (fault == NoFault) {
            %(postacc_code)s;   // typically sets the status register
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
157
def template StoreEx64InitiateAcc {{
    // Timing-mode first half of a store-exclusive; the exclusive result is
    // picked up later from the response packet in StoreEx64CompleteAcc.
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                                   NULL);
        }

        return fault;
    }
}};
181
def template Load64InitiateAcc {{
    // Timing-mode first half of a load: compute the EA and launch the read.
    // Only source operands are declared; results are written in CompleteAcc.
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
        }

        return fault;
    }
}};
200
def template Load64CompleteAcc {{
    // Timing-mode second half of a load: unpack the response packet into
    // Mem, run the access code, and write back destinations.
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        // ARM instructions will not have a pkt if the predicate is false
        getMem(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
224
def template Store64CompleteAcc {{
    // Plain stores have nothing left to do once the write completes.
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};
232
def template StoreEx64CompleteAcc {{
    // Timing-mode second half of a store-exclusive: recover the exclusive
    // write result from the request's extra data and run postacc_code
    // (which typically records success/failure) before writeback.
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        uint64_t writeResult = pkt->req->getExtraData();
        %(postacc_code)s;

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
252
253def template DCStore64Declare {{
254 class %(class_name)s : public %(base_class)s
255 {
256 public:
257
258 /// Constructor.
259 %(class_name)s(ExtMachInst machInst, IntRegIndex _base,
260 MiscRegIndex _dest, uint64_t _imm);
261
1// -*- mode:c++ -*-
2
3// Copyright (c) 2011-2014, 2017 ARM Limited
4// All rights reserved
5//
6// The license below extends only to copyright in the software and shall
7// not be construed as granting a license to any other intellectual
8// property including but not limited to intellectual property relating
9// to a hardware implementation of the functionality of the software
10// licensed hereunder. You may use the software subject to the license
11// terms below provided that you ensure that this notice is replicated
12// unmodified and in its entirety in all distributions of the software,
13// modified or unmodified, in source code or in binary form.
14//
15// Redistribution and use in source and binary forms, with or without
16// modification, are permitted provided that the following conditions are
17// met: redistributions of source code must retain the above copyright
18// notice, this list of conditions and the following disclaimer;
19// redistributions in binary form must reproduce the above copyright
20// notice, this list of conditions and the following disclaimer in the
21// documentation and/or other materials provided with the distribution;
22// neither the name of the copyright holders nor the names of its
23// contributors may be used to endorse or promote products derived from
24// this software without specific prior written permission.
25//
26// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
27// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
28// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
29// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
30// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
31// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
32// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
33// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
34// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
35// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
36// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
37//
38// Authors: Gabe Black
39
40let {{
41 SPAlignmentCheckCode = '''
42 if (baseIsSP && bits(XBase, 3, 0) &&
43 SPAlignmentCheckEnabled(xc->tcBase())) {
44 return std::make_shared<SPAlignmentFault>();
45 }
46 '''
47}};
48
49def template Load64Execute {{
50 Fault %(class_name)s::execute(ExecContext *xc,
51 Trace::InstRecord *traceData) const
52 {
53 Addr EA;
54 Fault fault = NoFault;
55
56 %(op_decl)s;
57 %(op_rd)s;
58 %(ea_code)s;
59
60 if (fault == NoFault) {
61 fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
62 %(memacc_code)s;
63 }
64
65 if (fault == NoFault) {
66 %(op_wb)s;
67 }
68
69 return fault;
70 }
71}};
72
73def template Store64Execute {{
74 Fault %(class_name)s::execute(ExecContext *xc,
75 Trace::InstRecord *traceData) const
76 {
77 Addr EA;
78 Fault fault = NoFault;
79
80 %(op_decl)s;
81 %(op_rd)s;
82 %(ea_code)s;
83
84 if (fault == NoFault) {
85 %(memacc_code)s;
86 }
87
88 if (fault == NoFault) {
89 fault = writeMemAtomic(xc, traceData, Mem, EA,
90 memAccessFlags, NULL);
91 }
92
93 if (fault == NoFault) {
94 %(op_wb)s;
95 }
96
97 return fault;
98 }
99}};
100
101def template Store64InitiateAcc {{
102 Fault %(class_name)s::initiateAcc(ExecContext *xc,
103 Trace::InstRecord *traceData) const
104 {
105 Addr EA;
106 Fault fault = NoFault;
107
108 %(op_decl)s;
109 %(op_rd)s;
110 %(ea_code)s;
111
112 if (fault == NoFault) {
113 %(memacc_code)s;
114 }
115
116 if (fault == NoFault) {
117 fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
118 NULL);
119 }
120
121 return fault;
122 }
123}};
124
125def template StoreEx64Execute {{
126 Fault %(class_name)s::execute(ExecContext *xc,
127 Trace::InstRecord *traceData) const
128 {
129 Addr EA;
130 Fault fault = NoFault;
131
132 %(op_decl)s;
133 %(op_rd)s;
134 %(ea_code)s;
135
136 if (fault == NoFault) {
137 %(memacc_code)s;
138 }
139
140 uint64_t writeResult = 0;
141 if (fault == NoFault) {
142 fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
143 &writeResult);
144 }
145
146 if (fault == NoFault) {
147 %(postacc_code)s;
148 }
149
150 if (fault == NoFault) {
151 %(op_wb)s;
152 }
153
154 return fault;
155 }
156}};
157
158def template StoreEx64InitiateAcc {{
159 Fault %(class_name)s::initiateAcc(ExecContext *xc,
160 Trace::InstRecord *traceData) const
161 {
162 Addr EA;
163 Fault fault = NoFault;
164
165 %(op_decl)s;
166 %(op_rd)s;
167 %(ea_code)s;
168
169 if (fault == NoFault) {
170 %(memacc_code)s;
171 }
172
173 if (fault == NoFault) {
174 fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
175 NULL);
176 }
177
178 return fault;
179 }
180}};
181
182def template Load64InitiateAcc {{
183 Fault %(class_name)s::initiateAcc(ExecContext *xc,
184 Trace::InstRecord *traceData) const
185 {
186 Addr EA;
187 Fault fault = NoFault;
188
189 %(op_src_decl)s;
190 %(op_rd)s;
191 %(ea_code)s;
192
193 if (fault == NoFault) {
194 fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
195 }
196
197 return fault;
198 }
199}};
200
201def template Load64CompleteAcc {{
202 Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
203 Trace::InstRecord *traceData) const
204 {
205 Fault fault = NoFault;
206
207 %(op_decl)s;
208 %(op_rd)s;
209
210 // ARM instructions will not have a pkt if the predicate is false
211 getMem(pkt, Mem, traceData);
212
213 if (fault == NoFault) {
214 %(memacc_code)s;
215 }
216
217 if (fault == NoFault) {
218 %(op_wb)s;
219 }
220
221 return fault;
222 }
223}};
224
225def template Store64CompleteAcc {{
226 Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
227 Trace::InstRecord *traceData) const
228 {
229 return NoFault;
230 }
231}};
232
233def template StoreEx64CompleteAcc {{
234 Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
235 Trace::InstRecord *traceData) const
236 {
237 Fault fault = NoFault;
238
239 %(op_decl)s;
240 %(op_rd)s;
241
242 uint64_t writeResult = pkt->req->getExtraData();
243 %(postacc_code)s;
244
245 if (fault == NoFault) {
246 %(op_wb)s;
247 }
248
249 return fault;
250 }
251}};
252
def template DCStore64Declare {{
    // Class declaration for DC (data-cache maintenance) ops modeled as
    // stores. NOTE(review): a bad merge had left both the old (virtual,
    // non-override) and new (override) declarations of the three access
    // methods and of annotateFault in this template; the duplicates are
    // removed here, keeping the override spellings.
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _base,
                       MiscRegIndex _dest, uint64_t _imm);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};
272
def template DCStore64Constructor {{
    // Constructor for DC-op store instructions; these are never micro-coded,
    // hence the use_uops assertion.
    %(class_name)s::%(class_name)s(ExtMachInst machInst, IntRegIndex _base,
                                   MiscRegIndex _dest, uint64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _base, _dest, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
    }
}};
283
def template DCStore64Execute {{
    // Atomic-mode execute for DC ops: issues a write of op_size bytes with a
    // NULL data pointer — the request matters, not the data payload.
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;


        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
311
def template DCStore64InitiateAcc {{
    // Timing-mode initiation for DC ops; mirrors DCStore64Execute but the
    // completion is a no-op handled elsewhere.
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
        }

        return fault;
    }
}};
334
335
336def template LoadStoreImm64Declare {{
337 class %(class_name)s : public %(base_class)s
338 {
339 public:
340
341 /// Constructor.
342 %(class_name)s(ExtMachInst machInst,
343 IntRegIndex _dest, IntRegIndex _base, int64_t _imm);
344
270 %(fa_code)s
271 }
272 };
273}};
274
275def template DCStore64Constructor {{
276 %(class_name)s::%(class_name)s(ExtMachInst machInst, IntRegIndex _base,
277 MiscRegIndex _dest, uint64_t _imm)
278 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
279 _base, _dest, _imm)
280 {
281 %(constructor)s;
282 assert(!%(use_uops)d);
283 }
284}};
285
286def template DCStore64Execute {{
287 Fault %(class_name)s::execute(ExecContext *xc,
288 Trace::InstRecord *traceData) const
289 {
290 Addr EA;
291 Fault fault = NoFault;
292
293 %(op_decl)s;
294 %(op_rd)s;
295 %(ea_code)s;
296
297
298 if (fault == NoFault) {
299 %(memacc_code)s;
300 }
301
302 if (fault == NoFault) {
303 fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
304 }
305
306 if (fault == NoFault) {
307 %(op_wb)s;
308 }
309
310 return fault;
311 }
312}};
313
314def template DCStore64InitiateAcc {{
315 Fault %(class_name)s::initiateAcc(ExecContext *xc,
316 Trace::InstRecord *traceData) const
317 {
318 Addr EA;
319 Fault fault = NoFault;
320
321 %(op_decl)s;
322 %(op_rd)s;
323 %(ea_code)s;
324
325 if (fault == NoFault) {
326 %(memacc_code)s;
327 }
328
329 if (fault == NoFault) {
330 fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
331 }
332
333 return fault;
334 }
335}};
336
337
def template LoadStoreImm64Declare {{
    // Class declaration for immediate-offset loads/stores.
    // NOTE(review): merge residue duplicated the execute/initiateAcc/
    // completeAcc and annotateFault declarations (old virtual form plus new
    // override form); deduplicated here, keeping the override form.
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, int64_t _imm);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};
355
356def template LoadStoreImmU64Declare {{
357 class %(class_name)s : public %(base_class)s
358 {
359 public:
360
361 /// Constructor.
362 %(class_name)s(ExtMachInst machInst,
363 IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
364 bool noAlloc = false, bool exclusive = false,
365 bool acrel = false);
366
355 %(fa_code)s
356 }
357 };
358}};
359
def template LoadStoreImmU64Declare {{
    // Class declaration for unscaled-immediate loads/stores with optional
    // no-allocate / exclusive / acquire-release attributes.
    // NOTE(review): deduplicated merge residue (old virtual + new override
    // declarations were both present); override form kept.
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};
377
378def template LoadStoreImmDU64Declare {{
379 class %(class_name)s : public %(base_class)s
380 {
381 public:
382
383 /// Constructor.
384 %(class_name)s(ExtMachInst machInst,
385 IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
386 int64_t _imm = 0, bool noAlloc = false, bool exclusive = false,
387 bool acrel = false);
388
379 %(fa_code)s
380 }
381 };
382}};
383
def template LoadStoreImmDU64Declare {{
    // Class declaration for paired (two destination registers) immediate
    // loads/stores with optional attribute flags.
    // NOTE(review): deduplicated merge residue (old virtual + new override
    // declarations were both present); override form kept.
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
                int64_t _imm = 0, bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};
399
400def template StoreImmDEx64Declare {{
401 /**
402 * Static instruction class for "%(mnemonic)s".
403 */
404 class %(class_name)s : public %(base_class)s
405 {
406 public:
407
408 /// Constructor.
409 %(class_name)s(ExtMachInst machInst,
410 IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
411 IntRegIndex _base, int64_t _imm = 0);
412
403 %(fa_code)s
404 }
405 };
406}};
407
def template StoreImmDEx64Declare {{
    /**
     * Static instruction class for "%(mnemonic)s".
     */
    // NOTE(review): deduplicated merge residue — both the old virtual and
    // the new override declarations of the access methods were present;
    // the override form is kept.
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
                IntRegIndex _base, int64_t _imm = 0);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;
    };
}};
418
419
420def template LoadStoreReg64Declare {{
421 class %(class_name)s : public %(base_class)s
422 {
423 public:
424
425 /// Constructor.
426 %(class_name)s(ExtMachInst machInst,
427 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
428 ArmExtendType _type, uint32_t _shiftAmt);
429
425 };
426}};
427
428
def template LoadStoreReg64Declare {{
    // Class declaration for register-offset loads/stores (extended/shifted
    // index register).
    // NOTE(review): deduplicated merge residue (old virtual + new override
    // declarations were both present); override form kept.
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
                ArmExtendType _type, uint32_t _shiftAmt);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};
440
441def template LoadStoreRegU64Declare {{
442 class %(class_name)s : public %(base_class)s
443 {
444 public:
445
446 /// Constructor.
447 %(class_name)s(ExtMachInst machInst,
448 IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
449 ArmExtendType _type, uint32_t _shiftAmt,
450 bool noAlloc = false, bool exclusive = false,
451 bool acrel = false);
452
447 %(fa_code)s
448 }
449 };
450}};
451
def template LoadStoreRegU64Declare {{
    // Class declaration for register-offset loads/stores with optional
    // no-allocate / exclusive / acquire-release attributes.
    // NOTE(review): deduplicated merge residue (old virtual + new override
    // declarations were both present); override form kept.
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
                ArmExtendType _type, uint32_t _shiftAmt,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};
463
464def template LoadStoreRaw64Declare {{
465 class %(class_name)s : public %(base_class)s
466 {
467 public:
468
469 /// Constructor.
470 %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
471 IntRegIndex _base);
472
472 %(fa_code)s
473 }
474 };
475}};
476
def template LoadStoreRaw64Declare {{
    // Class declaration for "raw" (base-register only, no offset)
    // loads/stores.
    // NOTE(review): deduplicated merge residue (old virtual + new override
    // declarations were both present); override form kept.
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       IntRegIndex _base);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};
483
484def template LoadStoreEx64Declare {{
485 class %(class_name)s : public %(base_class)s
486 {
487 public:
488
489 /// Constructor.
490 %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
491 IntRegIndex _base, IntRegIndex _result);
492
494 %(fa_code)s
495 }
496 };
497}};
498
def template LoadStoreEx64Declare {{
    // Class declaration for exclusive loads/stores carrying a result
    // register for the exclusive status.
    // NOTE(review): deduplicated merge residue (old virtual + new override
    // declarations were both present); override form kept.
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       IntRegIndex _base, IntRegIndex _result);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};
503
504def template LoadStoreLit64Declare {{
505 class %(class_name)s : public %(base_class)s
506 {
507 public:
508
509 /// Constructor.
510 %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm);
511
516 %(fa_code)s
517 }
518 };
519}};
520
def template LoadStoreLit64Declare {{
    // Class declaration for PC-literal loads (no base register, immediate
    // offset from PC).
    // NOTE(review): deduplicated merge residue (old virtual + new override
    // declarations were both present); override form kept.
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};
522
523def template LoadStoreLitU64Declare {{
524 class %(class_name)s : public %(base_class)s
525 {
526 public:
527
528 /// Constructor.
529 %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm,
530 bool noAlloc = false, bool exclusive = false,
531 bool acrel = false);
532
537 %(fa_code)s
538 }
539 };
540}};
541
def template LoadStoreLitU64Declare {{
    // Class declaration for PC-literal loads with optional attribute flags.
    // NOTE(review): deduplicated merge residue (old virtual + new override
    // declarations were both present); override form kept.
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        Fault execute(ExecContext *, Trace::InstRecord *) const override;
        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const override;
        Fault completeAcc(PacketPtr, ExecContext *,
                          Trace::InstRecord *) const override;

        void
        annotateFault(ArmFault *fault) override
        {
            %(fa_code)s
        }
    };
}};
543
def template LoadStoreImm64Constructor {{
    // Builds an immediate-offset load/store; when %(use_uops)d is set the
    // instruction splits into an access micro-op and a writeback micro-op.
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, (IntRegIndex)_base, _imm)
    {
        %(constructor)s;
#if %(use_uops)d
        // uop[0]: memory access (commit delayed until uop[1] also succeeds);
        // uop[1]: base-register writeback.
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _base, _imm);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};
562
def template LoadStoreImmU64Constructor {{
    // Builds an unscaled-immediate load/store; never micro-coded. The
    // exclusive/acquire-release attributes are recorded via setExcAcRel.
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};
575
def template LoadStoreImmDU64Constructor {{
    // Builds a paired (two destination registers) immediate load/store;
    // never micro-coded.
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
            int64_t _imm, bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _dest2, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};
588
def template StoreImmDEx64Constructor {{
    // Builds a paired store-exclusive with a status result register;
    // never micro-coded.
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
            IntRegIndex _base, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _result, _dest, _dest2, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
    }
}};
600
601
def template LoadStoreReg64Constructor {{
    // Builds a register-offset load/store; optionally micro-coded into an
    // access uop plus a writeback uop, as with the immediate form.
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
            ArmExtendType _type, uint32_t _shiftAmt)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _offset, _type, _shiftAmt)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _base, _offset,
                                   _type, _shiftAmt);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};
622
def template LoadStoreRegU64Constructor {{
    // Builds a register-offset load/store with attribute flags; never
    // micro-coded.
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
            ArmExtendType _type, uint32_t _shiftAmt,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _offset, _type, _shiftAmt)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};
636
def template LoadStoreRaw64Constructor {{
    // Builds a base-register-only ("raw") load/store.
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _dest, _base)
    {
        %(constructor)s;
    }
}};
645
def template LoadStoreEx64Constructor {{
    // Builds an exclusive load/store with a status result register.
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _result)
    {
        %(constructor)s;
    }
}};
655
def template LoadStoreLit64Constructor {{
    // Builds a PC-literal load; optionally micro-coded into an access uop
    // plus a writeback uop.
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, _imm)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _imm);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};
674
def template LoadStoreLitU64Constructor {{
    // Builds a PC-literal load with attribute flags; never micro-coded.
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, int64_t _imm,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};
560 %(fa_code)s
561 }
562 };
563}};
564
// Constructor template for immediate-offset loads/stores. When
// %(use_uops)d expands to a nonzero value the instruction is split
// into an access microop (%(acc_name)s) followed by a writeback
// microop (%(wb_decl)s).
565def template LoadStoreImm64Constructor {{
566    %(class_name)s::%(class_name)s(ExtMachInst machInst,
567            IntRegIndex _dest, IntRegIndex _base, int64_t _imm)
568        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
569                 (IntRegIndex)_dest, (IntRegIndex)_base, _imm)
570    {
571        %(constructor)s;
// %(use_uops)d is substituted to 0/1 before the preprocessor sees it,
// so the microop setup below is compiled in only for split forms.
572#if %(use_uops)d
573        assert(numMicroops >= 2);
574        uops = new StaticInstPtr[numMicroops];
575        uops[0] = new %(acc_name)s(machInst, _dest, _base, _imm);
        // The access uop may not commit until the writeback uop does.
576        uops[0]->setDelayedCommit();
577        uops[0]->setFirstMicroop();
578        uops[1] = new %(wb_decl)s;
579        uops[1]->setLastMicroop();
580#endif
581    }
582}};
583
// Constructor template for immediate-offset loads/stores carrying
// uncommon attributes (exclusive / acquire-release). Never
// micro-coded: the assert checks use_uops expands to 0.
// NOTE(review): noAlloc is accepted but not used in the visible body —
// presumably consumed by the %(constructor)s expansion; confirm.
584def template LoadStoreImmU64Constructor {{
585    %(class_name)s::%(class_name)s(ExtMachInst machInst,
586            IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
587            bool noAlloc, bool exclusive, bool acrel)
588        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
589                _dest, _base, _imm)
590    {
591        %(constructor)s;
592        assert(!%(use_uops)d);
        // Record the exclusive and acquire/release flags on the instruction.
593        setExcAcRel(exclusive, acrel);
594    }
595}};
596
// Constructor template for immediate-offset pair accesses (two
// destination registers, _dest and _dest2) carrying uncommon
// attributes (exclusive / acquire-release). Never micro-coded.
// NOTE(review): noAlloc is accepted but not used in the visible body —
// presumably consumed by the %(constructor)s expansion; confirm.
597def template LoadStoreImmDU64Constructor {{
598    %(class_name)s::%(class_name)s(ExtMachInst machInst,
599            IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
600            int64_t _imm, bool noAlloc, bool exclusive, bool acrel)
601        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
602                _dest, _dest2, _base, _imm)
603    {
604        %(constructor)s;
605        assert(!%(use_uops)d);
        // Record the exclusive and acquire/release flags on the instruction.
606        setExcAcRel(exclusive, acrel);
607    }
608}};
609
// Constructor template for exclusive pair stores: a result register
// plus two data registers (_dest, _dest2), a base, and an immediate
// offset. Never micro-coded (assert below).
// NOTE(review): _result is presumably the store-exclusive status
// register — confirm against the instructions instantiating this.
610def template StoreImmDEx64Constructor {{
611    %(class_name)s::%(class_name)s(ExtMachInst machInst,
612            IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
613            IntRegIndex _base, int64_t _imm)
614        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
615                _result, _dest, _dest2, _base, _imm)
616    {
617        %(constructor)s;
618        assert(!%(use_uops)d);
619    }
620}};
621
622
// Constructor template for register-offset loads/stores: the address
// is base + extend(offset, _type) << _shiftAmt. When %(use_uops)d
// expands to a nonzero value the instruction is split into an access
// microop (%(acc_name)s) followed by a writeback microop (%(wb_decl)s).
623def template LoadStoreReg64Constructor {{
624    %(class_name)s::%(class_name)s(ExtMachInst machInst,
625            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
626            ArmExtendType _type, uint32_t _shiftAmt)
627        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
628                _dest, _base, _offset, _type, _shiftAmt)
629    {
630        %(constructor)s;
// %(use_uops)d is substituted to 0/1 before the preprocessor sees it,
// so the microop setup below is compiled in only for split forms.
631#if %(use_uops)d
632        assert(numMicroops >= 2);
633        uops = new StaticInstPtr[numMicroops];
634        uops[0] = new %(acc_name)s(machInst, _dest, _base, _offset,
635                                   _type, _shiftAmt);
        // The access uop may not commit until the writeback uop does.
636        uops[0]->setDelayedCommit();
637        uops[0]->setFirstMicroop();
638        uops[1] = new %(wb_decl)s;
639        uops[1]->setLastMicroop();
640#endif
641    }
642}};
643
// Constructor template for register-offset loads/stores that carry
// uncommon attributes (exclusive / acquire-release). The address is
// base + extend(offset, _type) << _shiftAmt. These forms are never
// micro-coded: the assert below checks use_uops expands to 0.
// NOTE(review): noAlloc is accepted but not used in the visible body —
// presumably consumed by the %(constructor)s expansion; confirm.
644def template LoadStoreRegU64Constructor {{
645    %(class_name)s::%(class_name)s(ExtMachInst machInst,
646            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
647            ArmExtendType _type, uint32_t _shiftAmt,
648            bool noAlloc, bool exclusive, bool acrel)
649        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
650                _dest, _base, _offset, _type, _shiftAmt)
651    {
652        %(constructor)s;
653        assert(!%(use_uops)d);
        // Record the exclusive and acquire/release flags on the instruction.
654        setExcAcRel(exclusive, acrel);
655    }
656}};
657
// Constructor template for the simplest addressing form: a destination
// register and a base register only (no offset, no microop expansion,
// no extra flags).
658def template LoadStoreRaw64Constructor {{
659    %(class_name)s::%(class_name)s(ExtMachInst machInst,
660            IntRegIndex _dest, IntRegIndex _base)
661        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _dest, _base)
662    {
663        %(constructor)s;
664    }
665}};
666
// Constructor template for exclusive accesses that take an extra
// register operand (_result) in addition to dest and base.
// NOTE(review): _result is presumably the store-exclusive status
// register — confirm against the instructions instantiating this.
667def template LoadStoreEx64Constructor {{
668    %(class_name)s::%(class_name)s(ExtMachInst machInst,
669            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
670        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
671                _dest, _base, _result)
672    {
673        %(constructor)s;
674    }
675}};
676
// Constructor template for literal (PC-relative) loads. When
// %(use_uops)d expands to a nonzero value the instruction is split
// into an access microop (%(acc_name)s) followed by a writeback
// microop (%(wb_decl)s).
677def template LoadStoreLit64Constructor {{
678    %(class_name)s::%(class_name)s(ExtMachInst machInst,
679            IntRegIndex _dest, int64_t _imm)
680        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
681                (IntRegIndex)_dest, _imm)
682    {
683        %(constructor)s;
// %(use_uops)d is substituted to 0/1 before the preprocessor sees it,
// so the microop setup below is compiled in only for split forms.
684#if %(use_uops)d
685        assert(numMicroops >= 2);
686        uops = new StaticInstPtr[numMicroops];
687        uops[0] = new %(acc_name)s(machInst, _dest, _imm);
        // The access uop may not commit until the writeback uop does.
688        uops[0]->setDelayedCommit();
689        uops[0]->setFirstMicroop();
690        uops[1] = new %(wb_decl)s;
691        uops[1]->setLastMicroop();
692#endif
693    }
694}};
695
// Constructor template for literal (PC-relative) loads carrying
// uncommon attributes (exclusive / acquire-release). Never
// micro-coded: the assert checks use_uops expands to 0.
// NOTE(review): noAlloc is accepted but not used in the visible body —
// presumably consumed by the %(constructor)s expansion; confirm.
696def template LoadStoreLitU64Constructor {{
697    %(class_name)s::%(class_name)s(ExtMachInst machInst,
698            IntRegIndex _dest, int64_t _imm,
699            bool noAlloc, bool exclusive, bool acrel)
700        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
701                (IntRegIndex)_dest, _imm)
702    {
703        %(constructor)s;
704        assert(!%(use_uops)d);
        // Record the exclusive and acquire/release flags on the instruction.
705        setExcAcRel(exclusive, acrel);
706    }
707}};