// -*- mode:c++ -*-

// Copyright (c) 2011-2014 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder. You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Gabe Black

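// Alignment check shared by the load/store templates below: when the
// stack pointer is the base register, it must be 16 byte aligned (low
// four address bits clear) if SP alignment checking is enabled;
// otherwise the access takes an SP alignment fault.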
let {{
    SPAlignmentCheckCode = '''
        if (baseIsSP && bits(XBase, 3, 0) &&
            SPAlignmentCheckEnabled(xc->tcBase())) {
            return std::make_shared<SPAlignmentFault>();
        }
    '''
}};

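// Loads, atomic mode: the entire access happens inside execute(), with
// readMemAtomic() fetching the data before the writeback.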
def template Load64Execute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

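// Stores, atomic mode: the memacc code produces the store data, which
// is then written out in one step with writeMemAtomic().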
def template Store64Execute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA,
                                   memAccessFlags, NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

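// Stores, timing mode: initiateAcc() sends the write on its way with
// writeMemTiming(); for a plain store there is nothing left to do when
// the response comes back (see Store64CompleteAcc below).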
def template Store64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                                   NULL);
        }

        return fault;
    }
}};

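// Store-exclusive, atomic mode: writeMemAtomic() reports through
// writeResult whether the exclusive store succeeded, and the postacc
// code copies that flag into the result register.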
def template StoreEx64Execute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        uint64_t writeResult = 0;
        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                                   &writeResult);
        }

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

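// Store-exclusive, timing mode: the write result is not known until the
// response arrives, so only the request is issued here; the postacc
// work is done in StoreEx64CompleteAcc below.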
def template StoreEx64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                                   NULL);
        }

        return fault;
    }
}};

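// Loads, timing mode: initiateAcc() computes the effective address and
// issues the read with initiateMemRead(); the data is consumed later
// in completeAcc().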
def template Load64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
        }

        return fault;
    }
}};

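// Second half of a timing mode load: getMem() pulls the loaded value
// out of the response packet before the writeback.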
def template Load64CompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        // ARM instructions will not have a pkt if the predicate is false
        getMem(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

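// Plain stores have no work left once the response arrives.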
def template Store64CompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

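// Store-exclusive completion: the success flag comes back in the
// request's extra data, and the postacc code writes it to the result
// register.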
def template StoreEx64CompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        uint64_t writeResult = pkt->req->getExtraData();
        %(postacc_code)s;

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

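// Templates for data cache maintenance operations (such as DC ZVA)
// that are modelled as stores: the access covers op_size bytes but
// carries no data, so writeMem() is passed a NULL data pointer.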
def template DCStore64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _base,
                IntRegIndex _dest, uint64_t _imm);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template DCStore64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst, IntRegIndex _base,
            IntRegIndex _dest, uint64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_base, _dest, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
    }
}};

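// Atomic mode execute for the DC store: note the direct xc->writeMem()
// call with NULL for the source data.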
def template DCStore64Execute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

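// Timing mode version of the same operation: issue the write and
// return.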
def template DCStore64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
        }

        return fault;
    }
}};

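// Declaration templates for the 64-bit load/store instruction classes.
// The variants differ in their operand lists: immediate offset,
// register offset with an extend/shift, PC-relative literal, raw and
// exclusive forms, plus "U" variants that also take no-allocate,
// exclusive, and acquire/release flags.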
def template LoadStoreImm64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, int64_t _imm);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreImmU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreImmDU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
                int64_t _imm = 0, bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template StoreImmDEx64Declare {{
    /**
     * Static instruction class for "%(mnemonic)s".
     */
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
                IntRegIndex _base, int64_t _imm = 0);

        %(BasicExecDeclare)s

        %(InitiateAccDeclare)s

        %(CompleteAccDeclare)s
    };
}};

def template LoadStoreReg64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
                ArmExtendType _type, uint32_t _shiftAmt);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreRegU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
                ArmExtendType _type, uint32_t _shiftAmt,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreRaw64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                IntRegIndex _base);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreEx64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                IntRegIndex _base, IntRegIndex _result);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreLit64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreLitU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

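// Constructor templates. Where %(use_uops)d is set (writeback
// addressing modes), the instruction is split into an access microop
// and a writeback microop; the other variants run as a single
// instruction, and the "U" forms record their exclusive and
// acquire/release flags with setExcAcRel().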
def template LoadStoreImm64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, (IntRegIndex)_base, _imm)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _base, _imm);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};

def template LoadStoreImmU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};

def template LoadStoreImmDU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
            int64_t _imm, bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _dest2, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};

def template StoreImmDEx64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
            IntRegIndex _base, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _result, _dest, _dest2, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
    }
}};

def template LoadStoreReg64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
            ArmExtendType _type, uint32_t _shiftAmt)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _offset, _type, _shiftAmt)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _base, _offset,
                                   _type, _shiftAmt);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};

def template LoadStoreRegU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
            ArmExtendType _type, uint32_t _shiftAmt,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _offset, _type, _shiftAmt)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};

def template LoadStoreRaw64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _dest, _base)
    {
        %(constructor)s;
    }
}};

def template LoadStoreEx64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         _dest, _base, _result)
    {
        %(constructor)s;
    }
}};

def template LoadStoreLit64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, int64_t _imm)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, _imm)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _imm);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};

def template LoadStoreLitU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, int64_t _imm,
            bool noAlloc, bool exclusive, bool acrel)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                         (IntRegIndex)_dest, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};