// -*- mode:c++ -*-

// Copyright (c) 2011-2014 ARM Limited
// All rights reserved
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder.  You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Gabe Black

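// Check spliced into the effective-address code of SP-relative accesses: if
// SP alignment checking is enabled and the stack pointer is not 16-byte
// aligned (any of bits 3:0 set), the access takes an SP alignment fault.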
let {{
    SPAlignmentCheckCode = '''
        if (baseIsSP && bits(XBase, 3, 0) &&
            SPAlignmentCheckEnabled(xc->tcBase())) {
            return std::make_shared<SPAlignmentFault>();
        }
    '''
}};

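// Atomic-mode execute() for 64-bit loads: compute the effective address,
// read memory atomically, run the memory-access code, then write back.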
def template Load64Execute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

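// Atomic-mode execute() for 64-bit stores: compute the effective address,
// run the memory-access code to build the store data, then write atomically
// and write back any register results.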
def template Store64Execute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA,
                                   memAccessFlags, NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

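// Timing-mode initiateAcc() for 64-bit stores: sends the write request; the
// matching Store64CompleteAcc below has nothing left to do.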
def template Store64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                                   NULL);
        }

        return fault;
    }
}};

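// Atomic-mode execute() for exclusive stores: like Store64Execute, but also
// captures the write result so %(postacc_code)s can record whether the
// exclusive store succeeded.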
def template StoreEx64Execute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        uint64_t writeResult = 0;
        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                                   &writeResult);
        }

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

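// Timing-mode initiateAcc() for exclusive stores: issues the write; the
// store-exclusive result is collected later in StoreEx64CompleteAcc.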
def template StoreEx64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                                   NULL);
        }

        return fault;
    }
}};

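// Timing-mode initiateAcc() for 64-bit loads: only computes the effective
// address and issues the read; the returned data is consumed in
// Load64CompleteAcc.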
def template Load64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
        }

        return fault;
    }
}};

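// Timing-mode completeAcc() for 64-bit loads: copies the data out of the
// response packet, runs the memory-access code, and writes back the result.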
def template Load64CompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        // ARM instructions will not have a pkt if the predicate is false
        getMem(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

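// Timing-mode completeAcc() for plain stores: nothing left to do once the
// write has been sent from initiateAcc().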
def template Store64CompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

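// Timing-mode completeAcc() for exclusive stores: retrieves the pass/fail
// result from the request's extra data and runs the post-access code before
// writing back.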
def template StoreEx64CompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        uint64_t writeResult = pkt->req->getExtraData();
        %(postacc_code)s;

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

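// Templates for data-cache maintenance "stores" (ops in the style of DC ZVA):
// the write is issued with a null data pointer and a length of op_size bytes.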
def template DCStore64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _base,
                       IntRegIndex _dest, uint64_t _imm);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template DCStore64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst, IntRegIndex _base,
                                   IntRegIndex _dest, uint64_t _imm)
         : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                 (IntRegIndex)_base, _dest, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
    }
}};

def template DCStore64Execute {{
    Fault %(class_name)s::execute(ExecContext *xc,
                                  Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template DCStore64InitiateAcc {{
    Fault %(class_name)s::initiateAcc(ExecContext *xc,
                                      Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = xc->writeMem(NULL, op_size, EA, memAccessFlags, NULL);
        }

        return fault;
    }
}};

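// Class declaration templates for the 64-bit load/store addressing forms:
// immediate offset, paired immediate, register offset, raw, exclusive, and
// PC-relative literal. The "U" variants additionally take noAlloc, exclusive,
// and acrel flags so one template covers non-temporal, exclusive, and
// acquire/release encodings.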
def template LoadStoreImm64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, int64_t _imm);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreImmU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreImmDU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
                int64_t _imm = 0, bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template StoreImmDEx64Declare {{
    /**
     * Static instruction class for "%(mnemonic)s".
     */
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
                IntRegIndex _base, int64_t _imm = 0);

        %(BasicExecDeclare)s

        %(InitiateAccDeclare)s

        %(CompleteAccDeclare)s
    };
}};


def template LoadStoreReg64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
                ArmExtendType _type, uint32_t _shiftAmt);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreRegU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst,
                IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
                ArmExtendType _type, uint32_t _shiftAmt,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreRaw64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       IntRegIndex _base);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreEx64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest,
                       IntRegIndex _base, IntRegIndex _result);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreLit64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

def template LoadStoreLitU64Declare {{
    class %(class_name)s : public %(base_class)s
    {
      public:

        /// Constructor.
        %(class_name)s(ExtMachInst machInst, IntRegIndex _dest, int64_t _imm,
                bool noAlloc = false, bool exclusive = false,
                bool acrel = false);

        %(BasicExecDeclare)s
        %(InitiateAccDeclare)s
        %(CompleteAccDeclare)s

        virtual void
        annotateFault(ArmFault *fault) {
            %(fa_code)s
        }
    };
}};

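// Constructor templates matching the declarations above. Where %(use_uops)d
// is set, the instruction is split into two micro-ops: the memory access
// itself followed by a base-register writeback micro-op (%(wb_decl)s).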
def template LoadStoreImm64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, int64_t _imm)
         : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                 (IntRegIndex)_dest, (IntRegIndex)_base, _imm)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _base, _imm);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};

def template LoadStoreImmU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, int64_t _imm,
            bool noAlloc, bool exclusive, bool acrel)
         : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                 _dest, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};

def template LoadStoreImmDU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _dest2, IntRegIndex _base,
            int64_t _imm, bool noAlloc, bool exclusive, bool acrel)
         : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                 _dest, _dest2, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};

def template StoreImmDEx64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _result, IntRegIndex _dest, IntRegIndex _dest2,
            IntRegIndex _base, int64_t _imm)
         : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                 _result, _dest, _dest2, _base, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
    }
}};


def template LoadStoreReg64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
            ArmExtendType _type, uint32_t _shiftAmt)
         : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                 _dest, _base, _offset, _type, _shiftAmt)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _base, _offset,
                                   _type, _shiftAmt);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};

def template LoadStoreRegU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _offset,
            ArmExtendType _type, uint32_t _shiftAmt,
            bool noAlloc, bool exclusive, bool acrel)
         : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                 _dest, _base, _offset, _type, _shiftAmt)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};

def template LoadStoreRaw64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base)
         : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s, _dest, _base)
    {
        %(constructor)s;
    }
}};

def template LoadStoreEx64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, IntRegIndex _base, IntRegIndex _result)
         : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                          _dest, _base, _result)
    {
        %(constructor)s;
    }
}};

def template LoadStoreLit64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, int64_t _imm)
         : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                 (IntRegIndex)_dest, _imm)
    {
        %(constructor)s;
#if %(use_uops)d
        assert(numMicroops >= 2);
        uops = new StaticInstPtr[numMicroops];
        uops[0] = new %(acc_name)s(machInst, _dest, _imm);
        uops[0]->setDelayedCommit();
        uops[0]->setFirstMicroop();
        uops[1] = new %(wb_decl)s;
        uops[1]->setLastMicroop();
#endif
    }
}};

def template LoadStoreLitU64Constructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst,
            IntRegIndex _dest, int64_t _imm,
            bool noAlloc, bool exclusive, bool acrel)
         : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s,
                 (IntRegIndex)_dest, _imm)
    {
        %(constructor)s;
        assert(!%(use_uops)d);
        setExcAcRel(exclusive, acrel);
    }
}};