amo.isa revision 11965:41e942451f59
1// -*- mode:c++ -*-
2
3// Copyright (c) 2015 Riscv Developers
4// Copyright (c) 2016 The University of Virginia
5// All rights reserved.
6//
7// Redistribution and use in source and binary forms, with or without
8// modification, are permitted provided that the following conditions are
9// met: redistributions of source code must retain the above copyright
10// notice, this list of conditions and the following disclaimer;
11// redistributions in binary form must reproduce the above copyright
12// notice, this list of conditions and the following disclaimer in the
13// documentation and/or other materials provided with the distribution;
14// neither the name of the copyright holders nor the names of its
15// contributors may be used to endorse or promote products derived from
16// this software without specific prior written permission.
17//
18// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
21// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
22// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
23// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
24// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29//
30// Authors: Alec Roelke
31
32////////////////////////////////////////////////////////////////////
33//
34// Atomic memory operation instructions
35//
output header {{
    // Base class for load-reserved (LR) instructions; concrete classes are
    // generated from the LRSC templates below.
    class LoadReserved : public RiscvStaticInst
    {
      protected:
        // Memory request flags.  See mem/request.hh.
        Request::Flags memAccessFlags;

        LoadReserved(const char *mnem, ExtMachInst _machInst,
            OpClass __opClass)
                : RiscvStaticInst(mnem, _machInst, __opClass)
        {}

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const;
    };
50
    // Base class for store-conditional (SC) instructions; concrete classes
    // are generated from the LRSC templates below.
    class StoreCond : public RiscvStaticInst
    {
      protected:
        // Memory request flags.  See mem/request.hh.
        Request::Flags memAccessFlags;

        StoreCond(const char* mnem, ExtMachInst _machInst, OpClass __opClass)
                : RiscvStaticInst(mnem, _machInst, __opClass)
        {}

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const;
    };
63
    // Macroop base class for AMO instructions.  Each AMO is decoded into
    // one of these and expands into two microops (see the constructor
    // template below).
    class AtomicMemOp : public RiscvMacroInst
    {
    protected:
        /// Constructor
        // Each AtomicMemOp has a load and a store phase
        AtomicMemOp(const char *mnem, ExtMachInst _machInst, OpClass __opClass)
                : RiscvMacroInst(mnem, _machInst, __opClass)
        {}

        std::string generateDisassembly(Addr pc,
            const SymbolTable *symtab) const;
    };
76
    // Microop base class shared by the load and store halves of an AMO.
    class AtomicMemOpMicro : public RiscvMicroInst
    {
    protected:
        /// Memory request flags.  See mem/request.hh.
        Request::Flags memAccessFlags;

        /// Constructor
        AtomicMemOpMicro(const char *mnem, ExtMachInst _machInst,
            OpClass __opClass)
                : RiscvMicroInst(mnem, _machInst, __opClass)
        {}

        std::string generateDisassembly(Addr pc,
            const SymbolTable *symtab) const;
    };
}};
93
94output decoder {{
95    std::string LoadReserved::generateDisassembly(Addr pc,
96        const SymbolTable *symtab) const
97    {
98        std::stringstream ss;
99        ss << mnemonic << ' ' << regName(_destRegIdx[0]) << ", ("
100                << regName(_srcRegIdx[0]) << ')';
101        return ss.str();
102    }
103
104    std::string StoreCond::generateDisassembly(Addr pc,
105        const SymbolTable *symtab) const
106    {
107        std::stringstream ss;
108        ss << mnemonic << ' ' << regName(_destRegIdx[0]) << ", "
109                << regName(_srcRegIdx[1]) << ", ("
110                << regName(_srcRegIdx[0]) << ')';
111        return ss.str();
112    }
113
114    std::string AtomicMemOp::generateDisassembly(Addr pc,
115        const SymbolTable *symtab) const
116    {
117        std::stringstream ss;
118        ss << mnemonic << ' ' << regName(_destRegIdx[0]) << ", "
119                << regName(_srcRegIdx[1]) << ", ("
120                << regName(_srcRegIdx[0]) << ')';
121        return ss.str();
122    }
123
124    std::string AtomicMemOpMicro::generateDisassembly(Addr pc,
125        const SymbolTable *symtab) const
126    {
127        std::stringstream ss;
128        ss << csprintf("0x%08x", machInst) << ' ' << mnemonic;
129        return ss.str();
130    }
131}};
132
def template LRSCDeclare {{
    // Declares an LR or SC instruction class with the standard
    // execute / eaComp / initiateAcc / completeAcc method set.
    class %(class_name)s : public %(base_class)s
    {
      public:
        %(class_name)s(ExtMachInst machInst);

        %(BasicExecDeclare)s

        %(EACompDeclare)s

        %(InitiateAccDeclare)s

        %(CompleteAccDeclare)s
    };
}};
148
def template AtomicMemOpDeclare {{
    /**
     * Static instruction class for an AtomicMemOp operation
     */
    class %(class_name)s : public %(base_class)s
    {
      public:
        // Constructor
        %(class_name)s(ExtMachInst machInst);

    protected:

        // Load half of the AMO: reads memory and produces the register
        // result.
        class %(class_name)sLoad : public %(base_class)sMicro
        {
          public:
            // Constructor
            %(class_name)sLoad(ExtMachInst machInst, %(class_name)s *_p);

            %(BasicExecDeclare)s

            %(EACompDeclare)s

            %(InitiateAccDeclare)s

            %(CompleteAccDeclare)s
        };

        // Store half of the AMO: writes the combined value back to memory.
        class %(class_name)sStore : public %(base_class)sMicro
        {
          public:
            // Constructor
            %(class_name)sStore(ExtMachInst machInst, %(class_name)s *_p);

            %(BasicExecDeclare)s

            %(EACompDeclare)s

            %(InitiateAccDeclare)s

            %(CompleteAccDeclare)s
        };
    };
}};
192
def template LRSCConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst):
        %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;
        // AQ/RL are the encoded acquire/release bits; map each onto the
        // corresponding memory request flag.
        if (AQ)
            memAccessFlags = memAccessFlags | Request::ACQUIRE;
        if (RL)
            memAccessFlags = memAccessFlags | Request::RELEASE;
    }
}};
204
def template AtomicMemOpMacroConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst)
            : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;
        // An AMO expands into exactly two microops: the load half followed
        // by the store half.
        microops = {new %(class_name)sLoad(machInst, this),
            new %(class_name)sStore(machInst, this)};
    }
}};
214
def template AtomicMemOpLoadConstructor {{
    // Note: _p (the parent macroop) is accepted but currently unused.
    %(class_name)s::%(class_name)sLoad::%(class_name)sLoad(
        ExtMachInst machInst, %(class_name)s *_p)
            : %(base_class)s("%(mnemonic)s[l]", machInst, %(op_class)s)
    {
        %(constructor)s;
        flags[IsFirstMicroop] = true;
        flags[IsDelayedCommit] = true;
        // Only the load half of an AMO honors the acquire bit.
        // NOTE(review): this assigns rather than ORs into memAccessFlags,
        // unlike LRSCConstructor — confirm %(constructor)s leaves no flags
        // that would be clobbered here.
        if (AQ)
            memAccessFlags = Request::ACQUIRE;
    }
}};
227
def template AtomicMemOpStoreConstructor {{
    // Note: _p (the parent macroop) is accepted but currently unused.
    %(class_name)s::%(class_name)sStore::%(class_name)sStore(
        ExtMachInst machInst, %(class_name)s *_p)
            : %(base_class)s("%(mnemonic)s[s]", machInst, %(op_class)s)
    {
        %(constructor)s;
        flags[IsLastMicroop] = true;
        flags[IsNonSpeculative] = true;
        // Only the store half of an AMO honors the release bit.
        if (RL)
            memAccessFlags = Request::RELEASE;
    }
}};
240
def template AtomicMemOpMacroDecode {{
    // Decode just instantiates the macroop; it builds its microops in the
    // constructor.
    return new %(class_name)s(machInst);
}};
244
def template LoadReservedExecute {{
    // Atomic-mode execute for LR: compute the effective address, read
    // memory with the instruction's request flags, then write back.
    Fault
    %(class_name)s::execute(CPU_EXEC_CONTEXT *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
269
def template StoreCondExecute {{
    // Atomic-mode execute for SC: perform the conditional write; the
    // success flag comes back through 'result' and is consumed by
    // postacc_code.
    Fault %(class_name)s::execute(CPU_EXEC_CONTEXT *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        uint64_t result;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                &result);
            // RISC-V has the opposite convention gem5 has for success flags,
            // so we invert the result here.
            result = !result;
        }

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
305
def template AtomicMemOpLoadExecute {{
    // Atomic-mode execute for the load half of an AMO: read memory, then
    // run the format's load code (%(code)s) and write back the result.
    Fault %(class_name)s::%(class_name)sLoad::execute(CPU_EXEC_CONTEXT *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
        }

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
332
def template AtomicMemOpStoreExecute {{
    // Atomic-mode execute for the store half of an AMO: run the format's
    // store code (%(code)s) to compute the value, then write it to memory.
    Fault %(class_name)s::%(class_name)sStore::execute(CPU_EXEC_CONTEXT *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                nullptr);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
360
def template LRSCEACompExecute {{
    // Effective-address computation only: evaluate ea_code and publish the
    // address to the execution context; no memory access happens here.
    Fault
    %(class_name)s::eaComp(CPU_EXEC_CONTEXT *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(op_wb)s;
            xc->setEA(EA);
        }

        return fault;
    }
}};
381
def template AtomicMemOpLoadEACompExecute {{
    // Effective-address computation for the load microop; body is
    // identical to LRSCEACompExecute but scoped to the nested class.
    Fault %(class_name)s::%(class_name)sLoad::eaComp(CPU_EXEC_CONTEXT *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(op_wb)s;
            xc->setEA(EA);
        }

        return fault;
    }
}};
401
def template AtomicMemOpStoreEACompExecute {{
    // Effective-address computation for the store microop; body is
    // identical to LRSCEACompExecute but scoped to the nested class.
    Fault %(class_name)s::%(class_name)sStore::eaComp(CPU_EXEC_CONTEXT *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(op_wb)s;
            xc->setEA(EA);
        }

        return fault;
    }
}};
421
def template LoadReservedInitiateAcc {{
    // Timing-mode first half of LR: kick off the memory read; the result
    // is consumed later in completeAcc.
    Fault
    %(class_name)s::initiateAcc(CPU_EXEC_CONTEXT *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
        }

        return fault;
    }
}};
441
def template StoreCondInitiateAcc {{
    // Timing-mode first half of SC: issue the conditional write; the
    // success flag is read back in completeAcc.
    Fault
    %(class_name)s::initiateAcc(CPU_EXEC_CONTEXT *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA,
                memAccessFlags, nullptr);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
470
def template AtomicMemOpLoadInitiateAcc {{
    // Timing-mode first half of the AMO load microop: start the read.
    Fault %(class_name)s::%(class_name)sLoad::initiateAcc(CPU_EXEC_CONTEXT *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
        }

        return fault;
    }
}};
489
def template AtomicMemOpStoreInitiateAcc {{
    // Timing-mode first half of the AMO store microop: compute the value
    // with the format's store code, then issue the write.
    Fault %(class_name)s::%(class_name)sStore::initiateAcc(
        CPU_EXEC_CONTEXT *xc, Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                nullptr);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
517
def template LoadReservedCompleteAcc {{
    // Timing-mode second half of LR: unpack the returned data from the
    // packet and write back the destination register.
    Fault
    %(class_name)s::completeAcc(PacketPtr pkt, CPU_EXEC_CONTEXT *xc,
        Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        getMem(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
541
542def template StoreCondCompleteAcc {{
543    Fault %(class_name)s::completeAcc(Packet *pkt, CPU_EXEC_CONTEXT *xc,
544        Trace::InstRecord *traceData) const
545    {
546        Fault fault = NoFault;
547
548        %(op_dest_decl)s;
549
550        // RISC-V has the opposite convention gem5 has for success flags,
551        // so we invert the result here.
552        uint64_t result = !pkt->req->getExtraData();
553
554        if (fault == NoFault) {
555            %(postacc_code)s;
556        }
557
558        if (fault == NoFault) {
559            %(op_wb)s;
560        }
561
562        return fault;
563    }
564}};
565
def template AtomicMemOpLoadCompleteAcc {{
    // Timing-mode second half of the AMO load microop: unpack the read
    // data and run the format's load code to produce the register result.
    Fault %(class_name)s::%(class_name)sLoad::completeAcc(PacketPtr pkt,
        CPU_EXEC_CONTEXT *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        getMem(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
588
def template AtomicMemOpStoreCompleteAcc {{
    // Nothing to do once the store completes; no writeback is required
    // for the store half of an AMO.
    Fault %(class_name)s::%(class_name)sStore::completeAcc(PacketPtr pkt,
        CPU_EXEC_CONTEXT *xc, Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};
596
597def format LoadReserved(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
598        mem_flags=[], inst_flags=[]) {{
599    mem_flags = makeList(mem_flags)
600    inst_flags = makeList(inst_flags)
601    iop = InstObjParams(name, Name, 'LoadReserved',
602        {'ea_code': ea_code, 'memacc_code': memacc_code,
603        'postacc_code': postacc_code}, inst_flags)
604    iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
605        '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'
606
607    header_output = LRSCDeclare.subst(iop)
608    decoder_output = LRSCConstructor.subst(iop)
609    decode_block = BasicDecode.subst(iop)
610    exec_output = LoadReservedExecute.subst(iop) \
611        + LRSCEACompExecute.subst(iop) \
612        + LoadReservedInitiateAcc.subst(iop) \
613        + LoadReservedCompleteAcc.subst(iop)
614}};
615
616def format StoreCond(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
617        mem_flags=[], inst_flags=[]) {{
618    mem_flags = makeList(mem_flags)
619    inst_flags = makeList(inst_flags)
620    iop = InstObjParams(name, Name, 'StoreCond',
621        {'ea_code': ea_code, 'memacc_code': memacc_code,
622        'postacc_code': postacc_code}, inst_flags)
623    iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
624        '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'
625
626    header_output = LRSCDeclare.subst(iop)
627    decoder_output = LRSCConstructor.subst(iop)
628    decode_block = BasicDecode.subst(iop)
629    exec_output = StoreCondExecute.subst(iop) \
630        + LRSCEACompExecute.subst(iop) \
631        + StoreCondInitiateAcc.subst(iop) \
632        + StoreCondCompleteAcc.subst(iop)
633}};
634
635def format AtomicMemOp(load_code, store_code, ea_code, load_flags=[],
636        store_flags=[], inst_flags=[]) {{
637    macro_iop = InstObjParams(name, Name, 'AtomicMemOp', ea_code, inst_flags)
638    header_output = AtomicMemOpDeclare.subst(macro_iop)
639    decoder_output = AtomicMemOpMacroConstructor.subst(macro_iop)
640    decode_block = AtomicMemOpMacroDecode.subst(macro_iop)
641    exec_output = ''
642
643    load_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsLoad"]
644    load_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
645        {'ea_code': ea_code, 'code': load_code}, load_inst_flags)
646    decoder_output += AtomicMemOpLoadConstructor.subst(load_iop)
647    exec_output += AtomicMemOpLoadExecute.subst(load_iop) \
648        + AtomicMemOpLoadEACompExecute.subst(load_iop) \
649        + AtomicMemOpLoadInitiateAcc.subst(load_iop) \
650        + AtomicMemOpLoadCompleteAcc.subst(load_iop)
651
652    store_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsStore"]
653    store_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
654        {'ea_code': ea_code, 'code': store_code}, store_inst_flags)
655    decoder_output += AtomicMemOpStoreConstructor.subst(store_iop)
656    exec_output += AtomicMemOpStoreExecute.subst(store_iop) \
657        + AtomicMemOpStoreEACompExecute.subst(store_iop) \
658        + AtomicMemOpStoreInitiateAcc.subst(store_iop) \
659        + AtomicMemOpStoreCompleteAcc.subst(store_iop)
660}};
661