// amo.isa, revision 12236:126ac9da6050
// -*- mode:c++ -*-

// Copyright (c) 2015 Riscv Developers
// Copyright (c) 2016 The University of Virginia
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Alec Roelke

////////////////////////////////////////////////////////////////////
//
// Atomic memory operation instructions
//
output header {{
    /**
     * Base class for load-reserved (LR) instructions.  memAccessFlags
     * carries the acquire/release request flags filled in by the
     * generated constructor (LRSCConstructor template).
     */
    class LoadReserved : public RiscvStaticInst
    {
      protected:
        Request::Flags memAccessFlags;

        LoadReserved(const char *mnem, ExtMachInst _machInst,
            OpClass __opClass)
                : RiscvStaticInst(mnem, _machInst, __opClass)
        {}

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const;
    };

    /**
     * Base class for store-conditional (SC) instructions.
     */
    class StoreCond : public RiscvStaticInst
    {
      protected:
        Request::Flags memAccessFlags;

        StoreCond(const char* mnem, ExtMachInst _machInst, OpClass __opClass)
                : RiscvStaticInst(mnem, _machInst, __opClass)
        {}

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const;
    };

    /**
     * Macroop base class for atomic memory operations (AMOs).
     */
    class AtomicMemOp : public RiscvMacroInst
    {
    protected:
        /// Constructor
        // Each AtomicMemOp has a load and a store phase
        AtomicMemOp(const char *mnem, ExtMachInst _machInst, OpClass __opClass)
                : RiscvMacroInst(mnem, _machInst, __opClass)
        {}

        std::string generateDisassembly(Addr pc,
            const SymbolTable *symtab) const;
    };

    /**
     * Microop base class for the load/store phases of an AtomicMemOp.
     */
    class AtomicMemOpMicro : public RiscvMicroInst
    {
    protected:
        /// Memory request flags.  See mem/request.hh.
        Request::Flags memAccessFlags;

        /// Constructor
        AtomicMemOpMicro(const char *mnem, ExtMachInst _machInst,
            OpClass __opClass)
                : RiscvMicroInst(mnem, _machInst, __opClass)
        {}

        std::string generateDisassembly(Addr pc,
            const SymbolTable *symtab) const;
    };
}};
output decoder {{
    std::string LoadReserved::generateDisassembly(Addr pc,
        const SymbolTable *symtab) const
    {
        // Rendered as "<mnemonic> rd, (rs1)".
        std::stringstream out;
        out << mnemonic << ' ';
        out << registerName(_destRegIdx[0]);
        out << ", (" << registerName(_srcRegIdx[0]) << ')';
        return out.str();
    }

    std::string StoreCond::generateDisassembly(Addr pc,
        const SymbolTable *symtab) const
    {
        // Rendered as "<mnemonic> rd, rs2, (rs1)".
        std::stringstream out;
        out << mnemonic << ' ';
        out << registerName(_destRegIdx[0]) << ", ";
        out << registerName(_srcRegIdx[1]);
        out << ", (" << registerName(_srcRegIdx[0]) << ')';
        return out.str();
    }

    std::string AtomicMemOp::generateDisassembly(Addr pc,
        const SymbolTable *symtab) const
    {
        // Rendered as "<mnemonic> rd, rs2, (rs1)".
        std::stringstream out;
        out << mnemonic << ' ';
        out << registerName(_destRegIdx[0]) << ", ";
        out << registerName(_srcRegIdx[1]);
        out << ", (" << registerName(_srcRegIdx[0]) << ')';
        return out.str();
    }

    std::string AtomicMemOpMicro::generateDisassembly(Addr pc,
        const SymbolTable *symtab) const
    {
        // Microops print the raw machine code word, then the mnemonic.
        std::stringstream out;
        out << csprintf("0x%08x", machInst) << ' ' << mnemonic;
        return out.str();
    }
}};
def template AtomicMemOpDeclare {{
    /**
     * Static instruction class for an AtomicMemOp operation
     */
    class %(class_name)s : public %(base_class)s
    {
      public:
        // Constructor
        %(class_name)s(ExtMachInst machInst);

    protected:

        // Load phase microop: reads the old value from memory.
        class %(class_name)sLoad : public %(base_class)sMicro
        {
          public:
            // Constructor
            %(class_name)sLoad(ExtMachInst machInst, %(class_name)s *_p);

            Fault execute(ExecContext *, Trace::InstRecord *) const;
            Fault eaComp(ExecContext *, Trace::InstRecord *) const;
            Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
            Fault completeAcc(PacketPtr, ExecContext *,
                              Trace::InstRecord *) const;
        };

        // Store phase microop: writes the computed value back to memory.
        class %(class_name)sStore : public %(base_class)sMicro
        {
          public:
            // Constructor
            %(class_name)sStore(ExtMachInst machInst, %(class_name)s *_p);

            Fault execute(ExecContext *, Trace::InstRecord *) const;
            Fault eaComp(ExecContext *, Trace::InstRecord *) const;
            Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
            Fault completeAcc(PacketPtr, ExecContext *,
                              Trace::InstRecord *) const;
        };
    };
}};
def template LRSCConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst):
        %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;
        // AQ/RL are the acquire/release bits of the instruction
        // encoding; fold them into the memory request flags.
        if (AQ)
            memAccessFlags = memAccessFlags | Request::ACQUIRE;
        if (RL)
            memAccessFlags = memAccessFlags | Request::RELEASE;
    }
}};
def template AtomicMemOpMacroConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst)
            : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;
        // An AMO macroop always expands to exactly two microops:
        // the load phase followed by the store phase.
        microops = {new %(class_name)sLoad(machInst, this),
            new %(class_name)sStore(machInst, this)};
    }
}};
def template AtomicMemOpLoadConstructor {{
    %(class_name)s::%(class_name)sLoad::%(class_name)sLoad(
        ExtMachInst machInst, %(class_name)s *_p)
            : %(base_class)s("%(mnemonic)s[l]", machInst, %(op_class)s)
    {
        %(constructor)s;
        // First half of the AMO; delayed commit keeps it from retiring
        // ahead of the store half.
        flags[IsFirstMicroop] = true;
        flags[IsDelayedCommit] = true;
        // Acquire ordering, if requested, is attached to the load phase.
        if (AQ)
            memAccessFlags = Request::ACQUIRE;
    }
}};
def template AtomicMemOpStoreConstructor {{
    %(class_name)s::%(class_name)sStore::%(class_name)sStore(
        ExtMachInst machInst, %(class_name)s *_p)
            : %(base_class)s("%(mnemonic)s[s]", machInst, %(op_class)s)
    {
        %(constructor)s;
        // Last half of the AMO; executed non-speculatively.
        flags[IsLastMicroop] = true;
        flags[IsNonSpeculative] = true;
        // Release ordering, if requested, is attached to the store phase.
        if (RL)
            memAccessFlags = Request::RELEASE;
    }
}};
def template StoreCondExecute {{
    // Atomic-mode execute for store-conditional: perform the write and
    // report the (inverted) success flag through 'result'.
    Fault %(class_name)s::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        uint64_t result;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            // writeMemAtomic returns the conditional-store status via
            // the 'result' out-parameter.
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                &result);
            // RISC-V has the opposite convention gem5 has for success flags,
            // so we invert the result here.
            result = !result;
        }

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
def template AtomicMemOpLoadExecute {{
    // Atomic-mode execute for the AMO load phase: read the old memory
    // value, then run the per-instruction load code before writeback.
    Fault %(class_name)s::%(class_name)sLoad::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
        }

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
def template AtomicMemOpStoreExecute {{
    // Atomic-mode execute for the AMO store phase: compute the new
    // value, then write it back to memory (no result expected, hence
    // the nullptr result argument).
    Fault %(class_name)s::%(class_name)sStore::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                nullptr);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
def template AtomicMemOpEACompExecute {{
    // Effective-address computation for either microop (%(op_name)s).
    Fault
    %(class_name)s::%(class_name)s%(op_name)s::eaComp(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(op_wb)s;
            // Publish the computed address to the execution context.
            xc->setEA(EA);
        }

        return fault;
    }
}};
def template AtomicMemOpLoadInitiateAcc {{
    // Timing-mode load phase: only start the memory read here; the
    // destination is written later in completeAcc.
    Fault %(class_name)s::%(class_name)sLoad::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
        }

        return fault;
    }
}};
def template AtomicMemOpStoreInitiateAcc {{
    // Timing-mode store phase: compute the new value and issue the
    // write (nullptr: no status data is expected back).
    Fault %(class_name)s::%(class_name)sStore::initiateAcc(
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                nullptr);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
def template StoreCondCompleteAcc {{
    // Timing-mode completion for store-conditional: the SC status is
    // returned in the request's extra data.
    Fault %(class_name)s::completeAcc(Packet *pkt, ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_dest_decl)s;

        // RISC-V has the opposite convention gem5 has for success flags,
        // so we invert the result here.
        uint64_t result = !pkt->req->getExtraData();

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
def template AtomicMemOpLoadCompleteAcc {{
    // Timing-mode completion for the AMO load phase: unpack the value
    // returned by memory, run the per-instruction load code, write back.
    Fault %(class_name)s::%(class_name)sLoad::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        getMem(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
def template AtomicMemOpStoreCompleteAcc {{
    // The store phase produces no result, so completion is a no-op.
    Fault %(class_name)s::%(class_name)sStore::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};
def format LoadReserved(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
        mem_flags=[], inst_flags=[]) {{
    mem_flags = makeList(mem_flags)
    inst_flags = makeList(inst_flags)
    iop = InstObjParams(name, Name, 'LoadReserved',
        {'ea_code': ea_code, 'memacc_code': memacc_code,
        'postacc_code': postacc_code}, inst_flags)
    # Fold any extra memory request flags into the generated constructor.
    # Guard against an empty list: appending unconditionally would emit
    # "memAccessFlags = memAccessFlags | ;", which does not compile.
    if mem_flags:
        iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
            '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'

    header_output = LoadStoreDeclare.subst(iop)
    decoder_output = LRSCConstructor.subst(iop)
    decode_block = BasicDecode.subst(iop)
    exec_output = LoadExecute.subst(iop) \
        + EACompExecute.subst(iop) \
        + LoadInitiateAcc.subst(iop) \
        + LoadCompleteAcc.subst(iop)
}};
def format StoreCond(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
        mem_flags=[], inst_flags=[]) {{
    mem_flags = makeList(mem_flags)
    inst_flags = makeList(inst_flags)
    iop = InstObjParams(name, Name, 'StoreCond',
        {'ea_code': ea_code, 'memacc_code': memacc_code,
        'postacc_code': postacc_code}, inst_flags)
    # Fold any extra memory request flags into the generated constructor.
    # Guard against an empty list: appending unconditionally would emit
    # "memAccessFlags = memAccessFlags | ;", which does not compile.
    if mem_flags:
        iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
            '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'

    header_output = LoadStoreDeclare.subst(iop)
    decoder_output = LRSCConstructor.subst(iop)
    decode_block = BasicDecode.subst(iop)
    exec_output = StoreCondExecute.subst(iop) \
        + EACompExecute.subst(iop) \
        + StoreInitiateAcc.subst(iop) \
        + StoreCondCompleteAcc.subst(iop)
}};
def format AtomicMemOp(load_code, store_code, ea_code, load_flags=[],
        store_flags=[], inst_flags=[]) {{
    # The macroop itself: declaration, constructor, and decode entry.
    macro_iop = InstObjParams(name, Name, 'AtomicMemOp', ea_code, inst_flags)
    header_output = AtomicMemOpDeclare.subst(macro_iop)
    decoder_output = AtomicMemOpMacroConstructor.subst(macro_iop)
    decode_block = BasicDecode.subst(macro_iop)

    # Parameters for the two microops making up the macroop.
    iop_ld = InstObjParams(name, Name, 'AtomicMemOpMicro',
        {'ea_code': ea_code, 'code': load_code, 'op_name': 'Load'},
        makeList(inst_flags) + ["IsMemRef", "IsLoad"])
    iop_st = InstObjParams(name, Name, 'AtomicMemOpMicro',
        {'ea_code': ea_code, 'code': store_code, 'op_name': 'Store'},
        makeList(inst_flags) + ["IsMemRef", "IsStore"])

    # Load-phase output, then store-phase output, in the same order the
    # macro constructor instantiates them.
    decoder_output += AtomicMemOpLoadConstructor.subst(iop_ld)
    exec_output = AtomicMemOpLoadExecute.subst(iop_ld) \
        + AtomicMemOpEACompExecute.subst(iop_ld) \
        + AtomicMemOpLoadInitiateAcc.subst(iop_ld) \
        + AtomicMemOpLoadCompleteAcc.subst(iop_ld)

    decoder_output += AtomicMemOpStoreConstructor.subst(iop_st)
    exec_output += AtomicMemOpStoreExecute.subst(iop_st) \
        + AtomicMemOpEACompExecute.subst(iop_st) \
        + AtomicMemOpStoreInitiateAcc.subst(iop_st) \
        + AtomicMemOpStoreCompleteAcc.subst(iop_st)
}};
501