// amo.isa, revision 12234
1// -*- mode:c++ -*-
2
3// Copyright (c) 2015 Riscv Developers
4// Copyright (c) 2016 The University of Virginia
5// All rights reserved.
6//
7// Redistribution and use in source and binary forms, with or without
8// modification, are permitted provided that the following conditions are
9// met: redistributions of source code must retain the above copyright
10// notice, this list of conditions and the following disclaimer;
11// redistributions in binary form must reproduce the above copyright
12// notice, this list of conditions and the following disclaimer in the
13// documentation and/or other materials provided with the distribution;
14// neither the name of the copyright holders nor the names of its
15// contributors may be used to endorse or promote products derived from
16// this software without specific prior written permission.
17//
18// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
21// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
22// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
23// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
24// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29//
30// Authors: Alec Roelke
31
32////////////////////////////////////////////////////////////////////
33//
34// Atomic memory operation instructions
35//
output header {{
    // Base class for LR (load-reserved) instructions.  The request flags
    // (ACQUIRE/RELEASE from aq/rl, plus any flags appended by the
    // LoadReserved format) live here.
    class LoadReserved : public RiscvStaticInst
    {
      protected:
        Request::Flags memAccessFlags;

        LoadReserved(const char *mnem, ExtMachInst _machInst,
            OpClass __opClass)
                : RiscvStaticInst(mnem, _machInst, __opClass)
        {}

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const;
    };

    // Base class for SC (store-conditional) instructions.
    class StoreCond : public RiscvStaticInst
    {
      protected:
        Request::Flags memAccessFlags;

        StoreCond(const char* mnem, ExtMachInst _machInst, OpClass __opClass)
                : RiscvStaticInst(mnem, _machInst, __opClass)
        {}

        std::string
        generateDisassembly(Addr pc, const SymbolTable *symtab) const;
    };

    // Macroop base class for AMO instructions; each one expands into a
    // load microop and a store microop (see AtomicMemOpMacroConstructor).
    class AtomicMemOp : public RiscvMacroInst
    {
    protected:
        /// Constructor
        // Each AtomicMemOp has a load and a store phase
        AtomicMemOp(const char *mnem, ExtMachInst _machInst, OpClass __opClass)
                : RiscvMacroInst(mnem, _machInst, __opClass)
        {}

        std::string generateDisassembly(Addr pc,
            const SymbolTable *symtab) const;
    };

    // Base class for the load/store microops that make up an AtomicMemOp.
    class AtomicMemOpMicro : public RiscvMicroInst
    {
    protected:
        /// Memory request flags.  See mem/request.hh.
        Request::Flags memAccessFlags;

        /// Constructor
        AtomicMemOpMicro(const char *mnem, ExtMachInst _machInst,
            OpClass __opClass)
                : RiscvMicroInst(mnem, _machInst, __opClass)
        {}

        std::string generateDisassembly(Addr pc,
            const SymbolTable *symtab) const;
    };
}};
93
94output decoder {{
95    std::string LoadReserved::generateDisassembly(Addr pc,
96        const SymbolTable *symtab) const
97    {
98        std::stringstream ss;
99        ss << mnemonic << ' ' << registerName(_destRegIdx[0]) << ", ("
100                << registerName(_srcRegIdx[0]) << ')';
101        return ss.str();
102    }
103
104    std::string StoreCond::generateDisassembly(Addr pc,
105        const SymbolTable *symtab) const
106    {
107        std::stringstream ss;
108        ss << mnemonic << ' ' << registerName(_destRegIdx[0]) << ", "
109                << registerName(_srcRegIdx[1]) << ", ("
110                << registerName(_srcRegIdx[0]) << ')';
111        return ss.str();
112    }
113
114    std::string AtomicMemOp::generateDisassembly(Addr pc,
115        const SymbolTable *symtab) const
116    {
117        std::stringstream ss;
118        ss << mnemonic << ' ' << registerName(_destRegIdx[0]) << ", "
119                << registerName(_srcRegIdx[1]) << ", ("
120                << registerName(_srcRegIdx[0]) << ')';
121        return ss.str();
122    }
123
124    std::string AtomicMemOpMicro::generateDisassembly(Addr pc,
125        const SymbolTable *symtab) const
126    {
127        std::stringstream ss;
128        ss << csprintf("0x%08x", machInst) << ' ' << mnemonic;
129        return ss.str();
130    }
131}};
132
def template AtomicMemOpDeclare {{
    /**
     * Static instruction class for an AtomicMemOp operation
     */
    class %(class_name)s : public %(base_class)s
    {
      public:
        // Constructor
        %(class_name)s(ExtMachInst machInst);

    protected:

        // Load microop: reads the original memory value.
        class %(class_name)sLoad : public %(base_class)sMicro
        {
          public:
            // Constructor
            %(class_name)sLoad(ExtMachInst machInst, %(class_name)s *_p);

            %(BasicExecDeclare)s

            %(EACompDeclare)s

            %(InitiateAccDeclare)s

            %(CompleteAccDeclare)s
        };

        // Store microop: writes the computed result back to memory.
        class %(class_name)sStore : public %(base_class)sMicro
        {
          public:
            // Constructor
            %(class_name)sStore(ExtMachInst machInst, %(class_name)s *_p);

            %(BasicExecDeclare)s

            %(EACompDeclare)s

            %(InitiateAccDeclare)s

            %(CompleteAccDeclare)s
        };
    };
}};
176
def template LRSCConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst):
        %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;
        // Map the RISC-V aq/rl instruction bits onto gem5 request flags,
        // OR-ing so flags set by %(constructor)s are preserved.
        if (AQ)
            memAccessFlags = memAccessFlags | Request::ACQUIRE;
        if (RL)
            memAccessFlags = memAccessFlags | Request::RELEASE;
    }
}};
188
def template AtomicMemOpMacroConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst)
            : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;
        // An AMO macroop expands into exactly two microops: the load of
        // the original value, then the store of the computed result.
        microops = {new %(class_name)sLoad(machInst, this),
            new %(class_name)sStore(machInst, this)};
    }
}};
198
199def template AtomicMemOpLoadConstructor {{
200    %(class_name)s::%(class_name)sLoad::%(class_name)sLoad(
201        ExtMachInst machInst, %(class_name)s *_p)
202            : %(base_class)s("%(mnemonic)s[l]", machInst, %(op_class)s)
203    {
204        %(constructor)s;
205        flags[IsFirstMicroop] = true;
206        flags[IsDelayedCommit] = true;
207        if (AQ)
208            memAccessFlags = Request::ACQUIRE;
209    }
210}};
211
212def template AtomicMemOpStoreConstructor {{
213    %(class_name)s::%(class_name)sStore::%(class_name)sStore(
214        ExtMachInst machInst, %(class_name)s *_p)
215            : %(base_class)s("%(mnemonic)s[s]", machInst, %(op_class)s)
216    {
217        %(constructor)s;
218        flags[IsLastMicroop] = true;
219        flags[IsNonSpeculative] = true;
220        if (RL)
221            memAccessFlags = Request::RELEASE;
222    }
223}};
224
def template StoreCondExecute {{
    // Atomic-mode SC: write conditionally and put the (inverted) success
    // flag in the destination register via %(postacc_code)s.
    Fault %(class_name)s::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        uint64_t result;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            // The store-conditional success flag comes back in 'result'.
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                &result);
            // RISC-V has the opposite convention gem5 has for success flags,
            // so we invert the result here.
            result = !result;
        }

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
260
def template AtomicMemOpLoadExecute {{
    // Atomic-mode execution of the AMO load half.
    Fault %(class_name)s::%(class_name)sLoad::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        // Read the original memory value into Mem...
        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
        }

        // ...then run the instruction-specific code on it.
        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
287
def template AtomicMemOpStoreExecute {{
    // Atomic-mode execution of the AMO store half.
    Fault %(class_name)s::%(class_name)sStore::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        // Compute the value to be stored...
        if (fault == NoFault) {
            %(code)s;
        }

        // ...and write it out; no returned result is needed (nullptr).
        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                nullptr);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
315
def template AtomicMemOpEACompExecute {{
    // Effective-address computation for either microop (op_name is
    // 'Load' or 'Store').
    Fault
    %(class_name)s::%(class_name)s%(op_name)s::eaComp(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(op_wb)s;
            // Hand the computed effective address back to the CPU model.
            xc->setEA(EA);
        }

        return fault;
    }
}};
336
def template AtomicMemOpLoadInitiateAcc {{
    // Timing-mode load half: issue the read; completeAcc finishes it.
    Fault %(class_name)s::%(class_name)sLoad::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
        }

        return fault;
    }
}};
355
def template AtomicMemOpStoreInitiateAcc {{
    // Timing-mode store half: compute the new value and issue the write.
    Fault %(class_name)s::%(class_name)sStore::initiateAcc(
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(code)s;
        }

        // No returned result is needed for the write (nullptr).
        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                nullptr);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
383
def template StoreCondCompleteAcc {{
    // Timing-mode completion for SC: pull the success flag out of the
    // returned packet and write it to the destination register.
    Fault %(class_name)s::completeAcc(Packet *pkt, ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_dest_decl)s;

        // RISC-V has the opposite convention gem5 has for success flags,
        // so we invert the result here.
        uint64_t result = !pkt->req->getExtraData();

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
407
def template AtomicMemOpLoadCompleteAcc {{
    // Timing-mode completion for the AMO load half: copy the returned
    // memory value out of the packet, then run the instruction code.
    Fault %(class_name)s::%(class_name)sLoad::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        getMem(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};
430
def template AtomicMemOpStoreCompleteAcc {{
    // The store half produces no value to write back, so timing-mode
    // completion is a no-op.
    Fault %(class_name)s::%(class_name)sStore::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};
438
439def format LoadReserved(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
440        mem_flags=[], inst_flags=[]) {{
441    mem_flags = makeList(mem_flags)
442    inst_flags = makeList(inst_flags)
443    iop = InstObjParams(name, Name, 'LoadReserved',
444        {'ea_code': ea_code, 'memacc_code': memacc_code,
445        'postacc_code': postacc_code}, inst_flags)
446    iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
447        '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'
448
449    header_output = LoadStoreDeclare.subst(iop)
450    decoder_output = LRSCConstructor.subst(iop)
451    decode_block = BasicDecode.subst(iop)
452    exec_output = LoadExecute.subst(iop) \
453        + EACompExecute.subst(iop) \
454        + LoadInitiateAcc.subst(iop) \
455        + LoadCompleteAcc.subst(iop)
456}};
457
458def format StoreCond(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
459        mem_flags=[], inst_flags=[]) {{
460    mem_flags = makeList(mem_flags)
461    inst_flags = makeList(inst_flags)
462    iop = InstObjParams(name, Name, 'StoreCond',
463        {'ea_code': ea_code, 'memacc_code': memacc_code,
464        'postacc_code': postacc_code}, inst_flags)
465    iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
466        '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'
467
468    header_output = LoadStoreDeclare.subst(iop)
469    decoder_output = LRSCConstructor.subst(iop)
470    decode_block = BasicDecode.subst(iop)
471    exec_output = StoreCondExecute.subst(iop) \
472        + EACompExecute.subst(iop) \
473        + StoreInitiateAcc.subst(iop) \
474        + StoreCondCompleteAcc.subst(iop)
475}};
476
477def format AtomicMemOp(load_code, store_code, ea_code, load_flags=[],
478        store_flags=[], inst_flags=[]) {{
479    macro_iop = InstObjParams(name, Name, 'AtomicMemOp', ea_code, inst_flags)
480    header_output = AtomicMemOpDeclare.subst(macro_iop)
481    decoder_output = AtomicMemOpMacroConstructor.subst(macro_iop)
482    decode_block = BasicDecode.subst(macro_iop)
483    exec_output = ''
484
485    load_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsLoad"]
486    load_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
487        {'ea_code': ea_code, 'code': load_code, 'op_name': 'Load'},
488        load_inst_flags)
489    decoder_output += AtomicMemOpLoadConstructor.subst(load_iop)
490    exec_output += AtomicMemOpLoadExecute.subst(load_iop) \
491        + AtomicMemOpEACompExecute.subst(load_iop) \
492        + AtomicMemOpLoadInitiateAcc.subst(load_iop) \
493        + AtomicMemOpLoadCompleteAcc.subst(load_iop)
494
495    store_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsStore"]
496    store_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
497        {'ea_code': ea_code, 'code': store_code, 'op_name': 'Store'},
498        store_inst_flags)
499    decoder_output += AtomicMemOpStoreConstructor.subst(store_iop)
500    exec_output += AtomicMemOpStoreExecute.subst(store_iop) \
501        + AtomicMemOpEACompExecute.subst(store_iop) \
502        + AtomicMemOpStoreInitiateAcc.subst(store_iop) \
503        + AtomicMemOpStoreCompleteAcc.subst(store_iop)
504}};
505