// -*- mode:c++ -*-

// Copyright (c) 2015 RISC-V Developers
// Copyright (c) 2016 The University of Virginia
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Alec Roelke

// Declaration templates
def template AtomicMemOpDeclare {{
    /**
     * Static instruction class for an AtomicMemOp operation
     */
    class %(class_name)s : public %(base_class)s
    {
      public:
        // Constructor
        %(class_name)s(ExtMachInst machInst);

      protected:

        /*
         * The main RMW part of an AMO
         */
        class %(class_name)sRMW : public %(base_class)sMicro
        {
          public:
            // Constructor
            %(class_name)sRMW(ExtMachInst machInst, %(class_name)s *_p);

            Fault execute(ExecContext *, Trace::InstRecord *) const override;
            Fault initiateAcc(ExecContext *,
                              Trace::InstRecord *) const override;
            Fault completeAcc(PacketPtr, ExecContext *,
                              Trace::InstRecord *) const override;
        };
    };
}};

def template LRSCDeclare {{
    /**
     * Static instruction class for an LR or SC operation
     */
    class %(class_name)s : public %(base_class)s
    {
      public:
        // Constructor
        %(class_name)s(ExtMachInst machInst);

      protected:

        class %(class_name)sMicro : public %(base_class)sMicro
        {
          public:
            // Constructor
            %(class_name)sMicro(ExtMachInst machInst, %(class_name)s *_p);

            Fault execute(ExecContext *, Trace::InstRecord *) const override;
            Fault initiateAcc(ExecContext *,
                              Trace::InstRecord *) const override;
            Fault completeAcc(PacketPtr, ExecContext *,
                              Trace::InstRecord *) const override;
        };
    };
}};

// Constructor templates
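//
// Both the LR/SC and AMO macroops below decompose into up to three microops:
// an optional leading fence when the rl bit is set, the memory access itself,
// and an optional trailing fence when the aq bit is set.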
def template LRSCMacroConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst):
        %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;

        StaticInstPtr rel_fence;
        StaticInstPtr lrsc;
        StaticInstPtr acq_fence;

        // set up release fence
        if (RL) {
            rel_fence = new MemFenceMicro(machInst, No_OpClass);
            rel_fence->setFlag(IsFirstMicroop);
            rel_fence->setFlag(IsMemBarrier);
            rel_fence->setFlag(IsDelayedCommit);
        }

        // set up atomic rmw op
        lrsc = new %(class_name)sMicro(machInst, this);

        if (!RL) {
            lrsc->setFlag(IsFirstMicroop);
        }

        if (!AQ) {
            lrsc->setFlag(IsLastMicroop);
        } else {
            lrsc->setFlag(IsDelayedCommit);
        }

        // set up acquire fence
        if (AQ) {
            acq_fence = new MemFenceMicro(machInst, No_OpClass);
            acq_fence->setFlag(IsLastMicroop);
            acq_fence->setFlag(IsMemBarrier);
        }

        if (RL && AQ) {
            microops = {rel_fence, lrsc, acq_fence};
        } else if (RL) {
            microops = {rel_fence, lrsc};
        } else if (AQ) {
            microops = {lrsc, acq_fence};
        } else {
            microops = {lrsc};
        }
    }
}};

def template LRSCMicroConstructor {{
    %(class_name)s::%(class_name)sMicro::%(class_name)sMicro(
        ExtMachInst machInst, %(class_name)s *_p)
            : %(base_class)sMicro("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;
    }
}};

def template AtomicMemOpMacroConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst)
            : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;

        StaticInstPtr rel_fence;
        StaticInstPtr rmw_op;
        StaticInstPtr acq_fence;

        // set up release fence
        if (RL) {
            rel_fence = new MemFenceMicro(machInst, No_OpClass);
            rel_fence->setFlag(IsFirstMicroop);
            rel_fence->setFlag(IsMemBarrier);
            rel_fence->setFlag(IsDelayedCommit);
        }

        // set up atomic rmw op
        rmw_op = new %(class_name)sRMW(machInst, this);

        if (!RL) {
            rmw_op->setFlag(IsFirstMicroop);
        }

        if (!AQ) {
            rmw_op->setFlag(IsLastMicroop);
        } else {
            rmw_op->setFlag(IsDelayedCommit);
        }

        // set up acquire fence
        if (AQ) {
            acq_fence = new MemFenceMicro(machInst, No_OpClass);
            acq_fence->setFlag(IsLastMicroop);
            acq_fence->setFlag(IsMemBarrier);
        }

        if (RL && AQ) {
            microops = {rel_fence, rmw_op, acq_fence};
        } else if (RL) {
            microops = {rel_fence, rmw_op};
        } else if (AQ) {
            microops = {rmw_op, acq_fence};
        } else {
            microops = {rmw_op};
        }
    }
}};

def template AtomicMemOpRMWConstructor {{
    %(class_name)s::%(class_name)sRMW::%(class_name)sRMW(
        ExtMachInst machInst, %(class_name)s *_p)
            : %(base_class)s("%(mnemonic)s[l]", machInst, %(op_class)s)
    {
        %(constructor)s;

        // Overwrite the default flags: this microop is an atomic memory
        // reference (RMW), not a plain load or store.
        flags[IsMemRef] = true;
        flags[IsLoad] = false;
        flags[IsStore] = false;
        flags[IsAtomic] = true;
    }
}};

// execute() templates
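//
// execute() performs the whole access at once for atomic-mode CPUs; the
// initiateAcc()/completeAcc() templates further down split the access into
// its request and response halves for timing-mode CPUs.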

def template LoadReservedExecute {{
    Fault
    %(class_name)s::%(class_name)sMicro::execute(
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template StoreCondExecute {{
    Fault %(class_name)s::%(class_name)sMicro::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        uint64_t result;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                &result);
            // RISC-V uses the opposite convention from gem5 for SC success
            // flags (RISC-V writes 0 on success), so invert the result here.
            result = !result;
        }

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template AtomicMemOpRMWExecute {{
    Fault %(class_name)s::%(class_name)sRMW::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        %(amoop_code)s;

        assert(amo_op);

        if (fault == NoFault) {
            fault = amoMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                                 amo_op);
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// initiateAcc() templates

def template LoadReservedInitiateAcc {{
    Fault
    %(class_name)s::%(class_name)sMicro::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
        }

        return fault;
    }
}};

def template StoreCondInitiateAcc {{
    Fault
    %(class_name)s::%(class_name)sMicro::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA,
                memAccessFlags, nullptr);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template AtomicMemOpRMWInitiateAcc {{
    Fault
    %(class_name)s::%(class_name)sRMW::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        %(amoop_code)s;

        assert(amo_op);

        if (fault == NoFault) {
            fault = initiateMemAMO(xc, traceData, EA, Mem, memAccessFlags,
                                   amo_op);
        }

        return fault;
    }
}};

// completeAcc() templates

def template LoadReservedCompleteAcc {{
    Fault
    %(class_name)s::%(class_name)sMicro::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        getMem(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template StoreCondCompleteAcc {{
    Fault %(class_name)s::%(class_name)sMicro::completeAcc(PacketPtr pkt,
          ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_dest_decl)s;

        // RISC-V uses the opposite convention from gem5 for SC success
        // flags (RISC-V writes 0 on success), so invert the result here.
        uint64_t result = !pkt->req->getExtraData();

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template AtomicMemOpRMWCompleteAcc {{
    Fault %(class_name)s::%(class_name)sRMW::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        getMem(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// LR/SC/AMO decode formats

def format LoadReserved(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
        mem_flags=[], inst_flags=[]) {{
    macro_ea_code = ''
    macro_inst_flags = []
    macro_iop = InstObjParams(name, Name, 'LoadReserved', macro_ea_code,
                              macro_inst_flags)
    header_output = LRSCDeclare.subst(macro_iop)
    decoder_output = LRSCMacroConstructor.subst(macro_iop)
    decode_block = BasicDecode.subst(macro_iop)

    exec_output = ''

    mem_flags = makeList(mem_flags)
    inst_flags = makeList(inst_flags)
    iop = InstObjParams(name, Name, 'LoadReserved',
        {'ea_code': ea_code, 'memacc_code': memacc_code,
        'postacc_code': postacc_code}, inst_flags)
    iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
        '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'

    decoder_output += LRSCMicroConstructor.subst(iop)
    decode_block += BasicDecode.subst(iop)
    exec_output += LoadReservedExecute.subst(iop) \
        + LoadReservedInitiateAcc.subst(iop) \
        + LoadReservedCompleteAcc.subst(iop)
}};
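
// A sketch of how this format might be used from the decoder (illustrative
// only; the real entries live in decoder.isa, and the mnemonic and operand
// names below are assumptions):
//
//     0x2: LoadReserved::lr_w({{
//         Rd_sd = Mem_sw;
//     }}, mem_flags=LLSC);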

def format StoreCond(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
        mem_flags=[], inst_flags=[]) {{
    macro_ea_code = ''
    macro_inst_flags = []
    macro_iop = InstObjParams(name, Name, 'StoreCond', macro_ea_code,
                              macro_inst_flags)
    header_output = LRSCDeclare.subst(macro_iop)
    decoder_output = LRSCMacroConstructor.subst(macro_iop)
    decode_block = BasicDecode.subst(macro_iop)

    exec_output = ''

    mem_flags = makeList(mem_flags)
    inst_flags = makeList(inst_flags)
    iop = InstObjParams(name, Name, 'StoreCond',
        {'ea_code': ea_code, 'memacc_code': memacc_code,
        'postacc_code': postacc_code}, inst_flags)
    iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
        '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'

    decoder_output += LRSCMicroConstructor.subst(iop)
    decode_block += BasicDecode.subst(iop)
    exec_output += StoreCondExecute.subst(iop) \
        + StoreCondInitiateAcc.subst(iop) \
        + StoreCondCompleteAcc.subst(iop)
}};
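
// A sketch of a possible StoreCond use (illustrative; the second code block
// is the postacc_code, which can consume the 'result' success flag defined
// in the templates above):
//
//     0x3: StoreCond::sc_w({{
//         Mem_uw = Rs2_uw;
//     }}, {{
//         Rd = result;
//     }}, mem_flags=LLSC);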

def format AtomicMemOp(memacc_code, amoop_code, postacc_code={{ }},
        ea_code={{EA = Rs1;}}, mem_flags=[], inst_flags=[]) {{
    macro_ea_code = ''
    macro_inst_flags = []
    macro_iop = InstObjParams(name, Name, 'AtomicMemOp', macro_ea_code,
                              macro_inst_flags)
    header_output = AtomicMemOpDeclare.subst(macro_iop)
    decoder_output = AtomicMemOpMacroConstructor.subst(macro_iop)
    decode_block = BasicDecode.subst(macro_iop)

    exec_output = ''

    rmw_mem_flags = makeList(mem_flags)
    rmw_inst_flags = makeList(inst_flags)
    rmw_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
                            {'ea_code': ea_code,
                             'memacc_code': memacc_code,
                             'postacc_code': postacc_code,
                             'amoop_code': amoop_code},
                            rmw_inst_flags)

    rmw_iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
          '|'.join(['Request::%s' % flag for flag in rmw_mem_flags]) + ';'

    decoder_output += AtomicMemOpRMWConstructor.subst(rmw_iop)
    decode_block += BasicDecode.subst(rmw_iop)
    exec_output += AtomicMemOpRMWExecute.subst(rmw_iop) \
                 + AtomicMemOpRMWInitiateAcc.subst(rmw_iop) \
                 + AtomicMemOpRMWCompleteAcc.subst(rmw_iop)
}};
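
// A sketch of a possible AtomicMemOp use. The amoop_code block must define
// 'amo_op' (the execute templates above assert it is non-null); the
// TypedAtomicOpFunctor/AtomicGenericOp helpers and the ATOMIC_RETURN_OP
// request flag are assumed from gem5 and may differ between versions:
//
//     0x0: AtomicMemOp::amoadd_w({{
//         Rd_sd = Mem_sw;
//     }}, {{
//         TypedAtomicOpFunctor<int32_t> *amo_op =
//             new AtomicGenericOp<int32_t>(Rs2_sw,
//                 [](int32_t *b, int32_t a) { *b += a; });
//     }}, mem_flags=ATOMIC_RETURN_OP);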