// -*- mode:c++ -*-

// Copyright (c) 2015 RISC-V Foundation
// Copyright (c) 2016 The University of Virginia
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Alec Roelke

////////////////////////////////////////////////////////////////////
//
// Atomic memory operation instructions
//
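// LR and SC are implemented as ordinary single-access memory instructions
// using the LoadReserved and StoreCond formats at the bottom of this file,
// while each AMO is a macroop whose constructor splits it into a load
// microop and a store microop, declared below.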
def template AtomicMemOpDeclare {{
    /**
     * Static instruction class for an AtomicMemOp operation
     */
    class %(class_name)s : public %(base_class)s
    {
      public:
        // Constructor
        %(class_name)s(ExtMachInst machInst);

      protected:

        class %(class_name)sLoad : public %(base_class)sMicro
        {
          public:
            // Constructor
            %(class_name)sLoad(ExtMachInst machInst, %(class_name)s *_p);

            Fault execute(ExecContext *, Trace::InstRecord *) const;
            Fault eaComp(ExecContext *, Trace::InstRecord *) const;
            Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
            Fault completeAcc(PacketPtr, ExecContext *,
                              Trace::InstRecord *) const;
        };

        class %(class_name)sStore : public %(base_class)sMicro
        {
          public:
            // Constructor
            %(class_name)sStore(ExtMachInst machInst, %(class_name)s *_p);

            Fault execute(ExecContext *, Trace::InstRecord *) const;
            Fault eaComp(ExecContext *, Trace::InstRecord *) const;
            Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
            Fault completeAcc(PacketPtr, ExecContext *,
                              Trace::InstRecord *) const;
        };
    };
}};

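// For LR/SC, the instruction's aq and rl bits map onto gem5's
// Request::ACQUIRE and Request::RELEASE flags; any mem_flags passed to the
// LoadReserved and StoreCond formats below are OR'd into memAccessFlags by
// the generated constructor as well.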
def template LRSCConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst):
        %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;
        if (AQ)
            memAccessFlags = memAccessFlags | Request::ACQUIRE;
        if (RL)
            memAccessFlags = memAccessFlags | Request::RELEASE;
    }
}};

def template AtomicMemOpMacroConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst)
            : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;
        microops = {new %(class_name)sLoad(machInst, this),
            new %(class_name)sStore(machInst, this)};
    }
}};

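// For AMOs, the aq bit is attached to the load microop and the rl bit to the
// store microop, so acquire/release ordering brackets the whole
// read-modify-write sequence.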
def template AtomicMemOpLoadConstructor {{
    %(class_name)s::%(class_name)sLoad::%(class_name)sLoad(
        ExtMachInst machInst, %(class_name)s *_p)
            : %(base_class)s("%(mnemonic)s[l]", machInst, %(op_class)s)
    {
        %(constructor)s;
        flags[IsFirstMicroop] = true;
        flags[IsDelayedCommit] = true;
        if (AQ)
            memAccessFlags = Request::ACQUIRE;
    }
}};

def template AtomicMemOpStoreConstructor {{
    %(class_name)s::%(class_name)sStore::%(class_name)sStore(
        ExtMachInst machInst, %(class_name)s *_p)
            : %(base_class)s("%(mnemonic)s[s]", machInst, %(op_class)s)
    {
        %(constructor)s;
        flags[IsLastMicroop] = true;
        flags[IsNonSpeculative] = true;
        if (RL)
            memAccessFlags = Request::RELEASE;
    }
}};

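// writeMemAtomic reports the store-conditional outcome through 'result';
// StoreCondCompleteAcc below reads the same outcome from the packet when the
// access is performed in timing mode.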
def template StoreCondExecute {{
    Fault %(class_name)s::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        uint64_t result;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                &result);
            // RISC-V uses the opposite convention from gem5 for success
            // flags (0 means success), so we invert the result here.
            result = !result;
        }

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template AtomicMemOpLoadExecute {{
    Fault %(class_name)s::%(class_name)sLoad::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
        }

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template AtomicMemOpStoreExecute {{
    Fault %(class_name)s::%(class_name)sStore::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                nullptr);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

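// eaComp is shared by both microops: the AtomicMemOp format below
// instantiates this template once with op_name 'Load' and once with
// op_name 'Store'.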
def template AtomicMemOpEACompExecute {{
    Fault
    %(class_name)s::%(class_name)s%(op_name)s::eaComp(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(op_wb)s;
            xc->setEA(EA);
        }

        return fault;
    }
}};

def template AtomicMemOpLoadInitiateAcc {{
    Fault %(class_name)s::%(class_name)sLoad::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
        }

        return fault;
    }
}};

def template AtomicMemOpStoreInitiateAcc {{
    Fault %(class_name)s::%(class_name)sStore::initiateAcc(
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                nullptr);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template StoreCondCompleteAcc {{
    Fault %(class_name)s::completeAcc(Packet *pkt, ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_dest_decl)s;

        // RISC-V uses the opposite convention from gem5 for success flags
        // (0 means success), so we invert the result here.
        uint64_t result = !pkt->req->getExtraData();

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template AtomicMemOpLoadCompleteAcc {{
    Fault %(class_name)s::%(class_name)sLoad::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        getMem(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

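// The store microop's write returns no data to the instruction, so there is
// nothing to do when it completes.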
def template AtomicMemOpStoreCompleteAcc {{
    Fault %(class_name)s::%(class_name)sStore::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

def format LoadReserved(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
        mem_flags=[], inst_flags=[]) {{
    mem_flags = makeList(mem_flags)
    inst_flags = makeList(inst_flags)
    iop = InstObjParams(name, Name, 'LoadReserved',
        {'ea_code': ea_code, 'memacc_code': memacc_code,
        'postacc_code': postacc_code}, inst_flags)
    # Only append memory flags when some were given; an empty list would
    # generate a malformed assignment.
    if mem_flags:
        iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
            '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'

    header_output = LoadStoreDeclare.subst(iop)
    decoder_output = LRSCConstructor.subst(iop)
    decode_block = BasicDecode.subst(iop)
    exec_output = LoadExecute.subst(iop) \
        + EACompExecute.subst(iop) \
        + LoadInitiateAcc.subst(iop) \
        + LoadCompleteAcc.subst(iop)
}};
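
// Illustrative only: a decode entry (which lives in decoder.isa, not in this
// file) might invoke this format for lr.w roughly as follows; the exact
// operand snippet and flags here are assumptions, not the real entry:
//
//     0x2: LoadReserved::lr_w({{
//         Rd_sd = Mem_sw;
//     }}, mem_flags=LLSC);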

def format StoreCond(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
        mem_flags=[], inst_flags=[]) {{
    mem_flags = makeList(mem_flags)
    inst_flags = makeList(inst_flags)
    iop = InstObjParams(name, Name, 'StoreCond',
        {'ea_code': ea_code, 'memacc_code': memacc_code,
        'postacc_code': postacc_code}, inst_flags)
    # As above, only append memory flags when some were given.
    if mem_flags:
        iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
            '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'

    header_output = LoadStoreDeclare.subst(iop)
    decoder_output = LRSCConstructor.subst(iop)
    decode_block = BasicDecode.subst(iop)
    exec_output = StoreCondExecute.subst(iop) \
        + EACompExecute.subst(iop) \
        + StoreInitiateAcc.subst(iop) \
        + StoreCondCompleteAcc.subst(iop)
}};
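
// Illustrative only: a matching sc.w entry would pass the store code, the
// post-access code that writes the (already inverted) result to Rd, and the
// same memory flags; again the snippets are assumptions:
//
//     0x3: StoreCond::sc_w({{
//         Mem_uw = Rs2_uw;
//     }}, {{
//         Rd = result;
//     }}, mem_flags=LLSC);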

def format AtomicMemOp(load_code, store_code, ea_code, load_flags=[],
        store_flags=[], inst_flags=[]) {{
    macro_iop = InstObjParams(name, Name, 'AtomicMemOp', ea_code, inst_flags)
    header_output = AtomicMemOpDeclare.subst(macro_iop)
    decoder_output = AtomicMemOpMacroConstructor.subst(macro_iop)
    decode_block = BasicDecode.subst(macro_iop)
    exec_output = ''

    load_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsLoad"]
    load_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
        {'ea_code': ea_code, 'code': load_code, 'op_name': 'Load'},
        load_inst_flags)
    decoder_output += AtomicMemOpLoadConstructor.subst(load_iop)
    exec_output += AtomicMemOpLoadExecute.subst(load_iop) \
        + AtomicMemOpEACompExecute.subst(load_iop) \
        + AtomicMemOpLoadInitiateAcc.subst(load_iop) \
        + AtomicMemOpLoadCompleteAcc.subst(load_iop)

    store_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsStore"]
    store_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
        {'ea_code': ea_code, 'code': store_code, 'op_name': 'Store'},
        store_inst_flags)
    decoder_output += AtomicMemOpStoreConstructor.subst(store_iop)
    exec_output += AtomicMemOpStoreExecute.subst(store_iop) \
        + AtomicMemOpEACompExecute.subst(store_iop) \
        + AtomicMemOpStoreInitiateAcc.subst(store_iop) \
        + AtomicMemOpStoreCompleteAcc.subst(store_iop)
}};
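
// Illustrative only: an AMO entry supplies load code, store code, and the
// address computation, e.g. something like amoadd.w below; operand names and
// snippets are assumptions, not the real decoder contents:
//
//     0x0: AtomicMemOp::amoadd_w({{
//         Rd_sd = Mem_sw;
//     }}, {{
//         Mem_sw = Rs2_sw + Rd_sd;
//     }}, {{EA = Rs1;}});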