// amo.isa — NOTE(review): this file is a concatenation of two revisions
// (12236:126ac9da6050 and 12323:55d08b81ff39) produced by a diff/annotate
// tool; the second revision's definitions (above the duplicated tail) are
// the current ones.
// -*- mode:c++ -*-

// Copyright (c) 2015 Riscv Developers
// Copyright (c) 2016 The University of Virginia
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Alec Roelke

////////////////////////////////////////////////////////////////////
//
// Atomic memory operation instructions
//
// -*- mode:c++ -*-

// Copyright (c) 2015 Riscv Developers
// Copyright (c) 2016 The University of Virginia
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Alec Roelke

////////////////////////////////////////////////////////////////////
//
// Atomic memory operation instructions
//
36output header {{
37 class LoadReserved : public RiscvStaticInst
38 {
39 protected:
40 Request::Flags memAccessFlags;
41
42 LoadReserved(const char *mnem, ExtMachInst _machInst,
43 OpClass __opClass)
44 : RiscvStaticInst(mnem, _machInst, __opClass)
45 {}
46
47 std::string
48 generateDisassembly(Addr pc, const SymbolTable *symtab) const;
49 };
50
51 class StoreCond : public RiscvStaticInst
52 {
53 protected:
54 Request::Flags memAccessFlags;
55
56 StoreCond(const char* mnem, ExtMachInst _machInst, OpClass __opClass)
57 : RiscvStaticInst(mnem, _machInst, __opClass)
58 {}
59
60 std::string
61 generateDisassembly(Addr pc, const SymbolTable *symtab) const;
62 };
63
64 class AtomicMemOp : public RiscvMacroInst
65 {
66 protected:
67 /// Constructor
68 // Each AtomicMemOp has a load and a store phase
69 AtomicMemOp(const char *mnem, ExtMachInst _machInst, OpClass __opClass)
70 : RiscvMacroInst(mnem, _machInst, __opClass)
71 {}
72
73 std::string generateDisassembly(Addr pc,
74 const SymbolTable *symtab) const;
75 };
76
77 class AtomicMemOpMicro : public RiscvMicroInst
78 {
79 protected:
80 /// Memory request flags. See mem/request.hh.
81 Request::Flags memAccessFlags;
82
83 /// Constructor
84 AtomicMemOpMicro(const char *mnem, ExtMachInst _machInst,
85 OpClass __opClass)
86 : RiscvMicroInst(mnem, _machInst, __opClass)
87 {}
88
89 std::string generateDisassembly(Addr pc,
90 const SymbolTable *symtab) const;
91 };
92}};
93
94output decoder {{
95 std::string LoadReserved::generateDisassembly(Addr pc,
96 const SymbolTable *symtab) const
97 {
98 std::stringstream ss;
99 ss << mnemonic << ' ' << registerName(_destRegIdx[0]) << ", ("
100 << registerName(_srcRegIdx[0]) << ')';
101 return ss.str();
102 }
103
104 std::string StoreCond::generateDisassembly(Addr pc,
105 const SymbolTable *symtab) const
106 {
107 std::stringstream ss;
108 ss << mnemonic << ' ' << registerName(_destRegIdx[0]) << ", "
109 << registerName(_srcRegIdx[1]) << ", ("
110 << registerName(_srcRegIdx[0]) << ')';
111 return ss.str();
112 }
113
114 std::string AtomicMemOp::generateDisassembly(Addr pc,
115 const SymbolTable *symtab) const
116 {
117 std::stringstream ss;
118 ss << mnemonic << ' ' << registerName(_destRegIdx[0]) << ", "
119 << registerName(_srcRegIdx[1]) << ", ("
120 << registerName(_srcRegIdx[0]) << ')';
121 return ss.str();
122 }
123
124 std::string AtomicMemOpMicro::generateDisassembly(Addr pc,
125 const SymbolTable *symtab) const
126 {
127 std::stringstream ss;
128 ss << csprintf("0x%08x", machInst) << ' ' << mnemonic;
129 return ss.str();
130 }
131}};
132
133def template AtomicMemOpDeclare {{
134 /**
135 * Static instruction class for an AtomicMemOp operation
136 */
137 class %(class_name)s : public %(base_class)s
138 {
139 public:
140 // Constructor
141 %(class_name)s(ExtMachInst machInst);
142
143 protected:
144
145 class %(class_name)sLoad : public %(base_class)sMicro
146 {
147 public:
148 // Constructor
149 %(class_name)sLoad(ExtMachInst machInst, %(class_name)s *_p);
150
151 Fault execute(ExecContext *, Trace::InstRecord *) const;
152 Fault eaComp(ExecContext *, Trace::InstRecord *) const;
153 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
154 Fault completeAcc(PacketPtr, ExecContext *,
155 Trace::InstRecord *) const;
156 };
157
158 class %(class_name)sStore : public %(base_class)sMicro
159 {
160 public:
161 // Constructor
162 %(class_name)sStore(ExtMachInst machInst, %(class_name)s *_p);
163
164 Fault execute(ExecContext *, Trace::InstRecord *) const;
165 Fault eaComp(ExecContext *, Trace::InstRecord *) const;
166 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
167 Fault completeAcc(PacketPtr, ExecContext *,
168 Trace::InstRecord *) const;
169 };
170 };
171}};
172
173def template LRSCConstructor {{
174 %(class_name)s::%(class_name)s(ExtMachInst machInst):
175 %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
176 {
177 %(constructor)s;
178 if (AQ)
179 memAccessFlags = memAccessFlags | Request::ACQUIRE;
180 if (RL)
181 memAccessFlags = memAccessFlags | Request::RELEASE;
182 }
183}};
184
185def template AtomicMemOpMacroConstructor {{
186 %(class_name)s::%(class_name)s(ExtMachInst machInst)
187 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
188 {
189 %(constructor)s;
190 microops = {new %(class_name)sLoad(machInst, this),
191 new %(class_name)sStore(machInst, this)};
192 }
193}};
194
195def template AtomicMemOpLoadConstructor {{
196 %(class_name)s::%(class_name)sLoad::%(class_name)sLoad(
197 ExtMachInst machInst, %(class_name)s *_p)
198 : %(base_class)s("%(mnemonic)s[l]", machInst, %(op_class)s)
199 {
200 %(constructor)s;
201 flags[IsFirstMicroop] = true;
202 flags[IsDelayedCommit] = true;
203 if (AQ)
204 memAccessFlags = Request::ACQUIRE;
205 }
206}};
207
208def template AtomicMemOpStoreConstructor {{
209 %(class_name)s::%(class_name)sStore::%(class_name)sStore(
210 ExtMachInst machInst, %(class_name)s *_p)
211 : %(base_class)s("%(mnemonic)s[s]", machInst, %(op_class)s)
212 {
213 %(constructor)s;
214 flags[IsLastMicroop] = true;
215 flags[IsNonSpeculative] = true;
216 if (RL)
217 memAccessFlags = Request::RELEASE;
218 }
219}};
220
221def template StoreCondExecute {{
222 Fault %(class_name)s::execute(ExecContext *xc,
223 Trace::InstRecord *traceData) const
224 {
225 Addr EA;
226 Fault fault = NoFault;
227 uint64_t result;
228
229 %(op_decl)s;
230 %(op_rd)s;
231 %(ea_code)s;
232
233 if (fault == NoFault) {
234 %(memacc_code)s;
235 }
236
237 if (fault == NoFault) {
238 fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
239 &result);
240 // RISC-V has the opposite convention gem5 has for success flags,
241 // so we invert the result here.
242 result = !result;
243 }
244
245 if (fault == NoFault) {
246 %(postacc_code)s;
247 }
248
249 if (fault == NoFault) {
250 %(op_wb)s;
251 }
252
253 return fault;
254 }
255}};
256
257def template AtomicMemOpLoadExecute {{
258 Fault %(class_name)s::%(class_name)sLoad::execute(ExecContext *xc,
259 Trace::InstRecord *traceData) const
260 {
261 Addr EA;
262 Fault fault = NoFault;
263
264 %(op_decl)s;
265 %(op_rd)s;
266 %(ea_code)s;
267
268 if (fault == NoFault) {
269 fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
270 }
271
272 if (fault == NoFault) {
273 %(code)s;
274 }
275
276 if (fault == NoFault) {
277 %(op_wb)s;
278 }
279
280 return fault;
281 }
282}};
283
284def template AtomicMemOpStoreExecute {{
285 Fault %(class_name)s::%(class_name)sStore::execute(ExecContext *xc,
286 Trace::InstRecord *traceData) const
287 {
288 Addr EA;
289 Fault fault = NoFault;
290
291 %(op_decl)s;
292 %(op_rd)s;
293 %(ea_code)s;
294
295 if (fault == NoFault) {
296 %(code)s;
297 }
298
299 if (fault == NoFault) {
300 fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
301 nullptr);
302 }
303
304 if (fault == NoFault) {
305 %(op_wb)s;
306 }
307
308 return fault;
309 }
310}};
311
312def template AtomicMemOpEACompExecute {{
313 Fault
314 %(class_name)s::%(class_name)s%(op_name)s::eaComp(ExecContext *xc,
315 Trace::InstRecord *traceData) const
316 {
317 Addr EA;
318 Fault fault = NoFault;
319
320 %(op_decl)s;
321 %(op_rd)s;
322 %(ea_code)s;
323
324 if (fault == NoFault) {
325 %(op_wb)s;
326 xc->setEA(EA);
327 }
328
329 return fault;
330 }
331}};
332
333def template AtomicMemOpLoadInitiateAcc {{
334 Fault %(class_name)s::%(class_name)sLoad::initiateAcc(ExecContext *xc,
335 Trace::InstRecord *traceData) const
336 {
337 Addr EA;
338 Fault fault = NoFault;
339
340 %(op_src_decl)s;
341 %(op_rd)s;
342 %(ea_code)s;
343
344 if (fault == NoFault) {
345 fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
346 }
347
348 return fault;
349 }
350}};
351
352def template AtomicMemOpStoreInitiateAcc {{
353 Fault %(class_name)s::%(class_name)sStore::initiateAcc(
354 ExecContext *xc, Trace::InstRecord *traceData) const
355 {
356 Addr EA;
357 Fault fault = NoFault;
358
359 %(op_decl)s;
360 %(op_rd)s;
361 %(ea_code)s;
362
363 if (fault == NoFault) {
364 %(code)s;
365 }
366
367 if (fault == NoFault) {
368 fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
369 nullptr);
370 }
371
372 if (fault == NoFault) {
373 %(op_wb)s;
374 }
375
376 return fault;
377 }
378}};
379
380def template StoreCondCompleteAcc {{
381 Fault %(class_name)s::completeAcc(Packet *pkt, ExecContext *xc,
382 Trace::InstRecord *traceData) const
383 {
384 Fault fault = NoFault;
385
386 %(op_dest_decl)s;
387
388 // RISC-V has the opposite convention gem5 has for success flags,
389 // so we invert the result here.
390 uint64_t result = !pkt->req->getExtraData();
391
392 if (fault == NoFault) {
393 %(postacc_code)s;
394 }
395
396 if (fault == NoFault) {
397 %(op_wb)s;
398 }
399
400 return fault;
401 }
402}};
403
404def template AtomicMemOpLoadCompleteAcc {{
405 Fault %(class_name)s::%(class_name)sLoad::completeAcc(PacketPtr pkt,
406 ExecContext *xc, Trace::InstRecord *traceData) const
407 {
408 Fault fault = NoFault;
409
410 %(op_decl)s;
411 %(op_rd)s;
412
413 getMem(pkt, Mem, traceData);
414
415 if (fault == NoFault) {
416 %(code)s;
417 }
418
419 if (fault == NoFault) {
420 %(op_wb)s;
421 }
422
423 return fault;
424 }
425}};
426
427def template AtomicMemOpStoreCompleteAcc {{
428 Fault %(class_name)s::%(class_name)sStore::completeAcc(PacketPtr pkt,
429 ExecContext *xc, Trace::InstRecord *traceData) const
430 {
431 return NoFault;
432 }
433}};
434
435def format LoadReserved(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
436 mem_flags=[], inst_flags=[]) {{
437 mem_flags = makeList(mem_flags)
438 inst_flags = makeList(inst_flags)
439 iop = InstObjParams(name, Name, 'LoadReserved',
440 {'ea_code': ea_code, 'memacc_code': memacc_code,
441 'postacc_code': postacc_code}, inst_flags)
442 iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
443 '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'
444
445 header_output = LoadStoreDeclare.subst(iop)
446 decoder_output = LRSCConstructor.subst(iop)
447 decode_block = BasicDecode.subst(iop)
448 exec_output = LoadExecute.subst(iop) \
449 + EACompExecute.subst(iop) \
450 + LoadInitiateAcc.subst(iop) \
451 + LoadCompleteAcc.subst(iop)
452}};
453
454def format StoreCond(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
455 mem_flags=[], inst_flags=[]) {{
456 mem_flags = makeList(mem_flags)
457 inst_flags = makeList(inst_flags)
458 iop = InstObjParams(name, Name, 'StoreCond',
459 {'ea_code': ea_code, 'memacc_code': memacc_code,
460 'postacc_code': postacc_code}, inst_flags)
461 iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
462 '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'
463
464 header_output = LoadStoreDeclare.subst(iop)
465 decoder_output = LRSCConstructor.subst(iop)
466 decode_block = BasicDecode.subst(iop)
467 exec_output = StoreCondExecute.subst(iop) \
468 + EACompExecute.subst(iop) \
469 + StoreInitiateAcc.subst(iop) \
470 + StoreCondCompleteAcc.subst(iop)
471}};
472
473def format AtomicMemOp(load_code, store_code, ea_code, load_flags=[],
474 store_flags=[], inst_flags=[]) {{
475 macro_iop = InstObjParams(name, Name, 'AtomicMemOp', ea_code, inst_flags)
476 header_output = AtomicMemOpDeclare.subst(macro_iop)
477 decoder_output = AtomicMemOpMacroConstructor.subst(macro_iop)
478 decode_block = BasicDecode.subst(macro_iop)
479 exec_output = ''
480
481 load_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsLoad"]
482 load_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
483 {'ea_code': ea_code, 'code': load_code, 'op_name': 'Load'},
484 load_inst_flags)
485 decoder_output += AtomicMemOpLoadConstructor.subst(load_iop)
486 exec_output += AtomicMemOpLoadExecute.subst(load_iop) \
487 + AtomicMemOpEACompExecute.subst(load_iop) \
488 + AtomicMemOpLoadInitiateAcc.subst(load_iop) \
489 + AtomicMemOpLoadCompleteAcc.subst(load_iop)
490
491 store_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsStore"]
492 store_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
493 {'ea_code': ea_code, 'code': store_code, 'op_name': 'Store'},
494 store_inst_flags)
495 decoder_output += AtomicMemOpStoreConstructor.subst(store_iop)
496 exec_output += AtomicMemOpStoreExecute.subst(store_iop) \
497 + AtomicMemOpEACompExecute.subst(store_iop) \
498 + AtomicMemOpStoreInitiateAcc.subst(store_iop) \
499 + AtomicMemOpStoreCompleteAcc.subst(store_iop)
500}};
36def template AtomicMemOpDeclare {{
37 /**
38 * Static instruction class for an AtomicMemOp operation
39 */
40 class %(class_name)s : public %(base_class)s
41 {
42 public:
43 // Constructor
44 %(class_name)s(ExtMachInst machInst);
45
46 protected:
47
48 class %(class_name)sLoad : public %(base_class)sMicro
49 {
50 public:
51 // Constructor
52 %(class_name)sLoad(ExtMachInst machInst, %(class_name)s *_p);
53
54 Fault execute(ExecContext *, Trace::InstRecord *) const;
55 Fault eaComp(ExecContext *, Trace::InstRecord *) const;
56 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
57 Fault completeAcc(PacketPtr, ExecContext *,
58 Trace::InstRecord *) const;
59 };
60
61 class %(class_name)sStore : public %(base_class)sMicro
62 {
63 public:
64 // Constructor
65 %(class_name)sStore(ExtMachInst machInst, %(class_name)s *_p);
66
67 Fault execute(ExecContext *, Trace::InstRecord *) const;
68 Fault eaComp(ExecContext *, Trace::InstRecord *) const;
69 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
70 Fault completeAcc(PacketPtr, ExecContext *,
71 Trace::InstRecord *) const;
72 };
73 };
74}};
75
76def template LRSCConstructor {{
77 %(class_name)s::%(class_name)s(ExtMachInst machInst):
78 %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
79 {
80 %(constructor)s;
81 if (AQ)
82 memAccessFlags = memAccessFlags | Request::ACQUIRE;
83 if (RL)
84 memAccessFlags = memAccessFlags | Request::RELEASE;
85 }
86}};
87
88def template AtomicMemOpMacroConstructor {{
89 %(class_name)s::%(class_name)s(ExtMachInst machInst)
90 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
91 {
92 %(constructor)s;
93 microops = {new %(class_name)sLoad(machInst, this),
94 new %(class_name)sStore(machInst, this)};
95 }
96}};
97
98def template AtomicMemOpLoadConstructor {{
99 %(class_name)s::%(class_name)sLoad::%(class_name)sLoad(
100 ExtMachInst machInst, %(class_name)s *_p)
101 : %(base_class)s("%(mnemonic)s[l]", machInst, %(op_class)s)
102 {
103 %(constructor)s;
104 flags[IsFirstMicroop] = true;
105 flags[IsDelayedCommit] = true;
106 if (AQ)
107 memAccessFlags = Request::ACQUIRE;
108 }
109}};
110
111def template AtomicMemOpStoreConstructor {{
112 %(class_name)s::%(class_name)sStore::%(class_name)sStore(
113 ExtMachInst machInst, %(class_name)s *_p)
114 : %(base_class)s("%(mnemonic)s[s]", machInst, %(op_class)s)
115 {
116 %(constructor)s;
117 flags[IsLastMicroop] = true;
118 flags[IsNonSpeculative] = true;
119 if (RL)
120 memAccessFlags = Request::RELEASE;
121 }
122}};
123
124def template StoreCondExecute {{
125 Fault %(class_name)s::execute(ExecContext *xc,
126 Trace::InstRecord *traceData) const
127 {
128 Addr EA;
129 Fault fault = NoFault;
130 uint64_t result;
131
132 %(op_decl)s;
133 %(op_rd)s;
134 %(ea_code)s;
135
136 if (fault == NoFault) {
137 %(memacc_code)s;
138 }
139
140 if (fault == NoFault) {
141 fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
142 &result);
143 // RISC-V has the opposite convention gem5 has for success flags,
144 // so we invert the result here.
145 result = !result;
146 }
147
148 if (fault == NoFault) {
149 %(postacc_code)s;
150 }
151
152 if (fault == NoFault) {
153 %(op_wb)s;
154 }
155
156 return fault;
157 }
158}};
159
160def template AtomicMemOpLoadExecute {{
161 Fault %(class_name)s::%(class_name)sLoad::execute(ExecContext *xc,
162 Trace::InstRecord *traceData) const
163 {
164 Addr EA;
165 Fault fault = NoFault;
166
167 %(op_decl)s;
168 %(op_rd)s;
169 %(ea_code)s;
170
171 if (fault == NoFault) {
172 fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
173 }
174
175 if (fault == NoFault) {
176 %(code)s;
177 }
178
179 if (fault == NoFault) {
180 %(op_wb)s;
181 }
182
183 return fault;
184 }
185}};
186
187def template AtomicMemOpStoreExecute {{
188 Fault %(class_name)s::%(class_name)sStore::execute(ExecContext *xc,
189 Trace::InstRecord *traceData) const
190 {
191 Addr EA;
192 Fault fault = NoFault;
193
194 %(op_decl)s;
195 %(op_rd)s;
196 %(ea_code)s;
197
198 if (fault == NoFault) {
199 %(code)s;
200 }
201
202 if (fault == NoFault) {
203 fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
204 nullptr);
205 }
206
207 if (fault == NoFault) {
208 %(op_wb)s;
209 }
210
211 return fault;
212 }
213}};
214
215def template AtomicMemOpEACompExecute {{
216 Fault
217 %(class_name)s::%(class_name)s%(op_name)s::eaComp(ExecContext *xc,
218 Trace::InstRecord *traceData) const
219 {
220 Addr EA;
221 Fault fault = NoFault;
222
223 %(op_decl)s;
224 %(op_rd)s;
225 %(ea_code)s;
226
227 if (fault == NoFault) {
228 %(op_wb)s;
229 xc->setEA(EA);
230 }
231
232 return fault;
233 }
234}};
235
236def template AtomicMemOpLoadInitiateAcc {{
237 Fault %(class_name)s::%(class_name)sLoad::initiateAcc(ExecContext *xc,
238 Trace::InstRecord *traceData) const
239 {
240 Addr EA;
241 Fault fault = NoFault;
242
243 %(op_src_decl)s;
244 %(op_rd)s;
245 %(ea_code)s;
246
247 if (fault == NoFault) {
248 fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
249 }
250
251 return fault;
252 }
253}};
254
255def template AtomicMemOpStoreInitiateAcc {{
256 Fault %(class_name)s::%(class_name)sStore::initiateAcc(
257 ExecContext *xc, Trace::InstRecord *traceData) const
258 {
259 Addr EA;
260 Fault fault = NoFault;
261
262 %(op_decl)s;
263 %(op_rd)s;
264 %(ea_code)s;
265
266 if (fault == NoFault) {
267 %(code)s;
268 }
269
270 if (fault == NoFault) {
271 fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
272 nullptr);
273 }
274
275 if (fault == NoFault) {
276 %(op_wb)s;
277 }
278
279 return fault;
280 }
281}};
282
283def template StoreCondCompleteAcc {{
284 Fault %(class_name)s::completeAcc(Packet *pkt, ExecContext *xc,
285 Trace::InstRecord *traceData) const
286 {
287 Fault fault = NoFault;
288
289 %(op_dest_decl)s;
290
291 // RISC-V has the opposite convention gem5 has for success flags,
292 // so we invert the result here.
293 uint64_t result = !pkt->req->getExtraData();
294
295 if (fault == NoFault) {
296 %(postacc_code)s;
297 }
298
299 if (fault == NoFault) {
300 %(op_wb)s;
301 }
302
303 return fault;
304 }
305}};
306
307def template AtomicMemOpLoadCompleteAcc {{
308 Fault %(class_name)s::%(class_name)sLoad::completeAcc(PacketPtr pkt,
309 ExecContext *xc, Trace::InstRecord *traceData) const
310 {
311 Fault fault = NoFault;
312
313 %(op_decl)s;
314 %(op_rd)s;
315
316 getMem(pkt, Mem, traceData);
317
318 if (fault == NoFault) {
319 %(code)s;
320 }
321
322 if (fault == NoFault) {
323 %(op_wb)s;
324 }
325
326 return fault;
327 }
328}};
329
330def template AtomicMemOpStoreCompleteAcc {{
331 Fault %(class_name)s::%(class_name)sStore::completeAcc(PacketPtr pkt,
332 ExecContext *xc, Trace::InstRecord *traceData) const
333 {
334 return NoFault;
335 }
336}};
337
338def format LoadReserved(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
339 mem_flags=[], inst_flags=[]) {{
340 mem_flags = makeList(mem_flags)
341 inst_flags = makeList(inst_flags)
342 iop = InstObjParams(name, Name, 'LoadReserved',
343 {'ea_code': ea_code, 'memacc_code': memacc_code,
344 'postacc_code': postacc_code}, inst_flags)
345 iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
346 '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'
347
348 header_output = LoadStoreDeclare.subst(iop)
349 decoder_output = LRSCConstructor.subst(iop)
350 decode_block = BasicDecode.subst(iop)
351 exec_output = LoadExecute.subst(iop) \
352 + EACompExecute.subst(iop) \
353 + LoadInitiateAcc.subst(iop) \
354 + LoadCompleteAcc.subst(iop)
355}};
356
357def format StoreCond(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
358 mem_flags=[], inst_flags=[]) {{
359 mem_flags = makeList(mem_flags)
360 inst_flags = makeList(inst_flags)
361 iop = InstObjParams(name, Name, 'StoreCond',
362 {'ea_code': ea_code, 'memacc_code': memacc_code,
363 'postacc_code': postacc_code}, inst_flags)
364 iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
365 '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'
366
367 header_output = LoadStoreDeclare.subst(iop)
368 decoder_output = LRSCConstructor.subst(iop)
369 decode_block = BasicDecode.subst(iop)
370 exec_output = StoreCondExecute.subst(iop) \
371 + EACompExecute.subst(iop) \
372 + StoreInitiateAcc.subst(iop) \
373 + StoreCondCompleteAcc.subst(iop)
374}};
375
376def format AtomicMemOp(load_code, store_code, ea_code, load_flags=[],
377 store_flags=[], inst_flags=[]) {{
378 macro_iop = InstObjParams(name, Name, 'AtomicMemOp', ea_code, inst_flags)
379 header_output = AtomicMemOpDeclare.subst(macro_iop)
380 decoder_output = AtomicMemOpMacroConstructor.subst(macro_iop)
381 decode_block = BasicDecode.subst(macro_iop)
382 exec_output = ''
383
384 load_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsLoad"]
385 load_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
386 {'ea_code': ea_code, 'code': load_code, 'op_name': 'Load'},
387 load_inst_flags)
388 decoder_output += AtomicMemOpLoadConstructor.subst(load_iop)
389 exec_output += AtomicMemOpLoadExecute.subst(load_iop) \
390 + AtomicMemOpEACompExecute.subst(load_iop) \
391 + AtomicMemOpLoadInitiateAcc.subst(load_iop) \
392 + AtomicMemOpLoadCompleteAcc.subst(load_iop)
393
394 store_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsStore"]
395 store_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
396 {'ea_code': ea_code, 'code': store_code, 'op_name': 'Store'},
397 store_inst_flags)
398 decoder_output += AtomicMemOpStoreConstructor.subst(store_iop)
399 exec_output += AtomicMemOpStoreExecute.subst(store_iop) \
400 + AtomicMemOpEACompExecute.subst(store_iop) \
401 + AtomicMemOpStoreInitiateAcc.subst(store_iop) \
402 + AtomicMemOpStoreCompleteAcc.subst(store_iop)
403}};