// amo.isa — interleaved diff of revisions 12234:78ece221f9f5 and 12236:126ac9da6050
1// -*- mode:c++ -*-
2
3// Copyright (c) 2015 Riscv Developers
4// Copyright (c) 2016 The University of Virginia
5// All rights reserved.
6//
7// Redistribution and use in source and binary forms, with or without
8// modification, are permitted provided that the following conditions are
9// met: redistributions of source code must retain the above copyright
10// notice, this list of conditions and the following disclaimer;
11// redistributions in binary form must reproduce the above copyright
12// notice, this list of conditions and the following disclaimer in the
13// documentation and/or other materials provided with the distribution;
14// neither the name of the copyright holders nor the names of its
15// contributors may be used to endorse or promote products derived from
16// this software without specific prior written permission.
17//
18// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
21// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
22// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
23// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
24// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29//
30// Authors: Alec Roelke
31
32////////////////////////////////////////////////////////////////////
33//
34// Atomic memory operation instructions
35//
36output header {{
37 class LoadReserved : public RiscvStaticInst
38 {
39 protected:
40 Request::Flags memAccessFlags;
41
42 LoadReserved(const char *mnem, ExtMachInst _machInst,
43 OpClass __opClass)
44 : RiscvStaticInst(mnem, _machInst, __opClass)
45 {}
46
47 std::string
48 generateDisassembly(Addr pc, const SymbolTable *symtab) const;
49 };
50
51 class StoreCond : public RiscvStaticInst
52 {
53 protected:
54 Request::Flags memAccessFlags;
55
56 StoreCond(const char* mnem, ExtMachInst _machInst, OpClass __opClass)
57 : RiscvStaticInst(mnem, _machInst, __opClass)
58 {}
59
60 std::string
61 generateDisassembly(Addr pc, const SymbolTable *symtab) const;
62 };
63
64 class AtomicMemOp : public RiscvMacroInst
65 {
66 protected:
67 /// Constructor
68 // Each AtomicMemOp has a load and a store phase
69 AtomicMemOp(const char *mnem, ExtMachInst _machInst, OpClass __opClass)
70 : RiscvMacroInst(mnem, _machInst, __opClass)
71 {}
72
73 std::string generateDisassembly(Addr pc,
74 const SymbolTable *symtab) const;
75 };
76
77 class AtomicMemOpMicro : public RiscvMicroInst
78 {
79 protected:
80 /// Memory request flags. See mem/request.hh.
81 Request::Flags memAccessFlags;
82
83 /// Constructor
84 AtomicMemOpMicro(const char *mnem, ExtMachInst _machInst,
85 OpClass __opClass)
86 : RiscvMicroInst(mnem, _machInst, __opClass)
87 {}
88
89 std::string generateDisassembly(Addr pc,
90 const SymbolTable *symtab) const;
91 };
92}};
93
94output decoder {{
95 std::string LoadReserved::generateDisassembly(Addr pc,
96 const SymbolTable *symtab) const
97 {
98 std::stringstream ss;
99 ss << mnemonic << ' ' << registerName(_destRegIdx[0]) << ", ("
100 << registerName(_srcRegIdx[0]) << ')';
101 return ss.str();
102 }
103
104 std::string StoreCond::generateDisassembly(Addr pc,
105 const SymbolTable *symtab) const
106 {
107 std::stringstream ss;
108 ss << mnemonic << ' ' << registerName(_destRegIdx[0]) << ", "
109 << registerName(_srcRegIdx[1]) << ", ("
110 << registerName(_srcRegIdx[0]) << ')';
111 return ss.str();
112 }
113
114 std::string AtomicMemOp::generateDisassembly(Addr pc,
115 const SymbolTable *symtab) const
116 {
117 std::stringstream ss;
118 ss << mnemonic << ' ' << registerName(_destRegIdx[0]) << ", "
119 << registerName(_srcRegIdx[1]) << ", ("
120 << registerName(_srcRegIdx[0]) << ')';
121 return ss.str();
122 }
123
124 std::string AtomicMemOpMicro::generateDisassembly(Addr pc,
125 const SymbolTable *symtab) const
126 {
127 std::stringstream ss;
128 ss << csprintf("0x%08x", machInst) << ' ' << mnemonic;
129 return ss.str();
130 }
131}};
132
133def template AtomicMemOpDeclare {{
134 /**
135 * Static instruction class for an AtomicMemOp operation
136 */
137 class %(class_name)s : public %(base_class)s
138 {
139 public:
140 // Constructor
141 %(class_name)s(ExtMachInst machInst);
142
143 protected:
144
145 class %(class_name)sLoad : public %(base_class)sMicro
146 {
147 public:
148 // Constructor
149 %(class_name)sLoad(ExtMachInst machInst, %(class_name)s *_p);
150
1// -*- mode:c++ -*-
2
3// Copyright (c) 2015 Riscv Developers
4// Copyright (c) 2016 The University of Virginia
5// All rights reserved.
6//
7// Redistribution and use in source and binary forms, with or without
8// modification, are permitted provided that the following conditions are
9// met: redistributions of source code must retain the above copyright
10// notice, this list of conditions and the following disclaimer;
11// redistributions in binary form must reproduce the above copyright
12// notice, this list of conditions and the following disclaimer in the
13// documentation and/or other materials provided with the distribution;
14// neither the name of the copyright holders nor the names of its
15// contributors may be used to endorse or promote products derived from
16// this software without specific prior written permission.
17//
18// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
19// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
20// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
21// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
22// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
23// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
24// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
25// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
26// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
27// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
28// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
29//
30// Authors: Alec Roelke
31
32////////////////////////////////////////////////////////////////////
33//
34// Atomic memory operation instructions
35//
36output header {{
37 class LoadReserved : public RiscvStaticInst
38 {
39 protected:
40 Request::Flags memAccessFlags;
41
42 LoadReserved(const char *mnem, ExtMachInst _machInst,
43 OpClass __opClass)
44 : RiscvStaticInst(mnem, _machInst, __opClass)
45 {}
46
47 std::string
48 generateDisassembly(Addr pc, const SymbolTable *symtab) const;
49 };
50
51 class StoreCond : public RiscvStaticInst
52 {
53 protected:
54 Request::Flags memAccessFlags;
55
56 StoreCond(const char* mnem, ExtMachInst _machInst, OpClass __opClass)
57 : RiscvStaticInst(mnem, _machInst, __opClass)
58 {}
59
60 std::string
61 generateDisassembly(Addr pc, const SymbolTable *symtab) const;
62 };
63
64 class AtomicMemOp : public RiscvMacroInst
65 {
66 protected:
67 /// Constructor
68 // Each AtomicMemOp has a load and a store phase
69 AtomicMemOp(const char *mnem, ExtMachInst _machInst, OpClass __opClass)
70 : RiscvMacroInst(mnem, _machInst, __opClass)
71 {}
72
73 std::string generateDisassembly(Addr pc,
74 const SymbolTable *symtab) const;
75 };
76
77 class AtomicMemOpMicro : public RiscvMicroInst
78 {
79 protected:
80 /// Memory request flags. See mem/request.hh.
81 Request::Flags memAccessFlags;
82
83 /// Constructor
84 AtomicMemOpMicro(const char *mnem, ExtMachInst _machInst,
85 OpClass __opClass)
86 : RiscvMicroInst(mnem, _machInst, __opClass)
87 {}
88
89 std::string generateDisassembly(Addr pc,
90 const SymbolTable *symtab) const;
91 };
92}};
93
94output decoder {{
95 std::string LoadReserved::generateDisassembly(Addr pc,
96 const SymbolTable *symtab) const
97 {
98 std::stringstream ss;
99 ss << mnemonic << ' ' << registerName(_destRegIdx[0]) << ", ("
100 << registerName(_srcRegIdx[0]) << ')';
101 return ss.str();
102 }
103
104 std::string StoreCond::generateDisassembly(Addr pc,
105 const SymbolTable *symtab) const
106 {
107 std::stringstream ss;
108 ss << mnemonic << ' ' << registerName(_destRegIdx[0]) << ", "
109 << registerName(_srcRegIdx[1]) << ", ("
110 << registerName(_srcRegIdx[0]) << ')';
111 return ss.str();
112 }
113
114 std::string AtomicMemOp::generateDisassembly(Addr pc,
115 const SymbolTable *symtab) const
116 {
117 std::stringstream ss;
118 ss << mnemonic << ' ' << registerName(_destRegIdx[0]) << ", "
119 << registerName(_srcRegIdx[1]) << ", ("
120 << registerName(_srcRegIdx[0]) << ')';
121 return ss.str();
122 }
123
124 std::string AtomicMemOpMicro::generateDisassembly(Addr pc,
125 const SymbolTable *symtab) const
126 {
127 std::stringstream ss;
128 ss << csprintf("0x%08x", machInst) << ' ' << mnemonic;
129 return ss.str();
130 }
131}};
132
133def template AtomicMemOpDeclare {{
134 /**
135 * Static instruction class for an AtomicMemOp operation
136 */
137 class %(class_name)s : public %(base_class)s
138 {
139 public:
140 // Constructor
141 %(class_name)s(ExtMachInst machInst);
142
143 protected:
144
145 class %(class_name)sLoad : public %(base_class)sMicro
146 {
147 public:
148 // Constructor
149 %(class_name)sLoad(ExtMachInst machInst, %(class_name)s *_p);
150
151 %(BasicExecDeclare)s
152
153 %(EACompDeclare)s
154
155 %(InitiateAccDeclare)s
156
157 %(CompleteAccDeclare)s
151 Fault execute(ExecContext *, Trace::InstRecord *) const;
152 Fault eaComp(ExecContext *, Trace::InstRecord *) const;
153 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
154 Fault completeAcc(PacketPtr, ExecContext *,
155 Trace::InstRecord *) const;
158 };
159
160 class %(class_name)sStore : public %(base_class)sMicro
161 {
162 public:
163 // Constructor
164 %(class_name)sStore(ExtMachInst machInst, %(class_name)s *_p);
165
156 };
157
158 class %(class_name)sStore : public %(base_class)sMicro
159 {
160 public:
161 // Constructor
162 %(class_name)sStore(ExtMachInst machInst, %(class_name)s *_p);
163
166 %(BasicExecDeclare)s
167
168 %(EACompDeclare)s
169
170 %(InitiateAccDeclare)s
171
172 %(CompleteAccDeclare)s
164 Fault execute(ExecContext *, Trace::InstRecord *) const;
165 Fault eaComp(ExecContext *, Trace::InstRecord *) const;
166 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
167 Fault completeAcc(PacketPtr, ExecContext *,
168 Trace::InstRecord *) const;
173 };
174 };
175}};
176
177def template LRSCConstructor {{
178 %(class_name)s::%(class_name)s(ExtMachInst machInst):
179 %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
180 {
181 %(constructor)s;
182 if (AQ)
183 memAccessFlags = memAccessFlags | Request::ACQUIRE;
184 if (RL)
185 memAccessFlags = memAccessFlags | Request::RELEASE;
186 }
187}};
188
189def template AtomicMemOpMacroConstructor {{
190 %(class_name)s::%(class_name)s(ExtMachInst machInst)
191 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
192 {
193 %(constructor)s;
194 microops = {new %(class_name)sLoad(machInst, this),
195 new %(class_name)sStore(machInst, this)};
196 }
197}};
198
199def template AtomicMemOpLoadConstructor {{
200 %(class_name)s::%(class_name)sLoad::%(class_name)sLoad(
201 ExtMachInst machInst, %(class_name)s *_p)
202 : %(base_class)s("%(mnemonic)s[l]", machInst, %(op_class)s)
203 {
204 %(constructor)s;
205 flags[IsFirstMicroop] = true;
206 flags[IsDelayedCommit] = true;
207 if (AQ)
208 memAccessFlags = Request::ACQUIRE;
209 }
210}};
211
212def template AtomicMemOpStoreConstructor {{
213 %(class_name)s::%(class_name)sStore::%(class_name)sStore(
214 ExtMachInst machInst, %(class_name)s *_p)
215 : %(base_class)s("%(mnemonic)s[s]", machInst, %(op_class)s)
216 {
217 %(constructor)s;
218 flags[IsLastMicroop] = true;
219 flags[IsNonSpeculative] = true;
220 if (RL)
221 memAccessFlags = Request::RELEASE;
222 }
223}};
224
225def template StoreCondExecute {{
226 Fault %(class_name)s::execute(ExecContext *xc,
227 Trace::InstRecord *traceData) const
228 {
229 Addr EA;
230 Fault fault = NoFault;
231 uint64_t result;
232
233 %(op_decl)s;
234 %(op_rd)s;
235 %(ea_code)s;
236
237 if (fault == NoFault) {
238 %(memacc_code)s;
239 }
240
241 if (fault == NoFault) {
242 fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
243 &result);
244 // RISC-V has the opposite convention gem5 has for success flags,
245 // so we invert the result here.
246 result = !result;
247 }
248
249 if (fault == NoFault) {
250 %(postacc_code)s;
251 }
252
253 if (fault == NoFault) {
254 %(op_wb)s;
255 }
256
257 return fault;
258 }
259}};
260
261def template AtomicMemOpLoadExecute {{
262 Fault %(class_name)s::%(class_name)sLoad::execute(ExecContext *xc,
263 Trace::InstRecord *traceData) const
264 {
265 Addr EA;
266 Fault fault = NoFault;
267
268 %(op_decl)s;
269 %(op_rd)s;
270 %(ea_code)s;
271
272 if (fault == NoFault) {
273 fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
274 }
275
276 if (fault == NoFault) {
277 %(code)s;
278 }
279
280 if (fault == NoFault) {
281 %(op_wb)s;
282 }
283
284 return fault;
285 }
286}};
287
288def template AtomicMemOpStoreExecute {{
289 Fault %(class_name)s::%(class_name)sStore::execute(ExecContext *xc,
290 Trace::InstRecord *traceData) const
291 {
292 Addr EA;
293 Fault fault = NoFault;
294
295 %(op_decl)s;
296 %(op_rd)s;
297 %(ea_code)s;
298
299 if (fault == NoFault) {
300 %(code)s;
301 }
302
303 if (fault == NoFault) {
304 fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
305 nullptr);
306 }
307
308 if (fault == NoFault) {
309 %(op_wb)s;
310 }
311
312 return fault;
313 }
314}};
315
316def template AtomicMemOpEACompExecute {{
317 Fault
318 %(class_name)s::%(class_name)s%(op_name)s::eaComp(ExecContext *xc,
319 Trace::InstRecord *traceData) const
320 {
321 Addr EA;
322 Fault fault = NoFault;
323
324 %(op_decl)s;
325 %(op_rd)s;
326 %(ea_code)s;
327
328 if (fault == NoFault) {
329 %(op_wb)s;
330 xc->setEA(EA);
331 }
332
333 return fault;
334 }
335}};
336
337def template AtomicMemOpLoadInitiateAcc {{
338 Fault %(class_name)s::%(class_name)sLoad::initiateAcc(ExecContext *xc,
339 Trace::InstRecord *traceData) const
340 {
341 Addr EA;
342 Fault fault = NoFault;
343
344 %(op_src_decl)s;
345 %(op_rd)s;
346 %(ea_code)s;
347
348 if (fault == NoFault) {
349 fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
350 }
351
352 return fault;
353 }
354}};
355
356def template AtomicMemOpStoreInitiateAcc {{
357 Fault %(class_name)s::%(class_name)sStore::initiateAcc(
358 ExecContext *xc, Trace::InstRecord *traceData) const
359 {
360 Addr EA;
361 Fault fault = NoFault;
362
363 %(op_decl)s;
364 %(op_rd)s;
365 %(ea_code)s;
366
367 if (fault == NoFault) {
368 %(code)s;
369 }
370
371 if (fault == NoFault) {
372 fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
373 nullptr);
374 }
375
376 if (fault == NoFault) {
377 %(op_wb)s;
378 }
379
380 return fault;
381 }
382}};
383
384def template StoreCondCompleteAcc {{
385 Fault %(class_name)s::completeAcc(Packet *pkt, ExecContext *xc,
386 Trace::InstRecord *traceData) const
387 {
388 Fault fault = NoFault;
389
390 %(op_dest_decl)s;
391
392 // RISC-V has the opposite convention gem5 has for success flags,
393 // so we invert the result here.
394 uint64_t result = !pkt->req->getExtraData();
395
396 if (fault == NoFault) {
397 %(postacc_code)s;
398 }
399
400 if (fault == NoFault) {
401 %(op_wb)s;
402 }
403
404 return fault;
405 }
406}};
407
408def template AtomicMemOpLoadCompleteAcc {{
409 Fault %(class_name)s::%(class_name)sLoad::completeAcc(PacketPtr pkt,
410 ExecContext *xc, Trace::InstRecord *traceData) const
411 {
412 Fault fault = NoFault;
413
414 %(op_decl)s;
415 %(op_rd)s;
416
417 getMem(pkt, Mem, traceData);
418
419 if (fault == NoFault) {
420 %(code)s;
421 }
422
423 if (fault == NoFault) {
424 %(op_wb)s;
425 }
426
427 return fault;
428 }
429}};
430
431def template AtomicMemOpStoreCompleteAcc {{
432 Fault %(class_name)s::%(class_name)sStore::completeAcc(PacketPtr pkt,
433 ExecContext *xc, Trace::InstRecord *traceData) const
434 {
435 return NoFault;
436 }
437}};
438
439def format LoadReserved(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
440 mem_flags=[], inst_flags=[]) {{
441 mem_flags = makeList(mem_flags)
442 inst_flags = makeList(inst_flags)
443 iop = InstObjParams(name, Name, 'LoadReserved',
444 {'ea_code': ea_code, 'memacc_code': memacc_code,
445 'postacc_code': postacc_code}, inst_flags)
446 iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
447 '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'
448
449 header_output = LoadStoreDeclare.subst(iop)
450 decoder_output = LRSCConstructor.subst(iop)
451 decode_block = BasicDecode.subst(iop)
452 exec_output = LoadExecute.subst(iop) \
453 + EACompExecute.subst(iop) \
454 + LoadInitiateAcc.subst(iop) \
455 + LoadCompleteAcc.subst(iop)
456}};
457
458def format StoreCond(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
459 mem_flags=[], inst_flags=[]) {{
460 mem_flags = makeList(mem_flags)
461 inst_flags = makeList(inst_flags)
462 iop = InstObjParams(name, Name, 'StoreCond',
463 {'ea_code': ea_code, 'memacc_code': memacc_code,
464 'postacc_code': postacc_code}, inst_flags)
465 iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
466 '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'
467
468 header_output = LoadStoreDeclare.subst(iop)
469 decoder_output = LRSCConstructor.subst(iop)
470 decode_block = BasicDecode.subst(iop)
471 exec_output = StoreCondExecute.subst(iop) \
472 + EACompExecute.subst(iop) \
473 + StoreInitiateAcc.subst(iop) \
474 + StoreCondCompleteAcc.subst(iop)
475}};
476
477def format AtomicMemOp(load_code, store_code, ea_code, load_flags=[],
478 store_flags=[], inst_flags=[]) {{
479 macro_iop = InstObjParams(name, Name, 'AtomicMemOp', ea_code, inst_flags)
480 header_output = AtomicMemOpDeclare.subst(macro_iop)
481 decoder_output = AtomicMemOpMacroConstructor.subst(macro_iop)
482 decode_block = BasicDecode.subst(macro_iop)
483 exec_output = ''
484
485 load_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsLoad"]
486 load_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
487 {'ea_code': ea_code, 'code': load_code, 'op_name': 'Load'},
488 load_inst_flags)
489 decoder_output += AtomicMemOpLoadConstructor.subst(load_iop)
490 exec_output += AtomicMemOpLoadExecute.subst(load_iop) \
491 + AtomicMemOpEACompExecute.subst(load_iop) \
492 + AtomicMemOpLoadInitiateAcc.subst(load_iop) \
493 + AtomicMemOpLoadCompleteAcc.subst(load_iop)
494
495 store_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsStore"]
496 store_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
497 {'ea_code': ea_code, 'code': store_code, 'op_name': 'Store'},
498 store_inst_flags)
499 decoder_output += AtomicMemOpStoreConstructor.subst(store_iop)
500 exec_output += AtomicMemOpStoreExecute.subst(store_iop) \
501 + AtomicMemOpEACompExecute.subst(store_iop) \
502 + AtomicMemOpStoreInitiateAcc.subst(store_iop) \
503 + AtomicMemOpStoreCompleteAcc.subst(store_iop)
504}};
169 };
170 };
171}};
172
173def template LRSCConstructor {{
174 %(class_name)s::%(class_name)s(ExtMachInst machInst):
175 %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
176 {
177 %(constructor)s;
178 if (AQ)
179 memAccessFlags = memAccessFlags | Request::ACQUIRE;
180 if (RL)
181 memAccessFlags = memAccessFlags | Request::RELEASE;
182 }
183}};
184
185def template AtomicMemOpMacroConstructor {{
186 %(class_name)s::%(class_name)s(ExtMachInst machInst)
187 : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
188 {
189 %(constructor)s;
190 microops = {new %(class_name)sLoad(machInst, this),
191 new %(class_name)sStore(machInst, this)};
192 }
193}};
194
195def template AtomicMemOpLoadConstructor {{
196 %(class_name)s::%(class_name)sLoad::%(class_name)sLoad(
197 ExtMachInst machInst, %(class_name)s *_p)
198 : %(base_class)s("%(mnemonic)s[l]", machInst, %(op_class)s)
199 {
200 %(constructor)s;
201 flags[IsFirstMicroop] = true;
202 flags[IsDelayedCommit] = true;
203 if (AQ)
204 memAccessFlags = Request::ACQUIRE;
205 }
206}};
207
208def template AtomicMemOpStoreConstructor {{
209 %(class_name)s::%(class_name)sStore::%(class_name)sStore(
210 ExtMachInst machInst, %(class_name)s *_p)
211 : %(base_class)s("%(mnemonic)s[s]", machInst, %(op_class)s)
212 {
213 %(constructor)s;
214 flags[IsLastMicroop] = true;
215 flags[IsNonSpeculative] = true;
216 if (RL)
217 memAccessFlags = Request::RELEASE;
218 }
219}};
220
221def template StoreCondExecute {{
222 Fault %(class_name)s::execute(ExecContext *xc,
223 Trace::InstRecord *traceData) const
224 {
225 Addr EA;
226 Fault fault = NoFault;
227 uint64_t result;
228
229 %(op_decl)s;
230 %(op_rd)s;
231 %(ea_code)s;
232
233 if (fault == NoFault) {
234 %(memacc_code)s;
235 }
236
237 if (fault == NoFault) {
238 fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
239 &result);
240 // RISC-V has the opposite convention gem5 has for success flags,
241 // so we invert the result here.
242 result = !result;
243 }
244
245 if (fault == NoFault) {
246 %(postacc_code)s;
247 }
248
249 if (fault == NoFault) {
250 %(op_wb)s;
251 }
252
253 return fault;
254 }
255}};
256
257def template AtomicMemOpLoadExecute {{
258 Fault %(class_name)s::%(class_name)sLoad::execute(ExecContext *xc,
259 Trace::InstRecord *traceData) const
260 {
261 Addr EA;
262 Fault fault = NoFault;
263
264 %(op_decl)s;
265 %(op_rd)s;
266 %(ea_code)s;
267
268 if (fault == NoFault) {
269 fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
270 }
271
272 if (fault == NoFault) {
273 %(code)s;
274 }
275
276 if (fault == NoFault) {
277 %(op_wb)s;
278 }
279
280 return fault;
281 }
282}};
283
284def template AtomicMemOpStoreExecute {{
285 Fault %(class_name)s::%(class_name)sStore::execute(ExecContext *xc,
286 Trace::InstRecord *traceData) const
287 {
288 Addr EA;
289 Fault fault = NoFault;
290
291 %(op_decl)s;
292 %(op_rd)s;
293 %(ea_code)s;
294
295 if (fault == NoFault) {
296 %(code)s;
297 }
298
299 if (fault == NoFault) {
300 fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
301 nullptr);
302 }
303
304 if (fault == NoFault) {
305 %(op_wb)s;
306 }
307
308 return fault;
309 }
310}};
311
312def template AtomicMemOpEACompExecute {{
313 Fault
314 %(class_name)s::%(class_name)s%(op_name)s::eaComp(ExecContext *xc,
315 Trace::InstRecord *traceData) const
316 {
317 Addr EA;
318 Fault fault = NoFault;
319
320 %(op_decl)s;
321 %(op_rd)s;
322 %(ea_code)s;
323
324 if (fault == NoFault) {
325 %(op_wb)s;
326 xc->setEA(EA);
327 }
328
329 return fault;
330 }
331}};
332
333def template AtomicMemOpLoadInitiateAcc {{
334 Fault %(class_name)s::%(class_name)sLoad::initiateAcc(ExecContext *xc,
335 Trace::InstRecord *traceData) const
336 {
337 Addr EA;
338 Fault fault = NoFault;
339
340 %(op_src_decl)s;
341 %(op_rd)s;
342 %(ea_code)s;
343
344 if (fault == NoFault) {
345 fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
346 }
347
348 return fault;
349 }
350}};
351
352def template AtomicMemOpStoreInitiateAcc {{
353 Fault %(class_name)s::%(class_name)sStore::initiateAcc(
354 ExecContext *xc, Trace::InstRecord *traceData) const
355 {
356 Addr EA;
357 Fault fault = NoFault;
358
359 %(op_decl)s;
360 %(op_rd)s;
361 %(ea_code)s;
362
363 if (fault == NoFault) {
364 %(code)s;
365 }
366
367 if (fault == NoFault) {
368 fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
369 nullptr);
370 }
371
372 if (fault == NoFault) {
373 %(op_wb)s;
374 }
375
376 return fault;
377 }
378}};
379
380def template StoreCondCompleteAcc {{
381 Fault %(class_name)s::completeAcc(Packet *pkt, ExecContext *xc,
382 Trace::InstRecord *traceData) const
383 {
384 Fault fault = NoFault;
385
386 %(op_dest_decl)s;
387
388 // RISC-V has the opposite convention gem5 has for success flags,
389 // so we invert the result here.
390 uint64_t result = !pkt->req->getExtraData();
391
392 if (fault == NoFault) {
393 %(postacc_code)s;
394 }
395
396 if (fault == NoFault) {
397 %(op_wb)s;
398 }
399
400 return fault;
401 }
402}};
403
404def template AtomicMemOpLoadCompleteAcc {{
405 Fault %(class_name)s::%(class_name)sLoad::completeAcc(PacketPtr pkt,
406 ExecContext *xc, Trace::InstRecord *traceData) const
407 {
408 Fault fault = NoFault;
409
410 %(op_decl)s;
411 %(op_rd)s;
412
413 getMem(pkt, Mem, traceData);
414
415 if (fault == NoFault) {
416 %(code)s;
417 }
418
419 if (fault == NoFault) {
420 %(op_wb)s;
421 }
422
423 return fault;
424 }
425}};
426
427def template AtomicMemOpStoreCompleteAcc {{
428 Fault %(class_name)s::%(class_name)sStore::completeAcc(PacketPtr pkt,
429 ExecContext *xc, Trace::InstRecord *traceData) const
430 {
431 return NoFault;
432 }
433}};
434
435def format LoadReserved(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
436 mem_flags=[], inst_flags=[]) {{
437 mem_flags = makeList(mem_flags)
438 inst_flags = makeList(inst_flags)
439 iop = InstObjParams(name, Name, 'LoadReserved',
440 {'ea_code': ea_code, 'memacc_code': memacc_code,
441 'postacc_code': postacc_code}, inst_flags)
442 iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
443 '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'
444
445 header_output = LoadStoreDeclare.subst(iop)
446 decoder_output = LRSCConstructor.subst(iop)
447 decode_block = BasicDecode.subst(iop)
448 exec_output = LoadExecute.subst(iop) \
449 + EACompExecute.subst(iop) \
450 + LoadInitiateAcc.subst(iop) \
451 + LoadCompleteAcc.subst(iop)
452}};
453
454def format StoreCond(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
455 mem_flags=[], inst_flags=[]) {{
456 mem_flags = makeList(mem_flags)
457 inst_flags = makeList(inst_flags)
458 iop = InstObjParams(name, Name, 'StoreCond',
459 {'ea_code': ea_code, 'memacc_code': memacc_code,
460 'postacc_code': postacc_code}, inst_flags)
461 iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
462 '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'
463
464 header_output = LoadStoreDeclare.subst(iop)
465 decoder_output = LRSCConstructor.subst(iop)
466 decode_block = BasicDecode.subst(iop)
467 exec_output = StoreCondExecute.subst(iop) \
468 + EACompExecute.subst(iop) \
469 + StoreInitiateAcc.subst(iop) \
470 + StoreCondCompleteAcc.subst(iop)
471}};
472
473def format AtomicMemOp(load_code, store_code, ea_code, load_flags=[],
474 store_flags=[], inst_flags=[]) {{
475 macro_iop = InstObjParams(name, Name, 'AtomicMemOp', ea_code, inst_flags)
476 header_output = AtomicMemOpDeclare.subst(macro_iop)
477 decoder_output = AtomicMemOpMacroConstructor.subst(macro_iop)
478 decode_block = BasicDecode.subst(macro_iop)
479 exec_output = ''
480
481 load_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsLoad"]
482 load_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
483 {'ea_code': ea_code, 'code': load_code, 'op_name': 'Load'},
484 load_inst_flags)
485 decoder_output += AtomicMemOpLoadConstructor.subst(load_iop)
486 exec_output += AtomicMemOpLoadExecute.subst(load_iop) \
487 + AtomicMemOpEACompExecute.subst(load_iop) \
488 + AtomicMemOpLoadInitiateAcc.subst(load_iop) \
489 + AtomicMemOpLoadCompleteAcc.subst(load_iop)
490
491 store_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsStore"]
492 store_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
493 {'ea_code': ea_code, 'code': store_code, 'op_name': 'Store'},
494 store_inst_flags)
495 decoder_output += AtomicMemOpStoreConstructor.subst(store_iop)
496 exec_output += AtomicMemOpStoreExecute.subst(store_iop) \
497 + AtomicMemOpEACompExecute.subst(store_iop) \
498 + AtomicMemOpStoreInitiateAcc.subst(store_iop) \
499 + AtomicMemOpStoreCompleteAcc.subst(store_iop)
500}};