// amo.isa — RISC-V atomic memory operation (AMO / LR / SC) instruction definitions
// -*- mode:c++ -*-

// Copyright (c) 2015 Riscv Developers
// Copyright (c) 2016 The University of Virginia
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Alec Roelke

////////////////////////////////////////////////////////////////////
//
// Atomic memory operation instructions
//
// Class declaration template for an AMO macroop and its two microops.
// An AMO expands into a load microop (which reads the old memory
// value) and a store microop (which writes the modified value back).
// The microop methods are marked 'override' because they reimplement
// virtuals declared by the micro base class.
def template AtomicMemOpDeclare {{
    /**
     * Static instruction class for an AtomicMemOp operation
     */
    class %(class_name)s : public %(base_class)s
    {
      public:
        // Constructor
        %(class_name)s(ExtMachInst machInst);

      protected:

        class %(class_name)sLoad : public %(base_class)sMicro
        {
          public:
            // Constructor
            %(class_name)sLoad(ExtMachInst machInst, %(class_name)s *_p);

            Fault execute(ExecContext *, Trace::InstRecord *) const override;
            Fault initiateAcc(ExecContext *,
                Trace::InstRecord *) const override;
            Fault completeAcc(PacketPtr, ExecContext *,
                Trace::InstRecord *) const override;
        };

        class %(class_name)sStore : public %(base_class)sMicro
        {
          public:
            // Constructor
            %(class_name)sStore(ExtMachInst machInst, %(class_name)s *_p);

            Fault execute(ExecContext *, Trace::InstRecord *) const override;
            Fault initiateAcc(ExecContext *,
                Trace::InstRecord *) const override;
            Fault completeAcc(PacketPtr, ExecContext *,
                Trace::InstRecord *) const override;
        };
    };
}};

// Constructor template for LR and SC instructions: applies the AQ
// (acquire) and RL (release) bits of the encoding as memory request
// flags.
def template LRSCConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst):
        %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;
        if (AQ)
            memAccessFlags = memAccessFlags | Request::ACQUIRE;
        if (RL)
            memAccessFlags = memAccessFlags | Request::RELEASE;
    }
}};

// Constructor template for the AMO macroop: builds the load and store
// microops that together implement the atomic read-modify-write.
def template AtomicMemOpMacroConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;
        microops = {new %(class_name)sLoad(machInst, this),
            new %(class_name)sStore(machInst, this)};
    }
}};

// Constructor template for the load microop of an AMO.  It is the
// first microop and delays its commit (IsDelayedCommit); the AQ bit
// maps to an acquire request flag.
def template AtomicMemOpLoadConstructor {{
    %(class_name)s::%(class_name)sLoad::%(class_name)sLoad(
        ExtMachInst machInst, %(class_name)s *_p)
            : %(base_class)s("%(mnemonic)s[l]", machInst, %(op_class)s)
    {
        %(constructor)s;
        flags[IsFirstMicroop] = true;
        flags[IsDelayedCommit] = true;
        if (AQ)
            memAccessFlags = Request::ACQUIRE;
    }
}};

// Constructor template for the store microop of an AMO.  It is the
// last microop and is marked non-speculative; the RL bit maps to a
// release request flag.
def template AtomicMemOpStoreConstructor {{
    %(class_name)s::%(class_name)sStore::%(class_name)sStore(
        ExtMachInst machInst, %(class_name)s *_p)
            : %(base_class)s("%(mnemonic)s[s]", machInst, %(op_class)s)
    {
        %(constructor)s;
        flags[IsLastMicroop] = true;
        flags[IsNonSpeculative] = true;
        if (RL)
            memAccessFlags = Request::RELEASE;
    }
}};

// Atomic-mode execute() for store-conditional: computes the effective
// address, runs the store code, writes memory, and inverts the
// returned success flag (gem5 and RISC-V use opposite conventions;
// RISC-V SC writes 0 on success).
def template StoreCondExecute {{
    Fault %(class_name)s::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        uint64_t result;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                &result);
            // RISC-V has the opposite convention gem5 has for success flags,
            // so we invert the result here.
            result = !result;
        }

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Atomic-mode execute() for the AMO load microop: computes the
// effective address, reads the old memory value, then runs the
// AMO-specific code.
def template AtomicMemOpLoadExecute {{
    Fault %(class_name)s::%(class_name)sLoad::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
        }

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Atomic-mode execute() for the AMO store microop: computes the new
// value and writes it back to memory.
def template AtomicMemOpStoreExecute {{
    Fault %(class_name)s::%(class_name)sStore::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                nullptr);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Timing-mode initiateAcc() for the AMO load microop: issues the
// memory read; the loaded value is consumed later in completeAcc().
def template AtomicMemOpLoadInitiateAcc {{
    Fault %(class_name)s::%(class_name)sLoad::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
        }

        return fault;
    }
}};

// Timing-mode initiateAcc() for the AMO store microop: computes the
// new value and issues the memory write.
def template AtomicMemOpStoreInitiateAcc {{
    Fault %(class_name)s::%(class_name)sStore::initiateAcc(
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                nullptr);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Timing-mode completeAcc() for store-conditional: retrieves the
// success flag from the request's extra data and inverts it to match
// the RISC-V convention (0 = success).
def template StoreCondCompleteAcc {{
    Fault %(class_name)s::completeAcc(Packet *pkt, ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_dest_decl)s;

        // RISC-V has the opposite convention gem5 has for success flags,
        // so we invert the result here.
        uint64_t result = !pkt->req->getExtraData();

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Timing-mode completeAcc() for the AMO load microop: extracts the
// loaded value from the response packet and runs the AMO-specific
// code.
def template AtomicMemOpLoadCompleteAcc {{
    Fault %(class_name)s::%(class_name)sLoad::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        getMem(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Timing-mode completeAcc() for the AMO store microop: nothing left
// to do once the write has completed.
def template AtomicMemOpStoreCompleteAcc {{
    Fault %(class_name)s::%(class_name)sStore::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

// Format for LR (load-reserved) instructions.  Extra mem_flags are
// OR'd into memAccessFlags by the generated constructor.
def format LoadReserved(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
        mem_flags=[], inst_flags=[]) {{
    mem_flags = makeList(mem_flags)
    inst_flags = makeList(inst_flags)
    iop = InstObjParams(name, Name, 'LoadReserved',
        {'ea_code': ea_code, 'memacc_code': memacc_code,
        'postacc_code': postacc_code}, inst_flags)
    # Only emit the flag-merging statement when there are flags to
    # merge; an empty list would generate the non-compiling statement
    # 'memAccessFlags = memAccessFlags | ;'.
    if mem_flags:
        iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
            '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'

    header_output = LoadStoreDeclare.subst(iop)
    decoder_output = LRSCConstructor.subst(iop)
    decode_block = BasicDecode.subst(iop)
    exec_output = LoadExecute.subst(iop) \
        + LoadInitiateAcc.subst(iop) \
        + LoadCompleteAcc.subst(iop)
}};

// Format for SC (store-conditional) instructions.  Extra mem_flags
// are OR'd into memAccessFlags by the generated constructor.
def format StoreCond(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
        mem_flags=[], inst_flags=[]) {{
    mem_flags = makeList(mem_flags)
    inst_flags = makeList(inst_flags)
    iop = InstObjParams(name, Name, 'StoreCond',
        {'ea_code': ea_code, 'memacc_code': memacc_code,
        'postacc_code': postacc_code}, inst_flags)
    # Only emit the flag-merging statement when there are flags to
    # merge; an empty list would generate the non-compiling statement
    # 'memAccessFlags = memAccessFlags | ;'.
    if mem_flags:
        iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
            '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'

    header_output = LoadStoreDeclare.subst(iop)
    decoder_output = LRSCConstructor.subst(iop)
    decode_block = BasicDecode.subst(iop)
    exec_output = StoreCondExecute.subst(iop) \
        + StoreInitiateAcc.subst(iop) \
        + StoreCondCompleteAcc.subst(iop)
}};

// Format for AMO instructions (amoadd, amoswap, ...).  Generates a
// macroop plus load and store microops.
// NOTE(review): load_flags and store_flags are accepted but never
// used below — confirm whether they should be applied to the microop
// memAccessFlags.
def format AtomicMemOp(load_code, store_code, ea_code, load_flags=[],
        store_flags=[], inst_flags=[]) {{
    macro_iop = InstObjParams(name, Name, 'AtomicMemOp', ea_code, inst_flags)
    header_output = AtomicMemOpDeclare.subst(macro_iop)
    decoder_output = AtomicMemOpMacroConstructor.subst(macro_iop)
    decode_block = BasicDecode.subst(macro_iop)
    exec_output = ''

    load_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsLoad"]
    load_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
        {'ea_code': ea_code, 'code': load_code, 'op_name': 'Load'},
        load_inst_flags)
    decoder_output += AtomicMemOpLoadConstructor.subst(load_iop)
    exec_output += AtomicMemOpLoadExecute.subst(load_iop) \
        + AtomicMemOpLoadInitiateAcc.subst(load_iop) \
        + AtomicMemOpLoadCompleteAcc.subst(load_iop)

    store_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsStore"]
    store_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
        {'ea_code': ea_code, 'code': store_code, 'op_name': 'Store'},
        store_inst_flags)
    decoder_output += AtomicMemOpStoreConstructor.subst(store_iop)
    exec_output += AtomicMemOpStoreExecute.subst(store_iop) \
        + AtomicMemOpStoreInitiateAcc.subst(store_iop) \
        + AtomicMemOpStoreCompleteAcc.subst(store_iop)
}};
// Constructor template for LR and SC instructions: applies the AQ
// (acquire) and RL (release) bits of the encoding as memory request
// flags.
def template LRSCConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst):
        %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;
        if (AQ)
            memAccessFlags = memAccessFlags | Request::ACQUIRE;
        if (RL)
            memAccessFlags = memAccessFlags | Request::RELEASE;
    }
}};

// Constructor template for the AMO macroop: builds the load and store
// microops that together implement the atomic read-modify-write.
def template AtomicMemOpMacroConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;
        microops = {new %(class_name)sLoad(machInst, this),
            new %(class_name)sStore(machInst, this)};
    }
}};

// Constructor template for the load microop of an AMO.  It is the
// first microop and delays its commit (IsDelayedCommit); the AQ bit
// maps to an acquire request flag.
def template AtomicMemOpLoadConstructor {{
    %(class_name)s::%(class_name)sLoad::%(class_name)sLoad(
        ExtMachInst machInst, %(class_name)s *_p)
            : %(base_class)s("%(mnemonic)s[l]", machInst, %(op_class)s)
    {
        %(constructor)s;
        flags[IsFirstMicroop] = true;
        flags[IsDelayedCommit] = true;
        if (AQ)
            memAccessFlags = Request::ACQUIRE;
    }
}};

// Constructor template for the store microop of an AMO.  It is the
// last microop and is marked non-speculative; the RL bit maps to a
// release request flag.
def template AtomicMemOpStoreConstructor {{
    %(class_name)s::%(class_name)sStore::%(class_name)sStore(
        ExtMachInst machInst, %(class_name)s *_p)
            : %(base_class)s("%(mnemonic)s[s]", machInst, %(op_class)s)
    {
        %(constructor)s;
        flags[IsLastMicroop] = true;
        flags[IsNonSpeculative] = true;
        if (RL)
            memAccessFlags = Request::RELEASE;
    }
}};

// Atomic-mode execute() for store-conditional: computes the effective
// address, runs the store code, writes memory, and inverts the
// returned success flag (gem5 and RISC-V use opposite conventions;
// RISC-V SC writes 0 on success).
def template StoreCondExecute {{
    Fault %(class_name)s::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        uint64_t result;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(memacc_code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                &result);
            // RISC-V has the opposite convention gem5 has for success flags,
            // so we invert the result here.
            result = !result;
        }

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Atomic-mode execute() for the AMO load microop: computes the
// effective address, reads the old memory value, then runs the
// AMO-specific code.
def template AtomicMemOpLoadExecute {{
    Fault %(class_name)s::%(class_name)sLoad::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
        }

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Atomic-mode execute() for the AMO store microop: computes the new
// value and writes it back to memory.
def template AtomicMemOpStoreExecute {{
    Fault %(class_name)s::%(class_name)sStore::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                nullptr);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Timing-mode initiateAcc() for the AMO load microop: issues the
// memory read; the loaded value is consumed later in completeAcc().
def template AtomicMemOpLoadInitiateAcc {{
    Fault %(class_name)s::%(class_name)sLoad::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
        }

        return fault;
    }
}};

// Timing-mode initiateAcc() for the AMO store microop: computes the
// new value and issues the memory write.
def template AtomicMemOpStoreInitiateAcc {{
    Fault %(class_name)s::%(class_name)sStore::initiateAcc(
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                nullptr);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Timing-mode completeAcc() for store-conditional: retrieves the
// success flag from the request's extra data and inverts it to match
// the RISC-V convention (0 = success).
def template StoreCondCompleteAcc {{
    Fault %(class_name)s::completeAcc(Packet *pkt, ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_dest_decl)s;

        // RISC-V has the opposite convention gem5 has for success flags,
        // so we invert the result here.
        uint64_t result = !pkt->req->getExtraData();

        if (fault == NoFault) {
            %(postacc_code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Timing-mode completeAcc() for the AMO load microop: extracts the
// loaded value from the response packet and runs the AMO-specific
// code.
def template AtomicMemOpLoadCompleteAcc {{
    Fault %(class_name)s::%(class_name)sLoad::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        getMem(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Timing-mode completeAcc() for the AMO store microop: nothing left
// to do once the write has completed.
def template AtomicMemOpStoreCompleteAcc {{
    Fault %(class_name)s::%(class_name)sStore::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

// Format for LR (load-reserved) instructions.  Extra mem_flags are
// OR'd into memAccessFlags by the generated constructor.
def format LoadReserved(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
        mem_flags=[], inst_flags=[]) {{
    mem_flags = makeList(mem_flags)
    inst_flags = makeList(inst_flags)
    iop = InstObjParams(name, Name, 'LoadReserved',
        {'ea_code': ea_code, 'memacc_code': memacc_code,
        'postacc_code': postacc_code}, inst_flags)
    # Only emit the flag-merging statement when there are flags to
    # merge; an empty list would generate the non-compiling statement
    # 'memAccessFlags = memAccessFlags | ;'.
    if mem_flags:
        iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
            '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'

    header_output = LoadStoreDeclare.subst(iop)
    decoder_output = LRSCConstructor.subst(iop)
    decode_block = BasicDecode.subst(iop)
    exec_output = LoadExecute.subst(iop) \
        + LoadInitiateAcc.subst(iop) \
        + LoadCompleteAcc.subst(iop)
}};

// Format for SC (store-conditional) instructions.  Extra mem_flags
// are OR'd into memAccessFlags by the generated constructor.
def format StoreCond(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
        mem_flags=[], inst_flags=[]) {{
    mem_flags = makeList(mem_flags)
    inst_flags = makeList(inst_flags)
    iop = InstObjParams(name, Name, 'StoreCond',
        {'ea_code': ea_code, 'memacc_code': memacc_code,
        'postacc_code': postacc_code}, inst_flags)
    # Only emit the flag-merging statement when there are flags to
    # merge; an empty list would generate the non-compiling statement
    # 'memAccessFlags = memAccessFlags | ;'.
    if mem_flags:
        iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
            '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'

    header_output = LoadStoreDeclare.subst(iop)
    decoder_output = LRSCConstructor.subst(iop)
    decode_block = BasicDecode.subst(iop)
    exec_output = StoreCondExecute.subst(iop) \
        + StoreInitiateAcc.subst(iop) \
        + StoreCondCompleteAcc.subst(iop)
}};

// Format for AMO instructions (amoadd, amoswap, ...).  Generates a
// macroop plus load and store microops.
// NOTE(review): load_flags and store_flags are accepted but never
// used below — confirm whether they should be applied to the microop
// memAccessFlags.
def format AtomicMemOp(load_code, store_code, ea_code, load_flags=[],
        store_flags=[], inst_flags=[]) {{
    macro_iop = InstObjParams(name, Name, 'AtomicMemOp', ea_code, inst_flags)
    header_output = AtomicMemOpDeclare.subst(macro_iop)
    decoder_output = AtomicMemOpMacroConstructor.subst(macro_iop)
    decode_block = BasicDecode.subst(macro_iop)
    exec_output = ''

    load_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsLoad"]
    load_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
        {'ea_code': ea_code, 'code': load_code, 'op_name': 'Load'},
        load_inst_flags)
    decoder_output += AtomicMemOpLoadConstructor.subst(load_iop)
    exec_output += AtomicMemOpLoadExecute.subst(load_iop) \
        + AtomicMemOpLoadInitiateAcc.subst(load_iop) \
        + AtomicMemOpLoadCompleteAcc.subst(load_iop)

    store_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsStore"]
    store_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
        {'ea_code': ea_code, 'code': store_code, 'op_name': 'Store'},
        store_inst_flags)
    decoder_output += AtomicMemOpStoreConstructor.subst(store_iop)
    exec_output += AtomicMemOpStoreExecute.subst(store_iop) \
        + AtomicMemOpStoreInitiateAcc.subst(store_iop) \
        + AtomicMemOpStoreCompleteAcc.subst(store_iop)
}};