old (12482:35461496d012)  new (13653:079472978bca)
// -*- mode:c++ -*-

// Copyright (c) 2015 Riscv Developers
// Copyright (c) 2016 The University of Virginia
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are

--- 15 unchanged lines hidden ---

// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Alec Roelke

////////////////////////////////////////////////////////////////////
//
// Atomic memory operation instructions
//
def template AtomicMemOpDeclare {{
    /**
     * Static instruction class for an AtomicMemOp operation
     */
    class %(class_name)s : public %(base_class)s
    {
      public:
        // Constructor
        %(class_name)s(ExtMachInst machInst);

      protected:

        class %(class_name)sLoad : public %(base_class)sMicro
        {
          public:
            // Constructor
            %(class_name)sLoad(ExtMachInst machInst, %(class_name)s *_p);

            Fault execute(ExecContext *, Trace::InstRecord *) const override;
            Fault initiateAcc(ExecContext *,
                Trace::InstRecord *) const override;
            Fault completeAcc(PacketPtr, ExecContext *,
                Trace::InstRecord *) const override;
        };

        class %(class_name)sStore : public %(base_class)sMicro
        {
          public:
            // Constructor
            %(class_name)sStore(ExtMachInst machInst, %(class_name)s *_p);

            Fault execute(ExecContext *, Trace::InstRecord *) const override;
            Fault initiateAcc(ExecContext *,
                Trace::InstRecord *) const override;
            Fault completeAcc(PacketPtr, ExecContext *,
                Trace::InstRecord *) const override;
        };
    };
}};

def template LRSCConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst):
        %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;
        if (AQ)
            memAccessFlags = memAccessFlags | Request::ACQUIRE;
        if (RL)
            memAccessFlags = memAccessFlags | Request::RELEASE;
    }
}};

def template AtomicMemOpMacroConstructor {{
    %(class_name)s::%(class_name)s(ExtMachInst machInst)
        : %(base_class)s("%(mnemonic)s", machInst, %(op_class)s)
    {
        %(constructor)s;
        microops = {new %(class_name)sLoad(machInst, this),
            new %(class_name)sStore(machInst, this)};
    }
}};

def template AtomicMemOpLoadConstructor {{
    %(class_name)s::%(class_name)sLoad::%(class_name)sLoad(
        ExtMachInst machInst, %(class_name)s *_p)
            : %(base_class)s("%(mnemonic)s[l]", machInst, %(op_class)s)
    {
        %(constructor)s;
        flags[IsFirstMicroop] = true;
        flags[IsDelayedCommit] = true;
        if (AQ)
            memAccessFlags = Request::ACQUIRE;
    }
}};

def template AtomicMemOpStoreConstructor {{
    %(class_name)s::%(class_name)sStore::%(class_name)sStore(
        ExtMachInst machInst, %(class_name)s *_p)
            : %(base_class)s("%(mnemonic)s[s]", machInst, %(op_class)s)
    {
        %(constructor)s;
        flags[IsLastMicroop] = true;
        flags[IsNonSpeculative] = true;
        if (RL)
            memAccessFlags = Request::RELEASE;
    }
}};

def template StoreCondExecute {{
    Fault %(class_name)s::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;
        uint64_t result;

        %(op_decl)s;
        %(op_rd)s;

--- 18 unchanged lines hidden ---

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template AtomicMemOpLoadExecute {{
    Fault %(class_name)s::%(class_name)sLoad::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = readMemAtomic(xc, traceData, EA, Mem, memAccessFlags);
        }

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template AtomicMemOpStoreExecute {{
    Fault %(class_name)s::%(class_name)sStore::execute(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            fault = writeMemAtomic(xc, traceData, Mem, EA, memAccessFlags,
                nullptr);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template AtomicMemOpLoadInitiateAcc {{
    Fault %(class_name)s::%(class_name)sLoad::initiateAcc(ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_src_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            fault = initiateMemRead(xc, traceData, EA, Mem, memAccessFlags);
        }

        return fault;
    }
}};

def template AtomicMemOpStoreInitiateAcc {{
    Fault %(class_name)s::%(class_name)sStore::initiateAcc(
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Addr EA;
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            fault = writeMemTiming(xc, traceData, Mem, EA, memAccessFlags,
                nullptr);
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template StoreCondCompleteAcc {{
    Fault %(class_name)s::completeAcc(Packet *pkt, ExecContext *xc,
        Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_dest_decl)s;

        // RISC-V uses the opposite convention from gem5 for success flags,
        // so we invert the result here.
        uint64_t result = !pkt->req->getExtraData();

--- 5 unchanged lines hidden ---

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template AtomicMemOpLoadCompleteAcc {{
    Fault %(class_name)s::%(class_name)sLoad::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        getMem(pkt, Mem, traceData);

        if (fault == NoFault) {
            %(code)s;
        }

        if (fault == NoFault) {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template AtomicMemOpStoreCompleteAcc {{
    Fault %(class_name)s::%(class_name)sStore::completeAcc(PacketPtr pkt,
        ExecContext *xc, Trace::InstRecord *traceData) const
    {
        return NoFault;
    }
}};

def format LoadReserved(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
        mem_flags=[], inst_flags=[]) {{
    mem_flags = makeList(mem_flags)
    inst_flags = makeList(inst_flags)
    iop = InstObjParams(name, Name, 'LoadReserved',
        {'ea_code': ea_code, 'memacc_code': memacc_code,
         'postacc_code': postacc_code}, inst_flags)
    iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
        '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'

    header_output = LoadStoreDeclare.subst(iop)
    decoder_output = LRSCConstructor.subst(iop)
    decode_block = BasicDecode.subst(iop)
    exec_output = LoadExecute.subst(iop) \
        + LoadInitiateAcc.subst(iop) \
        + LoadCompleteAcc.subst(iop)
}};
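
// Illustrative sketch (not part of this file): a decode entry elsewhere,
// e.g. in decoder.isa, could invoke the LoadReserved format roughly as
// below. The opcode value, mnemonic, operand suffixes, and the LLSC flag
// spelling are assumptions.
//
//     0x2: LoadReserved::lr_w({{
//         Rd_sd = Mem_sw;
//     }}, mem_flags=LLSC);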

def format StoreCond(memacc_code, postacc_code={{ }}, ea_code={{EA = Rs1;}},
        mem_flags=[], inst_flags=[]) {{
    mem_flags = makeList(mem_flags)
    inst_flags = makeList(inst_flags)
    iop = InstObjParams(name, Name, 'StoreCond',
        {'ea_code': ea_code, 'memacc_code': memacc_code,
         'postacc_code': postacc_code}, inst_flags)
    iop.constructor += '\n\tmemAccessFlags = memAccessFlags | ' + \
        '|'.join(['Request::%s' % flag for flag in mem_flags]) + ';'

    header_output = LoadStoreDeclare.subst(iop)
    decoder_output = LRSCConstructor.subst(iop)
    decode_block = BasicDecode.subst(iop)
    exec_output = StoreCondExecute.subst(iop) \
        + StoreInitiateAcc.subst(iop) \
        + StoreCondCompleteAcc.subst(iop)
}};
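
// Illustrative sketch (not part of this file): a hypothetical decode entry
// using the StoreCond format, where memacc_code writes memory and
// postacc_code consumes the inverted success flag ("result") produced in
// StoreCondCompleteAcc. Names and flag spellings are assumptions.
//
//     0x3: StoreCond::sc_w({{
//         Mem_uw = Rs2_uw;
//     }}, {{
//         Rd = result;
//     }}, mem_flags=LLSC, inst_flags=IsStoreConditional);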

def format AtomicMemOp(load_code, store_code, ea_code, load_flags=[],
        store_flags=[], inst_flags=[]) {{
    macro_iop = InstObjParams(name, Name, 'AtomicMemOp', ea_code, inst_flags)
    header_output = AtomicMemOpDeclare.subst(macro_iop)
    decoder_output = AtomicMemOpMacroConstructor.subst(macro_iop)
    decode_block = BasicDecode.subst(macro_iop)
    exec_output = ''

    load_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsLoad"]
    load_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
        {'ea_code': ea_code, 'code': load_code, 'op_name': 'Load'},
        load_inst_flags)
    decoder_output += AtomicMemOpLoadConstructor.subst(load_iop)
    exec_output += AtomicMemOpLoadExecute.subst(load_iop) \
        + AtomicMemOpLoadInitiateAcc.subst(load_iop) \
        + AtomicMemOpLoadCompleteAcc.subst(load_iop)

    store_inst_flags = makeList(inst_flags) + ["IsMemRef", "IsStore"]
    store_iop = InstObjParams(name, Name, 'AtomicMemOpMicro',
        {'ea_code': ea_code, 'code': store_code, 'op_name': 'Store'},
        store_inst_flags)
    decoder_output += AtomicMemOpStoreConstructor.subst(store_iop)
    exec_output += AtomicMemOpStoreExecute.subst(store_iop) \
        + AtomicMemOpStoreInitiateAcc.subst(store_iop) \
        + AtomicMemOpStoreCompleteAcc.subst(store_iop)
}};
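
// Illustrative sketch (not part of this file): a hypothetical decode entry
// using the AtomicMemOp format. The load microop's code sees the value read
// from memory as Mem, and the store microop's code produces the value
// written back; operand suffixes and the opcode value are assumptions.
//
//     0x1: AtomicMemOp::amoswap_w({{
//         Rd_sd = Mem_sw;
//     }}, {{
//         Mem_uw = Rs2_uw;
//     }}, {{EA = Rs1;}});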