ldstop.isa (12234:78ece221f9f5) ldstop.isa (12236:126ac9da6050)
1// Copyright (c) 2007-2008 The Hewlett-Packard Development Company
2// Copyright (c) 2015 Advanced Micro Devices, Inc.
3// All rights reserved.
4//
5// The license below extends only to copyright in the software and shall
6// not be construed as granting a license to any other intellectual
7// property including but not limited to intellectual property relating
8// to a hardware implementation of the functionality of the software
9// licensed hereunder. You may use the software subject to the license
10// terms below provided that you ensure that this notice is replicated
11// unmodified and in its entirety in all distributions of the software,
12// modified or unmodified, in source code or in binary form.
13//
14// Copyright (c) 2008 The Regents of The University of Michigan
15// All rights reserved.
16//
17// Redistribution and use in source and binary forms, with or without
18// modification, are permitted provided that the following conditions are
19// met: redistributions of source code must retain the above copyright
20// notice, this list of conditions and the following disclaimer;
21// redistributions in binary form must reproduce the above copyright
22// notice, this list of conditions and the following disclaimer in the
23// documentation and/or other materials provided with the distribution;
24// neither the name of the copyright holders nor the names of its
25// contributors may be used to endorse or promote products derived from
26// this software without specific prior written permission.
27//
28// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
29// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
30// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
31// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
32// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
33// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
34// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
35// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
36// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
37// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
38// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
39//
40// Authors: Gabe Black
41
42//////////////////////////////////////////////////////////////////////////
43//
44// LdStOp Microop templates
45//
46//////////////////////////////////////////////////////////////////////////
47
48// LEA template
49
// Atomic-mode execute() for the LEA-style microops: evaluates the effective
// address (%(ea_code)s), runs the microop's %(code)s on it, and writes
// results back only if no fault was raised.  No memory access is performed.
// NOTE(review): the leading "50".."70" on each line are line numbers from the
// revision-diff rendering this text was captured from, not program text.
50def template MicroLeaExecute {{
51    Fault %(class_name)s::execute(ExecContext *xc,
52            Trace::InstRecord *traceData) const
53    {
54        Fault fault = NoFault;
55        Addr EA;
56
57        %(op_decl)s;
58        %(op_rd)s;
59        %(ea_code)s;
60        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);
61
62        %(code)s;
63        if(fault == NoFault)
64        {
65            %(op_wb)s;
66        }
67
68        return fault;
69    }
70}};
71
72def template MicroLeaDeclare {{
73 class %(class_name)s : public %(base_class)s
74 {
75 public:
76 %(class_name)s(ExtMachInst _machInst,
77 const char * instMnem, uint64_t setFlags,
78 uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
79 uint64_t _disp, InstRegIndex _segment,
80 InstRegIndex _data,
81 uint8_t _dataSize, uint8_t _addressSize,
82 Request::FlagsType _memFlags);
83
1// Copyright (c) 2007-2008 The Hewlett-Packard Development Company
2// Copyright (c) 2015 Advanced Micro Devices, Inc.
3// All rights reserved.
4//
5// The license below extends only to copyright in the software and shall
6// not be construed as granting a license to any other intellectual
7// property including but not limited to intellectual property relating
8// to a hardware implementation of the functionality of the software
9// licensed hereunder. You may use the software subject to the license
10// terms below provided that you ensure that this notice is replicated
11// unmodified and in its entirety in all distributions of the software,
12// modified or unmodified, in source code or in binary form.
13//
14// Copyright (c) 2008 The Regents of The University of Michigan
15// All rights reserved.
16//
17// Redistribution and use in source and binary forms, with or without
18// modification, are permitted provided that the following conditions are
19// met: redistributions of source code must retain the above copyright
20// notice, this list of conditions and the following disclaimer;
21// redistributions in binary form must reproduce the above copyright
22// notice, this list of conditions and the following disclaimer in the
23// documentation and/or other materials provided with the distribution;
24// neither the name of the copyright holders nor the names of its
25// contributors may be used to endorse or promote products derived from
26// this software without specific prior written permission.
27//
28// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
29// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
30// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
31// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
32// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
33// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
34// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
35// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
36// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
37// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
38// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
39//
40// Authors: Gabe Black
41
42//////////////////////////////////////////////////////////////////////////
43//
44// LdStOp Microop templates
45//
46//////////////////////////////////////////////////////////////////////////
47
48// LEA template
49
// Atomic-mode execute() for the LEA-style microops: evaluates the effective
// address (%(ea_code)s), runs the microop's %(code)s on it, and writes
// results back only if no fault was raised.  No memory access is performed.
// NOTE(review): duplicate context copy from the second revision of the diff
// rendering; embedded "50".."70" prefixes are diff line numbers.
50def template MicroLeaExecute {{
51    Fault %(class_name)s::execute(ExecContext *xc,
52            Trace::InstRecord *traceData) const
53    {
54        Fault fault = NoFault;
55        Addr EA;
56
57        %(op_decl)s;
58        %(op_rd)s;
59        %(ea_code)s;
60        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);
61
62        %(code)s;
63        if(fault == NoFault)
64        {
65            %(op_wb)s;
66        }
67
68        return fault;
69    }
70}};
71
// Class declaration for LEA-style microops: constructor taking the full
// scale/index/base/displacement/segment address description plus the
// destination register, and the execute() declaration.
72def template MicroLeaDeclare {{
73    class %(class_name)s : public %(base_class)s
74    {
75      public:
76        %(class_name)s(ExtMachInst _machInst,
77                const char * instMnem, uint64_t setFlags,
78                uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
79                uint64_t _disp, InstRegIndex _segment,
80                InstRegIndex _data,
81                uint8_t _dataSize, uint8_t _addressSize,
82                Request::FlagsType _memFlags);
83
// NOTE(review): diff hunk — the next two lines are both "line 84": the old
// revision used the %(BasicExecDeclare)s sub-template, the new revision
// spells the execute() declaration out directly.  Only one belongs in any
// single revision of the file.
84        %(BasicExecDeclare)s
84        Fault execute(ExecContext *, Trace::InstRecord *) const;
85    };
86}};
87
88// Load templates
89
// Atomic-mode execute() for load microops: compute EA, read memory via
// readMemAtomic, then run %(code)s on the loaded value.  Faults on
// prefetch accesses are deliberately swallowed (prefetches are hints).
90def template MicroLoadExecute {{
91    Fault %(class_name)s::execute(ExecContext *xc,
92            Trace::InstRecord *traceData) const
93    {
94        Fault fault = NoFault;
95        Addr EA;
96
97        %(op_decl)s;
98        %(op_rd)s;
99        %(ea_code)s;
100        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);
101
102        fault = readMemAtomic(xc, traceData, EA, Mem,
103                              %(memDataSize)s, memFlags);
104
105        if (fault == NoFault) {
106            %(code)s;
107        } else if (memFlags & Request::PREFETCH) {
108            // For prefetches, ignore any faults/exceptions.
109            return NoFault;
110        }
111        if(fault == NoFault)
112        {
113            %(op_wb)s;
114        }
115
116        return fault;
117    }
118}};
119
// Timing-mode first half of a load: compute EA and issue the memory read
// request (initiateMemRead).  The result is consumed later in completeAcc.
120def template MicroLoadInitiateAcc {{
121    Fault %(class_name)s::initiateAcc(ExecContext * xc,
122            Trace::InstRecord * traceData) const
123    {
124        Fault fault = NoFault;
125        Addr EA;
126
127        %(op_decl)s;
128        %(op_rd)s;
129        %(ea_code)s;
130        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);
131
132        fault = initiateMemRead(xc, traceData, EA,
133                                %(memDataSize)s, memFlags);
134
135        return fault;
136    }
137}};
138
// Timing-mode second half of a load: unpack the returned packet into Mem
// (getMem), run the microop's %(code)s, and write back if no fault.
139def template MicroLoadCompleteAcc {{
140    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext * xc,
141            Trace::InstRecord * traceData) const
142    {
143        Fault fault = NoFault;
144
145        %(op_decl)s;
146        %(op_rd)s;
147
148        getMem(pkt, Mem, %(memDataSize)s, traceData);
149
150        %(code)s;
151
152        if(fault == NoFault)
153        {
154            %(op_wb)s;
155        }
156
157        return fault;
158    }
159}};
160
161// Store templates
162
// Atomic-mode execute() for store microops: compute EA, run %(code)s to
// produce Mem, then write it out with writeMemAtomic.  Write-back of
// register results happens only if both the code and the write succeed.
163def template MicroStoreExecute {{
164    Fault %(class_name)s::execute(ExecContext * xc,
165            Trace::InstRecord *traceData) const
166    {
167        Fault fault = NoFault;
168
169        Addr EA;
170        %(op_decl)s;
171        %(op_rd)s;
172        %(ea_code)s;
173        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);
174
175        %(code)s;
176
177        if(fault == NoFault)
178        {
179            fault = writeMemAtomic(xc, traceData, Mem, %(memDataSize)s, EA,
180                    memFlags, NULL);
181            if(fault == NoFault)
182            {
183                %(op_wb)s;
184            }
185        }
186
187        return fault;
188    }
189}};
190
// Timing-mode first half of a store: compute EA, run %(code)s to produce
// Mem, and issue the write with writeMemTiming.  Note: no %(op_wb)s here —
// register write-back for stores happens in completeAcc.
191def template MicroStoreInitiateAcc {{
192    Fault %(class_name)s::initiateAcc(ExecContext * xc,
193            Trace::InstRecord * traceData) const
194    {
195        Fault fault = NoFault;
196
197        Addr EA;
198        %(op_decl)s;
199        %(op_rd)s;
200        %(ea_code)s;
201        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);
202
203        %(code)s;
204
205        if(fault == NoFault)
206        {
207            fault = writeMemTiming(xc, traceData, Mem, %(memDataSize)s, EA,
208                    memFlags, NULL);
209        }
210        return fault;
211    }
212}};
213
// Timing-mode second half of a store: runs the optional %(complete_code)s
// hook and performs register write-back.  Always returns NoFault — any
// fault was already reported by initiateAcc.
214def template MicroStoreCompleteAcc {{
215    Fault %(class_name)s::completeAcc(PacketPtr pkt,
216            ExecContext * xc, Trace::InstRecord * traceData) const
217    {
218        %(op_decl)s;
219        %(op_rd)s;
220        %(complete_code)s;
221        %(op_wb)s;
222        return NoFault;
223    }
224}};
225
85 };
86}};
87
88// Load templates
89
// NOTE(review): the following span (embedded lines 90-224) is a byte-for-byte
// duplicate context copy of the load/store execute/initiateAcc/completeAcc
// templates, shown for the second revision in this two-revision diff
// rendering.  See the first occurrence of each template for its role.
// Atomic-mode execute() for loads; prefetch faults are swallowed.
90def template MicroLoadExecute {{
91    Fault %(class_name)s::execute(ExecContext *xc,
92            Trace::InstRecord *traceData) const
93    {
94        Fault fault = NoFault;
95        Addr EA;
96
97        %(op_decl)s;
98        %(op_rd)s;
99        %(ea_code)s;
100        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);
101
102        fault = readMemAtomic(xc, traceData, EA, Mem,
103                              %(memDataSize)s, memFlags);
104
105        if (fault == NoFault) {
106            %(code)s;
107        } else if (memFlags & Request::PREFETCH) {
108            // For prefetches, ignore any faults/exceptions.
109            return NoFault;
110        }
111        if(fault == NoFault)
112        {
113            %(op_wb)s;
114        }
115
116        return fault;
117    }
118}};
119
// Timing-mode load, request side.
120def template MicroLoadInitiateAcc {{
121    Fault %(class_name)s::initiateAcc(ExecContext * xc,
122            Trace::InstRecord * traceData) const
123    {
124        Fault fault = NoFault;
125        Addr EA;
126
127        %(op_decl)s;
128        %(op_rd)s;
129        %(ea_code)s;
130        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);
131
132        fault = initiateMemRead(xc, traceData, EA,
133                                %(memDataSize)s, memFlags);
134
135        return fault;
136    }
137}};
138
// Timing-mode load, response side.
139def template MicroLoadCompleteAcc {{
140    Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext * xc,
141            Trace::InstRecord * traceData) const
142    {
143        Fault fault = NoFault;
144
145        %(op_decl)s;
146        %(op_rd)s;
147
148        getMem(pkt, Mem, %(memDataSize)s, traceData);
149
150        %(code)s;
151
152        if(fault == NoFault)
153        {
154            %(op_wb)s;
155        }
156
157        return fault;
158    }
159}};
160
161// Store templates
162
// Atomic-mode store.
163def template MicroStoreExecute {{
164    Fault %(class_name)s::execute(ExecContext * xc,
165            Trace::InstRecord *traceData) const
166    {
167        Fault fault = NoFault;
168
169        Addr EA;
170        %(op_decl)s;
171        %(op_rd)s;
172        %(ea_code)s;
173        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);
174
175        %(code)s;
176
177        if(fault == NoFault)
178        {
179            fault = writeMemAtomic(xc, traceData, Mem, %(memDataSize)s, EA,
180                    memFlags, NULL);
181            if(fault == NoFault)
182            {
183                %(op_wb)s;
184            }
185        }
186
187        return fault;
188    }
189}};
190
// Timing-mode store, request side; write-back deferred to completeAcc.
191def template MicroStoreInitiateAcc {{
192    Fault %(class_name)s::initiateAcc(ExecContext * xc,
193            Trace::InstRecord * traceData) const
194    {
195        Fault fault = NoFault;
196
197        Addr EA;
198        %(op_decl)s;
199        %(op_rd)s;
200        %(ea_code)s;
201        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);
202
203        %(code)s;
204
205        if(fault == NoFault)
206        {
207            fault = writeMemTiming(xc, traceData, Mem, %(memDataSize)s, EA,
208                    memFlags, NULL);
209        }
210        return fault;
211    }
212}};
213
// Timing-mode store, response side; always NoFault.
214def template MicroStoreCompleteAcc {{
215    Fault %(class_name)s::completeAcc(PacketPtr pkt,
216            ExecContext * xc, Trace::InstRecord * traceData) const
217    {
218        %(op_decl)s;
219        %(op_rd)s;
220        %(complete_code)s;
221        %(op_wb)s;
222        return NoFault;
223    }
224}};
225
226// Common templates
227
228//This declares the initiateAcc function in memory operations
229def template InitiateAccDeclare {{
230    Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
231}};
232
233//This declares the completeAcc function in memory operations
234def template CompleteAccDeclare {{
235    Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;
236}};
237
// Class declarations for load/store microops.
// NOTE(review): this span interleaves chunks of BOTH revisions of the diff
// (old-revision embedded numbers 238-279, new-revision numbers 226-260).
// The functional change between them: the old revision composed the method
// declarations from the %(BasicExecDeclare)s / %(InitiateAccDeclare)s /
// %(CompleteAccDeclare)s sub-templates; the new revision writes the three
// Fault declarations out directly.  Each revision contains only one variant.
// Old-revision MicroLdStOpDeclare (lines 238-249):
238def template MicroLdStOpDeclare {{
239    class %(class_name)s : public %(base_class)s
240    {
241      public:
242        %(class_name)s(ExtMachInst _machInst,
243                const char * instMnem, uint64_t setFlags,
244                uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
245                uint64_t _disp, InstRegIndex _segment,
246                InstRegIndex _data,
247                uint8_t _dataSize, uint8_t _addressSize,
248                Request::FlagsType _memFlags);
249
// New-revision MicroLdStOpDeclare (lines 226-237):
226def template MicroLdStOpDeclare {{
227    class %(class_name)s : public %(base_class)s
228    {
229      public:
230        %(class_name)s(ExtMachInst _machInst,
231                const char * instMnem, uint64_t setFlags,
232                uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
233                uint64_t _disp, InstRegIndex _segment,
234                InstRegIndex _data,
235                uint8_t _dataSize, uint8_t _addressSize,
236                Request::FlagsType _memFlags);
237
// Old-revision method declarations via sub-templates:
250        %(BasicExecDeclare)s
251
252        %(InitiateAccDeclare)s
253
254        %(CompleteAccDeclare)s
// New-revision method declarations, written out directly:
238        Fault execute(ExecContext *, Trace::InstRecord *) const;
239        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
240        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;
255    };
256}};
257
258// LdStSplitOp is a load or store that uses a pair of regs as the
259// source or destination. Used for cmpxchg{8,16}b.
// Old-revision MicroLdStSplitOpDeclare (lines 260-271):
260def template MicroLdStSplitOpDeclare {{
261    class %(class_name)s : public %(base_class)s
262    {
263      public:
264        %(class_name)s(ExtMachInst _machInst,
265                const char * instMnem, uint64_t setFlags,
266                uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
267                uint64_t _disp, InstRegIndex _segment,
268                InstRegIndex _dataLow, InstRegIndex _dataHi,
269                uint8_t _dataSize, uint8_t _addressSize,
270                Request::FlagsType _memFlags);
271
// New-revision closing of MicroLdStOpDeclare (lines 241-242):
241    };
242}};
243
244// LdStSplitOp is a load or store that uses a pair of regs as the
245// source or destination. Used for cmpxchg{8,16}b.
// New-revision MicroLdStSplitOpDeclare (lines 246-257):
246def template MicroLdStSplitOpDeclare {{
247    class %(class_name)s : public %(base_class)s
248    {
249      public:
250        %(class_name)s(ExtMachInst _machInst,
251                const char * instMnem, uint64_t setFlags,
252                uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
253                uint64_t _disp, InstRegIndex _segment,
254                InstRegIndex _dataLow, InstRegIndex _dataHi,
255                uint8_t _dataSize, uint8_t _addressSize,
256                Request::FlagsType _memFlags);
257
// Old-revision declarations via sub-templates:
272        %(BasicExecDeclare)s
273
274        %(InitiateAccDeclare)s
275
276        %(CompleteAccDeclare)s
// New-revision direct declarations:
258        Fault execute(ExecContext *, Trace::InstRecord *) const;
259        Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
260        Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;
277    };
278}};
279
// Out-of-line constructor for single-register load/store microops: forwards
// the address description, data register, sizes and memory flags to the
// base class, then runs the per-microop %(constructor)s snippet.
280def template MicroLdStOpConstructor {{
281    %(class_name)s::%(class_name)s(
282        ExtMachInst machInst, const char * instMnem, uint64_t setFlags,
283        uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
284        uint64_t _disp, InstRegIndex _segment,
285        InstRegIndex _data,
286        uint8_t _dataSize, uint8_t _addressSize,
287        Request::FlagsType _memFlags) :
288        %(base_class)s(machInst, "%(mnemonic)s", instMnem, setFlags,
289            _scale, _index, _base,
290            _disp, _segment, _data,
291            _dataSize, _addressSize, _memFlags, %(op_class)s)
292    {
293        %(constructor)s;
294    }
295}};
296
// Out-of-line constructor for split (register-pair) load/store microops —
// same as MicroLdStOpConstructor but with separate low/high data registers.
297def template MicroLdStSplitOpConstructor {{
298    %(class_name)s::%(class_name)s(
299        ExtMachInst machInst, const char * instMnem, uint64_t setFlags,
300        uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
301        uint64_t _disp, InstRegIndex _segment,
302        InstRegIndex _dataLow, InstRegIndex _dataHi,
303        uint8_t _dataSize, uint8_t _addressSize,
304        Request::FlagsType _memFlags) :
305        %(base_class)s(machInst, "%(mnemonic)s", instMnem, setFlags,
306            _scale, _index, _base,
307            _disp, _segment, _dataLow, _dataHi,
308            _dataSize, _addressSize, _memFlags, %(op_class)s)
309    {
310        %(constructor)s;
311    }
312}};
313
314let {{
    # Python-side microop descriptor for single-register loads/stores.
    # Collects the address description and builds the C++ flag expressions
    # (memFlags / instFlags) that the allocator string will embed.
315    class LdStOp(X86Microop):
316        def __init__(self, data, segment, addr, disp,
317                dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec,
318                implicitStack):
319            self.data = data
320            [self.scale, self.index, self.base] = addr
321            self.disp = disp
322            self.segment = segment
323            self.dataSize = dataSize
324            self.addressSize = addressSize
325            self.memFlags = baseFlags
326            if atCPL0:
327                self.memFlags += " | (CPL0FlagBit << FlagShift)"
328            self.instFlags = ""
329            if prefetch:
330                self.memFlags += " | Request::PREFETCH"
331                self.instFlags += " | (1ULL << StaticInst::IsDataPrefetch)"
332            if nonSpec:
333                self.instFlags += " | (1ULL << StaticInst::IsNonSpeculative)"
334            # For implicit stack operations, we should use *not* use the
335            # alternative addressing mode for loads/stores if the prefix is set
336            if not implicitStack:
337                self.memFlags += " | (machInst.legacy.addr ? " + \
338                    "(AddrSizeFlagBit << FlagShift) : 0)"
339
        # Returns the C++ 'new' expression that the decoder emits to
        # instantiate this microop.
340        def getAllocator(self, microFlags):
341            allocator = '''new %(class_name)s(machInst, macrocodeBlock,
342                    %(flags)s, %(scale)s, %(index)s, %(base)s,
343                    %(disp)s, %(segment)s, %(data)s,
344                    %(dataSize)s, %(addressSize)s, %(memFlags)s)''' % {
345                "class_name" : self.className,
346                "flags" : self.microFlagsText(microFlags) + self.instFlags,
347                "scale" : self.scale, "index" : self.index,
348                "base" : self.base,
349                "disp" : self.disp,
350                "segment" : self.segment, "data" : self.data,
351                "dataSize" : self.dataSize, "addressSize" : self.addressSize,
352                "memFlags" : self.memFlags}
353            return allocator
354
    # Variant whose allocator picks, at decode time, between the regular
    # class and a "Big" class when dataSize >= 4 (so 32/64-bit accesses use
    # the full-overwrite code path instead of merge()).
355    class BigLdStOp(X86Microop):
356        def __init__(self, data, segment, addr, disp,
357                dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec,
358                implicitStack):
359            self.data = data
360            [self.scale, self.index, self.base] = addr
361            self.disp = disp
362            self.segment = segment
363            self.dataSize = dataSize
364            self.addressSize = addressSize
365            self.memFlags = baseFlags
366            if atCPL0:
367                self.memFlags += " | (CPL0FlagBit << FlagShift)"
368            self.instFlags = ""
369            if prefetch:
370                self.memFlags += " | Request::PREFETCH"
371                self.instFlags += " | (1ULL << StaticInst::IsDataPrefetch)"
372            if nonSpec:
373                self.instFlags += " | (1ULL << StaticInst::IsNonSpeculative)"
374            # For implicit stack operations, we should use *not* use the
375            # alternative addressing mode for loads/stores if the prefix is set
376            if not implicitStack:
377                self.memFlags += " | (machInst.legacy.addr ? " + \
378                    "(AddrSizeFlagBit << FlagShift) : 0)"
379
380        def getAllocator(self, microFlags):
            # Runtime ternary: dataSize >= 4 selects the "Big" class.
381            allocString = '''
382                (%(dataSize)s >= 4) ?
383                    (StaticInstPtr)(new %(class_name)sBig(machInst,
384                        macrocodeBlock, %(flags)s, %(scale)s, %(index)s,
385                        %(base)s, %(disp)s, %(segment)s, %(data)s,
386                        %(dataSize)s, %(addressSize)s, %(memFlags)s)) :
387                    (StaticInstPtr)(new %(class_name)s(machInst,
388                        macrocodeBlock, %(flags)s, %(scale)s, %(index)s,
389                        %(base)s, %(disp)s, %(segment)s, %(data)s,
390                        %(dataSize)s, %(addressSize)s, %(memFlags)s))
391                '''
392            allocator = allocString % {
393                "class_name" : self.className,
394                "flags" : self.microFlagsText(microFlags) + self.instFlags,
395                "scale" : self.scale, "index" : self.index,
396                "base" : self.base,
397                "disp" : self.disp,
398                "segment" : self.segment, "data" : self.data,
399                "dataSize" : self.dataSize, "addressSize" : self.addressSize,
400                "memFlags" : self.memFlags}
401            return allocator
402
    # Descriptor for register-pair loads/stores (cmpxchg8b/16b); 'data' is a
    # (low, high) register tuple instead of a single register.
403    class LdStSplitOp(LdStOp):
404        def __init__(self, data, segment, addr, disp,
405                dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec,
406                implicitStack):
407            super(LdStSplitOp, self).__init__(0, segment, addr, disp,
408                dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec,
409                implicitStack)
410            (self.dataLow, self.dataHi) = data
411
412        def getAllocator(self, microFlags):
413            allocString = '''(StaticInstPtr)(new %(class_name)s(machInst,
414                    macrocodeBlock, %(flags)s, %(scale)s, %(index)s,
415                    %(base)s, %(disp)s, %(segment)s,
416                    %(dataLow)s, %(dataHi)s,
417                    %(dataSize)s, %(addressSize)s, %(memFlags)s))
418                '''
419            allocator = allocString % {
420                "class_name" : self.className,
421                "flags" : self.microFlagsText(microFlags) + self.instFlags,
422                "scale" : self.scale, "index" : self.index,
423                "base" : self.base,
424                "disp" : self.disp,
425                "segment" : self.segment,
426                "dataLow" : self.dataLow, "dataHi" : self.dataHi,
427                "dataSize" : self.dataSize, "addressSize" : self.addressSize,
428                "memFlags" : self.memFlags}
429            return allocator
430
431}};
432
433let {{
434
435    # Make these empty strings so that concatenating onto
436    # them will always work.
437    header_output = ""
438    decoder_output = ""
439    exec_output = ""
440
    # C++ snippet computing the segment-relative effective address, truncated
    # to the instruction's address size.
441    segmentEAExpr = \
442        'bits(scale * Index + Base + disp, addressSize * 8 - 1, 0);'

443
    # Full linear EA: segment base plus the segment-relative offset above.
444    calculateEA = 'EA = SegBase + ' + segmentEAExpr
445
    # Generates one load microop: substitutes the templates into the
    # header/decoder/exec outputs (a second "Big" flavor when big=True) and
    # registers a Python allocator class under the lower-cased mnemonic.
446    def defineMicroLoadOp(mnemonic, code, bigCode='',
447                          mem_flags="0", big=True, nonSpec=False,
448                          implicitStack=False):
449        global header_output
450        global decoder_output
451        global exec_output
452        global microopClasses
453        Name = mnemonic
454        name = mnemonic.lower()
455
456        # Build up the all register version of this micro op
457        iops = [InstObjParams(name, Name, 'X86ISA::LdStOp',
458                              { "code": code,
459                                "ea_code": calculateEA,
460                                "memDataSize": "dataSize" })]
461        if big:
462            iops += [InstObjParams(name, Name + "Big", 'X86ISA::LdStOp',
463                                   { "code": bigCode,
464                                     "ea_code": calculateEA,
465                                     "memDataSize": "dataSize" })]
466        for iop in iops:
467            header_output += MicroLdStOpDeclare.subst(iop)
468            decoder_output += MicroLdStOpConstructor.subst(iop)
469            exec_output += MicroLoadExecute.subst(iop)
470            exec_output += MicroLoadInitiateAcc.subst(iop)
471            exec_output += MicroLoadCompleteAcc.subst(iop)
472
473        if implicitStack:
474            # For instructions that implicitly access the stack, the address
475            # size is the same as the stack segment pointer size, not the
476            # address size if specified by the instruction prefix
477            addressSize = "env.stackSize"
478        else:
479            addressSize = "env.addressSize"
480
481        base = LdStOp
482        if big:
483            base = BigLdStOp
        # Per-mnemonic allocator class captured over this call's arguments.
484        class LoadOp(base):
485            def __init__(self, data, segment, addr, disp = 0,
486                    dataSize="env.dataSize",
487                    addressSize=addressSize,
488                    atCPL0=False, prefetch=False, nonSpec=nonSpec,
489                    implicitStack=implicitStack):
490                super(LoadOp, self).__init__(data, segment, addr,
491                        disp, dataSize, addressSize, mem_flags,
492                        atCPL0, prefetch, nonSpec, implicitStack)
493                self.className = Name
494                self.mnemonic = name
495
496        microopClasses[name] = LoadOp
497
    # Concrete load microops.  ld/ldis/ldst/ldstl merge into the destination
    # (Big flavor overwrites); ldstl additionally locks the access.
498    defineMicroLoadOp('Ld', 'Data = merge(Data, Mem, dataSize);',
499                      'Data = Mem & mask(dataSize * 8);')
500    defineMicroLoadOp('Ldis', 'Data = merge(Data, Mem, dataSize);',
501                      'Data = Mem & mask(dataSize * 8);',
502                      implicitStack=True)
503    defineMicroLoadOp('Ldst', 'Data = merge(Data, Mem, dataSize);',
504                      'Data = Mem & mask(dataSize * 8);',
505                      '(StoreCheck << FlagShift)')
506    defineMicroLoadOp('Ldstl', 'Data = merge(Data, Mem, dataSize);',
507                      'Data = Mem & mask(dataSize * 8);',
508                      '(StoreCheck << FlagShift) | Request::LOCKED_RMW',
509                      nonSpec=True)
510
    # FP/SSE load into the raw 64-bit FP register view.
511    defineMicroLoadOp('Ldfp', code='FpData_uqw = Mem', big = False)
512
    # x87 load: reinterpret the 32/64-bit memory bits as float/double.
513    defineMicroLoadOp('Ldfp87', code='''
514        switch (dataSize)
515        {
516          case 4:
517            FpData_df = *(float *)&Mem;
518            break;
519          case 8:
520            FpData_df = *(double *)&Mem;
521            break;
522          default:
523            panic("Unhandled data size in LdFp87.\\n");
524        }
525        ''', big = False)
526
527    # Load integer from memory into x87 top-of-stack register.
528    # Used to implement fild instruction.
529    defineMicroLoadOp('Ldifp87', code='''
530        switch (dataSize)
531        {
532          case 2:
533            FpData_df = (int64_t)sext<16>(Mem);
534            break;
535          case 4:
536            FpData_df = (int64_t)sext<32>(Mem);
537            break;
538          case 8:
539            FpData_df = (int64_t)Mem;
540            break;
541          default:
542            panic("Unhandled data size in LdIFp87.\\n");
543        }
544        ''', big = False)
545
    # Generates a register-pair load microop (memDataSize is doubled to
    # 2 * dataSize) and registers its allocator class.
546    def defineMicroLoadSplitOp(mnemonic, code, mem_flags="0", nonSpec=False):
547        global header_output
548        global decoder_output
549        global exec_output
550        global microopClasses
551        Name = mnemonic
552        name = mnemonic.lower()
553
554        iop = InstObjParams(name, Name, 'X86ISA::LdStSplitOp',
555                            { "code": code,
556                              "ea_code": calculateEA,
557                              "memDataSize": "2 * dataSize" })
558
559        header_output += MicroLdStSplitOpDeclare.subst(iop)
560        decoder_output += MicroLdStSplitOpConstructor.subst(iop)
561        exec_output += MicroLoadExecute.subst(iop)
562        exec_output += MicroLoadInitiateAcc.subst(iop)
563        exec_output += MicroLoadCompleteAcc.subst(iop)
564
        # Allocator class; 'data' here is a (low, high) register pair.
565        class LoadOp(LdStSplitOp):
566            def __init__(self, data, segment, addr, disp = 0,
567                    dataSize="env.dataSize",
568                    addressSize="env.addressSize",
569                    atCPL0=False, prefetch=False, nonSpec=nonSpec,
570                    implicitStack=False):
571                super(LoadOp, self).__init__(data, segment, addr,
572                        disp, dataSize, addressSize, mem_flags,
573                        atCPL0, prefetch, nonSpec, implicitStack)
574                self.className = Name
575                self.mnemonic = name
576
577        microopClasses[name] = LoadOp
578
    # Split-load body: 4-byte case unpacks one qword into low/high halves;
    # 8-byte case reads two qwords.
579    code = '''
580    switch (dataSize) {
581      case 4:
582        DataLow = bits(Mem_u2qw[0], 31, 0);
583        DataHi = bits(Mem_u2qw[0], 63, 32);
584        break;
585      case 8:
586        DataLow = Mem_u2qw[0];
587        DataHi = Mem_u2qw[1];
588        break;
589      default:
590        panic("Unhandled data size %d in LdSplit.\\n", dataSize);
591    }'''
592
593    defineMicroLoadSplitOp('LdSplit', code,
594                           '(StoreCheck << FlagShift)')
595
596    defineMicroLoadSplitOp('LdSplitl', code,
597                           '(StoreCheck << FlagShift) | Request::LOCKED_RMW',
598                           nonSpec=True)
599
    # Generates one store microop (single register) and registers its
    # allocator class; completeCode feeds the %(complete_code)s hook.
600    def defineMicroStoreOp(mnemonic, code, completeCode="", mem_flags="0",
601                           implicitStack=False):
602        global header_output
603        global decoder_output
604        global exec_output
605        global microopClasses
606        Name = mnemonic
607        name = mnemonic.lower()
608
609        # Build up the all register version of this micro op
610        iop = InstObjParams(name, Name, 'X86ISA::LdStOp',
611                            { "code": code,
612                              "complete_code": completeCode,
613                              "ea_code": calculateEA,
614                              "memDataSize": "dataSize" })
615        header_output += MicroLdStOpDeclare.subst(iop)
616        decoder_output += MicroLdStOpConstructor.subst(iop)
617        exec_output += MicroStoreExecute.subst(iop)
618        exec_output += MicroStoreInitiateAcc.subst(iop)
619        exec_output += MicroStoreCompleteAcc.subst(iop)
620
621        if implicitStack:
622            # For instructions that implicitly access the stack, the address
623            # size is the same as the stack segment pointer size, not the
624            # address size if specified by the instruction prefix
625            addressSize = "env.stackSize"
626        else:
627            addressSize = "env.addressSize"
628
        # Allocator class; stores never set the prefetch flag (fixed False).
629        class StoreOp(LdStOp):
630            def __init__(self, data, segment, addr, disp = 0,
631                    dataSize="env.dataSize",
632                    addressSize=addressSize,
633                    atCPL0=False, nonSpec=False, implicitStack=implicitStack):
634                super(StoreOp, self).__init__(data, segment, addr, disp,
635                        dataSize, addressSize, mem_flags, atCPL0, False,
636                        nonSpec, implicitStack)
637                self.className = Name
638                self.mnemonic = name
639
640        microopClasses[name] = StoreOp
641
    # Concrete store microops: plain/implicit-stack/locked integer stores,
    # raw FP store, x87 store (with narrowing to float), and cda, which
    # touches the line for permission checks without accessing memory.
642    defineMicroStoreOp('St', 'Mem = pick(Data, 2, dataSize);')
643    defineMicroStoreOp('Stis', 'Mem = pick(Data, 2, dataSize);',
644                       implicitStack=True)
645    defineMicroStoreOp('Stul', 'Mem = pick(Data, 2, dataSize);',
646                       mem_flags="Request::LOCKED_RMW")
647
648    defineMicroStoreOp('Stfp', code='Mem = FpData_uqw;')
649
650    defineMicroStoreOp('Stfp87', code='''
651        switch (dataSize)
652        {
653          case 4: {
654            float single(FpData_df);
655            Mem = *(uint32_t *)&single;
656          } break;
657          case 8:
658            Mem = *(uint64_t *)&FpData_df;
659            break;
660          default:
661            panic("Unhandled data size in StFp87.\\n");
662        }
663        ''')
664
665    defineMicroStoreOp('Cda', 'Mem = 0;', mem_flags="Request::NO_ACCESS")
666
    # Generates a register-pair store microop (memDataSize = 2 * dataSize)
    # and registers its allocator class.
667    def defineMicroStoreSplitOp(mnemonic, code,
668                                completeCode="", mem_flags="0"):
669        global header_output
670        global decoder_output
671        global exec_output
672        global microopClasses
673        Name = mnemonic
674        name = mnemonic.lower()
675
676        iop = InstObjParams(name, Name, 'X86ISA::LdStSplitOp',
677                            { "code": code,
678                              "complete_code": completeCode,
679                              "ea_code": calculateEA,
680                              "memDataSize": "2 * dataSize" })
681
682        header_output += MicroLdStSplitOpDeclare.subst(iop)
683        decoder_output += MicroLdStSplitOpConstructor.subst(iop)
684        exec_output += MicroStoreExecute.subst(iop)
685        exec_output += MicroStoreInitiateAcc.subst(iop)
686        exec_output += MicroStoreCompleteAcc.subst(iop)
687
        # Allocator class; 'data' is a (low, high) register pair.
688        class StoreOp(LdStSplitOp):
689            def __init__(self, data, segment, addr, disp = 0,
690                    dataSize="env.dataSize",
691                    addressSize="env.addressSize",
692                    atCPL0=False, nonSpec=False, implicitStack=False):
693                super(StoreOp, self).__init__(data, segment, addr, disp,
694                        dataSize, addressSize, mem_flags, atCPL0, False,
695                        nonSpec, implicitStack)
696                self.className = Name
697                self.mnemonic = name
698
699        microopClasses[name] = StoreOp
700
    # Split-store body: 4-byte case packs low/high halves into one qword;
    # 8-byte case writes two qwords.
701    code = '''
702    switch (dataSize) {
703      case 4:
704        Mem_u2qw[0] = (DataHi << 32) | DataLow;
705        break;
706      case 8:
707        Mem_u2qw[0] = DataLow;
708        Mem_u2qw[1] = DataHi;
709        break;
710      default:
711        panic("Unhandled data size %d in StSplit.\\n", dataSize);
712    }'''
713
714    defineMicroStoreSplitOp('StSplit', code);
715
716    defineMicroStoreSplitOp('StSplitul', code,
717                            mem_flags='Request::LOCKED_RMW')
718
    # lea microop: computes the segment-relative EA only (no SegBase, no
    # memory access) and merges it into the destination register.
719    iop = InstObjParams("lea", "Lea", 'X86ISA::LdStOp',
720                        { "code": "Data = merge(Data, EA, dataSize);",
721                          "ea_code": "EA = " + segmentEAExpr,
722                          "memDataSize": "dataSize" })
723    header_output += MicroLeaDeclare.subst(iop)
724    decoder_output += MicroLdStOpConstructor.subst(iop)
725    exec_output += MicroLeaExecute.subst(iop)
726
727    class LeaOp(LdStOp):
728        def __init__(self, data, segment, addr, disp = 0,
729                dataSize="env.dataSize", addressSize="env.addressSize"):
730            super(LeaOp, self).__init__(data, segment, addr, disp,
731                    dataSize, addressSize, "0", False, False, False, False)
732            self.className = "Lea"
733            self.mnemonic = "lea"
734
735    microopClasses["lea"] = LeaOp
736
737
    # tia microop (TLB invalidate address): computes the EA and demaps the
    # containing page; reuses the LEA templates since no memory is accessed.
738    iop = InstObjParams("tia", "Tia", 'X86ISA::LdStOp',
739                        { "code": "xc->demapPage(EA, 0);",
740                          "ea_code": calculateEA,
741                          "memDataSize": "dataSize" })
742    header_output += MicroLeaDeclare.subst(iop)
743    decoder_output += MicroLdStOpConstructor.subst(iop)
744    exec_output += MicroLeaExecute.subst(iop)
745
746    class TiaOp(LdStOp):
747        def __init__(self, segment, addr, disp = 0,
748                dataSize="env.dataSize",
749                addressSize="env.addressSize"):
            # No data register — NUM_INTREGS acts as a null register index.
750            super(TiaOp, self).__init__("InstRegIndex(NUM_INTREGS)", segment,
751                    addr, disp, dataSize, addressSize, "0", False, False,
752                    False, False)
753            self.className = "Tia"
754            self.mnemonic = "tia"
755
756    microopClasses["tia"] = TiaOp
757
    # cda microop (check data address): a no-access "store" used purely to
    # trigger permission/translation checks; reuses the Cda class generated
    # by defineMicroStoreOp above via the NO_ACCESS flag.
758    class CdaOp(LdStOp):
759        def __init__(self, segment, addr, disp = 0,
760                dataSize="env.dataSize",
761                addressSize="env.addressSize", atCPL0=False):
            # No data register — NUM_INTREGS acts as a null register index.
762            super(CdaOp, self).__init__("InstRegIndex(NUM_INTREGS)", segment,
763                    addr, disp, dataSize, addressSize, "Request::NO_ACCESS",
764                    atCPL0, False, False, False)
765            self.className = "Cda"
766            self.mnemonic = "cda"
767
768    microopClasses["cda"] = CdaOp
769}};
261 };
262}};
263
def template MicroLdStOpConstructor {{
    // Out-of-line constructor for a generated load/store microop class.
    // It forwards the decoded operands -- address components (scale,
    // index, base, displacement), segment, data register, sizes, and
    // memory request flags -- to the base class, then runs the
    // per-microop generated constructor snippet.
    %(class_name)s::%(class_name)s(
        ExtMachInst machInst, const char * instMnem, uint64_t setFlags,
        uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
        uint64_t _disp, InstRegIndex _segment,
        InstRegIndex _data,
        uint8_t _dataSize, uint8_t _addressSize,
        Request::FlagsType _memFlags) :
        %(base_class)s(machInst, "%(mnemonic)s", instMnem, setFlags,
                _scale, _index, _base,
                _disp, _segment, _data,
                _dataSize, _addressSize, _memFlags, %(op_class)s)
    {
        %(constructor)s;
    }
}};
280
def template MicroLdStSplitOpConstructor {{
    // Out-of-line constructor for a generated split load/store microop
    // class.  Identical to MicroLdStOpConstructor except that the data
    // operand is a register pair (_dataLow, _dataHi) instead of a single
    // register.
    %(class_name)s::%(class_name)s(
        ExtMachInst machInst, const char * instMnem, uint64_t setFlags,
        uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
        uint64_t _disp, InstRegIndex _segment,
        InstRegIndex _dataLow, InstRegIndex _dataHi,
        uint8_t _dataSize, uint8_t _addressSize,
        Request::FlagsType _memFlags) :
        %(base_class)s(machInst, "%(mnemonic)s", instMnem, setFlags,
                _scale, _index, _base,
                _disp, _segment, _dataLow, _dataHi,
                _dataSize, _addressSize, _memFlags, %(op_class)s)
    {
        %(constructor)s;
    }
}};
297
298let {{
299 class LdStOp(X86Microop):
300 def __init__(self, data, segment, addr, disp,
301 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec,
302 implicitStack):
303 self.data = data
304 [self.scale, self.index, self.base] = addr
305 self.disp = disp
306 self.segment = segment
307 self.dataSize = dataSize
308 self.addressSize = addressSize
309 self.memFlags = baseFlags
310 if atCPL0:
311 self.memFlags += " | (CPL0FlagBit << FlagShift)"
312 self.instFlags = ""
313 if prefetch:
314 self.memFlags += " | Request::PREFETCH"
315 self.instFlags += " | (1ULL << StaticInst::IsDataPrefetch)"
316 if nonSpec:
317 self.instFlags += " | (1ULL << StaticInst::IsNonSpeculative)"
318 # For implicit stack operations, we should use *not* use the
319 # alternative addressing mode for loads/stores if the prefix is set
320 if not implicitStack:
321 self.memFlags += " | (machInst.legacy.addr ? " + \
322 "(AddrSizeFlagBit << FlagShift) : 0)"
323
324 def getAllocator(self, microFlags):
325 allocator = '''new %(class_name)s(machInst, macrocodeBlock,
326 %(flags)s, %(scale)s, %(index)s, %(base)s,
327 %(disp)s, %(segment)s, %(data)s,
328 %(dataSize)s, %(addressSize)s, %(memFlags)s)''' % {
329 "class_name" : self.className,
330 "flags" : self.microFlagsText(microFlags) + self.instFlags,
331 "scale" : self.scale, "index" : self.index,
332 "base" : self.base,
333 "disp" : self.disp,
334 "segment" : self.segment, "data" : self.data,
335 "dataSize" : self.dataSize, "addressSize" : self.addressSize,
336 "memFlags" : self.memFlags}
337 return allocator
338
339 class BigLdStOp(X86Microop):
340 def __init__(self, data, segment, addr, disp,
341 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec,
342 implicitStack):
343 self.data = data
344 [self.scale, self.index, self.base] = addr
345 self.disp = disp
346 self.segment = segment
347 self.dataSize = dataSize
348 self.addressSize = addressSize
349 self.memFlags = baseFlags
350 if atCPL0:
351 self.memFlags += " | (CPL0FlagBit << FlagShift)"
352 self.instFlags = ""
353 if prefetch:
354 self.memFlags += " | Request::PREFETCH"
355 self.instFlags += " | (1ULL << StaticInst::IsDataPrefetch)"
356 if nonSpec:
357 self.instFlags += " | (1ULL << StaticInst::IsNonSpeculative)"
358 # For implicit stack operations, we should use *not* use the
359 # alternative addressing mode for loads/stores if the prefix is set
360 if not implicitStack:
361 self.memFlags += " | (machInst.legacy.addr ? " + \
362 "(AddrSizeFlagBit << FlagShift) : 0)"
363
364 def getAllocator(self, microFlags):
365 allocString = '''
366 (%(dataSize)s >= 4) ?
367 (StaticInstPtr)(new %(class_name)sBig(machInst,
368 macrocodeBlock, %(flags)s, %(scale)s, %(index)s,
369 %(base)s, %(disp)s, %(segment)s, %(data)s,
370 %(dataSize)s, %(addressSize)s, %(memFlags)s)) :
371 (StaticInstPtr)(new %(class_name)s(machInst,
372 macrocodeBlock, %(flags)s, %(scale)s, %(index)s,
373 %(base)s, %(disp)s, %(segment)s, %(data)s,
374 %(dataSize)s, %(addressSize)s, %(memFlags)s))
375 '''
376 allocator = allocString % {
377 "class_name" : self.className,
378 "flags" : self.microFlagsText(microFlags) + self.instFlags,
379 "scale" : self.scale, "index" : self.index,
380 "base" : self.base,
381 "disp" : self.disp,
382 "segment" : self.segment, "data" : self.data,
383 "dataSize" : self.dataSize, "addressSize" : self.addressSize,
384 "memFlags" : self.memFlags}
385 return allocator
386
387 class LdStSplitOp(LdStOp):
388 def __init__(self, data, segment, addr, disp,
389 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec,
390 implicitStack):
391 super(LdStSplitOp, self).__init__(0, segment, addr, disp,
392 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec,
393 implicitStack)
394 (self.dataLow, self.dataHi) = data
395
396 def getAllocator(self, microFlags):
397 allocString = '''(StaticInstPtr)(new %(class_name)s(machInst,
398 macrocodeBlock, %(flags)s, %(scale)s, %(index)s,
399 %(base)s, %(disp)s, %(segment)s,
400 %(dataLow)s, %(dataHi)s,
401 %(dataSize)s, %(addressSize)s, %(memFlags)s))
402 '''
403 allocator = allocString % {
404 "class_name" : self.className,
405 "flags" : self.microFlagsText(microFlags) + self.instFlags,
406 "scale" : self.scale, "index" : self.index,
407 "base" : self.base,
408 "disp" : self.disp,
409 "segment" : self.segment,
410 "dataLow" : self.dataLow, "dataHi" : self.dataHi,
411 "dataSize" : self.dataSize, "addressSize" : self.addressSize,
412 "memFlags" : self.memFlags}
413 return allocator
414
415}};
416
417let {{
418
    # Make these empty strings so that concatenating onto
    # them will always work.
    header_output = ""
    decoder_output = ""
    exec_output = ""

    # C++ fragment for the segment-relative effective address:
    # scale * index + base + displacement, truncated to the address size.
    segmentEAExpr = \
        'bits(scale * Index + Base + disp, addressSize * 8 - 1, 0);'

    # Full effective address: segment base plus the expression above.
    calculateEA = 'EA = SegBase + ' + segmentEAExpr
429
430 def defineMicroLoadOp(mnemonic, code, bigCode='',
431 mem_flags="0", big=True, nonSpec=False,
432 implicitStack=False):
433 global header_output
434 global decoder_output
435 global exec_output
436 global microopClasses
437 Name = mnemonic
438 name = mnemonic.lower()
439
440 # Build up the all register version of this micro op
441 iops = [InstObjParams(name, Name, 'X86ISA::LdStOp',
442 { "code": code,
443 "ea_code": calculateEA,
444 "memDataSize": "dataSize" })]
445 if big:
446 iops += [InstObjParams(name, Name + "Big", 'X86ISA::LdStOp',
447 { "code": bigCode,
448 "ea_code": calculateEA,
449 "memDataSize": "dataSize" })]
450 for iop in iops:
451 header_output += MicroLdStOpDeclare.subst(iop)
452 decoder_output += MicroLdStOpConstructor.subst(iop)
453 exec_output += MicroLoadExecute.subst(iop)
454 exec_output += MicroLoadInitiateAcc.subst(iop)
455 exec_output += MicroLoadCompleteAcc.subst(iop)
456
457 if implicitStack:
458 # For instructions that implicitly access the stack, the address
459 # size is the same as the stack segment pointer size, not the
460 # address size if specified by the instruction prefix
461 addressSize = "env.stackSize"
462 else:
463 addressSize = "env.addressSize"
464
465 base = LdStOp
466 if big:
467 base = BigLdStOp
468 class LoadOp(base):
469 def __init__(self, data, segment, addr, disp = 0,
470 dataSize="env.dataSize",
471 addressSize=addressSize,
472 atCPL0=False, prefetch=False, nonSpec=nonSpec,
473 implicitStack=implicitStack):
474 super(LoadOp, self).__init__(data, segment, addr,
475 disp, dataSize, addressSize, mem_flags,
476 atCPL0, prefetch, nonSpec, implicitStack)
477 self.className = Name
478 self.mnemonic = name
479
480 microopClasses[name] = LoadOp
481
    # Plain load; the "Big" variant overwrites the whole destination
    # register instead of merging into its low bytes.
    defineMicroLoadOp('Ld', 'Data = merge(Data, Mem, dataSize);',
                            'Data = Mem & mask(dataSize * 8);')
    # Load for implicit stack accesses: the address size comes from the
    # stack segment, not the instruction's address size prefix.
    defineMicroLoadOp('Ldis', 'Data = merge(Data, Mem, dataSize);',
                              'Data = Mem & mask(dataSize * 8);',
                              implicitStack=True)
    # Load that additionally sets the StoreCheck request flag.
    defineMicroLoadOp('Ldst', 'Data = merge(Data, Mem, dataSize);',
                              'Data = Mem & mask(dataSize * 8);',
                              '(StoreCheck << FlagShift)')
    # Locked (LOCKED_RMW) load with store check; must be executed
    # non-speculatively.
    defineMicroLoadOp('Ldstl', 'Data = merge(Data, Mem, dataSize);',
                               'Data = Mem & mask(dataSize * 8);',
                               '(StoreCheck << FlagShift) | Request::LOCKED_RMW',
                               nonSpec=True)

    # Raw load into a floating point register; no "Big" variant needed.
    defineMicroLoadOp('Ldfp', code='FpData_uqw = Mem', big = False)

    # Load a 32- or 64-bit IEEE value from memory and widen it to the
    # double representation used internally for x87.
    defineMicroLoadOp('Ldfp87', code='''
        switch (dataSize)
        {
          case 4:
            FpData_df = *(float *)&Mem;
            break;
          case 8:
            FpData_df = *(double *)&Mem;
            break;
          default:
            panic("Unhandled data size in LdFp87.\\n");
        }
    ''', big = False)

    # Load integer from memory into x87 top-of-stack register.
    # Used to implement fild instruction.
    defineMicroLoadOp('Ldifp87', code='''
        switch (dataSize)
        {
          case 2:
            FpData_df = (int64_t)sext<16>(Mem);
            break;
          case 4:
            FpData_df = (int64_t)sext<32>(Mem);
            break;
          case 8:
            FpData_df = (int64_t)Mem;
            break;
          default:
            panic("Unhandled data size in LdIFp87.\\n");
        }
    ''', big = False)
529
530 def defineMicroLoadSplitOp(mnemonic, code, mem_flags="0", nonSpec=False):
531 global header_output
532 global decoder_output
533 global exec_output
534 global microopClasses
535 Name = mnemonic
536 name = mnemonic.lower()
537
538 iop = InstObjParams(name, Name, 'X86ISA::LdStSplitOp',
539 { "code": code,
540 "ea_code": calculateEA,
541 "memDataSize": "2 * dataSize" })
542
543 header_output += MicroLdStSplitOpDeclare.subst(iop)
544 decoder_output += MicroLdStSplitOpConstructor.subst(iop)
545 exec_output += MicroLoadExecute.subst(iop)
546 exec_output += MicroLoadInitiateAcc.subst(iop)
547 exec_output += MicroLoadCompleteAcc.subst(iop)
548
549 class LoadOp(LdStSplitOp):
550 def __init__(self, data, segment, addr, disp = 0,
551 dataSize="env.dataSize",
552 addressSize="env.addressSize",
553 atCPL0=False, prefetch=False, nonSpec=nonSpec,
554 implicitStack=False):
555 super(LoadOp, self).__init__(data, segment, addr,
556 disp, dataSize, addressSize, mem_flags,
557 atCPL0, prefetch, nonSpec, implicitStack)
558 self.className = Name
559 self.mnemonic = name
560
561 microopClasses[name] = LoadOp
562
    # C++ fragment filling a (low, high) register pair from a
    # double-width memory result: for a 4 byte data size both halves
    # come from one 64 bit chunk; for 8 bytes each chunk fills one
    # register.
    code = '''
    switch (dataSize) {
      case 4:
        DataLow = bits(Mem_u2qw[0], 31, 0);
        DataHi = bits(Mem_u2qw[0], 63, 32);
        break;
      case 8:
        DataLow = Mem_u2qw[0];
        DataHi = Mem_u2qw[1];
        break;
      default:
        panic("Unhandled data size %d in LdSplit.\\n", dataSize);
    }'''

    # Split load with the store check flag set.
    defineMicroLoadSplitOp('LdSplit', code,
                           '(StoreCheck << FlagShift)')

    # Locked (LOCKED_RMW) split load; non-speculative.
    defineMicroLoadSplitOp('LdSplitl', code,
                           '(StoreCheck << FlagShift) | Request::LOCKED_RMW',
                           nonSpec=True)
583
584 def defineMicroStoreOp(mnemonic, code, completeCode="", mem_flags="0",
585 implicitStack=False):
586 global header_output
587 global decoder_output
588 global exec_output
589 global microopClasses
590 Name = mnemonic
591 name = mnemonic.lower()
592
593 # Build up the all register version of this micro op
594 iop = InstObjParams(name, Name, 'X86ISA::LdStOp',
595 { "code": code,
596 "complete_code": completeCode,
597 "ea_code": calculateEA,
598 "memDataSize": "dataSize" })
599 header_output += MicroLdStOpDeclare.subst(iop)
600 decoder_output += MicroLdStOpConstructor.subst(iop)
601 exec_output += MicroStoreExecute.subst(iop)
602 exec_output += MicroStoreInitiateAcc.subst(iop)
603 exec_output += MicroStoreCompleteAcc.subst(iop)
604
605 if implicitStack:
606 # For instructions that implicitly access the stack, the address
607 # size is the same as the stack segment pointer size, not the
608 # address size if specified by the instruction prefix
609 addressSize = "env.stackSize"
610 else:
611 addressSize = "env.addressSize"
612
613 class StoreOp(LdStOp):
614 def __init__(self, data, segment, addr, disp = 0,
615 dataSize="env.dataSize",
616 addressSize=addressSize,
617 atCPL0=False, nonSpec=False, implicitStack=implicitStack):
618 super(StoreOp, self).__init__(data, segment, addr, disp,
619 dataSize, addressSize, mem_flags, atCPL0, False,
620 nonSpec, implicitStack)
621 self.className = Name
622 self.mnemonic = name
623
624 microopClasses[name] = StoreOp
625
    # Plain store of the low dataSize bytes of the data register.
    defineMicroStoreOp('St', 'Mem = pick(Data, 2, dataSize);')
    # Store for implicit stack accesses: the address size comes from the
    # stack segment, not the instruction's address size prefix.
    defineMicroStoreOp('Stis', 'Mem = pick(Data, 2, dataSize);',
            implicitStack=True)
    # Store with the LOCKED_RMW flag set (pairs with Ldstl's locked load).
    defineMicroStoreOp('Stul', 'Mem = pick(Data, 2, dataSize);',
            mem_flags="Request::LOCKED_RMW")

    # Raw store of a floating point register.
    defineMicroStoreOp('Stfp', code='Mem = FpData_uqw;')

    # Narrow the internal x87 double representation to the 32- or 64-bit
    # IEEE memory format before storing.
    defineMicroStoreOp('Stfp87', code='''
        switch (dataSize)
        {
          case 4: {
            float single(FpData_df);
            Mem = *(uint32_t *)&single;
          } break;
          case 8:
            Mem = *(uint64_t *)&FpData_df;
            break;
          default:
            panic("Unhandled data size in StFp87.\\n");
        }
    ''')

    # Check a data address without transferring any data
    # (Request::NO_ACCESS); the stored value is a dummy zero.
    defineMicroStoreOp('Cda', 'Mem = 0;', mem_flags="Request::NO_ACCESS")
650
651 def defineMicroStoreSplitOp(mnemonic, code,
652 completeCode="", mem_flags="0"):
653 global header_output
654 global decoder_output
655 global exec_output
656 global microopClasses
657 Name = mnemonic
658 name = mnemonic.lower()
659
660 iop = InstObjParams(name, Name, 'X86ISA::LdStSplitOp',
661 { "code": code,
662 "complete_code": completeCode,
663 "ea_code": calculateEA,
664 "memDataSize": "2 * dataSize" })
665
666 header_output += MicroLdStSplitOpDeclare.subst(iop)
667 decoder_output += MicroLdStSplitOpConstructor.subst(iop)
668 exec_output += MicroStoreExecute.subst(iop)
669 exec_output += MicroStoreInitiateAcc.subst(iop)
670 exec_output += MicroStoreCompleteAcc.subst(iop)
671
672 class StoreOp(LdStSplitOp):
673 def __init__(self, data, segment, addr, disp = 0,
674 dataSize="env.dataSize",
675 addressSize="env.addressSize",
676 atCPL0=False, nonSpec=False, implicitStack=False):
677 super(StoreOp, self).__init__(data, segment, addr, disp,
678 dataSize, addressSize, mem_flags, atCPL0, False,
679 nonSpec, implicitStack)
680 self.className = Name
681 self.mnemonic = name
682
683 microopClasses[name] = StoreOp
684
685 code = '''
686 switch (dataSize) {
687 case 4:
688 Mem_u2qw[0] = (DataHi << 32) | DataLow;
689 break;
690 case 8:
691 Mem_u2qw[0] = DataLow;
692 Mem_u2qw[1] = DataHi;
693 break;
694 default:
695 panic("Unhandled data size %d in StSplit.\\n", dataSize);
696 }'''
697
698 defineMicroStoreSplitOp('StSplit', code);
699
700 defineMicroStoreSplitOp('StSplitul', code,
701 mem_flags='Request::LOCKED_RMW')
702
    # Lea: computes the segment-relative effective address (no SegBase,
    # unlike real accesses) and merges it into the destination register;
    # no memory is accessed, hence the Lea declare/execute templates.
    iop = InstObjParams("lea", "Lea", 'X86ISA::LdStOp',
            { "code": "Data = merge(Data, EA, dataSize);",
              "ea_code": "EA = " + segmentEAExpr,
              "memDataSize": "dataSize" })
    header_output += MicroLeaDeclare.subst(iop)
    decoder_output += MicroLdStOpConstructor.subst(iop)
    exec_output += MicroLeaExecute.subst(iop)
710
711 class LeaOp(LdStOp):
712 def __init__(self, data, segment, addr, disp = 0,
713 dataSize="env.dataSize", addressSize="env.addressSize"):
714 super(LeaOp, self).__init__(data, segment, addr, disp,
715 dataSize, addressSize, "0", False, False, False, False)
716 self.className = "Lea"
717 self.mnemonic = "lea"
718
719 microopClasses["lea"] = LeaOp
720
721
    # Tia: computes an effective address like a load/store, but instead
    # of accessing memory it calls xc->demapPage(EA, 0) on the result.
    # It reuses the Lea declare/execute templates since no access occurs.
    iop = InstObjParams("tia", "Tia", 'X86ISA::LdStOp',
            { "code": "xc->demapPage(EA, 0);",
              "ea_code": calculateEA,
              "memDataSize": "dataSize" })
    header_output += MicroLeaDeclare.subst(iop)
    decoder_output += MicroLdStOpConstructor.subst(iop)
    exec_output += MicroLeaExecute.subst(iop)
729
730 class TiaOp(LdStOp):
731 def __init__(self, segment, addr, disp = 0,
732 dataSize="env.dataSize",
733 addressSize="env.addressSize"):
734 super(TiaOp, self).__init__("InstRegIndex(NUM_INTREGS)", segment,
735 addr, disp, dataSize, addressSize, "0", False, False,
736 False, False)
737 self.className = "Tia"
738 self.mnemonic = "tia"
739
740 microopClasses["tia"] = TiaOp
741
742 class CdaOp(LdStOp):
743 def __init__(self, segment, addr, disp = 0,
744 dataSize="env.dataSize",
745 addressSize="env.addressSize", atCPL0=False):
746 super(CdaOp, self).__init__("InstRegIndex(NUM_INTREGS)", segment,
747 addr, disp, dataSize, addressSize, "Request::NO_ACCESS",
748 atCPL0, False, False, False)
749 self.className = "Cda"
750 self.mnemonic = "cda"
751
752 microopClasses["cda"] = CdaOp
753}};