ldstop.isa (11829:cb5390385d87) ldstop.isa (12234:78ece221f9f5)
1// Copyright (c) 2007-2008 The Hewlett-Packard Development Company
2// Copyright (c) 2015 Advanced Micro Devices, Inc.
3// All rights reserved.
4//
5// The license below extends only to copyright in the software and shall
6// not be construed as granting a license to any other intellectual
7// property including but not limited to intellectual property relating
8// to a hardware implementation of the functionality of the software
9// licensed hereunder. You may use the software subject to the license
10// terms below provided that you ensure that this notice is replicated
11// unmodified and in its entirety in all distributions of the software,
12// modified or unmodified, in source code or in binary form.
13//
14// Copyright (c) 2008 The Regents of The University of Michigan
15// All rights reserved.
16//
17// Redistribution and use in source and binary forms, with or without
18// modification, are permitted provided that the following conditions are
19// met: redistributions of source code must retain the above copyright
20// notice, this list of conditions and the following disclaimer;
21// redistributions in binary form must reproduce the above copyright
22// notice, this list of conditions and the following disclaimer in the
23// documentation and/or other materials provided with the distribution;
24// neither the name of the copyright holders nor the names of its
25// contributors may be used to endorse or promote products derived from
26// this software without specific prior written permission.
27//
28// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
29// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
30// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
31// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
32// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
33// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
34// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
35// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
36// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
37// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
38// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
39//
40// Authors: Gabe Black
41
42//////////////////////////////////////////////////////////////////////////
43//
44// LdStOp Microop templates
45//
46//////////////////////////////////////////////////////////////////////////
47
48// LEA template
49
50def template MicroLeaExecute {{
1// Copyright (c) 2007-2008 The Hewlett-Packard Development Company
2// Copyright (c) 2015 Advanced Micro Devices, Inc.
3// All rights reserved.
4//
5// The license below extends only to copyright in the software and shall
6// not be construed as granting a license to any other intellectual
7// property including but not limited to intellectual property relating
8// to a hardware implementation of the functionality of the software
9// licensed hereunder. You may use the software subject to the license
10// terms below provided that you ensure that this notice is replicated
11// unmodified and in its entirety in all distributions of the software,
12// modified or unmodified, in source code or in binary form.
13//
14// Copyright (c) 2008 The Regents of The University of Michigan
15// All rights reserved.
16//
17// Redistribution and use in source and binary forms, with or without
18// modification, are permitted provided that the following conditions are
19// met: redistributions of source code must retain the above copyright
20// notice, this list of conditions and the following disclaimer;
21// redistributions in binary form must reproduce the above copyright
22// notice, this list of conditions and the following disclaimer in the
23// documentation and/or other materials provided with the distribution;
24// neither the name of the copyright holders nor the names of its
25// contributors may be used to endorse or promote products derived from
26// this software without specific prior written permission.
27//
28// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
29// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
30// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
31// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
32// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
33// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
34// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
35// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
36// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
37// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
38// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
39//
40// Authors: Gabe Black
41
42//////////////////////////////////////////////////////////////////////////
43//
44// LdStOp Microop templates
45//
46//////////////////////////////////////////////////////////////////////////
47
48// LEA template
49
50def template MicroLeaExecute {{
51 Fault %(class_name)s::execute(CPU_EXEC_CONTEXT *xc,
51 Fault %(class_name)s::execute(ExecContext *xc,
52 Trace::InstRecord *traceData) const
53 {
54 Fault fault = NoFault;
55 Addr EA;
56
57 %(op_decl)s;
58 %(op_rd)s;
59 %(ea_code)s;
60 DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);
61
62 %(code)s;
63 if(fault == NoFault)
64 {
65 %(op_wb)s;
66 }
67
68 return fault;
69 }
70}};
71
72def template MicroLeaDeclare {{
73 class %(class_name)s : public %(base_class)s
74 {
75 public:
76 %(class_name)s(ExtMachInst _machInst,
77 const char * instMnem, uint64_t setFlags,
78 uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
79 uint64_t _disp, InstRegIndex _segment,
80 InstRegIndex _data,
81 uint8_t _dataSize, uint8_t _addressSize,
82 Request::FlagsType _memFlags);
83
84 %(BasicExecDeclare)s
85 };
86}};
87
88// Load templates
89
90def template MicroLoadExecute {{
52 Trace::InstRecord *traceData) const
53 {
54 Fault fault = NoFault;
55 Addr EA;
56
57 %(op_decl)s;
58 %(op_rd)s;
59 %(ea_code)s;
60 DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);
61
62 %(code)s;
63 if(fault == NoFault)
64 {
65 %(op_wb)s;
66 }
67
68 return fault;
69 }
70}};
71
72def template MicroLeaDeclare {{
73 class %(class_name)s : public %(base_class)s
74 {
75 public:
76 %(class_name)s(ExtMachInst _machInst,
77 const char * instMnem, uint64_t setFlags,
78 uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
79 uint64_t _disp, InstRegIndex _segment,
80 InstRegIndex _data,
81 uint8_t _dataSize, uint8_t _addressSize,
82 Request::FlagsType _memFlags);
83
84 %(BasicExecDeclare)s
85 };
86}};
87
88// Load templates
89
90def template MicroLoadExecute {{
91 Fault %(class_name)s::execute(CPU_EXEC_CONTEXT *xc,
91 Fault %(class_name)s::execute(ExecContext *xc,
92 Trace::InstRecord *traceData) const
93 {
94 Fault fault = NoFault;
95 Addr EA;
96
97 %(op_decl)s;
98 %(op_rd)s;
99 %(ea_code)s;
100 DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);
101
102 fault = readMemAtomic(xc, traceData, EA, Mem,
103 %(memDataSize)s, memFlags);
104
105 if (fault == NoFault) {
106 %(code)s;
107 } else if (memFlags & Request::PREFETCH) {
108 // For prefetches, ignore any faults/exceptions.
109 return NoFault;
110 }
111 if(fault == NoFault)
112 {
113 %(op_wb)s;
114 }
115
116 return fault;
117 }
118}};
119
120def template MicroLoadInitiateAcc {{
92 Trace::InstRecord *traceData) const
93 {
94 Fault fault = NoFault;
95 Addr EA;
96
97 %(op_decl)s;
98 %(op_rd)s;
99 %(ea_code)s;
100 DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);
101
102 fault = readMemAtomic(xc, traceData, EA, Mem,
103 %(memDataSize)s, memFlags);
104
105 if (fault == NoFault) {
106 %(code)s;
107 } else if (memFlags & Request::PREFETCH) {
108 // For prefetches, ignore any faults/exceptions.
109 return NoFault;
110 }
111 if(fault == NoFault)
112 {
113 %(op_wb)s;
114 }
115
116 return fault;
117 }
118}};
119
120def template MicroLoadInitiateAcc {{
121 Fault %(class_name)s::initiateAcc(CPU_EXEC_CONTEXT * xc,
121 Fault %(class_name)s::initiateAcc(ExecContext * xc,
122 Trace::InstRecord * traceData) const
123 {
124 Fault fault = NoFault;
125 Addr EA;
126
127 %(op_decl)s;
128 %(op_rd)s;
129 %(ea_code)s;
130 DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);
131
132 fault = initiateMemRead(xc, traceData, EA,
133 %(memDataSize)s, memFlags);
134
135 return fault;
136 }
137}};
138
139def template MicroLoadCompleteAcc {{
122 Trace::InstRecord * traceData) const
123 {
124 Fault fault = NoFault;
125 Addr EA;
126
127 %(op_decl)s;
128 %(op_rd)s;
129 %(ea_code)s;
130 DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);
131
132 fault = initiateMemRead(xc, traceData, EA,
133 %(memDataSize)s, memFlags);
134
135 return fault;
136 }
137}};
138
139def template MicroLoadCompleteAcc {{
140 Fault %(class_name)s::completeAcc(PacketPtr pkt,
141 CPU_EXEC_CONTEXT * xc,
142 Trace::InstRecord * traceData) const
140 Fault %(class_name)s::completeAcc(PacketPtr pkt, ExecContext * xc,
141 Trace::InstRecord * traceData) const
143 {
144 Fault fault = NoFault;
145
146 %(op_decl)s;
147 %(op_rd)s;
148
149 getMem(pkt, Mem, %(memDataSize)s, traceData);
150
151 %(code)s;
152
153 if(fault == NoFault)
154 {
155 %(op_wb)s;
156 }
157
158 return fault;
159 }
160}};
161
162// Store templates
163
164def template MicroStoreExecute {{
142 {
143 Fault fault = NoFault;
144
145 %(op_decl)s;
146 %(op_rd)s;
147
148 getMem(pkt, Mem, %(memDataSize)s, traceData);
149
150 %(code)s;
151
152 if(fault == NoFault)
153 {
154 %(op_wb)s;
155 }
156
157 return fault;
158 }
159}};
160
161// Store templates
162
163def template MicroStoreExecute {{
165 Fault %(class_name)s::execute(CPU_EXEC_CONTEXT * xc,
164 Fault %(class_name)s::execute(ExecContext * xc,
166 Trace::InstRecord *traceData) const
167 {
168 Fault fault = NoFault;
169
170 Addr EA;
171 %(op_decl)s;
172 %(op_rd)s;
173 %(ea_code)s;
174 DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);
175
176 %(code)s;
177
178 if(fault == NoFault)
179 {
180 fault = writeMemAtomic(xc, traceData, Mem, %(memDataSize)s, EA,
181 memFlags, NULL);
182 if(fault == NoFault)
183 {
184 %(op_wb)s;
185 }
186 }
187
188 return fault;
189 }
190}};
191
192def template MicroStoreInitiateAcc {{
165 Trace::InstRecord *traceData) const
166 {
167 Fault fault = NoFault;
168
169 Addr EA;
170 %(op_decl)s;
171 %(op_rd)s;
172 %(ea_code)s;
173 DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);
174
175 %(code)s;
176
177 if(fault == NoFault)
178 {
179 fault = writeMemAtomic(xc, traceData, Mem, %(memDataSize)s, EA,
180 memFlags, NULL);
181 if(fault == NoFault)
182 {
183 %(op_wb)s;
184 }
185 }
186
187 return fault;
188 }
189}};
190
191def template MicroStoreInitiateAcc {{
193 Fault %(class_name)s::initiateAcc(CPU_EXEC_CONTEXT * xc,
192 Fault %(class_name)s::initiateAcc(ExecContext * xc,
194 Trace::InstRecord * traceData) const
195 {
196 Fault fault = NoFault;
197
198 Addr EA;
199 %(op_decl)s;
200 %(op_rd)s;
201 %(ea_code)s;
202 DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);
203
204 %(code)s;
205
206 if(fault == NoFault)
207 {
208 fault = writeMemTiming(xc, traceData, Mem, %(memDataSize)s, EA,
209 memFlags, NULL);
210 }
211 return fault;
212 }
213}};
214
215def template MicroStoreCompleteAcc {{
216 Fault %(class_name)s::completeAcc(PacketPtr pkt,
193 Trace::InstRecord * traceData) const
194 {
195 Fault fault = NoFault;
196
197 Addr EA;
198 %(op_decl)s;
199 %(op_rd)s;
200 %(ea_code)s;
201 DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);
202
203 %(code)s;
204
205 if(fault == NoFault)
206 {
207 fault = writeMemTiming(xc, traceData, Mem, %(memDataSize)s, EA,
208 memFlags, NULL);
209 }
210 return fault;
211 }
212}};
213
214def template MicroStoreCompleteAcc {{
215 Fault %(class_name)s::completeAcc(PacketPtr pkt,
217 CPU_EXEC_CONTEXT * xc, Trace::InstRecord * traceData) const
216 ExecContext * xc, Trace::InstRecord * traceData) const
218 {
219 %(op_decl)s;
220 %(op_rd)s;
221 %(complete_code)s;
222 %(op_wb)s;
223 return NoFault;
224 }
225}};
226
227// Common templates
228
229//This delcares the initiateAcc function in memory operations
230def template InitiateAccDeclare {{
217 {
218 %(op_decl)s;
219 %(op_rd)s;
220 %(complete_code)s;
221 %(op_wb)s;
222 return NoFault;
223 }
224}};
225
226// Common templates
227
228//This delcares the initiateAcc function in memory operations
229def template InitiateAccDeclare {{
231 Fault initiateAcc(%(CPU_exec_context)s *, Trace::InstRecord *) const;
230 Fault initiateAcc(ExecContext *, Trace::InstRecord *) const;
232}};
233
234//This declares the completeAcc function in memory operations
235def template CompleteAccDeclare {{
231}};
232
233//This declares the completeAcc function in memory operations
234def template CompleteAccDeclare {{
236 Fault completeAcc(PacketPtr, %(CPU_exec_context)s *, Trace::InstRecord *) const;
235 Fault completeAcc(PacketPtr, ExecContext *, Trace::InstRecord *) const;
237}};
238
239def template MicroLdStOpDeclare {{
240 class %(class_name)s : public %(base_class)s
241 {
242 public:
243 %(class_name)s(ExtMachInst _machInst,
244 const char * instMnem, uint64_t setFlags,
245 uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
246 uint64_t _disp, InstRegIndex _segment,
247 InstRegIndex _data,
248 uint8_t _dataSize, uint8_t _addressSize,
249 Request::FlagsType _memFlags);
250
251 %(BasicExecDeclare)s
252
253 %(InitiateAccDeclare)s
254
255 %(CompleteAccDeclare)s
256 };
257}};
258
259// LdStSplitOp is a load or store that uses a pair of regs as the
260// source or destination. Used for cmpxchg{8,16}b.
261def template MicroLdStSplitOpDeclare {{
262 class %(class_name)s : public %(base_class)s
263 {
264 public:
265 %(class_name)s(ExtMachInst _machInst,
266 const char * instMnem, uint64_t setFlags,
267 uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
268 uint64_t _disp, InstRegIndex _segment,
269 InstRegIndex _dataLow, InstRegIndex _dataHi,
270 uint8_t _dataSize, uint8_t _addressSize,
271 Request::FlagsType _memFlags);
272
273 %(BasicExecDeclare)s
274
275 %(InitiateAccDeclare)s
276
277 %(CompleteAccDeclare)s
278 };
279}};
280
281def template MicroLdStOpConstructor {{
282 %(class_name)s::%(class_name)s(
283 ExtMachInst machInst, const char * instMnem, uint64_t setFlags,
284 uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
285 uint64_t _disp, InstRegIndex _segment,
286 InstRegIndex _data,
287 uint8_t _dataSize, uint8_t _addressSize,
288 Request::FlagsType _memFlags) :
289 %(base_class)s(machInst, "%(mnemonic)s", instMnem, setFlags,
290 _scale, _index, _base,
291 _disp, _segment, _data,
292 _dataSize, _addressSize, _memFlags, %(op_class)s)
293 {
294 %(constructor)s;
295 }
296}};
297
298def template MicroLdStSplitOpConstructor {{
299 %(class_name)s::%(class_name)s(
300 ExtMachInst machInst, const char * instMnem, uint64_t setFlags,
301 uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
302 uint64_t _disp, InstRegIndex _segment,
303 InstRegIndex _dataLow, InstRegIndex _dataHi,
304 uint8_t _dataSize, uint8_t _addressSize,
305 Request::FlagsType _memFlags) :
306 %(base_class)s(machInst, "%(mnemonic)s", instMnem, setFlags,
307 _scale, _index, _base,
308 _disp, _segment, _dataLow, _dataHi,
309 _dataSize, _addressSize, _memFlags, %(op_class)s)
310 {
311 %(constructor)s;
312 }
313}};
314
315let {{
316 class LdStOp(X86Microop):
317 def __init__(self, data, segment, addr, disp,
318 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec,
319 implicitStack):
320 self.data = data
321 [self.scale, self.index, self.base] = addr
322 self.disp = disp
323 self.segment = segment
324 self.dataSize = dataSize
325 self.addressSize = addressSize
326 self.memFlags = baseFlags
327 if atCPL0:
328 self.memFlags += " | (CPL0FlagBit << FlagShift)"
329 self.instFlags = ""
330 if prefetch:
331 self.memFlags += " | Request::PREFETCH"
332 self.instFlags += " | (1ULL << StaticInst::IsDataPrefetch)"
333 if nonSpec:
334 self.instFlags += " | (1ULL << StaticInst::IsNonSpeculative)"
335 # For implicit stack operations, we should use *not* use the
336 # alternative addressing mode for loads/stores if the prefix is set
337 if not implicitStack:
338 self.memFlags += " | (machInst.legacy.addr ? " + \
339 "(AddrSizeFlagBit << FlagShift) : 0)"
340
341 def getAllocator(self, microFlags):
342 allocator = '''new %(class_name)s(machInst, macrocodeBlock,
343 %(flags)s, %(scale)s, %(index)s, %(base)s,
344 %(disp)s, %(segment)s, %(data)s,
345 %(dataSize)s, %(addressSize)s, %(memFlags)s)''' % {
346 "class_name" : self.className,
347 "flags" : self.microFlagsText(microFlags) + self.instFlags,
348 "scale" : self.scale, "index" : self.index,
349 "base" : self.base,
350 "disp" : self.disp,
351 "segment" : self.segment, "data" : self.data,
352 "dataSize" : self.dataSize, "addressSize" : self.addressSize,
353 "memFlags" : self.memFlags}
354 return allocator
355
356 class BigLdStOp(X86Microop):
357 def __init__(self, data, segment, addr, disp,
358 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec,
359 implicitStack):
360 self.data = data
361 [self.scale, self.index, self.base] = addr
362 self.disp = disp
363 self.segment = segment
364 self.dataSize = dataSize
365 self.addressSize = addressSize
366 self.memFlags = baseFlags
367 if atCPL0:
368 self.memFlags += " | (CPL0FlagBit << FlagShift)"
369 self.instFlags = ""
370 if prefetch:
371 self.memFlags += " | Request::PREFETCH"
372 self.instFlags += " | (1ULL << StaticInst::IsDataPrefetch)"
373 if nonSpec:
374 self.instFlags += " | (1ULL << StaticInst::IsNonSpeculative)"
375 # For implicit stack operations, we should use *not* use the
376 # alternative addressing mode for loads/stores if the prefix is set
377 if not implicitStack:
378 self.memFlags += " | (machInst.legacy.addr ? " + \
379 "(AddrSizeFlagBit << FlagShift) : 0)"
380
381 def getAllocator(self, microFlags):
382 allocString = '''
383 (%(dataSize)s >= 4) ?
384 (StaticInstPtr)(new %(class_name)sBig(machInst,
385 macrocodeBlock, %(flags)s, %(scale)s, %(index)s,
386 %(base)s, %(disp)s, %(segment)s, %(data)s,
387 %(dataSize)s, %(addressSize)s, %(memFlags)s)) :
388 (StaticInstPtr)(new %(class_name)s(machInst,
389 macrocodeBlock, %(flags)s, %(scale)s, %(index)s,
390 %(base)s, %(disp)s, %(segment)s, %(data)s,
391 %(dataSize)s, %(addressSize)s, %(memFlags)s))
392 '''
393 allocator = allocString % {
394 "class_name" : self.className,
395 "flags" : self.microFlagsText(microFlags) + self.instFlags,
396 "scale" : self.scale, "index" : self.index,
397 "base" : self.base,
398 "disp" : self.disp,
399 "segment" : self.segment, "data" : self.data,
400 "dataSize" : self.dataSize, "addressSize" : self.addressSize,
401 "memFlags" : self.memFlags}
402 return allocator
403
404 class LdStSplitOp(LdStOp):
405 def __init__(self, data, segment, addr, disp,
406 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec,
407 implicitStack):
408 super(LdStSplitOp, self).__init__(0, segment, addr, disp,
409 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec,
410 implicitStack)
411 (self.dataLow, self.dataHi) = data
412
413 def getAllocator(self, microFlags):
414 allocString = '''(StaticInstPtr)(new %(class_name)s(machInst,
415 macrocodeBlock, %(flags)s, %(scale)s, %(index)s,
416 %(base)s, %(disp)s, %(segment)s,
417 %(dataLow)s, %(dataHi)s,
418 %(dataSize)s, %(addressSize)s, %(memFlags)s))
419 '''
420 allocator = allocString % {
421 "class_name" : self.className,
422 "flags" : self.microFlagsText(microFlags) + self.instFlags,
423 "scale" : self.scale, "index" : self.index,
424 "base" : self.base,
425 "disp" : self.disp,
426 "segment" : self.segment,
427 "dataLow" : self.dataLow, "dataHi" : self.dataHi,
428 "dataSize" : self.dataSize, "addressSize" : self.addressSize,
429 "memFlags" : self.memFlags}
430 return allocator
431
432}};
433
434let {{
435
436 # Make these empty strings so that concatenating onto
437 # them will always work.
438 header_output = ""
439 decoder_output = ""
440 exec_output = ""
441
442 segmentEAExpr = \
443 'bits(scale * Index + Base + disp, addressSize * 8 - 1, 0);'
444
445 calculateEA = 'EA = SegBase + ' + segmentEAExpr
446
447 def defineMicroLoadOp(mnemonic, code, bigCode='',
448 mem_flags="0", big=True, nonSpec=False,
449 implicitStack=False):
450 global header_output
451 global decoder_output
452 global exec_output
453 global microopClasses
454 Name = mnemonic
455 name = mnemonic.lower()
456
457 # Build up the all register version of this micro op
458 iops = [InstObjParams(name, Name, 'X86ISA::LdStOp',
459 { "code": code,
460 "ea_code": calculateEA,
461 "memDataSize": "dataSize" })]
462 if big:
463 iops += [InstObjParams(name, Name + "Big", 'X86ISA::LdStOp',
464 { "code": bigCode,
465 "ea_code": calculateEA,
466 "memDataSize": "dataSize" })]
467 for iop in iops:
468 header_output += MicroLdStOpDeclare.subst(iop)
469 decoder_output += MicroLdStOpConstructor.subst(iop)
470 exec_output += MicroLoadExecute.subst(iop)
471 exec_output += MicroLoadInitiateAcc.subst(iop)
472 exec_output += MicroLoadCompleteAcc.subst(iop)
473
474 if implicitStack:
475 # For instructions that implicitly access the stack, the address
476 # size is the same as the stack segment pointer size, not the
477 # address size if specified by the instruction prefix
478 addressSize = "env.stackSize"
479 else:
480 addressSize = "env.addressSize"
481
482 base = LdStOp
483 if big:
484 base = BigLdStOp
485 class LoadOp(base):
486 def __init__(self, data, segment, addr, disp = 0,
487 dataSize="env.dataSize",
488 addressSize=addressSize,
489 atCPL0=False, prefetch=False, nonSpec=nonSpec,
490 implicitStack=implicitStack):
491 super(LoadOp, self).__init__(data, segment, addr,
492 disp, dataSize, addressSize, mem_flags,
493 atCPL0, prefetch, nonSpec, implicitStack)
494 self.className = Name
495 self.mnemonic = name
496
497 microopClasses[name] = LoadOp
498
499 defineMicroLoadOp('Ld', 'Data = merge(Data, Mem, dataSize);',
500 'Data = Mem & mask(dataSize * 8);')
501 defineMicroLoadOp('Ldis', 'Data = merge(Data, Mem, dataSize);',
502 'Data = Mem & mask(dataSize * 8);',
503 implicitStack=True)
504 defineMicroLoadOp('Ldst', 'Data = merge(Data, Mem, dataSize);',
505 'Data = Mem & mask(dataSize * 8);',
506 '(StoreCheck << FlagShift)')
507 defineMicroLoadOp('Ldstl', 'Data = merge(Data, Mem, dataSize);',
508 'Data = Mem & mask(dataSize * 8);',
509 '(StoreCheck << FlagShift) | Request::LOCKED_RMW',
510 nonSpec=True)
511
512 defineMicroLoadOp('Ldfp', code='FpData_uqw = Mem', big = False)
513
514 defineMicroLoadOp('Ldfp87', code='''
515 switch (dataSize)
516 {
517 case 4:
518 FpData_df = *(float *)&Mem;
519 break;
520 case 8:
521 FpData_df = *(double *)&Mem;
522 break;
523 default:
524 panic("Unhandled data size in LdFp87.\\n");
525 }
526 ''', big = False)
527
528 # Load integer from memory into x87 top-of-stack register.
529 # Used to implement fild instruction.
530 defineMicroLoadOp('Ldifp87', code='''
531 switch (dataSize)
532 {
533 case 2:
534 FpData_df = (int64_t)sext<16>(Mem);
535 break;
536 case 4:
537 FpData_df = (int64_t)sext<32>(Mem);
538 break;
539 case 8:
540 FpData_df = (int64_t)Mem;
541 break;
542 default:
543 panic("Unhandled data size in LdIFp87.\\n");
544 }
545 ''', big = False)
546
547 def defineMicroLoadSplitOp(mnemonic, code, mem_flags="0", nonSpec=False):
548 global header_output
549 global decoder_output
550 global exec_output
551 global microopClasses
552 Name = mnemonic
553 name = mnemonic.lower()
554
555 iop = InstObjParams(name, Name, 'X86ISA::LdStSplitOp',
556 { "code": code,
557 "ea_code": calculateEA,
558 "memDataSize": "2 * dataSize" })
559
560 header_output += MicroLdStSplitOpDeclare.subst(iop)
561 decoder_output += MicroLdStSplitOpConstructor.subst(iop)
562 exec_output += MicroLoadExecute.subst(iop)
563 exec_output += MicroLoadInitiateAcc.subst(iop)
564 exec_output += MicroLoadCompleteAcc.subst(iop)
565
566 class LoadOp(LdStSplitOp):
567 def __init__(self, data, segment, addr, disp = 0,
568 dataSize="env.dataSize",
569 addressSize="env.addressSize",
570 atCPL0=False, prefetch=False, nonSpec=nonSpec,
571 implicitStack=False):
572 super(LoadOp, self).__init__(data, segment, addr,
573 disp, dataSize, addressSize, mem_flags,
574 atCPL0, prefetch, nonSpec, implicitStack)
575 self.className = Name
576 self.mnemonic = name
577
578 microopClasses[name] = LoadOp
579
580 code = '''
581 switch (dataSize) {
582 case 4:
583 DataLow = bits(Mem_u2qw[0], 31, 0);
584 DataHi = bits(Mem_u2qw[0], 63, 32);
585 break;
586 case 8:
587 DataLow = Mem_u2qw[0];
588 DataHi = Mem_u2qw[1];
589 break;
590 default:
591 panic("Unhandled data size %d in LdSplit.\\n", dataSize);
592 }'''
593
594 defineMicroLoadSplitOp('LdSplit', code,
595 '(StoreCheck << FlagShift)')
596
597 defineMicroLoadSplitOp('LdSplitl', code,
598 '(StoreCheck << FlagShift) | Request::LOCKED_RMW',
599 nonSpec=True)
600
601 def defineMicroStoreOp(mnemonic, code, completeCode="", mem_flags="0",
602 implicitStack=False):
603 global header_output
604 global decoder_output
605 global exec_output
606 global microopClasses
607 Name = mnemonic
608 name = mnemonic.lower()
609
610 # Build up the all register version of this micro op
611 iop = InstObjParams(name, Name, 'X86ISA::LdStOp',
612 { "code": code,
613 "complete_code": completeCode,
614 "ea_code": calculateEA,
615 "memDataSize": "dataSize" })
616 header_output += MicroLdStOpDeclare.subst(iop)
617 decoder_output += MicroLdStOpConstructor.subst(iop)
618 exec_output += MicroStoreExecute.subst(iop)
619 exec_output += MicroStoreInitiateAcc.subst(iop)
620 exec_output += MicroStoreCompleteAcc.subst(iop)
621
622 if implicitStack:
623 # For instructions that implicitly access the stack, the address
624 # size is the same as the stack segment pointer size, not the
625 # address size if specified by the instruction prefix
626 addressSize = "env.stackSize"
627 else:
628 addressSize = "env.addressSize"
629
630 class StoreOp(LdStOp):
631 def __init__(self, data, segment, addr, disp = 0,
632 dataSize="env.dataSize",
633 addressSize=addressSize,
634 atCPL0=False, nonSpec=False, implicitStack=implicitStack):
635 super(StoreOp, self).__init__(data, segment, addr, disp,
636 dataSize, addressSize, mem_flags, atCPL0, False,
637 nonSpec, implicitStack)
638 self.className = Name
639 self.mnemonic = name
640
641 microopClasses[name] = StoreOp
642
643 defineMicroStoreOp('St', 'Mem = pick(Data, 2, dataSize);')
644 defineMicroStoreOp('Stis', 'Mem = pick(Data, 2, dataSize);',
645 implicitStack=True)
646 defineMicroStoreOp('Stul', 'Mem = pick(Data, 2, dataSize);',
647 mem_flags="Request::LOCKED_RMW")
648
649 defineMicroStoreOp('Stfp', code='Mem = FpData_uqw;')
650
651 defineMicroStoreOp('Stfp87', code='''
652 switch (dataSize)
653 {
654 case 4: {
655 float single(FpData_df);
656 Mem = *(uint32_t *)&single;
657 } break;
658 case 8:
659 Mem = *(uint64_t *)&FpData_df;
660 break;
661 default:
662 panic("Unhandled data size in StFp87.\\n");
663 }
664 ''')
665
666 defineMicroStoreOp('Cda', 'Mem = 0;', mem_flags="Request::NO_ACCESS")
667
668 def defineMicroStoreSplitOp(mnemonic, code,
669 completeCode="", mem_flags="0"):
670 global header_output
671 global decoder_output
672 global exec_output
673 global microopClasses
674 Name = mnemonic
675 name = mnemonic.lower()
676
677 iop = InstObjParams(name, Name, 'X86ISA::LdStSplitOp',
678 { "code": code,
679 "complete_code": completeCode,
680 "ea_code": calculateEA,
681 "memDataSize": "2 * dataSize" })
682
683 header_output += MicroLdStSplitOpDeclare.subst(iop)
684 decoder_output += MicroLdStSplitOpConstructor.subst(iop)
685 exec_output += MicroStoreExecute.subst(iop)
686 exec_output += MicroStoreInitiateAcc.subst(iop)
687 exec_output += MicroStoreCompleteAcc.subst(iop)
688
689 class StoreOp(LdStSplitOp):
690 def __init__(self, data, segment, addr, disp = 0,
691 dataSize="env.dataSize",
692 addressSize="env.addressSize",
693 atCPL0=False, nonSpec=False, implicitStack=False):
694 super(StoreOp, self).__init__(data, segment, addr, disp,
695 dataSize, addressSize, mem_flags, atCPL0, False,
696 nonSpec, implicitStack)
697 self.className = Name
698 self.mnemonic = name
699
700 microopClasses[name] = StoreOp
701
702 code = '''
703 switch (dataSize) {
704 case 4:
705 Mem_u2qw[0] = (DataHi << 32) | DataLow;
706 break;
707 case 8:
708 Mem_u2qw[0] = DataLow;
709 Mem_u2qw[1] = DataHi;
710 break;
711 default:
712 panic("Unhandled data size %d in StSplit.\\n", dataSize);
713 }'''
714
715 defineMicroStoreSplitOp('StSplit', code);
716
717 defineMicroStoreSplitOp('StSplitul', code,
718 mem_flags='Request::LOCKED_RMW')
719
720 iop = InstObjParams("lea", "Lea", 'X86ISA::LdStOp',
721 { "code": "Data = merge(Data, EA, dataSize);",
722 "ea_code": "EA = " + segmentEAExpr,
723 "memDataSize": "dataSize" })
724 header_output += MicroLeaDeclare.subst(iop)
725 decoder_output += MicroLdStOpConstructor.subst(iop)
726 exec_output += MicroLeaExecute.subst(iop)
727
728 class LeaOp(LdStOp):
729 def __init__(self, data, segment, addr, disp = 0,
730 dataSize="env.dataSize", addressSize="env.addressSize"):
731 super(LeaOp, self).__init__(data, segment, addr, disp,
732 dataSize, addressSize, "0", False, False, False, False)
733 self.className = "Lea"
734 self.mnemonic = "lea"
735
736 microopClasses["lea"] = LeaOp
737
738
739 iop = InstObjParams("tia", "Tia", 'X86ISA::LdStOp',
740 { "code": "xc->demapPage(EA, 0);",
741 "ea_code": calculateEA,
742 "memDataSize": "dataSize" })
743 header_output += MicroLeaDeclare.subst(iop)
744 decoder_output += MicroLdStOpConstructor.subst(iop)
745 exec_output += MicroLeaExecute.subst(iop)
746
747 class TiaOp(LdStOp):
748 def __init__(self, segment, addr, disp = 0,
749 dataSize="env.dataSize",
750 addressSize="env.addressSize"):
751 super(TiaOp, self).__init__("InstRegIndex(NUM_INTREGS)", segment,
752 addr, disp, dataSize, addressSize, "0", False, False,
753 False, False)
754 self.className = "Tia"
755 self.mnemonic = "tia"
756
757 microopClasses["tia"] = TiaOp
758
759 class CdaOp(LdStOp):
760 def __init__(self, segment, addr, disp = 0,
761 dataSize="env.dataSize",
762 addressSize="env.addressSize", atCPL0=False):
763 super(CdaOp, self).__init__("InstRegIndex(NUM_INTREGS)", segment,
764 addr, disp, dataSize, addressSize, "Request::NO_ACCESS",
765 atCPL0, False, False, False)
766 self.className = "Cda"
767 self.mnemonic = "cda"
768
769 microopClasses["cda"] = CdaOp
770}};
236}};
237
238def template MicroLdStOpDeclare {{
239 class %(class_name)s : public %(base_class)s
240 {
241 public:
242 %(class_name)s(ExtMachInst _machInst,
243 const char * instMnem, uint64_t setFlags,
244 uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
245 uint64_t _disp, InstRegIndex _segment,
246 InstRegIndex _data,
247 uint8_t _dataSize, uint8_t _addressSize,
248 Request::FlagsType _memFlags);
249
250 %(BasicExecDeclare)s
251
252 %(InitiateAccDeclare)s
253
254 %(CompleteAccDeclare)s
255 };
256}};
257
258// LdStSplitOp is a load or store that uses a pair of regs as the
259// source or destination. Used for cmpxchg{8,16}b.
260def template MicroLdStSplitOpDeclare {{
261 class %(class_name)s : public %(base_class)s
262 {
263 public:
264 %(class_name)s(ExtMachInst _machInst,
265 const char * instMnem, uint64_t setFlags,
266 uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
267 uint64_t _disp, InstRegIndex _segment,
268 InstRegIndex _dataLow, InstRegIndex _dataHi,
269 uint8_t _dataSize, uint8_t _addressSize,
270 Request::FlagsType _memFlags);
271
272 %(BasicExecDeclare)s
273
274 %(InitiateAccDeclare)s
275
276 %(CompleteAccDeclare)s
277 };
278}};
279
// Constructor template for a single-data-register load/store microop:
// forwards every argument (plus the op class) to the base class, then runs
// any per-instruction constructor code.
def template MicroLdStOpConstructor {{
    %(class_name)s::%(class_name)s(
            ExtMachInst machInst, const char * instMnem, uint64_t setFlags,
            uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
            uint64_t _disp, InstRegIndex _segment,
            InstRegIndex _data,
            uint8_t _dataSize, uint8_t _addressSize,
            Request::FlagsType _memFlags) :
        %(base_class)s(machInst, "%(mnemonic)s", instMnem, setFlags,
                _scale, _index, _base,
                _disp, _segment, _data,
                _dataSize, _addressSize, _memFlags, %(op_class)s)
    {
        %(constructor)s;
    }
}};
296
// Constructor template for a split (register-pair) load/store microop;
// mirrors MicroLdStOpConstructor but forwards the low/high data registers.
def template MicroLdStSplitOpConstructor {{
    %(class_name)s::%(class_name)s(
            ExtMachInst machInst, const char * instMnem, uint64_t setFlags,
            uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
            uint64_t _disp, InstRegIndex _segment,
            InstRegIndex _dataLow, InstRegIndex _dataHi,
            uint8_t _dataSize, uint8_t _addressSize,
            Request::FlagsType _memFlags) :
        %(base_class)s(machInst, "%(mnemonic)s", instMnem, setFlags,
                _scale, _index, _base,
                _disp, _segment, _dataLow, _dataHi,
                _dataSize, _addressSize, _memFlags, %(op_class)s)
    {
        %(constructor)s;
    }
}};
313
let {{
    # Python-side allocator for load/store microops. Collects the addressing
    # components (scale, index, base, displacement, segment), the data
    # register, sizes, and memory-request/instruction flags, and renders the
    # C++ "new %(class_name)s(...)" snippet used by the decoder.
    class LdStOp(X86Microop):
        def __init__(self, data, segment, addr, disp,
                dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec,
                implicitStack):
            self.data = data
            [self.scale, self.index, self.base] = addr
            self.disp = disp
            self.segment = segment
            self.dataSize = dataSize
            self.addressSize = addressSize
            self.memFlags = baseFlags
            if atCPL0:
                # Perform the access with CPL0 (kernel) privilege.
                self.memFlags += " | (CPL0FlagBit << FlagShift)"
            self.instFlags = ""
            if prefetch:
                self.memFlags += " | Request::PREFETCH"
                self.instFlags += " | (1ULL << StaticInst::IsDataPrefetch)"
            if nonSpec:
                self.instFlags += " | (1ULL << StaticInst::IsNonSpeculative)"
            # For implicit stack operations, we should *not* use the
            # alternative addressing mode for loads/stores if the prefix is set
            if not implicitStack:
                self.memFlags += " | (machInst.legacy.addr ? " + \
                    "(AddrSizeFlagBit << FlagShift) : 0)"

        def getAllocator(self, microFlags):
            allocator = '''new %(class_name)s(machInst, macrocodeBlock,
                    %(flags)s, %(scale)s, %(index)s, %(base)s,
                    %(disp)s, %(segment)s, %(data)s,
                    %(dataSize)s, %(addressSize)s, %(memFlags)s)''' % {
                "class_name" : self.className,
                "flags" : self.microFlagsText(microFlags) + self.instFlags,
                "scale" : self.scale, "index" : self.index,
                "base" : self.base,
                "disp" : self.disp,
                "segment" : self.segment, "data" : self.data,
                "dataSize" : self.dataSize, "addressSize" : self.addressSize,
                "memFlags" : self.memFlags}
            return allocator
354
355 class BigLdStOp(X86Microop):
356 def __init__(self, data, segment, addr, disp,
357 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec,
358 implicitStack):
359 self.data = data
360 [self.scale, self.index, self.base] = addr
361 self.disp = disp
362 self.segment = segment
363 self.dataSize = dataSize
364 self.addressSize = addressSize
365 self.memFlags = baseFlags
366 if atCPL0:
367 self.memFlags += " | (CPL0FlagBit << FlagShift)"
368 self.instFlags = ""
369 if prefetch:
370 self.memFlags += " | Request::PREFETCH"
371 self.instFlags += " | (1ULL << StaticInst::IsDataPrefetch)"
372 if nonSpec:
373 self.instFlags += " | (1ULL << StaticInst::IsNonSpeculative)"
374 # For implicit stack operations, we should use *not* use the
375 # alternative addressing mode for loads/stores if the prefix is set
376 if not implicitStack:
377 self.memFlags += " | (machInst.legacy.addr ? " + \
378 "(AddrSizeFlagBit << FlagShift) : 0)"
379
380 def getAllocator(self, microFlags):
381 allocString = '''
382 (%(dataSize)s >= 4) ?
383 (StaticInstPtr)(new %(class_name)sBig(machInst,
384 macrocodeBlock, %(flags)s, %(scale)s, %(index)s,
385 %(base)s, %(disp)s, %(segment)s, %(data)s,
386 %(dataSize)s, %(addressSize)s, %(memFlags)s)) :
387 (StaticInstPtr)(new %(class_name)s(machInst,
388 macrocodeBlock, %(flags)s, %(scale)s, %(index)s,
389 %(base)s, %(disp)s, %(segment)s, %(data)s,
390 %(dataSize)s, %(addressSize)s, %(memFlags)s))
391 '''
392 allocator = allocString % {
393 "class_name" : self.className,
394 "flags" : self.microFlagsText(microFlags) + self.instFlags,
395 "scale" : self.scale, "index" : self.index,
396 "base" : self.base,
397 "disp" : self.disp,
398 "segment" : self.segment, "data" : self.data,
399 "dataSize" : self.dataSize, "addressSize" : self.addressSize,
400 "memFlags" : self.memFlags}
401 return allocator
402
    # Allocator for load/store microops that use a pair of registers
    # (dataLow/dataHi) as the source or destination, e.g. cmpxchg{8,16}b.
    class LdStSplitOp(LdStOp):
        def __init__(self, data, segment, addr, disp,
                dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec,
                implicitStack):
            # The base class's single "data" slot is unused (passed as 0);
            # the register pair is kept in dataLow/dataHi instead.
            super(LdStSplitOp, self).__init__(0, segment, addr, disp,
                    dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec,
                    implicitStack)
            (self.dataLow, self.dataHi) = data

        def getAllocator(self, microFlags):
            allocString = '''(StaticInstPtr)(new %(class_name)s(machInst,
                    macrocodeBlock, %(flags)s, %(scale)s, %(index)s,
                    %(base)s, %(disp)s, %(segment)s,
                    %(dataLow)s, %(dataHi)s,
                    %(dataSize)s, %(addressSize)s, %(memFlags)s))
                '''
            allocator = allocString % {
                "class_name" : self.className,
                "flags" : self.microFlagsText(microFlags) + self.instFlags,
                "scale" : self.scale, "index" : self.index,
                "base" : self.base,
                "disp" : self.disp,
                "segment" : self.segment,
                "dataLow" : self.dataLow, "dataHi" : self.dataHi,
                "dataSize" : self.dataSize, "addressSize" : self.addressSize,
                "memFlags" : self.memFlags}
            return allocator
431}};
432
let {{

    # Make these empty strings so that concatenating onto
    # them will always work.
    header_output = ""
    decoder_output = ""
    exec_output = ""

    # Segment-relative effective address: scale * index + base + disp,
    # truncated to the address size in bits.
    segmentEAExpr = \
        'bits(scale * Index + Base + disp, addressSize * 8 - 1, 0);'

    # Full effective address: segment base plus the segment-relative EA.
    calculateEA = 'EA = SegBase + ' + segmentEAExpr
445
    # Define a load microop: emit its C++ declaration, constructor and
    # execute/initiateAcc/completeAcc code (plus an optional "Big" variant
    # used for data sizes >= 4), and register a Python allocator class for
    # it under its lower-cased mnemonic.
    def defineMicroLoadOp(mnemonic, code, bigCode='',
                          mem_flags="0", big=True, nonSpec=False,
                          implicitStack=False):
        global header_output
        global decoder_output
        global exec_output
        global microopClasses
        Name = mnemonic
        name = mnemonic.lower()

        # Build up the all register version of this micro op
        iops = [InstObjParams(name, Name, 'X86ISA::LdStOp',
                        { "code": code,
                          "ea_code": calculateEA,
                          "memDataSize": "dataSize" })]
        if big:
            iops += [InstObjParams(name, Name + "Big", 'X86ISA::LdStOp',
                            { "code": bigCode,
                              "ea_code": calculateEA,
                              "memDataSize": "dataSize" })]
        for iop in iops:
            header_output += MicroLdStOpDeclare.subst(iop)
            decoder_output += MicroLdStOpConstructor.subst(iop)
            exec_output += MicroLoadExecute.subst(iop)
            exec_output += MicroLoadInitiateAcc.subst(iop)
            exec_output += MicroLoadCompleteAcc.subst(iop)

        if implicitStack:
            # For instructions that implicitly access the stack, the address
            # size is the same as the stack segment pointer size, not the
            # address size specified by the instruction prefix
            addressSize = "env.stackSize"
        else:
            addressSize = "env.addressSize"

        base = LdStOp
        if big:
            base = BigLdStOp
        class LoadOp(base):
            def __init__(self, data, segment, addr, disp = 0,
                    dataSize="env.dataSize",
                    addressSize=addressSize,
                    atCPL0=False, prefetch=False, nonSpec=nonSpec,
                    implicitStack=implicitStack):
                super(LoadOp, self).__init__(data, segment, addr,
                        disp, dataSize, addressSize, mem_flags,
                        atCPL0, prefetch, nonSpec, implicitStack)
                self.className = Name
                self.mnemonic = name

        microopClasses[name] = LoadOp
497
    # Plain load; the Big variant zero-extends into the destination instead
    # of merging into its low bytes.
    defineMicroLoadOp('Ld', 'Data = merge(Data, Mem, dataSize);',
                            'Data = Mem & mask(dataSize * 8);')
    # Load using implicit-stack addressing (stack segment pointer size).
    defineMicroLoadOp('Ldis', 'Data = merge(Data, Mem, dataSize);',
                              'Data = Mem & mask(dataSize * 8);',
                              implicitStack=True)
    # Load that also carries the StoreCheck request flag.
    defineMicroLoadOp('Ldst', 'Data = merge(Data, Mem, dataSize);',
                              'Data = Mem & mask(dataSize * 8);',
                              '(StoreCheck << FlagShift)')
    # Locked (LOCKED_RMW) load; marked non-speculative.
    defineMicroLoadOp('Ldstl', 'Data = merge(Data, Mem, dataSize);',
                               'Data = Mem & mask(dataSize * 8);',
                               '(StoreCheck << FlagShift) | Request::LOCKED_RMW',
                               nonSpec=True)

    # Raw 64-bit load into an FP register.
    defineMicroLoadOp('Ldfp', code='FpData_uqw = Mem', big = False)

    # Load a 32- or 64-bit floating-point value from memory into an x87
    # register, reinterpreting the raw memory bits as float/double.
    defineMicroLoadOp('Ldfp87', code='''
        switch (dataSize)
        {
          case 4:
            FpData_df = *(float *)&Mem;
            break;
          case 8:
            FpData_df = *(double *)&Mem;
            break;
          default:
            panic("Unhandled data size in LdFp87.\\n");
        }
        ''', big = False)

    # Load integer from memory into x87 top-of-stack register.
    # Used to implement fild instruction.
    defineMicroLoadOp('Ldifp87', code='''
        switch (dataSize)
        {
          case 2:
            FpData_df = (int64_t)sext<16>(Mem);
            break;
          case 4:
            FpData_df = (int64_t)sext<32>(Mem);
            break;
          case 8:
            FpData_df = (int64_t)Mem;
            break;
          default:
            panic("Unhandled data size in LdIFp87.\\n");
        }
        ''', big = False)
545
    # Define a load microop whose destination is a register pair
    # (DataLow/DataHi); the memory access is twice dataSize wide.
    def defineMicroLoadSplitOp(mnemonic, code, mem_flags="0", nonSpec=False):
        global header_output
        global decoder_output
        global exec_output
        global microopClasses
        Name = mnemonic
        name = mnemonic.lower()

        iop = InstObjParams(name, Name, 'X86ISA::LdStSplitOp',
                        { "code": code,
                          "ea_code": calculateEA,
                          "memDataSize": "2 * dataSize" })

        header_output += MicroLdStSplitOpDeclare.subst(iop)
        decoder_output += MicroLdStSplitOpConstructor.subst(iop)
        exec_output += MicroLoadExecute.subst(iop)
        exec_output += MicroLoadInitiateAcc.subst(iop)
        exec_output += MicroLoadCompleteAcc.subst(iop)

        class LoadOp(LdStSplitOp):
            def __init__(self, data, segment, addr, disp = 0,
                    dataSize="env.dataSize",
                    addressSize="env.addressSize",
                    atCPL0=False, prefetch=False, nonSpec=nonSpec,
                    implicitStack=False):
                super(LoadOp, self).__init__(data, segment, addr,
                        disp, dataSize, addressSize, mem_flags,
                        atCPL0, prefetch, nonSpec, implicitStack)
                self.className = Name
                self.mnemonic = name

        microopClasses[name] = LoadOp
578
    # Unpack the (2 * dataSize)-wide memory data into the DataLow/DataHi
    # register pair; for dataSize == 4 the two halves come from one qword.
    code = '''
        switch (dataSize) {
          case 4:
            DataLow = bits(Mem_u2qw[0], 31, 0);
            DataHi = bits(Mem_u2qw[0], 63, 32);
            break;
          case 8:
            DataLow = Mem_u2qw[0];
            DataHi = Mem_u2qw[1];
            break;
          default:
            panic("Unhandled data size %d in LdSplit.\\n", dataSize);
        }'''

    defineMicroLoadSplitOp('LdSplit', code,
                           '(StoreCheck << FlagShift)')

    # Locked (LOCKED_RMW) variant; marked non-speculative.
    defineMicroLoadSplitOp('LdSplitl', code,
                           '(StoreCheck << FlagShift) | Request::LOCKED_RMW',
                           nonSpec=True)
599
    # Define a store microop: emit its C++ declaration, constructor and
    # execute/initiateAcc/completeAcc code, and register a Python allocator
    # class for it under its lower-cased mnemonic.
    def defineMicroStoreOp(mnemonic, code, completeCode="", mem_flags="0",
                           implicitStack=False):
        global header_output
        global decoder_output
        global exec_output
        global microopClasses
        Name = mnemonic
        name = mnemonic.lower()

        # Build up the all register version of this micro op
        iop = InstObjParams(name, Name, 'X86ISA::LdStOp',
                    { "code": code,
                      "complete_code": completeCode,
                      "ea_code": calculateEA,
                      "memDataSize": "dataSize" })
        header_output += MicroLdStOpDeclare.subst(iop)
        decoder_output += MicroLdStOpConstructor.subst(iop)
        exec_output += MicroStoreExecute.subst(iop)
        exec_output += MicroStoreInitiateAcc.subst(iop)
        exec_output += MicroStoreCompleteAcc.subst(iop)

        if implicitStack:
            # For instructions that implicitly access the stack, the address
            # size is the same as the stack segment pointer size, not the
            # address size specified by the instruction prefix
            addressSize = "env.stackSize"
        else:
            addressSize = "env.addressSize"

        class StoreOp(LdStOp):
            def __init__(self, data, segment, addr, disp = 0,
                    dataSize="env.dataSize",
                    addressSize=addressSize,
                    atCPL0=False, nonSpec=False, implicitStack=implicitStack):
                super(StoreOp, self).__init__(data, segment, addr, disp,
                        dataSize, addressSize, mem_flags, atCPL0, False,
                        nonSpec, implicitStack)
                self.className = Name
                self.mnemonic = name

        microopClasses[name] = StoreOp
641
    # Plain store of the low dataSize bytes of the data register.
    defineMicroStoreOp('St', 'Mem = pick(Data, 2, dataSize);')
    # Store using implicit-stack addressing (stack segment pointer size).
    defineMicroStoreOp('Stis', 'Mem = pick(Data, 2, dataSize);',
                               implicitStack=True)
    # Store with the LOCKED_RMW request flag set.
    defineMicroStoreOp('Stul', 'Mem = pick(Data, 2, dataSize);',
                               mem_flags="Request::LOCKED_RMW")

    # Raw 64-bit store from an FP register.
    defineMicroStoreOp('Stfp', code='Mem = FpData_uqw;')

    # Store an x87 value to memory as a 32- or 64-bit float, writing the
    # raw bit pattern of the (possibly down-converted) value.
    defineMicroStoreOp('Stfp87', code='''
        switch (dataSize)
        {
          case 4: {
            float single(FpData_df);
            Mem = *(uint32_t *)&single;
          } break;
          case 8:
            Mem = *(uint64_t *)&FpData_df;
            break;
          default:
            panic("Unhandled data size in StFp87.\\n");
        }
        ''')

    # cda: request flagged NO_ACCESS, so no data is actually written.
    defineMicroStoreOp('Cda', 'Mem = 0;', mem_flags="Request::NO_ACCESS")
666
    # Define a store microop whose source is a register pair
    # (DataLow/DataHi); the memory access is twice dataSize wide.
    def defineMicroStoreSplitOp(mnemonic, code,
                                completeCode="", mem_flags="0"):
        global header_output
        global decoder_output
        global exec_output
        global microopClasses
        Name = mnemonic
        name = mnemonic.lower()

        iop = InstObjParams(name, Name, 'X86ISA::LdStSplitOp',
                    { "code": code,
                      "complete_code": completeCode,
                      "ea_code": calculateEA,
                      "memDataSize": "2 * dataSize" })

        header_output += MicroLdStSplitOpDeclare.subst(iop)
        decoder_output += MicroLdStSplitOpConstructor.subst(iop)
        exec_output += MicroStoreExecute.subst(iop)
        exec_output += MicroStoreInitiateAcc.subst(iop)
        exec_output += MicroStoreCompleteAcc.subst(iop)

        class StoreOp(LdStSplitOp):
            def __init__(self, data, segment, addr, disp = 0,
                    dataSize="env.dataSize",
                    addressSize="env.addressSize",
                    atCPL0=False, nonSpec=False, implicitStack=False):
                super(StoreOp, self).__init__(data, segment, addr, disp,
                        dataSize, addressSize, mem_flags, atCPL0, False,
                        nonSpec, implicitStack)
                self.className = Name
                self.mnemonic = name

        microopClasses[name] = StoreOp
700
701 code = '''
702 switch (dataSize) {
703 case 4:
704 Mem_u2qw[0] = (DataHi << 32) | DataLow;
705 break;
706 case 8:
707 Mem_u2qw[0] = DataLow;
708 Mem_u2qw[1] = DataHi;
709 break;
710 default:
711 panic("Unhandled data size %d in StSplit.\\n", dataSize);
712 }'''
713
714 defineMicroStoreSplitOp('StSplit', code);
715
716 defineMicroStoreSplitOp('StSplitul', code,
717 mem_flags='Request::LOCKED_RMW')
718
    # lea: compute the segment-relative effective address (note: ea_code
    # uses segmentEAExpr without SegBase) and merge it into the destination
    # register; no memory is accessed (uses the Lea declare/execute
    # templates).
    iop = InstObjParams("lea", "Lea", 'X86ISA::LdStOp',
                        { "code": "Data = merge(Data, EA, dataSize);",
                          "ea_code": "EA = " + segmentEAExpr,
                          "memDataSize": "dataSize" })
    header_output += MicroLeaDeclare.subst(iop)
    decoder_output += MicroLdStOpConstructor.subst(iop)
    exec_output += MicroLeaExecute.subst(iop)

    class LeaOp(LdStOp):
        def __init__(self, data, segment, addr, disp = 0,
                dataSize="env.dataSize", addressSize="env.addressSize"):
            super(LeaOp, self).__init__(data, segment, addr, disp,
                    dataSize, addressSize, "0", False, False, False, False)
            self.className = "Lea"
            self.mnemonic = "lea"

    microopClasses["lea"] = LeaOp
736
737
    # tia: compute the effective address and demap (invalidate) the page
    # containing it via xc->demapPage(); no memory is accessed, so the Lea
    # declare/execute templates are reused.
    iop = InstObjParams("tia", "Tia", 'X86ISA::LdStOp',
                        { "code": "xc->demapPage(EA, 0);",
                          "ea_code": calculateEA,
                          "memDataSize": "dataSize" })
    header_output += MicroLeaDeclare.subst(iop)
    decoder_output += MicroLdStOpConstructor.subst(iop)
    exec_output += MicroLeaExecute.subst(iop)

    class TiaOp(LdStOp):
        def __init__(self, segment, addr, disp = 0,
                dataSize="env.dataSize",
                addressSize="env.addressSize"):
            # No data register is involved; pass the NUM_INTREGS sentinel.
            super(TiaOp, self).__init__("InstRegIndex(NUM_INTREGS)", segment,
                    addr, disp, dataSize, addressSize, "0", False, False,
                    False, False)
            self.className = "Tia"
            self.mnemonic = "tia"

    microopClasses["tia"] = TiaOp
757
758 class CdaOp(LdStOp):
759 def __init__(self, segment, addr, disp = 0,
760 dataSize="env.dataSize",
761 addressSize="env.addressSize", atCPL0=False):
762 super(CdaOp, self).__init__("InstRegIndex(NUM_INTREGS)", segment,
763 addr, disp, dataSize, addressSize, "Request::NO_ACCESS",
764 atCPL0, False, False, False)
765 self.className = "Cda"
766 self.mnemonic = "cda"
767
768 microopClasses["cda"] = CdaOp
769}};