ldstop.isa (11329:82bb3ee706b3) ldstop.isa (11829:cb5390385d87)
1// Copyright (c) 2007-2008 The Hewlett-Packard Development Company
2// Copyright (c) 2015 Advanced Micro Devices, Inc.
3// All rights reserved.
4//
5// The license below extends only to copyright in the software and shall
6// not be construed as granting a license to any other intellectual
7// property including but not limited to intellectual property relating
8// to a hardware implementation of the functionality of the software
9// licensed hereunder. You may use the software subject to the license
10// terms below provided that you ensure that this notice is replicated
11// unmodified and in its entirety in all distributions of the software,
12// modified or unmodified, in source code or in binary form.
13//
14// Copyright (c) 2008 The Regents of The University of Michigan
15// All rights reserved.
16//
17// Redistribution and use in source and binary forms, with or without
18// modification, are permitted provided that the following conditions are
19// met: redistributions of source code must retain the above copyright
20// notice, this list of conditions and the following disclaimer;
21// redistributions in binary form must reproduce the above copyright
22// notice, this list of conditions and the following disclaimer in the
23// documentation and/or other materials provided with the distribution;
24// neither the name of the copyright holders nor the names of its
25// contributors may be used to endorse or promote products derived from
26// this software without specific prior written permission.
27//
28// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
29// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
30// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
31// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
32// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
33// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
34// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
35// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
36// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
37// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
38// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
39//
40// Authors: Gabe Black
41
42//////////////////////////////////////////////////////////////////////////
43//
44// LdStOp Microop templates
45//
46//////////////////////////////////////////////////////////////////////////
47
48// LEA template
49
// LEA: compute the effective address and run the microop's code with
// it; no memory access is performed.
def template MicroLeaExecute {{
    Fault %(class_name)s::execute(CPU_EXEC_CONTEXT *xc,
          Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        Addr EA;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        %(code)s;
        if(fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};
71
// Declaration of the LEA microop class: constructor plus the basic
// (atomic) execute entry point only, since LEA touches no memory.
def template MicroLeaDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        %(class_name)s(ExtMachInst _machInst,
                const char * instMnem, uint64_t setFlags,
                uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
                uint64_t _disp, InstRegIndex _segment,
                InstRegIndex _data,
                uint8_t _dataSize, uint8_t _addressSize,
                Request::FlagsType _memFlags);

        %(BasicExecDeclare)s
    };
}};
87
88// Load templates
89
// Load, atomic mode: read memory at the computed EA, then run the
// microop's code on the loaded value. Faults on prefetches are
// deliberately swallowed.
def template MicroLoadExecute {{
    Fault %(class_name)s::execute(CPU_EXEC_CONTEXT *xc,
          Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        Addr EA;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        fault = readMemAtomic(xc, traceData, EA, Mem,
                              %(memDataSize)s, memFlags);

        if (fault == NoFault) {
            %(code)s;
        } else if (memFlags & Request::PREFETCH) {
            // For prefetches, ignore any faults/exceptions.
            return NoFault;
        }
        if(fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};
119
// Load, timing mode: kick off the memory read; the data is consumed
// later in completeAcc.
def template MicroLoadInitiateAcc {{
    Fault %(class_name)s::initiateAcc(CPU_EXEC_CONTEXT * xc,
            Trace::InstRecord * traceData) const
    {
        Fault fault = NoFault;
        Addr EA;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        fault = initiateMemRead(xc, traceData, EA,
                                %(memDataSize)s, memFlags);

        return fault;
    }
}};
138
// Load, timing mode: extract the data from the returned packet, run
// the microop's code, and write back results if no fault occurred.
def template MicroLoadCompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt,
            CPU_EXEC_CONTEXT * xc,
            Trace::InstRecord * traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        getMem(pkt, Mem, %(memDataSize)s, traceData);

        %(code)s;

        if(fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};
161
162// Store templates
163
// Store, atomic mode: run the microop's code to produce Mem, then
// write it to memory; register results are written back only when
// both the code and the memory write complete without fault.
def template MicroStoreExecute {{
    Fault %(class_name)s::execute(CPU_EXEC_CONTEXT * xc,
            Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        Addr EA;
        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        %(code)s;

        if(fault == NoFault)
        {
            fault = writeMemAtomic(xc, traceData, Mem, %(memDataSize)s, EA,
                    memFlags, NULL);
            if(fault == NoFault)
            {
                %(op_wb)s;
            }
        }

        return fault;
    }
}};
191
// Store, timing mode: run the microop's code to produce Mem, then
// initiate the timing memory write. Write-back happens in
// completeAcc once the access finishes.
def template MicroStoreInitiateAcc {{
    Fault %(class_name)s::initiateAcc(CPU_EXEC_CONTEXT * xc,
            Trace::InstRecord * traceData) const
    {
        Fault fault = NoFault;

        Addr EA;
        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        %(code)s;

        if(fault == NoFault)
        {
            fault = writeMemTiming(xc, traceData, Mem, %(memDataSize)s, EA,
                    memFlags, NULL);
        }
        return fault;
    }
}};
214
// Store, timing mode: the write has already been sent; run any
// completion code and write back register results.
def template MicroStoreCompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt,
            CPU_EXEC_CONTEXT * xc, Trace::InstRecord * traceData) const
    {
        %(op_decl)s;
        %(op_rd)s;
        %(complete_code)s;
        %(op_wb)s;
        return NoFault;
    }
}};
226
227// Common templates
228
// This declares the initiateAcc function in memory operations
def template InitiateAccDeclare {{
    Fault initiateAcc(%(CPU_exec_context)s *, Trace::InstRecord *) const;
}};
233
// This declares the completeAcc function in memory operations
def template CompleteAccDeclare {{
    Fault completeAcc(PacketPtr, %(CPU_exec_context)s *, Trace::InstRecord *) const;
}};
238
// Declares a load/store microop class: constructor plus the execute,
// initiateAcc, and completeAcc entry points.
def template MicroLdStOpDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        %(class_name)s(ExtMachInst _machInst,
                const char * instMnem, uint64_t setFlags,
                uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
                uint64_t _disp, InstRegIndex _segment,
                InstRegIndex _data,
                uint8_t _dataSize, uint8_t _addressSize,
                Request::FlagsType _memFlags);

        %(BasicExecDeclare)s

        %(InitiateAccDeclare)s

        %(CompleteAccDeclare)s
    };
}};
258
// LdStSplitOp is a load or store that uses a pair of regs as the
// source or destination. Used for cmpxchg{8,16}b.
def template MicroLdStSplitOpDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        %(class_name)s(ExtMachInst _machInst,
                const char * instMnem, uint64_t setFlags,
                uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
                uint64_t _disp, InstRegIndex _segment,
                InstRegIndex _dataLow, InstRegIndex _dataHi,
                uint8_t _dataSize, uint8_t _addressSize,
                Request::FlagsType _memFlags);

        %(BasicExecDeclare)s

        %(InitiateAccDeclare)s

        %(CompleteAccDeclare)s
    };
}};
280
// Constructor for a single-data-register load/store microop;
// forwards everything to the LdStOp base class.
def template MicroLdStOpConstructor {{
    %(class_name)s::%(class_name)s(
            ExtMachInst machInst, const char * instMnem, uint64_t setFlags,
            uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
            uint64_t _disp, InstRegIndex _segment,
            InstRegIndex _data,
            uint8_t _dataSize, uint8_t _addressSize,
            Request::FlagsType _memFlags) :
        %(base_class)s(machInst, "%(mnemonic)s", instMnem, setFlags,
                _scale, _index, _base,
                _disp, _segment, _data,
                _dataSize, _addressSize, _memFlags, %(op_class)s)
    {
        %(constructor)s;
    }
}};
297
// Constructor for a split (register-pair) load/store microop;
// forwards everything to the LdStSplitOp base class.
def template MicroLdStSplitOpConstructor {{
    %(class_name)s::%(class_name)s(
            ExtMachInst machInst, const char * instMnem, uint64_t setFlags,
            uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
            uint64_t _disp, InstRegIndex _segment,
            InstRegIndex _dataLow, InstRegIndex _dataHi,
            uint8_t _dataSize, uint8_t _addressSize,
            Request::FlagsType _memFlags) :
        %(base_class)s(machInst, "%(mnemonic)s", instMnem, setFlags,
                _scale, _index, _base,
                _disp, _segment, _dataLow, _dataHi,
                _dataSize, _addressSize, _memFlags, %(op_class)s)
    {
        %(constructor)s;
    }
}};
314
315let {{
316 class LdStOp(X86Microop):
317 def __init__(self, data, segment, addr, disp,
1// Copyright (c) 2007-2008 The Hewlett-Packard Development Company
2// Copyright (c) 2015 Advanced Micro Devices, Inc.
3// All rights reserved.
4//
5// The license below extends only to copyright in the software and shall
6// not be construed as granting a license to any other intellectual
7// property including but not limited to intellectual property relating
8// to a hardware implementation of the functionality of the software
9// licensed hereunder. You may use the software subject to the license
10// terms below provided that you ensure that this notice is replicated
11// unmodified and in its entirety in all distributions of the software,
12// modified or unmodified, in source code or in binary form.
13//
14// Copyright (c) 2008 The Regents of The University of Michigan
15// All rights reserved.
16//
17// Redistribution and use in source and binary forms, with or without
18// modification, are permitted provided that the following conditions are
19// met: redistributions of source code must retain the above copyright
20// notice, this list of conditions and the following disclaimer;
21// redistributions in binary form must reproduce the above copyright
22// notice, this list of conditions and the following disclaimer in the
23// documentation and/or other materials provided with the distribution;
24// neither the name of the copyright holders nor the names of its
25// contributors may be used to endorse or promote products derived from
26// this software without specific prior written permission.
27//
28// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
29// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
30// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
31// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
32// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
33// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
34// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
35// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
36// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
37// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
38// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
39//
40// Authors: Gabe Black
41
42//////////////////////////////////////////////////////////////////////////
43//
44// LdStOp Microop templates
45//
46//////////////////////////////////////////////////////////////////////////
47
48// LEA template
49
// LEA: compute the effective address and run the microop's code with
// it; no memory access is performed.
def template MicroLeaExecute {{
    Fault %(class_name)s::execute(CPU_EXEC_CONTEXT *xc,
          Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        Addr EA;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        %(code)s;
        if(fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};
71
// Declaration of the LEA microop class: constructor plus the basic
// (atomic) execute entry point only, since LEA touches no memory.
def template MicroLeaDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        %(class_name)s(ExtMachInst _machInst,
                const char * instMnem, uint64_t setFlags,
                uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
                uint64_t _disp, InstRegIndex _segment,
                InstRegIndex _data,
                uint8_t _dataSize, uint8_t _addressSize,
                Request::FlagsType _memFlags);

        %(BasicExecDeclare)s
    };
}};
87
88// Load templates
89
// Load, atomic mode: read memory at the computed EA, then run the
// microop's code on the loaded value. Faults on prefetches are
// deliberately swallowed.
def template MicroLoadExecute {{
    Fault %(class_name)s::execute(CPU_EXEC_CONTEXT *xc,
          Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        Addr EA;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        fault = readMemAtomic(xc, traceData, EA, Mem,
                              %(memDataSize)s, memFlags);

        if (fault == NoFault) {
            %(code)s;
        } else if (memFlags & Request::PREFETCH) {
            // For prefetches, ignore any faults/exceptions.
            return NoFault;
        }
        if(fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};
119
// Load, timing mode: kick off the memory read; the data is consumed
// later in completeAcc.
def template MicroLoadInitiateAcc {{
    Fault %(class_name)s::initiateAcc(CPU_EXEC_CONTEXT * xc,
            Trace::InstRecord * traceData) const
    {
        Fault fault = NoFault;
        Addr EA;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        fault = initiateMemRead(xc, traceData, EA,
                                %(memDataSize)s, memFlags);

        return fault;
    }
}};
138
// Load, timing mode: extract the data from the returned packet, run
// the microop's code, and write back results if no fault occurred.
def template MicroLoadCompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt,
            CPU_EXEC_CONTEXT * xc,
            Trace::InstRecord * traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        getMem(pkt, Mem, %(memDataSize)s, traceData);

        %(code)s;

        if(fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};
161
162// Store templates
163
// Store, atomic mode: run the microop's code to produce Mem, then
// write it to memory; register results are written back only when
// both the code and the memory write complete without fault.
def template MicroStoreExecute {{
    Fault %(class_name)s::execute(CPU_EXEC_CONTEXT * xc,
            Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        Addr EA;
        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        %(code)s;

        if(fault == NoFault)
        {
            fault = writeMemAtomic(xc, traceData, Mem, %(memDataSize)s, EA,
                    memFlags, NULL);
            if(fault == NoFault)
            {
                %(op_wb)s;
            }
        }

        return fault;
    }
}};
191
// Store, timing mode: run the microop's code to produce Mem, then
// initiate the timing memory write. Write-back happens in
// completeAcc once the access finishes.
def template MicroStoreInitiateAcc {{
    Fault %(class_name)s::initiateAcc(CPU_EXEC_CONTEXT * xc,
            Trace::InstRecord * traceData) const
    {
        Fault fault = NoFault;

        Addr EA;
        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        %(code)s;

        if(fault == NoFault)
        {
            fault = writeMemTiming(xc, traceData, Mem, %(memDataSize)s, EA,
                    memFlags, NULL);
        }
        return fault;
    }
}};
214
// Store, timing mode: the write has already been sent; run any
// completion code and write back register results.
def template MicroStoreCompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt,
            CPU_EXEC_CONTEXT * xc, Trace::InstRecord * traceData) const
    {
        %(op_decl)s;
        %(op_rd)s;
        %(complete_code)s;
        %(op_wb)s;
        return NoFault;
    }
}};
226
227// Common templates
228
// This declares the initiateAcc function in memory operations
def template InitiateAccDeclare {{
    Fault initiateAcc(%(CPU_exec_context)s *, Trace::InstRecord *) const;
}};
233
// This declares the completeAcc function in memory operations
def template CompleteAccDeclare {{
    Fault completeAcc(PacketPtr, %(CPU_exec_context)s *, Trace::InstRecord *) const;
}};
238
// Declares a load/store microop class: constructor plus the execute,
// initiateAcc, and completeAcc entry points.
def template MicroLdStOpDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        %(class_name)s(ExtMachInst _machInst,
                const char * instMnem, uint64_t setFlags,
                uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
                uint64_t _disp, InstRegIndex _segment,
                InstRegIndex _data,
                uint8_t _dataSize, uint8_t _addressSize,
                Request::FlagsType _memFlags);

        %(BasicExecDeclare)s

        %(InitiateAccDeclare)s

        %(CompleteAccDeclare)s
    };
}};
258
// LdStSplitOp is a load or store that uses a pair of regs as the
// source or destination. Used for cmpxchg{8,16}b.
def template MicroLdStSplitOpDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      public:
        %(class_name)s(ExtMachInst _machInst,
                const char * instMnem, uint64_t setFlags,
                uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
                uint64_t _disp, InstRegIndex _segment,
                InstRegIndex _dataLow, InstRegIndex _dataHi,
                uint8_t _dataSize, uint8_t _addressSize,
                Request::FlagsType _memFlags);

        %(BasicExecDeclare)s

        %(InitiateAccDeclare)s

        %(CompleteAccDeclare)s
    };
}};
280
// Constructor for a single-data-register load/store microop;
// forwards everything to the LdStOp base class.
def template MicroLdStOpConstructor {{
    %(class_name)s::%(class_name)s(
            ExtMachInst machInst, const char * instMnem, uint64_t setFlags,
            uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
            uint64_t _disp, InstRegIndex _segment,
            InstRegIndex _data,
            uint8_t _dataSize, uint8_t _addressSize,
            Request::FlagsType _memFlags) :
        %(base_class)s(machInst, "%(mnemonic)s", instMnem, setFlags,
                _scale, _index, _base,
                _disp, _segment, _data,
                _dataSize, _addressSize, _memFlags, %(op_class)s)
    {
        %(constructor)s;
    }
}};
297
// Constructor for a split (register-pair) load/store microop;
// forwards everything to the LdStSplitOp base class.
def template MicroLdStSplitOpConstructor {{
    %(class_name)s::%(class_name)s(
            ExtMachInst machInst, const char * instMnem, uint64_t setFlags,
            uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
            uint64_t _disp, InstRegIndex _segment,
            InstRegIndex _dataLow, InstRegIndex _dataHi,
            uint8_t _dataSize, uint8_t _addressSize,
            Request::FlagsType _memFlags) :
        %(base_class)s(machInst, "%(mnemonic)s", instMnem, setFlags,
                _scale, _index, _base,
                _disp, _segment, _dataLow, _dataHi,
                _dataSize, _addressSize, _memFlags, %(op_class)s)
    {
        %(constructor)s;
    }
}};
314
315let {{
316 class LdStOp(X86Microop):
317 def __init__(self, data, segment, addr, disp,
318 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec):
318 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec,
319 implicitStack):
319 self.data = data
320 [self.scale, self.index, self.base] = addr
321 self.disp = disp
322 self.segment = segment
323 self.dataSize = dataSize
324 self.addressSize = addressSize
325 self.memFlags = baseFlags
326 if atCPL0:
327 self.memFlags += " | (CPL0FlagBit << FlagShift)"
328 self.instFlags = ""
329 if prefetch:
330 self.memFlags += " | Request::PREFETCH"
331 self.instFlags += " | (1ULL << StaticInst::IsDataPrefetch)"
332 if nonSpec:
333 self.instFlags += " | (1ULL << StaticInst::IsNonSpeculative)"
320 self.data = data
321 [self.scale, self.index, self.base] = addr
322 self.disp = disp
323 self.segment = segment
324 self.dataSize = dataSize
325 self.addressSize = addressSize
326 self.memFlags = baseFlags
327 if atCPL0:
328 self.memFlags += " | (CPL0FlagBit << FlagShift)"
329 self.instFlags = ""
330 if prefetch:
331 self.memFlags += " | Request::PREFETCH"
332 self.instFlags += " | (1ULL << StaticInst::IsDataPrefetch)"
333 if nonSpec:
334 self.instFlags += " | (1ULL << StaticInst::IsNonSpeculative)"
334 self.memFlags += " | (machInst.legacy.addr ? " + \
335 "(AddrSizeFlagBit << FlagShift) : 0)"
335 # For implicit stack operations, we should use *not* use the
336 # alternative addressing mode for loads/stores if the prefix is set
337 if not implicitStack:
338 self.memFlags += " | (machInst.legacy.addr ? " + \
339 "(AddrSizeFlagBit << FlagShift) : 0)"
336
337 def getAllocator(self, microFlags):
338 allocator = '''new %(class_name)s(machInst, macrocodeBlock,
339 %(flags)s, %(scale)s, %(index)s, %(base)s,
340 %(disp)s, %(segment)s, %(data)s,
341 %(dataSize)s, %(addressSize)s, %(memFlags)s)''' % {
342 "class_name" : self.className,
343 "flags" : self.microFlagsText(microFlags) + self.instFlags,
344 "scale" : self.scale, "index" : self.index,
345 "base" : self.base,
346 "disp" : self.disp,
347 "segment" : self.segment, "data" : self.data,
348 "dataSize" : self.dataSize, "addressSize" : self.addressSize,
349 "memFlags" : self.memFlags}
350 return allocator
351
352 class BigLdStOp(X86Microop):
353 def __init__(self, data, segment, addr, disp,
340
341 def getAllocator(self, microFlags):
342 allocator = '''new %(class_name)s(machInst, macrocodeBlock,
343 %(flags)s, %(scale)s, %(index)s, %(base)s,
344 %(disp)s, %(segment)s, %(data)s,
345 %(dataSize)s, %(addressSize)s, %(memFlags)s)''' % {
346 "class_name" : self.className,
347 "flags" : self.microFlagsText(microFlags) + self.instFlags,
348 "scale" : self.scale, "index" : self.index,
349 "base" : self.base,
350 "disp" : self.disp,
351 "segment" : self.segment, "data" : self.data,
352 "dataSize" : self.dataSize, "addressSize" : self.addressSize,
353 "memFlags" : self.memFlags}
354 return allocator
355
356 class BigLdStOp(X86Microop):
357 def __init__(self, data, segment, addr, disp,
354 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec):
358 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec,
359 implicitStack):
355 self.data = data
356 [self.scale, self.index, self.base] = addr
357 self.disp = disp
358 self.segment = segment
359 self.dataSize = dataSize
360 self.addressSize = addressSize
361 self.memFlags = baseFlags
362 if atCPL0:
363 self.memFlags += " | (CPL0FlagBit << FlagShift)"
364 self.instFlags = ""
365 if prefetch:
366 self.memFlags += " | Request::PREFETCH"
367 self.instFlags += " | (1ULL << StaticInst::IsDataPrefetch)"
368 if nonSpec:
369 self.instFlags += " | (1ULL << StaticInst::IsNonSpeculative)"
360 self.data = data
361 [self.scale, self.index, self.base] = addr
362 self.disp = disp
363 self.segment = segment
364 self.dataSize = dataSize
365 self.addressSize = addressSize
366 self.memFlags = baseFlags
367 if atCPL0:
368 self.memFlags += " | (CPL0FlagBit << FlagShift)"
369 self.instFlags = ""
370 if prefetch:
371 self.memFlags += " | Request::PREFETCH"
372 self.instFlags += " | (1ULL << StaticInst::IsDataPrefetch)"
373 if nonSpec:
374 self.instFlags += " | (1ULL << StaticInst::IsNonSpeculative)"
370 self.memFlags += " | (machInst.legacy.addr ? " + \
371 "(AddrSizeFlagBit << FlagShift) : 0)"
375 # For implicit stack operations, we should use *not* use the
376 # alternative addressing mode for loads/stores if the prefix is set
377 if not implicitStack:
378 self.memFlags += " | (machInst.legacy.addr ? " + \
379 "(AddrSizeFlagBit << FlagShift) : 0)"
372
373 def getAllocator(self, microFlags):
374 allocString = '''
375 (%(dataSize)s >= 4) ?
376 (StaticInstPtr)(new %(class_name)sBig(machInst,
377 macrocodeBlock, %(flags)s, %(scale)s, %(index)s,
378 %(base)s, %(disp)s, %(segment)s, %(data)s,
379 %(dataSize)s, %(addressSize)s, %(memFlags)s)) :
380 (StaticInstPtr)(new %(class_name)s(machInst,
381 macrocodeBlock, %(flags)s, %(scale)s, %(index)s,
382 %(base)s, %(disp)s, %(segment)s, %(data)s,
383 %(dataSize)s, %(addressSize)s, %(memFlags)s))
384 '''
385 allocator = allocString % {
386 "class_name" : self.className,
387 "flags" : self.microFlagsText(microFlags) + self.instFlags,
388 "scale" : self.scale, "index" : self.index,
389 "base" : self.base,
390 "disp" : self.disp,
391 "segment" : self.segment, "data" : self.data,
392 "dataSize" : self.dataSize, "addressSize" : self.addressSize,
393 "memFlags" : self.memFlags}
394 return allocator
395
396 class LdStSplitOp(LdStOp):
397 def __init__(self, data, segment, addr, disp,
380
381 def getAllocator(self, microFlags):
382 allocString = '''
383 (%(dataSize)s >= 4) ?
384 (StaticInstPtr)(new %(class_name)sBig(machInst,
385 macrocodeBlock, %(flags)s, %(scale)s, %(index)s,
386 %(base)s, %(disp)s, %(segment)s, %(data)s,
387 %(dataSize)s, %(addressSize)s, %(memFlags)s)) :
388 (StaticInstPtr)(new %(class_name)s(machInst,
389 macrocodeBlock, %(flags)s, %(scale)s, %(index)s,
390 %(base)s, %(disp)s, %(segment)s, %(data)s,
391 %(dataSize)s, %(addressSize)s, %(memFlags)s))
392 '''
393 allocator = allocString % {
394 "class_name" : self.className,
395 "flags" : self.microFlagsText(microFlags) + self.instFlags,
396 "scale" : self.scale, "index" : self.index,
397 "base" : self.base,
398 "disp" : self.disp,
399 "segment" : self.segment, "data" : self.data,
400 "dataSize" : self.dataSize, "addressSize" : self.addressSize,
401 "memFlags" : self.memFlags}
402 return allocator
403
404 class LdStSplitOp(LdStOp):
405 def __init__(self, data, segment, addr, disp,
398 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec):
406 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec,
407 implicitStack):
399 super(LdStSplitOp, self).__init__(0, segment, addr, disp,
408 super(LdStSplitOp, self).__init__(0, segment, addr, disp,
400 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec)
409 dataSize, addressSize, baseFlags, atCPL0, prefetch, nonSpec,
410 implicitStack)
401 (self.dataLow, self.dataHi) = data
402
403 def getAllocator(self, microFlags):
404 allocString = '''(StaticInstPtr)(new %(class_name)s(machInst,
405 macrocodeBlock, %(flags)s, %(scale)s, %(index)s,
406 %(base)s, %(disp)s, %(segment)s,
407 %(dataLow)s, %(dataHi)s,
408 %(dataSize)s, %(addressSize)s, %(memFlags)s))
409 '''
410 allocator = allocString % {
411 "class_name" : self.className,
412 "flags" : self.microFlagsText(microFlags) + self.instFlags,
413 "scale" : self.scale, "index" : self.index,
414 "base" : self.base,
415 "disp" : self.disp,
416 "segment" : self.segment,
417 "dataLow" : self.dataLow, "dataHi" : self.dataHi,
418 "dataSize" : self.dataSize, "addressSize" : self.addressSize,
419 "memFlags" : self.memFlags}
420 return allocator
421
422}};
423
424let {{
425
426 # Make these empty strings so that concatenating onto
427 # them will always work.
428 header_output = ""
429 decoder_output = ""
430 exec_output = ""
431
432 segmentEAExpr = \
433 'bits(scale * Index + Base + disp, addressSize * 8 - 1, 0);'
434
435 calculateEA = 'EA = SegBase + ' + segmentEAExpr
436
437 def defineMicroLoadOp(mnemonic, code, bigCode='',
411 (self.dataLow, self.dataHi) = data
412
413 def getAllocator(self, microFlags):
414 allocString = '''(StaticInstPtr)(new %(class_name)s(machInst,
415 macrocodeBlock, %(flags)s, %(scale)s, %(index)s,
416 %(base)s, %(disp)s, %(segment)s,
417 %(dataLow)s, %(dataHi)s,
418 %(dataSize)s, %(addressSize)s, %(memFlags)s))
419 '''
420 allocator = allocString % {
421 "class_name" : self.className,
422 "flags" : self.microFlagsText(microFlags) + self.instFlags,
423 "scale" : self.scale, "index" : self.index,
424 "base" : self.base,
425 "disp" : self.disp,
426 "segment" : self.segment,
427 "dataLow" : self.dataLow, "dataHi" : self.dataHi,
428 "dataSize" : self.dataSize, "addressSize" : self.addressSize,
429 "memFlags" : self.memFlags}
430 return allocator
431
432}};
433
434let {{
435
436 # Make these empty strings so that concatenating onto
437 # them will always work.
438 header_output = ""
439 decoder_output = ""
440 exec_output = ""
441
442 segmentEAExpr = \
443 'bits(scale * Index + Base + disp, addressSize * 8 - 1, 0);'
444
445 calculateEA = 'EA = SegBase + ' + segmentEAExpr
446
447 def defineMicroLoadOp(mnemonic, code, bigCode='',
438 mem_flags="0", big=True, nonSpec=False):
448 mem_flags="0", big=True, nonSpec=False,
449 implicitStack=False):
439 global header_output
440 global decoder_output
441 global exec_output
442 global microopClasses
443 Name = mnemonic
444 name = mnemonic.lower()
445
446 # Build up the all register version of this micro op
447 iops = [InstObjParams(name, Name, 'X86ISA::LdStOp',
448 { "code": code,
449 "ea_code": calculateEA,
450 "memDataSize": "dataSize" })]
451 if big:
452 iops += [InstObjParams(name, Name + "Big", 'X86ISA::LdStOp',
453 { "code": bigCode,
454 "ea_code": calculateEA,
455 "memDataSize": "dataSize" })]
456 for iop in iops:
457 header_output += MicroLdStOpDeclare.subst(iop)
458 decoder_output += MicroLdStOpConstructor.subst(iop)
459 exec_output += MicroLoadExecute.subst(iop)
460 exec_output += MicroLoadInitiateAcc.subst(iop)
461 exec_output += MicroLoadCompleteAcc.subst(iop)
462
450 global header_output
451 global decoder_output
452 global exec_output
453 global microopClasses
454 Name = mnemonic
455 name = mnemonic.lower()
456
457 # Build up the all register version of this micro op
458 iops = [InstObjParams(name, Name, 'X86ISA::LdStOp',
459 { "code": code,
460 "ea_code": calculateEA,
461 "memDataSize": "dataSize" })]
462 if big:
463 iops += [InstObjParams(name, Name + "Big", 'X86ISA::LdStOp',
464 { "code": bigCode,
465 "ea_code": calculateEA,
466 "memDataSize": "dataSize" })]
467 for iop in iops:
468 header_output += MicroLdStOpDeclare.subst(iop)
469 decoder_output += MicroLdStOpConstructor.subst(iop)
470 exec_output += MicroLoadExecute.subst(iop)
471 exec_output += MicroLoadInitiateAcc.subst(iop)
472 exec_output += MicroLoadCompleteAcc.subst(iop)
473
474 if implicitStack:
475 # For instructions that implicitly access the stack, the address
476 # size is the same as the stack segment pointer size, not the
477 # address size if specified by the instruction prefix
478 addressSize = "env.stackSize"
479 else:
480 addressSize = "env.addressSize"
481
463 base = LdStOp
464 if big:
465 base = BigLdStOp
466 class LoadOp(base):
467 def __init__(self, data, segment, addr, disp = 0,
468 dataSize="env.dataSize",
482 base = LdStOp
483 if big:
484 base = BigLdStOp
485 class LoadOp(base):
486 def __init__(self, data, segment, addr, disp = 0,
487 dataSize="env.dataSize",
469 addressSize="env.addressSize",
470 atCPL0=False, prefetch=False, nonSpec=nonSpec):
488 addressSize=addressSize,
489 atCPL0=False, prefetch=False, nonSpec=nonSpec,
490 implicitStack=implicitStack):
471 super(LoadOp, self).__init__(data, segment, addr,
472 disp, dataSize, addressSize, mem_flags,
491 super(LoadOp, self).__init__(data, segment, addr,
492 disp, dataSize, addressSize, mem_flags,
473 atCPL0, prefetch, nonSpec)
493 atCPL0, prefetch, nonSpec, implicitStack)
474 self.className = Name
475 self.mnemonic = name
476
477 microopClasses[name] = LoadOp
478
479 defineMicroLoadOp('Ld', 'Data = merge(Data, Mem, dataSize);',
480 'Data = Mem & mask(dataSize * 8);')
494 self.className = Name
495 self.mnemonic = name
496
497 microopClasses[name] = LoadOp
498
499 defineMicroLoadOp('Ld', 'Data = merge(Data, Mem, dataSize);',
500 'Data = Mem & mask(dataSize * 8);')
501 defineMicroLoadOp('Ldis', 'Data = merge(Data, Mem, dataSize);',
502 'Data = Mem & mask(dataSize * 8);',
503 implicitStack=True)
481 defineMicroLoadOp('Ldst', 'Data = merge(Data, Mem, dataSize);',
482 'Data = Mem & mask(dataSize * 8);',
483 '(StoreCheck << FlagShift)')
484 defineMicroLoadOp('Ldstl', 'Data = merge(Data, Mem, dataSize);',
485 'Data = Mem & mask(dataSize * 8);',
486 '(StoreCheck << FlagShift) | Request::LOCKED_RMW',
487 nonSpec=True)
488
489 defineMicroLoadOp('Ldfp', code='FpData_uqw = Mem', big = False)
490
491 defineMicroLoadOp('Ldfp87', code='''
492 switch (dataSize)
493 {
494 case 4:
495 FpData_df = *(float *)&Mem;
496 break;
497 case 8:
498 FpData_df = *(double *)&Mem;
499 break;
500 default:
501 panic("Unhandled data size in LdFp87.\\n");
502 }
503 ''', big = False)
504
505 # Load integer from memory into x87 top-of-stack register.
506 # Used to implement fild instruction.
507 defineMicroLoadOp('Ldifp87', code='''
508 switch (dataSize)
509 {
510 case 2:
511 FpData_df = (int64_t)sext<16>(Mem);
512 break;
513 case 4:
514 FpData_df = (int64_t)sext<32>(Mem);
515 break;
516 case 8:
517 FpData_df = (int64_t)Mem;
518 break;
519 default:
520 panic("Unhandled data size in LdIFp87.\\n");
521 }
522 ''', big = False)
523
524 def defineMicroLoadSplitOp(mnemonic, code, mem_flags="0", nonSpec=False):
525 global header_output
526 global decoder_output
527 global exec_output
528 global microopClasses
529 Name = mnemonic
530 name = mnemonic.lower()
531
532 iop = InstObjParams(name, Name, 'X86ISA::LdStSplitOp',
533 { "code": code,
534 "ea_code": calculateEA,
535 "memDataSize": "2 * dataSize" })
536
537 header_output += MicroLdStSplitOpDeclare.subst(iop)
538 decoder_output += MicroLdStSplitOpConstructor.subst(iop)
539 exec_output += MicroLoadExecute.subst(iop)
540 exec_output += MicroLoadInitiateAcc.subst(iop)
541 exec_output += MicroLoadCompleteAcc.subst(iop)
542
543 class LoadOp(LdStSplitOp):
544 def __init__(self, data, segment, addr, disp = 0,
545 dataSize="env.dataSize",
546 addressSize="env.addressSize",
504 defineMicroLoadOp('Ldst', 'Data = merge(Data, Mem, dataSize);',
505 'Data = Mem & mask(dataSize * 8);',
506 '(StoreCheck << FlagShift)')
507 defineMicroLoadOp('Ldstl', 'Data = merge(Data, Mem, dataSize);',
508 'Data = Mem & mask(dataSize * 8);',
509 '(StoreCheck << FlagShift) | Request::LOCKED_RMW',
510 nonSpec=True)
511
512 defineMicroLoadOp('Ldfp', code='FpData_uqw = Mem', big = False)
513
514 defineMicroLoadOp('Ldfp87', code='''
515 switch (dataSize)
516 {
517 case 4:
518 FpData_df = *(float *)&Mem;
519 break;
520 case 8:
521 FpData_df = *(double *)&Mem;
522 break;
523 default:
524 panic("Unhandled data size in LdFp87.\\n");
525 }
526 ''', big = False)
527
528 # Load integer from memory into x87 top-of-stack register.
529 # Used to implement fild instruction.
530 defineMicroLoadOp('Ldifp87', code='''
531 switch (dataSize)
532 {
533 case 2:
534 FpData_df = (int64_t)sext<16>(Mem);
535 break;
536 case 4:
537 FpData_df = (int64_t)sext<32>(Mem);
538 break;
539 case 8:
540 FpData_df = (int64_t)Mem;
541 break;
542 default:
543 panic("Unhandled data size in LdIFp87.\\n");
544 }
545 ''', big = False)
546
547 def defineMicroLoadSplitOp(mnemonic, code, mem_flags="0", nonSpec=False):
548 global header_output
549 global decoder_output
550 global exec_output
551 global microopClasses
552 Name = mnemonic
553 name = mnemonic.lower()
554
555 iop = InstObjParams(name, Name, 'X86ISA::LdStSplitOp',
556 { "code": code,
557 "ea_code": calculateEA,
558 "memDataSize": "2 * dataSize" })
559
560 header_output += MicroLdStSplitOpDeclare.subst(iop)
561 decoder_output += MicroLdStSplitOpConstructor.subst(iop)
562 exec_output += MicroLoadExecute.subst(iop)
563 exec_output += MicroLoadInitiateAcc.subst(iop)
564 exec_output += MicroLoadCompleteAcc.subst(iop)
565
566 class LoadOp(LdStSplitOp):
567 def __init__(self, data, segment, addr, disp = 0,
568 dataSize="env.dataSize",
569 addressSize="env.addressSize",
547 atCPL0=False, prefetch=False, nonSpec=nonSpec):
570 atCPL0=False, prefetch=False, nonSpec=nonSpec,
571 implicitStack=False):
548 super(LoadOp, self).__init__(data, segment, addr,
549 disp, dataSize, addressSize, mem_flags,
572 super(LoadOp, self).__init__(data, segment, addr,
573 disp, dataSize, addressSize, mem_flags,
550 atCPL0, prefetch, nonSpec)
574 atCPL0, prefetch, nonSpec, implicitStack)
551 self.className = Name
552 self.mnemonic = name
553
554 microopClasses[name] = LoadOp
555
556 code = '''
557 switch (dataSize) {
558 case 4:
559 DataLow = bits(Mem_u2qw[0], 31, 0);
560 DataHi = bits(Mem_u2qw[0], 63, 32);
561 break;
562 case 8:
563 DataLow = Mem_u2qw[0];
564 DataHi = Mem_u2qw[1];
565 break;
566 default:
567 panic("Unhandled data size %d in LdSplit.\\n", dataSize);
568 }'''
569
570 defineMicroLoadSplitOp('LdSplit', code,
571 '(StoreCheck << FlagShift)')
572
573 defineMicroLoadSplitOp('LdSplitl', code,
574 '(StoreCheck << FlagShift) | Request::LOCKED_RMW',
575 nonSpec=True)
576
575 self.className = Name
576 self.mnemonic = name
577
578 microopClasses[name] = LoadOp
579
580 code = '''
581 switch (dataSize) {
582 case 4:
583 DataLow = bits(Mem_u2qw[0], 31, 0);
584 DataHi = bits(Mem_u2qw[0], 63, 32);
585 break;
586 case 8:
587 DataLow = Mem_u2qw[0];
588 DataHi = Mem_u2qw[1];
589 break;
590 default:
591 panic("Unhandled data size %d in LdSplit.\\n", dataSize);
592 }'''
593
594 defineMicroLoadSplitOp('LdSplit', code,
595 '(StoreCheck << FlagShift)')
596
597 defineMicroLoadSplitOp('LdSplitl', code,
598 '(StoreCheck << FlagShift) | Request::LOCKED_RMW',
599 nonSpec=True)
600
577 def defineMicroStoreOp(mnemonic, code, completeCode="", mem_flags="0"):
601 def defineMicroStoreOp(mnemonic, code, completeCode="", mem_flags="0",
602 implicitStack=False):
578 global header_output
579 global decoder_output
580 global exec_output
581 global microopClasses
582 Name = mnemonic
583 name = mnemonic.lower()
584
585 # Build up the all register version of this micro op
586 iop = InstObjParams(name, Name, 'X86ISA::LdStOp',
587 { "code": code,
588 "complete_code": completeCode,
589 "ea_code": calculateEA,
590 "memDataSize": "dataSize" })
591 header_output += MicroLdStOpDeclare.subst(iop)
592 decoder_output += MicroLdStOpConstructor.subst(iop)
593 exec_output += MicroStoreExecute.subst(iop)
594 exec_output += MicroStoreInitiateAcc.subst(iop)
595 exec_output += MicroStoreCompleteAcc.subst(iop)
596
603 global header_output
604 global decoder_output
605 global exec_output
606 global microopClasses
607 Name = mnemonic
608 name = mnemonic.lower()
609
610 # Build up the all register version of this micro op
611 iop = InstObjParams(name, Name, 'X86ISA::LdStOp',
612 { "code": code,
613 "complete_code": completeCode,
614 "ea_code": calculateEA,
615 "memDataSize": "dataSize" })
616 header_output += MicroLdStOpDeclare.subst(iop)
617 decoder_output += MicroLdStOpConstructor.subst(iop)
618 exec_output += MicroStoreExecute.subst(iop)
619 exec_output += MicroStoreInitiateAcc.subst(iop)
620 exec_output += MicroStoreCompleteAcc.subst(iop)
621
622 if implicitStack:
623 # For instructions that implicitly access the stack, the address
624 # size is the same as the stack segment pointer size, not the
625 # address size if specified by the instruction prefix
626 addressSize = "env.stackSize"
627 else:
628 addressSize = "env.addressSize"
629
597 class StoreOp(LdStOp):
598 def __init__(self, data, segment, addr, disp = 0,
599 dataSize="env.dataSize",
630 class StoreOp(LdStOp):
631 def __init__(self, data, segment, addr, disp = 0,
632 dataSize="env.dataSize",
600 addressSize="env.addressSize",
601 atCPL0=False, nonSpec=False):
633 addressSize=addressSize,
634 atCPL0=False, nonSpec=False, implicitStack=implicitStack):
602 super(StoreOp, self).__init__(data, segment, addr, disp,
603 dataSize, addressSize, mem_flags, atCPL0, False,
635 super(StoreOp, self).__init__(data, segment, addr, disp,
636 dataSize, addressSize, mem_flags, atCPL0, False,
604 nonSpec)
637 nonSpec, implicitStack)
605 self.className = Name
606 self.mnemonic = name
607
608 microopClasses[name] = StoreOp
609
610 defineMicroStoreOp('St', 'Mem = pick(Data, 2, dataSize);')
638 self.className = Name
639 self.mnemonic = name
640
641 microopClasses[name] = StoreOp
642
643 defineMicroStoreOp('St', 'Mem = pick(Data, 2, dataSize);')
644 defineMicroStoreOp('Stis', 'Mem = pick(Data, 2, dataSize);',
645 implicitStack=True)
611 defineMicroStoreOp('Stul', 'Mem = pick(Data, 2, dataSize);',
612 mem_flags="Request::LOCKED_RMW")
613
614 defineMicroStoreOp('Stfp', code='Mem = FpData_uqw;')
615
616 defineMicroStoreOp('Stfp87', code='''
617 switch (dataSize)
618 {
619 case 4: {
620 float single(FpData_df);
621 Mem = *(uint32_t *)&single;
622 } break;
623 case 8:
624 Mem = *(uint64_t *)&FpData_df;
625 break;
626 default:
627 panic("Unhandled data size in StFp87.\\n");
628 }
629 ''')
630
631 defineMicroStoreOp('Cda', 'Mem = 0;', mem_flags="Request::NO_ACCESS")
632
633 def defineMicroStoreSplitOp(mnemonic, code,
634 completeCode="", mem_flags="0"):
635 global header_output
636 global decoder_output
637 global exec_output
638 global microopClasses
639 Name = mnemonic
640 name = mnemonic.lower()
641
642 iop = InstObjParams(name, Name, 'X86ISA::LdStSplitOp',
643 { "code": code,
644 "complete_code": completeCode,
645 "ea_code": calculateEA,
646 "memDataSize": "2 * dataSize" })
647
648 header_output += MicroLdStSplitOpDeclare.subst(iop)
649 decoder_output += MicroLdStSplitOpConstructor.subst(iop)
650 exec_output += MicroStoreExecute.subst(iop)
651 exec_output += MicroStoreInitiateAcc.subst(iop)
652 exec_output += MicroStoreCompleteAcc.subst(iop)
653
654 class StoreOp(LdStSplitOp):
655 def __init__(self, data, segment, addr, disp = 0,
656 dataSize="env.dataSize",
657 addressSize="env.addressSize",
646 defineMicroStoreOp('Stul', 'Mem = pick(Data, 2, dataSize);',
647 mem_flags="Request::LOCKED_RMW")
648
649 defineMicroStoreOp('Stfp', code='Mem = FpData_uqw;')
650
651 defineMicroStoreOp('Stfp87', code='''
652 switch (dataSize)
653 {
654 case 4: {
655 float single(FpData_df);
656 Mem = *(uint32_t *)&single;
657 } break;
658 case 8:
659 Mem = *(uint64_t *)&FpData_df;
660 break;
661 default:
662 panic("Unhandled data size in StFp87.\\n");
663 }
664 ''')
665
666 defineMicroStoreOp('Cda', 'Mem = 0;', mem_flags="Request::NO_ACCESS")
667
668 def defineMicroStoreSplitOp(mnemonic, code,
669 completeCode="", mem_flags="0"):
670 global header_output
671 global decoder_output
672 global exec_output
673 global microopClasses
674 Name = mnemonic
675 name = mnemonic.lower()
676
677 iop = InstObjParams(name, Name, 'X86ISA::LdStSplitOp',
678 { "code": code,
679 "complete_code": completeCode,
680 "ea_code": calculateEA,
681 "memDataSize": "2 * dataSize" })
682
683 header_output += MicroLdStSplitOpDeclare.subst(iop)
684 decoder_output += MicroLdStSplitOpConstructor.subst(iop)
685 exec_output += MicroStoreExecute.subst(iop)
686 exec_output += MicroStoreInitiateAcc.subst(iop)
687 exec_output += MicroStoreCompleteAcc.subst(iop)
688
689 class StoreOp(LdStSplitOp):
690 def __init__(self, data, segment, addr, disp = 0,
691 dataSize="env.dataSize",
692 addressSize="env.addressSize",
658 atCPL0=False, nonSpec=False):
693 atCPL0=False, nonSpec=False, implicitStack=False):
659 super(StoreOp, self).__init__(data, segment, addr, disp,
660 dataSize, addressSize, mem_flags, atCPL0, False,
694 super(StoreOp, self).__init__(data, segment, addr, disp,
695 dataSize, addressSize, mem_flags, atCPL0, False,
661 nonSpec)
696 nonSpec, implicitStack)
662 self.className = Name
663 self.mnemonic = name
664
665 microopClasses[name] = StoreOp
666
667 code = '''
668 switch (dataSize) {
669 case 4:
670 Mem_u2qw[0] = (DataHi << 32) | DataLow;
671 break;
672 case 8:
673 Mem_u2qw[0] = DataLow;
674 Mem_u2qw[1] = DataHi;
675 break;
676 default:
677 panic("Unhandled data size %d in StSplit.\\n", dataSize);
678 }'''
679
680 defineMicroStoreSplitOp('StSplit', code);
681
682 defineMicroStoreSplitOp('StSplitul', code,
683 mem_flags='Request::LOCKED_RMW')
684
685 iop = InstObjParams("lea", "Lea", 'X86ISA::LdStOp',
686 { "code": "Data = merge(Data, EA, dataSize);",
687 "ea_code": "EA = " + segmentEAExpr,
688 "memDataSize": "dataSize" })
689 header_output += MicroLeaDeclare.subst(iop)
690 decoder_output += MicroLdStOpConstructor.subst(iop)
691 exec_output += MicroLeaExecute.subst(iop)
692
693 class LeaOp(LdStOp):
694 def __init__(self, data, segment, addr, disp = 0,
695 dataSize="env.dataSize", addressSize="env.addressSize"):
696 super(LeaOp, self).__init__(data, segment, addr, disp,
697 self.className = Name
698 self.mnemonic = name
699
700 microopClasses[name] = StoreOp
701
702 code = '''
703 switch (dataSize) {
704 case 4:
705 Mem_u2qw[0] = (DataHi << 32) | DataLow;
706 break;
707 case 8:
708 Mem_u2qw[0] = DataLow;
709 Mem_u2qw[1] = DataHi;
710 break;
711 default:
712 panic("Unhandled data size %d in StSplit.\\n", dataSize);
713 }'''
714
715 defineMicroStoreSplitOp('StSplit', code);
716
717 defineMicroStoreSplitOp('StSplitul', code,
718 mem_flags='Request::LOCKED_RMW')
719
720 iop = InstObjParams("lea", "Lea", 'X86ISA::LdStOp',
721 { "code": "Data = merge(Data, EA, dataSize);",
722 "ea_code": "EA = " + segmentEAExpr,
723 "memDataSize": "dataSize" })
724 header_output += MicroLeaDeclare.subst(iop)
725 decoder_output += MicroLdStOpConstructor.subst(iop)
726 exec_output += MicroLeaExecute.subst(iop)
727
728 class LeaOp(LdStOp):
729 def __init__(self, data, segment, addr, disp = 0,
730 dataSize="env.dataSize", addressSize="env.addressSize"):
731 super(LeaOp, self).__init__(data, segment, addr, disp,
697 dataSize, addressSize, "0", False, False, False)
732 dataSize, addressSize, "0", False, False, False, False)
698 self.className = "Lea"
699 self.mnemonic = "lea"
700
701 microopClasses["lea"] = LeaOp
702
703
704 iop = InstObjParams("tia", "Tia", 'X86ISA::LdStOp',
705 { "code": "xc->demapPage(EA, 0);",
706 "ea_code": calculateEA,
707 "memDataSize": "dataSize" })
708 header_output += MicroLeaDeclare.subst(iop)
709 decoder_output += MicroLdStOpConstructor.subst(iop)
710 exec_output += MicroLeaExecute.subst(iop)
711
712 class TiaOp(LdStOp):
713 def __init__(self, segment, addr, disp = 0,
714 dataSize="env.dataSize",
715 addressSize="env.addressSize"):
716 super(TiaOp, self).__init__("InstRegIndex(NUM_INTREGS)", segment,
717 addr, disp, dataSize, addressSize, "0", False, False,
733 self.className = "Lea"
734 self.mnemonic = "lea"
735
736 microopClasses["lea"] = LeaOp
737
738
739 iop = InstObjParams("tia", "Tia", 'X86ISA::LdStOp',
740 { "code": "xc->demapPage(EA, 0);",
741 "ea_code": calculateEA,
742 "memDataSize": "dataSize" })
743 header_output += MicroLeaDeclare.subst(iop)
744 decoder_output += MicroLdStOpConstructor.subst(iop)
745 exec_output += MicroLeaExecute.subst(iop)
746
747 class TiaOp(LdStOp):
748 def __init__(self, segment, addr, disp = 0,
749 dataSize="env.dataSize",
750 addressSize="env.addressSize"):
751 super(TiaOp, self).__init__("InstRegIndex(NUM_INTREGS)", segment,
752 addr, disp, dataSize, addressSize, "0", False, False,
718 False)
753 False, False)
719 self.className = "Tia"
720 self.mnemonic = "tia"
721
722 microopClasses["tia"] = TiaOp
723
724 class CdaOp(LdStOp):
725 def __init__(self, segment, addr, disp = 0,
726 dataSize="env.dataSize",
727 addressSize="env.addressSize", atCPL0=False):
728 super(CdaOp, self).__init__("InstRegIndex(NUM_INTREGS)", segment,
729 addr, disp, dataSize, addressSize, "Request::NO_ACCESS",
754 self.className = "Tia"
755 self.mnemonic = "tia"
756
757 microopClasses["tia"] = TiaOp
758
759 class CdaOp(LdStOp):
760 def __init__(self, segment, addr, disp = 0,
761 dataSize="env.dataSize",
762 addressSize="env.addressSize", atCPL0=False):
763 super(CdaOp, self).__init__("InstRegIndex(NUM_INTREGS)", segment,
764 addr, disp, dataSize, addressSize, "Request::NO_ACCESS",
730 atCPL0, False, False)
765 atCPL0, False, False, False)
731 self.className = "Cda"
732 self.mnemonic = "cda"
733
734 microopClasses["cda"] = CdaOp
735}};
766 self.className = "Cda"
767 self.mnemonic = "cda"
768
769 microopClasses["cda"] = CdaOp
770}};
736