// Copyright (c) 2007-2008 The Hewlett-Packard Development Company
// All rights reserved.
//
// The license below extends only to copyright in the software and shall
// not be construed as granting a license to any other intellectual
// property including but not limited to intellectual property relating
// to a hardware implementation of the functionality of the software
// licensed hereunder. You may use the software subject to the license
// terms below provided that you ensure that this notice is replicated
// unmodified and in its entirety in all distributions of the software,
// modified or unmodified, in source code or in binary form.
//
// Copyright (c) 2008 The Regents of The University of Michigan
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met: redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer;
// redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution;
// neither the name of the copyright holders nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// Authors: Gabe Black

//////////////////////////////////////////////////////////////////////////
//
// LdStOp Microop templates
//
//////////////////////////////////////////////////////////////////////////

// LEA template
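// These templates only compute an effective address; no memory is
// accessed. They are substituted below for both the lea and tia microops.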

def template MicroLeaExecute {{
    Fault %(class_name)s::execute(%(CPU_exec_context)s *xc,
          Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        Addr EA;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        %(code)s;
        if(fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template MicroLeaDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      protected:
        void buildMe();

      public:
        %(class_name)s(ExtMachInst _machInst,
                const char * instMnem, uint64_t setFlags,
                uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
                uint64_t _disp, InstRegIndex _segment,
                InstRegIndex _data,
                uint8_t _dataSize, uint8_t _addressSize,
                Request::FlagsType _memFlags);

        %(class_name)s(ExtMachInst _machInst,
                const char * instMnem,
                uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
                uint64_t _disp, InstRegIndex _segment,
                InstRegIndex _data,
                uint8_t _dataSize, uint8_t _addressSize,
                Request::FlagsType _memFlags);

        %(BasicExecDeclare)s
    };
}};

// Load templates
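// Loads come in three pieces: execute() performs the whole access in one
// call (for CPU models that do atomic accesses), while initiateAcc() sends
// the read request and completeAcc() consumes the returned packet for CPU
// models that split a memory access into a request and a response phase.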

def template MicroLoadExecute {{
    Fault %(class_name)s::execute(%(CPU_exec_context)s *xc,
          Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;
        Addr EA;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        fault = read(xc, EA, Mem, memFlags);

        if (fault == NoFault) {
            %(code)s;
        } else if (memFlags & Request::PREFETCH) {
            // For prefetches, ignore any faults/exceptions.
            return NoFault;
        }
        if(fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};

def template MicroLoadInitiateAcc {{
    Fault %(class_name)s::initiateAcc(%(CPU_exec_context)s * xc,
            Trace::InstRecord * traceData) const
    {
        Fault fault = NoFault;
        Addr EA;

        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        fault = read(xc, EA, Mem, memFlags);

        return fault;
    }
}};

def template MicroLoadCompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt,
            %(CPU_exec_context)s * xc,
            Trace::InstRecord * traceData) const
    {
        Fault fault = NoFault;

        %(op_decl)s;
        %(op_rd)s;

        Mem = get(pkt);

        %(code)s;

        if(fault == NoFault)
        {
            %(op_wb)s;
        }

        return fault;
    }
}};

// Store templates
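// Stores follow the same split: execute() computes the data and performs
// the write in one call, initiateAcc() computes the data and issues the
// write, and completeAcc() runs any completion code (for example stupd's
// base register update below) once the access has finished.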

def template MicroStoreExecute {{
    Fault %(class_name)s::execute(%(CPU_exec_context)s * xc,
            Trace::InstRecord *traceData) const
    {
        Fault fault = NoFault;

        Addr EA;
        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        %(code)s;

        if(fault == NoFault)
        {
            fault = write(xc, Mem, EA, memFlags);
            if(fault == NoFault)
            {
                %(post_code)s;
                %(op_wb)s;
            }
        }

        return fault;
    }
}};

def template MicroStoreInitiateAcc {{
    Fault %(class_name)s::initiateAcc(%(CPU_exec_context)s * xc,
            Trace::InstRecord * traceData) const
    {
        Fault fault = NoFault;

        Addr EA;
        %(op_decl)s;
        %(op_rd)s;
        %(ea_code)s;
        DPRINTF(X86, "%s : %s: The address is %#x\n", instMnem, mnemonic, EA);

        %(code)s;

        if(fault == NoFault)
        {
            fault = write(xc, Mem, EA, memFlags);
        }
        return fault;
    }
}};

def template MicroStoreCompleteAcc {{
    Fault %(class_name)s::completeAcc(PacketPtr pkt,
            %(CPU_exec_context)s * xc, Trace::InstRecord * traceData) const
    {
        %(op_decl)s;
        %(op_rd)s;
        %(complete_code)s;
        %(op_wb)s;
        return NoFault;
    }
}};

// Common templates

// This declares the initiateAcc function in memory operations
def template InitiateAccDeclare {{
    Fault initiateAcc(%(CPU_exec_context)s *, Trace::InstRecord *) const;
}};

// This declares the completeAcc function in memory operations
def template CompleteAccDeclare {{
    Fault completeAcc(PacketPtr, %(CPU_exec_context)s *, Trace::InstRecord *) const;
}};

def template MicroLdStOpDeclare {{
    class %(class_name)s : public %(base_class)s
    {
      protected:
        void buildMe();

      public:
        %(class_name)s(ExtMachInst _machInst,
                const char * instMnem, uint64_t setFlags,
                uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
                uint64_t _disp, InstRegIndex _segment,
                InstRegIndex _data,
                uint8_t _dataSize, uint8_t _addressSize,
                Request::FlagsType _memFlags);

        %(class_name)s(ExtMachInst _machInst,
                const char * instMnem,
                uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
                uint64_t _disp, InstRegIndex _segment,
                InstRegIndex _data,
                uint8_t _dataSize, uint8_t _addressSize,
                Request::FlagsType _memFlags);

        %(BasicExecDeclare)s

        %(InitiateAccDeclare)s

        %(CompleteAccDeclare)s
    };
}};

def template MicroLdStOpConstructor {{

    inline void %(class_name)s::buildMe()
    {
        %(constructor)s;
    }

    inline %(class_name)s::%(class_name)s(
            ExtMachInst machInst, const char * instMnem,
            uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
            uint64_t _disp, InstRegIndex _segment,
            InstRegIndex _data,
            uint8_t _dataSize, uint8_t _addressSize,
            Request::FlagsType _memFlags) :
        %(base_class)s(machInst, "%(mnemonic)s", instMnem, 0,
                _scale, _index, _base,
                _disp, _segment, _data,
                _dataSize, _addressSize, _memFlags, %(op_class)s)
    {
        buildMe();
    }

    inline %(class_name)s::%(class_name)s(
            ExtMachInst machInst, const char * instMnem, uint64_t setFlags,
            uint8_t _scale, InstRegIndex _index, InstRegIndex _base,
            uint64_t _disp, InstRegIndex _segment,
            InstRegIndex _data,
            uint8_t _dataSize, uint8_t _addressSize,
            Request::FlagsType _memFlags) :
        %(base_class)s(machInst, "%(mnemonic)s", instMnem, setFlags,
                _scale, _index, _base,
                _disp, _segment, _data,
                _dataSize, _addressSize, _memFlags, %(op_class)s)
    {
        buildMe();
    }
}};

let {{
    class LdStOp(X86Microop):
        def __init__(self, data, segment, addr, disp,
                dataSize, addressSize, baseFlags, atCPL0, prefetch):
            self.data = data
            [self.scale, self.index, self.base] = addr
            self.disp = disp
            self.segment = segment
            self.dataSize = dataSize
            self.addressSize = addressSize
            self.memFlags = baseFlags
            if atCPL0:
                self.memFlags += " | (CPL0FlagBit << FlagShift)"
            if prefetch:
                self.memFlags += " | Request::PREFETCH"
            self.memFlags += " | (machInst.legacy.addr ? " + \
                             "(AddrSizeFlagBit << FlagShift) : 0)"

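        # getAllocator returns, as a string, the C++ expression that
        # allocates the generated microop. As a rough illustration (not a
        # literal expansion), for the 'ld' microop it produces something
        # like:
        #   new Ld(machInst, macrocodeBlock, <microop flags>,
        #          scale, index, base, disp, segment, data,
        #          dataSize, addressSize, memFlags)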
        def getAllocator(self, microFlags):
            allocator = '''new %(class_name)s(machInst, macrocodeBlock,
                    %(flags)s, %(scale)s, %(index)s, %(base)s,
                    %(disp)s, %(segment)s, %(data)s,
                    %(dataSize)s, %(addressSize)s, %(memFlags)s)''' % {
                "class_name" : self.className,
                "flags" : self.microFlagsText(microFlags),
                "scale" : self.scale, "index" : self.index,
                "base" : self.base,
                "disp" : self.disp,
                "segment" : self.segment, "data" : self.data,
                "dataSize" : self.dataSize, "addressSize" : self.addressSize,
                "memFlags" : self.memFlags}
            return allocator
}};

let {{

    # Make these empty strings so that concatenating onto
    # them will always work.
    header_output = ""
    decoder_output = ""
    exec_output = ""

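    # calculateEA builds a linear address: the segment base plus the
    # scale * index + base + displacement sum, truncated to the current
    # address size. With a 32-bit address size, for instance, the bits()
    # call keeps only bits 31..0 of the sum.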
    calculateEA = '''
    EA = bits(SegBase + scale * Index + Base + disp, addressSize * 8 - 1, 0);
    '''

    def defineMicroLoadOp(mnemonic, code, mem_flags="0"):
        global header_output
        global decoder_output
        global exec_output
        global microopClasses
        Name = mnemonic
        name = mnemonic.lower()

        # Build up the all register version of this micro op
        iop = InstObjParams(name, Name, 'X86ISA::LdStOp',
                {"code": code,
                 "ea_code": calculateEA})
        header_output += MicroLdStOpDeclare.subst(iop)
        decoder_output += MicroLdStOpConstructor.subst(iop)
        exec_output += MicroLoadExecute.subst(iop)
        exec_output += MicroLoadInitiateAcc.subst(iop)
        exec_output += MicroLoadCompleteAcc.subst(iop)

        class LoadOp(LdStOp):
            def __init__(self, data, segment, addr, disp = 0,
                    dataSize="env.dataSize",
                    addressSize="env.addressSize",
                    atCPL0=False, prefetch=False):
                super(LoadOp, self).__init__(data, segment, addr,
                        disp, dataSize, addressSize, mem_flags,
                        atCPL0, prefetch)
                self.className = Name
                self.mnemonic = name

        microopClasses[name] = LoadOp

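    # Load flavors defined below: ld is a plain load, ldst adds a store
    # check (apparently so a read-modify-write sequence can verify
    # writability up front), ldstl additionally makes the access locked,
    # and ldfp loads into a floating point register.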
    defineMicroLoadOp('Ld', 'Data = merge(Data, Mem, dataSize);')
    defineMicroLoadOp('Ldst', 'Data = merge(Data, Mem, dataSize);',
            '(StoreCheck << FlagShift)')
    defineMicroLoadOp('Ldstl', 'Data = merge(Data, Mem, dataSize);',
            '(StoreCheck << FlagShift) | Request::LOCKED')
    defineMicroLoadOp('Ldfp', 'FpData.uqw = Mem;')

    def defineMicroStoreOp(mnemonic, code, \
            postCode="", completeCode="", mem_flags="0"):
        global header_output
        global decoder_output
        global exec_output
        global microopClasses
        Name = mnemonic
        name = mnemonic.lower()

        # Build up the all register version of this micro op
        iop = InstObjParams(name, Name, 'X86ISA::LdStOp',
                {"code": code,
                 "post_code": postCode,
                 "complete_code": completeCode,
                 "ea_code": calculateEA})
        header_output += MicroLdStOpDeclare.subst(iop)
        decoder_output += MicroLdStOpConstructor.subst(iop)
        exec_output += MicroStoreExecute.subst(iop)
        exec_output += MicroStoreInitiateAcc.subst(iop)
        exec_output += MicroStoreCompleteAcc.subst(iop)

        class StoreOp(LdStOp):
            def __init__(self, data, segment, addr, disp = 0,
                    dataSize="env.dataSize",
                    addressSize="env.addressSize",
                    atCPL0=False):
                super(StoreOp, self).__init__(data, segment, addr,
                        disp, dataSize, addressSize, mem_flags, atCPL0, False)
                self.className = Name
                self.mnemonic = name

        microopClasses[name] = StoreOp

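    # Store flavors defined below: st is a plain store, stul is a locked
    # store (the counterpart of ldstl above), stfp stores from a floating
    # point register, stupd also writes the updated address back into the
    # base register (as a push-style operation would want), and cda uses
    # NO_ACCESS so only the address translation and checks are performed.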
    defineMicroStoreOp('St', 'Mem = pick(Data, 2, dataSize);')
    defineMicroStoreOp('Stul', 'Mem = pick(Data, 2, dataSize);',
            mem_flags="Request::LOCKED")
    defineMicroStoreOp('Stfp', 'Mem = FpData.uqw;')
    defineMicroStoreOp('Stupd', 'Mem = pick(Data, 2, dataSize);',
            'Base = merge(Base, EA - SegBase, addressSize);',
            'Base = merge(Base, pkt->req->getVaddr() - SegBase, addressSize);')
    defineMicroStoreOp('Cda', 'Mem = 0;', mem_flags="Request::NO_ACCESS")

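    # lea reuses the LdStOp machinery, but its ea_code deliberately leaves
    # out SegBase: LEA yields the effective (segment-relative) address
    # rather than the linear address the loads and stores above compute.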
    iop = InstObjParams("lea", "Lea", 'X86ISA::LdStOp',
            {"code": "Data = merge(Data, EA, dataSize);",
             "ea_code": '''
             EA = bits(scale * Index + Base + disp, addressSize * 8 - 1, 0);
             '''})
    header_output += MicroLeaDeclare.subst(iop)
    decoder_output += MicroLdStOpConstructor.subst(iop)
    exec_output += MicroLeaExecute.subst(iop)

    class LeaOp(LdStOp):
        def __init__(self, data, segment, addr, disp = 0,
                dataSize="env.dataSize", addressSize="env.addressSize"):
            super(LeaOp, self).__init__(data, segment,
                    addr, disp, dataSize, addressSize, "0", False, False)
            self.className = "Lea"
            self.mnemonic = "lea"

    microopClasses["lea"] = LeaOp


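    # tia computes an address like the other memory microops and then asks
    # the execution context to demap the translation for that page; no
    # memory access is performed (presumably for invlpg-style behavior).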
    iop = InstObjParams("tia", "Tia", 'X86ISA::LdStOp',
            {"code": "xc->demapPage(EA, 0);",
             "ea_code": calculateEA})
    header_output += MicroLeaDeclare.subst(iop)
    decoder_output += MicroLdStOpConstructor.subst(iop)
    exec_output += MicroLeaExecute.subst(iop)

    class TiaOp(LdStOp):
        def __init__(self, segment, addr, disp = 0,
                dataSize="env.dataSize",
                addressSize="env.addressSize"):
            super(TiaOp, self).__init__("InstRegIndex(NUM_INTREGS)", segment,
                    addr, disp, dataSize, addressSize, "0", False, False)
            self.className = "Tia"
            self.mnemonic = "tia"

    microopClasses["tia"] = TiaOp

    class CdaOp(LdStOp):
        def __init__(self, segment, addr, disp = 0,
                dataSize="env.dataSize",
                addressSize="env.addressSize", atCPL0=False):
            super(CdaOp, self).__init__("InstRegIndex(NUM_INTREGS)", segment,
                    addr, disp, dataSize, addressSize, "Request::NO_ACCESS",
                    atCPL0, False)
            self.className = "Cda"
            self.mnemonic = "cda"

    microopClasses["cda"] = CdaOp
}};