1# Copyright (c) 2014, 2016 ARM Limited
2# All rights reserved
3#
4# The license below extends only to copyright in the software and shall
5# not be construed as granting a license to any other intellectual
6# property including but not limited to intellectual property relating
7# to a hardware implementation of the functionality of the software
8# licensed hereunder. You may use the software subject to the license
9# terms below provided that you ensure that this notice is replicated
10# unmodified and in its entirety in all distributions of the software,
11# modified or unmodified, in source code or in binary form.
12#
13# Copyright (c) 2003-2005 The Regents of The University of Michigan
14# Copyright (c) 2013,2015 Advanced Micro Devices, Inc.
15# All rights reserved.
16#
17# Redistribution and use in source and binary forms, with or without
18# modification, are permitted provided that the following conditions are
19# met: redistributions of source code must retain the above copyright
20# notice, this list of conditions and the following disclaimer;
21# redistributions in binary form must reproduce the above copyright
22# notice, this list of conditions and the following disclaimer in the
23# documentation and/or other materials provided with the distribution;
24# neither the name of the copyright holders nor the names of its
25# contributors may be used to endorse or promote products derived from
26# this software without specific prior written permission.
27#
28# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
29# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
30# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
31# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
32# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
33# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
34# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
35# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
36# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
37# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
38# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
39#
40# Authors: Steve Reinhardt
41
42from __future__ import with_statement
43import os
44import sys
45import re
46import string
47import inspect, traceback
48# get type names
49from types import *
50
51from m5.util.grammar import Grammar
52
53debug=False
54
55###################
56# Utility functions
57
58#
59# Indent every line in string 's' by two spaces
60# (except preprocessor directives).
61# Used to make nested code blocks look pretty.
62#
63def indent(s):
64 return re.sub(r'(?m)^(?!#)', ' ', s)
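
# Illustrative example (two-space indent, per the comment above):
#   indent('x = 1;\n#if FOO\ny = 2;\n')
# would return
#   '  x = 1;\n#if FOO\n  y = 2;\n'
# i.e., every line is shifted right except the preprocessor directive.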
65
66#
67# Munge a somewhat arbitrarily formatted piece of Python code
68# (e.g. from a format 'let' block) into something whose indentation
69# will get by the Python parser.
70#
71# The two keys here are that Python will give a syntax error if
72# there's any whitespace at the beginning of the first line, and that
73# all lines at the same lexical nesting level must have identical
74# indentation. Unfortunately the way code literals work, an entire
75# let block tends to have some initial indentation. Rather than
76# trying to figure out what that is and strip it off, we prepend 'if
77# 1:' to make the let code the nested block inside the if (and have
78# the parser automatically deal with the indentation for us).
79#
80# We don't want to do this if (1) the code block is empty or (2) the
81# first line of the block doesn't have any whitespace at the front.
82
83def fixPythonIndentation(s):
84 # get rid of blank lines first
85 s = re.sub(r'(?m)^\s*\n', '', s);
86 if (s != '' and re.match(r'[ \t]', s[0])):
87 s = 'if 1:\n' + s
88 return s
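
# For example, a 'let' block whose code starts with leading whitespace,
# such as '    x = 5\n    y = x + 1\n', becomes
#   'if 1:\n    x = 5\n    y = x + 1\n'
# which the Python parser accepts as an ordinary nested block.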
89
90class ISAParserError(Exception):
91 """Exception class for parser errors"""
92 def __init__(self, first, second=None):
93 if second is None:
94 self.lineno = 0
95 self.string = first
96 else:
97 self.lineno = first
98 self.string = second
99
100 def __str__(self):
101 return self.string
102
103def error(*args):
104 raise ISAParserError(*args)
105
106####################
107# Template objects.
108#
109# Template objects are format strings that allow substitution from
110# the attribute spaces of other objects (e.g. InstObjParams instances).
111
112labelRE = re.compile(r'(?<!%)%\(([^\)]+)\)[sd]')
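
# For example, '%(op_decl)s' in a template body is a substitution label
# captured by labelRE, while '%%(foo)s' is skipped because the escaped
# leading percent fails the negative lookbehind.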
113
114class Template(object):
115 def __init__(self, parser, t):
116 self.parser = parser
117 self.template = t
118
119 def subst(self, d):
120 myDict = None
121
122 # Protect non-Python-dict substitutions (e.g. if there's a printf
123 # in the templated C++ code)
124 template = self.parser.protectNonSubstPercents(self.template)
125 # CPU-model-specific substitutions are handled later (in GenCode).
126 template = self.parser.protectCpuSymbols(template)
127
128 # Build a dict ('myDict') to use for the template substitution.
129 # Start with the template namespace. Make a copy since we're
130 # going to modify it.
131 myDict = self.parser.templateMap.copy()
132
133 if isinstance(d, InstObjParams):
134 # If we're dealing with an InstObjParams object, we need
135 # to be a little more sophisticated. The instruction-wide
136 # parameters are already formed, but the parameters which
137 # are only function wide still need to be generated.
138 compositeCode = ''
139
140 myDict.update(d.__dict__)
141 # The "operands" and "snippets" attributes of the InstObjParams
142 # objects are for internal use and not substitution.
143 del myDict['operands']
144 del myDict['snippets']
145
146 snippetLabels = [l for l in labelRE.findall(template)
147 if d.snippets.has_key(l)]
148
149 snippets = dict([(s, self.parser.mungeSnippet(d.snippets[s]))
150 for s in snippetLabels])
151
152 myDict.update(snippets)
153
154 compositeCode = ' '.join(map(str, snippets.values()))
155
            # Add in the template itself in case it references any
            # operands explicitly (like Mem)
158 compositeCode += ' ' + template
159
160 operands = SubOperandList(self.parser, compositeCode, d.operands)
161
162 myDict['op_decl'] = operands.concatAttrStrings('op_decl')
163 if operands.readPC or operands.setPC:
164 myDict['op_decl'] += 'TheISA::PCState __parserAutoPCState;\n'
165
            # In case there are predicated register reads and writes,
            # declare the variables for the register indices. We assume
            # that all the operands in the OperandList are also in the
            # SubOperandList and in the same order. Otherwise, predication
            # should not be used for the operands.
171 if operands.predRead:
172 myDict['op_decl'] += 'uint8_t _sourceIndex = 0;\n'
173 if operands.predWrite:
174 myDict['op_decl'] += 'uint8_t M5_VAR_USED _destIndex = 0;\n'
175
176 is_src = lambda op: op.is_src
177 is_dest = lambda op: op.is_dest
178
179 myDict['op_src_decl'] = \
180 operands.concatSomeAttrStrings(is_src, 'op_src_decl')
181 myDict['op_dest_decl'] = \
182 operands.concatSomeAttrStrings(is_dest, 'op_dest_decl')
183 if operands.readPC:
184 myDict['op_src_decl'] += \
185 'TheISA::PCState __parserAutoPCState;\n'
186 if operands.setPC:
187 myDict['op_dest_decl'] += \
188 'TheISA::PCState __parserAutoPCState;\n'
189
190 myDict['op_rd'] = operands.concatAttrStrings('op_rd')
191 if operands.readPC:
192 myDict['op_rd'] = '__parserAutoPCState = xc->pcState();\n' + \
193 myDict['op_rd']
194
195 # Compose the op_wb string. If we're going to write back the
196 # PC state because we changed some of its elements, we'll need to
197 # do that as early as possible. That allows later uncoordinated
198 # modifications to the PC to layer appropriately.
199 reordered = list(operands.items)
200 reordered.reverse()
201 op_wb_str = ''
202 pcWbStr = 'xc->pcState(__parserAutoPCState);\n'
203 for op_desc in reordered:
204 if op_desc.isPCPart() and op_desc.is_dest:
205 op_wb_str = op_desc.op_wb + pcWbStr + op_wb_str
206 pcWbStr = ''
207 else:
208 op_wb_str = op_desc.op_wb + op_wb_str
209 myDict['op_wb'] = op_wb_str
210
211 elif isinstance(d, dict):
212 # if the argument is a dictionary, we just use it.
213 myDict.update(d)
214 elif hasattr(d, '__dict__'):
215 # if the argument is an object, we use its attribute map.
216 myDict.update(d.__dict__)
217 else:
218 raise TypeError, "Template.subst() arg must be or have dictionary"
219 return template % myDict
220
221 # Convert to string. This handles the case when a template with a
222 # CPU-specific term gets interpolated into another template or into
223 # an output block.
224 def __str__(self):
225 return self.parser.expandCpuSymbolsToString(self.template)
226
227################
228# Format object.
229#
230# A format object encapsulates an instruction format. It must provide
231# a defineInst() method that generates the code for an instruction
232# definition.
233
234class Format(object):
235 def __init__(self, id, params, code):
236 self.id = id
237 self.params = params
238 label = 'def format ' + id
239 self.user_code = compile(fixPythonIndentation(code), label, 'exec')
240 param_list = string.join(params, ", ")
241 f = '''def defInst(_code, _context, %s):
242 my_locals = vars().copy()
243 exec _code in _context, my_locals
244 return my_locals\n''' % param_list
245 c = compile(f, label + ' wrapper', 'exec')
246 exec c
247 self.func = defInst
248
249 def defineInst(self, parser, name, args, lineno):
250 parser.updateExportContext()
251 context = parser.exportContext.copy()
252 if len(name):
253 Name = name[0].upper()
254 if len(name) > 1:
255 Name += name[1:]
256 context.update({ 'name' : name, 'Name' : Name })
257 try:
258 vars = self.func(self.user_code, context, *args[0], **args[1])
259 except Exception, exc:
260 if debug:
261 raise
262 error(lineno, 'error defining "%s": %s.' % (name, exc))
263 for k in vars.keys():
264 if k not in ('header_output', 'decoder_output',
265 'exec_output', 'decode_block'):
266 del vars[k]
267 return GenCode(parser, **vars)
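
    # Rough sketch of how a format is defined in an ISA description and
    # consumed here (the names below are illustrative, not from any
    # particular ISA):
    #
    #   def format BasicOp(code, *opt_flags) {{
    #       iop = InstObjParams(name, Name, 'BasicOperation', code, opt_flags)
    #       header_output = BasicDeclare.subst(iop)
    #       decoder_output = BasicConstructor.subst(iop)
    #       decode_block = BasicDecode.subst(iop)
    #       exec_output = BasicExecute.subst(iop)
    #   }};
    #
    # defineInst() executes the body with 'name'/'Name' bound to the
    # mnemonic and keeps only the four output variables, wrapping them
    # in a GenCode object.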
268
269# Special null format to catch an implicit-format instruction
270# definition outside of any format block.
271class NoFormat(object):
272 def __init__(self):
273 self.defaultInst = ''
274
275 def defineInst(self, parser, name, args, lineno):
276 error(lineno,
277 'instruction definition "%s" with no active format!' % name)
278
279###############
280# GenCode class
281#
282# The GenCode class encapsulates generated code destined for various
283# output files. The header_output and decoder_output attributes are
284# strings containing code destined for decoder.hh and decoder.cc
285# respectively. The decode_block attribute contains code to be
286# incorporated in the decode function itself (that will also end up in
# decoder.cc). The exec_output attribute is a string of code destined
# for the exec.cc file. The has_decode_default attribute is used in the
# decode block to allow explicit default clauses to override default
# default clauses.
292
293class GenCode(object):
    # Constructor. At this point we substitute out all CPU-specific
    # symbols in the header and decoder outputs, collapsing each into a
    # single string. The exec output is stored as passed in; its
    # CPU-specific terms are resolved later, per CPU model.
298 def __init__(self, parser,
299 header_output = '', decoder_output = '', exec_output = '',
300 decode_block = '', has_decode_default = False):
301 self.parser = parser
302 self.header_output = parser.expandCpuSymbolsToString(header_output)
303 self.decoder_output = parser.expandCpuSymbolsToString(decoder_output)
304 self.exec_output = exec_output
305 self.decode_block = decode_block
306 self.has_decode_default = has_decode_default
307
    # Write these code chunks out to the filesystem. They will be
    # properly interwoven by write_top_level_files().
310 def emit(self):
311 if self.header_output:
312 self.parser.get_file('header').write(self.header_output)
313 if self.decoder_output:
314 self.parser.get_file('decoder').write(self.decoder_output)
315 if self.exec_output:
316 self.parser.get_file('exec').write(self.exec_output)
317 if self.decode_block:
318 self.parser.get_file('decode_block').write(self.decode_block)
319
320 # Override '+' operator: generate a new GenCode object that
321 # concatenates all the individual strings in the operands.
322 def __add__(self, other):
323 return GenCode(self.parser,
324 self.header_output + other.header_output,
325 self.decoder_output + other.decoder_output,
326 self.exec_output + other.exec_output,
327 self.decode_block + other.decode_block,
328 self.has_decode_default or other.has_decode_default)
329
330 # Prepend a string (typically a comment) to all the strings.
331 def prepend_all(self, pre):
332 self.header_output = pre + self.header_output
333 self.decoder_output = pre + self.decoder_output
334 self.decode_block = pre + self.decode_block
335 self.exec_output = pre + self.exec_output
336
337 # Wrap the decode block in a pair of strings (e.g., 'case foo:'
338 # and 'break;'). Used to build the big nested switch statement.
339 def wrap_decode_block(self, pre, post = ''):
340 self.decode_block = pre + indent(self.decode_block) + post
341
342#####################################################################
343#
344# Bitfield Operator Support
345#
346#####################################################################
347
348bitOp1ArgRE = re.compile(r'<\s*(\w+)\s*:\s*>')
349
350bitOpWordRE = re.compile(r'(?<![\w\.])([\w\.]+)<\s*(\w+)\s*:\s*(\w+)\s*>')
351bitOpExprRE = re.compile(r'\)<\s*(\w+)\s*:\s*(\w+)\s*>')
352
353def substBitOps(code):
354 # first convert single-bit selectors to two-index form
355 # i.e., <n> --> <n:n>
356 code = bitOp1ArgRE.sub(r'<\1:\1>', code)
357 # simple case: selector applied to ID (name)
358 # i.e., foo<a:b> --> bits(foo, a, b)
359 code = bitOpWordRE.sub(r'bits(\1, \2, \3)', code)
360 # if selector is applied to expression (ending in ')'),
361 # we need to search backward for matching '('
362 match = bitOpExprRE.search(code)
363 while match:
364 exprEnd = match.start()
365 here = exprEnd - 1
366 nestLevel = 1
367 while nestLevel > 0:
368 if code[here] == '(':
369 nestLevel -= 1
370 elif code[here] == ')':
371 nestLevel += 1
372 here -= 1
373 if here < 0:
374 sys.exit("Didn't find '('!")
375 exprStart = here+1
376 newExpr = r'bits(%s, %s, %s)' % (code[exprStart:exprEnd+1],
377 match.group(1), match.group(2))
378 code = code[:exprStart] + newExpr + code[match.end():]
379 match = bitOpExprRE.search(code)
380 return code
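
# End-to-end example of the rewrites above:
#   substBitOps('Rm<7:0> + (Ra + Rb)<3:2>')
# returns
#   'bits(Rm, 7, 0) + bits((Ra + Rb), 3, 2)'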
381
382
383#####################################################################
384#
385# Code Parser
386#
387# The remaining code is the support for automatically extracting
388# instruction characteristics from pseudocode.
389#
390#####################################################################
391
# Force the argument to be a list. Useful for flags, where a caller
# can specify a singleton flag or a list of flags. Also useful for
# converting tuples to lists so they can be modified.
395def makeList(arg):
396 if isinstance(arg, list):
397 return arg
398 elif isinstance(arg, tuple):
399 return list(arg)
400 elif not arg:
401 return []
402 else:
403 return [ arg ]
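
# A few illustrative results:
#   makeList('IsLoad')              -> ['IsLoad']
#   makeList(('IsLoad', 'IsStore')) -> ['IsLoad', 'IsStore']
#   makeList(None)                  -> []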
404
405class Operand(object):
    '''Base class for operand descriptors. An instance of this class
    (or actually of a class derived from this one) represents a specific
    operand for a code block (e.g., "Rc.sq" as a dest). Intermediate
    derived classes encapsulate the traits of a particular operand
    type (e.g., "32-bit integer register").'''
411
412 def buildReadCode(self, func = None):
413 subst_dict = {"name": self.base_name,
414 "func": func,
415 "reg_idx": self.reg_spec,
416 "ctype": self.ctype}
417 if hasattr(self, 'src_reg_idx'):
418 subst_dict['op_idx'] = self.src_reg_idx
419 code = self.read_code % subst_dict
420 return '%s = %s;\n' % (self.base_name, code)
421
422 def buildWriteCode(self, func = None):
423 subst_dict = {"name": self.base_name,
424 "func": func,
425 "reg_idx": self.reg_spec,
426 "ctype": self.ctype,
427 "final_val": self.base_name}
428 if hasattr(self, 'dest_reg_idx'):
429 subst_dict['op_idx'] = self.dest_reg_idx
430 code = self.write_code % subst_dict
431 return '''
432 {
433 %s final_val = %s;
434 %s;
435 if (traceData) { traceData->setData(final_val); }
436 }''' % (self.dflt_ctype, self.base_name, code)
437
438 def __init__(self, parser, full_name, ext, is_src, is_dest):
439 self.full_name = full_name
440 self.ext = ext
441 self.is_src = is_src
442 self.is_dest = is_dest
443 # The 'effective extension' (eff_ext) is either the actual
444 # extension, if one was explicitly provided, or the default.
445 if ext:
446 self.eff_ext = ext
447 elif hasattr(self, 'dflt_ext'):
448 self.eff_ext = self.dflt_ext
449
450 if hasattr(self, 'eff_ext'):
451 self.ctype = parser.operandTypeMap[self.eff_ext]
452
453 # Finalize additional fields (primarily code fields). This step
454 # is done separately since some of these fields may depend on the
455 # register index enumeration that hasn't been performed yet at the
456 # time of __init__(). The register index enumeration is affected
457 # by predicated register reads/writes. Hence, we forward the flags
458 # that indicate whether or not predication is in use.
459 def finalize(self, predRead, predWrite):
460 self.flags = self.getFlags()
461 self.constructor = self.makeConstructor(predRead, predWrite)
462 self.op_decl = self.makeDecl()
463
464 if self.is_src:
465 self.op_rd = self.makeRead(predRead)
466 self.op_src_decl = self.makeDecl()
467 else:
468 self.op_rd = ''
469 self.op_src_decl = ''
470
471 if self.is_dest:
472 self.op_wb = self.makeWrite(predWrite)
473 self.op_dest_decl = self.makeDecl()
474 else:
475 self.op_wb = ''
476 self.op_dest_decl = ''
477
478 def isMem(self):
479 return 0
480
481 def isReg(self):
482 return 0
483
484 def isFloatReg(self):
485 return 0
486
487 def isIntReg(self):
488 return 0
489
490 def isCCReg(self):
491 return 0
492
493 def isControlReg(self):
494 return 0
495
496 def isVecReg(self):
497 return 0
498
499 def isVecElem(self):
500 return 0
501
502 def isPCState(self):
503 return 0
504
505 def isPCPart(self):
506 return self.isPCState() and self.reg_spec
507
508 def hasReadPred(self):
509 return self.read_predicate != None
510
511 def hasWritePred(self):
512 return self.write_predicate != None
513
514 def getFlags(self):
515 # note the empty slice '[:]' gives us a copy of self.flags[0]
516 # instead of a reference to it
517 my_flags = self.flags[0][:]
518 if self.is_src:
519 my_flags += self.flags[1]
520 if self.is_dest:
521 my_flags += self.flags[2]
522 return my_flags
523
524 def makeDecl(self):
525 # Note that initializations in the declarations are solely
526 # to avoid 'uninitialized variable' errors from the compiler.
527 return self.ctype + ' ' + self.base_name + ' = 0;\n';
528
529
530src_reg_constructor = '\n\t_srcRegIdx[_numSrcRegs++] = RegId(%s, %s);'
531dst_reg_constructor = '\n\t_destRegIdx[_numDestRegs++] = RegId(%s, %s);'
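
# These templates expand into constructor statements such as
#   _srcRegIdx[_numSrcRegs++] = RegId(IntRegClass, RA);
# where the register class and index expression (here illustrative)
# come from the operand descriptor.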
532
533
534class IntRegOperand(Operand):
535 reg_class = 'IntRegClass'
536
537 def isReg(self):
538 return 1
539
540 def isIntReg(self):
541 return 1
542
543 def makeConstructor(self, predRead, predWrite):
544 c_src = ''
545 c_dest = ''
546
547 if self.is_src:
548 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
549 if self.hasReadPred():
550 c_src = '\n\tif (%s) {%s\n\t}' % \
551 (self.read_predicate, c_src)
552
553 if self.is_dest:
554 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
555 c_dest += '\n\t_numIntDestRegs++;'
556 if self.hasWritePred():
557 c_dest = '\n\tif (%s) {%s\n\t}' % \
558 (self.write_predicate, c_dest)
559
560 return c_src + c_dest
561
562 def makeRead(self, predRead):
563 if (self.ctype == 'float' or self.ctype == 'double'):
564 error('Attempt to read integer register as FP')
565 if self.read_code != None:
566 return self.buildReadCode('readIntRegOperand')
567
568 int_reg_val = ''
569 if predRead:
570 int_reg_val = 'xc->readIntRegOperand(this, _sourceIndex++)'
571 if self.hasReadPred():
572 int_reg_val = '(%s) ? %s : 0' % \
573 (self.read_predicate, int_reg_val)
574 else:
575 int_reg_val = 'xc->readIntRegOperand(this, %d)' % self.src_reg_idx
576
577 return '%s = %s;\n' % (self.base_name, int_reg_val)
578
579 def makeWrite(self, predWrite):
580 if (self.ctype == 'float' or self.ctype == 'double'):
581 error('Attempt to write integer register as FP')
582 if self.write_code != None:
583 return self.buildWriteCode('setIntRegOperand')
584
585 if predWrite:
586 wp = 'true'
587 if self.hasWritePred():
588 wp = self.write_predicate
589
590 wcond = 'if (%s)' % (wp)
591 windex = '_destIndex++'
592 else:
593 wcond = ''
594 windex = '%d' % self.dest_reg_idx
595
596 wb = '''
597 %s
598 {
599 %s final_val = %s;
600 xc->setIntRegOperand(this, %s, final_val);\n
601 if (traceData) { traceData->setData(final_val); }
602 }''' % (wcond, self.ctype, self.base_name, windex)
603
604 return wb
605
606class FloatRegOperand(Operand):
607 reg_class = 'FloatRegClass'
608
609 def isReg(self):
610 return 1
611
612 def isFloatReg(self):
613 return 1
614
615 def makeConstructor(self, predRead, predWrite):
616 c_src = ''
617 c_dest = ''
618
619 if self.is_src:
620 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
621
622 if self.is_dest:
623 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
624 c_dest += '\n\t_numFPDestRegs++;'
625
626 return c_src + c_dest
627
628 def makeRead(self, predRead):
629 bit_select = 0
630 if (self.ctype == 'float' or self.ctype == 'double'):
631 func = 'readFloatRegOperand'
632 else:
633 func = 'readFloatRegOperandBits'
634 if self.read_code != None:
635 return self.buildReadCode(func)
636
637 if predRead:
638 rindex = '_sourceIndex++'
639 else:
640 rindex = '%d' % self.src_reg_idx
641
642 return '%s = xc->%s(this, %s);\n' % \
643 (self.base_name, func, rindex)
644
645 def makeWrite(self, predWrite):
646 if (self.ctype == 'float' or self.ctype == 'double'):
647 func = 'setFloatRegOperand'
648 else:
649 func = 'setFloatRegOperandBits'
650 if self.write_code != None:
651 return self.buildWriteCode(func)
652
653 if predWrite:
654 wp = '_destIndex++'
655 else:
656 wp = '%d' % self.dest_reg_idx
657 wp = 'xc->%s(this, %s, final_val);' % (func, wp)
658
659 wb = '''
660 {
661 %s final_val = %s;
662 %s\n
663 if (traceData) { traceData->setData(final_val); }
664 }''' % (self.ctype, self.base_name, wp)
665 return wb
666
667class VecRegOperand(Operand):
668 reg_class = 'VecRegClass'
669
670 def __init__(self, parser, full_name, ext, is_src, is_dest):
671 Operand.__init__(self, parser, full_name, ext, is_src, is_dest)
672 self.elemExt = None
673 self.parser = parser
674
675 def isReg(self):
676 return 1
677
678 def isVecReg(self):
679 return 1
680
681 def makeDeclElem(self, elem_op):
682 (elem_name, elem_ext) = elem_op
683 (elem_spec, dflt_elem_ext, zeroing) = self.elems[elem_name]
684 if elem_ext:
685 ext = elem_ext
686 else:
687 ext = dflt_elem_ext
688 ctype = self.parser.operandTypeMap[ext]
689 return '\n\t%s %s = 0;' % (ctype, elem_name)
690
691 def makeDecl(self):
692 if not self.is_dest and self.is_src:
693 c_decl = '\t/* Vars for %s*/' % (self.base_name)
694 if hasattr(self, 'active_elems'):
695 if self.active_elems:
696 for elem in self.active_elems:
697 c_decl += self.makeDeclElem(elem)
698 return c_decl + '\t/* End vars for %s */\n' % (self.base_name)
699 else:
700 return ''
701
702 def makeConstructor(self, predRead, predWrite):
703 c_src = ''
704 c_dest = ''
705
706 numAccessNeeded = 1
707
708 if self.is_src:
709 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
710
711 if self.is_dest:
712 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
713 c_dest += '\n\t_numVecDestRegs++;'
714
715 return c_src + c_dest
716
    # Read the destination register so that it can be written back,
    # in whole or in part.
718 def makeReadWElem(self, elem_op):
719 (elem_name, elem_ext) = elem_op
720 (elem_spec, dflt_elem_ext, zeroing) = self.elems[elem_name]
721 if elem_ext:
722 ext = elem_ext
723 else:
724 ext = dflt_elem_ext
725 ctype = self.parser.operandTypeMap[ext]
726 c_read = '\t\t%s& %s = %s[%s];\n' % \
727 (ctype, elem_name, self.base_name, elem_spec)
728 return c_read
729
730 def makeReadW(self, predWrite):
731 func = 'getWritableVecRegOperand'
732 if self.read_code != None:
733 return self.buildReadCode(func)
734
735 if predWrite:
736 rindex = '_destIndex++'
737 else:
738 rindex = '%d' % self.dest_reg_idx
739
740 c_readw = '\t\t%s& tmp_d%s = xc->%s(this, %s);\n'\
741 % ('TheISA::VecRegContainer', rindex, func, rindex)
742 if self.elemExt:
743 c_readw += '\t\tauto %s = tmp_d%s.as<%s>();\n' % (self.base_name,
744 rindex, self.parser.operandTypeMap[self.elemExt])
745 if self.ext:
746 c_readw += '\t\tauto %s = tmp_d%s.as<%s>();\n' % (self.base_name,
747 rindex, self.parser.operandTypeMap[self.ext])
748 if hasattr(self, 'active_elems'):
749 if self.active_elems:
750 for elem in self.active_elems:
751 c_readw += self.makeReadWElem(elem)
752 return c_readw
753
754 # Normal source operand read
755 def makeReadElem(self, elem_op, name):
756 (elem_name, elem_ext) = elem_op
757 (elem_spec, dflt_elem_ext, zeroing) = self.elems[elem_name]
758
759 if elem_ext:
760 ext = elem_ext
761 else:
762 ext = dflt_elem_ext
763 ctype = self.parser.operandTypeMap[ext]
764 c_read = '\t\t%s = %s[%s];\n' % \
765 (elem_name, name, elem_spec)
766 return c_read
767
768 def makeRead(self, predRead):
769 func = 'readVecRegOperand'
770 if self.read_code != None:
771 return self.buildReadCode(func)
772
773 if predRead:
774 rindex = '_sourceIndex++'
775 else:
776 rindex = '%d' % self.src_reg_idx
777
778 name = self.base_name
779 if self.is_dest and self.is_src:
780 name += '_merger'
781
782 c_read = '\t\t%s& tmp_s%s = xc->%s(this, %s);\n' \
783 % ('const TheISA::VecRegContainer', rindex, func, rindex)
        # If the parser has detected that elements are being accessed,
        # create the appropriate view.
786 if self.elemExt:
787 c_read += '\t\tauto %s = tmp_s%s.as<%s>();\n' % \
788 (name, rindex, self.parser.operandTypeMap[self.elemExt])
789 if self.ext:
790 c_read += '\t\tauto %s = tmp_s%s.as<%s>();\n' % \
791 (name, rindex, self.parser.operandTypeMap[self.ext])
792 if hasattr(self, 'active_elems'):
793 if self.active_elems:
794 for elem in self.active_elems:
795 c_read += self.makeReadElem(elem, name)
796 return c_read
797
798 def makeWrite(self, predWrite):
799 func = 'setVecRegOperand'
800 if self.write_code != None:
801 return self.buildWriteCode(func)
802
803 wb = '''
804 if (traceData) {
805 panic("Vectors not supported yet in tracedata");
806 /*traceData->setData(final_val);*/
807 }
808 '''
809 return wb
810
811 def finalize(self, predRead, predWrite):
812 super(VecRegOperand, self).finalize(predRead, predWrite)
813 if self.is_dest:
814 self.op_rd = self.makeReadW(predWrite) + self.op_rd
815
816class VecElemOperand(Operand):
817 reg_class = 'VectorElemClass'
818
819 def isReg(self):
820 return 1
821
822 def isVecElem(self):
823 return 1
824
825 def makeDecl(self):
826 if self.is_dest and not self.is_src:
827 return '\n\t%s %s;' % (self.ctype, self.base_name)
828 else:
829 return ''
830
831 def makeConstructor(self, predRead, predWrite):
832 c_src = ''
833 c_dest = ''
834
        numAccessNeeded = 1
        # Note that regId is computed here but not currently used below.
        regId = 'RegId(%s, %s * numVecElemPerVecReg + elemIdx)' % \
                (self.reg_class, self.reg_spec)
838
839 if self.is_src:
840 c_src = ('\n\t_srcRegIdx[_numSrcRegs++] = RegId(%s, %s, %s);' %
841 (self.reg_class, self.reg_spec, self.elem_spec))
842
843 if self.is_dest:
844 c_dest = ('\n\t_destRegIdx[_numDestRegs++] = RegId(%s, %s, %s);' %
845 (self.reg_class, self.reg_spec, self.elem_spec))
846 c_dest += '\n\t_numVecElemDestRegs++;'
847 return c_src + c_dest
848
849 def makeRead(self, predRead):
850 c_read = ('\n/* Elem is kept inside the operand description */' +
851 '\n\tVecElem %s = xc->readVecElemOperand(this, %d);' %
852 (self.base_name, self.src_reg_idx))
853 return c_read
854
855 def makeWrite(self, predWrite):
856 c_write = ('\n/* Elem is kept inside the operand description */' +
857 '\n\txc->setVecElemOperand(this, %d, %s);' %
858 (self.dest_reg_idx, self.base_name))
859 return c_write
860
861class CCRegOperand(Operand):
862 reg_class = 'CCRegClass'
863
864 def isReg(self):
865 return 1
866
867 def isCCReg(self):
868 return 1
869
870 def makeConstructor(self, predRead, predWrite):
871 c_src = ''
872 c_dest = ''
873
874 if self.is_src:
875 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
876 if self.hasReadPred():
877 c_src = '\n\tif (%s) {%s\n\t}' % \
878 (self.read_predicate, c_src)
879
880 if self.is_dest:
881 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
882 c_dest += '\n\t_numCCDestRegs++;'
883 if self.hasWritePred():
884 c_dest = '\n\tif (%s) {%s\n\t}' % \
885 (self.write_predicate, c_dest)
886
887 return c_src + c_dest
888
889 def makeRead(self, predRead):
890 if (self.ctype == 'float' or self.ctype == 'double'):
891 error('Attempt to read condition-code register as FP')
892 if self.read_code != None:
893 return self.buildReadCode('readCCRegOperand')
894
895 int_reg_val = ''
896 if predRead:
897 int_reg_val = 'xc->readCCRegOperand(this, _sourceIndex++)'
898 if self.hasReadPred():
899 int_reg_val = '(%s) ? %s : 0' % \
900 (self.read_predicate, int_reg_val)
901 else:
902 int_reg_val = 'xc->readCCRegOperand(this, %d)' % self.src_reg_idx
903
904 return '%s = %s;\n' % (self.base_name, int_reg_val)
905
906 def makeWrite(self, predWrite):
907 if (self.ctype == 'float' or self.ctype == 'double'):
908 error('Attempt to write condition-code register as FP')
909 if self.write_code != None:
910 return self.buildWriteCode('setCCRegOperand')
911
912 if predWrite:
913 wp = 'true'
914 if self.hasWritePred():
915 wp = self.write_predicate
916
917 wcond = 'if (%s)' % (wp)
918 windex = '_destIndex++'
919 else:
920 wcond = ''
921 windex = '%d' % self.dest_reg_idx
922
923 wb = '''
924 %s
925 {
926 %s final_val = %s;
927 xc->setCCRegOperand(this, %s, final_val);\n
928 if (traceData) { traceData->setData(final_val); }
929 }''' % (wcond, self.ctype, self.base_name, windex)
930
931 return wb
932
933class ControlRegOperand(Operand):
934 reg_class = 'MiscRegClass'
935
936 def isReg(self):
937 return 1
938
939 def isControlReg(self):
940 return 1
941
942 def makeConstructor(self, predRead, predWrite):
943 c_src = ''
944 c_dest = ''
945
946 if self.is_src:
947 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
948
949 if self.is_dest:
950 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
951
952 return c_src + c_dest
953
954 def makeRead(self, predRead):
955 bit_select = 0
956 if (self.ctype == 'float' or self.ctype == 'double'):
957 error('Attempt to read control register as FP')
958 if self.read_code != None:
959 return self.buildReadCode('readMiscRegOperand')
960
961 if predRead:
962 rindex = '_sourceIndex++'
963 else:
964 rindex = '%d' % self.src_reg_idx
965
966 return '%s = xc->readMiscRegOperand(this, %s);\n' % \
967 (self.base_name, rindex)
968
969 def makeWrite(self, predWrite):
970 if (self.ctype == 'float' or self.ctype == 'double'):
971 error('Attempt to write control register as FP')
972 if self.write_code != None:
973 return self.buildWriteCode('setMiscRegOperand')
974
975 if predWrite:
976 windex = '_destIndex++'
977 else:
978 windex = '%d' % self.dest_reg_idx
979
980 wb = 'xc->setMiscRegOperand(this, %s, %s);\n' % \
981 (windex, self.base_name)
982 wb += 'if (traceData) { traceData->setData(%s); }' % \
983 self.base_name
984
985 return wb
986
987class MemOperand(Operand):
988 def isMem(self):
989 return 1
990
991 def makeConstructor(self, predRead, predWrite):
992 return ''
993
994 def makeDecl(self):
995 # Declare memory data variable.
996 return '%s %s;\n' % (self.ctype, self.base_name)
997
998 def makeRead(self, predRead):
999 if self.read_code != None:
1000 return self.buildReadCode()
1001 return ''
1002
1003 def makeWrite(self, predWrite):
1004 if self.write_code != None:
1005 return self.buildWriteCode()
1006 return ''
1007
1008class PCStateOperand(Operand):
1009 def makeConstructor(self, predRead, predWrite):
1010 return ''
1011
1012 def makeRead(self, predRead):
1013 if self.reg_spec:
1014 # A component of the PC state.
1015 return '%s = __parserAutoPCState.%s();\n' % \
1016 (self.base_name, self.reg_spec)
1017 else:
1018 # The whole PC state itself.
1019 return '%s = xc->pcState();\n' % self.base_name
1020
1021 def makeWrite(self, predWrite):
1022 if self.reg_spec:
1023 # A component of the PC state.
1024 return '__parserAutoPCState.%s(%s);\n' % \
1025 (self.reg_spec, self.base_name)
1026 else:
1027 # The whole PC state itself.
1028 return 'xc->pcState(%s);\n' % self.base_name
1029
1030 def makeDecl(self):
1031 ctype = 'TheISA::PCState'
1032 if self.isPCPart():
1033 ctype = self.ctype
1034 # Note that initializations in the declarations are solely
1035 # to avoid 'uninitialized variable' errors from the compiler.
1036 return '%s %s = 0;\n' % (ctype, self.base_name)
1037
1038 def isPCState(self):
1039 return 1
1040
1041class OperandList(object):
1042 '''Find all the operands in the given code block. Returns an operand
1043 descriptor list (instance of class OperandList).'''
1044 def __init__(self, parser, code):
1045 self.items = []
1046 self.bases = {}
1047 # delete strings and comments so we don't match on operands inside
1048 for regEx in (stringRE, commentRE):
1049 code = regEx.sub('', code)
1050 # search for operands
1051 next_pos = 0
1052 while 1:
1053 match = parser.operandsRE.search(code, next_pos)
1054 if not match:
1055 # no more matches: we're done
1056 break
1057 op = match.groups()
1058 # regexp groups are operand full name, base, and extension
1059 (op_full, op_base, op_ext) = op
            # If this is an elem operand, define or update the
            # corresponding vector operand.
1062 isElem = False
1063 if op_base in parser.elemToVector:
1064 isElem = True
1065 elem_op = (op_base, op_ext)
1066 op_base = parser.elemToVector[op_base]
1067 op_ext = '' # use the default one
1068 # if the token following the operand is an assignment, this is
1069 # a destination (LHS), else it's a source (RHS)
1070 is_dest = (assignRE.match(code, match.end()) != None)
1071 is_src = not is_dest
1072
1073 # see if we've already seen this one
1074 op_desc = self.find_base(op_base)
1075 if op_desc:
1076 if op_ext and op_ext != '' and op_desc.ext != op_ext:
1077 error ('Inconsistent extensions for operand %s: %s - %s' \
1078 % (op_base, op_desc.ext, op_ext))
1079 op_desc.is_src = op_desc.is_src or is_src
1080 op_desc.is_dest = op_desc.is_dest or is_dest
1081 if isElem:
1082 (elem_base, elem_ext) = elem_op
1083 found = False
1084 for ae in op_desc.active_elems:
1085 (ae_base, ae_ext) = ae
1086 if ae_base == elem_base:
1087 if ae_ext != elem_ext:
1088 error('Inconsistent extensions for elem'
1089 ' operand %s' % elem_base)
1090 else:
1091 found = True
1092 if not found:
1093 op_desc.active_elems.append(elem_op)
1094 else:
1095 # new operand: create new descriptor
1096 op_desc = parser.operandNameMap[op_base](parser,
1097 op_full, op_ext, is_src, is_dest)
1098 # if operand is a vector elem, add the corresponding vector
1099 # operand if not already done
1100 if isElem:
1101 op_desc.elemExt = elem_op[1]
1102 op_desc.active_elems = [elem_op]
1103 self.append(op_desc)
1104 # start next search after end of current match
1105 next_pos = match.end()
1106 self.sort()
1107 # enumerate source & dest register operands... used in building
1108 # constructor later
1109 self.numSrcRegs = 0
1110 self.numDestRegs = 0
1111 self.numFPDestRegs = 0
1112 self.numIntDestRegs = 0
1113 self.numVecDestRegs = 0
1114 self.numCCDestRegs = 0
1115 self.numMiscDestRegs = 0
1116 self.memOperand = None
1117
        # Flags to keep track of whether one or more operands are to be
        # read/written conditionally.
1120 self.predRead = False
1121 self.predWrite = False
1122
1123 for op_desc in self.items:
1124 if op_desc.isReg():
1125 if op_desc.is_src:
1126 op_desc.src_reg_idx = self.numSrcRegs
1127 self.numSrcRegs += 1
1128 if op_desc.is_dest:
1129 op_desc.dest_reg_idx = self.numDestRegs
1130 self.numDestRegs += 1
1131 if op_desc.isFloatReg():
1132 self.numFPDestRegs += 1
1133 elif op_desc.isIntReg():
1134 self.numIntDestRegs += 1
1135 elif op_desc.isVecReg():
1136 self.numVecDestRegs += 1
1137 elif op_desc.isCCReg():
1138 self.numCCDestRegs += 1
1139 elif op_desc.isControlReg():
1140 self.numMiscDestRegs += 1
1141 elif op_desc.isMem():
1142 if self.memOperand:
1143 error("Code block has more than one memory operand.")
1144 self.memOperand = op_desc
1145
1146 # Check if this operand has read/write predication. If true, then
1147 # the microop will dynamically index source/dest registers.
1148 self.predRead = self.predRead or op_desc.hasReadPred()
1149 self.predWrite = self.predWrite or op_desc.hasWritePred()
1150
1151 if parser.maxInstSrcRegs < self.numSrcRegs:
1152 parser.maxInstSrcRegs = self.numSrcRegs
1153 if parser.maxInstDestRegs < self.numDestRegs:
1154 parser.maxInstDestRegs = self.numDestRegs
1155 if parser.maxMiscDestRegs < self.numMiscDestRegs:
1156 parser.maxMiscDestRegs = self.numMiscDestRegs
1157
1158 # now make a final pass to finalize op_desc fields that may depend
1159 # on the register enumeration
1160 for op_desc in self.items:
1161 op_desc.finalize(self.predRead, self.predWrite)
1162
1163 def __len__(self):
1164 return len(self.items)
1165
1166 def __getitem__(self, index):
1167 return self.items[index]
1168
1169 def append(self, op_desc):
1170 self.items.append(op_desc)
1171 self.bases[op_desc.base_name] = op_desc
1172
1173 def find_base(self, base_name):
1174 # like self.bases[base_name], but returns None if not found
1175 # (rather than raising exception)
1176 return self.bases.get(base_name)
1177
1178 # internal helper function for concat[Some]Attr{Strings|Lists}
1179 def __internalConcatAttrs(self, attr_name, filter, result):
1180 for op_desc in self.items:
1181 if filter(op_desc):
1182 result += getattr(op_desc, attr_name)
1183 return result
1184
1185 # return a single string that is the concatenation of the (string)
1186 # values of the specified attribute for all operands
1187 def concatAttrStrings(self, attr_name):
1188 return self.__internalConcatAttrs(attr_name, lambda x: 1, '')
1189
1190 # like concatAttrStrings, but only include the values for the operands
1191 # for which the provided filter function returns true
1192 def concatSomeAttrStrings(self, filter, attr_name):
1193 return self.__internalConcatAttrs(attr_name, filter, '')
1194
1195 # return a single list that is the concatenation of the (list)
1196 # values of the specified attribute for all operands
1197 def concatAttrLists(self, attr_name):
1198 return self.__internalConcatAttrs(attr_name, lambda x: 1, [])
1199
1200 # like concatAttrLists, but only include the values for the operands
1201 # for which the provided filter function returns true
1202 def concatSomeAttrLists(self, filter, attr_name):
1203 return self.__internalConcatAttrs(attr_name, filter, [])
1204
1205 def sort(self):
1206 self.items.sort(lambda a, b: a.sort_pri - b.sort_pri)
1207
1208class SubOperandList(OperandList):
1209 '''Find all the operands in the given code block. Returns an operand
1210 descriptor list (instance of class OperandList).'''
1211 def __init__(self, parser, code, master_list):
1212 self.items = []
1213 self.bases = {}
1214 # delete strings and comments so we don't match on operands inside
1215 for regEx in (stringRE, commentRE):
1216 code = regEx.sub('', code)
1217 # search for operands
1218 next_pos = 0
1219 while 1:
1220 match = parser.operandsRE.search(code, next_pos)
1221 if not match:
1222 # no more matches: we're done
1223 break
1224 op = match.groups()
1225 # regexp groups are operand full name, base, and extension
1226 (op_full, op_base, op_ext) = op
            # If this is an elem operand, define or update the
            # corresponding vector operand.
1229 if op_base in parser.elemToVector:
1230 elem_op = op_base
1231 op_base = parser.elemToVector[elem_op]
1232 # find this op in the master list
1233 op_desc = master_list.find_base(op_base)
1234 if not op_desc:
1235 error('Found operand %s which is not in the master list!'
1236 % op_base)
1237 else:
1238 # See if we've already found this operand
1239 op_desc = self.find_base(op_base)
1240 if not op_desc:
1241 # if not, add a reference to it to this sub list
1242 self.append(master_list.bases[op_base])
1243
1244 # start next search after end of current match
1245 next_pos = match.end()
1246 self.sort()
1247 self.memOperand = None
1248 # Whether the whole PC needs to be read so parts of it can be accessed
1249 self.readPC = False
1250 # Whether the whole PC needs to be written after parts of it were
1251 # changed
1252 self.setPC = False
1253 # Whether this instruction manipulates the whole PC or parts of it.
1254 # Mixing the two is a bad idea and flagged as an error.
1255 self.pcPart = None
1256
        # Flags to keep track of whether one or more operands are to be
        # read/written conditionally.
1259 self.predRead = False
1260 self.predWrite = False
1261
1262 for op_desc in self.items:
1263 if op_desc.isPCPart():
1264 self.readPC = True
1265 if op_desc.is_dest:
1266 self.setPC = True
1267
1268 if op_desc.isPCState():
1269 if self.pcPart is not None:
1270 if self.pcPart and not op_desc.isPCPart() or \
1271 not self.pcPart and op_desc.isPCPart():
1272 error("Mixed whole and partial PC state operands.")
1273 self.pcPart = op_desc.isPCPart()
1274
1275 if op_desc.isMem():
1276 if self.memOperand:
1277 error("Code block has more than one memory operand.")
1278 self.memOperand = op_desc
1279
1280 # Check if this operand has read/write predication. If true, then
1281 # the microop will dynamically index source/dest registers.
1282 self.predRead = self.predRead or op_desc.hasReadPred()
1283 self.predWrite = self.predWrite or op_desc.hasWritePred()
1284
1285# Regular expression object to match C++ strings
1286stringRE = re.compile(r'"([^"\\]|\\.)*"')
1287
1288# Regular expression object to match C++ comments
1289# (used in findOperands())
1290commentRE = re.compile(r'(^)?[^\S\n]*/(?:\*(.*?)\*/[^\S\n]*|/[^\n]*)($)?',
1291 re.DOTALL | re.MULTILINE)
1292
1293# Regular expression object to match assignment statements (used in
1294# findOperands()). If the code immediately following the first
1295# appearance of the operand matches this regex, then the operand
1296# appears to be on the LHS of an assignment, and is thus a
# destination. Basically we're looking for an '=' that's not '=='.
1298# The heinous tangle before that handles the case where the operand
1299# has an array subscript.
1300assignRE = re.compile(r'(\[[^\]]+\])?\s*=(?!=)', re.MULTILINE)
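
# For example, in 'Rd = Ra + Rb;' the text following 'Rd' matches
# assignRE, so Rd is classified as a destination, while Ra and Rb
# (followed by '+' and ';') are sources. An operand followed by an
# array subscript and then '=', e.g. 'Foo[3] = ...' (a hypothetical
# operand name), is likewise treated as a destination.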
1301
1302def makeFlagConstructor(flag_list):
1303 if len(flag_list) == 0:
1304 return ''
1305 # filter out repeated flags
1306 flag_list.sort()
1307 i = 1
1308 while i < len(flag_list):
1309 if flag_list[i] == flag_list[i-1]:
1310 del flag_list[i]
1311 else:
1312 i += 1
1313 pre = '\n\tflags['
1314 post = '] = true;'
1315 code = pre + string.join(flag_list, post + pre) + post
1316 return code
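
# For example, makeFlagConstructor(['IsInteger', 'IsLoad', 'IsLoad'])
# returns (after removing the duplicate):
#   '\n\tflags[IsInteger] = true;\n\tflags[IsLoad] = true;'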
1317
1318# Assume all instruction flags are of the form 'IsFoo'
1319instFlagRE = re.compile(r'Is.*')
1320
1321# OpClass constants end in 'Op' except No_OpClass
1322opClassRE = re.compile(r'.*Op|No_OpClass')
1323
1324class InstObjParams(object):
1325 def __init__(self, parser, mnem, class_name, base_class = '',
1326 snippets = {}, opt_args = []):
1327 self.mnemonic = mnem
1328 self.class_name = class_name
1329 self.base_class = base_class
1330 if not isinstance(snippets, dict):
1331 snippets = {'code' : snippets}
1332 compositeCode = ' '.join(map(str, snippets.values()))
1333 self.snippets = snippets
1334
1335 self.operands = OperandList(parser, compositeCode)
1336
1337 # The header of the constructor declares the variables to be used
1338 # in the body of the constructor.
1339 header = ''
1340 header += '\n\t_numSrcRegs = 0;'
1341 header += '\n\t_numDestRegs = 0;'
1342 header += '\n\t_numFPDestRegs = 0;'
1343 header += '\n\t_numVecDestRegs = 0;'
1344 header += '\n\t_numVecElemDestRegs = 0;'
1345 header += '\n\t_numIntDestRegs = 0;'
1346 header += '\n\t_numCCDestRegs = 0;'
1347
1348 self.constructor = header + \
1349 self.operands.concatAttrStrings('constructor')
1350
1351 self.flags = self.operands.concatAttrLists('flags')
1352
1353 self.op_class = None
1354
1355 # Optional arguments are assumed to be either StaticInst flags
1356 # or an OpClass value. To avoid having to import a complete
1357 # list of these values to match against, we do it ad-hoc
1358 # with regexps.
1359 for oa in opt_args:
1360 if instFlagRE.match(oa):
1361 self.flags.append(oa)
1362 elif opClassRE.match(oa):
1363 self.op_class = oa
1364 else:
1365 error('InstObjParams: optional arg "%s" not recognized '
1366 'as StaticInst::Flag or OpClass.' % oa)
1367
1368 # Make a basic guess on the operand class if not set.
1369 # These are good enough for most cases.
1370 if not self.op_class:
1371 if 'IsStore' in self.flags:
1372 # The order matters here: 'IsFloating' and 'IsInteger' are
1373 # usually set in FP instructions because of the base
1374 # register
1375 if 'IsFloating' in self.flags:
1376 self.op_class = 'FloatMemWriteOp'
1377 else:
1378 self.op_class = 'MemWriteOp'
1379 elif 'IsLoad' in self.flags or 'IsPrefetch' in self.flags:
1380 # The order matters here: 'IsFloating' and 'IsInteger' are
1381 # usually set in FP instructions because of the base
1382 # register
1383 if 'IsFloating' in self.flags:
1384 self.op_class = 'FloatMemReadOp'
1385 else:
1386 self.op_class = 'MemReadOp'
1387 elif 'IsFloating' in self.flags:
1388 self.op_class = 'FloatAddOp'
1389 elif 'IsVector' in self.flags:
1390 self.op_class = 'SimdAddOp'
1391 else:
1392 self.op_class = 'IntAluOp'
1393
        # Add flag initialization to the constructor here to include
        # any flags added via opt_args.
1396 self.constructor += makeFlagConstructor(self.flags)
1397
1398 # if 'IsFloating' is set, add call to the FP enable check
1399 # function (which should be provided by isa_desc via a declare)
1400 # if 'IsVector' is set, add call to the Vector enable check
1401 # function (which should be provided by isa_desc via a declare)
1402 if 'IsFloating' in self.flags:
1403 self.fp_enable_check = 'fault = checkFpEnableFault(xc);'
1404 elif 'IsVector' in self.flags:
1405 self.fp_enable_check = 'fault = checkVecEnableFault(xc);'
1406 else:
1407 self.fp_enable_check = ''
1408
1409##############
1410# Stack: a simple stack object. Used for both formats (formatStack)
1411# and default cases (defaultStack). Simply wraps a list to give more
1412# stack-like syntax and enable initialization with an argument list
1413# (as opposed to an argument that's a list).
1414
1415class Stack(list):
1416 def __init__(self, *items):
1417 list.__init__(self, items)
1418
1419 def push(self, item):
1420 self.append(item);
1421
1422 def top(self):
1423 return self[-1]
1424
1425# Format a file include stack backtrace as a string
1426def backtrace(filename_stack):
1427 fmt = "In file included from %s:"
1428 return "\n".join([fmt % f for f in filename_stack])
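
# For example, backtrace(['main.isa', 'decoder.isa']) (file names are
# purely illustrative) returns:
#   In file included from main.isa:
#   In file included from decoder.isa: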
1429
1430
1431#######################
1432#
# LineTracker: track filenames along with line numbers in PLY lineno fields.
# PLY explicitly doesn't do anything with 'lineno' except propagate it.
# This class lets us tie filenames to line numbers with a minimum of
# disruption to existing increment code.
1437#
1438
1439class LineTracker(object):
1440 def __init__(self, filename, lineno=1):
1441 self.filename = filename
1442 self.lineno = lineno
1443
1444 # Overload '+=' for increments. We need to create a new object on
1445 # each update else every token ends up referencing the same
1446 # constantly incrementing instance.
1447 def __iadd__(self, incr):
1448 return LineTracker(self.filename, self.lineno + incr)
1449
1450 def __str__(self):
1451 return "%s:%d" % (self.filename, self.lineno)
1452
1453 # In case there are places where someone really expects a number
1454 def __int__(self):
1455 return self.lineno
1456
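# Illustrative usage: after 'lineno = LineTracker("foo.isa")' and
# 'lineno += 3' (which returns a fresh tracker), str(lineno) gives
# 'foo.isa:4' and int(lineno) gives 4. ('foo.isa' is a placeholder.)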
1457
1458#######################
1459#
1460# ISA Parser
1461# parses ISA DSL and emits C++ headers and source
1462#
1463
1464class ISAParser(Grammar):
1465 class CpuModel(object):
1466 def __init__(self, name, filename, includes, strings):
1467 self.name = name
1468 self.filename = filename
1469 self.includes = includes
1470 self.strings = strings
1471
1472 def __init__(self, output_dir):
1473 super(ISAParser, self).__init__()
1474 self.output_dir = output_dir
1475
1476 self.filename = None # for output file watermarking/scaremongering
1477
1478 self.cpuModels = [
1479 ISAParser.CpuModel('ExecContext',
1480 'generic_cpu_exec.cc',
1481 '#include "cpu/exec_context.hh"',
1482 { "CPU_exec_context" : "ExecContext" }),
1483 ]
1484
1485 # variable to hold templates
1486 self.templateMap = {}
1487
1488 # This dictionary maps format name strings to Format objects.
1489 self.formatMap = {}
1490
        # Track open files and, if applicable, how many chunks each has
        # been split into so far.
1493 self.files = {}
1494 self.splits = {}
1495
        # isa_name / namespace identifier from the namespace declaration.
        # Before the namespace declaration, None.
1498 self.isa_name = None
1499 self.namespace = None
1500
1501 # The format stack.
1502 self.formatStack = Stack(NoFormat())
1503
1504 # The default case stack.
1505 self.defaultStack = Stack(None)
1506
1507 # Stack that tracks current file and line number. Each
1508 # element is a tuple (filename, lineno) that records the
1509 # *current* filename and the line number in the *previous*
1510 # file where it was included.
1511 self.fileNameStack = Stack()
1512
1513 symbols = ('makeList', 're', 'string')
1514 self.exportContext = dict([(s, eval(s)) for s in symbols])
1515
1516 self.maxInstSrcRegs = 0
1517 self.maxInstDestRegs = 0
1518 self.maxMiscDestRegs = 0
1519
1520 def __getitem__(self, i): # Allow object (self) to be
1521 return getattr(self, i) # passed to %-substitutions
1522
1523 # Change the file suffix of a base filename:
1524 # (e.g.) decoder.cc -> decoder-g.cc.inc for 'global' outputs
1525 def suffixize(self, s, sec):
1526 extn = re.compile('(\.[^\.]+)$') # isolate extension
1527 if self.namespace:
1528 return extn.sub(r'-ns\1.inc', s) # insert some text on either side
1529 else:
1530 return extn.sub(r'-g\1.inc', s)
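
    # For example, before the namespace declaration suffixize('decoder.cc',
    # 'header') returns 'decoder-g.cc.inc'; once self.namespace is set it
    # returns 'decoder-ns.cc.inc' instead. (Note that 'sec' is unused.)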
1531
1532 # Get the file object for emitting code into the specified section
1533 # (header, decoder, exec, decode_block).
1534 def get_file(self, section):
1535 if section == 'decode_block':
1536 filename = 'decode-method.cc.inc'
1537 else:
1538 if section == 'header':
1539 file = 'decoder.hh'
1540 else:
1541 file = '%s.cc' % section
1542 filename = self.suffixize(file, section)
1543 try:
1544 return self.files[filename]
1545 except KeyError: pass
1546
1547 f = self.open(filename)
1548 self.files[filename] = f
1549
1550 # The splittable files are the ones with many independent
1551 # per-instruction functions - the decoder's instruction constructors
1552 # and the instruction execution (execute()) methods. These both have
1553 # the suffix -ns.cc.inc, meaning they are within the namespace part
1554 # of the ISA, contain object-emitting C++ source, and are included
1555 # into other top-level files. These are the files that need special
1556 # #define's to allow parts of them to be compiled separately. Rather
1557 # than splitting the emissions into separate files, the monolithic
1558 # output of the ISA parser is maintained, but the value (or lack
1559 # thereof) of the __SPLIT definition during C preprocessing will
1560 # select the different chunks. If no 'split' directives are used,
1561 # the cpp emissions have no effect.
1562 if re.search('-ns.cc.inc$', filename):
1563 print >>f, '#if !defined(__SPLIT) || (__SPLIT == 1)'
1564 self.splits[f] = 1
1565 # ensure requisite #include's
1566 elif filename == 'decoder-g.hh.inc':
1567 print >>f, '#include "base/bitfield.hh"'
1568
1569 return f
1570
1571 # Weave together the parts of the different output sections by
1572 # #include'ing them into some very short top-level .cc/.hh files.
1573 # These small files make it much clearer how this tool works, since
1574 # you directly see the chunks emitted as files that are #include'd.
1575 def write_top_level_files(self):
1576 dep = self.open('inc.d', bare=True)
1577
1578 # decoder header - everything depends on this
1579 file = 'decoder.hh'
1580 with self.open(file) as f:
1581 inc = []
1582
1583 fn = 'decoder-g.hh.inc'
1584 assert(fn in self.files)
1585 f.write('#include "%s"\n' % fn)
1586 inc.append(fn)
1587
1588 fn = 'decoder-ns.hh.inc'
1589 assert(fn in self.files)
1590 f.write('namespace %s {\n#include "%s"\n}\n'
1591 % (self.namespace, fn))
1592 inc.append(fn)
1593
1594 print >>dep, file+':', ' '.join(inc)
1595
1596 # decoder method - cannot be split
1597 file = 'decoder.cc'
1598 with self.open(file) as f:
1599 inc = []
1600
1601 fn = 'decoder-g.cc.inc'
1602 assert(fn in self.files)
1603 f.write('#include "%s"\n' % fn)
1604 inc.append(fn)
1605
1606 fn = 'decoder.hh'
1607 f.write('#include "%s"\n' % fn)
1608 inc.append(fn)
1609
1610 fn = 'decode-method.cc.inc'
1611 # is guaranteed to have been written for parse to complete
1612 f.write('#include "%s"\n' % fn)
1613 inc.append(fn)
1614
1615 print >>dep, file+':', ' '.join(inc)
1616
1617 extn = re.compile('(\.[^\.]+)$')
1618
1619 # instruction constructors
1620 splits = self.splits[self.get_file('decoder')]
1621 file_ = 'inst-constrs.cc'
1622 for i in range(1, splits+1):
1623 if splits > 1:
1624 file = extn.sub(r'-%d\1' % i, file_)
1625 else:
1626 file = file_
1627 with self.open(file) as f:
1628 inc = []
1629
1630 fn = 'decoder-g.cc.inc'
1631 assert(fn in self.files)
1632 f.write('#include "%s"\n' % fn)
1633 inc.append(fn)
1634
1635 fn = 'decoder.hh'
1636 f.write('#include "%s"\n' % fn)
1637 inc.append(fn)
1638
1639 fn = 'decoder-ns.cc.inc'
1640 assert(fn in self.files)
1641 print >>f, 'namespace %s {' % self.namespace
1642 if splits > 1:
1643 print >>f, '#define __SPLIT %u' % i
1644 print >>f, '#include "%s"' % fn
1645 print >>f, '}'
1646 inc.append(fn)
1647
1648 print >>dep, file+':', ' '.join(inc)
1649
1650 # instruction execution per-CPU model
1651 splits = self.splits[self.get_file('exec')]
1652 for cpu in self.cpuModels:
1653 for i in range(1, splits+1):
1654 if splits > 1:
1655 file = extn.sub(r'_%d\1' % i, cpu.filename)
1656 else:
1657 file = cpu.filename
1658 with self.open(file) as f:
1659 inc = []
1660
1661 fn = 'exec-g.cc.inc'
1662 assert(fn in self.files)
1663 f.write('#include "%s"\n' % fn)
1664 inc.append(fn)
1665
1666 f.write(cpu.includes+"\n")
1667
1668 fn = 'decoder.hh'
1669 f.write('#include "%s"\n' % fn)
1670 inc.append(fn)
1671
1672 fn = 'exec-ns.cc.inc'
1673 assert(fn in self.files)
1674 print >>f, 'namespace %s {' % self.namespace
1675 print >>f, '#define CPU_EXEC_CONTEXT %s' \
1676 % cpu.strings['CPU_exec_context']
1677 if splits > 1:
1678 print >>f, '#define __SPLIT %u' % i
1679 print >>f, '#include "%s"' % fn
1680 print >>f, '}'
1681 inc.append(fn)
1682
1683 inc.append("decoder.hh")
1684 print >>dep, file+':', ' '.join(inc)
1685
1686 # max_inst_regs.hh
1687 self.update('max_inst_regs.hh',
1688 '''namespace %(namespace)s {
1689 const int MaxInstSrcRegs = %(maxInstSrcRegs)d;
1690 const int MaxInstDestRegs = %(maxInstDestRegs)d;
1691 const int MaxMiscDestRegs = %(maxMiscDestRegs)d;\n}\n''' % self)
1692 print >>dep, 'max_inst_regs.hh:'
1693
1694 dep.close()
1695
1696
1697 scaremonger_template ='''// DO NOT EDIT
1698// This file was automatically generated from an ISA description:
1699// %(filename)s
1700
1701''';
1702
1703 #####################################################################
1704 #
1705 # Lexer
1706 #
1707 # The PLY lexer module takes two things as input:
1708 # - A list of token names (the string list 'tokens')
1709 # - A regular expression describing a match for each token. The
1710 # regexp for token FOO can be provided in two ways:
1711 # - as a string variable named t_FOO
1712 # - as the doc string for a function named t_FOO. In this case,
1713 # the function is also executed, allowing an action to be
1714 # associated with each token match.
1715 #
1716 #####################################################################
1717
1718 # Reserved words. These are listed separately as they are matched
1719 # using the same regexp as generic IDs, but distinguished in the
1720 # t_ID() function. The PLY documentation suggests this approach.
1721 reserved = (
1722 'BITFIELD', 'DECODE', 'DECODER', 'DEFAULT', 'DEF', 'EXEC', 'FORMAT',
1723 'HEADER', 'LET', 'NAMESPACE', 'OPERAND_TYPES', 'OPERANDS',
1724 'OUTPUT', 'SIGNED', 'SPLIT', 'TEMPLATE'
1725 )
1726
1727 # List of tokens. The lex module requires this.
1728 tokens = reserved + (
1729 # identifier
1730 'ID',
1731
1732 # integer literal
1733 'INTLIT',
1734
1735 # string literal
1736 'STRLIT',
1737
1738 # code literal
1739 'CODELIT',
1740
        # ( ) [ ] { } < > = , ; . : :: *
1742 'LPAREN', 'RPAREN',
1743 'LBRACKET', 'RBRACKET',
1744 'LBRACE', 'RBRACE',
1745 'LESS', 'GREATER', 'EQUALS',
1746 'COMMA', 'SEMI', 'DOT', 'COLON', 'DBLCOLON',
1747 'ASTERISK',
1748
1749 # C preprocessor directives
1750 'CPPDIRECTIVE'
1751
        # The following are matched but never returned. They are
        # commented out here to suppress a PLY warning.
1754 # newfile directive
1755 # 'NEWFILE',
1756
1757 # endfile directive
1758 # 'ENDFILE'
1759 )
1760
1761 # Regular expressions for token matching
1762 t_LPAREN = r'\('
1763 t_RPAREN = r'\)'
1764 t_LBRACKET = r'\['
1765 t_RBRACKET = r'\]'
1766 t_LBRACE = r'\{'
1767 t_RBRACE = r'\}'
1768 t_LESS = r'\<'
1769 t_GREATER = r'\>'
1770 t_EQUALS = r'='
1771 t_COMMA = r','
1772 t_SEMI = r';'
1773 t_DOT = r'\.'
1774 t_COLON = r':'
1775 t_DBLCOLON = r'::'
1776 t_ASTERISK = r'\*'
1777
1778 # Identifiers and reserved words
1779 reserved_map = { }
1780 for r in reserved:
1781 reserved_map[r.lower()] = r
1782
1783 def t_ID(self, t):
1784 r'[A-Za-z_]\w*'
1785 t.type = self.reserved_map.get(t.value, 'ID')
1786 return t
1787
1788 # Integer literal
1789 def t_INTLIT(self, t):
        r'-?(0x[\da-fA-F]+|\d+)'
1791 try:
1792 t.value = int(t.value,0)
1793 except ValueError:
1794 error(t.lexer.lineno, 'Integer value "%s" too large' % t.value)
1795 t.value = 0
1796 return t
1797
1798 # String literal. Note that these use only single quotes, and
1799 # can span multiple lines.
1800 def t_STRLIT(self, t):
1801 r"(?m)'([^'])+'"
1802 # strip off quotes
1803 t.value = t.value[1:-1]
1804 t.lexer.lineno += t.value.count('\n')
1805 return t
1806
1807
1808 # "Code literal"... like a string literal, but delimiters are
1809 # '{{' and '}}' so they get formatted nicely under emacs c-mode
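    # As an illustrative (hypothetical) example, the input
    #     {{ Ra = Rb + Rc; }}
    # would be returned as a single CODELIT token whose value is the text
    # between the braces, ' Ra = Rb + Rc; '.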
1810 def t_CODELIT(self, t):
1811 r"(?m)\{\{([^\}]|}(?!\}))+\}\}"
1812 # strip off {{ & }}
1813 t.value = t.value[2:-2]
1814 t.lexer.lineno += t.value.count('\n')
1815 return t
1816
1817 def t_CPPDIRECTIVE(self, t):
1818 r'^\#[^\#].*\n'
1819 t.lexer.lineno += t.value.count('\n')
1820 return t
1821
1822 def t_NEWFILE(self, t):
1823 r'^\#\#newfile\s+"[^"]*"\n'
1824 self.fileNameStack.push(t.lexer.lineno)
1825 t.lexer.lineno = LineTracker(t.value[11:-2])
1826
1827 def t_ENDFILE(self, t):
1828 r'^\#\#endfile\n'
1829 t.lexer.lineno = self.fileNameStack.pop()
1830
1831 #
1832 # The functions t_NEWLINE, t_ignore, and t_error are
1833 # special for the lex module.
1834 #
1835
1836 # Newlines
1837 def t_NEWLINE(self, t):
1838 r'\n+'
1839 t.lexer.lineno += t.value.count('\n')
1840
1841 # Comments
1842 def t_comment(self, t):
1843 r'//.*'
1844
1845 # Completely ignored characters
1846 t_ignore = ' \t\x0c'
1847
1848 # Error handler
1849 def t_error(self, t):
1850 error(t.lexer.lineno, "illegal character '%s'" % t.value[0])
1851 t.skip(1)
1852
1853 #####################################################################
1854 #
1855 # Parser
1856 #
1857 # Every function whose name starts with 'p_' defines a grammar
1858 # rule. The rule is encoded in the function's doc string, while
1859 # the function body provides the action taken when the rule is
1860 # matched. The argument to each function is a list of the values
1861 # of the rule's symbols: t[0] for the LHS, and t[1..n] for the
1862 # symbols on the RHS. For tokens, the value is copied from the
1863 # t.value attribute provided by the lexer. For non-terminals, the
1864 # value is assigned by the producing rule; i.e., the job of the
1865 # grammar rule function is to set the value for the non-terminal
1866 # on the LHS (by assigning to t[0]).
1867 #####################################################################
1868
1869 # The LHS of the first grammar rule is used as the start symbol
1870 # (in this case, 'specification'). Note that this rule enforces
1871 # that there will be exactly one namespace declaration, with 0 or
1872 # more global defs/decls before and after it. The defs & decls
1873 # before the namespace decl will be outside the namespace; those
1874 # after will be inside. The decoder function is always inside the
1875 # namespace.
1876 def p_specification(self, t):
1877 'specification : opt_defs_and_outputs top_level_decode_block'
1878
1879 for f in self.splits.iterkeys():
1880 f.write('\n#endif\n')
1881
1882 for f in self.files.itervalues(): # close ALL the files;
1883 f.close() # not doing so can cause compilation to fail
1884
1885 self.write_top_level_files()
1886
1887 t[0] = True
1888
    # 'opt_defs_and_outputs' is a possibly empty sequence of def and/or
    # output statements. Its productions do the hard work of eventually
    # instantiating GenCode objects, which are generally emitted (written
    # to disk) as soon as possible, except for the decode_block, which
    # has to be accumulated into one large function of nested switch/case
    # blocks.
1894 def p_opt_defs_and_outputs_0(self, t):
1895 'opt_defs_and_outputs : empty'
1896
1897 def p_opt_defs_and_outputs_1(self, t):
1898 'opt_defs_and_outputs : defs_and_outputs'
1899
1900 def p_defs_and_outputs_0(self, t):
1901 'defs_and_outputs : def_or_output'
1902
1903 def p_defs_and_outputs_1(self, t):
1904 'defs_and_outputs : defs_and_outputs def_or_output'
1905
1906 # The list of possible definition/output statements.
1907 # They are all processed as they are seen.
1908 def p_def_or_output(self, t):
1909 '''def_or_output : name_decl
1910 | def_format
1911 | def_bitfield
1912 | def_bitfield_struct
1913 | def_template
1914 | def_operand_types
1915 | def_operands
1916 | output
1917 | global_let
1918 | split'''
1919
    # Utility function used by both ways of requesting a split: the
    # explicit 'split' keyword and the split() function inside
    # "let {{ }};" blocks.
1922 def split(self, sec, write=False):
        assert sec != 'header', "header cannot be split"
1924
1925 f = self.get_file(sec)
1926 self.splits[f] += 1
1927 s = '\n#endif\n#if __SPLIT == %u\n' % self.splits[f]
1928 if write:
1929 f.write(s)
1930 else:
1931 return s
1932
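    # For example (hypothetical counts): each call to split('decoder')
    # increments the per-file counter and produces a marker such as
    #     \n#endif\n#if __SPLIT == 2\n
    # which is either written to the file directly or returned for the
    # caller to splice into its output.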
1933 # split output file to reduce compilation time
1934 def p_split(self, t):
1935 'split : SPLIT output_type SEMI'
        assert self.isa_name, "'split' not allowed before namespace decl"
1937
1938 self.split(t[2], True)
1939
1940 def p_output_type(self, t):
1941 '''output_type : DECODER
1942 | HEADER
1943 | EXEC'''
1944 t[0] = t[1]
1945
1946 # ISA name declaration looks like "namespace <foo>;"
1947 def p_name_decl(self, t):
1948 'name_decl : NAMESPACE ID SEMI'
        assert self.isa_name is None, "Only one namespace decl permitted"
1950 self.isa_name = t[2]
1951 self.namespace = t[2] + 'Inst'
1952
1953 # Output blocks 'output <foo> {{...}}' (C++ code blocks) are copied
1954 # directly to the appropriate output section.
1955
1956 # Massage output block by substituting in template definitions and
1957 # bit operators. We handle '%'s embedded in the string that don't
1958 # indicate template substitutions (or CPU-specific symbols, which
1959 # get handled in GenCode) by doubling them first so that the
1960 # format operation will reduce them back to single '%'s.
1961 def process_output(self, s):
1962 s = self.protectNonSubstPercents(s)
1963 # protects cpu-specific symbols too
1964 s = self.protectCpuSymbols(s)
1965 return substBitOps(s % self.templateMap)
1966
1967 def p_output(self, t):
1968 'output : OUTPUT output_type CODELIT SEMI'
1969 kwargs = { t[2]+'_output' : self.process_output(t[3]) }
1970 GenCode(self, **kwargs).emit()
1971
1972 # global let blocks 'let {{...}}' (Python code blocks) are
1973 # executed directly when seen. Note that these execute in a
1974 # special variable context 'exportContext' to prevent the code
1975 # from polluting this script's namespace.
1976 def p_global_let(self, t):
1977 'global_let : LET CODELIT SEMI'
1978 def _split(sec):
1979 return self.split(sec)
1980 self.updateExportContext()
1981 self.exportContext["header_output"] = ''
1982 self.exportContext["decoder_output"] = ''
1983 self.exportContext["exec_output"] = ''
1984 self.exportContext["decode_block"] = ''
1985 self.exportContext["split"] = _split
1986 split_setup = '''
1987def wrap(func):
1988 def split(sec):
1989 globals()[sec + '_output'] += func(sec)
1990 return split
1991split = wrap(split)
1992del wrap
1993'''
1994 # This tricky setup (immediately above) allows us to just write
1995 # (e.g.) "split('exec')" in the Python code and the split #ifdef's
1996 # will automatically be added to the exec_output variable. The inner
1997 # Python execution environment doesn't know about the split points,
1998 # so we carefully inject and wrap a closure that can retrieve the
1999 # next split's #define from the parser and add it to the current
2000 # emission-in-progress.
2001 try:
2002 exec split_setup+fixPythonIndentation(t[2]) in self.exportContext
2003 except Exception, exc:
2004 if debug:
2005 raise
2006 error(t.lineno(1), 'In global let block: %s' % exc)
2007 GenCode(self,
2008 header_output=self.exportContext["header_output"],
2009 decoder_output=self.exportContext["decoder_output"],
2010 exec_output=self.exportContext["exec_output"],
2011 decode_block=self.exportContext["decode_block"]).emit()
2012
2013 # Define the mapping from operand type extensions to C++ types and
2014 # bit widths (stored in operandTypeMap).
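    # A hypothetical example of what such a definition might look like in
    # an ISA description (the extensions and types here are illustrative):
    #     def operand_types {{
    #         'sb' : 'int8_t',
    #         'uw' : 'uint32_t',
    #         'sq' : 'int64_t'
    #     }};
    # The code literal is eval'd as a Python dict below.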
2015 def p_def_operand_types(self, t):
2016 'def_operand_types : DEF OPERAND_TYPES CODELIT SEMI'
2017 try:
2018 self.operandTypeMap = eval('{' + t[3] + '}')
2019 except Exception, exc:
2020 if debug:
2021 raise
2022 error(t.lineno(1),
2023 'In def operand_types: %s' % exc)
2024
2025 # Define the mapping from operand names to operand classes and
2026 # other traits. Stored in operandNameMap.
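    # A hypothetical sketch of an operand definition (names and attributes
    # are illustrative; see buildOperandNameMap() for the tuple layout:
    # base class, default ext, reg spec, flags, sort priority, plus
    # optional read/write code and predicates):
    #     def operands {{
    #         'Ra': ('IntReg', 'uq', 'RA', 'IsInteger', 1),
    #         'Mem': ('Mem', 'uq', None, ('IsMemRef', 'IsLoad', 'IsStore'), 4)
    #     }};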
2027 def p_def_operands(self, t):
2028 'def_operands : DEF OPERANDS CODELIT SEMI'
2029 if not hasattr(self, 'operandTypeMap'):
2030 error(t.lineno(1),
2031 'error: operand types must be defined before operands')
2032 try:
2033 user_dict = eval('{' + t[3] + '}', self.exportContext)
2034 except Exception, exc:
2035 if debug:
2036 raise
2037 error(t.lineno(1), 'In def operands: %s' % exc)
2038 self.buildOperandNameMap(user_dict, t.lexer.lineno)
2039
    # A bitfield definition looks like:
    # 'def [signed] bitfield <ID> <first:last>'
    # This generates a preprocessor macro in the output file.
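    # For instance (hypothetical field name), 'def bitfield OPCODE <31:26>;'
    # would emit roughly:
    #     #undef OPCODE
    #     #define OPCODE  bits(machInst, 31, 26)
    # and the signed variant would wrap that expression in sext<6>(...).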
2043 def p_def_bitfield_0(self, t):
2044 'def_bitfield : DEF opt_signed BITFIELD ID LESS INTLIT COLON INTLIT GREATER SEMI'
2045 expr = 'bits(machInst, %2d, %2d)' % (t[6], t[8])
2046 if (t[2] == 'signed'):
2047 expr = 'sext<%d>(%s)' % (t[6] - t[8] + 1, expr)
2048 hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
2049 GenCode(self, header_output=hash_define).emit()
2050
    # alternate form for single bit: 'def [signed] bitfield <ID> <bit>'
2052 def p_def_bitfield_1(self, t):
2053 'def_bitfield : DEF opt_signed BITFIELD ID LESS INTLIT GREATER SEMI'
2054 expr = 'bits(machInst, %2d, %2d)' % (t[6], t[6])
2055 if (t[2] == 'signed'):
2056 expr = 'sext<%d>(%s)' % (1, expr)
2057 hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
2058 GenCode(self, header_output=hash_define).emit()
2059
    # alternate form for structure member: 'def bitfield <ID> <member[.member]*>'
2061 def p_def_bitfield_struct(self, t):
2062 'def_bitfield_struct : DEF opt_signed BITFIELD ID id_with_dot SEMI'
2063 if (t[2] != ''):
2064 error(t.lineno(1),
2065 'error: structure bitfields are always unsigned.')
2066 expr = 'machInst.%s' % t[5]
2067 hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
2068 GenCode(self, header_output=hash_define).emit()
2069
2070 def p_id_with_dot_0(self, t):
2071 'id_with_dot : ID'
2072 t[0] = t[1]
2073
2074 def p_id_with_dot_1(self, t):
2075 'id_with_dot : ID DOT id_with_dot'
2076 t[0] = t[1] + t[2] + t[3]
2077
2078 def p_opt_signed_0(self, t):
2079 'opt_signed : SIGNED'
2080 t[0] = t[1]
2081
2082 def p_opt_signed_1(self, t):
2083 'opt_signed : empty'
2084 t[0] = ''
2085
2086 def p_def_template(self, t):
2087 'def_template : DEF TEMPLATE ID CODELIT SEMI'
2088 if t[3] in self.templateMap:
2089 print "warning: template %s already defined" % t[3]
2090 self.templateMap[t[3]] = Template(self, t[4])
2091
2092 # An instruction format definition looks like
2093 # "def format <fmt>(<params>) {{...}};"
2094 def p_def_format(self, t):
2095 'def_format : DEF FORMAT ID LPAREN param_list RPAREN CODELIT SEMI'
2096 (id, params, code) = (t[3], t[5], t[7])
2097 self.defFormat(id, params, code, t.lexer.lineno)
2098
2099 # The formal parameter list for an instruction format is a
2100 # possibly empty list of comma-separated parameters. Positional
2101 # (standard, non-keyword) parameters must come first, followed by
2102 # keyword parameters, followed by a '*foo' parameter that gets
2103 # excess positional arguments (as in Python). Each of these three
2104 # parameter categories is optional.
2105 #
2106 # Note that we do not support the '**foo' parameter for collecting
2107 # otherwise undefined keyword args. Otherwise the parameter list
2108 # is (I believe) identical to what is supported in Python.
2109 #
    # The param list is produced as a flat list of strings: bare names
    # for positional params, 'name = default' strings (with the default
    # rendered via repr()) for keyword params, and a '*name' entry for
    # the excess-args param.
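    # A hypothetical format definition using all three categories (the
    # names are made up):
    #     def format Branch(code, opt = 0, *opt_flags) {{ ... }};
    # would yield the param list ['code', 'opt = 0', '*opt_flags'].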
2113 def p_param_list_0(self, t):
2114 'param_list : positional_param_list COMMA nonpositional_param_list'
2115 t[0] = t[1] + t[3]
2116
2117 def p_param_list_1(self, t):
2118 '''param_list : positional_param_list
2119 | nonpositional_param_list'''
2120 t[0] = t[1]
2121
2122 def p_positional_param_list_0(self, t):
2123 'positional_param_list : empty'
2124 t[0] = []
2125
2126 def p_positional_param_list_1(self, t):
2127 'positional_param_list : ID'
2128 t[0] = [t[1]]
2129
2130 def p_positional_param_list_2(self, t):
2131 'positional_param_list : positional_param_list COMMA ID'
2132 t[0] = t[1] + [t[3]]
2133
2134 def p_nonpositional_param_list_0(self, t):
2135 'nonpositional_param_list : keyword_param_list COMMA excess_args_param'
2136 t[0] = t[1] + t[3]
2137
2138 def p_nonpositional_param_list_1(self, t):
2139 '''nonpositional_param_list : keyword_param_list
2140 | excess_args_param'''
2141 t[0] = t[1]
2142
2143 def p_keyword_param_list_0(self, t):
2144 'keyword_param_list : keyword_param'
2145 t[0] = [t[1]]
2146
2147 def p_keyword_param_list_1(self, t):
2148 'keyword_param_list : keyword_param_list COMMA keyword_param'
2149 t[0] = t[1] + [t[3]]
2150
2151 def p_keyword_param(self, t):
2152 'keyword_param : ID EQUALS expr'
2153 t[0] = t[1] + ' = ' + t[3].__repr__()
2154
2155 def p_excess_args_param(self, t):
2156 'excess_args_param : ASTERISK ID'
2157 # Just concatenate them: '*ID'. Wrap in list to be consistent
2158 # with positional_param_list and keyword_param_list.
2159 t[0] = [t[1] + t[2]]
2160
2161 # End of format definition-related rules.
2162 ##############
2163
2164 #
2165 # A decode block looks like:
2166 # decode <field1> [, <field2>]* [default <inst>] { ... }
2167 #
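    # A small hypothetical example (field, format, and mnemonic names are
    # made up):
    #     decode OPCODE default Unknown::unknown() {
    #         0x00: Nop::nop({{ ; }});
    #         0x01, 0x02: Integer::add({{ Rc = Ra + Rb; }});
    #     }
    # Each decode block becomes a switch on the given field, with nested
    # blocks producing nested switches.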
2168 def p_top_level_decode_block(self, t):
2169 'top_level_decode_block : decode_block'
2170 codeObj = t[1]
2171 codeObj.wrap_decode_block('''
2172StaticInstPtr
2173%(isa_name)s::Decoder::decodeInst(%(isa_name)s::ExtMachInst machInst)
2174{
2175 using namespace %(namespace)s;
2176''' % self, '}')
2177
2178 codeObj.emit()
2179
2180 def p_decode_block(self, t):
2181 'decode_block : DECODE ID opt_default LBRACE decode_stmt_list RBRACE'
2182 default_defaults = self.defaultStack.pop()
2183 codeObj = t[5]
2184 # use the "default defaults" only if there was no explicit
2185 # default statement in decode_stmt_list
2186 if not codeObj.has_decode_default:
2187 codeObj += default_defaults
2188 codeObj.wrap_decode_block('switch (%s) {\n' % t[2], '}\n')
2189 t[0] = codeObj
2190
2191 # The opt_default statement serves only to push the "default
2192 # defaults" onto defaultStack. This value will be used by nested
2193 # decode blocks, and used and popped off when the current
2194 # decode_block is processed (in p_decode_block() above).
2195 def p_opt_default_0(self, t):
2196 'opt_default : empty'
2197 # no default specified: reuse the one currently at the top of
2198 # the stack
2199 self.defaultStack.push(self.defaultStack.top())
2200 # no meaningful value returned
2201 t[0] = None
2202
2203 def p_opt_default_1(self, t):
2204 'opt_default : DEFAULT inst'
2205 # push the new default
2206 codeObj = t[2]
2207 codeObj.wrap_decode_block('\ndefault:\n', 'break;\n')
2208 self.defaultStack.push(codeObj)
2209 # no meaningful value returned
2210 t[0] = None
2211
2212 def p_decode_stmt_list_0(self, t):
2213 'decode_stmt_list : decode_stmt'
2214 t[0] = t[1]
2215
2216 def p_decode_stmt_list_1(self, t):
2217 'decode_stmt_list : decode_stmt decode_stmt_list'
2218 if (t[1].has_decode_default and t[2].has_decode_default):
2219 error(t.lineno(1), 'Two default cases in decode block')
2220 t[0] = t[1] + t[2]
2221
2222 #
2223 # Decode statement rules
2224 #
    # There are four types of statements allowed in a decode block:
    # 1. Format blocks 'format <foo> { ... }'
    # 2. Nested decode blocks
    # 3. Instruction definitions
    # 4. C preprocessor directives
2230
2231
2232 # Preprocessor directives found in a decode statement list are
2233 # passed through to the output, replicated to all of the output
2234 # code streams. This works well for ifdefs, so we can ifdef out
2235 # both the declarations and the decode cases generated by an
2236 # instruction definition. Handling them as part of the grammar
2237 # makes it easy to keep them in the right place with respect to
2238 # the code generated by the other statements.
2239 def p_decode_stmt_cpp(self, t):
2240 'decode_stmt : CPPDIRECTIVE'
2241 t[0] = GenCode(self, t[1], t[1], t[1], t[1])
2242
2243 # A format block 'format <foo> { ... }' sets the default
2244 # instruction format used to handle instruction definitions inside
2245 # the block. This format can be overridden by using an explicit
2246 # format on the instruction definition or with a nested format
2247 # block.
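    # For example (hypothetical format name), instruction definitions inside
    #     format Integer {
    #         0x01: add({{ Rc = Ra + Rb; }});
    #     }
    # are handled by the 'Integer' format without naming it on each line.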
2248 def p_decode_stmt_format(self, t):
2249 'decode_stmt : FORMAT push_format_id LBRACE decode_stmt_list RBRACE'
2250 # The format will be pushed on the stack when 'push_format_id'
2251 # is processed (see below). Once the parser has recognized
        # the full production (through the right brace), we're done
2253 # with the format, so now we can pop it.
2254 self.formatStack.pop()
2255 t[0] = t[4]
2256
2257 # This rule exists so we can set the current format (& push the
2258 # stack) when we recognize the format name part of the format
2259 # block.
2260 def p_push_format_id(self, t):
2261 'push_format_id : ID'
2262 try:
2263 self.formatStack.push(self.formatMap[t[1]])
2264 t[0] = ('', '// format %s' % t[1])
2265 except KeyError:
2266 error(t.lineno(1), 'instruction format "%s" not defined.' % t[1])
2267
2268 # Nested decode block: if the value of the current field matches
2269 # the specified constant(s), do a nested decode on some other field.
2270 def p_decode_stmt_decode(self, t):
2271 'decode_stmt : case_list COLON decode_block'
2272 case_list = t[1]
2273 codeObj = t[3]
2274 # just wrap the decoding code from the block as a case in the
2275 # outer switch statement.
2276 codeObj.wrap_decode_block('\n%s\n' % ''.join(case_list))
2277 codeObj.has_decode_default = (case_list == ['default:'])
2278 t[0] = codeObj
2279
2280 # Instruction definition (finally!).
2281 def p_decode_stmt_inst(self, t):
2282 'decode_stmt : case_list COLON inst SEMI'
2283 case_list = t[1]
2284 codeObj = t[3]
2285 codeObj.wrap_decode_block('\n%s' % ''.join(case_list), 'break;\n')
2286 codeObj.has_decode_default = (case_list == ['default:'])
2287 t[0] = codeObj
2288
2289 # The constant list for a decode case label must be non-empty, and must
2290 # either be the keyword 'default', or made up of one or more
2291 # comma-separated integer literals or strings which evaluate to
2292 # constants when compiled as C++.
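    # Hypothetical examples of how case labels are rendered by the helpers
    # below:
    #     0x1f, 0x2f:    ->  'case 0x1f: ' 'case 0x2f: '
    #     0x100000000:   ->  'case ULL(0x100000000): '
    #     'MY_CONSTANT': ->  'case MY_CONSTANT: '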
2293 def p_case_list_0(self, t):
2294 'case_list : DEFAULT'
2295 t[0] = ['default:']
2296
2297 def prep_int_lit_case_label(self, lit):
2298 if lit >= 2**32:
2299 return 'case ULL(%#x): ' % lit
2300 else:
2301 return 'case %#x: ' % lit
2302
2303 def prep_str_lit_case_label(self, lit):
2304 return 'case %s: ' % lit
2305
2306 def p_case_list_1(self, t):
2307 'case_list : INTLIT'
2308 t[0] = [self.prep_int_lit_case_label(t[1])]
2309
2310 def p_case_list_2(self, t):
2311 'case_list : STRLIT'
2312 t[0] = [self.prep_str_lit_case_label(t[1])]
2313
2314 def p_case_list_3(self, t):
2315 'case_list : case_list COMMA INTLIT'
2316 t[0] = t[1]
2317 t[0].append(self.prep_int_lit_case_label(t[3]))
2318
2319 def p_case_list_4(self, t):
2320 'case_list : case_list COMMA STRLIT'
2321 t[0] = t[1]
2322 t[0].append(self.prep_str_lit_case_label(t[3]))
2323
2324 # Define an instruction using the current instruction format
2325 # (specified by an enclosing format block).
2326 # "<mnemonic>(<args>)"
2327 def p_inst_0(self, t):
2328 'inst : ID LPAREN arg_list RPAREN'
2329 # Pass the ID and arg list to the current format class to deal with.
2330 currentFormat = self.formatStack.top()
2331 codeObj = currentFormat.defineInst(self, t[1], t[3], t.lexer.lineno)
2332 args = ','.join(map(str, t[3]))
2333 args = re.sub('(?m)^', '//', args)
2334 args = re.sub('^//', '', args)
2335 comment = '\n// %s::%s(%s)\n' % (currentFormat.id, t[1], args)
2336 codeObj.prepend_all(comment)
2337 t[0] = codeObj
2338
2339 # Define an instruction using an explicitly specified format:
2340 # "<fmt>::<mnemonic>(<args>)"
2341 def p_inst_1(self, t):
2342 'inst : ID DBLCOLON ID LPAREN arg_list RPAREN'
2343 try:
2344 format = self.formatMap[t[1]]
2345 except KeyError:
2346 error(t.lineno(1), 'instruction format "%s" not defined.' % t[1])
2347
2348 codeObj = format.defineInst(self, t[3], t[5], t.lexer.lineno)
2349 comment = '\n// %s::%s(%s)\n' % (t[1], t[3], t[5])
2350 codeObj.prepend_all(comment)
2351 t[0] = codeObj
2352
2353 # The arg list generates a tuple, where the first element is a
2354 # list of the positional args and the second element is a dict
2355 # containing the keyword args.
2356 def p_arg_list_0(self, t):
2357 'arg_list : positional_arg_list COMMA keyword_arg_list'
2358 t[0] = ( t[1], t[3] )
2359
2360 def p_arg_list_1(self, t):
2361 'arg_list : positional_arg_list'
2362 t[0] = ( t[1], {} )
2363
2364 def p_arg_list_2(self, t):
2365 'arg_list : keyword_arg_list'
2366 t[0] = ( [], t[1] )
2367
2368 def p_positional_arg_list_0(self, t):
2369 'positional_arg_list : empty'
2370 t[0] = []
2371
2372 def p_positional_arg_list_1(self, t):
2373 'positional_arg_list : expr'
2374 t[0] = [t[1]]
2375
2376 def p_positional_arg_list_2(self, t):
2377 'positional_arg_list : positional_arg_list COMMA expr'
2378 t[0] = t[1] + [t[3]]
2379
2380 def p_keyword_arg_list_0(self, t):
2381 'keyword_arg_list : keyword_arg'
2382 t[0] = t[1]
2383
2384 def p_keyword_arg_list_1(self, t):
2385 'keyword_arg_list : keyword_arg_list COMMA keyword_arg'
2386 t[0] = t[1]
2387 t[0].update(t[3])
2388
2389 def p_keyword_arg(self, t):
2390 'keyword_arg : ID EQUALS expr'
2391 t[0] = { t[1] : t[3] }
2392
2393 #
2394 # Basic expressions. These constitute the argument values of
2395 # "function calls" (i.e. instruction definitions in the decode
2396 # block) and default values for formal parameters of format
2397 # functions.
2398 #
2399 # Right now, these are either strings, integers, or (recursively)
2400 # lists of exprs (using Python square-bracket list syntax). Note
    # that bare identifiers are treated as string constants here (since
2402 # there isn't really a variable namespace to refer to).
2403 #
2404 def p_expr_0(self, t):
2405 '''expr : ID
2406 | INTLIT
2407 | STRLIT
2408 | CODELIT'''
2409 t[0] = t[1]
2410
2411 def p_expr_1(self, t):
2412 '''expr : LBRACKET list_expr RBRACKET'''
2413 t[0] = t[2]
2414
2415 def p_list_expr_0(self, t):
2416 'list_expr : expr'
2417 t[0] = [t[1]]
2418
2419 def p_list_expr_1(self, t):
2420 'list_expr : list_expr COMMA expr'
2421 t[0] = t[1] + [t[3]]
2422
2423 def p_list_expr_2(self, t):
2424 'list_expr : empty'
2425 t[0] = []
2426
2427 #
2428 # Empty production... use in other rules for readability.
2429 #
2430 def p_empty(self, t):
2431 'empty :'
2432 pass
2433
2434 # Parse error handler. Note that the argument here is the
2435 # offending *token*, not a grammar symbol (hence the need to use
2436 # t.value)
2437 def p_error(self, t):
2438 if t:
2439 error(t.lexer.lineno, "syntax error at '%s'" % t.value)
2440 else:
2441 error("unknown syntax error")
2442
2443 # END OF GRAMMAR RULES
2444
2445 def updateExportContext(self):
2446
2447 # create a continuation that allows us to grab the current parser
2448 def wrapInstObjParams(*args):
2449 return InstObjParams(self, *args)
2450 self.exportContext['InstObjParams'] = wrapInstObjParams
2451 self.exportContext.update(self.templateMap)
2452
2453 def defFormat(self, id, params, code, lineno):
2454 '''Define a new format'''
2455
2456 # make sure we haven't already defined this one
2457 if id in self.formatMap:
2458 error(lineno, 'format %s redefined.' % id)
2459
2460 # create new object and store in global map
2461 self.formatMap[id] = Format(id, params, code)
2462
2463 def expandCpuSymbolsToDict(self, template):
2464 '''Expand template with CPU-specific references into a
2465 dictionary with an entry for each CPU model name. The entry
2466 key is the model name and the corresponding value is the
2467 template with the CPU-specific refs substituted for that
2468 model.'''
2469
2470 # Protect '%'s that don't go with CPU-specific terms
2471 t = re.sub(r'%(?!\(CPU_)', '%%', template)
2472 result = {}
2473 for cpu in self.cpuModels:
2474 result[cpu.name] = t % cpu.strings
2475 return result
2476
2477 def expandCpuSymbolsToString(self, template):
2478 '''*If* the template has CPU-specific references, return a
2479 single string containing a copy of the template for each CPU
2480 model with the corresponding values substituted in. If the
2481 template has no CPU-specific references, it is returned
2482 unmodified.'''
2483
2484 if template.find('%(CPU_') != -1:
2485 return reduce(lambda x,y: x+y,
2486 self.expandCpuSymbolsToDict(template).values())
2487 else:
2488 return template
2489
2490 def protectCpuSymbols(self, template):
2491 '''Protect CPU-specific references by doubling the
2492 corresponding '%'s (in preparation for substituting a different
2493 set of references into the template).'''
2494
2495 return re.sub(r'%(?=\(CPU_)', '%%', template)
2496
2497 def protectNonSubstPercents(self, s):
2498 '''Protect any non-dict-substitution '%'s in a format string
2499 (i.e. those not followed by '(')'''
2500
2501 return re.sub(r'%(?!\()', '%%', s)
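    # For example, protectCpuSymbols('%(CPU_foo)s is 100%') doubles only the
    # CPU-specific '%', giving '%%(CPU_foo)s is 100%', while
    # protectNonSubstPercents() doubles only the bare one, giving
    # '%(CPU_foo)s is 100%%'. (Illustrative strings, not from any real ISA.)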
2502
2503 def buildOperandNameMap(self, user_dict, lineno):
2504 operand_name = {}
2505 for op_name, val in user_dict.iteritems():
2506
2507 # Check if extra attributes have been specified.
            if len(val) > 9:
                error(lineno, 'error: too many attributes for operand "%s"' %
                      op_name)
2511
2512 # Pad val with None in case optional args are missing
2513 val += (None, None, None, None)
2514 base_cls_name, dflt_ext, reg_spec, flags, sort_pri, \
2515 read_code, write_code, read_predicate, write_predicate = val[:9]
2516
2517 # Canonical flag structure is a triple of lists, where each list
2518 # indicates the set of flags implied by this operand always, when
2519 # used as a source, and when used as a dest, respectively.
2520 # For simplicity this can be initialized using a variety of fairly
2521 # obvious shortcuts; we convert these to canonical form here.
2522 if not flags:
2523 # no flags specified (e.g., 'None')
2524 flags = ( [], [], [] )
2525 elif isinstance(flags, str):
2526 # a single flag: assumed to be unconditional
2527 flags = ( [ flags ], [], [] )
2528 elif isinstance(flags, list):
2529 # a list of flags: also assumed to be unconditional
2530 flags = ( flags, [], [] )
2531 elif isinstance(flags, tuple):
2532 # it's a tuple: it should be a triple,
2533 # but each item could be a single string or a list
2534 (uncond_flags, src_flags, dest_flags) = flags
2535 flags = (makeList(uncond_flags),
2536 makeList(src_flags), makeList(dest_flags))
2537
2538 # Accumulate attributes of new operand class in tmp_dict
2539 tmp_dict = {}
2540 attrList = ['reg_spec', 'flags', 'sort_pri',
2541 'read_code', 'write_code',
2542 'read_predicate', 'write_predicate']
2543 if dflt_ext:
2544 dflt_ctype = self.operandTypeMap[dflt_ext]
2545 attrList.extend(['dflt_ctype', 'dflt_ext'])
2546 # reg_spec is either just a string or a dictionary
2547 # (for elems of vector)
2548 if isinstance(reg_spec, tuple):
2549 (reg_spec, elem_spec) = reg_spec
2550 if isinstance(elem_spec, str):
2551 attrList.append('elem_spec')
2552 else:
2553 assert(isinstance(elem_spec, dict))
2554 elems = elem_spec
2555 attrList.append('elems')
2556 for attr in attrList:
2557 tmp_dict[attr] = eval(attr)
2558 tmp_dict['base_name'] = op_name
2559
2560 # New class name will be e.g. "IntReg_Ra"
2561 cls_name = base_cls_name + '_' + op_name
2562 # Evaluate string arg to get class object. Note that the
2563 # actual base class for "IntReg" is "IntRegOperand", i.e. we
2564 # have to append "Operand".
2565 try:
2566 base_cls = eval(base_cls_name + 'Operand')
2567 except NameError:
2568 error(lineno,
2569 'error: unknown operand base class "%s"' % base_cls_name)
2570 # The following statement creates a new class called
2571 # <cls_name> as a subclass of <base_cls> with the attributes
2572 # in tmp_dict, just as if we evaluated a class declaration.
2573 operand_name[op_name] = type(cls_name, (base_cls,), tmp_dict)
2574
2575 self.operandNameMap = operand_name
2576
2577 # Define operand variables.
2578 operands = user_dict.keys()
2579 # Add the elems defined in the vector operands and
2580 # build a map elem -> vector (used in OperandList)
2581 elem_to_vec = {}
2582 for op in user_dict.keys():
2583 if hasattr(self.operandNameMap[op], 'elems'):
2584 for elem in self.operandNameMap[op].elems.keys():
2585 operands.append(elem)
2586 elem_to_vec[elem] = op
2587 self.elemToVector = elem_to_vec
2588 extensions = self.operandTypeMap.keys()
2589
2590 operandsREString = r'''
2591 (?<!\w) # neg. lookbehind assertion: prevent partial matches
2592 ((%s)(?:_(%s))?) # match: operand with optional '_' then suffix
2593 (?!\w) # neg. lookahead assertion: prevent partial matches
2594 ''' % (string.join(operands, '|'), string.join(extensions, '|'))
2595
2596 self.operandsRE = re.compile(operandsREString, re.MULTILINE|re.VERBOSE)
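        # As a hypothetical illustration: with operands 'Ra'/'Rb' and
        # extensions 'sw'/'uq', operandsRE would match 'Ra' and 'Rb_sw'
        # in "Ra = Rb_sw + 1" (capturing the base name and the optional
        # extension), but not the 'Ra' embedded in an identifier such as
        # 'FooRaBar'.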
2597
2598 # Same as operandsREString, but extension is mandatory, and only two
2599 # groups are returned (base and ext, not full name as above).
        # Used for substituting '_' for '.' to make C++ identifiers.
2601 operandsWithExtREString = r'(?<!\w)(%s)_(%s)(?!\w)' \
2602 % (string.join(operands, '|'), string.join(extensions, '|'))
2603
2604 self.operandsWithExtRE = \
2605 re.compile(operandsWithExtREString, re.MULTILINE)
2606
2607 def substMungedOpNames(self, code):
2608 '''Munge operand names in code string to make legal C++
2609 variable names. This means getting rid of the type extension
        if any. Will match the base_name attribute of the Operand object.'''
2611 return self.operandsWithExtRE.sub(r'\1', code)
2612
2613 def mungeSnippet(self, s):
2614 '''Fix up code snippets for final substitution in templates.'''
2615 if isinstance(s, str):
2616 return self.substMungedOpNames(substBitOps(s))
2617 else:
2618 return s
2619
2620 def open(self, name, bare=False):
2621 '''Open the output file for writing and include scary warning.'''
2622 filename = os.path.join(self.output_dir, name)
2623 f = open(filename, 'w')
2624 if f:
2625 if not bare:
2626 f.write(ISAParser.scaremonger_template % self)
2627 return f
2628
2629 def update(self, file, contents):
2630 '''Update the output file only. Scons should handle the case when
2631 the new contents are unchanged using its built-in hash feature.'''
2632 f = self.open(file)
2633 f.write(contents)
2634 f.close()
2635
2636 # This regular expression matches '##include' directives
2637 includeRE = re.compile(r'^\s*##include\s+"(?P<filename>[^"]*)".*$',
2638 re.MULTILINE)
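    # e.g. a line such as
    #     ##include "decoder.isa"
    # (hypothetical file name) is replaced inline with that file's contents,
    # bracketed by ##newfile/##endfile markers for line-number tracking.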
2639
2640 def replace_include(self, matchobj, dirname):
2641 """Function to replace a matched '##include' directive with the
2642 contents of the specified file (with nested ##includes
2643 replaced recursively). 'matchobj' is an re match object
2644 (from a match of includeRE) and 'dirname' is the directory
2645 relative to which the file path should be resolved."""
2646
2647 fname = matchobj.group('filename')
2648 full_fname = os.path.normpath(os.path.join(dirname, fname))
2649 contents = '##newfile "%s"\n%s\n##endfile\n' % \
2650 (full_fname, self.read_and_flatten(full_fname))
2651 return contents
2652
2653 def read_and_flatten(self, filename):
2654 """Read a file and recursively flatten nested '##include' files."""
2655
2656 current_dir = os.path.dirname(filename)
2657 try:
2658 contents = open(filename).read()
2659 except IOError:
2660 error('Error including file "%s"' % filename)
2661
2662 self.fileNameStack.push(LineTracker(filename))
2663
2664 # Find any includes and include them
2665 def replace(matchobj):
2666 return self.replace_include(matchobj, current_dir)
2667 contents = self.includeRE.sub(replace, contents)
2668
2669 self.fileNameStack.pop()
2670 return contents
2671
2672 AlreadyGenerated = {}
2673
2674 def _parse_isa_desc(self, isa_desc_file):
2675 '''Read in and parse the ISA description.'''
2676
2677 # The build system can end up running the ISA parser twice: once to
2678 # finalize the build dependencies, and then to actually generate
2679 # the files it expects (in src/arch/$ARCH/generated). This code
2680 # doesn't do anything different either time, however; the SCons
2681 # invocations just expect different things. Since this code runs
2682 # within SCons, we can just remember that we've already run and
2683 # not perform a completely unnecessary run, since the ISA parser's
2684 # effect is idempotent.
2685 if isa_desc_file in ISAParser.AlreadyGenerated:
2686 return
2687
2688 # grab the last three path components of isa_desc_file
2689 self.filename = '/'.join(isa_desc_file.split('/')[-3:])
2690
2691 # Read file and (recursively) all included files into a string.
2692 # PLY requires that the input be in a single string so we have to
2693 # do this up front.
2694 isa_desc = self.read_and_flatten(isa_desc_file)
2695
2696 # Initialize lineno tracker
2697 self.lex.lineno = LineTracker(isa_desc_file)
2698
2699 # Parse.
2700 self.parse_string(isa_desc)
2701
2702 ISAParser.AlreadyGenerated[isa_desc_file] = None
2703
2704 def parse_isa_desc(self, *args, **kwargs):
2705 try:
2706 self._parse_isa_desc(*args, **kwargs)
2707 except ISAParserError, e:
2708 print backtrace(self.fileNameStack)
2709 print "At %s:" % e.lineno
2710 print e
2711 sys.exit(1)
2712
2713# Called as script: get args from command line.
2714# Args are: <isa desc file> <output dir>
2715if __name__ == '__main__':
2716 ISAParser(sys.argv[2]).parse_isa_desc(sys.argv[1])