isa_parser.py (13602:73512cfcca53) -> isa_parser.py (13610:5d5404ac6288)
1# Copyright (c) 2014, 2016, 2019 ARM Limited
2# All rights reserved
3#
4# The license below extends only to copyright in the software and shall
5# not be construed as granting a license to any other intellectual
6# property including but not limited to intellectual property relating
7# to a hardware implementation of the functionality of the software
8# licensed hereunder. You may use the software subject to the license
9# terms below provided that you ensure that this notice is replicated
10# unmodified and in its entirety in all distributions of the software,
11# modified or unmodified, in source code or in binary form.
12#
13# Copyright (c) 2003-2005 The Regents of The University of Michigan
14# Copyright (c) 2013,2015 Advanced Micro Devices, Inc.
15# All rights reserved.
16#
17# Redistribution and use in source and binary forms, with or without
18# modification, are permitted provided that the following conditions are
19# met: redistributions of source code must retain the above copyright
20# notice, this list of conditions and the following disclaimer;
21# redistributions in binary form must reproduce the above copyright
22# notice, this list of conditions and the following disclaimer in the
23# documentation and/or other materials provided with the distribution;
24# neither the name of the copyright holders nor the names of its
25# contributors may be used to endorse or promote products derived from
26# this software without specific prior written permission.
27#
28# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
29# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
30# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
31# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
32# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
33# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
34# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
35# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
36# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
37# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
38# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
39#
40# Authors: Steve Reinhardt
41
42from __future__ import with_statement, print_function
43import os
44import sys
45import re
46import string
47import inspect, traceback
48# get type names
49from types import *
50
51from m5.util.grammar import Grammar
52
53debug=False
54
55###################
56# Utility functions
57
58#
59# Indent every line in string 's' by two spaces
60# (except preprocessor directives).
61# Used to make nested code blocks look pretty.
62#
63def indent(s):
64 return re.sub(r'(?m)^(?!#)', ' ', s)
65
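
# Illustrative sketch (editor-added, not in either revision): what indent()
# does to a small C++ fragment.  '_example_indent' is a hypothetical helper
# that the parser itself never calls.
def _example_indent():
    block = 'int x = 0;\n#if FOO\nx = 1;\n#endif'
    # Non-preprocessor lines gain the two-space indent described above;
    # the '#if'/'#endif' lines stay at column zero.
    return indent(block)
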
66#
67# Munge a somewhat arbitrarily formatted piece of Python code
68# (e.g. from a format 'let' block) into something whose indentation
69# will get by the Python parser.
70#
71# The two keys here are that Python will give a syntax error if
72# there's any whitespace at the beginning of the first line, and that
73# all lines at the same lexical nesting level must have identical
74# indentation. Unfortunately the way code literals work, an entire
75# let block tends to have some initial indentation. Rather than
76# trying to figure out what that is and strip it off, we prepend 'if
77# 1:' to make the let code the nested block inside the if (and have
78# the parser automatically deal with the indentation for us).
79#
80# We don't want to do this if (1) the code block is empty or (2) the
81# first line of the block doesn't have any whitespace at the front.
82
83def fixPythonIndentation(s):
84 # get rid of blank lines first
85 s = re.sub(r'(?m)^\s*\n', '', s);
86 if (s != '' and re.match(r'[ \t]', s[0])):
87 s = 'if 1:\n' + s
88 return s
89
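
# Illustrative sketch (editor-added): the 'if 1:' trick applied to a
# uniformly indented 'let' body.  '_example_fix_indent' is a hypothetical
# helper, unused by the parser.
def _example_fix_indent():
    body = '    x = 1\n    y = 2\n'
    fixed = fixPythonIndentation(body)
    # fixed == 'if 1:\n    x = 1\n    y = 2\n', which compiles cleanly even
    # though the original block starts with leading whitespace.
    exec fixed in {}
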
90class ISAParserError(Exception):
91 """Exception class for parser errors"""
92 def __init__(self, first, second=None):
93 if second is None:
94 self.lineno = 0
95 self.string = first
96 else:
97 self.lineno = first
98 self.string = second
99
100 def __str__(self):
101 return self.string
102
103def error(*args):
104 raise ISAParserError(*args)
105
106####################
107# Template objects.
108#
109# Template objects are format strings that allow substitution from
110# the attribute spaces of other objects (e.g. InstObjParams instances).
111
112labelRE = re.compile(r'(?<!%)%\(([^\)]+)\)[sd]')
113
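# Illustrative sketch (editor-added): the substitution machinery that
# Template.subst() below builds on.  labelRE extracts the %(name)s / %(name)d
# placeholders (a doubled '%%' is left alone), and the final text comes from
# ordinary '%' formatting against the dict that subst() assembles.
# '_example_template_labels' is a hypothetical helper, never called here.
def _example_template_labels():
    tmpl = '%(class_name)s::execute() { %(op_decl)s /* 100%% C++ */ }'
    names = labelRE.findall(tmpl)        # ['class_name', 'op_decl']
    text = tmpl % {'class_name': 'AddOp', 'op_decl': 'int x = 0;'}
    return names, text
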
114class Template(object):
115 def __init__(self, parser, t):
116 self.parser = parser
117 self.template = t
118
119 def subst(self, d):
120 myDict = None
121
122 # Protect non-Python-dict substitutions (e.g. if there's a printf
123 # in the templated C++ code)
124 template = self.parser.protectNonSubstPercents(self.template)
125
126 # Build a dict ('myDict') to use for the template substitution.
127 # Start with the template namespace. Make a copy since we're
128 # going to modify it.
129 myDict = self.parser.templateMap.copy()
130
131 if isinstance(d, InstObjParams):
132 # If we're dealing with an InstObjParams object, we need
133 # to be a little more sophisticated. The instruction-wide
134 # parameters are already formed, but the parameters which
135 # are only function wide still need to be generated.
136 compositeCode = ''
137
138 myDict.update(d.__dict__)
139 # The "operands" and "snippets" attributes of the InstObjParams
140 # objects are for internal use and not substitution.
141 del myDict['operands']
142 del myDict['snippets']
143
144 snippetLabels = [l for l in labelRE.findall(template)
145 if d.snippets.has_key(l)]
146
147 snippets = dict([(s, self.parser.mungeSnippet(d.snippets[s]))
148 for s in snippetLabels])
149
150 myDict.update(snippets)
151
152 compositeCode = ' '.join(map(str, snippets.values()))
153
154 # Add in template itself in case it references any
155 # operands explicitly (like Mem)
156 compositeCode += ' ' + template
157
158 operands = SubOperandList(self.parser, compositeCode, d.operands)
159
160 myDict['op_decl'] = operands.concatAttrStrings('op_decl')
161 if operands.readPC or operands.setPC:
162 myDict['op_decl'] += 'TheISA::PCState __parserAutoPCState;\n'
163
164 # In case there are predicated register reads and writes, declare
165 # the variables for the register indices. We assume that all the
166 # operands in the OperandList are also in the SubOperandList and
167 # in the same order; otherwise, predication is expected not to be
168 # used for those operands.
169 if operands.predRead:
170 myDict['op_decl'] += 'uint8_t _sourceIndex = 0;\n'
171 if operands.predWrite:
172 myDict['op_decl'] += 'uint8_t M5_VAR_USED _destIndex = 0;\n'
173
174 is_src = lambda op: op.is_src
175 is_dest = lambda op: op.is_dest
176
177 myDict['op_src_decl'] = \
178 operands.concatSomeAttrStrings(is_src, 'op_src_decl')
179 myDict['op_dest_decl'] = \
180 operands.concatSomeAttrStrings(is_dest, 'op_dest_decl')
181 if operands.readPC:
182 myDict['op_src_decl'] += \
183 'TheISA::PCState __parserAutoPCState;\n'
184 if operands.setPC:
185 myDict['op_dest_decl'] += \
186 'TheISA::PCState __parserAutoPCState;\n'
187
188 myDict['op_rd'] = operands.concatAttrStrings('op_rd')
189 if operands.readPC:
190 myDict['op_rd'] = '__parserAutoPCState = xc->pcState();\n' + \
191 myDict['op_rd']
192
193 # Compose the op_wb string. If we're going to write back the
194 # PC state because we changed some of its elements, we'll need to
195 # do that as early as possible. That allows later uncoordinated
196 # modifications to the PC to layer appropriately.
197 reordered = list(operands.items)
198 reordered.reverse()
199 op_wb_str = ''
200 pcWbStr = 'xc->pcState(__parserAutoPCState);\n'
201 for op_desc in reordered:
202 if op_desc.isPCPart() and op_desc.is_dest:
203 op_wb_str = op_desc.op_wb + pcWbStr + op_wb_str
204 pcWbStr = ''
205 else:
206 op_wb_str = op_desc.op_wb + op_wb_str
207 myDict['op_wb'] = op_wb_str
208
209 elif isinstance(d, dict):
210 # if the argument is a dictionary, we just use it.
211 myDict.update(d)
212 elif hasattr(d, '__dict__'):
213 # if the argument is an object, we use its attribute map.
214 myDict.update(d.__dict__)
215 else:
216 raise TypeError, "Template.subst() arg must be or have dictionary"
217 return template % myDict
218
219 # Convert to string.
220 def __str__(self):
221 return self.template
222
223################
224# Format object.
225#
226# A format object encapsulates an instruction format. It must provide
227# a defineInst() method that generates the code for an instruction
228# definition.
229
230class Format(object):
231 def __init__(self, id, params, code):
232 self.id = id
233 self.params = params
234 label = 'def format ' + id
235 self.user_code = compile(fixPythonIndentation(code), label, 'exec')
236 param_list = string.join(params, ", ")
237 f = '''def defInst(_code, _context, %s):
238 my_locals = vars().copy()
239 exec _code in _context, my_locals
240 return my_locals\n''' % param_list
241 c = compile(f, label + ' wrapper', 'exec')
242 exec c
243 self.func = defInst
244
245 def defineInst(self, parser, name, args, lineno):
246 parser.updateExportContext()
247 context = parser.exportContext.copy()
248 if len(name):
249 Name = name[0].upper()
250 if len(name) > 1:
251 Name += name[1:]
252 context.update({ 'name' : name, 'Name' : Name })
253 try:
254 vars = self.func(self.user_code, context, *args[0], **args[1])
255 except Exception, exc:
256 if debug:
257 raise
258 error(lineno, 'error defining "%s": %s.' % (name, exc))
259 for k in vars.keys():
260 if k not in ('header_output', 'decoder_output',
261 'exec_output', 'decode_block'):
262 del vars[k]
263 return GenCode(parser, **vars)
264
265# Special null format to catch an implicit-format instruction
266# definition outside of any format block.
267class NoFormat(object):
268 def __init__(self):
269 self.defaultInst = ''
270
271 def defineInst(self, parser, name, args, lineno):
272 error(lineno,
273 'instruction definition "%s" with no active format!' % name)
274
275###############
276# GenCode class
277#
278# The GenCode class encapsulates generated code destined for various
279# output files. The header_output and decoder_output attributes are
280# strings containing code destined for decoder.hh and decoder.cc
281# respectively. The decode_block attribute contains code to be
282# incorporated in the decode function itself (that will also end up in
283# decoder.cc). The exec_output attribute is the string of code for the
284# exec.cc file. The has_decode_default attribute is used in the decode block
285# to allow explicit default clauses to override default default clauses.
286
287class GenCode(object):
288 # Constructor.
289 def __init__(self, parser,
290 header_output = '', decoder_output = '', exec_output = '',
291 decode_block = '', has_decode_default = False):
292 self.parser = parser
293 self.header_output = header_output
294 self.decoder_output = decoder_output
295 self.exec_output = exec_output
296 self.decode_block = decode_block
297 self.has_decode_default = has_decode_default
298
299 # Write these code chunks out to the filesystem. They will be properly
300 # interwoven by write_top_level_files().
301 def emit(self):
302 if self.header_output:
303 self.parser.get_file('header').write(self.header_output)
304 if self.decoder_output:
305 self.parser.get_file('decoder').write(self.decoder_output)
306 if self.exec_output:
307 self.parser.get_file('exec').write(self.exec_output)
308 if self.decode_block:
309 self.parser.get_file('decode_block').write(self.decode_block)
310
311 # Override '+' operator: generate a new GenCode object that
312 # concatenates all the individual strings in the operands.
313 def __add__(self, other):
314 return GenCode(self.parser,
315 self.header_output + other.header_output,
316 self.decoder_output + other.decoder_output,
317 self.exec_output + other.exec_output,
318 self.decode_block + other.decode_block,
319 self.has_decode_default or other.has_decode_default)
320
321 # Prepend a string (typically a comment) to all the strings.
322 def prepend_all(self, pre):
323 self.header_output = pre + self.header_output
324 self.decoder_output = pre + self.decoder_output
325 self.decode_block = pre + self.decode_block
326 self.exec_output = pre + self.exec_output
327
328 # Wrap the decode block in a pair of strings (e.g., 'case foo:'
329 # and 'break;'). Used to build the big nested switch statement.
330 def wrap_decode_block(self, pre, post = ''):
331 self.decode_block = pre + indent(self.decode_block) + post
332
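# Illustrative sketch (editor-added): how GenCode chunks are combined and how
# a decode fragment gets wrapped into a switch case.  '_ExampleParser' and
# '_example_gencode' are hypothetical stand-ins; GenCode merely records the
# parser object here.
class _ExampleParser(object):
    pass

def _example_gencode():
    a = GenCode(_ExampleParser(), header_output='class A;\n',
                decode_block='return new A(machInst);\n')
    b = GenCode(_ExampleParser(), decode_block='return new B(machInst);\n')
    c = a + b                          # concatenate every output stream
    c.wrap_decode_block('case 0x1:\n', 'break;\n')
    return c.decode_block              # indented body between the wrappers
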
333#####################################################################
334#
335# Bitfield Operator Support
336#
337#####################################################################
338
339bitOp1ArgRE = re.compile(r'<\s*(\w+)\s*:\s*>')
340
341bitOpWordRE = re.compile(r'(?<![\w\.])([\w\.]+)<\s*(\w+)\s*:\s*(\w+)\s*>')
342bitOpExprRE = re.compile(r'\)<\s*(\w+)\s*:\s*(\w+)\s*>')
343
344def substBitOps(code):
345 # first convert single-bit selectors to two-index form
346 # i.e., <n> --> <n:n>
347 code = bitOp1ArgRE.sub(r'<\1:\1>', code)
348 # simple case: selector applied to ID (name)
349 # i.e., foo<a:b> --> bits(foo, a, b)
350 code = bitOpWordRE.sub(r'bits(\1, \2, \3)', code)
351 # if selector is applied to expression (ending in ')'),
352 # we need to search backward for matching '('
353 match = bitOpExprRE.search(code)
354 while match:
355 exprEnd = match.start()
356 here = exprEnd - 1
357 nestLevel = 1
358 while nestLevel > 0:
359 if code[here] == '(':
360 nestLevel -= 1
361 elif code[here] == ')':
362 nestLevel += 1
363 here -= 1
364 if here < 0:
365 sys.exit("Didn't find '('!")
366 exprStart = here+1
367 newExpr = r'bits(%s, %s, %s)' % (code[exprStart:exprEnd+1],
368 match.group(1), match.group(2))
369 code = code[:exprStart] + newExpr + code[match.end():]
370 match = bitOpExprRE.search(code)
371 return code
372
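# Illustrative sketch (editor-added): the rewrites substBitOps() performs.
# '_example_bitops' is a hypothetical helper, never called by the parser.
def _example_bitops():
    simple = substBitOps('Mem<7:0>')         # -> 'bits(Mem, 7, 0)'
    expr = substBitOps('(Ra + Rb)<3:2>')     # -> 'bits((Ra + Rb), 3, 2)'
    return simple, expr
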
373
374#####################################################################
375#
376# Code Parser
377#
378# The remaining code is the support for automatically extracting
379# instruction characteristics from pseudocode.
380#
381#####################################################################
382
383# Force the argument to be a list. Useful for flags, where a caller
384 # can specify a singleton flag or a list of flags. Also useful for
385# converting tuples to lists so they can be modified.
386def makeList(arg):
387 if isinstance(arg, list):
388 return arg
389 elif isinstance(arg, tuple):
390 return list(arg)
391 elif not arg:
392 return []
393 else:
394 return [ arg ]
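# For example (editor-added note): makeList('IsLoad') -> ['IsLoad'],
# makeList(('IsLoad', 'IsStore')) -> ['IsLoad', 'IsStore'], and
# makeList(None) -> [].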
395
396class Operand(object):
397 '''Base class for operand descriptors. An instance of this class
398 (or actually a class derived from this one) represents a specific
399 operand for a code block (e.g., "Rc.sq" as a dest). Intermediate
400 derived classes encapsulate the traits of a particular operand
401 type (e.g., "32-bit integer register").'''
402
403 def buildReadCode(self, func = None):
404 subst_dict = {"name": self.base_name,
405 "func": func,
406 "reg_idx": self.reg_spec,
407 "ctype": self.ctype}
408 if hasattr(self, 'src_reg_idx'):
409 subst_dict['op_idx'] = self.src_reg_idx
410 code = self.read_code % subst_dict
411 return '%s = %s;\n' % (self.base_name, code)
412
413 def buildWriteCode(self, func = None):
414 subst_dict = {"name": self.base_name,
415 "func": func,
416 "reg_idx": self.reg_spec,
417 "ctype": self.ctype,
418 "final_val": self.base_name}
419 if hasattr(self, 'dest_reg_idx'):
420 subst_dict['op_idx'] = self.dest_reg_idx
421 code = self.write_code % subst_dict
422 return '''
423 {
424 %s final_val = %s;
425 %s;
426 if (traceData) { traceData->setData(final_val); }
427 }''' % (self.dflt_ctype, self.base_name, code)
428
429 def __init__(self, parser, full_name, ext, is_src, is_dest):
430 self.full_name = full_name
431 self.ext = ext
432 self.is_src = is_src
433 self.is_dest = is_dest
434 # The 'effective extension' (eff_ext) is either the actual
435 # extension, if one was explicitly provided, or the default.
436 if ext:
437 self.eff_ext = ext
438 elif hasattr(self, 'dflt_ext'):
439 self.eff_ext = self.dflt_ext
440
441 if hasattr(self, 'eff_ext'):
442 self.ctype = parser.operandTypeMap[self.eff_ext]
443
444 # Finalize additional fields (primarily code fields). This step
445 # is done separately since some of these fields may depend on the
446 # register index enumeration that hasn't been performed yet at the
447 # time of __init__(). The register index enumeration is affected
448 # by predicated register reads/writes. Hence, we forward the flags
449 # that indicate whether or not predication is in use.
450 def finalize(self, predRead, predWrite):
451 self.flags = self.getFlags()
452 self.constructor = self.makeConstructor(predRead, predWrite)
453 self.op_decl = self.makeDecl()
454
455 if self.is_src:
456 self.op_rd = self.makeRead(predRead)
457 self.op_src_decl = self.makeDecl()
458 else:
459 self.op_rd = ''
460 self.op_src_decl = ''
461
462 if self.is_dest:
463 self.op_wb = self.makeWrite(predWrite)
464 self.op_dest_decl = self.makeDecl()
465 else:
466 self.op_wb = ''
467 self.op_dest_decl = ''
468
469 def isMem(self):
470 return 0
471
472 def isReg(self):
473 return 0
474
475 def isFloatReg(self):
476 return 0
477
478 def isIntReg(self):
479 return 0
480
481 def isCCReg(self):
482 return 0
483
484 def isControlReg(self):
485 return 0
486
487 def isVecReg(self):
488 return 0
489
490 def isVecElem(self):
491 return 0
492
493 def isVecPredReg(self):
494 return 0
495
496 def isPCState(self):
497 return 0
498
499 def isPCPart(self):
500 return self.isPCState() and self.reg_spec
501
502 def hasReadPred(self):
503 return self.read_predicate != None
504
505 def hasWritePred(self):
506 return self.write_predicate != None
507
508 def getFlags(self):
509 # note the empty slice '[:]' gives us a copy of self.flags[0]
510 # instead of a reference to it
511 my_flags = self.flags[0][:]
512 if self.is_src:
513 my_flags += self.flags[1]
514 if self.is_dest:
515 my_flags += self.flags[2]
516 return my_flags
517
518 def makeDecl(self):
519 # Note that initializations in the declarations are solely
520 # to avoid 'uninitialized variable' errors from the compiler.
521 return self.ctype + ' ' + self.base_name + ' = 0;\n';
522
523
524src_reg_constructor = '\n\t_srcRegIdx[_numSrcRegs++] = RegId(%s, %s);'
525dst_reg_constructor = '\n\t_destRegIdx[_numDestRegs++] = RegId(%s, %s);'
526
527
528class IntRegOperand(Operand):
529 reg_class = 'IntRegClass'
530
531 def isReg(self):
532 return 1
533
534 def isIntReg(self):
535 return 1
536
537 def makeConstructor(self, predRead, predWrite):
538 c_src = ''
539 c_dest = ''
540
541 if self.is_src:
542 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
543 if self.hasReadPred():
544 c_src = '\n\tif (%s) {%s\n\t}' % \
545 (self.read_predicate, c_src)
546
547 if self.is_dest:
548 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
549 c_dest += '\n\t_numIntDestRegs++;'
550 if self.hasWritePred():
551 c_dest = '\n\tif (%s) {%s\n\t}' % \
552 (self.write_predicate, c_dest)
553
554 return c_src + c_dest
555
556 def makeRead(self, predRead):
557 if (self.ctype == 'float' or self.ctype == 'double'):
558 error('Attempt to read integer register as FP')
559 if self.read_code != None:
560 return self.buildReadCode('readIntRegOperand')
561
562 int_reg_val = ''
563 if predRead:
564 int_reg_val = 'xc->readIntRegOperand(this, _sourceIndex++)'
565 if self.hasReadPred():
566 int_reg_val = '(%s) ? %s : 0' % \
567 (self.read_predicate, int_reg_val)
568 else:
569 int_reg_val = 'xc->readIntRegOperand(this, %d)' % self.src_reg_idx
570
571 return '%s = %s;\n' % (self.base_name, int_reg_val)
572
573 def makeWrite(self, predWrite):
574 if (self.ctype == 'float' or self.ctype == 'double'):
575 error('Attempt to write integer register as FP')
576 if self.write_code != None:
577 return self.buildWriteCode('setIntRegOperand')
578
579 if predWrite:
580 wp = 'true'
581 if self.hasWritePred():
582 wp = self.write_predicate
583
584 wcond = 'if (%s)' % (wp)
585 windex = '_destIndex++'
586 else:
587 wcond = ''
588 windex = '%d' % self.dest_reg_idx
589
590 wb = '''
591 %s
592 {
593 %s final_val = %s;
594 xc->setIntRegOperand(this, %s, final_val);\n
595 if (traceData) { traceData->setData(final_val); }
596 }''' % (wcond, self.ctype, self.base_name, windex)
597
598 return wb
599
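# Illustrative sketch (editor-added): the C++ constructor fragment an integer
# source operand contributes.  '_ExampleIntOp' is a hypothetical subclass with
# made-up attribute values; in the real parser these come from the ISA
# description's operand map.
def _example_int_reg_constructor():
    class _ExampleIntOp(IntRegOperand):
        def __init__(self):            # deliberately bypass Operand.__init__
            self.is_src, self.is_dest = True, False
            self.reg_spec = 'RA'
            self.read_predicate = None
            self.write_predicate = None
    # Produces: '\n\t_srcRegIdx[_numSrcRegs++] = RegId(IntRegClass, RA);'
    return _ExampleIntOp().makeConstructor(False, False)
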
600class FloatRegOperand(Operand):
601 reg_class = 'FloatRegClass'
602
603 def isReg(self):
604 return 1
605
606 def isFloatReg(self):
607 return 1
608
609 def makeConstructor(self, predRead, predWrite):
610 c_src = ''
611 c_dest = ''
612
613 if self.is_src:
614 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
615
616 if self.is_dest:
617 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
618 c_dest += '\n\t_numFPDestRegs++;'
619
620 return c_src + c_dest
621
622 def makeRead(self, predRead):
623 if self.read_code != None:
624 return self.buildReadCode('readFloatRegOperandBits')
625
626 if predRead:
627 rindex = '_sourceIndex++'
628 else:
629 rindex = '%d' % self.src_reg_idx
630
631 code = 'xc->readFloatRegOperandBits(this, %s)' % rindex
632 if self.ctype == 'float':
633 code = 'bitsToFloat32(%s)' % code
634 elif self.ctype == 'double':
635 code = 'bitsToFloat64(%s)' % code
636 return '%s = %s;\n' % (self.base_name, code)
637
638 def makeWrite(self, predWrite):
639 if self.write_code != None:
640 return self.buildWriteCode('setFloatRegOperandBits')
641
642 if predWrite:
643 wp = '_destIndex++'
644 else:
645 wp = '%d' % self.dest_reg_idx
646
647 val = 'final_val'
648 if self.ctype == 'float':
649 val = 'floatToBits32(%s)' % val
650 elif self.ctype == 'double':
651 val = 'floatToBits64(%s)' % val
652
653 wp = 'xc->setFloatRegOperandBits(this, %s, %s);' % (wp, val)
654
655 wb = '''
656 {
657 %s final_val = %s;
658 %s\n
659 if (traceData) { traceData->setData(final_val); }
660 }''' % (self.ctype, self.base_name, wp)
661 return wb
662
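# Illustrative sketch (editor-added): the read fragment generated for a
# single-precision float source operand, showing the bitsToFloat32()
# conversion.  '_ExampleFloatOp' is hypothetical; attribute values are made up.
def _example_float_reg_read():
    class _ExampleFloatOp(FloatRegOperand):
        def __init__(self):            # deliberately bypass Operand.__init__
            self.is_src, self.is_dest = True, False
            self.ctype, self.base_name = 'float', 'Fa'
            self.read_code, self.src_reg_idx = None, 1
    # Produces: 'Fa = bitsToFloat32(xc->readFloatRegOperandBits(this, 1));\n'
    return _ExampleFloatOp().makeRead(False)
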
663class VecRegOperand(Operand):
664 reg_class = 'VecRegClass'
665
666 def __init__(self, parser, full_name, ext, is_src, is_dest):
667 Operand.__init__(self, parser, full_name, ext, is_src, is_dest)
668 self.elemExt = None
669 self.parser = parser
670
671 def isReg(self):
672 return 1
673
674 def isVecReg(self):
675 return 1
676
677 def makeDeclElem(self, elem_op):
678 (elem_name, elem_ext) = elem_op
679 (elem_spec, dflt_elem_ext, zeroing) = self.elems[elem_name]
680 if elem_ext:
681 ext = elem_ext
682 else:
683 ext = dflt_elem_ext
684 ctype = self.parser.operandTypeMap[ext]
685 return '\n\t%s %s = 0;' % (ctype, elem_name)
686
687 def makeDecl(self):
688 if not self.is_dest and self.is_src:
689 c_decl = '\t/* Vars for %s*/' % (self.base_name)
690 if hasattr(self, 'active_elems'):
691 if self.active_elems:
692 for elem in self.active_elems:
693 c_decl += self.makeDeclElem(elem)
694 return c_decl + '\t/* End vars for %s */\n' % (self.base_name)
695 else:
696 return ''
697
698 def makeConstructor(self, predRead, predWrite):
699 c_src = ''
700 c_dest = ''
701
702 numAccessNeeded = 1
703
704 if self.is_src:
705 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
706
707 if self.is_dest:
708 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
709 c_dest += '\n\t_numVecDestRegs++;'
710
711 return c_src + c_dest
712
713 # Read destination register to write
714 def makeReadWElem(self, elem_op):
715 (elem_name, elem_ext) = elem_op
716 (elem_spec, dflt_elem_ext, zeroing) = self.elems[elem_name]
717 if elem_ext:
718 ext = elem_ext
719 else:
720 ext = dflt_elem_ext
721 ctype = self.parser.operandTypeMap[ext]
722 c_read = '\t\t%s& %s = %s[%s];\n' % \
723 (ctype, elem_name, self.base_name, elem_spec)
724 return c_read
725
726 def makeReadW(self, predWrite):
727 func = 'getWritableVecRegOperand'
728 if self.read_code != None:
729 return self.buildReadCode(func)
730
731 if predWrite:
732 rindex = '_destIndex++'
733 else:
734 rindex = '%d' % self.dest_reg_idx
735
736 c_readw = '\t\t%s& tmp_d%s = xc->%s(this, %s);\n'\
737 % ('TheISA::VecRegContainer', rindex, func, rindex)
738 if self.elemExt:
739 c_readw += '\t\tauto %s = tmp_d%s.as<%s>();\n' % (self.base_name,
740 rindex, self.parser.operandTypeMap[self.elemExt])
741 if self.ext:
742 c_readw += '\t\tauto %s = tmp_d%s.as<%s>();\n' % (self.base_name,
743 rindex, self.parser.operandTypeMap[self.ext])
744 if hasattr(self, 'active_elems'):
745 if self.active_elems:
746 for elem in self.active_elems:
747 c_readw += self.makeReadWElem(elem)
748 return c_readw
749
750 # Normal source operand read
751 def makeReadElem(self, elem_op, name):
752 (elem_name, elem_ext) = elem_op
753 (elem_spec, dflt_elem_ext, zeroing) = self.elems[elem_name]
754
755 if elem_ext:
756 ext = elem_ext
757 else:
758 ext = dflt_elem_ext
759 ctype = self.parser.operandTypeMap[ext]
760 c_read = '\t\t%s = %s[%s];\n' % \
761 (elem_name, name, elem_spec)
762 return c_read
763
764 def makeRead(self, predRead):
765 func = 'readVecRegOperand'
766 if self.read_code != None:
767 return self.buildReadCode(func)
768
769 if predRead:
770 rindex = '_sourceIndex++'
771 else:
772 rindex = '%d' % self.src_reg_idx
773
774 name = self.base_name
775 if self.is_dest and self.is_src:
776 name += '_merger'
777
778 c_read = '\t\t%s& tmp_s%s = xc->%s(this, %s);\n' \
779 % ('const TheISA::VecRegContainer', rindex, func, rindex)
780 # If the parser has detected that elements are being accessed, create
781 # the appropriate view
782 if self.elemExt:
783 c_read += '\t\tauto %s = tmp_s%s.as<%s>();\n' % \
784 (name, rindex, self.parser.operandTypeMap[self.elemExt])
785 if self.ext:
786 c_read += '\t\tauto %s = tmp_s%s.as<%s>();\n' % \
787 (name, rindex, self.parser.operandTypeMap[self.ext])
788 if hasattr(self, 'active_elems'):
789 if self.active_elems:
790 for elem in self.active_elems:
791 c_read += self.makeReadElem(elem, name)
792 return c_read
793
794 def makeWrite(self, predWrite):
795 func = 'setVecRegOperand'
796 if self.write_code != None:
797 return self.buildWriteCode(func)
798
799 wb = '''
800 if (traceData) {
801 traceData->setData(tmp_d%d);
802 }
803 ''' % self.dest_reg_idx
804 return wb
805
806 def finalize(self, predRead, predWrite):
807 super(VecRegOperand, self).finalize(predRead, predWrite)
808 if self.is_dest:
809 self.op_rd = self.makeReadW(predWrite) + self.op_rd
810
811class VecElemOperand(Operand):
812 reg_class = 'VecElemClass'
813
814 def isReg(self):
815 return 1
816
817 def isVecElem(self):
818 return 1
819
820 def makeDecl(self):
821 if self.is_dest and not self.is_src:
822 return '\n\t%s %s;' % (self.ctype, self.base_name)
823 else:
824 return ''
825
826 def makeConstructor(self, predRead, predWrite):
827 c_src = ''
828 c_dest = ''
829
830 numAccessNeeded = 1
831
832 if self.is_src:
833 c_src = ('\n\t_srcRegIdx[_numSrcRegs++] = RegId(%s, %s, %s);' %
834 (self.reg_class, self.reg_spec, self.elem_spec))
835
836 if self.is_dest:
837 c_dest = ('\n\t_destRegIdx[_numDestRegs++] = RegId(%s, %s, %s);' %
838 (self.reg_class, self.reg_spec, self.elem_spec))
839 c_dest += '\n\t_numVecElemDestRegs++;'
840 return c_src + c_dest
841
842 def makeRead(self, predRead):
843 c_read = 'xc->readVecElemOperand(this, %d)' % self.src_reg_idx
844
845 if self.ctype == 'float':
846 c_read = 'bitsToFloat32(%s)' % c_read
847 elif self.ctype == 'double':
848 c_read = 'bitsToFloat64(%s)' % c_read
849
850 return '\n\t%s %s = %s;\n' % (self.ctype, self.base_name, c_read)
851
852 def makeWrite(self, predWrite):
853 if self.ctype == 'float':
854 c_write = 'floatToBits32(%s)' % self.base_name
855 elif self.ctype == 'double':
856 c_write = 'floatToBits64(%s)' % self.base_name
857 else:
858 c_write = self.base_name
859
860 c_write = ('\n\txc->setVecElemOperand(this, %d, %s);' %
861 (self.dest_reg_idx, c_write))
862
863 return c_write
864
865class VecPredRegOperand(Operand):
866 reg_class = 'VecPredRegClass'
867
868 def __init__(self, parser, full_name, ext, is_src, is_dest):
869 Operand.__init__(self, parser, full_name, ext, is_src, is_dest)
870 self.parser = parser
871
872 def isReg(self):
873 return 1
874
875 def isVecPredReg(self):
876 return 1
877
878 def makeDecl(self):
879 return ''
880
881 def makeConstructor(self, predRead, predWrite):
882 c_src = ''
883 c_dest = ''
884
885 if self.is_src:
886 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
887
888 if self.is_dest:
889 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
890 c_dest += '\n\t_numVecPredDestRegs++;'
891
892 return c_src + c_dest
893
894 def makeRead(self, predRead):
895 func = 'readVecPredRegOperand'
896 if self.read_code != None:
897 return self.buildReadCode(func)
898
899 if predRead:
900 rindex = '_sourceIndex++'
901 else:
902 rindex = '%d' % self.src_reg_idx
903
904 c_read = '\t\t%s& tmp_s%s = xc->%s(this, %s);\n' % (
905 'const TheISA::VecPredRegContainer', rindex, func, rindex)
906 if self.ext:
907 c_read += '\t\tauto %s = tmp_s%s.as<%s>();\n' % (
908 self.base_name, rindex,
909 self.parser.operandTypeMap[self.ext])
910 return c_read
911
912 def makeReadW(self, predWrite):
913 func = 'getWritableVecPredRegOperand'
914 if self.read_code != None:
915 return self.buildReadCode(func)
916
917 if predWrite:
918 rindex = '_destIndex++'
919 else:
920 rindex = '%d' % self.dest_reg_idx
921
922 c_readw = '\t\t%s& tmp_d%s = xc->%s(this, %s);\n' % (
923 'TheISA::VecPredRegContainer', rindex, func, rindex)
924 if self.ext:
925 c_readw += '\t\tauto %s = tmp_d%s.as<%s>();\n' % (
926 self.base_name, rindex,
927 self.parser.operandTypeMap[self.ext])
928 return c_readw
929
930 def makeWrite(self, predWrite):
931 func = 'setVecPredRegOperand'
932 if self.write_code != None:
933 return self.buildWriteCode(func)
934
935 wb = '''
936 if (traceData) {
937 traceData->setData(tmp_d%d);
938 }
939 ''' % self.dest_reg_idx
940 return wb
941
942 def finalize(self, predRead, predWrite):
943 super(VecPredRegOperand, self).finalize(predRead, predWrite)
944 if self.is_dest:
945 self.op_rd = self.makeReadW(predWrite) + self.op_rd
946
947class CCRegOperand(Operand):
948 reg_class = 'CCRegClass'
949
950 def isReg(self):
951 return 1
952
953 def isCCReg(self):
954 return 1
955
956 def makeConstructor(self, predRead, predWrite):
957 c_src = ''
958 c_dest = ''
959
960 if self.is_src:
961 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
962 if self.hasReadPred():
963 c_src = '\n\tif (%s) {%s\n\t}' % \
964 (self.read_predicate, c_src)
965
966 if self.is_dest:
967 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
968 c_dest += '\n\t_numCCDestRegs++;'
969 if self.hasWritePred():
970 c_dest = '\n\tif (%s) {%s\n\t}' % \
971 (self.write_predicate, c_dest)
972
973 return c_src + c_dest
974
975 def makeRead(self, predRead):
976 if (self.ctype == 'float' or self.ctype == 'double'):
977 error('Attempt to read condition-code register as FP')
978 if self.read_code != None:
979 return self.buildReadCode('readCCRegOperand')
980
981 int_reg_val = ''
982 if predRead:
983 int_reg_val = 'xc->readCCRegOperand(this, _sourceIndex++)'
984 if self.hasReadPred():
985 int_reg_val = '(%s) ? %s : 0' % \
986 (self.read_predicate, int_reg_val)
987 else:
988 int_reg_val = 'xc->readCCRegOperand(this, %d)' % self.src_reg_idx
989
990 return '%s = %s;\n' % (self.base_name, int_reg_val)
991
992 def makeWrite(self, predWrite):
993 if (self.ctype == 'float' or self.ctype == 'double'):
994 error('Attempt to write condition-code register as FP')
995 if self.write_code != None:
996 return self.buildWriteCode('setCCRegOperand')
997
998 if predWrite:
999 wp = 'true'
1000 if self.hasWritePred():
1001 wp = self.write_predicate
1002
1003 wcond = 'if (%s)' % (wp)
1004 windex = '_destIndex++'
1005 else:
1006 wcond = ''
1007 windex = '%d' % self.dest_reg_idx
1008
1009 wb = '''
1010 %s
1011 {
1012 %s final_val = %s;
1013 xc->setCCRegOperand(this, %s, final_val);\n
1014 if (traceData) { traceData->setData(final_val); }
1015 }''' % (wcond, self.ctype, self.base_name, windex)
1016
1017 return wb
1018
1019class ControlRegOperand(Operand):
1020 reg_class = 'MiscRegClass'
1021
1022 def isReg(self):
1023 return 1
1024
1025 def isControlReg(self):
1026 return 1
1027
1028 def makeConstructor(self, predRead, predWrite):
1029 c_src = ''
1030 c_dest = ''
1031
1032 if self.is_src:
1033 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
1034
1035 if self.is_dest:
1036 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
1037
1038 return c_src + c_dest
1039
1040 def makeRead(self, predRead):
1041 bit_select = 0
1042 if (self.ctype == 'float' or self.ctype == 'double'):
1043 error('Attempt to read control register as FP')
1044 if self.read_code != None:
1045 return self.buildReadCode('readMiscRegOperand')
1046
1047 if predRead:
1048 rindex = '_sourceIndex++'
1049 else:
1050 rindex = '%d' % self.src_reg_idx
1051
1052 return '%s = xc->readMiscRegOperand(this, %s);\n' % \
1053 (self.base_name, rindex)
1054
1055 def makeWrite(self, predWrite):
1056 if (self.ctype == 'float' or self.ctype == 'double'):
1057 error('Attempt to write control register as FP')
1058 if self.write_code != None:
1059 return self.buildWriteCode('setMiscRegOperand')
1060
1061 if predWrite:
1062 windex = '_destIndex++'
1063 else:
1064 windex = '%d' % self.dest_reg_idx
1065
1066 wb = 'xc->setMiscRegOperand(this, %s, %s);\n' % \
1067 (windex, self.base_name)
1068 wb += 'if (traceData) { traceData->setData(%s); }' % \
1069 self.base_name
1070
1071 return wb
1072
1073class MemOperand(Operand):
1074 def isMem(self):
1075 return 1
1076
1077 def makeConstructor(self, predRead, predWrite):
1078 return ''
1079
1080 def makeDecl(self):
1081 # Declare memory data variable.
1082 return '%s %s;\n' % (self.ctype, self.base_name)
1083
1084 def makeRead(self, predRead):
1085 if self.read_code != None:
1086 return self.buildReadCode()
1087 return ''
1088
1089 def makeWrite(self, predWrite):
1090 if self.write_code != None:
1091 return self.buildWriteCode()
1092 return ''
1093
1094class PCStateOperand(Operand):
1095 def makeConstructor(self, predRead, predWrite):
1096 return ''
1097
1098 def makeRead(self, predRead):
1099 if self.reg_spec:
1100 # A component of the PC state.
1101 return '%s = __parserAutoPCState.%s();\n' % \
1102 (self.base_name, self.reg_spec)
1103 else:
1104 # The whole PC state itself.
1105 return '%s = xc->pcState();\n' % self.base_name
1106
1107 def makeWrite(self, predWrite):
1108 if self.reg_spec:
1109 # A component of the PC state.
1110 return '__parserAutoPCState.%s(%s);\n' % \
1111 (self.reg_spec, self.base_name)
1112 else:
1113 # The whole PC state itself.
1114 return 'xc->pcState(%s);\n' % self.base_name
1115
1116 def makeDecl(self):
1117 ctype = 'TheISA::PCState'
1118 if self.isPCPart():
1119 ctype = self.ctype
1120 # Note that initializations in the declarations are solely
1121 # to avoid 'uninitialized variable' errors from the compiler.
1122 return '%s %s = 0;\n' % (ctype, self.base_name)
1123
1124 def isPCState(self):
1125 return 1
1126
1127class OperandList(object):
1128 '''Find all the operands in the given code block. Returns an operand
1129 descriptor list (instance of class OperandList).'''
1130 def __init__(self, parser, code):
1131 self.items = []
1132 self.bases = {}
1133 # delete strings and comments so we don't match on operands inside
1134 for regEx in (stringRE, commentRE):
1135 code = regEx.sub('', code)
1136 # search for operands
1137 next_pos = 0
1138 while 1:
1139 match = parser.operandsRE.search(code, next_pos)
1140 if not match:
1141 # no more matches: we're done
1142 break
1143 op = match.groups()
1144 # regexp groups are operand full name, base, and extension
1145 (op_full, op_base, op_ext) = op
1146            # If this is an elem operand, define or update the corresponding
1147            # vector operand
1148 isElem = False
1149 if op_base in parser.elemToVector:
1150 isElem = True
1151 elem_op = (op_base, op_ext)
1152 op_base = parser.elemToVector[op_base]
1153 op_ext = '' # use the default one
1154 # if the token following the operand is an assignment, this is
1155 # a destination (LHS), else it's a source (RHS)
1156 is_dest = (assignRE.match(code, match.end()) != None)
1157 is_src = not is_dest
1158
1159 # see if we've already seen this one
1160 op_desc = self.find_base(op_base)
1161 if op_desc:
1162 if op_ext and op_ext != '' and op_desc.ext != op_ext:
1163 error ('Inconsistent extensions for operand %s: %s - %s' \
1164 % (op_base, op_desc.ext, op_ext))
1165 op_desc.is_src = op_desc.is_src or is_src
1166 op_desc.is_dest = op_desc.is_dest or is_dest
1167 if isElem:
1168 (elem_base, elem_ext) = elem_op
1169 found = False
1170 for ae in op_desc.active_elems:
1171 (ae_base, ae_ext) = ae
1172 if ae_base == elem_base:
1173 if ae_ext != elem_ext:
1174 error('Inconsistent extensions for elem'
1175 ' operand %s' % elem_base)
1176 else:
1177 found = True
1178 if not found:
1179 op_desc.active_elems.append(elem_op)
1180 else:
1181 # new operand: create new descriptor
1182 op_desc = parser.operandNameMap[op_base](parser,
1183 op_full, op_ext, is_src, is_dest)
1184 # if operand is a vector elem, add the corresponding vector
1185 # operand if not already done
1186 if isElem:
1187 op_desc.elemExt = elem_op[1]
1188 op_desc.active_elems = [elem_op]
1189 self.append(op_desc)
1190 # start next search after end of current match
1191 next_pos = match.end()
1192 self.sort()
1193 # enumerate source & dest register operands... used in building
1194 # constructor later
1195 self.numSrcRegs = 0
1196 self.numDestRegs = 0
1197 self.numFPDestRegs = 0
1198 self.numIntDestRegs = 0
1199 self.numVecDestRegs = 0
1200 self.numVecPredDestRegs = 0
1201 self.numCCDestRegs = 0
1202 self.numMiscDestRegs = 0
1203 self.memOperand = None
1204
1205        # Flags to keep track of whether one or more operands are to be
1206        # read/written conditionally.
1207 self.predRead = False
1208 self.predWrite = False
1209
1210 for op_desc in self.items:
1211 if op_desc.isReg():
1212 if op_desc.is_src:
1213 op_desc.src_reg_idx = self.numSrcRegs
1214 self.numSrcRegs += 1
1215 if op_desc.is_dest:
1216 op_desc.dest_reg_idx = self.numDestRegs
1217 self.numDestRegs += 1
1218 if op_desc.isFloatReg():
1219 self.numFPDestRegs += 1
1220 elif op_desc.isIntReg():
1221 self.numIntDestRegs += 1
1222 elif op_desc.isVecReg():
1223 self.numVecDestRegs += 1
1224 elif op_desc.isVecPredReg():
1225 self.numVecPredDestRegs += 1
1226 elif op_desc.isCCReg():
1227 self.numCCDestRegs += 1
1228 elif op_desc.isControlReg():
1229 self.numMiscDestRegs += 1
1230 elif op_desc.isMem():
1231 if self.memOperand:
1232 error("Code block has more than one memory operand.")
1233 self.memOperand = op_desc
1234
1235 # Check if this operand has read/write predication. If true, then
1236 # the microop will dynamically index source/dest registers.
1237 self.predRead = self.predRead or op_desc.hasReadPred()
1238 self.predWrite = self.predWrite or op_desc.hasWritePred()
1239
1240 if parser.maxInstSrcRegs < self.numSrcRegs:
1241 parser.maxInstSrcRegs = self.numSrcRegs
1242 if parser.maxInstDestRegs < self.numDestRegs:
1243 parser.maxInstDestRegs = self.numDestRegs
1244 if parser.maxMiscDestRegs < self.numMiscDestRegs:
1245 parser.maxMiscDestRegs = self.numMiscDestRegs
1246
1247 # now make a final pass to finalize op_desc fields that may depend
1248 # on the register enumeration
1249 for op_desc in self.items:
1250 op_desc.finalize(self.predRead, self.predWrite)
1251
1252 def __len__(self):
1253 return len(self.items)
1254
1255 def __getitem__(self, index):
1256 return self.items[index]
1257
1258 def append(self, op_desc):
1259 self.items.append(op_desc)
1260 self.bases[op_desc.base_name] = op_desc
1261
1262 def find_base(self, base_name):
1263 # like self.bases[base_name], but returns None if not found
1264 # (rather than raising exception)
1265 return self.bases.get(base_name)
1266
1267 # internal helper function for concat[Some]Attr{Strings|Lists}
1268 def __internalConcatAttrs(self, attr_name, filter, result):
1269 for op_desc in self.items:
1270 if filter(op_desc):
1271 result += getattr(op_desc, attr_name)
1272 return result
1273
1274 # return a single string that is the concatenation of the (string)
1275 # values of the specified attribute for all operands
1276 def concatAttrStrings(self, attr_name):
1277 return self.__internalConcatAttrs(attr_name, lambda x: 1, '')
1278
1279 # like concatAttrStrings, but only include the values for the operands
1280 # for which the provided filter function returns true
1281 def concatSomeAttrStrings(self, filter, attr_name):
1282 return self.__internalConcatAttrs(attr_name, filter, '')
1283
1284 # return a single list that is the concatenation of the (list)
1285 # values of the specified attribute for all operands
1286 def concatAttrLists(self, attr_name):
1287 return self.__internalConcatAttrs(attr_name, lambda x: 1, [])
1288
1289 # like concatAttrLists, but only include the values for the operands
1290 # for which the provided filter function returns true
1291 def concatSomeAttrLists(self, filter, attr_name):
1292 return self.__internalConcatAttrs(attr_name, filter, [])
1293
1294 def sort(self):
1295 self.items.sort(lambda a, b: a.sort_pri - b.sort_pri)
1296
1297class SubOperandList(OperandList):
1298 '''Find all the operands in the given code block. Returns an operand
1299 descriptor list (instance of class OperandList).'''
1300 def __init__(self, parser, code, master_list):
1301 self.items = []
1302 self.bases = {}
1303 # delete strings and comments so we don't match on operands inside
1304 for regEx in (stringRE, commentRE):
1305 code = regEx.sub('', code)
1306 # search for operands
1307 next_pos = 0
1308 while 1:
1309 match = parser.operandsRE.search(code, next_pos)
1310 if not match:
1311 # no more matches: we're done
1312 break
1313 op = match.groups()
1314 # regexp groups are operand full name, base, and extension
1315 (op_full, op_base, op_ext) = op
1316            # If this is an elem operand, define or update the corresponding
1317            # vector operand
1318 if op_base in parser.elemToVector:
1319 elem_op = op_base
1320 op_base = parser.elemToVector[elem_op]
1321 # find this op in the master list
1322 op_desc = master_list.find_base(op_base)
1323 if not op_desc:
1324 error('Found operand %s which is not in the master list!'
1325 % op_base)
1326 else:
1327 # See if we've already found this operand
1328 op_desc = self.find_base(op_base)
1329 if not op_desc:
1330 # if not, add a reference to it to this sub list
1331 self.append(master_list.bases[op_base])
1332
1333 # start next search after end of current match
1334 next_pos = match.end()
1335 self.sort()
1336 self.memOperand = None
1337 # Whether the whole PC needs to be read so parts of it can be accessed
1338 self.readPC = False
1339 # Whether the whole PC needs to be written after parts of it were
1340 # changed
1341 self.setPC = False
1342 # Whether this instruction manipulates the whole PC or parts of it.
1343 # Mixing the two is a bad idea and flagged as an error.
1344 self.pcPart = None
1345
1346        # Flags to keep track of whether one or more operands are to be
1347        # read/written conditionally.
1348 self.predRead = False
1349 self.predWrite = False
1350
1351 for op_desc in self.items:
1352 if op_desc.isPCPart():
1353 self.readPC = True
1354 if op_desc.is_dest:
1355 self.setPC = True
1356
1357 if op_desc.isPCState():
1358 if self.pcPart is not None:
1359 if self.pcPart and not op_desc.isPCPart() or \
1360 not self.pcPart and op_desc.isPCPart():
1361 error("Mixed whole and partial PC state operands.")
1362 self.pcPart = op_desc.isPCPart()
1363
1364 if op_desc.isMem():
1365 if self.memOperand:
1366 error("Code block has more than one memory operand.")
1367 self.memOperand = op_desc
1368
1369 # Check if this operand has read/write predication. If true, then
1370 # the microop will dynamically index source/dest registers.
1371 self.predRead = self.predRead or op_desc.hasReadPred()
1372 self.predWrite = self.predWrite or op_desc.hasWritePred()
1373
1374# Regular expression object to match C++ strings
1375stringRE = re.compile(r'"([^"\\]|\\.)*"')
1376
1377# Regular expression object to match C++ comments
1378# (used in findOperands())
1379commentRE = re.compile(r'(^)?[^\S\n]*/(?:\*(.*?)\*/[^\S\n]*|/[^\n]*)($)?',
1380 re.DOTALL | re.MULTILINE)
1381
1382# Regular expression object to match assignment statements (used in
1383# findOperands()). If the code immediately following the first
1384# appearance of the operand matches this regex, then the operand
1385# appears to be on the LHS of an assignment, and is thus a
1386# destination. basically we're looking for an '=' that's not '=='.
1387# The heinous tangle before that handles the case where the operand
1388# has an array subscript.
1389assignRE = re.compile(r'(\[[^\]]+\])?\s*=(?!=)', re.MULTILINE)
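# For illustration only (assuming Rd, Rs1, Rs2 and Mem are all declared
# operands): in the snippet "Rd = Rs1 + Rs2;" only Rd is followed by text
# matching assignRE, so Rd is classified as a destination and Rs1/Rs2 as
# sources.  The optional leading '[...]' group lets "Mem[offset] = Rt;"
# still mark Mem as a destination despite the subscript, while "Ra == 0"
# is not treated as an assignment thanks to the (?!=) lookahead.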
1390
1391def makeFlagConstructor(flag_list):
1392 if len(flag_list) == 0:
1393 return ''
1394 # filter out repeated flags
1395 flag_list.sort()
1396 i = 1
1397 while i < len(flag_list):
1398 if flag_list[i] == flag_list[i-1]:
1399 del flag_list[i]
1400 else:
1401 i += 1
1402 pre = '\n\tflags['
1403 post = '] = true;'
1404 code = pre + string.join(flag_list, post + pre) + post
1405 return code
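# Illustrative example: makeFlagConstructor(['IsInteger', 'IsLoad',
# 'IsInteger']) drops the duplicate and returns the C++ fragment
#   "\n\tflags[IsInteger] = true;\n\tflags[IsLoad] = true;"
# which InstObjParams appends to the generated constructor body.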
1406
1407# Assume all instruction flags are of the form 'IsFoo'
1408instFlagRE = re.compile(r'Is.*')
1409
1410# OpClass constants end in 'Op' except No_OpClass
1411opClassRE = re.compile(r'.*Op|No_OpClass')
1412
1413class InstObjParams(object):
1414 def __init__(self, parser, mnem, class_name, base_class = '',
1415 snippets = {}, opt_args = []):
1416 self.mnemonic = mnem
1417 self.class_name = class_name
1418 self.base_class = base_class
1419 if not isinstance(snippets, dict):
1420 snippets = {'code' : snippets}
1421 compositeCode = ' '.join(map(str, snippets.values()))
1422 self.snippets = snippets
1423
1424 self.operands = OperandList(parser, compositeCode)
1425
1426 # The header of the constructor declares the variables to be used
1427 # in the body of the constructor.
1428 header = ''
1429 header += '\n\t_numSrcRegs = 0;'
1430 header += '\n\t_numDestRegs = 0;'
1431 header += '\n\t_numFPDestRegs = 0;'
1432 header += '\n\t_numVecDestRegs = 0;'
1433 header += '\n\t_numVecElemDestRegs = 0;'
1434 header += '\n\t_numVecPredDestRegs = 0;'
1347 header += '\n\t_numIntDestRegs = 0;'
1348 header += '\n\t_numCCDestRegs = 0;'
1349
1350 self.constructor = header + \
1351 self.operands.concatAttrStrings('constructor')
1352
1353 self.flags = self.operands.concatAttrLists('flags')
1354
1355 self.op_class = None
1356
1357 # Optional arguments are assumed to be either StaticInst flags
1358 # or an OpClass value. To avoid having to import a complete
1359 # list of these values to match against, we do it ad-hoc
1360 # with regexps.
1361 for oa in opt_args:
1362 if instFlagRE.match(oa):
1363 self.flags.append(oa)
1364 elif opClassRE.match(oa):
1365 self.op_class = oa
1366 else:
1367 error('InstObjParams: optional arg "%s" not recognized '
1368 'as StaticInst::Flag or OpClass.' % oa)
1369
1370 # Make a basic guess on the operand class if not set.
1371 # These are good enough for most cases.
1372 if not self.op_class:
1373 if 'IsStore' in self.flags:
1374 # The order matters here: 'IsFloating' and 'IsInteger' are
1375 # usually set in FP instructions because of the base
1376 # register
1377 if 'IsFloating' in self.flags:
1378 self.op_class = 'FloatMemWriteOp'
1379 else:
1380 self.op_class = 'MemWriteOp'
1381 elif 'IsLoad' in self.flags or 'IsPrefetch' in self.flags:
1382 # The order matters here: 'IsFloating' and 'IsInteger' are
1383 # usually set in FP instructions because of the base
1384 # register
1385 if 'IsFloating' in self.flags:
1386 self.op_class = 'FloatMemReadOp'
1387 else:
1388 self.op_class = 'MemReadOp'
1389 elif 'IsFloating' in self.flags:
1390 self.op_class = 'FloatAddOp'
1391 elif 'IsVector' in self.flags:
1392 self.op_class = 'SimdAddOp'
1393 else:
1394 self.op_class = 'IntAluOp'
1395
1396        # add flag initialization to constructor here to include
1397 # any flags added via opt_args
1398 self.constructor += makeFlagConstructor(self.flags)
1399
1400 # if 'IsFloating' is set, add call to the FP enable check
1401 # function (which should be provided by isa_desc via a declare)
1402 # if 'IsVector' is set, add call to the Vector enable check
1403 # function (which should be provided by isa_desc via a declare)
1404 if 'IsFloating' in self.flags:
1405 self.fp_enable_check = 'fault = checkFpEnableFault(xc);'
1406 elif 'IsVector' in self.flags:
1407 self.fp_enable_check = 'fault = checkVecEnableFault(xc);'
1408 else:
1409 self.fp_enable_check = ''
1410
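# A sketch of how a format definition typically uses InstObjParams (the
# mnemonic, class name, base class and code snippet here are hypothetical):
#
#     iop = InstObjParams(parser, 'add', 'Add', 'IntOp',
#                         {'code': 'Rd = Rs1 + Rs2;'}, ['IsInteger'])
#
# The code snippet is scanned for operands, iop.constructor and iop.flags
# are built from them plus the optional args, and iop.op_class defaults to
# 'IntAluOp' here since no memory/FP/vector flags are present.
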
1411##############
1412# Stack: a simple stack object. Used for both formats (formatStack)
1413# and default cases (defaultStack). Simply wraps a list to give more
1414# stack-like syntax and enable initialization with an argument list
1415# (as opposed to an argument that's a list).
1416
1417class Stack(list):
1418 def __init__(self, *items):
1419 list.__init__(self, items)
1420
1421 def push(self, item):
1422 self.append(item);
1423
1424 def top(self):
1425 return self[-1]
1426
1427# Format a file include stack backtrace as a string
1428def backtrace(filename_stack):
1429 fmt = "In file included from %s:"
1430 return "\n".join([fmt % f for f in filename_stack])
1431
1432
1433#######################
1434#
1435# LineTracker: track filenames along with line numbers in PLY lineno fields
1436# PLY explicitly doesn't do anything with 'lineno' except propagate
1437# it.  This class lets us tie filenames to line numbers with a minimum
1438# of disruption to existing increment code.
1439#
1440
1441class LineTracker(object):
1442 def __init__(self, filename, lineno=1):
1443 self.filename = filename
1444 self.lineno = lineno
1445
1446 # Overload '+=' for increments. We need to create a new object on
1447 # each update else every token ends up referencing the same
1448 # constantly incrementing instance.
1449 def __iadd__(self, incr):
1450 return LineTracker(self.filename, self.lineno + incr)
1451
1452 def __str__(self):
1453 return "%s:%d" % (self.filename, self.lineno)
1454
1455 # In case there are places where someone really expects a number
1456 def __int__(self):
1457 return self.lineno
1458
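# Illustrative use of LineTracker: '+=' returns a fresh object, so tokens
# issued earlier keep the position they were created with:
#
#     lineno = LineTracker("foo.isa")   # foo.isa is a made-up filename
#     lineno += 1
#     str(lineno)   # -> "foo.isa:2"
#     int(lineno)   # -> 2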
1459
1460#######################
1461#
1462# ISA Parser
1463# parses ISA DSL and emits C++ headers and source
1464#
1465
1466class ISAParser(Grammar):
1467 def __init__(self, output_dir):
1468 super(ISAParser, self).__init__()
1469 self.output_dir = output_dir
1470
1471 self.filename = None # for output file watermarking/scaremongering
1472
1473 # variable to hold templates
1474 self.templateMap = {}
1475
1476 # This dictionary maps format name strings to Format objects.
1477 self.formatMap = {}
1478
1479        # Track open files and, if applicable, how many chunks each has
1480        # been split into so far.
1481 self.files = {}
1482 self.splits = {}
1483
1484 # isa_name / namespace identifier from namespace declaration.
1485 # before the namespace declaration, None.
1486 self.isa_name = None
1487 self.namespace = None
1488
1489 # The format stack.
1490 self.formatStack = Stack(NoFormat())
1491
1492 # The default case stack.
1493 self.defaultStack = Stack(None)
1494
1495 # Stack that tracks current file and line number. Each
1496 # element is a tuple (filename, lineno) that records the
1497 # *current* filename and the line number in the *previous*
1498 # file where it was included.
1499 self.fileNameStack = Stack()
1500
1501 symbols = ('makeList', 're', 'string')
1502 self.exportContext = dict([(s, eval(s)) for s in symbols])
1503
1504 self.maxInstSrcRegs = 0
1505 self.maxInstDestRegs = 0
1506 self.maxMiscDestRegs = 0
1507
1508 def __getitem__(self, i): # Allow object (self) to be
1509 return getattr(self, i) # passed to %-substitutions
1510
1511 # Change the file suffix of a base filename:
1512 # (e.g.) decoder.cc -> decoder-g.cc.inc for 'global' outputs
1513 def suffixize(self, s, sec):
1514 extn = re.compile('(\.[^\.]+)$') # isolate extension
1515 if self.namespace:
1516 return extn.sub(r'-ns\1.inc', s) # insert some text on either side
1517 else:
1518 return extn.sub(r'-g\1.inc', s)
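    # For example, suffixize('decoder.cc', 'decoder') yields
    # 'decoder-g.cc.inc' before the namespace declaration has been seen
    # and 'decoder-ns.cc.inc' afterwards (the section argument itself is
    # not consulted).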
1519
1520 # Get the file object for emitting code into the specified section
1521 # (header, decoder, exec, decode_block).
1522 def get_file(self, section):
1523 if section == 'decode_block':
1524 filename = 'decode-method.cc.inc'
1525 else:
1526 if section == 'header':
1527 file = 'decoder.hh'
1528 else:
1529 file = '%s.cc' % section
1530 filename = self.suffixize(file, section)
1531 try:
1532 return self.files[filename]
1533 except KeyError: pass
1534
1535 f = self.open(filename)
1536 self.files[filename] = f
1537
1538 # The splittable files are the ones with many independent
1539 # per-instruction functions - the decoder's instruction constructors
1540 # and the instruction execution (execute()) methods. These both have
1541 # the suffix -ns.cc.inc, meaning they are within the namespace part
1542 # of the ISA, contain object-emitting C++ source, and are included
1543 # into other top-level files. These are the files that need special
1544 # #define's to allow parts of them to be compiled separately. Rather
1545 # than splitting the emissions into separate files, the monolithic
1546 # output of the ISA parser is maintained, but the value (or lack
1547 # thereof) of the __SPLIT definition during C preprocessing will
1548 # select the different chunks. If no 'split' directives are used,
1549 # the cpp emissions have no effect.
1550 if re.search('-ns.cc.inc$', filename):
1551 print('#if !defined(__SPLIT) || (__SPLIT == 1)', file=f)
1552 self.splits[f] = 1
1553 # ensure requisite #include's
1554 elif filename == 'decoder-g.hh.inc':
1555 print('#include "base/bitfield.hh"', file=f)
1556
1557 return f
1558
1559 # Weave together the parts of the different output sections by
1560 # #include'ing them into some very short top-level .cc/.hh files.
1561 # These small files make it much clearer how this tool works, since
1562 # you directly see the chunks emitted as files that are #include'd.
1563 def write_top_level_files(self):
1564 # decoder header - everything depends on this
1565 file = 'decoder.hh'
1566 with self.open(file) as f:
1567 fn = 'decoder-g.hh.inc'
1568 assert(fn in self.files)
1569 f.write('#include "%s"\n' % fn)
1570
1571 fn = 'decoder-ns.hh.inc'
1572 assert(fn in self.files)
1573 f.write('namespace %s {\n#include "%s"\n}\n'
1574 % (self.namespace, fn))
1575
1576 # decoder method - cannot be split
1577 file = 'decoder.cc'
1578 with self.open(file) as f:
1579 fn = 'base/compiler.hh'
1580 f.write('#include "%s"\n' % fn)
1581
1582 fn = 'decoder-g.cc.inc'
1583 assert(fn in self.files)
1584 f.write('#include "%s"\n' % fn)
1585
1586 fn = 'decoder.hh'
1587 f.write('#include "%s"\n' % fn)
1588
1589 fn = 'decode-method.cc.inc'
1590 # is guaranteed to have been written for parse to complete
1591 f.write('#include "%s"\n' % fn)
1592
1593 extn = re.compile('(\.[^\.]+)$')
1594
1595 # instruction constructors
1596 splits = self.splits[self.get_file('decoder')]
1597 file_ = 'inst-constrs.cc'
1598 for i in range(1, splits+1):
1599 if splits > 1:
1600 file = extn.sub(r'-%d\1' % i, file_)
1601 else:
1602 file = file_
1603 with self.open(file) as f:
1604 fn = 'decoder-g.cc.inc'
1605 assert(fn in self.files)
1606 f.write('#include "%s"\n' % fn)
1607
1608 fn = 'decoder.hh'
1609 f.write('#include "%s"\n' % fn)
1610
1611 fn = 'decoder-ns.cc.inc'
1612 assert(fn in self.files)
1613 print('namespace %s {' % self.namespace, file=f)
1614 if splits > 1:
1615 print('#define __SPLIT %u' % i, file=f)
1616 print('#include "%s"' % fn, file=f)
1617 print('}', file=f)
1618
1619 # instruction execution
1620 splits = self.splits[self.get_file('exec')]
1621 for i in range(1, splits+1):
1622 file = 'generic_cpu_exec.cc'
1623 if splits > 1:
1624 file = extn.sub(r'_%d\1' % i, file)
1625 with self.open(file) as f:
1626 fn = 'exec-g.cc.inc'
1627 assert(fn in self.files)
1628 f.write('#include "%s"\n' % fn)
1629 f.write('#include "cpu/exec_context.hh"\n')
1630 f.write('#include "decoder.hh"\n')
1631
1632 fn = 'exec-ns.cc.inc'
1633 assert(fn in self.files)
1634 print('namespace %s {' % self.namespace, file=f)
1635 if splits > 1:
1636 print('#define __SPLIT %u' % i, file=f)
1637 print('#include "%s"' % fn, file=f)
1638 print('}', file=f)
1639
1640 # max_inst_regs.hh
1641 self.update('max_inst_regs.hh',
1642 '''namespace %(namespace)s {
1643 const int MaxInstSrcRegs = %(maxInstSrcRegs)d;
1644 const int MaxInstDestRegs = %(maxInstDestRegs)d;
1645 const int MaxMiscDestRegs = %(maxMiscDestRegs)d;\n}\n''' % self)
1646
1647 scaremonger_template ='''// DO NOT EDIT
1648// This file was automatically generated from an ISA description:
1649// %(filename)s
1650
1651''';
1652
1653 #####################################################################
1654 #
1655 # Lexer
1656 #
1657 # The PLY lexer module takes two things as input:
1658 # - A list of token names (the string list 'tokens')
1659 # - A regular expression describing a match for each token. The
1660 # regexp for token FOO can be provided in two ways:
1661 # - as a string variable named t_FOO
1662 # - as the doc string for a function named t_FOO. In this case,
1663 # the function is also executed, allowing an action to be
1664 # associated with each token match.
1665 #
1666 #####################################################################
1667
1668 # Reserved words. These are listed separately as they are matched
1669 # using the same regexp as generic IDs, but distinguished in the
1670 # t_ID() function. The PLY documentation suggests this approach.
1671 reserved = (
1672 'BITFIELD', 'DECODE', 'DECODER', 'DEFAULT', 'DEF', 'EXEC', 'FORMAT',
1673 'HEADER', 'LET', 'NAMESPACE', 'OPERAND_TYPES', 'OPERANDS',
1674 'OUTPUT', 'SIGNED', 'SPLIT', 'TEMPLATE'
1675 )
1676
1677 # List of tokens. The lex module requires this.
1678 tokens = reserved + (
1679 # identifier
1680 'ID',
1681
1682 # integer literal
1683 'INTLIT',
1684
1685 # string literal
1686 'STRLIT',
1687
1688 # code literal
1689 'CODELIT',
1690
1691 # ( ) [ ] { } < > , ; . : :: *
1692 'LPAREN', 'RPAREN',
1693 'LBRACKET', 'RBRACKET',
1694 'LBRACE', 'RBRACE',
1695 'LESS', 'GREATER', 'EQUALS',
1696 'COMMA', 'SEMI', 'DOT', 'COLON', 'DBLCOLON',
1697 'ASTERISK',
1698
1699 # C preprocessor directives
1700 'CPPDIRECTIVE'
1701
1702 # The following are matched but never returned. commented out to
1703 # suppress PLY warning
1704 # newfile directive
1705 # 'NEWFILE',
1706
1707 # endfile directive
1708 # 'ENDFILE'
1709 )
1710
1711 # Regular expressions for token matching
1712 t_LPAREN = r'\('
1713 t_RPAREN = r'\)'
1714 t_LBRACKET = r'\['
1715 t_RBRACKET = r'\]'
1716 t_LBRACE = r'\{'
1717 t_RBRACE = r'\}'
1718 t_LESS = r'\<'
1719 t_GREATER = r'\>'
1720 t_EQUALS = r'='
1721 t_COMMA = r','
1722 t_SEMI = r';'
1723 t_DOT = r'\.'
1724 t_COLON = r':'
1725 t_DBLCOLON = r'::'
1726 t_ASTERISK = r'\*'
1727
1728 # Identifiers and reserved words
1729 reserved_map = { }
1730 for r in reserved:
1731 reserved_map[r.lower()] = r
1732
1733 def t_ID(self, t):
1734 r'[A-Za-z_]\w*'
1735 t.type = self.reserved_map.get(t.value, 'ID')
1736 return t
1737
1738 # Integer literal
1739 def t_INTLIT(self, t):
1740 r'-?(0x[\da-fA-F]+)|\d+'
1741 try:
1742 t.value = int(t.value,0)
1743 except ValueError:
1744 error(t.lexer.lineno, 'Integer value "%s" too large' % t.value)
1745 t.value = 0
1746 return t
1747
1748 # String literal. Note that these use only single quotes, and
1749 # can span multiple lines.
1750 def t_STRLIT(self, t):
1751 r"(?m)'([^'])+'"
1752 # strip off quotes
1753 t.value = t.value[1:-1]
1754 t.lexer.lineno += t.value.count('\n')
1755 return t
1756
1757
1758 # "Code literal"... like a string literal, but delimiters are
1759 # '{{' and '}}' so they get formatted nicely under emacs c-mode
1760 def t_CODELIT(self, t):
1761 r"(?m)\{\{([^\}]|}(?!\}))+\}\}"
1762 # strip off {{ & }}
1763 t.value = t.value[2:-2]
1764 t.lexer.lineno += t.value.count('\n')
1765 return t
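    # For example, the input "{{ Rd = Rs1 + Rs2; }}" yields a CODELIT
    # token whose value is " Rd = Rs1 + Rs2; ": the braces are stripped
    # and lineno is advanced by any embedded newlines.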
1766
1767 def t_CPPDIRECTIVE(self, t):
1768 r'^\#[^\#].*\n'
1769 t.lexer.lineno += t.value.count('\n')
1770 return t
1771
1772 def t_NEWFILE(self, t):
1773 r'^\#\#newfile\s+"[^"]*"\n'
1774 self.fileNameStack.push(t.lexer.lineno)
1775 t.lexer.lineno = LineTracker(t.value[11:-2])
1776
1777 def t_ENDFILE(self, t):
1778 r'^\#\#endfile\n'
1779 t.lexer.lineno = self.fileNameStack.pop()
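    # These markers bracket included files so errors can be reported
    # against the right file and line, e.g. (file name is illustrative):
    #
    #     ##newfile "insts/mem.isa"
    #     ... contents of the included file ...
    #     ##endfile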
1780
1781 #
1782 # The functions t_NEWLINE, t_ignore, and t_error are
1783 # special for the lex module.
1784 #
1785
1786 # Newlines
1787 def t_NEWLINE(self, t):
1788 r'\n+'
1789 t.lexer.lineno += t.value.count('\n')
1790
1791 # Comments
1792 def t_comment(self, t):
1793 r'//.*'
1794
1795 # Completely ignored characters
1796 t_ignore = ' \t\x0c'
1797
1798 # Error handler
1799 def t_error(self, t):
1800 error(t.lexer.lineno, "illegal character '%s'" % t.value[0])
1801 t.skip(1)
1802
1803 #####################################################################
1804 #
1805 # Parser
1806 #
1807 # Every function whose name starts with 'p_' defines a grammar
1808 # rule. The rule is encoded in the function's doc string, while
1809 # the function body provides the action taken when the rule is
1810 # matched. The argument to each function is a list of the values
1811 # of the rule's symbols: t[0] for the LHS, and t[1..n] for the
1812 # symbols on the RHS. For tokens, the value is copied from the
1813 # t.value attribute provided by the lexer. For non-terminals, the
1814 # value is assigned by the producing rule; i.e., the job of the
1815 # grammar rule function is to set the value for the non-terminal
1816 # on the LHS (by assigning to t[0]).
1817 #####################################################################
1818
1819 # The LHS of the first grammar rule is used as the start symbol
1820 # (in this case, 'specification'). Note that this rule enforces
1821 # that there will be exactly one namespace declaration, with 0 or
1822 # more global defs/decls before and after it. The defs & decls
1823 # before the namespace decl will be outside the namespace; those
1824 # after will be inside. The decoder function is always inside the
1825 # namespace.
1826 def p_specification(self, t):
1827 'specification : opt_defs_and_outputs top_level_decode_block'
1828
1829 for f in self.splits.iterkeys():
1830 f.write('\n#endif\n')
1831
1832 for f in self.files.itervalues(): # close ALL the files;
1833 f.close() # not doing so can cause compilation to fail
1834
1835 self.write_top_level_files()
1836
1837 t[0] = True
1838
1839 # 'opt_defs_and_outputs' is a possibly empty sequence of def and/or
1840 # output statements. Its productions do the hard work of eventually
1841 # instantiating a GenCode, which are generally emitted (written to disk)
1842 # as soon as possible, except for the decode_block, which has to be
1843 # accumulated into one large function of nested switch/case blocks.
1844 def p_opt_defs_and_outputs_0(self, t):
1845 'opt_defs_and_outputs : empty'
1846
1847 def p_opt_defs_and_outputs_1(self, t):
1848 'opt_defs_and_outputs : defs_and_outputs'
1849
1850 def p_defs_and_outputs_0(self, t):
1851 'defs_and_outputs : def_or_output'
1852
1853 def p_defs_and_outputs_1(self, t):
1854 'defs_and_outputs : defs_and_outputs def_or_output'
1855
1856 # The list of possible definition/output statements.
1857 # They are all processed as they are seen.
1858 def p_def_or_output(self, t):
1859 '''def_or_output : name_decl
1860 | def_format
1861 | def_bitfield
1862 | def_bitfield_struct
1863 | def_template
1864 | def_operand_types
1865 | def_operands
1866 | output
1867 | global_let
1868 | split'''
1869
1870 # Utility function used by both invocations of splitting - explicit
1871 # 'split' keyword and split() function inside "let {{ }};" blocks.
1872 def split(self, sec, write=False):
1873 assert(sec != 'header' and "header cannot be split")
1874
1875 f = self.get_file(sec)
1876 self.splits[f] += 1
1877 s = '\n#endif\n#if __SPLIT == %u\n' % self.splits[f]
1878 if write:
1879 f.write(s)
1880 else:
1881 return s
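    # Sketch of the resulting chunk structure: a single split point turns
    # a -ns.cc.inc file into
    #
    #     #if !defined(__SPLIT) || (__SPLIT == 1)
    #     ... first chunk ...
    #     #endif
    #     #if __SPLIT == 2
    #     ... second chunk ...
    #     #endif
    #
    # and write_top_level_files() emits one top-level .cc per chunk, each
    # defining __SPLIT to select the chunk it wants.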
1882
1883 # split output file to reduce compilation time
1884 def p_split(self, t):
1885 'split : SPLIT output_type SEMI'
1886 assert(self.isa_name and "'split' not allowed before namespace decl")
1887
1888 self.split(t[2], True)
1889
1890 def p_output_type(self, t):
1891 '''output_type : DECODER
1892 | HEADER
1893 | EXEC'''
1894 t[0] = t[1]
1895
1896 # ISA name declaration looks like "namespace <foo>;"
1897 def p_name_decl(self, t):
1898 'name_decl : NAMESPACE ID SEMI'
1899 assert(self.isa_name == None and "Only 1 namespace decl permitted")
1900 self.isa_name = t[2]
1901 self.namespace = t[2] + 'Inst'
1902
1903 # Output blocks 'output <foo> {{...}}' (C++ code blocks) are copied
1904 # directly to the appropriate output section.
1905
1906 # Massage output block by substituting in template definitions and
1907 # bit operators. We handle '%'s embedded in the string that don't
1908 # indicate template substitutions by doubling them first so that the
1909 # format operation will reduce them back to single '%'s.
1910 def process_output(self, s):
1911 s = self.protectNonSubstPercents(s)
1912 return substBitOps(s % self.templateMap)
1913
1914 def p_output(self, t):
1915 'output : OUTPUT output_type CODELIT SEMI'
1916 kwargs = { t[2]+'_output' : self.process_output(t[3]) }
1917 GenCode(self, **kwargs).emit()
1918
1919 # global let blocks 'let {{...}}' (Python code blocks) are
1920 # executed directly when seen. Note that these execute in a
1921 # special variable context 'exportContext' to prevent the code
1922 # from polluting this script's namespace.
1923 def p_global_let(self, t):
1924 'global_let : LET CODELIT SEMI'
1925 def _split(sec):
1926 return self.split(sec)
1927 self.updateExportContext()
1928 self.exportContext["header_output"] = ''
1929 self.exportContext["decoder_output"] = ''
1930 self.exportContext["exec_output"] = ''
1931 self.exportContext["decode_block"] = ''
1932 self.exportContext["split"] = _split
1933 split_setup = '''
1934def wrap(func):
1935 def split(sec):
1936 globals()[sec + '_output'] += func(sec)
1937 return split
1938split = wrap(split)
1939del wrap
1940'''
1941 # This tricky setup (immediately above) allows us to just write
1942 # (e.g.) "split('exec')" in the Python code and the split #ifdef's
1943 # will automatically be added to the exec_output variable. The inner
1944 # Python execution environment doesn't know about the split points,
1945 # so we carefully inject and wrap a closure that can retrieve the
1946 # next split's #define from the parser and add it to the current
1947 # emission-in-progress.
1948 try:
1949 exec split_setup+fixPythonIndentation(t[2]) in self.exportContext
1950 except Exception, exc:
1951 traceback.print_exc(file=sys.stdout)
1952 if debug:
1953 raise
1954 error(t.lineno(1), 'In global let block: %s' % exc)
1955 GenCode(self,
1956 header_output=self.exportContext["header_output"],
1957 decoder_output=self.exportContext["decoder_output"],
1958 exec_output=self.exportContext["exec_output"],
1959 decode_block=self.exportContext["decode_block"]).emit()
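    # A hypothetical let block showing the exported names in use
    # (gen_insts() and SomeExecTemplate are illustrative, not from this
    # file):
    #
    #     let {{
    #         for inst in gen_insts():
    #             exec_output += SomeExecTemplate.subst(inst)
    #             split('exec')
    #     }};
    #
    # Each split('exec') call appends the next '#endif/#if __SPLIT == N'
    # marker to exec_output via the wrapper installed by split_setup.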
1960
1961 # Define the mapping from operand type extensions to C++ types and
1962 # bit widths (stored in operandTypeMap).
1963 def p_def_operand_types(self, t):
1964 'def_operand_types : DEF OPERAND_TYPES CODELIT SEMI'
1965 try:
1966 self.operandTypeMap = eval('{' + t[3] + '}')
1967 except Exception, exc:
1968 if debug:
1969 raise
1970 error(t.lineno(1),
1971 'In def operand_types: %s' % exc)
1972
1973 # Define the mapping from operand names to operand classes and
1974 # other traits. Stored in operandNameMap.
1975 def p_def_operands(self, t):
1976 'def_operands : DEF OPERANDS CODELIT SEMI'
1977 if not hasattr(self, 'operandTypeMap'):
1978 error(t.lineno(1),
1979 'error: operand types must be defined before operands')
1980 try:
1981 user_dict = eval('{' + t[3] + '}', self.exportContext)
1982 except Exception, exc:
1983 if debug:
1984 raise
1985 error(t.lineno(1), 'In def operands: %s' % exc)
1986 self.buildOperandNameMap(user_dict, t.lexer.lineno)
1987
1988 # A bitfield definition looks like:
1989 # 'def [signed] bitfield <ID> [<first>:<last>]'
1990 # This generates a preprocessor macro in the output file.
1991 def p_def_bitfield_0(self, t):
1992 'def_bitfield : DEF opt_signed BITFIELD ID LESS INTLIT COLON INTLIT GREATER SEMI'
1993 expr = 'bits(machInst, %2d, %2d)' % (t[6], t[8])
1994 if (t[2] == 'signed'):
1995 expr = 'sext<%d>(%s)' % (t[6] - t[8] + 1, expr)
1996 hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
1997 GenCode(self, header_output=hash_define).emit()
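    # Example: "def bitfield OPCODE <31:26>;" emits
    #     #undef OPCODE
    #     #define OPCODE  bits(machInst, 31, 26)
    # while "def signed bitfield SIMM <25:20>;" wraps the same expression
    # in sext<6>(...).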
1998
1999 # alternate form for single bit: 'def [signed] bitfield <ID> [<bit>]'
2000 def p_def_bitfield_1(self, t):
2001 'def_bitfield : DEF opt_signed BITFIELD ID LESS INTLIT GREATER SEMI'
2002 expr = 'bits(machInst, %2d, %2d)' % (t[6], t[6])
2003 if (t[2] == 'signed'):
2004 expr = 'sext<%d>(%s)' % (1, expr)
2005 hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
2006 GenCode(self, header_output=hash_define).emit()
2007
2008 # alternate form for structure member: 'def bitfield <ID> <ID>'
2009 def p_def_bitfield_struct(self, t):
2010 'def_bitfield_struct : DEF opt_signed BITFIELD ID id_with_dot SEMI'
2011 if (t[2] != ''):
2012 error(t.lineno(1),
2013 'error: structure bitfields are always unsigned.')
2014 expr = 'machInst.%s' % t[5]
2015 hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
2016 GenCode(self, header_output=hash_define).emit()
2017
2018 def p_id_with_dot_0(self, t):
2019 'id_with_dot : ID'
2020 t[0] = t[1]
2021
2022 def p_id_with_dot_1(self, t):
2023 'id_with_dot : ID DOT id_with_dot'
2024 t[0] = t[1] + t[2] + t[3]
2025
2026 def p_opt_signed_0(self, t):
2027 'opt_signed : SIGNED'
2028 t[0] = t[1]
2029
2030 def p_opt_signed_1(self, t):
2031 'opt_signed : empty'
2032 t[0] = ''
2033
2034 def p_def_template(self, t):
2035 'def_template : DEF TEMPLATE ID CODELIT SEMI'
2036 if t[3] in self.templateMap:
2037 print("warning: template %s already defined" % t[3])
2038 self.templateMap[t[3]] = Template(self, t[4])
2039
2040 # An instruction format definition looks like
2041 # "def format <fmt>(<params>) {{...}};"
2042 def p_def_format(self, t):
2043 'def_format : DEF FORMAT ID LPAREN param_list RPAREN CODELIT SEMI'
2044 (id, params, code) = (t[3], t[5], t[7])
2045 self.defFormat(id, params, code, t.lexer.lineno)
2046
2047 # The formal parameter list for an instruction format is a
2048 # possibly empty list of comma-separated parameters. Positional
2049 # (standard, non-keyword) parameters must come first, followed by
2050 # keyword parameters, followed by a '*foo' parameter that gets
2051 # excess positional arguments (as in Python). Each of these three
2052 # parameter categories is optional.
2053 #
2054 # Note that we do not support the '**foo' parameter for collecting
2055 # otherwise undefined keyword args. Otherwise the parameter list
2056 # is (I believe) identical to what is supported in Python.
2057 #
2058 # The param list generates a flat list of strings: the positional
2059 # parameter names, then 'name = default' strings for the keyword
2060 # parameters, then the '*foo' excess-args parameter if present.
2061 def p_param_list_0(self, t):
2062 'param_list : positional_param_list COMMA nonpositional_param_list'
2063 t[0] = t[1] + t[3]
2064
2065 def p_param_list_1(self, t):
2066 '''param_list : positional_param_list
2067 | nonpositional_param_list'''
2068 t[0] = t[1]
2069
2070 def p_positional_param_list_0(self, t):
2071 'positional_param_list : empty'
2072 t[0] = []
2073
2074 def p_positional_param_list_1(self, t):
2075 'positional_param_list : ID'
2076 t[0] = [t[1]]
2077
2078 def p_positional_param_list_2(self, t):
2079 'positional_param_list : positional_param_list COMMA ID'
2080 t[0] = t[1] + [t[3]]
2081
2082 def p_nonpositional_param_list_0(self, t):
2083 'nonpositional_param_list : keyword_param_list COMMA excess_args_param'
2084 t[0] = t[1] + t[3]
2085
2086 def p_nonpositional_param_list_1(self, t):
2087 '''nonpositional_param_list : keyword_param_list
2088 | excess_args_param'''
2089 t[0] = t[1]
2090
2091 def p_keyword_param_list_0(self, t):
2092 'keyword_param_list : keyword_param'
2093 t[0] = [t[1]]
2094
2095 def p_keyword_param_list_1(self, t):
2096 'keyword_param_list : keyword_param_list COMMA keyword_param'
2097 t[0] = t[1] + [t[3]]
2098
2099 def p_keyword_param(self, t):
2100 'keyword_param : ID EQUALS expr'
2101 t[0] = t[1] + ' = ' + t[3].__repr__()
2102
2103 def p_excess_args_param(self, t):
2104 'excess_args_param : ASTERISK ID'
2105 # Just concatenate them: '*ID'. Wrap in list to be consistent
2106 # with positional_param_list and keyword_param_list.
2107 t[0] = [t[1] + t[2]]
2108
2109 # End of format definition-related rules.
2110 ##############
2111
2112 #
2113 # A decode block looks like:
2114 # decode <field1> [, <field2>]* [default <inst>] { ... }
2115 #
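    # Illustrative example (hypothetical input):
    #   decode OPCODE default Unknown::unknown() {
    #       0x01: IntOp::add({{ Rc = Ra + Rb; }});
    #   }
    # Each decode block becomes a 'switch' statement; the top-level block is
    # additionally wrapped in the decodeInst() method below.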
2116 def p_top_level_decode_block(self, t):
2117 'top_level_decode_block : decode_block'
2118 codeObj = t[1]
2119 codeObj.wrap_decode_block('''
2120StaticInstPtr
2121%(isa_name)s::Decoder::decodeInst(%(isa_name)s::ExtMachInst machInst)
2122{
2123 using namespace %(namespace)s;
2124''' % self, '}')
2125
2126 codeObj.emit()
2127
2128 def p_decode_block(self, t):
2129 'decode_block : DECODE ID opt_default LBRACE decode_stmt_list RBRACE'
2130 default_defaults = self.defaultStack.pop()
2131 codeObj = t[5]
2132 # use the "default defaults" only if there was no explicit
2133 # default statement in decode_stmt_list
2134 if not codeObj.has_decode_default:
2135 codeObj += default_defaults
2136 codeObj.wrap_decode_block('switch (%s) {\n' % t[2], '}\n')
2137 t[0] = codeObj
2138
2139 # The opt_default statement serves only to push the "default
2140 # defaults" onto defaultStack. This value will be used by nested
2141 # decode blocks, and used and popped off when the current
2142 # decode_block is processed (in p_decode_block() above).
2143 def p_opt_default_0(self, t):
2144 'opt_default : empty'
2145 # no default specified: reuse the one currently at the top of
2146 # the stack
2147 self.defaultStack.push(self.defaultStack.top())
2148 # no meaningful value returned
2149 t[0] = None
2150
2151 def p_opt_default_1(self, t):
2152 'opt_default : DEFAULT inst'
2153 # push the new default
2154 codeObj = t[2]
2155 codeObj.wrap_decode_block('\ndefault:\n', 'break;\n')
2156 self.defaultStack.push(codeObj)
2157 # no meaningful value returned
2158 t[0] = None
2159
2160 def p_decode_stmt_list_0(self, t):
2161 'decode_stmt_list : decode_stmt'
2162 t[0] = t[1]
2163
2164 def p_decode_stmt_list_1(self, t):
2165 'decode_stmt_list : decode_stmt decode_stmt_list'
2166 if (t[1].has_decode_default and t[2].has_decode_default):
2167 error(t.lineno(1), 'Two default cases in decode block')
2168 t[0] = t[1] + t[2]
2169
2170 #
2171 # Decode statement rules
2172 #
2173 # There are four types of statements allowed in a decode block:
2174 # 1. Format blocks 'format <foo> { ... }'
2175 # 2. Nested decode blocks
2176 # 3. Instruction definitions.
2177 # 4. C preprocessor directives.
2178
2179
2180 # Preprocessor directives found in a decode statement list are
2181 # passed through to the output, replicated to all of the output
2182 # code streams. This works well for ifdefs, so we can ifdef out
2183 # both the declarations and the decode cases generated by an
2184 # instruction definition. Handling them as part of the grammar
2185 # makes it easy to keep them in the right place with respect to
2186 # the code generated by the other statements.
2187 def p_decode_stmt_cpp(self, t):
2188 'decode_stmt : CPPDIRECTIVE'
2189 t[0] = GenCode(self, t[1], t[1], t[1], t[1])
2190
2191 # A format block 'format <foo> { ... }' sets the default
2192 # instruction format used to handle instruction definitions inside
2193 # the block. This format can be overridden by using an explicit
2194 # format on the instruction definition or with a nested format
2195 # block.
2196 def p_decode_stmt_format(self, t):
2197 'decode_stmt : FORMAT push_format_id LBRACE decode_stmt_list RBRACE'
2198 # The format will be pushed on the stack when 'push_format_id'
2199 # is processed (see below). Once the parser has recognized
2200 # the full production (through the right brace), we're done
2201 # with the format, so now we can pop it.
2202 self.formatStack.pop()
2203 t[0] = t[4]
2204
2205 # This rule exists so we can set the current format (& push the
2206 # stack) when we recognize the format name part of the format
2207 # block.
2208 def p_push_format_id(self, t):
2209 'push_format_id : ID'
2210 try:
2211 self.formatStack.push(self.formatMap[t[1]])
2212 t[0] = ('', '// format %s' % t[1])
2213 except KeyError:
2214 error(t.lineno(1), 'instruction format "%s" not defined.' % t[1])
2215
2216 # Nested decode block: if the value of the current field matches
2217 # the specified constant(s), do a nested decode on some other field.
2218 def p_decode_stmt_decode(self, t):
2219 'decode_stmt : case_list COLON decode_block'
2220 case_list = t[1]
2221 codeObj = t[3]
2222 # just wrap the decoding code from the block as a case in the
2223 # outer switch statement.
2224 codeObj.wrap_decode_block('\n%s\n' % ''.join(case_list),
2225 'M5_UNREACHABLE;\n')
2226 codeObj.has_decode_default = (case_list == ['default:'])
2227 t[0] = codeObj
2228
2229 # Instruction definition (finally!).
2230 def p_decode_stmt_inst(self, t):
2231 'decode_stmt : case_list COLON inst SEMI'
2232 case_list = t[1]
2233 codeObj = t[3]
2234 codeObj.wrap_decode_block('\n%s' % ''.join(case_list), 'break;\n')
2235 codeObj.has_decode_default = (case_list == ['default:'])
2236 t[0] = codeObj
2237
2238 # The constant list for a decode case label must be non-empty, and must
2239 # either be the keyword 'default', or made up of one or more
2240 # comma-separated integer literals or strings which evaluate to
2241 # constants when compiled as C++.
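    # e.g. the case list '0x0, 0x1' becomes the labels 'case 0x0: case 0x1: ';
    # integer literals >= 2**32 are wrapped as 'case ULL(0x...): '.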
2242 def p_case_list_0(self, t):
2243 'case_list : DEFAULT'
2244 t[0] = ['default:']
2245
2246 def prep_int_lit_case_label(self, lit):
2247 if lit >= 2**32:
2248 return 'case ULL(%#x): ' % lit
2249 else:
2250 return 'case %#x: ' % lit
2251
2252 def prep_str_lit_case_label(self, lit):
2253 return 'case %s: ' % lit
2254
2255 def p_case_list_1(self, t):
2256 'case_list : INTLIT'
2257 t[0] = [self.prep_int_lit_case_label(t[1])]
2258
2259 def p_case_list_2(self, t):
2260 'case_list : STRLIT'
2261 t[0] = [self.prep_str_lit_case_label(t[1])]
2262
2263 def p_case_list_3(self, t):
2264 'case_list : case_list COMMA INTLIT'
2265 t[0] = t[1]
2266 t[0].append(self.prep_int_lit_case_label(t[3]))
2267
2268 def p_case_list_4(self, t):
2269 'case_list : case_list COMMA STRLIT'
2270 t[0] = t[1]
2271 t[0].append(self.prep_str_lit_case_label(t[3]))
2272
2273 # Define an instruction using the current instruction format
2274 # (specified by an enclosing format block).
2275 # "<mnemonic>(<args>)"
2276 def p_inst_0(self, t):
2277 'inst : ID LPAREN arg_list RPAREN'
2278 # Pass the ID and arg list to the current format class to deal with.
2279 currentFormat = self.formatStack.top()
2280 codeObj = currentFormat.defineInst(self, t[1], t[3], t.lexer.lineno)
2281 args = ','.join(map(str, t[3]))
2282 args = re.sub('(?m)^', '//', args)
2283 args = re.sub('^//', '', args)
2284 comment = '\n// %s::%s(%s)\n' % (currentFormat.id, t[1], args)
2285 codeObj.prepend_all(comment)
2286 t[0] = codeObj
2287
2288 # Define an instruction using an explicitly specified format:
2289 # "<fmt>::<mnemonic>(<args>)"
2290 def p_inst_1(self, t):
2291 'inst : ID DBLCOLON ID LPAREN arg_list RPAREN'
2292 try:
2293 format = self.formatMap[t[1]]
2294 except KeyError:
2295 error(t.lineno(1), 'instruction format "%s" not defined.' % t[1])
2296
2297 codeObj = format.defineInst(self, t[3], t[5], t.lexer.lineno)
2298 comment = '\n// %s::%s(%s)\n' % (t[1], t[3], t[5])
2299 codeObj.prepend_all(comment)
2300 t[0] = codeObj
2301
2302 # The arg list generates a tuple, where the first element is a
2303 # list of the positional args and the second element is a dict
2304 # containing the keyword args.
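    # e.g. (hypothetical) 'add({{ Rc = Ra + Rb; }}, flags=IsInteger)' yields
    # roughly ([' Rc = Ra + Rb; '], {'flags': 'IsInteger'}).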
2305 def p_arg_list_0(self, t):
2306 'arg_list : positional_arg_list COMMA keyword_arg_list'
2307 t[0] = ( t[1], t[3] )
2308
2309 def p_arg_list_1(self, t):
2310 'arg_list : positional_arg_list'
2311 t[0] = ( t[1], {} )
2312
2313 def p_arg_list_2(self, t):
2314 'arg_list : keyword_arg_list'
2315 t[0] = ( [], t[1] )
2316
2317 def p_positional_arg_list_0(self, t):
2318 'positional_arg_list : empty'
2319 t[0] = []
2320
2321 def p_positional_arg_list_1(self, t):
2322 'positional_arg_list : expr'
2323 t[0] = [t[1]]
2324
2325 def p_positional_arg_list_2(self, t):
2326 'positional_arg_list : positional_arg_list COMMA expr'
2327 t[0] = t[1] + [t[3]]
2328
2329 def p_keyword_arg_list_0(self, t):
2330 'keyword_arg_list : keyword_arg'
2331 t[0] = t[1]
2332
2333 def p_keyword_arg_list_1(self, t):
2334 'keyword_arg_list : keyword_arg_list COMMA keyword_arg'
2335 t[0] = t[1]
2336 t[0].update(t[3])
2337
2338 def p_keyword_arg(self, t):
2339 'keyword_arg : ID EQUALS expr'
2340 t[0] = { t[1] : t[3] }
2341
2342 #
2343 # Basic expressions. These constitute the argument values of
2344 # "function calls" (i.e. instruction definitions in the decode
2345 # block) and default values for formal parameters of format
2346 # functions.
2347 #
2348 # Right now, these are either strings, integers, or (recursively)
2349 # lists of exprs (using Python square-bracket list syntax). Note
2350 # that bare identifiers are treated as string constants here (since
2351 # there isn't really a variable namespace to refer to).
2352 #
2353 def p_expr_0(self, t):
2354 '''expr : ID
2355 | INTLIT
2356 | STRLIT
2357 | CODELIT'''
2358 t[0] = t[1]
2359
2360 def p_expr_1(self, t):
2361 '''expr : LBRACKET list_expr RBRACKET'''
2362 t[0] = t[2]
2363
2364 def p_list_expr_0(self, t):
2365 'list_expr : expr'
2366 t[0] = [t[1]]
2367
2368 def p_list_expr_1(self, t):
2369 'list_expr : list_expr COMMA expr'
2370 t[0] = t[1] + [t[3]]
2371
2372 def p_list_expr_2(self, t):
2373 'list_expr : empty'
2374 t[0] = []
2375
2376 #
2377 # Empty production... use in other rules for readability.
2378 #
2379 def p_empty(self, t):
2380 'empty :'
2381 pass
2382
2383 # Parse error handler. Note that the argument here is the
2384 # offending *token*, not a grammar symbol (hence the need to use
2385 # t.value)
2386 def p_error(self, t):
2387 if t:
2388 error(t.lexer.lineno, "syntax error at '%s'" % t.value)
2389 else:
2390 error("unknown syntax error")
2391
2392 # END OF GRAMMAR RULES
2393
2394 def updateExportContext(self):
2395
2396 # create a continuation that allows us to grab the current parser
2397 def wrapInstObjParams(*args):
2398 return InstObjParams(self, *args)
2399 self.exportContext['InstObjParams'] = wrapInstObjParams
2400 self.exportContext.update(self.templateMap)
2401
2402 def defFormat(self, id, params, code, lineno):
2403 '''Define a new format'''
2404
2405 # make sure we haven't already defined this one
2406 if id in self.formatMap:
2407 error(lineno, 'format %s redefined.' % id)
2408
2409 # create new object and store in global map
2410 self.formatMap[id] = Format(id, params, code)
2411
2412 def protectNonSubstPercents(self, s):
2413 '''Protect any non-dict-substitution '%'s in a format string
2414 (i.e. those not followed by '(')'''
2415
2416 return re.sub(r'%(?!\()', '%%', s)
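    # e.g. protectNonSubstPercents('mask % 0x3 -> %(mask)s')
    #   == 'mask %% 0x3 -> %(mask)s'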
2417
2418 def buildOperandNameMap(self, user_dict, lineno):
2419 operand_name = {}
2420 for op_name, val in user_dict.iteritems():
2421
2422 # Check if extra attributes have been specified.
2423 if len(val) > 9:
2424 error(lineno, 'error: too many attributes for operand "%s"' %
2425 op_name)
2426
2427 # Pad val with None in case optional args are missing
2428 val += (None, None, None, None)
2429 base_cls_name, dflt_ext, reg_spec, flags, sort_pri, \
2430 read_code, write_code, read_predicate, write_predicate = val[:9]
2431
2432 # Canonical flag structure is a triple of lists, where each list
2433 # indicates the set of flags implied by this operand always, when
2434 # used as a source, and when used as a dest, respectively.
2435 # For simplicity this can be initialized using a variety of fairly
2436 # obvious shortcuts; we convert these to canonical form here.
2437 if not flags:
2438 # no flags specified (e.g., 'None')
2439 flags = ( [], [], [] )
2440 elif isinstance(flags, str):
2441 # a single flag: assumed to be unconditional
2442 flags = ( [ flags ], [], [] )
2443 elif isinstance(flags, list):
2444 # a list of flags: also assumed to be unconditional
2445 flags = ( flags, [], [] )
2446 elif isinstance(flags, tuple):
2447 # it's a tuple: it should be a triple,
2448 # but each item could be a single string or a list
2449 (uncond_flags, src_flags, dest_flags) = flags
2450 flags = (makeList(uncond_flags),
2451 makeList(src_flags), makeList(dest_flags))
2452
2453 # Accumulate attributes of new operand class in tmp_dict
2454 tmp_dict = {}
2455 attrList = ['reg_spec', 'flags', 'sort_pri',
2456 'read_code', 'write_code',
2457 'read_predicate', 'write_predicate']
2458 if dflt_ext:
2459 dflt_ctype = self.operandTypeMap[dflt_ext]
2460 attrList.extend(['dflt_ctype', 'dflt_ext'])
2461 # reg_spec is either just a string or a dictionary
2462 # (for elems of vector)
2463 if isinstance(reg_spec, tuple):
2464 (reg_spec, elem_spec) = reg_spec
2465 if isinstance(elem_spec, str):
2466 attrList.append('elem_spec')
2467 else:
2468 assert(isinstance(elem_spec, dict))
2469 elems = elem_spec
2470 attrList.append('elems')
2471 for attr in attrList:
2472 tmp_dict[attr] = eval(attr)
2473 tmp_dict['base_name'] = op_name
2474
2475 # New class name will be e.g. "IntReg_Ra"
2476 cls_name = base_cls_name + '_' + op_name
2477 # Evaluate string arg to get class object. Note that the
2478 # actual base class for "IntReg" is "IntRegOperand", i.e. we
2479 # have to append "Operand".
2480 try:
2481 base_cls = eval(base_cls_name + 'Operand')
2482 except NameError:
2483 error(lineno,
2484 'error: unknown operand base class "%s"' % base_cls_name)
2485 # The following statement creates a new class called
2486 # <cls_name> as a subclass of <base_cls> with the attributes
2487 # in tmp_dict, just as if we evaluated a class declaration.
2488 operand_name[op_name] = type(cls_name, (base_cls,), tmp_dict)
2489
2490 self.operandNameMap = operand_name
2491
2492 # Define operand variables.
2493 operands = user_dict.keys()
2494 # Add the elems defined in the vector operands and
2495 # build a map elem -> vector (used in OperandList)
2496 elem_to_vec = {}
2497 for op in user_dict.keys():
2498 if hasattr(self.operandNameMap[op], 'elems'):
2499 for elem in self.operandNameMap[op].elems.keys():
2500 operands.append(elem)
2501 elem_to_vec[elem] = op
2502 self.elemToVector = elem_to_vec
2503 extensions = self.operandTypeMap.keys()
2504
2505 operandsREString = r'''
2506 (?<!\w) # neg. lookbehind assertion: prevent partial matches
2507 ((%s)(?:_(%s))?) # match: operand with optional '_' then suffix
2508 (?!\w) # neg. lookahead assertion: prevent partial matches
2509 ''' % (string.join(operands, '|'), string.join(extensions, '|'))
2510
2511 self.operandsRE = re.compile(operandsREString, re.MULTILINE|re.VERBOSE)
2512
2513 # Same as operandsREString, but extension is mandatory, and only two
2514 # groups are returned (base and ext, not full name as above).
2515 # Used for stripping the '_<ext>' type extension to make C++ identifiers.
2516 operandsWithExtREString = r'(?<!\w)(%s)_(%s)(?!\w)' \
2517 % (string.join(operands, '|'), string.join(extensions, '|'))
2518
2519 self.operandsWithExtRE = \
2520 re.compile(operandsWithExtREString, re.MULTILINE)
2521
2522 def substMungedOpNames(self, code):
2523 '''Munge operand names in code string to make legal C++
2524 variable names. This means getting rid of the type extension
2525 if any. Will match the base_name attribute of the Operand object.'''
2526 return self.operandsWithExtRE.sub(r'\1', code)
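    # e.g. substMungedOpNames('Ra_ud + Rb_ud') -> 'Ra + Rb', assuming 'Ra' and
    # 'Rb' are defined operands and 'ud' a defined type extension.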
2527
2528 def mungeSnippet(self, s):
2529 '''Fix up code snippets for final substitution in templates.'''
2530 if isinstance(s, str):
2531 return self.substMungedOpNames(substBitOps(s))
2532 else:
2533 return s
2534
2535 def open(self, name, bare=False):
2536 '''Open the output file for writing and include scary warning.'''
2537 filename = os.path.join(self.output_dir, name)
2538 f = open(filename, 'w')
2539 if f:
2540 if not bare:
2541 f.write(ISAParser.scaremonger_template % self)
2542 return f
2543
2544 def update(self, file, contents):
2545 '''Update the output file only. Scons should handle the case when
2546 the new contents are unchanged using its built-in hash feature.'''
2547 f = self.open(file)
2548 f.write(contents)
2549 f.close()
2550
2551 # This regular expression matches '##include' directives
2552 includeRE = re.compile(r'^\s*##include\s+"(?P<filename>[^"]*)".*$',
2553 re.MULTILINE)
2554
2555 def replace_include(self, matchobj, dirname):
2556 """Function to replace a matched '##include' directive with the
2557 contents of the specified file (with nested ##includes
2558 replaced recursively). 'matchobj' is an re match object
2559 (from a match of includeRE) and 'dirname' is the directory
2560 relative to which the file path should be resolved."""
2561
2562 fname = matchobj.group('filename')
2563 full_fname = os.path.normpath(os.path.join(dirname, fname))
2564 contents = '##newfile "%s"\n%s\n##endfile\n' % \
2565 (full_fname, self.read_and_flatten(full_fname))
2566 return contents
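    # e.g. a source line '##include "decoder.isa"' is replaced in place by the
    # (recursively flattened) contents of decoder.isa, bracketed by ##newfile
    # and ##endfile markers so the lexer can keep line numbers straight.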
2567
2568 def read_and_flatten(self, filename):
2569 """Read a file and recursively flatten nested '##include' files."""
2570
2571 current_dir = os.path.dirname(filename)
2572 try:
2573 contents = open(filename).read()
2574 except IOError:
2575 error('Error including file "%s"' % filename)
2576
2577 self.fileNameStack.push(LineTracker(filename))
2578
2579 # Find any includes and include them
2580 def replace(matchobj):
2581 return self.replace_include(matchobj, current_dir)
2582 contents = self.includeRE.sub(replace, contents)
2583
2584 self.fileNameStack.pop()
2585 return contents
2586
2587 AlreadyGenerated = {}
2588
2589 def _parse_isa_desc(self, isa_desc_file):
2590 '''Read in and parse the ISA description.'''
2591
2592 # The build system can end up running the ISA parser twice: once to
2593 # finalize the build dependencies, and then to actually generate
2594 # the files it expects (in src/arch/$ARCH/generated). This code
2595 # doesn't do anything different either time, however; the SCons
2596 # invocations just expect different things. Since this code runs
2597 # within SCons, we can just remember that we've already run and
2598 # not perform a completely unnecessary run, since the ISA parser's
2599 # effect is idempotent.
2600 if isa_desc_file in ISAParser.AlreadyGenerated:
2601 return
2602
2603 # grab the last three path components of isa_desc_file
2604 self.filename = '/'.join(isa_desc_file.split('/')[-3:])
2605
2606 # Read file and (recursively) all included files into a string.
2607 # PLY requires that the input be in a single string so we have to
2608 # do this up front.
2609 isa_desc = self.read_and_flatten(isa_desc_file)
2610
2611 # Initialize lineno tracker
2612 self.lex.lineno = LineTracker(isa_desc_file)
2613
2614 # Parse.
2615 self.parse_string(isa_desc)
2616
2617 ISAParser.AlreadyGenerated[isa_desc_file] = None
2618
2619 def parse_isa_desc(self, *args, **kwargs):
2620 try:
2621 self._parse_isa_desc(*args, **kwargs)
2622 except ISAParserError, e:
2623 print(backtrace(self.fileNameStack))
2624 print("At %s:" % e.lineno)
2625 print(e)
2626 sys.exit(1)
2627
2628# Called as script: get args from command line.
2629# Args are: <isa desc file> <output dir>
2630if __name__ == '__main__':
2631 ISAParser(sys.argv[2]).parse_isa_desc(sys.argv[1])
1435 header += '\n\t_numIntDestRegs = 0;'
1436 header += '\n\t_numCCDestRegs = 0;'
1437
1438 self.constructor = header + \
1439 self.operands.concatAttrStrings('constructor')
1440
1441 self.flags = self.operands.concatAttrLists('flags')
1442
1443 self.op_class = None
1444
1445 # Optional arguments are assumed to be either StaticInst flags
1446 # or an OpClass value. To avoid having to import a complete
1447 # list of these values to match against, we do it ad-hoc
1448 # with regexps.
1449 for oa in opt_args:
1450 if instFlagRE.match(oa):
1451 self.flags.append(oa)
1452 elif opClassRE.match(oa):
1453 self.op_class = oa
1454 else:
1455 error('InstObjParams: optional arg "%s" not recognized '
1456 'as StaticInst::Flag or OpClass.' % oa)
1457
1458 # Make a basic guess on the operand class if not set.
1459 # These are good enough for most cases.
1460 if not self.op_class:
1461 if 'IsStore' in self.flags:
1462 # The order matters here: 'IsFloating' and 'IsInteger' are
1463 # usually set in FP instructions because of the base
1464 # register
1465 if 'IsFloating' in self.flags:
1466 self.op_class = 'FloatMemWriteOp'
1467 else:
1468 self.op_class = 'MemWriteOp'
1469 elif 'IsLoad' in self.flags or 'IsPrefetch' in self.flags:
1470 # The order matters here: 'IsFloating' and 'IsInteger' are
1471 # usually set in FP instructions because of the base
1472 # register
1473 if 'IsFloating' in self.flags:
1474 self.op_class = 'FloatMemReadOp'
1475 else:
1476 self.op_class = 'MemReadOp'
1477 elif 'IsFloating' in self.flags:
1478 self.op_class = 'FloatAddOp'
1479 elif 'IsVector' in self.flags:
1480 self.op_class = 'SimdAddOp'
1481 else:
1482 self.op_class = 'IntAluOp'
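        # e.g. flags containing 'IsLoad' and 'IsFloating' (with no explicit
        # OpClass argument) give 'FloatMemReadOp', while a plain integer ALU
        # instruction falls through to 'IntAluOp'.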
1483
1484 # add flag initialization to constructor here to include
1485 # any flags added via opt_args
1486 self.constructor += makeFlagConstructor(self.flags)
1487
1488 # if 'IsFloating' is set, add call to the FP enable check
1489 # function (which should be provided by isa_desc via a declare)
1490 # if 'IsVector' is set, add call to the Vector enable check
1491 # function (which should be provided by isa_desc via a declare)
1492 if 'IsFloating' in self.flags:
1493 self.fp_enable_check = 'fault = checkFpEnableFault(xc);'
1494 elif 'IsVector' in self.flags:
1495 self.fp_enable_check = 'fault = checkVecEnableFault(xc);'
1496 else:
1497 self.fp_enable_check = ''
1498
1499##############
1500# Stack: a simple stack object. Used for both formats (formatStack)
1501# and default cases (defaultStack). Simply wraps a list to give more
1502# stack-like syntax and enable initialization with an argument list
1503# (as opposed to an argument that's a list).
1504
1505class Stack(list):
1506 def __init__(self, *items):
1507 list.__init__(self, items)
1508
1509 def push(self, item):
1510 self.append(item)
1511
1512 def top(self):
1513 return self[-1]
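    # e.g. s = Stack('fmt0'); s.push('fmt1'); s.top() == 'fmt1'; a later
    # s.pop() (inherited from list) makes 'fmt0' the top element again.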
1514
1515# Format a file include stack backtrace as a string
1516def backtrace(filename_stack):
1517 fmt = "In file included from %s:"
1518 return "\n".join([fmt % f for f in filename_stack])
1519
1520
1521#######################
1522#
1523# LineTracker: track filenames along with line numbers in PLY lineno fields
1524# PLY explicitly doesn't do anything with 'lineno' except propagate
1525 # it. This class lets us tie filenames to line numbers with a
1526# minimum of disruption to existing increment code.
1527#
1528
1529class LineTracker(object):
1530 def __init__(self, filename, lineno=1):
1531 self.filename = filename
1532 self.lineno = lineno
1533
1534 # Overload '+=' for increments. We need to create a new object on
1535 # each update else every token ends up referencing the same
1536 # constantly incrementing instance.
1537 def __iadd__(self, incr):
1538 return LineTracker(self.filename, self.lineno + incr)
1539
1540 def __str__(self):
1541 return "%s:%d" % (self.filename, self.lineno)
1542
1543 # In case there are places where someone really expects a number
1544 def __int__(self):
1545 return self.lineno
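    # e.g. lt = LineTracker('decoder.isa'); lt += 3
    #      str(lt) == 'decoder.isa:4' and int(lt) == 4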
1546
1547
1548#######################
1549#
1550# ISA Parser
1551# parses ISA DSL and emits C++ headers and source
1552#
1553
1554class ISAParser(Grammar):
1555 def __init__(self, output_dir):
1556 super(ISAParser, self).__init__()
1557 self.output_dir = output_dir
1558
1559 self.filename = None # for output file watermarking/scaremongering
1560
1561 # variable to hold templates
1562 self.templateMap = {}
1563
1564 # This dictionary maps format name strings to Format objects.
1565 self.formatMap = {}
1566
1567 # Track open files and, if applicable, how many chunks each has been
1568 # split into so far.
1569 self.files = {}
1570 self.splits = {}
1571
1572 # isa_name / namespace identifier from namespace declaration.
1573 # before the namespace declaration, None.
1574 self.isa_name = None
1575 self.namespace = None
1576
1577 # The format stack.
1578 self.formatStack = Stack(NoFormat())
1579
1580 # The default case stack.
1581 self.defaultStack = Stack(None)
1582
1583 # Stack that tracks current file and line number. Each
1584 # element is a LineTracker (filename plus line number) that records the
1585 # *current* filename and the line number in the *previous*
1586 # file where it was included.
1587 self.fileNameStack = Stack()
1588
1589 symbols = ('makeList', 're', 'string')
1590 self.exportContext = dict([(s, eval(s)) for s in symbols])
1591
1592 self.maxInstSrcRegs = 0
1593 self.maxInstDestRegs = 0
1594 self.maxMiscDestRegs = 0
1595
1596 def __getitem__(self, i): # Allow object (self) to be
1597 return getattr(self, i) # passed to %-substitutions
1598
1599 # Change the file suffix of a base filename:
1600 # (e.g.) decoder.cc -> decoder-g.cc.inc for 'global' outputs
1601 def suffixize(self, s, sec):
1602 extn = re.compile('(\.[^\.]+)$') # isolate extension
1603 if self.namespace:
1604 return extn.sub(r'-ns\1.inc', s) # insert some text on either side
1605 else:
1606 return extn.sub(r'-g\1.inc', s)
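    # e.g. suffixize('decoder.cc', 'decoder') returns 'decoder-ns.cc.inc' once
    # a namespace has been declared, or 'decoder-g.cc.inc' before that.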
1607
1608 # Get the file object for emitting code into the specified section
1609 # (header, decoder, exec, decode_block).
1610 def get_file(self, section):
1611 if section == 'decode_block':
1612 filename = 'decode-method.cc.inc'
1613 else:
1614 if section == 'header':
1615 file = 'decoder.hh'
1616 else:
1617 file = '%s.cc' % section
1618 filename = self.suffixize(file, section)
1619 try:
1620 return self.files[filename]
1621 except KeyError: pass
1622
1623 f = self.open(filename)
1624 self.files[filename] = f
1625
1626 # The splittable files are the ones with many independent
1627 # per-instruction functions - the decoder's instruction constructors
1628 # and the instruction execution (execute()) methods. These both have
1629 # the suffix -ns.cc.inc, meaning they are within the namespace part
1630 # of the ISA, contain object-emitting C++ source, and are included
1631 # into other top-level files. These are the files that need special
1632 # #define's to allow parts of them to be compiled separately. Rather
1633 # than splitting the emissions into separate files, the monolithic
1634 # output of the ISA parser is maintained, but the value (or lack
1635 # thereof) of the __SPLIT definition during C preprocessing will
1636 # select the different chunks. If no 'split' directives are used,
1637 # the cpp emissions have no effect.
1638 if re.search('-ns.cc.inc$', filename):
1639 print('#if !defined(__SPLIT) || (__SPLIT == 1)', file=f)
1640 self.splits[f] = 1
1641 # ensure requisite #include's
1642 elif filename == 'decoder-g.hh.inc':
1643 print('#include "base/bitfield.hh"', file=f)
1644
1645 return f
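    # e.g. get_file('exec') opens 'exec-ns.cc.inc' (via suffixize); that name
    # matches '-ns.cc.inc$', so it gets the __SPLIT guard above, while
    # get_file('decode_block') always maps to 'decode-method.cc.inc'.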
1646
1647 # Weave together the parts of the different output sections by
1648 # #include'ing them into some very short top-level .cc/.hh files.
1649 # These small files make it much clearer how this tool works, since
1650 # you directly see the chunks emitted as files that are #include'd.
1651 def write_top_level_files(self):
1652 # decoder header - everything depends on this
1653 file = 'decoder.hh'
1654 with self.open(file) as f:
1655 fn = 'decoder-g.hh.inc'
1656 assert(fn in self.files)
1657 f.write('#include "%s"\n' % fn)
1658
1659 fn = 'decoder-ns.hh.inc'
1660 assert(fn in self.files)
1661 f.write('namespace %s {\n#include "%s"\n}\n'
1662 % (self.namespace, fn))
1663
1664 # decoder method - cannot be split
1665 file = 'decoder.cc'
1666 with self.open(file) as f:
1667 fn = 'base/compiler.hh'
1668 f.write('#include "%s"\n' % fn)
1669
1670 fn = 'decoder-g.cc.inc'
1671 assert(fn in self.files)
1672 f.write('#include "%s"\n' % fn)
1673
1674 fn = 'decoder.hh'
1675 f.write('#include "%s"\n' % fn)
1676
1677 fn = 'decode-method.cc.inc'
1678 # is guaranteed to have been written for parse to complete
1679 f.write('#include "%s"\n' % fn)
1680
1681 extn = re.compile('(\.[^\.]+)$')
1682
1683 # instruction constructors
1684 splits = self.splits[self.get_file('decoder')]
1685 file_ = 'inst-constrs.cc'
1686 for i in range(1, splits+1):
1687 if splits > 1:
1688 file = extn.sub(r'-%d\1' % i, file_)
1689 else:
1690 file = file_
1691 with self.open(file) as f:
1692 fn = 'decoder-g.cc.inc'
1693 assert(fn in self.files)
1694 f.write('#include "%s"\n' % fn)
1695
1696 fn = 'decoder.hh'
1697 f.write('#include "%s"\n' % fn)
1698
1699 fn = 'decoder-ns.cc.inc'
1700 assert(fn in self.files)
1701 print('namespace %s {' % self.namespace, file=f)
1702 if splits > 1:
1703 print('#define __SPLIT %u' % i, file=f)
1704 print('#include "%s"' % fn, file=f)
1705 print('}', file=f)
1706
1707 # instruction execution
1708 splits = self.splits[self.get_file('exec')]
1709 for i in range(1, splits+1):
1710 file = 'generic_cpu_exec.cc'
1711 if splits > 1:
1712 file = extn.sub(r'_%d\1' % i, file)
1713 with self.open(file) as f:
1714 fn = 'exec-g.cc.inc'
1715 assert(fn in self.files)
1716 f.write('#include "%s"\n' % fn)
1717 f.write('#include "cpu/exec_context.hh"\n')
1718 f.write('#include "decoder.hh"\n')
1719
1720 fn = 'exec-ns.cc.inc'
1721 assert(fn in self.files)
1722 print('namespace %s {' % self.namespace, file=f)
1723 if splits > 1:
1724 print('#define __SPLIT %u' % i, file=f)
1725 print('#include "%s"' % fn, file=f)
1726 print('}', file=f)
1727
1728 # max_inst_regs.hh
1729 self.update('max_inst_regs.hh',
1730 '''namespace %(namespace)s {
1731 const int MaxInstSrcRegs = %(maxInstSrcRegs)d;
1732 const int MaxInstDestRegs = %(maxInstDestRegs)d;
1733 const int MaxMiscDestRegs = %(maxMiscDestRegs)d;\n}\n''' % self)
1734
1735 scaremonger_template ='''// DO NOT EDIT
1736// This file was automatically generated from an ISA description:
1737// %(filename)s
1738
1739''';
1740
1741 #####################################################################
1742 #
1743 # Lexer
1744 #
1745 # The PLY lexer module takes two things as input:
1746 # - A list of token names (the string list 'tokens')
1747 # - A regular expression describing a match for each token. The
1748 # regexp for token FOO can be provided in two ways:
1749 # - as a string variable named t_FOO
1750 # - as the doc string for a function named t_FOO. In this case,
1751 # the function is also executed, allowing an action to be
1752 # associated with each token match.
1753 #
1754 #####################################################################
1755
1756 # Reserved words. These are listed separately as they are matched
1757 # using the same regexp as generic IDs, but distinguished in the
1758 # t_ID() function. The PLY documentation suggests this approach.
1759 reserved = (
1760 'BITFIELD', 'DECODE', 'DECODER', 'DEFAULT', 'DEF', 'EXEC', 'FORMAT',
1761 'HEADER', 'LET', 'NAMESPACE', 'OPERAND_TYPES', 'OPERANDS',
1762 'OUTPUT', 'SIGNED', 'SPLIT', 'TEMPLATE'
1763 )
1764
1765 # List of tokens. The lex module requires this.
1766 tokens = reserved + (
1767 # identifier
1768 'ID',
1769
1770 # integer literal
1771 'INTLIT',
1772
1773 # string literal
1774 'STRLIT',
1775
1776 # code literal
1777 'CODELIT',
1778
1779 # ( ) [ ] { } < > , ; . : :: *
1780 'LPAREN', 'RPAREN',
1781 'LBRACKET', 'RBRACKET',
1782 'LBRACE', 'RBRACE',
1783 'LESS', 'GREATER', 'EQUALS',
1784 'COMMA', 'SEMI', 'DOT', 'COLON', 'DBLCOLON',
1785 'ASTERISK',
1786
1787 # C preprocessor directives
1788 'CPPDIRECTIVE'
1789
1790 # The following are matched but never returned. commented out to
1791 # suppress PLY warning
1792 # newfile directive
1793 # 'NEWFILE',
1794
1795 # endfile directive
1796 # 'ENDFILE'
1797 )
1798
1799 # Regular expressions for token matching
1800 t_LPAREN = r'\('
1801 t_RPAREN = r'\)'
1802 t_LBRACKET = r'\['
1803 t_RBRACKET = r'\]'
1804 t_LBRACE = r'\{'
1805 t_RBRACE = r'\}'
1806 t_LESS = r'\<'
1807 t_GREATER = r'\>'
1808 t_EQUALS = r'='
1809 t_COMMA = r','
1810 t_SEMI = r';'
1811 t_DOT = r'\.'
1812 t_COLON = r':'
1813 t_DBLCOLON = r'::'
1814 t_ASTERISK = r'\*'
1815
1816 # Identifiers and reserved words
1817 reserved_map = { }
1818 for r in reserved:
1819 reserved_map[r.lower()] = r
1820
1821 def t_ID(self, t):
1822 r'[A-Za-z_]\w*'
1823 t.type = self.reserved_map.get(t.value, 'ID')
1824 return t
1825
1826 # Integer literal
1827 def t_INTLIT(self, t):
1828 r'-?(0x[\da-fA-F]+)|\d+'
1829 try:
1830 t.value = int(t.value,0)
1831 except ValueError:
1832 error(t.lexer.lineno, 'Integer value "%s" too large' % t.value)
1833 t.value = 0
1834 return t
1835
1836 # String literal. Note that these use only single quotes, and
1837 # can span multiple lines.
1838 def t_STRLIT(self, t):
1839 r"(?m)'([^'])+'"
1840 # strip off quotes
1841 t.value = t.value[1:-1]
1842 t.lexer.lineno += t.value.count('\n')
1843 return t
1844
1845
1846 # "Code literal"... like a string literal, but delimiters are
1847 # '{{' and '}}' so they get formatted nicely under emacs c-mode
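    # e.g. the input  {{ return new Nop("nop", machInst); }}  is lexed as a
    # single CODELIT token whose value is the text between the double braces.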
1848 def t_CODELIT(self, t):
1849 r"(?m)\{\{([^\}]|}(?!\}))+\}\}"
1850 # strip off {{ & }}
1851 t.value = t.value[2:-2]
1852 t.lexer.lineno += t.value.count('\n')
1853 return t
1854
1855 def t_CPPDIRECTIVE(self, t):
1856 r'^\#[^\#].*\n'
1857 t.lexer.lineno += t.value.count('\n')
1858 return t
1859
1860 def t_NEWFILE(self, t):
1861 r'^\#\#newfile\s+"[^"]*"\n'
1862 self.fileNameStack.push(t.lexer.lineno)
1863 t.lexer.lineno = LineTracker(t.value[11:-2])
1864
1865 def t_ENDFILE(self, t):
1866 r'^\#\#endfile\n'
1867 t.lexer.lineno = self.fileNameStack.pop()
1868
1869 #
1870 # The functions t_NEWLINE, t_ignore, and t_error are
1871 # special for the lex module.
1872 #
1873
1874 # Newlines
1875 def t_NEWLINE(self, t):
1876 r'\n+'
1877 t.lexer.lineno += t.value.count('\n')
1878
1879 # Comments
1880 def t_comment(self, t):
1881 r'//.*'
1882
1883 # Completely ignored characters
1884 t_ignore = ' \t\x0c'
1885
1886 # Error handler
1887 def t_error(self, t):
1888 error(t.lexer.lineno, "illegal character '%s'" % t.value[0])
1889 t.skip(1)
1890
1891 #####################################################################
1892 #
1893 # Parser
1894 #
1895 # Every function whose name starts with 'p_' defines a grammar
1896 # rule. The rule is encoded in the function's doc string, while
1897 # the function body provides the action taken when the rule is
1898 # matched. The argument to each function is a list of the values
1899 # of the rule's symbols: t[0] for the LHS, and t[1..n] for the
1900 # symbols on the RHS. For tokens, the value is copied from the
1901 # t.value attribute provided by the lexer. For non-terminals, the
1902 # value is assigned by the producing rule; i.e., the job of the
1903 # grammar rule function is to set the value for the non-terminal
1904 # on the LHS (by assigning to t[0]).
1905 #####################################################################
1906
1907 # The LHS of the first grammar rule is used as the start symbol
1908 # (in this case, 'specification'). Note that this rule enforces
1909 # that there will be exactly one namespace declaration, with 0 or
1910 # more global defs/decls before and after it. The defs & decls
1911 # before the namespace decl will be outside the namespace; those
1912 # after will be inside. The decoder function is always inside the
1913 # namespace.
1914 def p_specification(self, t):
1915 'specification : opt_defs_and_outputs top_level_decode_block'
1916
1917 for f in self.splits.iterkeys():
1918 f.write('\n#endif\n')
1919
1920 for f in self.files.itervalues(): # close ALL the files;
1921 f.close() # not doing so can cause compilation to fail
1922
1923 self.write_top_level_files()
1924
1925 t[0] = True
1926
1927 # 'opt_defs_and_outputs' is a possibly empty sequence of def and/or
1928 # output statements. Its productions do the hard work of eventually
1929 # instantiating GenCode objects, which are generally emitted (written to disk)
1930 # as soon as possible, except for the decode_block, which has to be
1931 # accumulated into one large function of nested switch/case blocks.
1932 def p_opt_defs_and_outputs_0(self, t):
1933 'opt_defs_and_outputs : empty'
1934
1935 def p_opt_defs_and_outputs_1(self, t):
1936 'opt_defs_and_outputs : defs_and_outputs'
1937
1938 def p_defs_and_outputs_0(self, t):
1939 'defs_and_outputs : def_or_output'
1940
1941 def p_defs_and_outputs_1(self, t):
1942 'defs_and_outputs : defs_and_outputs def_or_output'
1943
1944 # The list of possible definition/output statements.
1945 # They are all processed as they are seen.
1946 def p_def_or_output(self, t):
1947 '''def_or_output : name_decl
1948 | def_format
1949 | def_bitfield
1950 | def_bitfield_struct
1951 | def_template
1952 | def_operand_types
1953 | def_operands
1954 | output
1955 | global_let
1956 | split'''
1957
1958 # Utility function used by both invocations of splitting - explicit
1959 # 'split' keyword and split() function inside "let {{ }};" blocks.
1960 def split(self, sec, write=False):
1961 assert sec != 'header', "header cannot be split"
1962
1963 f = self.get_file(sec)
1964 self.splits[f] += 1
1965 s = '\n#endif\n#if __SPLIT == %u\n' % self.splits[f]
1966 if write:
1967 f.write(s)
1968 else:
1969 return s
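    # e.g. the first explicit split of the 'exec' section bumps splits[f] to 2
    # and produces '\n#endif\n#if __SPLIT == 2\n', closing chunk 1 and opening
    # chunk 2 of exec-ns.cc.inc.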
1970
1971 # split output file to reduce compilation time
1972 def p_split(self, t):
1973 'split : SPLIT output_type SEMI'
1974 assert self.isa_name, "'split' not allowed before namespace decl"
1975
1976 self.split(t[2], True)
1977
1978 def p_output_type(self, t):
1979 '''output_type : DECODER
1980 | HEADER
1981 | EXEC'''
1982 t[0] = t[1]
1983
1984 # ISA name declaration looks like "namespace <foo>;"
1985 def p_name_decl(self, t):
1986 'name_decl : NAMESPACE ID SEMI'
1987 assert self.isa_name is None, "Only 1 namespace decl permitted"
1988 self.isa_name = t[2]
1989 self.namespace = t[2] + 'Inst'
1990
1991 # Output blocks 'output <foo> {{...}}' (C++ code blocks) are copied
1992 # directly to the appropriate output section.
1993
1994 # Massage output block by substituting in template definitions and
1995 # bit operators. We handle '%'s embedded in the string that don't
1996 # indicate template substitutions by doubling them first so that the
1997 # format operation will reduce them back to single '%'s.
1998 def process_output(self, s):
1999 s = self.protectNonSubstPercents(s)
2000 return substBitOps(s % self.templateMap)
2001
2002 def p_output(self, t):
2003 'output : OUTPUT output_type CODELIT SEMI'
2004 kwargs = { t[2]+'_output' : self.process_output(t[3]) }
2005 GenCode(self, **kwargs).emit()
2006
2007 # global let blocks 'let {{...}}' (Python code blocks) are
2008 # executed directly when seen. Note that these execute in a
2009 # special variable context 'exportContext' to prevent the code
2010 # from polluting this script's namespace.
2011 def p_global_let(self, t):
2012 'global_let : LET CODELIT SEMI'
2013 def _split(sec):
2014 return self.split(sec)
2015 self.updateExportContext()
2016 self.exportContext["header_output"] = ''
2017 self.exportContext["decoder_output"] = ''
2018 self.exportContext["exec_output"] = ''
2019 self.exportContext["decode_block"] = ''
2020 self.exportContext["split"] = _split
2021 split_setup = '''
2022def wrap(func):
2023 def split(sec):
2024 globals()[sec + '_output'] += func(sec)
2025 return split
2026split = wrap(split)
2027del wrap
2028'''
2029 # This tricky setup (immediately above) allows us to just write
2030 # (e.g.) "split('exec')" in the Python code and the split #ifdef's
2031 # will automatically be added to the exec_output variable. The inner
2032 # Python execution environment doesn't know about the split points,
2033 # so we carefully inject and wrap a closure that can retrieve the
2034 # next split's #define from the parser and add it to the current
2035 # emission-in-progress.
2036 try:
2037 exec split_setup+fixPythonIndentation(t[2]) in self.exportContext
2038 except Exception, exc:
2039 traceback.print_exc(file=sys.stdout)
2040 if debug:
2041 raise
2042 error(t.lineno(1), 'In global let block: %s' % exc)
2043 GenCode(self,
2044 header_output=self.exportContext["header_output"],
2045 decoder_output=self.exportContext["decoder_output"],
2046 exec_output=self.exportContext["exec_output"],
2047 decode_block=self.exportContext["decode_block"]).emit()
2048
2049 # Define the mapping from operand type extensions to C++ types and
2050 # bit widths (stored in operandTypeMap).
2051 def p_def_operand_types(self, t):
2052 'def_operand_types : DEF OPERAND_TYPES CODELIT SEMI'
2053 try:
2054 self.operandTypeMap = eval('{' + t[3] + '}')
2055 except Exception, exc:
2056 if debug:
2057 raise
2058 error(t.lineno(1),
2059 'In def operand_types: %s' % exc)
2060
2061 # Define the mapping from operand names to operand classes and
2062 # other traits. Stored in operandNameMap.
2063 def p_def_operands(self, t):
2064 'def_operands : DEF OPERANDS CODELIT SEMI'
2065 if not hasattr(self, 'operandTypeMap'):
2066 error(t.lineno(1),
2067 'error: operand types must be defined before operands')
2068 try:
2069 user_dict = eval('{' + t[3] + '}', self.exportContext)
2070 except Exception, exc:
2071 if debug:
2072 raise
2073 error(t.lineno(1), 'In def operands: %s' % exc)
2074 self.buildOperandNameMap(user_dict, t.lexer.lineno)
2075
2076 # A bitfield definition looks like:
2077 # 'def [signed] bitfield <ID> [<first>:<last>]'
2078 # This generates a preprocessor macro in the output file.
2079 def p_def_bitfield_0(self, t):
2080 'def_bitfield : DEF opt_signed BITFIELD ID LESS INTLIT COLON INTLIT GREATER SEMI'
2081 expr = 'bits(machInst, %2d, %2d)' % (t[6], t[8])
2082 if (t[2] == 'signed'):
2083 expr = 'sext<%d>(%s)' % (t[6] - t[8] + 1, expr)
2084 hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
2085 GenCode(self, header_output=hash_define).emit()
2086
2087 # alternate form for single bit: 'def [signed] bitfield <ID> [<bit>]'
2088 def p_def_bitfield_1(self, t):
2089 'def_bitfield : DEF opt_signed BITFIELD ID LESS INTLIT GREATER SEMI'
2090 expr = 'bits(machInst, %2d, %2d)' % (t[6], t[6])
2091 if (t[2] == 'signed'):
2092 expr = 'sext<%d>(%s)' % (1, expr)
2093 hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
2094 GenCode(self, header_output=hash_define).emit()
2095
2096 # alternate form for structure member: 'def bitfield <ID> <ID>'
2097 def p_def_bitfield_struct(self, t):
2098 'def_bitfield_struct : DEF opt_signed BITFIELD ID id_with_dot SEMI'
2099 if (t[2] != ''):
2100 error(t.lineno(1),
2101 'error: structure bitfields are always unsigned.')
2102 expr = 'machInst.%s' % t[5]
2103 hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
2104 GenCode(self, header_output=hash_define).emit()
2105
2106 def p_id_with_dot_0(self, t):
2107 'id_with_dot : ID'
2108 t[0] = t[1]
2109
2110 def p_id_with_dot_1(self, t):
2111 'id_with_dot : ID DOT id_with_dot'
2112 t[0] = t[1] + t[2] + t[3]
2113
2114 def p_opt_signed_0(self, t):
2115 'opt_signed : SIGNED'
2116 t[0] = t[1]
2117
2118 def p_opt_signed_1(self, t):
2119 'opt_signed : empty'
2120 t[0] = ''
2121
2122 def p_def_template(self, t):
2123 'def_template : DEF TEMPLATE ID CODELIT SEMI'
2124 if t[3] in self.templateMap:
2125 print("warning: template %s already defined" % t[3])
2126 self.templateMap[t[3]] = Template(self, t[4])
2127
2128 # An instruction format definition looks like
2129 # "def format <fmt>(<params>) {{...}};"
2130 def p_def_format(self, t):
2131 'def_format : DEF FORMAT ID LPAREN param_list RPAREN CODELIT SEMI'
2132 (id, params, code) = (t[3], t[5], t[7])
2133 self.defFormat(id, params, code, t.lexer.lineno)
2134
2135 # The formal parameter list for an instruction format is a
2136 # possibly empty list of comma-separated parameters. Positional
2137 # (standard, non-keyword) parameters must come first, followed by
2138 # keyword parameters, followed by a '*foo' parameter that gets
2139 # excess positional arguments (as in Python). Each of these three
2140 # parameter categories is optional.
2141 #
2142 # Note that we do not support the '**foo' parameter for collecting
2143 # otherwise undefined keyword args. Otherwise the parameter list
2144 # is (I believe) identical to what is supported in Python.
2145 #
2146 # The param list generates a flat list of strings: the positional
2147 # parameter names, then 'name = default' strings for the keyword
2148 # parameters, then the '*foo' excess-args parameter if present.
2149 def p_param_list_0(self, t):
2150 'param_list : positional_param_list COMMA nonpositional_param_list'
2151 t[0] = t[1] + t[3]
2152
2153 def p_param_list_1(self, t):
2154 '''param_list : positional_param_list
2155 | nonpositional_param_list'''
2156 t[0] = t[1]
2157
2158 def p_positional_param_list_0(self, t):
2159 'positional_param_list : empty'
2160 t[0] = []
2161
2162 def p_positional_param_list_1(self, t):
2163 'positional_param_list : ID'
2164 t[0] = [t[1]]
2165
2166 def p_positional_param_list_2(self, t):
2167 'positional_param_list : positional_param_list COMMA ID'
2168 t[0] = t[1] + [t[3]]
2169
2170 def p_nonpositional_param_list_0(self, t):
2171 'nonpositional_param_list : keyword_param_list COMMA excess_args_param'
2172 t[0] = t[1] + t[3]
2173
2174 def p_nonpositional_param_list_1(self, t):
2175 '''nonpositional_param_list : keyword_param_list
2176 | excess_args_param'''
2177 t[0] = t[1]
2178
2179 def p_keyword_param_list_0(self, t):
2180 'keyword_param_list : keyword_param'
2181 t[0] = [t[1]]
2182
2183 def p_keyword_param_list_1(self, t):
2184 'keyword_param_list : keyword_param_list COMMA keyword_param'
2185 t[0] = t[1] + [t[3]]
2186
2187 def p_keyword_param(self, t):
2188 'keyword_param : ID EQUALS expr'
2189 t[0] = t[1] + ' = ' + t[3].__repr__()
2190
2191 def p_excess_args_param(self, t):
2192 'excess_args_param : ASTERISK ID'
2193 # Just concatenate them: '*ID'. Wrap in list to be consistent
2194 # with positional_param_list and keyword_param_list.
2195 t[0] = [t[1] + t[2]]
2196
2197 # End of format definition-related rules.
2198 ##############
2199
2200 #
2201 # A decode block looks like:
2202 # decode <field1> [, <field2>]* [default <inst>] { ... }
2203 #
2204 def p_top_level_decode_block(self, t):
2205 'top_level_decode_block : decode_block'
2206 codeObj = t[1]
2207 codeObj.wrap_decode_block('''
2208StaticInstPtr
2209%(isa_name)s::Decoder::decodeInst(%(isa_name)s::ExtMachInst machInst)
2210{
2211 using namespace %(namespace)s;
2212''' % self, '}')
2213
2214 codeObj.emit()
2215
2216 def p_decode_block(self, t):
2217 'decode_block : DECODE ID opt_default LBRACE decode_stmt_list RBRACE'
2218 default_defaults = self.defaultStack.pop()
2219 codeObj = t[5]
2220 # use the "default defaults" only if there was no explicit
2221 # default statement in decode_stmt_list
2222 if not codeObj.has_decode_default:
2223 codeObj += default_defaults
2224 codeObj.wrap_decode_block('switch (%s) {\n' % t[2], '}\n')
2225 t[0] = codeObj
2226
2227 # The opt_default statement serves only to push the "default
2228 # defaults" onto defaultStack. This value will be used by nested
2229 # decode blocks, and used and popped off when the current
2230 # decode_block is processed (in p_decode_block() above).
2231 def p_opt_default_0(self, t):
2232 'opt_default : empty'
2233 # no default specified: reuse the one currently at the top of
2234 # the stack
2235 self.defaultStack.push(self.defaultStack.top())
2236 # no meaningful value returned
2237 t[0] = None
2238
2239 def p_opt_default_1(self, t):
2240 'opt_default : DEFAULT inst'
2241 # push the new default
2242 codeObj = t[2]
2243 codeObj.wrap_decode_block('\ndefault:\n', 'break;\n')
2244 self.defaultStack.push(codeObj)
2245 # no meaningful value returned
2246 t[0] = None
2247
2248 def p_decode_stmt_list_0(self, t):
2249 'decode_stmt_list : decode_stmt'
2250 t[0] = t[1]
2251
2252 def p_decode_stmt_list_1(self, t):
2253 'decode_stmt_list : decode_stmt decode_stmt_list'
2254 if (t[1].has_decode_default and t[2].has_decode_default):
2255 error(t.lineno(1), 'Two default cases in decode block')
2256 t[0] = t[1] + t[2]
2257
2258 #
2259 # Decode statement rules
2260 #
2261 # There are four types of statements allowed in a decode block:
2262 # 1. Format blocks 'format <foo> { ... }'
2263 # 2. Nested decode blocks
2264 # 3. Instruction definitions.
2265 # 4. C preprocessor directives.
2266
2267
2268 # Preprocessor directives found in a decode statement list are
2269 # passed through to the output, replicated to all of the output
2270 # code streams. This works well for ifdefs, so we can ifdef out
2271 # both the declarations and the decode cases generated by an
2272 # instruction definition. Handling them as part of the grammar
2273 # makes it easy to keep them in the right place with respect to
2274 # the code generated by the other statements.
2275 def p_decode_stmt_cpp(self, t):
2276 'decode_stmt : CPPDIRECTIVE'
2277 t[0] = GenCode(self, t[1], t[1], t[1], t[1])
2278
2279 # A format block 'format <foo> { ... }' sets the default
2280 # instruction format used to handle instruction definitions inside
2281 # the block. This format can be overridden by using an explicit
2282 # format on the instruction definition or with a nested format
2283 # block.
2284 def p_decode_stmt_format(self, t):
2285 'decode_stmt : FORMAT push_format_id LBRACE decode_stmt_list RBRACE'
2286 # The format will be pushed on the stack when 'push_format_id'
2287 # is processed (see below). Once the parser has recognized
2288 # the full production (through the right brace), we're done
2289 # with the format, so now we can pop it.
2290 self.formatStack.pop()
2291 t[0] = t[4]
2292
2293 # This rule exists so we can set the current format (& push the
2294 # stack) when we recognize the format name part of the format
2295 # block.
2296 def p_push_format_id(self, t):
2297 'push_format_id : ID'
2298 try:
2299 self.formatStack.push(self.formatMap[t[1]])
2300 t[0] = ('', '// format %s' % t[1])
2301 except KeyError:
2302 error(t.lineno(1), 'instruction format "%s" not defined.' % t[1])
2303
2304 # Nested decode block: if the value of the current field matches
2305 # the specified constant(s), do a nested decode on some other field.
2306 def p_decode_stmt_decode(self, t):
2307 'decode_stmt : case_list COLON decode_block'
2308 case_list = t[1]
2309 codeObj = t[3]
2310 # just wrap the decoding code from the block as a case in the
2311 # outer switch statement.
2312 codeObj.wrap_decode_block('\n%s\n' % ''.join(case_list),
2313 'M5_UNREACHABLE;\n')
2314 codeObj.has_decode_default = (case_list == ['default:'])
2315 t[0] = codeObj
2316
2317 # Instruction definition (finally!).
2318 def p_decode_stmt_inst(self, t):
2319 'decode_stmt : case_list COLON inst SEMI'
2320 case_list = t[1]
2321 codeObj = t[3]
2322 codeObj.wrap_decode_block('\n%s' % ''.join(case_list), 'break;\n')
2323 codeObj.has_decode_default = (case_list == ['default:'])
2324 t[0] = codeObj
2325
2326 # The constant list for a decode case label must be non-empty, and must
2327 # either be the keyword 'default', or made up of one or more
2328 # comma-separated integer literals or strings which evaluate to
2329 # constants when compiled as C++.
2330 def p_case_list_0(self, t):
2331 'case_list : DEFAULT'
2332 t[0] = ['default:']
2333
2334 def prep_int_lit_case_label(self, lit):
2335 if lit >= 2**32:
2336 return 'case ULL(%#x): ' % lit
2337 else:
2338 return 'case %#x: ' % lit
2339
2340 def prep_str_lit_case_label(self, lit):
2341 return 'case %s: ' % lit
2342
2343 def p_case_list_1(self, t):
2344 'case_list : INTLIT'
2345 t[0] = [self.prep_int_lit_case_label(t[1])]
2346
2347 def p_case_list_2(self, t):
2348 'case_list : STRLIT'
2349 t[0] = [self.prep_str_lit_case_label(t[1])]
2350
2351 def p_case_list_3(self, t):
2352 'case_list : case_list COMMA INTLIT'
2353 t[0] = t[1]
2354 t[0].append(self.prep_int_lit_case_label(t[3]))
2355
2356 def p_case_list_4(self, t):
2357 'case_list : case_list COMMA STRLIT'
2358 t[0] = t[1]
2359 t[0].append(self.prep_str_lit_case_label(t[3]))
2360
2361 # Define an instruction using the current instruction format
2362 # (specified by an enclosing format block).
2363 # "<mnemonic>(<args>)"
2364 def p_inst_0(self, t):
2365 'inst : ID LPAREN arg_list RPAREN'
2366 # Pass the ID and arg list to the current format class to deal with.
2367 currentFormat = self.formatStack.top()
2368 codeObj = currentFormat.defineInst(self, t[1], t[3], t.lexer.lineno)
2369 args = ','.join(map(str, t[3]))
2370 args = re.sub('(?m)^', '//', args)
2371 args = re.sub('^//', '', args)
2372 comment = '\n// %s::%s(%s)\n' % (currentFormat.id, t[1], args)
2373 codeObj.prepend_all(comment)
2374 t[0] = codeObj
2375
2376 # Define an instruction using an explicitly specified format:
2377 # "<fmt>::<mnemonic>(<args>)"
2378 def p_inst_1(self, t):
2379 'inst : ID DBLCOLON ID LPAREN arg_list RPAREN'
2380 try:
2381 format = self.formatMap[t[1]]
2382 except KeyError:
2383 error(t.lineno(1), 'instruction format "%s" not defined.' % t[1])
2384
2385 codeObj = format.defineInst(self, t[3], t[5], t.lexer.lineno)
2386 comment = '\n// %s::%s(%s)\n' % (t[1], t[3], t[5])
2387 codeObj.prepend_all(comment)
2388 t[0] = codeObj
2389
2390 # The arg list generates a tuple, where the first element is a
2391 # list of the positional args and the second element is a dict
2392 # containing the keyword args.
2393 def p_arg_list_0(self, t):
2394 'arg_list : positional_arg_list COMMA keyword_arg_list'
2395 t[0] = ( t[1], t[3] )
2396
2397 def p_arg_list_1(self, t):
2398 'arg_list : positional_arg_list'
2399 t[0] = ( t[1], {} )
2400
2401 def p_arg_list_2(self, t):
2402 'arg_list : keyword_arg_list'
2403 t[0] = ( [], t[1] )
2404
2405 def p_positional_arg_list_0(self, t):
2406 'positional_arg_list : empty'
2407 t[0] = []
2408
2409 def p_positional_arg_list_1(self, t):
2410 'positional_arg_list : expr'
2411 t[0] = [t[1]]
2412
2413 def p_positional_arg_list_2(self, t):
2414 'positional_arg_list : positional_arg_list COMMA expr'
2415 t[0] = t[1] + [t[3]]
2416
2417 def p_keyword_arg_list_0(self, t):
2418 'keyword_arg_list : keyword_arg'
2419 t[0] = t[1]
2420
2421 def p_keyword_arg_list_1(self, t):
2422 'keyword_arg_list : keyword_arg_list COMMA keyword_arg'
2423 t[0] = t[1]
2424 t[0].update(t[3])
2425
2426 def p_keyword_arg(self, t):
2427 'keyword_arg : ID EQUALS expr'
2428 t[0] = { t[1] : t[3] }
2429
2430 #
2431 # Basic expressions. These constitute the argument values of
2432 # "function calls" (i.e. instruction definitions in the decode
2433 # block) and default values for formal parameters of format
2434 # functions.
2435 #
2436 # Right now, these are either strings, integers, or (recursively)
2437 # lists of exprs (using Python square-bracket list syntax). Note
2438 # that bare identifiers are treated as string constants here (since
2439 # there isn't really a variable namespace to refer to).
2440 #
2441 def p_expr_0(self, t):
2442 '''expr : ID
2443 | INTLIT
2444 | STRLIT
2445 | CODELIT'''
2446 t[0] = t[1]
2447
2448 def p_expr_1(self, t):
2449 '''expr : LBRACKET list_expr RBRACKET'''
2450 t[0] = t[2]
2451
2452 def p_list_expr_0(self, t):
2453 'list_expr : expr'
2454 t[0] = [t[1]]
2455
2456 def p_list_expr_1(self, t):
2457 'list_expr : list_expr COMMA expr'
2458 t[0] = t[1] + [t[3]]
2459
2460 def p_list_expr_2(self, t):
2461 'list_expr : empty'
2462 t[0] = []
2463
2464 #
2465 # Empty production... use in other rules for readability.
2466 #
2467 def p_empty(self, t):
2468 'empty :'
2469 pass
2470
2471 # Parse error handler. Note that the argument here is the
2472 # offending *token*, not a grammar symbol (hence the need to use
2473 # t.value)
2474 def p_error(self, t):
2475 if t:
2476 error(t.lexer.lineno, "syntax error at '%s'" % t.value)
2477 else:
2478 error("unknown syntax error")
2479
2480 # END OF GRAMMAR RULES
2481
2482 def updateExportContext(self):
2483
2484 # create a continuation that allows us to grab the current parser
2485 def wrapInstObjParams(*args):
2486 return InstObjParams(self, *args)
2487 self.exportContext['InstObjParams'] = wrapInstObjParams
2488 self.exportContext.update(self.templateMap)
2489
2490 def defFormat(self, id, params, code, lineno):
2491 '''Define a new format'''
2492
2493 # make sure we haven't already defined this one
2494 if id in self.formatMap:
2495 error(lineno, 'format %s redefined.' % id)
2496
2497 # create new object and store in global map
2498 self.formatMap[id] = Format(id, params, code)
2499
2500 def protectNonSubstPercents(self, s):
2501 '''Protect any non-dict-substitution '%'s in a format string
2502 (i.e. those not followed by '(')'''
2503
2504 return re.sub(r'%(?!\()', '%%', s)
2505
2506 def buildOperandNameMap(self, user_dict, lineno):
2507 operand_name = {}
2508 for op_name, val in user_dict.iteritems():
2509
2510 # Check if extra attributes have been specified.
2511 if len(val) > 9:
2512 error(lineno, 'error: too many attributes for operand "%s"' %
2513 base_cls_name)
2514
2515 # Pad val with None in case optional args are missing
2516 val += (None, None, None, None)
2517 base_cls_name, dflt_ext, reg_spec, flags, sort_pri, \
2518 read_code, write_code, read_predicate, write_predicate = val[:9]
2519
2520 # Canonical flag structure is a triple of lists, where each list
2521 # indicates the set of flags implied by this operand always, when
2522 # used as a source, and when used as a dest, respectively.
2523 # For simplicity this can be initialized using a variety of fairly
2524 # obvious shortcuts; we convert these to canonical form here.
2525 if not flags:
2526 # no flags specified (e.g., 'None')
2527 flags = ( [], [], [] )
2528 elif isinstance(flags, str):
2529 # a single flag: assumed to be unconditional
2530 flags = ( [ flags ], [], [] )
2531 elif isinstance(flags, list):
2532 # a list of flags: also assumed to be unconditional
2533 flags = ( flags, [], [] )
2534 elif isinstance(flags, tuple):
2535 # it's a tuple: it should be a triple,
2536 # but each item could be a single string or a list
2537 (uncond_flags, src_flags, dest_flags) = flags
2538 flags = (makeList(uncond_flags),
2539 makeList(src_flags), makeList(dest_flags))
2540
2541 # Accumulate attributes of new operand class in tmp_dict
2542 tmp_dict = {}
2543 attrList = ['reg_spec', 'flags', 'sort_pri',
2544 'read_code', 'write_code',
2545 'read_predicate', 'write_predicate']
2546 if dflt_ext:
2547 dflt_ctype = self.operandTypeMap[dflt_ext]
2548 attrList.extend(['dflt_ctype', 'dflt_ext'])
2549 # reg_spec is either just a string or a dictionary
2550 # (for elems of vector)
2551 if isinstance(reg_spec, tuple):
2552 (reg_spec, elem_spec) = reg_spec
2553 if isinstance(elem_spec, str):
2554 attrList.append('elem_spec')
2555 else:
2556 assert(isinstance(elem_spec, dict))
2557 elems = elem_spec
2558 attrList.append('elems')
2559 for attr in attrList:
2560 tmp_dict[attr] = eval(attr)
2561 tmp_dict['base_name'] = op_name
2562
2563 # New class name will be e.g. "IntReg_Ra"
2564 cls_name = base_cls_name + '_' + op_name
2565 # Evaluate string arg to get class object. Note that the
2566 # actual base class for "IntReg" is "IntRegOperand", i.e. we
2567 # have to append "Operand".
2568 try:
2569 base_cls = eval(base_cls_name + 'Operand')
2570 except NameError:
2571 error(lineno,
2572 'error: unknown operand base class "%s"' % base_cls_name)
2573 # The following statement creates a new class called
2574 # <cls_name> as a subclass of <base_cls> with the attributes
2575 # in tmp_dict, just as if we evaluated a class declaration.
2576 operand_name[op_name] = type(cls_name, (base_cls,), tmp_dict)
2577
2578 self.operandNameMap = operand_name
2579
2580 # Define operand variables.
2581 operands = user_dict.keys()
2582 # Add the elems defined in the vector operands and
2583 # build a map elem -> vector (used in OperandList)
2584 elem_to_vec = {}
2585 for op in user_dict.keys():
2586 if hasattr(self.operandNameMap[op], 'elems'):
2587 for elem in self.operandNameMap[op].elems.keys():
2588 operands.append(elem)
2589 elem_to_vec[elem] = op
2590 self.elemToVector = elem_to_vec
2591 extensions = self.operandTypeMap.keys()
2592
2593 operandsREString = r'''
2594 (?<!\w) # neg. lookbehind assertion: prevent partial matches
2595 ((%s)(?:_(%s))?) # match: operand with optional '_' then suffix
2596 (?!\w) # neg. lookahead assertion: prevent partial matches
2597 ''' % (string.join(operands, '|'), string.join(extensions, '|'))
2598
2599 self.operandsRE = re.compile(operandsREString, re.MULTILINE|re.VERBOSE)
2600
2601 # Same as operandsREString, but extension is mandatory, and only two
2602 # groups are returned (base and ext, not full name as above).
2603 # Used for subtituting '_' for '.' to make C++ identifiers.
2604 operandsWithExtREString = r'(?<!\w)(%s)_(%s)(?!\w)' \
2605 % (string.join(operands, '|'), string.join(extensions, '|'))
2606
2607 self.operandsWithExtRE = \
2608 re.compile(operandsWithExtREString, re.MULTILINE)
2609
2610 def substMungedOpNames(self, code):
2611 '''Munge operand names in code string to make legal C++
2612 variable names. This means getting rid of the type extension
2613 if any. Will match base_name attribute of Operand object.)'''
2614 return self.operandsWithExtRE.sub(r'\1', code)
2615
2616 def mungeSnippet(self, s):
2617 '''Fix up code snippets for final substitution in templates.'''
2618 if isinstance(s, str):
2619 return self.substMungedOpNames(substBitOps(s))
2620 else:
2621 return s
2622
2623 def open(self, name, bare=False):
2624 '''Open the output file for writing and include scary warning.'''
2625 filename = os.path.join(self.output_dir, name)
2626 f = open(filename, 'w')
2627 if f:
2628 if not bare:
2629 f.write(ISAParser.scaremonger_template % self)
2630 return f
2631
2632 def update(self, file, contents):
2633 '''Update the output file only. Scons should handle the case when
2634 the new contents are unchanged using its built-in hash feature.'''
2635 f = self.open(file)
2636 f.write(contents)
2637 f.close()
2638
2639 # This regular expression matches '##include' directives
2640 includeRE = re.compile(r'^\s*##include\s+"(?P<filename>[^"]*)".*$',
2641 re.MULTILINE)
2642
2643 def replace_include(self, matchobj, dirname):
2644 """Function to replace a matched '##include' directive with the
2645 contents of the specified file (with nested ##includes
2646 replaced recursively). 'matchobj' is an re match object
2647 (from a match of includeRE) and 'dirname' is the directory
2648 relative to which the file path should be resolved."""
2649
2650 fname = matchobj.group('filename')
2651 full_fname = os.path.normpath(os.path.join(dirname, fname))
2652 contents = '##newfile "%s"\n%s\n##endfile\n' % \
2653 (full_fname, self.read_and_flatten(full_fname))
2654 return contents
2655
2656 def read_and_flatten(self, filename):
2657 """Read a file and recursively flatten nested '##include' files."""
2658
2659 current_dir = os.path.dirname(filename)
2660 try:
2661 contents = open(filename).read()
2662 except IOError:
2663 error('Error including file "%s"' % filename)
2664
2665 self.fileNameStack.push(LineTracker(filename))
2666
2667 # Find any includes and include them
2668 def replace(matchobj):
2669 return self.replace_include(matchobj, current_dir)
2670 contents = self.includeRE.sub(replace, contents)
2671
2672 self.fileNameStack.pop()
2673 return contents
2674
2675 AlreadyGenerated = {}
2676
2677 def _parse_isa_desc(self, isa_desc_file):
2678 '''Read in and parse the ISA description.'''
2679
2680 # The build system can end up running the ISA parser twice: once to
2681 # finalize the build dependencies, and then to actually generate
2682 # the files it expects (in src/arch/$ARCH/generated). This code
2683 # doesn't do anything different either time, however; the SCons
2684 # invocations just expect different things. Since this code runs
2685 # within SCons, we can just remember that we've already run and
2686 # not perform a completely unnecessary run, since the ISA parser's
2687 # effect is idempotent.
2688 if isa_desc_file in ISAParser.AlreadyGenerated:
2689 return
2690
2691 # grab the last three path components of isa_desc_file
2692 self.filename = '/'.join(isa_desc_file.split('/')[-3:])
2693
2694 # Read file and (recursively) all included files into a string.
2695 # PLY requires that the input be in a single string so we have to
2696 # do this up front.
2697 isa_desc = self.read_and_flatten(isa_desc_file)
2698
2699 # Initialize lineno tracker
2700 self.lex.lineno = LineTracker(isa_desc_file)
2701
2702 # Parse.
2703 self.parse_string(isa_desc)
2704
2705 ISAParser.AlreadyGenerated[isa_desc_file] = None
2706
2707 def parse_isa_desc(self, *args, **kwargs):
2708 try:
2709 self._parse_isa_desc(*args, **kwargs)
2710 except ISAParserError, e:
2711 print(backtrace(self.fileNameStack))
2712 print("At %s:" % e.lineno)
2713 print(e)
2714 sys.exit(1)
2715
2716# Called as script: get args from command line.
2717# Args are: <isa desc file> <output dir>
2718if __name__ == '__main__':
2719 ISAParser(sys.argv[2]).parse_isa_desc(sys.argv[1])