isa_parser.py (13610:5d5404ac6288 -> 13675:afeab32b3655)
1# Copyright (c) 2014, 2016, 2019 ARM Limited
2# All rights reserved
3#
4# The license below extends only to copyright in the software and shall
5# not be construed as granting a license to any other intellectual
6# property including but not limited to intellectual property relating
7# to a hardware implementation of the functionality of the software
8# licensed hereunder. You may use the software subject to the license
9# terms below provided that you ensure that this notice is replicated
10# unmodified and in its entirety in all distributions of the software,
11# modified or unmodified, in source code or in binary form.
12#
13# Copyright (c) 2003-2005 The Regents of The University of Michigan
14# Copyright (c) 2013,2015 Advanced Micro Devices, Inc.
15# All rights reserved.
16#
17# Redistribution and use in source and binary forms, with or without
18# modification, are permitted provided that the following conditions are
19# met: redistributions of source code must retain the above copyright
20# notice, this list of conditions and the following disclaimer;
21# redistributions in binary form must reproduce the above copyright
22# notice, this list of conditions and the following disclaimer in the
23# documentation and/or other materials provided with the distribution;
24# neither the name of the copyright holders nor the names of its
25# contributors may be used to endorse or promote products derived from
26# this software without specific prior written permission.
27#
28# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
29# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
30# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
31# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
32# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
33# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
34# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
35# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
36# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
37# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
38# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
39#
40# Authors: Steve Reinhardt
41
42from __future__ import with_statement, print_function
43import os
44import sys
45import re
46import string
47import inspect, traceback
48# get type names
49from types import *
50
51from m5.util.grammar import Grammar
52
53debug=False
54
55###################
56# Utility functions
57
58#
59# Indent every line in string 's' by two spaces
60# (except preprocessor directives).
61# Used to make nested code blocks look pretty.
62#
63def indent(s):
64 return re.sub(r'(?m)^(?!#)', ' ', s)
65
66#
67# Munge a somewhat arbitrarily formatted piece of Python code
68# (e.g. from a format 'let' block) into something whose indentation
69# will get by the Python parser.
70#
71# The two keys here are that Python will give a syntax error if
72# there's any whitespace at the beginning of the first line, and that
73# all lines at the same lexical nesting level must have identical
74# indentation. Unfortunately the way code literals work, an entire
75# let block tends to have some initial indentation. Rather than
76# trying to figure out what that is and strip it off, we prepend 'if
77# 1:' to make the let code the nested block inside the if (and have
78# the parser automatically deal with the indentation for us).
79#
80# We don't want to do this if (1) the code block is empty or (2) the
81# first line of the block doesn't have any whitespace at the front.
82
83def fixPythonIndentation(s):
84 # get rid of blank lines first
85 s = re.sub(r'(?m)^\s*\n', '', s);
86 if (s != '' and re.match(r'[ \t]', s[0])):
87 s = 'if 1:\n' + s
88 return s
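# Illustrative example (added commentary, not from the original file): a
# 'let' block body that arrives with leading indentation such as
#
#     "    x = 5\n    y = x + 1\n"
#
# comes back as
#
#     "if 1:\n    x = 5\n    y = x + 1\n"
#
# which compile(..., 'exec') accepts, because the original indentation now
# belongs to the block nested under the 'if'.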
89
90class ISAParserError(Exception):
91 """Exception class for parser errors"""
92 def __init__(self, first, second=None):
93 if second is None:
94 self.lineno = 0
95 self.string = first
96 else:
97 self.lineno = first
98 self.string = second
99
100 def __str__(self):
101 return self.string
102
103def error(*args):
104 raise ISAParserError(*args)
105
106####################
107# Template objects.
108#
109# Template objects are format strings that allow substitution from
110# the attribute spaces of other objects (e.g. InstObjParams instances).
111
112labelRE = re.compile(r'(?<!%)%\(([^\)]+)\)[sd]')
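# For reference (added commentary, not from the original file): labelRE picks
# out Python %-substitution labels while skipping escaped '%%' sequences, e.g.
#
#     labelRE.findall('%(constructor)s %(op_decl)s %%(literal)s')
#
# yields ['constructor', 'op_decl']; the '%%(literal)s' is rejected by the
# (?<!%) negative look-behind.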
113
114class Template(object):
115 def __init__(self, parser, t):
116 self.parser = parser
117 self.template = t
118
119 def subst(self, d):
120 myDict = None
121
122 # Protect non-Python-dict substitutions (e.g. if there's a printf
123 # in the templated C++ code)
124 template = self.parser.protectNonSubstPercents(self.template)
125
126 # Build a dict ('myDict') to use for the template substitution.
127 # Start with the template namespace. Make a copy since we're
128 # going to modify it.
129 myDict = self.parser.templateMap.copy()
130
131 if isinstance(d, InstObjParams):
132 # If we're dealing with an InstObjParams object, we need
133 # to be a little more sophisticated. The instruction-wide
134 # parameters are already formed, but the parameters which
135 # are only function wide still need to be generated.
136 compositeCode = ''
137
138 myDict.update(d.__dict__)
139 # The "operands" and "snippets" attributes of the InstObjParams
140 # objects are for internal use and not substitution.
141 del myDict['operands']
142 del myDict['snippets']
143
144 snippetLabels = [l for l in labelRE.findall(template)
145 if l in d.snippets]
146
147 snippets = dict([(s, self.parser.mungeSnippet(d.snippets[s]))
148 for s in snippetLabels])
149
150 myDict.update(snippets)
151
152 compositeCode = ' '.join(map(str, snippets.values()))
153
154 # Add in template itself in case it references any
155 # operands explicitly (like Mem)
156 compositeCode += ' ' + template
157
158 operands = SubOperandList(self.parser, compositeCode, d.operands)
159
160 myDict['op_decl'] = operands.concatAttrStrings('op_decl')
161 if operands.readPC or operands.setPC:
162 myDict['op_decl'] += 'TheISA::PCState __parserAutoPCState;\n'
163
164 # In case there are predicated register reads and writes, declare
165 # the variables for register indices. It is assumed that
166 # all the operands in the OperandList are also in the
167 # SubOperandList and in the same order. Otherwise, it is
168 # expected that predication would not be used for the operands.
169 if operands.predRead:
170 myDict['op_decl'] += 'uint8_t _sourceIndex = 0;\n'
171 if operands.predWrite:
172 myDict['op_decl'] += 'uint8_t M5_VAR_USED _destIndex = 0;\n'
173
174 is_src = lambda op: op.is_src
175 is_dest = lambda op: op.is_dest
176
177 myDict['op_src_decl'] = \
178 operands.concatSomeAttrStrings(is_src, 'op_src_decl')
179 myDict['op_dest_decl'] = \
180 operands.concatSomeAttrStrings(is_dest, 'op_dest_decl')
181 if operands.readPC:
182 myDict['op_src_decl'] += \
183 'TheISA::PCState __parserAutoPCState;\n'
184 if operands.setPC:
185 myDict['op_dest_decl'] += \
186 'TheISA::PCState __parserAutoPCState;\n'
187
188 myDict['op_rd'] = operands.concatAttrStrings('op_rd')
189 if operands.readPC:
190 myDict['op_rd'] = '__parserAutoPCState = xc->pcState();\n' + \
191 myDict['op_rd']
192
193 # Compose the op_wb string. If we're going to write back the
194 # PC state because we changed some of its elements, we'll need to
195 # do that as early as possible. That allows later uncoordinated
196 # modifications to the PC to layer appropriately.
197 reordered = list(operands.items)
198 reordered.reverse()
199 op_wb_str = ''
200 pcWbStr = 'xc->pcState(__parserAutoPCState);\n'
201 for op_desc in reordered:
202 if op_desc.isPCPart() and op_desc.is_dest:
203 op_wb_str = op_desc.op_wb + pcWbStr + op_wb_str
204 pcWbStr = ''
205 else:
206 op_wb_str = op_desc.op_wb + op_wb_str
207 myDict['op_wb'] = op_wb_str
208
209 elif isinstance(d, dict):
210 # if the argument is a dictionary, we just use it.
211 myDict.update(d)
212 elif hasattr(d, '__dict__'):
213 # if the argument is an object, we use its attribute map.
214 myDict.update(d.__dict__)
215 else:
216 raise TypeError, "Template.subst() arg must be or have dictionary"
217 return template % myDict
218
219 # Convert to string.
220 def __str__(self):
221 return self.template
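# Rough usage sketch (added commentary, not from the original file; the names
# below are made up). A template whose body is
#
#     'int %(size_name)s = %(init_val)d;'
#
# substituted with the plain dict {'size_name': 'foo_sz', 'init_val': 0}
# yields 'int foo_sz = 0;'. When an InstObjParams object is passed instead,
# the per-operand strings built above (op_decl, op_rd, op_wb, ...) become
# available as substitution labels as well.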
222
223################
224# Format object.
225#
226# A format object encapsulates an instruction format. It must provide
227# a defineInst() method that generates the code for an instruction
228# definition.
229
230class Format(object):
231 def __init__(self, id, params, code):
232 self.id = id
233 self.params = params
234 label = 'def format ' + id
235 self.user_code = compile(fixPythonIndentation(code), label, 'exec')
236 param_list = string.join(params, ", ")
237 f = '''def defInst(_code, _context, %s):
238 my_locals = vars().copy()
239 exec _code in _context, my_locals
240 return my_locals\n''' % param_list
241 c = compile(f, label + ' wrapper', 'exec')
242 exec c
243 self.func = defInst
244
245 def defineInst(self, parser, name, args, lineno):
246 parser.updateExportContext()
247 context = parser.exportContext.copy()
248 if len(name):
249 Name = name[0].upper()
250 if len(name) > 1:
251 Name += name[1:]
252 context.update({ 'name' : name, 'Name' : Name })
253 try:
254 vars = self.func(self.user_code, context, *args[0], **args[1])
255 except Exception, exc:
256 if debug:
257 raise
258 error(lineno, 'error defining "%s": %s.' % (name, exc))
259 for k in vars.keys():
260 if k not in ('header_output', 'decoder_output',
261 'exec_output', 'decode_block'):
262 del vars[k]
263 return GenCode(parser, **vars)
264
265# Special null format to catch an implicit-format instruction
266# definition outside of any format block.
267class NoFormat(object):
268 def __init__(self):
269 self.defaultInst = ''
270
271 def defineInst(self, parser, name, args, lineno):
272 error(lineno,
273 'instruction definition "%s" with no active format!' % name)
274
275###############
276# GenCode class
277#
278# The GenCode class encapsulates generated code destined for various
279# output files. The header_output and decoder_output attributes are
280# strings containing code destined for decoder.hh and decoder.cc
281# respectively. The decode_block attribute contains code to be
282# incorporated in the decode function itself (that will also end up in
283# decoder.cc). The exec_output attribute is the string of code for the
284# exec.cc file. The has_decode_default attribute is used in the decode block
285# to allow explicit default clauses to override default default clauses.
286
287class GenCode(object):
288 # Constructor.
289 def __init__(self, parser,
290 header_output = '', decoder_output = '', exec_output = '',
291 decode_block = '', has_decode_default = False):
292 self.parser = parser
293 self.header_output = header_output
294 self.decoder_output = decoder_output
295 self.exec_output = exec_output
296 self.decode_block = decode_block
297 self.has_decode_default = has_decode_default
298
299 # Write these code chunks out to the filesystem. They will be properly
300 # interwoven by write_top_level_files().
301 def emit(self):
302 if self.header_output:
303 self.parser.get_file('header').write(self.header_output)
304 if self.decoder_output:
305 self.parser.get_file('decoder').write(self.decoder_output)
306 if self.exec_output:
307 self.parser.get_file('exec').write(self.exec_output)
308 if self.decode_block:
309 self.parser.get_file('decode_block').write(self.decode_block)
310
311 # Override '+' operator: generate a new GenCode object that
312 # concatenates all the individual strings in the operands.
313 def __add__(self, other):
314 return GenCode(self.parser,
315 self.header_output + other.header_output,
316 self.decoder_output + other.decoder_output,
317 self.exec_output + other.exec_output,
318 self.decode_block + other.decode_block,
319 self.has_decode_default or other.has_decode_default)
320
321 # Prepend a string (typically a comment) to all the strings.
322 def prepend_all(self, pre):
323 self.header_output = pre + self.header_output
324 self.decoder_output = pre + self.decoder_output
325 self.decode_block = pre + self.decode_block
326 self.exec_output = pre + self.exec_output
327
328 # Wrap the decode block in a pair of strings (e.g., 'case foo:'
329 # and 'break;'). Used to build the big nested switch statement.
330 def wrap_decode_block(self, pre, post = ''):
331 self.decode_block = pre + indent(self.decode_block) + post
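# Illustrative sketch (added commentary, not from the original file; the
# variable names are made up):
#
#     code = inst_code + decoder_code    # section-wise concatenation
#     code.wrap_decode_block('case 0x1:\n', 'break;\n')
#     code.emit()                        # write sections to the open files
#
# The wrapped decode_block is indented a further two spaces by indent().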
332
333#####################################################################
334#
335# Bitfield Operator Support
336#
337#####################################################################
338
339bitOp1ArgRE = re.compile(r'<\s*(\w+)\s*:\s*>')
340
341bitOpWordRE = re.compile(r'(?<![\w\.])([\w\.]+)<\s*(\w+)\s*:\s*(\w+)\s*>')
342bitOpExprRE = re.compile(r'\)<\s*(\w+)\s*:\s*(\w+)\s*>')
343
344def substBitOps(code):
345 # first convert single-bit selectors to two-index form
346 # i.e., <n:> --> <n:n>
347 code = bitOp1ArgRE.sub(r'<\1:\1>', code)
348 # simple case: selector applied to ID (name)
349 # i.e., foo<a:b> --> bits(foo, a, b)
350 code = bitOpWordRE.sub(r'bits(\1, \2, \3)', code)
351 # if selector is applied to expression (ending in ')'),
352 # we need to search backward for matching '('
353 match = bitOpExprRE.search(code)
354 while match:
355 exprEnd = match.start()
356 here = exprEnd - 1
357 nestLevel = 1
358 while nestLevel > 0:
359 if code[here] == '(':
360 nestLevel -= 1
361 elif code[here] == ')':
362 nestLevel += 1
363 here -= 1
364 if here < 0:
365 sys.exit("Didn't find '('!")
366 exprStart = here+1
367 newExpr = r'bits(%s, %s, %s)' % (code[exprStart:exprEnd+1],
368 match.group(1), match.group(2))
369 code = code[:exprStart] + newExpr + code[match.end():]
370 match = bitOpExprRE.search(code)
371 return code
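# Worked example (added commentary, not from the original file): for the input
#
#     'Mem = Ra<31:0> + (Rb + Rc)<7:>;'
#
# substBitOps() first rewrites the single-bit selector '<7:>' to '<7:7>',
# then converts the plain-name selector to bits(Ra, 31, 0), and finally scans
# backward from ')<7:7>' to its matching '(' to produce
#
#     'Mem = bits(Ra, 31, 0) + bits((Rb + Rc), 7, 7);'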
372
373
374#####################################################################
375#
376# Code Parser
377#
378# The remaining code is the support for automatically extracting
379# instruction characteristics from pseudocode.
380#
381#####################################################################
382
383# Force the argument to be a list. Useful for flags, where a caller
384 # can specify a singleton flag or a list of flags. Also useful for
385# converting tuples to lists so they can be modified.
386def makeList(arg):
387 if isinstance(arg, list):
388 return arg
389 elif isinstance(arg, tuple):
390 return list(arg)
391 elif not arg:
392 return []
393 else:
394 return [ arg ]
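# Examples (added commentary, not from the original file):
#     makeList('IsInteger')            -> ['IsInteger']
#     makeList(('IsLoad', 'IsStore'))  -> ['IsLoad', 'IsStore']
#     makeList(None)                   -> []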
395
396class Operand(object):
397 '''Base class for operand descriptors. An instance of this class
398 (or actually a class derived from this one) represents a specific
399 operand for a code block (e.g, "Rc.sq" as a dest). Intermediate
400 derived classes encapsulates the traits of a particular operand
401 type (e.g., "32-bit integer register").'''
402
403 def buildReadCode(self, func = None):
404 subst_dict = {"name": self.base_name,
405 "func": func,
406 "reg_idx": self.reg_spec,
407 "ctype": self.ctype}
408 if hasattr(self, 'src_reg_idx'):
409 subst_dict['op_idx'] = self.src_reg_idx
410 code = self.read_code % subst_dict
411 return '%s = %s;\n' % (self.base_name, code)
412
413 def buildWriteCode(self, func = None):
414 subst_dict = {"name": self.base_name,
415 "func": func,
416 "reg_idx": self.reg_spec,
417 "ctype": self.ctype,
418 "final_val": self.base_name}
419 if hasattr(self, 'dest_reg_idx'):
420 subst_dict['op_idx'] = self.dest_reg_idx
421 code = self.write_code % subst_dict
422 return '''
423 {
424 %s final_val = %s;
425 %s;
426 if (traceData) { traceData->setData(final_val); }
427 }''' % (self.dflt_ctype, self.base_name, code)
428
429 def __init__(self, parser, full_name, ext, is_src, is_dest):
430 self.full_name = full_name
431 self.ext = ext
432 self.is_src = is_src
433 self.is_dest = is_dest
434 # The 'effective extension' (eff_ext) is either the actual
435 # extension, if one was explicitly provided, or the default.
436 if ext:
437 self.eff_ext = ext
438 elif hasattr(self, 'dflt_ext'):
439 self.eff_ext = self.dflt_ext
440
441 if hasattr(self, 'eff_ext'):
442 self.ctype = parser.operandTypeMap[self.eff_ext]
443
444 # Finalize additional fields (primarily code fields). This step
445 # is done separately since some of these fields may depend on the
446 # register index enumeration that hasn't been performed yet at the
447 # time of __init__(). The register index enumeration is affected
448 # by predicated register reads/writes. Hence, we forward the flags
449 # that indicate whether or not predication is in use.
450 def finalize(self, predRead, predWrite):
451 self.flags = self.getFlags()
452 self.constructor = self.makeConstructor(predRead, predWrite)
453 self.op_decl = self.makeDecl()
454
455 if self.is_src:
456 self.op_rd = self.makeRead(predRead)
457 self.op_src_decl = self.makeDecl()
458 else:
459 self.op_rd = ''
460 self.op_src_decl = ''
461
462 if self.is_dest:
463 self.op_wb = self.makeWrite(predWrite)
464 self.op_dest_decl = self.makeDecl()
465 else:
466 self.op_wb = ''
467 self.op_dest_decl = ''
468
469 def isMem(self):
470 return 0
471
472 def isReg(self):
473 return 0
474
475 def isFloatReg(self):
476 return 0
477
478 def isIntReg(self):
479 return 0
480
481 def isCCReg(self):
482 return 0
483
484 def isControlReg(self):
485 return 0
486
487 def isVecReg(self):
488 return 0
489
490 def isVecElem(self):
491 return 0
492
493 def isVecPredReg(self):
494 return 0
495
496 def isPCState(self):
497 return 0
498
499 def isPCPart(self):
500 return self.isPCState() and self.reg_spec
501
502 def hasReadPred(self):
503 return self.read_predicate != None
504
505 def hasWritePred(self):
506 return self.write_predicate != None
507
508 def getFlags(self):
509 # note the empty slice '[:]' gives us a copy of self.flags[0]
510 # instead of a reference to it
511 my_flags = self.flags[0][:]
512 if self.is_src:
513 my_flags += self.flags[1]
514 if self.is_dest:
515 my_flags += self.flags[2]
516 return my_flags
517
518 def makeDecl(self):
519 # Note that initializations in the declarations are solely
520 # to avoid 'uninitialized variable' errors from the compiler.
521 return self.ctype + ' ' + self.base_name + ' = 0;\n';
522
523
524src_reg_constructor = '\n\t_srcRegIdx[_numSrcRegs++] = RegId(%s, %s);'
525dst_reg_constructor = '\n\t_destRegIdx[_numDestRegs++] = RegId(%s, %s);'
526
527
528class IntRegOperand(Operand):
529 reg_class = 'IntRegClass'
530
531 def isReg(self):
532 return 1
533
534 def isIntReg(self):
535 return 1
536
537 def makeConstructor(self, predRead, predWrite):
538 c_src = ''
539 c_dest = ''
540
541 if self.is_src:
542 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
543 if self.hasReadPred():
544 c_src = '\n\tif (%s) {%s\n\t}' % \
545 (self.read_predicate, c_src)
546
547 if self.is_dest:
548 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
549 c_dest += '\n\t_numIntDestRegs++;'
550 if self.hasWritePred():
551 c_dest = '\n\tif (%s) {%s\n\t}' % \
552 (self.write_predicate, c_dest)
553
554 return c_src + c_dest
555
556 def makeRead(self, predRead):
557 if (self.ctype == 'float' or self.ctype == 'double'):
558 error('Attempt to read integer register as FP')
559 if self.read_code != None:
560 return self.buildReadCode('readIntRegOperand')
561
562 int_reg_val = ''
563 if predRead:
564 int_reg_val = 'xc->readIntRegOperand(this, _sourceIndex++)'
565 if self.hasReadPred():
566 int_reg_val = '(%s) ? %s : 0' % \
567 (self.read_predicate, int_reg_val)
568 else:
569 int_reg_val = 'xc->readIntRegOperand(this, %d)' % self.src_reg_idx
570
571 return '%s = %s;\n' % (self.base_name, int_reg_val)
572
573 def makeWrite(self, predWrite):
574 if (self.ctype == 'float' or self.ctype == 'double'):
575 error('Attempt to write integer register as FP')
576 if self.write_code != None:
577 return self.buildWriteCode('setIntRegOperand')
578
579 if predWrite:
580 wp = 'true'
581 if self.hasWritePred():
582 wp = self.write_predicate
583
584 wcond = 'if (%s)' % (wp)
585 windex = '_destIndex++'
586 else:
587 wcond = ''
588 windex = '%d' % self.dest_reg_idx
589
590 wb = '''
591 %s
592 {
593 %s final_val = %s;
594 xc->setIntRegOperand(this, %s, final_val);\n
595 if (traceData) { traceData->setData(final_val); }
596 }''' % (wcond, self.ctype, self.base_name, windex)
597
598 return wb
599
600class FloatRegOperand(Operand):
601 reg_class = 'FloatRegClass'
602
603 def isReg(self):
604 return 1
605
606 def isFloatReg(self):
607 return 1
608
609 def makeConstructor(self, predRead, predWrite):
610 c_src = ''
611 c_dest = ''
612
613 if self.is_src:
614 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
615
616 if self.is_dest:
617 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
618 c_dest += '\n\t_numFPDestRegs++;'
619
620 return c_src + c_dest
621
622 def makeRead(self, predRead):
623 if self.read_code != None:
624 return self.buildReadCode('readFloatRegOperandBits')
625
626 if predRead:
627 rindex = '_sourceIndex++'
628 else:
629 rindex = '%d' % self.src_reg_idx
630
631 code = 'xc->readFloatRegOperandBits(this, %s)' % rindex
632 if self.ctype == 'float':
633 code = 'bitsToFloat32(%s)' % code
634 elif self.ctype == 'double':
635 code = 'bitsToFloat64(%s)' % code
636 return '%s = %s;\n' % (self.base_name, code)
637
638 def makeWrite(self, predWrite):
639 if self.write_code != None:
640 return self.buildWriteCode('setFloatRegOperandBits')
641
642 if predWrite:
643 wp = '_destIndex++'
644 else:
645 wp = '%d' % self.dest_reg_idx
646
647 val = 'final_val'
648 if self.ctype == 'float':
649 val = 'floatToBits32(%s)' % val
650 elif self.ctype == 'double':
651 val = 'floatToBits64(%s)' % val
652
653 wp = 'xc->setFloatRegOperandBits(this, %s, %s);' % (wp, val)
654
655 wb = '''
656 {
657 %s final_val = %s;
658 %s\n
659 if (traceData) { traceData->setData(final_val); }
660 }''' % (self.ctype, self.base_name, wp)
661 return wb
662
663class VecRegOperand(Operand):
664 reg_class = 'VecRegClass'
665
666 def __init__(self, parser, full_name, ext, is_src, is_dest):
667 Operand.__init__(self, parser, full_name, ext, is_src, is_dest)
668 self.elemExt = None
669 self.parser = parser
670
671 def isReg(self):
672 return 1
673
674 def isVecReg(self):
675 return 1
676
677 def makeDeclElem(self, elem_op):
678 (elem_name, elem_ext) = elem_op
679 (elem_spec, dflt_elem_ext, zeroing) = self.elems[elem_name]
680 if elem_ext:
681 ext = elem_ext
682 else:
683 ext = dflt_elem_ext
684 ctype = self.parser.operandTypeMap[ext]
685 return '\n\t%s %s = 0;' % (ctype, elem_name)
686
687 def makeDecl(self):
688 if not self.is_dest and self.is_src:
689 c_decl = '\t/* Vars for %s*/' % (self.base_name)
690 if hasattr(self, 'active_elems'):
691 if self.active_elems:
692 for elem in self.active_elems:
693 c_decl += self.makeDeclElem(elem)
694 return c_decl + '\t/* End vars for %s */\n' % (self.base_name)
695 else:
696 return ''
697
698 def makeConstructor(self, predRead, predWrite):
699 c_src = ''
700 c_dest = ''
701
702 numAccessNeeded = 1
703
704 if self.is_src:
705 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
706
707 if self.is_dest:
708 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
709 c_dest += '\n\t_numVecDestRegs++;'
710
711 return c_src + c_dest
712
713 # Read destination register to write
714 def makeReadWElem(self, elem_op):
715 (elem_name, elem_ext) = elem_op
716 (elem_spec, dflt_elem_ext, zeroing) = self.elems[elem_name]
717 if elem_ext:
718 ext = elem_ext
719 else:
720 ext = dflt_elem_ext
721 ctype = self.parser.operandTypeMap[ext]
722 c_read = '\t\t%s& %s = %s[%s];\n' % \
723 (ctype, elem_name, self.base_name, elem_spec)
724 return c_read
725
726 def makeReadW(self, predWrite):
727 func = 'getWritableVecRegOperand'
728 if self.read_code != None:
729 return self.buildReadCode(func)
730
731 if predWrite:
732 rindex = '_destIndex++'
733 else:
734 rindex = '%d' % self.dest_reg_idx
735
736 c_readw = '\t\t%s& tmp_d%s = xc->%s(this, %s);\n'\
737 % ('TheISA::VecRegContainer', rindex, func, rindex)
738 if self.elemExt:
739 c_readw += '\t\tauto %s = tmp_d%s.as<%s>();\n' % (self.base_name,
740 rindex, self.parser.operandTypeMap[self.elemExt])
741 if self.ext:
742 c_readw += '\t\tauto %s = tmp_d%s.as<%s>();\n' % (self.base_name,
743 rindex, self.parser.operandTypeMap[self.ext])
744 if hasattr(self, 'active_elems'):
745 if self.active_elems:
746 for elem in self.active_elems:
747 c_readw += self.makeReadWElem(elem)
748 return c_readw
749
750 # Normal source operand read
751 def makeReadElem(self, elem_op, name):
752 (elem_name, elem_ext) = elem_op
753 (elem_spec, dflt_elem_ext, zeroing) = self.elems[elem_name]
754
755 if elem_ext:
756 ext = elem_ext
757 else:
758 ext = dflt_elem_ext
759 ctype = self.parser.operandTypeMap[ext]
760 c_read = '\t\t%s = %s[%s];\n' % \
761 (elem_name, name, elem_spec)
762 return c_read
763
764 def makeRead(self, predRead):
765 func = 'readVecRegOperand'
766 if self.read_code != None:
767 return self.buildReadCode(func)
768
769 if predRead:
770 rindex = '_sourceIndex++'
771 else:
772 rindex = '%d' % self.src_reg_idx
773
774 name = self.base_name
775 if self.is_dest and self.is_src:
776 name += '_merger'
777
778 c_read = '\t\t%s& tmp_s%s = xc->%s(this, %s);\n' \
779 % ('const TheISA::VecRegContainer', rindex, func, rindex)
780 # If the parser has detected that elements are being accessed, create
781 # the appropriate view
782 if self.elemExt:
783 c_read += '\t\tauto %s = tmp_s%s.as<%s>();\n' % \
784 (name, rindex, self.parser.operandTypeMap[self.elemExt])
785 if self.ext:
786 c_read += '\t\tauto %s = tmp_s%s.as<%s>();\n' % \
787 (name, rindex, self.parser.operandTypeMap[self.ext])
788 if hasattr(self, 'active_elems'):
789 if self.active_elems:
790 for elem in self.active_elems:
791 c_read += self.makeReadElem(elem, name)
792 return c_read
793
794 def makeWrite(self, predWrite):
795 func = 'setVecRegOperand'
796 if self.write_code != None:
797 return self.buildWriteCode(func)
798
799 wb = '''
800 if (traceData) {
801 traceData->setData(tmp_d%d);
802 }
803 ''' % self.dest_reg_idx
804 return wb
805
806 def finalize(self, predRead, predWrite):
807 super(VecRegOperand, self).finalize(predRead, predWrite)
808 if self.is_dest:
809 self.op_rd = self.makeReadW(predWrite) + self.op_rd
810
811class VecElemOperand(Operand):
812 reg_class = 'VecElemClass'
813
814 def isReg(self):
815 return 1
816
817 def isVecElem(self):
818 return 1
819
820 def makeDecl(self):
821 if self.is_dest and not self.is_src:
822 return '\n\t%s %s;' % (self.ctype, self.base_name)
823 else:
824 return ''
825
826 def makeConstructor(self, predRead, predWrite):
827 c_src = ''
828 c_dest = ''
829
830 numAccessNeeded = 1
831
832 if self.is_src:
833 c_src = ('\n\t_srcRegIdx[_numSrcRegs++] = RegId(%s, %s, %s);' %
834 (self.reg_class, self.reg_spec, self.elem_spec))
835
836 if self.is_dest:
837 c_dest = ('\n\t_destRegIdx[_numDestRegs++] = RegId(%s, %s, %s);' %
838 (self.reg_class, self.reg_spec, self.elem_spec))
839 c_dest += '\n\t_numVecElemDestRegs++;'
840 return c_src + c_dest
841
842 def makeRead(self, predRead):
843 c_read = 'xc->readVecElemOperand(this, %d)' % self.src_reg_idx
844
845 if self.ctype == 'float':
846 c_read = 'bitsToFloat32(%s)' % c_read
847 elif self.ctype == 'double':
848 c_read = 'bitsToFloat64(%s)' % c_read
849
850 return '\n\t%s %s = %s;\n' % (self.ctype, self.base_name, c_read)
851
852 def makeWrite(self, predWrite):
853 if self.ctype == 'float':
854 c_write = 'floatToBits32(%s)' % self.base_name
855 elif self.ctype == 'double':
856 c_write = 'floatToBits64(%s)' % self.base_name
857 else:
858 c_write = self.base_name
859
860 c_write = ('\n\txc->setVecElemOperand(this, %d, %s);' %
861 (self.dest_reg_idx, c_write))
862
863 return c_write
864
865class VecPredRegOperand(Operand):
866 reg_class = 'VecPredRegClass'
867
868 def __init__(self, parser, full_name, ext, is_src, is_dest):
869 Operand.__init__(self, parser, full_name, ext, is_src, is_dest)
870 self.parser = parser
871
872 def isReg(self):
873 return 1
874
875 def isVecPredReg(self):
876 return 1
877
878 def makeDecl(self):
879 return ''
880
881 def makeConstructor(self, predRead, predWrite):
882 c_src = ''
883 c_dest = ''
884
885 if self.is_src:
886 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
887
888 if self.is_dest:
889 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
890 c_dest += '\n\t_numVecPredDestRegs++;'
891
892 return c_src + c_dest
893
894 def makeRead(self, predRead):
895 func = 'readVecPredRegOperand'
896 if self.read_code != None:
897 return self.buildReadCode(func)
898
899 if predRead:
900 rindex = '_sourceIndex++'
901 else:
902 rindex = '%d' % self.src_reg_idx
903
904 c_read = '\t\t%s& tmp_s%s = xc->%s(this, %s);\n' % (
905 'const TheISA::VecPredRegContainer', rindex, func, rindex)
906 if self.ext:
907 c_read += '\t\tauto %s = tmp_s%s.as<%s>();\n' % (
908 self.base_name, rindex,
909 self.parser.operandTypeMap[self.ext])
910 return c_read
911
912 def makeReadW(self, predWrite):
913 func = 'getWritableVecPredRegOperand'
914 if self.read_code != None:
915 return self.buildReadCode(func)
916
917 if predWrite:
918 rindex = '_destIndex++'
919 else:
920 rindex = '%d' % self.dest_reg_idx
921
922 c_readw = '\t\t%s& tmp_d%s = xc->%s(this, %s);\n' % (
923 'TheISA::VecPredRegContainer', rindex, func, rindex)
924 if self.ext:
925 c_readw += '\t\tauto %s = tmp_d%s.as<%s>();\n' % (
926 self.base_name, rindex,
927 self.parser.operandTypeMap[self.ext])
928 return c_readw
929
930 def makeWrite(self, predWrite):
931 func = 'setVecPredRegOperand'
932 if self.write_code != None:
933 return self.buildWriteCode(func)
934
935 wb = '''
936 if (traceData) {
937 traceData->setData(tmp_d%d);
938 }
939 ''' % self.dest_reg_idx
940 return wb
941
942 def finalize(self, predRead, predWrite):
943 super(VecPredRegOperand, self).finalize(predRead, predWrite)
944 if self.is_dest:
945 self.op_rd = self.makeReadW(predWrite) + self.op_rd
946
947class CCRegOperand(Operand):
948 reg_class = 'CCRegClass'
949
950 def isReg(self):
951 return 1
952
953 def isCCReg(self):
954 return 1
955
956 def makeConstructor(self, predRead, predWrite):
957 c_src = ''
958 c_dest = ''
959
960 if self.is_src:
961 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
962 if self.hasReadPred():
963 c_src = '\n\tif (%s) {%s\n\t}' % \
964 (self.read_predicate, c_src)
965
966 if self.is_dest:
967 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
968 c_dest += '\n\t_numCCDestRegs++;'
969 if self.hasWritePred():
970 c_dest = '\n\tif (%s) {%s\n\t}' % \
971 (self.write_predicate, c_dest)
972
973 return c_src + c_dest
974
975 def makeRead(self, predRead):
976 if (self.ctype == 'float' or self.ctype == 'double'):
977 error('Attempt to read condition-code register as FP')
978 if self.read_code != None:
979 return self.buildReadCode('readCCRegOperand')
980
981 int_reg_val = ''
982 if predRead:
983 int_reg_val = 'xc->readCCRegOperand(this, _sourceIndex++)'
984 if self.hasReadPred():
985 int_reg_val = '(%s) ? %s : 0' % \
986 (self.read_predicate, int_reg_val)
987 else:
988 int_reg_val = 'xc->readCCRegOperand(this, %d)' % self.src_reg_idx
989
990 return '%s = %s;\n' % (self.base_name, int_reg_val)
991
992 def makeWrite(self, predWrite):
993 if (self.ctype == 'float' or self.ctype == 'double'):
994 error('Attempt to write condition-code register as FP')
995 if self.write_code != None:
996 return self.buildWriteCode('setCCRegOperand')
997
998 if predWrite:
999 wp = 'true'
1000 if self.hasWritePred():
1001 wp = self.write_predicate
1002
1003 wcond = 'if (%s)' % (wp)
1004 windex = '_destIndex++'
1005 else:
1006 wcond = ''
1007 windex = '%d' % self.dest_reg_idx
1008
1009 wb = '''
1010 %s
1011 {
1012 %s final_val = %s;
1013 xc->setCCRegOperand(this, %s, final_val);\n
1014 if (traceData) { traceData->setData(final_val); }
1015 }''' % (wcond, self.ctype, self.base_name, windex)
1016
1017 return wb
1018
1019class ControlRegOperand(Operand):
1020 reg_class = 'MiscRegClass'
1021
1022 def isReg(self):
1023 return 1
1024
1025 def isControlReg(self):
1026 return 1
1027
1028 def makeConstructor(self, predRead, predWrite):
1029 c_src = ''
1030 c_dest = ''
1031
1032 if self.is_src:
1033 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
1034
1035 if self.is_dest:
1036 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
1037
1038 return c_src + c_dest
1039
1040 def makeRead(self, predRead):
1041 bit_select = 0
1042 if (self.ctype == 'float' or self.ctype == 'double'):
1043 error('Attempt to read control register as FP')
1044 if self.read_code != None:
1045 return self.buildReadCode('readMiscRegOperand')
1046
1047 if predRead:
1048 rindex = '_sourceIndex++'
1049 else:
1050 rindex = '%d' % self.src_reg_idx
1051
1052 return '%s = xc->readMiscRegOperand(this, %s);\n' % \
1053 (self.base_name, rindex)
1054
1055 def makeWrite(self, predWrite):
1056 if (self.ctype == 'float' or self.ctype == 'double'):
1057 error('Attempt to write control register as FP')
1058 if self.write_code != None:
1059 return self.buildWriteCode('setMiscRegOperand')
1060
1061 if predWrite:
1062 windex = '_destIndex++'
1063 else:
1064 windex = '%d' % self.dest_reg_idx
1065
1066 wb = 'xc->setMiscRegOperand(this, %s, %s);\n' % \
1067 (windex, self.base_name)
1068 wb += 'if (traceData) { traceData->setData(%s); }' % \
1069 self.base_name
1070
1071 return wb
1072
1073class MemOperand(Operand):
1074 def isMem(self):
1075 return 1
1076
1077 def makeConstructor(self, predRead, predWrite):
1078 return ''
1079
1080 def makeDecl(self):
1081 # Declare memory data variable.
1082 return '%s %s;\n' % (self.ctype, self.base_name)
1083
1084 def makeRead(self, predRead):
1085 if self.read_code != None:
1086 return self.buildReadCode()
1087 return ''
1088
1089 def makeWrite(self, predWrite):
1090 if self.write_code != None:
1091 return self.buildWriteCode()
1092 return ''
1093
1094class PCStateOperand(Operand):
1095 def makeConstructor(self, predRead, predWrite):
1096 return ''
1097
1098 def makeRead(self, predRead):
1099 if self.reg_spec:
1100 # A component of the PC state.
1101 return '%s = __parserAutoPCState.%s();\n' % \
1102 (self.base_name, self.reg_spec)
1103 else:
1104 # The whole PC state itself.
1105 return '%s = xc->pcState();\n' % self.base_name
1106
1107 def makeWrite(self, predWrite):
1108 if self.reg_spec:
1109 # A component of the PC state.
1110 return '__parserAutoPCState.%s(%s);\n' % \
1111 (self.reg_spec, self.base_name)
1112 else:
1113 # The whole PC state itself.
1114 return 'xc->pcState(%s);\n' % self.base_name
1115
1116 def makeDecl(self):
1117 ctype = 'TheISA::PCState'
1118 if self.isPCPart():
1119 ctype = self.ctype
1120 # Note that initializations in the declarations are solely
1121 # to avoid 'uninitialized variable' errors from the compiler.
1122 return '%s %s = 0;\n' % (ctype, self.base_name)
1123
1124 def isPCState(self):
1125 return 1
1126
1127class OperandList(object):
1128 '''Find all the operands in the given code block. Returns an operand
1129 descriptor list (instance of class OperandList).'''
1130 def __init__(self, parser, code):
1131 self.items = []
1132 self.bases = {}
1133 # delete strings and comments so we don't match on operands inside
1134 for regEx in (stringRE, commentRE):
1135 code = regEx.sub('', code)
1136 # search for operands
1137 next_pos = 0
1138 while 1:
1139 match = parser.operandsRE.search(code, next_pos)
1140 if not match:
1141 # no more matches: we're done
1142 break
1143 op = match.groups()
1144 # regexp groups are operand full name, base, and extension
1145 (op_full, op_base, op_ext) = op
1146 # If this is an elem operand, define or update the corresponding
1147 # vector operand
1148 isElem = False
1149 if op_base in parser.elemToVector:
1150 isElem = True
1151 elem_op = (op_base, op_ext)
1152 op_base = parser.elemToVector[op_base]
1153 op_ext = '' # use the default one
1154 # if the token following the operand is an assignment, this is
1155 # a destination (LHS), else it's a source (RHS)
1156 is_dest = (assignRE.match(code, match.end()) != None)
1157 is_src = not is_dest
1158
1159 # see if we've already seen this one
1160 op_desc = self.find_base(op_base)
1161 if op_desc:
1162 if op_ext and op_ext != '' and op_desc.ext != op_ext:
1163 error ('Inconsistent extensions for operand %s: %s - %s' \
1164 % (op_base, op_desc.ext, op_ext))
1165 op_desc.is_src = op_desc.is_src or is_src
1166 op_desc.is_dest = op_desc.is_dest or is_dest
1167 if isElem:
1168 (elem_base, elem_ext) = elem_op
1169 found = False
1170 for ae in op_desc.active_elems:
1171 (ae_base, ae_ext) = ae
1172 if ae_base == elem_base:
1173 if ae_ext != elem_ext:
1174 error('Inconsistent extensions for elem'
1175 ' operand %s' % elem_base)
1176 else:
1177 found = True
1178 if not found:
1179 op_desc.active_elems.append(elem_op)
1180 else:
1181 # new operand: create new descriptor
1182 op_desc = parser.operandNameMap[op_base](parser,
1183 op_full, op_ext, is_src, is_dest)
1184 # if operand is a vector elem, add the corresponding vector
1185 # operand if not already done
1186 if isElem:
1187 op_desc.elemExt = elem_op[1]
1188 op_desc.active_elems = [elem_op]
1189 self.append(op_desc)
1190 # start next search after end of current match
1191 next_pos = match.end()
1192 self.sort()
1193 # enumerate source & dest register operands... used in building
1194 # constructor later
1195 self.numSrcRegs = 0
1196 self.numDestRegs = 0
1197 self.numFPDestRegs = 0
1198 self.numIntDestRegs = 0
1199 self.numVecDestRegs = 0
1200 self.numVecPredDestRegs = 0
1201 self.numCCDestRegs = 0
1202 self.numMiscDestRegs = 0
1203 self.memOperand = None
1204
1205 # Flags to keep track if one or more operands are to be read/written
1206 # conditionally.
1207 self.predRead = False
1208 self.predWrite = False
1209
1210 for op_desc in self.items:
1211 if op_desc.isReg():
1212 if op_desc.is_src:
1213 op_desc.src_reg_idx = self.numSrcRegs
1214 self.numSrcRegs += 1
1215 if op_desc.is_dest:
1216 op_desc.dest_reg_idx = self.numDestRegs
1217 self.numDestRegs += 1
1218 if op_desc.isFloatReg():
1219 self.numFPDestRegs += 1
1220 elif op_desc.isIntReg():
1221 self.numIntDestRegs += 1
1222 elif op_desc.isVecReg():
1223 self.numVecDestRegs += 1
1224 elif op_desc.isVecPredReg():
1225 self.numVecPredDestRegs += 1
1226 elif op_desc.isCCReg():
1227 self.numCCDestRegs += 1
1228 elif op_desc.isControlReg():
1229 self.numMiscDestRegs += 1
1230 elif op_desc.isMem():
1231 if self.memOperand:
1232 error("Code block has more than one memory operand.")
1233 self.memOperand = op_desc
1234
1235 # Check if this operand has read/write predication. If true, then
1236 # the microop will dynamically index source/dest registers.
1237 self.predRead = self.predRead or op_desc.hasReadPred()
1238 self.predWrite = self.predWrite or op_desc.hasWritePred()
1239
1240 if parser.maxInstSrcRegs < self.numSrcRegs:
1241 parser.maxInstSrcRegs = self.numSrcRegs
1242 if parser.maxInstDestRegs < self.numDestRegs:
1243 parser.maxInstDestRegs = self.numDestRegs
1244 if parser.maxMiscDestRegs < self.numMiscDestRegs:
1245 parser.maxMiscDestRegs = self.numMiscDestRegs
1246
1247 # now make a final pass to finalize op_desc fields that may depend
1248 # on the register enumeration
1249 for op_desc in self.items:
1250 op_desc.finalize(self.predRead, self.predWrite)
1251
1252 def __len__(self):
1253 return len(self.items)
1254
1255 def __getitem__(self, index):
1256 return self.items[index]
1257
1258 def append(self, op_desc):
1259 self.items.append(op_desc)
1260 self.bases[op_desc.base_name] = op_desc
1261
1262 def find_base(self, base_name):
1263 # like self.bases[base_name], but returns None if not found
1264 # (rather than raising exception)
1265 return self.bases.get(base_name)
1266
1267 # internal helper function for concat[Some]Attr{Strings|Lists}
1268 def __internalConcatAttrs(self, attr_name, filter, result):
1269 for op_desc in self.items:
1270 if filter(op_desc):
1271 result += getattr(op_desc, attr_name)
1272 return result
1273
1274 # return a single string that is the concatenation of the (string)
1275 # values of the specified attribute for all operands
1276 def concatAttrStrings(self, attr_name):
1277 return self.__internalConcatAttrs(attr_name, lambda x: 1, '')
1278
1279 # like concatAttrStrings, but only include the values for the operands
1280 # for which the provided filter function returns true
1281 def concatSomeAttrStrings(self, filter, attr_name):
1282 return self.__internalConcatAttrs(attr_name, filter, '')
1283
1284 # return a single list that is the concatenation of the (list)
1285 # values of the specified attribute for all operands
1286 def concatAttrLists(self, attr_name):
1287 return self.__internalConcatAttrs(attr_name, lambda x: 1, [])
1288
1289 # like concatAttrLists, but only include the values for the operands
1290 # for which the provided filter function returns true
1291 def concatSomeAttrLists(self, filter, attr_name):
1292 return self.__internalConcatAttrs(attr_name, filter, [])
1293
1294 def sort(self):
1295 self.items.sort(lambda a, b: a.sort_pri - b.sort_pri)
1296
1297class SubOperandList(OperandList):
1298 '''Find all the operands in the given code block. Returns an operand
1299 descriptor list (instance of class OperandList).'''
1300 def __init__(self, parser, code, master_list):
1301 self.items = []
1302 self.bases = {}
1303 # delete strings and comments so we don't match on operands inside
1304 for regEx in (stringRE, commentRE):
1305 code = regEx.sub('', code)
1306 # search for operands
1307 next_pos = 0
1308 while 1:
1309 match = parser.operandsRE.search(code, next_pos)
1310 if not match:
1311 # no more matches: we're done
1312 break
1313 op = match.groups()
1314 # regexp groups are operand full name, base, and extension
1315 (op_full, op_base, op_ext) = op
1316 # If this is an elem operand, define or update the corresponding
1317 # vector operand
1318 if op_base in parser.elemToVector:
1319 elem_op = op_base
1320 op_base = parser.elemToVector[elem_op]
1321 # find this op in the master list
1322 op_desc = master_list.find_base(op_base)
1323 if not op_desc:
1324 error('Found operand %s which is not in the master list!'
1325 % op_base)
1326 else:
1327 # See if we've already found this operand
1328 op_desc = self.find_base(op_base)
1329 if not op_desc:
1330 # if not, add a reference to it to this sub list
1331 self.append(master_list.bases[op_base])
1332
1333 # start next search after end of current match
1334 next_pos = match.end()
1335 self.sort()
1336 self.memOperand = None
1337 # Whether the whole PC needs to be read so parts of it can be accessed
1338 self.readPC = False
1339 # Whether the whole PC needs to be written after parts of it were
1340 # changed
1341 self.setPC = False
1342 # Whether this instruction manipulates the whole PC or parts of it.
1343 # Mixing the two is a bad idea and flagged as an error.
1344 self.pcPart = None
1345
1346 # Flags to keep track if one or more operands are to be read/written
1347 # conditionally.
1348 self.predRead = False
1349 self.predWrite = False
1350
1351 for op_desc in self.items:
1352 if op_desc.isPCPart():
1353 self.readPC = True
1354 if op_desc.is_dest:
1355 self.setPC = True
1356
1357 if op_desc.isPCState():
1358 if self.pcPart is not None:
1359 if self.pcPart and not op_desc.isPCPart() or \
1360 not self.pcPart and op_desc.isPCPart():
1361 error("Mixed whole and partial PC state operands.")
1362 self.pcPart = op_desc.isPCPart()
1363
1364 if op_desc.isMem():
1365 if self.memOperand:
1366 error("Code block has more than one memory operand.")
1367 self.memOperand = op_desc
1368
1369 # Check if this operand has read/write predication. If true, then
1370 # the microop will dynamically index source/dest registers.
1371 self.predRead = self.predRead or op_desc.hasReadPred()
1372 self.predWrite = self.predWrite or op_desc.hasWritePred()
1373
1374# Regular expression object to match C++ strings
1375stringRE = re.compile(r'"([^"\\]|\\.)*"')
1376
1377# Regular expression object to match C++ comments
1378# (used in findOperands())
1379commentRE = re.compile(r'(^)?[^\S\n]*/(?:\*(.*?)\*/[^\S\n]*|/[^\n]*)($)?',
1380 re.DOTALL | re.MULTILINE)
1381
1382# Regular expression object to match assignment statements (used in
1383# findOperands()). If the code immediately following the first
1384# appearance of the operand matches this regex, then the operand
1385# appears to be on the LHS of an assignment, and is thus a
1386# destination. basically we're looking for an '=' that's not '=='.
1387# The heinous tangle before that handles the case where the operand
1388# has an array subscript.
1389assignRE = re.compile(r'(\[[^\]]+\])?\s*=(?!=)', re.MULTILINE)
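# Illustrative behavior (added commentary, not from the original file):
# scanning 'Rd = Ra + Rb;' just past the token 'Rd' finds ' =' (not '=='),
# so Rd is classified as a destination, while in 'if (Ra == Rb)' the text
# after 'Ra' starts with ' ==' and the negative look-ahead rejects it, so Ra
# stays a source. 'Mem[offset] = Rd;' still matches thanks to the optional
# leading '[...]' group.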
1390
1391def makeFlagConstructor(flag_list):
1392 if len(flag_list) == 0:
1393 return ''
1394 # filter out repeated flags
1395 flag_list.sort()
1396 i = 1
1397 while i < len(flag_list):
1398 if flag_list[i] == flag_list[i-1]:
1399 del flag_list[i]
1400 else:
1401 i += 1
1402 pre = '\n\tflags['
1403 post = '] = true;'
1404 code = pre + string.join(flag_list, post + pre) + post
1405 return code
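# Example output (added commentary, not from the original file):
#     makeFlagConstructor(['IsInteger', 'IsLoad', 'IsInteger'])
# sorts the list, drops the duplicate, and returns
#     '\n\tflags[IsInteger] = true;\n\tflags[IsLoad] = true;'
# ready to be appended to a generated constructor body.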
1406
1407# Assume all instruction flags are of the form 'IsFoo'
1408instFlagRE = re.compile(r'Is.*')
1409
1410# OpClass constants end in 'Op' except No_OpClass
1411opClassRE = re.compile(r'.*Op|No_OpClass')
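# For example (added commentary, not from the original file): instFlagRE
# matches optional arguments such as 'IsLoad' or 'IsSerializing', while
# opClassRE matches 'MemReadOp', 'IntAluOp' and the special 'No_OpClass'
# value; anything matching neither is reported as an error below.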
1412
1413class InstObjParams(object):
1414 def __init__(self, parser, mnem, class_name, base_class = '',
1415 snippets = {}, opt_args = []):
1416 self.mnemonic = mnem
1417 self.class_name = class_name
1418 self.base_class = base_class
1419 if not isinstance(snippets, dict):
1420 snippets = {'code' : snippets}
1421 compositeCode = ' '.join(map(str, snippets.values()))
1422 self.snippets = snippets
1423
1424 self.operands = OperandList(parser, compositeCode)
1425
1426 # The header of the constructor declares the variables to be used
1427 # in the body of the constructor.
1428 header = ''
1429 header += '\n\t_numSrcRegs = 0;'
1430 header += '\n\t_numDestRegs = 0;'
1431 header += '\n\t_numFPDestRegs = 0;'
1432 header += '\n\t_numVecDestRegs = 0;'
1433 header += '\n\t_numVecElemDestRegs = 0;'
1434 header += '\n\t_numVecPredDestRegs = 0;'
1435 header += '\n\t_numIntDestRegs = 0;'
1436 header += '\n\t_numCCDestRegs = 0;'
1437
1438 self.constructor = header + \
1439 self.operands.concatAttrStrings('constructor')
1440
1441 self.flags = self.operands.concatAttrLists('flags')
1442
1443 self.op_class = None
1444
1445 # Optional arguments are assumed to be either StaticInst flags
1446 # or an OpClass value. To avoid having to import a complete
1447 # list of these values to match against, we do it ad-hoc
1448 # with regexps.
1449 for oa in opt_args:
1450 if instFlagRE.match(oa):
1451 self.flags.append(oa)
1452 elif opClassRE.match(oa):
1453 self.op_class = oa
1454 else:
1455 error('InstObjParams: optional arg "%s" not recognized '
1456 'as StaticInst::Flag or OpClass.' % oa)
1457
1458 # Make a basic guess on the operand class if not set.
1459 # These are good enough for most cases.
1460 if not self.op_class:
1461 if 'IsStore' in self.flags:
1462 # The order matters here: 'IsFloating' and 'IsInteger' are
1463 # usually set in FP instructions because of the base
1464 # register
1465 if 'IsFloating' in self.flags:
1466 self.op_class = 'FloatMemWriteOp'
1467 else:
1468 self.op_class = 'MemWriteOp'
1469 elif 'IsLoad' in self.flags or 'IsPrefetch' in self.flags:
1470 # The order matters here: 'IsFloating' and 'IsInteger' are
1471 # usually set in FP instructions because of the base
1472 # register
1473 if 'IsFloating' in self.flags:
1474 self.op_class = 'FloatMemReadOp'
1475 else:
1476 self.op_class = 'MemReadOp'
1477 elif 'IsFloating' in self.flags:
1478 self.op_class = 'FloatAddOp'
1479 elif 'IsVector' in self.flags:
1480 self.op_class = 'SimdAddOp'
1481 else:
1482 self.op_class = 'IntAluOp'
1483
1484 # add flag initialization to constructor here to include
1485 # any flags added via opt_args
1486 self.constructor += makeFlagConstructor(self.flags)
1487
1488 # if 'IsFloating' is set, add call to the FP enable check
1489 # function (which should be provided by isa_desc via a declare)
1490 # if 'IsVector' is set, add call to the Vector enable check
1491 # function (which should be provided by isa_desc via a declare)
1492 if 'IsFloating' in self.flags:
1493 self.fp_enable_check = 'fault = checkFpEnableFault(xc);'
1494 elif 'IsVector' in self.flags:
1495 self.fp_enable_check = 'fault = checkVecEnableFault(xc);'
1496 else:
1497 self.fp_enable_check = ''
1498
1499##############
1500# Stack: a simple stack object. Used for both formats (formatStack)
1501# and default cases (defaultStack). Simply wraps a list to give more
1502# stack-like syntax and enable initialization with an argument list
1503# (as opposed to an argument that's a list).
1504
1505class Stack(list):
1506 def __init__(self, *items):
1507 list.__init__(self, items)
1508
1509 def push(self, item):
1510 self.append(item);
1511
1512 def top(self):
1513 return self[-1]
1514
1515# Format a file include stack backtrace as a string
1516def backtrace(filename_stack):
1517 fmt = "In file included from %s:"
1518 return "\n".join([fmt % f for f in filename_stack])
1519
1520
1521#######################
1522#
1523# LineTracker: track filenames along with line numbers in PLY lineno fields
1524# PLY explicitly doesn't do anything with 'lineno' except propagate
1525# it. This class lets us tie filenames with the line numbers with a
1526# minimum of disruption to existing increment code.
1527#
1528
1529class LineTracker(object):
1530 def __init__(self, filename, lineno=1):
1531 self.filename = filename
1532 self.lineno = lineno
1533
1534 # Overload '+=' for increments. We need to create a new object on
1535 # each update else every token ends up referencing the same
1536 # constantly incrementing instance.
1537 def __iadd__(self, incr):
1538 return LineTracker(self.filename, self.lineno + incr)
1539
1540 def __str__(self):
1541 return "%s:%d" % (self.filename, self.lineno)
1542
1543 # In case there are places where someone really expects a number
1544 def __int__(self):
1545 return self.lineno
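# Usage sketch (added commentary, not from the original file; the file name
# is made up):
#     loc = LineTracker('decoder.isa')
#     loc += 3       # yields a *new* tracker at line 4
#     str(loc)       # -> 'decoder.isa:4'
#     int(loc)       # -> 4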
1546
1547
1548#######################
1549#
1550# ISA Parser
1551# parses ISA DSL and emits C++ headers and source
1552#
1553
1554class ISAParser(Grammar):
1555 def __init__(self, output_dir):
1556 super(ISAParser, self).__init__()
1557 self.output_dir = output_dir
1558
1559 self.filename = None # for output file watermarking/scaremongering
1560
1561 # variable to hold templates
1562 self.templateMap = {}
1563
1564 # This dictionary maps format name strings to Format objects.
1565 self.formatMap = {}
1566
1567 # Track open files and, if applicable, how many chunks each has been
1568 # split into so far.
1569 self.files = {}
1570 self.splits = {}
1571
1572 # isa_name / namespace identifier from the namespace declaration;
1573 # both are None until the namespace declaration has been parsed.
1574 self.isa_name = None
1575 self.namespace = None
1576
1577 # The format stack.
1578 self.formatStack = Stack(NoFormat())
1579
1580 # The default case stack.
1581 self.defaultStack = Stack(None)
1582
1583 # Stack that tracks the current file and line number. Each
1584 # element is a LineTracker that records the *current*
1585 # filename and the line number in the *previous* file
1586 # where it was included.
1587 self.fileNameStack = Stack()
1588
1589 symbols = ('makeList', 're', 'string')
1590 self.exportContext = dict([(s, eval(s)) for s in symbols])
1591
1592 self.maxInstSrcRegs = 0
1593 self.maxInstDestRegs = 0
1594 self.maxMiscDestRegs = 0
1595
1596 def __getitem__(self, i): # Allow object (self) to be
1597 return getattr(self, i) # passed to %-substitutions
1598
1599 # Change the file suffix of a base filename:
1600 # (e.g.) decoder.cc -> decoder-g.cc.inc for 'global' outputs
1601 def suffixize(self, s, sec):
1602 extn = re.compile('(\.[^\.]+)$') # isolate extension
1603 if self.namespace:
1604 return extn.sub(r'-ns\1.inc', s) # insert some text on either side
1605 else:
1606 return extn.sub(r'-g\1.inc', s)
1607
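# For example (illustrative only): before the namespace declaration,
# suffixize('decoder.cc', 'decoder') returns 'decoder-g.cc.inc' (a 'global'
# chunk); once self.namespace has been set by the namespace declaration, the
# same call returns 'decoder-ns.cc.inc' (a chunk inside the ISA namespace).
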
1608 # Get the file object for emitting code into the specified section
1609 # (header, decoder, exec, decode_block).
1610 def get_file(self, section):
1611 if section == 'decode_block':
1612 filename = 'decode-method.cc.inc'
1613 else:
1614 if section == 'header':
1615 file = 'decoder.hh'
1616 else:
1617 file = '%s.cc' % section
1618 filename = self.suffixize(file, section)
1619 try:
1620 return self.files[filename]
1621 except KeyError: pass
1622
1623 f = self.open(filename)
1624 self.files[filename] = f
1625
1626 # The splittable files are the ones with many independent
1627 # per-instruction functions - the decoder's instruction constructors
1628 # and the instruction execution (execute()) methods. These both have
1629 # the suffix -ns.cc.inc, meaning they are within the namespace part
1630 # of the ISA, contain object-emitting C++ source, and are included
1631 # into other top-level files. These are the files that need special
1632 # #define's to allow parts of them to be compiled separately. Rather
1633 # than splitting the emissions into separate files, the monolithic
1634 # output of the ISA parser is maintained, but the value (or lack
1635 # thereof) of the __SPLIT definition during C preprocessing will
1636 # select the different chunks. If no 'split' directives are used,
1637 # the cpp emissions have no effect.
1638 if re.search('-ns.cc.inc$', filename):
1639 print('#if !defined(__SPLIT) || (__SPLIT == 1)', file=f)
1640 self.splits[f] = 1
1641 # ensure requisite #include's
1642 elif filename == 'decoder-g.hh.inc':
1643 print('#include "base/bitfield.hh"', file=f)
1644
1645 return f
1646
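# Sketch of the resulting -ns.cc.inc layout (illustrative, assuming a single
# 'split decoder;' directive was seen in the ISA description):
#
#     #if !defined(__SPLIT) || (__SPLIT == 1)
#     ... constructors emitted before the split ...
#     #endif
#     #if __SPLIT == 2
#     ... constructors emitted after the split ...
#     #endif
#
# write_top_level_files() then generates inst-constrs-1.cc and
# inst-constrs-2.cc, each defining __SPLIT to a different value before
# including the chunked file, so each chunk compiles in its own translation
# unit.
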
1647 # Weave together the parts of the different output sections by
1648 # #include'ing them into some very short top-level .cc/.hh files.
1649 # These small files make it much clearer how this tool works, since
1650 # you directly see the chunks emitted as files that are #include'd.
1651 def write_top_level_files(self):
1652 # decoder header - everything depends on this
1653 file = 'decoder.hh'
1654 with self.open(file) as f:
1655 fn = 'decoder-g.hh.inc'
1656 assert(fn in self.files)
1657 f.write('#include "%s"\n' % fn)
1658
1659 fn = 'decoder-ns.hh.inc'
1660 assert(fn in self.files)
1661 f.write('namespace %s {\n#include "%s"\n}\n'
1662 % (self.namespace, fn))
1663
1664 # decoder method - cannot be split
1665 file = 'decoder.cc'
1666 with self.open(file) as f:
1667 fn = 'base/compiler.hh'
1668 f.write('#include "%s"\n' % fn)
1669
1670 fn = 'decoder-g.cc.inc'
1671 assert(fn in self.files)
1672 f.write('#include "%s"\n' % fn)
1673
1674 fn = 'decoder.hh'
1675 f.write('#include "%s"\n' % fn)
1676
1677 fn = 'decode-method.cc.inc'
1678 # is guaranteed to have been written for the parse to complete
1679 f.write('#include "%s"\n' % fn)
1680
1681 extn = re.compile('(\.[^\.]+)$')
1682
1683 # instruction constructors
1684 splits = self.splits[self.get_file('decoder')]
1685 file_ = 'inst-constrs.cc'
1686 for i in range(1, splits+1):
1687 if splits > 1:
1688 file = extn.sub(r'-%d\1' % i, file_)
1689 else:
1690 file = file_
1691 with self.open(file) as f:
1692 fn = 'decoder-g.cc.inc'
1693 assert(fn in self.files)
1694 f.write('#include "%s"\n' % fn)
1695
1696 fn = 'decoder.hh'
1697 f.write('#include "%s"\n' % fn)
1698
1699 fn = 'decoder-ns.cc.inc'
1700 assert(fn in self.files)
1701 print('namespace %s {' % self.namespace, file=f)
1702 if splits > 1:
1703 print('#define __SPLIT %u' % i, file=f)
1704 print('#include "%s"' % fn, file=f)
1705 print('}', file=f)
1706
1707 # instruction execution
1708 splits = self.splits[self.get_file('exec')]
1709 for i in range(1, splits+1):
1710 file = 'generic_cpu_exec.cc'
1711 if splits > 1:
1712 file = extn.sub(r'_%d\1' % i, file)
1713 with self.open(file) as f:
1714 fn = 'exec-g.cc.inc'
1715 assert(fn in self.files)
1716 f.write('#include "%s"\n' % fn)
1717 f.write('#include "cpu/exec_context.hh"\n')
1718 f.write('#include "decoder.hh"\n')
1719
1720 fn = 'exec-ns.cc.inc'
1721 assert(fn in self.files)
1722 print('namespace %s {' % self.namespace, file=f)
1723 if splits > 1:
1724 print('#define __SPLIT %u' % i, file=f)
1725 print('#include "%s"' % fn, file=f)
1726 print('}', file=f)
1727
1728 # max_inst_regs.hh
1729 self.update('max_inst_regs.hh',
1730 '''namespace %(namespace)s {
1731 const int MaxInstSrcRegs = %(maxInstSrcRegs)d;
1732 const int MaxInstDestRegs = %(maxInstDestRegs)d;
1733 const int MaxMiscDestRegs = %(maxMiscDestRegs)d;\n}\n''' % self)
1734
1735 scaremonger_template ='''// DO NOT EDIT
1736// This file was automatically generated from an ISA description:
1737// %(filename)s
1738
1739''';
1740
1741 #####################################################################
1742 #
1743 # Lexer
1744 #
1745 # The PLY lexer module takes two things as input:
1746 # - A list of token names (the string list 'tokens')
1747 # - A regular expression describing a match for each token. The
1748 # regexp for token FOO can be provided in two ways:
1749 # - as a string variable named t_FOO
1750 # - as the doc string for a function named t_FOO. In this case,
1751 # the function is also executed, allowing an action to be
1752 # associated with each token match.
1753 #
1754 #####################################################################
1755
1756 # Reserved words. These are listed separately as they are matched
1757 # using the same regexp as generic IDs, but distinguished in the
1758 # t_ID() function. The PLY documentation suggests this approach.
1759 reserved = (
1760 'BITFIELD', 'DECODE', 'DECODER', 'DEFAULT', 'DEF', 'EXEC', 'FORMAT',
1761 'HEADER', 'LET', 'NAMESPACE', 'OPERAND_TYPES', 'OPERANDS',
1762 'OUTPUT', 'SIGNED', 'SPLIT', 'TEMPLATE'
1763 )
1764
1765 # List of tokens. The lex module requires this.
1766 tokens = reserved + (
1767 # identifier
1768 'ID',
1769
1770 # integer literal
1771 'INTLIT',
1772
1773 # string literal
1774 'STRLIT',
1775
1776 # code literal
1777 'CODELIT',
1778
1779 # ( ) [ ] { } < > , ; . : :: *
1780 'LPAREN', 'RPAREN',
1781 'LBRACKET', 'RBRACKET',
1782 'LBRACE', 'RBRACE',
1783 'LESS', 'GREATER', 'EQUALS',
1784 'COMMA', 'SEMI', 'DOT', 'COLON', 'DBLCOLON',
1785 'ASTERISK',
1786
1787 # C preprocessor directives
1788 'CPPDIRECTIVE'
1789
1790 # The following are matched but never returned. Commented out to
1791 # suppress a PLY warning.
1792 # newfile directive
1793 # 'NEWFILE',
1794
1795 # endfile directive
1796 # 'ENDFILE'
1797 )
1798
1799 # Regular expressions for token matching
1800 t_LPAREN = r'\('
1801 t_RPAREN = r'\)'
1802 t_LBRACKET = r'\['
1803 t_RBRACKET = r'\]'
1804 t_LBRACE = r'\{'
1805 t_RBRACE = r'\}'
1806 t_LESS = r'\<'
1807 t_GREATER = r'\>'
1808 t_EQUALS = r'='
1809 t_COMMA = r','
1810 t_SEMI = r';'
1811 t_DOT = r'\.'
1812 t_COLON = r':'
1813 t_DBLCOLON = r'::'
1814 t_ASTERISK = r'\*'
1815
1816 # Identifiers and reserved words
1817 reserved_map = { }
1818 for r in reserved:
1819 reserved_map[r.lower()] = r
1820
1821 def t_ID(self, t):
1822 r'[A-Za-z_]\w*'
1823 t.type = self.reserved_map.get(t.value, 'ID')
1824 return t
1825
1826 # Integer literal
1827 def t_INTLIT(self, t):
1828 r'-?(0x[\da-fA-F]+)|\d+'
1829 try:
1830 t.value = int(t.value,0)
1831 except ValueError:
1832 error(t.lexer.lineno, 'Integer value "%s" too large' % t.value)
1833 t.value = 0
1834 return t
1835
1836 # String literal. Note that these use only single quotes, and
1837 # can span multiple lines.
1838 def t_STRLIT(self, t):
1839 r"(?m)'([^'])+'"
1840 # strip off quotes
1841 t.value = t.value[1:-1]
1842 t.lexer.lineno += t.value.count('\n')
1843 return t
1844
1845
1846 # "Code literal"... like a string literal, but delimiters are
1847 # '{{' and '}}' so they get formatted nicely under emacs c-mode
1848 def t_CODELIT(self, t):
1849 r"(?m)\{\{([^\}]|}(?!\}))+\}\}"
1850 # strip off {{ & }}
1851 t.value = t.value[2:-2]
1852 t.lexer.lineno += t.value.count('\n')
1853 return t
1854
1855 def t_CPPDIRECTIVE(self, t):
1856 r'^\#[^\#].*\n'
1857 t.lexer.lineno += t.value.count('\n')
1858 return t
1859
1860 def t_NEWFILE(self, t):
1861 r'^\#\#newfile\s+"[^"]*"\n'
1862 self.fileNameStack.push(t.lexer.lineno)
1863 t.lexer.lineno = LineTracker(t.value[11:-2])
1864
1865 def t_ENDFILE(self, t):
1866 r'^\#\#endfile\n'
1867 t.lexer.lineno = self.fileNameStack.pop()
1868
1869 #
1870 # The functions t_NEWLINE, t_ignore, and t_error are
1871 # special for the lex module.
1872 #
1873
1874 # Newlines
1875 def t_NEWLINE(self, t):
1876 r'\n+'
1877 t.lexer.lineno += t.value.count('\n')
1878
1879 # Comments
1880 def t_comment(self, t):
1881 r'//.*'
1882
1883 # Completely ignored characters
1884 t_ignore = ' \t\x0c'
1885
1886 # Error handler
1887 def t_error(self, t):
1888 error(t.lexer.lineno, "illegal character '%s'" % t.value[0])
1889 t.skip(1)
1890
1891 #####################################################################
1892 #
1893 # Parser
1894 #
1895 # Every function whose name starts with 'p_' defines a grammar
1896 # rule. The rule is encoded in the function's doc string, while
1897 # the function body provides the action taken when the rule is
1898 # matched. The argument to each function is a list of the values
1899 # of the rule's symbols: t[0] for the LHS, and t[1..n] for the
1900 # symbols on the RHS. For tokens, the value is copied from the
1901 # t.value attribute provided by the lexer. For non-terminals, the
1902 # value is assigned by the producing rule; i.e., the job of the
1903 # grammar rule function is to set the value for the non-terminal
1904 # on the LHS (by assigning to t[0]).
1905 #####################################################################
1906
1907 # The LHS of the first grammar rule is used as the start symbol
1908 # (in this case, 'specification'). Note that this rule enforces
1909 # that there will be exactly one namespace declaration, with 0 or
1910 # more global defs/decls before and after it. The defs & decls
1911 # before the namespace decl will be outside the namespace; those
1912 # after will be inside. The decoder function is always inside the
1913 # namespace.
1914 def p_specification(self, t):
1915 'specification : opt_defs_and_outputs top_level_decode_block'
1916
1917 for f in self.splits.iterkeys():
1918 f.write('\n#endif\n')
1919
1920 for f in self.files.itervalues(): # close ALL the files;
1921 f.close() # not doing so can cause compilation to fail
1922
1923 self.write_top_level_files()
1924
1925 t[0] = True
1926
1927 # 'opt_defs_and_outputs' is a possibly empty sequence of def and/or
1928 # output statements. Its productions do the hard work of eventually
1929 # instantiating a GenCode, which is generally emitted (written to disk)
1930 # as soon as possible, except for the decode_block, which has to be
1931 # accumulated into one large function of nested switch/case blocks.
1932 def p_opt_defs_and_outputs_0(self, t):
1933 'opt_defs_and_outputs : empty'
1934
1935 def p_opt_defs_and_outputs_1(self, t):
1936 'opt_defs_and_outputs : defs_and_outputs'
1937
1938 def p_defs_and_outputs_0(self, t):
1939 'defs_and_outputs : def_or_output'
1940
1941 def p_defs_and_outputs_1(self, t):
1942 'defs_and_outputs : defs_and_outputs def_or_output'
1943
1944 # The list of possible definition/output statements.
1945 # They are all processed as they are seen.
1946 def p_def_or_output(self, t):
1947 '''def_or_output : name_decl
1948 | def_format
1949 | def_bitfield
1950 | def_bitfield_struct
1951 | def_template
1952 | def_operand_types
1953 | def_operands
1954 | output
1955 | global_let
1956 | split'''
1957
1958 # Utility function used by both ways of requesting a split: the explicit
1959 # 'split' keyword and the split() function inside "let {{ }};" blocks.
1960 def split(self, sec, write=False):
1961 assert(sec != 'header' and "header cannot be split")
1962
1963 f = self.get_file(sec)
1964 self.splits[f] += 1
1965 s = '\n#endif\n#if __SPLIT == %u\n' % self.splits[f]
1966 if write:
1967 f.write(s)
1968 else:
1969 return s
1970
1971 # split output file to reduce compilation time
1972 def p_split(self, t):
1973 'split : SPLIT output_type SEMI'
1974 assert(self.isa_name and "'split' not allowed before namespace decl")
1975
1976 self.split(t[2], True)
1977
1978 def p_output_type(self, t):
1979 '''output_type : DECODER
1980 | HEADER
1981 | EXEC'''
1982 t[0] = t[1]
1983
1984 # ISA name declaration looks like "namespace <foo>;"
1985 def p_name_decl(self, t):
1986 'name_decl : NAMESPACE ID SEMI'
1987 assert(self.isa_name == None and "Only 1 namespace decl permitted")
1988 self.isa_name = t[2]
1989 self.namespace = t[2] + 'Inst'
1990
1991 # Output blocks 'output <foo> {{...}}' (C++ code blocks) are copied
1992 # directly to the appropriate output section.
1993
1994 # Massage output block by substituting in template definitions and
1995 # bit operators. We handle '%'s embedded in the string that don't
1996 # indicate template substitutions by doubling them first so that the
1997 # format operation will reduce them back to single '%'s.
1998 def process_output(self, s):
1999 s = self.protectNonSubstPercents(s)
2000 return substBitOps(s % self.templateMap)
2001
2002 def p_output(self, t):
2003 'output : OUTPUT output_type CODELIT SEMI'
2004 kwargs = { t[2]+'_output' : self.process_output(t[3]) }
2005 GenCode(self, **kwargs).emit()
2006
2007 # global let blocks 'let {{...}}' (Python code blocks) are
2008 # executed directly when seen. Note that these execute in a
2009 # special variable context 'exportContext' to prevent the code
2010 # from polluting this script's namespace.
2011 def p_global_let(self, t):
2012 'global_let : LET CODELIT SEMI'
2013 def _split(sec):
2014 return self.split(sec)
2015 self.updateExportContext()
2016 self.exportContext["header_output"] = ''
2017 self.exportContext["decoder_output"] = ''
2018 self.exportContext["exec_output"] = ''
2019 self.exportContext["decode_block"] = ''
2020 self.exportContext["split"] = _split
2021 split_setup = '''
2022def wrap(func):
2023 def split(sec):
2024 globals()[sec + '_output'] += func(sec)
2025 return split
2026split = wrap(split)
2027del wrap
2028'''
2029 # This tricky setup (immediately above) allows us to just write
2030 # (e.g.) "split('exec')" in the Python code and the split #ifdef's
2031 # will automatically be added to the exec_output variable. The inner
2032 # Python execution environment doesn't know about the split points,
2033 # so we carefully inject and wrap a closure that can retrieve the
2034 # next split's #define from the parser and add it to the current
2035 # emission-in-progress.
2036 try:
2037 exec split_setup+fixPythonIndentation(t[2]) in self.exportContext
2038 except Exception, exc:
2039 traceback.print_exc(file=sys.stdout)
2040 if debug:
2041 raise
2042 error(t.lineno(1), 'In global let block: %s' % exc)
2043 GenCode(self,
2044 header_output=self.exportContext["header_output"],
2045 decoder_output=self.exportContext["decoder_output"],
2046 exec_output=self.exportContext["exec_output"],
2047 decode_block=self.exportContext["decode_block"]).emit()
2048
2049 # Define the mapping from operand type extensions to C++ types and
2050 # bit widths (stored in operandTypeMap).
2051 def p_def_operand_types(self, t):
2052 'def_operand_types : DEF OPERAND_TYPES CODELIT SEMI'
2053 try:
2054 self.operandTypeMap = eval('{' + t[3] + '}')
2055 except Exception, exc:
2056 if debug:
2057 raise
2058 error(t.lineno(1),
2059 'In def operand_types: %s' % exc)
2060
2061 # Define the mapping from operand names to operand classes and
2062 # other traits. Stored in operandNameMap.
2063 def p_def_operands(self, t):
2064 'def_operands : DEF OPERANDS CODELIT SEMI'
2065 if not hasattr(self, 'operandTypeMap'):
2066 error(t.lineno(1),
2067 'error: operand types must be defined before operands')
2068 try:
2069 user_dict = eval('{' + t[3] + '}', self.exportContext)
2070 except Exception, exc:
2071 if debug:
2072 raise
2073 error(t.lineno(1), 'In def operands: %s' % exc)
2074 self.buildOperandNameMap(user_dict, t.lexer.lineno)
2075
2076 # A bitfield definition looks like:
2077 # 'def [signed] bitfield <ID> [<first>:<last>]'
2078 # This generates a preprocessor macro in the output file.
2079 def p_def_bitfield_0(self, t):
2080 'def_bitfield : DEF opt_signed BITFIELD ID LESS INTLIT COLON INTLIT GREATER SEMI'
2081 expr = 'bits(machInst, %2d, %2d)' % (t[6], t[8])
2082 if (t[2] == 'signed'):
2083 expr = 'sext<%d>(%s)' % (t[6] - t[8] + 1, expr)
2084 hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
2085 GenCode(self, header_output=hash_define).emit()
2086
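# For example (illustrative only; OPCODE is a hypothetical field name), the
# description line
#     def bitfield OPCODE <31:26>;
# emits into the decoder header roughly:
#     #undef OPCODE
#     #define OPCODE  bits(machInst, 31, 26)
# and with 'def signed bitfield ...' the expression is additionally wrapped
# as sext<6>(bits(machInst, 31, 26)).
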
2087 # alternate form for single bit: 'def [signed] bitfield <ID> [<bit>]'
2088 def p_def_bitfield_1(self, t):
2089 'def_bitfield : DEF opt_signed BITFIELD ID LESS INTLIT GREATER SEMI'
2090 expr = 'bits(machInst, %2d, %2d)' % (t[6], t[6])
2091 if (t[2] == 'signed'):
2092 expr = 'sext<%d>(%s)' % (1, expr)
2093 hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
2094 GenCode(self, header_output=hash_define).emit()
2095
2096 # alternate form for structure member: 'def bitfield <ID> <ID>'
2097 def p_def_bitfield_struct(self, t):
2098 'def_bitfield_struct : DEF opt_signed BITFIELD ID id_with_dot SEMI'
2099 if (t[2] != ''):
2100 error(t.lineno(1),
2101 'error: structure bitfields are always unsigned.')
2102 expr = 'machInst.%s' % t[5]
2103 hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
2104 GenCode(self, header_output=hash_define).emit()
2105
2106 def p_id_with_dot_0(self, t):
2107 'id_with_dot : ID'
2108 t[0] = t[1]
2109
2110 def p_id_with_dot_1(self, t):
2111 'id_with_dot : ID DOT id_with_dot'
2112 t[0] = t[1] + t[2] + t[3]
2113
2114 def p_opt_signed_0(self, t):
2115 'opt_signed : SIGNED'
2116 t[0] = t[1]
2117
2118 def p_opt_signed_1(self, t):
2119 'opt_signed : empty'
2120 t[0] = ''
2121
2122 def p_def_template(self, t):
2123 'def_template : DEF TEMPLATE ID CODELIT SEMI'
2124 if t[3] in self.templateMap:
2125 print("warning: template %s already defined" % t[3])
2126 self.templateMap[t[3]] = Template(self, t[4])
2127
2128 # An instruction format definition looks like
2129 # "def format <fmt>(<params>) {{...}};"
2130 def p_def_format(self, t):
2131 'def_format : DEF FORMAT ID LPAREN param_list RPAREN CODELIT SEMI'
2132 (id, params, code) = (t[3], t[5], t[7])
2133 self.defFormat(id, params, code, t.lexer.lineno)
2134
2135 # The formal parameter list for an instruction format is a
2136 # possibly empty list of comma-separated parameters. Positional
2137 # (standard, non-keyword) parameters must come first, followed by
2138 # keyword parameters, followed by a '*foo' parameter that gets
2139 # excess positional arguments (as in Python). Each of these three
2140 # parameter categories is optional.
2141 #
2142 # Note that we do not support the '**foo' parameter for collecting
2143 # otherwise undefined keyword args. Otherwise the parameter list
2144 # is (I believe) identical to what is supported in Python.
2145 #
2146 # The param list productions build a flat list of parameter strings:
2147 # positional names, 'keyword = default' entries, and (if present) a
2148 # trailing '*foo' entry, later joined to form a Python def signature.
2149 def p_param_list_0(self, t):
2150 'param_list : positional_param_list COMMA nonpositional_param_list'
2151 t[0] = t[1] + t[3]
2152
2153 def p_param_list_1(self, t):
2154 '''param_list : positional_param_list
2155 | nonpositional_param_list'''
2156 t[0] = t[1]
2157
2158 def p_positional_param_list_0(self, t):
2159 'positional_param_list : empty'
2160 t[0] = []
2161
2162 def p_positional_param_list_1(self, t):
2163 'positional_param_list : ID'
2164 t[0] = [t[1]]
2165
2166 def p_positional_param_list_2(self, t):
2167 'positional_param_list : positional_param_list COMMA ID'
2168 t[0] = t[1] + [t[3]]
2169
2170 def p_nonpositional_param_list_0(self, t):
2171 'nonpositional_param_list : keyword_param_list COMMA excess_args_param'
2172 t[0] = t[1] + t[3]
2173
2174 def p_nonpositional_param_list_1(self, t):
2175 '''nonpositional_param_list : keyword_param_list
2176 | excess_args_param'''
2177 t[0] = t[1]
2178
2179 def p_keyword_param_list_0(self, t):
2180 'keyword_param_list : keyword_param'
2181 t[0] = [t[1]]
2182
2183 def p_keyword_param_list_1(self, t):
2184 'keyword_param_list : keyword_param_list COMMA keyword_param'
2185 t[0] = t[1] + [t[3]]
2186
2187 def p_keyword_param(self, t):
2188 'keyword_param : ID EQUALS expr'
2189 t[0] = t[1] + ' = ' + t[3].__repr__()
2190
2191 def p_excess_args_param(self, t):
2192 'excess_args_param : ASTERISK ID'
2193 # Just concatenate them: '*ID'. Wrap in list to be consistent
2194 # with positional_param_list and keyword_param_list.
2195 t[0] = [t[1] + t[2]]
2196
2197 # End of format definition-related rules.
2198 ##############
2199
2200 #
2201 # A decode block looks like:
2202 # decode <field1> [, <field2>]* [default <inst>] { ... }
2203 #
2204 def p_top_level_decode_block(self, t):
2205 'top_level_decode_block : decode_block'
2206 codeObj = t[1]
2207 codeObj.wrap_decode_block('''
2208StaticInstPtr
2209%(isa_name)s::Decoder::decodeInst(%(isa_name)s::ExtMachInst machInst)
2210{
2211 using namespace %(namespace)s;
2212''' % self, '}')
2213
2214 codeObj.emit()
2215
2216 def p_decode_block(self, t):
2217 'decode_block : DECODE ID opt_default LBRACE decode_stmt_list RBRACE'
2218 default_defaults = self.defaultStack.pop()
2219 codeObj = t[5]
2220 # use the "default defaults" only if there was no explicit
2221 # default statement in decode_stmt_list
2222 if not codeObj.has_decode_default:
2223 codeObj += default_defaults
2224 codeObj.wrap_decode_block('switch (%s) {\n' % t[2], '}\n')
2225 t[0] = codeObj
2226
2227 # The opt_default statement serves only to push the "default
2228 # defaults" onto defaultStack. This value will be used by nested
2229 # decode blocks, and used and popped off when the current
2230 # decode_block is processed (in p_decode_block() above).
2231 def p_opt_default_0(self, t):
2232 'opt_default : empty'
2233 # no default specified: reuse the one currently at the top of
2234 # the stack
2235 self.defaultStack.push(self.defaultStack.top())
2236 # no meaningful value returned
2237 t[0] = None
2238
2239 def p_opt_default_1(self, t):
2240 'opt_default : DEFAULT inst'
2241 # push the new default
2242 codeObj = t[2]
2243 codeObj.wrap_decode_block('\ndefault:\n', 'break;\n')
2244 self.defaultStack.push(codeObj)
2245 # no meaningful value returned
2246 t[0] = None
2247
2248 def p_decode_stmt_list_0(self, t):
2249 'decode_stmt_list : decode_stmt'
2250 t[0] = t[1]
2251
2252 def p_decode_stmt_list_1(self, t):
2253 'decode_stmt_list : decode_stmt decode_stmt_list'
2254 if (t[1].has_decode_default and t[2].has_decode_default):
2255 error(t.lineno(1), 'Two default cases in decode block')
2256 t[0] = t[1] + t[2]
2257
2258 #
2259 # Decode statement rules
2260 #
2261 # There are four types of statements allowed in a decode block:
2262 # 1. Format blocks 'format <foo> { ... }'
2263 # 2. Nested decode blocks
2264 # 3. Instruction definitions.
2265 # 4. C preprocessor directives.
2266
2267
2268 # Preprocessor directives found in a decode statement list are
2269 # passed through to the output, replicated to all of the output
2270 # code streams. This works well for ifdefs, so we can ifdef out
2271 # both the declarations and the decode cases generated by an
2272 # instruction definition. Handling them as part of the grammar
2273 # makes it easy to keep them in the right place with respect to
2274 # the code generated by the other statements.
2275 def p_decode_stmt_cpp(self, t):
2276 'decode_stmt : CPPDIRECTIVE'
2277 t[0] = GenCode(self, t[1], t[1], t[1], t[1])
2278
2279 # A format block 'format <foo> { ... }' sets the default
2280 # instruction format used to handle instruction definitions inside
2281 # the block. This format can be overridden by using an explicit
2282 # format on the instruction definition or with a nested format
2283 # block.
2284 def p_decode_stmt_format(self, t):
2285 'decode_stmt : FORMAT push_format_id LBRACE decode_stmt_list RBRACE'
2286 # The format will be pushed on the stack when 'push_format_id'
2287 # is processed (see below). Once the parser has recognized
2288 # the full production (through the right brace), we're done
2289 # with the format, so now we can pop it.
2290 self.formatStack.pop()
2291 t[0] = t[4]
2292
2293 # This rule exists so we can set the current format (& push the
2294 # stack) when we recognize the format name part of the format
2295 # block.
2296 def p_push_format_id(self, t):
2297 'push_format_id : ID'
2298 try:
2299 self.formatStack.push(self.formatMap[t[1]])
2300 t[0] = ('', '// format %s' % t[1])
2301 except KeyError:
2302 error(t.lineno(1), 'instruction format "%s" not defined.' % t[1])
2303
2304 # Nested decode block: if the value of the current field matches
2305 # the specified constant(s), do a nested decode on some other field.
2306 def p_decode_stmt_decode(self, t):
2307 'decode_stmt : case_list COLON decode_block'
2308 case_list = t[1]
2309 codeObj = t[3]
2310 # just wrap the decoding code from the block as a case in the
2311 # outer switch statement.
2312 codeObj.wrap_decode_block('\n%s\n' % ''.join(case_list),
2313 'M5_UNREACHABLE;\n')
2314 codeObj.has_decode_default = (case_list == ['default:'])
2315 t[0] = codeObj
2316
2317 # Instruction definition (finally!).
2318 def p_decode_stmt_inst(self, t):
2319 'decode_stmt : case_list COLON inst SEMI'
2320 case_list = t[1]
2321 codeObj = t[3]
2322 codeObj.wrap_decode_block('\n%s' % ''.join(case_list), 'break;\n')
2323 codeObj.has_decode_default = (case_list == ['default:'])
2324 t[0] = codeObj
2325
2326 # The constant list for a decode case label must be non-empty, and must
2327 # either be the keyword 'default', or made up of one or more
2328 # comma-separated integer literals or strings which evaluate to
2329 # constants when compiled as C++.
2330 def p_case_list_0(self, t):
2331 'case_list : DEFAULT'
2332 t[0] = ['default:']
2333
2334 def prep_int_lit_case_label(self, lit):
2335 if lit >= 2**32:
2336 return 'case ULL(%#x): ' % lit
2337 else:
2338 return 'case %#x: ' % lit
2339
2340 def prep_str_lit_case_label(self, lit):
2341 return 'case %s: ' % lit
2342
2343 def p_case_list_1(self, t):
2344 'case_list : INTLIT'
2345 t[0] = [self.prep_int_lit_case_label(t[1])]
2346
2347 def p_case_list_2(self, t):
2348 'case_list : STRLIT'
2349 t[0] = [self.prep_str_lit_case_label(t[1])]
2350
2351 def p_case_list_3(self, t):
2352 'case_list : case_list COMMA INTLIT'
2353 t[0] = t[1]
2354 t[0].append(self.prep_int_lit_case_label(t[3]))
2355
2356 def p_case_list_4(self, t):
2357 'case_list : case_list COMMA STRLIT'
2358 t[0] = t[1]
2359 t[0].append(self.prep_str_lit_case_label(t[3]))
2360
2361 # Define an instruction using the current instruction format
2362 # (specified by an enclosing format block).
2363 # "<mnemonic>(<args>)"
2364 def p_inst_0(self, t):
2365 'inst : ID LPAREN arg_list RPAREN'
2366 # Pass the ID and arg list to the current format class to deal with.
2367 currentFormat = self.formatStack.top()
2368 codeObj = currentFormat.defineInst(self, t[1], t[3], t.lexer.lineno)
2369 args = ','.join(map(str, t[3]))
2370 args = re.sub('(?m)^', '//', args)
2371 args = re.sub('^//', '', args)
2372 comment = '\n// %s::%s(%s)\n' % (currentFormat.id, t[1], args)
2373 codeObj.prepend_all(comment)
2374 t[0] = codeObj
2375
2376 # Define an instruction using an explicitly specified format:
2377 # "<fmt>::<mnemonic>(<args>)"
2378 def p_inst_1(self, t):
2379 'inst : ID DBLCOLON ID LPAREN arg_list RPAREN'
2380 try:
2381 format = self.formatMap[t[1]]
2382 except KeyError:
2383 error(t.lineno(1), 'instruction format "%s" not defined.' % t[1])
2384
2385 codeObj = format.defineInst(self, t[3], t[5], t.lexer.lineno)
2386 comment = '\n// %s::%s(%s)\n' % (t[1], t[3], t[5])
2387 codeObj.prepend_all(comment)
2388 t[0] = codeObj
2389
2390 # The arg list generates a tuple, where the first element is a
2391 # list of the positional args and the second element is a dict
2392 # containing the keyword args.
2393 def p_arg_list_0(self, t):
2394 'arg_list : positional_arg_list COMMA keyword_arg_list'
2395 t[0] = ( t[1], t[3] )
2396
2397 def p_arg_list_1(self, t):
2398 'arg_list : positional_arg_list'
2399 t[0] = ( t[1], {} )
2400
2401 def p_arg_list_2(self, t):
2402 'arg_list : keyword_arg_list'
2403 t[0] = ( [], t[1] )
2404
2405 def p_positional_arg_list_0(self, t):
2406 'positional_arg_list : empty'
2407 t[0] = []
2408
2409 def p_positional_arg_list_1(self, t):
2410 'positional_arg_list : expr'
2411 t[0] = [t[1]]
2412
2413 def p_positional_arg_list_2(self, t):
2414 'positional_arg_list : positional_arg_list COMMA expr'
2415 t[0] = t[1] + [t[3]]
2416
2417 def p_keyword_arg_list_0(self, t):
2418 'keyword_arg_list : keyword_arg'
2419 t[0] = t[1]
2420
2421 def p_keyword_arg_list_1(self, t):
2422 'keyword_arg_list : keyword_arg_list COMMA keyword_arg'
2423 t[0] = t[1]
2424 t[0].update(t[3])
2425
2426 def p_keyword_arg(self, t):
2427 'keyword_arg : ID EQUALS expr'
2428 t[0] = { t[1] : t[3] }
2429
2430 #
2431 # Basic expressions. These constitute the argument values of
2432 # "function calls" (i.e. instruction definitions in the decode
2433 # block) and default values for formal parameters of format
2434 # functions.
2435 #
2436 # Right now, these are either strings, integers, or (recursively)
2437 # lists of exprs (using Python square-bracket list syntax). Note
2438 # that bare identifiers are treated as string constants here (since
2439 # there isn't really a variable namespace to refer to).
2440 #
2441 def p_expr_0(self, t):
2442 '''expr : ID
2443 | INTLIT
2444 | STRLIT
2445 | CODELIT'''
2446 t[0] = t[1]
2447
2448 def p_expr_1(self, t):
2449 '''expr : LBRACKET list_expr RBRACKET'''
2450 t[0] = t[2]
2451
2452 def p_list_expr_0(self, t):
2453 'list_expr : expr'
2454 t[0] = [t[1]]
2455
2456 def p_list_expr_1(self, t):
2457 'list_expr : list_expr COMMA expr'
2458 t[0] = t[1] + [t[3]]
2459
2460 def p_list_expr_2(self, t):
2461 'list_expr : empty'
2462 t[0] = []
2463
2464 #
2465 # Empty production... use in other rules for readability.
2466 #
2467 def p_empty(self, t):
2468 'empty :'
2469 pass
2470
2471 # Parse error handler. Note that the argument here is the
2472 # offending *token*, not a grammar symbol (hence the need to use
2473 # t.value)
2474 def p_error(self, t):
2475 if t:
2476 error(t.lexer.lineno, "syntax error at '%s'" % t.value)
2477 else:
2478 error("unknown syntax error")
2479
2480 # END OF GRAMMAR RULES
2481
2482 def updateExportContext(self):
2483
2484 # create a continuation that allows us to grab the current parser
2485 def wrapInstObjParams(*args):
2486 return InstObjParams(self, *args)
2487 self.exportContext['InstObjParams'] = wrapInstObjParams
2488 self.exportContext.update(self.templateMap)
2489
2490 def defFormat(self, id, params, code, lineno):
2491 '''Define a new format'''
2492
2493 # make sure we haven't already defined this one
2494 if id in self.formatMap:
2495 error(lineno, 'format %s redefined.' % id)
2496
2497 # create new object and store in global map
2498 self.formatMap[id] = Format(id, params, code)
2499
2500 def protectNonSubstPercents(self, s):
2501 '''Protect any non-dict-substitution '%'s in a format string
2502 (i.e. those not followed by '(')'''
2503
2504 return re.sub(r'%(?!\()', '%%', s)
2505
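# For example (illustrative only; CPU_exec_context is a hypothetical
# template name): protectNonSubstPercents('100% done %(CPU_exec_context)s')
# returns '100%% done %(CPU_exec_context)s'. The bare '%' is doubled so the
# subsequent '% self.templateMap' substitution reduces it back to a single
# '%', while the '%(...)s' template reference is left for that substitution
# to expand.
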
2506 def buildOperandNameMap(self, user_dict, lineno):
2507 operand_name = {}
2508 for op_name, val in user_dict.iteritems():
2509
2510 # Check if extra attributes have been specified.
2511 if len(val) > 9:
2512 error(lineno, 'error: too many attributes for operand "%s"' %
2513 op_name)
2514
2515 # Pad val with None in case optional args are missing
2516 val += (None, None, None, None)
2517 base_cls_name, dflt_ext, reg_spec, flags, sort_pri, \
2518 read_code, write_code, read_predicate, write_predicate = val[:9]
2519
2520 # Canonical flag structure is a triple of lists, where each list
2521 # indicates the set of flags implied by this operand always, when
2522 # used as a source, and when used as a dest, respectively.
2523 # For simplicity this can be initialized using a variety of fairly
2524 # obvious shortcuts; we convert these to canonical form here.
2525 if not flags:
2526 # no flags specified (e.g., 'None')
2527 flags = ( [], [], [] )
2528 elif isinstance(flags, str):
2529 # a single flag: assumed to be unconditional
2530 flags = ( [ flags ], [], [] )
2531 elif isinstance(flags, list):
2532 # a list of flags: also assumed to be unconditional
2533 flags = ( flags, [], [] )
2534 elif isinstance(flags, tuple):
2535 # it's a tuple: it should be a triple,
2536 # but each item could be a single string or a list
2537 (uncond_flags, src_flags, dest_flags) = flags
2538 flags = (makeList(uncond_flags),
2539 makeList(src_flags), makeList(dest_flags))
2540
2541 # Accumulate attributes of new operand class in tmp_dict
2542 tmp_dict = {}
2543 attrList = ['reg_spec', 'flags', 'sort_pri',
2544 'read_code', 'write_code',
2545 'read_predicate', 'write_predicate']
2546 if dflt_ext:
2547 dflt_ctype = self.operandTypeMap[dflt_ext]
2548 attrList.extend(['dflt_ctype', 'dflt_ext'])
2549 # reg_spec is either just a string or a dictionary
2550 # (for elems of vector)
2551 if isinstance(reg_spec, tuple):
2552 (reg_spec, elem_spec) = reg_spec
2553 if isinstance(elem_spec, str):
2554 attrList.append('elem_spec')
2555 else:
2556 assert(isinstance(elem_spec, dict))
2557 elems = elem_spec
2558 attrList.append('elems')
2559 for attr in attrList:
2560 tmp_dict[attr] = eval(attr)
2561 tmp_dict['base_name'] = op_name
2562
2563 # New class name will be e.g. "IntReg_Ra"
2564 cls_name = base_cls_name + '_' + op_name
2565 # Evaluate string arg to get class object. Note that the
2566 # actual base class for "IntReg" is "IntRegOperand", i.e. we
2567 # have to append "Operand".
2568 try:
2569 base_cls = eval(base_cls_name + 'Operand')
2570 except NameError:
2571 error(lineno,
2572 'error: unknown operand base class "%s"' % base_cls_name)
2573 # The following statement creates a new class called
2574 # <cls_name> as a subclass of <base_cls> with the attributes
2575 # in tmp_dict, just as if we evaluated a class declaration.
2576 operand_name[op_name] = type(cls_name, (base_cls,), tmp_dict)
2577
2578 self.operandNameMap = operand_name
2579
2580 # Define operand variables.
2581 operands = user_dict.keys()
2582 # Add the elems defined in the vector operands and
2583 # build a map elem -> vector (used in OperandList)
2584 elem_to_vec = {}
2585 for op in user_dict.keys():
2586 if hasattr(self.operandNameMap[op], 'elems'):
2587 for elem in self.operandNameMap[op].elems.keys():
2588 operands.append(elem)
2589 elem_to_vec[elem] = op
2590 self.elemToVector = elem_to_vec
2591 extensions = self.operandTypeMap.keys()
2592
2593 operandsREString = r'''
2594 (?<!\w) # neg. lookbehind assertion: prevent partial matches
2595 ((%s)(?:_(%s))?) # match: operand with optional '_' then suffix
2596 (?!\w) # neg. lookahead assertion: prevent partial matches
2597 ''' % (string.join(operands, '|'), string.join(extensions, '|'))
2598
2599 self.operandsRE = re.compile(operandsREString, re.MULTILINE|re.VERBOSE)
2600
2601 # Same as operandsREString, but extension is mandatory, and only two
2602 # groups are returned (base and ext, not full name as above).
2603 # Used for substituting '_' for '.' to make C++ identifiers.
2604 operandsWithExtREString = r'(?<!\w)(%s)_(%s)(?!\w)' \
2605 % (string.join(operands, '|'), string.join(extensions, '|'))
2606
2607 self.operandsWithExtRE = \
2608 re.compile(operandsWithExtREString, re.MULTILINE)
2609
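# For example (illustrative, assuming a hypothetical operand 'Ra' and type
# extension 'sw'): in a code snippet, operandsRE matches both 'Ra' and
# 'Ra_sw' (capturing full name, base, and optional extension), whereas
# operandsWithExtRE matches only 'Ra_sw', capturing ('Ra', 'sw'); the
# substMungedOpNames() method below uses the latter to rewrite 'Ra_sw' back
# to the plain C++ identifier 'Ra'.
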
2610 def substMungedOpNames(self, code):
2611 '''Munge operand names in code string to make legal C++
2612 variable names. This means getting rid of the type extension
2613 if any. Will match base_name attribute of Operand object.'''
2614 return self.operandsWithExtRE.sub(r'\1', code)
2615
2616 def mungeSnippet(self, s):
2617 '''Fix up code snippets for final substitution in templates.'''
2618 if isinstance(s, str):
2619 return self.substMungedOpNames(substBitOps(s))
2620 else:
2621 return s
2622
2623 def open(self, name, bare=False):
2624 '''Open the output file for writing and include scary warning.'''
2625 filename = os.path.join(self.output_dir, name)
2626 f = open(filename, 'w')
2627 if f:
2628 if not bare:
2629 f.write(ISAParser.scaremonger_template % self)
2630 return f
2631
2632 def update(self, file, contents):
2633 '''Update the output file only. Scons should handle the case when
2634 the new contents are unchanged using its built-in hash feature.'''
2635 f = self.open(file)
2636 f.write(contents)
2637 f.close()
2638
2639 # This regular expression matches '##include' directives
2640 includeRE = re.compile(r'^\s*##include\s+"(?P<filename>[^"]*)".*$',
2641 re.MULTILINE)
2642
2643 def replace_include(self, matchobj, dirname):
2644 """Function to replace a matched '##include' directive with the
2645 contents of the specified file (with nested ##includes
2646 replaced recursively). 'matchobj' is an re match object
2647 (from a match of includeRE) and 'dirname' is the directory
2648 relative to which the file path should be resolved."""
2649
2650 fname = matchobj.group('filename')
2651 full_fname = os.path.normpath(os.path.join(dirname, fname))
2652 contents = '##newfile "%s"\n%s\n##endfile\n' % \
2653 (full_fname, self.read_and_flatten(full_fname))
2654 return contents
2655
2656 def read_and_flatten(self, filename):
2657 """Read a file and recursively flatten nested '##include' files."""
2658
2659 current_dir = os.path.dirname(filename)
2660 try:
2661 contents = open(filename).read()
2662 except IOError:
2663 error('Error including file "%s"' % filename)
2664
2665 self.fileNameStack.push(LineTracker(filename))
2666
2667 # Find any includes and include them
2668 def replace(matchobj):
2669 return self.replace_include(matchobj, current_dir)
2670 contents = self.includeRE.sub(replace, contents)
2671
2672 self.fileNameStack.pop()
2673 return contents
2674
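# Sketch of the flattening (illustrative; 'mem.isa' is hypothetical): a line
#     ##include "mem.isa"
# in the top-level description is replaced in the flattened string by
#     ##newfile "<resolved path>/mem.isa"
#     ... contents of mem.isa, themselves flattened recursively ...
#     ##endfile
# The lexer's t_NEWFILE and t_ENDFILE rules later consume these markers to
# keep filenames and line numbers correct in error messages.
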
2675 AlreadyGenerated = {}
2676
2677 def _parse_isa_desc(self, isa_desc_file):
2678 '''Read in and parse the ISA description.'''
2679
2680 # The build system can end up running the ISA parser twice: once to
2681 # finalize the build dependencies, and then to actually generate
2682 # the files it expects (in src/arch/$ARCH/generated). This code
2683 # doesn't do anything different either time, however; the SCons
2684 # invocations just expect different things. Since this code runs
2685 # within SCons, we can just remember that we've already run and
2686 # not perform a completely unnecessary run, since the ISA parser's
2687 # effect is idempotent.
2688 if isa_desc_file in ISAParser.AlreadyGenerated:
2689 return
2690
2691 # grab the last three path components of isa_desc_file
2692 self.filename = '/'.join(isa_desc_file.split('/')[-3:])
2693
2694 # Read file and (recursively) all included files into a string.
2695 # PLY requires that the input be in a single string so we have to
2696 # do this up front.
2697 isa_desc = self.read_and_flatten(isa_desc_file)
2698
2699 # Initialize lineno tracker
2700 self.lex.lineno = LineTracker(isa_desc_file)
2701
2702 # Parse.
2703 self.parse_string(isa_desc)
2704
2705 ISAParser.AlreadyGenerated[isa_desc_file] = None
2706
2707 def parse_isa_desc(self, *args, **kwargs):
2708 try:
2709 self._parse_isa_desc(*args, **kwargs)
2710 except ISAParserError, e:
2711 print(backtrace(self.fileNameStack))
2712 print("At %s:" % e.lineno)
2713 print(e)
2714 sys.exit(1)
2715
2716# Called as script: get args from command line.
2717# Args are: <isa desc file> <output dir>
2718if __name__ == '__main__':
2719 ISAParser(sys.argv[2]).parse_isa_desc(sys.argv[1])
146
147 snippets = dict([(s, self.parser.mungeSnippet(d.snippets[s]))
148 for s in snippetLabels])
149
150 myDict.update(snippets)
151
152 compositeCode = ' '.join(map(str, snippets.values()))
153
154 # Add in template itself in case it references any
155 # operands explicitly (like Mem)
156 compositeCode += ' ' + template
157
158 operands = SubOperandList(self.parser, compositeCode, d.operands)
159
160 myDict['op_decl'] = operands.concatAttrStrings('op_decl')
161 if operands.readPC or operands.setPC:
162 myDict['op_decl'] += 'TheISA::PCState __parserAutoPCState;\n'
163
164 # In case there are predicated register reads and writes, declare
165 # the variables for register indices. It is being assumed that
166 # all the operands in the OperandList are also in the
167 # SubOperandList and in the same order. Otherwise, it is
168 # expected that predication would not be used for the operands.
169 if operands.predRead:
170 myDict['op_decl'] += 'uint8_t _sourceIndex = 0;\n'
171 if operands.predWrite:
172 myDict['op_decl'] += 'uint8_t M5_VAR_USED _destIndex = 0;\n'
173
174 is_src = lambda op: op.is_src
175 is_dest = lambda op: op.is_dest
176
177 myDict['op_src_decl'] = \
178 operands.concatSomeAttrStrings(is_src, 'op_src_decl')
179 myDict['op_dest_decl'] = \
180 operands.concatSomeAttrStrings(is_dest, 'op_dest_decl')
181 if operands.readPC:
182 myDict['op_src_decl'] += \
183 'TheISA::PCState __parserAutoPCState;\n'
184 if operands.setPC:
185 myDict['op_dest_decl'] += \
186 'TheISA::PCState __parserAutoPCState;\n'
187
188 myDict['op_rd'] = operands.concatAttrStrings('op_rd')
189 if operands.readPC:
190 myDict['op_rd'] = '__parserAutoPCState = xc->pcState();\n' + \
191 myDict['op_rd']
192
193 # Compose the op_wb string. If we're going to write back the
194 # PC state because we changed some of its elements, we'll need to
195 # do that as early as possible. That allows later uncoordinated
196 # modifications to the PC to layer appropriately.
197 reordered = list(operands.items)
198 reordered.reverse()
199 op_wb_str = ''
200 pcWbStr = 'xc->pcState(__parserAutoPCState);\n'
201 for op_desc in reordered:
202 if op_desc.isPCPart() and op_desc.is_dest:
203 op_wb_str = op_desc.op_wb + pcWbStr + op_wb_str
204 pcWbStr = ''
205 else:
206 op_wb_str = op_desc.op_wb + op_wb_str
207 myDict['op_wb'] = op_wb_str
208
209 elif isinstance(d, dict):
210 # if the argument is a dictionary, we just use it.
211 myDict.update(d)
212 elif hasattr(d, '__dict__'):
213 # if the argument is an object, we use its attribute map.
214 myDict.update(d.__dict__)
215 else:
216 raise TypeError, "Template.subst() arg must be or have dictionary"
217 return template % myDict
218
219 # Convert to string.
220 def __str__(self):
221 return self.template
222
223################
224# Format object.
225#
226# A format object encapsulates an instruction format. It must provide
227# a defineInst() method that generates the code for an instruction
228# definition.
229
230class Format(object):
231 def __init__(self, id, params, code):
232 self.id = id
233 self.params = params
234 label = 'def format ' + id
235 self.user_code = compile(fixPythonIndentation(code), label, 'exec')
236 param_list = string.join(params, ", ")
237 f = '''def defInst(_code, _context, %s):
238 my_locals = vars().copy()
239 exec _code in _context, my_locals
240 return my_locals\n''' % param_list
241 c = compile(f, label + ' wrapper', 'exec')
242 exec c
243 self.func = defInst
244
245 def defineInst(self, parser, name, args, lineno):
246 parser.updateExportContext()
247 context = parser.exportContext.copy()
248 if len(name):
249 Name = name[0].upper()
250 if len(name) > 1:
251 Name += name[1:]
252 context.update({ 'name' : name, 'Name' : Name })
253 try:
254 vars = self.func(self.user_code, context, *args[0], **args[1])
255 except Exception, exc:
256 if debug:
257 raise
258 error(lineno, 'error defining "%s": %s.' % (name, exc))
259 for k in vars.keys():
260 if k not in ('header_output', 'decoder_output',
261 'exec_output', 'decode_block'):
262 del vars[k]
263 return GenCode(parser, **vars)
264
265# Special null format to catch an implicit-format instruction
266# definition outside of any format block.
267class NoFormat(object):
268 def __init__(self):
269 self.defaultInst = ''
270
271 def defineInst(self, parser, name, args, lineno):
272 error(lineno,
273 'instruction definition "%s" with no active format!' % name)
274
275###############
276# GenCode class
277#
278# The GenCode class encapsulates generated code destined for various
279# output files. The header_output and decoder_output attributes are
280# strings containing code destined for decoder.hh and decoder.cc
281# respectively. The decode_block attribute contains code to be
282# incorporated in the decode function itself (that will also end up in
283# decoder.cc). The exec_output attribute is the string of code for the
284# exec.cc file. The has_decode_default attribute is used in the decode block
285# to allow explicit default clauses to override default default clauses.
286
287class GenCode(object):
288 # Constructor.
289 def __init__(self, parser,
290 header_output = '', decoder_output = '', exec_output = '',
291 decode_block = '', has_decode_default = False):
292 self.parser = parser
293 self.header_output = header_output
294 self.decoder_output = decoder_output
295 self.exec_output = exec_output
296 self.decode_block = decode_block
297 self.has_decode_default = has_decode_default
298
299 # Write these code chunks out to the filesystem. They will be properly
300 # interwoven by the write_top_level_files().
301 def emit(self):
302 if self.header_output:
303 self.parser.get_file('header').write(self.header_output)
304 if self.decoder_output:
305 self.parser.get_file('decoder').write(self.decoder_output)
306 if self.exec_output:
307 self.parser.get_file('exec').write(self.exec_output)
308 if self.decode_block:
309 self.parser.get_file('decode_block').write(self.decode_block)
310
311 # Override '+' operator: generate a new GenCode object that
312 # concatenates all the individual strings in the operands.
313 def __add__(self, other):
314 return GenCode(self.parser,
315 self.header_output + other.header_output,
316 self.decoder_output + other.decoder_output,
317 self.exec_output + other.exec_output,
318 self.decode_block + other.decode_block,
319 self.has_decode_default or other.has_decode_default)
320
321 # Prepend a string (typically a comment) to all the strings.
322 def prepend_all(self, pre):
323 self.header_output = pre + self.header_output
324 self.decoder_output = pre + self.decoder_output
325 self.decode_block = pre + self.decode_block
326 self.exec_output = pre + self.exec_output
327
328 # Wrap the decode block in a pair of strings (e.g., 'case foo:'
329 # and 'break;'). Used to build the big nested switch statement.
330 def wrap_decode_block(self, pre, post = ''):
331 self.decode_block = pre + indent(self.decode_block) + post
332
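# Illustrative sketch (comment only, not executed): two instruction
# definitions combine with '+',
#
#     code = GenCode(parser, exec_output='A') + GenCode(parser, exec_output='B')
#
# concatenating each section's string, and
#
#     code.wrap_decode_block('case 0x1:\n', 'break;\n')
#
# indents the accumulated decode_block and brackets it with the case label
# and 'break;'. Repeated wrapping like this is how the nested switch
# statement in decodeInst() is built up from the leaves outward.
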
333#####################################################################
334#
335# Bitfield Operator Support
336#
337#####################################################################
338
339bitOp1ArgRE = re.compile(r'<\s*(\w+)\s*:\s*>')
340
341bitOpWordRE = re.compile(r'(?<![\w\.])([\w\.]+)<\s*(\w+)\s*:\s*(\w+)\s*>')
342bitOpExprRE = re.compile(r'\)<\s*(\w+)\s*:\s*(\w+)\s*>')
343
344def substBitOps(code):
345 # first convert single-bit selectors to two-index form
346 # i.e., <n> --> <n:n>
347 code = bitOp1ArgRE.sub(r'<\1:\1>', code)
348 # simple case: selector applied to ID (name)
349 # i.e., foo<a:b> --> bits(foo, a, b)
350 code = bitOpWordRE.sub(r'bits(\1, \2, \3)', code)
351 # if selector is applied to expression (ending in ')'),
352 # we need to search backward for matching '('
353 match = bitOpExprRE.search(code)
354 while match:
355 exprEnd = match.start()
356 here = exprEnd - 1
357 nestLevel = 1
358 while nestLevel > 0:
359 if code[here] == '(':
360 nestLevel -= 1
361 elif code[here] == ')':
362 nestLevel += 1
363 here -= 1
364 if here < 0:
365 sys.exit("Didn't find '('!")
366 exprStart = here+1
367 newExpr = r'bits(%s, %s, %s)' % (code[exprStart:exprEnd+1],
368 match.group(1), match.group(2))
369 code = code[:exprStart] + newExpr + code[match.end():]
370 match = bitOpExprRE.search(code)
371 return code
372
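# For example (illustrative only; Ra and Rb are hypothetical operands):
#     substBitOps('Rb<7:0> + (Ra + Rb)<3:2>')
# returns
#     'bits(Rb, 7, 0) + bits((Ra + Rb), 3, 2)'
# Selectors on simple names are rewritten directly by bitOpWordRE, while a
# selector on a parenthesized expression triggers the backward scan above
# to find the matching '('.
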
373
374#####################################################################
375#
376# Code Parser
377#
378# The remaining code is the support for automatically extracting
379# instruction characteristics from pseudocode.
380#
381#####################################################################
382
383# Force the argument to be a list. Useful for flags, where a caller
384 # can specify a singleton flag or a list of flags. Also useful for
385# converting tuples to lists so they can be modified.
386def makeList(arg):
387 if isinstance(arg, list):
388 return arg
389 elif isinstance(arg, tuple):
390 return list(arg)
391 elif not arg:
392 return []
393 else:
394 return [ arg ]
395
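# For example (illustrative only; the flag names are hypothetical):
#     makeList('IsLoad')               -> ['IsLoad']
#     makeList(('IsLoad', 'IsStore'))  -> ['IsLoad', 'IsStore']
#     makeList(None)                   -> []
# while an argument that is already a list is returned unchanged.
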
396class Operand(object):
397 '''Base class for operand descriptors. An instance of this class
398 (or actually a class derived from this one) represents a specific
399 operand for a code block (e.g., "Rc.sq" as a dest). Intermediate
400 derived classes encapsulate the traits of a particular operand
401 type (e.g., "32-bit integer register").'''
402
403 def buildReadCode(self, func = None):
404 subst_dict = {"name": self.base_name,
405 "func": func,
406 "reg_idx": self.reg_spec,
407 "ctype": self.ctype}
408 if hasattr(self, 'src_reg_idx'):
409 subst_dict['op_idx'] = self.src_reg_idx
410 code = self.read_code % subst_dict
411 return '%s = %s;\n' % (self.base_name, code)
412
413 def buildWriteCode(self, func = None):
414 subst_dict = {"name": self.base_name,
415 "func": func,
416 "reg_idx": self.reg_spec,
417 "ctype": self.ctype,
418 "final_val": self.base_name}
419 if hasattr(self, 'dest_reg_idx'):
420 subst_dict['op_idx'] = self.dest_reg_idx
421 code = self.write_code % subst_dict
422 return '''
423 {
424 %s final_val = %s;
425 %s;
426 if (traceData) { traceData->setData(final_val); }
427 }''' % (self.dflt_ctype, self.base_name, code)
428
429 def __init__(self, parser, full_name, ext, is_src, is_dest):
430 self.full_name = full_name
431 self.ext = ext
432 self.is_src = is_src
433 self.is_dest = is_dest
434 # The 'effective extension' (eff_ext) is either the actual
435 # extension, if one was explicitly provided, or the default.
436 if ext:
437 self.eff_ext = ext
438 elif hasattr(self, 'dflt_ext'):
439 self.eff_ext = self.dflt_ext
440
441 if hasattr(self, 'eff_ext'):
442 self.ctype = parser.operandTypeMap[self.eff_ext]
443
444 # Finalize additional fields (primarily code fields). This step
445 # is done separately since some of these fields may depend on the
446 # register index enumeration that hasn't been performed yet at the
447 # time of __init__(). The register index enumeration is affected
448 # by predicated register reads/writes. Hence, we forward the flags
449 # that indicate whether or not predication is in use.
450 def finalize(self, predRead, predWrite):
451 self.flags = self.getFlags()
452 self.constructor = self.makeConstructor(predRead, predWrite)
453 self.op_decl = self.makeDecl()
454
455 if self.is_src:
456 self.op_rd = self.makeRead(predRead)
457 self.op_src_decl = self.makeDecl()
458 else:
459 self.op_rd = ''
460 self.op_src_decl = ''
461
462 if self.is_dest:
463 self.op_wb = self.makeWrite(predWrite)
464 self.op_dest_decl = self.makeDecl()
465 else:
466 self.op_wb = ''
467 self.op_dest_decl = ''
468
469 def isMem(self):
470 return 0
471
472 def isReg(self):
473 return 0
474
475 def isFloatReg(self):
476 return 0
477
478 def isIntReg(self):
479 return 0
480
481 def isCCReg(self):
482 return 0
483
484 def isControlReg(self):
485 return 0
486
487 def isVecReg(self):
488 return 0
489
490 def isVecElem(self):
491 return 0
492
493 def isVecPredReg(self):
494 return 0
495
496 def isPCState(self):
497 return 0
498
499 def isPCPart(self):
500 return self.isPCState() and self.reg_spec
501
502 def hasReadPred(self):
503 return self.read_predicate != None
504
505 def hasWritePred(self):
506 return self.write_predicate != None
507
508 def getFlags(self):
509 # note the empty slice '[:]' gives us a copy of self.flags[0]
510 # instead of a reference to it
511 my_flags = self.flags[0][:]
512 if self.is_src:
513 my_flags += self.flags[1]
514 if self.is_dest:
515 my_flags += self.flags[2]
516 return my_flags
517
518 def makeDecl(self):
519 # Note that initializations in the declarations are solely
520 # to avoid 'uninitialized variable' errors from the compiler.
521 return self.ctype + ' ' + self.base_name + ' = 0;\n';
522
523
524src_reg_constructor = '\n\t_srcRegIdx[_numSrcRegs++] = RegId(%s, %s);'
525dst_reg_constructor = '\n\t_destRegIdx[_numDestRegs++] = RegId(%s, %s);'
526
527
528class IntRegOperand(Operand):
529 reg_class = 'IntRegClass'
530
531 def isReg(self):
532 return 1
533
534 def isIntReg(self):
535 return 1
536
537 def makeConstructor(self, predRead, predWrite):
538 c_src = ''
539 c_dest = ''
540
541 if self.is_src:
542 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
543 if self.hasReadPred():
544 c_src = '\n\tif (%s) {%s\n\t}' % \
545 (self.read_predicate, c_src)
546
547 if self.is_dest:
548 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
549 c_dest += '\n\t_numIntDestRegs++;'
550 if self.hasWritePred():
551 c_dest = '\n\tif (%s) {%s\n\t}' % \
552 (self.write_predicate, c_dest)
553
554 return c_src + c_dest
555
556 def makeRead(self, predRead):
557 if (self.ctype == 'float' or self.ctype == 'double'):
558 error('Attempt to read integer register as FP')
559 if self.read_code != None:
560 return self.buildReadCode('readIntRegOperand')
561
562 int_reg_val = ''
563 if predRead:
564 int_reg_val = 'xc->readIntRegOperand(this, _sourceIndex++)'
565 if self.hasReadPred():
566 int_reg_val = '(%s) ? %s : 0' % \
567 (self.read_predicate, int_reg_val)
568 else:
569 int_reg_val = 'xc->readIntRegOperand(this, %d)' % self.src_reg_idx
570
571 return '%s = %s;\n' % (self.base_name, int_reg_val)
572
573 def makeWrite(self, predWrite):
574 if (self.ctype == 'float' or self.ctype == 'double'):
575 error('Attempt to write integer register as FP')
576 if self.write_code != None:
577 return self.buildWriteCode('setIntRegOperand')
578
579 if predWrite:
580 wp = 'true'
581 if self.hasWritePred():
582 wp = self.write_predicate
583
584 wcond = 'if (%s)' % (wp)
585 windex = '_destIndex++'
586 else:
587 wcond = ''
588 windex = '%d' % self.dest_reg_idx
589
590 wb = '''
591 %s
592 {
593 %s final_val = %s;
594 xc->setIntRegOperand(this, %s, final_val);\n
595 if (traceData) { traceData->setData(final_val); }
596 }''' % (wcond, self.ctype, self.base_name, windex)
597
598 return wb
599
600class FloatRegOperand(Operand):
601 reg_class = 'FloatRegClass'
602
603 def isReg(self):
604 return 1
605
606 def isFloatReg(self):
607 return 1
608
609 def makeConstructor(self, predRead, predWrite):
610 c_src = ''
611 c_dest = ''
612
613 if self.is_src:
614 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
615
616 if self.is_dest:
617 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
618 c_dest += '\n\t_numFPDestRegs++;'
619
620 return c_src + c_dest
621
622 def makeRead(self, predRead):
623 if self.read_code != None:
624 return self.buildReadCode('readFloatRegOperandBits')
625
626 if predRead:
627 rindex = '_sourceIndex++'
628 else:
629 rindex = '%d' % self.src_reg_idx
630
631 code = 'xc->readFloatRegOperandBits(this, %s)' % rindex
632 if self.ctype == 'float':
633 code = 'bitsToFloat32(%s)' % code
634 elif self.ctype == 'double':
635 code = 'bitsToFloat64(%s)' % code
636 return '%s = %s;\n' % (self.base_name, code)
637
638 def makeWrite(self, predWrite):
639 if self.write_code != None:
640 return self.buildWriteCode('setFloatRegOperandBits')
641
642 if predWrite:
643 wp = '_destIndex++'
644 else:
645 wp = '%d' % self.dest_reg_idx
646
647 val = 'final_val'
648 if self.ctype == 'float':
649 val = 'floatToBits32(%s)' % val
650 elif self.ctype == 'double':
651 val = 'floatToBits64(%s)' % val
652
653 wp = 'xc->setFloatRegOperandBits(this, %s, %s);' % (wp, val)
654
655 wb = '''
656 {
657 %s final_val = %s;
658 %s\n
659 if (traceData) { traceData->setData(final_val); }
660 }''' % (self.ctype, self.base_name, wp)
661 return wb
662
663class VecRegOperand(Operand):
664 reg_class = 'VecRegClass'
665
666 def __init__(self, parser, full_name, ext, is_src, is_dest):
667 Operand.__init__(self, parser, full_name, ext, is_src, is_dest)
668 self.elemExt = None
669 self.parser = parser
670
671 def isReg(self):
672 return 1
673
674 def isVecReg(self):
675 return 1
676
677 def makeDeclElem(self, elem_op):
678 (elem_name, elem_ext) = elem_op
679 (elem_spec, dflt_elem_ext, zeroing) = self.elems[elem_name]
680 if elem_ext:
681 ext = elem_ext
682 else:
683 ext = dflt_elem_ext
684 ctype = self.parser.operandTypeMap[ext]
685 return '\n\t%s %s = 0;' % (ctype, elem_name)
686
687 def makeDecl(self):
688 if not self.is_dest and self.is_src:
689 c_decl = '\t/* Vars for %s*/' % (self.base_name)
690 if hasattr(self, 'active_elems'):
691 if self.active_elems:
692 for elem in self.active_elems:
693 c_decl += self.makeDeclElem(elem)
694 return c_decl + '\t/* End vars for %s */\n' % (self.base_name)
695 else:
696 return ''
697
698 def makeConstructor(self, predRead, predWrite):
699 c_src = ''
700 c_dest = ''
701
702 numAccessNeeded = 1
703
704 if self.is_src:
705 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
706
707 if self.is_dest:
708 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
709 c_dest += '\n\t_numVecDestRegs++;'
710
711 return c_src + c_dest
712
713     # Read the destination register so its elements can be updated in place
714 def makeReadWElem(self, elem_op):
715 (elem_name, elem_ext) = elem_op
716 (elem_spec, dflt_elem_ext, zeroing) = self.elems[elem_name]
717 if elem_ext:
718 ext = elem_ext
719 else:
720 ext = dflt_elem_ext
721 ctype = self.parser.operandTypeMap[ext]
722 c_read = '\t\t%s& %s = %s[%s];\n' % \
723 (ctype, elem_name, self.base_name, elem_spec)
724 return c_read
725
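    # Illustrative sketch of what makeReadW() emits for a non-predicated
    # destination 'Vd' (hypothetical name) viewed as 32-bit elements:
    #     TheISA::VecRegContainer& tmp_d0 =
    #         xc->getWritableVecRegOperand(this, 0);
    #     auto Vd = tmp_d0.as<uint32_t>();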
726 def makeReadW(self, predWrite):
727 func = 'getWritableVecRegOperand'
728 if self.read_code != None:
729 return self.buildReadCode(func)
730
731 if predWrite:
732 rindex = '_destIndex++'
733 else:
734 rindex = '%d' % self.dest_reg_idx
735
736 c_readw = '\t\t%s& tmp_d%s = xc->%s(this, %s);\n'\
737 % ('TheISA::VecRegContainer', rindex, func, rindex)
738 if self.elemExt:
739 c_readw += '\t\tauto %s = tmp_d%s.as<%s>();\n' % (self.base_name,
740 rindex, self.parser.operandTypeMap[self.elemExt])
741 if self.ext:
742 c_readw += '\t\tauto %s = tmp_d%s.as<%s>();\n' % (self.base_name,
743 rindex, self.parser.operandTypeMap[self.ext])
744 if hasattr(self, 'active_elems'):
745 if self.active_elems:
746 for elem in self.active_elems:
747 c_readw += self.makeReadWElem(elem)
748 return c_readw
749
750 # Normal source operand read
751 def makeReadElem(self, elem_op, name):
752 (elem_name, elem_ext) = elem_op
753 (elem_spec, dflt_elem_ext, zeroing) = self.elems[elem_name]
754
755 if elem_ext:
756 ext = elem_ext
757 else:
758 ext = dflt_elem_ext
759 ctype = self.parser.operandTypeMap[ext]
760 c_read = '\t\t%s = %s[%s];\n' % \
761 (elem_name, name, elem_spec)
762 return c_read
763
764 def makeRead(self, predRead):
765 func = 'readVecRegOperand'
766 if self.read_code != None:
767 return self.buildReadCode(func)
768
769 if predRead:
770 rindex = '_sourceIndex++'
771 else:
772 rindex = '%d' % self.src_reg_idx
773
774 name = self.base_name
775 if self.is_dest and self.is_src:
776 name += '_merger'
777
778 c_read = '\t\t%s& tmp_s%s = xc->%s(this, %s);\n' \
779 % ('const TheISA::VecRegContainer', rindex, func, rindex)
780         # If the parser has detected that elements are being accessed, create
781         # the appropriate view
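        # e.g., roughly: auto Vs = tmp_s0.as<uint32_t>(); for a
        # hypothetical source viewed as 32-bit elements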
782 if self.elemExt:
783 c_read += '\t\tauto %s = tmp_s%s.as<%s>();\n' % \
784 (name, rindex, self.parser.operandTypeMap[self.elemExt])
785 if self.ext:
786 c_read += '\t\tauto %s = tmp_s%s.as<%s>();\n' % \
787 (name, rindex, self.parser.operandTypeMap[self.ext])
788 if hasattr(self, 'active_elems'):
789 if self.active_elems:
790 for elem in self.active_elems:
791 c_read += self.makeReadElem(elem, name)
792 return c_read
793
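    # Note: element updates reach the destination register through the
    # writable view obtained in makeReadW() (prepended in finalize()
    # below), so makeWrite() only emits the optional trace call.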
794 def makeWrite(self, predWrite):
795 func = 'setVecRegOperand'
796 if self.write_code != None:
797 return self.buildWriteCode(func)
798
799 wb = '''
800 if (traceData) {
801 traceData->setData(tmp_d%d);
802 }
803 ''' % self.dest_reg_idx
804 return wb
805
806 def finalize(self, predRead, predWrite):
807 super(VecRegOperand, self).finalize(predRead, predWrite)
808 if self.is_dest:
809 self.op_rd = self.makeReadW(predWrite) + self.op_rd
810
811class VecElemOperand(Operand):
812 reg_class = 'VecElemClass'
813
814 def isReg(self):
815 return 1
816
817 def isVecElem(self):
818 return 1
819
820 def makeDecl(self):
821 if self.is_dest and not self.is_src:
822 return '\n\t%s %s;' % (self.ctype, self.base_name)
823 else:
824 return ''
825
826 def makeConstructor(self, predRead, predWrite):
827 c_src = ''
828 c_dest = ''
829
830 numAccessNeeded = 1
831
832 if self.is_src:
833 c_src = ('\n\t_srcRegIdx[_numSrcRegs++] = RegId(%s, %s, %s);' %
834 (self.reg_class, self.reg_spec, self.elem_spec))
835
836 if self.is_dest:
837 c_dest = ('\n\t_destRegIdx[_numDestRegs++] = RegId(%s, %s, %s);' %
838 (self.reg_class, self.reg_spec, self.elem_spec))
839 c_dest += '\n\t_numVecElemDestRegs++;'
840 return c_src + c_dest
841
842 def makeRead(self, predRead):
843 c_read = 'xc->readVecElemOperand(this, %d)' % self.src_reg_idx
844
845 if self.ctype == 'float':
846 c_read = 'bitsToFloat32(%s)' % c_read
847 elif self.ctype == 'double':
848 c_read = 'bitsToFloat64(%s)' % c_read
849
850 return '\n\t%s %s = %s;\n' % (self.ctype, self.base_name, c_read)
851
852 def makeWrite(self, predWrite):
853 if self.ctype == 'float':
854 c_write = 'floatToBits32(%s)' % self.base_name
855 elif self.ctype == 'double':
856 c_write = 'floatToBits64(%s)' % self.base_name
857 else:
858 c_write = self.base_name
859
860 c_write = ('\n\txc->setVecElemOperand(this, %d, %s);' %
861 (self.dest_reg_idx, c_write))
862
863 return c_write
864
865class VecPredRegOperand(Operand):
866 reg_class = 'VecPredRegClass'
867
868 def __init__(self, parser, full_name, ext, is_src, is_dest):
869 Operand.__init__(self, parser, full_name, ext, is_src, is_dest)
870 self.parser = parser
871
872 def isReg(self):
873 return 1
874
875 def isVecPredReg(self):
876 return 1
877
878 def makeDecl(self):
879 return ''
880
881 def makeConstructor(self, predRead, predWrite):
882 c_src = ''
883 c_dest = ''
884
885 if self.is_src:
886 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
887
888 if self.is_dest:
889 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
890 c_dest += '\n\t_numVecPredDestRegs++;'
891
892 return c_src + c_dest
893
894 def makeRead(self, predRead):
895 func = 'readVecPredRegOperand'
896 if self.read_code != None:
897 return self.buildReadCode(func)
898
899 if predRead:
900 rindex = '_sourceIndex++'
901 else:
902 rindex = '%d' % self.src_reg_idx
903
904 c_read = '\t\t%s& tmp_s%s = xc->%s(this, %s);\n' % (
905 'const TheISA::VecPredRegContainer', rindex, func, rindex)
906 if self.ext:
907 c_read += '\t\tauto %s = tmp_s%s.as<%s>();\n' % (
908 self.base_name, rindex,
909 self.parser.operandTypeMap[self.ext])
910 return c_read
911
912 def makeReadW(self, predWrite):
913 func = 'getWritableVecPredRegOperand'
914 if self.read_code != None:
915 return self.buildReadCode(func)
916
917 if predWrite:
918 rindex = '_destIndex++'
919 else:
920 rindex = '%d' % self.dest_reg_idx
921
922 c_readw = '\t\t%s& tmp_d%s = xc->%s(this, %s);\n' % (
923 'TheISA::VecPredRegContainer', rindex, func, rindex)
924 if self.ext:
925 c_readw += '\t\tauto %s = tmp_d%s.as<%s>();\n' % (
926 self.base_name, rindex,
927 self.parser.operandTypeMap[self.ext])
928 return c_readw
929
930 def makeWrite(self, predWrite):
931 func = 'setVecPredRegOperand'
932 if self.write_code != None:
933 return self.buildWriteCode(func)
934
935 wb = '''
936 if (traceData) {
937 traceData->setData(tmp_d%d);
938 }
939 ''' % self.dest_reg_idx
940 return wb
941
942 def finalize(self, predRead, predWrite):
943 super(VecPredRegOperand, self).finalize(predRead, predWrite)
944 if self.is_dest:
945 self.op_rd = self.makeReadW(predWrite) + self.op_rd
946
947class CCRegOperand(Operand):
948 reg_class = 'CCRegClass'
949
950 def isReg(self):
951 return 1
952
953 def isCCReg(self):
954 return 1
955
956 def makeConstructor(self, predRead, predWrite):
957 c_src = ''
958 c_dest = ''
959
960 if self.is_src:
961 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
962 if self.hasReadPred():
963 c_src = '\n\tif (%s) {%s\n\t}' % \
964 (self.read_predicate, c_src)
965
966 if self.is_dest:
967 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
968 c_dest += '\n\t_numCCDestRegs++;'
969 if self.hasWritePred():
970 c_dest = '\n\tif (%s) {%s\n\t}' % \
971 (self.write_predicate, c_dest)
972
973 return c_src + c_dest
974
975 def makeRead(self, predRead):
976 if (self.ctype == 'float' or self.ctype == 'double'):
977 error('Attempt to read condition-code register as FP')
978 if self.read_code != None:
979 return self.buildReadCode('readCCRegOperand')
980
981 int_reg_val = ''
982 if predRead:
983 int_reg_val = 'xc->readCCRegOperand(this, _sourceIndex++)'
984 if self.hasReadPred():
985 int_reg_val = '(%s) ? %s : 0' % \
986 (self.read_predicate, int_reg_val)
987 else:
988 int_reg_val = 'xc->readCCRegOperand(this, %d)' % self.src_reg_idx
989
990 return '%s = %s;\n' % (self.base_name, int_reg_val)
991
992 def makeWrite(self, predWrite):
993 if (self.ctype == 'float' or self.ctype == 'double'):
994 error('Attempt to write condition-code register as FP')
995 if self.write_code != None:
996 return self.buildWriteCode('setCCRegOperand')
997
998 if predWrite:
999 wp = 'true'
1000 if self.hasWritePred():
1001 wp = self.write_predicate
1002
1003 wcond = 'if (%s)' % (wp)
1004 windex = '_destIndex++'
1005 else:
1006 wcond = ''
1007 windex = '%d' % self.dest_reg_idx
1008
1009 wb = '''
1010 %s
1011 {
1012 %s final_val = %s;
1013 xc->setCCRegOperand(this, %s, final_val);\n
1014 if (traceData) { traceData->setData(final_val); }
1015 }''' % (wcond, self.ctype, self.base_name, windex)
1016
1017 return wb
1018
1019class ControlRegOperand(Operand):
1020 reg_class = 'MiscRegClass'
1021
1022 def isReg(self):
1023 return 1
1024
1025 def isControlReg(self):
1026 return 1
1027
1028 def makeConstructor(self, predRead, predWrite):
1029 c_src = ''
1030 c_dest = ''
1031
1032 if self.is_src:
1033 c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
1034
1035 if self.is_dest:
1036 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
1037
1038 return c_src + c_dest
1039
1040 def makeRead(self, predRead):
1041 bit_select = 0
1042 if (self.ctype == 'float' or self.ctype == 'double'):
1043 error('Attempt to read control register as FP')
1044 if self.read_code != None:
1045 return self.buildReadCode('readMiscRegOperand')
1046
1047 if predRead:
1048 rindex = '_sourceIndex++'
1049 else:
1050 rindex = '%d' % self.src_reg_idx
1051
1052 return '%s = xc->readMiscRegOperand(this, %s);\n' % \
1053 (self.base_name, rindex)
1054
1055 def makeWrite(self, predWrite):
1056 if (self.ctype == 'float' or self.ctype == 'double'):
1057 error('Attempt to write control register as FP')
1058 if self.write_code != None:
1059 return self.buildWriteCode('setMiscRegOperand')
1060
1061 if predWrite:
1062 windex = '_destIndex++'
1063 else:
1064 windex = '%d' % self.dest_reg_idx
1065
1066 wb = 'xc->setMiscRegOperand(this, %s, %s);\n' % \
1067 (windex, self.base_name)
1068 wb += 'if (traceData) { traceData->setData(%s); }' % \
1069 self.base_name
1070
1071 return wb
1072
1073class MemOperand(Operand):
1074 def isMem(self):
1075 return 1
1076
1077 def makeConstructor(self, predRead, predWrite):
1078 return ''
1079
1080 def makeDecl(self):
1081 # Declare memory data variable.
1082 return '%s %s;\n' % (self.ctype, self.base_name)
1083
1084 def makeRead(self, predRead):
1085 if self.read_code != None:
1086 return self.buildReadCode()
1087 return ''
1088
1089 def makeWrite(self, predWrite):
1090 if self.write_code != None:
1091 return self.buildWriteCode()
1092 return ''
1093
1094class PCStateOperand(Operand):
1095 def makeConstructor(self, predRead, predWrite):
1096 return ''
1097
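    # Illustrative sketch: a whole-PC operand 'PCS' (hypothetical name)
    # reads roughly as
    #     PCS = xc->pcState();
    # while a PC-part operand with reg_spec 'npc' reads the local copy:
    #     NPC = __parserAutoPCState.npc();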
1098 def makeRead(self, predRead):
1099 if self.reg_spec:
1100 # A component of the PC state.
1101 return '%s = __parserAutoPCState.%s();\n' % \
1102 (self.base_name, self.reg_spec)
1103 else:
1104 # The whole PC state itself.
1105 return '%s = xc->pcState();\n' % self.base_name
1106
1107 def makeWrite(self, predWrite):
1108 if self.reg_spec:
1109 # A component of the PC state.
1110 return '__parserAutoPCState.%s(%s);\n' % \
1111 (self.reg_spec, self.base_name)
1112 else:
1113 # The whole PC state itself.
1114 return 'xc->pcState(%s);\n' % self.base_name
1115
1116 def makeDecl(self):
1117 ctype = 'TheISA::PCState'
1118 if self.isPCPart():
1119 ctype = self.ctype
1120 # Note that initializations in the declarations are solely
1121 # to avoid 'uninitialized variable' errors from the compiler.
1122 return '%s %s = 0;\n' % (ctype, self.base_name)
1123
1124 def isPCState(self):
1125 return 1
1126
1127class OperandList(object):
1128 '''Find all the operands in the given code block. Returns an operand
1129 descriptor list (instance of class OperandList).'''
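    # Illustrative sketch: with the usual 'Base_ext' operand syntax, a
    # (hypothetical) match on 'Rd_sw' yields the regexp groups
    # ('Rd_sw', 'Rd', 'sw') -- full name, base name, type extension.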
1130 def __init__(self, parser, code):
1131 self.items = []
1132 self.bases = {}
1133 # delete strings and comments so we don't match on operands inside
1134 for regEx in (stringRE, commentRE):
1135 code = regEx.sub('', code)
1136 # search for operands
1137 next_pos = 0
1138 while 1:
1139 match = parser.operandsRE.search(code, next_pos)
1140 if not match:
1141 # no more matches: we're done
1142 break
1143 op = match.groups()
1144 # regexp groups are operand full name, base, and extension
1145 (op_full, op_base, op_ext) = op
1146            # If it is an elem operand, define or update the corresponding
1147            # vector operand
1148 isElem = False
1149 if op_base in parser.elemToVector:
1150 isElem = True
1151 elem_op = (op_base, op_ext)
1152 op_base = parser.elemToVector[op_base]
1153 op_ext = '' # use the default one
1154 # if the token following the operand is an assignment, this is
1155 # a destination (LHS), else it's a source (RHS)
1156 is_dest = (assignRE.match(code, match.end()) != None)
1157 is_src = not is_dest
1158
1159 # see if we've already seen this one
1160 op_desc = self.find_base(op_base)
1161 if op_desc:
1162 if op_ext and op_ext != '' and op_desc.ext != op_ext:
1163 error ('Inconsistent extensions for operand %s: %s - %s' \
1164 % (op_base, op_desc.ext, op_ext))
1165 op_desc.is_src = op_desc.is_src or is_src
1166 op_desc.is_dest = op_desc.is_dest or is_dest
1167 if isElem:
1168 (elem_base, elem_ext) = elem_op
1169 found = False
1170 for ae in op_desc.active_elems:
1171 (ae_base, ae_ext) = ae
1172 if ae_base == elem_base:
1173 if ae_ext != elem_ext:
1174 error('Inconsistent extensions for elem'
1175 ' operand %s' % elem_base)
1176 else:
1177 found = True
1178 if not found:
1179 op_desc.active_elems.append(elem_op)
1180 else:
1181 # new operand: create new descriptor
1182 op_desc = parser.operandNameMap[op_base](parser,
1183 op_full, op_ext, is_src, is_dest)
1184 # if operand is a vector elem, add the corresponding vector
1185 # operand if not already done
1186 if isElem:
1187 op_desc.elemExt = elem_op[1]
1188 op_desc.active_elems = [elem_op]
1189 self.append(op_desc)
1190 # start next search after end of current match
1191 next_pos = match.end()
1192 self.sort()
1193 # enumerate source & dest register operands... used in building
1194 # constructor later
1195 self.numSrcRegs = 0
1196 self.numDestRegs = 0
1197 self.numFPDestRegs = 0
1198 self.numIntDestRegs = 0
1199 self.numVecDestRegs = 0
1200 self.numVecPredDestRegs = 0
1201 self.numCCDestRegs = 0
1202 self.numMiscDestRegs = 0
1203 self.memOperand = None
1204
1205 # Flags to keep track if one or more operands are to be read/written
1206 # conditionally.
1207 self.predRead = False
1208 self.predWrite = False
1209
1210 for op_desc in self.items:
1211 if op_desc.isReg():
1212 if op_desc.is_src:
1213 op_desc.src_reg_idx = self.numSrcRegs
1214 self.numSrcRegs += 1
1215 if op_desc.is_dest:
1216 op_desc.dest_reg_idx = self.numDestRegs
1217 self.numDestRegs += 1
1218 if op_desc.isFloatReg():
1219 self.numFPDestRegs += 1
1220 elif op_desc.isIntReg():
1221 self.numIntDestRegs += 1
1222 elif op_desc.isVecReg():
1223 self.numVecDestRegs += 1
1224 elif op_desc.isVecPredReg():
1225 self.numVecPredDestRegs += 1
1226 elif op_desc.isCCReg():
1227 self.numCCDestRegs += 1
1228 elif op_desc.isControlReg():
1229 self.numMiscDestRegs += 1
1230 elif op_desc.isMem():
1231 if self.memOperand:
1232 error("Code block has more than one memory operand.")
1233 self.memOperand = op_desc
1234
1235 # Check if this operand has read/write predication. If true, then
1236 # the microop will dynamically index source/dest registers.
1237 self.predRead = self.predRead or op_desc.hasReadPred()
1238 self.predWrite = self.predWrite or op_desc.hasWritePred()
1239
1240 if parser.maxInstSrcRegs < self.numSrcRegs:
1241 parser.maxInstSrcRegs = self.numSrcRegs
1242 if parser.maxInstDestRegs < self.numDestRegs:
1243 parser.maxInstDestRegs = self.numDestRegs
1244 if parser.maxMiscDestRegs < self.numMiscDestRegs:
1245 parser.maxMiscDestRegs = self.numMiscDestRegs
1246
1247 # now make a final pass to finalize op_desc fields that may depend
1248 # on the register enumeration
1249 for op_desc in self.items:
1250 op_desc.finalize(self.predRead, self.predWrite)
1251
1252 def __len__(self):
1253 return len(self.items)
1254
1255 def __getitem__(self, index):
1256 return self.items[index]
1257
1258 def append(self, op_desc):
1259 self.items.append(op_desc)
1260 self.bases[op_desc.base_name] = op_desc
1261
1262 def find_base(self, base_name):
1263 # like self.bases[base_name], but returns None if not found
1264 # (rather than raising exception)
1265 return self.bases.get(base_name)
1266
1267 # internal helper function for concat[Some]Attr{Strings|Lists}
1268 def __internalConcatAttrs(self, attr_name, filter, result):
1269 for op_desc in self.items:
1270 if filter(op_desc):
1271 result += getattr(op_desc, attr_name)
1272 return result
1273
1274 # return a single string that is the concatenation of the (string)
1275 # values of the specified attribute for all operands
1276 def concatAttrStrings(self, attr_name):
1277 return self.__internalConcatAttrs(attr_name, lambda x: 1, '')
1278
1279 # like concatAttrStrings, but only include the values for the operands
1280 # for which the provided filter function returns true
1281 def concatSomeAttrStrings(self, filter, attr_name):
1282 return self.__internalConcatAttrs(attr_name, filter, '')
1283
1284 # return a single list that is the concatenation of the (list)
1285 # values of the specified attribute for all operands
1286 def concatAttrLists(self, attr_name):
1287 return self.__internalConcatAttrs(attr_name, lambda x: 1, [])
1288
1289 # like concatAttrLists, but only include the values for the operands
1290 # for which the provided filter function returns true
1291 def concatSomeAttrLists(self, filter, attr_name):
1292 return self.__internalConcatAttrs(attr_name, filter, [])
1293
1294 def sort(self):
1295 self.items.sort(lambda a, b: a.sort_pri - b.sort_pri)
1296
1297class SubOperandList(OperandList):
1298    '''Find all the operands in the given code block and build a sub-list
1299    of references to their descriptors in the given master list.'''
1300 def __init__(self, parser, code, master_list):
1301 self.items = []
1302 self.bases = {}
1303 # delete strings and comments so we don't match on operands inside
1304 for regEx in (stringRE, commentRE):
1305 code = regEx.sub('', code)
1306 # search for operands
1307 next_pos = 0
1308 while 1:
1309 match = parser.operandsRE.search(code, next_pos)
1310 if not match:
1311 # no more matches: we're done
1312 break
1313 op = match.groups()
1314 # regexp groups are operand full name, base, and extension
1315 (op_full, op_base, op_ext) = op
1316            # If it is an elem operand, map it to the corresponding
1317            # vector operand
1318 if op_base in parser.elemToVector:
1319 elem_op = op_base
1320 op_base = parser.elemToVector[elem_op]
1321 # find this op in the master list
1322 op_desc = master_list.find_base(op_base)
1323 if not op_desc:
1324 error('Found operand %s which is not in the master list!'
1325 % op_base)
1326 else:
1327 # See if we've already found this operand
1328 op_desc = self.find_base(op_base)
1329 if not op_desc:
1330 # if not, add a reference to it to this sub list
1331 self.append(master_list.bases[op_base])
1332
1333 # start next search after end of current match
1334 next_pos = match.end()
1335 self.sort()
1336 self.memOperand = None
1337 # Whether the whole PC needs to be read so parts of it can be accessed
1338 self.readPC = False
1339 # Whether the whole PC needs to be written after parts of it were
1340 # changed
1341 self.setPC = False
1342 # Whether this instruction manipulates the whole PC or parts of it.
1343 # Mixing the two is a bad idea and flagged as an error.
1344 self.pcPart = None
1345
1346 # Flags to keep track if one or more operands are to be read/written
1347 # conditionally.
1348 self.predRead = False
1349 self.predWrite = False
1350
1351 for op_desc in self.items:
1352 if op_desc.isPCPart():
1353 self.readPC = True
1354 if op_desc.is_dest:
1355 self.setPC = True
1356
1357 if op_desc.isPCState():
1358 if self.pcPart is not None:
1359 if self.pcPart and not op_desc.isPCPart() or \
1360 not self.pcPart and op_desc.isPCPart():
1361 error("Mixed whole and partial PC state operands.")
1362 self.pcPart = op_desc.isPCPart()
1363
1364 if op_desc.isMem():
1365 if self.memOperand:
1366 error("Code block has more than one memory operand.")
1367 self.memOperand = op_desc
1368
1369 # Check if this operand has read/write predication. If true, then
1370 # the microop will dynamically index source/dest registers.
1371 self.predRead = self.predRead or op_desc.hasReadPred()
1372 self.predWrite = self.predWrite or op_desc.hasWritePred()
1373
1374# Regular expression object to match C++ strings
1375stringRE = re.compile(r'"([^"\\]|\\.)*"')
1376
1377# Regular expression object to match C++ comments
1378# (used in findOperands())
1379commentRE = re.compile(r'(^)?[^\S\n]*/(?:\*(.*?)\*/[^\S\n]*|/[^\n]*)($)?',
1380 re.DOTALL | re.MULTILINE)
1381
1382# Regular expression object to match assignment statements (used in
1383# findOperands()). If the code immediately following the first
1384# appearance of the operand matches this regex, then the operand
1385# appears to be on the LHS of an assignment, and is thus a
1386# destination. basically we're looking for an '=' that's not '=='.
1387# The heinous tangle before that handles the case where the operand
1388# has an array subscript.
1389assignRE = re.compile(r'(\[[^\]]+\])?\s*=(?!=)', re.MULTILINE)
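# For example, in 'Mem = Ra;' the text following 'Mem' matches, so Mem is
# treated as a destination; 'Mem == Ra' does not match (the '==' is
# excluded), and 'Mem[1] = Ra;' still matches via the optional subscript
# group.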
1390
1391def makeFlagConstructor(flag_list):
1392 if len(flag_list) == 0:
1393 return ''
1394 # filter out repeated flags
1395 flag_list.sort()
1396 i = 1
1397 while i < len(flag_list):
1398 if flag_list[i] == flag_list[i-1]:
1399 del flag_list[i]
1400 else:
1401 i += 1
1402 pre = '\n\tflags['
1403 post = '] = true;'
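    # e.g. ['IsInteger', 'IsLoad'] produces, roughly,
    # '\n\tflags[IsInteger] = true;\n\tflags[IsLoad] = true;'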
1404 code = pre + string.join(flag_list, post + pre) + post
1405 return code
1406
1407# Assume all instruction flags are of the form 'IsFoo'
1408instFlagRE = re.compile(r'Is.*')
1409
1410# OpClass constants end in 'Op' except No_OpClass
1411opClassRE = re.compile(r'.*Op|No_OpClass')
1412
1413class InstObjParams(object):
1414 def __init__(self, parser, mnem, class_name, base_class = '',
1415 snippets = {}, opt_args = []):
1416 self.mnemonic = mnem
1417 self.class_name = class_name
1418 self.base_class = base_class
1419 if not isinstance(snippets, dict):
1420 snippets = {'code' : snippets}
1421 compositeCode = ' '.join(map(str, snippets.values()))
1422 self.snippets = snippets
1423
1424 self.operands = OperandList(parser, compositeCode)
1425
1426        # The header of the constructor resets the register-count members
1427        # that the operand constructor snippets below will increment.
1428 header = ''
1429 header += '\n\t_numSrcRegs = 0;'
1430 header += '\n\t_numDestRegs = 0;'
1431 header += '\n\t_numFPDestRegs = 0;'
1432 header += '\n\t_numVecDestRegs = 0;'
1433 header += '\n\t_numVecElemDestRegs = 0;'
1434 header += '\n\t_numVecPredDestRegs = 0;'
1435 header += '\n\t_numIntDestRegs = 0;'
1436 header += '\n\t_numCCDestRegs = 0;'
1437
1438 self.constructor = header + \
1439 self.operands.concatAttrStrings('constructor')
1440
1441 self.flags = self.operands.concatAttrLists('flags')
1442
1443 self.op_class = None
1444
1445 # Optional arguments are assumed to be either StaticInst flags
1446 # or an OpClass value. To avoid having to import a complete
1447 # list of these values to match against, we do it ad-hoc
1448 # with regexps.
1449 for oa in opt_args:
1450 if instFlagRE.match(oa):
1451 self.flags.append(oa)
1452 elif opClassRE.match(oa):
1453 self.op_class = oa
1454 else:
1455 error('InstObjParams: optional arg "%s" not recognized '
1456 'as StaticInst::Flag or OpClass.' % oa)
1457
1458 # Make a basic guess on the operand class if not set.
1459 # These are good enough for most cases.
1460 if not self.op_class:
1461 if 'IsStore' in self.flags:
1462 # The order matters here: 'IsFloating' and 'IsInteger' are
1463 # usually set in FP instructions because of the base
1464 # register
1465 if 'IsFloating' in self.flags:
1466 self.op_class = 'FloatMemWriteOp'
1467 else:
1468 self.op_class = 'MemWriteOp'
1469 elif 'IsLoad' in self.flags or 'IsPrefetch' in self.flags:
1470 # The order matters here: 'IsFloating' and 'IsInteger' are
1471 # usually set in FP instructions because of the base
1472 # register
1473 if 'IsFloating' in self.flags:
1474 self.op_class = 'FloatMemReadOp'
1475 else:
1476 self.op_class = 'MemReadOp'
1477 elif 'IsFloating' in self.flags:
1478 self.op_class = 'FloatAddOp'
1479 elif 'IsVector' in self.flags:
1480 self.op_class = 'SimdAddOp'
1481 else:
1482 self.op_class = 'IntAluOp'
1483
1484        # Add flag initialization to the constructor here to include
1485        # any flags added via opt_args.
1486 self.constructor += makeFlagConstructor(self.flags)
1487
1488 # if 'IsFloating' is set, add call to the FP enable check
1489 # function (which should be provided by isa_desc via a declare)
1490 # if 'IsVector' is set, add call to the Vector enable check
1491 # function (which should be provided by isa_desc via a declare)
1492 if 'IsFloating' in self.flags:
1493 self.fp_enable_check = 'fault = checkFpEnableFault(xc);'
1494 elif 'IsVector' in self.flags:
1495 self.fp_enable_check = 'fault = checkVecEnableFault(xc);'
1496 else:
1497 self.fp_enable_check = ''
1498
1499##############
1500# Stack: a simple stack object. Used for both formats (formatStack)
1501# and default cases (defaultStack). Simply wraps a list to give more
1502# stack-like syntax and enable initialization with an argument list
1503# (as opposed to an argument that's a list).
1504
1505class Stack(list):
1506 def __init__(self, *items):
1507 list.__init__(self, items)
1508
1509 def push(self, item):
1510 self.append(item);
1511
1512 def top(self):
1513 return self[-1]
1514
1515# Format a file include stack backtrace as a string
1516def backtrace(filename_stack):
1517 fmt = "In file included from %s:"
1518 return "\n".join([fmt % f for f in filename_stack])
1519
1520
1521#######################
1522#
1523# LineTracker: track filenames along with line numbers in PLY lineno fields
1524# PLY explicitly doesn't do anything with 'lineno' except propagate
1525 # it. This class lets us tie filenames to line numbers with a
1526# minimum of disruption to existing increment code.
1527#
1528
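# A minimal usage sketch (hypothetical filename):
#     lineno = LineTracker("foo.isa")
#     lineno += 1       # returns a fresh object; see __iadd__ below
#     print(lineno)     # -> foo.isa:2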
1529class LineTracker(object):
1530 def __init__(self, filename, lineno=1):
1531 self.filename = filename
1532 self.lineno = lineno
1533
1534 # Overload '+=' for increments. We need to create a new object on
1535 # each update else every token ends up referencing the same
1536 # constantly incrementing instance.
1537 def __iadd__(self, incr):
1538 return LineTracker(self.filename, self.lineno + incr)
1539
1540 def __str__(self):
1541 return "%s:%d" % (self.filename, self.lineno)
1542
1543 # In case there are places where someone really expects a number
1544 def __int__(self):
1545 return self.lineno
1546
1547
1548#######################
1549#
1550# ISA Parser
1551# parses ISA DSL and emits C++ headers and source
1552#
1553
1554class ISAParser(Grammar):
1555 def __init__(self, output_dir):
1556 super(ISAParser, self).__init__()
1557 self.output_dir = output_dir
1558
1559 self.filename = None # for output file watermarking/scaremongering
1560
1561 # variable to hold templates
1562 self.templateMap = {}
1563
1564 # This dictionary maps format name strings to Format objects.
1565 self.formatMap = {}
1566
1567        # Track open files and, if applicable, how many chunks each has
1568        # been split into so far.
1569 self.files = {}
1570 self.splits = {}
1571
1572        # isa_name / namespace identifier from the namespace declaration;
1573        # both are None before the declaration is seen.
1574 self.isa_name = None
1575 self.namespace = None
1576
1577 # The format stack.
1578 self.formatStack = Stack(NoFormat())
1579
1580 # The default case stack.
1581 self.defaultStack = Stack(None)
1582
1583        # Stack that tracks the include chain of ISA description files.
1584        # Each element is a LineTracker recording the position in the
1585        # including file at which the current file was brought in via
1586        # ##newfile; it is restored when the matching ##endfile is seen.
1587 self.fileNameStack = Stack()
1588
1589 symbols = ('makeList', 're', 'string')
1590 self.exportContext = dict([(s, eval(s)) for s in symbols])
1591
1592 self.maxInstSrcRegs = 0
1593 self.maxInstDestRegs = 0
1594 self.maxMiscDestRegs = 0
1595
1596 def __getitem__(self, i): # Allow object (self) to be
1597 return getattr(self, i) # passed to %-substitutions
1598
1599 # Change the file suffix of a base filename:
1600 # (e.g.) decoder.cc -> decoder-g.cc.inc for 'global' outputs
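    # (and, once the namespace declaration has been seen,
    # decoder.cc -> decoder-ns.cc.inc)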
1601 def suffixize(self, s, sec):
1602 extn = re.compile('(\.[^\.]+)$') # isolate extension
1603 if self.namespace:
1604 return extn.sub(r'-ns\1.inc', s) # insert some text on either side
1605 else:
1606 return extn.sub(r'-g\1.inc', s)
1607
1608 # Get the file object for emitting code into the specified section
1609 # (header, decoder, exec, decode_block).
1610 def get_file(self, section):
1611 if section == 'decode_block':
1612 filename = 'decode-method.cc.inc'
1613 else:
1614 if section == 'header':
1615 file = 'decoder.hh'
1616 else:
1617 file = '%s.cc' % section
1618 filename = self.suffixize(file, section)
1619 try:
1620 return self.files[filename]
1621 except KeyError: pass
1622
1623 f = self.open(filename)
1624 self.files[filename] = f
1625
1626 # The splittable files are the ones with many independent
1627 # per-instruction functions - the decoder's instruction constructors
1628 # and the instruction execution (execute()) methods. These both have
1629 # the suffix -ns.cc.inc, meaning they are within the namespace part
1630 # of the ISA, contain object-emitting C++ source, and are included
1631 # into other top-level files. These are the files that need special
1632 # #define's to allow parts of them to be compiled separately. Rather
1633 # than splitting the emissions into separate files, the monolithic
1634 # output of the ISA parser is maintained, but the value (or lack
1635 # thereof) of the __SPLIT definition during C preprocessing will
1636 # select the different chunks. If no 'split' directives are used,
1637 # the cpp emissions have no effect.
1638 if re.search('-ns.cc.inc$', filename):
1639 print('#if !defined(__SPLIT) || (__SPLIT == 1)', file=f)
1640 self.splits[f] = 1
1641 # ensure requisite #include's
1642 elif filename == 'decoder-g.hh.inc':
1643 print('#include "base/bitfield.hh"', file=f)
1644
1645 return f
1646
1647 # Weave together the parts of the different output sections by
1648 # #include'ing them into some very short top-level .cc/.hh files.
1649 # These small files make it much clearer how this tool works, since
1650 # you directly see the chunks emitted as files that are #include'd.
1651 def write_top_level_files(self):
1652 # decoder header - everything depends on this
1653 file = 'decoder.hh'
1654 with self.open(file) as f:
1655 fn = 'decoder-g.hh.inc'
1656 assert(fn in self.files)
1657 f.write('#include "%s"\n' % fn)
1658
1659 fn = 'decoder-ns.hh.inc'
1660 assert(fn in self.files)
1661 f.write('namespace %s {\n#include "%s"\n}\n'
1662 % (self.namespace, fn))
1663
1664 # decoder method - cannot be split
1665 file = 'decoder.cc'
1666 with self.open(file) as f:
1667 fn = 'base/compiler.hh'
1668 f.write('#include "%s"\n' % fn)
1669
1670 fn = 'decoder-g.cc.inc'
1671 assert(fn in self.files)
1672 f.write('#include "%s"\n' % fn)
1673
1674 fn = 'decoder.hh'
1675 f.write('#include "%s"\n' % fn)
1676
1677 fn = 'decode-method.cc.inc'
1678            # guaranteed to have been written by the time the parse completes
1679 f.write('#include "%s"\n' % fn)
1680
1681 extn = re.compile('(\.[^\.]+)$')
1682
1683 # instruction constructors
1684 splits = self.splits[self.get_file('decoder')]
1685 file_ = 'inst-constrs.cc'
1686 for i in range(1, splits+1):
1687 if splits > 1:
1688 file = extn.sub(r'-%d\1' % i, file_)
1689 else:
1690 file = file_
1691 with self.open(file) as f:
1692 fn = 'decoder-g.cc.inc'
1693 assert(fn in self.files)
1694 f.write('#include "%s"\n' % fn)
1695
1696 fn = 'decoder.hh'
1697 f.write('#include "%s"\n' % fn)
1698
1699 fn = 'decoder-ns.cc.inc'
1700 assert(fn in self.files)
1701 print('namespace %s {' % self.namespace, file=f)
1702 if splits > 1:
1703 print('#define __SPLIT %u' % i, file=f)
1704 print('#include "%s"' % fn, file=f)
1705 print('}', file=f)
1706
1707 # instruction execution
1708 splits = self.splits[self.get_file('exec')]
1709 for i in range(1, splits+1):
1710 file = 'generic_cpu_exec.cc'
1711 if splits > 1:
1712 file = extn.sub(r'_%d\1' % i, file)
1713 with self.open(file) as f:
1714 fn = 'exec-g.cc.inc'
1715 assert(fn in self.files)
1716 f.write('#include "%s"\n' % fn)
1717 f.write('#include "cpu/exec_context.hh"\n')
1718 f.write('#include "decoder.hh"\n')
1719
1720 fn = 'exec-ns.cc.inc'
1721 assert(fn in self.files)
1722 print('namespace %s {' % self.namespace, file=f)
1723 if splits > 1:
1724 print('#define __SPLIT %u' % i, file=f)
1725 print('#include "%s"' % fn, file=f)
1726 print('}', file=f)
1727
1728 # max_inst_regs.hh
1729 self.update('max_inst_regs.hh',
1730 '''namespace %(namespace)s {
1731 const int MaxInstSrcRegs = %(maxInstSrcRegs)d;
1732 const int MaxInstDestRegs = %(maxInstDestRegs)d;
1733 const int MaxMiscDestRegs = %(maxMiscDestRegs)d;\n}\n''' % self)
1734
1735 scaremonger_template ='''// DO NOT EDIT
1736// This file was automatically generated from an ISA description:
1737// %(filename)s
1738
1739''';
1740
1741 #####################################################################
1742 #
1743 # Lexer
1744 #
1745 # The PLY lexer module takes two things as input:
1746 # - A list of token names (the string list 'tokens')
1747 # - A regular expression describing a match for each token. The
1748 # regexp for token FOO can be provided in two ways:
1749 # - as a string variable named t_FOO
1750 # - as the doc string for a function named t_FOO. In this case,
1751 # the function is also executed, allowing an action to be
1752 # associated with each token match.
1753 #
1754 #####################################################################
1755
1756 # Reserved words. These are listed separately as they are matched
1757 # using the same regexp as generic IDs, but distinguished in the
1758 # t_ID() function. The PLY documentation suggests this approach.
1759 reserved = (
1760 'BITFIELD', 'DECODE', 'DECODER', 'DEFAULT', 'DEF', 'EXEC', 'FORMAT',
1761 'HEADER', 'LET', 'NAMESPACE', 'OPERAND_TYPES', 'OPERANDS',
1762 'OUTPUT', 'SIGNED', 'SPLIT', 'TEMPLATE'
1763 )
1764
1765 # List of tokens. The lex module requires this.
1766 tokens = reserved + (
1767 # identifier
1768 'ID',
1769
1770 # integer literal
1771 'INTLIT',
1772
1773 # string literal
1774 'STRLIT',
1775
1776 # code literal
1777 'CODELIT',
1778
1779        # ( ) [ ] { } < > = , ; . : :: *
1780 'LPAREN', 'RPAREN',
1781 'LBRACKET', 'RBRACKET',
1782 'LBRACE', 'RBRACE',
1783 'LESS', 'GREATER', 'EQUALS',
1784 'COMMA', 'SEMI', 'DOT', 'COLON', 'DBLCOLON',
1785 'ASTERISK',
1786
1787 # C preprocessor directives
1788 'CPPDIRECTIVE'
1789
1790        # The following are matched but never returned. Commented out to
1791        # suppress a PLY warning.
1792 # newfile directive
1793 # 'NEWFILE',
1794
1795 # endfile directive
1796 # 'ENDFILE'
1797 )
1798
1799 # Regular expressions for token matching
1800 t_LPAREN = r'\('
1801 t_RPAREN = r'\)'
1802 t_LBRACKET = r'\['
1803 t_RBRACKET = r'\]'
1804 t_LBRACE = r'\{'
1805 t_RBRACE = r'\}'
1806 t_LESS = r'\<'
1807 t_GREATER = r'\>'
1808 t_EQUALS = r'='
1809 t_COMMA = r','
1810 t_SEMI = r';'
1811 t_DOT = r'\.'
1812 t_COLON = r':'
1813 t_DBLCOLON = r'::'
1814 t_ASTERISK = r'\*'
1815
1816 # Identifiers and reserved words
1817 reserved_map = { }
1818 for r in reserved:
1819 reserved_map[r.lower()] = r
1820
1821 def t_ID(self, t):
1822 r'[A-Za-z_]\w*'
1823 t.type = self.reserved_map.get(t.value, 'ID')
1824 return t
1825
1826 # Integer literal
1827 def t_INTLIT(self, t):
1828 r'-?(0x[\da-fA-F]+)|\d+'
1829 try:
1830 t.value = int(t.value,0)
1831 except ValueError:
1832 error(t.lexer.lineno, 'Integer value "%s" too large' % t.value)
1833 t.value = 0
1834 return t
1835
1836 # String literal. Note that these use only single quotes, and
1837 # can span multiple lines.
1838 def t_STRLIT(self, t):
1839 r"(?m)'([^'])+'"
1840 # strip off quotes
1841 t.value = t.value[1:-1]
1842 t.lexer.lineno += t.value.count('\n')
1843 return t
1844
1845
1846 # "Code literal"... like a string literal, but delimiters are
1847 # '{{' and '}}' so they get formatted nicely under emacs c-mode
1848 def t_CODELIT(self, t):
1849 r"(?m)\{\{([^\}]|}(?!\}))+\}\}"
1850 # strip off {{ & }}
1851 t.value = t.value[2:-2]
1852 t.lexer.lineno += t.value.count('\n')
1853 return t
1854
1855 def t_CPPDIRECTIVE(self, t):
1856 r'^\#[^\#].*\n'
1857 t.lexer.lineno += t.value.count('\n')
1858 return t
1859
1860 def t_NEWFILE(self, t):
1861 r'^\#\#newfile\s+"[^"]*"\n'
1862 self.fileNameStack.push(t.lexer.lineno)
1863 t.lexer.lineno = LineTracker(t.value[11:-2])
1864
1865 def t_ENDFILE(self, t):
1866 r'^\#\#endfile\n'
1867 t.lexer.lineno = self.fileNameStack.pop()
1868
1869 #
1870 # The functions t_NEWLINE, t_ignore, and t_error are
1871 # special for the lex module.
1872 #
1873
1874 # Newlines
1875 def t_NEWLINE(self, t):
1876 r'\n+'
1877 t.lexer.lineno += t.value.count('\n')
1878
1879 # Comments
1880 def t_comment(self, t):
1881 r'//.*'
1882
1883 # Completely ignored characters
1884 t_ignore = ' \t\x0c'
1885
1886 # Error handler
1887 def t_error(self, t):
1888 error(t.lexer.lineno, "illegal character '%s'" % t.value[0])
1889 t.skip(1)
1890
1891 #####################################################################
1892 #
1893 # Parser
1894 #
1895 # Every function whose name starts with 'p_' defines a grammar
1896 # rule. The rule is encoded in the function's doc string, while
1897 # the function body provides the action taken when the rule is
1898 # matched. The argument to each function is a list of the values
1899 # of the rule's symbols: t[0] for the LHS, and t[1..n] for the
1900 # symbols on the RHS. For tokens, the value is copied from the
1901 # t.value attribute provided by the lexer. For non-terminals, the
1902 # value is assigned by the producing rule; i.e., the job of the
1903 # grammar rule function is to set the value for the non-terminal
1904 # on the LHS (by assigning to t[0]).
1905 #####################################################################
1906
1907 # The LHS of the first grammar rule is used as the start symbol
1908 # (in this case, 'specification'). Note that this rule enforces
1909 # that there will be exactly one namespace declaration, with 0 or
1910 # more global defs/decls before and after it. The defs & decls
1911 # before the namespace decl will be outside the namespace; those
1912 # after will be inside. The decoder function is always inside the
1913 # namespace.
1914 def p_specification(self, t):
1915 'specification : opt_defs_and_outputs top_level_decode_block'
1916
1917 for f in self.splits.iterkeys():
1918 f.write('\n#endif\n')
1919
1920 for f in self.files.itervalues(): # close ALL the files;
1921 f.close() # not doing so can cause compilation to fail
1922
1923 self.write_top_level_files()
1924
1925 t[0] = True
1926
1927 # 'opt_defs_and_outputs' is a possibly empty sequence of def and/or
1928 # output statements. Its productions do the hard work of eventually
1929    # instantiating GenCode objects, which are generally emitted (written to disk)
1930 # as soon as possible, except for the decode_block, which has to be
1931 # accumulated into one large function of nested switch/case blocks.
1932 def p_opt_defs_and_outputs_0(self, t):
1933 'opt_defs_and_outputs : empty'
1934
1935 def p_opt_defs_and_outputs_1(self, t):
1936 'opt_defs_and_outputs : defs_and_outputs'
1937
1938 def p_defs_and_outputs_0(self, t):
1939 'defs_and_outputs : def_or_output'
1940
1941 def p_defs_and_outputs_1(self, t):
1942 'defs_and_outputs : defs_and_outputs def_or_output'
1943
1944 # The list of possible definition/output statements.
1945 # They are all processed as they are seen.
1946 def p_def_or_output(self, t):
1947 '''def_or_output : name_decl
1948 | def_format
1949 | def_bitfield
1950 | def_bitfield_struct
1951 | def_template
1952 | def_operand_types
1953 | def_operands
1954 | output
1955 | global_let
1956 | split'''
1957
1958    # Utility function used by both ways of requesting a split: the explicit
1959    # 'split' keyword and the split() function inside "let {{ }};" blocks.
1960 def split(self, sec, write=False):
1961 assert(sec != 'header' and "header cannot be split")
1962
1963 f = self.get_file(sec)
1964 self.splits[f] += 1
1965 s = '\n#endif\n#if __SPLIT == %u\n' % self.splits[f]
1966 if write:
1967 f.write(s)
1968 else:
1969 return s
1970
1971 # split output file to reduce compilation time
1972 def p_split(self, t):
1973 'split : SPLIT output_type SEMI'
1974 assert(self.isa_name and "'split' not allowed before namespace decl")
1975
1976 self.split(t[2], True)
1977
1978 def p_output_type(self, t):
1979 '''output_type : DECODER
1980 | HEADER
1981 | EXEC'''
1982 t[0] = t[1]
1983
1984 # ISA name declaration looks like "namespace <foo>;"
1985 def p_name_decl(self, t):
1986 'name_decl : NAMESPACE ID SEMI'
1987 assert(self.isa_name == None and "Only 1 namespace decl permitted")
1988 self.isa_name = t[2]
1989 self.namespace = t[2] + 'Inst'
1990
1991 # Output blocks 'output <foo> {{...}}' (C++ code blocks) are copied
1992 # directly to the appropriate output section.
1993
1994 # Massage output block by substituting in template definitions and
1995 # bit operators. We handle '%'s embedded in the string that don't
1996 # indicate template substitutions by doubling them first so that the
1997 # format operation will reduce them back to single '%'s.
1998 def process_output(self, s):
1999 s = self.protectNonSubstPercents(s)
2000 return substBitOps(s % self.templateMap)
2001
2002 def p_output(self, t):
2003 'output : OUTPUT output_type CODELIT SEMI'
2004 kwargs = { t[2]+'_output' : self.process_output(t[3]) }
2005 GenCode(self, **kwargs).emit()
2006
2007 # global let blocks 'let {{...}}' (Python code blocks) are
2008 # executed directly when seen. Note that these execute in a
2009 # special variable context 'exportContext' to prevent the code
2010 # from polluting this script's namespace.
2011 def p_global_let(self, t):
2012 'global_let : LET CODELIT SEMI'
2013 def _split(sec):
2014 return self.split(sec)
2015 self.updateExportContext()
2016 self.exportContext["header_output"] = ''
2017 self.exportContext["decoder_output"] = ''
2018 self.exportContext["exec_output"] = ''
2019 self.exportContext["decode_block"] = ''
2020 self.exportContext["split"] = _split
2021 split_setup = '''
2022def wrap(func):
2023 def split(sec):
2024 globals()[sec + '_output'] += func(sec)
2025 return split
2026split = wrap(split)
2027del wrap
2028'''
2029 # This tricky setup (immediately above) allows us to just write
2030 # (e.g.) "split('exec')" in the Python code and the split #ifdef's
2031 # will automatically be added to the exec_output variable. The inner
2032 # Python execution environment doesn't know about the split points,
2033 # so we carefully inject and wrap a closure that can retrieve the
2034 # next split's #define from the parser and add it to the current
2035 # emission-in-progress.
2036 try:
2037 exec split_setup+fixPythonIndentation(t[2]) in self.exportContext
2038 except Exception, exc:
2039 traceback.print_exc(file=sys.stdout)
2040 if debug:
2041 raise
2042 error(t.lineno(1), 'In global let block: %s' % exc)
2043 GenCode(self,
2044 header_output=self.exportContext["header_output"],
2045 decoder_output=self.exportContext["decoder_output"],
2046 exec_output=self.exportContext["exec_output"],
2047 decode_block=self.exportContext["decode_block"]).emit()
2048
2049 # Define the mapping from operand type extensions to C++ types and
2050 # bit widths (stored in operandTypeMap).
2051 def p_def_operand_types(self, t):
2052 'def_operand_types : DEF OPERAND_TYPES CODELIT SEMI'
2053 try:
2054 self.operandTypeMap = eval('{' + t[3] + '}')
2055 except Exception, exc:
2056 if debug:
2057 raise
2058 error(t.lineno(1),
2059 'In def operand_types: %s' % exc)
2060
2061 # Define the mapping from operand names to operand classes and
2062 # other traits. Stored in operandNameMap.
2063 def p_def_operands(self, t):
2064 'def_operands : DEF OPERANDS CODELIT SEMI'
2065 if not hasattr(self, 'operandTypeMap'):
2066 error(t.lineno(1),
2067 'error: operand types must be defined before operands')
2068 try:
2069 user_dict = eval('{' + t[3] + '}', self.exportContext)
2070 except Exception, exc:
2071 if debug:
2072 raise
2073 error(t.lineno(1), 'In def operands: %s' % exc)
2074 self.buildOperandNameMap(user_dict, t.lexer.lineno)
2075
2076 # A bitfield definition looks like:
2077 # 'def [signed] bitfield <ID> [<first>:<last>]'
2078 # This generates a preprocessor macro in the output file.
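    # For example, 'def bitfield OPCODE <31:26>;' (hypothetical field name)
    # expands, roughly, to '#define OPCODE bits(machInst, 31, 26)'.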
2079 def p_def_bitfield_0(self, t):
2080 'def_bitfield : DEF opt_signed BITFIELD ID LESS INTLIT COLON INTLIT GREATER SEMI'
2081 expr = 'bits(machInst, %2d, %2d)' % (t[6], t[8])
2082 if (t[2] == 'signed'):
2083 expr = 'sext<%d>(%s)' % (t[6] - t[8] + 1, expr)
2084 hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
2085 GenCode(self, header_output=hash_define).emit()
2086
2087 # alternate form for single bit: 'def [signed] bitfield <ID> [<bit>]'
2088 def p_def_bitfield_1(self, t):
2089 'def_bitfield : DEF opt_signed BITFIELD ID LESS INTLIT GREATER SEMI'
2090 expr = 'bits(machInst, %2d, %2d)' % (t[6], t[6])
2091 if (t[2] == 'signed'):
2092 expr = 'sext<%d>(%s)' % (1, expr)
2093 hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
2094 GenCode(self, header_output=hash_define).emit()
2095
2096 # alternate form for structure member: 'def bitfield <ID> <ID>'
2097 def p_def_bitfield_struct(self, t):
2098 'def_bitfield_struct : DEF opt_signed BITFIELD ID id_with_dot SEMI'
2099 if (t[2] != ''):
2100 error(t.lineno(1),
2101 'error: structure bitfields are always unsigned.')
2102 expr = 'machInst.%s' % t[5]
2103 hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
2104 GenCode(self, header_output=hash_define).emit()
2105
2106 def p_id_with_dot_0(self, t):
2107 'id_with_dot : ID'
2108 t[0] = t[1]
2109
2110 def p_id_with_dot_1(self, t):
2111 'id_with_dot : ID DOT id_with_dot'
2112 t[0] = t[1] + t[2] + t[3]
2113
2114 def p_opt_signed_0(self, t):
2115 'opt_signed : SIGNED'
2116 t[0] = t[1]
2117
2118 def p_opt_signed_1(self, t):
2119 'opt_signed : empty'
2120 t[0] = ''
2121
2122 def p_def_template(self, t):
2123 'def_template : DEF TEMPLATE ID CODELIT SEMI'
2124 if t[3] in self.templateMap:
2125 print("warning: template %s already defined" % t[3])
2126 self.templateMap[t[3]] = Template(self, t[4])
2127
2128 # An instruction format definition looks like
2129 # "def format <fmt>(<params>) {{...}};"
2130 def p_def_format(self, t):
2131 'def_format : DEF FORMAT ID LPAREN param_list RPAREN CODELIT SEMI'
2132 (id, params, code) = (t[3], t[5], t[7])
2133 self.defFormat(id, params, code, t.lexer.lineno)
2134
2135 # The formal parameter list for an instruction format is a
2136 # possibly empty list of comma-separated parameters. Positional
2137 # (standard, non-keyword) parameters must come first, followed by
2138 # keyword parameters, followed by a '*foo' parameter that gets
2139 # excess positional arguments (as in Python). Each of these three
2140 # parameter categories is optional.
2141 #
2142 # Note that we do not support the '**foo' parameter for collecting
2143 # otherwise undefined keyword args. Otherwise the parameter list
2144 # is (I believe) identical to what is supported in Python.
2145 #
2146    # The param list production yields a flat list of parameter strings:
2147    # positional names, 'name = default' keyword entries, and the optional
2148    # '*foo' catch-all, in that order.
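    # For example, 'def format LoadStore(code, *opt_flags) {{...}};'
    # (hypothetical format) yields the parameter list
    # ['code', '*opt_flags'].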
2149 def p_param_list_0(self, t):
2150 'param_list : positional_param_list COMMA nonpositional_param_list'
2151 t[0] = t[1] + t[3]
2152
2153 def p_param_list_1(self, t):
2154 '''param_list : positional_param_list
2155 | nonpositional_param_list'''
2156 t[0] = t[1]
2157
2158 def p_positional_param_list_0(self, t):
2159 'positional_param_list : empty'
2160 t[0] = []
2161
2162 def p_positional_param_list_1(self, t):
2163 'positional_param_list : ID'
2164 t[0] = [t[1]]
2165
2166 def p_positional_param_list_2(self, t):
2167 'positional_param_list : positional_param_list COMMA ID'
2168 t[0] = t[1] + [t[3]]
2169
2170 def p_nonpositional_param_list_0(self, t):
2171 'nonpositional_param_list : keyword_param_list COMMA excess_args_param'
2172 t[0] = t[1] + t[3]
2173
2174 def p_nonpositional_param_list_1(self, t):
2175 '''nonpositional_param_list : keyword_param_list
2176 | excess_args_param'''
2177 t[0] = t[1]
2178
2179 def p_keyword_param_list_0(self, t):
2180 'keyword_param_list : keyword_param'
2181 t[0] = [t[1]]
2182
2183 def p_keyword_param_list_1(self, t):
2184 'keyword_param_list : keyword_param_list COMMA keyword_param'
2185 t[0] = t[1] + [t[3]]
2186
2187 def p_keyword_param(self, t):
2188 'keyword_param : ID EQUALS expr'
2189 t[0] = t[1] + ' = ' + t[3].__repr__()
2190
2191 def p_excess_args_param(self, t):
2192 'excess_args_param : ASTERISK ID'
2193 # Just concatenate them: '*ID'. Wrap in list to be consistent
2194 # with positional_param_list and keyword_param_list.
2195 t[0] = [t[1] + t[2]]
2196
2197 # End of format definition-related rules.
2198 ##############
2199
2200 #
2201 # A decode block looks like:
2202 # decode <field1> [, <field2>]* [default <inst>] { ... }
2203 #
2204 def p_top_level_decode_block(self, t):
2205 'top_level_decode_block : decode_block'
2206 codeObj = t[1]
2207 codeObj.wrap_decode_block('''
2208StaticInstPtr
2209%(isa_name)s::Decoder::decodeInst(%(isa_name)s::ExtMachInst machInst)
2210{
2211 using namespace %(namespace)s;
2212''' % self, '}')
2213
2214 codeObj.emit()
2215
2216 def p_decode_block(self, t):
2217 'decode_block : DECODE ID opt_default LBRACE decode_stmt_list RBRACE'
2218 default_defaults = self.defaultStack.pop()
2219 codeObj = t[5]
2220 # use the "default defaults" only if there was no explicit
2221 # default statement in decode_stmt_list
2222 if not codeObj.has_decode_default:
2223 codeObj += default_defaults
2224 codeObj.wrap_decode_block('switch (%s) {\n' % t[2], '}\n')
2225 t[0] = codeObj
2226
2227 # The opt_default statement serves only to push the "default
2228 # defaults" onto defaultStack. This value will be used by nested
2229 # decode blocks, and used and popped off when the current
2230 # decode_block is processed (in p_decode_block() above).
2231 def p_opt_default_0(self, t):
2232 'opt_default : empty'
2233 # no default specified: reuse the one currently at the top of
2234 # the stack
2235 self.defaultStack.push(self.defaultStack.top())
2236 # no meaningful value returned
2237 t[0] = None
2238
2239 def p_opt_default_1(self, t):
2240 'opt_default : DEFAULT inst'
2241 # push the new default
2242 codeObj = t[2]
2243 codeObj.wrap_decode_block('\ndefault:\n', 'break;\n')
2244 self.defaultStack.push(codeObj)
2245 # no meaningful value returned
2246 t[0] = None
2247
2248 def p_decode_stmt_list_0(self, t):
2249 'decode_stmt_list : decode_stmt'
2250 t[0] = t[1]
2251
2252 def p_decode_stmt_list_1(self, t):
2253 'decode_stmt_list : decode_stmt decode_stmt_list'
2254 if (t[1].has_decode_default and t[2].has_decode_default):
2255 error(t.lineno(1), 'Two default cases in decode block')
2256 t[0] = t[1] + t[2]
2257
2258 #
2259 # Decode statement rules
2260 #
2261 # There are four types of statements allowed in a decode block:
2262 # 1. Format blocks 'format <foo> { ... }'
2263 # 2. Nested decode blocks
2264 # 3. Instruction definitions.
2265 # 4. C preprocessor directives.
2266
2267
2268 # Preprocessor directives found in a decode statement list are
2269 # passed through to the output, replicated to all of the output
2270 # code streams. This works well for ifdefs, so we can ifdef out
2271 # both the declarations and the decode cases generated by an
2272 # instruction definition. Handling them as part of the grammar
2273 # makes it easy to keep them in the right place with respect to
2274 # the code generated by the other statements.
2275 def p_decode_stmt_cpp(self, t):
2276 'decode_stmt : CPPDIRECTIVE'
2277 t[0] = GenCode(self, t[1], t[1], t[1], t[1])
2278
2279 # A format block 'format <foo> { ... }' sets the default
2280 # instruction format used to handle instruction definitions inside
2281 # the block. This format can be overridden by using an explicit
2282 # format on the instruction definition or with a nested format
2283 # block.
2284 def p_decode_stmt_format(self, t):
2285 'decode_stmt : FORMAT push_format_id LBRACE decode_stmt_list RBRACE'
2286 # The format will be pushed on the stack when 'push_format_id'
2287 # is processed (see below). Once the parser has recognized
2288        # the full production (through the right brace), we're done
2289 # with the format, so now we can pop it.
2290 self.formatStack.pop()
2291 t[0] = t[4]
2292
2293 # This rule exists so we can set the current format (& push the
2294 # stack) when we recognize the format name part of the format
2295 # block.
2296 def p_push_format_id(self, t):
2297 'push_format_id : ID'
2298 try:
2299 self.formatStack.push(self.formatMap[t[1]])
2300 t[0] = ('', '// format %s' % t[1])
2301 except KeyError:
2302 error(t.lineno(1), 'instruction format "%s" not defined.' % t[1])
2303
2304 # Nested decode block: if the value of the current field matches
2305 # the specified constant(s), do a nested decode on some other field.
2306 def p_decode_stmt_decode(self, t):
2307 'decode_stmt : case_list COLON decode_block'
2308 case_list = t[1]
2309 codeObj = t[3]
2310 # just wrap the decoding code from the block as a case in the
2311 # outer switch statement.
2312 codeObj.wrap_decode_block('\n%s\n' % ''.join(case_list),
2313 'M5_UNREACHABLE;\n')
2314 codeObj.has_decode_default = (case_list == ['default:'])
2315 t[0] = codeObj
2316
2317 # Instruction definition (finally!).
2318 def p_decode_stmt_inst(self, t):
2319 'decode_stmt : case_list COLON inst SEMI'
2320 case_list = t[1]
2321 codeObj = t[3]
2322 codeObj.wrap_decode_block('\n%s' % ''.join(case_list), 'break;\n')
2323 codeObj.has_decode_default = (case_list == ['default:'])
2324 t[0] = codeObj
2325
2326 # The constant list for a decode case label must be non-empty, and must
2327 # either be the keyword 'default', or made up of one or more
2328 # comma-separated integer literals or strings which evaluate to
2329 # constants when compiled as C++.
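    # For example, a (hypothetical) label list '0x3, 0x7' yields the case
    # labels ['case 0x3: ', 'case 0x7: '].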
2330 def p_case_list_0(self, t):
2331 'case_list : DEFAULT'
2332 t[0] = ['default:']
2333
2334 def prep_int_lit_case_label(self, lit):
2335 if lit >= 2**32:
2336 return 'case ULL(%#x): ' % lit
2337 else:
2338 return 'case %#x: ' % lit
2339
2340 def prep_str_lit_case_label(self, lit):
2341 return 'case %s: ' % lit
2342
2343 def p_case_list_1(self, t):
2344 'case_list : INTLIT'
2345 t[0] = [self.prep_int_lit_case_label(t[1])]
2346
2347 def p_case_list_2(self, t):
2348 'case_list : STRLIT'
2349 t[0] = [self.prep_str_lit_case_label(t[1])]
2350
2351 def p_case_list_3(self, t):
2352 'case_list : case_list COMMA INTLIT'
2353 t[0] = t[1]
2354 t[0].append(self.prep_int_lit_case_label(t[3]))
2355
2356 def p_case_list_4(self, t):
2357 'case_list : case_list COMMA STRLIT'
2358 t[0] = t[1]
2359 t[0].append(self.prep_str_lit_case_label(t[3]))
2360
2361 # Define an instruction using the current instruction format
2362 # (specified by an enclosing format block).
2363 # "<mnemonic>(<args>)"
2364 def p_inst_0(self, t):
2365 'inst : ID LPAREN arg_list RPAREN'
2366 # Pass the ID and arg list to the current format class to deal with.
2367 currentFormat = self.formatStack.top()
2368 codeObj = currentFormat.defineInst(self, t[1], t[3], t.lexer.lineno)
2369 args = ','.join(map(str, t[3]))
2370 args = re.sub('(?m)^', '//', args)
2371 args = re.sub('^//', '', args)
2372 comment = '\n// %s::%s(%s)\n' % (currentFormat.id, t[1], args)
2373 codeObj.prepend_all(comment)
2374 t[0] = codeObj
2375
2376 # Define an instruction using an explicitly specified format:
2377 # "<fmt>::<mnemonic>(<args>)"
2378 def p_inst_1(self, t):
2379 'inst : ID DBLCOLON ID LPAREN arg_list RPAREN'
2380 try:
2381 format = self.formatMap[t[1]]
2382 except KeyError:
2383 error(t.lineno(1), 'instruction format "%s" not defined.' % t[1])
2384
2385 codeObj = format.defineInst(self, t[3], t[5], t.lexer.lineno)
2386 comment = '\n// %s::%s(%s)\n' % (t[1], t[3], t[5])
2387 codeObj.prepend_all(comment)
2388 t[0] = codeObj
2389
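    # Both spellings are accepted in a decode block; for example
    # (hypothetical mnemonics and formats):
    #
    #     0x23: lw({{ Rt = Mem_sw; }});
    #     0x2b: MemOp::sw({{ Mem_sw = Rt; }});
    #
    # The first form uses the format pushed by the enclosing format
    # block; the second looks 'MemOp' up directly in formatMap.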
2390 # The arg list generates a tuple, where the first element is a
2391 # list of the positional args and the second element is a dict
2392 # containing the keyword args.
2393 def p_arg_list_0(self, t):
2394 'arg_list : positional_arg_list COMMA keyword_arg_list'
2395 t[0] = ( t[1], t[3] )
2396
2397 def p_arg_list_1(self, t):
2398 'arg_list : positional_arg_list'
2399 t[0] = ( t[1], {} )
2400
2401 def p_arg_list_2(self, t):
2402 'arg_list : keyword_arg_list'
2403 t[0] = ( [], t[1] )
2404
2405 def p_positional_arg_list_0(self, t):
2406 'positional_arg_list : empty'
2407 t[0] = []
2408
2409 def p_positional_arg_list_1(self, t):
2410 'positional_arg_list : expr'
2411 t[0] = [t[1]]
2412
2413 def p_positional_arg_list_2(self, t):
2414 'positional_arg_list : positional_arg_list COMMA expr'
2415 t[0] = t[1] + [t[3]]
2416
2417 def p_keyword_arg_list_0(self, t):
2418 'keyword_arg_list : keyword_arg'
2419 t[0] = t[1]
2420
2421 def p_keyword_arg_list_1(self, t):
2422 'keyword_arg_list : keyword_arg_list COMMA keyword_arg'
2423 t[0] = t[1]
2424 t[0].update(t[3])
2425
2426 def p_keyword_arg(self, t):
2427 'keyword_arg : ID EQUALS expr'
2428 t[0] = { t[1] : t[3] }
2429
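    # For example, an instruction written as 'foo(Ra, Rb, imm=1)'
    # (hypothetical) produces the arg_list tuple
    # (['Ra', 'Rb'], {'imm': 1}), which defineInst() hands to the
    # format's code as its positional and keyword arguments.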
2430 #
2431 # Basic expressions. These constitute the argument values of
2432 # "function calls" (i.e. instruction definitions in the decode
2433 # block) and default values for formal parameters of format
2434 # functions.
2435 #
2436 # Right now, these are either strings, integers, or (recursively)
2437 # lists of exprs (using Python square-bracket list syntax). Note
2438 # that bare identifiers are treated as string constants here (since
2439 # there isn't really a variable namespace to refer to).
2440 #
2441 def p_expr_0(self, t):
2442 '''expr : ID
2443 | INTLIT
2444 | STRLIT
2445 | CODELIT'''
2446 t[0] = t[1]
2447
2448 def p_expr_1(self, t):
2449 '''expr : LBRACKET list_expr RBRACKET'''
2450 t[0] = t[2]
2451
2452 def p_list_expr_0(self, t):
2453 'list_expr : expr'
2454 t[0] = [t[1]]
2455
2456 def p_list_expr_1(self, t):
2457 'list_expr : list_expr COMMA expr'
2458 t[0] = t[1] + [t[3]]
2459
2460 def p_list_expr_2(self, t):
2461 'list_expr : empty'
2462 t[0] = []
2463
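    # So an argument written as [Ra, 'uw', 4] (hypothetical) parses to
    # the Python list ['Ra', 'uw', 4]: the bare identifier and the quoted
    # string both arrive as strings, and the integer stays an int.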
2464 #
2465 # Empty production... use in other rules for readability.
2466 #
2467 def p_empty(self, t):
2468 'empty :'
2469 pass
2470
2471 # Parse error handler. Note that the argument here is the
2472 # offending *token*, not a grammar symbol (hence the need to use
2473 # t.value)
2474 def p_error(self, t):
2475 if t:
2476 error(t.lexer.lineno, "syntax error at '%s'" % t.value)
2477 else:
2478 error("unknown syntax error")
2479
2480 # END OF GRAMMAR RULES
2481
2482 def updateExportContext(self):
2483
2484 # create a continuation that allows us to grab the current parser
2485 def wrapInstObjParams(*args):
2486 return InstObjParams(self, *args)
2487 self.exportContext['InstObjParams'] = wrapInstObjParams
2488 self.exportContext.update(self.templateMap)
2489
2490 def defFormat(self, id, params, code, lineno):
2491 '''Define a new format'''
2492
2493 # make sure we haven't already defined this one
2494 if id in self.formatMap:
2495 error(lineno, 'format %s redefined.' % id)
2496
2497 # create new object and store in global map
2498 self.formatMap[id] = Format(id, params, code)
2499
2500 def protectNonSubstPercents(self, s):
2501 '''Protect any non-dict-substitution '%'s in a format string
2502 (i.e. those not followed by '(')'''
2503
2504 return re.sub(r'%(?!\()', '%%', s)
2505
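    # For example, protectNonSubstPercents('and %0, %0; %(code)s') returns
    # 'and %%0, %%0; %(code)s': bare '%'s are doubled so a later
    # dict-substitution pass leaves them alone, while '%(...)s' references
    # still get substituted.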
2506 def buildOperandNameMap(self, user_dict, lineno):
2507 operand_name = {}
2508 for op_name, val in user_dict.iteritems():
2509
2510 # Check if extra attributes have been specified.
2511 if len(val) > 9:
2512 error(lineno, 'error: too many attributes for operand "%s"' %
2513 op_name)
2514
2515 # Pad val with None in case optional args are missing
2516 val += (None, None, None, None)
2517 base_cls_name, dflt_ext, reg_spec, flags, sort_pri, \
2518 read_code, write_code, read_predicate, write_predicate = val[:9]
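            # A minimal user_dict entry looks something like (names are
            # hypothetical):
            #     'Ra': ('IntReg', 'uq', 'RA', 'IsInteger', 1)
            # i.e. only the first five fields are required; the read/write
            # code and predicate fields are optional and default to None
            # via the padding above.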
2519
2520 # Canonical flag structure is a triple of lists, where each list
2521 # indicates the set of flags implied by this operand always, when
2522 # used as a source, and when used as a dest, respectively.
2523 # For simplicity this can be initialized using a variety of fairly
2524 # obvious shortcuts; we convert these to canonical form here.
2525 if not flags:
2526 # no flags specified (e.g., 'None')
2527 flags = ( [], [], [] )
2528 elif isinstance(flags, str):
2529 # a single flag: assumed to be unconditional
2530 flags = ( [ flags ], [], [] )
2531 elif isinstance(flags, list):
2532 # a list of flags: also assumed to be unconditional
2533 flags = ( flags, [], [] )
2534 elif isinstance(flags, tuple):
2535 # it's a tuple: it should be a triple,
2536 # but each item could be a single string or a list
2537 (uncond_flags, src_flags, dest_flags) = flags
2538 flags = (makeList(uncond_flags),
2539 makeList(src_flags), makeList(dest_flags))
2540
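            # For example, the shorthand 'IsInteger' becomes
            # (['IsInteger'], [], []), and the triple
            # ('IsInteger', 'IsLoad', 'IsStore') becomes
            # (['IsInteger'], ['IsLoad'], ['IsStore']): always-implied
            # flags first, then source-only, then dest-only.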
2541 # Accumulate attributes of new operand class in tmp_dict
2542 tmp_dict = {}
2543 attrList = ['reg_spec', 'flags', 'sort_pri',
2544 'read_code', 'write_code',
2545 'read_predicate', 'write_predicate']
2546 if dflt_ext:
2547 dflt_ctype = self.operandTypeMap[dflt_ext]
2548 attrList.extend(['dflt_ctype', 'dflt_ext'])
2549 # reg_spec is either just a string or a dictionary
2550 # (for elems of vector)
2551 if isinstance(reg_spec, tuple):
2552 (reg_spec, elem_spec) = reg_spec
2553 if isinstance(elem_spec, str):
2554 attrList.append('elem_spec')
2555 else:
2556 assert(isinstance(elem_spec, dict))
2557 elems = elem_spec
2558 attrList.append('elems')
2559 for attr in attrList:
2560 tmp_dict[attr] = eval(attr)
2561 tmp_dict['base_name'] = op_name
2562
2563 # New class name will be e.g. "IntReg_Ra"
2564 cls_name = base_cls_name + '_' + op_name
2565 # Evaluate string arg to get class object. Note that the
2566 # actual base class for "IntReg" is "IntRegOperand", i.e. we
2567 # have to append "Operand".
2568 try:
2569 base_cls = eval(base_cls_name + 'Operand')
2570 except NameError:
2571 error(lineno,
2572 'error: unknown operand base class "%s"' % base_cls_name)
2573 # The following statement creates a new class called
2574 # <cls_name> as a subclass of <base_cls> with the attributes
2575 # in tmp_dict, just as if we evaluated a class declaration.
2576 operand_name[op_name] = type(cls_name, (base_cls,), tmp_dict)
2577
2578 self.operandNameMap = operand_name
2579
2580 # Define operand variables.
2581 operands = user_dict.keys()
2582 # Add the elems defined in the vector operands and
2583 # build a map elem -> vector (used in OperandList)
2584 elem_to_vec = {}
2585 for op in user_dict.keys():
2586 if hasattr(self.operandNameMap[op], 'elems'):
2587 for elem in self.operandNameMap[op].elems.keys():
2588 operands.append(elem)
2589 elem_to_vec[elem] = op
2590 self.elemToVector = elem_to_vec
2591 extensions = self.operandTypeMap.keys()
2592
2593 operandsREString = r'''
2594 (?<!\w) # neg. lookbehind assertion: prevent partial matches
2595 ((%s)(?:_(%s))?) # match: operand with optional '_' then suffix
2596 (?!\w) # neg. lookahead assertion: prevent partial matches
2597 ''' % (string.join(operands, '|'), string.join(extensions, '|'))
2598
2599 self.operandsRE = re.compile(operandsREString, re.MULTILINE|re.VERBOSE)
2600
2601 # Same as operandsREString, but extension is mandatory, and only two
2602 # groups are returned (base and ext, not full name as above).
2603 # Used by substMungedOpNames to strip the type extension and form legal C++ identifiers.
2604 operandsWithExtREString = r'(?<!\w)(%s)_(%s)(?!\w)' \
2605 % (string.join(operands, '|'), string.join(extensions, '|'))
2606
2607 self.operandsWithExtRE = \
2608 re.compile(operandsWithExtREString, re.MULTILINE)
2609
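        # For example, with operands 'Ra' and 'Rb' and extensions 'sw'
        # and 'ud' (hypothetical), operandsRE matches 'Ra' and 'Rb_ud' in
        # a code snippet but not 'Rax', while operandsWithExtRE matches
        # only the suffixed form and captures ('Rb', 'ud').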
2610 def substMungedOpNames(self, code):
2611 '''Munge operand names in code string to make legal C++
2612 variable names. This means getting rid of the type extension
2613 if any, so the result matches the base_name attribute of the Operand object.'''
2614 return self.operandsWithExtRE.sub(r'\1', code)
2615
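    # For example, assuming 'Mem' and 'Ra' are defined operands, the
    # snippet 'Mem_uw = Ra_sw;' becomes 'Mem = Ra;', since the generated
    # C++ declares the operand variables without the type extension.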
2616 def mungeSnippet(self, s):
2617 '''Fix up code snippets for final substitution in templates.'''
2618 if isinstance(s, str):
2619 return self.substMungedOpNames(substBitOps(s))
2620 else:
2621 return s
2622
2623 def open(self, name, bare=False):
2624 '''Open the output file for writing and include scary warning.'''
2625 filename = os.path.join(self.output_dir, name)
2626 f = open(filename, 'w')
2627 if f:
2628 if not bare:
2629 f.write(ISAParser.scaremonger_template % self)
2630 return f
2631
2632 def update(self, file, contents):
2633 '''Write the new contents to the output file unconditionally; SCons's
2634 built-in content hashing handles the case where the contents are unchanged.'''
2635 f = self.open(file)
2636 f.write(contents)
2637 f.close()
2638
2639 # This regular expression matches '##include' directives
2640 includeRE = re.compile(r'^\s*##include\s+"(?P<filename>[^"]*)".*$',
2641 re.MULTILINE)
2642
2643 def replace_include(self, matchobj, dirname):
2644 """Function to replace a matched '##include' directive with the
2645 contents of the specified file (with nested ##includes
2646 replaced recursively). 'matchobj' is an re match object
2647 (from a match of includeRE) and 'dirname' is the directory
2648 relative to which the file path should be resolved."""
2649
2650 fname = matchobj.group('filename')
2651 full_fname = os.path.normpath(os.path.join(dirname, fname))
2652 contents = '##newfile "%s"\n%s\n##endfile\n' % \
2653 (full_fname, self.read_and_flatten(full_fname))
2654 return contents
2655
2656 def read_and_flatten(self, filename):
2657 """Read a file and recursively flatten nested '##include' files."""
2658
2659 current_dir = os.path.dirname(filename)
2660 try:
2661 contents = open(filename).read()
2662 except IOError:
2663 error('Error including file "%s"' % filename)
2664
2665 self.fileNameStack.push(LineTracker(filename))
2666
2667 # Find any includes and include them
2668 def replace(matchobj):
2669 return self.replace_include(matchobj, current_dir)
2670 contents = self.includeRE.sub(replace, contents)
2671
2672 self.fileNameStack.pop()
2673 return contents
2674
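    # For example, if arch/foo/isa/main.isa (hypothetical path) contains
    #     ##include "decoder.isa"
    # the line is replaced in place with
    #     ##newfile "arch/foo/isa/decoder.isa"
    #     ...flattened contents of decoder.isa...
    #     ##endfile
    # so PLY sees a single flat string while errors can still be traced
    # back to the original file and line.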
2675 AlreadyGenerated = {}
2676
2677 def _parse_isa_desc(self, isa_desc_file):
2678 '''Read in and parse the ISA description.'''
2679
2680 # The build system can end up running the ISA parser twice: once to
2681 # finalize the build dependencies, and then to actually generate
2682 # the files it expects (in src/arch/$ARCH/generated). The parser does
2683 # exactly the same work both times; only the surrounding SCons
2684 # invocations expect different things. Since both runs happen within
2685 # the same SCons process, we simply remember that we have already run
2686 # and skip the second, redundant pass -- the ISA parser's effect is
2687 # idempotent.
2688 if isa_desc_file in ISAParser.AlreadyGenerated:
2689 return
2690
2691 # grab the last three path components of isa_desc_file
2692 self.filename = '/'.join(isa_desc_file.split('/')[-3:])
2693
2694 # Read file and (recursively) all included files into a string.
2695 # PLY requires that the input be in a single string so we have to
2696 # do this up front.
2697 isa_desc = self.read_and_flatten(isa_desc_file)
2698
2699 # Initialize lineno tracker
2700 self.lex.lineno = LineTracker(isa_desc_file)
2701
2702 # Parse.
2703 self.parse_string(isa_desc)
2704
2705 ISAParser.AlreadyGenerated[isa_desc_file] = None
2706
2707 def parse_isa_desc(self, *args, **kwargs):
2708 try:
2709 self._parse_isa_desc(*args, **kwargs)
2710 except ISAParserError as e:
2711 print(backtrace(self.fileNameStack))
2712 print("At %s:" % e.lineno)
2713 print(e)
2714 sys.exit(1)
2715
2716# Called as script: get args from command line.
2717# Args are: <isa desc file> <output dir>
2718if __name__ == '__main__':
2719 ISAParser(sys.argv[2]).parse_isa_desc(sys.argv[1])
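# A typical invocation looks something like (paths are hypothetical):
#
#     python isa_parser.py src/arch/foo/isa/main.isa \
#         build/FOO/arch/foo/generated
#
# i.e. argv[1] is the top-level ISA description and argv[2] is the
# directory where the generated decoder and instruction files go.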