1# Copyright (c) 2014, 2016 ARM Limited 2# All rights reserved 3# 4# The license below extends only to copyright in the software and shall 5# not be construed as granting a license to any other intellectual 6# property including but not limited to intellectual property relating 7# to a hardware implementation of the functionality of the software 8# licensed hereunder. You may use the software subject to the license 9# terms below provided that you ensure that this notice is replicated 10# unmodified and in its entirety in all distributions of the software, 11# modified or unmodified, in source code or in binary form. 12# 13# Copyright (c) 2003-2005 The Regents of The University of Michigan 14# Copyright (c) 2013,2015 Advanced Micro Devices, Inc. 15# All rights reserved. 16# 17# Redistribution and use in source and binary forms, with or without 18# modification, are permitted provided that the following conditions are 19# met: redistributions of source code must retain the above copyright 20# notice, this list of conditions and the following disclaimer; 21# redistributions in binary form must reproduce the above copyright 22# notice, this list of conditions and the following disclaimer in the 23# documentation and/or other materials provided with the distribution; 24# neither the name of the copyright holders nor the names of its 25# contributors may be used to endorse or promote products derived from 26# this software without specific prior written permission. 27# 28# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 29# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 30# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 31# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT 32# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 33# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 34# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 35# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 36# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 37# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 38# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 39# 40# Authors: Steve Reinhardt 41 42from __future__ import with_statement 43import os 44import sys 45import re 46import string 47import inspect, traceback 48# get type names 49from types import * 50 51from m5.util.grammar import Grammar 52 53debug=False 54 55################### 56# Utility functions 57 58# 59# Indent every line in string 's' by two spaces 60# (except preprocessor directives). 61# Used to make nested code blocks look pretty. 62# 63def indent(s): 64 return re.sub(r'(?m)^(?!#)', ' ', s) 65 66# 67# Munge a somewhat arbitrarily formatted piece of Python code 68# (e.g. from a format 'let' block) into something whose indentation 69# will get by the Python parser. 70# 71# The two keys here are that Python will give a syntax error if 72# there's any whitespace at the beginning of the first line, and that 73# all lines at the same lexical nesting level must have identical 74# indentation. Unfortunately the way code literals work, an entire 75# let block tends to have some initial indentation. Rather than 76# trying to figure out what that is and strip it off, we prepend 'if 77# 1:' to make the let code the nested block inside the if (and have 78# the parser automatically deal with the indentation for us). 79# 80# We don't want to do this if (1) the code block is empty or (2) the 81# first line of the block doesn't have any whitespace at the front. 
def fixPythonIndentation(s):
    """Munge a code block so its indentation passes the Python parser.

    Blank lines are stripped first; then, if the first remaining line
    is indented, the whole block is nested under an 'if 1:' so the
    parser deals with the leading indentation for us (see the comment
    above for the full rationale).
    """
    # get rid of blank lines first
    s = re.sub(r'(?m)^\s*\n', '', s)
    # Only wrap when the block is non-empty and actually starts with
    # whitespace; an unindented block is already parseable as-is.
    if s != '' and re.match(r'[ \t]', s[0]):
        s = 'if 1:\n' + s
    return s

class ISAParserError(Exception):
    """Exception class for parser errors.

    May be constructed with either a message alone, or a
    (lineno, message) pair; in the former case lineno defaults to 0.
    """
    def __init__(self, first, second=None):
        if second is None:
            self.lineno = 0
            self.string = first
        else:
            self.lineno = first
            self.string = second

    def __str__(self):
        return self.string

def error(*args):
    """Convenience wrapper: raise ISAParserError(*args)."""
    raise ISAParserError(*args)

####################
# Template objects.
#
# Template objects are format strings that allow substitution from
# the attribute spaces of other objects (e.g. InstObjParams instances).

# Matches a Python dict-substitution label such as '%(foo)s' or
# '%(foo)d', while skipping escaped '%%(...)' sequences.
labelRE = re.compile(r'(?<!%)%\(([^\)]+)\)[sd]')

class Template(object):
    def __init__(self, parser, t):
        # parser: the owning ISAParser; t: the raw template string.
        self.parser = parser
        self.template = t

    def subst(self, d):
        myDict = None

        # Protect non-Python-dict substitutions (e.g. if there's a printf
        # in the templated C++ code)
        template = self.parser.protectNonSubstPercents(self.template)
| 1# Copyright (c) 2014, 2016 ARM Limited 2# All rights reserved 3# 4# The license below extends only to copyright in the software and shall 5# not be construed as granting a license to any other intellectual 6# property including but not limited to intellectual property relating 7# to a hardware implementation of the functionality of the software 8# licensed hereunder. You may use the software subject to the license 9# terms below provided that you ensure that this notice is replicated 10# unmodified and in its entirety in all distributions of the software, 11# modified or unmodified, in source code or in binary form. 12# 13# Copyright (c) 2003-2005 The Regents of The University of Michigan 14# Copyright (c) 2013,2015 Advanced Micro Devices, Inc. 15# All rights reserved. 16# 17# Redistribution and use in source and binary forms, with or without 18# modification, are permitted provided that the following conditions are 19# met: redistributions of source code must retain the above copyright 20# notice, this list of conditions and the following disclaimer; 21# redistributions in binary form must reproduce the above copyright 22# notice, this list of conditions and the following disclaimer in the 23# documentation and/or other materials provided with the distribution; 24# neither the name of the copyright holders nor the names of its 25# contributors may be used to endorse or promote products derived from 26# this software without specific prior written permission. 27# 28# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 29# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 30# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 31# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT 32# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 33# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 34# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 35# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 36# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 37# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 38# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 39# 40# Authors: Steve Reinhardt 41 42from __future__ import with_statement 43import os 44import sys 45import re 46import string 47import inspect, traceback 48# get type names 49from types import * 50 51from m5.util.grammar import Grammar 52 53debug=False 54 55################### 56# Utility functions 57 58# 59# Indent every line in string 's' by two spaces 60# (except preprocessor directives). 61# Used to make nested code blocks look pretty. 62# 63def indent(s): 64 return re.sub(r'(?m)^(?!#)', ' ', s) 65 66# 67# Munge a somewhat arbitrarily formatted piece of Python code 68# (e.g. from a format 'let' block) into something whose indentation 69# will get by the Python parser. 70# 71# The two keys here are that Python will give a syntax error if 72# there's any whitespace at the beginning of the first line, and that 73# all lines at the same lexical nesting level must have identical 74# indentation. Unfortunately the way code literals work, an entire 75# let block tends to have some initial indentation. Rather than 76# trying to figure out what that is and strip it off, we prepend 'if 77# 1:' to make the let code the nested block inside the if (and have 78# the parser automatically deal with the indentation for us). 79# 80# We don't want to do this if (1) the code block is empty or (2) the 81# first line of the block doesn't have any whitespace at the front. 
82 83def fixPythonIndentation(s): 84 # get rid of blank lines first 85 s = re.sub(r'(?m)^\s*\n', '', s); 86 if (s != '' and re.match(r'[ \t]', s[0])): 87 s = 'if 1:\n' + s 88 return s 89 90class ISAParserError(Exception): 91 """Exception class for parser errors""" 92 def __init__(self, first, second=None): 93 if second is None: 94 self.lineno = 0 95 self.string = first 96 else: 97 self.lineno = first 98 self.string = second 99 100 def __str__(self): 101 return self.string 102 103def error(*args): 104 raise ISAParserError(*args) 105 106#################### 107# Template objects. 108# 109# Template objects are format strings that allow substitution from 110# the attribute spaces of other objects (e.g. InstObjParams instances). 111 112labelRE = re.compile(r'(?<!%)%\(([^\)]+)\)[sd]') 113 114class Template(object): 115 def __init__(self, parser, t): 116 self.parser = parser 117 self.template = t 118 119 def subst(self, d): 120 myDict = None 121 122 # Protect non-Python-dict substitutions (e.g. if there's a printf 123 # in the templated C++ code) 124 template = self.parser.protectNonSubstPercents(self.template)
|
125 # CPU-model-specific substitutions are handled later (in GenCode). 126 template = self.parser.protectCpuSymbols(template)
| |
127 128 # Build a dict ('myDict') to use for the template substitution. 129 # Start with the template namespace. Make a copy since we're 130 # going to modify it. 131 myDict = self.parser.templateMap.copy() 132 133 if isinstance(d, InstObjParams): 134 # If we're dealing with an InstObjParams object, we need 135 # to be a little more sophisticated. The instruction-wide 136 # parameters are already formed, but the parameters which 137 # are only function wide still need to be generated. 138 compositeCode = '' 139 140 myDict.update(d.__dict__) 141 # The "operands" and "snippets" attributes of the InstObjParams 142 # objects are for internal use and not substitution. 143 del myDict['operands'] 144 del myDict['snippets'] 145 146 snippetLabels = [l for l in labelRE.findall(template) 147 if d.snippets.has_key(l)] 148 149 snippets = dict([(s, self.parser.mungeSnippet(d.snippets[s])) 150 for s in snippetLabels]) 151 152 myDict.update(snippets) 153 154 compositeCode = ' '.join(map(str, snippets.values())) 155 156 # Add in template itself in case it references any 157 # operands explicitly (like Mem) 158 compositeCode += ' ' + template 159 160 operands = SubOperandList(self.parser, compositeCode, d.operands) 161 162 myDict['op_decl'] = operands.concatAttrStrings('op_decl') 163 if operands.readPC or operands.setPC: 164 myDict['op_decl'] += 'TheISA::PCState __parserAutoPCState;\n' 165 166 # In case there are predicated register reads and write, declare 167 # the variables for register indicies. It is being assumed that 168 # all the operands in the OperandList are also in the 169 # SubOperandList and in the same order. Otherwise, it is 170 # expected that predication would not be used for the operands. 
171 if operands.predRead: 172 myDict['op_decl'] += 'uint8_t _sourceIndex = 0;\n' 173 if operands.predWrite: 174 myDict['op_decl'] += 'uint8_t M5_VAR_USED _destIndex = 0;\n' 175 176 is_src = lambda op: op.is_src 177 is_dest = lambda op: op.is_dest 178 179 myDict['op_src_decl'] = \ 180 operands.concatSomeAttrStrings(is_src, 'op_src_decl') 181 myDict['op_dest_decl'] = \ 182 operands.concatSomeAttrStrings(is_dest, 'op_dest_decl') 183 if operands.readPC: 184 myDict['op_src_decl'] += \ 185 'TheISA::PCState __parserAutoPCState;\n' 186 if operands.setPC: 187 myDict['op_dest_decl'] += \ 188 'TheISA::PCState __parserAutoPCState;\n' 189 190 myDict['op_rd'] = operands.concatAttrStrings('op_rd') 191 if operands.readPC: 192 myDict['op_rd'] = '__parserAutoPCState = xc->pcState();\n' + \ 193 myDict['op_rd'] 194 195 # Compose the op_wb string. If we're going to write back the 196 # PC state because we changed some of its elements, we'll need to 197 # do that as early as possible. That allows later uncoordinated 198 # modifications to the PC to layer appropriately. 199 reordered = list(operands.items) 200 reordered.reverse() 201 op_wb_str = '' 202 pcWbStr = 'xc->pcState(__parserAutoPCState);\n' 203 for op_desc in reordered: 204 if op_desc.isPCPart() and op_desc.is_dest: 205 op_wb_str = op_desc.op_wb + pcWbStr + op_wb_str 206 pcWbStr = '' 207 else: 208 op_wb_str = op_desc.op_wb + op_wb_str 209 myDict['op_wb'] = op_wb_str 210 211 elif isinstance(d, dict): 212 # if the argument is a dictionary, we just use it. 213 myDict.update(d) 214 elif hasattr(d, '__dict__'): 215 # if the argument is an object, we use its attribute map. 216 myDict.update(d.__dict__) 217 else: 218 raise TypeError, "Template.subst() arg must be or have dictionary" 219 return template % myDict 220
| 125 126 # Build a dict ('myDict') to use for the template substitution. 127 # Start with the template namespace. Make a copy since we're 128 # going to modify it. 129 myDict = self.parser.templateMap.copy() 130 131 if isinstance(d, InstObjParams): 132 # If we're dealing with an InstObjParams object, we need 133 # to be a little more sophisticated. The instruction-wide 134 # parameters are already formed, but the parameters which 135 # are only function wide still need to be generated. 136 compositeCode = '' 137 138 myDict.update(d.__dict__) 139 # The "operands" and "snippets" attributes of the InstObjParams 140 # objects are for internal use and not substitution. 141 del myDict['operands'] 142 del myDict['snippets'] 143 144 snippetLabels = [l for l in labelRE.findall(template) 145 if d.snippets.has_key(l)] 146 147 snippets = dict([(s, self.parser.mungeSnippet(d.snippets[s])) 148 for s in snippetLabels]) 149 150 myDict.update(snippets) 151 152 compositeCode = ' '.join(map(str, snippets.values())) 153 154 # Add in template itself in case it references any 155 # operands explicitly (like Mem) 156 compositeCode += ' ' + template 157 158 operands = SubOperandList(self.parser, compositeCode, d.operands) 159 160 myDict['op_decl'] = operands.concatAttrStrings('op_decl') 161 if operands.readPC or operands.setPC: 162 myDict['op_decl'] += 'TheISA::PCState __parserAutoPCState;\n' 163 164 # In case there are predicated register reads and write, declare 165 # the variables for register indicies. It is being assumed that 166 # all the operands in the OperandList are also in the 167 # SubOperandList and in the same order. Otherwise, it is 168 # expected that predication would not be used for the operands. 
169 if operands.predRead: 170 myDict['op_decl'] += 'uint8_t _sourceIndex = 0;\n' 171 if operands.predWrite: 172 myDict['op_decl'] += 'uint8_t M5_VAR_USED _destIndex = 0;\n' 173 174 is_src = lambda op: op.is_src 175 is_dest = lambda op: op.is_dest 176 177 myDict['op_src_decl'] = \ 178 operands.concatSomeAttrStrings(is_src, 'op_src_decl') 179 myDict['op_dest_decl'] = \ 180 operands.concatSomeAttrStrings(is_dest, 'op_dest_decl') 181 if operands.readPC: 182 myDict['op_src_decl'] += \ 183 'TheISA::PCState __parserAutoPCState;\n' 184 if operands.setPC: 185 myDict['op_dest_decl'] += \ 186 'TheISA::PCState __parserAutoPCState;\n' 187 188 myDict['op_rd'] = operands.concatAttrStrings('op_rd') 189 if operands.readPC: 190 myDict['op_rd'] = '__parserAutoPCState = xc->pcState();\n' + \ 191 myDict['op_rd'] 192 193 # Compose the op_wb string. If we're going to write back the 194 # PC state because we changed some of its elements, we'll need to 195 # do that as early as possible. That allows later uncoordinated 196 # modifications to the PC to layer appropriately. 197 reordered = list(operands.items) 198 reordered.reverse() 199 op_wb_str = '' 200 pcWbStr = 'xc->pcState(__parserAutoPCState);\n' 201 for op_desc in reordered: 202 if op_desc.isPCPart() and op_desc.is_dest: 203 op_wb_str = op_desc.op_wb + pcWbStr + op_wb_str 204 pcWbStr = '' 205 else: 206 op_wb_str = op_desc.op_wb + op_wb_str 207 myDict['op_wb'] = op_wb_str 208 209 elif isinstance(d, dict): 210 # if the argument is a dictionary, we just use it. 211 myDict.update(d) 212 elif hasattr(d, '__dict__'): 213 # if the argument is an object, we use its attribute map. 214 myDict.update(d.__dict__) 215 else: 216 raise TypeError, "Template.subst() arg must be or have dictionary" 217 return template % myDict 218
|
221 # Convert to string. This handles the case when a template with a 222 # CPU-specific term gets interpolated into another template or into 223 # an output block.
| 219 # Convert to string.
|
224 def __str__(self):
| 220 def __str__(self):
|
225 return self.parser.expandCpuSymbolsToString(self.template)
| 221 return self.template
|
################
# Format object.
#
# A format object encapsulates an instruction format.  It must provide
# a defineInst() method that generates the code for an instruction
# definition.

class Format(object):
    def __init__(self, id, params, code):
        # Format name, e.g. as given in 'def format Foo(...)'.
        self.id = id
        # Names of the format's formal parameters.
        self.params = params
        label = 'def format ' + id
        # Body of the 'def format' block, compiled once up front.
        self.user_code = compile(fixPythonIndentation(code), label, 'exec')
        param_list = string.join(params, ", ")
        # Build a wrapper function that binds a particular instruction
        # definition's arguments to the format parameters, runs the user
        # code, and returns the resulting local namespace.
        f = '''def defInst(_code, _context, %s):
    my_locals = vars().copy()
    exec _code in _context, my_locals
    return my_locals\n''' % param_list
        c = compile(f, label + ' wrapper', 'exec')
        exec c
        self.func = defInst

    def defineInst(self, parser, name, args, lineno):
        # Generate the GenCode for one instruction defined with this
        # format.  'args' is an (positional-tuple, keyword-dict) pair.
        parser.updateExportContext()
        context = parser.exportContext.copy()
        if len(name):
            # 'Name' is the capitalized mnemonic, conventionally used
            # for the generated C++ class name.
            # NOTE(review): if 'name' is empty, 'Name' is left unbound
            # here and the update below would raise -- presumably names
            # are always non-empty; confirm against callers.
            Name = name[0].upper()
            if len(name) > 1:
                Name += name[1:]
        context.update({ 'name' : name, 'Name' : Name })
        try:
            vars = self.func(self.user_code, context, *args[0], **args[1])
        except Exception, exc:
            if debug:
                raise
            error(lineno, 'error defining "%s": %s.' % (name, exc))
        # Keep only the four recognized output sections; anything else
        # the user code defined is discarded.
        for k in vars.keys():
            if k not in ('header_output', 'decoder_output',
                         'exec_output', 'decode_block'):
                del vars[k]
        return GenCode(parser, **vars)

# Special null format to catch an implicit-format instruction
# definition outside of any format block.
class NoFormat(object):
    def __init__(self):
        self.defaultInst = ''

    def defineInst(self, parser, name, args, lineno):
        # Always an error: there is no active format to expand.
        error(lineno,
              'instruction definition "%s" with no active format!' % name)

###############
# GenCode class
#
# The GenCode class encapsulates generated code destined for various
# output files.  The header_output and decoder_output attributes are
# strings containing code destined for decoder.hh and decoder.cc
# respectively.  The decode_block attribute contains code to be
# incorporated in the decode function itself (that will also end up in
| 222 223################ 224# Format object. 225# 226# A format object encapsulates an instruction format. It must provide 227# a defineInst() method that generates the code for an instruction 228# definition. 229 230class Format(object): 231 def __init__(self, id, params, code): 232 self.id = id 233 self.params = params 234 label = 'def format ' + id 235 self.user_code = compile(fixPythonIndentation(code), label, 'exec') 236 param_list = string.join(params, ", ") 237 f = '''def defInst(_code, _context, %s): 238 my_locals = vars().copy() 239 exec _code in _context, my_locals 240 return my_locals\n''' % param_list 241 c = compile(f, label + ' wrapper', 'exec') 242 exec c 243 self.func = defInst 244 245 def defineInst(self, parser, name, args, lineno): 246 parser.updateExportContext() 247 context = parser.exportContext.copy() 248 if len(name): 249 Name = name[0].upper() 250 if len(name) > 1: 251 Name += name[1:] 252 context.update({ 'name' : name, 'Name' : Name }) 253 try: 254 vars = self.func(self.user_code, context, *args[0], **args[1]) 255 except Exception, exc: 256 if debug: 257 raise 258 error(lineno, 'error defining "%s": %s.' % (name, exc)) 259 for k in vars.keys(): 260 if k not in ('header_output', 'decoder_output', 261 'exec_output', 'decode_block'): 262 del vars[k] 263 return GenCode(parser, **vars) 264 265# Special null format to catch an implicit-format instruction 266# definition outside of any format block. 267class NoFormat(object): 268 def __init__(self): 269 self.defaultInst = '' 270 271 def defineInst(self, parser, name, args, lineno): 272 error(lineno, 273 'instruction definition "%s" with no active format!' % name) 274 275############### 276# GenCode class 277# 278# The GenCode class encapsulates generated code destined for various 279# output files. The header_output and decoder_output attributes are 280# strings containing code destined for decoder.hh and decoder.cc 281# respectively. 
The decode_block attribute contains code to be 282# incorporated in the decode function itself (that will also end up in
|
287# decoder.cc). The exec_output attribute is a dictionary with a key 288# for each CPU model name; the value associated with a particular key 289# is the string of code for that CPU model's exec.cc file. The 290# has_decode_default attribute is used in the decode block to allow 291# explicit default clauses to override default default clauses.
| 283# decoder.cc). The exec_output attribute is the string of code for the 284# exec.cc file. The has_decode_default attribute is used in the decode block 285# to allow explicit default clauses to override default default clauses.
|
292 293class GenCode(object):
| 286 287class GenCode(object):
|
294 # Constructor. At this point we substitute out all CPU-specific 295 # symbols. For the exec output, these go into the per-model 296 # dictionary. For all other output types they get collapsed into 297 # a single string.
| 288 # Constructor.
|
298 def __init__(self, parser, 299 header_output = '', decoder_output = '', exec_output = '', 300 decode_block = '', has_decode_default = False): 301 self.parser = parser
| 289 def __init__(self, parser, 290 header_output = '', decoder_output = '', exec_output = '', 291 decode_block = '', has_decode_default = False): 292 self.parser = parser
|
302 self.header_output = parser.expandCpuSymbolsToString(header_output) 303 self.decoder_output = parser.expandCpuSymbolsToString(decoder_output)
| 293 self.header_output = header_output 294 self.decoder_output = decoder_output
|
        self.exec_output = exec_output
        self.decode_block = decode_block
        self.has_decode_default = has_decode_default

    # Write these code chunks out to the filesystem.  They will be properly
    # interwoven by the write_top_level_files().
    def emit(self):
        # Empty sections are skipped so no spurious files/blocks appear.
        if self.header_output:
            self.parser.get_file('header').write(self.header_output)
        if self.decoder_output:
            self.parser.get_file('decoder').write(self.decoder_output)
        if self.exec_output:
            self.parser.get_file('exec').write(self.exec_output)
        if self.decode_block:
            self.parser.get_file('decode_block').write(self.decode_block)

    # Override '+' operator: generate a new GenCode object that
    # concatenates all the individual strings in the operands.
    def __add__(self, other):
        return GenCode(self.parser,
                       self.header_output + other.header_output,
                       self.decoder_output + other.decoder_output,
                       self.exec_output + other.exec_output,
                       self.decode_block + other.decode_block,
                       self.has_decode_default or other.has_decode_default)

    # Prepend a string (typically a comment) to all the strings.
    def prepend_all(self, pre):
        self.header_output = pre + self.header_output
        self.decoder_output = pre + self.decoder_output
        self.decode_block = pre + self.decode_block
        self.exec_output = pre + self.exec_output

    # Wrap the decode block in a pair of strings (e.g., 'case foo:'
    # and 'break;').  Used to build the big nested switch statement.
    def wrap_decode_block(self, pre, post = ''):
        self.decode_block = pre + indent(self.decode_block) + post

#####################################################################
#
# Bitfield Operator Support
#
#####################################################################

# Single-index selector, e.g. '<n:>' (normalized to '<n:n>' below).
bitOp1ArgRE = re.compile(r'<\s*(\w+)\s*:\s*>')

# Selector applied directly to a name, e.g. 'foo<a:b>'; the negative
# lookbehind avoids matching in the middle of an identifier.
bitOpWordRE = re.compile(r'(?<![\w\.])([\w\.]+)<\s*(\w+)\s*:\s*(\w+)\s*>')
# Selector applied to a parenthesized expression, e.g. '(x + y)<a:b>'.
bitOpExprRE = re.compile(r'\)<\s*(\w+)\s*:\s*(\w+)\s*>')

def substBitOps(code):
    """Rewrite '<hi:lo>' bitfield selectors in 'code' as bits() calls."""
    # first convert single-bit selectors to two-index form
    # i.e., <n> --> <n:n>
    code = bitOp1ArgRE.sub(r'<\1:\1>', code)
    # simple case: selector applied to ID (name)
    # i.e., foo<a:b> --> bits(foo, a, b)
    code = bitOpWordRE.sub(r'bits(\1, \2, \3)', code)
    # if selector is applied to expression (ending in ')'),
    # we need to search backward for matching '('
    match = bitOpExprRE.search(code)
    while match:
        exprEnd = match.start()
        here = exprEnd - 1
        nestLevel = 1
        # Scan backward tracking paren nesting depth until the '('
        # matching the selector's final ')' is found.
        while nestLevel > 0:
            if code[here] == '(':
                nestLevel -= 1
            elif code[here] == ')':
                nestLevel += 1
            here -= 1
            # NOTE(review): guard placement reconstructed from mangled
            # source; note it also fires when the matching '(' sits at
            # index 0 -- confirm against upstream before relying on it.
            if here < 0:
                sys.exit("Didn't find '('!")
        exprStart = here+1
        newExpr = r'bits(%s, %s, %s)' % (code[exprStart:exprEnd+1],
                                         match.group(1), match.group(2))
        code = code[:exprStart] + newExpr + code[match.end():]
        match = bitOpExprRE.search(code)
    return code


#####################################################################
#
# Code Parser
#
# The remaining code is the support for automatically extracting
# instruction characteristics from pseudocode.
#
#####################################################################

# Force the argument to be a list.  Useful for flags, where a caller
# can specify a singleton flag or a list of flags.  Also useful for
# converting tuples to lists so they can be modified.
395def makeList(arg): 396 if isinstance(arg, list): 397 return arg 398 elif isinstance(arg, tuple): 399 return list(arg) 400 elif not arg: 401 return [] 402 else: 403 return [ arg ] 404 405class Operand(object): 406 '''Base class for operand descriptors. An instance of this class 407 (or actually a class derived from this one) represents a specific 408 operand for a code block (e.g, "Rc.sq" as a dest). Intermediate 409 derived classes encapsulates the traits of a particular operand 410 type (e.g., "32-bit integer register").''' 411 412 def buildReadCode(self, func = None): 413 subst_dict = {"name": self.base_name, 414 "func": func, 415 "reg_idx": self.reg_spec, 416 "ctype": self.ctype} 417 if hasattr(self, 'src_reg_idx'): 418 subst_dict['op_idx'] = self.src_reg_idx 419 code = self.read_code % subst_dict 420 return '%s = %s;\n' % (self.base_name, code) 421 422 def buildWriteCode(self, func = None): 423 subst_dict = {"name": self.base_name, 424 "func": func, 425 "reg_idx": self.reg_spec, 426 "ctype": self.ctype, 427 "final_val": self.base_name} 428 if hasattr(self, 'dest_reg_idx'): 429 subst_dict['op_idx'] = self.dest_reg_idx 430 code = self.write_code % subst_dict 431 return ''' 432 { 433 %s final_val = %s; 434 %s; 435 if (traceData) { traceData->setData(final_val); } 436 }''' % (self.dflt_ctype, self.base_name, code) 437 438 def __init__(self, parser, full_name, ext, is_src, is_dest): 439 self.full_name = full_name 440 self.ext = ext 441 self.is_src = is_src 442 self.is_dest = is_dest 443 # The 'effective extension' (eff_ext) is either the actual 444 # extension, if one was explicitly provided, or the default. 445 if ext: 446 self.eff_ext = ext 447 elif hasattr(self, 'dflt_ext'): 448 self.eff_ext = self.dflt_ext 449 450 if hasattr(self, 'eff_ext'): 451 self.ctype = parser.operandTypeMap[self.eff_ext] 452 453 # Finalize additional fields (primarily code fields). 
This step 454 # is done separately since some of these fields may depend on the 455 # register index enumeration that hasn't been performed yet at the 456 # time of __init__(). The register index enumeration is affected 457 # by predicated register reads/writes. Hence, we forward the flags 458 # that indicate whether or not predication is in use. 459 def finalize(self, predRead, predWrite): 460 self.flags = self.getFlags() 461 self.constructor = self.makeConstructor(predRead, predWrite) 462 self.op_decl = self.makeDecl() 463 464 if self.is_src: 465 self.op_rd = self.makeRead(predRead) 466 self.op_src_decl = self.makeDecl() 467 else: 468 self.op_rd = '' 469 self.op_src_decl = '' 470 471 if self.is_dest: 472 self.op_wb = self.makeWrite(predWrite) 473 self.op_dest_decl = self.makeDecl() 474 else: 475 self.op_wb = '' 476 self.op_dest_decl = '' 477 478 def isMem(self): 479 return 0 480 481 def isReg(self): 482 return 0 483 484 def isFloatReg(self): 485 return 0 486 487 def isIntReg(self): 488 return 0 489 490 def isCCReg(self): 491 return 0 492 493 def isControlReg(self): 494 return 0 495 496 def isVecReg(self): 497 return 0 498 499 def isVecElem(self): 500 return 0 501 502 def isPCState(self): 503 return 0 504 505 def isPCPart(self): 506 return self.isPCState() and self.reg_spec 507 508 def hasReadPred(self): 509 return self.read_predicate != None 510 511 def hasWritePred(self): 512 return self.write_predicate != None 513 514 def getFlags(self): 515 # note the empty slice '[:]' gives us a copy of self.flags[0] 516 # instead of a reference to it 517 my_flags = self.flags[0][:] 518 if self.is_src: 519 my_flags += self.flags[1] 520 if self.is_dest: 521 my_flags += self.flags[2] 522 return my_flags 523 524 def makeDecl(self): 525 # Note that initializations in the declarations are solely 526 # to avoid 'uninitialized variable' errors from the compiler. 
527 return self.ctype + ' ' + self.base_name + ' = 0;\n'; 528 529 530src_reg_constructor = '\n\t_srcRegIdx[_numSrcRegs++] = RegId(%s, %s);' 531dst_reg_constructor = '\n\t_destRegIdx[_numDestRegs++] = RegId(%s, %s);' 532 533 534class IntRegOperand(Operand): 535 reg_class = 'IntRegClass' 536 537 def isReg(self): 538 return 1 539 540 def isIntReg(self): 541 return 1 542 543 def makeConstructor(self, predRead, predWrite): 544 c_src = '' 545 c_dest = '' 546 547 if self.is_src: 548 c_src = src_reg_constructor % (self.reg_class, self.reg_spec) 549 if self.hasReadPred(): 550 c_src = '\n\tif (%s) {%s\n\t}' % \ 551 (self.read_predicate, c_src) 552 553 if self.is_dest: 554 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec) 555 c_dest += '\n\t_numIntDestRegs++;' 556 if self.hasWritePred(): 557 c_dest = '\n\tif (%s) {%s\n\t}' % \ 558 (self.write_predicate, c_dest) 559 560 return c_src + c_dest 561 562 def makeRead(self, predRead): 563 if (self.ctype == 'float' or self.ctype == 'double'): 564 error('Attempt to read integer register as FP') 565 if self.read_code != None: 566 return self.buildReadCode('readIntRegOperand') 567 568 int_reg_val = '' 569 if predRead: 570 int_reg_val = 'xc->readIntRegOperand(this, _sourceIndex++)' 571 if self.hasReadPred(): 572 int_reg_val = '(%s) ? 
%s : 0' % \ 573 (self.read_predicate, int_reg_val) 574 else: 575 int_reg_val = 'xc->readIntRegOperand(this, %d)' % self.src_reg_idx 576 577 return '%s = %s;\n' % (self.base_name, int_reg_val) 578 579 def makeWrite(self, predWrite): 580 if (self.ctype == 'float' or self.ctype == 'double'): 581 error('Attempt to write integer register as FP') 582 if self.write_code != None: 583 return self.buildWriteCode('setIntRegOperand') 584 585 if predWrite: 586 wp = 'true' 587 if self.hasWritePred(): 588 wp = self.write_predicate 589 590 wcond = 'if (%s)' % (wp) 591 windex = '_destIndex++' 592 else: 593 wcond = '' 594 windex = '%d' % self.dest_reg_idx 595 596 wb = ''' 597 %s 598 { 599 %s final_val = %s; 600 xc->setIntRegOperand(this, %s, final_val);\n 601 if (traceData) { traceData->setData(final_val); } 602 }''' % (wcond, self.ctype, self.base_name, windex) 603 604 return wb 605 606class FloatRegOperand(Operand): 607 reg_class = 'FloatRegClass' 608 609 def isReg(self): 610 return 1 611 612 def isFloatReg(self): 613 return 1 614 615 def makeConstructor(self, predRead, predWrite): 616 c_src = '' 617 c_dest = '' 618 619 if self.is_src: 620 c_src = src_reg_constructor % (self.reg_class, self.reg_spec) 621 622 if self.is_dest: 623 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec) 624 c_dest += '\n\t_numFPDestRegs++;' 625 626 return c_src + c_dest 627 628 def makeRead(self, predRead): 629 bit_select = 0 630 if (self.ctype == 'float' or self.ctype == 'double'): 631 func = 'readFloatRegOperand' 632 else: 633 func = 'readFloatRegOperandBits' 634 if self.read_code != None: 635 return self.buildReadCode(func) 636 637 if predRead: 638 rindex = '_sourceIndex++' 639 else: 640 rindex = '%d' % self.src_reg_idx 641 642 return '%s = xc->%s(this, %s);\n' % \ 643 (self.base_name, func, rindex) 644 645 def makeWrite(self, predWrite): 646 if (self.ctype == 'float' or self.ctype == 'double'): 647 func = 'setFloatRegOperand' 648 else: 649 func = 'setFloatRegOperandBits' 650 if 
class VecRegOperand(Operand):
    '''Vector-register operand.  Reads and writes go through typed views
    (VecRegContainer::as<T>()) over the raw container; individual vector
    elements referenced in the code are tracked in self.active_elems.'''
    reg_class = 'VecRegClass'

    def __init__(self, parser, full_name, ext, is_src, is_dest):
        Operand.__init__(self, parser, full_name, ext, is_src, is_dest)
        # Extension of the element accesses seen by the parser, if any.
        self.elemExt = None
        self.parser = parser

    def isReg(self):
        return 1

    def isVecReg(self):
        return 1

    def makeDeclElem(self, elem_op):
        '''Declare one element variable, using its explicit extension if
        given, else the element's default extension.'''
        (elem_name, elem_ext) = elem_op
        (elem_spec, dflt_elem_ext, zeroing) = self.elems[elem_name]
        if elem_ext:
            ext = elem_ext
        else:
            ext = dflt_elem_ext
        ctype = self.parser.operandTypeMap[ext]
        return '\n\t%s %s = 0;' % (ctype, elem_name)

    def makeDecl(self):
        # Only pure sources get element variable declarations here;
        # destinations are handled through makeReadW()/finalize().
        if not self.is_dest and self.is_src:
            c_decl = '\t/* Vars for %s*/' % (self.base_name)
            if hasattr(self, 'active_elems'):
                if self.active_elems:
                    for elem in self.active_elems:
                        c_decl += self.makeDeclElem(elem)
            return c_decl + '\t/* End vars for %s */\n' % (self.base_name)
        else:
            return ''

    def makeConstructor(self, predRead, predWrite):
        # FIX: dropped the unused local 'numAccessNeeded = 1'.
        c_src = ''
        c_dest = ''

        if self.is_src:
            c_src = src_reg_constructor % (self.reg_class, self.reg_spec)

        if self.is_dest:
            c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
            c_dest += '\n\t_numVecDestRegs++;'

        return c_src + c_dest

    # Read destination register to write
    def makeReadWElem(self, elem_op):
        (elem_name, elem_ext) = elem_op
        (elem_spec, dflt_elem_ext, zeroing) = self.elems[elem_name]
        if elem_ext:
            ext = elem_ext
        else:
            ext = dflt_elem_ext
        ctype = self.parser.operandTypeMap[ext]
        # Bind a reference so assignments to the element variable update
        # the destination container in place.
        c_read = '\t\t%s& %s = %s[%s];\n' % \
            (ctype, elem_name, self.base_name, elem_spec)
        return c_read

    def makeReadW(self, predWrite):
        '''Emit C++ that obtains a writable view of the destination vector
        register (plus references for any active elements).'''
        func = 'getWritableVecRegOperand'
        if self.read_code != None:
            return self.buildReadCode(func)

        if predWrite:
            rindex = '_destIndex++'
        else:
            rindex = '%d' % self.dest_reg_idx

        c_readw = '\t\t%s& tmp_d%s = xc->%s(this, %s);\n'\
            % ('TheISA::VecRegContainer', rindex, func, rindex)
        if self.elemExt:
            c_readw += '\t\tauto %s = tmp_d%s.as<%s>();\n' % (self.base_name,
                rindex, self.parser.operandTypeMap[self.elemExt])
        if self.ext:
            c_readw += '\t\tauto %s = tmp_d%s.as<%s>();\n' % (self.base_name,
                rindex, self.parser.operandTypeMap[self.ext])
        if hasattr(self, 'active_elems'):
            if self.active_elems:
                for elem in self.active_elems:
                    c_readw += self.makeReadWElem(elem)
        return c_readw

    # Normal source operand read
    def makeReadElem(self, elem_op, name):
        (elem_name, elem_ext) = elem_op
        (elem_spec, dflt_elem_ext, zeroing) = self.elems[elem_name]

        if elem_ext:
            ext = elem_ext
        else:
            ext = dflt_elem_ext
        ctype = self.parser.operandTypeMap[ext]
        c_read = '\t\t%s = %s[%s];\n' % \
            (elem_name, name, elem_spec)
        return c_read

    def makeRead(self, predRead):
        func = 'readVecRegOperand'
        if self.read_code != None:
            return self.buildReadCode(func)

        if predRead:
            rindex = '_sourceIndex++'
        else:
            rindex = '%d' % self.src_reg_idx

        name = self.base_name
        # A register that is both source and destination reads through a
        # distinct '_merger' view so the writable view keeps its name.
        if self.is_dest and self.is_src:
            name += '_merger'

        c_read = '\t\t%s& tmp_s%s = xc->%s(this, %s);\n' \
            % ('const TheISA::VecRegContainer', rindex, func, rindex)
        # If the parser has detected that elements are being access, create
        # the appropriate view
        if self.elemExt:
            c_read += '\t\tauto %s = tmp_s%s.as<%s>();\n' % \
                (name, rindex, self.parser.operandTypeMap[self.elemExt])
        if self.ext:
            c_read += '\t\tauto %s = tmp_s%s.as<%s>();\n' % \
                (name, rindex, self.parser.operandTypeMap[self.ext])
        if hasattr(self, 'active_elems'):
            if self.active_elems:
                for elem in self.active_elems:
                    c_read += self.makeReadElem(elem, name)
        return c_read

    def makeWrite(self, predWrite):
        # Writes happen through the writable view obtained in makeReadW(),
        # so nothing is written back here beyond the trace hook.
        func = 'setVecRegOperand'
        if self.write_code != None:
            return self.buildWriteCode(func)

        wb = '''
        if (traceData) {
            warn_once("Vectors not supported yet in tracedata");
            /*traceData->setData(final_val);*/
        }
        '''
        return wb

    def finalize(self, predRead, predWrite):
        super(VecRegOperand, self).finalize(predRead, predWrite)
        # Destinations also need the writable-view setup prepended to the
        # read code so element references bind to the real container.
        if self.is_dest:
            self.op_rd = self.makeReadW(predWrite) + self.op_rd


class VecElemOperand(Operand):
    '''Single-element vector operand: the element index is carried in the
    RegId itself (reg_spec + elem_spec).'''
    reg_class = 'VectorElemClass'

    def isReg(self):
        return 1

    def isVecElem(self):
        return 1

    def makeDecl(self):
        if self.is_dest and not self.is_src:
            return '\n\t%s %s;' % (self.ctype, self.base_name)
        else:
            return ''

    def makeConstructor(self, predRead, predWrite):
        # FIX: removed a dead 'regId = ...' interpolation that used three
        # %s conversions but supplied only two arguments -- evaluating it
        # raised "TypeError: not enough arguments for format string" on
        # every call, and its result was never used.  Also dropped the
        # unused local 'numAccessNeeded = 1'.
        c_src = ''
        c_dest = ''

        if self.is_src:
            c_src = ('\n\t_srcRegIdx[_numSrcRegs++] = RegId(%s, %s, %s);' %
                    (self.reg_class, self.reg_spec, self.elem_spec))

        if self.is_dest:
            c_dest = ('\n\t_destRegIdx[_numDestRegs++] = RegId(%s, %s, %s);' %
                    (self.reg_class, self.reg_spec, self.elem_spec))
            c_dest += '\n\t_numVecElemDestRegs++;'
        return c_src + c_dest

    def makeRead(self, predRead):
        c_read = ('\n/* Elem is kept inside the operand description */' +
                  '\n\tVecElem %s = xc->readVecElemOperand(this, %d);' %
                  (self.base_name, self.src_reg_idx))
        return c_read

    def makeWrite(self, predWrite):
        c_write = ('\n/* Elem is kept inside the operand description */' +
                   '\n\txc->setVecElemOperand(this, %d, %s);' %
                   (self.dest_reg_idx, self.base_name))
        return c_write
class CCRegOperand(Operand):
    '''Condition-code-register operand; mirrors IntRegOperand but uses the
    CC register accessors and the CC destination counter.'''
    reg_class = 'CCRegClass'

    def isReg(self):
        return 1

    def isCCReg(self):
        return 1

    def makeConstructor(self, predRead, predWrite):
        # Register the operand index; predicated operands register only
        # when their predicate holds.
        c_src = ''
        c_dest = ''

        if self.is_src:
            c_src = src_reg_constructor % (self.reg_class, self.reg_spec)
            if self.hasReadPred():
                c_src = '\n\tif (%s) {%s\n\t}' % \
                    (self.read_predicate, c_src)

        if self.is_dest:
            c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)
            c_dest += '\n\t_numCCDestRegs++;'
            if self.hasWritePred():
                c_dest = '\n\tif (%s) {%s\n\t}' % \
                    (self.write_predicate, c_dest)

        return c_src + c_dest

    def makeRead(self, predRead):
        if (self.ctype == 'float' or self.ctype == 'double'):
            error('Attempt to read condition-code register as FP')
        if self.read_code != None:
            return self.buildReadCode('readCCRegOperand')

        int_reg_val = ''
        if predRead:
            # Dynamic indexing; a predicated-off read yields 0.
            int_reg_val = 'xc->readCCRegOperand(this, _sourceIndex++)'
            if self.hasReadPred():
                int_reg_val = '(%s) ? %s : 0' % \
                    (self.read_predicate, int_reg_val)
        else:
            int_reg_val = 'xc->readCCRegOperand(this, %d)' % self.src_reg_idx

        return '%s = %s;\n' % (self.base_name, int_reg_val)

    def makeWrite(self, predWrite):
        if (self.ctype == 'float' or self.ctype == 'double'):
            error('Attempt to write condition-code register as FP')
        if self.write_code != None:
            return self.buildWriteCode('setCCRegOperand')

        if predWrite:
            wp = 'true'
            if self.hasWritePred():
                wp = self.write_predicate

            wcond = 'if (%s)' % (wp)
            windex = '_destIndex++'
        else:
            wcond = ''
            windex = '%d' % self.dest_reg_idx

        wb = '''
        %s
        {
            %s final_val = %s;
            xc->setCCRegOperand(this, %s, final_val);\n
            if (traceData) { traceData->setData(final_val); }
        }''' % (wcond, self.ctype, self.base_name, windex)

        return wb


class ControlRegOperand(Operand):
    '''Miscellaneous (control) register operand; uses the MiscReg
    accessors and does not bump any typed destination counter.'''
    reg_class = 'MiscRegClass'

    def isReg(self):
        return 1

    def isControlReg(self):
        return 1

    def makeConstructor(self, predRead, predWrite):
        c_src = ''
        c_dest = ''

        if self.is_src:
            c_src = src_reg_constructor % (self.reg_class, self.reg_spec)

        if self.is_dest:
            c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec)

        return c_src + c_dest

    def makeRead(self, predRead):
        # FIX: removed the unused local 'bit_select = 0'.
        if (self.ctype == 'float' or self.ctype == 'double'):
            error('Attempt to read control register as FP')
        if self.read_code != None:
            return self.buildReadCode('readMiscRegOperand')

        if predRead:
            rindex = '_sourceIndex++'
        else:
            rindex = '%d' % self.src_reg_idx

        return '%s = xc->readMiscRegOperand(this, %s);\n' % \
            (self.base_name, rindex)

    def makeWrite(self, predWrite):
        if (self.ctype == 'float' or self.ctype == 'double'):
            error('Attempt to write control register as FP')
        if self.write_code != None:
            return self.buildWriteCode('setMiscRegOperand')

        if predWrite:
            windex = '_destIndex++'
        else:
            windex = '%d' % self.dest_reg_idx

        wb = 'xc->setMiscRegOperand(this, %s, %s);\n' % \
            (windex, self.base_name)
        wb += 'if (traceData) { traceData->setData(%s); }' % \
            self.base_name

        return wb
class MemOperand(Operand):
    '''Memory operand: a plain variable holding data moved to/from memory;
    the actual access is generated by the memory-access templates.'''

    def isMem(self):
        return 1

    def makeConstructor(self, predRead, predWrite):
        # Memory operands occupy no register-index slots.
        return ''

    def makeDecl(self):
        # Declare memory data variable.
        return '%s %s;\n' % (self.ctype, self.base_name)

    def makeRead(self, predRead):
        # Only user-supplied read code is emitted here.
        if self.read_code != None:
            return self.buildReadCode()
        return ''

    def makeWrite(self, predWrite):
        # Only user-supplied write code is emitted here.
        if self.write_code != None:
            return self.buildWriteCode()
        return ''


class PCStateOperand(Operand):
    '''PC-state operand: either the whole PCState object or, when
    reg_spec is set, a single named component of it.'''

    def makeConstructor(self, predRead, predWrite):
        return ''

    def makeRead(self, predRead):
        if self.reg_spec:
            # A component of the PC state.
            return '%s = __parserAutoPCState.%s();\n' % \
                (self.base_name, self.reg_spec)
        else:
            # The whole PC state itself.
            return '%s = xc->pcState();\n' % self.base_name

    def makeWrite(self, predWrite):
        if self.reg_spec:
            # A component of the PC state.
            return '__parserAutoPCState.%s(%s);\n' % \
                (self.reg_spec, self.base_name)
        else:
            # The whole PC state itself.
            return 'xc->pcState(%s);\n' % self.base_name

    def makeDecl(self):
        ctype = 'TheISA::PCState'
        if self.isPCPart():
            ctype = self.ctype
        # Note that initializations in the declarations are solely
        # to avoid 'uninitialized variable' errors from the compiler.
        return '%s %s = 0;\n' % (ctype, self.base_name)

    def isPCState(self):
        return 1


class OperandList(object):
    '''Find all the operands in the given code block.  Returns an operand
    descriptor list (instance of class OperandList).'''

    def __init__(self, parser, code):
        self.items = []
        self.bases = {}
        # delete strings and comments so we don't match on operands inside
        for regEx in (stringRE, commentRE):
            code = regEx.sub('', code)
        # search for operands
        next_pos = 0
        while 1:
            match = parser.operandsRE.search(code, next_pos)
            if not match:
                # no more matches: we're done
                break
            op = match.groups()
            # regexp groups are operand full name, base, and extension
            (op_full, op_base, op_ext) = op
            # If is a elem operand, define or update the corresponding
            # vector operand
            isElem = False
            if op_base in parser.elemToVector:
                isElem = True
                elem_op = (op_base, op_ext)
                op_base = parser.elemToVector[op_base]
                op_ext = '' # use the default one
            # if the token following the operand is an assignment, this is
            # a destination (LHS), else it's a source (RHS)
            is_dest = (assignRE.match(code, match.end()) != None)
            is_src = not is_dest

            # see if we've already seen this one
            op_desc = self.find_base(op_base)
            if op_desc:
                # Existing operand: merge src/dest roles and check that
                # the extension is consistent across all uses.
                if op_ext and op_ext != '' and op_desc.ext != op_ext:
                    error ('Inconsistent extensions for operand %s: %s - %s' \
                           % (op_base, op_desc.ext, op_ext))
                op_desc.is_src = op_desc.is_src or is_src
                op_desc.is_dest = op_desc.is_dest or is_dest
                if isElem:
                    # Track each distinct element access on the vector.
                    (elem_base, elem_ext) = elem_op
                    found = False
                    for ae in op_desc.active_elems:
                        (ae_base, ae_ext) = ae
                        if ae_base == elem_base:
                            if ae_ext != elem_ext:
                                error('Inconsistent extensions for elem'
                                      ' operand %s' % elem_base)
                            else:
                                found = True
                    if not found:
                        op_desc.active_elems.append(elem_op)
            else:
                # new operand: create new descriptor
                op_desc = parser.operandNameMap[op_base](parser,
                    op_full, op_ext, is_src, is_dest)
                # if operand is a vector elem, add the corresponding vector
                # operand if not already done
                if isElem:
                    op_desc.elemExt = elem_op[1]
                    op_desc.active_elems = [elem_op]
                self.append(op_desc)
            # start next search after end of current match
            next_pos = match.end()
        self.sort()
        # enumerate source & dest register operands... used in building
        # constructor later
        self.numSrcRegs = 0
        self.numDestRegs = 0
        self.numFPDestRegs = 0
        self.numIntDestRegs = 0
        self.numVecDestRegs = 0
        self.numCCDestRegs = 0
        self.numMiscDestRegs = 0
        self.memOperand = None

        # Flags to keep track if one or more operands are to be read/written
        # conditionally.
        self.predRead = False
        self.predWrite = False

        for op_desc in self.items:
            if op_desc.isReg():
                if op_desc.is_src:
                    op_desc.src_reg_idx = self.numSrcRegs
                    self.numSrcRegs += 1
                if op_desc.is_dest:
                    op_desc.dest_reg_idx = self.numDestRegs
                    self.numDestRegs += 1
                    if op_desc.isFloatReg():
                        self.numFPDestRegs += 1
                    elif op_desc.isIntReg():
                        self.numIntDestRegs += 1
                    elif op_desc.isVecReg():
                        self.numVecDestRegs += 1
                    elif op_desc.isCCReg():
                        self.numCCDestRegs += 1
                    elif op_desc.isControlReg():
                        self.numMiscDestRegs += 1
            elif op_desc.isMem():
                if self.memOperand:
                    error("Code block has more than one memory operand.")
                self.memOperand = op_desc

            # Check if this operand has read/write predication. If true, then
            # the microop will dynamically index source/dest registers.
            self.predRead = self.predRead or op_desc.hasReadPred()
            self.predWrite = self.predWrite or op_desc.hasWritePred()

        # Track parser-wide maxima used when sizing the register arrays.
        if parser.maxInstSrcRegs < self.numSrcRegs:
            parser.maxInstSrcRegs = self.numSrcRegs
        if parser.maxInstDestRegs < self.numDestRegs:
            parser.maxInstDestRegs = self.numDestRegs
        if parser.maxMiscDestRegs < self.numMiscDestRegs:
            parser.maxMiscDestRegs = self.numMiscDestRegs

        # now make a final pass to finalize op_desc fields that may depend
        # on the register enumeration
        for op_desc in self.items:
            op_desc.finalize(self.predRead, self.predWrite)

    def __len__(self):
        return len(self.items)

    def __getitem__(self, index):
        return self.items[index]

    def append(self, op_desc):
        self.items.append(op_desc)
        self.bases[op_desc.base_name] = op_desc

    def find_base(self, base_name):
        # like self.bases[base_name], but returns None if not found
        # (rather than raising exception)
        return self.bases.get(base_name)

    # internal helper function for concat[Some]Attr{Strings|Lists}
    def __internalConcatAttrs(self, attr_name, filter, result):
        for op_desc in self.items:
            if filter(op_desc):
                result += getattr(op_desc, attr_name)
        return result

    # return a single string that is the concatenation of the (string)
    # values of the specified attribute for all operands
    def concatAttrStrings(self, attr_name):
        return self.__internalConcatAttrs(attr_name, lambda x: 1, '')

    # like concatAttrStrings, but only include the values for the operands
    # for which the provided filter function returns true
    def concatSomeAttrStrings(self, filter, attr_name):
        return self.__internalConcatAttrs(attr_name, filter, '')

    # return a single list that is the concatenation of the (list)
    # values of the specified attribute for all operands
    def concatAttrLists(self, attr_name):
        return self.__internalConcatAttrs(attr_name, lambda x: 1, [])

    # like concatAttrLists, but only include the values for the operands
    # for which the provided filter function returns true
    def concatSomeAttrLists(self, filter, attr_name):
        return self.__internalConcatAttrs(attr_name, filter, [])

    def sort(self):
        # Python 2 cmp-style sort: order operands by their sort_pri key.
        self.items.sort(lambda a, b: a.sort_pri - b.sort_pri)
class SubOperandList(OperandList):
    '''Find all the operands in the given code block.  Returns an operand
    descriptor list (instance of class OperandList).'''

    def __init__(self, parser, code, master_list):
        # Unlike OperandList, entries here are references into master_list;
        # no new operand descriptors are created.
        self.items = []
        self.bases = {}
        # delete strings and comments so we don't match on operands inside
        for regEx in (stringRE, commentRE):
            code = regEx.sub('', code)
        # search for operands
        next_pos = 0
        while 1:
            match = parser.operandsRE.search(code, next_pos)
            if not match:
                # no more matches: we're done
                break
            op = match.groups()
            # regexp groups are operand full name, base, and extension
            (op_full, op_base, op_ext) = op
            # If is a elem operand, define or update the corresponding
            # vector operand
            if op_base in parser.elemToVector:
                elem_op = op_base
                op_base = parser.elemToVector[elem_op]
            # find this op in the master list
            op_desc = master_list.find_base(op_base)
            if not op_desc:
                error('Found operand %s which is not in the master list!'
                      % op_base)
            else:
                # See if we've already found this operand
                op_desc = self.find_base(op_base)
                if not op_desc:
                    # if not, add a reference to it to this sub list
                    self.append(master_list.bases[op_base])

            # start next search after end of current match
            next_pos = match.end()
        self.sort()
        self.memOperand = None
        # Whether the whole PC needs to be read so parts of it can be accessed
        self.readPC = False
        # Whether the whole PC needs to be written after parts of it were
        # changed
        self.setPC = False
        # Whether this instruction manipulates the whole PC or parts of it.
        # Mixing the two is a bad idea and flagged as an error.
        self.pcPart = None

        # Flags to keep track if one or more operands are to be read/written
        # conditionally.
        self.predRead = False
        self.predWrite = False

        for op_desc in self.items:
            if op_desc.isPCPart():
                self.readPC = True
                if op_desc.is_dest:
                    self.setPC = True

            if op_desc.isPCState():
                if self.pcPart is not None:
                    # Whole-PC and partial-PC accesses must not be mixed.
                    if self.pcPart and not op_desc.isPCPart() or \
                            not self.pcPart and op_desc.isPCPart():
                        error("Mixed whole and partial PC state operands.")
                self.pcPart = op_desc.isPCPart()

            if op_desc.isMem():
                if self.memOperand:
                    error("Code block has more than one memory operand.")
                self.memOperand = op_desc

            # Check if this operand has read/write predication. If true, then
            # the microop will dynamically index source/dest registers.
            self.predRead = self.predRead or op_desc.hasReadPred()
            self.predWrite = self.predWrite or op_desc.hasWritePred()
# Regular expression object to match C++ strings
stringRE = re.compile(r'"([^"\\]|\\.)*"')

# Regular expression object to match C++ comments
# (used in findOperands())
commentRE = re.compile(r'(^)?[^\S\n]*/(?:\*(.*?)\*/[^\S\n]*|/[^\n]*)($)?',
                       re.DOTALL | re.MULTILINE)

# Regular expression object to match assignment statements (used in
# findOperands()).  If the code immediately following the first
# appearance of the operand matches this regex, then the operand
# appears to be on the LHS of an assignment, and is thus a
# destination.  basically we're looking for an '=' that's not '=='.
# The heinous tangle before that handles the case where the operand
# has an array subscript.
assignRE = re.compile(r'(\[[^\]]+\])?\s*=(?!=)', re.MULTILINE)

def makeFlagConstructor(flag_list):
    '''Return C++ that sets each StaticInst flag named in flag_list.

    Note: sorts and de-duplicates flag_list in place (callers such as
    InstObjParams pass their own flag list).
    '''
    if len(flag_list) == 0:
        return ''
    # filter out repeated flags
    flag_list.sort()
    i = 1
    while i < len(flag_list):
        if flag_list[i] == flag_list[i-1]:
            del flag_list[i]
        else:
            i += 1
    pre = '\n\tflags['
    post = '] = true;'
    # FIX: use the str.join method instead of the long-deprecated (and
    # Python-3-removed) string.join function; the output is identical.
    code = pre + (post + pre).join(flag_list) + post
    return code

# Assume all instruction flags are of the form 'IsFoo'
instFlagRE = re.compile(r'Is.*')

# OpClass constants end in 'Op' except No_OpClass
opClassRE = re.compile(r'.*Op|No_OpClass')

class InstObjParams(object):
    '''Parameter bundle handed to the code-generation templates for one
    instruction class: mnemonic, class names, operand list, constructor
    body, flags, and operand class.'''

    # NOTE: the mutable defaults below are never mutated (snippets is
    # rebound, opt_args only iterated), so they are safe as written.
    def __init__(self, parser, mnem, class_name, base_class = '',
                 snippets = {}, opt_args = []):
        self.mnemonic = mnem
        self.class_name = class_name
        self.base_class = base_class
        # A bare string is treated as the 'code' snippet.
        if not isinstance(snippets, dict):
            snippets = {'code' : snippets}
        # Operands are discovered across the union of all snippets.
        compositeCode = ' '.join(map(str, snippets.values()))
        self.snippets = snippets

        self.operands = OperandList(parser, compositeCode)

        # The header of the constructor declares the variables to be used
        # in the body of the constructor.
        header = ''
        header += '\n\t_numSrcRegs = 0;'
        header += '\n\t_numDestRegs = 0;'
        header += '\n\t_numFPDestRegs = 0;'
        header += '\n\t_numVecDestRegs = 0;'
        header += '\n\t_numVecElemDestRegs = 0;'
        header += '\n\t_numIntDestRegs = 0;'
        header += '\n\t_numCCDestRegs = 0;'

        self.constructor = header + \
            self.operands.concatAttrStrings('constructor')

        self.flags = self.operands.concatAttrLists('flags')

        self.op_class = None

        # Optional arguments are assumed to be either StaticInst flags
        # or an OpClass value.  To avoid having to import a complete
        # list of these values to match against, we do it ad-hoc
        # with regexps.
        for oa in opt_args:
            if instFlagRE.match(oa):
                self.flags.append(oa)
            elif opClassRE.match(oa):
                self.op_class = oa
            else:
                error('InstObjParams: optional arg "%s" not recognized '
                      'as StaticInst::Flag or OpClass.' % oa)

        # Make a basic guess on the operand class if not set.
        # These are good enough for most cases.
        if not self.op_class:
            if 'IsStore' in self.flags:
                # The order matters here: 'IsFloating' and 'IsInteger' are
                # usually set in FP instructions because of the base
                # register
                if 'IsFloating' in self.flags:
                    self.op_class = 'FloatMemWriteOp'
                else:
                    self.op_class = 'MemWriteOp'
            elif 'IsLoad' in self.flags or 'IsPrefetch' in self.flags:
                # The order matters here: 'IsFloating' and 'IsInteger' are
                # usually set in FP instructions because of the base
                # register
                if 'IsFloating' in self.flags:
                    self.op_class = 'FloatMemReadOp'
                else:
                    self.op_class = 'MemReadOp'
            elif 'IsFloating' in self.flags:
                self.op_class = 'FloatAddOp'
            elif 'IsVector' in self.flags:
                self.op_class = 'SimdAddOp'
            else:
                self.op_class = 'IntAluOp'

        # add flag initialization to contructor here to include
        # any flags added via opt_args
        self.constructor += makeFlagConstructor(self.flags)

        # if 'IsFloating' is set, add call to the FP enable check
        # function (which should be provided by isa_desc via a declare)
        # if 'IsVector' is set, add call to the Vector enable check
        # function (which should be provided by isa_desc via a declare)
        if 'IsFloating' in self.flags:
            self.fp_enable_check = 'fault = checkFpEnableFault(xc);'
        elif 'IsVector' in self.flags:
            self.fp_enable_check = 'fault = checkVecEnableFault(xc);'
        else:
            self.fp_enable_check = ''

##############
# Stack: a simple stack object.  Used for both formats (formatStack)
# and default cases (defaultStack).  Simply wraps a list to give more
# stack-like syntax and enable initialization with an argument list
# (as opposed to an argument that's a list).
1414 1415class Stack(list): 1416 def __init__(self, *items): 1417 list.__init__(self, items) 1418 1419 def push(self, item): 1420 self.append(item); 1421 1422 def top(self): 1423 return self[-1] 1424 1425# Format a file include stack backtrace as a string 1426def backtrace(filename_stack): 1427 fmt = "In file included from %s:" 1428 return "\n".join([fmt % f for f in filename_stack]) 1429 1430 1431####################### 1432# 1433# LineTracker: track filenames along with line numbers in PLY lineno fields 1434# PLY explicitly doesn't do anything with 'lineno' except propagate 1435# it. This class lets us tie filenames with the line numbers with a 1436# minimum of disruption to existing increment code. 1437# 1438 1439class LineTracker(object): 1440 def __init__(self, filename, lineno=1): 1441 self.filename = filename 1442 self.lineno = lineno 1443 1444 # Overload '+=' for increments. We need to create a new object on 1445 # each update else every token ends up referencing the same 1446 # constantly incrementing instance. 1447 def __iadd__(self, incr): 1448 return LineTracker(self.filename, self.lineno + incr) 1449 1450 def __str__(self): 1451 return "%s:%d" % (self.filename, self.lineno) 1452 1453 # In case there are places where someone really expects a number 1454 def __int__(self): 1455 return self.lineno 1456 1457 1458####################### 1459# 1460# ISA Parser 1461# parses ISA DSL and emits C++ headers and source 1462# 1463 1464class ISAParser(Grammar):
# (tail of GenCode.__init__: stash the remaining output sections on the
# instance; they are written out later by emit())
        self.exec_output = exec_output
        self.decode_block = decode_block
        self.has_decode_default = has_decode_default

    # Write these code chunks out to the filesystem.  They will be properly
    # interwoven by the write_top_level_files().
    def emit(self):
        if self.header_output:
            self.parser.get_file('header').write(self.header_output)
        if self.decoder_output:
            self.parser.get_file('decoder').write(self.decoder_output)
        if self.exec_output:
            self.parser.get_file('exec').write(self.exec_output)
        if self.decode_block:
            self.parser.get_file('decode_block').write(self.decode_block)

    # Override '+' operator: generate a new GenCode object that
    # concatenates all the individual strings in the operands.
    def __add__(self, other):
        return GenCode(self.parser,
                       self.header_output + other.header_output,
                       self.decoder_output + other.decoder_output,
                       self.exec_output + other.exec_output,
                       self.decode_block + other.decode_block,
                       self.has_decode_default or other.has_decode_default)

    # Prepend a string (typically a comment) to all the strings.
    def prepend_all(self, pre):
        self.header_output = pre + self.header_output
        self.decoder_output = pre + self.decoder_output
        self.decode_block = pre + self.decode_block
        self.exec_output = pre + self.exec_output

    # Wrap the decode block in a pair of strings (e.g., 'case foo:'
    # and 'break;').  Used to build the big nested switch statement.
330 def wrap_decode_block(self, pre, post = ''): 331 self.decode_block = pre + indent(self.decode_block) + post 332 333##################################################################### 334# 335# Bitfield Operator Support 336# 337##################################################################### 338 339bitOp1ArgRE = re.compile(r'<\s*(\w+)\s*:\s*>') 340 341bitOpWordRE = re.compile(r'(?<![\w\.])([\w\.]+)<\s*(\w+)\s*:\s*(\w+)\s*>') 342bitOpExprRE = re.compile(r'\)<\s*(\w+)\s*:\s*(\w+)\s*>') 343 344def substBitOps(code): 345 # first convert single-bit selectors to two-index form 346 # i.e., <n> --> <n:n> 347 code = bitOp1ArgRE.sub(r'<\1:\1>', code) 348 # simple case: selector applied to ID (name) 349 # i.e., foo<a:b> --> bits(foo, a, b) 350 code = bitOpWordRE.sub(r'bits(\1, \2, \3)', code) 351 # if selector is applied to expression (ending in ')'), 352 # we need to search backward for matching '(' 353 match = bitOpExprRE.search(code) 354 while match: 355 exprEnd = match.start() 356 here = exprEnd - 1 357 nestLevel = 1 358 while nestLevel > 0: 359 if code[here] == '(': 360 nestLevel -= 1 361 elif code[here] == ')': 362 nestLevel += 1 363 here -= 1 364 if here < 0: 365 sys.exit("Didn't find '('!") 366 exprStart = here+1 367 newExpr = r'bits(%s, %s, %s)' % (code[exprStart:exprEnd+1], 368 match.group(1), match.group(2)) 369 code = code[:exprStart] + newExpr + code[match.end():] 370 match = bitOpExprRE.search(code) 371 return code 372 373 374##################################################################### 375# 376# Code Parser 377# 378# The remaining code is the support for automatically extracting 379# instruction characteristics from pseudocode. 380# 381##################################################################### 382 383# Force the argument to be a list. Useful for flags, where a caller 384# can specify a singleton flag or a list of flags. Also usful for 385# converting tuples to lists so they can be modified. 
def makeList(arg):
    '''Coerce arg to a list: lists pass through, tuples are converted,
    falsy values become [], and anything else becomes a singleton list.'''
    if isinstance(arg, list):
        return arg
    elif isinstance(arg, tuple):
        return list(arg)
    elif not arg:
        return []
    else:
        return [ arg ]

class Operand(object):
    '''Base class for operand descriptors.  An instance of this class
    (or actually a class derived from this one) represents a specific
    operand for a code block (e.g, "Rc.sq" as a dest). Intermediate
    derived classes encapsulates the traits of a particular operand
    type (e.g., "32-bit integer register").'''

    def buildReadCode(self, func = None):
        # Substitute user-supplied read_code; op_idx is only available
        # once src_reg_idx has been assigned by OperandList.
        subst_dict = {"name": self.base_name,
                      "func": func,
                      "reg_idx": self.reg_spec,
                      "ctype": self.ctype}
        if hasattr(self, 'src_reg_idx'):
            subst_dict['op_idx'] = self.src_reg_idx
        code = self.read_code % subst_dict
        return '%s = %s;\n' % (self.base_name, code)

    def buildWriteCode(self, func = None):
        # Substitute user-supplied write_code and wrap it so final_val is
        # available for tracing.
        subst_dict = {"name": self.base_name,
                      "func": func,
                      "reg_idx": self.reg_spec,
                      "ctype": self.ctype,
                      "final_val": self.base_name}
        if hasattr(self, 'dest_reg_idx'):
            subst_dict['op_idx'] = self.dest_reg_idx
        code = self.write_code % subst_dict
        return '''
        {
            %s final_val = %s;
            %s;
            if (traceData) { traceData->setData(final_val); }
        }''' % (self.dflt_ctype, self.base_name, code)

    def __init__(self, parser, full_name, ext, is_src, is_dest):
        self.full_name = full_name
        self.ext = ext
        self.is_src = is_src
        self.is_dest = is_dest
        # The 'effective extension' (eff_ext) is either the actual
        # extension, if one was explicitly provided, or the default.
        if ext:
            self.eff_ext = ext
        elif hasattr(self, 'dflt_ext'):
            self.eff_ext = self.dflt_ext

        if hasattr(self, 'eff_ext'):
            self.ctype = parser.operandTypeMap[self.eff_ext]

    # Finalize additional fields (primarily code fields).  This step
    # is done separately since some of these fields may depend on the
    # register index enumeration that hasn't been performed yet at the
    # time of __init__().  The register index enumeration is affected
    # by predicated register reads/writes.  Hence, we forward the flags
    # that indicate whether or not predication is in use.
    def finalize(self, predRead, predWrite):
        self.flags = self.getFlags()
        self.constructor = self.makeConstructor(predRead, predWrite)
        self.op_decl = self.makeDecl()

        if self.is_src:
            self.op_rd = self.makeRead(predRead)
            self.op_src_decl = self.makeDecl()
        else:
            self.op_rd = ''
            self.op_src_decl = ''

        if self.is_dest:
            self.op_wb = self.makeWrite(predWrite)
            self.op_dest_decl = self.makeDecl()
        else:
            self.op_wb = ''
            self.op_dest_decl = ''

    # Type predicates, overridden by the concrete operand subclasses.
    def isMem(self):
        return 0

    def isReg(self):
        return 0

    def isFloatReg(self):
        return 0

    def isIntReg(self):
        return 0

    def isCCReg(self):
        return 0

    def isControlReg(self):
        return 0

    def isVecReg(self):
        return 0

    def isVecElem(self):
        return 0

    def isPCState(self):
        return 0

    def isPCPart(self):
        # A named component of the PC state (reg_spec set).
        return self.isPCState() and self.reg_spec

    def hasReadPred(self):
        return self.read_predicate != None

    def hasWritePred(self):
        return self.write_predicate != None

    def getFlags(self):
        # note the empty slice '[:]' gives us a copy of self.flags[0]
        # instead of a reference to it
        my_flags = self.flags[0][:]
        if self.is_src:
            my_flags += self.flags[1]
        if self.is_dest:
            my_flags += self.flags[2]
        return my_flags

    def makeDecl(self):
        # Note that initializations in the declarations are solely
        # to avoid 'uninitialized variable' errors from the compiler.
        return self.ctype + ' ' + self.base_name + ' = 0;\n';
518 return self.ctype + ' ' + self.base_name + ' = 0;\n'; 519 520 521src_reg_constructor = '\n\t_srcRegIdx[_numSrcRegs++] = RegId(%s, %s);' 522dst_reg_constructor = '\n\t_destRegIdx[_numDestRegs++] = RegId(%s, %s);' 523 524 525class IntRegOperand(Operand): 526 reg_class = 'IntRegClass' 527 528 def isReg(self): 529 return 1 530 531 def isIntReg(self): 532 return 1 533 534 def makeConstructor(self, predRead, predWrite): 535 c_src = '' 536 c_dest = '' 537 538 if self.is_src: 539 c_src = src_reg_constructor % (self.reg_class, self.reg_spec) 540 if self.hasReadPred(): 541 c_src = '\n\tif (%s) {%s\n\t}' % \ 542 (self.read_predicate, c_src) 543 544 if self.is_dest: 545 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec) 546 c_dest += '\n\t_numIntDestRegs++;' 547 if self.hasWritePred(): 548 c_dest = '\n\tif (%s) {%s\n\t}' % \ 549 (self.write_predicate, c_dest) 550 551 return c_src + c_dest 552 553 def makeRead(self, predRead): 554 if (self.ctype == 'float' or self.ctype == 'double'): 555 error('Attempt to read integer register as FP') 556 if self.read_code != None: 557 return self.buildReadCode('readIntRegOperand') 558 559 int_reg_val = '' 560 if predRead: 561 int_reg_val = 'xc->readIntRegOperand(this, _sourceIndex++)' 562 if self.hasReadPred(): 563 int_reg_val = '(%s) ? 
%s : 0' % \ 564 (self.read_predicate, int_reg_val) 565 else: 566 int_reg_val = 'xc->readIntRegOperand(this, %d)' % self.src_reg_idx 567 568 return '%s = %s;\n' % (self.base_name, int_reg_val) 569 570 def makeWrite(self, predWrite): 571 if (self.ctype == 'float' or self.ctype == 'double'): 572 error('Attempt to write integer register as FP') 573 if self.write_code != None: 574 return self.buildWriteCode('setIntRegOperand') 575 576 if predWrite: 577 wp = 'true' 578 if self.hasWritePred(): 579 wp = self.write_predicate 580 581 wcond = 'if (%s)' % (wp) 582 windex = '_destIndex++' 583 else: 584 wcond = '' 585 windex = '%d' % self.dest_reg_idx 586 587 wb = ''' 588 %s 589 { 590 %s final_val = %s; 591 xc->setIntRegOperand(this, %s, final_val);\n 592 if (traceData) { traceData->setData(final_val); } 593 }''' % (wcond, self.ctype, self.base_name, windex) 594 595 return wb 596 597class FloatRegOperand(Operand): 598 reg_class = 'FloatRegClass' 599 600 def isReg(self): 601 return 1 602 603 def isFloatReg(self): 604 return 1 605 606 def makeConstructor(self, predRead, predWrite): 607 c_src = '' 608 c_dest = '' 609 610 if self.is_src: 611 c_src = src_reg_constructor % (self.reg_class, self.reg_spec) 612 613 if self.is_dest: 614 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec) 615 c_dest += '\n\t_numFPDestRegs++;' 616 617 return c_src + c_dest 618 619 def makeRead(self, predRead): 620 bit_select = 0 621 if (self.ctype == 'float' or self.ctype == 'double'): 622 func = 'readFloatRegOperand' 623 else: 624 func = 'readFloatRegOperandBits' 625 if self.read_code != None: 626 return self.buildReadCode(func) 627 628 if predRead: 629 rindex = '_sourceIndex++' 630 else: 631 rindex = '%d' % self.src_reg_idx 632 633 return '%s = xc->%s(this, %s);\n' % \ 634 (self.base_name, func, rindex) 635 636 def makeWrite(self, predWrite): 637 if (self.ctype == 'float' or self.ctype == 'double'): 638 func = 'setFloatRegOperand' 639 else: 640 func = 'setFloatRegOperandBits' 641 if 
self.write_code != None: 642 return self.buildWriteCode(func) 643 644 if predWrite: 645 wp = '_destIndex++' 646 else: 647 wp = '%d' % self.dest_reg_idx 648 wp = 'xc->%s(this, %s, final_val);' % (func, wp) 649 650 wb = ''' 651 { 652 %s final_val = %s; 653 %s\n 654 if (traceData) { traceData->setData(final_val); } 655 }''' % (self.ctype, self.base_name, wp) 656 return wb 657 658class VecRegOperand(Operand): 659 reg_class = 'VecRegClass' 660 661 def __init__(self, parser, full_name, ext, is_src, is_dest): 662 Operand.__init__(self, parser, full_name, ext, is_src, is_dest) 663 self.elemExt = None 664 self.parser = parser 665 666 def isReg(self): 667 return 1 668 669 def isVecReg(self): 670 return 1 671 672 def makeDeclElem(self, elem_op): 673 (elem_name, elem_ext) = elem_op 674 (elem_spec, dflt_elem_ext, zeroing) = self.elems[elem_name] 675 if elem_ext: 676 ext = elem_ext 677 else: 678 ext = dflt_elem_ext 679 ctype = self.parser.operandTypeMap[ext] 680 return '\n\t%s %s = 0;' % (ctype, elem_name) 681 682 def makeDecl(self): 683 if not self.is_dest and self.is_src: 684 c_decl = '\t/* Vars for %s*/' % (self.base_name) 685 if hasattr(self, 'active_elems'): 686 if self.active_elems: 687 for elem in self.active_elems: 688 c_decl += self.makeDeclElem(elem) 689 return c_decl + '\t/* End vars for %s */\n' % (self.base_name) 690 else: 691 return '' 692 693 def makeConstructor(self, predRead, predWrite): 694 c_src = '' 695 c_dest = '' 696 697 numAccessNeeded = 1 698 699 if self.is_src: 700 c_src = src_reg_constructor % (self.reg_class, self.reg_spec) 701 702 if self.is_dest: 703 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec) 704 c_dest += '\n\t_numVecDestRegs++;' 705 706 return c_src + c_dest 707 708 # Read destination register to write 709 def makeReadWElem(self, elem_op): 710 (elem_name, elem_ext) = elem_op 711 (elem_spec, dflt_elem_ext, zeroing) = self.elems[elem_name] 712 if elem_ext: 713 ext = elem_ext 714 else: 715 ext = dflt_elem_ext 716 ctype = 
self.parser.operandTypeMap[ext] 717 c_read = '\t\t%s& %s = %s[%s];\n' % \ 718 (ctype, elem_name, self.base_name, elem_spec) 719 return c_read 720 721 def makeReadW(self, predWrite): 722 func = 'getWritableVecRegOperand' 723 if self.read_code != None: 724 return self.buildReadCode(func) 725 726 if predWrite: 727 rindex = '_destIndex++' 728 else: 729 rindex = '%d' % self.dest_reg_idx 730 731 c_readw = '\t\t%s& tmp_d%s = xc->%s(this, %s);\n'\ 732 % ('TheISA::VecRegContainer', rindex, func, rindex) 733 if self.elemExt: 734 c_readw += '\t\tauto %s = tmp_d%s.as<%s>();\n' % (self.base_name, 735 rindex, self.parser.operandTypeMap[self.elemExt]) 736 if self.ext: 737 c_readw += '\t\tauto %s = tmp_d%s.as<%s>();\n' % (self.base_name, 738 rindex, self.parser.operandTypeMap[self.ext]) 739 if hasattr(self, 'active_elems'): 740 if self.active_elems: 741 for elem in self.active_elems: 742 c_readw += self.makeReadWElem(elem) 743 return c_readw 744 745 # Normal source operand read 746 def makeReadElem(self, elem_op, name): 747 (elem_name, elem_ext) = elem_op 748 (elem_spec, dflt_elem_ext, zeroing) = self.elems[elem_name] 749 750 if elem_ext: 751 ext = elem_ext 752 else: 753 ext = dflt_elem_ext 754 ctype = self.parser.operandTypeMap[ext] 755 c_read = '\t\t%s = %s[%s];\n' % \ 756 (elem_name, name, elem_spec) 757 return c_read 758 759 def makeRead(self, predRead): 760 func = 'readVecRegOperand' 761 if self.read_code != None: 762 return self.buildReadCode(func) 763 764 if predRead: 765 rindex = '_sourceIndex++' 766 else: 767 rindex = '%d' % self.src_reg_idx 768 769 name = self.base_name 770 if self.is_dest and self.is_src: 771 name += '_merger' 772 773 c_read = '\t\t%s& tmp_s%s = xc->%s(this, %s);\n' \ 774 % ('const TheISA::VecRegContainer', rindex, func, rindex) 775 # If the parser has detected that elements are being access, create 776 # the appropriate view 777 if self.elemExt: 778 c_read += '\t\tauto %s = tmp_s%s.as<%s>();\n' % \ 779 (name, rindex, 
self.parser.operandTypeMap[self.elemExt]) 780 if self.ext: 781 c_read += '\t\tauto %s = tmp_s%s.as<%s>();\n' % \ 782 (name, rindex, self.parser.operandTypeMap[self.ext]) 783 if hasattr(self, 'active_elems'): 784 if self.active_elems: 785 for elem in self.active_elems: 786 c_read += self.makeReadElem(elem, name) 787 return c_read 788 789 def makeWrite(self, predWrite): 790 func = 'setVecRegOperand' 791 if self.write_code != None: 792 return self.buildWriteCode(func) 793 794 wb = ''' 795 if (traceData) { 796 warn_once("Vectors not supported yet in tracedata"); 797 /*traceData->setData(final_val);*/ 798 } 799 ''' 800 return wb 801 802 def finalize(self, predRead, predWrite): 803 super(VecRegOperand, self).finalize(predRead, predWrite) 804 if self.is_dest: 805 self.op_rd = self.makeReadW(predWrite) + self.op_rd 806 807class VecElemOperand(Operand): 808 reg_class = 'VectorElemClass' 809 810 def isReg(self): 811 return 1 812 813 def isVecElem(self): 814 return 1 815 816 def makeDecl(self): 817 if self.is_dest and not self.is_src: 818 return '\n\t%s %s;' % (self.ctype, self.base_name) 819 else: 820 return '' 821 822 def makeConstructor(self, predRead, predWrite): 823 c_src = '' 824 c_dest = '' 825 826 numAccessNeeded = 1 827 regId = 'RegId(%s, %s * numVecElemPerVecReg + elemIdx, %s)' % \ 828 (self.reg_class, self.reg_spec) 829 830 if self.is_src: 831 c_src = ('\n\t_srcRegIdx[_numSrcRegs++] = RegId(%s, %s, %s);' % 832 (self.reg_class, self.reg_spec, self.elem_spec)) 833 834 if self.is_dest: 835 c_dest = ('\n\t_destRegIdx[_numDestRegs++] = RegId(%s, %s, %s);' % 836 (self.reg_class, self.reg_spec, self.elem_spec)) 837 c_dest += '\n\t_numVecElemDestRegs++;' 838 return c_src + c_dest 839 840 def makeRead(self, predRead): 841 c_read = ('\n/* Elem is kept inside the operand description */' + 842 '\n\tVecElem %s = xc->readVecElemOperand(this, %d);' % 843 (self.base_name, self.src_reg_idx)) 844 return c_read 845 846 def makeWrite(self, predWrite): 847 c_write = ('\n/* Elem is kept 
inside the operand description */' + 848 '\n\txc->setVecElemOperand(this, %d, %s);' % 849 (self.dest_reg_idx, self.base_name)) 850 return c_write 851 852class CCRegOperand(Operand): 853 reg_class = 'CCRegClass' 854 855 def isReg(self): 856 return 1 857 858 def isCCReg(self): 859 return 1 860 861 def makeConstructor(self, predRead, predWrite): 862 c_src = '' 863 c_dest = '' 864 865 if self.is_src: 866 c_src = src_reg_constructor % (self.reg_class, self.reg_spec) 867 if self.hasReadPred(): 868 c_src = '\n\tif (%s) {%s\n\t}' % \ 869 (self.read_predicate, c_src) 870 871 if self.is_dest: 872 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec) 873 c_dest += '\n\t_numCCDestRegs++;' 874 if self.hasWritePred(): 875 c_dest = '\n\tif (%s) {%s\n\t}' % \ 876 (self.write_predicate, c_dest) 877 878 return c_src + c_dest 879 880 def makeRead(self, predRead): 881 if (self.ctype == 'float' or self.ctype == 'double'): 882 error('Attempt to read condition-code register as FP') 883 if self.read_code != None: 884 return self.buildReadCode('readCCRegOperand') 885 886 int_reg_val = '' 887 if predRead: 888 int_reg_val = 'xc->readCCRegOperand(this, _sourceIndex++)' 889 if self.hasReadPred(): 890 int_reg_val = '(%s) ? 
%s : 0' % \ 891 (self.read_predicate, int_reg_val) 892 else: 893 int_reg_val = 'xc->readCCRegOperand(this, %d)' % self.src_reg_idx 894 895 return '%s = %s;\n' % (self.base_name, int_reg_val) 896 897 def makeWrite(self, predWrite): 898 if (self.ctype == 'float' or self.ctype == 'double'): 899 error('Attempt to write condition-code register as FP') 900 if self.write_code != None: 901 return self.buildWriteCode('setCCRegOperand') 902 903 if predWrite: 904 wp = 'true' 905 if self.hasWritePred(): 906 wp = self.write_predicate 907 908 wcond = 'if (%s)' % (wp) 909 windex = '_destIndex++' 910 else: 911 wcond = '' 912 windex = '%d' % self.dest_reg_idx 913 914 wb = ''' 915 %s 916 { 917 %s final_val = %s; 918 xc->setCCRegOperand(this, %s, final_val);\n 919 if (traceData) { traceData->setData(final_val); } 920 }''' % (wcond, self.ctype, self.base_name, windex) 921 922 return wb 923 924class ControlRegOperand(Operand): 925 reg_class = 'MiscRegClass' 926 927 def isReg(self): 928 return 1 929 930 def isControlReg(self): 931 return 1 932 933 def makeConstructor(self, predRead, predWrite): 934 c_src = '' 935 c_dest = '' 936 937 if self.is_src: 938 c_src = src_reg_constructor % (self.reg_class, self.reg_spec) 939 940 if self.is_dest: 941 c_dest = dst_reg_constructor % (self.reg_class, self.reg_spec) 942 943 return c_src + c_dest 944 945 def makeRead(self, predRead): 946 bit_select = 0 947 if (self.ctype == 'float' or self.ctype == 'double'): 948 error('Attempt to read control register as FP') 949 if self.read_code != None: 950 return self.buildReadCode('readMiscRegOperand') 951 952 if predRead: 953 rindex = '_sourceIndex++' 954 else: 955 rindex = '%d' % self.src_reg_idx 956 957 return '%s = xc->readMiscRegOperand(this, %s);\n' % \ 958 (self.base_name, rindex) 959 960 def makeWrite(self, predWrite): 961 if (self.ctype == 'float' or self.ctype == 'double'): 962 error('Attempt to write control register as FP') 963 if self.write_code != None: 964 return 
self.buildWriteCode('setMiscRegOperand') 965 966 if predWrite: 967 windex = '_destIndex++' 968 else: 969 windex = '%d' % self.dest_reg_idx 970 971 wb = 'xc->setMiscRegOperand(this, %s, %s);\n' % \ 972 (windex, self.base_name) 973 wb += 'if (traceData) { traceData->setData(%s); }' % \ 974 self.base_name 975 976 return wb 977 978class MemOperand(Operand): 979 def isMem(self): 980 return 1 981 982 def makeConstructor(self, predRead, predWrite): 983 return '' 984 985 def makeDecl(self): 986 # Declare memory data variable. 987 return '%s %s;\n' % (self.ctype, self.base_name) 988 989 def makeRead(self, predRead): 990 if self.read_code != None: 991 return self.buildReadCode() 992 return '' 993 994 def makeWrite(self, predWrite): 995 if self.write_code != None: 996 return self.buildWriteCode() 997 return '' 998 999class PCStateOperand(Operand): 1000 def makeConstructor(self, predRead, predWrite): 1001 return '' 1002 1003 def makeRead(self, predRead): 1004 if self.reg_spec: 1005 # A component of the PC state. 1006 return '%s = __parserAutoPCState.%s();\n' % \ 1007 (self.base_name, self.reg_spec) 1008 else: 1009 # The whole PC state itself. 1010 return '%s = xc->pcState();\n' % self.base_name 1011 1012 def makeWrite(self, predWrite): 1013 if self.reg_spec: 1014 # A component of the PC state. 1015 return '__parserAutoPCState.%s(%s);\n' % \ 1016 (self.reg_spec, self.base_name) 1017 else: 1018 # The whole PC state itself. 1019 return 'xc->pcState(%s);\n' % self.base_name 1020 1021 def makeDecl(self): 1022 ctype = 'TheISA::PCState' 1023 if self.isPCPart(): 1024 ctype = self.ctype 1025 # Note that initializations in the declarations are solely 1026 # to avoid 'uninitialized variable' errors from the compiler. 1027 return '%s %s = 0;\n' % (ctype, self.base_name) 1028 1029 def isPCState(self): 1030 return 1 1031 1032class OperandList(object): 1033 '''Find all the operands in the given code block. 
Returns an operand 1034 descriptor list (instance of class OperandList).''' 1035 def __init__(self, parser, code): 1036 self.items = [] 1037 self.bases = {} 1038 # delete strings and comments so we don't match on operands inside 1039 for regEx in (stringRE, commentRE): 1040 code = regEx.sub('', code) 1041 # search for operands 1042 next_pos = 0 1043 while 1: 1044 match = parser.operandsRE.search(code, next_pos) 1045 if not match: 1046 # no more matches: we're done 1047 break 1048 op = match.groups() 1049 # regexp groups are operand full name, base, and extension 1050 (op_full, op_base, op_ext) = op 1051 # If is a elem operand, define or update the corresponding 1052 # vector operand 1053 isElem = False 1054 if op_base in parser.elemToVector: 1055 isElem = True 1056 elem_op = (op_base, op_ext) 1057 op_base = parser.elemToVector[op_base] 1058 op_ext = '' # use the default one 1059 # if the token following the operand is an assignment, this is 1060 # a destination (LHS), else it's a source (RHS) 1061 is_dest = (assignRE.match(code, match.end()) != None) 1062 is_src = not is_dest 1063 1064 # see if we've already seen this one 1065 op_desc = self.find_base(op_base) 1066 if op_desc: 1067 if op_ext and op_ext != '' and op_desc.ext != op_ext: 1068 error ('Inconsistent extensions for operand %s: %s - %s' \ 1069 % (op_base, op_desc.ext, op_ext)) 1070 op_desc.is_src = op_desc.is_src or is_src 1071 op_desc.is_dest = op_desc.is_dest or is_dest 1072 if isElem: 1073 (elem_base, elem_ext) = elem_op 1074 found = False 1075 for ae in op_desc.active_elems: 1076 (ae_base, ae_ext) = ae 1077 if ae_base == elem_base: 1078 if ae_ext != elem_ext: 1079 error('Inconsistent extensions for elem' 1080 ' operand %s' % elem_base) 1081 else: 1082 found = True 1083 if not found: 1084 op_desc.active_elems.append(elem_op) 1085 else: 1086 # new operand: create new descriptor 1087 op_desc = parser.operandNameMap[op_base](parser, 1088 op_full, op_ext, is_src, is_dest) 1089 # if operand is a vector elem, 
add the corresponding vector 1090 # operand if not already done 1091 if isElem: 1092 op_desc.elemExt = elem_op[1] 1093 op_desc.active_elems = [elem_op] 1094 self.append(op_desc) 1095 # start next search after end of current match 1096 next_pos = match.end() 1097 self.sort() 1098 # enumerate source & dest register operands... used in building 1099 # constructor later 1100 self.numSrcRegs = 0 1101 self.numDestRegs = 0 1102 self.numFPDestRegs = 0 1103 self.numIntDestRegs = 0 1104 self.numVecDestRegs = 0 1105 self.numCCDestRegs = 0 1106 self.numMiscDestRegs = 0 1107 self.memOperand = None 1108 1109 # Flags to keep track if one or more operands are to be read/written 1110 # conditionally. 1111 self.predRead = False 1112 self.predWrite = False 1113 1114 for op_desc in self.items: 1115 if op_desc.isReg(): 1116 if op_desc.is_src: 1117 op_desc.src_reg_idx = self.numSrcRegs 1118 self.numSrcRegs += 1 1119 if op_desc.is_dest: 1120 op_desc.dest_reg_idx = self.numDestRegs 1121 self.numDestRegs += 1 1122 if op_desc.isFloatReg(): 1123 self.numFPDestRegs += 1 1124 elif op_desc.isIntReg(): 1125 self.numIntDestRegs += 1 1126 elif op_desc.isVecReg(): 1127 self.numVecDestRegs += 1 1128 elif op_desc.isCCReg(): 1129 self.numCCDestRegs += 1 1130 elif op_desc.isControlReg(): 1131 self.numMiscDestRegs += 1 1132 elif op_desc.isMem(): 1133 if self.memOperand: 1134 error("Code block has more than one memory operand.") 1135 self.memOperand = op_desc 1136 1137 # Check if this operand has read/write predication. If true, then 1138 # the microop will dynamically index source/dest registers. 
1139 self.predRead = self.predRead or op_desc.hasReadPred() 1140 self.predWrite = self.predWrite or op_desc.hasWritePred() 1141 1142 if parser.maxInstSrcRegs < self.numSrcRegs: 1143 parser.maxInstSrcRegs = self.numSrcRegs 1144 if parser.maxInstDestRegs < self.numDestRegs: 1145 parser.maxInstDestRegs = self.numDestRegs 1146 if parser.maxMiscDestRegs < self.numMiscDestRegs: 1147 parser.maxMiscDestRegs = self.numMiscDestRegs 1148 1149 # now make a final pass to finalize op_desc fields that may depend 1150 # on the register enumeration 1151 for op_desc in self.items: 1152 op_desc.finalize(self.predRead, self.predWrite) 1153 1154 def __len__(self): 1155 return len(self.items) 1156 1157 def __getitem__(self, index): 1158 return self.items[index] 1159 1160 def append(self, op_desc): 1161 self.items.append(op_desc) 1162 self.bases[op_desc.base_name] = op_desc 1163 1164 def find_base(self, base_name): 1165 # like self.bases[base_name], but returns None if not found 1166 # (rather than raising exception) 1167 return self.bases.get(base_name) 1168 1169 # internal helper function for concat[Some]Attr{Strings|Lists} 1170 def __internalConcatAttrs(self, attr_name, filter, result): 1171 for op_desc in self.items: 1172 if filter(op_desc): 1173 result += getattr(op_desc, attr_name) 1174 return result 1175 1176 # return a single string that is the concatenation of the (string) 1177 # values of the specified attribute for all operands 1178 def concatAttrStrings(self, attr_name): 1179 return self.__internalConcatAttrs(attr_name, lambda x: 1, '') 1180 1181 # like concatAttrStrings, but only include the values for the operands 1182 # for which the provided filter function returns true 1183 def concatSomeAttrStrings(self, filter, attr_name): 1184 return self.__internalConcatAttrs(attr_name, filter, '') 1185 1186 # return a single list that is the concatenation of the (list) 1187 # values of the specified attribute for all operands 1188 def concatAttrLists(self, attr_name): 1189 return 
self.__internalConcatAttrs(attr_name, lambda x: 1, []) 1190 1191 # like concatAttrLists, but only include the values for the operands 1192 # for which the provided filter function returns true 1193 def concatSomeAttrLists(self, filter, attr_name): 1194 return self.__internalConcatAttrs(attr_name, filter, []) 1195 1196 def sort(self): 1197 self.items.sort(lambda a, b: a.sort_pri - b.sort_pri) 1198 1199class SubOperandList(OperandList): 1200 '''Find all the operands in the given code block. Returns an operand 1201 descriptor list (instance of class OperandList).''' 1202 def __init__(self, parser, code, master_list): 1203 self.items = [] 1204 self.bases = {} 1205 # delete strings and comments so we don't match on operands inside 1206 for regEx in (stringRE, commentRE): 1207 code = regEx.sub('', code) 1208 # search for operands 1209 next_pos = 0 1210 while 1: 1211 match = parser.operandsRE.search(code, next_pos) 1212 if not match: 1213 # no more matches: we're done 1214 break 1215 op = match.groups() 1216 # regexp groups are operand full name, base, and extension 1217 (op_full, op_base, op_ext) = op 1218 # If is a elem operand, define or update the corresponding 1219 # vector operand 1220 if op_base in parser.elemToVector: 1221 elem_op = op_base 1222 op_base = parser.elemToVector[elem_op] 1223 # find this op in the master list 1224 op_desc = master_list.find_base(op_base) 1225 if not op_desc: 1226 error('Found operand %s which is not in the master list!' 
1227 % op_base) 1228 else: 1229 # See if we've already found this operand 1230 op_desc = self.find_base(op_base) 1231 if not op_desc: 1232 # if not, add a reference to it to this sub list 1233 self.append(master_list.bases[op_base]) 1234 1235 # start next search after end of current match 1236 next_pos = match.end() 1237 self.sort() 1238 self.memOperand = None 1239 # Whether the whole PC needs to be read so parts of it can be accessed 1240 self.readPC = False 1241 # Whether the whole PC needs to be written after parts of it were 1242 # changed 1243 self.setPC = False 1244 # Whether this instruction manipulates the whole PC or parts of it. 1245 # Mixing the two is a bad idea and flagged as an error. 1246 self.pcPart = None 1247 1248 # Flags to keep track if one or more operands are to be read/written 1249 # conditionally. 1250 self.predRead = False 1251 self.predWrite = False 1252 1253 for op_desc in self.items: 1254 if op_desc.isPCPart(): 1255 self.readPC = True 1256 if op_desc.is_dest: 1257 self.setPC = True 1258 1259 if op_desc.isPCState(): 1260 if self.pcPart is not None: 1261 if self.pcPart and not op_desc.isPCPart() or \ 1262 not self.pcPart and op_desc.isPCPart(): 1263 error("Mixed whole and partial PC state operands.") 1264 self.pcPart = op_desc.isPCPart() 1265 1266 if op_desc.isMem(): 1267 if self.memOperand: 1268 error("Code block has more than one memory operand.") 1269 self.memOperand = op_desc 1270 1271 # Check if this operand has read/write predication. If true, then 1272 # the microop will dynamically index source/dest registers. 
1273 self.predRead = self.predRead or op_desc.hasReadPred() 1274 self.predWrite = self.predWrite or op_desc.hasWritePred() 1275 1276# Regular expression object to match C++ strings 1277stringRE = re.compile(r'"([^"\\]|\\.)*"') 1278 1279# Regular expression object to match C++ comments 1280# (used in findOperands()) 1281commentRE = re.compile(r'(^)?[^\S\n]*/(?:\*(.*?)\*/[^\S\n]*|/[^\n]*)($)?', 1282 re.DOTALL | re.MULTILINE) 1283 1284# Regular expression object to match assignment statements (used in 1285# findOperands()). If the code immediately following the first 1286# appearance of the operand matches this regex, then the operand 1287# appears to be on the LHS of an assignment, and is thus a 1288# destination. basically we're looking for an '=' that's not '=='. 1289# The heinous tangle before that handles the case where the operand 1290# has an array subscript. 1291assignRE = re.compile(r'(\[[^\]]+\])?\s*=(?!=)', re.MULTILINE) 1292 1293def makeFlagConstructor(flag_list): 1294 if len(flag_list) == 0: 1295 return '' 1296 # filter out repeated flags 1297 flag_list.sort() 1298 i = 1 1299 while i < len(flag_list): 1300 if flag_list[i] == flag_list[i-1]: 1301 del flag_list[i] 1302 else: 1303 i += 1 1304 pre = '\n\tflags[' 1305 post = '] = true;' 1306 code = pre + string.join(flag_list, post + pre) + post 1307 return code 1308 1309# Assume all instruction flags are of the form 'IsFoo' 1310instFlagRE = re.compile(r'Is.*') 1311 1312# OpClass constants end in 'Op' except No_OpClass 1313opClassRE = re.compile(r'.*Op|No_OpClass') 1314 1315class InstObjParams(object): 1316 def __init__(self, parser, mnem, class_name, base_class = '', 1317 snippets = {}, opt_args = []): 1318 self.mnemonic = mnem 1319 self.class_name = class_name 1320 self.base_class = base_class 1321 if not isinstance(snippets, dict): 1322 snippets = {'code' : snippets} 1323 compositeCode = ' '.join(map(str, snippets.values())) 1324 self.snippets = snippets 1325 1326 self.operands = OperandList(parser, 
compositeCode) 1327 1328 # The header of the constructor declares the variables to be used 1329 # in the body of the constructor. 1330 header = '' 1331 header += '\n\t_numSrcRegs = 0;' 1332 header += '\n\t_numDestRegs = 0;' 1333 header += '\n\t_numFPDestRegs = 0;' 1334 header += '\n\t_numVecDestRegs = 0;' 1335 header += '\n\t_numVecElemDestRegs = 0;' 1336 header += '\n\t_numIntDestRegs = 0;' 1337 header += '\n\t_numCCDestRegs = 0;' 1338 1339 self.constructor = header + \ 1340 self.operands.concatAttrStrings('constructor') 1341 1342 self.flags = self.operands.concatAttrLists('flags') 1343 1344 self.op_class = None 1345 1346 # Optional arguments are assumed to be either StaticInst flags 1347 # or an OpClass value. To avoid having to import a complete 1348 # list of these values to match against, we do it ad-hoc 1349 # with regexps. 1350 for oa in opt_args: 1351 if instFlagRE.match(oa): 1352 self.flags.append(oa) 1353 elif opClassRE.match(oa): 1354 self.op_class = oa 1355 else: 1356 error('InstObjParams: optional arg "%s" not recognized ' 1357 'as StaticInst::Flag or OpClass.' % oa) 1358 1359 # Make a basic guess on the operand class if not set. 1360 # These are good enough for most cases. 
1361 if not self.op_class: 1362 if 'IsStore' in self.flags: 1363 # The order matters here: 'IsFloating' and 'IsInteger' are 1364 # usually set in FP instructions because of the base 1365 # register 1366 if 'IsFloating' in self.flags: 1367 self.op_class = 'FloatMemWriteOp' 1368 else: 1369 self.op_class = 'MemWriteOp' 1370 elif 'IsLoad' in self.flags or 'IsPrefetch' in self.flags: 1371 # The order matters here: 'IsFloating' and 'IsInteger' are 1372 # usually set in FP instructions because of the base 1373 # register 1374 if 'IsFloating' in self.flags: 1375 self.op_class = 'FloatMemReadOp' 1376 else: 1377 self.op_class = 'MemReadOp' 1378 elif 'IsFloating' in self.flags: 1379 self.op_class = 'FloatAddOp' 1380 elif 'IsVector' in self.flags: 1381 self.op_class = 'SimdAddOp' 1382 else: 1383 self.op_class = 'IntAluOp' 1384 1385 # add flag initialization to contructor here to include 1386 # any flags added via opt_args 1387 self.constructor += makeFlagConstructor(self.flags) 1388 1389 # if 'IsFloating' is set, add call to the FP enable check 1390 # function (which should be provided by isa_desc via a declare) 1391 # if 'IsVector' is set, add call to the Vector enable check 1392 # function (which should be provided by isa_desc via a declare) 1393 if 'IsFloating' in self.flags: 1394 self.fp_enable_check = 'fault = checkFpEnableFault(xc);' 1395 elif 'IsVector' in self.flags: 1396 self.fp_enable_check = 'fault = checkVecEnableFault(xc);' 1397 else: 1398 self.fp_enable_check = '' 1399 1400############## 1401# Stack: a simple stack object. Used for both formats (formatStack) 1402# and default cases (defaultStack). Simply wraps a list to give more 1403# stack-like syntax and enable initialization with an argument list 1404# (as opposed to an argument that's a list). 
class Stack(list):
    '''A simple stack object: wraps a list to give more stack-like
    syntax (push/top) and enable initialization with an argument list
    (as opposed to an argument that's a list).'''
    def __init__(self, *items):
        list.__init__(self, items)

    def push(self, item):
        self.append(item);

    def top(self):
        return self[-1]

# Format a file include stack backtrace as a string
def backtrace(filename_stack):
    fmt = "In file included from %s:"
    return "\n".join([fmt % f for f in filename_stack])


#######################
#
# LineTracker: track filenames along with line numbers in PLY lineno fields
# PLY explicitly doesn't do anything with 'lineno' except propagate
# it. This class lets us tie filenames with the line numbers with a
# minimum of disruption to existing increment code.
#

class LineTracker(object):
    '''Immutable (filename, lineno) pair used in place of a plain int
    in PLY's lineno fields so error messages can name the file.'''
    def __init__(self, filename, lineno=1):
        self.filename = filename
        self.lineno = lineno

    # Overload '+=' for increments. We need to create a new object on
    # each update else every token ends up referencing the same
    # constantly incrementing instance.
    def __iadd__(self, incr):
        return LineTracker(self.filename, self.lineno + incr)

    def __str__(self):
        return "%s:%d" % (self.filename, self.lineno)

    # In case there are places where someone really expects a number
    def __int__(self):
        return self.lineno


#######################
#
# ISA Parser
#   parses ISA DSL and emits C++ headers and source
#

class ISAParser(Grammar):
|
1465 class CpuModel(object): 1466 def __init__(self, name, filename, includes, strings): 1467 self.name = name 1468 self.filename = filename 1469 self.includes = includes 1470 self.strings = strings 1471
| |
1472 def __init__(self, output_dir): 1473 super(ISAParser, self).__init__() 1474 self.output_dir = output_dir 1475 1476 self.filename = None # for output file watermarking/scaremongering 1477
| 1456 def __init__(self, output_dir): 1457 super(ISAParser, self).__init__() 1458 self.output_dir = output_dir 1459 1460 self.filename = None # for output file watermarking/scaremongering 1461
|
1478 self.cpuModels = [ 1479 ISAParser.CpuModel('ExecContext', 1480 'generic_cpu_exec.cc', 1481 '#include "cpu/exec_context.hh"', 1482 { "CPU_exec_context" : "ExecContext" }), 1483 ] 1484
| |
1485 # variable to hold templates 1486 self.templateMap = {} 1487 1488 # This dictionary maps format name strings to Format objects. 1489 self.formatMap = {} 1490 1491 # Track open files and, if applicable, how many chunks it has been 1492 # split into so far. 1493 self.files = {} 1494 self.splits = {} 1495 1496 # isa_name / namespace identifier from namespace declaration. 1497 # before the namespace declaration, None. 1498 self.isa_name = None 1499 self.namespace = None 1500 1501 # The format stack. 1502 self.formatStack = Stack(NoFormat()) 1503 1504 # The default case stack. 1505 self.defaultStack = Stack(None) 1506 1507 # Stack that tracks current file and line number. Each 1508 # element is a tuple (filename, lineno) that records the 1509 # *current* filename and the line number in the *previous* 1510 # file where it was included. 1511 self.fileNameStack = Stack() 1512 1513 symbols = ('makeList', 're', 'string') 1514 self.exportContext = dict([(s, eval(s)) for s in symbols]) 1515 1516 self.maxInstSrcRegs = 0 1517 self.maxInstDestRegs = 0 1518 self.maxMiscDestRegs = 0 1519 1520 def __getitem__(self, i): # Allow object (self) to be 1521 return getattr(self, i) # passed to %-substitutions 1522 1523 # Change the file suffix of a base filename: 1524 # (e.g.) decoder.cc -> decoder-g.cc.inc for 'global' outputs 1525 def suffixize(self, s, sec): 1526 extn = re.compile('(\.[^\.]+)$') # isolate extension 1527 if self.namespace: 1528 return extn.sub(r'-ns\1.inc', s) # insert some text on either side 1529 else: 1530 return extn.sub(r'-g\1.inc', s) 1531 1532 # Get the file object for emitting code into the specified section 1533 # (header, decoder, exec, decode_block). 
1534 def get_file(self, section): 1535 if section == 'decode_block': 1536 filename = 'decode-method.cc.inc' 1537 else: 1538 if section == 'header': 1539 file = 'decoder.hh' 1540 else: 1541 file = '%s.cc' % section 1542 filename = self.suffixize(file, section) 1543 try: 1544 return self.files[filename] 1545 except KeyError: pass 1546 1547 f = self.open(filename) 1548 self.files[filename] = f 1549 1550 # The splittable files are the ones with many independent 1551 # per-instruction functions - the decoder's instruction constructors 1552 # and the instruction execution (execute()) methods. These both have 1553 # the suffix -ns.cc.inc, meaning they are within the namespace part 1554 # of the ISA, contain object-emitting C++ source, and are included 1555 # into other top-level files. These are the files that need special 1556 # #define's to allow parts of them to be compiled separately. Rather 1557 # than splitting the emissions into separate files, the monolithic 1558 # output of the ISA parser is maintained, but the value (or lack 1559 # thereof) of the __SPLIT definition during C preprocessing will 1560 # select the different chunks. If no 'split' directives are used, 1561 # the cpp emissions have no effect. 1562 if re.search('-ns.cc.inc$', filename): 1563 print >>f, '#if !defined(__SPLIT) || (__SPLIT == 1)' 1564 self.splits[f] = 1 1565 # ensure requisite #include's 1566 elif filename == 'decoder-g.hh.inc': 1567 print >>f, '#include "base/bitfield.hh"' 1568 1569 return f 1570 1571 # Weave together the parts of the different output sections by 1572 # #include'ing them into some very short top-level .cc/.hh files. 1573 # These small files make it much clearer how this tool works, since 1574 # you directly see the chunks emitted as files that are #include'd. 
1575 def write_top_level_files(self): 1576 # decoder header - everything depends on this 1577 file = 'decoder.hh' 1578 with self.open(file) as f: 1579 fn = 'decoder-g.hh.inc' 1580 assert(fn in self.files) 1581 f.write('#include "%s"\n' % fn) 1582 1583 fn = 'decoder-ns.hh.inc' 1584 assert(fn in self.files) 1585 f.write('namespace %s {\n#include "%s"\n}\n' 1586 % (self.namespace, fn)) 1587 1588 # decoder method - cannot be split 1589 file = 'decoder.cc' 1590 with self.open(file) as f: 1591 fn = 'decoder-g.cc.inc' 1592 assert(fn in self.files) 1593 f.write('#include "%s"\n' % fn) 1594 1595 fn = 'decoder.hh' 1596 f.write('#include "%s"\n' % fn) 1597 1598 fn = 'decode-method.cc.inc' 1599 # is guaranteed to have been written for parse to complete 1600 f.write('#include "%s"\n' % fn) 1601 1602 extn = re.compile('(\.[^\.]+)$') 1603 1604 # instruction constructors 1605 splits = self.splits[self.get_file('decoder')] 1606 file_ = 'inst-constrs.cc' 1607 for i in range(1, splits+1): 1608 if splits > 1: 1609 file = extn.sub(r'-%d\1' % i, file_) 1610 else: 1611 file = file_ 1612 with self.open(file) as f: 1613 fn = 'decoder-g.cc.inc' 1614 assert(fn in self.files) 1615 f.write('#include "%s"\n' % fn) 1616 1617 fn = 'decoder.hh' 1618 f.write('#include "%s"\n' % fn) 1619 1620 fn = 'decoder-ns.cc.inc' 1621 assert(fn in self.files) 1622 print >>f, 'namespace %s {' % self.namespace 1623 if splits > 1: 1624 print >>f, '#define __SPLIT %u' % i 1625 print >>f, '#include "%s"' % fn 1626 print >>f, '}' 1627
| 1462 # variable to hold templates 1463 self.templateMap = {} 1464 1465 # This dictionary maps format name strings to Format objects. 1466 self.formatMap = {} 1467 1468 # Track open files and, if applicable, how many chunks it has been 1469 # split into so far. 1470 self.files = {} 1471 self.splits = {} 1472 1473 # isa_name / namespace identifier from namespace declaration. 1474 # before the namespace declaration, None. 1475 self.isa_name = None 1476 self.namespace = None 1477 1478 # The format stack. 1479 self.formatStack = Stack(NoFormat()) 1480 1481 # The default case stack. 1482 self.defaultStack = Stack(None) 1483 1484 # Stack that tracks current file and line number. Each 1485 # element is a tuple (filename, lineno) that records the 1486 # *current* filename and the line number in the *previous* 1487 # file where it was included. 1488 self.fileNameStack = Stack() 1489 1490 symbols = ('makeList', 're', 'string') 1491 self.exportContext = dict([(s, eval(s)) for s in symbols]) 1492 1493 self.maxInstSrcRegs = 0 1494 self.maxInstDestRegs = 0 1495 self.maxMiscDestRegs = 0 1496 1497 def __getitem__(self, i): # Allow object (self) to be 1498 return getattr(self, i) # passed to %-substitutions 1499 1500 # Change the file suffix of a base filename: 1501 # (e.g.) decoder.cc -> decoder-g.cc.inc for 'global' outputs 1502 def suffixize(self, s, sec): 1503 extn = re.compile('(\.[^\.]+)$') # isolate extension 1504 if self.namespace: 1505 return extn.sub(r'-ns\1.inc', s) # insert some text on either side 1506 else: 1507 return extn.sub(r'-g\1.inc', s) 1508 1509 # Get the file object for emitting code into the specified section 1510 # (header, decoder, exec, decode_block). 
1511 def get_file(self, section): 1512 if section == 'decode_block': 1513 filename = 'decode-method.cc.inc' 1514 else: 1515 if section == 'header': 1516 file = 'decoder.hh' 1517 else: 1518 file = '%s.cc' % section 1519 filename = self.suffixize(file, section) 1520 try: 1521 return self.files[filename] 1522 except KeyError: pass 1523 1524 f = self.open(filename) 1525 self.files[filename] = f 1526 1527 # The splittable files are the ones with many independent 1528 # per-instruction functions - the decoder's instruction constructors 1529 # and the instruction execution (execute()) methods. These both have 1530 # the suffix -ns.cc.inc, meaning they are within the namespace part 1531 # of the ISA, contain object-emitting C++ source, and are included 1532 # into other top-level files. These are the files that need special 1533 # #define's to allow parts of them to be compiled separately. Rather 1534 # than splitting the emissions into separate files, the monolithic 1535 # output of the ISA parser is maintained, but the value (or lack 1536 # thereof) of the __SPLIT definition during C preprocessing will 1537 # select the different chunks. If no 'split' directives are used, 1538 # the cpp emissions have no effect. 1539 if re.search('-ns.cc.inc$', filename): 1540 print >>f, '#if !defined(__SPLIT) || (__SPLIT == 1)' 1541 self.splits[f] = 1 1542 # ensure requisite #include's 1543 elif filename == 'decoder-g.hh.inc': 1544 print >>f, '#include "base/bitfield.hh"' 1545 1546 return f 1547 1548 # Weave together the parts of the different output sections by 1549 # #include'ing them into some very short top-level .cc/.hh files. 1550 # These small files make it much clearer how this tool works, since 1551 # you directly see the chunks emitted as files that are #include'd. 
1552 def write_top_level_files(self): 1553 # decoder header - everything depends on this 1554 file = 'decoder.hh' 1555 with self.open(file) as f: 1556 fn = 'decoder-g.hh.inc' 1557 assert(fn in self.files) 1558 f.write('#include "%s"\n' % fn) 1559 1560 fn = 'decoder-ns.hh.inc' 1561 assert(fn in self.files) 1562 f.write('namespace %s {\n#include "%s"\n}\n' 1563 % (self.namespace, fn)) 1564 1565 # decoder method - cannot be split 1566 file = 'decoder.cc' 1567 with self.open(file) as f: 1568 fn = 'decoder-g.cc.inc' 1569 assert(fn in self.files) 1570 f.write('#include "%s"\n' % fn) 1571 1572 fn = 'decoder.hh' 1573 f.write('#include "%s"\n' % fn) 1574 1575 fn = 'decode-method.cc.inc' 1576 # is guaranteed to have been written for parse to complete 1577 f.write('#include "%s"\n' % fn) 1578 1579 extn = re.compile('(\.[^\.]+)$') 1580 1581 # instruction constructors 1582 splits = self.splits[self.get_file('decoder')] 1583 file_ = 'inst-constrs.cc' 1584 for i in range(1, splits+1): 1585 if splits > 1: 1586 file = extn.sub(r'-%d\1' % i, file_) 1587 else: 1588 file = file_ 1589 with self.open(file) as f: 1590 fn = 'decoder-g.cc.inc' 1591 assert(fn in self.files) 1592 f.write('#include "%s"\n' % fn) 1593 1594 fn = 'decoder.hh' 1595 f.write('#include "%s"\n' % fn) 1596 1597 fn = 'decoder-ns.cc.inc' 1598 assert(fn in self.files) 1599 print >>f, 'namespace %s {' % self.namespace 1600 if splits > 1: 1601 print >>f, '#define __SPLIT %u' % i 1602 print >>f, '#include "%s"' % fn 1603 print >>f, '}' 1604
|
1628 # instruction execution per-CPU model
| 1605 # instruction execution
|
1629 splits = self.splits[self.get_file('exec')]
| 1606 splits = self.splits[self.get_file('exec')]
|
1630 for cpu in self.cpuModels: 1631 for i in range(1, splits+1):
| 1607 for i in range(1, splits+1): 1608 file = 'generic_cpu_exec.cc' 1609 if splits > 1: 1610 file = extn.sub(r'_%d\1' % i, file) 1611 with self.open(file) as f: 1612 fn = 'exec-g.cc.inc' 1613 assert(fn in self.files) 1614 f.write('#include "%s"\n' % fn) 1615 f.write('#include "cpu/exec_context.hh"\n') 1616 f.write('#include "decoder.hh"\n') 1617 1618 fn = 'exec-ns.cc.inc' 1619 assert(fn in self.files) 1620 print >>f, 'namespace %s {' % self.namespace
|
1632 if splits > 1:
| 1621 if splits > 1:
|
1633 file = extn.sub(r'_%d\1' % i, cpu.filename) 1634 else: 1635 file = cpu.filename 1636 with self.open(file) as f: 1637 fn = 'exec-g.cc.inc' 1638 assert(fn in self.files) 1639 f.write('#include "%s"\n' % fn)
| 1622 print >>f, '#define __SPLIT %u' % i 1623 print >>f, '#include "%s"' % fn 1624 print >>f, '}'
|
1640
| 1625
|
1641 f.write(cpu.includes+"\n") 1642 1643 fn = 'decoder.hh' 1644 f.write('#include "%s"\n' % fn) 1645 1646 fn = 'exec-ns.cc.inc' 1647 assert(fn in self.files) 1648 print >>f, 'namespace %s {' % self.namespace 1649 print >>f, '#define CPU_EXEC_CONTEXT %s' \ 1650 % cpu.strings['CPU_exec_context'] 1651 if splits > 1: 1652 print >>f, '#define __SPLIT %u' % i 1653 print >>f, '#include "%s"' % fn 1654 print >>f, '}' 1655
| |
1656 # max_inst_regs.hh 1657 self.update('max_inst_regs.hh', 1658 '''namespace %(namespace)s { 1659 const int MaxInstSrcRegs = %(maxInstSrcRegs)d; 1660 const int MaxInstDestRegs = %(maxInstDestRegs)d; 1661 const int MaxMiscDestRegs = %(maxMiscDestRegs)d;\n}\n''' % self) 1662 1663 scaremonger_template ='''// DO NOT EDIT 1664// This file was automatically generated from an ISA description: 1665// %(filename)s 1666 1667'''; 1668 1669 ##################################################################### 1670 # 1671 # Lexer 1672 # 1673 # The PLY lexer module takes two things as input: 1674 # - A list of token names (the string list 'tokens') 1675 # - A regular expression describing a match for each token. The 1676 # regexp for token FOO can be provided in two ways: 1677 # - as a string variable named t_FOO 1678 # - as the doc string for a function named t_FOO. In this case, 1679 # the function is also executed, allowing an action to be 1680 # associated with each token match. 1681 # 1682 ##################################################################### 1683 1684 # Reserved words. These are listed separately as they are matched 1685 # using the same regexp as generic IDs, but distinguished in the 1686 # t_ID() function. The PLY documentation suggests this approach. 1687 reserved = ( 1688 'BITFIELD', 'DECODE', 'DECODER', 'DEFAULT', 'DEF', 'EXEC', 'FORMAT', 1689 'HEADER', 'LET', 'NAMESPACE', 'OPERAND_TYPES', 'OPERANDS', 1690 'OUTPUT', 'SIGNED', 'SPLIT', 'TEMPLATE' 1691 ) 1692 1693 # List of tokens. The lex module requires this. 1694 tokens = reserved + ( 1695 # identifier 1696 'ID', 1697 1698 # integer literal 1699 'INTLIT', 1700 1701 # string literal 1702 'STRLIT', 1703 1704 # code literal 1705 'CODELIT', 1706 1707 # ( ) [ ] { } < > , ; . 
: :: * 1708 'LPAREN', 'RPAREN', 1709 'LBRACKET', 'RBRACKET', 1710 'LBRACE', 'RBRACE', 1711 'LESS', 'GREATER', 'EQUALS', 1712 'COMMA', 'SEMI', 'DOT', 'COLON', 'DBLCOLON', 1713 'ASTERISK', 1714 1715 # C preprocessor directives 1716 'CPPDIRECTIVE' 1717 1718 # The following are matched but never returned. commented out to 1719 # suppress PLY warning 1720 # newfile directive 1721 # 'NEWFILE', 1722 1723 # endfile directive 1724 # 'ENDFILE' 1725 ) 1726 1727 # Regular expressions for token matching 1728 t_LPAREN = r'\(' 1729 t_RPAREN = r'\)' 1730 t_LBRACKET = r'\[' 1731 t_RBRACKET = r'\]' 1732 t_LBRACE = r'\{' 1733 t_RBRACE = r'\}' 1734 t_LESS = r'\<' 1735 t_GREATER = r'\>' 1736 t_EQUALS = r'=' 1737 t_COMMA = r',' 1738 t_SEMI = r';' 1739 t_DOT = r'\.' 1740 t_COLON = r':' 1741 t_DBLCOLON = r'::' 1742 t_ASTERISK = r'\*' 1743 1744 # Identifiers and reserved words 1745 reserved_map = { } 1746 for r in reserved: 1747 reserved_map[r.lower()] = r 1748 1749 def t_ID(self, t): 1750 r'[A-Za-z_]\w*' 1751 t.type = self.reserved_map.get(t.value, 'ID') 1752 return t 1753 1754 # Integer literal 1755 def t_INTLIT(self, t): 1756 r'-?(0x[\da-fA-F]+)|\d+' 1757 try: 1758 t.value = int(t.value,0) 1759 except ValueError: 1760 error(t.lexer.lineno, 'Integer value "%s" too large' % t.value) 1761 t.value = 0 1762 return t 1763 1764 # String literal. Note that these use only single quotes, and 1765 # can span multiple lines. 1766 def t_STRLIT(self, t): 1767 r"(?m)'([^'])+'" 1768 # strip off quotes 1769 t.value = t.value[1:-1] 1770 t.lexer.lineno += t.value.count('\n') 1771 return t 1772 1773 1774 # "Code literal"... 
like a string literal, but delimiters are 1775 # '{{' and '}}' so they get formatted nicely under emacs c-mode 1776 def t_CODELIT(self, t): 1777 r"(?m)\{\{([^\}]|}(?!\}))+\}\}" 1778 # strip off {{ & }} 1779 t.value = t.value[2:-2] 1780 t.lexer.lineno += t.value.count('\n') 1781 return t 1782 1783 def t_CPPDIRECTIVE(self, t): 1784 r'^\#[^\#].*\n' 1785 t.lexer.lineno += t.value.count('\n') 1786 return t 1787 1788 def t_NEWFILE(self, t): 1789 r'^\#\#newfile\s+"[^"]*"\n' 1790 self.fileNameStack.push(t.lexer.lineno) 1791 t.lexer.lineno = LineTracker(t.value[11:-2]) 1792 1793 def t_ENDFILE(self, t): 1794 r'^\#\#endfile\n' 1795 t.lexer.lineno = self.fileNameStack.pop() 1796 1797 # 1798 # The functions t_NEWLINE, t_ignore, and t_error are 1799 # special for the lex module. 1800 # 1801 1802 # Newlines 1803 def t_NEWLINE(self, t): 1804 r'\n+' 1805 t.lexer.lineno += t.value.count('\n') 1806 1807 # Comments 1808 def t_comment(self, t): 1809 r'//.*' 1810 1811 # Completely ignored characters 1812 t_ignore = ' \t\x0c' 1813 1814 # Error handler 1815 def t_error(self, t): 1816 error(t.lexer.lineno, "illegal character '%s'" % t.value[0]) 1817 t.skip(1) 1818 1819 ##################################################################### 1820 # 1821 # Parser 1822 # 1823 # Every function whose name starts with 'p_' defines a grammar 1824 # rule. The rule is encoded in the function's doc string, while 1825 # the function body provides the action taken when the rule is 1826 # matched. The argument to each function is a list of the values 1827 # of the rule's symbols: t[0] for the LHS, and t[1..n] for the 1828 # symbols on the RHS. For tokens, the value is copied from the 1829 # t.value attribute provided by the lexer. For non-terminals, the 1830 # value is assigned by the producing rule; i.e., the job of the 1831 # grammar rule function is to set the value for the non-terminal 1832 # on the LHS (by assigning to t[0]). 
1833 ##################################################################### 1834 1835 # The LHS of the first grammar rule is used as the start symbol 1836 # (in this case, 'specification'). Note that this rule enforces 1837 # that there will be exactly one namespace declaration, with 0 or 1838 # more global defs/decls before and after it. The defs & decls 1839 # before the namespace decl will be outside the namespace; those 1840 # after will be inside. The decoder function is always inside the 1841 # namespace. 1842 def p_specification(self, t): 1843 'specification : opt_defs_and_outputs top_level_decode_block' 1844 1845 for f in self.splits.iterkeys(): 1846 f.write('\n#endif\n') 1847 1848 for f in self.files.itervalues(): # close ALL the files; 1849 f.close() # not doing so can cause compilation to fail 1850 1851 self.write_top_level_files() 1852 1853 t[0] = True 1854 1855 # 'opt_defs_and_outputs' is a possibly empty sequence of def and/or 1856 # output statements. Its productions do the hard work of eventually 1857 # instantiating a GenCode, which are generally emitted (written to disk) 1858 # as soon as possible, except for the decode_block, which has to be 1859 # accumulated into one large function of nested switch/case blocks. 1860 def p_opt_defs_and_outputs_0(self, t): 1861 'opt_defs_and_outputs : empty' 1862 1863 def p_opt_defs_and_outputs_1(self, t): 1864 'opt_defs_and_outputs : defs_and_outputs' 1865 1866 def p_defs_and_outputs_0(self, t): 1867 'defs_and_outputs : def_or_output' 1868 1869 def p_defs_and_outputs_1(self, t): 1870 'defs_and_outputs : defs_and_outputs def_or_output' 1871 1872 # The list of possible definition/output statements. 1873 # They are all processed as they are seen. 
1874 def p_def_or_output(self, t): 1875 '''def_or_output : name_decl 1876 | def_format 1877 | def_bitfield 1878 | def_bitfield_struct 1879 | def_template 1880 | def_operand_types 1881 | def_operands 1882 | output 1883 | global_let 1884 | split''' 1885 1886 # Utility function used by both invocations of splitting - explicit 1887 # 'split' keyword and split() function inside "let {{ }};" blocks. 1888 def split(self, sec, write=False): 1889 assert(sec != 'header' and "header cannot be split") 1890 1891 f = self.get_file(sec) 1892 self.splits[f] += 1 1893 s = '\n#endif\n#if __SPLIT == %u\n' % self.splits[f] 1894 if write: 1895 f.write(s) 1896 else: 1897 return s 1898 1899 # split output file to reduce compilation time 1900 def p_split(self, t): 1901 'split : SPLIT output_type SEMI' 1902 assert(self.isa_name and "'split' not allowed before namespace decl") 1903 1904 self.split(t[2], True) 1905 1906 def p_output_type(self, t): 1907 '''output_type : DECODER 1908 | HEADER 1909 | EXEC''' 1910 t[0] = t[1] 1911 1912 # ISA name declaration looks like "namespace <foo>;" 1913 def p_name_decl(self, t): 1914 'name_decl : NAMESPACE ID SEMI' 1915 assert(self.isa_name == None and "Only 1 namespace decl permitted") 1916 self.isa_name = t[2] 1917 self.namespace = t[2] + 'Inst' 1918 1919 # Output blocks 'output <foo> {{...}}' (C++ code blocks) are copied 1920 # directly to the appropriate output section. 1921 1922 # Massage output block by substituting in template definitions and 1923 # bit operators. We handle '%'s embedded in the string that don't
| 1626 # max_inst_regs.hh 1627 self.update('max_inst_regs.hh', 1628 '''namespace %(namespace)s { 1629 const int MaxInstSrcRegs = %(maxInstSrcRegs)d; 1630 const int MaxInstDestRegs = %(maxInstDestRegs)d; 1631 const int MaxMiscDestRegs = %(maxMiscDestRegs)d;\n}\n''' % self) 1632 1633 scaremonger_template ='''// DO NOT EDIT 1634// This file was automatically generated from an ISA description: 1635// %(filename)s 1636 1637'''; 1638 1639 ##################################################################### 1640 # 1641 # Lexer 1642 # 1643 # The PLY lexer module takes two things as input: 1644 # - A list of token names (the string list 'tokens') 1645 # - A regular expression describing a match for each token. The 1646 # regexp for token FOO can be provided in two ways: 1647 # - as a string variable named t_FOO 1648 # - as the doc string for a function named t_FOO. In this case, 1649 # the function is also executed, allowing an action to be 1650 # associated with each token match. 1651 # 1652 ##################################################################### 1653 1654 # Reserved words. These are listed separately as they are matched 1655 # using the same regexp as generic IDs, but distinguished in the 1656 # t_ID() function. The PLY documentation suggests this approach. 1657 reserved = ( 1658 'BITFIELD', 'DECODE', 'DECODER', 'DEFAULT', 'DEF', 'EXEC', 'FORMAT', 1659 'HEADER', 'LET', 'NAMESPACE', 'OPERAND_TYPES', 'OPERANDS', 1660 'OUTPUT', 'SIGNED', 'SPLIT', 'TEMPLATE' 1661 ) 1662 1663 # List of tokens. The lex module requires this. 1664 tokens = reserved + ( 1665 # identifier 1666 'ID', 1667 1668 # integer literal 1669 'INTLIT', 1670 1671 # string literal 1672 'STRLIT', 1673 1674 # code literal 1675 'CODELIT', 1676 1677 # ( ) [ ] { } < > , ; . 
: :: * 1678 'LPAREN', 'RPAREN', 1679 'LBRACKET', 'RBRACKET', 1680 'LBRACE', 'RBRACE', 1681 'LESS', 'GREATER', 'EQUALS', 1682 'COMMA', 'SEMI', 'DOT', 'COLON', 'DBLCOLON', 1683 'ASTERISK', 1684 1685 # C preprocessor directives 1686 'CPPDIRECTIVE' 1687 1688 # The following are matched but never returned. commented out to 1689 # suppress PLY warning 1690 # newfile directive 1691 # 'NEWFILE', 1692 1693 # endfile directive 1694 # 'ENDFILE' 1695 ) 1696 1697 # Regular expressions for token matching 1698 t_LPAREN = r'\(' 1699 t_RPAREN = r'\)' 1700 t_LBRACKET = r'\[' 1701 t_RBRACKET = r'\]' 1702 t_LBRACE = r'\{' 1703 t_RBRACE = r'\}' 1704 t_LESS = r'\<' 1705 t_GREATER = r'\>' 1706 t_EQUALS = r'=' 1707 t_COMMA = r',' 1708 t_SEMI = r';' 1709 t_DOT = r'\.' 1710 t_COLON = r':' 1711 t_DBLCOLON = r'::' 1712 t_ASTERISK = r'\*' 1713 1714 # Identifiers and reserved words 1715 reserved_map = { } 1716 for r in reserved: 1717 reserved_map[r.lower()] = r 1718 1719 def t_ID(self, t): 1720 r'[A-Za-z_]\w*' 1721 t.type = self.reserved_map.get(t.value, 'ID') 1722 return t 1723 1724 # Integer literal 1725 def t_INTLIT(self, t): 1726 r'-?(0x[\da-fA-F]+)|\d+' 1727 try: 1728 t.value = int(t.value,0) 1729 except ValueError: 1730 error(t.lexer.lineno, 'Integer value "%s" too large' % t.value) 1731 t.value = 0 1732 return t 1733 1734 # String literal. Note that these use only single quotes, and 1735 # can span multiple lines. 1736 def t_STRLIT(self, t): 1737 r"(?m)'([^'])+'" 1738 # strip off quotes 1739 t.value = t.value[1:-1] 1740 t.lexer.lineno += t.value.count('\n') 1741 return t 1742 1743 1744 # "Code literal"... 
like a string literal, but delimiters are 1745 # '{{' and '}}' so they get formatted nicely under emacs c-mode 1746 def t_CODELIT(self, t): 1747 r"(?m)\{\{([^\}]|}(?!\}))+\}\}" 1748 # strip off {{ & }} 1749 t.value = t.value[2:-2] 1750 t.lexer.lineno += t.value.count('\n') 1751 return t 1752 1753 def t_CPPDIRECTIVE(self, t): 1754 r'^\#[^\#].*\n' 1755 t.lexer.lineno += t.value.count('\n') 1756 return t 1757 1758 def t_NEWFILE(self, t): 1759 r'^\#\#newfile\s+"[^"]*"\n' 1760 self.fileNameStack.push(t.lexer.lineno) 1761 t.lexer.lineno = LineTracker(t.value[11:-2]) 1762 1763 def t_ENDFILE(self, t): 1764 r'^\#\#endfile\n' 1765 t.lexer.lineno = self.fileNameStack.pop() 1766 1767 # 1768 # The functions t_NEWLINE, t_ignore, and t_error are 1769 # special for the lex module. 1770 # 1771 1772 # Newlines 1773 def t_NEWLINE(self, t): 1774 r'\n+' 1775 t.lexer.lineno += t.value.count('\n') 1776 1777 # Comments 1778 def t_comment(self, t): 1779 r'//.*' 1780 1781 # Completely ignored characters 1782 t_ignore = ' \t\x0c' 1783 1784 # Error handler 1785 def t_error(self, t): 1786 error(t.lexer.lineno, "illegal character '%s'" % t.value[0]) 1787 t.skip(1) 1788 1789 ##################################################################### 1790 # 1791 # Parser 1792 # 1793 # Every function whose name starts with 'p_' defines a grammar 1794 # rule. The rule is encoded in the function's doc string, while 1795 # the function body provides the action taken when the rule is 1796 # matched. The argument to each function is a list of the values 1797 # of the rule's symbols: t[0] for the LHS, and t[1..n] for the 1798 # symbols on the RHS. For tokens, the value is copied from the 1799 # t.value attribute provided by the lexer. For non-terminals, the 1800 # value is assigned by the producing rule; i.e., the job of the 1801 # grammar rule function is to set the value for the non-terminal 1802 # on the LHS (by assigning to t[0]). 
1803 ##################################################################### 1804 1805 # The LHS of the first grammar rule is used as the start symbol 1806 # (in this case, 'specification'). Note that this rule enforces 1807 # that there will be exactly one namespace declaration, with 0 or 1808 # more global defs/decls before and after it. The defs & decls 1809 # before the namespace decl will be outside the namespace; those 1810 # after will be inside. The decoder function is always inside the 1811 # namespace. 1812 def p_specification(self, t): 1813 'specification : opt_defs_and_outputs top_level_decode_block' 1814 1815 for f in self.splits.iterkeys(): 1816 f.write('\n#endif\n') 1817 1818 for f in self.files.itervalues(): # close ALL the files; 1819 f.close() # not doing so can cause compilation to fail 1820 1821 self.write_top_level_files() 1822 1823 t[0] = True 1824 1825 # 'opt_defs_and_outputs' is a possibly empty sequence of def and/or 1826 # output statements. Its productions do the hard work of eventually 1827 # instantiating a GenCode, which are generally emitted (written to disk) 1828 # as soon as possible, except for the decode_block, which has to be 1829 # accumulated into one large function of nested switch/case blocks. 1830 def p_opt_defs_and_outputs_0(self, t): 1831 'opt_defs_and_outputs : empty' 1832 1833 def p_opt_defs_and_outputs_1(self, t): 1834 'opt_defs_and_outputs : defs_and_outputs' 1835 1836 def p_defs_and_outputs_0(self, t): 1837 'defs_and_outputs : def_or_output' 1838 1839 def p_defs_and_outputs_1(self, t): 1840 'defs_and_outputs : defs_and_outputs def_or_output' 1841 1842 # The list of possible definition/output statements. 1843 # They are all processed as they are seen. 
1844 def p_def_or_output(self, t): 1845 '''def_or_output : name_decl 1846 | def_format 1847 | def_bitfield 1848 | def_bitfield_struct 1849 | def_template 1850 | def_operand_types 1851 | def_operands 1852 | output 1853 | global_let 1854 | split''' 1855 1856 # Utility function used by both invocations of splitting - explicit 1857 # 'split' keyword and split() function inside "let {{ }};" blocks. 1858 def split(self, sec, write=False): 1859 assert(sec != 'header' and "header cannot be split") 1860 1861 f = self.get_file(sec) 1862 self.splits[f] += 1 1863 s = '\n#endif\n#if __SPLIT == %u\n' % self.splits[f] 1864 if write: 1865 f.write(s) 1866 else: 1867 return s 1868 1869 # split output file to reduce compilation time 1870 def p_split(self, t): 1871 'split : SPLIT output_type SEMI' 1872 assert(self.isa_name and "'split' not allowed before namespace decl") 1873 1874 self.split(t[2], True) 1875 1876 def p_output_type(self, t): 1877 '''output_type : DECODER 1878 | HEADER 1879 | EXEC''' 1880 t[0] = t[1] 1881 1882 # ISA name declaration looks like "namespace <foo>;" 1883 def p_name_decl(self, t): 1884 'name_decl : NAMESPACE ID SEMI' 1885 assert(self.isa_name == None and "Only 1 namespace decl permitted") 1886 self.isa_name = t[2] 1887 self.namespace = t[2] + 'Inst' 1888 1889 # Output blocks 'output <foo> {{...}}' (C++ code blocks) are copied 1890 # directly to the appropriate output section. 1891 1892 # Massage output block by substituting in template definitions and 1893 # bit operators. We handle '%'s embedded in the string that don't
|
1924 # indicate template substitutions (or CPU-specific symbols, which 1925 # get handled in GenCode) by doubling them first so that the
| 1894 # indicate template substitutions by doubling them first so that the
|
1926 # format operation will reduce them back to single '%'s. 1927 def process_output(self, s): 1928 s = self.protectNonSubstPercents(s)
| 1895 # format operation will reduce them back to single '%'s. 1896 def process_output(self, s): 1897 s = self.protectNonSubstPercents(s)
|
1929 # protects cpu-specific symbols too 1930 s = self.protectCpuSymbols(s)
| |
1931 return substBitOps(s % self.templateMap) 1932 1933 def p_output(self, t): 1934 'output : OUTPUT output_type CODELIT SEMI' 1935 kwargs = { t[2]+'_output' : self.process_output(t[3]) } 1936 GenCode(self, **kwargs).emit() 1937 1938 # global let blocks 'let {{...}}' (Python code blocks) are 1939 # executed directly when seen. Note that these execute in a 1940 # special variable context 'exportContext' to prevent the code 1941 # from polluting this script's namespace. 1942 def p_global_let(self, t): 1943 'global_let : LET CODELIT SEMI' 1944 def _split(sec): 1945 return self.split(sec) 1946 self.updateExportContext() 1947 self.exportContext["header_output"] = '' 1948 self.exportContext["decoder_output"] = '' 1949 self.exportContext["exec_output"] = '' 1950 self.exportContext["decode_block"] = '' 1951 self.exportContext["split"] = _split 1952 split_setup = ''' 1953def wrap(func): 1954 def split(sec): 1955 globals()[sec + '_output'] += func(sec) 1956 return split 1957split = wrap(split) 1958del wrap 1959''' 1960 # This tricky setup (immediately above) allows us to just write 1961 # (e.g.) "split('exec')" in the Python code and the split #ifdef's 1962 # will automatically be added to the exec_output variable. The inner 1963 # Python execution environment doesn't know about the split points, 1964 # so we carefully inject and wrap a closure that can retrieve the 1965 # next split's #define from the parser and add it to the current 1966 # emission-in-progress. 
        # Run the user's let-block code (with the split shim prepended)
        # inside the sandboxed exportContext namespace.
        try:
            exec split_setup+fixPythonIndentation(t[2]) in self.exportContext
        except Exception, exc:
            if debug:
                raise
            error(t.lineno(1), 'In global let block: %s' % exc)
        # Whatever the block accumulated into the four *_output variables
        # is emitted as a single GenCode unit.
        GenCode(self,
                header_output=self.exportContext["header_output"],
                decoder_output=self.exportContext["decoder_output"],
                exec_output=self.exportContext["exec_output"],
                decode_block=self.exportContext["decode_block"]).emit()

    # Define the mapping from operand type extensions to C++ types and
    # bit widths (stored in operandTypeMap).
    def p_def_operand_types(self, t):
        'def_operand_types : DEF OPERAND_TYPES CODELIT SEMI'
        # The code literal is the *body* of a dict literal; wrap in
        # braces and evaluate it.
        try:
            self.operandTypeMap = eval('{' + t[3] + '}')
        except Exception, exc:
            if debug:
                raise
            error(t.lineno(1),
                  'In def operand_types: %s' % exc)

    # Define the mapping from operand names to operand classes and
    # other traits. Stored in operandNameMap.
    def p_def_operands(self, t):
        'def_operands : DEF OPERANDS CODELIT SEMI'
        if not hasattr(self, 'operandTypeMap'):
            error(t.lineno(1),
                  'error: operand types must be defined before operands')
        # Evaluated inside exportContext so operand traits may refer to
        # names defined by earlier let blocks.
        try:
            user_dict = eval('{' + t[3] + '}', self.exportContext)
        except Exception, exc:
            if debug:
                raise
            error(t.lineno(1), 'In def operands: %s' % exc)
        self.buildOperandNameMap(user_dict, t.lexer.lineno)

    # A bitfield definition looks like:
    # 'def [signed] bitfield <ID> [<first>:<last>]'
    # This generates a preprocessor macro in the output file.
    # Range bitfield: emits '#define <ID> bits(machInst, first, last)'
    # (wrapped in sext<>() when declared signed).
    def p_def_bitfield_0(self, t):
        'def_bitfield : DEF opt_signed BITFIELD ID LESS INTLIT COLON INTLIT GREATER SEMI'
        expr = 'bits(machInst, %2d, %2d)' % (t[6], t[8])
        if (t[2] == 'signed'):
            expr = 'sext<%d>(%s)' % (t[6] - t[8] + 1, expr)
        hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
        GenCode(self, header_output=hash_define).emit()

    # alternate form for single bit: 'def [signed] bitfield <ID> [<bit>]'
    def p_def_bitfield_1(self, t):
        'def_bitfield : DEF opt_signed BITFIELD ID LESS INTLIT GREATER SEMI'
        expr = 'bits(machInst, %2d, %2d)' % (t[6], t[6])
        if (t[2] == 'signed'):
            expr = 'sext<%d>(%s)' % (1, expr)
        hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
        GenCode(self, header_output=hash_define).emit()

    # alternate form for structure member: 'def bitfield <ID> <ID>'
    def p_def_bitfield_struct(self, t):
        'def_bitfield_struct : DEF opt_signed BITFIELD ID id_with_dot SEMI'
        if (t[2] != ''):
            error(t.lineno(1),
                  'error: structure bitfields are always unsigned.')
        expr = 'machInst.%s' % t[5]
        hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
        GenCode(self, header_output=hash_define).emit()

    # Dotted member path, e.g. 'a.b.c', rebuilt as a single string.
    def p_id_with_dot_0(self, t):
        'id_with_dot : ID'
        t[0] = t[1]

    def p_id_with_dot_1(self, t):
        'id_with_dot : ID DOT id_with_dot'
        t[0] = t[1] + t[2] + t[3]

    def p_opt_signed_0(self, t):
        'opt_signed : SIGNED'
        t[0] = t[1]

    def p_opt_signed_1(self, t):
        'opt_signed : empty'
        t[0] = ''

    def p_def_template(self, t):
        'def_template : DEF TEMPLATE ID CODELIT SEMI'
        # Redefinition is a warning, not an error: the later template wins.
        if t[3] in self.templateMap:
            print "warning: template %s already defined" % t[3]
        self.templateMap[t[3]] = Template(self, t[4])

    # An instruction format definition looks like
    # "def format <fmt>(<params>) {{...}};"
    def p_def_format(self, t):
        'def_format : DEF FORMAT ID LPAREN param_list RPAREN CODELIT SEMI'
        (id, params, code) = (t[3], t[5], t[7])
        self.defFormat(id, params, code, t.lexer.lineno)

    # The formal parameter list for an instruction format is a
    # possibly empty list of comma-separated parameters. Positional
    # (standard, non-keyword) parameters must come first, followed by
    # keyword parameters, followed by a '*foo' parameter that gets
    # excess positional arguments (as in Python). Each of these three
    # parameter categories is optional.
    #
    # Note that we do not support the '**foo' parameter for collecting
    # otherwise undefined keyword args. Otherwise the parameter list
    # is (I believe) identical to what is supported in Python.
    #
    # The param list generates a tuple, where the first element is a
    # list of the positional params and the second element is a dict
    # containing the keyword params.
    def p_param_list_0(self, t):
        'param_list : positional_param_list COMMA nonpositional_param_list'
        t[0] = t[1] + t[3]

    def p_param_list_1(self, t):
        '''param_list : positional_param_list
                      | nonpositional_param_list'''
        t[0] = t[1]

    def p_positional_param_list_0(self, t):
        'positional_param_list : empty'
        t[0] = []

    def p_positional_param_list_1(self, t):
        'positional_param_list : ID'
        t[0] = [t[1]]

    def p_positional_param_list_2(self, t):
        'positional_param_list : positional_param_list COMMA ID'
        t[0] = t[1] + [t[3]]

    def p_nonpositional_param_list_0(self, t):
        'nonpositional_param_list : keyword_param_list COMMA excess_args_param'
        t[0] = t[1] + t[3]

    def p_nonpositional_param_list_1(self, t):
        '''nonpositional_param_list : keyword_param_list
                                    | excess_args_param'''
        t[0] = t[1]

    def p_keyword_param_list_0(self, t):
        'keyword_param_list : keyword_param'
        t[0] = [t[1]]

    def p_keyword_param_list_1(self, t):
        'keyword_param_list : keyword_param_list COMMA keyword_param'
        t[0] = t[1] + [t[3]]

    # Keyword param rendered as Python source text 'name = <repr>'.
    def p_keyword_param(self, t):
        'keyword_param : ID EQUALS expr'
        t[0] = t[1] + ' = ' + t[3].__repr__()

    def p_excess_args_param(self, t):
        'excess_args_param : ASTERISK ID'
        # Just concatenate them: '*ID'. Wrap in list to be consistent
        # with positional_param_list and keyword_param_list.
        t[0] = [t[1] + t[2]]

    # End of format definition-related rules.
    ##############

    #
    # A decode block looks like:
    # decode <field1> [, <field2>]* [default <inst>] { ... }
    #
    def p_top_level_decode_block(self, t):
        'top_level_decode_block : decode_block'
        codeObj = t[1]
        # Wrap the accumulated switch/case code in the decodeInst()
        # method definition for the ISA's Decoder class.
        codeObj.wrap_decode_block('''
StaticInstPtr
%(isa_name)s::Decoder::decodeInst(%(isa_name)s::ExtMachInst machInst)
{
    using namespace %(namespace)s;
''' % self, '}')

        codeObj.emit()

    def p_decode_block(self, t):
        'decode_block : DECODE ID opt_default LBRACE decode_stmt_list RBRACE'
        default_defaults = self.defaultStack.pop()
        codeObj = t[5]
        # use the "default defaults" only if there was no explicit
        # default statement in decode_stmt_list
        if not codeObj.has_decode_default:
            codeObj += default_defaults
        codeObj.wrap_decode_block('switch (%s) {\n' % t[2], '}\n')
        t[0] = codeObj

    # The opt_default statement serves only to push the "default
    # defaults" onto defaultStack. This value will be used by nested
    # decode blocks, and used and popped off when the current
    # decode_block is processed (in p_decode_block() above).
def p_opt_default_0(self, t):
    'opt_default : empty'
    # no default specified: reuse the one currently at the top of
    # the stack
    self.defaultStack.push(self.defaultStack.top())
    # no meaningful value returned
    t[0] = None

def p_opt_default_1(self, t):
    'opt_default : DEFAULT inst'
    # push the new default
    codeObj = t[2]
    codeObj.wrap_decode_block('\ndefault:\n', 'break;\n')
    self.defaultStack.push(codeObj)
    # no meaningful value returned
    t[0] = None

def p_decode_stmt_list_0(self, t):
    'decode_stmt_list : decode_stmt'
    t[0] = t[1]

def p_decode_stmt_list_1(self, t):
    'decode_stmt_list : decode_stmt decode_stmt_list'
    # at most one 'default:' label is allowed per decode block
    if (t[1].has_decode_default and t[2].has_decode_default):
        error(t.lineno(1), 'Two default cases in decode block')
    t[0] = t[1] + t[2]

#
# Decode statement rules
#
# There are four types of statements allowed in a decode block:
# 1. Format blocks 'format <foo> { ... }'
# 2. Nested decode blocks
# 3. Instruction definitions.
# 4. C preprocessor directives.


# Preprocessor directives found in a decode statement list are
# passed through to the output, replicated to all of the output
# code streams.  This works well for ifdefs, so we can ifdef out
# both the declarations and the decode cases generated by an
# instruction definition.  Handling them as part of the grammar
# makes it easy to keep them in the right place with respect to
# the code generated by the other statements.
def p_decode_stmt_cpp(self, t):
    'decode_stmt : CPPDIRECTIVE'
    t[0] = GenCode(self, t[1], t[1], t[1], t[1])

# A format block 'format <foo> { ... }' sets the default
# instruction format used to handle instruction definitions inside
# the block.  This format can be overridden by using an explicit
# format on the instruction definition or with a nested format
# block.
def p_decode_stmt_format(self, t):
    'decode_stmt : FORMAT push_format_id LBRACE decode_stmt_list RBRACE'
    # The format will be pushed on the stack when 'push_format_id'
    # is processed (see below).  Once the parser has recognized
    # the full production (though the right brace), we're done
    # with the format, so now we can pop it.
    self.formatStack.pop()
    t[0] = t[4]

# This rule exists so we can set the current format (& push the
# stack) when we recognize the format name part of the format
# block.
def p_push_format_id(self, t):
    'push_format_id : ID'
    try:
        self.formatStack.push(self.formatMap[t[1]])
        t[0] = ('', '// format %s' % t[1])
    except KeyError:
        error(t.lineno(1), 'instruction format "%s" not defined.' % t[1])

# Nested decode block: if the value of the current field matches
# the specified constant(s), do a nested decode on some other field.
def p_decode_stmt_decode(self, t):
    'decode_stmt : case_list COLON decode_block'
    case_list = t[1]
    codeObj = t[3]
    # just wrap the decoding code from the block as a case in the
    # outer switch statement.
    codeObj.wrap_decode_block('\n%s\n' % ''.join(case_list))
    codeObj.has_decode_default = (case_list == ['default:'])
    t[0] = codeObj

# Instruction definition (finally!).
def p_decode_stmt_inst(self, t):
    'decode_stmt : case_list COLON inst SEMI'
    case_list = t[1]
    codeObj = t[3]
    # wrap the instruction's code with its case label(s) and a
    # trailing 'break'
    codeObj.wrap_decode_block('\n%s' % ''.join(case_list), 'break;\n')
    codeObj.has_decode_default = (case_list == ['default:'])
    t[0] = codeObj

# The constant list for a decode case label must be non-empty, and must
# either be the keyword 'default', or made up of one or more
# comma-separated integer literals or strings which evaluate to
# constants when compiled as C++.
def p_case_list_0(self, t):
    'case_list : DEFAULT'
    t[0] = ['default:']

def prep_int_lit_case_label(self, lit):
    '''Render an integer literal as a C++ case label; values that do
    not fit in 32 bits are wrapped in the ULL() macro.'''
    if lit >= 2**32:
        return 'case ULL(%#x): ' % lit
    else:
        return 'case %#x: ' % lit

def prep_str_lit_case_label(self, lit):
    '''Render a string literal (expected to name a C++ constant) as a
    case label.'''
    return 'case %s: ' % lit

def p_case_list_1(self, t):
    'case_list : INTLIT'
    t[0] = [self.prep_int_lit_case_label(t[1])]

def p_case_list_2(self, t):
    'case_list : STRLIT'
    t[0] = [self.prep_str_lit_case_label(t[1])]

def p_case_list_3(self, t):
    'case_list : case_list COMMA INTLIT'
    t[0] = t[1]
    t[0].append(self.prep_int_lit_case_label(t[3]))

def p_case_list_4(self, t):
    'case_list : case_list COMMA STRLIT'
    t[0] = t[1]
    t[0].append(self.prep_str_lit_case_label(t[3]))

# Define an instruction using the current instruction format
# (specified by an enclosing format block).
# "<mnemonic>(<args>)"
def p_inst_0(self, t):
    'inst : ID LPAREN arg_list RPAREN'
    # Pass the ID and arg list to the current format class to deal with.
    currentFormat = self.formatStack.top()
    codeObj = currentFormat.defineInst(self, t[1], t[3], t.lexer.lineno)
    args = ','.join(map(str, t[3]))
    # comment out every line of the arg dump; the leading '//' of the
    # first line is stripped because it is spliced into the
    # '// fmt::mnemonic(...)' comment built below
    args = re.sub('(?m)^', '//', args)
    args = re.sub('^//', '', args)
    comment = '\n// %s::%s(%s)\n' % (currentFormat.id, t[1], args)
    codeObj.prepend_all(comment)
    t[0] = codeObj

# Define an instruction using an explicitly specified format:
# "<fmt>::<mnemonic>(<args>)"
def p_inst_1(self, t):
    'inst : ID DBLCOLON ID LPAREN arg_list RPAREN'
    try:
        format = self.formatMap[t[1]]
    except KeyError:
        error(t.lineno(1), 'instruction format "%s" not defined.' % t[1])

    codeObj = format.defineInst(self, t[3], t[5], t.lexer.lineno)
    comment = '\n// %s::%s(%s)\n' % (t[1], t[3], t[5])
    codeObj.prepend_all(comment)
    t[0] = codeObj

# The arg list generates a tuple, where the first element is a
# list of the positional args and the second element is a dict
# containing the keyword args.
def p_arg_list_0(self, t):
    'arg_list : positional_arg_list COMMA keyword_arg_list'
    t[0] = ( t[1], t[3] )

def p_arg_list_1(self, t):
    'arg_list : positional_arg_list'
    t[0] = ( t[1], {} )

def p_arg_list_2(self, t):
    'arg_list : keyword_arg_list'
    t[0] = ( [], t[1] )

def p_positional_arg_list_0(self, t):
    'positional_arg_list : empty'
    t[0] = []

def p_positional_arg_list_1(self, t):
    'positional_arg_list : expr'
    t[0] = [t[1]]

def p_positional_arg_list_2(self, t):
    'positional_arg_list : positional_arg_list COMMA expr'
    t[0] = t[1] + [t[3]]

def p_keyword_arg_list_0(self, t):
    'keyword_arg_list : keyword_arg'
    t[0] = t[1]

def p_keyword_arg_list_1(self, t):
    'keyword_arg_list : keyword_arg_list COMMA keyword_arg'
    # accumulate into the dict built so far
    t[0] = t[1]
    t[0].update(t[3])

def p_keyword_arg(self, t):
    'keyword_arg : ID EQUALS expr'
    t[0] = { t[1] : t[3] }

#
# Basic expressions.  These constitute the argument values of
# "function calls" (i.e. instruction definitions in the decode
# block) and default values for formal parameters of format
# functions.
#
# Right now, these are either strings, integers, or (recursively)
# lists of exprs (using Python square-bracket list syntax).  Note
# that bare identifiers are treated as string constants here (since
# there isn't really a variable namespace to refer to).
#
def p_expr_0(self, t):
    '''expr : ID
            | INTLIT
            | STRLIT
            | CODELIT'''
    t[0] = t[1]

def p_expr_1(self, t):
    '''expr : LBRACKET list_expr RBRACKET'''
    t[0] = t[2]

def p_list_expr_0(self, t):
    'list_expr : expr'
    t[0] = [t[1]]

def p_list_expr_1(self, t):
    'list_expr : list_expr COMMA expr'
    t[0] = t[1] + [t[3]]

def p_list_expr_2(self, t):
    'list_expr : empty'
    t[0] = []

#
# Empty production... use in other rules for readability.
#
def p_empty(self, t):
    'empty :'
    pass

# Parse error handler.  Note that the argument here is the
# offending *token*, not a grammar symbol (hence the need to use
# t.value).  No docstring here on purpose: p_error is PLY's special
# error hook, not a grammar rule.
def p_error(self, t):
    if t:
        error(t.lexer.lineno, "syntax error at '%s'" % t.value)
    else:
        error("unknown syntax error")

# END OF GRAMMAR RULES

def updateExportContext(self):
    # Refresh the 'let'-block execution context with a parser-bound
    # InstObjParams factory and all templates defined so far.

    # create a continuation that allows us to grab the current parser
    def wrapInstObjParams(*args):
        return InstObjParams(self, *args)
    self.exportContext['InstObjParams'] = wrapInstObjParams
    self.exportContext.update(self.templateMap)

def defFormat(self, id, params, code, lineno):
    '''Define a new format'''

    # make sure we haven't already defined this one
    if id in self.formatMap:
        error(lineno, 'format %s redefined.' % id)

    # create new object and store in global map
    self.formatMap[id] = Format(id, params, code)
        # (tail of a method whose 'def' line lies above this chunk;
        # kept byte-for-byte -- substitutes the template map into s
        # and applies bit-operator substitution)
        return substBitOps(s % self.templateMap)

# 'output <type> {{...}};' routes a raw code literal to one of the
# output streams selected by output_type.
def p_output(self, t):
    'output : OUTPUT output_type CODELIT SEMI'
    kwargs = { t[2]+'_output' : self.process_output(t[3]) }
    GenCode(self, **kwargs).emit()

# global let blocks 'let {{...}}' (Python code blocks) are
# executed directly when seen.  Note that these execute in a
# special variable context 'exportContext' to prevent the code
# from polluting this script's namespace.
def p_global_let(self, t):
    'global_let : LET CODELIT SEMI'
    def _split(sec):
        return self.split(sec)
    self.updateExportContext()
    self.exportContext["header_output"] = ''
    self.exportContext["decoder_output"] = ''
    self.exportContext["exec_output"] = ''
    self.exportContext["decode_block"] = ''
    self.exportContext["split"] = _split
    split_setup = '''
def wrap(func):
    def split(sec):
        globals()[sec + '_output'] += func(sec)
    return split
split = wrap(split)
del wrap
'''
    # This tricky setup (immediately above) allows us to just write
    # (e.g.) "split('exec')" in the Python code and the split #ifdef's
    # will automatically be added to the exec_output variable. The inner
    # Python execution environment doesn't know about the split points,
    # so we carefully inject and wrap a closure that can retrieve the
    # next split's #define from the parser and add it to the current
    # emission-in-progress.
    try:
        # NOTE: executes ISA-description Python supplied by the build
        # (trusted input by design)
        exec split_setup+fixPythonIndentation(t[2]) in self.exportContext
    except Exception, exc:
        if debug:
            raise
        error(t.lineno(1), 'In global let block: %s' % exc)
    GenCode(self,
            header_output=self.exportContext["header_output"],
            decoder_output=self.exportContext["decoder_output"],
            exec_output=self.exportContext["exec_output"],
            decode_block=self.exportContext["decode_block"]).emit()

# Define the mapping from operand type extensions to C++ types and
# bit widths (stored in operandTypeMap).
def p_def_operand_types(self, t):
    'def_operand_types : DEF OPERAND_TYPES CODELIT SEMI'
    try:
        # CODELIT holds dict entries; wrap in braces and evaluate
        self.operandTypeMap = eval('{' + t[3] + '}')
    except Exception, exc:
        if debug:
            raise
        error(t.lineno(1),
              'In def operand_types: %s' % exc)

# Define the mapping from operand names to operand classes and
# other traits.  Stored in operandNameMap.
def p_def_operands(self, t):
    'def_operands : DEF OPERANDS CODELIT SEMI'
    if not hasattr(self, 'operandTypeMap'):
        error(t.lineno(1),
              'error: operand types must be defined before operands')
    try:
        user_dict = eval('{' + t[3] + '}', self.exportContext)
    except Exception, exc:
        if debug:
            raise
        error(t.lineno(1), 'In def operands: %s' % exc)
    self.buildOperandNameMap(user_dict, t.lexer.lineno)

# A bitfield definition looks like:
# 'def [signed] bitfield <ID> [<first>:<last>]'
# This generates a preprocessor macro in the output file.
# NOTE(PLY): in every p_* rule below, the docstring IS the grammar
# production consumed by the parser generator -- it must never be
# edited as if it were documentation.

# A bitfield definition
#   'def [signed] bitfield <ID> [<first>:<last>]'
# generates a '#define' macro for the field in the output header.
def p_def_bitfield_0(self, t):
    'def_bitfield : DEF opt_signed BITFIELD ID LESS INTLIT COLON INTLIT GREATER SEMI'
    expr = 'bits(machInst, %2d, %2d)' % (t[6], t[8])
    if (t[2] == 'signed'):
        # sign-extend from the field width (first - last + 1 bits)
        expr = 'sext<%d>(%s)' % (t[6] - t[8] + 1, expr)
    # '#undef' first so redefinition of a bitfield name is silent
    hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
    GenCode(self, header_output=hash_define).emit()

# alternate form for single bit: 'def [signed] bitfield <ID> [<bit>]'
def p_def_bitfield_1(self, t):
    'def_bitfield : DEF opt_signed BITFIELD ID LESS INTLIT GREATER SEMI'
    expr = 'bits(machInst, %2d, %2d)' % (t[6], t[6])
    if (t[2] == 'signed'):
        expr = 'sext<%d>(%s)' % (1, expr)
    hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
    GenCode(self, header_output=hash_define).emit()

# alternate form for structure member: 'def bitfield <ID> <ID>'
def p_def_bitfield_struct(self, t):
    'def_bitfield_struct : DEF opt_signed BITFIELD ID id_with_dot SEMI'
    if (t[2] != ''):
        error(t.lineno(1),
              'error: structure bitfields are always unsigned.')
    expr = 'machInst.%s' % t[5]
    hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
    GenCode(self, header_output=hash_define).emit()

# dotted member path, e.g. 'a.b.c', reassembled into a single string
def p_id_with_dot_0(self, t):
    'id_with_dot : ID'
    t[0] = t[1]

def p_id_with_dot_1(self, t):
    'id_with_dot : ID DOT id_with_dot'
    t[0] = t[1] + t[2] + t[3]

# optional 'signed' keyword; the empty case yields '' so callers can
# simply compare the result against 'signed'
def p_opt_signed_0(self, t):
    'opt_signed : SIGNED'
    t[0] = t[1]

def p_opt_signed_1(self, t):
    'opt_signed : empty'
    t[0] = ''

# 'def template <ID> {{...}};' -- register a code template by name
def p_def_template(self, t):
    'def_template : DEF TEMPLATE ID CODELIT SEMI'
    if t[3] in self.templateMap:
        print "warning: template %s already defined" % t[3]
    self.templateMap[t[3]] = Template(self, t[4])

# An instruction format definition looks like
# "def format <fmt>(<params>) {{...}};"
def p_def_format(self, t):
    'def_format : DEF FORMAT ID LPAREN param_list RPAREN CODELIT SEMI'
    (id, params, code) = (t[3], t[5], t[7])
    self.defFormat(id, params, code, t.lexer.lineno)

# The formal parameter list for an instruction format is a
# possibly empty list of comma-separated parameters.  Positional
# (standard, non-keyword) parameters must come first, followed by
# keyword parameters, followed by a '*foo' parameter that gets
# excess positional arguments (as in Python).  Each of these three
# parameter categories is optional.
#
# Note that we do not support the '**foo' parameter for collecting
# otherwise undefined keyword args.  Otherwise the parameter list
# is (I believe) identical to what is supported in Python.
#
# The param list generates a tuple, where the first element is a
# list of the positional params and the second element is a dict
# containing the keyword params.
def p_param_list_0(self, t):
    'param_list : positional_param_list COMMA nonpositional_param_list'
    t[0] = t[1] + t[3]

def p_param_list_1(self, t):
    '''param_list : positional_param_list
                  | nonpositional_param_list'''
    t[0] = t[1]

def p_positional_param_list_0(self, t):
    'positional_param_list : empty'
    t[0] = []

def p_positional_param_list_1(self, t):
    'positional_param_list : ID'
    t[0] = [t[1]]

def p_positional_param_list_2(self, t):
    'positional_param_list : positional_param_list COMMA ID'
    t[0] = t[1] + [t[3]]

def p_nonpositional_param_list_0(self, t):
    'nonpositional_param_list : keyword_param_list COMMA excess_args_param'
    t[0] = t[1] + t[3]

def p_nonpositional_param_list_1(self, t):
    '''nonpositional_param_list : keyword_param_list
                                | excess_args_param'''
    t[0] = t[1]

def p_keyword_param_list_0(self, t):
    'keyword_param_list : keyword_param'
    t[0] = [t[1]]

def p_keyword_param_list_1(self, t):
    'keyword_param_list : keyword_param_list COMMA keyword_param'
    t[0] = t[1] + [t[3]]

# keyword params are re-rendered as "name = <repr of default>" strings
def p_keyword_param(self, t):
    'keyword_param : ID EQUALS expr'
    t[0] = t[1] + ' = ' + t[3].__repr__()

def p_excess_args_param(self, t):
    'excess_args_param : ASTERISK ID'
    # Just concatenate them: '*ID'.  Wrap in list to be consistent
    # with positional_param_list and keyword_param_list.
    t[0] = [t[1] + t[2]]

# End of format definition-related rules.
##############

#
# A decode block looks like:
#       decode <field1> [, <field2>]* [default <inst>] { ... }
#
def p_top_level_decode_block(self, t):
    'top_level_decode_block : decode_block'
    codeObj = t[1]
    # wrap the generated cases in the decodeInst() method definition
    codeObj.wrap_decode_block('''
StaticInstPtr
%(isa_name)s::Decoder::decodeInst(%(isa_name)s::ExtMachInst machInst)
{
    using namespace %(namespace)s;
''' % self, '}')

    codeObj.emit()

def p_decode_block(self, t):
    'decode_block : DECODE ID opt_default LBRACE decode_stmt_list RBRACE'
    default_defaults = self.defaultStack.pop()
    codeObj = t[5]
    # use the "default defaults" only if there was no explicit
    # default statement in decode_stmt_list
    if not codeObj.has_decode_default:
        codeObj += default_defaults
    codeObj.wrap_decode_block('switch (%s) {\n' % t[2], '}\n')
    t[0] = codeObj

# The opt_default statement serves only to push the "default
# defaults" onto defaultStack.  This value will be used by nested
# decode blocks, and used and popped off when the current
# decode_block is processed (in p_decode_block() above).
def p_opt_default_0(self, t):
    'opt_default : empty'
    # no default specified: reuse the one currently at the top of
    # the stack
    self.defaultStack.push(self.defaultStack.top())
    # no meaningful value returned
    t[0] = None

def p_opt_default_1(self, t):
    'opt_default : DEFAULT inst'
    # push the new default
    codeObj = t[2]
    codeObj.wrap_decode_block('\ndefault:\n', 'break;\n')
    self.defaultStack.push(codeObj)
    # no meaningful value returned
    t[0] = None

def p_decode_stmt_list_0(self, t):
    'decode_stmt_list : decode_stmt'
    t[0] = t[1]

def p_decode_stmt_list_1(self, t):
    'decode_stmt_list : decode_stmt decode_stmt_list'
    # at most one 'default:' label is allowed per decode block
    if (t[1].has_decode_default and t[2].has_decode_default):
        error(t.lineno(1), 'Two default cases in decode block')
    t[0] = t[1] + t[2]

#
# Decode statement rules
#
# There are four types of statements allowed in a decode block:
# 1. Format blocks 'format <foo> { ... }'
# 2. Nested decode blocks
# 3. Instruction definitions.
# 4. C preprocessor directives.


# Preprocessor directives found in a decode statement list are
# passed through to the output, replicated to all of the output
# code streams.  This works well for ifdefs, so we can ifdef out
# both the declarations and the decode cases generated by an
# instruction definition.  Handling them as part of the grammar
# makes it easy to keep them in the right place with respect to
# the code generated by the other statements.
def p_decode_stmt_cpp(self, t):
    'decode_stmt : CPPDIRECTIVE'
    t[0] = GenCode(self, t[1], t[1], t[1], t[1])

# A format block 'format <foo> { ... }' sets the default
# instruction format used to handle instruction definitions inside
# the block.  This format can be overridden by using an explicit
# format on the instruction definition or with a nested format
# block.
def p_decode_stmt_format(self, t):
    'decode_stmt : FORMAT push_format_id LBRACE decode_stmt_list RBRACE'
    # The format will be pushed on the stack when 'push_format_id'
    # is processed (see below).  Once the parser has recognized
    # the full production (though the right brace), we're done
    # with the format, so now we can pop it.
    self.formatStack.pop()
    t[0] = t[4]

# This rule exists so we can set the current format (& push the
# stack) when we recognize the format name part of the format
# block.
def p_push_format_id(self, t):
    'push_format_id : ID'
    try:
        self.formatStack.push(self.formatMap[t[1]])
        t[0] = ('', '// format %s' % t[1])
    except KeyError:
        error(t.lineno(1), 'instruction format "%s" not defined.' % t[1])

# Nested decode block: if the value of the current field matches
# the specified constant(s), do a nested decode on some other field.
def p_decode_stmt_decode(self, t):
    'decode_stmt : case_list COLON decode_block'
    case_list = t[1]
    codeObj = t[3]
    # just wrap the decoding code from the block as a case in the
    # outer switch statement.
    codeObj.wrap_decode_block('\n%s\n' % ''.join(case_list))
    codeObj.has_decode_default = (case_list == ['default:'])
    t[0] = codeObj

# Instruction definition (finally!).
def p_decode_stmt_inst(self, t):
    'decode_stmt : case_list COLON inst SEMI'
    case_list = t[1]
    codeObj = t[3]
    # wrap the instruction's code with its case label(s) and a
    # trailing 'break'
    codeObj.wrap_decode_block('\n%s' % ''.join(case_list), 'break;\n')
    codeObj.has_decode_default = (case_list == ['default:'])
    t[0] = codeObj

# The constant list for a decode case label must be non-empty, and must
# either be the keyword 'default', or made up of one or more
# comma-separated integer literals or strings which evaluate to
# constants when compiled as C++.
def p_case_list_0(self, t):
    'case_list : DEFAULT'
    t[0] = ['default:']

def prep_int_lit_case_label(self, lit):
    '''Render an integer literal as a C++ case label; values that do
    not fit in 32 bits are wrapped in the ULL() macro.'''
    if lit >= 2**32:
        return 'case ULL(%#x): ' % lit
    else:
        return 'case %#x: ' % lit

def prep_str_lit_case_label(self, lit):
    '''Render a string literal (expected to name a C++ constant) as a
    case label.'''
    return 'case %s: ' % lit

def p_case_list_1(self, t):
    'case_list : INTLIT'
    t[0] = [self.prep_int_lit_case_label(t[1])]

def p_case_list_2(self, t):
    'case_list : STRLIT'
    t[0] = [self.prep_str_lit_case_label(t[1])]

def p_case_list_3(self, t):
    'case_list : case_list COMMA INTLIT'
    t[0] = t[1]
    t[0].append(self.prep_int_lit_case_label(t[3]))

def p_case_list_4(self, t):
    'case_list : case_list COMMA STRLIT'
    t[0] = t[1]
    t[0].append(self.prep_str_lit_case_label(t[3]))

# Define an instruction using the current instruction format
# (specified by an enclosing format block).
# "<mnemonic>(<args>)"
def p_inst_0(self, t):
    'inst : ID LPAREN arg_list RPAREN'
    # Pass the ID and arg list to the current format class to deal with.
    currentFormat = self.formatStack.top()
    codeObj = currentFormat.defineInst(self, t[1], t[3], t.lexer.lineno)
    args = ','.join(map(str, t[3]))
    # comment out every line of the arg dump; the leading '//' of the
    # first line is stripped because it is spliced into the
    # '// fmt::mnemonic(...)' comment built below
    args = re.sub('(?m)^', '//', args)
    args = re.sub('^//', '', args)
    comment = '\n// %s::%s(%s)\n' % (currentFormat.id, t[1], args)
    codeObj.prepend_all(comment)
    t[0] = codeObj

# Define an instruction using an explicitly specified format:
# "<fmt>::<mnemonic>(<args>)"
def p_inst_1(self, t):
    'inst : ID DBLCOLON ID LPAREN arg_list RPAREN'
    try:
        format = self.formatMap[t[1]]
    except KeyError:
        error(t.lineno(1), 'instruction format "%s" not defined.' % t[1])

    codeObj = format.defineInst(self, t[3], t[5], t.lexer.lineno)
    comment = '\n// %s::%s(%s)\n' % (t[1], t[3], t[5])
    codeObj.prepend_all(comment)
    t[0] = codeObj

# The arg list generates a tuple, where the first element is a
# list of the positional args and the second element is a dict
# containing the keyword args.
def p_arg_list_0(self, t):
    'arg_list : positional_arg_list COMMA keyword_arg_list'
    t[0] = ( t[1], t[3] )

def p_arg_list_1(self, t):
    'arg_list : positional_arg_list'
    t[0] = ( t[1], {} )

def p_arg_list_2(self, t):
    'arg_list : keyword_arg_list'
    t[0] = ( [], t[1] )

def p_positional_arg_list_0(self, t):
    'positional_arg_list : empty'
    t[0] = []

def p_positional_arg_list_1(self, t):
    'positional_arg_list : expr'
    t[0] = [t[1]]

def p_positional_arg_list_2(self, t):
    'positional_arg_list : positional_arg_list COMMA expr'
    t[0] = t[1] + [t[3]]

def p_keyword_arg_list_0(self, t):
    'keyword_arg_list : keyword_arg'
    t[0] = t[1]

def p_keyword_arg_list_1(self, t):
    'keyword_arg_list : keyword_arg_list COMMA keyword_arg'
    # accumulate into the dict built so far
    t[0] = t[1]
    t[0].update(t[3])

def p_keyword_arg(self, t):
    'keyword_arg : ID EQUALS expr'
    t[0] = { t[1] : t[3] }

#
# Basic expressions.  These constitute the argument values of
# "function calls" (i.e. instruction definitions in the decode
# block) and default values for formal parameters of format
# functions.
#
# Right now, these are either strings, integers, or (recursively)
# lists of exprs (using Python square-bracket list syntax).  Note
# that bare identifiers are treated as string constants here (since
# there isn't really a variable namespace to refer to).
#
def p_expr_0(self, t):
    '''expr : ID
            | INTLIT
            | STRLIT
            | CODELIT'''
    t[0] = t[1]

def p_expr_1(self, t):
    '''expr : LBRACKET list_expr RBRACKET'''
    t[0] = t[2]

def p_list_expr_0(self, t):
    'list_expr : expr'
    t[0] = [t[1]]

def p_list_expr_1(self, t):
    'list_expr : list_expr COMMA expr'
    t[0] = t[1] + [t[3]]

def p_list_expr_2(self, t):
    'list_expr : empty'
    t[0] = []

#
# Empty production... use in other rules for readability.
#
def p_empty(self, t):
    'empty :'
    pass

# Parse error handler.  Note that the argument here is the
# offending *token*, not a grammar symbol (hence the need to use
# t.value).  No docstring here on purpose: p_error is PLY's special
# error hook, not a grammar rule.
def p_error(self, t):
    if t:
        error(t.lexer.lineno, "syntax error at '%s'" % t.value)
    else:
        error("unknown syntax error")

# END OF GRAMMAR RULES

def updateExportContext(self):
    # Refresh the 'let'-block execution context with a parser-bound
    # InstObjParams factory and all templates defined so far.

    # create a continuation that allows us to grab the current parser
    def wrapInstObjParams(*args):
        return InstObjParams(self, *args)
    self.exportContext['InstObjParams'] = wrapInstObjParams
    self.exportContext.update(self.templateMap)

def defFormat(self, id, params, code, lineno):
    '''Define a new format'''

    # make sure we haven't already defined this one
    if id in self.formatMap:
        error(lineno, 'format %s redefined.' % id)

    # create new object and store in global map
    self.formatMap[id] = Format(id, params, code)
|
def expandCpuSymbolsToDict(self, template):
    '''Expand template with CPU-specific references into a
    dictionary with an entry for each CPU model name.  The entry
    key is the model name and the corresponding value is the
    template with the CPU-specific refs substituted for that
    model.'''

    # Double every '%' that does not introduce a CPU-specific term,
    # so only the %(CPU_...)s references take part in substitution.
    protected = re.sub(r'%(?!\(CPU_)', '%%', template)
    return dict((model.name, protected % model.strings)
                for model in self.cpuModels)

def expandCpuSymbolsToString(self, template):
    '''*If* the template has CPU-specific references, return a
    single string containing a copy of the template for each CPU
    model with the corresponding values substituted in.  If the
    template has no CPU-specific references, it is returned
    unmodified.'''

    if template.find('%(CPU_') == -1:
        return template
    expansions = self.expandCpuSymbolsToDict(template).values()
    return reduce(lambda acc, piece: acc + piece, expansions)

def protectCpuSymbols(self, template):
    '''Protect CPU-specific references by doubling the
    corresponding '%'s (in preparation for substituting a different
    set of references into the template).'''

    cpu_ref_pattern = r'%(?=\(CPU_)'
    return re.sub(cpu_ref_pattern, '%%', template)
| |
2463 def protectNonSubstPercents(self, s): 2464 '''Protect any non-dict-substitution '%'s in a format string 2465 (i.e. those not followed by '(')''' 2466 2467 return re.sub(r'%(?!\()', '%%', s) 2468 2469 def buildOperandNameMap(self, user_dict, lineno): 2470 operand_name = {} 2471 for op_name, val in user_dict.iteritems(): 2472 2473 # Check if extra attributes have been specified. 2474 if len(val) > 9: 2475 error(lineno, 'error: too many attributes for operand "%s"' % 2476 base_cls_name) 2477 2478 # Pad val with None in case optional args are missing 2479 val += (None, None, None, None) 2480 base_cls_name, dflt_ext, reg_spec, flags, sort_pri, \ 2481 read_code, write_code, read_predicate, write_predicate = val[:9] 2482 2483 # Canonical flag structure is a triple of lists, where each list 2484 # indicates the set of flags implied by this operand always, when 2485 # used as a source, and when used as a dest, respectively. 2486 # For simplicity this can be initialized using a variety of fairly 2487 # obvious shortcuts; we convert these to canonical form here. 
2488 if not flags: 2489 # no flags specified (e.g., 'None') 2490 flags = ( [], [], [] ) 2491 elif isinstance(flags, str): 2492 # a single flag: assumed to be unconditional 2493 flags = ( [ flags ], [], [] ) 2494 elif isinstance(flags, list): 2495 # a list of flags: also assumed to be unconditional 2496 flags = ( flags, [], [] ) 2497 elif isinstance(flags, tuple): 2498 # it's a tuple: it should be a triple, 2499 # but each item could be a single string or a list 2500 (uncond_flags, src_flags, dest_flags) = flags 2501 flags = (makeList(uncond_flags), 2502 makeList(src_flags), makeList(dest_flags)) 2503 2504 # Accumulate attributes of new operand class in tmp_dict 2505 tmp_dict = {} 2506 attrList = ['reg_spec', 'flags', 'sort_pri', 2507 'read_code', 'write_code', 2508 'read_predicate', 'write_predicate'] 2509 if dflt_ext: 2510 dflt_ctype = self.operandTypeMap[dflt_ext] 2511 attrList.extend(['dflt_ctype', 'dflt_ext']) 2512 # reg_spec is either just a string or a dictionary 2513 # (for elems of vector) 2514 if isinstance(reg_spec, tuple): 2515 (reg_spec, elem_spec) = reg_spec 2516 if isinstance(elem_spec, str): 2517 attrList.append('elem_spec') 2518 else: 2519 assert(isinstance(elem_spec, dict)) 2520 elems = elem_spec 2521 attrList.append('elems') 2522 for attr in attrList: 2523 tmp_dict[attr] = eval(attr) 2524 tmp_dict['base_name'] = op_name 2525 2526 # New class name will be e.g. "IntReg_Ra" 2527 cls_name = base_cls_name + '_' + op_name 2528 # Evaluate string arg to get class object. Note that the 2529 # actual base class for "IntReg" is "IntRegOperand", i.e. we 2530 # have to append "Operand". 2531 try: 2532 base_cls = eval(base_cls_name + 'Operand') 2533 except NameError: 2534 error(lineno, 2535 'error: unknown operand base class "%s"' % base_cls_name) 2536 # The following statement creates a new class called 2537 # <cls_name> as a subclass of <base_cls> with the attributes 2538 # in tmp_dict, just as if we evaluated a class declaration. 
2539 operand_name[op_name] = type(cls_name, (base_cls,), tmp_dict) 2540 2541 self.operandNameMap = operand_name 2542 2543 # Define operand variables. 2544 operands = user_dict.keys() 2545 # Add the elems defined in the vector operands and 2546 # build a map elem -> vector (used in OperandList) 2547 elem_to_vec = {} 2548 for op in user_dict.keys(): 2549 if hasattr(self.operandNameMap[op], 'elems'): 2550 for elem in self.operandNameMap[op].elems.keys(): 2551 operands.append(elem) 2552 elem_to_vec[elem] = op 2553 self.elemToVector = elem_to_vec 2554 extensions = self.operandTypeMap.keys() 2555 2556 operandsREString = r''' 2557 (?<!\w) # neg. lookbehind assertion: prevent partial matches 2558 ((%s)(?:_(%s))?) # match: operand with optional '_' then suffix 2559 (?!\w) # neg. lookahead assertion: prevent partial matches 2560 ''' % (string.join(operands, '|'), string.join(extensions, '|')) 2561 2562 self.operandsRE = re.compile(operandsREString, re.MULTILINE|re.VERBOSE) 2563 2564 # Same as operandsREString, but extension is mandatory, and only two 2565 # groups are returned (base and ext, not full name as above). 2566 # Used for subtituting '_' for '.' to make C++ identifiers. 2567 operandsWithExtREString = r'(?<!\w)(%s)_(%s)(?!\w)' \ 2568 % (string.join(operands, '|'), string.join(extensions, '|')) 2569 2570 self.operandsWithExtRE = \ 2571 re.compile(operandsWithExtREString, re.MULTILINE) 2572 2573 def substMungedOpNames(self, code): 2574 '''Munge operand names in code string to make legal C++ 2575 variable names. This means getting rid of the type extension 2576 if any. 
Will match base_name attribute of Operand object.)''' 2577 return self.operandsWithExtRE.sub(r'\1', code) 2578 2579 def mungeSnippet(self, s): 2580 '''Fix up code snippets for final substitution in templates.''' 2581 if isinstance(s, str): 2582 return self.substMungedOpNames(substBitOps(s)) 2583 else: 2584 return s 2585 2586 def open(self, name, bare=False): 2587 '''Open the output file for writing and include scary warning.''' 2588 filename = os.path.join(self.output_dir, name) 2589 f = open(filename, 'w') 2590 if f: 2591 if not bare: 2592 f.write(ISAParser.scaremonger_template % self) 2593 return f 2594 2595 def update(self, file, contents): 2596 '''Update the output file only. Scons should handle the case when 2597 the new contents are unchanged using its built-in hash feature.''' 2598 f = self.open(file) 2599 f.write(contents) 2600 f.close() 2601 2602 # This regular expression matches '##include' directives 2603 includeRE = re.compile(r'^\s*##include\s+"(?P<filename>[^"]*)".*$', 2604 re.MULTILINE) 2605 2606 def replace_include(self, matchobj, dirname): 2607 """Function to replace a matched '##include' directive with the 2608 contents of the specified file (with nested ##includes 2609 replaced recursively). 
'matchobj' is an re match object 2610 (from a match of includeRE) and 'dirname' is the directory 2611 relative to which the file path should be resolved.""" 2612 2613 fname = matchobj.group('filename') 2614 full_fname = os.path.normpath(os.path.join(dirname, fname)) 2615 contents = '##newfile "%s"\n%s\n##endfile\n' % \ 2616 (full_fname, self.read_and_flatten(full_fname)) 2617 return contents 2618 2619 def read_and_flatten(self, filename): 2620 """Read a file and recursively flatten nested '##include' files.""" 2621 2622 current_dir = os.path.dirname(filename) 2623 try: 2624 contents = open(filename).read() 2625 except IOError: 2626 error('Error including file "%s"' % filename) 2627 2628 self.fileNameStack.push(LineTracker(filename)) 2629 2630 # Find any includes and include them 2631 def replace(matchobj): 2632 return self.replace_include(matchobj, current_dir) 2633 contents = self.includeRE.sub(replace, contents) 2634 2635 self.fileNameStack.pop() 2636 return contents 2637 2638 AlreadyGenerated = {} 2639 2640 def _parse_isa_desc(self, isa_desc_file): 2641 '''Read in and parse the ISA description.''' 2642 2643 # The build system can end up running the ISA parser twice: once to 2644 # finalize the build dependencies, and then to actually generate 2645 # the files it expects (in src/arch/$ARCH/generated). This code 2646 # doesn't do anything different either time, however; the SCons 2647 # invocations just expect different things. Since this code runs 2648 # within SCons, we can just remember that we've already run and 2649 # not perform a completely unnecessary run, since the ISA parser's 2650 # effect is idempotent. 2651 if isa_desc_file in ISAParser.AlreadyGenerated: 2652 return 2653 2654 # grab the last three path components of isa_desc_file 2655 self.filename = '/'.join(isa_desc_file.split('/')[-3:]) 2656 2657 # Read file and (recursively) all included files into a string. 
2658 # PLY requires that the input be in a single string so we have to 2659 # do this up front. 2660 isa_desc = self.read_and_flatten(isa_desc_file) 2661 2662 # Initialize lineno tracker 2663 self.lex.lineno = LineTracker(isa_desc_file) 2664 2665 # Parse. 2666 self.parse_string(isa_desc) 2667 2668 ISAParser.AlreadyGenerated[isa_desc_file] = None 2669 2670 def parse_isa_desc(self, *args, **kwargs): 2671 try: 2672 self._parse_isa_desc(*args, **kwargs) 2673 except ISAParserError, e: 2674 print backtrace(self.fileNameStack) 2675 print "At %s:" % e.lineno 2676 print e 2677 sys.exit(1) 2678 2679# Called as script: get args from command line. 2680# Args are: <isa desc file> <output dir> 2681if __name__ == '__main__': 2682 ISAParser(sys.argv[2]).parse_isa_desc(sys.argv[1])
| 2396 def protectNonSubstPercents(self, s): 2397 '''Protect any non-dict-substitution '%'s in a format string 2398 (i.e. those not followed by '(')''' 2399 2400 return re.sub(r'%(?!\()', '%%', s) 2401 2402 def buildOperandNameMap(self, user_dict, lineno): 2403 operand_name = {} 2404 for op_name, val in user_dict.iteritems(): 2405 2406 # Check if extra attributes have been specified. 2407 if len(val) > 9: 2408 error(lineno, 'error: too many attributes for operand "%s"' % 2409 base_cls_name) 2410 2411 # Pad val with None in case optional args are missing 2412 val += (None, None, None, None) 2413 base_cls_name, dflt_ext, reg_spec, flags, sort_pri, \ 2414 read_code, write_code, read_predicate, write_predicate = val[:9] 2415 2416 # Canonical flag structure is a triple of lists, where each list 2417 # indicates the set of flags implied by this operand always, when 2418 # used as a source, and when used as a dest, respectively. 2419 # For simplicity this can be initialized using a variety of fairly 2420 # obvious shortcuts; we convert these to canonical form here. 
2421 if not flags: 2422 # no flags specified (e.g., 'None') 2423 flags = ( [], [], [] ) 2424 elif isinstance(flags, str): 2425 # a single flag: assumed to be unconditional 2426 flags = ( [ flags ], [], [] ) 2427 elif isinstance(flags, list): 2428 # a list of flags: also assumed to be unconditional 2429 flags = ( flags, [], [] ) 2430 elif isinstance(flags, tuple): 2431 # it's a tuple: it should be a triple, 2432 # but each item could be a single string or a list 2433 (uncond_flags, src_flags, dest_flags) = flags 2434 flags = (makeList(uncond_flags), 2435 makeList(src_flags), makeList(dest_flags)) 2436 2437 # Accumulate attributes of new operand class in tmp_dict 2438 tmp_dict = {} 2439 attrList = ['reg_spec', 'flags', 'sort_pri', 2440 'read_code', 'write_code', 2441 'read_predicate', 'write_predicate'] 2442 if dflt_ext: 2443 dflt_ctype = self.operandTypeMap[dflt_ext] 2444 attrList.extend(['dflt_ctype', 'dflt_ext']) 2445 # reg_spec is either just a string or a dictionary 2446 # (for elems of vector) 2447 if isinstance(reg_spec, tuple): 2448 (reg_spec, elem_spec) = reg_spec 2449 if isinstance(elem_spec, str): 2450 attrList.append('elem_spec') 2451 else: 2452 assert(isinstance(elem_spec, dict)) 2453 elems = elem_spec 2454 attrList.append('elems') 2455 for attr in attrList: 2456 tmp_dict[attr] = eval(attr) 2457 tmp_dict['base_name'] = op_name 2458 2459 # New class name will be e.g. "IntReg_Ra" 2460 cls_name = base_cls_name + '_' + op_name 2461 # Evaluate string arg to get class object. Note that the 2462 # actual base class for "IntReg" is "IntRegOperand", i.e. we 2463 # have to append "Operand". 2464 try: 2465 base_cls = eval(base_cls_name + 'Operand') 2466 except NameError: 2467 error(lineno, 2468 'error: unknown operand base class "%s"' % base_cls_name) 2469 # The following statement creates a new class called 2470 # <cls_name> as a subclass of <base_cls> with the attributes 2471 # in tmp_dict, just as if we evaluated a class declaration. 
2472 operand_name[op_name] = type(cls_name, (base_cls,), tmp_dict) 2473 2474 self.operandNameMap = operand_name 2475 2476 # Define operand variables. 2477 operands = user_dict.keys() 2478 # Add the elems defined in the vector operands and 2479 # build a map elem -> vector (used in OperandList) 2480 elem_to_vec = {} 2481 for op in user_dict.keys(): 2482 if hasattr(self.operandNameMap[op], 'elems'): 2483 for elem in self.operandNameMap[op].elems.keys(): 2484 operands.append(elem) 2485 elem_to_vec[elem] = op 2486 self.elemToVector = elem_to_vec 2487 extensions = self.operandTypeMap.keys() 2488 2489 operandsREString = r''' 2490 (?<!\w) # neg. lookbehind assertion: prevent partial matches 2491 ((%s)(?:_(%s))?) # match: operand with optional '_' then suffix 2492 (?!\w) # neg. lookahead assertion: prevent partial matches 2493 ''' % (string.join(operands, '|'), string.join(extensions, '|')) 2494 2495 self.operandsRE = re.compile(operandsREString, re.MULTILINE|re.VERBOSE) 2496 2497 # Same as operandsREString, but extension is mandatory, and only two 2498 # groups are returned (base and ext, not full name as above). 2499 # Used for subtituting '_' for '.' to make C++ identifiers. 2500 operandsWithExtREString = r'(?<!\w)(%s)_(%s)(?!\w)' \ 2501 % (string.join(operands, '|'), string.join(extensions, '|')) 2502 2503 self.operandsWithExtRE = \ 2504 re.compile(operandsWithExtREString, re.MULTILINE) 2505 2506 def substMungedOpNames(self, code): 2507 '''Munge operand names in code string to make legal C++ 2508 variable names. This means getting rid of the type extension 2509 if any. 
Will match base_name attribute of Operand object.)''' 2510 return self.operandsWithExtRE.sub(r'\1', code) 2511 2512 def mungeSnippet(self, s): 2513 '''Fix up code snippets for final substitution in templates.''' 2514 if isinstance(s, str): 2515 return self.substMungedOpNames(substBitOps(s)) 2516 else: 2517 return s 2518 2519 def open(self, name, bare=False): 2520 '''Open the output file for writing and include scary warning.''' 2521 filename = os.path.join(self.output_dir, name) 2522 f = open(filename, 'w') 2523 if f: 2524 if not bare: 2525 f.write(ISAParser.scaremonger_template % self) 2526 return f 2527 2528 def update(self, file, contents): 2529 '''Update the output file only. Scons should handle the case when 2530 the new contents are unchanged using its built-in hash feature.''' 2531 f = self.open(file) 2532 f.write(contents) 2533 f.close() 2534 2535 # This regular expression matches '##include' directives 2536 includeRE = re.compile(r'^\s*##include\s+"(?P<filename>[^"]*)".*$', 2537 re.MULTILINE) 2538 2539 def replace_include(self, matchobj, dirname): 2540 """Function to replace a matched '##include' directive with the 2541 contents of the specified file (with nested ##includes 2542 replaced recursively). 
'matchobj' is an re match object 2543 (from a match of includeRE) and 'dirname' is the directory 2544 relative to which the file path should be resolved.""" 2545 2546 fname = matchobj.group('filename') 2547 full_fname = os.path.normpath(os.path.join(dirname, fname)) 2548 contents = '##newfile "%s"\n%s\n##endfile\n' % \ 2549 (full_fname, self.read_and_flatten(full_fname)) 2550 return contents 2551 2552 def read_and_flatten(self, filename): 2553 """Read a file and recursively flatten nested '##include' files.""" 2554 2555 current_dir = os.path.dirname(filename) 2556 try: 2557 contents = open(filename).read() 2558 except IOError: 2559 error('Error including file "%s"' % filename) 2560 2561 self.fileNameStack.push(LineTracker(filename)) 2562 2563 # Find any includes and include them 2564 def replace(matchobj): 2565 return self.replace_include(matchobj, current_dir) 2566 contents = self.includeRE.sub(replace, contents) 2567 2568 self.fileNameStack.pop() 2569 return contents 2570 2571 AlreadyGenerated = {} 2572 2573 def _parse_isa_desc(self, isa_desc_file): 2574 '''Read in and parse the ISA description.''' 2575 2576 # The build system can end up running the ISA parser twice: once to 2577 # finalize the build dependencies, and then to actually generate 2578 # the files it expects (in src/arch/$ARCH/generated). This code 2579 # doesn't do anything different either time, however; the SCons 2580 # invocations just expect different things. Since this code runs 2581 # within SCons, we can just remember that we've already run and 2582 # not perform a completely unnecessary run, since the ISA parser's 2583 # effect is idempotent. 2584 if isa_desc_file in ISAParser.AlreadyGenerated: 2585 return 2586 2587 # grab the last three path components of isa_desc_file 2588 self.filename = '/'.join(isa_desc_file.split('/')[-3:]) 2589 2590 # Read file and (recursively) all included files into a string. 
2591 # PLY requires that the input be in a single string so we have to 2592 # do this up front. 2593 isa_desc = self.read_and_flatten(isa_desc_file) 2594 2595 # Initialize lineno tracker 2596 self.lex.lineno = LineTracker(isa_desc_file) 2597 2598 # Parse. 2599 self.parse_string(isa_desc) 2600 2601 ISAParser.AlreadyGenerated[isa_desc_file] = None 2602 2603 def parse_isa_desc(self, *args, **kwargs): 2604 try: 2605 self._parse_isa_desc(*args, **kwargs) 2606 except ISAParserError, e: 2607 print backtrace(self.fileNameStack) 2608 print "At %s:" % e.lineno 2609 print e 2610 sys.exit(1) 2611 2612# Called as script: get args from command line. 2613# Args are: <isa desc file> <output dir> 2614if __name__ == '__main__': 2615 ISAParser(sys.argv[2]).parse_isa_desc(sys.argv[1])
|