# isa_parser.py -- ISA description parser (revision 8452)
# Copyright (c) 2003-2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt

import os
import sys
import re
import string
import inspect, traceback
# get type names
from types import *

from m5.util.grammar import Grammar

# Module-wide debug flag: when True, parser errors print a Python
# backtrace and format-block failures re-raise instead of being reported.
debug = False

###################
# Utility functions

def indent(s):
    '''Indent every line in string 's' by two spaces (except
    preprocessor directives).  Used to make nested code blocks look
    pretty.'''
    return re.sub(r'(?m)^(?!#)', '  ', s)

def fixPythonIndentation(s):
    '''Munge a somewhat arbitrarily formatted piece of Python code
    (e.g. from a format 'let' block) into something whose indentation
    will get by the Python parser.

    The two keys here are that Python will give a syntax error if
    there's any whitespace at the beginning of the first line, and that
    all lines at the same lexical nesting level must have identical
    indentation.  Unfortunately the way code literals work, an entire
    let block tends to have some initial indentation.  Rather than
    trying to figure out what that is and strip it off, we prepend 'if
    1:' to make the let code the nested block inside the if (and have
    the parser automatically deal with the indentation for us).

    We don't want to do this if (1) the code block is empty or (2) the
    first line of the block doesn't have any whitespace at the front.'''
    # get rid of blank lines first
    s = re.sub(r'(?m)^\s*\n', '', s)
    if (s != '' and re.match(r'[ \t]', s[0])):
        s = 'if 1:\n' + s
    return s

class ISAParserError(Exception):
    """Error handler for parser errors"""
    def __init__(self, first, second=None):
        # Accepts either ISAParserError(msg) or
        # ISAParserError(lineno_or_lex_token, msg).
        if second is None:
            self.lineno = 0
            self.string = first
        else:
            # A PLY lexer token may be passed in place of a line number.
            if hasattr(first, 'lexer'):
                first = first.lexer.lineno
            self.lineno = first
            self.string = second

    def display(self, filename_stack, print_traceback=debug):
        # Output formatted to work under Emacs compile-mode.  Optional
        # 'print_traceback' arg, if set to True, prints a Python stack
        # backtrace too (can be handy when trying to debug the parser
        # itself).

        spaces = ""
        for (filename, line) in filename_stack[:-1]:
            # print() form is valid under both Python 2 and 3.
            print("%sIn file included from %s:" % (spaces, filename))
            spaces += "  "

        # Print a Python stack backtrace if requested.
        if print_traceback or not self.lineno:
            traceback.print_exc()

        line_str = "%s:" % (filename_stack[-1][0], )
        if self.lineno:
            line_str += "%d:" % (self.lineno, )

        return "%s%s %s" % (spaces, line_str, self.string)

    def exit(self, filename_stack, print_traceback=debug):
        # Just call exit.

        sys.exit(self.display(filename_stack, print_traceback))

def error(*args):
    '''Raise an ISAParserError; accepts (msg) or (lineno, msg).'''
    raise ISAParserError(*args)

####################
# Template objects.
#
# Template objects are format strings that allow substitution from
# the attribute spaces of other objects (e.g. InstObjParams instances).

# Matches substitution labels of the form %(label)s / %(label)d,
# skipping escaped '%%' sequences.
labelRE = re.compile(r'(?<!%)%\(([^\)]+)\)[sd]')

class Template(object):
    def __init__(self, parser, t):
        self.parser = parser
        self.template = t

    def subst(self, d):
        '''Substitute the attributes of 'd' (an InstObjParams, a dict,
        or any object with a __dict__) into this template and return
        the resulting string.'''

        # Protect non-Python-dict substitutions (e.g. if there's a printf
        # in the templated C++ code)
        template = self.parser.protectNonSubstPercents(self.template)
        # CPU-model-specific substitutions are handled later (in GenCode).
        template = self.parser.protectCpuSymbols(template)

        # Build a dict ('myDict') to use for the template substitution.
        # Start with the template namespace.  Make a copy since we're
        # going to modify it.
        myDict = self.parser.templateMap.copy()

        if isinstance(d, InstObjParams):
            # If we're dealing with an InstObjParams object, we need
            # to be a little more sophisticated.  The instruction-wide
            # parameters are already formed, but the parameters which
            # are only function wide still need to be generated.
            compositeCode = ''

            myDict.update(d.__dict__)
            # The "operands" and "snippets" attributes of the InstObjParams
            # objects are for internal use and not substitution.
            del myDict['operands']
            del myDict['snippets']

            # Only munge the snippets this template actually references.
            # ('l in d.snippets' replaces the Python-2-only has_key().)
            snippetLabels = [l for l in labelRE.findall(template)
                             if l in d.snippets]

            snippets = dict([(s, self.parser.mungeSnippet(d.snippets[s]))
                             for s in snippetLabels])

            myDict.update(snippets)

            compositeCode = ' '.join(map(str, snippets.values()))

            # Add in template itself in case it references any
            # operands explicitly (like Mem)
            compositeCode += ' ' + template

            operands = SubOperandList(self.parser, compositeCode, d.operands)

            myDict['op_decl'] = operands.concatAttrStrings('op_decl')
            if operands.readPC or operands.setPC:
                myDict['op_decl'] += 'TheISA::PCState __parserAutoPCState;\n'

            is_src = lambda op: op.is_src
            is_dest = lambda op: op.is_dest

            myDict['op_src_decl'] = \
                operands.concatSomeAttrStrings(is_src, 'op_src_decl')
            myDict['op_dest_decl'] = \
                operands.concatSomeAttrStrings(is_dest, 'op_dest_decl')
            if operands.readPC:
                myDict['op_src_decl'] += \
                    'TheISA::PCState __parserAutoPCState;\n'
            if operands.setPC:
                myDict['op_dest_decl'] += \
                    'TheISA::PCState __parserAutoPCState;\n'

            myDict['op_rd'] = operands.concatAttrStrings('op_rd')
            if operands.readPC:
                myDict['op_rd'] = '__parserAutoPCState = xc->pcState();\n' + \
                                  myDict['op_rd']

            # Compose the op_wb string.  If we're going to write back the
            # PC state because we changed some of its elements, we'll need to
            # do that as early as possible.  That allows later uncoordinated
            # modifications to the PC to layer appropriately.
            reordered = list(operands.items)
            reordered.reverse()
            op_wb_str = ''
            pcWbStr = 'xc->pcState(__parserAutoPCState);\n'
            for op_desc in reordered:
                if op_desc.isPCPart() and op_desc.is_dest:
                    op_wb_str = op_desc.op_wb + pcWbStr + op_wb_str
                    pcWbStr = ''
                else:
                    op_wb_str = op_desc.op_wb + op_wb_str
            myDict['op_wb'] = op_wb_str

            if d.operands.memOperand:
                myDict['mem_acc_type'] = d.operands.memOperand.mem_acc_type

        elif isinstance(d, dict):
            # if the argument is a dictionary, we just use it.
            myDict.update(d)
        elif hasattr(d, '__dict__'):
            # if the argument is an object, we use its attribute map.
            myDict.update(d.__dict__)
        else:
            raise TypeError("Template.subst() arg must be or have dictionary")
        return template % myDict

    # Convert to string.  This handles the case when a template with a
    # CPU-specific term gets interpolated into another template or into
    # an output block.
    def __str__(self):
        return self.parser.expandCpuSymbolsToString(self.template)

################
# Format object.
#
# A format object encapsulates an instruction format.  It must provide
# a defineInst() method that generates the code for an instruction
# definition.
class Format(object):
    '''Wrapper for a user-defined instruction format.  Compiles the
    format's code block once and exposes defineInst() to generate the
    code for one instruction definition.'''
    def __init__(self, id, params, code):
        self.id = id
        self.params = params
        label = 'def format ' + id
        self.user_code = compile(fixPythonIndentation(code), label, 'exec')
        # ", ".join replaces the Python-2-only string.join().
        param_list = ", ".join(params)
        f = '''def defInst(_code, _context, %s):
    my_locals = vars().copy()
    exec(_code, _context, my_locals)
    return my_locals\n''' % param_list
        c = compile(f, label + ' wrapper', 'exec')
        # Execute the wrapper definition in a private namespace and pull
        # the resulting function out of it.  Unlike the bare 'exec c'
        # statement, this works under both Python 2 and Python 3.
        env = {}
        exec(c, env)
        self.func = env['defInst']

    def defineInst(self, parser, name, args, lineno):
        '''Run the format's code block for instruction 'name' and return
        the resulting GenCode object.'''
        parser.updateExportContext()
        context = parser.exportContext.copy()
        # 'Name' is the mnemonic with its first letter capitalized.
        # Initialized to '' so an empty mnemonic cannot raise NameError.
        Name = ''
        if len(name):
            Name = name[0].upper()
            if len(name) > 1:
                Name += name[1:]
        context.update({ 'name' : name, 'Name' : Name })
        try:
            results = self.func(self.user_code, context, *args[0], **args[1])
        except Exception as exc:
            if debug:
                raise
            error(lineno, 'error defining "%s": %s.' % (name, exc))
        # Keep only the recognized code-output keys; list() makes the
        # deletion safe while iterating (required on Python 3).
        for k in list(results.keys()):
            if k not in ('header_output', 'decoder_output',
                         'exec_output', 'decode_block'):
                del results[k]
        return GenCode(parser, **results)

# Special null format to catch an implicit-format instruction
# definition outside of any format block.
class NoFormat(object):
    def __init__(self):
        self.defaultInst = ''

    def defineInst(self, parser, name, args, lineno):
        error(lineno,
              'instruction definition "%s" with no active format!' % name)

###############
# GenCode class
#
# The GenCode class encapsulates generated code destined for various
# output files.  The header_output and decoder_output attributes are
# strings containing code destined for decoder.hh and decoder.cc
# respectively.  The decode_block attribute contains code to be
# incorporated in the decode function itself (that will also end up in
# decoder.cc).
The exec_output attribute is a dictionary with a key 292# for each CPU model name; the value associated with a particular key 293# is the string of code for that CPU model's exec.cc file. The 294# has_decode_default attribute is used in the decode block to allow 295# explicit default clauses to override default default clauses. 296 297class GenCode(object): 298 # Constructor. At this point we substitute out all CPU-specific 299 # symbols. For the exec output, these go into the per-model 300 # dictionary. For all other output types they get collapsed into 301 # a single string. 302 def __init__(self, parser, 303 header_output = '', decoder_output = '', exec_output = '', 304 decode_block = '', has_decode_default = False): 305 self.parser = parser 306 self.header_output = parser.expandCpuSymbolsToString(header_output) 307 self.decoder_output = parser.expandCpuSymbolsToString(decoder_output) 308 if isinstance(exec_output, dict): 309 self.exec_output = exec_output 310 elif isinstance(exec_output, str): 311 # If the exec_output arg is a single string, we replicate 312 # it for each of the CPU models, substituting and 313 # %(CPU_foo)s params appropriately. 314 self.exec_output = parser.expandCpuSymbolsToDict(exec_output) 315 self.decode_block = parser.expandCpuSymbolsToString(decode_block) 316 self.has_decode_default = has_decode_default 317 318 # Override '+' operator: generate a new GenCode object that 319 # concatenates all the individual strings in the operands. 320 def __add__(self, other): 321 exec_output = {} 322 for cpu in self.parser.cpuModels: 323 n = cpu.name 324 exec_output[n] = self.exec_output[n] + other.exec_output[n] 325 return GenCode(self.parser, 326 self.header_output + other.header_output, 327 self.decoder_output + other.decoder_output, 328 exec_output, 329 self.decode_block + other.decode_block, 330 self.has_decode_default or other.has_decode_default) 331 332 # Prepend a string (typically a comment) to all the strings. 
333 def prepend_all(self, pre): 334 self.header_output = pre + self.header_output 335 self.decoder_output = pre + self.decoder_output 336 self.decode_block = pre + self.decode_block 337 for cpu in self.parser.cpuModels: 338 self.exec_output[cpu.name] = pre + self.exec_output[cpu.name] 339 340 # Wrap the decode block in a pair of strings (e.g., 'case foo:' 341 # and 'break;'). Used to build the big nested switch statement. 342 def wrap_decode_block(self, pre, post = ''): 343 self.decode_block = pre + indent(self.decode_block) + post 344 345##################################################################### 346# 347# Bitfield Operator Support 348# 349##################################################################### 350 351bitOp1ArgRE = re.compile(r'<\s*(\w+)\s*:\s*>') 352 353bitOpWordRE = re.compile(r'(?<![\w\.])([\w\.]+)<\s*(\w+)\s*:\s*(\w+)\s*>') 354bitOpExprRE = re.compile(r'\)<\s*(\w+)\s*:\s*(\w+)\s*>') 355 356def substBitOps(code): 357 # first convert single-bit selectors to two-index form 358 # i.e., <n> --> <n:n> 359 code = bitOp1ArgRE.sub(r'<\1:\1>', code) 360 # simple case: selector applied to ID (name) 361 # i.e., foo<a:b> --> bits(foo, a, b) 362 code = bitOpWordRE.sub(r'bits(\1, \2, \3)', code) 363 # if selector is applied to expression (ending in ')'), 364 # we need to search backward for matching '(' 365 match = bitOpExprRE.search(code) 366 while match: 367 exprEnd = match.start() 368 here = exprEnd - 1 369 nestLevel = 1 370 while nestLevel > 0: 371 if code[here] == '(': 372 nestLevel -= 1 373 elif code[here] == ')': 374 nestLevel += 1 375 here -= 1 376 if here < 0: 377 sys.exit("Didn't find '('!") 378 exprStart = here+1 379 newExpr = r'bits(%s, %s, %s)' % (code[exprStart:exprEnd+1], 380 match.group(1), match.group(2)) 381 code = code[:exprStart] + newExpr + code[match.end():] 382 match = bitOpExprRE.search(code) 383 return code 384 385 386##################################################################### 387# 388# Code Parser 389# 390# The 
# Force the argument to be a list.  Useful for flags, where a caller
# can specify a singleton flag or a list of flags.  Also useful for
# converting tuples to lists so they can be modified.
def makeList(arg):
    if isinstance(arg, list):
        return arg
    elif isinstance(arg, tuple):
        return list(arg)
    elif not arg:
        return []
    else:
        return [ arg ]

class Operand(object):
    '''Base class for operand descriptors.  An instance of this class
    (or actually a class derived from this one) represents a specific
    operand for a code block (e.g, "Rc.sq" as a dest).  Intermediate
    derived classes encapsulates the traits of a particular operand
    type (e.g., "32-bit integer register").'''

    def buildReadCode(self, func=None):
        # Expand the operand-type-specific 'read_code' template.
        subst_dict = {"name": self.base_name,
                      "func": func,
                      "reg_idx": self.reg_spec,
                      "ctype": self.ctype}
        # src_reg_idx only exists once register enumeration has run.
        if hasattr(self, 'src_reg_idx'):
            subst_dict['op_idx'] = self.src_reg_idx
        code = self.read_code % subst_dict
        return '%s = %s;\n' % (self.base_name, code)

    def buildWriteCode(self, func=None):
        # Expand the operand-type-specific 'write_code' template.
        subst_dict = {"name": self.base_name,
                      "func": func,
                      "reg_idx": self.reg_spec,
                      "ctype": self.ctype,
                      "final_val": self.base_name}
        if hasattr(self, 'dest_reg_idx'):
            subst_dict['op_idx'] = self.dest_reg_idx
        code = self.write_code % subst_dict
        return '''
        {
            %s final_val = %s;
            %s;
            if (traceData) { traceData->setData(final_val); }
        }''' % (self.dflt_ctype, self.base_name, code)

    def __init__(self, parser, full_name, ext, is_src, is_dest):
        self.full_name = full_name
        self.ext = ext
        self.is_src = is_src
        self.is_dest = is_dest
        # The 'effective extension' (eff_ext) is either the actual
        # extension, if one was explicitly provided, or the default.
        if ext:
            self.eff_ext = ext
        elif hasattr(self, 'dflt_ext'):
            self.eff_ext = self.dflt_ext

        if hasattr(self, 'eff_ext'):
            self.ctype = parser.operandTypeMap[self.eff_ext]

        # note that mem_acc_type is undefined for non-mem operands...
        # template must be careful not to use it if it doesn't apply.
        if self.isMem():
            self.mem_acc_type = self.ctype

    # Finalize additional fields (primarily code fields).  This step
    # is done separately since some of these fields may depend on the
    # register index enumeration that hasn't been performed yet at the
    # time of __init__().
    def finalize(self):
        self.flags = self.getFlags()
        self.constructor = self.makeConstructor()
        self.op_decl = self.makeDecl()

        if self.is_src:
            self.op_rd = self.makeRead()
            self.op_src_decl = self.makeDecl()
        else:
            self.op_rd = ''
            self.op_src_decl = ''

        if self.is_dest:
            self.op_wb = self.makeWrite()
            self.op_dest_decl = self.makeDecl()
        else:
            self.op_wb = ''
            self.op_dest_decl = ''

    # Type predicates; subclasses override the ones that apply.
    # (They return 0/1 rather than bools; kept as-is for compatibility.)
    def isMem(self):
        return 0

    def isReg(self):
        return 0

    def isFloatReg(self):
        return 0

    def isIntReg(self):
        return 0

    def isControlReg(self):
        return 0

    def isPCState(self):
        return 0

    def isPCPart(self):
        # A PC 'part' is a PC-state operand whose reg_spec names a
        # component of the PC state (as opposed to the whole PC).
        return self.isPCState() and self.reg_spec

    def getFlags(self):
        # note the empty slice '[:]' gives us a copy of self.flags[0]
        # instead of a reference to it
        my_flags = self.flags[0][:]
        if self.is_src:
            my_flags += self.flags[1]
        if self.is_dest:
            my_flags += self.flags[2]
        return my_flags

    def makeDecl(self):
        # Note that initializations in the declarations are solely
        # to avoid 'uninitialized variable' errors from the compiler.
        return self.ctype + ' ' + self.base_name + ' = 0;\n'

class IntRegOperand(Operand):
    def isReg(self):
        return 1

    def isIntReg(self):
        return 1

    def makeConstructor(self):
        c = ''
        if self.is_src:
            c += '\n\t_srcRegIdx[%d] = %s;' % \
                 (self.src_reg_idx, self.reg_spec)
        if self.is_dest:
            c += '\n\t_destRegIdx[%d] = %s;' % \
                 (self.dest_reg_idx, self.reg_spec)
        return c

    def makeRead(self):
        if (self.ctype == 'float' or self.ctype == 'double'):
            error('Attempt to read integer register as FP')
        # 'is not None' replaces the non-idiomatic '!= None'.
        if self.read_code is not None:
            return self.buildReadCode('readIntRegOperand')
        int_reg_val = 'xc->readIntRegOperand(this, %d)' % self.src_reg_idx
        return '%s = %s;\n' % (self.base_name, int_reg_val)

    def makeWrite(self):
        if (self.ctype == 'float' or self.ctype == 'double'):
            error('Attempt to write integer register as FP')
        if self.write_code is not None:
            return self.buildWriteCode('setIntRegOperand')
        wb = '''
        {
            %s final_val = %s;
            xc->setIntRegOperand(this, %d, final_val);\n
            if (traceData) { traceData->setData(final_val); }
        }''' % (self.ctype, self.base_name, self.dest_reg_idx)
        return wb

class FloatRegOperand(Operand):
    def isReg(self):
        return 1

    def isFloatReg(self):
        return 1

    def makeConstructor(self):
        c = ''
        if self.is_src:
            c += '\n\t_srcRegIdx[%d] = %s + FP_Base_DepTag;' % \
                 (self.src_reg_idx, self.reg_spec)
        if self.is_dest:
            c += '\n\t_destRegIdx[%d] = %s + FP_Base_DepTag;' % \
                 (self.dest_reg_idx, self.reg_spec)
        return c

    def makeRead(self):
        # FP types go through the FP read interface; everything else
        # reads the raw register bits.
        if (self.ctype == 'float' or self.ctype == 'double'):
            func = 'readFloatRegOperand'
        else:
            func = 'readFloatRegOperandBits'
        if self.read_code is not None:
            return self.buildReadCode(func)
        return '%s = xc->%s(this, %d);\n' % \
               (self.base_name, func, self.src_reg_idx)

    def makeWrite(self):
        if (self.ctype == 'float' or self.ctype == 'double'):
            func = 'setFloatRegOperand'
        else:
            func = 'setFloatRegOperandBits'
        if self.write_code is not None:
            return self.buildWriteCode(func)
        wb = '''
        {
            %s final_val = %s;
            xc->%s(this, %d, final_val);\n
            if (traceData) { traceData->setData(final_val); }
        }''' % (self.ctype, self.base_name, func, self.dest_reg_idx)
        return wb

class ControlRegOperand(Operand):
    def isReg(self):
        return 1

    def isControlReg(self):
        return 1

    def makeConstructor(self):
        c = ''
        if self.is_src:
            c += '\n\t_srcRegIdx[%d] = %s + Ctrl_Base_DepTag;' % \
                 (self.src_reg_idx, self.reg_spec)
        if self.is_dest:
            c += '\n\t_destRegIdx[%d] = %s + Ctrl_Base_DepTag;' % \
                 (self.dest_reg_idx, self.reg_spec)
        return c

    def makeRead(self):
        if (self.ctype == 'float' or self.ctype == 'double'):
            error('Attempt to read control register as FP')
        if self.read_code is not None:
            return self.buildReadCode('readMiscRegOperand')
        return '%s = xc->readMiscRegOperand(this, %s);\n' % \
               (self.base_name, self.src_reg_idx)

    def makeWrite(self):
        if (self.ctype == 'float' or self.ctype == 'double'):
            error('Attempt to write control register as FP')
        if self.write_code is not None:
            return self.buildWriteCode('setMiscRegOperand')
        wb = 'xc->setMiscRegOperand(this, %s, %s);\n' % \
             (self.dest_reg_idx, self.base_name)
        wb += 'if (traceData) { traceData->setData(%s); }' % \
              self.base_name
        return wb

class MemOperand(Operand):
    def isMem(self):
        return 1

    def makeConstructor(self):
        return ''

    def makeDecl(self):
        # Note that initializations in the declarations are solely
        # to avoid 'uninitialized variable' errors from the compiler.
        # Declare memory data variable.
        return '%s %s = 0;\n' % (self.ctype, self.base_name)

    def makeRead(self):
        if self.read_code is not None:
            return self.buildReadCode()
        return ''

    def makeWrite(self):
        if self.write_code is not None:
            return self.buildWriteCode()
        return ''

class PCStateOperand(Operand):
    def makeConstructor(self):
        return ''

    def makeRead(self):
        if self.reg_spec:
            # A component of the PC state.
            return '%s = __parserAutoPCState.%s();\n' % \
                   (self.base_name, self.reg_spec)
        else:
            # The whole PC state itself.
            return '%s = xc->pcState();\n' % self.base_name

    def makeWrite(self):
        if self.reg_spec:
            # A component of the PC state.
            return '__parserAutoPCState.%s(%s);\n' % \
                   (self.reg_spec, self.base_name)
        else:
            # The whole PC state itself.
            return 'xc->pcState(%s);\n' % self.base_name

    def makeDecl(self):
        ctype = 'TheISA::PCState'
        if self.isPCPart():
            ctype = self.ctype
        return "%s %s;\n" % (ctype, self.base_name)

    def isPCState(self):
        return 1

class OperandList(object):
    '''Find all the operands in the given code block.  Returns an operand
    descriptor list (instance of class OperandList).'''
Returns an operand 694 descriptor list (instance of class OperandList).''' 695 def __init__(self, parser, code): 696 self.items = [] 697 self.bases = {} 698 # delete comments so we don't match on reg specifiers inside 699 code = commentRE.sub('', code) 700 # search for operands 701 next_pos = 0 702 while 1: 703 match = parser.operandsRE.search(code, next_pos) 704 if not match: 705 # no more matches: we're done 706 break 707 op = match.groups() 708 # regexp groups are operand full name, base, and extension 709 (op_full, op_base, op_ext) = op 710 # if the token following the operand is an assignment, this is 711 # a destination (LHS), else it's a source (RHS) 712 is_dest = (assignRE.match(code, match.end()) != None) 713 is_src = not is_dest 714 # see if we've already seen this one 715 op_desc = self.find_base(op_base) 716 if op_desc: 717 if op_desc.ext != op_ext: 718 error('Inconsistent extensions for operand %s' % \ 719 op_base) 720 op_desc.is_src = op_desc.is_src or is_src 721 op_desc.is_dest = op_desc.is_dest or is_dest 722 else: 723 # new operand: create new descriptor 724 op_desc = parser.operandNameMap[op_base](parser, 725 op_full, op_ext, is_src, is_dest) 726 self.append(op_desc) 727 # start next search after end of current match 728 next_pos = match.end() 729 self.sort() 730 # enumerate source & dest register operands... 
used in building 731 # constructor later 732 self.numSrcRegs = 0 733 self.numDestRegs = 0 734 self.numFPDestRegs = 0 735 self.numIntDestRegs = 0 736 self.memOperand = None 737 for op_desc in self.items: 738 if op_desc.isReg(): 739 if op_desc.is_src: 740 op_desc.src_reg_idx = self.numSrcRegs 741 self.numSrcRegs += 1 742 if op_desc.is_dest: 743 op_desc.dest_reg_idx = self.numDestRegs 744 self.numDestRegs += 1 745 if op_desc.isFloatReg(): 746 self.numFPDestRegs += 1 747 elif op_desc.isIntReg(): 748 self.numIntDestRegs += 1 749 elif op_desc.isMem(): 750 if self.memOperand: 751 error("Code block has more than one memory operand.") 752 self.memOperand = op_desc 753 if parser.maxInstSrcRegs < self.numSrcRegs: 754 parser.maxInstSrcRegs = self.numSrcRegs 755 if parser.maxInstDestRegs < self.numDestRegs: 756 parser.maxInstDestRegs = self.numDestRegs 757 # now make a final pass to finalize op_desc fields that may depend 758 # on the register enumeration 759 for op_desc in self.items: 760 op_desc.finalize() 761 762 def __len__(self): 763 return len(self.items) 764 765 def __getitem__(self, index): 766 return self.items[index] 767 768 def append(self, op_desc): 769 self.items.append(op_desc) 770 self.bases[op_desc.base_name] = op_desc 771 772 def find_base(self, base_name): 773 # like self.bases[base_name], but returns None if not found 774 # (rather than raising exception) 775 return self.bases.get(base_name) 776 777 # internal helper function for concat[Some]Attr{Strings|Lists} 778 def __internalConcatAttrs(self, attr_name, filter, result): 779 for op_desc in self.items: 780 if filter(op_desc): 781 result += getattr(op_desc, attr_name) 782 return result 783 784 # return a single string that is the concatenation of the (string) 785 # values of the specified attribute for all operands 786 def concatAttrStrings(self, attr_name): 787 return self.__internalConcatAttrs(attr_name, lambda x: 1, '') 788 789 # like concatAttrStrings, but only include the values for the operands 790 # 
for which the provided filter function returns true 791 def concatSomeAttrStrings(self, filter, attr_name): 792 return self.__internalConcatAttrs(attr_name, filter, '') 793 794 # return a single list that is the concatenation of the (list) 795 # values of the specified attribute for all operands 796 def concatAttrLists(self, attr_name): 797 return self.__internalConcatAttrs(attr_name, lambda x: 1, []) 798 799 # like concatAttrLists, but only include the values for the operands 800 # for which the provided filter function returns true 801 def concatSomeAttrLists(self, filter, attr_name): 802 return self.__internalConcatAttrs(attr_name, filter, []) 803 804 def sort(self): 805 self.items.sort(lambda a, b: a.sort_pri - b.sort_pri) 806 807class SubOperandList(OperandList): 808 '''Find all the operands in the given code block. Returns an operand 809 descriptor list (instance of class OperandList).''' 810 def __init__(self, parser, code, master_list): 811 self.items = [] 812 self.bases = {} 813 # delete comments so we don't match on reg specifiers inside 814 code = commentRE.sub('', code) 815 # search for operands 816 next_pos = 0 817 while 1: 818 match = parser.operandsRE.search(code, next_pos) 819 if not match: 820 # no more matches: we're done 821 break 822 op = match.groups() 823 # regexp groups are operand full name, base, and extension 824 (op_full, op_base, op_ext) = op 825 # find this op in the master list 826 op_desc = master_list.find_base(op_base) 827 if not op_desc: 828 error('Found operand %s which is not in the master list!' 
\ 829 ' This is an internal error' % op_base) 830 else: 831 # See if we've already found this operand 832 op_desc = self.find_base(op_base) 833 if not op_desc: 834 # if not, add a reference to it to this sub list 835 self.append(master_list.bases[op_base]) 836 837 # start next search after end of current match 838 next_pos = match.end() 839 self.sort() 840 self.memOperand = None 841 # Whether the whole PC needs to be read so parts of it can be accessed 842 self.readPC = False 843 # Whether the whole PC needs to be written after parts of it were 844 # changed 845 self.setPC = False 846 # Whether this instruction manipulates the whole PC or parts of it. 847 # Mixing the two is a bad idea and flagged as an error. 848 self.pcPart = None 849 for op_desc in self.items: 850 if op_desc.isPCPart(): 851 self.readPC = True 852 if op_desc.is_dest: 853 self.setPC = True 854 if op_desc.isPCState(): 855 if self.pcPart is not None: 856 if self.pcPart and not op_desc.isPCPart() or \ 857 not self.pcPart and op_desc.isPCPart(): 858 error("Mixed whole and partial PC state operands.") 859 self.pcPart = op_desc.isPCPart() 860 if op_desc.isMem(): 861 if self.memOperand: 862 error("Code block has more than one memory operand.") 863 self.memOperand = op_desc 864 865# Regular expression object to match C++ comments 866# (used in findOperands()) 867commentRE = re.compile(r'//.*\n') 868 869# Regular expression object to match assignment statements 870# (used in findOperands()) 871assignRE = re.compile(r'\s*=(?!=)', re.MULTILINE) 872 873def makeFlagConstructor(flag_list): 874 if len(flag_list) == 0: 875 return '' 876 # filter out repeated flags 877 flag_list.sort() 878 i = 1 879 while i < len(flag_list): 880 if flag_list[i] == flag_list[i-1]: 881 del flag_list[i] 882 else: 883 i += 1 884 pre = '\n\tflags[' 885 post = '] = true;' 886 code = pre + string.join(flag_list, post + pre) + post 887 return code 888 889# Assume all instruction flags are of the form 'IsFoo' 890instFlagRE = 
# Assume all instruction flags are of the form 'IsFoo'
instFlagRE = re.compile(r'Is.*')

# OpClass constants end in 'Op' except No_OpClass
opClassRE = re.compile(r'.*Op|No_OpClass')

class InstObjParams(object):
    '''Parameter bundle describing one instruction: mnemonic, class
    name, operand list, constructor code, and StaticInst flags.'''
    def __init__(self, parser, mnem, class_name, base_class='',
                 snippets=None, opt_args=None):
        self.mnemonic = mnem
        self.class_name = class_name
        self.base_class = base_class
        # None sentinels avoid the mutable-default-argument pitfall;
        # behavior matches the old '{}' / '[]' defaults.
        if snippets is None:
            snippets = {}
        if opt_args is None:
            opt_args = []
        if not isinstance(snippets, dict):
            snippets = {'code' : snippets}
        compositeCode = ' '.join(map(str, snippets.values()))
        self.snippets = snippets

        self.operands = OperandList(parser, compositeCode)
        self.constructor = self.operands.concatAttrStrings('constructor')
        self.constructor += \
            '\n\t_numSrcRegs = %d;' % self.operands.numSrcRegs
        self.constructor += \
            '\n\t_numDestRegs = %d;' % self.operands.numDestRegs
        self.constructor += \
            '\n\t_numFPDestRegs = %d;' % self.operands.numFPDestRegs
        self.constructor += \
            '\n\t_numIntDestRegs = %d;' % self.operands.numIntDestRegs
        self.flags = self.operands.concatAttrLists('flags')

        # Make a basic guess on the operand class (function unit type).
        # These are good enough for most cases, and can be overridden
        # later otherwise.
        if 'IsStore' in self.flags:
            self.op_class = 'MemWriteOp'
        elif 'IsLoad' in self.flags or 'IsPrefetch' in self.flags:
            self.op_class = 'MemReadOp'
        elif 'IsFloating' in self.flags:
            self.op_class = 'FloatAddOp'
        else:
            self.op_class = 'IntAluOp'

        # Optional arguments are assumed to be either StaticInst flags
        # or an OpClass value.  To avoid having to import a complete
        # list of these values to match against, we do it ad-hoc
        # with regexps.
        for oa in opt_args:
            if instFlagRE.match(oa):
                self.flags.append(oa)
            elif opClassRE.match(oa):
                self.op_class = oa
            else:
                error('InstObjParams: optional arg "%s" not recognized '
                      'as StaticInst::Flag or OpClass.' % oa)

        # add flag initialization to constructor here to include
        # any flags added via opt_args
        self.constructor += makeFlagConstructor(self.flags)

        # if 'IsFloating' is set, add call to the FP enable check
        # function (which should be provided by isa_desc via a declare)
        if 'IsFloating' in self.flags:
            self.fp_enable_check = 'fault = checkFpEnableFault(xc);'
        else:
            self.fp_enable_check = ''

##############
# Stack: a simple stack object.  Used for both formats (formatStack)
# and default cases (defaultStack).  Simply wraps a list to give more
# stack-like syntax and enable initialization with an argument list
# (as opposed to an argument that's a list).

class Stack(list):
    def __init__(self, *items):
        list.__init__(self, items)

    def push(self, item):
        self.append(item)

    def top(self):
        return self[-1]

#######################
#
# Output file template
#

file_template = '''
/*
 * DO NOT EDIT THIS FILE!!!
 *
 * It was automatically generated from the ISA description in %(filename)s
 */

%(includes)s

%(global_output)s

namespace %(namespace)s {

%(namespace_output)s

} // namespace %(namespace)s

%(decode_function)s
'''

max_inst_regs_template = '''
/*
 * DO NOT EDIT THIS FILE!!!
 *
 * It was automatically generated from the ISA description in %(filename)s
 */

namespace %(namespace)s {

    const int MaxInstSrcRegs = %(MaxInstSrcRegs)d;
    const int MaxInstDestRegs = %(MaxInstDestRegs)d;

} // namespace %(namespace)s

'''

class ISAParser(Grammar):
    def __init__(self, output_dir, cpu_models):
        super(ISAParser, self).__init__()
        self.output_dir = output_dir

        self.cpuModels = cpu_models

        # variable to hold templates
        self.templateMap = {}

        # This dictionary maps format name strings to Format objects.
        self.formatMap = {}

        # The format stack.
        self.formatStack = Stack(NoFormat())

        # The default case stack.
        self.defaultStack = Stack(None)

        # Stack that tracks current file and line number.  Each
        # element is a tuple (filename, lineno) that records the
        # *current* filename and the line number in the *previous*
        # file where it was included.
        self.fileNameStack = Stack()

        # Seed the sandboxed 'let'-block namespace with a few helpers,
        # looked up by name in this module's scope.
        symbols = ('makeList', 're', 'string')
        self.exportContext = dict([(s, eval(s)) for s in symbols])

        # Running maxima over all parsed instructions; emitted into the
        # max-inst-regs output file.
        self.maxInstSrcRegs = 0
        self.maxInstDestRegs = 0

    #####################################################################
    #
    # Lexer
    #
    # The PLY lexer module takes two things as input:
    #  - A list of token names (the string list 'tokens')
    #  - A regular expression describing a match for each token.  The
    #    regexp for token FOO can be provided in two ways:
    #    - as a string variable named t_FOO
    #    - as the doc string for a function named t_FOO.  In this case,
    #      the function is also executed, allowing an action to be
    #      associated with each token match.
    #
    #####################################################################

    # Reserved words.  These are listed separately as they are matched
    # using the same regexp as generic IDs, but distinguished in the
    # t_ID() function.  The PLY documentation suggests this approach.
    reserved = (
        'BITFIELD', 'DECODE', 'DECODER', 'DEFAULT', 'DEF', 'EXEC', 'FORMAT',
        'HEADER', 'LET', 'NAMESPACE', 'OPERAND_TYPES', 'OPERANDS',
        'OUTPUT', 'SIGNED', 'TEMPLATE'
        )

    # List of tokens.  The lex module requires this.
    tokens = reserved + (
        # identifier
        'ID',

        # integer literal
        'INTLIT',

        # string literal
        'STRLIT',

        # code literal
        'CODELIT',

        # ( ) [ ] { } < > , ; .
: :: *
        'LPAREN', 'RPAREN',
        'LBRACKET', 'RBRACKET',
        'LBRACE', 'RBRACE',
        'LESS', 'GREATER', 'EQUALS',
        'COMMA', 'SEMI', 'DOT', 'COLON', 'DBLCOLON',
        'ASTERISK',

        # C preprocessor directives
        'CPPDIRECTIVE'

    # The following are matched but never returned. commented out to
    # suppress PLY warning
        # newfile directive
        # 'NEWFILE',

        # endfile directive
        # 'ENDFILE'
        )

    # Regular expressions for token matching
    t_LPAREN = r'\('
    t_RPAREN = r'\)'
    t_LBRACKET = r'\['
    t_RBRACKET = r'\]'
    t_LBRACE = r'\{'
    t_RBRACE = r'\}'
    t_LESS = r'\<'
    t_GREATER = r'\>'
    t_EQUALS = r'='
    t_COMMA = r','
    t_SEMI = r';'
    t_DOT = r'\.'
    t_COLON = r':'
    t_DBLCOLON = r'::'
    t_ASTERISK = r'\*'

    # Identifiers and reserved words
    reserved_map = { }
    for r in reserved:
        reserved_map[r.lower()] = r

    # NOTE: the docstring of each t_* function below IS the token's
    # regular expression (PLY convention); don't treat it as prose.
    def t_ID(self, t):
        r'[A-Za-z_]\w*'
        # Reclassify identifiers that are reserved words.
        t.type = self.reserved_map.get(t.value, 'ID')
        return t

    # Integer literal
    # NOTE(review): by regex alternation precedence this reads as
    # ('-?0x...') | ('\d+'), so a leading '-' is only accepted on hex
    # literals — confirm whether negative decimal literals are intended.
    def t_INTLIT(self, t):
        r'-?(0x[\da-fA-F]+)|\d+'
        try:
            # base=0 lets int() infer hex from the '0x' prefix
            t.value = int(t.value,0)
        except ValueError:
            error(t, 'Integer value "%s" too large' % t.value)
            t.value = 0
        return t

    # String literal.  Note that these use only single quotes, and
    # can span multiple lines.
    def t_STRLIT(self, t):
        r"(?m)'([^'])+'"
        # strip off quotes
        t.value = t.value[1:-1]
        # keep lexer line numbers accurate across multi-line strings
        t.lexer.lineno += t.value.count('\n')
        return t


    # "Code literal"... like a string literal, but delimiters are
    # '{{' and '}}' so they get formatted nicely under emacs c-mode
    def t_CODELIT(self, t):
        r"(?m)\{\{([^\}]|}(?!\}))+\}\}"
        # strip off {{ & }}
        t.value = t.value[2:-2]
        t.lexer.lineno += t.value.count('\n')
        return t

    def t_CPPDIRECTIVE(self, t):
        r'^\#[^\#].*\n'
        t.lexer.lineno += t.value.count('\n')
        return t

    def t_NEWFILE(self, t):
        r'^\#\#newfile\s+"[^"]*"'
        # Record (new filename, line number in the including file) and
        # restart line counting for the included file.  No token returned.
        self.fileNameStack.push((t.value[11:-1], t.lexer.lineno))
        t.lexer.lineno = 0

    def t_ENDFILE(self, t):
        r'^\#\#endfile'
        # Restore the including file's line number.  No token returned.
        (old_filename, t.lexer.lineno) = self.fileNameStack.pop()

    #
    # The functions t_NEWLINE, t_ignore, and t_error are
    # special for the lex module.
    #

    # Newlines
    def t_NEWLINE(self, t):
        r'\n+'
        t.lexer.lineno += t.value.count('\n')

    # Comments
    def t_comment(self, t):
        r'//.*'

    # Completely ignored characters
    t_ignore = ' \t\x0c'

    # Error handler
    def t_error(self, t):
        error(t, "illegal character '%s'" % t.value[0])
        # NOTE(review): modern PLY spells this t.lexer.skip(1); t.skip(1)
        # matches the older PLY API this file was written against.
        t.skip(1)

    #####################################################################
    #
    # Parser
    #
    # Every function whose name starts with 'p_' defines a grammar
    # rule.  The rule is encoded in the function's doc string, while
    # the function body provides the action taken when the rule is
    # matched.  The argument to each function is a list of the values
    # of the rule's symbols: t[0] for the LHS, and t[1..n] for the
    # symbols on the RHS.  For tokens, the value is copied from the
    # t.value attribute provided by the lexer.  For non-terminals, the
    # value is assigned by the producing rule; i.e., the job of the
    # grammar rule function is to set the value for the non-terminal
    # on the LHS (by assigning to t[0]).
1206 ##################################################################### 1207 1208 # The LHS of the first grammar rule is used as the start symbol 1209 # (in this case, 'specification'). Note that this rule enforces 1210 # that there will be exactly one namespace declaration, with 0 or 1211 # more global defs/decls before and after it. The defs & decls 1212 # before the namespace decl will be outside the namespace; those 1213 # after will be inside. The decoder function is always inside the 1214 # namespace. 1215 def p_specification(self, t): 1216 'specification : opt_defs_and_outputs name_decl opt_defs_and_outputs decode_block' 1217 global_code = t[1] 1218 isa_name = t[2] 1219 namespace = isa_name + "Inst" 1220 # wrap the decode block as a function definition 1221 t[4].wrap_decode_block(''' 1222StaticInstPtr 1223%(isa_name)s::decodeInst(%(isa_name)s::ExtMachInst machInst) 1224{ 1225 using namespace %(namespace)s; 1226''' % vars(), '}') 1227 # both the latter output blocks and the decode block are in 1228 # the namespace 1229 namespace_code = t[3] + t[4] 1230 # pass it all back to the caller of yacc.parse() 1231 t[0] = (isa_name, namespace, global_code, namespace_code) 1232 1233 # ISA name declaration looks like "namespace <foo>;" 1234 def p_name_decl(self, t): 1235 'name_decl : NAMESPACE ID SEMI' 1236 t[0] = t[2] 1237 1238 # 'opt_defs_and_outputs' is a possibly empty sequence of 1239 # def and/or output statements. 1240 def p_opt_defs_and_outputs_0(self, t): 1241 'opt_defs_and_outputs : empty' 1242 t[0] = GenCode(self) 1243 1244 def p_opt_defs_and_outputs_1(self, t): 1245 'opt_defs_and_outputs : defs_and_outputs' 1246 t[0] = t[1] 1247 1248 def p_defs_and_outputs_0(self, t): 1249 'defs_and_outputs : def_or_output' 1250 t[0] = t[1] 1251 1252 def p_defs_and_outputs_1(self, t): 1253 'defs_and_outputs : defs_and_outputs def_or_output' 1254 t[0] = t[1] + t[2] 1255 1256 # The list of possible definition/output statements. 
    def p_def_or_output(self, t):
        '''def_or_output : def_format
                         | def_bitfield
                         | def_bitfield_struct
                         | def_template
                         | def_operand_types
                         | def_operands
                         | output_header
                         | output_decoder
                         | output_exec
                         | global_let'''
        t[0] = t[1]

    # Output blocks 'output <foo> {{...}}' (C++ code blocks) are copied
    # directly to the appropriate output section.

    # Massage output block by substituting in template definitions and
    # bit operators.  We handle '%'s embedded in the string that don't
    # indicate template substitutions (or CPU-specific symbols, which
    # get handled in GenCode) by doubling them first so that the
    # format operation will reduce them back to single '%'s.
    def process_output(self, s):
        s = self.protectNonSubstPercents(s)
        # protects cpu-specific symbols too
        s = self.protectCpuSymbols(s)
        return substBitOps(s % self.templateMap)

    def p_output_header(self, t):
        'output_header : OUTPUT HEADER CODELIT SEMI'
        t[0] = GenCode(self, header_output = self.process_output(t[3]))

    def p_output_decoder(self, t):
        'output_decoder : OUTPUT DECODER CODELIT SEMI'
        t[0] = GenCode(self, decoder_output = self.process_output(t[3]))

    def p_output_exec(self, t):
        'output_exec : OUTPUT EXEC CODELIT SEMI'
        t[0] = GenCode(self, exec_output = self.process_output(t[3]))

    # global let blocks 'let {{...}}' (Python code blocks) are
    # executed directly when seen.  Note that these execute in a
    # special variable context 'exportContext' to prevent the code
    # from polluting this script's namespace.
    def p_global_let(self, t):
        'global_let : LET CODELIT SEMI'
        self.updateExportContext()
        # The let block can set these four variables to contribute text
        # to the corresponding output streams.
        self.exportContext["header_output"] = ''
        self.exportContext["decoder_output"] = ''
        self.exportContext["exec_output"] = ''
        self.exportContext["decode_block"] = ''
        try:
            # NOTE: executes code from the ISA description; the
            # description file is trusted input to this generator.
            exec fixPythonIndentation(t[2]) in self.exportContext
        except Exception, exc:
            if debug:
                raise
            error(t, 'error: %s in global let block "%s".' % (exc, t[2]))
        t[0] = GenCode(self,
                       header_output=self.exportContext["header_output"],
                       decoder_output=self.exportContext["decoder_output"],
                       exec_output=self.exportContext["exec_output"],
                       decode_block=self.exportContext["decode_block"])

    # Define the mapping from operand type extensions to C++ types and
    # bit widths (stored in operandTypeMap).
    def p_def_operand_types(self, t):
        'def_operand_types : DEF OPERAND_TYPES CODELIT SEMI'
        try:
            # the code literal is the body of a Python dict literal
            self.operandTypeMap = eval('{' + t[3] + '}')
        except Exception, exc:
            if debug:
                raise
            error(t,
                  'error: %s in def operand_types block "%s".' % (exc, t[3]))
        t[0] = GenCode(self) # contributes nothing to the output C++ file

    # Define the mapping from operand names to operand classes and
    # other traits.  Stored in operandNameMap.
    def p_def_operands(self, t):
        'def_operands : DEF OPERANDS CODELIT SEMI'
        if not hasattr(self, 'operandTypeMap'):
            error(t, 'error: operand types must be defined before operands')
        try:
            user_dict = eval('{' + t[3] + '}', self.exportContext)
        except Exception, exc:
            if debug:
                raise
            error(t, 'error: %s in def operands block "%s".' % (exc, t[3]))
        self.buildOperandNameMap(user_dict, t.lexer.lineno)
        t[0] = GenCode(self) # contributes nothing to the output C++ file

    # A bitfield definition looks like:
    # 'def [signed] bitfield <ID> [<first>:<last>]'
    # This generates a preprocessor macro in the output file.
    def p_def_bitfield_0(self, t):
        'def_bitfield : DEF opt_signed BITFIELD ID LESS INTLIT COLON INTLIT GREATER SEMI'
        expr = 'bits(machInst, %2d, %2d)' % (t[6], t[8])
        if (t[2] == 'signed'):
            # sign-extend from the field's width
            expr = 'sext<%d>(%s)' % (t[6] - t[8] + 1, expr)
        hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
        t[0] = GenCode(self, header_output=hash_define)

    # alternate form for single bit: 'def [signed] bitfield <ID> [<bit>]'
    def p_def_bitfield_1(self, t):
        'def_bitfield : DEF opt_signed BITFIELD ID LESS INTLIT GREATER SEMI'
        expr = 'bits(machInst, %2d, %2d)' % (t[6], t[6])
        if (t[2] == 'signed'):
            expr = 'sext<%d>(%s)' % (1, expr)
        hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
        t[0] = GenCode(self, header_output=hash_define)

    # alternate form for structure member: 'def bitfield <ID> <ID>'
    def p_def_bitfield_struct(self, t):
        'def_bitfield_struct : DEF opt_signed BITFIELD ID id_with_dot SEMI'
        if (t[2] != ''):
            error(t, 'error: structure bitfields are always unsigned.')
        expr = 'machInst.%s' % t[5]
        hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
        t[0] = GenCode(self, header_output=hash_define)

    def p_id_with_dot_0(self, t):
        'id_with_dot : ID'
        t[0] = t[1]

    def p_id_with_dot_1(self, t):
        'id_with_dot : ID DOT id_with_dot'
        t[0] = t[1] + t[2] + t[3]

    def p_opt_signed_0(self, t):
        'opt_signed : SIGNED'
        t[0] = t[1]

    def p_opt_signed_1(self, t):
        'opt_signed : empty'
        t[0] = ''

    def p_def_template(self, t):
        'def_template : DEF TEMPLATE ID CODELIT SEMI'
        self.templateMap[t[3]] = Template(self, t[4])
        t[0] = GenCode(self)

    # An instruction format definition looks like
    # "def format <fmt>(<params>) {{...}};"
    def p_def_format(self, t):
        'def_format : DEF FORMAT ID LPAREN param_list RPAREN CODELIT SEMI'
        (id, params, code) = (t[3], t[5], t[7])
        self.defFormat(id, params, code, t.lexer.lineno)
        t[0] = GenCode(self)

    # The formal parameter list for an instruction format is a
    # possibly empty list of comma-separated parameters.  Positional
    # (standard, non-keyword) parameters must come first, followed by
    # keyword parameters, followed by a '*foo' parameter that gets
    # excess positional arguments (as in Python).  Each of these three
    # parameter categories is optional.
    #
    # Note that we do not support the '**foo' parameter for collecting
    # otherwise undefined keyword args.  Otherwise the parameter list
    # is (I believe) identical to what is supported in Python.
    #
    # The param list generates a tuple, where the first element is a
    # list of the positional params and the second element is a dict
    # containing the keyword params.
    def p_param_list_0(self, t):
        'param_list : positional_param_list COMMA nonpositional_param_list'
        t[0] = t[1] + t[3]

    def p_param_list_1(self, t):
        '''param_list : positional_param_list
                      | nonpositional_param_list'''
        t[0] = t[1]

    def p_positional_param_list_0(self, t):
        'positional_param_list : empty'
        t[0] = []

    def p_positional_param_list_1(self, t):
        'positional_param_list : ID'
        t[0] = [t[1]]

    def p_positional_param_list_2(self, t):
        'positional_param_list : positional_param_list COMMA ID'
        t[0] = t[1] + [t[3]]

    def p_nonpositional_param_list_0(self, t):
        'nonpositional_param_list : keyword_param_list COMMA excess_args_param'
        t[0] = t[1] + t[3]

    def p_nonpositional_param_list_1(self, t):
        '''nonpositional_param_list : keyword_param_list
                                    | excess_args_param'''
        t[0] = t[1]

    def p_keyword_param_list_0(self, t):
        'keyword_param_list : keyword_param'
        t[0] = [t[1]]

    def p_keyword_param_list_1(self, t):
        'keyword_param_list : keyword_param_list COMMA keyword_param'
        t[0] = t[1] + [t[3]]

    def p_keyword_param(self, t):
        'keyword_param : ID EQUALS expr'
        # keep as source text 'name = repr(default)' for later re-parse
        t[0] = t[1] + ' = ' + t[3].__repr__()

    def p_excess_args_param(self, t):
        'excess_args_param : ASTERISK ID'
        # Just concatenate them: '*ID'.  Wrap in list to be consistent
        # with positional_param_list and keyword_param_list.
        t[0] = [t[1] + t[2]]

    # End of format definition-related rules.
    ##############

    #
    # A decode block looks like:
    #       decode <field1> [, <field2>]* [default <inst>] { ... }
    #
    def p_decode_block(self, t):
        'decode_block : DECODE ID opt_default LBRACE decode_stmt_list RBRACE'
        default_defaults = self.defaultStack.pop()
        codeObj = t[5]
        # use the "default defaults" only if there was no explicit
        # default statement in decode_stmt_list
        if not codeObj.has_decode_default:
            codeObj += default_defaults
        codeObj.wrap_decode_block('switch (%s) {\n' % t[2], '}\n')
        t[0] = codeObj

    # The opt_default statement serves only to push the "default
    # defaults" onto defaultStack.  This value will be used by nested
    # decode blocks, and used and popped off when the current
    # decode_block is processed (in p_decode_block() above).
    def p_opt_default_0(self, t):
        'opt_default : empty'
        # no default specified: reuse the one currently at the top of
        # the stack
        self.defaultStack.push(self.defaultStack.top())
        # no meaningful value returned
        t[0] = None

    def p_opt_default_1(self, t):
        'opt_default : DEFAULT inst'
        # push the new default
        codeObj = t[2]
        codeObj.wrap_decode_block('\ndefault:\n', 'break;\n')
        self.defaultStack.push(codeObj)
        # no meaningful value returned
        t[0] = None

    def p_decode_stmt_list_0(self, t):
        'decode_stmt_list : decode_stmt'
        t[0] = t[1]

    def p_decode_stmt_list_1(self, t):
        'decode_stmt_list : decode_stmt decode_stmt_list'
        if (t[1].has_decode_default and t[2].has_decode_default):
            error(t, 'Two default cases in decode block')
        t[0] = t[1] + t[2]

    #
    # Decode statement rules
    #
    # There are four types of statements allowed in a decode block:
    # 1. Format blocks 'format <foo> { ... }'
    # 2. Nested decode blocks
    # 3. Instruction definitions.
    # 4. C preprocessor directives.

    # Preprocessor directives found in a decode statement list are
    # passed through to the output, replicated to all of the output
    # code streams.  This works well for ifdefs, so we can ifdef out
    # both the declarations and the decode cases generated by an
    # instruction definition.  Handling them as part of the grammar
    # makes it easy to keep them in the right place with respect to
    # the code generated by the other statements.
    def p_decode_stmt_cpp(self, t):
        'decode_stmt : CPPDIRECTIVE'
        # replicate the directive into all four output streams
        t[0] = GenCode(self, t[1], t[1], t[1], t[1])

    # A format block 'format <foo> { ... }' sets the default
    # instruction format used to handle instruction definitions inside
    # the block.  This format can be overridden by using an explicit
    # format on the instruction definition or with a nested format
    # block.
    def p_decode_stmt_format(self, t):
        'decode_stmt : FORMAT push_format_id LBRACE decode_stmt_list RBRACE'
        # The format will be pushed on the stack when 'push_format_id'
        # is processed (see below).  Once the parser has recognized
        # the full production (though the right brace), we're done
        # with the format, so now we can pop it.
        self.formatStack.pop()
        t[0] = t[4]

    # This rule exists so we can set the current format (& push the
    # stack) when we recognize the format name part of the format
    # block.
    def p_push_format_id(self, t):
        'push_format_id : ID'
        try:
            self.formatStack.push(self.formatMap[t[1]])
            t[0] = ('', '// format %s' % t[1])
        except KeyError:
            error(t, 'instruction format "%s" not defined.' % t[1])

    # Nested decode block: if the value of the current field matches
    # the specified constant, do a nested decode on some other field.
    def p_decode_stmt_decode(self, t):
        'decode_stmt : case_label COLON decode_block'
        label = t[1]
        codeObj = t[3]
        # just wrap the decoding code from the block as a case in the
        # outer switch statement.
        codeObj.wrap_decode_block('\n%s:\n' % label)
        codeObj.has_decode_default = (label == 'default')
        t[0] = codeObj

    # Instruction definition (finally!).
    def p_decode_stmt_inst(self, t):
        'decode_stmt : case_label COLON inst SEMI'
        label = t[1]
        codeObj = t[3]
        codeObj.wrap_decode_block('\n%s:' % label, 'break;\n')
        codeObj.has_decode_default = (label == 'default')
        t[0] = codeObj

    # The case label is either a list of one or more constants or
    # 'default'
    def p_case_label_0(self, t):
        'case_label : intlit_list'
        def make_case(intlit):
            if intlit >= 2**32:
                # constants too wide for 32 bits get the ULL() wrapper
                return 'case ULL(%#x)' % intlit
            else:
                return 'case %#x' % intlit
        # multiple constants become a run of fall-through case labels
        t[0] = ': '.join(map(make_case, t[1]))

    def p_case_label_1(self, t):
        'case_label : DEFAULT'
        t[0] = 'default'

    #
    # The constant list for a decode case label must be non-empty, but
    # may have one or more comma-separated integer literals in it.
    #
    def p_intlit_list_0(self, t):
        'intlit_list : INTLIT'
        t[0] = [t[1]]

    def p_intlit_list_1(self, t):
        'intlit_list : intlit_list COMMA INTLIT'
        t[0] = t[1]
        t[0].append(t[3])

    # Define an instruction using the current instruction format
    # (specified by an enclosing format block).
    # "<mnemonic>(<args>)"
    def p_inst_0(self, t):
        'inst : ID LPAREN arg_list RPAREN'
        # Pass the ID and arg list to the current format class to deal with.
        currentFormat = self.formatStack.top()
        codeObj = currentFormat.defineInst(self, t[1], t[3], t.lexer.lineno)
        args = ','.join(map(str, t[3]))
        # comment out every line of the arg dump, then strip the
        # leading '//' from the first line since it follows '// ' in
        # the comment built below
        args = re.sub('(?m)^', '//', args)
        args = re.sub('^//', '', args)
        comment = '\n// %s::%s(%s)\n' % (currentFormat.id, t[1], args)
        codeObj.prepend_all(comment)
        t[0] = codeObj

    # Define an instruction using an explicitly specified format:
    # "<fmt>::<mnemonic>(<args>)"
    def p_inst_1(self, t):
        'inst : ID DBLCOLON ID LPAREN arg_list RPAREN'
        try:
            format = self.formatMap[t[1]]
        except KeyError:
            error(t, 'instruction format "%s" not defined.' % t[1])

        codeObj = format.defineInst(self, t[3], t[5], t.lexer.lineno)
        comment = '\n// %s::%s(%s)\n' % (t[1], t[3], t[5])
        codeObj.prepend_all(comment)
        t[0] = codeObj

    # The arg list generates a tuple, where the first element is a
    # list of the positional args and the second element is a dict
    # containing the keyword args.
    def p_arg_list_0(self, t):
        'arg_list : positional_arg_list COMMA keyword_arg_list'
        t[0] = ( t[1], t[3] )

    def p_arg_list_1(self, t):
        'arg_list : positional_arg_list'
        t[0] = ( t[1], {} )

    def p_arg_list_2(self, t):
        'arg_list : keyword_arg_list'
        t[0] = ( [], t[1] )

    def p_positional_arg_list_0(self, t):
        'positional_arg_list : empty'
        t[0] = []

    def p_positional_arg_list_1(self, t):
        'positional_arg_list : expr'
        t[0] = [t[1]]

    def p_positional_arg_list_2(self, t):
        'positional_arg_list : positional_arg_list COMMA expr'
        t[0] = t[1] + [t[3]]

    def p_keyword_arg_list_0(self, t):
        'keyword_arg_list : keyword_arg'
        t[0] = t[1]

    def p_keyword_arg_list_1(self, t):
        'keyword_arg_list : keyword_arg_list COMMA keyword_arg'
        # merge the new single-entry dict into the accumulated dict
        t[0] = t[1]
        t[0].update(t[3])

    def p_keyword_arg(self, t):
        'keyword_arg : ID EQUALS expr'
        t[0] = { t[1] : t[3] }

    #
    # Basic expressions.  These constitute the argument values of
    # "function calls" (i.e. instruction definitions in the decode
    # block) and default values for formal parameters of format
    # functions.
    #
    # Right now, these are either strings, integers, or (recursively)
    # lists of exprs (using Python square-bracket list syntax).  Note
    # that bare identifiers are treated as string constants here (since
    # there isn't really a variable namespace to refer to).
    #
    def p_expr_0(self, t):
        '''expr : ID
                | INTLIT
                | STRLIT
                | CODELIT'''
        t[0] = t[1]

    def p_expr_1(self, t):
        '''expr : LBRACKET list_expr RBRACKET'''
        t[0] = t[2]

    def p_list_expr_0(self, t):
        'list_expr : expr'
        t[0] = [t[1]]

    def p_list_expr_1(self, t):
        'list_expr : list_expr COMMA expr'
        t[0] = t[1] + [t[3]]

    def p_list_expr_2(self, t):
        'list_expr : empty'
        t[0] = []

    #
    # Empty production... use in other rules for readability.
    #
    def p_empty(self, t):
        'empty :'
        pass

    # Parse error handler.  Note that the argument here is the
    # offending *token*, not a grammar symbol (hence the need to use
    # t.value)
    def p_error(self, t):
        if t:
            error(t, "syntax error at '%s'" % t.value)
        else:
            error("unknown syntax error")

    # END OF GRAMMAR RULES

    def updateExportContext(self):

        # create a continuation that allows us to grab the current parser
        def wrapInstObjParams(*args):
            return InstObjParams(self, *args)
        self.exportContext['InstObjParams'] = wrapInstObjParams
        # make all defined templates visible to 'let' blocks by name
        self.exportContext.update(self.templateMap)

    def defFormat(self, id, params, code, lineno):
        '''Define a new format'''

        # make sure we haven't already defined this one
        if id in self.formatMap:
            error(lineno, 'format %s redefined.' % id)

        # create new object and store in global map
        self.formatMap[id] = Format(id, params, code)

    def expandCpuSymbolsToDict(self, template):
        '''Expand template with CPU-specific references into a
        dictionary with an entry for each CPU model name.  The entry
        key is the model name and the corresponding value is the
        template with the CPU-specific refs substituted for that
        model.'''

        # Protect '%'s that don't go with CPU-specific terms
        t = re.sub(r'%(?!\(CPU_)', '%%', template)
        result = {}
        for cpu in self.cpuModels:
            result[cpu.name] = t % cpu.strings
        return result

    def expandCpuSymbolsToString(self, template):
        '''*If* the template has CPU-specific references, return a
        single string containing a copy of the template for each CPU
        model with the corresponding values substituted in.  If the
        template has no CPU-specific references, it is returned
        unmodified.'''

        if template.find('%(CPU_') != -1:
            return reduce(lambda x,y: x+y,
                          self.expandCpuSymbolsToDict(template).values())
        else:
            return template

    def protectCpuSymbols(self, template):
        '''Protect CPU-specific references by doubling the
        corresponding '%'s (in preparation for substituting a different
        set of references into the template).'''

        return re.sub(r'%(?=\(CPU_)', '%%', template)

    def protectNonSubstPercents(self, s):
        '''Protect any non-dict-substitution '%'s in a format string
        (i.e. those not followed by '(')'''

        return re.sub(r'%(?!\()', '%%', s)

    def buildOperandNameMap(self, user_dict, lineno):
        # Turn the user's operand description dict into a map from
        # operand name to a dynamically created Operand subclass, and
        # build the regexps used to find operand references in code.
        operand_name = {}
        for op_name, val in user_dict.iteritems():
            # first five entries are required; read/write code optional
            base_cls_name, dflt_ext, reg_spec, flags, sort_pri = val[:5]
            if len(val) > 5:
                read_code = val[5]
            else:
                read_code = None
            if len(val) > 6:
                write_code = val[6]
            else:
                write_code = None
            if len(val) > 7:
                error(lineno,
                      'error: too many attributes for operand "%s"' %
                      base_cls_name)

            # Canonical flag structure is a triple of lists, where each list
            # indicates the set of flags implied by this operand always, when
            # used as a source, and when used as a dest, respectively.
            # For simplicity this can be initialized using a variety of fairly
            # obvious shortcuts; we convert these to canonical form here.
            if not flags:
                # no flags specified (e.g., 'None')
                flags = ( [], [], [] )
            elif isinstance(flags, str):
                # a single flag: assumed to be unconditional
                flags = ( [ flags ], [], [] )
            elif isinstance(flags, list):
                # a list of flags: also assumed to be unconditional
                flags = ( flags, [], [] )
            elif isinstance(flags, tuple):
                # it's a tuple: it should be a triple,
                # but each item could be a single string or a list
                (uncond_flags, src_flags, dest_flags) = flags
                flags = (makeList(uncond_flags),
                         makeList(src_flags), makeList(dest_flags))
            # Accumulate attributes of new operand class in tmp_dict
            tmp_dict = {}
            attrList = ['reg_spec', 'flags', 'sort_pri',
                        'read_code', 'write_code']
            if dflt_ext:
                dflt_ctype = self.operandTypeMap[dflt_ext]
                attrList.extend(['dflt_ctype', 'dflt_ext'])
            for attr in attrList:
                # copy each named local into the class attribute dict
                tmp_dict[attr] = eval(attr)
            tmp_dict['base_name'] = op_name
            # New class name will be e.g. "IntReg_Ra"
            cls_name = base_cls_name + '_' + op_name
            # Evaluate string arg to get class object.  Note that the
            # actual base class for "IntReg" is "IntRegOperand", i.e. we
            # have to append "Operand".
            try:
                base_cls = eval(base_cls_name + 'Operand')
            except NameError:
                error(lineno,
                      'error: unknown operand base class "%s"' % base_cls_name)
            # The following statement creates a new class called
            # <cls_name> as a subclass of <base_cls> with the attributes
            # in tmp_dict, just as if we evaluated a class declaration.
            operand_name[op_name] = type(cls_name, (base_cls,), tmp_dict)

        self.operandNameMap = operand_name

        # Define operand variables.
        operands = user_dict.keys()

        operandsREString = (r'''
        (?<![\w\.])      # neg. lookbehind assertion: prevent partial matches
        ((%s)(?:\.(\w+))?)   # match: operand with optional '.' then suffix
        (?![\w\.])       # neg. lookahead assertion: prevent partial matches
        '''
                            % string.join(operands, '|'))

        self.operandsRE = re.compile(operandsREString, re.MULTILINE|re.VERBOSE)

        # Same as operandsREString, but extension is mandatory, and only two
        # groups are returned (base and ext, not full name as above).
        # Used for substituting '_' for '.' to make C++ identifiers.
        operandsWithExtREString = (r'(?<![\w\.])(%s)\.(\w+)(?![\w\.])'
                                   % string.join(operands, '|'))

        self.operandsWithExtRE = \
            re.compile(operandsWithExtREString, re.MULTILINE)

    def substMungedOpNames(self, code):
        '''Munge operand names in code string to make legal C++
        variable names.  This means getting rid of the type extension
        if any.  Will match base_name attribute of Operand object.)'''
        return self.operandsWithExtRE.sub(r'\1', code)

    def mungeSnippet(self, s):
        '''Fix up code snippets for final substitution in templates.'''
        if isinstance(s, str):
            return self.substMungedOpNames(substBitOps(s))
        else:
            # non-string snippets pass through untouched
            return s

    def update_if_needed(self, file, contents):
        '''Update the output file only if the new contents are
        different from the current contents.  Minimizes the files that
        need to be rebuilt after minor changes.'''

        file = os.path.join(self.output_dir, file)
        update = False
        if os.access(file, os.R_OK):
            f = open(file, 'r')
            old_contents = f.read()
            f.close()
            if contents != old_contents:
                os.remove(file) # in case it's write-protected
                update = True
            else:
                print 'File', file, 'is unchanged'
        else:
            update = True
        if update:
            f = open(file, 'w')
            f.write(contents)
            f.close()

    # This regular expression matches '##include' directives
    includeRE = re.compile(r'^\s*##include\s+"(?P<filename>[^"]*)".*$',
                           re.MULTILINE)

    def replace_include(self, matchobj, dirname):
        """Function to replace a matched '##include' directive with the
        contents of the specified file (with nested ##includes
        replaced recursively).  'matchobj' is an re match object
        (from a match of includeRE) and 'dirname' is the directory
        relative to which the file path should be resolved."""

        fname = matchobj.group('filename')
        full_fname = os.path.normpath(os.path.join(dirname, fname))
        # Bracket the included text with ##newfile/##endfile so the
        # lexer can track file/line numbers for error reporting.
        contents = '##newfile "%s"\n%s\n##endfile\n' % \
                   (full_fname, self.read_and_flatten(full_fname))
        return contents

    def read_and_flatten(self, filename):
        """Read a file and recursively flatten nested '##include' files."""

        current_dir = os.path.dirname(filename)
        try:
            contents = open(filename).read()
        except IOError:
            # NOTE(review): 'contents' would be unbound past this point;
            # error() is assumed to abort rather than return.
            error('Error including file "%s"' % filename)

        self.fileNameStack.push((filename, 0))

        # Find any includes and include them
        def replace(matchobj):
            return self.replace_include(matchobj, current_dir)
        contents = self.includeRE.sub(replace, contents)

        self.fileNameStack.pop()
        return contents

    def _parse_isa_desc(self, isa_desc_file):
        '''Read in and parse the ISA description.'''

        # Read file and (recursively) all included files into a string.
        # PLY requires that the input be in a single string so we have to
        # do this up front.
        isa_desc = self.read_and_flatten(isa_desc_file)

        # Initialize filename stack with outer file.
        self.fileNameStack.push((isa_desc_file, 0))

        # Parse it.
1959 (isa_name, namespace, global_code, namespace_code) = \ 1960 self.parse_string(isa_desc) 1961 1962 # grab the last three path components of isa_desc_file to put in 1963 # the output 1964 filename = '/'.join(isa_desc_file.split('/')[-3:]) 1965 1966 # generate decoder.hh 1967 includes = '#include "base/bitfield.hh" // for bitfield support' 1968 global_output = global_code.header_output 1969 namespace_output = namespace_code.header_output 1970 decode_function = '' 1971 self.update_if_needed('decoder.hh', file_template % vars()) 1972 1973 # generate decoder.cc 1974 includes = '#include "decoder.hh"' 1975 global_output = global_code.decoder_output 1976 namespace_output = namespace_code.decoder_output 1977 # namespace_output += namespace_code.decode_block 1978 decode_function = namespace_code.decode_block 1979 self.update_if_needed('decoder.cc', file_template % vars()) 1980 1981 # generate per-cpu exec files 1982 for cpu in self.cpuModels: 1983 includes = '#include "decoder.hh"\n' 1984 includes += cpu.includes 1985 global_output = global_code.exec_output[cpu.name] 1986 namespace_output = namespace_code.exec_output[cpu.name] 1987 decode_function = '' 1988 self.update_if_needed(cpu.filename, file_template % vars()) 1989 1990 # The variable names here are hacky, but this will creat local 1991 # variables which will be referenced in vars() which have the 1992 # value of the globals. 1993 MaxInstSrcRegs = self.maxInstSrcRegs 1994 MaxInstDestRegs = self.maxInstDestRegs 1995 # max_inst_regs.hh 1996 self.update_if_needed('max_inst_regs.hh', 1997 max_inst_regs_template % vars()) 1998 1999 def parse_isa_desc(self, *args, **kwargs): 2000 try: 2001 self._parse_isa_desc(*args, **kwargs) 2002 except ISAParserError, e: 2003 e.exit(self.fileNameStack) 2004 2005# Called as script: get args from command line. 
# Args are: <path to cpu_models.py> <isa desc file> <output dir> <cpu models>
if __name__ == '__main__':
    # Pull the CpuModel definitions into this module's namespace.
    execfile(sys.argv[1]) # read in CpuModel definitions
    # Resolve each requested CPU model name to its CpuModel object.
    requested = sys.argv[4:]
    models = []
    for model_name in requested:
        models.append(CpuModel.dict[model_name])
    # Parse the ISA description, writing output into the given directory.
    parser = ISAParser(sys.argv[3], models)
    parser.parse_isa_desc(sys.argv[2])