isa_parser.py revision 11327:1e7b883dffc6
1# Copyright (c) 2014 ARM Limited
2# All rights reserved
3#
4# The license below extends only to copyright in the software and shall
5# not be construed as granting a license to any other intellectual
6# property including but not limited to intellectual property relating
7# to a hardware implementation of the functionality of the software
8# licensed hereunder.  You may use the software subject to the license
9# terms below provided that you ensure that this notice is replicated
10# unmodified and in its entirety in all distributions of the software,
11# modified or unmodified, in source code or in binary form.
12#
13# Copyright (c) 2003-2005 The Regents of The University of Michigan
14# Copyright (c) 2013,2015 Advanced Micro Devices, Inc.
15# All rights reserved.
16#
17# Redistribution and use in source and binary forms, with or without
18# modification, are permitted provided that the following conditions are
19# met: redistributions of source code must retain the above copyright
20# notice, this list of conditions and the following disclaimer;
21# redistributions in binary form must reproduce the above copyright
22# notice, this list of conditions and the following disclaimer in the
23# documentation and/or other materials provided with the distribution;
24# neither the name of the copyright holders nor the names of its
25# contributors may be used to endorse or promote products derived from
26# this software without specific prior written permission.
27#
28# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
29# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
30# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
31# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
32# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
33# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
34# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
35# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
36# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
37# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
38# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
39#
40# Authors: Steve Reinhardt
41
42from __future__ import with_statement
43import os
44import sys
45import re
46import string
47import inspect, traceback
48# get type names
49from types import *
50
51from m5.util.grammar import Grammar
52
53debug=False
54
55###################
56# Utility functions
57
58#
59# Indent every line in string 's' by two spaces
60# (except preprocessor directives).
61# Used to make nested code blocks look pretty.
62#
63def indent(s):
64    return re.sub(r'(?m)^(?!#)', '  ', s)
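
# For instance, applied to the block "x = 1;\n#if FOO\ny = 2;", indent()
# yields "  x = 1;\n#if FOO\n  y = 2;": ordinary lines gain two leading
# spaces while the '#if' preprocessor line is left alone.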
65
66#
67# Munge a somewhat arbitrarily formatted piece of Python code
68# (e.g. from a format 'let' block) into something whose indentation
69# will get by the Python parser.
70#
71# The two keys here are that Python will give a syntax error if
72# there's any whitespace at the beginning of the first line, and that
73# all lines at the same lexical nesting level must have identical
74# indentation.  Unfortunately the way code literals work, an entire
75# let block tends to have some initial indentation.  Rather than
76# trying to figure out what that is and strip it off, we prepend 'if
77# 1:' to make the let code the nested block inside the if (and have
78# the parser automatically deal with the indentation for us).
79#
80# We don't want to do this if (1) the code block is empty or (2) the
81# first line of the block doesn't have any whitespace at the front.
82
def fixPythonIndentation(s):
    # get rid of blank lines first
    s = re.sub(r'(?m)^\s*\n', '', s)
    if s != '' and re.match(r'[ \t]', s[0]):
        s = 'if 1:\n' + s
    return s
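
# For instance, a let block whose code literal is
#     "    x = 5\n\n    y = x + 1\n"
# first loses the blank line and then, since its first character is a space,
# becomes
#     "if 1:\n    x = 5\n    y = x + 1\n"
# which compiles regardless of how deeply the original block was indented.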
89
90class ISAParserError(Exception):
91    """Exception class for parser errors"""
92    def __init__(self, first, second=None):
93        if second is None:
94            self.lineno = 0
95            self.string = first
96        else:
97            self.lineno = first
98            self.string = second
99
100    def __str__(self):
101        return self.string
102
103def error(*args):
104    raise ISAParserError(*args)
105
106####################
107# Template objects.
108#
109# Template objects are format strings that allow substitution from
110# the attribute spaces of other objects (e.g. InstObjParams instances).
111
112labelRE = re.compile(r'(?<!%)%\(([^\)]+)\)[sd]')
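# For example, in a template body containing '%(op_decl)s ... %(code)s',
# labelRE.findall() returns ['op_decl', 'code'], while a doubled percent
# such as '%%(literal)s' is skipped thanks to the negative lookbehind.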
113
114class Template(object):
115    def __init__(self, parser, t):
116        self.parser = parser
117        self.template = t
118
119    def subst(self, d):
120        myDict = None
121
122        # Protect non-Python-dict substitutions (e.g. if there's a printf
123        # in the templated C++ code)
124        template = self.parser.protectNonSubstPercents(self.template)
125        # CPU-model-specific substitutions are handled later (in GenCode).
126        template = self.parser.protectCpuSymbols(template)
127
128        # Build a dict ('myDict') to use for the template substitution.
129        # Start with the template namespace.  Make a copy since we're
130        # going to modify it.
131        myDict = self.parser.templateMap.copy()
132
133        if isinstance(d, InstObjParams):
134            # If we're dealing with an InstObjParams object, we need
135            # to be a little more sophisticated.  The instruction-wide
136            # parameters are already formed, but the parameters which
137            # are only function wide still need to be generated.
138            compositeCode = ''
139
140            myDict.update(d.__dict__)
141            # The "operands" and "snippets" attributes of the InstObjParams
142            # objects are for internal use and not substitution.
143            del myDict['operands']
144            del myDict['snippets']
145
146            snippetLabels = [l for l in labelRE.findall(template)
147                             if d.snippets.has_key(l)]
148
149            snippets = dict([(s, self.parser.mungeSnippet(d.snippets[s]))
150                             for s in snippetLabels])
151
152            myDict.update(snippets)
153
154            compositeCode = ' '.join(map(str, snippets.values()))
155
156            # Add in template itself in case it references any
157            # operands explicitly (like Mem)
158            compositeCode += ' ' + template
159
160            operands = SubOperandList(self.parser, compositeCode, d.operands)
161
162            myDict['op_decl'] = operands.concatAttrStrings('op_decl')
163            if operands.readPC or operands.setPC:
164                myDict['op_decl'] += 'TheISA::PCState __parserAutoPCState;\n'
165
            # In case there are predicated register reads and writes, declare
            # the variables for the register indices. It is assumed that
            # all the operands in the OperandList are also in the
            # SubOperandList and in the same order. Otherwise, it is
            # expected that predication would not be used for the operands.
171            if operands.predRead:
172                myDict['op_decl'] += 'uint8_t _sourceIndex = 0;\n'
173            if operands.predWrite:
174                myDict['op_decl'] += 'uint8_t M5_VAR_USED _destIndex = 0;\n'
175
176            is_src = lambda op: op.is_src
177            is_dest = lambda op: op.is_dest
178
179            myDict['op_src_decl'] = \
180                      operands.concatSomeAttrStrings(is_src, 'op_src_decl')
181            myDict['op_dest_decl'] = \
182                      operands.concatSomeAttrStrings(is_dest, 'op_dest_decl')
183            if operands.readPC:
184                myDict['op_src_decl'] += \
185                    'TheISA::PCState __parserAutoPCState;\n'
186            if operands.setPC:
187                myDict['op_dest_decl'] += \
188                    'TheISA::PCState __parserAutoPCState;\n'
189
190            myDict['op_rd'] = operands.concatAttrStrings('op_rd')
191            if operands.readPC:
192                myDict['op_rd'] = '__parserAutoPCState = xc->pcState();\n' + \
193                                  myDict['op_rd']
194
195            # Compose the op_wb string. If we're going to write back the
196            # PC state because we changed some of its elements, we'll need to
197            # do that as early as possible. That allows later uncoordinated
198            # modifications to the PC to layer appropriately.
199            reordered = list(operands.items)
200            reordered.reverse()
201            op_wb_str = ''
202            pcWbStr = 'xc->pcState(__parserAutoPCState);\n'
203            for op_desc in reordered:
204                if op_desc.isPCPart() and op_desc.is_dest:
205                    op_wb_str = op_desc.op_wb + pcWbStr + op_wb_str
206                    pcWbStr = ''
207                else:
208                    op_wb_str = op_desc.op_wb + op_wb_str
209            myDict['op_wb'] = op_wb_str
210
211        elif isinstance(d, dict):
212            # if the argument is a dictionary, we just use it.
213            myDict.update(d)
214        elif hasattr(d, '__dict__'):
215            # if the argument is an object, we use its attribute map.
216            myDict.update(d.__dict__)
217        else:
            raise TypeError, "Template.subst() arg must be or have a dictionary"
219        return template % myDict
220
221    # Convert to string.  This handles the case when a template with a
222    # CPU-specific term gets interpolated into another template or into
223    # an output block.
224    def __str__(self):
225        return self.parser.expandCpuSymbolsToString(self.template)
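
# Rough illustration of the substitution flow (the mnemonic, class and
# operand names below are only hypothetical examples):
#   iop = InstObjParams(parser, 'add', 'Add', 'BasicOp',
#                       {'code': 'Rd = Rs1 + Rs2;'})
#   Template(parser, '%(class_name)s::%(mnemonic)s needs %(op_decl)s').subst(iop)
# fills class_name and mnemonic straight from iop's attributes, while op_decl
# is built from the operands found in the 'code' snippet.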
226
227################
228# Format object.
229#
230# A format object encapsulates an instruction format.  It must provide
231# a defineInst() method that generates the code for an instruction
232# definition.
233
234class Format(object):
235    def __init__(self, id, params, code):
236        self.id = id
237        self.params = params
238        label = 'def format ' + id
239        self.user_code = compile(fixPythonIndentation(code), label, 'exec')
240        param_list = string.join(params, ", ")
241        f = '''def defInst(_code, _context, %s):
242                my_locals = vars().copy()
243                exec _code in _context, my_locals
244                return my_locals\n''' % param_list
245        c = compile(f, label + ' wrapper', 'exec')
246        exec c
247        self.func = defInst
248
249    def defineInst(self, parser, name, args, lineno):
250        parser.updateExportContext()
251        context = parser.exportContext.copy()
        Name = ''
        if len(name):
            Name = name[0].upper()
            if len(name) > 1:
                Name += name[1:]
        context.update({ 'name' : name, 'Name' : Name })
257        try:
258            vars = self.func(self.user_code, context, *args[0], **args[1])
259        except Exception, exc:
260            if debug:
261                raise
262            error(lineno, 'error defining "%s": %s.' % (name, exc))
263        for k in vars.keys():
264            if k not in ('header_output', 'decoder_output',
265                         'exec_output', 'decode_block'):
266                del vars[k]
267        return GenCode(parser, **vars)
268
269# Special null format to catch an implicit-format instruction
270# definition outside of any format block.
271class NoFormat(object):
272    def __init__(self):
273        self.defaultInst = ''
274
275    def defineInst(self, parser, name, args, lineno):
276        error(lineno,
277              'instruction definition "%s" with no active format!' % name)
278
279###############
280# GenCode class
281#
282# The GenCode class encapsulates generated code destined for various
283# output files.  The header_output and decoder_output attributes are
284# strings containing code destined for decoder.hh and decoder.cc
285# respectively.  The decode_block attribute contains code to be
286# incorporated in the decode function itself (that will also end up in
287# decoder.cc).  The exec_output attribute is a dictionary with a key
288# for each CPU model name; the value associated with a particular key
289# is the string of code for that CPU model's exec.cc file.  The
290# has_decode_default attribute is used in the decode block to allow
291# explicit default clauses to override default default clauses.
292
293class GenCode(object):
294    # Constructor.  At this point we substitute out all CPU-specific
295    # symbols.  For the exec output, these go into the per-model
296    # dictionary.  For all other output types they get collapsed into
297    # a single string.
298    def __init__(self, parser,
299                 header_output = '', decoder_output = '', exec_output = '',
300                 decode_block = '', has_decode_default = False):
301        self.parser = parser
302        self.header_output = parser.expandCpuSymbolsToString(header_output)
303        self.decoder_output = parser.expandCpuSymbolsToString(decoder_output)
304        self.exec_output = exec_output
305        self.decode_block = decode_block
306        self.has_decode_default = has_decode_default
307
    # Write these code chunks out to the filesystem.  They will be properly
    # interwoven by write_top_level_files().
310    def emit(self):
311        if self.header_output:
312            self.parser.get_file('header').write(self.header_output)
313        if self.decoder_output:
314            self.parser.get_file('decoder').write(self.decoder_output)
315        if self.exec_output:
316            self.parser.get_file('exec').write(self.exec_output)
317        if self.decode_block:
318            self.parser.get_file('decode_block').write(self.decode_block)
319
320    # Override '+' operator: generate a new GenCode object that
321    # concatenates all the individual strings in the operands.
322    def __add__(self, other):
323        return GenCode(self.parser,
324                       self.header_output + other.header_output,
325                       self.decoder_output + other.decoder_output,
326                       self.exec_output + other.exec_output,
327                       self.decode_block + other.decode_block,
328                       self.has_decode_default or other.has_decode_default)
329
330    # Prepend a string (typically a comment) to all the strings.
331    def prepend_all(self, pre):
332        self.header_output = pre + self.header_output
333        self.decoder_output  = pre + self.decoder_output
334        self.decode_block = pre + self.decode_block
335        self.exec_output  = pre + self.exec_output
336
337    # Wrap the decode block in a pair of strings (e.g., 'case foo:'
338    # and 'break;').  Used to build the big nested switch statement.
339    def wrap_decode_block(self, pre, post = ''):
340        self.decode_block = pre + indent(self.decode_block) + post
341
342#####################################################################
343#
344#                      Bitfield Operator Support
345#
346#####################################################################
347
348bitOp1ArgRE = re.compile(r'<\s*(\w+)\s*:\s*>')
349
350bitOpWordRE = re.compile(r'(?<![\w\.])([\w\.]+)<\s*(\w+)\s*:\s*(\w+)\s*>')
351bitOpExprRE = re.compile(r'\)<\s*(\w+)\s*:\s*(\w+)\s*>')
352
353def substBitOps(code):
    # first convert single-bit selectors to two-index form
    # i.e., <n:> --> <n:n>
356    code = bitOp1ArgRE.sub(r'<\1:\1>', code)
357    # simple case: selector applied to ID (name)
358    # i.e., foo<a:b> --> bits(foo, a, b)
359    code = bitOpWordRE.sub(r'bits(\1, \2, \3)', code)
360    # if selector is applied to expression (ending in ')'),
361    # we need to search backward for matching '('
362    match = bitOpExprRE.search(code)
363    while match:
364        exprEnd = match.start()
365        here = exprEnd - 1
366        nestLevel = 1
367        while nestLevel > 0:
368            if code[here] == '(':
369                nestLevel -= 1
370            elif code[here] == ')':
371                nestLevel += 1
372            here -= 1
            if here < 0 and nestLevel > 0:
                sys.exit("Didn't find '('!")
375        exprStart = here+1
376        newExpr = r'bits(%s, %s, %s)' % (code[exprStart:exprEnd+1],
377                                         match.group(1), match.group(2))
378        code = code[:exprStart] + newExpr + code[match.end():]
379        match = bitOpExprRE.search(code)
380    return code
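
# For example, substBitOps('machInst<19:16> | (Rm + imm)<7:0>') returns
#   'bits(machInst, 19, 16) | bits((Rm + imm), 7, 0)'
# -- the selector on the bare identifier is rewritten directly, while the
# selector on the parenthesized expression triggers the backward search for
# its matching '('.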
381
382
383#####################################################################
384#
385#                             Code Parser
386#
387# The remaining code is the support for automatically extracting
388# instruction characteristics from pseudocode.
389#
390#####################################################################
391
# Force the argument to be a list.  Useful for flags, where a caller
# can specify a singleton flag or a list of flags.  Also useful for
# converting tuples to lists so they can be modified.
395def makeList(arg):
396    if isinstance(arg, list):
397        return arg
398    elif isinstance(arg, tuple):
399        return list(arg)
400    elif not arg:
401        return []
402    else:
403        return [ arg ]
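
# For instance, makeList('IsInteger') returns ['IsInteger'],
# makeList(('IsInteger', 'IsAlu')) returns ['IsInteger', 'IsAlu'], and
# makeList(None) or makeList('') returns [].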
404
405class Operand(object):
    '''Base class for operand descriptors.  An instance of this class
    (or actually a class derived from this one) represents a specific
    operand for a code block (e.g., "Rc.sq" as a dest).  Intermediate
    derived classes encapsulate the traits of a particular operand
    type (e.g., "32-bit integer register").'''
411
412    def buildReadCode(self, func = None):
413        subst_dict = {"name": self.base_name,
414                      "func": func,
415                      "reg_idx": self.reg_spec,
416                      "ctype": self.ctype}
417        if hasattr(self, 'src_reg_idx'):
418            subst_dict['op_idx'] = self.src_reg_idx
419        code = self.read_code % subst_dict
420        return '%s = %s;\n' % (self.base_name, code)
421
422    def buildWriteCode(self, func = None):
423        subst_dict = {"name": self.base_name,
424                      "func": func,
425                      "reg_idx": self.reg_spec,
426                      "ctype": self.ctype,
427                      "final_val": self.base_name}
428        if hasattr(self, 'dest_reg_idx'):
429            subst_dict['op_idx'] = self.dest_reg_idx
430        code = self.write_code % subst_dict
431        return '''
432        {
433            %s final_val = %s;
434            %s;
435            if (traceData) { traceData->setData(final_val); }
436        }''' % (self.dflt_ctype, self.base_name, code)
437
438    def __init__(self, parser, full_name, ext, is_src, is_dest):
439        self.full_name = full_name
440        self.ext = ext
441        self.is_src = is_src
442        self.is_dest = is_dest
443        # The 'effective extension' (eff_ext) is either the actual
444        # extension, if one was explicitly provided, or the default.
445        if ext:
446            self.eff_ext = ext
447        elif hasattr(self, 'dflt_ext'):
448            self.eff_ext = self.dflt_ext
449
450        if hasattr(self, 'eff_ext'):
451            self.ctype = parser.operandTypeMap[self.eff_ext]
452
453    # Finalize additional fields (primarily code fields).  This step
454    # is done separately since some of these fields may depend on the
455    # register index enumeration that hasn't been performed yet at the
456    # time of __init__(). The register index enumeration is affected
457    # by predicated register reads/writes. Hence, we forward the flags
458    # that indicate whether or not predication is in use.
459    def finalize(self, predRead, predWrite):
460        self.flags = self.getFlags()
461        self.constructor = self.makeConstructor(predRead, predWrite)
462        self.op_decl = self.makeDecl()
463
464        if self.is_src:
465            self.op_rd = self.makeRead(predRead)
466            self.op_src_decl = self.makeDecl()
467        else:
468            self.op_rd = ''
469            self.op_src_decl = ''
470
471        if self.is_dest:
472            self.op_wb = self.makeWrite(predWrite)
473            self.op_dest_decl = self.makeDecl()
474        else:
475            self.op_wb = ''
476            self.op_dest_decl = ''
477
478    def isMem(self):
479        return 0
480
481    def isReg(self):
482        return 0
483
484    def isFloatReg(self):
485        return 0
486
487    def isIntReg(self):
488        return 0
489
490    def isCCReg(self):
491        return 0
492
493    def isControlReg(self):
494        return 0
495
496    def isPCState(self):
497        return 0
498
499    def isPCPart(self):
500        return self.isPCState() and self.reg_spec
501
502    def hasReadPred(self):
503        return self.read_predicate != None
504
505    def hasWritePred(self):
506        return self.write_predicate != None
507
508    def getFlags(self):
509        # note the empty slice '[:]' gives us a copy of self.flags[0]
510        # instead of a reference to it
511        my_flags = self.flags[0][:]
512        if self.is_src:
513            my_flags += self.flags[1]
514        if self.is_dest:
515            my_flags += self.flags[2]
516        return my_flags
517
518    def makeDecl(self):
519        # Note that initializations in the declarations are solely
520        # to avoid 'uninitialized variable' errors from the compiler.
521        return self.ctype + ' ' + self.base_name + ' = 0;\n';
522
523class IntRegOperand(Operand):
524    def isReg(self):
525        return 1
526
527    def isIntReg(self):
528        return 1
529
530    def makeConstructor(self, predRead, predWrite):
531        c_src = ''
532        c_dest = ''
533
534        if self.is_src:
535            c_src = '\n\t_srcRegIdx[_numSrcRegs++] = %s;' % (self.reg_spec)
536            if self.hasReadPred():
537                c_src = '\n\tif (%s) {%s\n\t}' % \
538                        (self.read_predicate, c_src)
539
540        if self.is_dest:
541            c_dest = '\n\t_destRegIdx[_numDestRegs++] = %s;' % \
542                    (self.reg_spec)
543            c_dest += '\n\t_numIntDestRegs++;'
544            if self.hasWritePred():
545                c_dest = '\n\tif (%s) {%s\n\t}' % \
546                         (self.write_predicate, c_dest)
547
548        return c_src + c_dest
549
550    def makeRead(self, predRead):
551        if (self.ctype == 'float' or self.ctype == 'double'):
552            error('Attempt to read integer register as FP')
553        if self.read_code != None:
554            return self.buildReadCode('readIntRegOperand')
555
556        int_reg_val = ''
557        if predRead:
558            int_reg_val = 'xc->readIntRegOperand(this, _sourceIndex++)'
559            if self.hasReadPred():
560                int_reg_val = '(%s) ? %s : 0' % \
561                              (self.read_predicate, int_reg_val)
562        else:
563            int_reg_val = 'xc->readIntRegOperand(this, %d)' % self.src_reg_idx
564
565        return '%s = %s;\n' % (self.base_name, int_reg_val)
566
567    def makeWrite(self, predWrite):
568        if (self.ctype == 'float' or self.ctype == 'double'):
569            error('Attempt to write integer register as FP')
570        if self.write_code != None:
571            return self.buildWriteCode('setIntRegOperand')
572
573        if predWrite:
574            wp = 'true'
575            if self.hasWritePred():
576                wp = self.write_predicate
577
578            wcond = 'if (%s)' % (wp)
579            windex = '_destIndex++'
580        else:
581            wcond = ''
582            windex = '%d' % self.dest_reg_idx
583
584        wb = '''
585        %s
586        {
587            %s final_val = %s;
588            xc->setIntRegOperand(this, %s, final_val);\n
589            if (traceData) { traceData->setData(final_val); }
590        }''' % (wcond, self.ctype, self.base_name, windex)
591
592        return wb
593
594class FloatRegOperand(Operand):
595    def isReg(self):
596        return 1
597
598    def isFloatReg(self):
599        return 1
600
601    def makeConstructor(self, predRead, predWrite):
602        c_src = ''
603        c_dest = ''
604
605        if self.is_src:
606            c_src = '\n\t_srcRegIdx[_numSrcRegs++] = %s + FP_Reg_Base;' % \
607                    (self.reg_spec)
608
609        if self.is_dest:
610            c_dest = \
611              '\n\t_destRegIdx[_numDestRegs++] = %s + FP_Reg_Base;' % \
612              (self.reg_spec)
613            c_dest += '\n\t_numFPDestRegs++;'
614
615        return c_src + c_dest
616
617    def makeRead(self, predRead):
618        bit_select = 0
619        if (self.ctype == 'float' or self.ctype == 'double'):
620            func = 'readFloatRegOperand'
621        else:
622            func = 'readFloatRegOperandBits'
623        if self.read_code != None:
624            return self.buildReadCode(func)
625
626        if predRead:
627            rindex = '_sourceIndex++'
628        else:
629            rindex = '%d' % self.src_reg_idx
630
631        return '%s = xc->%s(this, %s);\n' % \
632            (self.base_name, func, rindex)
633
634    def makeWrite(self, predWrite):
635        if (self.ctype == 'float' or self.ctype == 'double'):
636            func = 'setFloatRegOperand'
637        else:
638            func = 'setFloatRegOperandBits'
639        if self.write_code != None:
640            return self.buildWriteCode(func)
641
642        if predWrite:
643            wp = '_destIndex++'
644        else:
645            wp = '%d' % self.dest_reg_idx
646        wp = 'xc->%s(this, %s, final_val);' % (func, wp)
647
648        wb = '''
649        {
650            %s final_val = %s;
651            %s\n
652            if (traceData) { traceData->setData(final_val); }
653        }''' % (self.ctype, self.base_name, wp)
654        return wb
655
656class CCRegOperand(Operand):
657    def isReg(self):
658        return 1
659
660    def isCCReg(self):
661        return 1
662
663    def makeConstructor(self, predRead, predWrite):
664        c_src = ''
665        c_dest = ''
666
667        if self.is_src:
668            c_src = '\n\t_srcRegIdx[_numSrcRegs++] = %s + CC_Reg_Base;' % \
669                     (self.reg_spec)
670            if self.hasReadPred():
671                c_src = '\n\tif (%s) {%s\n\t}' % \
672                        (self.read_predicate, c_src)
673
674        if self.is_dest:
675            c_dest = \
676              '\n\t_destRegIdx[_numDestRegs++] = %s + CC_Reg_Base;' % \
677              (self.reg_spec)
678            c_dest += '\n\t_numCCDestRegs++;'
679            if self.hasWritePred():
680                c_dest = '\n\tif (%s) {%s\n\t}' % \
681                         (self.write_predicate, c_dest)
682
683        return c_src + c_dest
684
685    def makeRead(self, predRead):
686        if (self.ctype == 'float' or self.ctype == 'double'):
687            error('Attempt to read condition-code register as FP')
688        if self.read_code != None:
689            return self.buildReadCode('readCCRegOperand')
690
691        int_reg_val = ''
692        if predRead:
693            int_reg_val = 'xc->readCCRegOperand(this, _sourceIndex++)'
694            if self.hasReadPred():
695                int_reg_val = '(%s) ? %s : 0' % \
696                              (self.read_predicate, int_reg_val)
697        else:
698            int_reg_val = 'xc->readCCRegOperand(this, %d)' % self.src_reg_idx
699
700        return '%s = %s;\n' % (self.base_name, int_reg_val)
701
702    def makeWrite(self, predWrite):
703        if (self.ctype == 'float' or self.ctype == 'double'):
704            error('Attempt to write condition-code register as FP')
705        if self.write_code != None:
706            return self.buildWriteCode('setCCRegOperand')
707
708        if predWrite:
709            wp = 'true'
710            if self.hasWritePred():
711                wp = self.write_predicate
712
713            wcond = 'if (%s)' % (wp)
714            windex = '_destIndex++'
715        else:
716            wcond = ''
717            windex = '%d' % self.dest_reg_idx
718
719        wb = '''
720        %s
721        {
722            %s final_val = %s;
723            xc->setCCRegOperand(this, %s, final_val);\n
724            if (traceData) { traceData->setData(final_val); }
725        }''' % (wcond, self.ctype, self.base_name, windex)
726
727        return wb
728
729class ControlRegOperand(Operand):
730    def isReg(self):
731        return 1
732
733    def isControlReg(self):
734        return 1
735
736    def makeConstructor(self, predRead, predWrite):
737        c_src = ''
738        c_dest = ''
739
740        if self.is_src:
741            c_src = \
742              '\n\t_srcRegIdx[_numSrcRegs++] = %s + Misc_Reg_Base;' % \
743              (self.reg_spec)
744
745        if self.is_dest:
746            c_dest = \
747              '\n\t_destRegIdx[_numDestRegs++] = %s + Misc_Reg_Base;' % \
748              (self.reg_spec)
749
750        return c_src + c_dest
751
752    def makeRead(self, predRead):
753        bit_select = 0
754        if (self.ctype == 'float' or self.ctype == 'double'):
755            error('Attempt to read control register as FP')
756        if self.read_code != None:
757            return self.buildReadCode('readMiscRegOperand')
758
759        if predRead:
760            rindex = '_sourceIndex++'
761        else:
762            rindex = '%d' % self.src_reg_idx
763
764        return '%s = xc->readMiscRegOperand(this, %s);\n' % \
765            (self.base_name, rindex)
766
767    def makeWrite(self, predWrite):
768        if (self.ctype == 'float' or self.ctype == 'double'):
769            error('Attempt to write control register as FP')
770        if self.write_code != None:
771            return self.buildWriteCode('setMiscRegOperand')
772
773        if predWrite:
774            windex = '_destIndex++'
775        else:
776            windex = '%d' % self.dest_reg_idx
777
778        wb = 'xc->setMiscRegOperand(this, %s, %s);\n' % \
779             (windex, self.base_name)
780        wb += 'if (traceData) { traceData->setData(%s); }' % \
781              self.base_name
782
783        return wb
784
785class MemOperand(Operand):
786    def isMem(self):
787        return 1
788
789    def makeConstructor(self, predRead, predWrite):
790        return ''
791
792    def makeDecl(self):
793        # Declare memory data variable.
794        return '%s %s;\n' % (self.ctype, self.base_name)
795
796    def makeRead(self, predRead):
797        if self.read_code != None:
798            return self.buildReadCode()
799        return ''
800
801    def makeWrite(self, predWrite):
802        if self.write_code != None:
803            return self.buildWriteCode()
804        return ''
805
806class PCStateOperand(Operand):
807    def makeConstructor(self, predRead, predWrite):
808        return ''
809
810    def makeRead(self, predRead):
811        if self.reg_spec:
812            # A component of the PC state.
813            return '%s = __parserAutoPCState.%s();\n' % \
814                (self.base_name, self.reg_spec)
815        else:
816            # The whole PC state itself.
817            return '%s = xc->pcState();\n' % self.base_name
818
819    def makeWrite(self, predWrite):
820        if self.reg_spec:
821            # A component of the PC state.
822            return '__parserAutoPCState.%s(%s);\n' % \
823                (self.reg_spec, self.base_name)
824        else:
825            # The whole PC state itself.
826            return 'xc->pcState(%s);\n' % self.base_name
827
828    def makeDecl(self):
829        ctype = 'TheISA::PCState'
830        if self.isPCPart():
831            ctype = self.ctype
832        # Note that initializations in the declarations are solely
833        # to avoid 'uninitialized variable' errors from the compiler.
834        return '%s %s = 0;\n' % (ctype, self.base_name)
835
836    def isPCState(self):
837        return 1
838
839class OperandList(object):
    '''Find all the operands in the given code block and build an
    operand descriptor list (an instance of class OperandList).'''
842    def __init__(self, parser, code):
843        self.items = []
844        self.bases = {}
845        # delete strings and comments so we don't match on operands inside
846        for regEx in (stringRE, commentRE):
847            code = regEx.sub('', code)
848        # search for operands
849        next_pos = 0
850        while 1:
851            match = parser.operandsRE.search(code, next_pos)
852            if not match:
853                # no more matches: we're done
854                break
855            op = match.groups()
856            # regexp groups are operand full name, base, and extension
857            (op_full, op_base, op_ext) = op
858            # if the token following the operand is an assignment, this is
859            # a destination (LHS), else it's a source (RHS)
860            is_dest = (assignRE.match(code, match.end()) != None)
861            is_src = not is_dest
862            # see if we've already seen this one
863            op_desc = self.find_base(op_base)
864            if op_desc:
865                if op_desc.ext != op_ext:
866                    error('Inconsistent extensions for operand %s' % \
867                          op_base)
868                op_desc.is_src = op_desc.is_src or is_src
869                op_desc.is_dest = op_desc.is_dest or is_dest
870            else:
871                # new operand: create new descriptor
872                op_desc = parser.operandNameMap[op_base](parser,
873                    op_full, op_ext, is_src, is_dest)
874                self.append(op_desc)
875            # start next search after end of current match
876            next_pos = match.end()
877        self.sort()
878        # enumerate source & dest register operands... used in building
879        # constructor later
880        self.numSrcRegs = 0
881        self.numDestRegs = 0
882        self.numFPDestRegs = 0
883        self.numIntDestRegs = 0
884        self.numCCDestRegs = 0
885        self.numMiscDestRegs = 0
886        self.memOperand = None
887
888        # Flags to keep track if one or more operands are to be read/written
889        # conditionally.
890        self.predRead = False
891        self.predWrite = False
892
893        for op_desc in self.items:
894            if op_desc.isReg():
895                if op_desc.is_src:
896                    op_desc.src_reg_idx = self.numSrcRegs
897                    self.numSrcRegs += 1
898                if op_desc.is_dest:
899                    op_desc.dest_reg_idx = self.numDestRegs
900                    self.numDestRegs += 1
901                    if op_desc.isFloatReg():
902                        self.numFPDestRegs += 1
903                    elif op_desc.isIntReg():
904                        self.numIntDestRegs += 1
905                    elif op_desc.isCCReg():
906                        self.numCCDestRegs += 1
907                    elif op_desc.isControlReg():
908                        self.numMiscDestRegs += 1
909            elif op_desc.isMem():
910                if self.memOperand:
911                    error("Code block has more than one memory operand.")
912                self.memOperand = op_desc
913
914            # Check if this operand has read/write predication. If true, then
915            # the microop will dynamically index source/dest registers.
916            self.predRead = self.predRead or op_desc.hasReadPred()
917            self.predWrite = self.predWrite or op_desc.hasWritePred()
918
919        if parser.maxInstSrcRegs < self.numSrcRegs:
920            parser.maxInstSrcRegs = self.numSrcRegs
921        if parser.maxInstDestRegs < self.numDestRegs:
922            parser.maxInstDestRegs = self.numDestRegs
923        if parser.maxMiscDestRegs < self.numMiscDestRegs:
924            parser.maxMiscDestRegs = self.numMiscDestRegs
925
926        # now make a final pass to finalize op_desc fields that may depend
927        # on the register enumeration
928        for op_desc in self.items:
929            op_desc.finalize(self.predRead, self.predWrite)
930
931    def __len__(self):
932        return len(self.items)
933
934    def __getitem__(self, index):
935        return self.items[index]
936
937    def append(self, op_desc):
938        self.items.append(op_desc)
939        self.bases[op_desc.base_name] = op_desc
940
941    def find_base(self, base_name):
942        # like self.bases[base_name], but returns None if not found
943        # (rather than raising exception)
944        return self.bases.get(base_name)
945
946    # internal helper function for concat[Some]Attr{Strings|Lists}
947    def __internalConcatAttrs(self, attr_name, filter, result):
948        for op_desc in self.items:
949            if filter(op_desc):
950                result += getattr(op_desc, attr_name)
951        return result
952
953    # return a single string that is the concatenation of the (string)
954    # values of the specified attribute for all operands
955    def concatAttrStrings(self, attr_name):
956        return self.__internalConcatAttrs(attr_name, lambda x: 1, '')
957
958    # like concatAttrStrings, but only include the values for the operands
959    # for which the provided filter function returns true
960    def concatSomeAttrStrings(self, filter, attr_name):
961        return self.__internalConcatAttrs(attr_name, filter, '')
962
963    # return a single list that is the concatenation of the (list)
964    # values of the specified attribute for all operands
965    def concatAttrLists(self, attr_name):
966        return self.__internalConcatAttrs(attr_name, lambda x: 1, [])
967
968    # like concatAttrLists, but only include the values for the operands
969    # for which the provided filter function returns true
970    def concatSomeAttrLists(self, filter, attr_name):
971        return self.__internalConcatAttrs(attr_name, filter, [])
972
973    def sort(self):
974        self.items.sort(lambda a, b: a.sort_pri - b.sort_pri)
975
976class SubOperandList(OperandList):
    '''Find all the operands in the given code block, restricted to those
    already present in the supplied master OperandList.'''
979    def __init__(self, parser, code, master_list):
980        self.items = []
981        self.bases = {}
982        # delete strings and comments so we don't match on operands inside
983        for regEx in (stringRE, commentRE):
984            code = regEx.sub('', code)
985        # search for operands
986        next_pos = 0
987        while 1:
988            match = parser.operandsRE.search(code, next_pos)
989            if not match:
990                # no more matches: we're done
991                break
992            op = match.groups()
993            # regexp groups are operand full name, base, and extension
994            (op_full, op_base, op_ext) = op
995            # find this op in the master list
996            op_desc = master_list.find_base(op_base)
997            if not op_desc:
998                error('Found operand %s which is not in the master list!'
999                      % op_base)
1000            else:
1001                # See if we've already found this operand
1002                op_desc = self.find_base(op_base)
1003                if not op_desc:
1004                    # if not, add a reference to it to this sub list
1005                    self.append(master_list.bases[op_base])
1006
1007            # start next search after end of current match
1008            next_pos = match.end()
1009        self.sort()
1010        self.memOperand = None
1011        # Whether the whole PC needs to be read so parts of it can be accessed
1012        self.readPC = False
1013        # Whether the whole PC needs to be written after parts of it were
1014        # changed
1015        self.setPC = False
1016        # Whether this instruction manipulates the whole PC or parts of it.
1017        # Mixing the two is a bad idea and flagged as an error.
1018        self.pcPart = None
1019
1020        # Flags to keep track if one or more operands are to be read/written
1021        # conditionally.
1022        self.predRead = False
1023        self.predWrite = False
1024
1025        for op_desc in self.items:
1026            if op_desc.isPCPart():
1027                self.readPC = True
1028                if op_desc.is_dest:
1029                    self.setPC = True
1030
1031            if op_desc.isPCState():
1032                if self.pcPart is not None:
                    if (self.pcPart and not op_desc.isPCPart()) or \
                            (not self.pcPart and op_desc.isPCPart()):
1035                        error("Mixed whole and partial PC state operands.")
1036                self.pcPart = op_desc.isPCPart()
1037
1038            if op_desc.isMem():
1039                if self.memOperand:
1040                    error("Code block has more than one memory operand.")
1041                self.memOperand = op_desc
1042
1043            # Check if this operand has read/write predication. If true, then
1044            # the microop will dynamically index source/dest registers.
1045            self.predRead = self.predRead or op_desc.hasReadPred()
1046            self.predWrite = self.predWrite or op_desc.hasWritePred()
1047
1048# Regular expression object to match C++ strings
1049stringRE = re.compile(r'"([^"\\]|\\.)*"')
1050
1051# Regular expression object to match C++ comments
1052# (used in findOperands())
1053commentRE = re.compile(r'(^)?[^\S\n]*/(?:\*(.*?)\*/[^\S\n]*|/[^\n]*)($)?',
1054        re.DOTALL | re.MULTILINE)
1055
1056# Regular expression object to match assignment statements
1057# (used in findOperands())
1058assignRE = re.compile(r'\s*=(?!=)', re.MULTILINE)
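# For example, given the snippet 'Rd = Rs1 + Rs2;', the text following the
# operand Rd matches assignRE, so Rd is classified as a destination; the
# '==' in 'if (Rd == 0)' fails the negative lookahead, leaving Rd a source.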
1059
1060def makeFlagConstructor(flag_list):
1061    if len(flag_list) == 0:
1062        return ''
1063    # filter out repeated flags
1064    flag_list.sort()
1065    i = 1
1066    while i < len(flag_list):
1067        if flag_list[i] == flag_list[i-1]:
1068            del flag_list[i]
1069        else:
1070            i += 1
1071    pre = '\n\tflags['
1072    post = '] = true;'
1073    code = pre + string.join(flag_list, post + pre) + post
1074    return code
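
# For example, makeFlagConstructor(['IsInteger', 'IsAlu', 'IsInteger'])
# returns '\n\tflags[IsAlu] = true;\n\tflags[IsInteger] = true;' --
# duplicates are dropped and one assignment is emitted per remaining flag.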
1075
1076# Assume all instruction flags are of the form 'IsFoo'
1077instFlagRE = re.compile(r'Is.*')
1078
1079# OpClass constants end in 'Op' except No_OpClass
1080opClassRE = re.compile(r'.*Op|No_OpClass')
1081
1082class InstObjParams(object):
1083    def __init__(self, parser, mnem, class_name, base_class = '',
1084                 snippets = {}, opt_args = []):
1085        self.mnemonic = mnem
1086        self.class_name = class_name
1087        self.base_class = base_class
1088        if not isinstance(snippets, dict):
1089            snippets = {'code' : snippets}
1090        compositeCode = ' '.join(map(str, snippets.values()))
1091        self.snippets = snippets
1092
1093        self.operands = OperandList(parser, compositeCode)
1094
1095        # The header of the constructor declares the variables to be used
1096        # in the body of the constructor.
1097        header = ''
1098        header += '\n\t_numSrcRegs = 0;'
1099        header += '\n\t_numDestRegs = 0;'
1100        header += '\n\t_numFPDestRegs = 0;'
1101        header += '\n\t_numIntDestRegs = 0;'
1102        header += '\n\t_numCCDestRegs = 0;'
1103
1104        self.constructor = header + \
1105                           self.operands.concatAttrStrings('constructor')
1106
1107        self.flags = self.operands.concatAttrLists('flags')
1108
1109        self.op_class = None
1110
1111        # Optional arguments are assumed to be either StaticInst flags
1112        # or an OpClass value.  To avoid having to import a complete
1113        # list of these values to match against, we do it ad-hoc
1114        # with regexps.
1115        for oa in opt_args:
1116            if instFlagRE.match(oa):
1117                self.flags.append(oa)
1118            elif opClassRE.match(oa):
1119                self.op_class = oa
1120            else:
1121                error('InstObjParams: optional arg "%s" not recognized '
1122                      'as StaticInst::Flag or OpClass.' % oa)
1123
1124        # Make a basic guess on the operand class if not set.
1125        # These are good enough for most cases.
1126        if not self.op_class:
1127            if 'IsStore' in self.flags:
1128                self.op_class = 'MemWriteOp'
1129            elif 'IsLoad' in self.flags or 'IsPrefetch' in self.flags:
1130                self.op_class = 'MemReadOp'
1131            elif 'IsFloating' in self.flags:
1132                self.op_class = 'FloatAddOp'
1133            else:
1134                self.op_class = 'IntAluOp'
1135
        # add flag initialization to the constructor here to include
        # any flags added via opt_args
1138        self.constructor += makeFlagConstructor(self.flags)
1139
1140        # if 'IsFloating' is set, add call to the FP enable check
1141        # function (which should be provided by isa_desc via a declare)
1142        if 'IsFloating' in self.flags:
1143            self.fp_enable_check = 'fault = checkFpEnableFault(xc);'
1144        else:
1145            self.fp_enable_check = ''
1146
1147##############
1148# Stack: a simple stack object.  Used for both formats (formatStack)
1149# and default cases (defaultStack).  Simply wraps a list to give more
1150# stack-like syntax and enable initialization with an argument list
1151# (as opposed to an argument that's a list).
1152
1153class Stack(list):
1154    def __init__(self, *items):
1155        list.__init__(self, items)
1156
1157    def push(self, item):
1158        self.append(item);
1159
1160    def top(self):
1161        return self[-1]
1162
1163# Format a file include stack backtrace as a string
1164def backtrace(filename_stack):
1165    fmt = "In file included from %s:"
1166    return "\n".join([fmt % f for f in filename_stack])
1167
1168
1169#######################
1170#
# LineTracker: track filenames along with line numbers in PLY lineno fields
#     PLY explicitly doesn't do anything with 'lineno' except propagate
#     it.  This class lets us tie filenames to line numbers with a
#     minimum of disruption to existing increment code.
1175#
1176
1177class LineTracker(object):
1178    def __init__(self, filename, lineno=1):
1179        self.filename = filename
1180        self.lineno = lineno
1181
1182    # Overload '+=' for increments.  We need to create a new object on
1183    # each update else every token ends up referencing the same
1184    # constantly incrementing instance.
1185    def __iadd__(self, incr):
1186        return LineTracker(self.filename, self.lineno + incr)
1187
1188    def __str__(self):
1189        return "%s:%d" % (self.filename, self.lineno)
1190
1191    # In case there are places where someone really expects a number
1192    def __int__(self):
1193        return self.lineno
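    # Typical use (sketch): the lexer's lineno starts out as a
    # LineTracker('foo.isa'), 'lineno += 1' on each newline yields a fresh
    # tracker for the next line, and str(lineno) gives locations such as
    # 'foo.isa:42' in error messages.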
1194
1195
1196#######################
1197#
1198# ISA Parser
1199#   parses ISA DSL and emits C++ headers and source
1200#
1201
1202class ISAParser(Grammar):
1203    class CpuModel(object):
1204        def __init__(self, name, filename, includes, strings):
1205            self.name = name
1206            self.filename = filename
1207            self.includes = includes
1208            self.strings = strings
1209
1210    def __init__(self, output_dir):
1211        super(ISAParser, self).__init__()
1212        self.output_dir = output_dir
1213
1214        self.filename = None # for output file watermarking/scaremongering
1215
1216        self.cpuModels = [
1217            ISAParser.CpuModel('ExecContext',
1218                               'generic_cpu_exec.cc',
1219                               '#include "cpu/exec_context.hh"',
1220                               { "CPU_exec_context" : "ExecContext" }),
1221            ]
1222
1223        # variable to hold templates
1224        self.templateMap = {}
1225
1226        # This dictionary maps format name strings to Format objects.
1227        self.formatMap = {}
1228
        # Track open files and, if applicable, how many chunks each has
        # been split into so far.
1231        self.files = {}
1232        self.splits = {}
1233
        # isa_name / namespace identifier from the namespace declaration;
        # before the namespace declaration, both are None.
1236        self.isa_name = None
1237        self.namespace = None
1238
1239        # The format stack.
1240        self.formatStack = Stack(NoFormat())
1241
1242        # The default case stack.
1243        self.defaultStack = Stack(None)
1244
1245        # Stack that tracks current file and line number.  Each
1246        # element is a tuple (filename, lineno) that records the
1247        # *current* filename and the line number in the *previous*
1248        # file where it was included.
1249        self.fileNameStack = Stack()
1250
1251        symbols = ('makeList', 're', 'string')
1252        self.exportContext = dict([(s, eval(s)) for s in symbols])
1253
1254        self.maxInstSrcRegs = 0
1255        self.maxInstDestRegs = 0
1256        self.maxMiscDestRegs = 0
1257
1258    def __getitem__(self, i):    # Allow object (self) to be
1259        return getattr(self, i)  # passed to %-substitutions
1260
1261    # Change the file suffix of a base filename:
1262    #   (e.g.) decoder.cc -> decoder-g.cc.inc for 'global' outputs
1263    def suffixize(self, s, sec):
1264        extn = re.compile('(\.[^\.]+)$') # isolate extension
1265        if self.namespace:
1266            return extn.sub(r'-ns\1.inc', s) # insert some text on either side
1267        else:
1268            return extn.sub(r'-g\1.inc', s)
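    # For example, suffixize('decoder.cc', 'decoder') yields
    # 'decoder-ns.cc.inc' once the namespace declaration has been seen and
    # 'decoder-g.cc.inc' before it (the 'global' flavor).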
1269
1270    # Get the file object for emitting code into the specified section
1271    # (header, decoder, exec, decode_block).
1272    def get_file(self, section):
1273        if section == 'decode_block':
1274            filename = 'decode-method.cc.inc'
1275        else:
1276            if section == 'header':
1277                file = 'decoder.hh'
1278            else:
1279                file = '%s.cc' % section
1280            filename = self.suffixize(file, section)
1281        try:
1282            return self.files[filename]
1283        except KeyError: pass
1284
1285        f = self.open(filename)
1286        self.files[filename] = f
1287
1288        # The splittable files are the ones with many independent
1289        # per-instruction functions - the decoder's instruction constructors
1290        # and the instruction execution (execute()) methods. These both have
1291        # the suffix -ns.cc.inc, meaning they are within the namespace part
1292        # of the ISA, contain object-emitting C++ source, and are included
1293        # into other top-level files. These are the files that need special
1294        # #define's to allow parts of them to be compiled separately. Rather
1295        # than splitting the emissions into separate files, the monolithic
1296        # output of the ISA parser is maintained, but the value (or lack
1297        # thereof) of the __SPLIT definition during C preprocessing will
1298        # select the different chunks. If no 'split' directives are used,
1299        # the cpp emissions have no effect.
1300        if re.search('-ns.cc.inc$', filename):
1301            print >>f, '#if !defined(__SPLIT) || (__SPLIT == 1)'
1302            self.splits[f] = 1
1303        # ensure requisite #include's
1304        elif filename in ['decoder-g.cc.inc', 'exec-g.cc.inc']:
1305            print >>f, '#include "decoder.hh"'
1306        elif filename == 'decoder-g.hh.inc':
1307            print >>f, '#include "base/bitfield.hh"'
1308
1309        return f
1310
1311    # Weave together the parts of the different output sections by
1312    # #include'ing them into some very short top-level .cc/.hh files.
1313    # These small files make it much clearer how this tool works, since
1314    # you directly see the chunks emitted as files that are #include'd.
1315    def write_top_level_files(self):
1316        dep = self.open('inc.d', bare=True)
1317
1318        # decoder header - everything depends on this
1319        file = 'decoder.hh'
1320        with self.open(file) as f:
1321            inc = []
1322
1323            fn = 'decoder-g.hh.inc'
1324            assert(fn in self.files)
1325            f.write('#include "%s"\n' % fn)
1326            inc.append(fn)
1327
1328            fn = 'decoder-ns.hh.inc'
1329            assert(fn in self.files)
1330            f.write('namespace %s {\n#include "%s"\n}\n'
1331                    % (self.namespace, fn))
1332            inc.append(fn)
1333
1334            print >>dep, file+':', ' '.join(inc)
1335
1336        # decoder method - cannot be split
1337        file = 'decoder.cc'
1338        with self.open(file) as f:
1339            inc = []
1340
1341            fn = 'decoder-g.cc.inc'
1342            assert(fn in self.files)
1343            f.write('#include "%s"\n' % fn)
1344            inc.append(fn)
1345
1346            fn = 'decode-method.cc.inc'
1347            # is guaranteed to have been written for parse to complete
1348            f.write('#include "%s"\n' % fn)
1349            inc.append(fn)
1350
1351            inc.append("decoder.hh")
1352            print >>dep, file+':', ' '.join(inc)
1353
1354        extn = re.compile('(\.[^\.]+)$')
1355
1356        # instruction constructors
1357        splits = self.splits[self.get_file('decoder')]
1358        file_ = 'inst-constrs.cc'
1359        for i in range(1, splits+1):
1360            if splits > 1:
1361                file = extn.sub(r'-%d\1' % i, file_)
1362            else:
1363                file = file_
1364            with self.open(file) as f:
1365                inc = []
1366
1367                fn = 'decoder-g.cc.inc'
1368                assert(fn in self.files)
1369                f.write('#include "%s"\n' % fn)
1370                inc.append(fn)
1371
1372                fn = 'decoder-ns.cc.inc'
1373                assert(fn in self.files)
1374                print >>f, 'namespace %s {' % self.namespace
1375                if splits > 1:
1376                    print >>f, '#define __SPLIT %u' % i
1377                print >>f, '#include "%s"' % fn
1378                print >>f, '}'
1379                inc.append(fn)
1380
1381                inc.append("decoder.hh")
1382                print >>dep, file+':', ' '.join(inc)
1383
1384        # instruction execution per-CPU model
1385        splits = self.splits[self.get_file('exec')]
1386        for cpu in self.cpuModels:
1387            for i in range(1, splits+1):
1388                if splits > 1:
1389                    file = extn.sub(r'_%d\1' % i, cpu.filename)
1390                else:
1391                    file = cpu.filename
1392                with self.open(file) as f:
1393                    inc = []
1394
1395                    fn = 'exec-g.cc.inc'
1396                    assert(fn in self.files)
1397                    f.write('#include "%s"\n' % fn)
1398                    inc.append(fn)
1399
1400                    f.write(cpu.includes+"\n")
1401
1402                    fn = 'exec-ns.cc.inc'
1403                    assert(fn in self.files)
1404                    print >>f, 'namespace %s {' % self.namespace
1405                    print >>f, '#define CPU_EXEC_CONTEXT %s' \
1406                               % cpu.strings['CPU_exec_context']
1407                    if splits > 1:
1408                        print >>f, '#define __SPLIT %u' % i
1409                    print >>f, '#include "%s"' % fn
1410                    print >>f, '}'
1411                    inc.append(fn)
1412
1413                    inc.append("decoder.hh")
1414                    print >>dep, file+':', ' '.join(inc)
1415
1416        # max_inst_regs.hh
1417        self.update('max_inst_regs.hh',
1418                    '''namespace %(namespace)s {
1419    const int MaxInstSrcRegs = %(maxInstSrcRegs)d;
1420    const int MaxInstDestRegs = %(maxInstDestRegs)d;
1421    const int MaxMiscDestRegs = %(maxMiscDestRegs)d;\n}\n''' % self)
1422        print >>dep, 'max_inst_regs.hh:'
1423
1424        dep.close()
1425
1426
1427    scaremonger_template ='''// DO NOT EDIT
1428// This file was automatically generated from an ISA description:
1429//   %(filename)s
1430
1431''';
1432
1433    #####################################################################
1434    #
1435    #                                Lexer
1436    #
1437    # The PLY lexer module takes two things as input:
1438    # - A list of token names (the string list 'tokens')
1439    # - A regular expression describing a match for each token.  The
1440    #   regexp for token FOO can be provided in two ways:
1441    #   - as a string variable named t_FOO
1442    #   - as the doc string for a function named t_FOO.  In this case,
1443    #     the function is also executed, allowing an action to be
1444    #     associated with each token match.
1445    #
1446    #####################################################################
1447
1448    # Reserved words.  These are listed separately as they are matched
1449    # using the same regexp as generic IDs, but distinguished in the
1450    # t_ID() function.  The PLY documentation suggests this approach.
1451    reserved = (
1452        'BITFIELD', 'DECODE', 'DECODER', 'DEFAULT', 'DEF', 'EXEC', 'FORMAT',
1453        'HEADER', 'LET', 'NAMESPACE', 'OPERAND_TYPES', 'OPERANDS',
1454        'OUTPUT', 'SIGNED', 'SPLIT', 'TEMPLATE'
1455        )
1456
1457    # List of tokens.  The lex module requires this.
1458    tokens = reserved + (
1459        # identifier
1460        'ID',
1461
1462        # integer literal
1463        'INTLIT',
1464
1465        # string literal
1466        'STRLIT',
1467
1468        # code literal
1469        'CODELIT',
1470
1471        # ( ) [ ] { } < > , ; . : :: *
1472        'LPAREN', 'RPAREN',
1473        'LBRACKET', 'RBRACKET',
1474        'LBRACE', 'RBRACE',
1475        'LESS', 'GREATER', 'EQUALS',
1476        'COMMA', 'SEMI', 'DOT', 'COLON', 'DBLCOLON',
1477        'ASTERISK',
1478
1479        # C preprocessor directives
1480        'CPPDIRECTIVE'
1481
1482    # The following are matched but never returned. commented out to
1483    # suppress PLY warning
1484        # newfile directive
1485    #    'NEWFILE',
1486
1487        # endfile directive
1488    #    'ENDFILE'
1489    )
1490
1491    # Regular expressions for token matching
1492    t_LPAREN           = r'\('
1493    t_RPAREN           = r'\)'
1494    t_LBRACKET         = r'\['
1495    t_RBRACKET         = r'\]'
1496    t_LBRACE           = r'\{'
1497    t_RBRACE           = r'\}'
1498    t_LESS             = r'\<'
1499    t_GREATER          = r'\>'
1500    t_EQUALS           = r'='
1501    t_COMMA            = r','
1502    t_SEMI             = r';'
1503    t_DOT              = r'\.'
1504    t_COLON            = r':'
1505    t_DBLCOLON         = r'::'
1506    t_ASTERISK         = r'\*'
1507
1508    # Identifiers and reserved words
1509    reserved_map = { }
1510    for r in reserved:
1511        reserved_map[r.lower()] = r
1512
1513    def t_ID(self, t):
1514        r'[A-Za-z_]\w*'
1515        t.type = self.reserved_map.get(t.value, 'ID')
1516        return t
1517
1518    # Integer literal
1519    def t_INTLIT(self, t):
1520        r'-?(0x[\da-fA-F]+|\d+)'
1521        try:
1522            t.value = int(t.value,0)
1523        except ValueError:
1524            error(t.lexer.lineno, 'Integer value "%s" too large' % t.value)
1525            t.value = 0
1526        return t
1527
1528    # String literal.  Note that these use only single quotes, and
1529    # can span multiple lines.
1530    def t_STRLIT(self, t):
1531        r"(?m)'([^'])+'"
1532        # strip off quotes
1533        t.value = t.value[1:-1]
1534        t.lexer.lineno += t.value.count('\n')
1535        return t
1536
1537
1538    # "Code literal"... like a string literal, but delimiters are
1539    # '{{' and '}}' so they get formatted nicely under emacs c-mode
1540    def t_CODELIT(self, t):
1541        r"(?m)\{\{([^\}]|}(?!\}))+\}\}"
1542        # strip off {{ & }}
1543        t.value = t.value[2:-2]
1544        t.lexer.lineno += t.value.count('\n')
1545        return t
1546
1547    def t_CPPDIRECTIVE(self, t):
1548        r'^\#[^\#].*\n'
1549        t.lexer.lineno += t.value.count('\n')
1550        return t
1551
1552    def t_NEWFILE(self, t):
1553        r'^\#\#newfile\s+"[^"]*"\n'
1554        self.fileNameStack.push(t.lexer.lineno)
1555        t.lexer.lineno = LineTracker(t.value[11:-2])
1556
1557    def t_ENDFILE(self, t):
1558        r'^\#\#endfile\n'
1559        t.lexer.lineno = self.fileNameStack.pop()
1560
1561    #
1562    # The functions t_NEWLINE, t_ignore, and t_error are
1563    # special for the lex module.
1564    #
1565
1566    # Newlines
1567    def t_NEWLINE(self, t):
1568        r'\n+'
1569        t.lexer.lineno += t.value.count('\n')
1570
1571    # Comments
1572    def t_comment(self, t):
1573        r'//.*'
1574
1575    # Completely ignored characters
1576    t_ignore = ' \t\x0c'
1577
1578    # Error handler
1579    def t_error(self, t):
1580        error(t.lexer.lineno, "illegal character '%s'" % t.value[0])
1581        t.lexer.skip(1)
1582
1583    #####################################################################
1584    #
1585    #                                Parser
1586    #
1587    # Every function whose name starts with 'p_' defines a grammar
1588    # rule.  The rule is encoded in the function's doc string, while
1589    # the function body provides the action taken when the rule is
1590    # matched.  The argument to each function is a list of the values
1591    # of the rule's symbols: t[0] for the LHS, and t[1..n] for the
1592    # symbols on the RHS.  For tokens, the value is copied from the
1593    # t.value attribute provided by the lexer.  For non-terminals, the
1594    # value is assigned by the producing rule; i.e., the job of the
1595    # grammar rule function is to set the value for the non-terminal
1596    # on the LHS (by assigning to t[0]).
1597    #####################################################################
1598
1599    # The LHS of the first grammar rule is used as the start symbol
1600    # (in this case, 'specification').  Note that this rule enforces
1601    # that there will be exactly one namespace declaration, with 0 or
1602    # more global defs/decls before and after it.  The defs & decls
1603    # before the namespace decl will be outside the namespace; those
1604    # after will be inside.  The decoder function is always inside the
1605    # namespace.
1606    def p_specification(self, t):
1607        'specification : opt_defs_and_outputs top_level_decode_block'
1608
1609        for f in self.splits.iterkeys():
1610            f.write('\n#endif\n')
1611
1612        for f in self.files.itervalues(): # close ALL the files;
1613            f.close() # not doing so can cause compilation to fail
1614
1615        self.write_top_level_files()
1616
1617        t[0] = True
1618
1619    # 'opt_defs_and_outputs' is a possibly empty sequence of def and/or
1620    # output statements. Its productions do the hard work of eventually
1621    # instantiating a GenCode, which are generally emitted (written to disk)
1622    # as soon as possible, except for the decode_block, which has to be
1623    # accumulated into one large function of nested switch/case blocks.
1624    def p_opt_defs_and_outputs_0(self, t):
1625        'opt_defs_and_outputs : empty'
1626
1627    def p_opt_defs_and_outputs_1(self, t):
1628        'opt_defs_and_outputs : defs_and_outputs'
1629
1630    def p_defs_and_outputs_0(self, t):
1631        'defs_and_outputs : def_or_output'
1632
1633    def p_defs_and_outputs_1(self, t):
1634        'defs_and_outputs : defs_and_outputs def_or_output'
1635
1636    # The list of possible definition/output statements.
1637    # They are all processed as they are seen.
1638    def p_def_or_output(self, t):
1639        '''def_or_output : name_decl
1640                         | def_format
1641                         | def_bitfield
1642                         | def_bitfield_struct
1643                         | def_template
1644                         | def_operand_types
1645                         | def_operands
1646                         | output
1647                         | global_let
1648                         | split'''
1649
1650    # Utility function used by both invocations of splitting - explicit
1651    # 'split' keyword and split() function inside "let {{ }};" blocks.
1652    def split(self, sec, write=False):
1653        assert sec != 'header', "header cannot be split"
1654
1655        f = self.get_file(sec)
1656        self.splits[f] += 1
1657        s = '\n#endif\n#if __SPLIT == %u\n' % self.splits[f]
1658        if write:
1659            f.write(s)
1660        else:
1661            return s
1662
1663    # split output file to reduce compilation time
1664    def p_split(self, t):
1665        'split : SPLIT output_type SEMI'
1666        assert self.isa_name, "'split' not allowed before namespace decl"
1667
1668        self.split(t[2], True)
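
    # For illustration: each split bumps the per-file counter and emits a
    # marker of the form
    #
    #     #endif
    #     #if __SPLIT == 2
    #
    # into that output stream; each generated split .cc file then selects
    # one numbered __SPLIT region via the '#define __SPLIT n' written in
    # write_top_level_files().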
1669
1670    def p_output_type(self, t):
1671        '''output_type : DECODER
1672                       | HEADER
1673                       | EXEC'''
1674        t[0] = t[1]
1675
1676    # ISA name declaration looks like "namespace <foo>;"
1677    def p_name_decl(self, t):
1678        'name_decl : NAMESPACE ID SEMI'
1679        assert self.isa_name is None, "Only 1 namespace decl permitted"
1680        self.isa_name = t[2]
1681        self.namespace = t[2] + 'Inst'
1682
1683    # Output blocks 'output <foo> {{...}}' (C++ code blocks) are copied
1684    # directly to the appropriate output section.
1685
1686    # Massage output block by substituting in template definitions and
1687    # bit operators.  We handle '%'s embedded in the string that don't
1688    # indicate template substitutions (or CPU-specific symbols, which
1689    # get handled in GenCode) by doubling them first so that the
1690    # format operation will reduce them back to single '%'s.
1691    def process_output(self, s):
1692        s = self.protectNonSubstPercents(s)
1693        # protects cpu-specific symbols too
1694        s = self.protectCpuSymbols(s)
1695        return substBitOps(s % self.templateMap)
1696
1697    def p_output(self, t):
1698        'output : OUTPUT output_type CODELIT SEMI'
1699        kwargs = { t[2]+'_output' : self.process_output(t[3]) }
1700        GenCode(self, **kwargs).emit()
1701
1702    # global let blocks 'let {{...}}' (Python code blocks) are
1703    # executed directly when seen.  Note that these execute in a
1704    # special variable context 'exportContext' to prevent the code
1705    # from polluting this script's namespace.
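    #
    # A minimal, hypothetical example of such a block (real ones live in
    # the ISA description files, not here):
    #
    #     let {{
    #         header_output = 'static const int MyWidth = 32;\n'
    #     }};
    #
    # Whatever the block leaves in header_output, decoder_output,
    # exec_output or decode_block is gathered into a GenCode object and
    # emitted by the action below.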
1706    def p_global_let(self, t):
1707        'global_let : LET CODELIT SEMI'
1708        def _split(sec):
1709            return self.split(sec)
1710        self.updateExportContext()
1711        self.exportContext["header_output"] = ''
1712        self.exportContext["decoder_output"] = ''
1713        self.exportContext["exec_output"] = ''
1714        self.exportContext["decode_block"] = ''
1715        self.exportContext["split"] = _split
1716        split_setup = '''
1717def wrap(func):
1718    def split(sec):
1719        globals()[sec + '_output'] += func(sec)
1720    return split
1721split = wrap(split)
1722del wrap
1723'''
1724        # This tricky setup (immediately above) allows us to just write
1725        # (e.g.) "split('exec')" in the Python code and the split #ifdef's
1726        # will automatically be added to the exec_output variable. The inner
1727        # Python execution environment doesn't know about the split points,
1728        # so we carefully inject and wrap a closure that can retrieve the
1729        # next split's #define from the parser and add it to the current
1730        # emission-in-progress.
1731        try:
1732            exec split_setup+fixPythonIndentation(t[2]) in self.exportContext
1733        except Exception, exc:
1734            if debug:
1735                raise
1736            error(t.lineno(1), 'In global let block: %s' % exc)
1737        GenCode(self,
1738                header_output=self.exportContext["header_output"],
1739                decoder_output=self.exportContext["decoder_output"],
1740                exec_output=self.exportContext["exec_output"],
1741                decode_block=self.exportContext["decode_block"]).emit()
1742
1743    # Define the mapping from operand type extensions to C++ types and
1744    # bit widths (stored in operandTypeMap).
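    #
    # A hypothetical sketch of the block this rule consumes (the real map
    # comes from the ISA description):
    #
    #     def operand_types {{
    #         'sw' : 'int32_t',
    #         'ud' : 'uint64_t'
    #     }};
    #
    # The body is eval'd as a Python dict literal mapping each operand
    # type extension to its C++ type name.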
1745    def p_def_operand_types(self, t):
1746        'def_operand_types : DEF OPERAND_TYPES CODELIT SEMI'
1747        try:
1748            self.operandTypeMap = eval('{' + t[3] + '}')
1749        except Exception, exc:
1750            if debug:
1751                raise
1752            error(t.lineno(1),
1753                  'In def operand_types: %s' % exc)
1754
1755    # Define the mapping from operand names to operand classes and
1756    # other traits.  Stored in operandNameMap.
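    # A single hypothetical entry, to show the tuple shape that
    # buildOperandNameMap() unpacks:
    #
    #     def operands {{
    #         'Ra': ('IntReg', 'ud', 'RA', 'IsInteger', 1)
    #     }};
    #
    # i.e. (base class, default type extension, register spec, flags,
    # sort priority), optionally followed by read/write code and
    # read/write predicates.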
1757    def p_def_operands(self, t):
1758        'def_operands : DEF OPERANDS CODELIT SEMI'
1759        if not hasattr(self, 'operandTypeMap'):
1760            error(t.lineno(1),
1761                  'error: operand types must be defined before operands')
1762        try:
1763            user_dict = eval('{' + t[3] + '}', self.exportContext)
1764        except Exception, exc:
1765            if debug:
1766                raise
1767            error(t.lineno(1), 'In def operands: %s' % exc)
1768        self.buildOperandNameMap(user_dict, t.lexer.lineno)
1769
1770    # A bitfield definition looks like:
1771    # 'def [signed] bitfield <ID> [<first>:<last>]'
1772    # This generates a preprocessor macro in the output file.
1773    def p_def_bitfield_0(self, t):
1774        'def_bitfield : DEF opt_signed BITFIELD ID LESS INTLIT COLON INTLIT GREATER SEMI'
1775        expr = 'bits(machInst, %2d, %2d)' % (t[6], t[8])
1776        if (t[2] == 'signed'):
1777            expr = 'sext<%d>(%s)' % (t[6] - t[8] + 1, expr)
1778        hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
1779        GenCode(self, header_output=hash_define).emit()
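
    # For example (field name chosen purely for illustration),
    #
    #     def bitfield OPCODE <31:26>;
    #
    # emits the header macro
    #
    #     #undef OPCODE
    #     #define OPCODE  bits(machInst, 31, 26)
    #
    # and the 'signed' variant wraps the same expression in sext<6>(...).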
1780
1781    # alternate form for single bit: 'def [signed] bitfield <ID> [<bit>]'
1782    def p_def_bitfield_1(self, t):
1783        'def_bitfield : DEF opt_signed BITFIELD ID LESS INTLIT GREATER SEMI'
1784        expr = 'bits(machInst, %2d, %2d)' % (t[6], t[6])
1785        if (t[2] == 'signed'):
1786            expr = 'sext<%d>(%s)' % (1, expr)
1787        hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
1788        GenCode(self, header_output=hash_define).emit()
1789
1790    # alternate form for structure member: 'def bitfield <ID> <ID>'
1791    def p_def_bitfield_struct(self, t):
1792        'def_bitfield_struct : DEF opt_signed BITFIELD ID id_with_dot SEMI'
1793        if (t[2] != ''):
1794            error(t.lineno(1),
1795                  'error: structure bitfields are always unsigned.')
1796        expr = 'machInst.%s' % t[5]
1797        hash_define = '#undef %s\n#define %s\t%s\n' % (t[4], t[4], expr)
1798        GenCode(self, header_output=hash_define).emit()
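
    # e.g. (hypothetical names) 'def bitfield MODE mode.fp;' expands to
    # '#define MODE machInst.mode.fp' in the header.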
1799
1800    def p_id_with_dot_0(self, t):
1801        'id_with_dot : ID'
1802        t[0] = t[1]
1803
1804    def p_id_with_dot_1(self, t):
1805        'id_with_dot : ID DOT id_with_dot'
1806        t[0] = t[1] + t[2] + t[3]
1807
1808    def p_opt_signed_0(self, t):
1809        'opt_signed : SIGNED'
1810        t[0] = t[1]
1811
1812    def p_opt_signed_1(self, t):
1813        'opt_signed : empty'
1814        t[0] = ''
1815
1816    def p_def_template(self, t):
1817        'def_template : DEF TEMPLATE ID CODELIT SEMI'
1818        if t[3] in self.templateMap:
1819            print "warning: template %s already defined" % t[3]
1820        self.templateMap[t[3]] = Template(self, t[4])
1821
1822    # An instruction format definition looks like
1823    # "def format <fmt>(<params>) {{...}};"
1824    def p_def_format(self, t):
1825        'def_format : DEF FORMAT ID LPAREN param_list RPAREN CODELIT SEMI'
1826        (id, params, code) = (t[3], t[5], t[7])
1827        self.defFormat(id, params, code, t.lexer.lineno)
1828
1829    # The formal parameter list for an instruction format is a
1830    # possibly empty list of comma-separated parameters.  Positional
1831    # (standard, non-keyword) parameters must come first, followed by
1832    # keyword parameters, followed by a '*foo' parameter that gets
1833    # excess positional arguments (as in Python).  Each of these three
1834    # parameter categories is optional.
1835    #
1836    # Note that we do not support the '**foo' parameter for collecting
1837    # otherwise undefined keyword args.  Otherwise the parameter list
1838    # is (I believe) identical to what is supported in Python.
1839    #
1840    # The param list is built up as a flat list of strings: bare names for
1841    # positional params, 'name = default-repr' strings for keyword params,
1842    # and '*name' for the excess-args parameter.
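    #
    # For instance, a (hypothetical) format header such as
    #
    #     def format LoadStore(code, mem_flags = [], *opt_flags) {{ ... }};
    #
    # yields the parameter list ['code', 'mem_flags = []', '*opt_flags'].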
1843    def p_param_list_0(self, t):
1844        'param_list : positional_param_list COMMA nonpositional_param_list'
1845        t[0] = t[1] + t[3]
1846
1847    def p_param_list_1(self, t):
1848        '''param_list : positional_param_list
1849                      | nonpositional_param_list'''
1850        t[0] = t[1]
1851
1852    def p_positional_param_list_0(self, t):
1853        'positional_param_list : empty'
1854        t[0] = []
1855
1856    def p_positional_param_list_1(self, t):
1857        'positional_param_list : ID'
1858        t[0] = [t[1]]
1859
1860    def p_positional_param_list_2(self, t):
1861        'positional_param_list : positional_param_list COMMA ID'
1862        t[0] = t[1] + [t[3]]
1863
1864    def p_nonpositional_param_list_0(self, t):
1865        'nonpositional_param_list : keyword_param_list COMMA excess_args_param'
1866        t[0] = t[1] + t[3]
1867
1868    def p_nonpositional_param_list_1(self, t):
1869        '''nonpositional_param_list : keyword_param_list
1870                                    | excess_args_param'''
1871        t[0] = t[1]
1872
1873    def p_keyword_param_list_0(self, t):
1874        'keyword_param_list : keyword_param'
1875        t[0] = [t[1]]
1876
1877    def p_keyword_param_list_1(self, t):
1878        'keyword_param_list : keyword_param_list COMMA keyword_param'
1879        t[0] = t[1] + [t[3]]
1880
1881    def p_keyword_param(self, t):
1882        'keyword_param : ID EQUALS expr'
1883        t[0] = t[1] + ' = ' + repr(t[3])
1884
1885    def p_excess_args_param(self, t):
1886        'excess_args_param : ASTERISK ID'
1887        # Just concatenate them: '*ID'.  Wrap in list to be consistent
1888        # with positional_param_list and keyword_param_list.
1889        t[0] = [t[1] + t[2]]
1890
1891    # End of format definition-related rules.
1892    ##############
1893
1894    #
1895    # A decode block looks like:
1896    #       decode <field1> [, <field2>]* [default <inst>] { ... }
1897    #
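    # A small, purely illustrative example (field, format and mnemonic
    # names are hypothetical):
    #
    #     decode OPCODE default Unknown::unknown() {
    #         0x0: IntOp::add({{ Rc = Ra + Rb; }});
    #     }
    #
    # Each label becomes a 'case' in the generated switch on the decoded
    # field, and the default instruction supplies the 'default:' arm.
    #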
1898    def p_top_level_decode_block(self, t):
1899        'top_level_decode_block : decode_block'
1900        codeObj = t[1]
1901        codeObj.wrap_decode_block('''
1902StaticInstPtr
1903%(isa_name)s::Decoder::decodeInst(%(isa_name)s::ExtMachInst machInst)
1904{
1905    using namespace %(namespace)s;
1906''' % self, '}')
1907
1908        codeObj.emit()
1909
1910    def p_decode_block(self, t):
1911        'decode_block : DECODE ID opt_default LBRACE decode_stmt_list RBRACE'
1912        default_defaults = self.defaultStack.pop()
1913        codeObj = t[5]
1914        # use the "default defaults" only if there was no explicit
1915        # default statement in decode_stmt_list
1916        if not codeObj.has_decode_default:
1917            codeObj += default_defaults
1918        codeObj.wrap_decode_block('switch (%s) {\n' % t[2], '}\n')
1919        t[0] = codeObj
1920
1921    # The opt_default statement serves only to push the "default
1922    # defaults" onto defaultStack.  This value will be used by nested
1923    # decode blocks, and used and popped off when the current
1924    # decode_block is processed (in p_decode_block() above).
1925    def p_opt_default_0(self, t):
1926        'opt_default : empty'
1927        # no default specified: reuse the one currently at the top of
1928        # the stack
1929        self.defaultStack.push(self.defaultStack.top())
1930        # no meaningful value returned
1931        t[0] = None
1932
1933    def p_opt_default_1(self, t):
1934        'opt_default : DEFAULT inst'
1935        # push the new default
1936        codeObj = t[2]
1937        codeObj.wrap_decode_block('\ndefault:\n', 'break;\n')
1938        self.defaultStack.push(codeObj)
1939        # no meaningful value returned
1940        t[0] = None
1941
1942    def p_decode_stmt_list_0(self, t):
1943        'decode_stmt_list : decode_stmt'
1944        t[0] = t[1]
1945
1946    def p_decode_stmt_list_1(self, t):
1947        'decode_stmt_list : decode_stmt decode_stmt_list'
1948        if (t[1].has_decode_default and t[2].has_decode_default):
1949            error(t.lineno(1), 'Two default cases in decode block')
1950        t[0] = t[1] + t[2]
1951
1952    #
1953    # Decode statement rules
1954    #
1955    # There are four types of statements allowed in a decode block:
1956    # 1. Format blocks 'format <foo> { ... }'
1957    # 2. Nested decode blocks
1958    # 3. Instruction definitions.
1959    # 4. C preprocessor directives.
1960
1961
1962    # Preprocessor directives found in a decode statement list are
1963    # passed through to the output, replicated to all of the output
1964    # code streams.  This works well for ifdefs, so we can ifdef out
1965    # both the declarations and the decode cases generated by an
1966    # instruction definition.  Handling them as part of the grammar
1967    # makes it easy to keep them in the right place with respect to
1968    # the code generated by the other statements.
1969    def p_decode_stmt_cpp(self, t):
1970        'decode_stmt : CPPDIRECTIVE'
1971        t[0] = GenCode(self, t[1], t[1], t[1], t[1])
1972
1973    # A format block 'format <foo> { ... }' sets the default
1974    # instruction format used to handle instruction definitions inside
1975    # the block.  This format can be overridden by using an explicit
1976    # format on the instruction definition or with a nested format
1977    # block.
1978    def p_decode_stmt_format(self, t):
1979        'decode_stmt : FORMAT push_format_id LBRACE decode_stmt_list RBRACE'
1980        # The format will be pushed on the stack when 'push_format_id'
1981        # is processed (see below).  Once the parser has recognized
1982        # the full production (through the right brace), we're done
1983        # with the format, so now we can pop it.
1984        self.formatStack.pop()
1985        t[0] = t[4]
1986
1987    # This rule exists so we can set the current format (& push the
1988    # stack) when we recognize the format name part of the format
1989    # block.
1990    def p_push_format_id(self, t):
1991        'push_format_id : ID'
1992        try:
1993            self.formatStack.push(self.formatMap[t[1]])
1994            t[0] = ('', '// format %s' % t[1])
1995        except KeyError:
1996            error(t.lineno(1), 'instruction format "%s" not defined.' % t[1])
1997
1998    # Nested decode block: if the value of the current field matches
1999    # the specified constant(s), do a nested decode on some other field.
2000    def p_decode_stmt_decode(self, t):
2001        'decode_stmt : case_list COLON decode_block'
2002        case_list = t[1]
2003        codeObj = t[3]
2004        # just wrap the decoding code from the block as a case in the
2005        # outer switch statement.
2006        codeObj.wrap_decode_block('\n%s\n' % ''.join(case_list))
2007        codeObj.has_decode_default = (case_list == ['default:'])
2008        t[0] = codeObj
2009
2010    # Instruction definition (finally!).
2011    def p_decode_stmt_inst(self, t):
2012        'decode_stmt : case_list COLON inst SEMI'
2013        case_list = t[1]
2014        codeObj = t[3]
2015        codeObj.wrap_decode_block('\n%s' % ''.join(case_list), 'break;\n')
2016        codeObj.has_decode_default = (case_list == ['default:'])
2017        t[0] = codeObj
2018
2019    # The constant list for a decode case label must be non-empty, and must
2020    # either be the keyword 'default', or made up of one or more
2021    # comma-separated integer literals or strings which evaluate to
2022    # constants when compiled as C++.
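    # For example, the label list '0x1, 0x2:' expands to
    # 'case 0x1: case 0x2: ', and integer literals of 2**32 or more are
    # wrapped as 'case ULL(...):' by prep_int_lit_case_label() below.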
2023    def p_case_list_0(self, t):
2024        'case_list : DEFAULT'
2025        t[0] = ['default:']
2026
2027    def prep_int_lit_case_label(self, lit):
2028        if lit >= 2**32:
2029            return 'case ULL(%#x): ' % lit
2030        else:
2031            return 'case %#x: ' % lit
2032
2033    def prep_str_lit_case_label(self, lit):
2034        return 'case %s: ' % lit
2035
2036    def p_case_list_1(self, t):
2037        'case_list : INTLIT'
2038        t[0] = [self.prep_int_lit_case_label(t[1])]
2039
2040    def p_case_list_2(self, t):
2041        'case_list : STRLIT'
2042        t[0] = [self.prep_str_lit_case_label(t[1])]
2043
2044    def p_case_list_3(self, t):
2045        'case_list : case_list COMMA INTLIT'
2046        t[0] = t[1]
2047        t[0].append(self.prep_int_lit_case_label(t[3]))
2048
2049    def p_case_list_4(self, t):
2050        'case_list : case_list COMMA STRLIT'
2051        t[0] = t[1]
2052        t[0].append(self.prep_str_lit_case_label(t[3]))
2053
2054    # Define an instruction using the current instruction format
2055    # (specified by an enclosing format block).
2056    # "<mnemonic>(<args>)"
2057    def p_inst_0(self, t):
2058        'inst : ID LPAREN arg_list RPAREN'
2059        # Pass the ID and arg list to the current format class to deal with.
2060        currentFormat = self.formatStack.top()
2061        codeObj = currentFormat.defineInst(self, t[1], t[3], t.lexer.lineno)
2062        args = ','.join(map(str, t[3]))
2063        args = re.sub('(?m)^', '//', args)
2064        args = re.sub('^//', '', args)
2065        comment = '\n// %s::%s(%s)\n' % (currentFormat.id, t[1], args)
2066        codeObj.prepend_all(comment)
2067        t[0] = codeObj
2068
2069    # Define an instruction using an explicitly specified format:
2070    # "<fmt>::<mnemonic>(<args>)"
2071    def p_inst_1(self, t):
2072        'inst : ID DBLCOLON ID LPAREN arg_list RPAREN'
2073        try:
2074            format = self.formatMap[t[1]]
2075        except KeyError:
2076            error(t.lineno(1), 'instruction format "%s" not defined.' % t[1])
2077
2078        codeObj = format.defineInst(self, t[3], t[5], t.lexer.lineno)
2079        comment = '\n// %s::%s(%s)\n' % (t[1], t[3], t[5])
2080        codeObj.prepend_all(comment)
2081        t[0] = codeObj
2082
2083    # The arg list generates a tuple, where the first element is a
2084    # list of the positional args and the second element is a dict
2085    # containing the keyword args.
2086    def p_arg_list_0(self, t):
2087        'arg_list : positional_arg_list COMMA keyword_arg_list'
2088        t[0] = ( t[1], t[3] )
2089
2090    def p_arg_list_1(self, t):
2091        'arg_list : positional_arg_list'
2092        t[0] = ( t[1], {} )
2093
2094    def p_arg_list_2(self, t):
2095        'arg_list : keyword_arg_list'
2096        t[0] = ( [], t[1] )
2097
2098    def p_positional_arg_list_0(self, t):
2099        'positional_arg_list : empty'
2100        t[0] = []
2101
2102    def p_positional_arg_list_1(self, t):
2103        'positional_arg_list : expr'
2104        t[0] = [t[1]]
2105
2106    def p_positional_arg_list_2(self, t):
2107        'positional_arg_list : positional_arg_list COMMA expr'
2108        t[0] = t[1] + [t[3]]
2109
2110    def p_keyword_arg_list_0(self, t):
2111        'keyword_arg_list : keyword_arg'
2112        t[0] = t[1]
2113
2114    def p_keyword_arg_list_1(self, t):
2115        'keyword_arg_list : keyword_arg_list COMMA keyword_arg'
2116        t[0] = t[1]
2117        t[0].update(t[3])
2118
2119    def p_keyword_arg(self, t):
2120        'keyword_arg : ID EQUALS expr'
2121        t[0] = { t[1] : t[3] }
2122
2123    #
2124    # Basic expressions.  These constitute the argument values of
2125    # "function calls" (i.e. instruction definitions in the decode
2126    # block) and default values for formal parameters of format
2127    # functions.
2128    #
2129    # Right now, these are either strings, integers, or (recursively)
2130    # lists of exprs (using Python square-bracket list syntax).  Note
2131    # that bare identifiers are treated as string constants here (since
2132    # there isn't really a variable namespace to refer to).
2133    #
2134    def p_expr_0(self, t):
2135        '''expr : ID
2136                | INTLIT
2137                | STRLIT
2138                | CODELIT'''
2139        t[0] = t[1]
2140
2141    def p_expr_1(self, t):
2142        '''expr : LBRACKET list_expr RBRACKET'''
2143        t[0] = t[2]
2144
2145    def p_list_expr_0(self, t):
2146        'list_expr : expr'
2147        t[0] = [t[1]]
2148
2149    def p_list_expr_1(self, t):
2150        'list_expr : list_expr COMMA expr'
2151        t[0] = t[1] + [t[3]]
2152
2153    def p_list_expr_2(self, t):
2154        'list_expr : empty'
2155        t[0] = []
2156
2157    #
2158    # Empty production... use in other rules for readability.
2159    #
2160    def p_empty(self, t):
2161        'empty :'
2162        pass
2163
2164    # Parse error handler.  Note that the argument here is the
2165    # offending *token*, not a grammar symbol (hence the need to use
2166    # t.value)
2167    def p_error(self, t):
2168        if t:
2169            error(t.lexer.lineno, "syntax error at '%s'" % t.value)
2170        else:
2171            error("unknown syntax error")
2172
2173    # END OF GRAMMAR RULES
2174
2175    def updateExportContext(self):
2176
2177        # create a continuation that allows us to grab the current parser
2178        def wrapInstObjParams(*args):
2179            return InstObjParams(self, *args)
2180        self.exportContext['InstObjParams'] = wrapInstObjParams
2181        self.exportContext.update(self.templateMap)
2182
2183    def defFormat(self, id, params, code, lineno):
2184        '''Define a new format'''
2185
2186        # make sure we haven't already defined this one
2187        if id in self.formatMap:
2188            error(lineno, 'format %s redefined.' % id)
2189
2190        # create new object and store in global map
2191        self.formatMap[id] = Format(id, params, code)
2192
2193    def expandCpuSymbolsToDict(self, template):
2194        '''Expand template with CPU-specific references into a
2195        dictionary with an entry for each CPU model name.  The entry
2196        key is the model name and the corresponding value is the
2197        template with the CPU-specific refs substituted for that
2198        model.'''
2199
2200        # Protect '%'s that don't go with CPU-specific terms
2201        t = re.sub(r'%(?!\(CPU_)', '%%', template)
2202        result = {}
2203        for cpu in self.cpuModels:
2204            result[cpu.name] = t % cpu.strings
2205        return result
2206
2207    def expandCpuSymbolsToString(self, template):
2208        '''*If* the template has CPU-specific references, return a
2209        single string containing a copy of the template for each CPU
2210        model with the corresponding values substituted in.  If the
2211        template has no CPU-specific references, it is returned
2212        unmodified.'''
2213
2214        if template.find('%(CPU_') != -1:
2215            return reduce(lambda x,y: x+y,
2216                          self.expandCpuSymbolsToDict(template).values())
2217        else:
2218            return template
2219
2220    def protectCpuSymbols(self, template):
2221        '''Protect CPU-specific references by doubling the
2222        corresponding '%'s (in preparation for substituting a different
2223        set of references into the template).'''
2224
2225        return re.sub(r'%(?=\(CPU_)', '%%', template)
2226
2227    def protectNonSubstPercents(self, s):
2228        '''Protect any non-dict-substitution '%'s in a format string
2229        (i.e. those not followed by '(')'''
2230
2231        return re.sub(r'%(?!\()', '%%', s)
2232
2233    def buildOperandNameMap(self, user_dict, lineno):
2234        operand_name = {}
2235        for op_name, val in user_dict.iteritems():
2236
2237            # Check if extra attributes have been specified.
2238            if len(val) > 9:
2239                error(lineno, 'error: too many attributes for operand "%s"' %
2240                      base_cls_name)
2241
2242            # Pad val with None in case optional args are missing
2243            val += (None, None, None, None)
2244            base_cls_name, dflt_ext, reg_spec, flags, sort_pri, \
2245            read_code, write_code, read_predicate, write_predicate = val[:9]
2246
2247            # Canonical flag structure is a triple of lists, where each list
2248            # indicates the set of flags implied by this operand always, when
2249            # used as a source, and when used as a dest, respectively.
2250            # For simplicity this can be initialized using a variety of fairly
2251            # obvious shortcuts; we convert these to canonical form here.
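            # For example (flag names purely illustrative):
            #     'IsInteger'               -> (['IsInteger'], [], [])
            #     ['IsInteger', 'IsFP']     -> (['IsInteger', 'IsFP'], [], [])
            #     ('IsCtrl', 'IsLoad', 'IsStore')
            #                               -> (['IsCtrl'], ['IsLoad'], ['IsStore'])
            # (each element of the tuple form is run through makeList()).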
2252            if not flags:
2253                # no flags specified (e.g., 'None')
2254                flags = ( [], [], [] )
2255            elif isinstance(flags, str):
2256                # a single flag: assumed to be unconditional
2257                flags = ( [ flags ], [], [] )
2258            elif isinstance(flags, list):
2259                # a list of flags: also assumed to be unconditional
2260                flags = ( flags, [], [] )
2261            elif isinstance(flags, tuple):
2262                # it's a tuple: it should be a triple,
2263                # but each item could be a single string or a list
2264                (uncond_flags, src_flags, dest_flags) = flags
2265                flags = (makeList(uncond_flags),
2266                         makeList(src_flags), makeList(dest_flags))
2267
2268            # Accumulate attributes of new operand class in tmp_dict
2269            tmp_dict = {}
2270            attrList = ['reg_spec', 'flags', 'sort_pri',
2271                        'read_code', 'write_code',
2272                        'read_predicate', 'write_predicate']
2273            if dflt_ext:
2274                dflt_ctype = self.operandTypeMap[dflt_ext]
2275                attrList.extend(['dflt_ctype', 'dflt_ext'])
2276            for attr in attrList:
2277                tmp_dict[attr] = eval(attr)
2278            tmp_dict['base_name'] = op_name
2279
2280            # New class name will be e.g. "IntReg_Ra"
2281            cls_name = base_cls_name + '_' + op_name
2282            # Evaluate string arg to get class object.  Note that the
2283            # actual base class for "IntReg" is "IntRegOperand", i.e. we
2284            # have to append "Operand".
2285            try:
2286                base_cls = eval(base_cls_name + 'Operand')
2287            except NameError:
2288                error(lineno,
2289                      'error: unknown operand base class "%s"' % base_cls_name)
2290            # The following statement creates a new class called
2291            # <cls_name> as a subclass of <base_cls> with the attributes
2292            # in tmp_dict, just as if we evaluated a class declaration.
2293            operand_name[op_name] = type(cls_name, (base_cls,), tmp_dict)
2294
2295        self.operandNameMap = operand_name
2296
2297        # Define operand variables.
2298        operands = user_dict.keys()
2299        extensions = self.operandTypeMap.keys()
2300
2301        operandsREString = r'''
2302        (?<!\w)      # neg. lookbehind assertion: prevent partial matches
2303        ((%s)(?:_(%s))?)   # match: operand with optional '_' then suffix
2304        (?!\w)       # neg. lookahead assertion: prevent partial matches
2305        ''' % (string.join(operands, '|'), string.join(extensions, '|'))
2306
2307        self.operandsRE = re.compile(operandsREString, re.MULTILINE|re.VERBOSE)
2308
2309        # Same as operandsREString, but extension is mandatory, and only two
2310        # groups are returned (base and ext, not full name as above).
2311        # Used for substituting '_' for '.' to make C++ identifiers.
2312        operandsWithExtREString = r'(?<!\w)(%s)_(%s)(?!\w)' \
2313            % (string.join(operands, '|'), string.join(extensions, '|'))
2314
2315        self.operandsWithExtRE = \
2316            re.compile(operandsWithExtREString, re.MULTILINE)
2317
2318    def substMungedOpNames(self, code):
2319        '''Munge operand names in code string to make legal C++
2320        variable names.  This means getting rid of the type extension
2321        if any.  (The result matches the base_name attribute of Operand.)'''
2322        return self.operandsWithExtRE.sub(r'\1', code)
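
    # e.g., assuming 'Ra' and 'Rb' are defined operands and 'sw' is a
    # defined type extension, 'Ra = Rb_sw + 1;' becomes 'Ra = Rb + 1;'.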
2323
2324    def mungeSnippet(self, s):
2325        '''Fix up code snippets for final substitution in templates.'''
2326        if isinstance(s, str):
2327            return self.substMungedOpNames(substBitOps(s))
2328        else:
2329            return s
2330
2331    def open(self, name, bare=False):
2332        '''Open the output file for writing and include scary warning.'''
2333        filename = os.path.join(self.output_dir, name)
2334        f = open(filename, 'w')
2335        if f:
2336            if not bare:
2337                f.write(ISAParser.scaremonger_template % self)
2338        return f
2339
2340    def update(self, file, contents):
2341        '''Update the output file only.  Scons should handle the case when
2342        the new contents are unchanged using its built-in hash feature.'''
2343        f = self.open(file)
2344        f.write(contents)
2345        f.close()
2346
2347    # This regular expression matches '##include' directives
2348    includeRE = re.compile(r'^\s*##include\s+"(?P<filename>[^"]*)".*$',
2349                           re.MULTILINE)
2350
2351    def replace_include(self, matchobj, dirname):
2352        """Function to replace a matched '##include' directive with the
2353        contents of the specified file (with nested ##includes
2354        replaced recursively).  'matchobj' is an re match object
2355        (from a match of includeRE) and 'dirname' is the directory
2356        relative to which the file path should be resolved."""
2357
2358        fname = matchobj.group('filename')
2359        full_fname = os.path.normpath(os.path.join(dirname, fname))
2360        contents = '##newfile "%s"\n%s\n##endfile\n' % \
2361                   (full_fname, self.read_and_flatten(full_fname))
2362        return contents
2363
2364    def read_and_flatten(self, filename):
2365        """Read a file and recursively flatten nested '##include' files."""
2366
2367        current_dir = os.path.dirname(filename)
2368        try:
2369            contents = open(filename).read()
2370        except IOError:
2371            error('Error including file "%s"' % filename)
2372
2373        self.fileNameStack.push(LineTracker(filename))
2374
2375        # Find any includes and include them
2376        def replace(matchobj):
2377            return self.replace_include(matchobj, current_dir)
2378        contents = self.includeRE.sub(replace, contents)
2379
2380        self.fileNameStack.pop()
2381        return contents
2382
2383    AlreadyGenerated = {}
2384
2385    def _parse_isa_desc(self, isa_desc_file):
2386        '''Read in and parse the ISA description.'''
2387
2388        # The build system can end up running the ISA parser twice: once to
2389        # finalize the build dependencies, and then to actually generate
2390        # the files it expects (in src/arch/$ARCH/generated). This code
2391        # doesn't do anything different either time, however; the SCons
2392        # invocations just expect different things. Since this code runs
2393        # within SCons, we can just remember that we've already run and
2394        # not perform a completely unnecessary run, since the ISA parser's
2395        # effect is idempotent.
2396        if isa_desc_file in ISAParser.AlreadyGenerated:
2397            return
2398
2399        # grab the last three path components of isa_desc_file
2400        self.filename = '/'.join(isa_desc_file.split('/')[-3:])
2401
2402        # Read file and (recursively) all included files into a string.
2403        # PLY requires that the input be in a single string so we have to
2404        # do this up front.
2405        isa_desc = self.read_and_flatten(isa_desc_file)
2406
2407        # Initialize lineno tracker
2408        self.lex.lineno = LineTracker(isa_desc_file)
2409
2410        # Parse.
2411        self.parse_string(isa_desc)
2412
2413        ISAParser.AlreadyGenerated[isa_desc_file] = None
2414
2415    def parse_isa_desc(self, *args, **kwargs):
2416        try:
2417            self._parse_isa_desc(*args, **kwargs)
2418        except ISAParserError, e:
2419            print backtrace(self.fileNameStack)
2420            print "At %s:" % e.lineno
2421            print e
2422            sys.exit(1)
2423
2424# Called as script: get args from command line.
2425# Args are: <isa desc file> <output dir>
2426if __name__ == '__main__':
2427    ISAParser(sys.argv[2]).parse_isa_desc(sys.argv[1])
2428