99a100
> import SCons.Tool
101,104c102,105
< isa_scanner = SCons.Scanner.Classic("ISAScan",
< [".isa", ".ISA"],
< "SRCDIR",
< r'^\s*##include\s+"([\w/.-]*)"')
---
> scanner = SCons.Scanner.Classic("ISAScan",
> [".isa", ".ISA"],
> "SRCDIR",
> r'^\s*##include\s+"([\w/.-]*)"')
106c107
< env.Append(SCANNERS = isa_scanner)
---
> env.Append(SCANNERS=scanner)
107a109,111
> # Tell scons that when it sees a cc.inc file, it should scan it for includes.
> SCons.Tool.SourceFileScanner.add_scanner('.cc.inc', SCons.Tool.CScanner)
>
113c117,118
< isa_parser = File('isa_parser.py')
---
> parser_py = File('isa_parser.py')
> micro_asm_py = File('micro_asm.py')
115,147d119
< # The emitter patches up the sources & targets to include the
< # autogenerated files as targets and isa parser itself as a source.
< def isa_desc_emitter(target, source, env):
< # List the isa parser as a source.
< source += [
< isa_parser,
< Value("ExecContext"),
< ]
<
< # Specify different targets depending on if we're running the ISA
< # parser for its dependency information, or for the generated files.
< # (As an optimization, the ISA parser detects the useless second run
< # and skips doing any work, if the first run was performed, since it
< # always generates all its files). The way we track this in SCons is the
< # <arch>_isa_outputs value in the environment (env). If it's unset, we
< # don't know what the dependencies are so we ask for generated/inc.d to
< # be generated so they can be acquired. If we know what they are, then
< # it's because we've already processed inc.d and then claim that our
< # outputs (targets) will be thus.
< isa = env['TARGET_ISA']
< key = '%s_isa_outputs' % isa
< if key in env:
< targets = [ os.path.join('generated', f) for f in env[key] ]
< else:
< targets = [ os.path.join('generated','inc.d') ]
<
< def prefix(s):
< return os.path.join(target[0].dir.up().abspath, s)
<
< return [ prefix(t) for t in targets ], source
<
< ARCH_DIR = Dir('.')
<
151,153c123,125
< def isa_desc_action_func(target, source, env):
< # Add the current directory to the system path so we can import files
< sys.path[0:0] = [ ARCH_DIR.srcnode().abspath ]
---
> def run_parser(target, source, env):
> # Add the current directory to the system path so we can import files.
> sys.path[0:0] = [ parser_py.dir.abspath ]
156,157d127
< # Skip over the ISA description itself and the parser to the CPU models.
< models = [ s.get_contents() for s in source[2:] ]
160d129
< isa_desc_action = MakeAction(isa_desc_action_func, Transform("ISA DESC", 1))
162,164c131
< # Also include the CheckerCPU as one of the models if it is being
< # enabled via command line.
< isa_desc_builder = Builder(action=isa_desc_action, emitter=isa_desc_emitter)
---
> desc_action = MakeAction(run_parser, Transform("ISA DESC", 1))
166c133
< env.Append(BUILDERS = { 'ISADesc' : isa_desc_builder })
---
> IsaDescBuilder = Builder(action=desc_action)
168,175d134
< # The ISA is generated twice: the first time to find out what it generates,
< # and the second time to make scons happy by telling the ISADesc builder
< # what it will make before it builds it.
< def scan_isa_deps(target, source, env):
< # Process dependency file generated by the ISA parser --
< # add the listed files to the dependency tree of the build.
< source = source[0]
< archbase = source.dir.up().path
177,182c136,139
< try:
< depfile = open(source.abspath, 'r')
< except:
< print "scan_isa_deps: Can't open ISA deps file '%s' in %s" % \
< (source.path,os.getcwd())
< raise
---
> # ISAs should use this function to set up an IsaDescBuilder and not try to
> # set one up manually.
> def ISADesc(desc, decoder_splits=1, exec_splits=1):
> '''Set up a builder for an ISA description.
184,192c141,144
< # Scan through the lines
< targets = {}
< for line in depfile:
< # Read the dependency line with the format
< # <target file>: [ <dependent file>* ]
< m = re.match(r'^\s*([^:]+\.([^\.:]+))\s*:\s*(.*)', line)
< assert(m)
< targ, extn = m.group(1,2)
< deps = m.group(3).split()
---
> The decoder_splits and exec_splits parameters let us determine what
> files the isa parser is actually going to generate. This needs to match
> what files are actually generated, and there's no specific check for that
> right now.
194,198c146,151
< files = [ targ ] + deps
< for f in files:
< targets[f] = True
< # Eliminate unnecessary re-generation if we already generated it
< env.Precious(os.path.join(archbase, 'generated', f))
---
> If the parser itself is responsible for generating a list of its products
> and their dependencies, then we could use that output to set up the right
> dependencies. This is what we used to do. The problem is that scons
> fundamentally doesn't support using a build product to affect its graph
> of possible products, dependencies, builders, etc. There are a couple ways
> to work around that limitation.
200c153,159
< files = [ os.path.join(archbase, 'generated', f) for f in files ]
---
> One option is to compute dependencies while the build phase of scons is
> running. That method can be quite complicated and cumbersome, because we
> have to make sure our modifications are made before scons tries to
> consume them. There's also no guarantee that this mechanism will work since
> it subverts scons expectations and changes things behind its back. This
> was implemented previously and constrained the build's parallelism
> significantly.
202,205c161,166
< if extn == 'cc':
< Source(os.path.join(archbase,'generated', targ))
< depfile.close()
< env[env['TARGET_ISA'] + '_isa_outputs'] = targets.keys()
---
> Another option would be to recursively call scons to have it update the
> list of products/dependencies during the setup phase of this invocation of
> scons. The problem with that is that it would be very difficult to make
> the sub-invocation of scons observe the options passed to the primary one
> in all possible cases, or to even determine conclusively what the name of
> the scons executable is in the first place.
207,209c168,178
< isa = env.ISADesc(os.path.join(archbase,'isa','main.isa'))
< for t in targets:
< env.Depends('#all-isas', isa)
---
> Possible future changes to the isa parser might make it easier to
> determine what files it would generate, perhaps because there was a more
> direct correspondence between input files and output files. Or, if the
> parser could run quickly and determine what its output files would be
> without having to actually generate those files, then it could be run
> unconditionally without slowing down all builds or touching the output
> files unnecessarily.
> '''
> generated_dir = File(desc).dir.up().Dir('generated')
> def gen_file(name):
> return generated_dir.File(name)
211,213c180,182
< env.Append(BUILDERS = {'ScanISA' :
< Builder(action=MakeAction(scan_isa_deps,
< Transform("NEW DEPS", 1)))})
---
> gen = []
> def add_gen(name):
> gen.append(gen_file(name))
214a184,224
> # Tell scons about the various files the ISA parser will generate.
> add_gen('decoder-g.cc.inc')
> add_gen('decoder-ns.cc.inc')
> add_gen('decode-method.cc.inc')
>
> add_gen('decoder.hh')
> add_gen('decoder-g.hh.inc')
> add_gen('decoder-ns.hh.inc')
>
> add_gen('exec-g.cc.inc')
> add_gen('exec-ns.cc.inc')
>
> add_gen('max_inst_regs.hh')
>
>
> # These generated files are also top level sources.
> def source_gen(name):
> add_gen(name)
> Source(gen_file(name))
>
> source_gen('decoder.cc')
>
> if decoder_splits == 1:
> source_gen('inst-constrs.cc')
> else:
> for i in range(1, decoder_splits + 1):
> source_gen('inst-constrs-%d.cc' % i)
>
> if exec_splits == 1:
> source_gen('generic_cpu_exec.cc')
> else:
> for i in range(1, exec_splits + 1):
> source_gen('generic_cpu_exec_%d.cc' % i)
>
> # Actually create the builder.
> sources = [desc, parser_py, micro_asm_py]
> IsaDescBuilder(target=gen, source=sources, env=env)
> return gen
>
> Export('ISADesc')
>