SConstruct revision 11887:f08918a690cf
1# -*- mode:python -*-
2
3# Copyright (c) 2013, 2015, 2016 ARM Limited
4# All rights reserved.
5#
6# The license below extends only to copyright in the software and shall
7# not be construed as granting a license to any other intellectual
8# property including but not limited to intellectual property relating
9# to a hardware implementation of the functionality of the software
10# licensed hereunder.  You may use the software subject to the license
11# terms below provided that you ensure that this notice is replicated
12# unmodified and in its entirety in all distributions of the software,
13# modified or unmodified, in source code or in binary form.
14#
15# Copyright (c) 2011 Advanced Micro Devices, Inc.
16# Copyright (c) 2009 The Hewlett-Packard Development Company
17# Copyright (c) 2004-2005 The Regents of The University of Michigan
18# All rights reserved.
19#
20# Redistribution and use in source and binary forms, with or without
21# modification, are permitted provided that the following conditions are
22# met: redistributions of source code must retain the above copyright
23# notice, this list of conditions and the following disclaimer;
24# redistributions in binary form must reproduce the above copyright
25# notice, this list of conditions and the following disclaimer in the
26# documentation and/or other materials provided with the distribution;
27# neither the name of the copyright holders nor the names of its
28# contributors may be used to endorse or promote products derived from
29# this software without specific prior written permission.
30#
31# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
32# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
33# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
34# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
35# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
37# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
38# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
39# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
40# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
41# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
42#
43# Authors: Steve Reinhardt
44#          Nathan Binkert
45
46###################################################
47#
48# SCons top-level build description (SConstruct) file.
49#
50# While in this directory ('gem5'), just type 'scons' to build the default
51# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
52# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
53# the optimized full-system version).
54#
55# You can build gem5 in a different directory as long as there is a
56# 'build/<CONFIG>' somewhere along the target path.  The build system
57# expects that all configs under the same build directory are being
58# built for the same host system.
59#
60# Examples:
61#
62#   The following two commands are equivalent.  The '-u' option tells
63#   scons to search up the directory tree for this SConstruct file.
64#   % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
65#   % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
66#
67#   The following two commands are equivalent and demonstrate building
68#   in a directory outside of the source tree.  The '-C' option tells
69#   scons to chdir to the specified directory to find this SConstruct
70#   file.
71#   % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
72#   % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
73#
74# You can use 'scons -H' to print scons options.  If you're in this
75# 'gem5' directory (or use -u or -C to tell scons where to find this
76# file), you can use 'scons -h' to print all the gem5-specific build
77# options as well.
78#
79###################################################
80
81# Check for recent-enough Python and SCons versions.
82try:
83    # Really old versions of scons only take two options for the
84    # function, so check once without the revision and once with the
85    # revision; the first check will fail for versions other than
86    # 0.98, and the second will fail for 0.98.0.
87    EnsureSConsVersion(0, 98)
88    EnsureSConsVersion(0, 98, 1)
89except SystemExit, e:
90    print """
91For more details, see:
92    http://gem5.org/Dependencies
93"""
94    raise
95
96# We check the Python version early because python-config
97# requires python 2.5
98try:
99    EnsurePythonVersion(2, 5)
100except SystemExit, e:
101    print """
102You can use a non-default installation of the Python interpreter by
103rearranging your PATH so that scons finds the non-default 'python' and
104'python-config' first.
105
106For more details, see:
107    http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
108"""
109    raise
110
111# Global Python includes
112import itertools
113import os
114import re
115import shutil
116import subprocess
117import sys
118
119from os import mkdir, environ
120from os.path import abspath, basename, dirname, expanduser, normpath
121from os.path import exists,  isdir, isfile
122from os.path import join as joinpath, split as splitpath
123
124# SCons includes
125import SCons
126import SCons.Node
127
128extra_python_paths = [
129    Dir('src/python').srcnode().abspath, # gem5 includes
130    Dir('ext/ply').srcnode().abspath, # ply is used by several files
131    ]
132
133sys.path[1:1] = extra_python_paths
134
135from m5.util import compareVersions, readCommand
136from m5.util.terminal import get_termcap
137
138help_texts = {
139    "options" : "",
140    "global_vars" : "",
141    "local_vars" : ""
142}
143
144Export("help_texts")
145
146
147# There's a bug in scons in that (1) by default, the help texts from
148# AddOption() are supposed to be displayed when you type 'scons -h'
149# and (2) you can override the help displayed by 'scons -h' using the
150# Help() function, but these two features are incompatible: once
151# you've overridden the help text using Help(), there's no way to get
152# at the help texts from AddOptions.  See:
153#     http://scons.tigris.org/issues/show_bug.cgi?id=2356
154#     http://scons.tigris.org/issues/show_bug.cgi?id=2611
155# This hack lets us extract the help text from AddOptions and
156# re-inject it via Help().  Ideally someday this bug will be fixed and
157# we can just use AddOption directly.
158def AddLocalOption(*args, **kwargs):
159    col_width = 30
160
161    help = "  " + ", ".join(args)
162    if "help" in kwargs:
163        length = len(help)
164        if length >= col_width:
165            help += "\n" + " " * col_width
166        else:
167            help += " " * (col_width - length)
168        help += kwargs["help"]
169    help_texts["options"] += help + "\n"
170
171    AddOption(*args, **kwargs)
172
173AddLocalOption('--colors', dest='use_colors', action='store_true',
174               help="Add color to abbreviated scons output")
175AddLocalOption('--no-colors', dest='use_colors', action='store_false',
176               help="Don't add color to abbreviated scons output")
177AddLocalOption('--with-cxx-config', dest='with_cxx_config',
178               action='store_true',
179               help="Build with support for C++-based configuration")
180AddLocalOption('--default', dest='default', type='string', action='store',
181               help='Override which build_opts file to use for defaults')
182AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
183               help='Disable style checking hooks')
184AddLocalOption('--no-lto', dest='no_lto', action='store_true',
185               help='Disable Link-Time Optimization for the fast target')
186AddLocalOption('--update-ref', dest='update_ref', action='store_true',
187               help='Update test reference outputs')
188AddLocalOption('--verbose', dest='verbose', action='store_true',
189               help='Print full tool command lines')
190AddLocalOption('--without-python', dest='without_python',
191               action='store_true',
192               help='Build without Python configuration support')
193AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
194               action='store_true',
195               help='Disable linking against tcmalloc')
196AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
197               help='Build with Undefined Behavior Sanitizer if available')
198AddLocalOption('--with-asan', dest='with_asan', action='store_true',
199               help='Build with Address Sanitizer if available')
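# As an illustrative (hypothetical) invocation, these options are combined
# with a build target on the scons command line, e.g.:
#   % scons --with-asan --ignore-style build/ALPHA/gem5.debug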
200
201termcap = get_termcap(GetOption('use_colors'))
202
203########################################################################
204#
205# Set up the main build environment.
206#
207########################################################################
208
209# export TERM so that clang reports errors in color
210use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
211                 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
212                 'PYTHONPATH', 'RANLIB', 'SWIG', 'TERM' ])
213
214use_prefixes = [
215    "ASAN_",           # address sanitizer symbolizer path and settings
216    "CCACHE_",         # ccache (caching compiler wrapper) configuration
217    "CCC_",            # clang static analyzer configuration
218    "DISTCC_",         # distcc (distributed compiler wrapper) configuration
219    "INCLUDE_SERVER_", # distcc pump server settings
220    "M5",              # M5 configuration (e.g., path to kernels)
221    ]
222
223use_env = {}
224for key,val in sorted(os.environ.iteritems()):
225    if key in use_vars or \
226            any([key.startswith(prefix) for prefix in use_prefixes]):
227        use_env[key] = val
228
229# Tell scons to avoid implicit command dependencies to avoid issues
230# with the param wrappers being compiled twice (see
231# http://scons.tigris.org/issues/show_bug.cgi?id=2811)
232main = Environment(ENV=use_env, IMPLICIT_COMMAND_DEPENDENCIES=0)
233main.Decider('MD5-timestamp')
234main.root = Dir(".")         # The current directory (where this file lives).
235main.srcdir = Dir("src")     # The source directory
236
237main_dict_keys = main.Dictionary().keys()
238
239# Check that we have a C/C++ compiler
240if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
241    print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
242    Exit(1)
243
244# Check that swig is present
245if not 'SWIG' in main_dict_keys:
246    print "swig is not installed (package swig on Ubuntu and RedHat)"
247    Exit(1)
248
249# add useful python code to the PYTHONPATH so it can be used by
250# subprocesses as well
251main.AppendENVPath('PYTHONPATH', extra_python_paths)
252
253########################################################################
254#
255# Mercurial Stuff.
256#
257# If the gem5 directory is a mercurial repository, we should do some
258# extra things.
259#
260########################################################################
261
262hgdir = main.root.Dir(".hg")
263
264
265style_message = """
266You're missing the gem5 style hook, which automatically checks your code
267against the gem5 style rules on %s.
268This script will now install the hook in your %s.
269Press enter to continue, or ctrl-c to abort: """
270
271mercurial_style_message = """
272You're missing the gem5 style hook, which automatically checks your code
273against the gem5 style rules on hg commit and qrefresh commands.
274This script will now install the hook in your .hg/hgrc file.
275Press enter to continue, or ctrl-c to abort: """
276
277git_style_message = """
278You're missing the gem5 style or commit message hook. These hooks help
279to ensure that your code follows gem5's style rules on git commit.
280This script will now install the hook in your .git/hooks/ directory.
281Press enter to continue, or ctrl-c to abort: """
282
283mercurial_style_upgrade_message = """
284Your Mercurial style hooks are not up-to-date. This script will now
285try to automatically update them. A backup of your hgrc will be saved
286in .hg/hgrc.old.
287Press enter to continue, or ctrl-c to abort: """
288
289mercurial_style_hook = """
290# The following lines were automatically added by gem5/SConstruct
291# to provide the gem5 style-checking hooks
292[extensions]
293hgstyle = %s/util/hgstyle.py
294
295[hooks]
296pretxncommit.style = python:hgstyle.check_style
297pre-qrefresh.style = python:hgstyle.check_style
298# End of SConstruct additions
299
300""" % (main.root.abspath)
301
302mercurial_lib_not_found = """
303Mercurial libraries cannot be found, ignoring style hook.  If
304you are a gem5 developer, please fix this and run the style
305hook. It is important.
306"""
307
308# Check for style hook and prompt for installation if it's not there.
309# Skip this if --ignore-style was specified, there's no interactive
310# terminal to prompt, or no recognized revision control system can be
311# found.
312ignore_style = GetOption('ignore_style') or not sys.stdin.isatty()
313
314# Try to wire up Mercurial to the style hooks
315if not ignore_style and hgdir.exists():
316    style_hook = True
317    style_hooks = tuple()
318    hgrc = hgdir.File('hgrc')
319    hgrc_old = hgdir.File('hgrc.old')
320    try:
321        from mercurial import ui
322        ui = ui.ui()
323        ui.readconfig(hgrc.abspath)
324        style_hooks = (ui.config('hooks', 'pretxncommit.style', None),
325                       ui.config('hooks', 'pre-qrefresh.style', None))
326        style_hook = all(style_hooks)
327        style_extension = ui.config('extensions', 'style', None)
328    except ImportError:
329        print mercurial_lib_not_found
330
331    if "python:style.check_style" in style_hooks:
332        # Try to upgrade the style hooks
333        print mercurial_style_upgrade_message
334        # continue unless user does ctrl-c/ctrl-d etc.
335        try:
336            raw_input()
337        except:
338            print "Input exception, exiting scons.\n"
339            sys.exit(1)
340        shutil.copyfile(hgrc.abspath, hgrc_old.abspath)
341        re_style_hook = re.compile(r"^([^=#]+)\.style\s*=\s*([^#\s]+).*")
342        re_style_extension = re.compile("style\s*=\s*([^#\s]+).*")
343        old, new = open(hgrc_old.abspath, 'r'), open(hgrc.abspath, 'w')
344        for l in old:
345            m_hook = re_style_hook.match(l)
346            m_ext = re_style_extension.match(l)
347            if m_hook:
348                hook, check = m_hook.groups()
349                if check != "python:style.check_style":
350                    print "Warning: %s.style is using a non-default " \
351                        "checker: %s" % (hook, check)
352                if hook not in ("pretxncommit", "pre-qrefresh"):
353                    print "Warning: Updating unknown style hook: %s" % hook
354
355                l = "%s.style = python:hgstyle.check_style\n" % hook
356            elif m_ext and m_ext.group(1) == style_extension:
357                l = "hgstyle = %s/util/hgstyle.py\n" % main.root.abspath
358
359            new.write(l)
360    elif not style_hook:
361        print mercurial_style_message,
362        # continue unless user does ctrl-c/ctrl-d etc.
363        try:
364            raw_input()
365        except:
366            print "Input exception, exiting scons.\n"
367            sys.exit(1)
368        hgrc_path = '%s/.hg/hgrc' % main.root.abspath
369        print "Adding style hook to", hgrc_path, "\n"
370        try:
371            with open(hgrc_path, 'a') as f:
372                f.write(mercurial_style_hook)
373        except:
374            print "Error updating", hgrc_path
375            sys.exit(1)
376
377def install_git_style_hooks():
378    try:
379        gitdir = Dir(readCommand(
380            ["git", "rev-parse", "--git-dir"]).strip("\n"))
381    except Exception, e:
382        print "Warning: Failed to find git repo directory: %s" % e
383        return
384
385    git_hooks = gitdir.Dir("hooks")
386    def hook_exists(hook_name):
387        hook = git_hooks.File(hook_name)
388        return hook.exists()
389
390    def hook_install(hook_name, script):
391        hook = git_hooks.File(hook_name)
392        if hook.exists():
393            print "Warning: Can't install %s, hook already exists." % hook_name
394            return
395
396        if not git_hooks.exists():
397            mkdir(git_hooks.get_abspath())
398
399        # Use a relative symlink if the hooks live in the source directory
400        if hook.is_under(main.root):
401            script_path = os.path.relpath(
402                script.get_abspath(),
403                hook.Dir(".").get_abspath())
404        else:
405            script_path = script.get_abspath()
406
407        try:
408            os.symlink(script_path, hook.get_abspath())
409        except:
410            print "Error updating git %s hook" % hook_name
411            raise
412
413    if hook_exists("pre-commit") and hook_exists("commit-msg"):
414        return
415
416    print git_style_message,
417    try:
418        raw_input()
419    except:
420        print "Input exception, exiting scons.\n"
421        sys.exit(1)
422
423    git_style_script = File("util/git-pre-commit.py")
424    git_msg_script = File("ext/git-commit-msg")
425
426    hook_install("pre-commit", git_style_script)
427    hook_install("commit-msg", git_msg_script)
428
429# Try to wire up git to the style hooks
430if not ignore_style and main.root.Entry(".git").exists():
431    install_git_style_hooks()
432
433###################################################
434#
435# Figure out which configurations to set up based on the path(s) of
436# the target(s).
437#
438###################################################
439
440# Find default configuration & binary.
441Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))
442
443# helper function: find last occurrence of element in list
444def rfind(l, elt, offs = -1):
445    for i in range(len(l)+offs, 0, -1):
446        if l[i] == elt:
447            return i
448    raise ValueError, "element not found"
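# For example (illustrative values): rfind(['a', 'build', 'x', 'build', 'y'],
# 'build') returns 3, the index of the last 'build'; passing offs=-2 excludes
# the final list element from the search.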
449
450# Take a list of paths (or SCons Nodes) and return a list with all
451# paths made absolute and ~-expanded.  Paths will be interpreted
452# relative to the launch directory unless a different root is provided
453def makePathListAbsolute(path_list, root=GetLaunchDir()):
454    return [abspath(joinpath(root, expanduser(str(p))))
455            for p in path_list]
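# For example (hypothetical paths): makePathListAbsolute(['~/foo', 'bar'])
# yields ['/home/<user>/foo', '<launch-dir>/bar'], since relative paths are
# resolved against the scons launch directory by default.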
456
457# Each target must have 'build' in the interior of the path; the
458# directory below this will determine the build parameters.  For
459# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
460# recognize that ALPHA_SE specifies the configuration because it
461# follows 'build' in the build path.
462
463# The funky assignment to "[:]" is needed to replace the list contents
464# in place rather than reassign the symbol to a new list, which
465# doesn't work (obviously!).
466BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)
467
468# Generate a list of the unique build roots and configs that the
469# collected targets reference.
470variant_paths = []
471build_root = None
472for t in BUILD_TARGETS:
473    path_dirs = t.split('/')
474    try:
475        build_top = rfind(path_dirs, 'build', -2)
476    except:
477        print "Error: no non-leaf 'build' dir found on target path", t
478        Exit(1)
479    this_build_root = joinpath('/',*path_dirs[:build_top+1])
480    if not build_root:
481        build_root = this_build_root
482    else:
483        if this_build_root != build_root:
484            print "Error: build targets not under same build root\n"\
485                  "  %s\n  %s" % (build_root, this_build_root)
486            Exit(1)
487    variant_path = joinpath('/',*path_dirs[:build_top+2])
488    if variant_path not in variant_paths:
489        variant_paths.append(variant_path)
490
491# Make sure build_root exists (might not if this is the first build there)
492if not isdir(build_root):
493    mkdir(build_root)
494main['BUILDROOT'] = build_root
495
496Export('main')
497
498main.SConsignFile(joinpath(build_root, "sconsign"))
499
500# Default duplicate option is to use hard links, but this messes up
501# when you use emacs to edit a file in the target dir, as emacs moves
502# file to file~ then copies to file, breaking the link.  Symbolic
503# (soft) links work better.
504main.SetOption('duplicate', 'soft-copy')
505
506#
507# Set up global sticky variables... these are common to an entire build
508# tree (not specific to a particular build like ALPHA_SE)
509#
510
511global_vars_file = joinpath(build_root, 'variables.global')
512
513global_vars = Variables(global_vars_file, args=ARGUMENTS)
514
515global_vars.AddVariables(
516    ('CC', 'C compiler', environ.get('CC', main['CC'])),
517    ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
518    ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
519    ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
520    ('BATCH', 'Use batch pool for build and tests', False),
521    ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
522    ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
523    ('EXTRAS', 'Add extra directories to the compilation', '')
524    )
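# For example (hypothetical invocation), 'scons CC=gcc CXX=g++
# build/ALPHA/gem5.opt' overrides the compilers for this run and, being
# sticky, saves the values to variables.global for subsequent builds.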
525
526# Update main environment with values from ARGUMENTS & global_vars_file
527global_vars.Update(main)
528help_texts["global_vars"] += global_vars.GenerateHelpText(main)
529
530# Save sticky variable settings back to current variables file
531global_vars.Save(global_vars_file, main)
532
533# Parse EXTRAS variable to build a list of all directories where we'll
534# look for sources etc.  This list is exported as extras_dir_list.
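# For example (hypothetical paths), EXTRAS=/work/extra1:/work/extra2 adds both
# colon-separated directories to the source search path.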
535base_dir = main.srcdir.abspath
536if main['EXTRAS']:
537    extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
538else:
539    extras_dir_list = []
540
541Export('base_dir')
542Export('extras_dir_list')
543
544# the ext directory should be on the #includes path
545main.Append(CPPPATH=[Dir('ext')])
546
547def strip_build_path(path, env):
548    path = str(path)
549    variant_base = env['BUILDROOT'] + os.path.sep
550    if path.startswith(variant_base):
551        path = path[len(variant_base):]
552    elif path.startswith('build/'):
553        path = path[6:]
554    return path
555
556# Generate a string of the form:
557#   common/path/prefix/src1, src2 -> tgt1, tgt2
558# to print while building.
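# For example (illustrative file names), compiling a single source might print
# roughly:
#   [     CXX] base/statistics.cc -> .o
# with the prefix, sources, arrow and targets colorized per the settings below.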
559class Transform(object):
560    # all specific color settings should be here and nowhere else
561    tool_color = termcap.Normal
562    pfx_color = termcap.Yellow
563    srcs_color = termcap.Yellow + termcap.Bold
564    arrow_color = termcap.Blue + termcap.Bold
565    tgts_color = termcap.Yellow + termcap.Bold
566
567    def __init__(self, tool, max_sources=99):
568        self.format = self.tool_color + (" [%8s] " % tool) \
569                      + self.pfx_color + "%s" \
570                      + self.srcs_color + "%s" \
571                      + self.arrow_color + " -> " \
572                      + self.tgts_color + "%s" \
573                      + termcap.Normal
574        self.max_sources = max_sources
575
576    def __call__(self, target, source, env, for_signature=None):
577        # truncate source list according to max_sources param
578        source = source[0:self.max_sources]
579        def strip(f):
580            return strip_build_path(str(f), env)
581        if len(source) > 0:
582            srcs = map(strip, source)
583        else:
584            srcs = ['']
585        tgts = map(strip, target)
586        # surprisingly, os.path.commonprefix is a dumb char-by-char string
587        # operation that has nothing to do with paths.
588        com_pfx = os.path.commonprefix(srcs + tgts)
589        com_pfx_len = len(com_pfx)
590        if com_pfx:
591            # do some cleanup and sanity checking on common prefix
592            if com_pfx[-1] == ".":
593                # prefix matches all but file extension: ok
594                # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
595                com_pfx = com_pfx[0:-1]
596            elif com_pfx[-1] == "/":
597                # common prefix is directory path: OK
598                pass
599            else:
600                src0_len = len(srcs[0])
601                tgt0_len = len(tgts[0])
602                if src0_len == com_pfx_len:
603                    # source is a substring of target, OK
604                    pass
605                elif tgt0_len == com_pfx_len:
606                    # target is a substring of source, need to back up to
607                    # avoid empty string on RHS of arrow
608                    sep_idx = com_pfx.rfind(".")
609                    if sep_idx != -1:
610                        com_pfx = com_pfx[0:sep_idx]
611                    else:
612                        com_pfx = ''
613                elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
614                    # still splitting at file extension: ok
615                    pass
616                else:
617                    # probably a fluke; ignore it
618                    com_pfx = ''
619        # recalculate length in case com_pfx was modified
620        com_pfx_len = len(com_pfx)
621        def fmt(files):
622            f = map(lambda s: s[com_pfx_len:], files)
623            return ', '.join(f)
624        return self.format % (com_pfx, fmt(srcs), fmt(tgts))
625
626Export('Transform')
627
628# enable the regression script to use the termcap
629main['TERMCAP'] = termcap
630
631if GetOption('verbose'):
632    def MakeAction(action, string, *args, **kwargs):
633        return Action(action, *args, **kwargs)
634else:
635    MakeAction = Action
636    main['CCCOMSTR']        = Transform("CC")
637    main['CXXCOMSTR']       = Transform("CXX")
638    main['ASCOMSTR']        = Transform("AS")
639    main['SWIGCOMSTR']      = Transform("SWIG")
640    main['ARCOMSTR']        = Transform("AR", 0)
641    main['LINKCOMSTR']      = Transform("LINK", 0)
642    main['RANLIBCOMSTR']    = Transform("RANLIB", 0)
643    main['M4COMSTR']        = Transform("M4")
644    main['SHCCCOMSTR']      = Transform("SHCC")
645    main['SHCXXCOMSTR']     = Transform("SHCXX")
646Export('MakeAction')
647
648# Initialize the Link-Time Optimization (LTO) flags
649main['LTO_CCFLAGS'] = []
650main['LTO_LDFLAGS'] = []
651
652# According to the readme, tcmalloc works best if the compiler doesn't
653# assume that we're using the builtin malloc and friends. These flags
654# are compiler-specific, so we need to set them after we detect which
655# compiler we're using.
656main['TCMALLOC_CCFLAGS'] = []
657
658CXX_version = readCommand([main['CXX'],'--version'], exception=False)
659CXX_V = readCommand([main['CXX'],'-V'], exception=False)
660
661main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
662main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
663if main['GCC'] + main['CLANG'] > 1:
664    print 'Error: How can we have two at the same time?'
665    Exit(1)
666
667# Set up default C++ compiler flags
668if main['GCC'] or main['CLANG']:
669    # As gcc and clang share many flags, do the common parts here
670    main.Append(CCFLAGS=['-pipe'])
671    main.Append(CCFLAGS=['-fno-strict-aliasing'])
672    # Enable -Wall and -Wextra and then disable the few warnings that
673    # we consistently violate
674    main.Append(CCFLAGS=['-Wall', '-Wundef', '-Wextra',
675                         '-Wno-sign-compare', '-Wno-unused-parameter'])
676    # We always compile using C++11
677    main.Append(CXXFLAGS=['-std=c++11'])
678    if sys.platform.startswith('freebsd'):
679        main.Append(CCFLAGS=['-I/usr/local/include'])
680        main.Append(CXXFLAGS=['-I/usr/local/include'])
681else:
682    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
683    print "Don't know what compiler options to use for your compiler."
684    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
685    print termcap.Yellow + '       version:' + termcap.Normal,
686    if not CXX_version:
687        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
688               termcap.Normal
689    else:
690        print CXX_version.replace('\n', '<nl>')
691    print "       If you're trying to use a compiler other than GCC"
692    print "       or clang, there appears to be something wrong with your"
693    print "       environment."
694    print "       "
695    print "       If you are trying to use a compiler other than those listed"
696    print "       above you will need to fix SConstruct and "
697    print "       src/SConscript to support that compiler."
698    Exit(1)
699
700if main['GCC']:
701    # Check for a supported version of gcc. >= 4.8 is chosen for its
702    # level of c++11 support. See
703    # http://gcc.gnu.org/projects/cxx0x.html for details.
704    gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
705    if compareVersions(gcc_version, "4.8") < 0:
706        print 'Error: gcc version 4.8 or newer required.'
707        print '       Installed version:', gcc_version
708        Exit(1)
709
710    main['GCC_VERSION'] = gcc_version
711
712    # gcc from version 4.8 and above generates "rep; ret" instructions
713    # to avoid performance penalties on certain AMD chips. Older
714    # assemblers detect this as an error, "Error: expecting string
715    # instruction after `rep'"
716    as_version_raw = readCommand([main['AS'], '-v', '/dev/null'],
717                                 exception=False).split()
718
719    # version strings may contain extra distro-specific
720    # qualifiers, so play it safe and keep only what comes before
721    # the first hyphen
722    as_version = as_version_raw[-1].split('-')[0] if as_version_raw else None
723
724    if not as_version or compareVersions(as_version, "2.23") < 0:
725        print termcap.Yellow + termcap.Bold + \
726            'Warning: This combination of gcc and binutils has' + \
727            ' known incompatibilities.\n' + \
728            '         If you encounter build problems, please update ' + \
729            'binutils to 2.23.' + \
730            termcap.Normal
731
732    # Make sure we warn if the user has requested to compile with the
733    # Undefined Behavior Sanitizer and this version of gcc does not
734    # support it.
735    if GetOption('with_ubsan') and \
736            compareVersions(gcc_version, '4.9') < 0:
737        print termcap.Yellow + termcap.Bold + \
738            'Warning: UBSan is only supported using gcc 4.9 and later.' + \
739            termcap.Normal
740
741    # Add the appropriate Link-Time Optimization (LTO) flags
742    # unless LTO is explicitly turned off. Note that these flags
743    # are only used by the fast target.
744    if not GetOption('no_lto'):
745        # Pass the LTO flag when compiling to produce GIMPLE
746        # output, we merely create the flags here and only append
747        # them later
748        main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
749
750        # Use the same amount of jobs for LTO as we are running
751        # scons with
752        main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
753
754    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
755                                  '-fno-builtin-realloc', '-fno-builtin-free'])
756
757    # do not promote 'suggest-override' (undeclared overrides) warnings to errors
758    if compareVersions(gcc_version, "5.0") > 0:
759        main.Append(CCFLAGS=['-Wno-error=suggest-override'])
760
761elif main['CLANG']:
762    # Check for a supported version of clang, >= 3.1 is needed to
763    # support similar features as gcc 4.8. See
764    # http://clang.llvm.org/cxx_status.html for details
765    clang_version_re = re.compile(".* version (\d+\.\d+)")
766    clang_version_match = clang_version_re.search(CXX_version)
767    if (clang_version_match):
768        clang_version = clang_version_match.groups()[0]
769        if compareVersions(clang_version, "3.1") < 0:
770            print 'Error: clang version 3.1 or newer required.'
771            print '       Installed version:', clang_version
772            Exit(1)
773    else:
774        print 'Error: Unable to determine clang version.'
775        Exit(1)
776
777    # clang has a few additional warnings that we disable: extraneous
778    # parentheses are allowed due to Ruby's printing of the AST, and
779    # self assignments are allowed as the generated CPU code relies
780    # on this
781    main.Append(CCFLAGS=['-Wno-parentheses',
782                         '-Wno-self-assign',
783                         # Some versions of libstdc++ (4.8?) seem to
784                         # use struct hash and class hash
785                         # interchangeably.
786                         '-Wno-mismatched-tags',
787                         ])
788
789    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])
790
791    # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
792    # opposed to libstdc++, as the latter is dated.
793    if sys.platform == "darwin":
794        main.Append(CXXFLAGS=['-stdlib=libc++'])
795        main.Append(LIBS=['c++'])
796
797    # On FreeBSD we need libthr.
798    if sys.platform.startswith('freebsd'):
799        main.Append(LIBS=['thr'])
800
801else:
802    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
803    print "Don't know what compiler options to use for your compiler."
804    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
805    print termcap.Yellow + '       version:' + termcap.Normal,
806    if not CXX_version:
807        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
808               termcap.Normal
809    else:
810        print CXX_version.replace('\n', '<nl>')
811    print "       If you're trying to use a compiler other than GCC"
812    print "       or clang, there appears to be something wrong with your"
813    print "       environment."
814    print "       "
815    print "       If you are trying to use a compiler other than those listed"
816    print "       above you will need to fix SConstruct and "
817    print "       src/SConscript to support that compiler."
818    Exit(1)
819
820# Set up common yacc/bison flags (needed for Ruby)
821main['YACCFLAGS'] = '-d'
822main['YACCHXXFILESUFFIX'] = '.hh'
823
824# Do this after we save the settings back, or else we'll tack on an
825# extra 'qdo' every time we run scons.
826if main['BATCH']:
827    main['CC']     = main['BATCH_CMD'] + ' ' + main['CC']
828    main['CXX']    = main['BATCH_CMD'] + ' ' + main['CXX']
829    main['AS']     = main['BATCH_CMD'] + ' ' + main['AS']
830    main['AR']     = main['BATCH_CMD'] + ' ' + main['AR']
831    main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']
832
833if sys.platform == 'cygwin':
834    # cygwin has some header file issues...
835    main.Append(CCFLAGS=["-Wno-uninitialized"])
836
837# Check for the protobuf compiler
838protoc_version = readCommand([main['PROTOC'], '--version'],
839                             exception='').split()
840
841# First two words should be "libprotoc x.y.z"
842if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
843    print termcap.Yellow + termcap.Bold + \
844        'Warning: Protocol buffer compiler (protoc) not found.\n' + \
845        '         Please install protobuf-compiler for tracing support.' + \
846        termcap.Normal
847    main['PROTOC'] = False
848else:
849    # Based on the availability of the compress stream wrappers,
850    # require 2.1.0
851    min_protoc_version = '2.1.0'
852    if compareVersions(protoc_version[1], min_protoc_version) < 0:
853        print termcap.Yellow + termcap.Bold + \
854            'Warning: protoc version', min_protoc_version, \
855            'or newer required.\n' + \
856            '         Installed version:', protoc_version[1], \
857            termcap.Normal
858        main['PROTOC'] = False
859    else:
860        # Attempt to determine the appropriate include path and
861        # library path using pkg-config, which means we also need to
862        # check for pkg-config. Note that it is possible to use
863        # protobuf without the involvement of pkg-config. Later on we
864        # do a library config check and at that point the test
865        # will fail if libprotobuf cannot be found.
866        if readCommand(['pkg-config', '--version'], exception=''):
867            try:
868                # Attempt to establish what linking flags to add for protobuf
869                # using pkg-config
870                main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
871            except:
872                print termcap.Yellow + termcap.Bold + \
873                    'Warning: pkg-config could not get protobuf flags.' + \
874                    termcap.Normal
875
876# Check for SWIG
877if not main.has_key('SWIG'):
878    print 'Error: SWIG utility not found.'
879    print '       Please install (see http://www.swig.org) and retry.'
880    Exit(1)
881
882# Check for appropriate SWIG version
883swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
884# First 3 words should be "SWIG Version x.y.z"
885if len(swig_version) < 3 or \
886        swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
887    print 'Error determining SWIG version.'
888    Exit(1)
889
890min_swig_version = '2.0.4'
891if compareVersions(swig_version[2], min_swig_version) < 0:
892    print 'Error: SWIG version', min_swig_version, 'or newer required.'
893    print '       Installed version:', swig_version[2]
894    Exit(1)
895
896# Check for known incompatibilities. The standard library shipped with
897# gcc >= 4.9 does not play well with swig versions prior to 3.0
898if main['GCC'] and compareVersions(gcc_version, '4.9') >= 0 and \
899        compareVersions(swig_version[2], '3.0') < 0:
900    print termcap.Yellow + termcap.Bold + \
901        'Warning: This combination of gcc and swig has' + \
902        ' known incompatibilities.\n' + \
903        '         If you encounter build problems, please update ' + \
904        'swig to 3.0 or later.' + \
905        termcap.Normal
906
907# Set up SWIG flags & scanner
908swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
909main.Append(SWIGFLAGS=swig_flags)
910
911# Check for 'timeout' from GNU coreutils. If present, regressions will
912# be run with a time limit. We require version 8.13 since we rely on
913# support for the '--foreground' option.
914if sys.platform.startswith('freebsd'):
915    timeout_lines = readCommand(['gtimeout', '--version'],
916                                exception='').splitlines()
917else:
918    timeout_lines = readCommand(['timeout', '--version'],
919                                exception='').splitlines()
920# Get the first line and tokenize it
921timeout_version = timeout_lines[0].split() if timeout_lines else []
922main['TIMEOUT'] =  timeout_version and \
923    compareVersions(timeout_version[-1], '8.13') >= 0
924
925# filter out all existing swig scanners; they mess up the dependency
926# tracking for some reason
927scanners = []
928for scanner in main['SCANNERS']:
929    skeys = scanner.skeys
930    if skeys == '.i':
931        continue
932
933    if isinstance(skeys, (list, tuple)) and '.i' in skeys:
934        continue
935
936    scanners.append(scanner)
937
938# add the new swig scanner that we like better
939from SCons.Scanner import ClassicCPP as CPPScanner
940swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
941scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))
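# The regex above lets the scanner pick up dependencies from lines such as
# (illustrative) %include "python/swig/pyobject.i" or #include <Python.h>
# inside .i files, resolved against CPPPATH.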
942
943# replace the scanners list that has what we want
944main['SCANNERS'] = scanners
945
946# Add a custom Check function to test for structure members.
947def CheckMember(context, include, decl, member, include_quotes="<>"):
948    context.Message("Checking for member %s in %s..." %
949                    (member, decl))
950    text = """
951#include %(header)s
952int main(){
953  %(decl)s test;
954  (void)test.%(member)s;
955  return 0;
956};
957""" % { "header" : include_quotes[0] + include + include_quotes[1],
958        "decl" : decl,
959        "member" : member,
960        }
961
962    ret = context.TryCompile(text, extension=".cc")
963    context.Result(ret)
964    return ret
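# For example, later in this file it is used (for the KVM instruction-count
# check) as:
#   conf.CheckMember('linux/perf_event.h', 'struct perf_event_attr',
#                    'exclude_host')
# which compiles a small test program to probe for the member.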
965
966# Platform-specific configuration.  Note again that we assume that all
967# builds under a given build root run on the same host platform.
968conf = Configure(main,
969                 conf_dir = joinpath(build_root, '.scons_config'),
970                 log_file = joinpath(build_root, 'scons_config.log'),
971                 custom_tests = {
972        'CheckMember' : CheckMember,
973        })
974
975# Check if we should compile a 64 bit binary on Mac OS X/Darwin
976try:
977    import platform
978    uname = platform.uname()
979    if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
980        if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
981            main.Append(CCFLAGS=['-arch', 'x86_64'])
982            main.Append(CFLAGS=['-arch', 'x86_64'])
983            main.Append(LINKFLAGS=['-arch', 'x86_64'])
984            main.Append(ASFLAGS=['-arch', 'x86_64'])
985except:
986    pass
987
988# Recent versions of scons substitute a "Null" object for Configure()
989# when configuration isn't necessary, e.g., if the "--help" option is
990# present.  Unfortunately this Null object always returns false,
991# breaking all our configuration checks.  We replace it with our own
992# more optimistic null object that returns True instead.
993if not conf:
994    def NullCheck(*args, **kwargs):
995        return True
996
997    class NullConf:
998        def __init__(self, env):
999            self.env = env
1000        def Finish(self):
1001            return self.env
1002        def __getattr__(self, mname):
1003            return NullCheck
1004
1005    conf = NullConf(main)
1006
1007# Cache build files in the supplied directory.
1008if main['M5_BUILD_CACHE']:
1009    print 'Using build cache located at', main['M5_BUILD_CACHE']
1010    CacheDir(main['M5_BUILD_CACHE'])
1011
1012if not GetOption('without_python'):
1013    # Find Python include and library directories for embedding the
1014    # interpreter. We rely on python-config to resolve the appropriate
1015    # includes and linker flags. ParseConfig does not seem to understand
1016    # the more exotic linker flags such as -Xlinker and -export-dynamic so
1017    # we add them explicitly below. If you want to link in an alternate
1018    # version of python, see above for instructions on how to invoke
1019    # scons with the appropriate PATH set.
1020    #
1021    # First we check if python2-config exists, else we use python-config
1022    python_config = readCommand(['which', 'python2-config'],
1023                                exception='').strip()
1024    if not os.path.exists(python_config):
1025        python_config = readCommand(['which', 'python-config'],
1026                                    exception='').strip()
1027    py_includes = readCommand([python_config, '--includes'],
1028                              exception='').split()
1029    # Strip the -I from the include folders before adding them to the
1030    # CPPPATH
1031    main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))
1032
1033    # Read the linker flags and split them into libraries and other link
1034    # flags. The libraries are added later through the call to CheckLib.
1035    py_ld_flags = readCommand([python_config, '--ldflags'],
1036        exception='').split()
1037    py_libs = []
1038    for lib in py_ld_flags:
1039         if not lib.startswith('-l'):
1040             main.Append(LINKFLAGS=[lib])
1041         else:
1042             lib = lib[2:]
1043             if lib not in py_libs:
1044                 py_libs.append(lib)
1045
1046    # verify that this stuff works
1047    if not conf.CheckHeader('Python.h', '<>'):
1048        print "Error: can't find Python.h header in", py_includes
1049        print "Install Python headers (package python-dev on Ubuntu and RedHat)"
1050        Exit(1)
1051
1052    for lib in py_libs:
1053        if not conf.CheckLib(lib):
1054            print "Error: can't find library %s required by python" % lib
1055            Exit(1)
1056
1057# On Solaris you need to use libsocket for socket ops
1058if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
1059   if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
1060       print "Can't find library with socket calls (e.g. accept())"
1061       Exit(1)
1062
1063# Check for zlib.  If the check passes, libz will be automatically
1064# added to the LIBS environment variable.
1065if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
1066    print 'Error: did not find needed zlib compression library '\
1067          'and/or zlib.h header file.'
1068    print '       Please install zlib and try again.'
1069    Exit(1)
1070
1071# If we have the protobuf compiler, also make sure we have the
1072# development libraries. If the check passes, libprotobuf will be
1073# automatically added to the LIBS environment variable. After
1074# this, we can use the HAVE_PROTOBUF flag to determine if we have
1075# got both protoc and libprotobuf available.
1076main['HAVE_PROTOBUF'] = main['PROTOC'] and \
1077    conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
1078                            'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')
1079
1080# If we have the compiler but not the library, print another warning.
1081if main['PROTOC'] and not main['HAVE_PROTOBUF']:
1082    print termcap.Yellow + termcap.Bold + \
1083        'Warning: did not find protocol buffer library and/or headers.\n' + \
1084    '       Please install libprotobuf-dev for tracing support.' + \
1085    termcap.Normal
1086
1087# Check for librt.
1088have_posix_clock = \
1089    conf.CheckLibWithHeader(None, 'time.h', 'C',
1090                            'clock_nanosleep(0,0,NULL,NULL);') or \
1091    conf.CheckLibWithHeader('rt', 'time.h', 'C',
1092                            'clock_nanosleep(0,0,NULL,NULL);')
1093
1094have_posix_timers = \
1095    conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
1096                            'timer_create(CLOCK_MONOTONIC, NULL, NULL);')
1097
1098if not GetOption('without_tcmalloc'):
1099    if conf.CheckLib('tcmalloc'):
1100        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1101    elif conf.CheckLib('tcmalloc_minimal'):
1102        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1103    else:
1104        print termcap.Yellow + termcap.Bold + \
1105              "You can get a 12% performance improvement by "\
1106              "installing tcmalloc (libgoogle-perftools-dev package "\
1107              "on Ubuntu or RedHat)." + termcap.Normal
1108
1109
1110# Detect back trace implementations. The last implementation in the
1111# list will be used by default.
1112backtrace_impls = [ "none" ]
1113
1114if conf.CheckLibWithHeader(None, 'execinfo.h', 'C',
1115                           'backtrace_symbols_fd((void*)0, 0, 0);'):
1116    backtrace_impls.append("glibc")
1117elif conf.CheckLibWithHeader('execinfo', 'execinfo.h', 'C',
1118                           'backtrace_symbols_fd((void*)0, 0, 0);'):
1119    # NetBSD and FreeBSD need libexecinfo.
1120    backtrace_impls.append("glibc")
1121    main.Append(LIBS=['execinfo'])
1122
1123if backtrace_impls[-1] == "none":
1124    default_backtrace_impl = "none"
1125    print termcap.Yellow + termcap.Bold + \
1126        "No suitable back trace implementation found." + \
1127        termcap.Normal
1128
1129if not have_posix_clock:
1130    print "Can't find library for POSIX clocks."
1131
1132# Check for <fenv.h> (C99 FP environment control)
1133have_fenv = conf.CheckHeader('fenv.h', '<>')
1134if not have_fenv:
1135    print "Warning: Header file <fenv.h> not found."
1136    print "         This host has no IEEE FP rounding mode control."
1137
1138# Check if we should enable KVM-based hardware virtualization. The API
1139# we rely on exists since version 2.6.36 of the kernel, but somehow
1140# the KVM_API_VERSION does not reflect the change. We test for one of
1141# the types as a fall back.
1142have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
1143if not have_kvm:
1144    print "Info: Compatible header file <linux/kvm.h> not found, " \
1145        "disabling KVM support."
1146
1147# x86 needs support for xsave. We test for the structure here since we
1148# won't be able to run new tests by the time we know which ISA we're
1149# targeting.
1150have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
1151                                    '#include <linux/kvm.h>') != 0
1152
1153# Check if the requested target ISA is compatible with the host
1154def is_isa_kvm_compatible(isa):
1155    try:
1156        import platform
1157        host_isa = platform.machine()
1158    except:
1159        print "Warning: Failed to determine host ISA."
1160        return False
1161
1162    if not have_posix_timers:
1163        print "Warning: Can not enable KVM, host seems to lack support " \
1164            "for POSIX timers"
1165        return False
1166
1167    if isa == "arm":
1168        return host_isa in ( "armv7l", "aarch64" )
1169    elif isa == "x86":
1170        if host_isa != "x86_64":
1171            return False
1172
1173        if not have_kvm_xsave:
1174            print "KVM on x86 requires xsave support in kernel headers."
1175            return False
1176
1177        return True
1178    else:
1179        return False
1180
1181
1182# Check if the exclude_host attribute is available. We want this to
1183# get accurate instruction counts in KVM.
1184main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
1185    'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')
1186
1187
1188######################################################################
1189#
1190# Finish the configuration
1191#
1192main = conf.Finish()
1193
1194######################################################################
1195#
1196# Collect all non-global variables
1197#
1198
1199# Define the universe of supported ISAs
1200all_isa_list = [ ]
1201all_gpu_isa_list = [ ]
1202Export('all_isa_list')
1203Export('all_gpu_isa_list')
1204
1205class CpuModel(object):
1206    '''The CpuModel class encapsulates everything the ISA parser needs to
1207    know about a particular CPU model.'''
1208
1209    # Dict of available CPU model objects.  Accessible as CpuModel.dict.
1210    dict = {}
1211
1212    # Constructor.  Automatically adds models to CpuModel.dict.
1213    def __init__(self, name, default=False):
1214        self.name = name           # name of model
1215
1216        # This cpu is enabled by default
1217        self.default = default
1218
1219        # Add self to dict
1220        if name in CpuModel.dict:
1221            raise AttributeError, "CpuModel '%s' already registered" % name
1222        CpuModel.dict[name] = self
1223
1224Export('CpuModel')
1225
1226# Sticky variables get saved in the variables file so they persist from
1227# one invocation to the next (unless overridden, in which case the new
1228# value becomes sticky).
1229sticky_vars = Variables(args=ARGUMENTS)
1230Export('sticky_vars')
1231
1232# Sticky variables that should be exported
1233export_vars = []
1234Export('export_vars')
1235
1236# For Ruby
1237all_protocols = []
1238Export('all_protocols')
1239protocol_dirs = []
1240Export('protocol_dirs')
1241slicc_includes = []
1242Export('slicc_includes')
1243
1244# Walk the tree and execute all SConsopts scripts that will add to the
1245# above variables
1246if GetOption('verbose'):
1247    print "Reading SConsopts"
1248for bdir in [ base_dir ] + extras_dir_list:
1249    if not isdir(bdir):
1250        print "Error: directory '%s' does not exist" % bdir
1251        Exit(1)
1252    for root, dirs, files in os.walk(bdir):
1253        if 'SConsopts' in files:
1254            if GetOption('verbose'):
1255                print "Reading", joinpath(root, 'SConsopts')
1256            SConscript(joinpath(root, 'SConsopts'))
1257
1258all_isa_list.sort()
1259all_gpu_isa_list.sort()
1260
1261sticky_vars.AddVariables(
1262    EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
1263    EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list),
1264    ListVariable('CPU_MODELS', 'CPU models',
1265                 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
1266                 sorted(CpuModel.dict.keys())),
1267    BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
1268                 False),
1269    BoolVariable('SS_COMPATIBLE_FP',
1270                 'Make floating-point results compatible with SimpleScalar',
1271                 False),
1272    BoolVariable('USE_SSE2',
1273                 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
1274                 False),
1275    BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
1276    BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
1277    BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability', False),
1278    BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models', have_kvm),
1279    BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
1280    EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
1281                  all_protocols),
1282    EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
1283                 backtrace_impls[-1], backtrace_impls)
1284    )
1285
1286# These variables get exported to #defines in config/*.hh (see src/SConscript).
1287export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
1288                'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'PROTOCOL',
1289                'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST']
1290
1291###################################################
1292#
1293# Define a SCons builder for configuration flag headers.
1294#
1295###################################################
1296
1297# This function generates a config header file that #defines the
1298# variable symbol to the current variable setting (0 or 1).  The source
1299# operands are the name of the variable and a Value node containing the
1300# value of the variable.
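# For example (illustrative), ('USE_KVM', True) becomes config/use_kvm.hh
# containing the single line:
#   #define USE_KVM 1
# (booleans are forced to 0/1 and strings are quoted by config_emitter below).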
1301def build_config_file(target, source, env):
1302    (variable, value) = [s.get_contents() for s in source]
1303    f = file(str(target[0]), 'w')
1304    print >> f, '#define', variable, value
1305    f.close()
1306    return None
1307
1308# Combine the two functions into a scons Action object.
1309config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))
1310
1311# The emitter munges the source & target node lists to reflect what
1312# we're really doing.
1313def config_emitter(target, source, env):
1314    # extract variable name from Builder arg
1315    variable = str(target[0])
1316    # True target is config header file
1317    target = joinpath('config', variable.lower() + '.hh')
1318    val = env[variable]
1319    if isinstance(val, bool):
1320        # Force value to 0/1
1321        val = int(val)
1322    elif isinstance(val, str):
1323        val = '"' + val + '"'
1324
1325    # Sources are variable name & value (packaged in SCons Value nodes)
1326    return ([target], [Value(variable), Value(val)])
1327
1328config_builder = Builder(emitter = config_emitter, action = config_action)
1329
1330main.Append(BUILDERS = { 'ConfigFile' : config_builder })
1331
1332# libelf build is shared across all configs in the build root.
1333main.SConscript('ext/libelf/SConscript',
1334                variant_dir = joinpath(build_root, 'libelf'))
1335
1336# iostream3 build is shared across all configs in the build root.
1337main.SConscript('ext/iostream3/SConscript',
1338                variant_dir = joinpath(build_root, 'iostream3'))
1339
1340# libfdt build is shared across all configs in the build root.
1341main.SConscript('ext/libfdt/SConscript',
1342                variant_dir = joinpath(build_root, 'libfdt'))
1343
1344# fputils build is shared across all configs in the build root.
1345main.SConscript('ext/fputils/SConscript',
1346                variant_dir = joinpath(build_root, 'fputils'))
1347
1348# DRAMSim2 build is shared across all configs in the build root.
1349main.SConscript('ext/dramsim2/SConscript',
1350                variant_dir = joinpath(build_root, 'dramsim2'))
1351
1352# DRAMPower build is shared across all configs in the build root.
1353main.SConscript('ext/drampower/SConscript',
1354                variant_dir = joinpath(build_root, 'drampower'))
1355
1356# nomali build is shared across all configs in the build root.
1357main.SConscript('ext/nomali/SConscript',
1358                variant_dir = joinpath(build_root, 'nomali'))
1359
1360###################################################
1361#
1362# This function is used to set up a directory with switching headers
1363#
1364###################################################
1365
1366main['ALL_ISA_LIST'] = all_isa_list
1367main['ALL_GPU_ISA_LIST'] = all_gpu_isa_list
1368all_isa_deps = {}
1369def make_switching_dir(dname, switch_headers, env):
1370    # Generate the header.  target[0] is the full path of the output
1371    # header to generate.  'source' is a dummy variable, since we get the
1372    # list of ISAs from env['ALL_ISA_LIST'].
1373    def gen_switch_hdr(target, source, env):
1374        fname = str(target[0])
1375        isa = env['TARGET_ISA'].lower()
1376        try:
1377            f = open(fname, 'w')
1378            print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
1379            f.close()
1380        except IOError:
1381            print "Failed to create %s" % fname
1382            raise
1383
1384    # Build SCons Action object. 'varlist' specifies env vars that this
1385    # action depends on; when env['ALL_ISA_LIST'] changes these actions
1386    # should get re-executed.
1387    switch_hdr_action = MakeAction(gen_switch_hdr,
1388                          Transform("GENERATE"), varlist=['ALL_ISA_LIST'])
1389
1390    # Instantiate actions for each header
1391    for hdr in switch_headers:
1392        env.Command(hdr, [], switch_hdr_action)
1393
1394    isa_target = Dir('.').up().name.lower().replace('_', '-')
1395    env['PHONY_BASE'] = '#'+isa_target
1396    all_isa_deps[isa_target] = None
1397
1398Export('make_switching_dir')
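
# A sketch of what gets generated (header name hypothetical): with
# TARGET_ISA=arm and dname='arch', a switching header such as decoder.hh
# would contain just
#
#   #include "arch/arm/decoder.hh"
#
# so ISA-agnostic code can include one stable path and still pick up the
# ISA this build directory is configured for.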
1399
1400def make_gpu_switching_dir(dname, switch_headers, env):
1401    # Generate the header.  target[0] is the full path of the output
1402    # header to generate.  'source' is a dummy variable, since we get the
1403    # list of GPU ISAs from env['ALL_GPU_ISA_LIST'].
1404    def gen_switch_hdr(target, source, env):
1405        fname = str(target[0])
1406
1407        isa = env['TARGET_GPU_ISA'].lower()
1408
1409        try:
1410            f = open(fname, 'w')
1411            print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
1412            f.close()
1413        except IOError:
1414            print "Failed to create %s" % fname
1415            raise
1416
1417    # Build SCons Action object. 'varlist' specifies env vars that this
1418    # action depends on; when env['ALL_GPU_ISA_LIST'] changes these actions
1419    # should get re-executed.
1420    switch_hdr_action = MakeAction(gen_switch_hdr,
1421                          Transform("GENERATE"), varlist=['ALL_GPU_ISA_LIST'])
1422
1423    # Instantiate actions for each header
1424    for hdr in switch_headers:
1425        env.Command(hdr, [], switch_hdr_action)
1426
1427Export('make_gpu_switching_dir')
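
# The GPU flavour above works the same way but switches on
# TARGET_GPU_ISA, so (ISA name illustrative) a TARGET_GPU_ISA of 'hsail'
# would redirect its switching headers into an 'hsail' subdirectory.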
1428
1429# all-isas -> all-deps -> all-environs -> all-targets
1430main.Alias('#all-isas', [])
1431main.Alias('#all-deps', '#all-isas')
1432
1433# Dummy target to ensure all environments are created before telling
1434# SCons what to actually make (the command-line arguments).  The original
1435# targets are re-attached to the dependence graph once all environments exist.
1436ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy for the closure below.
1437def environsComplete(target, source, env):
1438    for t in ORIG_BUILD_TARGETS:
1439        main.Depends('#all-targets', t)
1440
1441# Each build/* switching_dir attaches its *-environs target to #all-environs.
1442main.Append(BUILDERS = {'CompleteEnvirons' :
1443                        Builder(action=MakeAction(environsComplete, None))})
1444main.CompleteEnvirons('#all-environs', [])
1445
1446def doNothing(**ignored): pass
1447main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})
1448
1449# The final target to which all the original targets ultimately get attached.
1450main.Dummy('#all-targets', '#all-environs')
1451BUILD_TARGETS[:] = ['#all-targets']
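
# Putting the phony targets together (target name illustrative): a
# command line such as 'scons build/ARM/gem5.opt' is remembered in
# ORIG_BUILD_TARGETS, each switching dir chains its '*-deps' and
# '*-environs' aliases under '#all-deps'/'#all-environs', and once every
# environment exists environsComplete() hangs the original targets off
# '#all-targets', the only thing SCons is now asked to build directly.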
1452
1453###################################################
1454#
1455# Define build environments for selected configurations.
1456#
1457###################################################
1458
1459for variant_path in variant_paths:
1460    if not GetOption('silent'):
1461        print "Building in", variant_path
1462
1463    # Make a copy of the build-root environment to use for this config.
1464    env = main.Clone()
1465    env['BUILDDIR'] = variant_path
1466
1467    # variant_dir is the tail component of the build path, and is used to
1468    # determine the build parameters (e.g., 'ALPHA_SE')
1469    (build_root, variant_dir) = splitpath(variant_path)
1470
1471    # Set env variables according to the build directory config.
1472    sticky_vars.files = []
1473    # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
1474    # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
1475    # $BUILD_ROOT/$VARIANT_DIR without losing your variable settings.
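    # For instance (illustrative), a build/ARM config keeps its settings
    # in build/variables/ARM, so 'rm -rf build/ARM' leaves them intact.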
1476    current_vars_file = joinpath(build_root, 'variables', variant_dir)
1477    if isfile(current_vars_file):
1478        sticky_vars.files.append(current_vars_file)
1479        if not GetOption('silent'):
1480            print "Using saved variables file %s" % current_vars_file
1481    else:
1482        # Build dir-specific variables file doesn't exist.
1483
1484        # Make sure the directory is there so we can save the variables file later
1485        opt_dir = dirname(current_vars_file)
1486        if not isdir(opt_dir):
1487            mkdir(opt_dir)
1488
1489        # Get default build variables from source tree.  Variables are
1490        # normally determined by name of $VARIANT_DIR, but can be
1491        # overridden by '--default=' arg on command line.
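        # E.g. (illustrative): 'scons build/ARM/gem5.opt' seeds its
        # settings from build_opts/ARM, whereas '--default=X86' would
        # look for build/variables/X86 and then build_opts/X86 instead.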
1492        default = GetOption('default')
1493        opts_dir = joinpath(main.root.abspath, 'build_opts')
1494        if default:
1495            default_vars_files = [joinpath(build_root, 'variables', default),
1496                                  joinpath(opts_dir, default)]
1497        else:
1498            default_vars_files = [joinpath(opts_dir, variant_dir)]
1499        existing_files = filter(isfile, default_vars_files)
1500        if existing_files:
1501            default_vars_file = existing_files[0]
1502            sticky_vars.files.append(default_vars_file)
1503            print "Variables file %s not found,\n  using defaults in %s" \
1504                  % (current_vars_file, default_vars_file)
1505        else:
1506            print "Error: cannot find variables file %s or " \
1507                  "default file(s) %s" \
1508                  % (current_vars_file, ' or '.join(default_vars_files))
1509            Exit(1)
1510
1511    # Apply current variable settings to env
1512    sticky_vars.Update(env)
1513
1514    help_texts["local_vars"] += \
1515        "Build variables for %s:\n" % variant_dir \
1516                 + sticky_vars.GenerateHelpText(env)
1517
1518    # Process variable settings.
1519
1520    if not have_fenv and env['USE_FENV']:
1521        print "Warning: <fenv.h> not available; " \
1522              "forcing USE_FENV to False in", variant_dir + "."
1523        env['USE_FENV'] = False
1524
1525    if not env['USE_FENV']:
1526        print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
1527        print "         FP results may deviate slightly from other platforms."
1528
1529    if env['EFENCE']:
1530        env.Append(LIBS=['efence'])
1531
1532    if env['USE_KVM']:
1533        if not have_kvm:
1534            print "Warning: Cannot enable KVM; host seems to lack KVM support"
1535            env['USE_KVM'] = False
1536        elif not is_isa_kvm_compatible(env['TARGET_ISA']):
1537            print "Info: KVM support disabled due to unsupported host and " \
1538                "target ISA combination"
1539            env['USE_KVM'] = False
1540
1541    if env['BUILD_GPU']:
1542        env.Append(CPPDEFINES=['BUILD_GPU'])
1543
1544    # Warn about missing optional functionality
1545    if env['USE_KVM']:
1546        if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
1547            print "Warning: perf_event headers lack support for the " \
1548                "exclude_host attribute. KVM instruction counts will " \
1549                "be inaccurate."
1550
1551    # Save sticky variable settings back to current variables file
1552    sticky_vars.Save(current_vars_file, env)
1553
1554    if env['USE_SSE2']:
1555        env.Append(CCFLAGS=['-msse2'])
1556
1557    # The src/SConscript file sets up the build rules in 'env' according
1558    # to the configured variables.  It returns a list of environments,
1559    # one for each variant build (debug, opt, etc.)
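    # (So a single variant_path such as build/ARM yields gem5.debug,
    # gem5.opt, etc. from one pass through src/SConscript.)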
1560    SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')
1561
1562def pairwise(iterable):
1563    "s -> (s0,s1), (s1,s2), (s2, s3), ..."
1564    a, b = itertools.tee(iterable)
1565    b.next()
1566    return itertools.izip(a, b)
1567
1568# Create false dependencies so SCons will parse ISAs, establish
1569# dependencies, and set up the build Environments serially. Either
1570# SCons (likely) or our SConscripts (possibly) cannot cope with -j
1571# greater than 1 here. It looks like a standard race condition: the
1572# build doesn't always fail, but it usually does, and in different ways.
1573# Every attempt to remove this serialization has broken the build in
1574# some creative new way, so don't. You'll be tempted to, though, because
1575# tests/SConscript takes a long time to make its Environments.
1576for t1, t2 in pairwise(sorted(all_isa_deps.iterkeys())):
1577    main.Depends('#%s-deps'     % t2, '#%s-deps'     % t1)
1578    main.Depends('#%s-environs' % t2, '#%s-environs' % t1)
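
# E.g. (illustrative): with ARM, MIPS and X86 configured, the loop above
# chains '#mips-deps' after '#arm-deps' and '#x86-deps' after
# '#mips-deps' (and likewise for the '-environs' targets), so the
# per-ISA setup runs one ISA at a time even under 'scons -j N'.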
1579
1580# base help text
1581Help('''
1582Usage: scons [scons options] [build variables] [target(s)]
1583
1584Extra scons options:
1585%(options)s
1586
1587Global build variables:
1588%(global_vars)s
1589
1590%(local_vars)s
1591''' % help_texts)
1592