SConstruct revision 11988
1# -*- mode:python -*-
2
3# Copyright (c) 2013, 2015, 2016 ARM Limited
4# All rights reserved.
5#
6# The license below extends only to copyright in the software and shall
7# not be construed as granting a license to any other intellectual
8# property including but not limited to intellectual property relating
9# to a hardware implementation of the functionality of the software
10# licensed hereunder.  You may use the software subject to the license
11# terms below provided that you ensure that this notice is replicated
12# unmodified and in its entirety in all distributions of the software,
13# modified or unmodified, in source code or in binary form.
14#
15# Copyright (c) 2011 Advanced Micro Devices, Inc.
16# Copyright (c) 2009 The Hewlett-Packard Development Company
17# Copyright (c) 2004-2005 The Regents of The University of Michigan
18# All rights reserved.
19#
20# Redistribution and use in source and binary forms, with or without
21# modification, are permitted provided that the following conditions are
22# met: redistributions of source code must retain the above copyright
23# notice, this list of conditions and the following disclaimer;
24# redistributions in binary form must reproduce the above copyright
25# notice, this list of conditions and the following disclaimer in the
26# documentation and/or other materials provided with the distribution;
27# neither the name of the copyright holders nor the names of its
28# contributors may be used to endorse or promote products derived from
29# this software without specific prior written permission.
30#
31# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
32# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
33# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
34# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
35# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
37# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
38# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
39# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
40# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
41# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
42#
43# Authors: Steve Reinhardt
44#          Nathan Binkert
45
46###################################################
47#
48# SCons top-level build description (SConstruct) file.
49#
50# While in this directory ('gem5'), just type 'scons' to build the default
51# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
52# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
53# the optimized full-system version).
54#
55# You can build gem5 in a different directory as long as there is a
56# 'build/<CONFIG>' somewhere along the target path.  The build system
57# expects that all configs under the same build directory are being
58# built for the same host system.
59#
60# Examples:
61#
62#   The following two commands are equivalent.  The '-u' option tells
63#   scons to search up the directory tree for this SConstruct file.
64#   % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
65#   % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
66#
67#   The following two commands are equivalent and demonstrate building
68#   in a directory outside of the source tree.  The '-C' option tells
69#   scons to chdir to the specified directory to find this SConstruct
70#   file.
71#   % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
72#   % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
73#
74# You can use 'scons -H' to print scons options.  If you're in this
75# 'gem5' directory (or use -u or -C to tell scons where to find this
76# file), you can use 'scons -h' to print all the gem5-specific build
77# options as well.
78#
79###################################################
80
81# Check for recent-enough Python and SCons versions.
82try:
83    # Really old versions of scons only take two options for the
84    # function, so check once without the revision and once with the
85    # revision; the first call will fail for versions other than
86    # 0.98, and the second will fail for 0.98.0
87    EnsureSConsVersion(0, 98)
88    EnsureSConsVersion(0, 98, 1)
89except SystemExit, e:
90    print """
91For more details, see:
92    http://gem5.org/Dependencies
93"""
94    raise
95
96# We ensure the python version early because python-config
97# requires python 2.5
98try:
99    EnsurePythonVersion(2, 5)
100except SystemExit, e:
101    print """
102You can use a non-default installation of the Python interpreter by
103rearranging your PATH so that scons finds the non-default 'python' and
104'python-config' first.
105
106For more details, see:
107    http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
108"""
109    raise
110
111# Global Python includes
112import itertools
113import os
114import re
115import shutil
116import subprocess
117import sys
118
119from os import mkdir, environ
120from os.path import abspath, basename, dirname, expanduser, normpath
121from os.path import exists,  isdir, isfile
122from os.path import join as joinpath, split as splitpath
123
124# SCons includes
125import SCons
126import SCons.Node
127
128extra_python_paths = [
129    Dir('src/python').srcnode().abspath, # gem5 includes
130    Dir('ext/ply').srcnode().abspath, # ply is used by several files
131    ]
132
133sys.path[1:1] = extra_python_paths
134
135from m5.util import compareVersions, readCommand
136from m5.util.terminal import get_termcap
137
138help_texts = {
139    "options" : "",
140    "global_vars" : "",
141    "local_vars" : ""
142}
143
144Export("help_texts")
145
146
147# There's a bug in scons in that (1) by default, the help texts from
148# AddOption() are supposed to be displayed when you type 'scons -h'
149# and (2) you can override the help displayed by 'scons -h' using the
150# Help() function, but these two features are incompatible: once
151# you've overridden the help text using Help(), there's no way to get
152# at the help texts from AddOptions.  See:
153#     http://scons.tigris.org/issues/show_bug.cgi?id=2356
154#     http://scons.tigris.org/issues/show_bug.cgi?id=2611
155# This hack lets us extract the help text from AddOptions and
156# re-inject it via Help().  Ideally someday this bug will be fixed and
157# we can just use AddOption directly.
158def AddLocalOption(*args, **kwargs):
159    col_width = 30
160
161    help = "  " + ", ".join(args)
162    if "help" in kwargs:
163        length = len(help)
164        if length >= col_width:
165            help += "\n" + " " * col_width
166        else:
167            help += " " * (col_width - length)
168        help += kwargs["help"]
169    help_texts["options"] += help + "\n"
170
171    AddOption(*args, **kwargs)
172
173AddLocalOption('--colors', dest='use_colors', action='store_true',
174               help="Add color to abbreviated scons output")
175AddLocalOption('--no-colors', dest='use_colors', action='store_false',
176               help="Don't add color to abbreviated scons output")
177AddLocalOption('--with-cxx-config', dest='with_cxx_config',
178               action='store_true',
179               help="Build with support for C++-based configuration")
180AddLocalOption('--default', dest='default', type='string', action='store',
181               help='Override which build_opts file to use for defaults')
182AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
183               help='Disable style checking hooks')
184AddLocalOption('--no-lto', dest='no_lto', action='store_true',
184               help='Disable Link-Time Optimization for the fast target')
186AddLocalOption('--update-ref', dest='update_ref', action='store_true',
187               help='Update test reference outputs')
188AddLocalOption('--verbose', dest='verbose', action='store_true',
189               help='Print full tool command lines')
190AddLocalOption('--without-python', dest='without_python',
191               action='store_true',
192               help='Build without Python configuration support')
193AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
194               action='store_true',
195               help='Disable linking against tcmalloc')
196AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
197               help='Build with Undefined Behavior Sanitizer if available')
198AddLocalOption('--with-asan', dest='with_asan', action='store_true',
199               help='Build with Address Sanitizer if available')
200
201termcap = get_termcap(GetOption('use_colors'))
202
203########################################################################
204#
205# Set up the main build environment.
206#
207########################################################################
208
209# export TERM so that clang reports errors in color
210use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
211                 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
212                 'PYTHONPATH', 'RANLIB', 'SWIG', 'TERM' ])
213
214use_prefixes = [
215    "ASAN_",           # address sanitizer symbolizer path and settings
216    "CCACHE_",         # ccache (caching compiler wrapper) configuration
217    "CCC_",            # clang static analyzer configuration
218    "DISTCC_",         # distcc (distributed compiler wrapper) configuration
219    "INCLUDE_SERVER_", # distcc pump server settings
220    "M5",              # M5 configuration (e.g., path to kernels)
221    ]
222
223use_env = {}
224for key,val in sorted(os.environ.iteritems()):
225    if key in use_vars or \
226            any([key.startswith(prefix) for prefix in use_prefixes]):
227        use_env[key] = val
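# For illustration (variable names are only examples): exporting CCACHE_DIR
# or M5_PATH in the shell before running scons forwards those values into
# the build environment, since they match the "CCACHE_" and "M5" prefixes
# above; unrelated host variables are dropped.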
228
229# Tell scons to avoid implicit command dependencies to prevent issues
230# with the param wrappers being compiled twice (see
231# http://scons.tigris.org/issues/show_bug.cgi?id=2811)
232main = Environment(ENV=use_env, IMPLICIT_COMMAND_DEPENDENCIES=0)
233main.Decider('MD5-timestamp')
234main.root = Dir(".")         # The current directory (where this file lives).
235main.srcdir = Dir("src")     # The source directory
236
237main_dict_keys = main.Dictionary().keys()
238
239# Check that we have a C/C++ compiler
240if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
241    print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
242    Exit(1)
243
244# Check that swig is present
245if not 'SWIG' in main_dict_keys:
246    print "swig is not installed (package swig on Ubuntu and RedHat)"
247    Exit(1)
248
249# Add the useful python code directories to PYTHONPATH so they can be
250# used by subprocesses as well
251main.AppendENVPath('PYTHONPATH', extra_python_paths)
252
253########################################################################
254#
255# Mercurial Stuff.
256#
257# If the gem5 directory is a mercurial repository, we should do some
258# extra things.
259#
260########################################################################
261
262hgdir = main.root.Dir(".hg")
263
264
265style_message = """
266You're missing the gem5 style hook, which automatically checks your code
267against the gem5 style rules on %s.
268This script will now install the hook in your %s.
269Press enter to continue, or ctrl-c to abort: """
270
271mercurial_style_message = """
272You're missing the gem5 style hook, which automatically checks your code
273against the gem5 style rules on hg commit and qrefresh commands.
274This script will now install the hook in your .hg/hgrc file.
275Press enter to continue, or ctrl-c to abort: """
276
277git_style_message = """
278You're missing the gem5 style or commit message hook. These hooks help
279to ensure that your code follows gem5's style rules on git commit.
280This script will now install the hook in your .git/hooks/ directory.
281Press enter to continue, or ctrl-c to abort: """
282
283mercurial_style_upgrade_message = """
284Your Mercurial style hooks are not up-to-date. This script will now
285try to automatically update them. A backup of your hgrc will be saved
286in .hg/hgrc.old.
287Press enter to continue, or ctrl-c to abort: """
288
289mercurial_style_hook = """
290# The following lines were automatically added by gem5/SConstruct
291# to provide the gem5 style-checking hooks
292[extensions]
293hgstyle = %s/util/hgstyle.py
294
295[hooks]
296pretxncommit.style = python:hgstyle.check_style
297pre-qrefresh.style = python:hgstyle.check_style
298# End of SConstruct additions
299
300""" % (main.root.abspath)
301
302mercurial_lib_not_found = """
303Mercurial libraries cannot be found, ignoring style hook.  If
304you are a gem5 developer, please fix this and run the style
305hook. It is important.
306"""
307
308# Check for style hook and prompt for installation if it's not there.
309# Skip this if --ignore-style was specified, there's no interactive
310# terminal to prompt, or no recognized revision control system can be
311# found.
312ignore_style = GetOption('ignore_style') or not sys.stdin.isatty()
313
314# Try to wire up Mercurial to the style hooks
315if not ignore_style and hgdir.exists():
316    style_hook = True
317    style_hooks = tuple()
318    hgrc = hgdir.File('hgrc')
319    hgrc_old = hgdir.File('hgrc.old')
320    try:
321        from mercurial import ui
322        ui = ui.ui()
323        ui.readconfig(hgrc.abspath)
324        style_hooks = (ui.config('hooks', 'pretxncommit.style', None),
325                       ui.config('hooks', 'pre-qrefresh.style', None))
326        style_hook = all(style_hooks)
327        style_extension = ui.config('extensions', 'style', None)
328    except ImportError:
329        print mercurial_lib_not_found
330
331    if "python:style.check_style" in style_hooks:
332        # Try to upgrade the style hooks
333        print mercurial_style_upgrade_message
334        # continue unless user does ctrl-c/ctrl-d etc.
335        try:
336            raw_input()
337        except:
338            print "Input exception, exiting scons.\n"
339            sys.exit(1)
340        shutil.copyfile(hgrc.abspath, hgrc_old.abspath)
341        re_style_hook = re.compile(r"^([^=#]+)\.style\s*=\s*([^#\s]+).*")
342        re_style_extension = re.compile(r"style\s*=\s*([^#\s]+).*")
343        old, new = open(hgrc_old.abspath, 'r'), open(hgrc.abspath, 'w')
344        for l in old:
345            m_hook = re_style_hook.match(l)
346            m_ext = re_style_extension.match(l)
347            if m_hook:
348                hook, check = m_hook.groups()
349                if check != "python:style.check_style":
350                    print "Warning: %s.style is using a non-default " \
351                        "checker: %s" % (hook, check)
352                if hook not in ("pretxncommit", "pre-qrefresh"):
353                    print "Warning: Updating unknown style hook: %s" % hook
354
355                l = "%s.style = python:hgstyle.check_style\n" % hook
356            elif m_ext and m_ext.group(1) == style_extension:
357                l = "hgstyle = %s/util/hgstyle.py\n" % main.root.abspath
358
359            new.write(l)
360    elif not style_hook:
361        print mercurial_style_message,
362        # continue unless user does ctrl-c/ctrl-d etc.
363        try:
364            raw_input()
365        except:
366            print "Input exception, exiting scons.\n"
367            sys.exit(1)
368        hgrc_path = '%s/.hg/hgrc' % main.root.abspath
369        print "Adding style hook to", hgrc_path, "\n"
370        try:
371            with open(hgrc_path, 'a') as f:
372                f.write(mercurial_style_hook)
373        except:
374            print "Error updating", hgrc_path
375            sys.exit(1)
376
377def install_git_style_hooks():
378    try:
379        gitdir = Dir(readCommand(
380            ["git", "rev-parse", "--git-dir"]).strip("\n"))
381    except Exception, e:
382        print "Warning: Failed to find git repo directory: %s" % e
383        return
384
385    git_hooks = gitdir.Dir("hooks")
386    def hook_exists(hook_name):
387        hook = git_hooks.File(hook_name)
388        return hook.exists()
389
390    def hook_install(hook_name, script):
391        hook = git_hooks.File(hook_name)
392        if hook.exists():
393            print "Warning: Can't install %s, hook already exists." % hook_name
394            return
395
396        if hook.islink():
397            print "Warning: Removing broken symlink for hook %s." % hook_name
398            os.unlink(hook.get_abspath())
399
400        if not git_hooks.exists():
401            mkdir(git_hooks.get_abspath())
402            git_hooks.clear()
403
404        abs_symlink_hooks = git_hooks.islink() and \
405            os.path.isabs(os.readlink(git_hooks.get_abspath()))
406
407        # Use a relative symlink if the hooks live in the source directory,
408        # and the hooks directory is not a symlink to an absolute path.
409        if hook.is_under(main.root) and not abs_symlink_hooks:
410            script_path = os.path.relpath(
411                os.path.realpath(script.get_abspath()),
412                os.path.realpath(hook.Dir(".").get_abspath()))
413        else:
414            script_path = script.get_abspath()
415
416        try:
417            os.symlink(script_path, hook.get_abspath())
418        except:
419            print "Error updating git %s hook" % hook_name
420            raise
421
422    if hook_exists("pre-commit") and hook_exists("commit-msg"):
423        return
424
425    print git_style_message,
426    try:
427        raw_input()
428    except:
429        print "Input exception, exiting scons.\n"
430        sys.exit(1)
431
432    git_style_script = File("util/git-pre-commit.py")
433    git_msg_script = File("ext/git-commit-msg")
434
435    hook_install("pre-commit", git_style_script)
436    hook_install("commit-msg", git_msg_script)
437
438# Try to wire up git to the style hooks
439if not ignore_style and main.root.Entry(".git").exists():
440    install_git_style_hooks()
441
442###################################################
443#
444# Figure out which configurations to set up based on the path(s) of
445# the target(s).
446#
447###################################################
448
449# Find default configuration & binary.
450Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))
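# For example (the target name is illustrative only), the default target
# can be overridden from the shell without editing this file:
#   % M5_DEFAULT_BINARY=build/X86/gem5.opt scons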
451
452# helper function: find last occurrence of element in list
453def rfind(l, elt, offs = -1):
454    for i in range(len(l)+offs, 0, -1):
455        if l[i] == elt:
456            return i
457    raise ValueError, "element not found"
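# Illustrative example (values are made up): for a target path split into
#   ['', 'work', 'gem5', 'build', 'X86', 'gem5.opt']
# rfind(path_dirs, 'build', -2) returns 3, the index of the last non-leaf
# 'build' component.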
458
459# Take a list of paths (or SCons Nodes) and return a list with all
460# paths made absolute and ~-expanded.  Paths will be interpreted
461# relative to the launch directory unless a different root is provided
462def makePathListAbsolute(path_list, root=GetLaunchDir()):
463    return [abspath(joinpath(root, expanduser(str(p))))
464            for p in path_list]
465
466# Each target must have 'build' in the interior of the path; the
467# directory below this will determine the build parameters.  For
468# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
469# recognize that ALPHA_SE specifies the configuration because it
470# follows 'build' in the build path.
471
472# The funky assignment to "[:]" is needed to replace the list contents
473# in place rather than reassign the symbol to a new list, which
474# doesn't work (obviously!).
475BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)
476
477# Generate a list of the unique build roots and configs that the
478# collected targets reference.
479variant_paths = []
480build_root = None
481for t in BUILD_TARGETS:
482    path_dirs = t.split('/')
483    try:
484        build_top = rfind(path_dirs, 'build', -2)
485    except:
486        print "Error: no non-leaf 'build' dir found on target path", t
487        Exit(1)
488    this_build_root = joinpath('/',*path_dirs[:build_top+1])
489    if not build_root:
490        build_root = this_build_root
491    else:
492        if this_build_root != build_root:
493            print "Error: build targets not under same build root\n"\
494                  "  %s\n  %s" % (build_root, this_build_root)
495            Exit(1)
496    variant_path = joinpath('/',*path_dirs[:build_top+2])
497    if variant_path not in variant_paths:
498        variant_paths.append(variant_path)
499
500# Make sure build_root exists (might not if this is the first build there)
501if not isdir(build_root):
502    mkdir(build_root)
503main['BUILDROOT'] = build_root
504
505Export('main')
506
507main.SConsignFile(joinpath(build_root, "sconsign"))
508
509# Default duplicate option is to use hard links, but this messes up
510# when you use emacs to edit a file in the target dir, as emacs moves
511# file to file~ then copies to file, breaking the link.  Symbolic
512# (soft) links work better.
513main.SetOption('duplicate', 'soft-copy')
514
515#
516# Set up global sticky variables... these are common to an entire build
517# tree (not specific to a particular build like ALPHA_SE)
518#
519
520global_vars_file = joinpath(build_root, 'variables.global')
521
522global_vars = Variables(global_vars_file, args=ARGUMENTS)
523
524global_vars.AddVariables(
525    ('CC', 'C compiler', environ.get('CC', main['CC'])),
526    ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
527    ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
528    ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
529    ('BATCH', 'Use batch pool for build and tests', False),
530    ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
531    ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
532    ('EXTRAS', 'Add extra directories to the compilation', '')
533    )
534
535# Update main environment with values from ARGUMENTS & global_vars_file
536global_vars.Update(main)
537help_texts["global_vars"] += global_vars.GenerateHelpText(main)
538
539# Save sticky variable settings back to current variables file
540global_vars.Save(global_vars_file, main)
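# For example (compiler names are illustrative), sticky global variables can
# be set on the command line and are then remembered in the variables.global
# file under the build root:
#   % scons CC=clang CXX=clang++ build/ARM/gem5.opt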
541
542# Parse EXTRAS variable to build a list of all directories where we
543# look for sources etc.  This list is exported as extras_dir_list.
544base_dir = main.srcdir.abspath
545if main['EXTRAS']:
546    extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
547else:
548    extras_dir_list = []
549
550Export('base_dir')
551Export('extras_dir_list')
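# Illustrative example (paths are hypothetical): extra source trees are
# given as a colon-separated list, e.g.
#   % scons EXTRAS=/path/to/myextra:/path/to/other build/ARM/gem5.opt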
552
553# the ext directory should be on the #includes path
554main.Append(CPPPATH=[Dir('ext')])
555
556def strip_build_path(path, env):
557    path = str(path)
558    variant_base = env['BUILDROOT'] + os.path.sep
559    if path.startswith(variant_base):
560        path = path[len(variant_base):]
561    elif path.startswith('build/'):
562        path = path[6:]
563    return path
564
565# Generate a string of the form:
566#   common/path/prefix/src1, src2 -> tgt1, tgt2
567# to print while building.
568class Transform(object):
569    # all specific color settings should be here and nowhere else
570    tool_color = termcap.Normal
571    pfx_color = termcap.Yellow
572    srcs_color = termcap.Yellow + termcap.Bold
573    arrow_color = termcap.Blue + termcap.Bold
574    tgts_color = termcap.Yellow + termcap.Bold
575
576    def __init__(self, tool, max_sources=99):
577        self.format = self.tool_color + (" [%8s] " % tool) \
578                      + self.pfx_color + "%s" \
579                      + self.srcs_color + "%s" \
580                      + self.arrow_color + " -> " \
581                      + self.tgts_color + "%s" \
582                      + termcap.Normal
583        self.max_sources = max_sources
584
585    def __call__(self, target, source, env, for_signature=None):
586        # truncate source list according to max_sources param
587        source = source[0:self.max_sources]
588        def strip(f):
589            return strip_build_path(str(f), env)
590        if len(source) > 0:
591            srcs = map(strip, source)
592        else:
593            srcs = ['']
594        tgts = map(strip, target)
595        # surprisingly, os.path.commonprefix is a dumb char-by-char string
596        # operation that has nothing to do with paths.
597        com_pfx = os.path.commonprefix(srcs + tgts)
598        com_pfx_len = len(com_pfx)
599        if com_pfx:
600            # do some cleanup and sanity checking on common prefix
601            if com_pfx[-1] == ".":
602                # prefix matches all but file extension: ok
603                # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
604                com_pfx = com_pfx[0:-1]
605            elif com_pfx[-1] == "/":
606                # common prefix is directory path: OK
607                pass
608            else:
609                src0_len = len(srcs[0])
610                tgt0_len = len(tgts[0])
611                if src0_len == com_pfx_len:
612                    # source is a substring of target, OK
613                    pass
614                elif tgt0_len == com_pfx_len:
615                    # target is a substring of source, need to back up to
616                    # avoid empty string on RHS of arrow
617                    sep_idx = com_pfx.rfind(".")
618                    if sep_idx != -1:
619                        com_pfx = com_pfx[0:sep_idx]
620                    else:
621                        com_pfx = ''
622                elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
623                    # still splitting at file extension: ok
624                    pass
625                else:
626                    # probably a fluke; ignore it
627                    com_pfx = ''
628        # recalculate length in case com_pfx was modified
629        com_pfx_len = len(com_pfx)
630        def fmt(files):
631            f = map(lambda s: s[com_pfx_len:], files)
632            return ', '.join(f)
633        return self.format % (com_pfx, fmt(srcs), fmt(tgts))
634
635Export('Transform')
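# For illustration (file names are made up), the abbreviated output produced
# by Transform looks roughly like:
#   [     CXX] X86/sim/main.cc -> .o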
636
637# enable the regression script to use the termcap
638main['TERMCAP'] = termcap
639
640if GetOption('verbose'):
641    def MakeAction(action, string, *args, **kwargs):
642        return Action(action, *args, **kwargs)
643else:
644    MakeAction = Action
645    main['CCCOMSTR']        = Transform("CC")
646    main['CXXCOMSTR']       = Transform("CXX")
647    main['ASCOMSTR']        = Transform("AS")
648    main['SWIGCOMSTR']      = Transform("SWIG")
649    main['ARCOMSTR']        = Transform("AR", 0)
650    main['LINKCOMSTR']      = Transform("LINK", 0)
651    main['SHLINKCOMSTR']    = Transform("SHLINK", 0)
652    main['RANLIBCOMSTR']    = Transform("RANLIB", 0)
653    main['M4COMSTR']        = Transform("M4")
654    main['SHCCCOMSTR']      = Transform("SHCC")
655    main['SHCXXCOMSTR']     = Transform("SHCXX")
656Export('MakeAction')
657
658# Initialize the Link-Time Optimization (LTO) flags
659main['LTO_CCFLAGS'] = []
660main['LTO_LDFLAGS'] = []
661
662# According to the readme, tcmalloc works best if the compiler doesn't
663# assume that we're using the builtin malloc and friends. These flags
664# are compiler-specific, so we need to set them after we detect which
665# compiler we're using.
666main['TCMALLOC_CCFLAGS'] = []
667
668CXX_version = readCommand([main['CXX'],'--version'], exception=False)
669CXX_V = readCommand([main['CXX'],'-V'], exception=False)
670
671main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
672main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
673if main['GCC'] + main['CLANG'] > 1:
674    print 'Error: How can we have two at the same time?'
675    Exit(1)
676
677# Set up default C++ compiler flags
678if main['GCC'] or main['CLANG']:
679    # As gcc and clang share many flags, do the common parts here
680    main.Append(CCFLAGS=['-pipe'])
681    main.Append(CCFLAGS=['-fno-strict-aliasing'])
682    # Enable -Wall and -Wextra and then disable the few warnings that
683    # we consistently violate
684    main.Append(CCFLAGS=['-Wall', '-Wundef', '-Wextra',
685                         '-Wno-sign-compare', '-Wno-unused-parameter'])
686    # We always compile using C++11
687    main.Append(CXXFLAGS=['-std=c++11'])
688    if sys.platform.startswith('freebsd'):
689        main.Append(CCFLAGS=['-I/usr/local/include'])
690        main.Append(CXXFLAGS=['-I/usr/local/include'])
691
692    main['FILTER_PSHLINKFLAGS'] = lambda x: str(x).replace(' -shared', '')
693    main['PSHLINKFLAGS'] = main.subst('${FILTER_PSHLINKFLAGS(SHLINKFLAGS)}')
694    main['PLINKFLAGS'] = main.subst('${LINKFLAGS}')
695    shared_partial_flags = ['-Wl,--relocatable', '-nostdlib']
696    main.Append(PSHLINKFLAGS=shared_partial_flags)
697    main.Append(PLINKFLAGS=shared_partial_flags)
698else:
699    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
700    print "Don't know what compiler options to use for your compiler."
701    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
702    print termcap.Yellow + '       version:' + termcap.Normal,
703    if not CXX_version:
704        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
705               termcap.Normal
706    else:
707        print CXX_version.replace('\n', '<nl>')
708    print "       If you're trying to use a compiler other than GCC"
709    print "       or clang, there appears to be something wrong with your"
710    print "       environment."
711    print "       "
712    print "       If you are trying to use a compiler other than those listed"
713    print "       above you will need to ease fix SConstruct and "
714    print "       src/SConscript to support that compiler."
715    Exit(1)
716
717if main['GCC']:
718    # Check for a supported version of gcc. >= 4.8 is chosen for its
719    # level of c++11 support. See
720    # http://gcc.gnu.org/projects/cxx0x.html for details.
721    gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
722    if compareVersions(gcc_version, "4.8") < 0:
723        print 'Error: gcc version 4.8 or newer required.'
724        print '       Installed version:', gcc_version
725        Exit(1)
726
727    main['GCC_VERSION'] = gcc_version
728
729    # gcc from version 4.8 and above generates "rep; ret" instructions
730    # to avoid performance penalties on certain AMD chips. Older
731    # assemblers detect this as an error, "Error: expecting string
732    # instruction after `rep'"
733    as_version_raw = readCommand([main['AS'], '-v', '/dev/null',
734                                  '-o', '/dev/null'],
735                                 exception=False).split()
736
737    # version strings may contain extra distro-specific
738    # qualifiers, so play it safe and keep only what comes before
739    # the first hyphen
740    as_version = as_version_raw[-1].split('-')[0] if as_version_raw else None
741
742    if not as_version or compareVersions(as_version, "2.23") < 0:
743        print termcap.Yellow + termcap.Bold + \
744            'Warning: This combination of gcc and binutils has' + \
745            ' known incompatibilities.\n' + \
746            '         If you encounter build problems, please update ' + \
747            'binutils to 2.23.' + \
748            termcap.Normal
749
750    # Make sure we warn if the user has requested to compile with the
751    # Undefined Behavior Sanitizer and this version of gcc does not
752    # support it.
753    if GetOption('with_ubsan') and \
754            compareVersions(gcc_version, '4.9') < 0:
755        print termcap.Yellow + termcap.Bold + \
756            'Warning: UBSan is only supported using gcc 4.9 and later.' + \
757            termcap.Normal
758
759    # Add the appropriate Link-Time Optimization (LTO) flags
760    # unless LTO is explicitly turned off. Note that these flags
761    # are only used by the fast target.
762    if not GetOption('no_lto'):
763        # Pass the LTO flag when compiling to produce GIMPLE
764        # output, we merely create the flags here and only append
765        # them later
766        main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
767
768        # Use the same amount of jobs for LTO as we are running
769        # scons with
770        main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
771
772    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
773                                  '-fno-builtin-realloc', '-fno-builtin-free'])
774
775    # add option to check for undeclared overrides
776    if compareVersions(gcc_version, "5.0") > 0:
777        main.Append(CCFLAGS=['-Wno-error=suggest-override'])
778
779elif main['CLANG']:
780    # Check for a supported version of clang, >= 3.1 is needed to
781    # support similar features as gcc 4.8. See
782    # http://clang.llvm.org/cxx_status.html for details
783    clang_version_re = re.compile(r".* version (\d+\.\d+)")
784    clang_version_match = clang_version_re.search(CXX_version)
785    if (clang_version_match):
786        clang_version = clang_version_match.groups()[0]
787        if compareVersions(clang_version, "3.1") < 0:
788            print 'Error: clang version 3.1 or newer required.'
789            print '       Installed version:', clang_version
790            Exit(1)
791    else:
792        print 'Error: Unable to determine clang version.'
793        Exit(1)
794
795    # clang has a few additional warnings that we disable: extraneous
796    # parentheses are allowed due to Ruby's printing of the AST, and
797    # self assignments are allowed as the generated CPU code
798    # relies on this
799    main.Append(CCFLAGS=['-Wno-parentheses',
800                         '-Wno-self-assign',
801                         # Some versions of libstdc++ (4.8?) seem to
802                         # use struct hash and class hash
803                         # interchangeably.
804                         '-Wno-mismatched-tags',
805                         ])
806
807    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])
808
809    # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
810    # opposed to libstdc++, as the latter is dated.
811    if sys.platform == "darwin":
812        main.Append(CXXFLAGS=['-stdlib=libc++'])
813        main.Append(LIBS=['c++'])
814
815    # On FreeBSD we need libthr.
816    if sys.platform.startswith('freebsd'):
817        main.Append(LIBS=['thr'])
818
819else:
820    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
821    print "Don't know what compiler options to use for your compiler."
822    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
823    print termcap.Yellow + '       version:' + termcap.Normal,
824    if not CXX_version:
825        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
826               termcap.Normal
827    else:
828        print CXX_version.replace('\n', '<nl>')
829    print "       If you're trying to use a compiler other than GCC"
830    print "       or clang, there appears to be something wrong with your"
831    print "       environment."
832    print "       "
833    print "       If you are trying to use a compiler other than those listed"
834    print "       above you will need to ease fix SConstruct and "
835    print "       src/SConscript to support that compiler."
836    Exit(1)
837
838# Set up common yacc/bison flags (needed for Ruby)
839main['YACCFLAGS'] = '-d'
840main['YACCHXXFILESUFFIX'] = '.hh'
841
842# Do this after we save the settings back, or else we'll tack on an
843# extra 'qdo' every time we run scons.
844if main['BATCH']:
845    main['CC']     = main['BATCH_CMD'] + ' ' + main['CC']
846    main['CXX']    = main['BATCH_CMD'] + ' ' + main['CXX']
847    main['AS']     = main['BATCH_CMD'] + ' ' + main['AS']
848    main['AR']     = main['BATCH_CMD'] + ' ' + main['AR']
849    main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']
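# Illustrative example (command name and target are examples only): building
# through a batch pool can be requested with the sticky variables above:
#   % scons BATCH=1 BATCH_CMD=qdo build/ALPHA/gem5.opt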
850
851if sys.platform == 'cygwin':
852    # cygwin has some header file issues...
853    main.Append(CCFLAGS=["-Wno-uninitialized"])
854
855# Check for the protobuf compiler
856protoc_version = readCommand([main['PROTOC'], '--version'],
857                             exception='').split()
858
859# First two words should be "libprotoc x.y.z"
860if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
861    print termcap.Yellow + termcap.Bold + \
862        'Warning: Protocol buffer compiler (protoc) not found.\n' + \
863        '         Please install protobuf-compiler for tracing support.' + \
864        termcap.Normal
865    main['PROTOC'] = False
866else:
867    # Based on the availability of the compress stream wrappers,
868    # require 2.1.0
869    min_protoc_version = '2.1.0'
870    if compareVersions(protoc_version[1], min_protoc_version) < 0:
871        print termcap.Yellow + termcap.Bold + \
872            'Warning: protoc version', min_protoc_version, \
873            'or newer required.\n' + \
874            '         Installed version:', protoc_version[1], \
875            termcap.Normal
876        main['PROTOC'] = False
877    else:
878        # Attempt to determine the appropriate include path and
879        # library path using pkg-config, which means we also need to
880        # check for pkg-config. Note that it is possible to use
881        # protobuf without the involvement of pkg-config. Later on we
882        # do a library configuration check, and at that point the test
883        # will fail if libprotobuf cannot be found.
884        if readCommand(['pkg-config', '--version'], exception=''):
885            try:
886                # Attempt to establish what linking flags to add for protobuf
887                # using pkg-config
888                main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
889            except:
890                print termcap.Yellow + termcap.Bold + \
891                    'Warning: pkg-config could not get protobuf flags.' + \
892                    termcap.Normal
893
894# Check for SWIG
895if not main.has_key('SWIG'):
896    print 'Error: SWIG utility not found.'
897    print '       Please install (see http://www.swig.org) and retry.'
898    Exit(1)
899
900# Check for appropriate SWIG version
901swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
902# First 3 words should be "SWIG Version x.y.z"
903if len(swig_version) < 3 or \
904        swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
905    print 'Error determining SWIG version.'
906    Exit(1)
907
908min_swig_version = '2.0.4'
909if compareVersions(swig_version[2], min_swig_version) < 0:
910    print 'Error: SWIG version', min_swig_version, 'or newer required.'
911    print '       Installed version:', swig_version[2]
912    Exit(1)
913
914# Check for known incompatibilities. The standard library shipped with
915# gcc >= 4.9 does not play well with swig versions prior to 3.0
916if main['GCC'] and compareVersions(gcc_version, '4.9') >= 0 and \
917        compareVersions(swig_version[2], '3.0') < 0:
918    print termcap.Yellow + termcap.Bold + \
919        'Warning: This combination of gcc and swig has' + \
920        ' known incompatibilities.\n' + \
921        '         If you encounter build problems, please update ' + \
922        'swig to 3.0 or later.' + \
923        termcap.Normal
924
925# Set up SWIG flags & scanner
926swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
927main.Append(SWIGFLAGS=swig_flags)
928
929# Check for 'timeout' from GNU coreutils. If present, regressions will
930# be run with a time limit. We require version 8.13 since we rely on
931# support for the '--foreground' option.
932if sys.platform.startswith('freebsd'):
933    timeout_lines = readCommand(['gtimeout', '--version'],
934                                exception='').splitlines()
935else:
936    timeout_lines = readCommand(['timeout', '--version'],
937                                exception='').splitlines()
938# Get the first line and tokenize it
939timeout_version = timeout_lines[0].split() if timeout_lines else []
940main['TIMEOUT'] =  timeout_version and \
941    compareVersions(timeout_version[-1], '8.13') >= 0
942
943# Filter out all existing swig scanners; they mess up the dependency
944# tracking for some reason
945scanners = []
946for scanner in main['SCANNERS']:
947    skeys = scanner.skeys
948    if skeys == '.i':
949        continue
950
951    if isinstance(skeys, (list, tuple)) and '.i' in skeys:
952        continue
953
954    scanners.append(scanner)
955
956# add the new swig scanner that we like better
957from SCons.Scanner import ClassicCPP as CPPScanner
958swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
959scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))
960
961# replace the scanners list that has what we want
962main['SCANNERS'] = scanners
963
964# Add a custom Check function to test for structure members.
965def CheckMember(context, include, decl, member, include_quotes="<>"):
966    context.Message("Checking for member %s in %s..." %
967                    (member, decl))
968    text = """
969#include %(header)s
970int main(){
971  %(decl)s test;
972  (void)test.%(member)s;
973  return 0;
974};
975""" % { "header" : include_quotes[0] + include + include_quotes[1],
976        "decl" : decl,
977        "member" : member,
978        }
979
980    ret = context.TryCompile(text, extension=".cc")
981    context.Result(ret)
982    return ret
983
984# Platform-specific configuration.  Note again that we assume that all
985# builds under a given build root run on the same host platform.
986conf = Configure(main,
987                 conf_dir = joinpath(build_root, '.scons_config'),
988                 log_file = joinpath(build_root, 'scons_config.log'),
989                 custom_tests = {
990        'CheckMember' : CheckMember,
991        })
992
993# Check if we should compile a 64 bit binary on Mac OS X/Darwin
994try:
995    import platform
996    uname = platform.uname()
997    if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
998        if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
999            main.Append(CCFLAGS=['-arch', 'x86_64'])
1000            main.Append(CFLAGS=['-arch', 'x86_64'])
1001            main.Append(LINKFLAGS=['-arch', 'x86_64'])
1002            main.Append(ASFLAGS=['-arch', 'x86_64'])
1003except:
1004    pass
1005
1006# Recent versions of scons substitute a "Null" object for Configure()
1007# when configuration isn't necessary, e.g., if the "--help" option is
1008# present.  Unfortunately this Null object always returns false,
1009# breaking all our configuration checks.  We replace it with our own
1010# more optimistic null object that returns True instead.
1011if not conf:
1012    def NullCheck(*args, **kwargs):
1013        return True
1014
1015    class NullConf:
1016        def __init__(self, env):
1017            self.env = env
1018        def Finish(self):
1019            return self.env
1020        def __getattr__(self, mname):
1021            return NullCheck
1022
1023    conf = NullConf(main)
1024
1025# Cache build files in the supplied directory.
1026if main['M5_BUILD_CACHE']:
1027    print 'Using build cache located at', main['M5_BUILD_CACHE']
1028    CacheDir(main['M5_BUILD_CACHE'])
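# For example (path is hypothetical), a shared object cache can be enabled
# by setting the sticky M5_BUILD_CACHE variable:
#   % scons M5_BUILD_CACHE=/tmp/gem5-cache build/ALPHA/gem5.opt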
1029
1030main['USE_PYTHON'] = not GetOption('without_python')
1031if main['USE_PYTHON']:
1032    # Find Python include and library directories for embedding the
1033    # interpreter. We rely on python-config to resolve the appropriate
1034    # includes and linker flags. ParseConfig does not seem to understand
1035    # the more exotic linker flags such as -Xlinker and -export-dynamic so
1036    # we add them explicitly below. If you want to link in an alternate
1037    # version of python, see above for instructions on how to invoke
1038    # scons with the appropriate PATH set.
1039    #
1040    # First we check if python2-config exists, else we use python-config
1041    python_config = readCommand(['which', 'python2-config'],
1042                                exception='').strip()
1043    if not os.path.exists(python_config):
1044        python_config = readCommand(['which', 'python-config'],
1045                                    exception='').strip()
1046    py_includes = readCommand([python_config, '--includes'],
1047                              exception='').split()
1048    # Strip the -I from the include folders before adding them to the
1049    # CPPPATH
1050    main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))
1051
1052    # Read the linker flags and split them into libraries and other link
1053    # flags. The libraries are added later through the call the CheckLib.
1054    py_ld_flags = readCommand([python_config, '--ldflags'],
1055        exception='').split()
1056    py_libs = []
1057    for lib in py_ld_flags:
1058        if not lib.startswith('-l'):
1059            main.Append(LINKFLAGS=[lib])
1060        else:
1061            lib = lib[2:]
1062            if lib not in py_libs:
1063                py_libs.append(lib)
1064
1065    # verify that this stuff works
1066    if not conf.CheckHeader('Python.h', '<>'):
1067        print "Error: can't find Python.h header in", py_includes
1068        print "Install Python headers (package python-dev on Ubuntu and RedHat)"
1069        Exit(1)
1070
1071    for lib in py_libs:
1072        if not conf.CheckLib(lib):
1073            print "Error: can't find library %s required by python" % lib
1074            Exit(1)
1075
1076# On Solaris you need to use libsocket for socket ops
1077if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
1078    if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
1079        print "Can't find library with socket calls (e.g. accept())"
1080        Exit(1)
1081
1082# Check for zlib.  If the check passes, libz will be automatically
1083# added to the LIBS environment variable.
1084if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
1085    print 'Error: did not find needed zlib compression library '\
1086          'and/or zlib.h header file.'
1087    print '       Please install zlib and try again.'
1088    Exit(1)
1089
1090# If we have the protobuf compiler, also make sure we have the
1091# development libraries. If the check passes, libprotobuf will be
1092# automatically added to the LIBS environment variable. After
1093# this, we can use the HAVE_PROTOBUF flag to determine if we have
1094# got both protoc and libprotobuf available.
1095main['HAVE_PROTOBUF'] = main['PROTOC'] and \
1096    conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
1097                            'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')
1098
1099# If we have the compiler but not the library, print another warning.
1100if main['PROTOC'] and not main['HAVE_PROTOBUF']:
1101    print termcap.Yellow + termcap.Bold + \
1102        'Warning: did not find protocol buffer library and/or headers.\n' + \
1103        '       Please install libprotobuf-dev for tracing support.' + \
1104        termcap.Normal
1105
1106# Check for librt.
1107have_posix_clock = \
1108    conf.CheckLibWithHeader(None, 'time.h', 'C',
1109                            'clock_nanosleep(0,0,NULL,NULL);') or \
1110    conf.CheckLibWithHeader('rt', 'time.h', 'C',
1111                            'clock_nanosleep(0,0,NULL,NULL);')
1112
1113have_posix_timers = \
1114    conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
1115                            'timer_create(CLOCK_MONOTONIC, NULL, NULL);')
1116
1117if not GetOption('without_tcmalloc'):
1118    if conf.CheckLib('tcmalloc'):
1119        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1120    elif conf.CheckLib('tcmalloc_minimal'):
1121        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1122    else:
1123        print termcap.Yellow + termcap.Bold + \
1124              "You can get a 12% performance improvement by "\
1125              "installing tcmalloc (libgoogle-perftools-dev package "\
1126              "on Ubuntu or RedHat)." + termcap.Normal
1127
1128
1129# Detect back trace implementations. The last implementation in the
1130# list will be used by default.
1131backtrace_impls = [ "none" ]
1132
1133if conf.CheckLibWithHeader(None, 'execinfo.h', 'C',
1134                           'backtrace_symbols_fd((void*)0, 0, 0);'):
1135    backtrace_impls.append("glibc")
1136elif conf.CheckLibWithHeader('execinfo', 'execinfo.h', 'C',
1137                           'backtrace_symbols_fd((void*)0, 0, 0);'):
1138    # NetBSD and FreeBSD need libexecinfo.
1139    backtrace_impls.append("glibc")
1140    main.Append(LIBS=['execinfo'])
1141
1142if backtrace_impls[-1] == "none":
1143    default_backtrace_impl = "none"
1144    print termcap.Yellow + termcap.Bold + \
1145        "No suitable back trace implementation found." + \
1146        termcap.Normal
1147
1148if not have_posix_clock:
1149    print "Can't find library for POSIX clocks."
1150
1151# Check for <fenv.h> (C99 FP environment control)
1152have_fenv = conf.CheckHeader('fenv.h', '<>')
1153if not have_fenv:
1154    print "Warning: Header file <fenv.h> not found."
1155    print "         This host has no IEEE FP rounding mode control."
1156
1157# Check if we should enable KVM-based hardware virtualization. The API
1158# we rely on has existed since version 2.6.36 of the kernel, but somehow
1159# the KVM_API_VERSION does not reflect the change. We test for one of
1160# the types as a fallback.
1161have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
1162if not have_kvm:
1163    print "Info: Compatible header file <linux/kvm.h> not found, " \
1164        "disabling KVM support."
1165
1166# x86 needs support for xsave. We test for the structure here since we
1167# won't be able to run new tests by the time we know which ISA we're
1168# targeting.
1169have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
1170                                    '#include <linux/kvm.h>') != 0
1171
1172# Check if the requested target ISA is compatible with the host
1173def is_isa_kvm_compatible(isa):
1174    try:
1175        import platform
1176        host_isa = platform.machine()
1177    except:
1178        print "Warning: Failed to determine host ISA."
1179        return False
1180
1181    if not have_posix_timers:
1182        print "Warning: Can not enable KVM, host seems to lack support " \
1183            "for POSIX timers"
1184        return False
1185
1186    if isa == "arm":
1187        return host_isa in ( "armv7l", "aarch64" )
1188    elif isa == "x86":
1189        if host_isa != "x86_64":
1190            return False
1191
1192        if not have_kvm_xsave:
1193            print "KVM on x86 requires xsave support in kernel headers."
1194            return False
1195
1196        return True
1197    else:
1198        return False
1199
1200
1201# Check if the exclude_host attribute is available. We want this to
1202# get accurate instruction counts in KVM.
1203main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
1204    'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')
1205
1206
1207######################################################################
1208#
1209# Finish the configuration
1210#
1211main = conf.Finish()
1212
1213######################################################################
1214#
1215# Collect all non-global variables
1216#
1217
1218# Define the universe of supported ISAs
1219all_isa_list = [ ]
1220all_gpu_isa_list = [ ]
1221Export('all_isa_list')
1222Export('all_gpu_isa_list')
1223
1224class CpuModel(object):
1225    '''The CpuModel class encapsulates everything the ISA parser needs to
1226    know about a particular CPU model.'''
1227
1228    # Dict of available CPU model objects.  Accessible as CpuModel.dict.
1229    dict = {}
1230
1231    # Constructor.  Automatically adds models to CpuModel.dict.
1232    def __init__(self, name, default=False):
1233        self.name = name           # name of model
1234
1235        # This cpu is enabled by default
1236        self.default = default
1237
1238        # Add self to dict
1239        if name in CpuModel.dict:
1240            raise AttributeError, "CpuModel '%s' already registered" % name
1241        CpuModel.dict[name] = self
1242
1243Export('CpuModel')
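# Illustrative example (the model name is hypothetical): a SConscript or
# SConsopts file registers a CPU model for the ISA parser with e.g.
#   CpuModel('MyCPU', default=True)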
1244
1245# Sticky variables get saved in the variables file so they persist from
1246# one invocation to the next (unless overridden, in which case the new
1247# value becomes sticky).
1248sticky_vars = Variables(args=ARGUMENTS)
1249Export('sticky_vars')
1250
1251# Sticky variables that should be exported
1252export_vars = []
1253Export('export_vars')
1254
1255# For Ruby
1256all_protocols = []
1257Export('all_protocols')
1258protocol_dirs = []
1259Export('protocol_dirs')
1260slicc_includes = []
1261Export('slicc_includes')
1262
1263# Walk the tree and execute all SConsopts scripts that will add to the
1264# above variables
1265if GetOption('verbose'):
1266    print "Reading SConsopts"
1267for bdir in [ base_dir ] + extras_dir_list:
1268    if not isdir(bdir):
1269        print "Error: directory '%s' does not exist" % bdir
1270        Exit(1)
1271    for root, dirs, files in os.walk(bdir):
1272        if 'SConsopts' in files:
1273            if GetOption('verbose'):
1274                print "Reading", joinpath(root, 'SConsopts')
1275            SConscript(joinpath(root, 'SConsopts'))
1276
1277all_isa_list.sort()
1278all_gpu_isa_list.sort()
1279
1280sticky_vars.AddVariables(
1281    EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
1282    EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list),
1283    ListVariable('CPU_MODELS', 'CPU models',
1284                 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
1285                 sorted(CpuModel.dict.keys())),
1286    BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
1287                 False),
1288    BoolVariable('SS_COMPATIBLE_FP',
1289                 'Make floating-point results compatible with SimpleScalar',
1290                 False),
1291    BoolVariable('USE_SSE2',
1292                 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
1293                 False),
1294    BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
1295    BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
1296    BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability', False),
1297    BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models', have_kvm),
1298    BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
1299    EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
1300                  all_protocols),
1301    EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
1302                 backtrace_impls[-1], backtrace_impls)
1303    )
1304
1305# These variables get exported to #defines in config/*.hh (see src/SConscript).
1306export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
1307                'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'PROTOCOL',
1308                'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST']
1309
1310###################################################
1311#
1312# Define a SCons builder for configuration flag headers.
1313#
1314###################################################
1315
1316# This function generates a config header file that #defines the
1317# variable symbol to the current variable setting (0 or 1).  The source
1318# operands are the name of the variable and a Value node containing the
1319# value of the variable.
1320def build_config_file(target, source, env):
1321    (variable, value) = [s.get_contents() for s in source]
1322    f = file(str(target[0]), 'w')
1323    print >> f, '#define', variable, value
1324    f.close()
1325    return None
1326
1327# Combine the function above with a Transform into a scons Action object.
1328config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))
1329
1330# The emitter munges the source & target node lists to reflect what
1331# we're really doing.
1332def config_emitter(target, source, env):
1333    # extract variable name from Builder arg
1334    variable = str(target[0])
1335    # True target is config header file
1336    target = joinpath('config', variable.lower() + '.hh')
1337    val = env[variable]
1338    if isinstance(val, bool):
1339        # Force value to 0/1
1340        val = int(val)
1341    elif isinstance(val, str):
1342        val = '"' + val + '"'
1343
1344    # Sources are variable name & value (packaged in SCons Value nodes)
1345    return ([target], [Value(variable), Value(val)])
1346
1347config_builder = Builder(emitter = config_emitter, action = config_action)
1348
1349main.Append(BUILDERS = { 'ConfigFile' : config_builder })
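
# A minimal usage sketch; the actual invocations live in src/SConscript,
# and the variable name here is just an example:
#
#   env.ConfigFile('USE_KVM')   # writes config/use_kvm.hh containing
#                               # "#define USE_KVM <0 or 1>"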

###################################################
#
# Builders for static and shared partially linked object files.
#
###################################################

partial_static_builder = Builder(action=SCons.Defaults.LinkAction,
                                 src_suffix='$OBJSUFFIX',
                                 src_builder=['StaticObject', 'Object'],
                                 LINKFLAGS='$PLINKFLAGS',
                                 LIBS='')

def partial_shared_emitter(target, source, env):
    for tgt in target:
        tgt.attributes.shared = 1
    return (target, source)

partial_shared_builder = Builder(action=SCons.Defaults.ShLinkAction,
                                 emitter=partial_shared_emitter,
                                 src_suffix='$SHOBJSUFFIX',
                                 src_builder='SharedObject',
                                 SHLINKFLAGS='$PSHLINKFLAGS',
                                 LIBS='')

main.Append(BUILDERS = { 'PartialShared' : partial_shared_builder,
                         'PartialStatic' : partial_static_builder })
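
# A minimal usage sketch with hypothetical target and source names; the
# real call sites are in the SConscripts that assemble gem5's libraries:
#
#   objs = env.Object(cpp_sources)                       # regular objects
#   env.PartialStatic(target='partial.o',  source=objs)
#   env.PartialShared(target='partial.os',
#                     source=env.SharedObject(cpp_sources))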

# Builds in ext are shared across all configs in the build root.
ext_dir = abspath(joinpath(str(main.root), 'ext'))
for root, dirs, files in os.walk(ext_dir):
    if 'SConscript' in files:
        build_dir = os.path.relpath(root, ext_dir)
        main.SConscript(joinpath(root, 'SConscript'),
                        variant_dir=joinpath(build_root, build_dir))

main.Prepend(CPPPATH=Dir('ext/pybind11/include/'))

###################################################
#
# These functions set up directories with switching headers
#
###################################################

main['ALL_ISA_LIST'] = all_isa_list
main['ALL_GPU_ISA_LIST'] = all_gpu_isa_list
all_isa_deps = {}
def make_switching_dir(dname, switch_headers, env):
    # Generate the header.  target[0] is the full path of the output
    # header to generate.  'source' is a dummy variable, since the
    # included ISA comes from env['TARGET_ISA'].
    def gen_switch_hdr(target, source, env):
        fname = str(target[0])
        isa = env['TARGET_ISA'].lower()
        try:
            f = open(fname, 'w')
            print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
            f.close()
        except IOError:
            print "Failed to create %s" % fname
            raise

    # Build SCons Action object. 'varlist' specifies env vars that this
    # action depends on; when env['ALL_ISA_LIST'] changes these actions
    # should get re-executed.
    switch_hdr_action = MakeAction(gen_switch_hdr,
                          Transform("GENERATE"), varlist=['ALL_ISA_LIST'])

    # Instantiate actions for each header
    for hdr in switch_headers:
        env.Command(hdr, [], switch_hdr_action)

    isa_target = Dir('.').up().name.lower().replace('_', '-')
    env['PHONY_BASE'] = '#'+isa_target
    all_isa_deps[isa_target] = None

Export('make_switching_dir')
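
# For illustration (hypothetical header name): with TARGET_ISA=arm, a
# switching header such as arch/decoder.hh generated by
# make_switching_dir consists of a single line:
#
#   #include "arch/arm/decoder.hh"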

def make_gpu_switching_dir(dname, switch_headers, env):
    # Generate the header.  target[0] is the full path of the output
    # header to generate.  'source' is a dummy variable, since the
    # included GPU ISA comes from env['TARGET_GPU_ISA'].
    def gen_switch_hdr(target, source, env):
        fname = str(target[0])

        isa = env['TARGET_GPU_ISA'].lower()

        try:
            f = open(fname, 'w')
            print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
            f.close()
        except IOError:
            print "Failed to create %s" % fname
            raise

    # Build SCons Action object. 'varlist' specifies env vars that this
    # action depends on; when env['ALL_GPU_ISA_LIST'] changes these
    # actions should get re-executed.
    switch_hdr_action = MakeAction(gen_switch_hdr,
                          Transform("GENERATE"), varlist=['ALL_GPU_ISA_LIST'])

    # Instantiate actions for each header
    for hdr in switch_headers:
        env.Command(hdr, [], switch_hdr_action)

Export('make_gpu_switching_dir')

# all-isas -> all-deps -> all-environs -> all-targets
main.Alias('#all-isas', [])
main.Alias('#all-deps', '#all-isas')

# Dummy target to ensure all environments are created before telling
# SCons what to actually make (the command line arguments).  We attach
# them to the dependence graph after the environments are complete.
ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; the closure needs it.
def environsComplete(target, source, env):
    for t in ORIG_BUILD_TARGETS:
        main.Depends('#all-targets', t)

# Each build/* switching_dir attaches its *-environs target to #all-environs.
main.Append(BUILDERS = {'CompleteEnvirons' :
                        Builder(action=MakeAction(environsComplete, None))})
main.CompleteEnvirons('#all-environs', [])

def doNothing(**ignored): pass
main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})

# The final target to which all the original targets ultimately get attached.
main.Dummy('#all-targets', '#all-environs')
BUILD_TARGETS[:] = ['#all-targets']

###################################################
#
# Define build environments for selected configurations.
#
###################################################

for variant_path in variant_paths:
    if not GetOption('silent'):
        print "Building in", variant_path

    # Make a copy of the build-root environment to use for this config.
    env = main.Clone()
    env['BUILDDIR'] = variant_path

    # variant_dir is the tail component of the build path, and is used
    # to determine the build parameters (e.g., 'ALPHA_SE')
    (build_root, variant_dir) = splitpath(variant_path)

    # Set env variables according to the build directory config.
    sticky_vars.files = []
    # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
    # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
    # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
    current_vars_file = joinpath(build_root, 'variables', variant_dir)
    if isfile(current_vars_file):
        sticky_vars.files.append(current_vars_file)
        if not GetOption('silent'):
            print "Using saved variables file %s" % current_vars_file
    else:
        # Build dir-specific variables file doesn't exist.

        # Make sure the directory is there so we can create the file later
        opt_dir = dirname(current_vars_file)
        if not isdir(opt_dir):
            mkdir(opt_dir)

        # Get default build variables from source tree.  Variables are
        # normally determined by name of $VARIANT_DIR, but can be
        # overridden by '--default=' arg on command line.
        default = GetOption('default')
        opts_dir = joinpath(main.root.abspath, 'build_opts')
        if default:
            default_vars_files = [joinpath(build_root, 'variables', default),
                                  joinpath(opts_dir, default)]
        else:
            default_vars_files = [joinpath(opts_dir, variant_dir)]
        existing_files = filter(isfile, default_vars_files)
        if existing_files:
            default_vars_file = existing_files[0]
            sticky_vars.files.append(default_vars_file)
            print "Variables file %s not found,\n  using defaults in %s" \
                  % (current_vars_file, default_vars_file)
        else:
            print "Error: cannot find variables file %s or " \
                  "default file(s) %s" \
                  % (current_vars_file, ' or '.join(default_vars_files))
            Exit(1)
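
    # For illustration (hypothetical variant): when build/X86/gem5.opt is
    # built for the first time, variant_dir is 'X86' and the saved file
    # build/variables/X86 does not exist yet, so defaults are read from
    # build_opts/X86 in the source tree; the resulting settings are then
    # written back to build/variables/X86 by sticky_vars.Save() below.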

    # Apply current variable settings to env
    sticky_vars.Update(env)

    help_texts["local_vars"] += \
        "Build variables for %s:\n" % variant_dir \
        + sticky_vars.GenerateHelpText(env)

    # Process variable settings.

    if not have_fenv and env['USE_FENV']:
        print "Warning: <fenv.h> not available; " \
              "forcing USE_FENV to False in", variant_dir + "."
        env['USE_FENV'] = False

    if not env['USE_FENV']:
        print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
        print "         FP results may deviate slightly from other platforms."

    if env['EFENCE']:
        env.Append(LIBS=['efence'])

    if env['USE_KVM']:
        if not have_kvm:
            print "Warning: Cannot enable KVM; host seems to lack KVM support"
            env['USE_KVM'] = False
        elif not is_isa_kvm_compatible(env['TARGET_ISA']):
            print "Info: KVM support disabled due to unsupported host and " \
                "target ISA combination"
            env['USE_KVM'] = False

    if env['BUILD_GPU']:
        env.Append(CPPDEFINES=['BUILD_GPU'])

    # Warn about missing optional functionality
    if env['USE_KVM']:
        if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
            print "Warning: perf_event headers lack support for the " \
                "exclude_host attribute. KVM instruction counts will " \
                "be inaccurate."

    # Save sticky variable settings back to current variables file
    sticky_vars.Save(current_vars_file, env)

    if env['USE_SSE2']:
        env.Append(CCFLAGS=['-msse2'])

    # The src/SConscript file sets up the build rules in 'env' according
    # to the configured variables.  It returns a list of environments,
    # one for each variant build (debug, opt, etc.)
    SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')

def pairwise(iterable):
    "s -> (s0,s1), (s1,s2), (s2, s3), ..."
    a, b = itertools.tee(iterable)
    b.next()
    return itertools.izip(a, b)

# Create false dependencies so SCons will parse ISAs, establish
# dependencies, and set up the build Environments serially. Either
# SCons (likely) and/or our SConscripts (possibly) cannot cope with -j
# greater than 1 during this phase. It appears to be standard race
# condition stuff; it does not always fail, but it usually does, and
# the failures differ from run to run. Every time I tried to remove
# this, builds failed in some creative new way. So, don't do that,
# even though you'll be tempted to because tests/SConscript takes a
# long time to make its Environments.
for t1, t2 in pairwise(sorted(all_isa_deps.iterkeys())):
    main.Depends('#%s-deps'     % t2, '#%s-deps'     % t1)
    main.Depends('#%s-environs' % t2, '#%s-environs' % t1)
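
# For illustration: if all_isa_deps ends up with keys such as 'arm' and
# 'x86' (hypothetical), the loop above adds
#
#   main.Depends('#x86-deps',     '#arm-deps')
#   main.Depends('#x86-environs', '#arm-environs')
#
# so the per-ISA setup steps run one after another rather than in parallel.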

# base help text
Help('''
Usage: scons [scons options] [build variables] [target(s)]

Extra scons options:
%(options)s

Global build variables:
%(global_vars)s

%(local_vars)s
''' % help_texts)
1619