SConstruct revision 11476:8c67ac296e75
1# -*- mode:python -*-
2
3# Copyright (c) 2013, 2015, 2016 ARM Limited
4# All rights reserved.
5#
6# The license below extends only to copyright in the software and shall
7# not be construed as granting a license to any other intellectual
8# property including but not limited to intellectual property relating
9# to a hardware implementation of the functionality of the software
10# licensed hereunder.  You may use the software subject to the license
11# terms below provided that you ensure that this notice is replicated
12# unmodified and in its entirety in all distributions of the software,
13# modified or unmodified, in source code or in binary form.
14#
15# Copyright (c) 2011 Advanced Micro Devices, Inc.
16# Copyright (c) 2009 The Hewlett-Packard Development Company
17# Copyright (c) 2004-2005 The Regents of The University of Michigan
18# All rights reserved.
19#
20# Redistribution and use in source and binary forms, with or without
21# modification, are permitted provided that the following conditions are
22# met: redistributions of source code must retain the above copyright
23# notice, this list of conditions and the following disclaimer;
24# redistributions in binary form must reproduce the above copyright
25# notice, this list of conditions and the following disclaimer in the
26# documentation and/or other materials provided with the distribution;
27# neither the name of the copyright holders nor the names of its
28# contributors may be used to endorse or promote products derived from
29# this software without specific prior written permission.
30#
31# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
32# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
33# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
34# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
35# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
37# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
38# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
39# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
40# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
41# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
42#
43# Authors: Steve Reinhardt
44#          Nathan Binkert
45
46###################################################
47#
48# SCons top-level build description (SConstruct) file.
49#
50# While in this directory ('gem5'), just type 'scons' to build the default
51# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
52# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
53# the optimized full-system version).
54#
55# You can build gem5 in a different directory as long as there is a
56# 'build/<CONFIG>' somewhere along the target path.  The build system
57# expects that all configs under the same build directory are being
58# built for the same host system.
59#
60# Examples:
61#
62#   The following two commands are equivalent.  The '-u' option tells
63#   scons to search up the directory tree for this SConstruct file.
64#   % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
65#   % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
66#
67#   The following two commands are equivalent and demonstrate building
68#   in a directory outside of the source tree.  The '-C' option tells
69#   scons to chdir to the specified directory to find this SConstruct
70#   file.
71#   % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
72#   % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
73#
74# You can use 'scons -H' to print scons options.  If you're in this
75# 'gem5' directory (or use -u or -C to tell scons where to find this
76# file), you can use 'scons -h' to print all the gem5-specific build
77# options as well.
78#
79###################################################
80
81# Check for recent-enough Python and SCons versions.
82try:
83    # Really old versions of scons only accept two arguments to this
84    # function, so check once without the revision and once with it;
85    # the first check will fail for versions older than 0.98, and the
86    # second will fail for 0.98.0
87    EnsureSConsVersion(0, 98)
88    EnsureSConsVersion(0, 98, 1)
89except SystemExit, e:
90    print """
91For more details, see:
92    http://gem5.org/Dependencies
93"""
94    raise
95
96# We ensure the python version early because python-config
97# requires python 2.5
98try:
99    EnsurePythonVersion(2, 5)
100except SystemExit, e:
101    print """
102You can use a non-default installation of the Python interpreter by
103rearranging your PATH so that scons finds the non-default 'python' and
104'python-config' first.
105
106For more details, see:
107    http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
108"""
109    raise
110
111# Global Python includes
112import itertools
113import os
114import re
115import shutil
116import subprocess
117import sys
118
119from os import mkdir, environ
120from os.path import abspath, basename, dirname, expanduser, normpath
121from os.path import exists,  isdir, isfile
122from os.path import join as joinpath, split as splitpath
123
124# SCons includes
125import SCons
126import SCons.Node
127
128extra_python_paths = [
129    Dir('src/python').srcnode().abspath, # gem5 includes
130    Dir('ext/ply').srcnode().abspath, # ply is used by several files
131    ]
132
133sys.path[1:1] = extra_python_paths
134
135from m5.util import compareVersions, readCommand
136from m5.util.terminal import get_termcap
137
138help_texts = {
139    "options" : "",
140    "global_vars" : "",
141    "local_vars" : ""
142}
143
144Export("help_texts")
145
146
147# There's a bug in scons in that (1) by default, the help texts from
148# AddOption() are supposed to be displayed when you type 'scons -h'
149# and (2) you can override the help displayed by 'scons -h' using the
150# Help() function, but these two features are incompatible: once
151# you've overridden the help text using Help(), there's no way to get
152# at the help texts from AddOption().  See:
153#     http://scons.tigris.org/issues/show_bug.cgi?id=2356
154#     http://scons.tigris.org/issues/show_bug.cgi?id=2611
155# This hack lets us extract the help text from AddOption() and
156# re-inject it via Help().  Ideally someday this bug will be fixed and
157# we can just use AddOption directly.
158def AddLocalOption(*args, **kwargs):
159    col_width = 30
160
161    help = "  " + ", ".join(args)
162    if "help" in kwargs:
163        length = len(help)
164        if length >= col_width:
165            help += "\n" + " " * col_width
166        else:
167            help += " " * (col_width - length)
168        help += kwargs["help"]
169    help_texts["options"] += help + "\n"
170
171    AddOption(*args, **kwargs)
172
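# Note: each AddLocalOption() call below both registers the option with
# scons and appends a formatted line to help_texts["options"], which is
# what later gets re-injected via Help() as described above.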
173AddLocalOption('--colors', dest='use_colors', action='store_true',
174               help="Add color to abbreviated scons output")
175AddLocalOption('--no-colors', dest='use_colors', action='store_false',
176               help="Don't add color to abbreviated scons output")
177AddLocalOption('--with-cxx-config', dest='with_cxx_config',
178               action='store_true',
179               help="Build with support for C++-based configuration")
180AddLocalOption('--default', dest='default', type='string', action='store',
181               help='Override which build_opts file to use for defaults')
182AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
183               help='Disable style checking hooks')
184AddLocalOption('--no-lto', dest='no_lto', action='store_true',
185               help='Disable Link-Time Optimization for fast')
186AddLocalOption('--update-ref', dest='update_ref', action='store_true',
187               help='Update test reference outputs')
188AddLocalOption('--verbose', dest='verbose', action='store_true',
189               help='Print full tool command lines')
190AddLocalOption('--without-python', dest='without_python',
191               action='store_true',
192               help='Build without Python configuration support')
193AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
194               action='store_true',
195               help='Disable linking against tcmalloc')
196AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
197               help='Build with Undefined Behavior Sanitizer if available')
198AddLocalOption('--with-asan', dest='with_asan', action='store_true',
199               help='Build with Address Sanitizer if available')
200
201termcap = get_termcap(GetOption('use_colors'))
202
203########################################################################
204#
205# Set up the main build environment.
206#
207########################################################################
208
209# export TERM so that clang reports errors in color
210use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
211                 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
212                 'PYTHONPATH', 'RANLIB', 'SWIG', 'TERM' ])
213
214use_prefixes = [
215    "ASAN_",           # address sanitizer symbolizer path and settings
216    "CCACHE_",         # ccache (caching compiler wrapper) configuration
217    "CCC_",            # clang static analyzer configuration
218    "DISTCC_",         # distcc (distributed compiler wrapper) configuration
219    "INCLUDE_SERVER_", # distcc pump server settings
220    "M5",              # M5 configuration (e.g., path to kernels)
221    ]
222
223use_env = {}
224for key,val in sorted(os.environ.iteritems()):
225    if key in use_vars or \
226            any([key.startswith(prefix) for prefix in use_prefixes]):
227        use_env[key] = val
228
229# Tell scons to avoid implicit command dependencies so that we don't
230# run into issues with the param wrappers being compiled twice (see
231# http://scons.tigris.org/issues/show_bug.cgi?id=2811)
232main = Environment(ENV=use_env, IMPLICIT_COMMAND_DEPENDENCIES=0)
233main.Decider('MD5-timestamp')
234main.root = Dir(".")         # The current directory (where this file lives).
235main.srcdir = Dir("src")     # The source directory
236
237main_dict_keys = main.Dictionary().keys()
238
239# Check that we have a C/C++ compiler
240if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
241    print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
242    Exit(1)
243
244# Check that swig is present
245if not 'SWIG' in main_dict_keys:
246    print "swig is not installed (package swig on Ubuntu and RedHat)"
247    Exit(1)
248
249# add useful python code to the PYTHONPATH so it can be used by
250# subprocesses as well
251main.AppendENVPath('PYTHONPATH', extra_python_paths)
252
253########################################################################
254#
255# Mercurial Stuff.
256#
257# If the gem5 directory is a mercurial repository, we should do some
258# extra things.
259#
260########################################################################
261
262hgdir = main.root.Dir(".hg")
263
264
265style_message = """
266You're missing the gem5 style hook, which automatically checks your code
267against the gem5 style rules on %s.
268This script will now install the hook in your %s.
269Press enter to continue, or ctrl-c to abort: """
270
271mercurial_style_message = style_message % ("hg commit and qrefresh commands",
272                                           ".hg/hgrc file")
273git_style_message = style_message % ("'git commit'",
274                                     ".git/hooks/ directory")
275
276mercurial_style_upgrade_message = """
277Your Mercurial style hooks are not up-to-date. This script will now
278try to automatically update them. A backup of your hgrc will be saved
279in .hg/hgrc.old.
280Press enter to continue, or ctrl-c to abort: """
281
282mercurial_style_hook = """
283# The following lines were automatically added by gem5/SConstruct
284# to provide the gem5 style-checking hooks
285[extensions]
286hgstyle = %s/util/hgstyle.py
287
288[hooks]
289pretxncommit.style = python:hgstyle.check_style
290pre-qrefresh.style = python:hgstyle.check_style
291# End of SConstruct additions
292
293""" % (main.root.abspath)
294
295mercurial_lib_not_found = """
296Mercurial libraries cannot be found, ignoring style hook.  If
297you are a gem5 developer, please fix this and run the style
298hook. It is important.
299"""
300
301# Check for style hook and prompt for installation if it's not there.
302# Skip this if --ignore-style was specified, there's no interactive
303# terminal to prompt, or no recognized revision control system can be
304# found.
305ignore_style = GetOption('ignore_style') or not sys.stdin.isatty()
306
307# Try to wire up Mercurial to the style hooks
308if not ignore_style and hgdir.exists():
309    style_hook = True
310    style_hooks = tuple()
311    hgrc = hgdir.File('hgrc')
312    hgrc_old = hgdir.File('hgrc.old')
313    try:
314        from mercurial import ui
315        ui = ui.ui()
316        ui.readconfig(hgrc.abspath)
317        style_hooks = (ui.config('hooks', 'pretxncommit.style', None),
318                       ui.config('hooks', 'pre-qrefresh.style', None))
319        style_hook = all(style_hooks)
320        style_extension = ui.config('extensions', 'style', None)
321    except ImportError:
322        print mercurial_lib_not_found
323
324    if "python:style.check_style" in style_hooks:
325        # Try to upgrade the style hooks
326        print mercurial_style_upgrade_message
327        # continue unless user does ctrl-c/ctrl-d etc.
328        try:
329            raw_input()
330        except:
331            print "Input exception, exiting scons.\n"
332            sys.exit(1)
333        shutil.copyfile(hgrc.abspath, hgrc_old.abspath)
334        re_style_hook = re.compile(r"^([^=#]+)\.style\s*=\s*([^#\s]+).*")
335        re_style_extension = re.compile(r"style\s*=\s*([^#\s]+).*")
336        old, new = open(hgrc_old.abspath, 'r'), open(hgrc.abspath, 'w')
337        for l in old:
338            m_hook = re_style_hook.match(l)
339            m_ext = re_style_extension.match(l)
340            if m_hook:
341                hook, check = m_hook.groups()
342                if check != "python:style.check_style":
343                    print "Warning: %s.style is using a non-default " \
344                        "checker: %s" % (hook, check)
345                if hook not in ("pretxncommit", "pre-qrefresh"):
346                    print "Warning: Updating unknown style hook: %s" % hook
347
348                l = "%s.style = python:hgstyle.check_style\n" % hook
349            elif m_ext and m_ext.group(1) == style_extension:
350                l = "hgstyle = %s/util/hgstyle.py\n" % main.root.abspath
351
352            new.write(l)
353    elif not style_hook:
354        print mercurial_style_message,
355        # continue unless user does ctrl-c/ctrl-d etc.
356        try:
357            raw_input()
358        except:
359            print "Input exception, exiting scons.\n"
360            sys.exit(1)
361        hgrc_path = '%s/.hg/hgrc' % main.root.abspath
362        print "Adding style hook to", hgrc_path, "\n"
363        try:
364            with open(hgrc_path, 'a') as f:
365                f.write(mercurial_style_hook)
366        except:
367            print "Error updating", hgrc_path
368            sys.exit(1)
369
370def install_git_style_hooks():
371    try:
372        gitdir = Dir(readCommand(
373            ["git", "rev-parse", "--git-dir"]).strip("\n"))
374    except Exception, e:
375        print "Warning: Failed to find git repo directory: %s" % e
376        return
377
378    git_hooks = gitdir.Dir("hooks")
379    git_pre_commit_hook = git_hooks.File("pre-commit")
380    git_style_script = File("util/git-pre-commit.py")
381
382    if git_pre_commit_hook.exists():
383        return
384
385    print git_style_message,
386    try:
387        raw_input()
388    except:
389        print "Input exception, exiting scons.\n"
390        sys.exit(1)
391
392    if not git_hooks.exists():
393        mkdir(git_hooks.get_abspath())
394
395    # Use a relative symlink if the hooks live in the source directory
396    if git_pre_commit_hook.is_under(main.root):
397        script_path = os.path.relpath(
398            git_style_script.get_abspath(),
399            git_pre_commit_hook.Dir(".").get_abspath())
400    else:
401        script_path = git_style_script.get_abspath()
402
403    try:
404        os.symlink(script_path, git_pre_commit_hook.get_abspath())
405    except:
406        print "Error updating git pre-commit hook"
407        raise
408
409# Try to wire up git to the style hooks
410if not ignore_style and main.root.Entry(".git").exists():
411    install_git_style_hooks()
412
413###################################################
414#
415# Figure out which configurations to set up based on the path(s) of
416# the target(s).
417#
418###################################################
419
420# Find default configuration & binary.
421Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))
422
423# helper function: find last occurrence of element in list
424def rfind(l, elt, offs = -1):
425    for i in range(len(l)+offs, -1, -1):
426        if l[i] == elt:
427            return i
428    raise ValueError, "element not found"
429
430# Take a list of paths (or SCons Nodes) and return a list with all
431# paths made absolute and ~-expanded.  Paths will be interpreted
432# relative to the launch directory unless a different root is provided
433def makePathListAbsolute(path_list, root=GetLaunchDir()):
434    return [abspath(joinpath(root, expanduser(str(p))))
435            for p in path_list]
436
437# Each target must have 'build' in the interior of the path; the
438# directory below this will determine the build parameters.  For
439# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
440# recognize that ALPHA_SE specifies the configuration because it
441# follows 'build' in the build path.
442
443# The funky assignment to "[:]" is needed to replace the list contents
444# in place rather than reassigning the symbol to a new list, which
445# doesn't work (obviously!).
446BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)
447
448# Generate a list of the unique build roots and configs that the
449# collected targets reference.
450variant_paths = []
451build_root = None
452for t in BUILD_TARGETS:
453    path_dirs = t.split('/')
454    try:
455        build_top = rfind(path_dirs, 'build', -2)
456    except:
457        print "Error: no non-leaf 'build' dir found on target path", t
458        Exit(1)
459    this_build_root = joinpath('/',*path_dirs[:build_top+1])
460    if not build_root:
461        build_root = this_build_root
462    else:
463        if this_build_root != build_root:
464            print "Error: build targets not under same build root\n"\
465                  "  %s\n  %s" % (build_root, this_build_root)
466            Exit(1)
467    variant_path = joinpath('/',*path_dirs[:build_top+2])
468    if variant_path not in variant_paths:
469        variant_paths.append(variant_path)
470
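# Illustrative example (using a path from the header comment above): the
# target '/local/foo/build/ALPHA/gem5.debug' yields a build_root of
# '/local/foo/build' and a variant_path of '/local/foo/build/ALPHA'.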
471# Make sure build_root exists (might not if this is the first build there)
472if not isdir(build_root):
473    mkdir(build_root)
474main['BUILDROOT'] = build_root
475
476Export('main')
477
478main.SConsignFile(joinpath(build_root, "sconsign"))
479
480# Default duplicate option is to use hard links, but this messes up
481# when you use emacs to edit a file in the target dir, as emacs moves
482# file to file~ then copies to file, breaking the link.  Symbolic
483# (soft) links work better.
484main.SetOption('duplicate', 'soft-copy')
485
486#
487# Set up global sticky variables... these are common to an entire build
488# tree (not specific to a particular build like ALPHA_SE)
489#
490
491global_vars_file = joinpath(build_root, 'variables.global')
492
493global_vars = Variables(global_vars_file, args=ARGUMENTS)
494
495global_vars.AddVariables(
496    ('CC', 'C compiler', environ.get('CC', main['CC'])),
497    ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
498    ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
499    ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
500    ('BATCH', 'Use batch pool for build and tests', False),
501    ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
502    ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
503    ('EXTRAS', 'Add extra directories to the compilation', '')
504    )
505
506# Update main environment with values from ARGUMENTS & global_vars_file
507global_vars.Update(main)
508help_texts["global_vars"] += global_vars.GenerateHelpText(main)
509
510# Save sticky variable settings back to current variables file
511global_vars.Save(global_vars_file, main)
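# Illustrative usage (hypothetical compiler value): running
#   scons CC=gcc-5 build/ALPHA/gem5.opt
# records CC in build/variables.global, so later invocations reuse that
# setting unless it is overridden again on the command line.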
512
513# Parse EXTRAS variable to build a list of all directories where we
514# should look for sources etc.  This list is exported as extras_dir_list.
515base_dir = main.srcdir.abspath
516if main['EXTRAS']:
517    extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
518else:
519    extras_dir_list = []
520
521Export('base_dir')
522Export('extras_dir_list')
523
524# the ext directory should be on the #include search path
525main.Append(CPPPATH=[Dir('ext')])
526
527def strip_build_path(path, env):
528    path = str(path)
529    variant_base = env['BUILDROOT'] + os.path.sep
530    if path.startswith(variant_base):
531        path = path[len(variant_base):]
532    elif path.startswith('build/'):
533        path = path[6:]
534    return path
535
536# Generate a string of the form:
537#   common/path/prefix/src1, src2 -> tgt1, tgt2
538# to print while building.
539class Transform(object):
540    # all specific color settings should be here and nowhere else
541    tool_color = termcap.Normal
542    pfx_color = termcap.Yellow
543    srcs_color = termcap.Yellow + termcap.Bold
544    arrow_color = termcap.Blue + termcap.Bold
545    tgts_color = termcap.Yellow + termcap.Bold
546
547    def __init__(self, tool, max_sources=99):
548        self.format = self.tool_color + (" [%8s] " % tool) \
549                      + self.pfx_color + "%s" \
550                      + self.srcs_color + "%s" \
551                      + self.arrow_color + " -> " \
552                      + self.tgts_color + "%s" \
553                      + termcap.Normal
554        self.max_sources = max_sources
555
556    def __call__(self, target, source, env, for_signature=None):
557        # truncate source list according to max_sources param
558        source = source[0:self.max_sources]
559        def strip(f):
560            return strip_build_path(str(f), env)
561        if len(source) > 0:
562            srcs = map(strip, source)
563        else:
564            srcs = ['']
565        tgts = map(strip, target)
566        # surprisingly, os.path.commonprefix is a dumb char-by-char string
567        # operation that has nothing to do with paths.
568        com_pfx = os.path.commonprefix(srcs + tgts)
569        com_pfx_len = len(com_pfx)
570        if com_pfx:
571            # do some cleanup and sanity checking on common prefix
572            if com_pfx[-1] == ".":
573                # prefix matches all but file extension: ok
574                # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
575                com_pfx = com_pfx[0:-1]
576            elif com_pfx[-1] == "/":
577                # common prefix is directory path: OK
578                pass
579            else:
580                src0_len = len(srcs[0])
581                tgt0_len = len(tgts[0])
582                if src0_len == com_pfx_len:
583                    # source is a substring of target, OK
584                    pass
585                elif tgt0_len == com_pfx_len:
586                    # target is a substring of source, need to back up to
587                    # avoid empty string on RHS of arrow
588                    sep_idx = com_pfx.rfind(".")
589                    if sep_idx != -1:
590                        com_pfx = com_pfx[0:sep_idx]
591                    else:
592                        com_pfx = ''
593                elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
594                    # still splitting at file extension: ok
595                    pass
596                else:
597                    # probably a fluke; ignore it
598                    com_pfx = ''
599        # recalculate length in case com_pfx was modified
600        com_pfx_len = len(com_pfx)
601        def fmt(files):
602            f = map(lambda s: s[com_pfx_len:], files)
603            return ', '.join(f)
604        return self.format % (com_pfx, fmt(srcs), fmt(tgts))
605
606Export('Transform')
607
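# Illustrative output for a single source file (hypothetical name), following
# the 'foo.cc -> .o' convention handled in __call__ above:
#   [     CXX] src/sim/foo.cc -> .o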
608# enable the regression script to use the termcap
609main['TERMCAP'] = termcap
610
611if GetOption('verbose'):
612    def MakeAction(action, string, *args, **kwargs):
613        return Action(action, *args, **kwargs)
614else:
615    MakeAction = Action
616    main['CCCOMSTR']        = Transform("CC")
617    main['CXXCOMSTR']       = Transform("CXX")
618    main['ASCOMSTR']        = Transform("AS")
619    main['SWIGCOMSTR']      = Transform("SWIG")
620    main['ARCOMSTR']        = Transform("AR", 0)
621    main['LINKCOMSTR']      = Transform("LINK", 0)
622    main['RANLIBCOMSTR']    = Transform("RANLIB", 0)
623    main['M4COMSTR']        = Transform("M4")
624    main['SHCCCOMSTR']      = Transform("SHCC")
625    main['SHCXXCOMSTR']     = Transform("SHCXX")
626Export('MakeAction')
627
628# Initialize the Link-Time Optimization (LTO) flags
629main['LTO_CCFLAGS'] = []
630main['LTO_LDFLAGS'] = []
631
632# According to the readme, tcmalloc works best if the compiler doesn't
633# assume that we're using the builtin malloc and friends. These flags
634# are compiler-specific, so we need to set them after we detect which
635# compiler we're using.
636main['TCMALLOC_CCFLAGS'] = []
637
638CXX_version = readCommand([main['CXX'],'--version'], exception=False)
639CXX_V = readCommand([main['CXX'],'-V'], exception=False)
640
641main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
642main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
643if main['GCC'] + main['CLANG'] > 1:
644    print 'Error: How can we have two at the same time?'
645    Exit(1)
646
647# Set up default C++ compiler flags
648if main['GCC'] or main['CLANG']:
649    # As gcc and clang share many flags, do the common parts here
650    main.Append(CCFLAGS=['-pipe'])
651    main.Append(CCFLAGS=['-fno-strict-aliasing'])
652    # Enable -Wall and -Wextra and then disable the few warnings that
653    # we consistently violate
654    main.Append(CCFLAGS=['-Wall', '-Wundef', '-Wextra',
655                         '-Wno-sign-compare', '-Wno-unused-parameter'])
656    # We always compile using C++11
657    main.Append(CXXFLAGS=['-std=c++11'])
658else:
659    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
660    print "Don't know what compiler options to use for your compiler."
661    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
662    print termcap.Yellow + '       version:' + termcap.Normal,
663    if not CXX_version:
664        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
665               termcap.Normal
666    else:
667        print CXX_version.replace('\n', '<nl>')
668    print "       If you're trying to use GCC or clang, there appears"
669    print "       to be something wrong with your"
670    print "       environment."
671    print "       "
672    print "       If you are trying to use a compiler other than those listed"
673    print "       above, you will need to fix SConstruct and "
674    print "       src/SConscript to support that compiler."
675    Exit(1)
676
677if main['GCC']:
678    # Check for a supported version of gcc. >= 4.7 is chosen for its
679    # level of c++11 support. See
680    # http://gcc.gnu.org/projects/cxx0x.html for details.
681    gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
682    if compareVersions(gcc_version, "4.7") < 0:
683        print 'Error: gcc version 4.7 or newer required.'
684        print '       Installed version:', gcc_version
685        Exit(1)
686
687    main['GCC_VERSION'] = gcc_version
688
689    # gcc from version 4.8 and above generates "rep; ret" instructions
690    # to avoid performance penalties on certain AMD chips. Older
691    # assemblers detect this as an error, "Error: expecting string
692    # instruction after `rep'"
693    if compareVersions(gcc_version, "4.8") > 0:
694        as_version_raw = readCommand([main['AS'], '-v', '/dev/null'],
695                                     exception=False).split()
696
697        # version strings may contain extra distro-specific
698        # qualifiers, so play it safe and keep only what comes before
699        # the first hyphen
700        as_version = as_version_raw[-1].split('-')[0] if as_version_raw \
701            else None
702
703        if not as_version or compareVersions(as_version, "2.23") < 0:
704            print termcap.Yellow + termcap.Bold + \
705                'Warning: This combination of gcc and binutils has' + \
706                ' known incompatibilities.\n' + \
707                '         If you encounter build problems, please update ' + \
708                'binutils to 2.23.' + \
709                termcap.Normal
710
711    # Make sure we warn if the user has requested to compile with the
712    # Undefined Behavior Sanitizer and this version of gcc does not
713    # support it.
714    if GetOption('with_ubsan') and \
715            compareVersions(gcc_version, '4.9') < 0:
716        print termcap.Yellow + termcap.Bold + \
717            'Warning: UBSan is only supported using gcc 4.9 and later.' + \
718            termcap.Normal
719
720    # Add the appropriate Link-Time Optimization (LTO) flags
721    # unless LTO is explicitly turned off. Note that these flags
722    # are only used by the fast target.
723    if not GetOption('no_lto'):
724        # Pass the LTO flag when compiling to produce GIMPLE
725        # output; we merely create the flags here and only append
726        # them later
727        main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
728
729        # Use the same amount of jobs for LTO as we are running
730        # scons with
731        main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
732
733    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
734                                  '-fno-builtin-realloc', '-fno-builtin-free'])
735
736elif main['CLANG']:
737    # Check for a supported version of clang, >= 3.1 is needed to
738    # support similar features as gcc 4.7. See
739    # http://clang.llvm.org/cxx_status.html for details
740    clang_version_re = re.compile(r".* version (\d+\.\d+)")
741    clang_version_match = clang_version_re.search(CXX_version)
742    if (clang_version_match):
743        clang_version = clang_version_match.groups()[0]
744        if compareVersions(clang_version, "3.1") < 0:
745            print 'Error: clang version 3.1 or newer required.'
746            print '       Installed version:', clang_version
747            Exit(1)
748    else:
749        print 'Error: Unable to determine clang version.'
750        Exit(1)
751
752    # clang has a few additional warnings that we disable: extraneous
753    # parentheses are allowed due to Ruby's printing of the AST, and
754    # self-assignments are allowed since the generated CPU code
755    # relies on this
756    main.Append(CCFLAGS=['-Wno-parentheses',
757                         '-Wno-self-assign',
758                         # Some versions of libstdc++ (4.8?) seem to
759                         # use struct hash and class hash
760                         # interchangeably.
761                         '-Wno-mismatched-tags',
762                         ])
763
764    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])
765
766    # On Mac OS X/Darwin we also need to use libc++ (part of Xcode) as
767    # opposed to libstdc++, as the latter is dated.
768    if sys.platform == "darwin":
769        main.Append(CXXFLAGS=['-stdlib=libc++'])
770        main.Append(LIBS=['c++'])
771
772else:
773    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
774    print "Don't know what compiler options to use for your compiler."
775    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
776    print termcap.Yellow + '       version:' + termcap.Normal,
777    if not CXX_version:
778        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
779               termcap.Normal
780    else:
781        print CXX_version.replace('\n', '<nl>')
782    print "       If you're trying to use GCC or clang, there appears"
783    print "       to be something wrong with your"
784    print "       environment."
785    print "       "
786    print "       If you are trying to use a compiler other than those listed"
787    print "       above, you will need to fix SConstruct and "
788    print "       src/SConscript to support that compiler."
789    Exit(1)
790
791# Set up common yacc/bison flags (needed for Ruby)
792main['YACCFLAGS'] = '-d'
793main['YACCHXXFILESUFFIX'] = '.hh'
794
795# Do this after we save the settings back, or else we'll tack on an
796# extra 'qdo' every time we run scons.
797if main['BATCH']:
798    main['CC']     = main['BATCH_CMD'] + ' ' + main['CC']
799    main['CXX']    = main['BATCH_CMD'] + ' ' + main['CXX']
800    main['AS']     = main['BATCH_CMD'] + ' ' + main['AS']
801    main['AR']     = main['BATCH_CMD'] + ' ' + main['AR']
802    main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']
803
804if sys.platform == 'cygwin':
805    # cygwin has some header file issues...
806    main.Append(CCFLAGS=["-Wno-uninitialized"])
807
808# Check for the protobuf compiler
809protoc_version = readCommand([main['PROTOC'], '--version'],
810                             exception='').split()
811
812# First two words should be "libprotoc x.y.z"
813if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
814    print termcap.Yellow + termcap.Bold + \
815        'Warning: Protocol buffer compiler (protoc) not found.\n' + \
816        '         Please install protobuf-compiler for tracing support.' + \
817        termcap.Normal
818    main['PROTOC'] = False
819else:
820    # Based on the availability of the compress stream wrappers,
821    # require 2.1.0
822    min_protoc_version = '2.1.0'
823    if compareVersions(protoc_version[1], min_protoc_version) < 0:
824        print termcap.Yellow + termcap.Bold + \
825            'Warning: protoc version', min_protoc_version, \
826            'or newer required.\n' + \
827            '         Installed version:', protoc_version[1], \
828            termcap.Normal
829        main['PROTOC'] = False
830    else:
831        # Attempt to determine the appropriate include path and
832        # library path using pkg-config, which means we also need to
833        # check for pkg-config. Note that it is possible to use
834        # protobuf without the involvement of pkg-config. Later on we
835        # do a library configuration check, and at that point the test
836        # will fail if libprotobuf cannot be found.
837        if readCommand(['pkg-config', '--version'], exception=''):
838            try:
839                # Attempt to establish what linking flags to add for protobuf
840                # using pkg-config
841                main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
842            except:
843                print termcap.Yellow + termcap.Bold + \
844                    'Warning: pkg-config could not get protobuf flags.' + \
845                    termcap.Normal
846
847# Check for SWIG
848if not main.has_key('SWIG'):
849    print 'Error: SWIG utility not found.'
850    print '       Please install (see http://www.swig.org) and retry.'
851    Exit(1)
852
853# Check for appropriate SWIG version
854swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
855# First 3 words should be "SWIG Version x.y.z"
856if len(swig_version) < 3 or \
857        swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
858    print 'Error determining SWIG version.'
859    Exit(1)
860
861min_swig_version = '2.0.4'
862if compareVersions(swig_version[2], min_swig_version) < 0:
863    print 'Error: SWIG version', min_swig_version, 'or newer required.'
864    print '       Installed version:', swig_version[2]
865    Exit(1)
866
867# Check for known incompatibilities. The standard library shipped with
868# gcc >= 4.9 does not play well with swig versions prior to 3.0
869if main['GCC'] and compareVersions(gcc_version, '4.9') >= 0 and \
870        compareVersions(swig_version[2], '3.0') < 0:
871    print termcap.Yellow + termcap.Bold + \
872        'Warning: This combination of gcc and swig has' + \
873        ' known incompatibilities.\n' + \
874        '         If you encounter build problems, please update ' + \
875        'swig to 3.0 or later.' + \
876        termcap.Normal
877
878# Set up SWIG flags & scanner
879swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
880main.Append(SWIGFLAGS=swig_flags)
881
882# Check for 'timeout' from GNU coreutils. If present, regressions will
883# be run with a time limit. We require version 8.13 since we rely on
884# support for the '--foreground' option.
885timeout_lines = readCommand(['timeout', '--version'],
886                            exception='').splitlines()
887# Get the first line and tokenize it
888timeout_version = timeout_lines[0].split() if timeout_lines else []
889main['TIMEOUT'] =  timeout_version and \
890    compareVersions(timeout_version[-1], '8.13') >= 0
891
892# filter out all existing swig scanners; they mess up the dependency
893# stuff for some reason
894scanners = []
895for scanner in main['SCANNERS']:
896    skeys = scanner.skeys
897    if skeys == '.i':
898        continue
899
900    if isinstance(skeys, (list, tuple)) and '.i' in skeys:
901        continue
902
903    scanners.append(scanner)
904
905# add the new swig scanner that we like better
906from SCons.Scanner import ClassicCPP as CPPScanner
907swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
908scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))
909
910# replace the scanners list that has what we want
911main['SCANNERS'] = scanners
912
913# Add a custom Check function to test for structure members.
914def CheckMember(context, include, decl, member, include_quotes="<>"):
915    context.Message("Checking for member %s in %s..." %
916                    (member, decl))
917    text = """
918#include %(header)s
919int main(){
920  %(decl)s test;
921  (void)test.%(member)s;
922  return 0;
923};
924""" % { "header" : include_quotes[0] + include + include_quotes[1],
925        "decl" : decl,
926        "member" : member,
927        }
928
929    ret = context.TryCompile(text, extension=".cc")
930    context.Result(ret)
931    return ret
932
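# Example of how this custom check is invoked (see the
# HAVE_PERF_ATTR_EXCLUDE_HOST test further below):
#   conf.CheckMember('linux/perf_event.h', 'struct perf_event_attr',
#                    'exclude_host')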
933# Platform-specific configuration.  Note again that we assume that all
934# builds under a given build root run on the same host platform.
935conf = Configure(main,
936                 conf_dir = joinpath(build_root, '.scons_config'),
937                 log_file = joinpath(build_root, 'scons_config.log'),
938                 custom_tests = {
939        'CheckMember' : CheckMember,
940        })
941
942# Check if we should compile a 64 bit binary on Mac OS X/Darwin
943try:
944    import platform
945    uname = platform.uname()
946    if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
947        if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
948            main.Append(CCFLAGS=['-arch', 'x86_64'])
949            main.Append(CFLAGS=['-arch', 'x86_64'])
950            main.Append(LINKFLAGS=['-arch', 'x86_64'])
951            main.Append(ASFLAGS=['-arch', 'x86_64'])
952except:
953    pass
954
955# Recent versions of scons substitute a "Null" object for Configure()
956# when configuration isn't necessary, e.g., if the "--help" option is
957# present.  Unfortunately this Null object always returns false,
958# breaking all our configuration checks.  We replace it with our own
959# more optimistic null object that returns True instead.
960if not conf:
961    def NullCheck(*args, **kwargs):
962        return True
963
964    class NullConf:
965        def __init__(self, env):
966            self.env = env
967        def Finish(self):
968            return self.env
969        def __getattr__(self, mname):
970            return NullCheck
971
972    conf = NullConf(main)
973
974# Cache build files in the supplied directory.
975if main['M5_BUILD_CACHE']:
976    print 'Using build cache located at', main['M5_BUILD_CACHE']
977    CacheDir(main['M5_BUILD_CACHE'])
978
979if not GetOption('without_python'):
980    # Find Python include and library directories for embedding the
981    # interpreter. We rely on python-config to resolve the appropriate
982    # includes and linker flags. ParseConfig does not seem to understand
983    # the more exotic linker flags such as -Xlinker and -export-dynamic so
984    # we add them explicitly below. If you want to link in an alternate
985    # version of python, see above for instructions on how to invoke
986    # scons with the appropriate PATH set.
987    #
988    # First we check if python2-config exists; if not, we use python-config
989    python_config = readCommand(['which', 'python2-config'],
990                                exception='').strip()
991    if not os.path.exists(python_config):
992        python_config = readCommand(['which', 'python-config'],
993                                    exception='').strip()
994    py_includes = readCommand([python_config, '--includes'],
995                              exception='').split()
996    # Strip the -I from the include folders before adding them to the
997    # CPPPATH
998    main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))
999
1000    # Read the linker flags and split them into libraries and other link
1001    # flags. The libraries are added later through the call to CheckLib.
1002    py_ld_flags = readCommand([python_config, '--ldflags'],
1003        exception='').split()
1004    py_libs = []
1005    for lib in py_ld_flags:
1006         if not lib.startswith('-l'):
1007             main.Append(LINKFLAGS=[lib])
1008         else:
1009             lib = lib[2:]
1010             if lib not in py_libs:
1011                 py_libs.append(lib)
1012
1013    # verify that this stuff works
1014    if not conf.CheckHeader('Python.h', '<>'):
1015        print "Error: can't find Python.h header in", py_includes
1016        print "Install Python headers (package python-dev on Ubuntu and RedHat)"
1017        Exit(1)
1018
1019    for lib in py_libs:
1020        if not conf.CheckLib(lib):
1021            print "Error: can't find library %s required by python" % lib
1022            Exit(1)
1023
1024# On Solaris you need to use libsocket for socket ops
1025if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
1026   if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
1027       print "Can't find library with socket calls (e.g. accept())"
1028       Exit(1)
1029
1030# Check for zlib.  If the check passes, libz will be automatically
1031# added to the LIBS environment variable.
1032if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
1033    print 'Error: did not find needed zlib compression library '\
1034          'and/or zlib.h header file.'
1035    print '       Please install zlib and try again.'
1036    Exit(1)
1037
1038# If we have the protobuf compiler, also make sure we have the
1039# development libraries. If the check passes, libprotobuf will be
1040# automatically added to the LIBS environment variable. After
1041# this, we can use the HAVE_PROTOBUF flag to determine if we have
1042# got both protoc and libprotobuf available.
1043main['HAVE_PROTOBUF'] = main['PROTOC'] and \
1044    conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
1045                            'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')
1046
1047# If we have the compiler but not the library, print another warning.
1048if main['PROTOC'] and not main['HAVE_PROTOBUF']:
1049    print termcap.Yellow + termcap.Bold + \
1050        'Warning: did not find protocol buffer library and/or headers.\n' + \
1051        '       Please install libprotobuf-dev for tracing support.' + \
1052        termcap.Normal
1053
1054# Check for librt.
1055have_posix_clock = \
1056    conf.CheckLibWithHeader(None, 'time.h', 'C',
1057                            'clock_nanosleep(0,0,NULL,NULL);') or \
1058    conf.CheckLibWithHeader('rt', 'time.h', 'C',
1059                            'clock_nanosleep(0,0,NULL,NULL);')
1060
1061have_posix_timers = \
1062    conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
1063                            'timer_create(CLOCK_MONOTONIC, NULL, NULL);')
1064
1065if not GetOption('without_tcmalloc'):
1066    if conf.CheckLib('tcmalloc'):
1067        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1068    elif conf.CheckLib('tcmalloc_minimal'):
1069        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1070    else:
1071        print termcap.Yellow + termcap.Bold + \
1072              "You can get a 12% performance improvement by "\
1073              "installing tcmalloc (libgoogle-perftools-dev package "\
1074              "on Ubuntu or RedHat)." + termcap.Normal
1075
1076
1077# Detect back trace implementations. The last implementation in the
1078# list will be used by default.
1079backtrace_impls = [ "none" ]
1080
1081if conf.CheckLibWithHeader(None, 'execinfo.h', 'C',
1082                           'backtrace_symbols_fd((void*)0, 0, 0);'):
1083    backtrace_impls.append("glibc")
1084
1085if backtrace_impls[-1] == "none":
1086    default_backtrace_impl = "none"
1087    print termcap.Yellow + termcap.Bold + \
1088        "No suitable back trace implementation found." + \
1089        termcap.Normal
1090
1091if not have_posix_clock:
1092    print "Can't find library for POSIX clocks."
1093
1094# Check for <fenv.h> (C99 FP environment control)
1095have_fenv = conf.CheckHeader('fenv.h', '<>')
1096if not have_fenv:
1097    print "Warning: Header file <fenv.h> not found."
1098    print "         This host has no IEEE FP rounding mode control."
1099
1100# Check if we should enable KVM-based hardware virtualization. The API
1101# we rely on has existed since version 2.6.36 of the kernel, but somehow
1102# the KVM_API_VERSION does not reflect the change. We test for one of
1103# the types as a fallback.
1104have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
1105if not have_kvm:
1106    print "Info: Compatible header file <linux/kvm.h> not found, " \
1107        "disabling KVM support."
1108
1109# x86 needs support for xsave. We test for the structure here since we
1110# won't be able to run new tests by the time we know which ISA we're
1111# targeting.
1112have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
1113                                    '#include <linux/kvm.h>') != 0
1114
1115# Check if the requested target ISA is compatible with the host
1116def is_isa_kvm_compatible(isa):
1117    try:
1118        import platform
1119        host_isa = platform.machine()
1120    except:
1121        print "Warning: Failed to determine host ISA."
1122        return False
1123
1124    if not have_posix_timers:
1125        print "Warning: Cannot enable KVM, host seems to lack support " \
1126            "for POSIX timers"
1127        return False
1128
1129    if isa == "arm":
1130        return host_isa in ( "armv7l", "aarch64" )
1131    elif isa == "x86":
1132        if host_isa != "x86_64":
1133            return False
1134
1135        if not have_kvm_xsave:
1136            print "KVM on x86 requires xsave support in kernel headers."
1137            return False
1138
1139        return True
1140    else:
1141        return False
1142
1143
1144# Check if the exclude_host attribute is available. We want this in
1145# order to get accurate instruction counts in KVM.
1146main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
1147    'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')
1148
1149
1150######################################################################
1151#
1152# Finish the configuration
1153#
1154main = conf.Finish()
1155
1156######################################################################
1157#
1158# Collect all non-global variables
1159#
1160
1161# Define the universe of supported ISAs
1162all_isa_list = [ ]
1163all_gpu_isa_list = [ ]
1164Export('all_isa_list')
1165Export('all_gpu_isa_list')
1166
1167class CpuModel(object):
1168    '''The CpuModel class encapsulates everything the ISA parser needs to
1169    know about a particular CPU model.'''
1170
1171    # Dict of available CPU model objects.  Accessible as CpuModel.dict.
1172    dict = {}
1173
1174    # Constructor.  Automatically adds models to CpuModel.dict.
1175    def __init__(self, name, default=False):
1176        self.name = name           # name of model
1177
1178        # This cpu is enabled by default
1179        self.default = default
1180
1181        # Add self to dict
1182        if name in CpuModel.dict:
1183            raise AttributeError, "CpuModel '%s' already registered" % name
1184        CpuModel.dict[name] = self
1185
1186Export('CpuModel')
1187
1188# Sticky variables get saved in the variables file so they persist from
1189# one invocation to the next (unless overridden, in which case the new
1190# value becomes sticky).
1191sticky_vars = Variables(args=ARGUMENTS)
1192Export('sticky_vars')
1193
1194# Sticky variables that should be exported
1195export_vars = []
1196Export('export_vars')
1197
1198# For Ruby
1199all_protocols = []
1200Export('all_protocols')
1201protocol_dirs = []
1202Export('protocol_dirs')
1203slicc_includes = []
1204Export('slicc_includes')
1205
1206# Walk the tree and execute all SConsopts scripts that will add to the
1207# above variables
1208if GetOption('verbose'):
1209    print "Reading SConsopts"
1210for bdir in [ base_dir ] + extras_dir_list:
1211    if not isdir(bdir):
1212        print "Error: directory '%s' does not exist" % bdir
1213        Exit(1)
1214    for root, dirs, files in os.walk(bdir):
1215        if 'SConsopts' in files:
1216            if GetOption('verbose'):
1217                print "Reading", joinpath(root, 'SConsopts')
1218            SConscript(joinpath(root, 'SConsopts'))
1219
1220all_isa_list.sort()
1221all_gpu_isa_list.sort()
1222
1223sticky_vars.AddVariables(
1224    EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
1225    EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list),
1226    ListVariable('CPU_MODELS', 'CPU models',
1227                 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
1228                 sorted(CpuModel.dict.keys())),
1229    BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
1230                 False),
1231    BoolVariable('SS_COMPATIBLE_FP',
1232                 'Make floating-point results compatible with SimpleScalar',
1233                 False),
1234    BoolVariable('USE_SSE2',
1235                 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
1236                 False),
1237    BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
1238    BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
1239    BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability', False),
1240    BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models', have_kvm),
1241    BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
1242    EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
1243                  all_protocols),
1244    EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
1245                 backtrace_impls[-1], backtrace_impls)
1246    )
1247
1248# These variables get exported to #defines in config/*.hh (see src/SConscript).
1249export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
1250                'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'PROTOCOL',
1251                'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST']
1252
1253###################################################
1254#
1255# Define a SCons builder for configuration flag headers.
1256#
1257###################################################
1258
1259# This function generates a config header file that #defines the
1260# variable symbol to the current variable setting (0 or 1).  The source
1261# operands are the name of the variable and a Value node containing the
1262# value of the variable.
1263def build_config_file(target, source, env):
1264    (variable, value) = [s.get_contents() for s in source]
1265    f = file(str(target[0]), 'w')
1266    print >> f, '#define', variable, value
1267    f.close()
1268    return None
1269
1270# Combine the function above and its Transform pretty-printer into a scons Action.
1271config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))
1272
1273# The emitter munges the source & target node lists to reflect what
1274# we're really doing.
1275def config_emitter(target, source, env):
1276    # extract variable name from Builder arg
1277    variable = str(target[0])
1278    # True target is config header file
1279    target = joinpath('config', variable.lower() + '.hh')
1280    val = env[variable]
1281    if isinstance(val, bool):
1282        # Force value to 0/1
1283        val = int(val)
1284    elif isinstance(val, str):
1285        val = '"' + val + '"'
1286
1287    # Sources are variable name & value (packaged in SCons Value nodes)
1288    return ([target], [Value(variable), Value(val)])
1289
1290config_builder = Builder(emitter = config_emitter, action = config_action)
1291
1292main.Append(BUILDERS = { 'ConfigFile' : config_builder })
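# Illustrative example: env.ConfigFile('USE_KVM') would emit
# config/use_kvm.hh containing '#define USE_KVM 1' (or 0), per the emitter
# and action above; the per-variable calls are made from src/SConscript
# (see the export_vars comment above).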
1293
1294# libelf build is shared across all configs in the build root.
1295main.SConscript('ext/libelf/SConscript',
1296                variant_dir = joinpath(build_root, 'libelf'))
1297
1298# iostream3 build is shared across all configs in the build root.
1299main.SConscript('ext/iostream3/SConscript',
1300                variant_dir = joinpath(build_root, 'iostream3'))
1301
1302# libfdt build is shared across all configs in the build root.
1303main.SConscript('ext/libfdt/SConscript',
1304                variant_dir = joinpath(build_root, 'libfdt'))
1305
1306# fputils build is shared across all configs in the build root.
1307main.SConscript('ext/fputils/SConscript',
1308                variant_dir = joinpath(build_root, 'fputils'))
1309
1310# DRAMSim2 build is shared across all configs in the build root.
1311main.SConscript('ext/dramsim2/SConscript',
1312                variant_dir = joinpath(build_root, 'dramsim2'))
1313
1314# DRAMPower build is shared across all configs in the build root.
1315main.SConscript('ext/drampower/SConscript',
1316                variant_dir = joinpath(build_root, 'drampower'))
1317
1318# nomali build is shared across all configs in the build root.
1319main.SConscript('ext/nomali/SConscript',
1320                variant_dir = joinpath(build_root, 'nomali'))
1321
1322###################################################
1323#
1324# This function is used to set up a directory with switching headers
1325#
1326###################################################
1327
1328main['ALL_ISA_LIST'] = all_isa_list
1329main['ALL_GPU_ISA_LIST'] = all_gpu_isa_list
1330all_isa_deps = {}
1331def make_switching_dir(dname, switch_headers, env):
1332    # Generate the header.  target[0] is the full path of the output
1333    # header to generate.  'source' is a dummy variable, since we get the
1334    # list of ISAs from env['ALL_ISA_LIST'].
1335    def gen_switch_hdr(target, source, env):
1336        fname = str(target[0])
1337        isa = env['TARGET_ISA'].lower()
1338        try:
1339            f = open(fname, 'w')
1340            print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
1341            f.close()
1342        except IOError:
1343            print "Failed to create %s" % fname
1344            raise
1345
1346    # Build SCons Action object. 'varlist' specifies env vars that this
1347    # action depends on; when env['ALL_ISA_LIST'] changes these actions
1348    # should get re-executed.
1349    switch_hdr_action = MakeAction(gen_switch_hdr,
1350                          Transform("GENERATE"), varlist=['ALL_ISA_LIST'])
1351
1352    # Instantiate actions for each header
1353    for hdr in switch_headers:
1354        env.Command(hdr, [], switch_hdr_action)
1355
1356    isa_target = Dir('.').up().name.lower().replace('_', '-')
1357    env['PHONY_BASE'] = '#'+isa_target
1358    all_isa_deps[isa_target] = None
1359
1360Export('make_switching_dir')
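# Purely illustrative example of the generated output: if an arch
# SConscript were to call
#   make_switching_dir('arch', ['decoder.hh'], env)
# for a build with TARGET_ISA set to 'x86', the generated decoder.hh
# switch header would contain the single line
#   #include "arch/x86/decoder.hh"
# picking up the per-ISA implementation at compile time.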
1361
1362def make_gpu_switching_dir(dname, switch_headers, env):
1363    # Generate the header.  target[0] is the full path of the output
1364    # header to generate.  'source' is a dummy variable, since the
1365    # target GPU ISA comes from env['TARGET_GPU_ISA'].
1366    def gen_switch_hdr(target, source, env):
1367        fname = str(target[0])
1368
1369        isa = env['TARGET_GPU_ISA'].lower()
1370
1371        try:
1372            f = open(fname, 'w')
1373            print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
1374            f.close()
1375        except IOError:
1376            print "Failed to create %s" % fname
1377            raise
1378
1379    # Build SCons Action object. 'varlist' specifies env vars that this
1380    # action depends on; when env['ALL_GPU_ISA_LIST'] changes these actions
1381    # should get re-executed.
1382    switch_hdr_action = MakeAction(gen_switch_hdr,
1383                          Transform("GENERATE"), varlist=['ALL_ISA_GPU_LIST'])
1384
1385    # Instantiate actions for each header
1386    for hdr in switch_headers:
1387        env.Command(hdr, [], switch_hdr_action)
1388
1389Export('make_gpu_switching_dir')
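# The GPU variant works the same way, only keyed on TARGET_GPU_ISA: a
# hypothetical make_gpu_switching_dir('arch', ['gpu_decoder.hh'], env) call
# for an 'hsail' build would emit '#include "arch/hsail/gpu_decoder.hh"'.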
1390
1391# all-isas -> all-deps -> all-environs -> all-targets
1392main.Alias('#all-isas', [])
1393main.Alias('#all-deps', '#all-isas')
1394
1395# Dummy target to ensure all environments are created before telling
1396# SCons what to actually make (the command line arguments).  We attach
1397# them to the dependence graph after the environments are complete.
1398ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; the closure below needs the originals.
1399def environsComplete(target, source, env):
1400    for t in ORIG_BUILD_TARGETS:
1401        main.Depends('#all-targets', t)
1402
1403# Each build/* switching_dir attaches its *-environs target to #all-environs.
1404main.Append(BUILDERS = {'CompleteEnvirons' :
1405                        Builder(action=MakeAction(environsComplete, None))})
1406main.CompleteEnvirons('#all-environs', [])
1407
1408def doNothing(**ignored): pass
1409main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})
1410
1411# The final target to which all the original targets ultimately get attached.
1412main.Dummy('#all-targets', '#all-environs')
1413BUILD_TARGETS[:] = ['#all-targets']
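# For illustration, assuming the user asked for 'scons build/ARM/gem5.opt':
# that target is saved in ORIG_BUILD_TARGETS, BUILD_TARGETS is replaced by
# '#all-targets', and environsComplete() above re-attaches the saved target
# underneath '#all-targets' once every per-ISA '#<isa>-environs' alias has
# been folded into '#all-environs'.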
1414
1415###################################################
1416#
1417# Define build environments for selected configurations.
1418#
1419###################################################
1420
1421for variant_path in variant_paths:
1422    if not GetOption('silent'):
1423        print "Building in", variant_path
1424
1425    # Make a copy of the build-root environment to use for this config.
1426    env = main.Clone()
1427    env['BUILDDIR'] = variant_path
1428
1429    # variant_dir is the tail component of build path, and is used to
1430    # determine the build parameters (e.g., 'ALPHA_SE')
1431    (build_root, variant_dir) = splitpath(variant_path)
1432
1433    # Set env variables according to the build directory config.
1434    sticky_vars.files = []
1435    # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
1436    # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
1437    # $BUILD_ROOT/$VARIANT_DIR without losing your variable settings.
1438    current_vars_file = joinpath(build_root, 'variables', variant_dir)
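    # For example, a build rooted at build/X86 would keep its settings in
    # build/variables/X86 (illustrative names; whatever variant directory
    # was requested is used).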
1439    if isfile(current_vars_file):
1440        sticky_vars.files.append(current_vars_file)
1441        if not GetOption('silent'):
1442            print "Using saved variables file %s" % current_vars_file
1443    else:
1444        # Build dir-specific variables file doesn't exist.
1445
1446        # Make sure the directory is there so we can create the variables file later
1447        opt_dir = dirname(current_vars_file)
1448        if not isdir(opt_dir):
1449            mkdir(opt_dir)
1450
1451        # Get default build variables from source tree.  Variables are
1452        # normally determined by name of $VARIANT_DIR, but can be
1453        # overridden by '--default=' arg on command line.
1454        default = GetOption('default')
1455        opts_dir = joinpath(main.root.abspath, 'build_opts')
1456        if default:
1457            default_vars_files = [joinpath(build_root, 'variables', default),
1458                                  joinpath(opts_dir, default)]
1459        else:
1460            default_vars_files = [joinpath(opts_dir, variant_dir)]
1461        existing_files = filter(isfile, default_vars_files)
1462        if existing_files:
1463            default_vars_file = existing_files[0]
1464            sticky_vars.files.append(default_vars_file)
1465            print "Variables file %s not found,\n  using defaults in %s" \
1466                  % (current_vars_file, default_vars_file)
1467        else:
1468            print "Error: cannot find variables file %s or " \
1469                  "default file(s) %s" \
1470                  % (current_vars_file, ' or '.join(default_vars_files))
1471            Exit(1)
1472
1473    # Apply current variable settings to env
1474    sticky_vars.Update(env)
1475
1476    help_texts["local_vars"] += \
1477        "Build variables for %s:\n" % variant_dir \
1478                 + sticky_vars.GenerateHelpText(env)
1479
1480    # Process variable settings.
1481
1482    if not have_fenv and env['USE_FENV']:
1483        print "Warning: <fenv.h> not available; " \
1484              "forcing USE_FENV to False in", variant_dir + "."
1485        env['USE_FENV'] = False
1486
1487    if not env['USE_FENV']:
1488        print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
1489        print "         FP results may deviate slightly from other platforms."
1490
1491    if env['EFENCE']:
1492        env.Append(LIBS=['efence'])
1493
1494    if env['USE_KVM']:
1495        if not have_kvm:
1496            print "Warning: Can not enable KVM, host seems to lack KVM support"
1497            env['USE_KVM'] = False
1498        elif not is_isa_kvm_compatible(env['TARGET_ISA']):
1499            print "Info: KVM support disabled due to unsupported host and " \
1500                "target ISA combination"
1501            env['USE_KVM'] = False
1502
1503    # Warn about missing optional functionality
1504    if env['USE_KVM']:
1505        if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
1506            print "Warning: perf_event headers lack support for the " \
1507                "exclude_host attribute. KVM instruction counts will " \
1508                "be inaccurate."
1509
1510    # Save sticky variable settings back to current variables file
1511    sticky_vars.Save(current_vars_file, env)
1512
1513    if env['USE_SSE2']:
1514        env.Append(CCFLAGS=['-msse2'])
1515
1516    # The src/SConscript file sets up the build rules in 'env' according
1517    # to the configured variables.  It returns a list of environments,
1518    # one for each variant build (debug, opt, etc.)
1519    SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')
1520
1521def pairwise(iterable):
1522    "s -> (s0,s1), (s1,s2), (s2, s3), ..."
1523    a, b = itertools.tee(iterable)
1524    b.next()
1525    return itertools.izip(a, b)
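# For example, pairwise(['arm', 'sparc', 'x86']) yields ('arm', 'sparc') and
# ('sparc', 'x86'); the loop below uses each pair to chain the per-ISA deps
# and environs targets one after another.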
1526
1527# Create false dependencies so SCons will parse ISAs, establish
1528# dependencies, and set up the build Environments serially. Either
1529# SCons (likely) and/or our SConscripts (possibly) cannot cope with -j
1530# greater than 1. It appears to be standard race condition stuff; it
1531# doesn't always fail, but usually, and the behaviors are different.
1532# Every time I tried to remove this, builds would fail in some
1533# creative new way. So, don't do that. You'll want to, though, because
1534# tests/SConscript takes a long time to make its Environments.
1535for t1, t2 in pairwise(sorted(all_isa_deps.iterkeys())):
1536    main.Depends('#%s-deps'     % t2, '#%s-deps'     % t1)
1537    main.Depends('#%s-environs' % t2, '#%s-environs' % t1)
1538
1539# base help text
1540Help('''
1541Usage: scons [scons options] [build variables] [target(s)]
1542
1543Extra scons options:
1544%(options)s
1545
1546Global build variables:
1547%(global_vars)s
1548
1549%(local_vars)s
1550''' % help_texts)
1551