# -*- mode:python -*-

# Copyright (c) 2013, 2015, 2016 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder.  You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2011 Advanced Micro Devices, Inc.
# Copyright (c) 2009 The Hewlett-Packard Development Company
# Copyright (c) 2004-2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
#          Nathan Binkert

###################################################
#
# SCons top-level build description (SConstruct) file.
#
# While in this directory ('gem5'), just type 'scons' to build the default
# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
# the optimized full-system version).
#
# You can build gem5 in a different directory as long as there is a
# 'build/<CONFIG>' somewhere along the target path.  The build system
# expects that all configs under the same build directory are being
# built for the same host system.
#
# Examples:
#
#   The following two commands are equivalent.  The '-u' option tells
#   scons to search up the directory tree for this SConstruct file.
#   % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
#   % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
#
#   The following two commands are equivalent and demonstrate building
#   in a directory outside of the source tree.  The '-C' option tells
#   scons to chdir to the specified directory to find this SConstruct
#   file.
#   % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
#   % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
#
# You can use 'scons -H' to print scons options.  If you're in this
# 'gem5' directory (or use -u or -C to tell scons where to find this
# file), you can use 'scons -h' to print all the gem5-specific build
# options as well.
#
###################################################

# Check for recent-enough Python and SCons versions.
try:
    # Really old versions of scons only take two options for the
    # function, so check once without the revision and once with the
    # revision, the first instance will fail for stuff other than
    # 0.98, and the second will fail for 0.98.0
    EnsureSConsVersion(0, 98)
    EnsureSConsVersion(0, 98, 1)
except SystemExit, e:
    print """
For more details, see:
    http://gem5.org/Dependencies
"""
    raise

# We ensure the python version early because python-config
# requires python 2.5
try:
    EnsurePythonVersion(2, 5)
except SystemExit, e:
    print """
You can use a non-default installation of the Python interpreter by
rearranging your PATH so that scons finds the non-default 'python' and
'python-config' first.

For more details, see:
    http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
"""
    raise

# Global Python includes
import itertools
import os
import re
import shutil
import subprocess
import sys

from os import mkdir, environ
from os.path import abspath, basename, dirname, expanduser, normpath
from os.path import exists,  isdir, isfile
from os.path import join as joinpath, split as splitpath

# SCons includes
import SCons
import SCons.Node

extra_python_paths = [
    Dir('src/python').srcnode().abspath, # gem5 includes
    Dir('ext/ply').srcnode().abspath, # ply is used by several files
    ]

sys.path[1:1] = extra_python_paths

from m5.util import compareVersions, readCommand
from m5.util.terminal import get_termcap

help_texts = {
    "options" : "",
    "global_vars" : "",
    "local_vars" : ""
}

Export("help_texts")


# There's a bug in scons in that (1) by default, the help texts from
# AddOption() are supposed to be displayed when you type 'scons -h'
# and (2) you can override the help displayed by 'scons -h' using the
# Help() function, but these two features are incompatible: once
# you've overridden the help text using Help(), there's no way to get
# at the help texts from AddOptions.  See:
#     http://scons.tigris.org/issues/show_bug.cgi?id=2356
#     http://scons.tigris.org/issues/show_bug.cgi?id=2611
# This hack lets us extract the help text from AddOptions and
# re-inject it via Help().  Ideally someday this bug will be fixed and
# we can just use AddOption directly.
def AddLocalOption(*args, **kwargs):
    col_width = 30

    help = "  " + ", ".join(args)
    if "help" in kwargs:
        length = len(help)
        if length >= col_width:
            help += "\n" + " " * col_width
        else:
            help += " " * (col_width - length)
        help += kwargs["help"]
    help_texts["options"] += help + "\n"

    AddOption(*args, **kwargs)

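# For illustration only (not executed): a call such as
#   AddLocalOption('--colors', dest='use_colors', action='store_true',
#                  help="Add color to abbreviated scons output")
# appends a line of roughly the form
#   "  --colors                      Add color to abbreviated scons output"
# to help_texts["options"], padding the option names out to col_width
# columns before the help string.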
AddLocalOption('--colors', dest='use_colors', action='store_true',
               help="Add color to abbreviated scons output")
AddLocalOption('--no-colors', dest='use_colors', action='store_false',
               help="Don't add color to abbreviated scons output")
AddLocalOption('--with-cxx-config', dest='with_cxx_config',
               action='store_true',
               help="Build with support for C++-based configuration")
AddLocalOption('--default', dest='default', type='string', action='store',
               help='Override which build_opts file to use for defaults')
AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
               help='Disable style checking hooks')
AddLocalOption('--no-lto', dest='no_lto', action='store_true',
               help='Disable Link-Time Optimization for the fast target')
AddLocalOption('--update-ref', dest='update_ref', action='store_true',
               help='Update test reference outputs')
AddLocalOption('--verbose', dest='verbose', action='store_true',
               help='Print full tool command lines')
AddLocalOption('--without-python', dest='without_python',
               action='store_true',
               help='Build without Python configuration support')
AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
               action='store_true',
               help='Disable linking against tcmalloc')
AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
               help='Build with Undefined Behavior Sanitizer if available')
AddLocalOption('--with-asan', dest='with_asan', action='store_true',
               help='Build with Address Sanitizer if available')
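
# Illustrative invocations (hypothetical build paths):
#   scons --with-asan --without-tcmalloc build/ARM/gem5.debug
#   scons --verbose --no-lto build/X86/gem5.fast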

termcap = get_termcap(GetOption('use_colors'))

########################################################################
#
# Set up the main build environment.
#
########################################################################

# export TERM so that clang reports errors in color
use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
                 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
                 'PYTHONPATH', 'RANLIB', 'SWIG', 'TERM' ])

use_prefixes = [
    "ASAN_",           # address sanitizer symbolizer path and settings
    "CCACHE_",         # ccache (caching compiler wrapper) configuration
    "CCC_",            # clang static analyzer configuration
    "DISTCC_",         # distcc (distributed compiler wrapper) configuration
    "INCLUDE_SERVER_", # distcc pump server settings
    "M5",              # M5 configuration (e.g., path to kernels)
    ]

use_env = {}
for key,val in sorted(os.environ.iteritems()):
    if key in use_vars or \
            any([key.startswith(prefix) for prefix in use_prefixes]):
        use_env[key] = val

# Tell scons to avoid implicit command dependencies so that the param
# wrappers are not compiled twice (see
# http://scons.tigris.org/issues/show_bug.cgi?id=2811)
main = Environment(ENV=use_env, IMPLICIT_COMMAND_DEPENDENCIES=0)
main.Decider('MD5-timestamp')
main.root = Dir(".")         # The current directory (where this file lives).
main.srcdir = Dir("src")     # The source directory

main_dict_keys = main.Dictionary().keys()

# Check that we have a C/C++ compiler
if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
    print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
    Exit(1)

# Check that swig is present
if not 'SWIG' in main_dict_keys:
    print "swig is not installed (package swig on Ubuntu and RedHat)"
    Exit(1)

# add useful python code to PYTHONPATH so it can be used by subprocesses
# as well
main.AppendENVPath('PYTHONPATH', extra_python_paths)

########################################################################
#
# Mercurial Stuff.
#
# If the gem5 directory is a mercurial repository, we should do some
# extra things.
#
########################################################################

hgdir = main.root.Dir(".hg")
gitdir = main.root.Dir(".git")


style_message = """
You're missing the gem5 style hook, which automatically checks your code
against the gem5 style rules on hg commit and qrefresh commands.  This
script will now install the hook in your %s.
Press enter to continue, or ctrl-c to abort: """

mercurial_style_message = style_message % ".hg/hgrc file"
git_style_message = style_message % ".git/hooks/ directory"

mercurial_style_upgrade_message = """
Your Mercurial style hooks are not up-to-date. This script will now
try to automatically update them. A backup of your hgrc will be saved
in .hg/hgrc.old.
Press enter to continue, or ctrl-c to abort: """

mercurial_style_hook = """
# The following lines were automatically added by gem5/SConstruct
# to provide the gem5 style-checking hooks
[extensions]
hgstyle = %s/util/hgstyle.py

[hooks]
pretxncommit.style = python:hgstyle.check_style
pre-qrefresh.style = python:hgstyle.check_style
# End of SConstruct additions

""" % (main.root.abspath)

mercurial_lib_not_found = """
Mercurial libraries cannot be found, ignoring style hook.  If
you are a gem5 developer, please fix this and run the style
hook. It is important.
"""

# Check for style hook and prompt for installation if it's not there.
# Skip this if --ignore-style was specified, there's no interactive
# terminal to prompt, or no recognized revision control system can be
# found.
ignore_style = GetOption('ignore_style') or not sys.stdin.isatty()

# Try to wire up Mercurial to the style hooks
if not ignore_style and hgdir.exists():
    style_hook = True
    style_hooks = tuple()
    hgrc = hgdir.File('hgrc')
    hgrc_old = hgdir.File('hgrc.old')
    try:
        from mercurial import ui
        ui = ui.ui()
        ui.readconfig(hgrc.abspath)
        style_hooks = (ui.config('hooks', 'pretxncommit.style', None),
                       ui.config('hooks', 'pre-qrefresh.style', None))
        style_hook = all(style_hooks)
        style_extension = ui.config('extensions', 'style', None)
    except ImportError:
        print mercurial_lib_not_found

    if "python:style.check_style" in style_hooks:
        # Try to upgrade the style hooks
        print mercurial_style_upgrade_message
        # continue unless user does ctrl-c/ctrl-d etc.
        try:
            raw_input()
        except:
            print "Input exception, exiting scons.\n"
            sys.exit(1)
        shutil.copyfile(hgrc.abspath, hgrc_old.abspath)
        re_style_hook = re.compile(r"^([^=#]+)\.style\s*=\s*([^#\s]+).*")
        re_style_extension = re.compile("style\s*=\s*([^#\s]+).*")
        with open(hgrc_old.abspath, 'r') as old, \
             open(hgrc.abspath, 'w') as new:

            for l in old:
                m_hook = re_style_hook.match(l)
                m_ext = re_style_extension.match(l)
                if m_hook:
                    hook, check = m_hook.groups()
                    if check != "python:style.check_style":
                        print "Warning: %s.style is using a non-default " \
                            "checker: %s" % (hook, check)
                    if hook not in ("pretxncommit", "pre-qrefresh"):
                        print "Warning: Updating unknown style hook: %s" % hook

                    l = "%s.style = python:hgstyle.check_style\n" % hook
                elif m_ext and m_ext.group(1) == style_extension:
                    l = "hgstyle = %s/util/hgstyle.py\n" % main.root.abspath

                new.write(l)
    elif not style_hook:
        print mercurial_style_message,
        # continue unless user does ctrl-c/ctrl-d etc.
        try:
            raw_input()
        except:
            print "Input exception, exiting scons.\n"
            sys.exit(1)
        hgrc_path = '%s/.hg/hgrc' % main.root.abspath
        print "Adding style hook to", hgrc_path, "\n"
        try:
            with open(hgrc_path, 'a') as f:
                f.write(mercurial_style_hook)
        except:
            print "Error updating", hgrc_path
            sys.exit(1)

# Try to wire up git to the style hooks
git_pre_commit_hook = gitdir.File("hooks/pre-commit")
if not ignore_style and gitdir.exists() and not git_pre_commit_hook.exists():
    git_style_script = File("util/git-pre-commit.py")

    print git_style_message,
    try:
        raw_input()
    except:
        print "Input exception, exiting scons.\n"
        sys.exit(1)

    try:
        rel_style_script = os.path.relpath(
            git_style_script.get_abspath(),
            git_pre_commit_hook.Dir(".").get_abspath())
        os.symlink(rel_style_script, git_pre_commit_hook.get_abspath())
    except:
        print "Error updating git pre-commit hook"
        raise
        sys.exit(1)

###################################################
#
# Figure out which configurations to set up based on the path(s) of
# the target(s).
#
###################################################

# Find default configuration & binary.
Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))

# helper function: find last occurrence of element in list
def rfind(l, elt, offs = -1):
    for i in range(len(l)+offs, 0, -1):
        if l[i] == elt:
            return i
    raise ValueError, "element not found"
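# Illustrative example (not executed):
#   rfind(['repo', 'build', 'ALPHA', 'gem5.debug'], 'build', -2) == 1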

# Take a list of paths (or SCons Nodes) and return a list with all
# paths made absolute and ~-expanded.  Paths will be interpreted
# relative to the launch directory unless a different root is provided
def makePathListAbsolute(path_list, root=GetLaunchDir()):
    return [abspath(joinpath(root, expanduser(str(p))))
            for p in path_list]
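# Illustrative example (hypothetical launch directory /home/user/gem5):
#   makePathListAbsolute(['build/ARM/gem5.opt', '~/extras'])
#     -> ['/home/user/gem5/build/ARM/gem5.opt', '/home/user/extras']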

# Each target must have 'build' in the interior of the path; the
# directory below this will determine the build parameters.  For
# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
# recognize that ALPHA_SE specifies the configuration because it
# follows 'build' in the build path.

# The funky assignment to "[:]" is needed to replace the list contents
# in place rather than reassign the symbol to a new list, which
# doesn't work (obviously!).
BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)

# Generate a list of the unique build roots and configs that the
# collected targets reference.
variant_paths = []
build_root = None
for t in BUILD_TARGETS:
    path_dirs = t.split('/')
    try:
        build_top = rfind(path_dirs, 'build', -2)
    except:
        print "Error: no non-leaf 'build' dir found on target path", t
        Exit(1)
    this_build_root = joinpath('/',*path_dirs[:build_top+1])
    if not build_root:
        build_root = this_build_root
    else:
        if this_build_root != build_root:
            print "Error: build targets not under same build root\n"\
                  "  %s\n  %s" % (build_root, this_build_root)
            Exit(1)
    variant_path = joinpath('/',*path_dirs[:build_top+2])
    if variant_path not in variant_paths:
        variant_paths.append(variant_path)

# Make sure build_root exists (might not if this is the first build there)
if not isdir(build_root):
    mkdir(build_root)
main['BUILDROOT'] = build_root

Export('main')

main.SConsignFile(joinpath(build_root, "sconsign"))

# Default duplicate option is to use hard links, but this messes up
# when you use emacs to edit a file in the target dir, as emacs moves
# file to file~ then copies to file, breaking the link.  Symbolic
# (soft) links work better.
main.SetOption('duplicate', 'soft-copy')

#
# Set up global sticky variables... these are common to an entire build
# tree (not specific to a particular build like ALPHA_SE)
#

global_vars_file = joinpath(build_root, 'variables.global')

global_vars = Variables(global_vars_file, args=ARGUMENTS)

global_vars.AddVariables(
    ('CC', 'C compiler', environ.get('CC', main['CC'])),
    ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
    ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
    ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
    ('BATCH', 'Use batch pool for build and tests', False),
    ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
    ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
    ('EXTRAS', 'Add extra directories to the compilation', '')
    )

# Update main environment with values from ARGUMENTS & global_vars_file
global_vars.Update(main)
help_texts["global_vars"] += global_vars.GenerateHelpText(main)

# Save sticky variable settings back to current variables file
global_vars.Save(global_vars_file, main)
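
# Illustrative usage (hypothetical values): these sticky variables can be
# given on the scons command line and are then remembered in
# build/variables.global, e.g.
#   scons CC=gcc-5 CXX=g++-5 EXTRAS=/path/to/extras build/ARM/gem5.opt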

# Parse EXTRAS variable to build a list of all directories where we'll
# look for sources etc.  This list is exported as extras_dir_list.
base_dir = main.srcdir.abspath
if main['EXTRAS']:
    extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
else:
    extras_dir_list = []

Export('base_dir')
Export('extras_dir_list')

# the ext directory should be on the #includes path
main.Append(CPPPATH=[Dir('ext')])

def strip_build_path(path, env):
    path = str(path)
    variant_base = env['BUILDROOT'] + os.path.sep
    if path.startswith(variant_base):
        path = path[len(variant_base):]
    elif path.startswith('build/'):
        path = path[6:]
    return path
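# Illustrative example (hypothetical BUILDROOT '/local/foo/build'):
#   strip_build_path('/local/foo/build/ARM/base/misc.cc', env)
#     -> 'ARM/base/misc.cc'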

# Generate a string of the form:
#   common/path/prefix/src1, src2 -> tgt1, tgt2
# to print while building.
class Transform(object):
    # all specific color settings should be here and nowhere else
    tool_color = termcap.Normal
    pfx_color = termcap.Yellow
    srcs_color = termcap.Yellow + termcap.Bold
    arrow_color = termcap.Blue + termcap.Bold
    tgts_color = termcap.Yellow + termcap.Bold

    def __init__(self, tool, max_sources=99):
        self.format = self.tool_color + (" [%8s] " % tool) \
                      + self.pfx_color + "%s" \
                      + self.srcs_color + "%s" \
                      + self.arrow_color + " -> " \
                      + self.tgts_color + "%s" \
                      + termcap.Normal
        self.max_sources = max_sources

    def __call__(self, target, source, env, for_signature=None):
        # truncate source list according to max_sources param
        source = source[0:self.max_sources]
        def strip(f):
            return strip_build_path(str(f), env)
        if len(source) > 0:
            srcs = map(strip, source)
        else:
            srcs = ['']
        tgts = map(strip, target)
        # surprisingly, os.path.commonprefix is a dumb char-by-char string
        # operation that has nothing to do with paths.
        com_pfx = os.path.commonprefix(srcs + tgts)
        com_pfx_len = len(com_pfx)
        if com_pfx:
            # do some cleanup and sanity checking on common prefix
            if com_pfx[-1] == ".":
                # prefix matches all but file extension: ok
                # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
                com_pfx = com_pfx[0:-1]
            elif com_pfx[-1] == "/":
                # common prefix is directory path: OK
                pass
            else:
                src0_len = len(srcs[0])
                tgt0_len = len(tgts[0])
                if src0_len == com_pfx_len:
                    # source is a substring of target, OK
                    pass
                elif tgt0_len == com_pfx_len:
                    # target is a substring of source, need to back up to
                    # avoid empty string on RHS of arrow
                    sep_idx = com_pfx.rfind(".")
                    if sep_idx != -1:
                        com_pfx = com_pfx[0:sep_idx]
                    else:
                        com_pfx = ''
                elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
                    # still splitting at file extension: ok
                    pass
                else:
                    # probably a fluke; ignore it
                    com_pfx = ''
        # recalculate length in case com_pfx was modified
        com_pfx_len = len(com_pfx)
        def fmt(files):
            f = map(lambda s: s[com_pfx_len:], files)
            return ', '.join(f)
        return self.format % (com_pfx, fmt(srcs), fmt(tgts))
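
# Illustrative output (colors omitted, not executed): with Transform("CXX"),
# compiling build/ARM/base/misc.cc into build/ARM/base/misc.o would print
# roughly
#   [     CXX] ARM/base/misc.cc -> .o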

Export('Transform')

# enable the regression script to use the termcap
main['TERMCAP'] = termcap

if GetOption('verbose'):
    def MakeAction(action, string, *args, **kwargs):
        return Action(action, *args, **kwargs)
else:
    MakeAction = Action
    main['CCCOMSTR']        = Transform("CC")
    main['CXXCOMSTR']       = Transform("CXX")
    main['ASCOMSTR']        = Transform("AS")
    main['SWIGCOMSTR']      = Transform("SWIG")
    main['ARCOMSTR']        = Transform("AR", 0)
    main['LINKCOMSTR']      = Transform("LINK", 0)
    main['RANLIBCOMSTR']    = Transform("RANLIB", 0)
    main['M4COMSTR']        = Transform("M4")
    main['SHCCCOMSTR']      = Transform("SHCC")
    main['SHCXXCOMSTR']     = Transform("SHCXX")
Export('MakeAction')

# Initialize the Link-Time Optimization (LTO) flags
main['LTO_CCFLAGS'] = []
main['LTO_LDFLAGS'] = []

# According to the readme, tcmalloc works best if the compiler doesn't
# assume that we're using the builtin malloc and friends. These flags
# are compiler-specific, so we need to set them after we detect which
# compiler we're using.
main['TCMALLOC_CCFLAGS'] = []

CXX_version = readCommand([main['CXX'],'--version'], exception=False)
CXX_V = readCommand([main['CXX'],'-V'], exception=False)

main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
if main['GCC'] + main['CLANG'] > 1:
    print 'Error: How can we have two at the same time?'
    Exit(1)

# Set up default C++ compiler flags
if main['GCC'] or main['CLANG']:
    # As gcc and clang share many flags, do the common parts here
    main.Append(CCFLAGS=['-pipe'])
    main.Append(CCFLAGS=['-fno-strict-aliasing'])
    # Enable -Wall and -Wextra and then disable the few warnings that
    # we consistently violate
    main.Append(CCFLAGS=['-Wall', '-Wundef', '-Wextra',
                         '-Wno-sign-compare', '-Wno-unused-parameter'])
    # We always compile using C++11
    main.Append(CXXFLAGS=['-std=c++11'])
else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
               termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you expected to be using GCC or clang, there appears"
    print "       to be something wrong with your environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to fix SConstruct and"
    print "       src/SConscript to support that compiler."
    Exit(1)

if main['GCC']:
    # Check for a supported version of gcc. >= 4.7 is chosen for its
    # level of c++11 support. See
    # http://gcc.gnu.org/projects/cxx0x.html for details.
    gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
    if compareVersions(gcc_version, "4.7") < 0:
        print 'Error: gcc version 4.7 or newer required.'
        print '       Installed version:', gcc_version
        Exit(1)

    main['GCC_VERSION'] = gcc_version

    # gcc from version 4.8 and above generates "rep; ret" instructions
    # to avoid performance penalties on certain AMD chips. Older
    # assemblers detect this as an error, "Error: expecting string
    # instruction after `rep'"
    if compareVersions(gcc_version, "4.8") > 0:
        as_version_raw = readCommand([main['AS'], '-v', '/dev/null'],
                                     exception=False).split()

        # version strings may contain extra distro-specific
        # qualifiers, so play it safe and keep only what comes before
        # the first hyphen
        as_version = as_version_raw[-1].split('-')[0] if as_version_raw \
            else None

        if not as_version or compareVersions(as_version, "2.23") < 0:
            print termcap.Yellow + termcap.Bold + \
                'Warning: This combination of gcc and binutils has' + \
                ' known incompatibilities.\n' + \
                '         If you encounter build problems, please update ' + \
                'binutils to 2.23.' + \
                termcap.Normal

    # Make sure we warn if the user has requested to compile with the
    # Undefined Behavior Sanitizer and this version of gcc does not
    # support it.
    if GetOption('with_ubsan') and \
            compareVersions(gcc_version, '4.9') < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: UBSan is only supported using gcc 4.9 and later.' + \
            termcap.Normal

    # Add the appropriate Link-Time Optimization (LTO) flags
    # unless LTO is explicitly turned off. Note that these flags
    # are only used by the fast target.
    if not GetOption('no_lto'):
        # Pass the LTO flag when compiling to produce GIMPLE
        # output, we merely create the flags here and only append
        # them later
        main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

        # Use the same amount of jobs for LTO as we are running
        # scons with
        main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
                                  '-fno-builtin-realloc', '-fno-builtin-free'])

elif main['CLANG']:
    # Check for a supported version of clang, >= 3.1 is needed to
    # support similar features as gcc 4.7. See
    # http://clang.llvm.org/cxx_status.html for details
    clang_version_re = re.compile(".* version (\d+\.\d+)")
    clang_version_match = clang_version_re.search(CXX_version)
    if (clang_version_match):
        clang_version = clang_version_match.groups()[0]
        if compareVersions(clang_version, "3.1") < 0:
            print 'Error: clang version 3.1 or newer required.'
            print '       Installed version:', clang_version
            Exit(1)
    else:
        print 'Error: Unable to determine clang version.'
        Exit(1)

    # clang has a few additional warnings that we disable: extraneous
    # parentheses are allowed due to Ruby's printing of the AST, and
    # self-assignments are allowed as the generated CPU code relies on
    # this
    main.Append(CCFLAGS=['-Wno-parentheses',
                         '-Wno-self-assign',
                         # Some versions of libstdc++ (4.8?) seem to
                         # use struct hash and class hash
                         # interchangeably.
                         '-Wno-mismatched-tags',
                         ])

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])

    # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
    # opposed to libstdc++, as the latter is dated.
    if sys.platform == "darwin":
        main.Append(CXXFLAGS=['-stdlib=libc++'])
        main.Append(LIBS=['c++'])

else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
               termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you expected to be using GCC or clang, there appears"
    print "       to be something wrong with your environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to fix SConstruct and"
    print "       src/SConscript to support that compiler."
    Exit(1)

# Set up common yacc/bison flags (needed for Ruby)
main['YACCFLAGS'] = '-d'
main['YACCHXXFILESUFFIX'] = '.hh'

# Do this after we save settings back, or else we'll tack on an
# extra 'qdo' every time we run scons.
if main['BATCH']:
    main['CC']     = main['BATCH_CMD'] + ' ' + main['CC']
    main['CXX']    = main['BATCH_CMD'] + ' ' + main['CXX']
    main['AS']     = main['BATCH_CMD'] + ' ' + main['AS']
    main['AR']     = main['BATCH_CMD'] + ' ' + main['AR']
    main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']

if sys.platform == 'cygwin':
    # cygwin has some header file issues...
    main.Append(CCFLAGS=["-Wno-uninitialized"])

# Check for the protobuf compiler
protoc_version = readCommand([main['PROTOC'], '--version'],
                             exception='').split()

# First two words should be "libprotoc x.y.z"
if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
    print termcap.Yellow + termcap.Bold + \
        'Warning: Protocol buffer compiler (protoc) not found.\n' + \
        '         Please install protobuf-compiler for tracing support.' + \
        termcap.Normal
    main['PROTOC'] = False
else:
    # Based on the availability of the compress stream wrappers,
    # require 2.1.0
    min_protoc_version = '2.1.0'
    if compareVersions(protoc_version[1], min_protoc_version) < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: protoc version', min_protoc_version, \
            'or newer required.\n' + \
            '         Installed version:', protoc_version[1], \
            termcap.Normal
        main['PROTOC'] = False
    else:
        # Attempt to determine the appropriate include path and
        # library path using pkg-config, which means we also need to
        # check for pkg-config. Note that it is possible to use
        # protobuf without the involvement of pkg-config. Later on we
        # do a library config check, and at that point the test will
        # fail if libprotobuf cannot be found.
        if readCommand(['pkg-config', '--version'], exception=''):
            try:
                # Attempt to establish what linking flags to add for protobuf
                # using pkg-config
                main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
            except:
                print termcap.Yellow + termcap.Bold + \
                    'Warning: pkg-config could not get protobuf flags.' + \
                    termcap.Normal

# Check for SWIG
if not main.has_key('SWIG'):
    print 'Error: SWIG utility not found.'
    print '       Please install (see http://www.swig.org) and retry.'
    Exit(1)

# Check for appropriate SWIG version
swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
# First 3 words should be "SWIG Version x.y.z"
if len(swig_version) < 3 or \
        swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
    print 'Error determining SWIG version.'
    Exit(1)

min_swig_version = '2.0.4'
if compareVersions(swig_version[2], min_swig_version) < 0:
    print 'Error: SWIG version', min_swig_version, 'or newer required.'
    print '       Installed version:', swig_version[2]
    Exit(1)

# Check for known incompatibilities. The standard library shipped with
# gcc >= 4.9 does not play well with swig versions prior to 3.0
if main['GCC'] and compareVersions(gcc_version, '4.9') >= 0 and \
        compareVersions(swig_version[2], '3.0') < 0:
    print termcap.Yellow + termcap.Bold + \
        'Warning: This combination of gcc and swig has' + \
        ' known incompatibilities.\n' + \
        '         If you encounter build problems, please update ' + \
        'swig to 3.0 or later.' + \
        termcap.Normal

# Set up SWIG flags & scanner
swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
main.Append(SWIGFLAGS=swig_flags)

# Check for 'timeout' from GNU coreutils. If present, regressions will
# be run with a time limit. We require version 8.13 since we rely on
# support for the '--foreground' option.
timeout_lines = readCommand(['timeout', '--version'],
                            exception='').splitlines()
# Get the first line and tokenize it
timeout_version = timeout_lines[0].split() if timeout_lines else []
main['TIMEOUT'] =  timeout_version and \
    compareVersions(timeout_version[-1], '8.13') >= 0

# filter out all existing swig scanners, they mess up the dependency
# stuff for some reason
scanners = []
for scanner in main['SCANNERS']:
    skeys = scanner.skeys
    if skeys == '.i':
        continue

    if isinstance(skeys, (list, tuple)) and '.i' in skeys:
        continue

    scanners.append(scanner)

# add the new swig scanner that we like better
from SCons.Scanner import ClassicCPP as CPPScanner
swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))

# replace the scanners list that has what we want
main['SCANNERS'] = scanners

# Add a custom Check function to test for structure members.
def CheckMember(context, include, decl, member, include_quotes="<>"):
    context.Message("Checking for member %s in %s..." %
                    (member, decl))
    text = """
#include %(header)s
int main(){
  %(decl)s test;
  (void)test.%(member)s;
  return 0;
};
""" % { "header" : include_quotes[0] + include + include_quotes[1],
        "decl" : decl,
        "member" : member,
        }

    ret = context.TryCompile(text, extension=".cc")
    context.Result(ret)
    return ret

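# Illustrative usage (the real call appears further below):
#   conf.CheckMember('linux/perf_event.h', 'struct perf_event_attr',
#                    'exclude_host')
# compiles a tiny program that declares a 'struct perf_event_attr' and
# reads its 'exclude_host' member; the check succeeds only if that
# program compiles.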
# Platform-specific configuration.  Note again that we assume that all
# builds under a given build root run on the same host platform.
conf = Configure(main,
                 conf_dir = joinpath(build_root, '.scons_config'),
                 log_file = joinpath(build_root, 'scons_config.log'),
                 custom_tests = {
        'CheckMember' : CheckMember,
        })

# Check if we should compile a 64 bit binary on Mac OS X/Darwin
try:
    import platform
    uname = platform.uname()
    if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
        if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
            main.Append(CCFLAGS=['-arch', 'x86_64'])
            main.Append(CFLAGS=['-arch', 'x86_64'])
            main.Append(LINKFLAGS=['-arch', 'x86_64'])
            main.Append(ASFLAGS=['-arch', 'x86_64'])
except:
    pass

# Recent versions of scons substitute a "Null" object for Configure()
# when configuration isn't necessary, e.g., if the "--help" option is
# present.  Unfortunately this Null object always returns false,
# breaking all our configuration checks.  We replace it with our own
# more optimistic null object that returns True instead.
if not conf:
    def NullCheck(*args, **kwargs):
        return True

    class NullConf:
        def __init__(self, env):
            self.env = env
        def Finish(self):
            return self.env
        def __getattr__(self, mname):
            return NullCheck

    conf = NullConf(main)

# Cache build files in the supplied directory.
if main['M5_BUILD_CACHE']:
    print 'Using build cache located at', main['M5_BUILD_CACHE']
    CacheDir(main['M5_BUILD_CACHE'])

if not GetOption('without_python'):
    # Find Python include and library directories for embedding the
    # interpreter. We rely on python-config to resolve the appropriate
    # includes and linker flags. ParseConfig does not seem to understand
    # the more exotic linker flags such as -Xlinker and -export-dynamic so
    # we add them explicitly below. If you want to link in an alternate
    # version of python, see above for instructions on how to invoke
    # scons with the appropriate PATH set.
    #
    # First we check if python2-config exists, else we use python-config
    python_config = readCommand(['which', 'python2-config'],
                                exception='').strip()
    if not os.path.exists(python_config):
        python_config = readCommand(['which', 'python-config'],
                                    exception='').strip()
    py_includes = readCommand([python_config, '--includes'],
                              exception='').split()
    # Strip the -I from the include folders before adding them to the
    # CPPPATH
    main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))

    # Read the linker flags and split them into libraries and other link
    # flags. The libraries are added later through the call to CheckLib.
    py_ld_flags = readCommand([python_config, '--ldflags'],
        exception='').split()
    py_libs = []
    for lib in py_ld_flags:
        if not lib.startswith('-l'):
            main.Append(LINKFLAGS=[lib])
        else:
            lib = lib[2:]
            if lib not in py_libs:
                py_libs.append(lib)

    # verify that this stuff works
    if not conf.CheckHeader('Python.h', '<>'):
        print "Error: can't find Python.h header in", py_includes
        print "Install Python headers (package python-dev on Ubuntu and RedHat)"
        Exit(1)

    for lib in py_libs:
        if not conf.CheckLib(lib):
            print "Error: can't find library %s required by python" % lib
            Exit(1)

# On Solaris you need to use libsocket for socket ops
if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
    if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++',
                                   'accept(0,0,0);'):
        print "Can't find library with socket calls (e.g. accept())"
        Exit(1)

# Check for zlib.  If the check passes, libz will be automatically
# added to the LIBS environment variable.
if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
    print 'Error: did not find needed zlib compression library '\
          'and/or zlib.h header file.'
    print '       Please install zlib and try again.'
    Exit(1)

# If we have the protobuf compiler, also make sure we have the
# development libraries. If the check passes, libprotobuf will be
# automatically added to the LIBS environment variable. After
# this, we can use the HAVE_PROTOBUF flag to determine if we have
# got both protoc and libprotobuf available.
main['HAVE_PROTOBUF'] = main['PROTOC'] and \
    conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
                            'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')

# If we have the compiler but not the library, print another warning.
if main['PROTOC'] and not main['HAVE_PROTOBUF']:
    print termcap.Yellow + termcap.Bold + \
        'Warning: did not find protocol buffer library and/or headers.\n' + \
        '       Please install libprotobuf-dev for tracing support.' + \
        termcap.Normal

# Check for librt.
have_posix_clock = \
    conf.CheckLibWithHeader(None, 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);') or \
    conf.CheckLibWithHeader('rt', 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);')

have_posix_timers = \
    conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
                            'timer_create(CLOCK_MONOTONIC, NULL, NULL);')

if not GetOption('without_tcmalloc'):
    if conf.CheckLib('tcmalloc'):
        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
    elif conf.CheckLib('tcmalloc_minimal'):
        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
    else:
        print termcap.Yellow + termcap.Bold + \
              "You can get a 12% performance improvement by "\
              "installing tcmalloc (libgoogle-perftools-dev package "\
              "on Ubuntu or RedHat)." + termcap.Normal


# Detect back trace implementations. The last implementation in the
# list will be used by default.
backtrace_impls = [ "none" ]

if conf.CheckLibWithHeader(None, 'execinfo.h', 'C',
                           'backtrace_symbols_fd((void*)0, 0, 0);'):
    backtrace_impls.append("glibc")

if backtrace_impls[-1] == "none":
    default_backtrace_impl = "none"
    print termcap.Yellow + termcap.Bold + \
        "No suitable back trace implementation found." + \
        termcap.Normal

if not have_posix_clock:
    print "Can't find library for POSIX clocks."

# Check for <fenv.h> (C99 FP environment control)
have_fenv = conf.CheckHeader('fenv.h', '<>')
if not have_fenv:
    print "Warning: Header file <fenv.h> not found."
    print "         This host has no IEEE FP rounding mode control."

# Check if we should enable KVM-based hardware virtualization. The API
# we rely on exists since version 2.6.36 of the kernel, but somehow
# the KVM_API_VERSION does not reflect the change. We test for one of
# the types as a fall back.
have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
if not have_kvm:
    print "Info: Compatible header file <linux/kvm.h> not found, " \
        "disabling KVM support."

# x86 needs support for xsave. We test for the structure here since we
# won't be able to run new tests by the time we know which ISA we're
# targeting.
have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
                                    '#include <linux/kvm.h>') != 0

# Check if the requested target ISA is compatible with the host
def is_isa_kvm_compatible(isa):
    try:
        import platform
        host_isa = platform.machine()
    except:
        print "Warning: Failed to determine host ISA."
        return False

    if not have_posix_timers:
        print "Warning: Can not enable KVM, host seems to lack support " \
            "for POSIX timers"
        return False

    if isa == "arm":
        return host_isa in ( "armv7l", "aarch64" )
    elif isa == "x86":
        if host_isa != "x86_64":
            return False

        if not have_kvm_xsave:
            print "KVM on x86 requires xsave support in kernel headers."
            return False

        return True
    else:
        return False


# Check if the exclude_host attribute is available. We want this to
# get accurate instruction counts in KVM.
main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
    'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')


######################################################################
#
# Finish the configuration
#
main = conf.Finish()

######################################################################
#
# Collect all non-global variables
#

# Define the universe of supported ISAs
all_isa_list = [ ]
all_gpu_isa_list = [ ]
Export('all_isa_list')
Export('all_gpu_isa_list')

class CpuModel(object):
    '''The CpuModel class encapsulates everything the ISA parser needs to
    know about a particular CPU model.'''

    # Dict of available CPU model objects.  Accessible as CpuModel.dict.
    dict = {}

    # Constructor.  Automatically adds models to CpuModel.dict.
    def __init__(self, name, default=False):
        self.name = name           # name of model

        # This cpu is enabled by default
        self.default = default

        # Add self to dict
        if name in CpuModel.dict:
            raise AttributeError, "CpuModel '%s' already registered" % name
        CpuModel.dict[name] = self

Export('CpuModel')

# Sticky variables get saved in the variables file so they persist from
# one invocation to the next (unless overridden, in which case the new
# value becomes sticky).
sticky_vars = Variables(args=ARGUMENTS)
Export('sticky_vars')

# Sticky variables that should be exported
export_vars = []
Export('export_vars')

# For Ruby
all_protocols = []
Export('all_protocols')
protocol_dirs = []
Export('protocol_dirs')
slicc_includes = []
Export('slicc_includes')

# Walk the tree and execute all SConsopts scripts that will add to the
# above variables
if GetOption('verbose'):
    print "Reading SConsopts"
for bdir in [ base_dir ] + extras_dir_list:
    if not isdir(bdir):
        print "Error: directory '%s' does not exist" % bdir
        Exit(1)
    for root, dirs, files in os.walk(bdir):
        if 'SConsopts' in files:
            if GetOption('verbose'):
                print "Reading", joinpath(root, 'SConsopts')
            SConscript(joinpath(root, 'SConsopts'))

all_isa_list.sort()
all_gpu_isa_list.sort()

sticky_vars.AddVariables(
    EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
    EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list),
    ListVariable('CPU_MODELS', 'CPU models',
                 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
                 sorted(CpuModel.dict.keys())),
    BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
                 False),
    BoolVariable('SS_COMPATIBLE_FP',
                 'Make floating-point results compatible with SimpleScalar',
                 False),
    BoolVariable('USE_SSE2',
                 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
                 False),
    BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
    BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
    BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability', False),
    BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models', have_kvm),
    BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
    EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
                  all_protocols),
    EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
                 backtrace_impls[-1], backtrace_impls)
    )

# These variables get exported to #defines in config/*.hh (see src/SConscript).
export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
                'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'PROTOCOL',
                'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST']

###################################################
#
# Define a SCons builder for configuration flag headers.
#
###################################################

# This function generates a config header file that #defines the
# variable symbol to the current variable setting (0 or 1).  The source
# operands are the name of the variable and a Value node containing the
# value of the variable.
def build_config_file(target, source, env):
    (variable, value) = [s.get_contents() for s in source]
    f = file(str(target[0]), 'w')
    print >> f, '#define', variable, value
    f.close()
    return None

# Combine the two functions into a scons Action object.
config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))

# The emitter munges the source & target node lists to reflect what
# we're really doing.
def config_emitter(target, source, env):
    # extract variable name from Builder arg
    variable = str(target[0])
    # True target is config header file
    target = joinpath('config', variable.lower() + '.hh')
    val = env[variable]
    if isinstance(val, bool):
        # Force value to 0/1
        val = int(val)
    elif isinstance(val, str):
        val = '"' + val + '"'

    # Sources are variable name & value (packaged in SCons Value nodes)
    return ([target], [Value(variable), Value(val)])

config_builder = Builder(emitter = config_emitter, action = config_action)

main.Append(BUILDERS = { 'ConfigFile' : config_builder })
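
# Illustrative result (hypothetical invocation; the builder is used from
# src/SConscript): env.ConfigFile('USE_FENV') with USE_FENV=True writes
# build/<CONFIG>/config/use_fenv.hh containing a single line:
#   #define USE_FENV 1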

# libelf build is shared across all configs in the build root.
main.SConscript('ext/libelf/SConscript',
                variant_dir = joinpath(build_root, 'libelf'))

# iostream3 build is shared across all configs in the build root.
main.SConscript('ext/iostream3/SConscript',
                variant_dir = joinpath(build_root, 'iostream3'))

# libfdt build is shared across all configs in the build root.
main.SConscript('ext/libfdt/SConscript',
                variant_dir = joinpath(build_root, 'libfdt'))

# fputils build is shared across all configs in the build root.
main.SConscript('ext/fputils/SConscript',
                variant_dir = joinpath(build_root, 'fputils'))

# DRAMSim2 build is shared across all configs in the build root.
main.SConscript('ext/dramsim2/SConscript',
                variant_dir = joinpath(build_root, 'dramsim2'))

# DRAMPower build is shared across all configs in the build root.
main.SConscript('ext/drampower/SConscript',
                variant_dir = joinpath(build_root, 'drampower'))

# nomali build is shared across all configs in the build root.
main.SConscript('ext/nomali/SConscript',
                variant_dir = joinpath(build_root, 'nomali'))

###################################################
#
# This function is used to set up a directory with switching headers
#
###################################################

main['ALL_ISA_LIST'] = all_isa_list
main['ALL_GPU_ISA_LIST'] = all_gpu_isa_list
all_isa_deps = {}
def make_switching_dir(dname, switch_headers, env):
    # Generate the header.  target[0] is the full path of the output
    # header to generate.  'source' is a dummy variable, since we get the
    # list of ISAs from env['ALL_ISA_LIST'].
    def gen_switch_hdr(target, source, env):
        fname = str(target[0])
        isa = env['TARGET_ISA'].lower()
        try:
            f = open(fname, 'w')
            print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
            f.close()
        except IOError:
            print "Failed to create %s" % fname
            raise

    # Build SCons Action object. 'varlist' specifies env vars that this
    # action depends on; when env['ALL_ISA_LIST'] changes these actions
    # should get re-executed.
    switch_hdr_action = MakeAction(gen_switch_hdr,
                          Transform("GENERATE"), varlist=['ALL_ISA_LIST'])

    # Instantiate actions for each header
    for hdr in switch_headers:
        env.Command(hdr, [], switch_hdr_action)

    isa_target = Dir('.').up().name.lower().replace('_', '-')
    env['PHONY_BASE'] = '#'+isa_target
    all_isa_deps[isa_target] = None

Export('make_switching_dir')
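
# Illustrative result (hypothetical header name): with TARGET_ISA='arm',
# a switching header such as 'arch/decoder.hh' generated by
# make_switching_dir contains a single line like
#   #include "arch/arm/decoder.hh"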
1341
1342def make_gpu_switching_dir(dname, switch_headers, env):
    # Generate the header.  target[0] is the full path of the output
    # header to generate.  'source' is a dummy variable, since we get the
    # GPU ISA from env['TARGET_GPU_ISA'].
    def gen_switch_hdr(target, source, env):
        fname = str(target[0])

        isa = env['TARGET_GPU_ISA'].lower()

        try:
            f = open(fname, 'w')
            print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
            f.close()
        except IOError:
            print "Failed to create %s" % fname
            raise

    # Build SCons Action object. 'varlist' specifies env vars that this
    # action depends on; when env['ALL_GPU_ISA_LIST'] changes these actions
    # should get re-executed.
    switch_hdr_action = MakeAction(gen_switch_hdr,
                          Transform("GENERATE"), varlist=['ALL_GPU_ISA_LIST'])

    # Instantiate actions for each header
    for hdr in switch_headers:
        env.Command(hdr, [], switch_hdr_action)

Export('make_gpu_switching_dir')

# all-isas -> all-deps -> all-environs -> all-targets
main.Alias('#all-isas', [])
main.Alias('#all-deps', '#all-isas')

# Dummy target to ensure all environments are created before telling
# SCons what to actually make (the command line arguments).  We attach
# them to the dependence graph after the environments are complete.
ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy for the closure below.
def environsComplete(target, source, env):
    for t in ORIG_BUILD_TARGETS:
        main.Depends('#all-targets', t)

# Each build/* switching_dir attaches its *-environs target to #all-environs.
main.Append(BUILDERS = {'CompleteEnvirons' :
                        Builder(action=MakeAction(environsComplete, None))})
main.CompleteEnvirons('#all-environs', [])

def doNothing(**ignored): pass
main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})

# The final target to which all the original targets ultimately get attached.
main.Dummy('#all-targets', '#all-environs')
BUILD_TARGETS[:] = ['#all-targets']
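# Net effect (sketch): a command-line target such as build/ALPHA/gem5.opt is
# saved in ORIG_BUILD_TARGETS, SCons is redirected to #all-targets, and once
# #all-environs has run environsComplete() the original targets are attached
# beneath #all-targets, so they are only built after every environment exists.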

###################################################
#
# Define build environments for selected configurations.
#
###################################################

for variant_path in variant_paths:
    if not GetOption('silent'):
        print "Building in", variant_path

    # Make a copy of the build-root environment to use for this config.
    env = main.Clone()
    env['BUILDDIR'] = variant_path

    # variant_dir is the tail component of the build path, and is used to
    # determine the build parameters (e.g., 'ALPHA')
    (build_root, variant_dir) = splitpath(variant_path)

    # Set env variables according to the build directory config.
    sticky_vars.files = []
    # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
    # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
    # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
    current_vars_file = joinpath(build_root, 'variables', variant_dir)
    if isfile(current_vars_file):
        sticky_vars.files.append(current_vars_file)
        if not GetOption('silent'):
            print "Using saved variables file %s" % current_vars_file
    else:
        # Build dir-specific variables file doesn't exist.

        # Make sure the directory is there so we can create the
        # variables file in it later.
        opt_dir = dirname(current_vars_file)
        if not isdir(opt_dir):
            mkdir(opt_dir)

        # Get default build variables from source tree.  Variables are
        # normally determined by name of $VARIANT_DIR, but can be
        # overridden by '--default=' arg on command line.
        default = GetOption('default')
        opts_dir = joinpath(main.root.abspath, 'build_opts')
        if default:
            default_vars_files = [joinpath(build_root, 'variables', default),
                                  joinpath(opts_dir, default)]
        else:
            default_vars_files = [joinpath(opts_dir, variant_dir)]
        existing_files = filter(isfile, default_vars_files)
        if existing_files:
            default_vars_file = existing_files[0]
            sticky_vars.files.append(default_vars_file)
            print "Variables file %s not found,\n  using defaults in %s" \
                  % (current_vars_file, default_vars_file)
        else:
            print "Error: cannot find variables file %s or " \
                  "default file(s) %s" \
                  % (current_vars_file, ' or '.join(default_vars_files))
            Exit(1)

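    # Worked example (hypothetical first build): for build/ALPHA/gem5.opt the
    # variant_dir is 'ALPHA'; with no saved build/variables/ALPHA file, the
    # defaults come from build_opts/ALPHA, while '--default=ARM' would search
    # build/variables/ARM and then build_opts/ARM instead.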
    # Apply current variable settings to env
    sticky_vars.Update(env)

    help_texts["local_vars"] += \
        "Build variables for %s:\n" % variant_dir \
                 + sticky_vars.GenerateHelpText(env)

    # Process variable settings.

    if not have_fenv and env['USE_FENV']:
        print "Warning: <fenv.h> not available; " \
              "forcing USE_FENV to False in", variant_dir + "."
        env['USE_FENV'] = False

    if not env['USE_FENV']:
        print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
        print "         FP results may deviate slightly from other platforms."

    if env['EFENCE']:
        env.Append(LIBS=['efence'])

    if env['USE_KVM']:
        if not have_kvm:
            print "Warning: Cannot enable KVM; host seems to lack KVM support"
            env['USE_KVM'] = False
        elif not is_isa_kvm_compatible(env['TARGET_ISA']):
            print "Info: KVM support disabled due to unsupported host and " \
                "target ISA combination"
            env['USE_KVM'] = False

    # Warn about missing optional functionality
    if env['USE_KVM']:
        if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
            print "Warning: perf_event headers lack support for the " \
                "exclude_host attribute. KVM instruction counts will " \
                "be inaccurate."

    # Save sticky variable settings back to current variables file
    sticky_vars.Save(current_vars_file, env)

    if env['USE_SSE2']:
        env.Append(CCFLAGS=['-msse2'])

    # The src/SConscript file sets up the build rules in 'env' according
    # to the configured variables.  It returns a list of environments,
    # one for each variant build (debug, opt, etc.)
    SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')

def pairwise(iterable):
    "s -> (s0,s1), (s1,s2), (s2, s3), ..."
    a, b = itertools.tee(iterable)
    b.next()
    return itertools.izip(a, b)

# Create false dependencies so SCons will parse ISAs, establish
# dependencies, and set up the build Environments serially. Either
# SCons (likely) and/or our SConscripts (possibly) cannot cope with -j
# greater than 1. It appears to be standard race-condition stuff: it
# doesn't always fail, but it usually does, and the failure mode varies.
# Every time I tried to remove this, builds would fail in some
# creative new way. So, don't do that. You'll want to, though, because
# tests/SConscript takes a long time to make its Environments.
for t1, t2 in pairwise(sorted(all_isa_deps.iterkeys())):
    main.Depends('#%s-deps'     % t2, '#%s-deps'     % t1)
    main.Depends('#%s-environs' % t2, '#%s-environs' % t1)
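# For instance (illustrative ISA names), with all_isa_deps keys ['arm', 'x86']
# the loop above adds main.Depends('#x86-deps', '#arm-deps') and
# main.Depends('#x86-environs', '#arm-environs'), forcing serial setup.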

# base help text
Help('''
Usage: scons [scons options] [build variables] [target(s)]

Extra scons options:
%(options)s

Global build variables:
%(global_vars)s

%(local_vars)s
''' % help_texts)
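# Illustrative invocation (hypothetical): 'scons build/ALPHA/gem5.opt
# USE_KVM=False' combines a target with a sticky build variable; the help text
# above lists the options and variables available for each configured build.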