SConstruct revision 10238:b21b3aad6bd1
# -*- mode:python -*-

# Copyright (c) 2013 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder.  You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2011 Advanced Micro Devices, Inc.
# Copyright (c) 2009 The Hewlett-Packard Development Company
# Copyright (c) 2004-2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
#          Nathan Binkert

###################################################
#
# SCons top-level build description (SConstruct) file.
#
# While in this directory ('gem5'), just type 'scons' to build the default
# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
# the optimized full-system version).
#
# You can build gem5 in a different directory as long as there is a
# 'build/<CONFIG>' somewhere along the target path.  The build system
# expects that all configs under the same build directory are being
# built for the same host system.
#
# Examples:
#
#   The following two commands are equivalent.  The '-u' option tells
#   scons to search up the directory tree for this SConstruct file.
#   % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
#   % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
#
#   The following two commands are equivalent and demonstrate building
#   in a directory outside of the source tree.  The '-C' option tells
#   scons to chdir to the specified directory to find this SConstruct
#   file.
#   % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
#   % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
#
# You can use 'scons -H' to print scons options.  If you're in this
# 'gem5' directory (or use -u or -C to tell scons where to find this
# file), you can use 'scons -h' to print all the gem5-specific build
# options as well.
#
###################################################

# Check for recent-enough Python and SCons versions.
try:
    # Really old versions of scons only take two options for the
    # function, so check once without the revision and once with the
    # revision, the first instance will fail for stuff other than
    # 0.98, and the second will fail for 0.98.0
    EnsureSConsVersion(0, 98)
    EnsureSConsVersion(0, 98, 1)
except SystemExit, e:
    print """
For more details, see:
    http://gem5.org/Dependencies
"""
    raise

# We ensure the python version early because python-config
# requires python 2.5
try:
    EnsurePythonVersion(2, 5)
except SystemExit, e:
    print """
You can use a non-default installation of the Python interpreter by
rearranging your PATH so that scons finds the non-default 'python' and
'python-config' first.

For more details, see:
    http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
"""
    raise

# Global Python includes
import itertools
import os
import re
import subprocess
import sys

from os import mkdir, environ
from os.path import abspath, basename, dirname, expanduser, normpath
from os.path import exists, isdir, isfile
from os.path import join as joinpath, split as splitpath

# SCons includes
import SCons
import SCons.Node

extra_python_paths = [
    Dir('src/python').srcnode().abspath, # gem5 includes
    Dir('ext/ply').srcnode().abspath, # ply is used by several files
    ]

sys.path[1:1] = extra_python_paths

from m5.util import compareVersions, readCommand
from m5.util.terminal import get_termcap

help_texts = {
    "options" : "",
    "global_vars" : "",
    "local_vars" : ""
}

Export("help_texts")


# There's a bug in scons in that (1) by default, the help texts from
# AddOption() are supposed to be displayed when you type 'scons -h'
# and (2) you can override the help displayed by 'scons -h' using the
# Help() function, but these two features are incompatible: once
# you've overridden the help text using Help(), there's no way to get
# at the help texts from AddOptions.  See:
#     http://scons.tigris.org/issues/show_bug.cgi?id=2356
#     http://scons.tigris.org/issues/show_bug.cgi?id=2611
# This hack lets us extract the help text from AddOptions and
# re-inject it via Help().  Ideally someday this bug will be fixed and
# we can just use AddOption directly.
def AddLocalOption(*args, **kwargs):
    col_width = 30

    help = "  " + ", ".join(args)
    if "help" in kwargs:
        length = len(help)
        if length >= col_width:
            help += "\n" + " " * col_width
        else:
            help += " " * (col_width - length)
        help += kwargs["help"]
    help_texts["options"] += help + "\n"

    AddOption(*args, **kwargs)

AddLocalOption('--colors', dest='use_colors', action='store_true',
               help="Add color to abbreviated scons output")
AddLocalOption('--no-colors', dest='use_colors', action='store_false',
               help="Don't add color to abbreviated scons output")
AddLocalOption('--default', dest='default', type='string', action='store',
               help='Override which build_opts file to use for defaults')
AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
               help='Disable style checking hooks')
AddLocalOption('--no-lto', dest='no_lto', action='store_true',
               help='Disable Link-Time Optimization for fast')
AddLocalOption('--update-ref', dest='update_ref', action='store_true',
               help='Update test reference outputs')
AddLocalOption('--verbose', dest='verbose', action='store_true',
               help='Print full tool command lines')

termcap = get_termcap(GetOption('use_colors'))

########################################################################
#
# Set up the main build environment.
#
########################################################################

# export TERM so that clang reports errors in color
use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
                 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
                 'PYTHONPATH', 'RANLIB', 'SWIG', 'TERM' ])

use_prefixes = [
    "M5",           # M5 configuration (e.g., path to kernels)
    "DISTCC_",      # distcc (distributed compiler wrapper) configuration
    "CCACHE_",      # ccache (caching compiler wrapper) configuration
    "CCC_",         # clang static analyzer configuration
    ]

use_env = {}
for key,val in os.environ.iteritems():
    if key in use_vars or \
            any([key.startswith(prefix) for prefix in use_prefixes]):
        use_env[key] = val

main = Environment(ENV=use_env)
main.Decider('MD5-timestamp')
main.root = Dir(".")         # The current directory (where this file lives).
main.srcdir = Dir("src")     # The source directory

main_dict_keys = main.Dictionary().keys()

# Check that we have a C/C++ compiler
if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
    print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
    Exit(1)

# Check that swig is present
if not 'SWIG' in main_dict_keys:
    print "swig is not installed (package swig on Ubuntu and RedHat)"
    Exit(1)

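
# For illustration only (the config name MYCONFIG below is hypothetical),
# these options go on the scons command line alongside the usual targets:
#   % scons --verbose --colors build/ALPHA/gem5.opt
#   % scons --default=ALPHA build/MYCONFIG/gem5.opt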
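
# For example (illustrative values), CCACHE_DIR=/scratch/ccache or
# M5_PATH=/dist/m5/system would make it through this filter into the build
# environment, while unrelated host variables are dropped.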
# add useful python code to PYTHONPATH so it can be used by
# subprocesses as well
main.AppendENVPath('PYTHONPATH', extra_python_paths)

########################################################################
#
# Mercurial Stuff.
#
# If the gem5 directory is a mercurial repository, we should do some
# extra things.
#
########################################################################

hgdir = main.root.Dir(".hg")

mercurial_style_message = """
You're missing the gem5 style hook, which automatically checks your code
against the gem5 style rules on hg commit and qrefresh commands.  This
script will now install the hook in your .hg/hgrc file.
Press enter to continue, or ctrl-c to abort: """

mercurial_style_hook = """
# The following lines were automatically added by gem5/SConstruct
# to provide the gem5 style-checking hooks
[extensions]
style = %s/util/style.py

[hooks]
pretxncommit.style = python:style.check_style
pre-qrefresh.style = python:style.check_style
# End of SConstruct additions

""" % (main.root.abspath)

mercurial_lib_not_found = """
Mercurial libraries cannot be found, ignoring style hook.  If
you are a gem5 developer, please fix this and run the style
hook. It is important.
"""

# Check for style hook and prompt for installation if it's not there.
# Skip this if --ignore-style was specified, there's no .hg dir to
# install a hook in, or there's no interactive terminal to prompt.
if not GetOption('ignore_style') and hgdir.exists() and sys.stdin.isatty():
    style_hook = True
    try:
        from mercurial import ui
        ui = ui.ui()
        ui.readconfig(hgdir.File('hgrc').abspath)
        style_hook = ui.config('hooks', 'pretxncommit.style', None) and \
                     ui.config('hooks', 'pre-qrefresh.style', None)
    except ImportError:
        print mercurial_lib_not_found

    if not style_hook:
        print mercurial_style_message,
        # continue unless user does ctrl-c/ctrl-d etc.
        try:
            raw_input()
        except:
            print "Input exception, exiting scons.\n"
            sys.exit(1)
        hgrc_path = '%s/.hg/hgrc' % main.root.abspath
        print "Adding style hook to", hgrc_path, "\n"
        try:
            hgrc = open(hgrc_path, 'a')
            hgrc.write(mercurial_style_hook)
            hgrc.close()
        except:
            print "Error updating", hgrc_path
            sys.exit(1)


###################################################
#
# Figure out which configurations to set up based on the path(s) of
# the target(s).
#
###################################################

# Find default configuration & binary.
Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))

# helper function: find last occurrence of element in list
def rfind(l, elt, offs = -1):
    for i in range(len(l)+offs, 0, -1):
        if l[i] == elt:
            return i
    raise ValueError, "element not found"

# Take a list of paths (or SCons Nodes) and return a list with all
# paths made absolute and ~-expanded.  Paths will be interpreted
# relative to the launch directory unless a different root is provided
def makePathListAbsolute(path_list, root=GetLaunchDir()):
    return [abspath(joinpath(root, expanduser(str(p))))
            for p in path_list]

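
# For example (hypothetical paths), if scons is launched from /work/gem5,
# makePathListAbsolute(['build/ALPHA/gem5.opt', '~/extra']) returns
# ['/work/gem5/build/ALPHA/gem5.opt', '/home/user/extra'].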
# Each target must have 'build' in the interior of the path; the
# directory below this will determine the build parameters.  For
# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
# recognize that ALPHA_SE specifies the configuration because it
# follows 'build' in the build path.

# The funky assignment to "[:]" is needed to replace the list contents
# in place rather than reassign the symbol to a new list, which
# doesn't work (obviously!).
BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)

# Generate a list of the unique build roots and configs that the
# collected targets reference.
variant_paths = []
build_root = None
for t in BUILD_TARGETS:
    path_dirs = t.split('/')
    try:
        build_top = rfind(path_dirs, 'build', -2)
    except:
        print "Error: no non-leaf 'build' dir found on target path", t
        Exit(1)
    this_build_root = joinpath('/',*path_dirs[:build_top+1])
    if not build_root:
        build_root = this_build_root
    else:
        if this_build_root != build_root:
            print "Error: build targets not under same build root\n"\
                  "  %s\n  %s" % (build_root, this_build_root)
            Exit(1)
    variant_path = joinpath('/',*path_dirs[:build_top+2])
    if variant_path not in variant_paths:
        variant_paths.append(variant_path)

# Make sure build_root exists (might not if this is the first build there)
if not isdir(build_root):
    mkdir(build_root)
main['BUILDROOT'] = build_root

Export('main')

main.SConsignFile(joinpath(build_root, "sconsign"))

# Default duplicate option is to use hard links, but this messes up
# when you use emacs to edit a file in the target dir, as emacs moves
# file to file~ then copies to file, breaking the link.  Symbolic
# (soft) links work better.
main.SetOption('duplicate', 'soft-copy')

#
# Set up global sticky variables... these are common to an entire build
# tree (not specific to a particular build like ALPHA_SE)
#

global_vars_file = joinpath(build_root, 'variables.global')

global_vars = Variables(global_vars_file, args=ARGUMENTS)

global_vars.AddVariables(
    ('CC', 'C compiler', environ.get('CC', main['CC'])),
    ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
    ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
    ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
    ('BATCH', 'Use batch pool for build and tests', False),
    ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
    ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
    ('EXTRAS', 'Add extra directories to the compilation', '')
    )

# Update main environment with values from ARGUMENTS & global_vars_file
global_vars.Update(main)
help_texts["global_vars"] += global_vars.GenerateHelpText(main)

# Save sticky variable settings back to current variables file
global_vars.Save(global_vars_file, main)

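# As an illustration (hypothetical paths), a target such as
# /work/gem5/build/ALPHA_SE/gem5.opt yields
# build_root = /work/gem5/build and variant_path = /work/gem5/build/ALPHA_SE.
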
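
# For example (illustrative values), global sticky variables can be set on
# the command line and are then remembered in <build_root>/variables.global:
#   % scons CC=gcc-4.8 CXX=g++-4.8 build/ALPHA/gem5.opt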
# Parse EXTRAS variable to build a list of all directories where we'll
# look for sources etc.  This list is exported as extras_dir_list.
base_dir = main.srcdir.abspath
if main['EXTRAS']:
    extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
else:
    extras_dir_list = []

Export('base_dir')
Export('extras_dir_list')

# the ext directory should be on the #includes path
main.Append(CPPPATH=[Dir('ext')])

def strip_build_path(path, env):
    path = str(path)
    variant_base = env['BUILDROOT'] + os.path.sep
    if path.startswith(variant_base):
        path = path[len(variant_base):]
    elif path.startswith('build/'):
        path = path[6:]
    return path

# Generate a string of the form:
#   common/path/prefix/src1, src2 -> tgt1, tgt2
# to print while building.
class Transform(object):
    # all specific color settings should be here and nowhere else
    tool_color = termcap.Normal
    pfx_color = termcap.Yellow
    srcs_color = termcap.Yellow + termcap.Bold
    arrow_color = termcap.Blue + termcap.Bold
    tgts_color = termcap.Yellow + termcap.Bold

    def __init__(self, tool, max_sources=99):
        self.format = self.tool_color + (" [%8s] " % tool) \
                      + self.pfx_color + "%s" \
                      + self.srcs_color + "%s" \
                      + self.arrow_color + " -> " \
                      + self.tgts_color + "%s" \
                      + termcap.Normal
        self.max_sources = max_sources

    def __call__(self, target, source, env, for_signature=None):
        # truncate source list according to max_sources param
        source = source[0:self.max_sources]
        def strip(f):
            return strip_build_path(str(f), env)
        if len(source) > 0:
            srcs = map(strip, source)
        else:
            srcs = ['']
        tgts = map(strip, target)
        # surprisingly, os.path.commonprefix is a dumb char-by-char string
        # operation that has nothing to do with paths.
        com_pfx = os.path.commonprefix(srcs + tgts)
        com_pfx_len = len(com_pfx)
        if com_pfx:
            # do some cleanup and sanity checking on common prefix
            if com_pfx[-1] == ".":
                # prefix matches all but file extension: ok
                # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
                com_pfx = com_pfx[0:-1]
            elif com_pfx[-1] == "/":
                # common prefix is directory path: OK
                pass
            else:
                src0_len = len(srcs[0])
                tgt0_len = len(tgts[0])
                if src0_len == com_pfx_len:
                    # source is a substring of target, OK
                    pass
                elif tgt0_len == com_pfx_len:
                    # target is a substring of source, need to back up to
                    # avoid empty string on RHS of arrow
                    sep_idx = com_pfx.rfind(".")
                    if sep_idx != -1:
                        com_pfx = com_pfx[0:sep_idx]
                    else:
                        com_pfx = ''
                elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
                    # still splitting at file extension: ok
                    pass
                else:
                    # probably a fluke; ignore it
                    com_pfx = ''
        # recalculate length in case com_pfx was modified
        com_pfx_len = len(com_pfx)
        def fmt(files):
            f = map(lambda s: s[com_pfx_len:], files)
            return ', '.join(f)
        return self.format % (com_pfx, fmt(srcs), fmt(tgts))

Export('Transform')

# enable the regression script to use the termcap
main['TERMCAP'] = termcap

if GetOption('verbose'):
    def MakeAction(action, string, *args, **kwargs):
        return Action(action, *args, **kwargs)
else:
    MakeAction = Action
    main['CCCOMSTR']        = Transform("CC")
    main['CXXCOMSTR']       = Transform("CXX")
    main['ASCOMSTR']        = Transform("AS")
    main['SWIGCOMSTR']      = Transform("SWIG")
    main['ARCOMSTR']        = Transform("AR", 0)
    main['LINKCOMSTR']      = Transform("LINK", 0)
    main['RANLIBCOMSTR']    = Transform("RANLIB", 0)
    main['M4COMSTR']        = Transform("M4")
    main['SHCCCOMSTR']      = Transform("SHCC")
    main['SHCXXCOMSTR']     = Transform("SHCXX")
Export('MakeAction')

# Initialize the Link-Time Optimization (LTO) flags
main['LTO_CCFLAGS'] = []
main['LTO_LDFLAGS'] = []

# According to the readme, tcmalloc works best if the compiler doesn't
# assume that we're using the builtin malloc and friends. These flags
# are compiler-specific, so we need to set them after we detect which
# compiler we're using.
main['TCMALLOC_CCFLAGS'] = []

CXX_version = readCommand([main['CXX'],'--version'], exception=False)
CXX_V = readCommand([main['CXX'],'-V'], exception=False)

main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
if main['GCC'] + main['CLANG'] > 1:
    print 'Error: How can we have two at the same time?'
    Exit(1)

# Set up default C++ compiler flags
if main['GCC'] or main['CLANG']:
    # As gcc and clang share many flags, do the common parts here
    main.Append(CCFLAGS=['-pipe'])
    main.Append(CCFLAGS=['-fno-strict-aliasing'])
    # Enable -Wall and then disable the few warnings that we
    # consistently violate
    main.Append(CCFLAGS=['-Wall', '-Wno-sign-compare', '-Wundef'])
    # We always compile using C++11, but only gcc >= 4.7 and clang 3.1
    # actually use that name, so we stick with c++0x
    main.Append(CXXFLAGS=['-std=c++0x'])
    # Add selected sanity checks from -Wextra
    main.Append(CXXFLAGS=['-Wmissing-field-initializers',
                          '-Woverloaded-virtual'])
else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
               termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use a compiler other than GCC"
    print "       or clang, there appears to be something wrong with your"
    print "       environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
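
# For example (hypothetical directories), EXTRAS is a colon-separated list:
#   % scons EXTRAS=/work/encumbered:/work/private-models build/ALPHA/gem5.opt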
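
# For illustration (hypothetical file names), a compile line rendered through
# Transform("CXX") looks roughly like:
#   [     CXX] src/sim/main.cc -> .o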
    print "       above you will need to fix SConstruct and "
    print "       src/SConscript to support that compiler."
    Exit(1)

if main['GCC']:
    # Check for a supported version of gcc. >= 4.6 is chosen for its
    # level of c++11 support. See
    # http://gcc.gnu.org/projects/cxx0x.html for details. 4.6 is also
    # the first version with proper LTO support.
    gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
    if compareVersions(gcc_version, "4.6") < 0:
        print 'Error: gcc version 4.6 or newer required.'
        print '       Installed version:', gcc_version
        Exit(1)

    main['GCC_VERSION'] = gcc_version

    # Add the appropriate Link-Time Optimization (LTO) flags
    # unless LTO is explicitly turned off. Note that these flags
    # are only used by the fast target.
    if not GetOption('no_lto'):
        # Pass the LTO flag when compiling to produce GIMPLE
        # output; we merely create the flags here and only append
        # them later.
        main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

        # Use the same number of jobs for LTO as we are running
        # scons with; we hardcode the use of the linker plugin,
        # which requires either gold or GNU ld >= 2.21
        main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs'),
                               '-fuse-linker-plugin']

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
                                  '-fno-builtin-realloc', '-fno-builtin-free'])

elif main['CLANG']:
    # Check for a supported version of clang, >= 3.0 is needed to
    # support similar features as gcc 4.6. See
    # http://clang.llvm.org/cxx_status.html for details
    clang_version_re = re.compile(".* version (\d+\.\d+)")
    clang_version_match = clang_version_re.search(CXX_version)
    if (clang_version_match):
        clang_version = clang_version_match.groups()[0]
        if compareVersions(clang_version, "3.0") < 0:
            print 'Error: clang version 3.0 or newer required.'
            print '       Installed version:', clang_version
            Exit(1)
    else:
        print 'Error: Unable to determine clang version.'
        Exit(1)

    # clang has a few additional warnings that we disable:
    # tautological comparisons are allowed because unsigned integers
    # are compared to constants that happen to be 0, extraneous
    # parentheses are allowed due to Ruby's printing of the AST, and
    # self assignments are allowed as the generated CPU code relies
    # on this behaviour
    main.Append(CCFLAGS=['-Wno-tautological-compare',
                         '-Wno-parentheses',
                         '-Wno-self-assign'])

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])

    # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
    # opposed to libstdc++, as the latter is dated.
    if sys.platform == "darwin":
        main.Append(CXXFLAGS=['-stdlib=libc++'])
        main.Append(LIBS=['c++'])

else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
               termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use a compiler other than GCC"
    print "       or clang, there appears to be something wrong with your"
    print "       environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to fix SConstruct and "
    print "       src/SConscript to support that compiler."
    Exit(1)

# Set up common yacc/bison flags (needed for Ruby)
main['YACCFLAGS'] = '-d'
main['YACCHXXFILESUFFIX'] = '.hh'

# Do this after we save settings back, or else we'll tack on an
# extra 'qdo' every time we run scons.
if main['BATCH']:
    main['CC']     = main['BATCH_CMD'] + ' ' + main['CC']
    main['CXX']    = main['BATCH_CMD'] + ' ' + main['CXX']
    main['AS']     = main['BATCH_CMD'] + ' ' + main['AS']
    main['AR']     = main['BATCH_CMD'] + ' ' + main['AR']
    main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']

if sys.platform == 'cygwin':
    # cygwin has some header file issues...
    main.Append(CCFLAGS=["-Wno-uninitialized"])

# Check for the protobuf compiler
protoc_version = readCommand([main['PROTOC'], '--version'],
                             exception='').split()

# First two words should be "libprotoc x.y.z"
if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
    print termcap.Yellow + termcap.Bold + \
        'Warning: Protocol buffer compiler (protoc) not found.\n' + \
        '         Please install protobuf-compiler for tracing support.' + \
        termcap.Normal
    main['PROTOC'] = False
else:
    # Based on the availability of the compress stream wrappers,
    # require 2.1.0
    min_protoc_version = '2.1.0'
    if compareVersions(protoc_version[1], min_protoc_version) < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: protoc version', min_protoc_version, \
            'or newer required.\n' + \
            '         Installed version:', protoc_version[1], \
            termcap.Normal
        main['PROTOC'] = False
    else:
        # Attempt to determine the appropriate include path and
        # library path using pkg-config, which means we also need to
        # check for pkg-config. Note that it is possible to use
        # protobuf without the involvement of pkg-config. Later on we
        # do a library config check, and at that point the test
        # will fail if libprotobuf cannot be found.
        if readCommand(['pkg-config', '--version'], exception=''):
            try:
                # Attempt to establish what linking flags to add for protobuf
                # using pkg-config
                main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
            except:
                print termcap.Yellow + termcap.Bold + \
                    'Warning: pkg-config could not get protobuf flags.' + \
                    termcap.Normal

# Check for SWIG
if not main.has_key('SWIG'):
    print 'Error: SWIG utility not found.'
    print '       Please install (see http://www.swig.org) and retry.'
    Exit(1)

# Check for appropriate SWIG version
swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
# First 3 words should be "SWIG Version x.y.z"
if len(swig_version) < 3 or \
        swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
    print 'Error determining SWIG version.'
    Exit(1)

min_swig_version = '2.0.4'
if compareVersions(swig_version[2], min_swig_version) < 0:
    print 'Error: SWIG version', min_swig_version, 'or newer required.'
    print '       Installed version:', swig_version[2]
    Exit(1)

# Set up SWIG flags & scanner
swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
main.Append(SWIGFLAGS=swig_flags)

# filter out all existing swig scanners, they mess up the dependency
# stuff for some reason
scanners = []
for scanner in main['SCANNERS']:
    skeys = scanner.skeys
    if skeys == '.i':
        continue

    if isinstance(skeys, (list, tuple)) and '.i' in skeys:
        continue

    scanners.append(scanner)

# add the new swig scanner that we like better
from SCons.Scanner import ClassicCPP as CPPScanner
swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))

# replace the scanners list that has what we want
main['SCANNERS'] = scanners

# Add a custom Check function to the Configure context so that we can
# figure out if the compiler adds leading underscores to global
# variables.  This is needed for the autogenerated asm files that we
# use for embedding the python code.
def CheckLeading(context):
    context.Message("Checking for leading underscore in global variables...")
    # 1) Define a global variable called x from asm so the C compiler
    #    won't change the symbol at all.
    # 2) Declare that variable.
    # 3) Use the variable
    #
    # If the compiler prepends an underscore, this will successfully
    # link because the external symbol 'x' will be called '_x' which
    # was defined by the asm statement.  If the compiler does not
    # prepend an underscore, this will not successfully link because
    # '_x' will have been defined by assembly, while the C portion of
    # the code will be trying to use 'x'
    ret = context.TryLink('''
        asm(".globl _x; _x: .byte 0");
        extern int x;
        int main() { return x; }
        ''', extension=".c")
    context.env.Append(LEADING_UNDERSCORE=ret)
    context.Result(ret)
    return ret

# Add a custom Check function to test for structure members.
def CheckMember(context, include, decl, member, include_quotes="<>"):
    context.Message("Checking for member %s in %s..." %
                    (member, decl))
    text = """
#include %(header)s
int main(){
  %(decl)s test;
  (void)test.%(member)s;
  return 0;
};
""" % { "header" : include_quotes[0] + include + include_quotes[1],
        "decl" : decl,
        "member" : member,
        }

    ret = context.TryCompile(text, extension=".cc")
    context.Result(ret)
    return ret

# Platform-specific configuration.  Note again that we assume that all
# builds under a given build root run on the same host platform.
conf = Configure(main,
                 conf_dir = joinpath(build_root, '.scons_config'),
                 log_file = joinpath(build_root, 'scons_config.log'),
                 custom_tests = {
        'CheckLeading' : CheckLeading,
        'CheckMember' : CheckMember,
        })

# Check for leading underscores.  Don't really need to worry either
# way so don't need to check the return code.
conf.CheckLeading()

# Check if we should compile a 64 bit binary on Mac OS X/Darwin
try:
    import platform
    uname = platform.uname()
    if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
        if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
            main.Append(CCFLAGS=['-arch', 'x86_64'])
            main.Append(CFLAGS=['-arch', 'x86_64'])
            main.Append(LINKFLAGS=['-arch', 'x86_64'])
            main.Append(ASFLAGS=['-arch', 'x86_64'])
except:
    pass

# Recent versions of scons substitute a "Null" object for Configure()
# when configuration isn't necessary, e.g., if the "--help" option is
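# The regular expression above matches C-preprocessor and SWIG directives
# such as (illustrative): #include <foo.h>, %include "bar.i", %import "baz.i"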
# present.  Unfortunately this Null object always returns false,
# breaking all our configuration checks.  We replace it with our own
# more optimistic null object that returns True instead.
if not conf:
    def NullCheck(*args, **kwargs):
        return True

    class NullConf:
        def __init__(self, env):
            self.env = env
        def Finish(self):
            return self.env
        def __getattr__(self, mname):
            return NullCheck

    conf = NullConf(main)

# Cache build files in the supplied directory.
if main['M5_BUILD_CACHE']:
    print 'Using build cache located at', main['M5_BUILD_CACHE']
    CacheDir(main['M5_BUILD_CACHE'])

# Find Python include and library directories for embedding the
# interpreter. We rely on python-config to resolve the appropriate
# includes and linker flags. ParseConfig does not seem to understand
# the more exotic linker flags such as -Xlinker and -export-dynamic so
# we add them explicitly below. If you want to link in an alternate
# version of python, see above for instructions on how to invoke
# scons with the appropriate PATH set.
#
# First we check if python2-config exists, else we use python-config
python_config = readCommand(['which', 'python2-config'], exception='').strip()
if not os.path.exists(python_config):
    python_config = readCommand(['which', 'python-config'],
                                exception='').strip()
py_includes = readCommand([python_config, '--includes'],
                          exception='').split()
# Strip the -I from the include folders before adding them to the
# CPPPATH
main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))

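# For example (illustrative output), python-config --includes typically
# prints something like "-I/usr/include/python2.7 -I/usr/include/python2.7",
# which the map above turns into bare directory names for CPPPATH.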
# Read the linker flags and split them into libraries and other link
# flags. The libraries are added later through the call to CheckLib.
py_ld_flags = readCommand([python_config, '--ldflags'], exception='').split()
py_libs = []
for lib in py_ld_flags:
     if not lib.startswith('-l'):
         main.Append(LINKFLAGS=[lib])
     else:
         lib = lib[2:]
         if lib not in py_libs:
             py_libs.append(lib)

# verify that this stuff works
if not conf.CheckHeader('Python.h', '<>'):
    print "Error: can't find Python.h header in", py_includes
    print "Install Python headers (package python-dev on Ubuntu and RedHat)"
    Exit(1)

for lib in py_libs:
    if not conf.CheckLib(lib):
        print "Error: can't find library %s required by python" % lib
        Exit(1)

# On Solaris you need to use libsocket for socket ops
if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
   if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
       print "Can't find library with socket calls (e.g. accept())"
       Exit(1)

# Check for zlib.  If the check passes, libz will be automatically
# added to the LIBS environment variable.
if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
    print 'Error: did not find needed zlib compression library '\
          'and/or zlib.h header file.'
    print '       Please install zlib and try again.'
    Exit(1)

# If we have the protobuf compiler, also make sure we have the
# development libraries. If the check passes, libprotobuf will be
# automatically added to the LIBS environment variable. After
# this, we can use the HAVE_PROTOBUF flag to determine if we have
# got both protoc and libprotobuf available.
main['HAVE_PROTOBUF'] = main['PROTOC'] and \
    conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
                            'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')

# If we have the compiler but not the library, print another warning.
if main['PROTOC'] and not main['HAVE_PROTOBUF']:
    print termcap.Yellow + termcap.Bold + \
        'Warning: did not find protocol buffer library and/or headers.\n' + \
    '       Please install libprotobuf-dev for tracing support.' + \
    termcap.Normal

# Check for librt.
have_posix_clock = \
    conf.CheckLibWithHeader(None, 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);') or \
    conf.CheckLibWithHeader('rt', 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);')

have_posix_timers = \
    conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
                            'timer_create(CLOCK_MONOTONIC, NULL, NULL);')

if conf.CheckLib('tcmalloc'):
    main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
elif conf.CheckLib('tcmalloc_minimal'):
    main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
else:
    print termcap.Yellow + termcap.Bold + \
          "You can get a 12% performance improvement by installing tcmalloc "\
          "(libgoogle-perftools-dev package on Ubuntu or RedHat)." + \
          termcap.Normal

if not have_posix_clock:
    print "Can't find library for POSIX clocks."

# Check for <fenv.h> (C99 FP environment control)
have_fenv = conf.CheckHeader('fenv.h', '<>')
if not have_fenv:
    print "Warning: Header file <fenv.h> not found."
    print "         This host has no IEEE FP rounding mode control."

# Check if we should enable KVM-based hardware virtualization. The API
# we rely on exists since version 2.6.36 of the kernel, but somehow
# the KVM_API_VERSION does not reflect the change. We test for one of
# the types as a fall back.
have_kvm = conf.CheckHeader('linux/kvm.h', '<>') and \
    conf.CheckTypeSize('struct kvm_xsave', '#include <linux/kvm.h>') != 0
if not have_kvm:
    print "Info: Compatible header file <linux/kvm.h> not found, " \
        "disabling KVM support."

# Check if the requested target ISA is compatible with the host
def is_isa_kvm_compatible(isa):
    isa_comp_table = {
967        "arm" : ( "armv7l" ),
968        "x86" : ( "x86_64" ),
        }
    try:
        import platform
        host_isa = platform.machine()
    except:
        print "Warning: Failed to determine host ISA."
        return False

    return host_isa in isa_comp_table.get(isa, [])


# Check if the exclude_host attribute is available. We want this to
# get accurate instruction counts in KVM.
main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
    'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')


######################################################################
#
# Finish the configuration
#
main = conf.Finish()

######################################################################
#
# Collect all non-global variables
#

# Define the universe of supported ISAs
all_isa_list = [ ]
Export('all_isa_list')

class CpuModel(object):
    '''The CpuModel class encapsulates everything the ISA parser needs to
    know about a particular CPU model.'''

    # Dict of available CPU model objects.  Accessible as CpuModel.dict.
    dict = {}
    list = []
    defaults = []

    # Constructor.  Automatically adds models to CpuModel.dict.
    def __init__(self, name, filename, includes, strings, default=False):
        self.name = name           # name of model
        self.filename = filename   # filename for output exec code
        self.includes = includes   # include files needed in exec file
        # The 'strings' dict holds all the per-CPU symbols we can
        # substitute into templates etc.
        self.strings = strings

        # This cpu is enabled by default
        self.default = default

        # Add self to dict
        if name in CpuModel.dict:
            raise AttributeError, "CpuModel '%s' already registered" % name
        CpuModel.dict[name] = self
        CpuModel.list.append(name)

Export('CpuModel')

# Sticky variables get saved in the variables file so they persist from
# one invocation to the next (unless overridden, in which case the new
# value becomes sticky).
sticky_vars = Variables(args=ARGUMENTS)
Export('sticky_vars')

# Sticky variables that should be exported
export_vars = []
Export('export_vars')

# For Ruby
all_protocols = []
Export('all_protocols')
protocol_dirs = []
Export('protocol_dirs')
slicc_includes = []
Export('slicc_includes')

# Walk the tree and execute all SConsopts scripts that will add to the
# above variables
if GetOption('verbose'):
    print "Reading SConsopts"
for bdir in [ base_dir ] + extras_dir_list:
    if not isdir(bdir):
        print "Error: directory '%s' does not exist" % bdir
        Exit(1)
    for root, dirs, files in os.walk(bdir):
        if 'SConsopts' in files:
            if GetOption('verbose'):
                print "Reading", joinpath(root, 'SConsopts')
            SConscript(joinpath(root, 'SConsopts'))

all_isa_list.sort()

sticky_vars.AddVariables(
    EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
    ListVariable('CPU_MODELS', 'CPU models',
                 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
                 sorted(CpuModel.list)),
    BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
                 False),
    BoolVariable('SS_COMPATIBLE_FP',
                 'Make floating-point results compatible with SimpleScalar',
                 False),
    BoolVariable('USE_SSE2',
                 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
                 False),
    BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
    BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
    BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability', False),
    BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models', have_kvm),
    EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
                  all_protocols),
    )

# These variables get exported to #defines in config/*.hh (see src/SConscript).
export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'CP_ANNOTATE',
                'USE_POSIX_CLOCK', 'PROTOCOL', 'HAVE_PROTOBUF',
                'HAVE_PERF_ATTR_EXCLUDE_HOST']

###################################################
#
# Define a SCons builder for configuration flag headers.
#
###################################################

# This function generates a config header file that #defines the
# variable symbol to the current variable setting (0 or 1).  The source
# operands are the name of the variable and a Value node containing the
# value of the variable.
def build_config_file(target, source, env):
    (variable, value) = [s.get_contents() for s in source]
    f = file(str(target[0]), 'w')
    print >> f, '#define', variable, value
    f.close()
    return None

# Combine the two functions into a scons Action object.
config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))

# The emitter munges the source & target node lists to reflect what
# we're really doing.
def config_emitter(target, source, env):
    # extract variable name from Builder arg
    variable = str(target[0])
    # True target is config header file
    target = joinpath('config', variable.lower() + '.hh')
    val = env[variable]
    if isinstance(val, bool):
        # Force value to 0/1
        val = int(val)
    elif isinstance(val, str):
        val = '"' + val + '"'

    # Sources are variable name & value (packaged in SCons Value nodes)
    return ([target], [Value(variable), Value(val)])

config_builder = Builder(emitter = config_emitter, action = config_action)

main.Append(BUILDERS = { 'ConfigFile' : config_builder })

# libelf build is shared across all configs in the build root.
main.SConscript('ext/libelf/SConscript',
                variant_dir = joinpath(build_root, 'libelf'))

# gzstream build is shared across all configs in the build root.
main.SConscript('ext/gzstream/SConscript',
                variant_dir = joinpath(build_root, 'gzstream'))

# libfdt build is shared across all configs in the build root.
main.SConscript('ext/libfdt/SConscript',
                variant_dir = joinpath(build_root, 'libfdt'))

# fputils build is shared across all configs in the build root.
main.SConscript('ext/fputils/SConscript',
                variant_dir = joinpath(build_root, 'fputils'))

# DRAMSim2 build is shared across all configs in the build root.
main.SConscript('ext/dramsim2/SConscript',
                variant_dir = joinpath(build_root, 'dramsim2'))

###################################################
#
# This function is used to set up a directory with switching headers
#
###################################################

main['ALL_ISA_LIST'] = all_isa_list
all_isa_deps = {}
def make_switching_dir(dname, switch_headers, env):
    # Generate the header.  target[0] is the full path of the output
    # header to generate.  'source' is a dummy variable, since we get the
    # list of ISAs from env['ALL_ISA_LIST'].
    def gen_switch_hdr(target, source, env):
        fname = str(target[0])
        isa = env['TARGET_ISA'].lower()
        try:
            f = open(fname, 'w')
            print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
            f.close()
        except IOError:
            print "Failed to create %s" % fname
            raise

    # Build SCons Action object. 'varlist' specifies env vars that this
    # action depends on; when env['ALL_ISA_LIST'] changes these actions
    # should get re-executed.
    switch_hdr_action = MakeAction(gen_switch_hdr,
                          Transform("GENERATE"), varlist=['ALL_ISA_LIST'])

    # Instantiate actions for each header
    for hdr in switch_headers:
        env.Command(hdr, [], switch_hdr_action)

    isa_target = Dir('.').up().name.lower().replace('_', '-')
    env['PHONY_BASE'] = '#'+isa_target
    all_isa_deps[isa_target] = None

Export('make_switching_dir')

# all-isas -> all-deps -> all-environs -> all_targets
main.Alias('#all-isas', [])
main.Alias('#all-deps', '#all-isas')

# Dummy target to ensure all environments are created before telling
# SCons what to actually make (the command line arguments).  We attach
# them to the dependence graph after the environments are complete.
ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; gets closure to work.
def environsComplete(target, source, env):
    for t in ORIG_BUILD_TARGETS:
        main.Depends('#all-targets', t)

# Each build/* switching_dir attaches its *-environs target to #all-environs.
main.Append(BUILDERS = {'CompleteEnvirons' :
                        Builder(action=MakeAction(environsComplete, None))})
main.CompleteEnvirons('#all-environs', [])

def doNothing(**ignored): pass
main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})

# The final target to which all the original targets ultimately get attached.
main.Dummy('#all-targets', '#all-environs')
BUILD_TARGETS[:] = ['#all-targets']

###################################################
#
# Define build environments for selected configurations.
#
###################################################

for variant_path in variant_paths:
    if not GetOption('silent'):
        print "Building in", variant_path

    # Make a copy of the build-root environment to use for this config.
    env = main.Clone()
    env['BUILDDIR'] = variant_path

    # variant_dir is the tail component of build path, and is used to
    # determine the build parameters (e.g., 'ALPHA_SE')
    (build_root, variant_dir) = splitpath(variant_path)

    # Set env variables according to the build directory config.
    sticky_vars.files = []
    # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
    # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
    # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
    current_vars_file = joinpath(build_root, 'variables', variant_dir)
    if isfile(current_vars_file):
        sticky_vars.files.append(current_vars_file)
        if not GetOption('silent'):
            print "Using saved variables file %s" % current_vars_file
    else:
        # Build dir-specific variables file doesn't exist.

        # Make sure the directory is there so we can create it later
        opt_dir = dirname(current_vars_file)
        if not isdir(opt_dir):
            mkdir(opt_dir)

        # Get default build variables from source tree.  Variables are
        # normally determined by name of $VARIANT_DIR, but can be
        # overridden by '--default=' arg on command line.
        default = GetOption('default')
        opts_dir = joinpath(main.root.abspath, 'build_opts')
        if default:
            default_vars_files = [joinpath(build_root, 'variables', default),
                                  joinpath(opts_dir, default)]
        else:
            default_vars_files = [joinpath(opts_dir, variant_dir)]
        existing_files = filter(isfile, default_vars_files)
        if existing_files:
            default_vars_file = existing_files[0]
            sticky_vars.files.append(default_vars_file)
            print "Variables file %s not found,\n  using defaults in %s" \
                  % (current_vars_file, default_vars_file)
        else:
            print "Error: cannot find variables file %s or " \
                  "default file(s) %s" \
                  % (current_vars_file, ' or '.join(default_vars_files))
            Exit(1)

    # Apply current variable settings to env
    sticky_vars.Update(env)

    help_texts["local_vars"] += \
        "Build variables for %s:\n" % variant_dir \
                 + sticky_vars.GenerateHelpText(env)

    # Process variable settings.

    if not have_fenv and env['USE_FENV']:
        print "Warning: <fenv.h> not available; " \
              "forcing USE_FENV to False in", variant_dir + "."
        env['USE_FENV'] = False

    if not env['USE_FENV']:
        print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
        print "         FP results may deviate slightly from other platforms."

    if env['EFENCE']:
        env.Append(LIBS=['efence'])

    if env['USE_KVM']:
        if not have_kvm:
            print "Warning: Can not enable KVM, host seems to lack KVM support"
            env['USE_KVM'] = False
        elif not have_posix_timers:
            print "Warning: Can not enable KVM, host seems to lack support " \
                "for POSIX timers"
            env['USE_KVM'] = False
        elif not is_isa_kvm_compatible(env['TARGET_ISA']):
            print "Info: KVM support disabled due to unsupported host and " \
                "target ISA combination"
            env['USE_KVM'] = False

    # Warn about missing optional functionality
    if env['USE_KVM']:
        if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
            print "Warning: perf_event headers lack support for the " \
                "exclude_host attribute. KVM instruction counts will " \
                "be inaccurate."

    # Save sticky variable settings back to current variables file
    sticky_vars.Save(current_vars_file, env)

    if env['USE_SSE2']:
        env.Append(CCFLAGS=['-msse2'])

    # The src/SConscript file sets up the build rules in 'env' according
    # to the configured variables.  It returns a list of environments,
    # one for each variant build (debug, opt, etc.)
    SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')

def pairwise(iterable):
    "s -> (s0,s1), (s1,s2), (s2, s3), ..."
    a, b = itertools.tee(iterable)
    b.next()
    return itertools.izip(a, b)

# Create false dependencies so SCons will parse ISAs, establish
# dependencies, and setup the build Environments serially. Either
# SCons (likely) and/or our SConscripts (possibly) cannot cope with -j
# greater than 1. It appears to be standard race condition stuff; it
# doesn't always fail, but usually, and the behaviors are different.
# Every time I tried to remove this, builds would fail in some
# creative new way. So, don't do that. You'll want to, though, because
# tests/SConscript takes a long time to make its Environments.
for t1, t2 in pairwise(sorted(all_isa_deps.iterkeys())):
    main.Depends('#%s-deps'     % t2, '#%s-deps'     % t1)
    main.Depends('#%s-environs' % t2, '#%s-environs' % t1)

# base help text
Help('''
Usage: scons [scons options] [build variables] [target(s)]

Extra scons options:
%(options)s

Global build variables:
%(global_vars)s

%(local_vars)s
''' % help_texts)

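
# For example (illustrative values; the available model and protocol names
# depend on what the SConsopts scripts above registered), these sticky
# variables can be set per build directory on the command line:
#   % scons build/ARM/gem5.opt CPU_MODELS=TimingSimpleCPU,O3CPU USE_KVM=False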
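
# As an illustration (hypothetical call site), a call such as
# env.ConfigFile('USE_FENV') with USE_FENV set to True would generate
# config/use_fenv.hh under the variant directory containing just:
#   #define USE_FENV 1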
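    # For illustration (hypothetical header name): with dname 'arch' and
    # TARGET_ISA 'ALPHA', a switching header such as arch/decoder.hh would
    # be generated to contain the single line:
    #   #include "arch/alpha/decoder.hh"
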
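
# For example (illustrative), pairwise(['alpha', 'arm', 'x86']) yields
# ('alpha', 'arm') and ('arm', 'x86').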