# -*- mode:python -*-

# Copyright (c) 2013, 2015-2017 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder.  You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2011 Advanced Micro Devices, Inc.
# Copyright (c) 2009 The Hewlett-Packard Development Company
# Copyright (c) 2004-2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
#          Nathan Binkert

###################################################
#
# SCons top-level build description (SConstruct) file.
#
# While in this directory ('gem5'), just type 'scons' to build the default
# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
# the optimized full-system version).
#
# You can build gem5 in a different directory as long as there is a
# 'build/<CONFIG>' somewhere along the target path.  The build system
# expects that all configs under the same build directory are being
# built for the same host system.
#
# Examples:
#
#   The following two commands are equivalent.  The '-u' option tells
#   scons to search up the directory tree for this SConstruct file.
#   % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
#   % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
#
#   The following two commands are equivalent and demonstrate building
#   in a directory outside of the source tree.  The '-C' option tells
#   scons to chdir to the specified directory to find this SConstruct
#   file.
#   % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
#   % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
#
# You can use 'scons -H' to print scons options.  If you're in this
# 'gem5' directory (or use -u or -C to tell scons where to find this
# file), you can use 'scons -h' to print all the gem5-specific build
# options as well.
#
###################################################

# Global Python includes
import itertools
import os
import re
import shutil
import subprocess
import sys

from os import mkdir, environ
from os.path import abspath, basename, dirname, expanduser, normpath
from os.path import exists,  isdir, isfile
from os.path import join as joinpath, split as splitpath

# SCons includes
import SCons
import SCons.Node

extra_python_paths = [
    Dir('src/python').srcnode().abspath, # gem5 includes
    Dir('ext/ply').srcnode().abspath, # ply is used by several files
    ]

sys.path[1:1] = extra_python_paths

from m5.util import compareVersions, readCommand
from m5.util.terminal import get_termcap

help_texts = {
    "options" : "",
    "global_vars" : "",
    "local_vars" : ""
}

Export("help_texts")


# There's a bug in scons in that (1) by default, the help texts from
# AddOption() are supposed to be displayed when you type 'scons -h'
# and (2) you can override the help displayed by 'scons -h' using the
# Help() function, but these two features are incompatible: once
# you've overridden the help text using Help(), there's no way to get
# at the help texts from AddOptions.  See:
#     http://scons.tigris.org/issues/show_bug.cgi?id=2356
#     http://scons.tigris.org/issues/show_bug.cgi?id=2611
# This hack lets us extract the help text from AddOptions and
# re-inject it via Help().  Ideally someday this bug will be fixed and
# we can just use AddOption directly.
def AddLocalOption(*args, **kwargs):
    col_width = 30

    help = "  " + ", ".join(args)
    if "help" in kwargs:
        length = len(help)
        if length >= col_width:
            help += "\n" + " " * col_width
        else:
            help += " " * (col_width - length)
        help += kwargs["help"]
    help_texts["options"] += help + "\n"

    AddOption(*args, **kwargs)

AddLocalOption('--colors', dest='use_colors', action='store_true',
               help="Add color to abbreviated scons output")
AddLocalOption('--no-colors', dest='use_colors', action='store_false',
               help="Don't add color to abbreviated scons output")
AddLocalOption('--with-cxx-config', dest='with_cxx_config',
               action='store_true',
               help="Build with support for C++-based configuration")
AddLocalOption('--default', dest='default', type='string', action='store',
               help='Override which build_opts file to use for defaults')
AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
               help='Disable style checking hooks')
AddLocalOption('--no-lto', dest='no_lto', action='store_true',
               help='Disable Link-Time Optimization for fast')
AddLocalOption('--force-lto', dest='force_lto', action='store_true',
               help='Use Link-Time Optimization instead of partial linking' +
                    ' when the compiler doesn\'t support using them together.')
AddLocalOption('--update-ref', dest='update_ref', action='store_true',
               help='Update test reference outputs')
AddLocalOption('--verbose', dest='verbose', action='store_true',
               help='Print full tool command lines')
AddLocalOption('--without-python', dest='without_python',
               action='store_true',
               help='Build without Python configuration support')
AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
               action='store_true',
               help='Disable linking against tcmalloc')
AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
               help='Build with Undefined Behavior Sanitizer if available')
AddLocalOption('--with-asan', dest='with_asan', action='store_true',
               help='Build with Address Sanitizer if available')

if GetOption('no_lto') and GetOption('force_lto'):
    print '--no-lto and --force-lto are mutually exclusive'
    Exit(1)

termcap = get_termcap(GetOption('use_colors'))

########################################################################
#
# Set up the main build environment.
#
########################################################################

main = Environment()

main_dict_keys = main.Dictionary().keys()

# Check that we have a C/C++ compiler
if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
    print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
    Exit(1)

# Add the useful Python code to PYTHONPATH so it can be used by
# subprocesses as well
main.AppendENVPath('PYTHONPATH', extra_python_paths)

###################################################
#
# Figure out which configurations to set up based on the path(s) of
# the target(s).
#
###################################################

# Find default configuration & binary.
Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))

# helper function: find last occurrence of element in list
def rfind(l, elt, offs = -1):
    for i in range(len(l)+offs, 0, -1):
        if l[i] == elt:
            return i
    raise ValueError, "element not found"

# Take a list of paths (or SCons Nodes) and return a list with all
# paths made absolute and ~-expanded.  Paths will be interpreted
# relative to the launch directory unless a different root is provided
def makePathListAbsolute(path_list, root=GetLaunchDir()):
    return [abspath(joinpath(root, expanduser(str(p))))
            for p in path_list]
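
# A quick illustration (hypothetical paths, assuming scons was launched
# from /work/gem5):
#   makePathListAbsolute(['~/foo', 'build/ARM/gem5.opt'])
#     -> ['/home/<user>/foo', '/work/gem5/build/ARM/gem5.opt']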

# Each target must have 'build' in the interior of the path; the
# directory below this will determine the build parameters.  For
# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
# recognize that ALPHA_SE specifies the configuration because it
# follows 'build' in the build path.

# The funky assignment to "[:]" is needed to replace the list contents
# in place rather than reassign the symbol to a new list, which
# doesn't work (obviously!).
BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)

# Generate a list of the unique build roots and configs that the
# collected targets reference.
variant_paths = []
build_root = None
for t in BUILD_TARGETS:
    path_dirs = t.split('/')
    try:
        build_top = rfind(path_dirs, 'build', -2)
    except:
        print "Error: no non-leaf 'build' dir found on target path", t
        Exit(1)
    this_build_root = joinpath('/',*path_dirs[:build_top+1])
    if not build_root:
        build_root = this_build_root
    else:
        if this_build_root != build_root:
            print "Error: build targets not under same build root\n"\
                  "  %s\n  %s" % (build_root, this_build_root)
            Exit(1)
    variant_path = joinpath('/',*path_dirs[:build_top+2])
    if variant_path not in variant_paths:
        variant_paths.append(variant_path)

# Make sure build_root exists (might not if this is the first build there)
if not isdir(build_root):
    mkdir(build_root)
main['BUILDROOT'] = build_root

Export('main')

main.SConsignFile(joinpath(build_root, "sconsign"))

# Default duplicate option is to use hard links, but this messes up
# when you use emacs to edit a file in the target dir, as emacs moves
# file to file~ then copies to file, breaking the link.  Symbolic
# (soft) links work better.
main.SetOption('duplicate', 'soft-copy')

#
# Set up global sticky variables... these are common to an entire build
# tree (not specific to a particular build like ALPHA_SE)
#

global_vars_file = joinpath(build_root, 'variables.global')

global_vars = Variables(global_vars_file, args=ARGUMENTS)

global_vars.AddVariables(
    ('CC', 'C compiler', environ.get('CC', main['CC'])),
    ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
    ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
    ('BATCH', 'Use batch pool for build and tests', False),
    ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
    ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
    ('EXTRAS', 'Add extra directories to the compilation', '')
    )

# Update main environment with values from ARGUMENTS & global_vars_file
global_vars.Update(main)
help_texts["global_vars"] += global_vars.GenerateHelpText(main)

# Save sticky variable settings back to current variables file
global_vars.Save(global_vars_file, main)

# Parse the EXTRAS variable to build a list of all directories where we'll
# look for sources etc.  This list is exported as extras_dir_list.
base_dir = main.srcdir.abspath
if main['EXTRAS']:
    extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
else:
    extras_dir_list = []

Export('base_dir')
Export('extras_dir_list')

# the ext directory should be on the #includes path
main.Append(CPPPATH=[Dir('ext')])

# Add shared top-level headers
main.Prepend(CPPPATH=Dir('include'))

def strip_build_path(path, env):
    path = str(path)
    variant_base = env['BUILDROOT'] + os.path.sep
    if path.startswith(variant_base):
        path = path[len(variant_base):]
    elif path.startswith('build/'):
        path = path[6:]
    return path
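
# For example (illustrative, using the 'build/' fallback branch):
#   strip_build_path('build/ALPHA/base/misc.cc', env) -> 'ALPHA/base/misc.cc'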

# Generate a string of the form:
#   common/path/prefix/src1, src2 -> tgt1, tgt2
# to print while building.
class Transform(object):
    # all specific color settings should be here and nowhere else
    tool_color = termcap.Normal
    pfx_color = termcap.Yellow
    srcs_color = termcap.Yellow + termcap.Bold
    arrow_color = termcap.Blue + termcap.Bold
    tgts_color = termcap.Yellow + termcap.Bold

    def __init__(self, tool, max_sources=99):
        self.format = self.tool_color + (" [%8s] " % tool) \
                      + self.pfx_color + "%s" \
                      + self.srcs_color + "%s" \
                      + self.arrow_color + " -> " \
                      + self.tgts_color + "%s" \
                      + termcap.Normal
        self.max_sources = max_sources

    def __call__(self, target, source, env, for_signature=None):
        # truncate source list according to max_sources param
        source = source[0:self.max_sources]
        def strip(f):
            return strip_build_path(str(f), env)
        if len(source) > 0:
            srcs = map(strip, source)
        else:
            srcs = ['']
        tgts = map(strip, target)
        # surprisingly, os.path.commonprefix is a dumb char-by-char string
        # operation that has nothing to do with paths.
        com_pfx = os.path.commonprefix(srcs + tgts)
        com_pfx_len = len(com_pfx)
        if com_pfx:
            # do some cleanup and sanity checking on common prefix
            if com_pfx[-1] == ".":
                # prefix matches all but file extension: ok
                # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
                com_pfx = com_pfx[0:-1]
            elif com_pfx[-1] == "/":
                # common prefix is directory path: OK
                pass
            else:
                src0_len = len(srcs[0])
                tgt0_len = len(tgts[0])
                if src0_len == com_pfx_len:
                    # source is a substring of target, OK
                    pass
                elif tgt0_len == com_pfx_len:
                    # target is a substring of source, need to back up to
                    # avoid empty string on RHS of arrow
                    sep_idx = com_pfx.rfind(".")
                    if sep_idx != -1:
                        com_pfx = com_pfx[0:sep_idx]
                    else:
                        com_pfx = ''
                elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
                    # still splitting at file extension: ok
                    pass
                else:
                    # probably a fluke; ignore it
                    com_pfx = ''
        # recalculate length in case com_pfx was modified
        com_pfx_len = len(com_pfx)
        def fmt(files):
            f = map(lambda s: s[com_pfx_len:], files)
            return ', '.join(f)
        return self.format % (com_pfx, fmt(srcs), fmt(tgts))

Export('Transform')

# enable the regression script to use the termcap
main['TERMCAP'] = termcap

if GetOption('verbose'):
    def MakeAction(action, string, *args, **kwargs):
        return Action(action, *args, **kwargs)
else:
    MakeAction = Action
    main['CCCOMSTR']        = Transform("CC")
    main['CXXCOMSTR']       = Transform("CXX")
    main['ASCOMSTR']        = Transform("AS")
    main['ARCOMSTR']        = Transform("AR", 0)
    main['LINKCOMSTR']      = Transform("LINK", 0)
    main['SHLINKCOMSTR']    = Transform("SHLINK", 0)
    main['RANLIBCOMSTR']    = Transform("RANLIB", 0)
    main['M4COMSTR']        = Transform("M4")
    main['SHCCCOMSTR']      = Transform("SHCC")
    main['SHCXXCOMSTR']     = Transform("SHCXX")
Export('MakeAction')

# Initialize the Link-Time Optimization (LTO) flags
main['LTO_CCFLAGS'] = []
main['LTO_LDFLAGS'] = []

# According to the readme, tcmalloc works best if the compiler doesn't
# assume that we're using the builtin malloc and friends. These flags
# are compiler-specific, so we need to set them after we detect which
# compiler we're using.
main['TCMALLOC_CCFLAGS'] = []

CXX_version = readCommand([main['CXX'],'--version'], exception=False)
CXX_V = readCommand([main['CXX'],'-V'], exception=False)

main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
if main['GCC'] + main['CLANG'] > 1:
    print 'Error: How can we have two at the same time?'
    Exit(1)
# Set up default C++ compiler flags
if main['GCC'] or main['CLANG']:
    # As gcc and clang share many flags, do the common parts here
    main.Append(CCFLAGS=['-pipe'])
    main.Append(CCFLAGS=['-fno-strict-aliasing'])
    # Enable -Wall and -Wextra and then disable the few warnings that
    # we consistently violate
    main.Append(CCFLAGS=['-Wall', '-Wundef', '-Wextra',
                         '-Wno-sign-compare', '-Wno-unused-parameter'])
    # We always compile using C++11
    main.Append(CXXFLAGS=['-std=c++11'])
    if sys.platform.startswith('freebsd'):
        main.Append(CCFLAGS=['-I/usr/local/include'])
        main.Append(CXXFLAGS=['-I/usr/local/include'])

    main['FILTER_PSHLINKFLAGS'] = lambda x: str(x).replace(' -shared', '')
    main['PSHLINKFLAGS'] = main.subst('${FILTER_PSHLINKFLAGS(SHLINKFLAGS)}')
    main['PLINKFLAGS'] = main.subst('${LINKFLAGS}')
    shared_partial_flags = ['-r', '-nostdlib']
    main.Append(PSHLINKFLAGS=shared_partial_flags)
    main.Append(PLINKFLAGS=shared_partial_flags)
else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
               termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use GCC or clang, there appears"
    print "       to be something wrong with your environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to fix SConstruct and"
    print "       src/SConscript to support that compiler."
    Exit(1)

if main['GCC']:
    # Check for a supported version of gcc. >= 4.8 is chosen for its
    # level of c++11 support. See
    # http://gcc.gnu.org/projects/cxx0x.html for details.
    gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
    if compareVersions(gcc_version, "4.8") < 0:
        print 'Error: gcc version 4.8 or newer required.'
        print '       Installed version:', gcc_version
        Exit(1)

    main['GCC_VERSION'] = gcc_version

    if compareVersions(gcc_version, '4.9') >= 0:
        # Incremental linking with LTO is currently broken in gcc versions
        # 4.9 and above. A version where everything works completely hasn't
        # yet been identified.
        #
        # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=67548
        main['BROKEN_INCREMENTAL_LTO'] = True
    if compareVersions(gcc_version, '6.0') >= 0:
        # gcc versions 6.0 and greater accept an -flinker-output flag which
        # selects what type of output the linker should generate. This is
        # necessary for incremental lto to work, but is also broken in
        # current versions of gcc. It may not be necessary in future
        # versions. We add it here since it might be, and as a reminder that
        # it exists. It's excluded if lto is being forced.
        #
        # https://gcc.gnu.org/gcc-6/changes.html
        # https://gcc.gnu.org/ml/gcc-patches/2015-11/msg03161.html
        # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=69866
        if not GetOption('force_lto'):
            main.Append(PSHLINKFLAGS='-flinker-output=rel')
            main.Append(PLINKFLAGS='-flinker-output=rel')

    # gcc from version 4.8 and above generates "rep; ret" instructions
    # to avoid performance penalties on certain AMD chips. Older
    # assemblers detect this as an error, "Error: expecting string
    # instruction after `rep'"
    as_version_raw = readCommand([main['AS'], '-v', '/dev/null',
                                  '-o', '/dev/null'],
                                 exception=False).split()

    # version strings may contain extra distro-specific
    # qualifiers, so play it safe and keep only what comes before
    # the first hyphen
    as_version = as_version_raw[-1].split('-')[0] if as_version_raw else None

    if not as_version or compareVersions(as_version, "2.23") < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: This combination of gcc and binutils has' + \
            ' known incompatibilities.\n' + \
            '         If you encounter build problems, please update ' + \
            'binutils to 2.23.' + \
            termcap.Normal

    # Make sure we warn if the user has requested to compile with the
    # Undefined Behavior Sanitizer and this version of gcc does not
    # support it.
    if GetOption('with_ubsan') and \
            compareVersions(gcc_version, '4.9') < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: UBSan is only supported using gcc 4.9 and later.' + \
            termcap.Normal

    disable_lto = GetOption('no_lto')
    if not disable_lto and main.get('BROKEN_INCREMENTAL_LTO', False) and \
            not GetOption('force_lto'):
        print termcap.Yellow + termcap.Bold + \
            'Warning: Your compiler doesn\'t support incremental linking' + \
            ' and lto at the same time, so lto is being disabled. To force' + \
            ' lto on anyway, use the --force-lto option. That will disable' + \
            ' partial linking.' + \
            termcap.Normal
        disable_lto = True

    # Add the appropriate Link-Time Optimization (LTO) flags
    # unless LTO is explicitly turned off. Note that these flags
    # are only used by the fast target.
    if not disable_lto:
        # Pass the LTO flag when compiling to produce GIMPLE
        # output; we merely create the flags here and only append
        # them later
        main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

        # Use the same number of jobs for LTO as we are running
        # scons with
        main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
                                  '-fno-builtin-realloc', '-fno-builtin-free'])

    # add option to check for undeclared overrides
    if compareVersions(gcc_version, "5.0") > 0:
        main.Append(CCFLAGS=['-Wno-error=suggest-override'])

elif main['CLANG']:
    # Check for a supported version of clang; >= 3.1 is needed to
    # support features similar to gcc 4.8. See
    # http://clang.llvm.org/cxx_status.html for details
    clang_version_re = re.compile(".* version (\d+\.\d+)")
    clang_version_match = clang_version_re.search(CXX_version)
    if (clang_version_match):
        clang_version = clang_version_match.groups()[0]
        if compareVersions(clang_version, "3.1") < 0:
            print 'Error: clang version 3.1 or newer required.'
            print '       Installed version:', clang_version
            Exit(1)
    else:
        print 'Error: Unable to determine clang version.'
        Exit(1)

    # clang has a few additional warnings that we disable: extraneous
    # parentheses are allowed due to Ruby's printing of the AST, and
    # self assignments are allowed as the generated CPU code relies
    # on this.
    main.Append(CCFLAGS=['-Wno-parentheses',
                         '-Wno-self-assign',
                         # Some versions of libstdc++ (4.8?) seem to
                         # use struct hash and class hash
                         # interchangeably.
                         '-Wno-mismatched-tags',
                         ])

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])

    # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
    # opposed to libstdc++, as the latter is dated.
    if sys.platform == "darwin":
        main.Append(CXXFLAGS=['-stdlib=libc++'])
        main.Append(LIBS=['c++'])

    # On FreeBSD we need libthr.
    if sys.platform.startswith('freebsd'):
        main.Append(LIBS=['thr'])

else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
               termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use GCC or clang, there appears"
    print "       to be something wrong with your environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to fix SConstruct and"
    print "       src/SConscript to support that compiler."
    Exit(1)

# Set up common yacc/bison flags (needed for Ruby)
main['YACCFLAGS'] = '-d'
main['YACCHXXFILESUFFIX'] = '.hh'

# Do this after we save settings back, or else we'll tack on an
# extra 'qdo' every time we run scons.
if main['BATCH']:
    main['CC']     = main['BATCH_CMD'] + ' ' + main['CC']
    main['CXX']    = main['BATCH_CMD'] + ' ' + main['CXX']
    main['AS']     = main['BATCH_CMD'] + ' ' + main['AS']
    main['AR']     = main['BATCH_CMD'] + ' ' + main['AR']
    main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']

if sys.platform == 'cygwin':
    # cygwin has some header file issues...
    main.Append(CCFLAGS=["-Wno-uninitialized"])

# Check for the protobuf compiler
protoc_version = readCommand([main['PROTOC'], '--version'],
                             exception='').split()

# First two words should be "libprotoc x.y.z"
if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
    print termcap.Yellow + termcap.Bold + \
        'Warning: Protocol buffer compiler (protoc) not found.\n' + \
        '         Please install protobuf-compiler for tracing support.' + \
        termcap.Normal
    main['PROTOC'] = False
else:
    # Based on the availability of the compress stream wrappers,
    # require 2.1.0
    min_protoc_version = '2.1.0'
    if compareVersions(protoc_version[1], min_protoc_version) < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: protoc version', min_protoc_version, \
            'or newer required.\n' + \
            '         Installed version:', protoc_version[1], \
            termcap.Normal
        main['PROTOC'] = False
    else:
        # Attempt to determine the appropriate include path and
        # library path using pkg-config, which means we also need to
        # check for pkg-config. Note that it is possible to use
        # protobuf without the involvement of pkg-config. Later on we
        # do a library config check, and at that point the test
        # will fail if libprotobuf cannot be found.
        if readCommand(['pkg-config', '--version'], exception=''):
            try:
                # Attempt to establish what linking flags to add for protobuf
                # using pkg-config
                main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
            except:
                print termcap.Yellow + termcap.Bold + \
                    'Warning: pkg-config could not get protobuf flags.' + \
                    termcap.Normal


# Check for 'timeout' from GNU coreutils. If present, regressions will
# be run with a time limit. We require version 8.13 since we rely on
# support for the '--foreground' option.
if sys.platform.startswith('freebsd'):
    timeout_lines = readCommand(['gtimeout', '--version'],
                                exception='').splitlines()
else:
    timeout_lines = readCommand(['timeout', '--version'],
                                exception='').splitlines()
# Get the first line and tokenize it
timeout_version = timeout_lines[0].split() if timeout_lines else []
main['TIMEOUT'] =  timeout_version and \
    compareVersions(timeout_version[-1], '8.13') >= 0

# Add a custom Check function to test for structure members.
def CheckMember(context, include, decl, member, include_quotes="<>"):
    context.Message("Checking for member %s in %s..." %
                    (member, decl))
    text = """
#include %(header)s
int main(){
  %(decl)s test;
  (void)test.%(member)s;
  return 0;
};
""" % { "header" : include_quotes[0] + include + include_quotes[1],
        "decl" : decl,
        "member" : member,
        }

    ret = context.TryCompile(text, extension=".cc")
    context.Result(ret)
    return ret
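
# CheckMember is registered as a custom test below; the KVM configuration
# further down uses it like this:
#   conf.CheckMember('linux/perf_event.h', 'struct perf_event_attr',
#                    'exclude_host')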

# Platform-specific configuration.  Note again that we assume that all
# builds under a given build root run on the same host platform.
conf = Configure(main,
                 conf_dir = joinpath(build_root, '.scons_config'),
                 log_file = joinpath(build_root, 'scons_config.log'),
                 custom_tests = {
        'CheckMember' : CheckMember,
        })

# Check if we should compile a 64 bit binary on Mac OS X/Darwin
try:
    import platform
    uname = platform.uname()
    if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
        if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
            main.Append(CCFLAGS=['-arch', 'x86_64'])
            main.Append(CFLAGS=['-arch', 'x86_64'])
            main.Append(LINKFLAGS=['-arch', 'x86_64'])
            main.Append(ASFLAGS=['-arch', 'x86_64'])
except:
    pass

# Recent versions of scons substitute a "Null" object for Configure()
# when configuration isn't necessary, e.g., if the "--help" option is
# present.  Unfortunately this Null object always returns false,
# breaking all our configuration checks.  We replace it with our own
# more optimistic null object that returns True instead.
if not conf:
    def NullCheck(*args, **kwargs):
        return True

    class NullConf:
        def __init__(self, env):
            self.env = env
        def Finish(self):
            return self.env
        def __getattr__(self, mname):
            return NullCheck

    conf = NullConf(main)

# Cache build files in the supplied directory.
if main['M5_BUILD_CACHE']:
    print 'Using build cache located at', main['M5_BUILD_CACHE']
    CacheDir(main['M5_BUILD_CACHE'])

main['USE_PYTHON'] = not GetOption('without_python')
if main['USE_PYTHON']:
    # Find Python include and library directories for embedding the
    # interpreter. We rely on python-config to resolve the appropriate
    # includes and linker flags. ParseConfig does not seem to understand
    # the more exotic linker flags such as -Xlinker and -export-dynamic so
    # we add them explicitly below. If you want to link in an alternate
    # version of python, see above for instructions on how to invoke
    # scons with the appropriate PATH set.
    #
    # First we check if python2-config exists, else we use python-config
    python_config = readCommand(['which', 'python2-config'],
                                exception='').strip()
    if not os.path.exists(python_config):
        python_config = readCommand(['which', 'python-config'],
                                    exception='').strip()
    py_includes = readCommand([python_config, '--includes'],
                              exception='').split()
    # Strip the -I from the include folders before adding them to the
    # CPPPATH
    main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))

    # Read the linker flags and split them into libraries and other link
    # flags. The libraries are added later through the call to CheckLib.
    py_ld_flags = readCommand([python_config, '--ldflags'],
        exception='').split()
    py_libs = []
    for lib in py_ld_flags:
        if not lib.startswith('-l'):
            main.Append(LINKFLAGS=[lib])
        else:
            lib = lib[2:]
            if lib not in py_libs:
                py_libs.append(lib)

    # verify that this stuff works
    if not conf.CheckHeader('Python.h', '<>'):
        print "Error: can't find Python.h header in", py_includes
        print "Install Python headers (package python-dev on Ubuntu and RedHat)"
        Exit(1)

    for lib in py_libs:
        if not conf.CheckLib(lib):
            print "Error: can't find library %s required by python" % lib
            Exit(1)

# On Solaris you need to use libsocket for socket ops
if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
    if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
        print "Can't find library with socket calls (e.g. accept())"
        Exit(1)

# Check for zlib.  If the check passes, libz will be automatically
# added to the LIBS environment variable.
if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
    print 'Error: did not find needed zlib compression library '\
          'and/or zlib.h header file.'
    print '       Please install zlib and try again.'
    Exit(1)

# If we have the protobuf compiler, also make sure we have the
# development libraries. If the check passes, libprotobuf will be
# automatically added to the LIBS environment variable. After
# this, we can use the HAVE_PROTOBUF flag to determine if we have
# got both protoc and libprotobuf available.
main['HAVE_PROTOBUF'] = main['PROTOC'] and \
    conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
                            'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')

# If we have the compiler but not the library, print another warning.
if main['PROTOC'] and not main['HAVE_PROTOBUF']:
    print termcap.Yellow + termcap.Bold + \
        'Warning: did not find protocol buffer library and/or headers.\n' + \
        '       Please install libprotobuf-dev for tracing support.' + \
        termcap.Normal

# Check for librt.
have_posix_clock = \
    conf.CheckLibWithHeader(None, 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);') or \
    conf.CheckLibWithHeader('rt', 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);')

have_posix_timers = \
    conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
                            'timer_create(CLOCK_MONOTONIC, NULL, NULL);')

if not GetOption('without_tcmalloc'):
    if conf.CheckLib('tcmalloc'):
        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
    elif conf.CheckLib('tcmalloc_minimal'):
        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
    else:
        print termcap.Yellow + termcap.Bold + \
              "You can get a 12% performance improvement by "\
              "installing tcmalloc (libgoogle-perftools-dev package "\
              "on Ubuntu or RedHat)." + termcap.Normal


# Detect back trace implementations. The last implementation in the
# list will be used by default.
backtrace_impls = [ "none" ]

if conf.CheckLibWithHeader(None, 'execinfo.h', 'C',
                           'backtrace_symbols_fd((void*)0, 0, 0);'):
    backtrace_impls.append("glibc")
elif conf.CheckLibWithHeader('execinfo', 'execinfo.h', 'C',
                           'backtrace_symbols_fd((void*)0, 0, 0);'):
    # NetBSD and FreeBSD need libexecinfo.
    backtrace_impls.append("glibc")
    main.Append(LIBS=['execinfo'])

if backtrace_impls[-1] == "none":
    default_backtrace_impl = "none"
    print termcap.Yellow + termcap.Bold + \
        "No suitable back trace implementation found." + \
        termcap.Normal

if not have_posix_clock:
    print "Can't find library for POSIX clocks."

# Check for <fenv.h> (C99 FP environment control)
have_fenv = conf.CheckHeader('fenv.h', '<>')
if not have_fenv:
    print "Warning: Header file <fenv.h> not found."
    print "         This host has no IEEE FP rounding mode control."

# Check for <png.h> (libpng library needed if wanting to dump
# frame buffer image in png format)
have_png = conf.CheckHeader('png.h', '<>')
if not have_png:
    print "Warning: Header file <png.h> not found."
    print "         This host has no libpng library."
    print "         Disabling support for PNG framebuffers."

# Check if we should enable KVM-based hardware virtualization. The API
# we rely on has existed since version 2.6.36 of the kernel, but somehow
# the KVM_API_VERSION does not reflect the change. We test for one of
# the types as a fall back.
have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
if not have_kvm:
    print "Info: Compatible header file <linux/kvm.h> not found, " \
        "disabling KVM support."

# Check if the TUN/TAP driver is available.
have_tuntap = conf.CheckHeader('linux/if_tun.h', '<>')
if not have_tuntap:
    print "Info: Compatible header file <linux/if_tun.h> not found."

# x86 needs support for xsave. We test for the structure here since we
# won't be able to run new tests by the time we know which ISA we're
# targeting.
have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
                                    '#include <linux/kvm.h>') != 0

# Check if the requested target ISA is compatible with the host
def is_isa_kvm_compatible(isa):
    try:
        import platform
        host_isa = platform.machine()
    except:
        print "Warning: Failed to determine host ISA."
        return False

    if not have_posix_timers:
        print "Warning: Can not enable KVM, host seems to lack support " \
            "for POSIX timers"
        return False

    if isa == "arm":
        return host_isa in ( "armv7l", "aarch64" )
    elif isa == "x86":
        if host_isa != "x86_64":
            return False

        if not have_kvm_xsave:
            print "KVM on x86 requires xsave support in kernel headers."
            return False

        return True
    else:
        return False


# Check if the exclude_host attribute is available. We want this to
# get accurate instruction counts in KVM.
main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
    'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')

######################################################################
#
# Finish the configuration
#
main = conf.Finish()

######################################################################
#
# Collect all non-global variables
#

# Define the universe of supported ISAs
all_isa_list = [ ]
all_gpu_isa_list = [ ]
Export('all_isa_list')
Export('all_gpu_isa_list')

class CpuModel(object):
    '''The CpuModel class encapsulates everything the ISA parser needs to
    know about a particular CPU model.'''

    # Dict of available CPU model objects.  Accessible as CpuModel.dict.
    dict = {}

    # Constructor.  Automatically adds models to CpuModel.dict.
    def __init__(self, name, default=False):
        self.name = name           # name of model

        # This cpu is enabled by default
        self.default = default

        # Add self to dict
        if name in CpuModel.dict:
            raise AttributeError, "CpuModel '%s' already registered" % name
        CpuModel.dict[name] = self

Export('CpuModel')

# Sticky variables get saved in the variables file so they persist from
# one invocation to the next (unless overridden, in which case the new
# value becomes sticky).
sticky_vars = Variables(args=ARGUMENTS)
Export('sticky_vars')

# Sticky variables that should be exported
export_vars = []
Export('export_vars')

# For Ruby
all_protocols = []
Export('all_protocols')
protocol_dirs = []
Export('protocol_dirs')
slicc_includes = []
Export('slicc_includes')

# Walk the tree and execute all SConsopts scripts that will add to the
# above variables
if GetOption('verbose'):
    print "Reading SConsopts"
for bdir in [ base_dir ] + extras_dir_list:
    if not isdir(bdir):
        print "Error: directory '%s' does not exist" % bdir
        Exit(1)
    for root, dirs, files in os.walk(bdir):
        if 'SConsopts' in files:
            if GetOption('verbose'):
                print "Reading", joinpath(root, 'SConsopts')
            SConscript(joinpath(root, 'SConsopts'))

all_isa_list.sort()
all_gpu_isa_list.sort()

sticky_vars.AddVariables(
    EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
    EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list),
    ListVariable('CPU_MODELS', 'CPU models',
                 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
                 sorted(CpuModel.dict.keys())),
    BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
                 False),
    BoolVariable('SS_COMPATIBLE_FP',
                 'Make floating-point results compatible with SimpleScalar',
                 False),
    BoolVariable('USE_SSE2',
                 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
                 False),
    BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
    BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
    BoolVariable('USE_PNG',  'Enable support for PNG images', have_png),
    BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability',
                 False),
    BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models',
                 have_kvm),
    BoolVariable('USE_TUNTAP',
                 'Enable using a tap device to bridge to the host network',
                 have_tuntap),
    BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
    EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
                  all_protocols),
    EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
                 backtrace_impls[-1], backtrace_impls)
    )

# These variables get exported to #defines in config/*.hh (see src/SConscript).
export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
                'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'USE_TUNTAP',
                'PROTOCOL', 'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST',
                'USE_PNG']

###################################################
#
# Define a SCons builder for configuration flag headers.
#
###################################################

# This function generates a config header file that #defines the
# variable symbol to the current variable setting (0 or 1).  The source
# operands are the name of the variable and a Value node containing the
# value of the variable.
def build_config_file(target, source, env):
    (variable, value) = [s.get_contents() for s in source]
    f = file(str(target[0]), 'w')
    print >> f, '#define', variable, value
    f.close()
    return None

# Combine the two functions into a scons Action object.
config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))

# The emitter munges the source & target node lists to reflect what
# we're really doing.
def config_emitter(target, source, env):
    # extract variable name from Builder arg
    variable = str(target[0])
    # True target is config header file
    target = joinpath('config', variable.lower() + '.hh')
    val = env[variable]
    if isinstance(val, bool):
        # Force value to 0/1
        val = int(val)
    elif isinstance(val, str):
        val = '"' + val + '"'

    # Sources are variable name & value (packaged in SCons Value nodes)
    return ([target], [Value(variable), Value(val)])

config_builder = Builder(emitter = config_emitter, action = config_action)

main.Append(BUILDERS = { 'ConfigFile' : config_builder })
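
# Illustrative use: env.ConfigFile('USE_KVM') would generate
# config/use_kvm.hh containing '#define USE_KVM 1' (or 0) based on the
# current value of env['USE_KVM']; the actual invocations live in
# src/SConscript.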

###################################################
#
# Builders for static and shared partially linked object files.
#
###################################################

partial_static_builder = Builder(action=SCons.Defaults.LinkAction,
                                 src_suffix='$OBJSUFFIX',
                                 src_builder=['StaticObject', 'Object'],
                                 LINKFLAGS='$PLINKFLAGS',
                                 LIBS='')

def partial_shared_emitter(target, source, env):
    for tgt in target:
        tgt.attributes.shared = 1
    return (target, source)
partial_shared_builder = Builder(action=SCons.Defaults.ShLinkAction,
                                 emitter=partial_shared_emitter,
                                 src_suffix='$SHOBJSUFFIX',
                                 src_builder='SharedObject',
                                 SHLINKFLAGS='$PSHLINKFLAGS',
                                 LIBS='')

main.Append(BUILDERS = { 'PartialShared' : partial_shared_builder,
                         'PartialStatic' : partial_static_builder })
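
# Illustrative use (target names are hypothetical):
#   env.PartialStatic(target='partial.o', source=static_objects)
#   env.PartialShared(target='partial.os', source=shared_objects)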

# Builds in ext are shared across all configs in the build root.
ext_dir = abspath(joinpath(str(main.root), 'ext'))
ext_build_dirs = []
for root, dirs, files in os.walk(ext_dir):
    if 'SConscript' in files:
        build_dir = os.path.relpath(root, ext_dir)
        ext_build_dirs.append(build_dir)
        main.SConscript(joinpath(root, 'SConscript'),
                        variant_dir=joinpath(build_root, build_dir))

main.Prepend(CPPPATH=Dir('ext/pybind11/include/'))

###################################################
#
# This builder and wrapper method are used to set up a directory with
# switching headers. Those are headers which are in a generic location and
# that include more specific headers from a directory chosen at build time
# based on the current build settings.
#
###################################################

def build_switching_header(target, source, env):
    path = str(target[0])
    subdir = str(source[0])
    dp, fp = os.path.split(path)
    dp = os.path.relpath(os.path.realpath(dp),
                         os.path.realpath(env['BUILDDIR']))
    with open(path, 'w') as hdr:
        print >>hdr, '#include "%s/%s/%s"' % (dp, subdir, fp)

switching_header_action = MakeAction(build_switching_header,
                                     Transform('GENERATE'))

switching_header_builder = Builder(action=switching_header_action,
                                   source_factory=Value,
                                   single_source=True)

main.Append(BUILDERS = { 'SwitchingHeader': switching_header_builder })

def switching_headers(self, headers, source):
    for header in headers:
        self.SwitchingHeader(header, source)

main.AddMethod(switching_headers, 'SwitchingHeaders')
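
# Illustrative use from an architecture SConscript (names are hypothetical):
#   env.SwitchingHeaders(['decoder.hh', 'interrupts.hh'], env['TARGET_ISA'])
# Each generated header simply #includes the matching header from the
# subdirectory named by the second argument.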

###################################################
#
# Define build environments for selected configurations.
#
###################################################

for variant_path in variant_paths:
    if not GetOption('silent'):
        print "Building in", variant_path

    # Make a copy of the build-root environment to use for this config.
    env = main.Clone()
    env['BUILDDIR'] = variant_path

    # variant_dir is the tail component of the build path, and is used to
    # determine the build parameters (e.g., 'ALPHA_SE')
    (build_root, variant_dir) = splitpath(variant_path)

    # Set env variables according to the build directory config.
    sticky_vars.files = []
    # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
    # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
    # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
    current_vars_file = joinpath(build_root, 'variables', variant_dir)
    if isfile(current_vars_file):
        sticky_vars.files.append(current_vars_file)
        if not GetOption('silent'):
            print "Using saved variables file %s" % current_vars_file
    elif variant_dir in ext_build_dirs:
        # Things in ext are built without a variant directory.
        continue
    else:
        # Build dir-specific variables file doesn't exist.

        # Make sure the directory is there so we can create the file later
        opt_dir = dirname(current_vars_file)
        if not isdir(opt_dir):
            mkdir(opt_dir)

        # Get default build variables from source tree.  Variables are
        # normally determined by name of $VARIANT_DIR, but can be
        # overridden by '--default=' arg on command line.
        default = GetOption('default')
        opts_dir = joinpath(main.root.abspath, 'build_opts')
        if default:
            default_vars_files = [joinpath(build_root, 'variables', default),
                                  joinpath(opts_dir, default)]
        else:
            default_vars_files = [joinpath(opts_dir, variant_dir)]
        existing_files = filter(isfile, default_vars_files)
        if existing_files:
            default_vars_file = existing_files[0]
            sticky_vars.files.append(default_vars_file)
            print "Variables file %s not found,\n  using defaults in %s" \
                  % (current_vars_file, default_vars_file)
        else:
            print "Error: cannot find variables file %s or " \
                  "default file(s) %s" \
                  % (current_vars_file, ' or '.join(default_vars_files))
            Exit(1)

    # Apply current variable settings to env
    sticky_vars.Update(env)

    help_texts["local_vars"] += \
        "Build variables for %s:\n" % variant_dir \
                 + sticky_vars.GenerateHelpText(env)

    # Process variable settings.

    if not have_fenv and env['USE_FENV']:
        print "Warning: <fenv.h> not available; " \
              "forcing USE_FENV to False in", variant_dir + "."
        env['USE_FENV'] = False

    if not env['USE_FENV']:
        print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
        print "         FP results may deviate slightly from other platforms."

    if not have_png and env['USE_PNG']:
        print "Warning: <png.h> not available; " \
              "forcing USE_PNG to False in", variant_dir + "."
        env['USE_PNG'] = False

    if env['USE_PNG']:
        env.Append(LIBS=['png'])

    if env['EFENCE']:
        env.Append(LIBS=['efence'])

    if env['USE_KVM']:
        if not have_kvm:
            print "Warning: Can not enable KVM, host seems to lack KVM support"
            env['USE_KVM'] = False
        elif not is_isa_kvm_compatible(env['TARGET_ISA']):
            print "Info: KVM support disabled due to unsupported host and " \
                "target ISA combination"
            env['USE_KVM'] = False

    if env['USE_TUNTAP']:
        if not have_tuntap:
            print "Warning: Can't connect EtherTap with a tap device."
            env['USE_TUNTAP'] = False

    if env['BUILD_GPU']:
        env.Append(CPPDEFINES=['BUILD_GPU'])

    # Warn about missing optional functionality
    if env['USE_KVM']:
        if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
            print "Warning: perf_event headers lack support for the " \
                "exclude_host attribute. KVM instruction counts will " \
                "be inaccurate."

    # Save sticky variable settings back to current variables file
    sticky_vars.Save(current_vars_file, env)

    if env['USE_SSE2']:
        env.Append(CCFLAGS=['-msse2'])

    # The src/SConscript file sets up the build rules in 'env' according
    # to the configured variables.  It returns a list of environments,
    # one for each variant build (debug, opt, etc.)
    SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')

# base help text
Help('''
Usage: scons [scons options] [build variables] [target(s)]

Extra scons options:
%(options)s

Global build variables:
%(global_vars)s

%(local_vars)s
''' % help_texts)