# -*- mode:python -*-

# Copyright (c) 2013, 2015-2017 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder.  You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2011 Advanced Micro Devices, Inc.
# Copyright (c) 2009 The Hewlett-Packard Development Company
# Copyright (c) 2004-2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
#          Nathan Binkert

###################################################
#
# SCons top-level build description (SConstruct) file.
#
# While in this directory ('gem5'), just type 'scons' to build the default
# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
# the optimized full-system version).
#
# You can build gem5 in a different directory as long as there is a
# 'build/<CONFIG>' somewhere along the target path.  The build system
# expects that all configs under the same build directory are being
# built for the same host system.
#
# Examples:
#
#   The following two commands are equivalent.  The '-u' option tells
#   scons to search up the directory tree for this SConstruct file.
#   % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
#   % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
#
#   The following two commands are equivalent and demonstrate building
#   in a directory outside of the source tree.  The '-C' option tells
#   scons to chdir to the specified directory to find this SConstruct
#   file.
#   % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
#   % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
#
# You can use 'scons -H' to print scons options.  If you're in this
# 'gem5' directory (or use -u or -C to tell scons where to find this
# file), you can use 'scons -h' to print all the gem5-specific build
# options as well.
#
###################################################

# Global Python includes
import itertools
import os
import re
import shutil
import subprocess
import sys

from os import mkdir, environ
from os.path import abspath, basename, dirname, expanduser, normpath
from os.path import exists,  isdir, isfile
from os.path import join as joinpath, split as splitpath

# SCons includes
import SCons
import SCons.Node

from m5.util import compareVersions, readCommand
from m5.util.terminal import get_termcap

help_texts = {
    "options" : "",
    "global_vars" : "",
    "local_vars" : ""
}

Export("help_texts")


# There's a bug in scons in that (1) by default, the help texts from
# AddOption() are supposed to be displayed when you type 'scons -h'
# and (2) you can override the help displayed by 'scons -h' using the
# Help() function, but these two features are incompatible: once
# you've overridden the help text using Help(), there's no way to get
# at the help texts from AddOptions.  See:
#     http://scons.tigris.org/issues/show_bug.cgi?id=2356
#     http://scons.tigris.org/issues/show_bug.cgi?id=2611
# This hack lets us extract the help text from AddOptions and
# re-inject it via Help().  Ideally someday this bug will be fixed and
# we can just use AddOption directly.
def AddLocalOption(*args, **kwargs):
    col_width = 30

    help = "  " + ", ".join(args)
    if "help" in kwargs:
        length = len(help)
        if length >= col_width:
            help += "\n" + " " * col_width
        else:
            help += " " * (col_width - length)
        help += kwargs["help"]
    help_texts["options"] += help + "\n"

    AddOption(*args, **kwargs)

AddLocalOption('--colors', dest='use_colors', action='store_true',
               help="Add color to abbreviated scons output")
AddLocalOption('--no-colors', dest='use_colors', action='store_false',
               help="Don't add color to abbreviated scons output")
AddLocalOption('--with-cxx-config', dest='with_cxx_config',
               action='store_true',
               help="Build with support for C++-based configuration")
AddLocalOption('--default', dest='default', type='string', action='store',
               help='Override which build_opts file to use for defaults')
AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
               help='Disable style checking hooks')
AddLocalOption('--no-lto', dest='no_lto', action='store_true',
               help='Disable Link-Time Optimization for fast')
AddLocalOption('--force-lto', dest='force_lto', action='store_true',
               help='Use Link-Time Optimization instead of partial linking' +
                    ' when the compiler doesn\'t support using them together.')
AddLocalOption('--update-ref', dest='update_ref', action='store_true',
               help='Update test reference outputs')
AddLocalOption('--verbose', dest='verbose', action='store_true',
               help='Print full tool command lines')
AddLocalOption('--without-python', dest='without_python',
               action='store_true',
               help='Build without Python configuration support')
AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
               action='store_true',
               help='Disable linking against tcmalloc')
AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
               help='Build with Undefined Behavior Sanitizer if available')
AddLocalOption('--with-asan', dest='with_asan', action='store_true',
               help='Build with Address Sanitizer if available')

if GetOption('no_lto') and GetOption('force_lto'):
    print '--no-lto and --force-lto are mutually exclusive'
    Exit(1)

termcap = get_termcap(GetOption('use_colors'))

########################################################################
#
# Set up the main build environment.
#
########################################################################

main = Environment()

main_dict_keys = main.Dictionary().keys()

# Check that we have a C/C++ compiler
if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
    print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
    Exit(1)

###################################################
#
# Figure out which configurations to set up based on the path(s) of
# the target(s).
#
###################################################

# Find default configuration & binary.
Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))

# helper function: find last occurrence of element in list
def rfind(l, elt, offs = -1):
    for i in range(len(l)+offs, 0, -1):
        if l[i] == elt:
            return i
    raise ValueError, "element not found"

# Take a list of paths (or SCons Nodes) and return a list with all
# paths made absolute and ~-expanded.  Paths will be interpreted
# relative to the launch directory unless a different root is provided
def makePathListAbsolute(path_list, root=GetLaunchDir()):
    return [abspath(joinpath(root, expanduser(str(p))))
            for p in path_list]
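
# Illustrative sketch (paths and user are hypothetical, not part of the build
# logic): if scons were launched from /work/gem5, this helper would map
#   makePathListAbsolute(['build/ALPHA/gem5.opt', '~/extras'])
# to something like
#   ['/work/gem5/build/ALPHA/gem5.opt', '/home/<user>/extras']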

# Each target must have 'build' in the interior of the path; the
# directory below this will determine the build parameters.  For
# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
# recognize that ALPHA_SE specifies the configuration because it
217

# The funky assignment to "[:]" is needed to replace the list contents
# in place rather than reassign the symbol to a new list, which
# doesn't work (obviously!).
BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)

# Generate a list of the unique build roots and configs that the
# collected targets reference.
variant_paths = []
build_root = None
for t in BUILD_TARGETS:
    path_dirs = t.split('/')
    try:
        build_top = rfind(path_dirs, 'build', -2)
    except:
        print "Error: no non-leaf 'build' dir found on target path", t
        Exit(1)
    this_build_root = joinpath('/',*path_dirs[:build_top+1])
    if not build_root:
        build_root = this_build_root
    else:
        if this_build_root != build_root:
            print "Error: build targets not under same build root\n"\
                  "  %s\n  %s" % (build_root, this_build_root)
            Exit(1)
    variant_path = joinpath('/',*path_dirs[:build_top+2])
    if variant_path not in variant_paths:
        variant_paths.append(variant_path)
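
# Illustrative example (hypothetical paths): a target such as
# '/work/gem5/build/ALPHA/gem5.opt' would yield
#   build_root   = '/work/gem5/build'
#   variant_path = '/work/gem5/build/ALPHA'
# and every other target must share the same build root.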

# Make sure build_root exists (might not if this is the first build there)
if not isdir(build_root):
    mkdir(build_root)
main['BUILDROOT'] = build_root

Export('main')

main.SConsignFile(joinpath(build_root, "sconsign"))

# Default duplicate option is to use hard links, but this messes up
# when you use emacs to edit a file in the target dir, as emacs moves
# file to file~ then copies to file, breaking the link.  Symbolic
# (soft) links work better.
main.SetOption('duplicate', 'soft-copy')

#
# Set up global sticky variables... these are common to an entire build
# tree (not specific to a particular build like ALPHA_SE)
#

global_vars_file = joinpath(build_root, 'variables.global')

global_vars = Variables(global_vars_file, args=ARGUMENTS)

global_vars.AddVariables(
    ('CC', 'C compiler', environ.get('CC', main['CC'])),
    ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
    ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
    ('BATCH', 'Use batch pool for build and tests', False),
    ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
    ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
    ('EXTRAS', 'Add extra directories to the compilation', '')
    )

# Update main environment with values from ARGUMENTS & global_vars_file
global_vars.Update(main)
help_texts["global_vars"] += global_vars.GenerateHelpText(main)

# Save sticky variable settings back to current variables file
global_vars.Save(global_vars_file, main)
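
# For example (a sketch, not a prescribed invocation), running
#   scons CC=clang CXX=clang++ build/ALPHA/gem5.opt
# records CC/CXX in build/variables.global, so later invocations reuse the
# same compilers until the variables are overridden again on the command line.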

# Parse the EXTRAS variable to build a list of all directories where we'll
# look for sources etc.  This list is exported as extras_dir_list.
base_dir = main.srcdir.abspath
if main['EXTRAS']:
    extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
else:
    extras_dir_list = []

Export('base_dir')
Export('extras_dir_list')

# the ext directory should be on the #includes path
main.Append(CPPPATH=[Dir('ext')])

# Add shared top-level headers
main.Prepend(CPPPATH=Dir('include'))

def strip_build_path(path, env):
    path = str(path)
    variant_base = env['BUILDROOT'] + os.path.sep
    if path.startswith(variant_base):
        path = path[len(variant_base):]
    elif path.startswith('build/'):
        path = path[6:]
    return path
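
# Hypothetical example: with BUILDROOT '/work/gem5/build', a path such as
# '/work/gem5/build/ALPHA/base/misc.o' would be reduced to 'ALPHA/base/misc.o'
# so the abbreviated build output below stays readable.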

# Generate a string of the form:
#   common/path/prefix/src1, src2 -> tgt1, tgt2
# to print while building.
class Transform(object):
    # all specific color settings should be here and nowhere else
    tool_color = termcap.Normal
    pfx_color = termcap.Yellow
    srcs_color = termcap.Yellow + termcap.Bold
    arrow_color = termcap.Blue + termcap.Bold
    tgts_color = termcap.Yellow + termcap.Bold

    def __init__(self, tool, max_sources=99):
        self.format = self.tool_color + (" [%8s] " % tool) \
                      + self.pfx_color + "%s" \
                      + self.srcs_color + "%s" \
                      + self.arrow_color + " -> " \
                      + self.tgts_color + "%s" \
                      + termcap.Normal
        self.max_sources = max_sources

    def __call__(self, target, source, env, for_signature=None):
        # truncate source list according to max_sources param
        source = source[0:self.max_sources]
        def strip(f):
            return strip_build_path(str(f), env)
        if len(source) > 0:
            srcs = map(strip, source)
        else:
            srcs = ['']
        tgts = map(strip, target)
        # surprisingly, os.path.commonprefix is a dumb char-by-char string
        # operation that has nothing to do with paths.
        com_pfx = os.path.commonprefix(srcs + tgts)
        com_pfx_len = len(com_pfx)
        if com_pfx:
            # do some cleanup and sanity checking on common prefix
            if com_pfx[-1] == ".":
                # prefix matches all but file extension: ok
                # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
                com_pfx = com_pfx[0:-1]
            elif com_pfx[-1] == "/":
                # common prefix is directory path: OK
                pass
            else:
                src0_len = len(srcs[0])
                tgt0_len = len(tgts[0])
                if src0_len == com_pfx_len:
                    # source is a substring of target, OK
                    pass
                elif tgt0_len == com_pfx_len:
                    # target is a substring of source, need to back up to
                    # avoid empty string on RHS of arrow
                    sep_idx = com_pfx.rfind(".")
                    if sep_idx != -1:
                        com_pfx = com_pfx[0:sep_idx]
                    else:
                        com_pfx = ''
                elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
                    # still splitting at file extension: ok
                    pass
                else:
                    # probably a fluke; ignore it
                    com_pfx = ''
        # recalculate length in case com_pfx was modified
        com_pfx_len = len(com_pfx)
        def fmt(files):
            f = map(lambda s: s[com_pfx_len:], files)
            return ', '.join(f)
        return self.format % (com_pfx, fmt(srcs), fmt(tgts))
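
# As an illustration (hypothetical file names), Transform("CXX") would render
# a compile step roughly as:
#   [     CXX] src/base/misc.cc -> .o
# i.e., the common prefix is printed once and only the differing suffixes of
# the source and target lists appear after the arrow.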

Export('Transform')

# enable the regression script to use the termcap
main['TERMCAP'] = termcap

if GetOption('verbose'):
    def MakeAction(action, string, *args, **kwargs):
        return Action(action, *args, **kwargs)
else:
    MakeAction = Action
    main['CCCOMSTR']        = Transform("CC")
    main['CXXCOMSTR']       = Transform("CXX")
    main['ASCOMSTR']        = Transform("AS")
    main['ARCOMSTR']        = Transform("AR", 0)
    main['LINKCOMSTR']      = Transform("LINK", 0)
    main['SHLINKCOMSTR']    = Transform("SHLINK", 0)
    main['RANLIBCOMSTR']    = Transform("RANLIB", 0)
    main['M4COMSTR']        = Transform("M4")
    main['SHCCCOMSTR']      = Transform("SHCC")
    main['SHCXXCOMSTR']     = Transform("SHCXX")
Export('MakeAction')

# Initialize the Link-Time Optimization (LTO) flags
main['LTO_CCFLAGS'] = []
main['LTO_LDFLAGS'] = []

# According to the readme, tcmalloc works best if the compiler doesn't
# assume that we're using the builtin malloc and friends. These flags
# are compiler-specific, so we need to set them after we detect which
# compiler we're using.
main['TCMALLOC_CCFLAGS'] = []

CXX_version = readCommand([main['CXX'],'--version'], exception=False)
CXX_V = readCommand([main['CXX'],'-V'], exception=False)

main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
if main['GCC'] + main['CLANG'] > 1:
    print 'Error: How can we have two at the same time?'
    Exit(1)

# Set up default C++ compiler flags
if main['GCC'] or main['CLANG']:
    # As gcc and clang share many flags, do the common parts here
    main.Append(CCFLAGS=['-pipe'])
    main.Append(CCFLAGS=['-fno-strict-aliasing'])
    # Enable -Wall and -Wextra and then disable the few warnings that
    # we consistently violate
    main.Append(CCFLAGS=['-Wall', '-Wundef', '-Wextra',
                         '-Wno-sign-compare', '-Wno-unused-parameter'])
    # We always compile using C++11
    main.Append(CXXFLAGS=['-std=c++11'])
    if sys.platform.startswith('freebsd'):
        main.Append(CCFLAGS=['-I/usr/local/include'])
        main.Append(CXXFLAGS=['-I/usr/local/include'])

    main['FILTER_PSHLINKFLAGS'] = lambda x: str(x).replace(' -shared', '')
    main['PSHLINKFLAGS'] = main.subst('${FILTER_PSHLINKFLAGS(SHLINKFLAGS)}')
    main['PLINKFLAGS'] = main.subst('${LINKFLAGS}')
    shared_partial_flags = ['-r', '-nostdlib']
    main.Append(PSHLINKFLAGS=shared_partial_flags)
    main.Append(PLINKFLAGS=shared_partial_flags)
else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
               termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use a compiler other than GCC"
    print "       or clang, there appears to be something wrong with your"
    print "       environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above, you will need to fix SConstruct and"
    print "       src/SConscript to support that compiler."
    Exit(1)

if main['GCC']:
    # Check for a supported version of gcc. >= 4.8 is chosen for its
    # level of c++11 support. See
    # http://gcc.gnu.org/projects/cxx0x.html for details.
    gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
    if compareVersions(gcc_version, "4.8") < 0:
        print 'Error: gcc version 4.8 or newer required.'
        print '       Installed version:', gcc_version
        Exit(1)

    main['GCC_VERSION'] = gcc_version

    if compareVersions(gcc_version, '4.9') >= 0:
        # Incremental linking with LTO is currently broken in gcc versions
        # 4.9 and above. A version where everything works completely hasn't
        # yet been identified.
        #
        # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=67548
        main['BROKEN_INCREMENTAL_LTO'] = True
    if compareVersions(gcc_version, '6.0') >= 0:
        # gcc versions 6.0 and greater accept an -flinker-output flag which
        # selects what type of output the linker should generate. This is
        # necessary for incremental lto to work, but is also broken in
        # current versions of gcc. It may not be necessary in future
        # versions. We add it here since it might be, and as a reminder that
        # it exists. It's excluded if lto is being forced.
        #
        # https://gcc.gnu.org/gcc-6/changes.html
        # https://gcc.gnu.org/ml/gcc-patches/2015-11/msg03161.html
        # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=69866
        if not GetOption('force_lto'):
            main.Append(PSHLINKFLAGS='-flinker-output=rel')
            main.Append(PLINKFLAGS='-flinker-output=rel')

    # gcc from version 4.8 and above generates "rep; ret" instructions
    # to avoid performance penalties on certain AMD chips. Older
    # assemblers detect this as an error, "Error: expecting string
    # instruction after `rep'"
    as_version_raw = readCommand([main['AS'], '-v', '/dev/null',
                                  '-o', '/dev/null'],
                                 exception=False).split()

    # version strings may contain extra distro-specific
    # qualifiers, so play it safe and keep only what comes before
    # the first hyphen
    as_version = as_version_raw[-1].split('-')[0] if as_version_raw else None
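
    # Sketch of the parsing above (version strings vary by distro): an
    # assembler banner ending in a token such as '2.25.1-22.base.el7' would
    # be reduced to '2.25.1' before the comparison against 2.23 below.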

    if not as_version or compareVersions(as_version, "2.23") < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: This combination of gcc and binutils has' + \
            ' known incompatibilities.\n' + \
            '         If you encounter build problems, please update ' + \
            'binutils to 2.23.' + \
            termcap.Normal

    # Make sure we warn if the user has requested to compile with the
    # Undefined Behavior Sanitizer and this version of gcc does not
    # support it.
    if GetOption('with_ubsan') and \
            compareVersions(gcc_version, '4.9') < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: UBSan is only supported using gcc 4.9 and later.' + \
            termcap.Normal

    disable_lto = GetOption('no_lto')
    if not disable_lto and main.get('BROKEN_INCREMENTAL_LTO', False) and \
            not GetOption('force_lto'):
        print termcap.Yellow + termcap.Bold + \
            'Warning: Your compiler doesn\'t support incremental linking' + \
            ' and lto at the same time, so lto is being disabled. To force' + \
            ' lto on anyway, use the --force-lto option. That will disable' + \
            ' partial linking.' + \
            termcap.Normal
        disable_lto = True

    # Add the appropriate Link-Time Optimization (LTO) flags
    # unless LTO is explicitly turned off. Note that these flags
    # are only used by the fast target.
    if not disable_lto:
        # Pass the LTO flag when compiling to produce GIMPLE
        # output, we merely create the flags here and only append
        # them later
        main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

        # Use the same amount of jobs for LTO as we are running
        # scons with
        main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
                                  '-fno-builtin-realloc', '-fno-builtin-free'])

    # add option to check for undeclared overrides
    if compareVersions(gcc_version, "5.0") > 0:
        main.Append(CCFLAGS=['-Wno-error=suggest-override'])

elif main['CLANG']:
    # Check for a supported version of clang, >= 3.1 is needed to
    # support similar features as gcc 4.8. See
    # http://clang.llvm.org/cxx_status.html for details
    clang_version_re = re.compile(".* version (\d+\.\d+)")
    clang_version_match = clang_version_re.search(CXX_version)
    if (clang_version_match):
        clang_version = clang_version_match.groups()[0]
        if compareVersions(clang_version, "3.1") < 0:
            print 'Error: clang version 3.1 or newer required.'
            print '       Installed version:', clang_version
            Exit(1)
    else:
        print 'Error: Unable to determine clang version.'
        Exit(1)

    # clang has a few additional warnings that we disable: extraneous
    # parentheses are allowed due to Ruby's printing of the AST, and
    # self assignments are allowed as the generated CPU code
    # relies on this
    main.Append(CCFLAGS=['-Wno-parentheses',
                         '-Wno-self-assign',
                         # Some versions of libstdc++ (4.8?) seem to
                         # use struct hash and class hash
                         # interchangeably.
                         '-Wno-mismatched-tags',
                         ])

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])

    # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
    # opposed to libstdc++, as the latter is dated.
    if sys.platform == "darwin":
        main.Append(CXXFLAGS=['-stdlib=libc++'])
        main.Append(LIBS=['c++'])

    # On FreeBSD we need libthr.
    if sys.platform.startswith('freebsd'):
        main.Append(LIBS=['thr'])

else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
               termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use a compiler other than GCC"
    print "       or clang, there appears to be something wrong with your"
    print "       environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above, you will need to fix SConstruct and"
    print "       src/SConscript to support that compiler."
    Exit(1)

# Set up common yacc/bison flags (needed for Ruby)
main['YACCFLAGS'] = '-d'
main['YACCHXXFILESUFFIX'] = '.hh'

# Do this after we save settings back, or else we'll tack on an
# extra 'qdo' every time we run scons.
if main['BATCH']:
    main['CC']     = main['BATCH_CMD'] + ' ' + main['CC']
    main['CXX']    = main['BATCH_CMD'] + ' ' + main['CXX']
    main['AS']     = main['BATCH_CMD'] + ' ' + main['AS']
    main['AR']     = main['BATCH_CMD'] + ' ' + main['AR']
    main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']

if sys.platform == 'cygwin':
    # cygwin has some header file issues...
    main.Append(CCFLAGS=["-Wno-uninitialized"])

# Check for the protobuf compiler
protoc_version = readCommand([main['PROTOC'], '--version'],
                             exception='').split()

# First two words should be "libprotoc x.y.z"
if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
    print termcap.Yellow + termcap.Bold + \
        'Warning: Protocol buffer compiler (protoc) not found.\n' + \
        '         Please install protobuf-compiler for tracing support.' + \
        termcap.Normal
    main['PROTOC'] = False
else:
    # Based on the availability of the compress stream wrappers,
    # require 2.1.0
    min_protoc_version = '2.1.0'
    if compareVersions(protoc_version[1], min_protoc_version) < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: protoc version', min_protoc_version, \
            'or newer required.\n' + \
            '         Installed version:', protoc_version[1], \
            termcap.Normal
        main['PROTOC'] = False
    else:
        # Attempt to determine the appropriate include path and
        # library path using pkg-config, that means we also need to
        # check for pkg-config. Note that it is possible to use
        # protobuf without the involvement of pkg-config. Later on we
        # do a library config check and at that point the test
        # will fail if libprotobuf cannot be found.
        if readCommand(['pkg-config', '--version'], exception=''):
            try:
                # Attempt to establish what linking flags to add for protobuf
                # using pkg-config
                main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
            except:
                print termcap.Yellow + termcap.Bold + \
                    'Warning: pkg-config could not get protobuf flags.' + \
                    termcap.Normal


# Check for 'timeout' from GNU coreutils. If present, regressions will
# be run with a time limit. We require version 8.13 since we rely on
# support for the '--foreground' option.
if sys.platform.startswith('freebsd'):
    timeout_lines = readCommand(['gtimeout', '--version'],
                                exception='').splitlines()
else:
    timeout_lines = readCommand(['timeout', '--version'],
                                exception='').splitlines()
# Get the first line and tokenize it
timeout_version = timeout_lines[0].split() if timeout_lines else []
main['TIMEOUT'] =  timeout_version and \
    compareVersions(timeout_version[-1], '8.13') >= 0
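
# For instance (the exact banner text varies), a first line such as
# 'timeout (GNU coreutils) 8.25' tokenizes so that its last field, '8.25',
# is compared against 8.13, enabling the time limit for regressions.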

# Add a custom Check function to test for structure members.
def CheckMember(context, include, decl, member, include_quotes="<>"):
    context.Message("Checking for member %s in %s..." %
                    (member, decl))
    text = """
#include %(header)s
int main(){
  %(decl)s test;
  (void)test.%(member)s;
  return 0;
};
""" % { "header" : include_quotes[0] + include + include_quotes[1],
        "decl" : decl,
        "member" : member,
        }

    ret = context.TryCompile(text, extension=".cc")
    context.Result(ret)
    return ret
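
# As a sketch of what this check compiles, the exclude_host probe further
# below would expand the template above to roughly:
#   #include <linux/perf_event.h>
#   int main(){
#     struct perf_event_attr test;
#     (void)test.exclude_host;
#     return 0;
#   };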

# Platform-specific configuration.  Note again that we assume that all
# builds under a given build root run on the same host platform.
conf = Configure(main,
                 conf_dir = joinpath(build_root, '.scons_config'),
                 log_file = joinpath(build_root, 'scons_config.log'),
                 custom_tests = {
        'CheckMember' : CheckMember,
        })

# Check if we should compile a 64 bit binary on Mac OS X/Darwin
try:
    import platform
    uname = platform.uname()
    if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
        if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
            main.Append(CCFLAGS=['-arch', 'x86_64'])
            main.Append(CFLAGS=['-arch', 'x86_64'])
            main.Append(LINKFLAGS=['-arch', 'x86_64'])
            main.Append(ASFLAGS=['-arch', 'x86_64'])
except:
    pass

# Recent versions of scons substitute a "Null" object for Configure()
# when configuration isn't necessary, e.g., if the "--help" option is
# present.  Unfortunately this Null object always returns false,
# breaking all our configuration checks.  We replace it with our own
# more optimistic null object that returns True instead.
if not conf:
    def NullCheck(*args, **kwargs):
        return True

    class NullConf:
        def __init__(self, env):
            self.env = env
        def Finish(self):
            return self.env
        def __getattr__(self, mname):
            return NullCheck

    conf = NullConf(main)

# Cache build files in the supplied directory.
if main['M5_BUILD_CACHE']:
    print 'Using build cache located at', main['M5_BUILD_CACHE']
    CacheDir(main['M5_BUILD_CACHE'])

main['USE_PYTHON'] = not GetOption('without_python')
if main['USE_PYTHON']:
    # Find Python include and library directories for embedding the
    # interpreter. We rely on python-config to resolve the appropriate
    # includes and linker flags. ParseConfig does not seem to understand
    # the more exotic linker flags such as -Xlinker and -export-dynamic so
    # we add them explicitly below. If you want to link in an alternate
    # version of python, see above for instructions on how to invoke
    # scons with the appropriate PATH set.
    #
    # First we check if python2-config exists, else we use python-config
    python_config = readCommand(['which', 'python2-config'],
                                exception='').strip()
    if not os.path.exists(python_config):
        python_config = readCommand(['which', 'python-config'],
                                    exception='').strip()
    py_includes = readCommand([python_config, '--includes'],
                              exception='').split()
    # Strip the -I from the include folders before adding them to the
    # CPPPATH
    main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))

    # Read the linker flags and split them into libraries and other link
    # flags. The libraries are added later through the call to CheckLib.
    py_ld_flags = readCommand([python_config, '--ldflags'],
        exception='').split()
    py_libs = []
    for lib in py_ld_flags:
         if not lib.startswith('-l'):
             main.Append(LINKFLAGS=[lib])
         else:
             lib = lib[2:]
             if lib not in py_libs:
                 py_libs.append(lib)
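
    # A sketch of typical python-config output on a Linux host (paths are
    # illustrative): '--includes' might yield '-I/usr/include/python2.7',
    # which is stripped to '/usr/include/python2.7' for CPPPATH, while
    # '--ldflags' might yield '-L/usr/lib -lpython2.7 -lpthread -ldl -lutil',
    # splitting into LINKFLAGS entries and the py_libs list checked below.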

    # verify that this stuff works
    if not conf.CheckHeader('Python.h', '<>'):
        print "Error: can't find Python.h header in", py_includes
        print "Install Python headers (package python-dev on Ubuntu and RedHat)"
        Exit(1)

    for lib in py_libs:
        if not conf.CheckLib(lib):
            print "Error: can't find library %s required by python" % lib
            Exit(1)

# On Solaris you need to use libsocket for socket ops
if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
    if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
        print "Can't find library with socket calls (e.g. accept())"
        Exit(1)

# Check for zlib.  If the check passes, libz will be automatically
# added to the LIBS environment variable.
if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
    print 'Error: did not find needed zlib compression library '\
          'and/or zlib.h header file.'
    print '       Please install zlib and try again.'
    Exit(1)

# If we have the protobuf compiler, also make sure we have the
# development libraries. If the check passes, libprotobuf will be
# automatically added to the LIBS environment variable. After
# this, we can use the HAVE_PROTOBUF flag to determine if we have
# got both protoc and libprotobuf available.
main['HAVE_PROTOBUF'] = main['PROTOC'] and \
    conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
                            'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')

# If we have the compiler but not the library, print another warning.
if main['PROTOC'] and not main['HAVE_PROTOBUF']:
    print termcap.Yellow + termcap.Bold + \
        'Warning: did not find protocol buffer library and/or headers.\n' + \
        '       Please install libprotobuf-dev for tracing support.' + \
        termcap.Normal

# Check for librt.
have_posix_clock = \
    conf.CheckLibWithHeader(None, 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);') or \
    conf.CheckLibWithHeader('rt', 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);')

have_posix_timers = \
    conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
                            'timer_create(CLOCK_MONOTONIC, NULL, NULL);')

if not GetOption('without_tcmalloc'):
    if conf.CheckLib('tcmalloc'):
        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
    elif conf.CheckLib('tcmalloc_minimal'):
        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
    else:
        print termcap.Yellow + termcap.Bold + \
              "You can get a 12% performance improvement by "\
              "installing tcmalloc (libgoogle-perftools-dev package "\
              "on Ubuntu or RedHat)." + termcap.Normal


# Detect back trace implementations. The last implementation in the
# list will be used by default.
backtrace_impls = [ "none" ]

if conf.CheckLibWithHeader(None, 'execinfo.h', 'C',
                           'backtrace_symbols_fd((void*)0, 0, 0);'):
    backtrace_impls.append("glibc")
elif conf.CheckLibWithHeader('execinfo', 'execinfo.h', 'C',
                             'backtrace_symbols_fd((void*)0, 0, 0);'):
    # NetBSD and FreeBSD need libexecinfo.
    backtrace_impls.append("glibc")
    main.Append(LIBS=['execinfo'])

if backtrace_impls[-1] == "none":
    default_backtrace_impl = "none"
    print termcap.Yellow + termcap.Bold + \
        "No suitable back trace implementation found." + \
        termcap.Normal

if not have_posix_clock:
    print "Can't find library for POSIX clocks."

# Check for <fenv.h> (C99 FP environment control)
have_fenv = conf.CheckHeader('fenv.h', '<>')
if not have_fenv:
    print "Warning: Header file <fenv.h> not found."
    print "         This host has no IEEE FP rounding mode control."

# Check for <png.h> (libpng library needed if wanting to dump
# frame buffer image in png format)
have_png = conf.CheckHeader('png.h', '<>')
if not have_png:
    print "Warning: Header file <png.h> not found."
    print "         This host has no libpng library."
    print "         Disabling support for PNG framebuffers."

# Check if we should enable KVM-based hardware virtualization. The API
# we rely on exists since version 2.6.36 of the kernel, but somehow
# the KVM_API_VERSION does not reflect the change. We test for one of
# the types as a fall back.
have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
if not have_kvm:
    print "Info: Compatible header file <linux/kvm.h> not found, " \
        "disabling KVM support."

# Check if the TUN/TAP driver is available.
have_tuntap = conf.CheckHeader('linux/if_tun.h', '<>')
if not have_tuntap:
    print "Info: Compatible header file <linux/if_tun.h> not found."

# x86 needs support for xsave. We test for the structure here since we
# won't be able to run new tests by the time we know which ISA we're
# targeting.
have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
                                    '#include <linux/kvm.h>') != 0

# Check if the requested target ISA is compatible with the host
def is_isa_kvm_compatible(isa):
    try:
        import platform
        host_isa = platform.machine()
    except:
        print "Warning: Failed to determine host ISA."
        return False

    if not have_posix_timers:
        print "Warning: Can not enable KVM, host seems to lack support " \
            "for POSIX timers"
        return False

    if isa == "arm":
        return host_isa in ( "armv7l", "aarch64" )
    elif isa == "x86":
        if host_isa != "x86_64":
            return False

        if not have_kvm_xsave:
            print "KVM on x86 requires xsave support in kernel headers."
            return False

        return True
    else:
        return False
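
# Illustrative outcomes (assuming POSIX timers are available): on an x86_64
# host whose kernel headers define struct kvm_xsave, is_isa_kvm_compatible
# returns True for "x86"; on the same host it returns False for "arm", since
# ARM KVM requires an armv7l or aarch64 host.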


# Check if the exclude_host attribute is available. We want this to
# get accurate instruction counts in KVM.
main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
    'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')


######################################################################
#
# Finish the configuration
#
main = conf.Finish()

######################################################################
#
# Collect all non-global variables
#

# Define the universe of supported ISAs
all_isa_list = [ ]
all_gpu_isa_list = [ ]
Export('all_isa_list')
Export('all_gpu_isa_list')

class CpuModel(object):
    '''The CpuModel class encapsulates everything the ISA parser needs to
    know about a particular CPU model.'''

    # Dict of available CPU model objects.  Accessible as CpuModel.dict.
    dict = {}

    # Constructor.  Automatically adds models to CpuModel.dict.
    def __init__(self, name, default=False):
        self.name = name           # name of model

        # This cpu is enabled by default
        self.default = default

        # Add self to dict
        if name in CpuModel.dict:
            raise AttributeError, "CpuModel '%s' already registered" % name
        CpuModel.dict[name] = self

Export('CpuModel')

# Sticky variables get saved in the variables file so they persist from
# one invocation to the next (unless overridden, in which case the new
# value becomes sticky).
sticky_vars = Variables(args=ARGUMENTS)
Export('sticky_vars')

# Sticky variables that should be exported
export_vars = []
Export('export_vars')

# For Ruby
all_protocols = []
Export('all_protocols')
protocol_dirs = []
Export('protocol_dirs')
slicc_includes = []
Export('slicc_includes')

# Walk the tree and execute all SConsopts scripts that will add to the
# above variables
if GetOption('verbose'):
    print "Reading SConsopts"
for bdir in [ base_dir ] + extras_dir_list:
    if not isdir(bdir):
        print "Error: directory '%s' does not exist" % bdir
        Exit(1)
    for root, dirs, files in os.walk(bdir):
        if 'SConsopts' in files:
            if GetOption('verbose'):
                print "Reading", joinpath(root, 'SConsopts')
            SConscript(joinpath(root, 'SConsopts'))

all_isa_list.sort()
all_gpu_isa_list.sort()

sticky_vars.AddVariables(
    EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
    EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list),
    ListVariable('CPU_MODELS', 'CPU models',
                 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
                 sorted(CpuModel.dict.keys())),
    BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
                 False),
    BoolVariable('SS_COMPATIBLE_FP',
                 'Make floating-point results compatible with SimpleScalar',
                 False),
    BoolVariable('USE_SSE2',
                 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
                 False),
    BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
    BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
    BoolVariable('USE_PNG',  'Enable support for PNG images', have_png),
    BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability',
                 False),
    BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models',
                 have_kvm),
    BoolVariable('USE_TUNTAP',
                 'Enable using a tap device to bridge to the host network',
                 have_tuntap),
    BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
    EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
                  all_protocols),
    EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
                 backtrace_impls[-1], backtrace_impls)
    )
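
# These sticky variables can be set (and thereby saved) on the command line;
# the following is a plausible, purely illustrative invocation:
#   scons build/ARM/gem5.opt CPU_MODELS=TimingSimpleCPU,O3CPU USE_KVM=False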

# These variables get exported to #defines in config/*.hh (see src/SConscript).
export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
                'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'USE_TUNTAP',
                'PROTOCOL', 'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST',
                'USE_PNG']

###################################################
#
# Define a SCons builder for configuration flag headers.
#
###################################################

# This function generates a config header file that #defines the
# variable symbol to the current variable setting (0 or 1).  The source
# operands are the name of the variable and a Value node containing the
# value of the variable.
def build_config_file(target, source, env):
    (variable, value) = [s.get_contents() for s in source]
    f = file(str(target[0]), 'w')
    print >> f, '#define', variable, value
    f.close()
    return None

# Combine the two functions into a scons Action object.
config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))

# The emitter munges the source & target node lists to reflect what
# we're really doing.
def config_emitter(target, source, env):
    # extract variable name from Builder arg
    variable = str(target[0])
    # True target is config header file
    target = joinpath('config', variable.lower() + '.hh')
    val = env[variable]
    if isinstance(val, bool):
        # Force value to 0/1
        val = int(val)
    elif isinstance(val, str):
        val = '"' + val + '"'

    # Sources are variable name & value (packaged in SCons Value nodes)
    return ([target], [Value(variable), Value(val)])
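
# Example of the net effect (sketch): env.ConfigFile('USE_KVM') with
# env['USE_KVM'] == True writes config/use_kvm.hh containing the single line
#   #define USE_KVM 1
# and the Value() sources ensure the header is regenerated only when the
# variable's value actually changes.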

config_builder = Builder(emitter = config_emitter, action = config_action)

main.Append(BUILDERS = { 'ConfigFile' : config_builder })

###################################################
#
# Builders for static and shared partially linked object files.
#
###################################################

partial_static_builder = Builder(action=SCons.Defaults.LinkAction,
                                 src_suffix='$OBJSUFFIX',
                                 src_builder=['StaticObject', 'Object'],
                                 LINKFLAGS='$PLINKFLAGS',
                                 LIBS='')

def partial_shared_emitter(target, source, env):
    for tgt in target:
        tgt.attributes.shared = 1
    return (target, source)
partial_shared_builder = Builder(action=SCons.Defaults.ShLinkAction,
                                 emitter=partial_shared_emitter,
                                 src_suffix='$SHOBJSUFFIX',
                                 src_builder='SharedObject',
                                 SHLINKFLAGS='$PSHLINKFLAGS',
                                 LIBS='')

main.Append(BUILDERS = { 'PartialShared' : partial_shared_builder,
                         'PartialStatic' : partial_static_builder })
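
# Hypothetical usage of these builders (the real call sites live in the
# SConscripts): something like
#   env.PartialStatic('partial.o', objects)
#   env.PartialShared('partial.os', shared_objects)
# performs an incremental ('ld -r') style link of the given objects into a
# single relocatable file, without pulling in any libraries.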

# builds in ext are shared across all configs in the build root.
ext_dir = abspath(joinpath(str(main.root), 'ext'))
ext_build_dirs = []
for root, dirs, files in os.walk(ext_dir):
    if 'SConscript' in files:
        build_dir = os.path.relpath(root, ext_dir)
        ext_build_dirs.append(build_dir)
        main.SConscript(joinpath(root, 'SConscript'),
                        variant_dir=joinpath(build_root, build_dir))

main.Prepend(CPPPATH=Dir('ext/pybind11/include/'))

###################################################
#
# This builder and wrapper method are used to set up a directory with
# switching headers. Those are headers which are in a generic location and
# that include more specific headers from a directory chosen at build time
# based on the current build settings.
#
###################################################

def build_switching_header(target, source, env):
    path = str(target[0])
    subdir = str(source[0])
    dp, fp = os.path.split(path)
    dp = os.path.relpath(os.path.realpath(dp),
                         os.path.realpath(env['BUILDDIR']))
    with open(path, 'w') as hdr:
        print >>hdr, '#include "%s/%s/%s"' % (dp, subdir, fp)
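
# Sketch of the generated file (names are illustrative): for a target
# 'arch/decoder.hh' and source value 'arm', the emitted header would contain
# a single line of the form
#   #include "<dir-relative-to-BUILDDIR>/arm/decoder.hh"
# so generic code can include 'arch/decoder.hh' and transparently pick up the
# ISA-specific version chosen at build time.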

switching_header_action = MakeAction(build_switching_header,
                                     Transform('GENERATE'))

switching_header_builder = Builder(action=switching_header_action,
                                   source_factory=Value,
                                   single_source=True)

main.Append(BUILDERS = { 'SwitchingHeader': switching_header_builder })

def switching_headers(self, headers, source):
    for header in headers:
        self.SwitchingHeader(header, source)

main.AddMethod(switching_headers, 'SwitchingHeaders')

###################################################
#
# Define build environments for selected configurations.
#
###################################################

for variant_path in variant_paths:
    if not GetOption('silent'):
        print "Building in", variant_path

    # Make a copy of the build-root environment to use for this config.
    env = main.Clone()
    env['BUILDDIR'] = variant_path

    # variant_dir is the tail component of build path, and is used to
    # determine the build parameters (e.g., 'ALPHA_SE')
    (build_root, variant_dir) = splitpath(variant_path)

    # Set env variables according to the build directory config.
    sticky_vars.files = []
    # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
    # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
    # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
    current_vars_file = joinpath(build_root, 'variables', variant_dir)
    if isfile(current_vars_file):
        sticky_vars.files.append(current_vars_file)
        if not GetOption('silent'):
            print "Using saved variables file %s" % current_vars_file
    elif variant_dir in ext_build_dirs:
        # Things in ext are built without a variant directory.
        continue
    else:
        # Build dir-specific variables file doesn't exist.

        # Make sure the directory is there so we can create it later
        opt_dir = dirname(current_vars_file)
        if not isdir(opt_dir):
            mkdir(opt_dir)

        # Get default build variables from source tree.  Variables are
        # normally determined by name of $VARIANT_DIR, but can be
        # overridden by '--default=' arg on command line.
        default = GetOption('default')
        opts_dir = joinpath(main.root.abspath, 'build_opts')
        if default:
            default_vars_files = [joinpath(build_root, 'variables', default),
                                  joinpath(opts_dir, default)]
        else:
            default_vars_files = [joinpath(opts_dir, variant_dir)]
        existing_files = filter(isfile, default_vars_files)
        if existing_files:
            default_vars_file = existing_files[0]
            sticky_vars.files.append(default_vars_file)
            print "Variables file %s not found,\n  using defaults in %s" \
                  % (current_vars_file, default_vars_file)
        else:
            print "Error: cannot find variables file %s or " \
                  "default file(s) %s" \
                  % (current_vars_file, ' or '.join(default_vars_files))
            Exit(1)

    # Apply current variable settings to env
    sticky_vars.Update(env)

    help_texts["local_vars"] += \
        "Build variables for %s:\n" % variant_dir \
                 + sticky_vars.GenerateHelpText(env)

    # Process variable settings.

    if not have_fenv and env['USE_FENV']:
        print "Warning: <fenv.h> not available; " \
              "forcing USE_FENV to False in", variant_dir + "."
        env['USE_FENV'] = False

    if not env['USE_FENV']:
        print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
        print "         FP results may deviate slightly from other platforms."

    if not have_png and env['USE_PNG']:
        print "Warning: <png.h> not available; " \
              "forcing USE_PNG to False in", variant_dir + "."
        env['USE_PNG'] = False

    if env['USE_PNG']:
        env.Append(LIBS=['png'])

    if env['EFENCE']:
        env.Append(LIBS=['efence'])

    if env['USE_KVM']:
        if not have_kvm:
            print "Warning: Can not enable KVM, host seems to lack KVM support"
            env['USE_KVM'] = False
        elif not is_isa_kvm_compatible(env['TARGET_ISA']):
            print "Info: KVM support disabled due to unsupported host and " \
                "target ISA combination"
            env['USE_KVM'] = False

    if env['USE_TUNTAP']:
        if not have_tuntap:
            print "Warning: Can't connect EtherTap with a tap device."
            env['USE_TUNTAP'] = False

    if env['BUILD_GPU']:
        env.Append(CPPDEFINES=['BUILD_GPU'])

    # Warn about missing optional functionality
    if env['USE_KVM']:
        if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
            print "Warning: perf_event headers lack support for the " \
                "exclude_host attribute. KVM instruction counts will " \
                "be inaccurate."

    # Save sticky variable settings back to current variables file
    sticky_vars.Save(current_vars_file, env)

    if env['USE_SSE2']:
        env.Append(CCFLAGS=['-msse2'])

    # The src/SConscript file sets up the build rules in 'env' according
    # to the configured variables.  It returns a list of environments,
    # one for each variant build (debug, opt, etc.)
    SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')

# base help text
Help('''
Usage: scons [scons options] [build variables] [target(s)]

Extra scons options:
%(options)s

Global build variables:
%(global_vars)s

%(local_vars)s
''' % help_texts)
