# SConstruct (gem5 revision 9903:cb74b075186e)
# -*- mode:python -*-

# Copyright (c) 2013 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder.  You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2011 Advanced Micro Devices, Inc.
# Copyright (c) 2009 The Hewlett-Packard Development Company
# Copyright (c) 2004-2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
#          Nathan Binkert

###################################################
#
# SCons top-level build description (SConstruct) file.
#
# While in this directory ('gem5'), just type 'scons' to build the default
# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
# the optimized full-system version).
#
# You can build gem5 in a different directory as long as there is a
# 'build/<CONFIG>' somewhere along the target path.  The build system
# expects that all configs under the same build directory are being
# built for the same host system.
#
# Examples:
#
#   The following two commands are equivalent.  The '-u' option tells
#   scons to search up the directory tree for this SConstruct file.
#   % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
#   % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
#
#   The following two commands are equivalent and demonstrate building
#   in a directory outside of the source tree.  The '-C' option tells
#   scons to chdir to the specified directory to find this SConstruct
#   file.
#   % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
#   % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
#
# You can use 'scons -H' to print scons options.  If you're in this
# 'gem5' directory (or use -u or -C to tell scons where to find this
# file), you can use 'scons -h' to print all the gem5-specific build
# options as well.
#
###################################################

# Check for recent-enough Python and SCons versions.
try:
    # Really old versions of scons only take two options for the
    # function, so check once without the revision and once with the
    # revision, the first instance will fail for stuff other than
    # 0.98, and the second will fail for 0.98.0
    EnsureSConsVersion(0, 98)
    EnsureSConsVersion(0, 98, 1)
except SystemExit, e:
    print """
For more details, see:
    http://gem5.org/Dependencies
"""
    raise

# We ensure the python version early because python-config
# requires python 2.5
try:
    EnsurePythonVersion(2, 5)
except SystemExit, e:
    print """
You can use a non-default installation of the Python interpreter by
rearranging your PATH so that scons finds the non-default 'python' and
'python-config' first.

For more details, see:
    http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
"""
    raise

# Global Python includes
import os
import re
import subprocess
import sys

from os import mkdir, environ
from os.path import abspath, basename, dirname, expanduser, normpath
from os.path import exists, isdir, isfile
from os.path import join as joinpath, split as splitpath

# SCons includes
import SCons
import SCons.Node

extra_python_paths = [
    Dir('src/python').srcnode().abspath, # gem5 includes
    Dir('ext/ply').srcnode().abspath, # ply is used by several files
    ]

sys.path[1:1] = extra_python_paths

from m5.util import compareVersions, readCommand
from m5.util.terminal import get_termcap

help_texts = {
    "options" : "",
    "global_vars" : "",
    "local_vars" : ""
}

Export("help_texts")


# There's a bug in scons: (1) by default, the help texts from
# AddOption() are supposed to be displayed when you type 'scons -h',
# and (2) you can override the help displayed by 'scons -h' using the
# Help() function, but these two features are incompatible: once
# you've overridden the help text using Help(), there's no way to get
# at the help texts from AddOption().  See:
#     http://scons.tigris.org/issues/show_bug.cgi?id=2356
#     http://scons.tigris.org/issues/show_bug.cgi?id=2611
# This hack lets us extract the help text from AddOption and
# re-inject it via Help().  Ideally someday this bug will be fixed and
# we can just use AddOption directly.
def AddLocalOption(*args, **kwargs):
    col_width = 30

    help = "  " + ", ".join(args)
    if "help" in kwargs:
        length = len(help)
        if length >= col_width:
            help += "\n" + " " * col_width
        else:
            help += " " * (col_width - length)
        help += kwargs["help"]
    help_texts["options"] += help + "\n"

    AddOption(*args, **kwargs)

AddLocalOption('--colors', dest='use_colors', action='store_true',
               help="Add color to abbreviated scons output")
AddLocalOption('--no-colors', dest='use_colors', action='store_false',
               help="Don't add color to abbreviated scons output")
AddLocalOption('--default', dest='default', type='string', action='store',
               help='Override which build_opts file to use for defaults')
AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
               help='Disable style checking hooks')
AddLocalOption('--no-lto', dest='no_lto', action='store_true',
               help='Disable Link-Time Optimization for fast')
AddLocalOption('--update-ref', dest='update_ref', action='store_true',
               help='Update test reference outputs')
AddLocalOption('--verbose', dest='verbose', action='store_true',
               help='Print full tool command lines')

termcap = get_termcap(GetOption('use_colors'))

########################################################################
#
# Set up the main build environment.
#
########################################################################
use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
                 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PYTHONPATH',
                 'RANLIB', 'SWIG' ])

use_prefixes = [
    "M5",           # M5 configuration (e.g., path to kernels)
    "DISTCC_",      # distcc (distributed compiler wrapper) configuration
    "CCACHE_",      # ccache (caching compiler wrapper) configuration
    "CCC_",         # clang static analyzer configuration
    ]

use_env = {}
for key,val in os.environ.iteritems():
    if key in use_vars or \
            any([key.startswith(prefix) for prefix in use_prefixes]):
        use_env[key] = val

main = Environment(ENV=use_env)
main.Decider('MD5-timestamp')
main.root = Dir(".")         # The current directory (where this file lives).
main.srcdir = Dir("src")     # The source directory

main_dict_keys = main.Dictionary().keys()

# Check that we have a C/C++ compiler
if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
    print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
    Exit(1)

# Check that swig is present
if not 'SWIG' in main_dict_keys:
    print "swig is not installed (package swig on Ubuntu and RedHat)"
    Exit(1)

# Add the useful python code to PYTHONPATH so it can be used by
# subprocesses as well
main.AppendENVPath('PYTHONPATH', extra_python_paths)

########################################################################
#
# Mercurial Stuff.
#
# If the gem5 directory is a mercurial repository, we should do some
# extra things.
#
########################################################################

hgdir = main.root.Dir(".hg")

mercurial_style_message = """
You're missing the gem5 style hook, which automatically checks your code
against the gem5 style rules on hg commit and qrefresh commands.  This
script will now install the hook in your .hg/hgrc file.
Press enter to continue, or ctrl-c to abort: """

mercurial_style_hook = """
# The following lines were automatically added by gem5/SConstruct
# to provide the gem5 style-checking hooks
[extensions]
style = %s/util/style.py

[hooks]
pretxncommit.style = python:style.check_style
pre-qrefresh.style = python:style.check_style
# End of SConstruct additions

""" % (main.root.abspath)

mercurial_lib_not_found = """
Mercurial libraries cannot be found, ignoring style hook.  If
you are a gem5 developer, please fix this and run the style
hook. It is important.
"""

# Check for style hook and prompt for installation if it's not there.
# Skip this if --ignore-style was specified, there's no .hg dir to
# install a hook in, or there's no interactive terminal to prompt.
if not GetOption('ignore_style') and hgdir.exists() and sys.stdin.isatty():
    style_hook = True
    try:
        from mercurial import ui
        ui = ui.ui()
        ui.readconfig(hgdir.File('hgrc').abspath)
        style_hook = ui.config('hooks', 'pretxncommit.style', None) and \
                     ui.config('hooks', 'pre-qrefresh.style', None)
    except ImportError:
        print mercurial_lib_not_found

    if not style_hook:
        print mercurial_style_message,
        # continue unless user does ctrl-c/ctrl-d etc.
        try:
            raw_input()
        except:
            print "Input exception, exiting scons.\n"
            sys.exit(1)
        hgrc_path = '%s/.hg/hgrc' % main.root.abspath
        print "Adding style hook to", hgrc_path, "\n"
        try:
            hgrc = open(hgrc_path, 'a')
            hgrc.write(mercurial_style_hook)
            hgrc.close()
        except:
            print "Error updating", hgrc_path
            sys.exit(1)


###################################################
#
# Figure out which configurations to set up based on the path(s) of
# the target(s).
#
###################################################

# Find default configuration & binary.
Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))
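# For a one-off override of the default target, the M5_DEFAULT_BINARY
# environment variable can be set instead (illustrative config/target names):
#   % M5_DEFAULT_BINARY=build/X86/gem5.opt scons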

# helper function: find last occurrence of element in list
def rfind(l, elt, offs = -1):
    for i in range(len(l)+offs, 0, -1):
        if l[i] == elt:
            return i
    raise ValueError, "element not found"
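# Illustrative behaviour of rfind on hypothetical path components (not part
# of the build itself):
#   rfind(['x', 'build', 'ALPHA', 'build', 'gem5.opt'], 'build') == 3
#   rfind(['x', 'build', 'ALPHA', 'gem5.opt'], 'build', -2) == 1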

# Take a list of paths (or SCons Nodes) and return a list with all
# paths made absolute and ~-expanded.  Paths will be interpreted
# relative to the launch directory unless a different root is provided.
def makePathListAbsolute(path_list, root=GetLaunchDir()):
    return [abspath(joinpath(root, expanduser(str(p))))
            for p in path_list]
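# A rough sketch of what this does, assuming scons was launched from
# /home/user/gem5 (paths are hypothetical):
#   makePathListAbsolute(['build/ALPHA/gem5.opt', '~/extras'])
#     -> ['/home/user/gem5/build/ALPHA/gem5.opt', '/home/user/extras']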
323
324# Each target must have 'build' in the interior of the path; the
325# directory below this will determine the build parameters.  For
326# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
327# recognize that ALPHA_SE specifies the configuration because it
328# follow 'build' in the build path.
329
330# The funky assignment to "[:]" is needed to replace the list contents
331# in place rather than reassign the symbol to a new list, which
332# doesn't work (obviously!).
333BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)
334
335# Generate a list of the unique build roots and configs that the
336# collected targets reference.
337variant_paths = []
338build_root = None
339for t in BUILD_TARGETS:
340    path_dirs = t.split('/')
341    try:
342        build_top = rfind(path_dirs, 'build', -2)
343    except:
344        print "Error: no non-leaf 'build' dir found on target path", t
345        Exit(1)
346    this_build_root = joinpath('/',*path_dirs[:build_top+1])
347    if not build_root:
348        build_root = this_build_root
349    else:
350        if this_build_root != build_root:
351            print "Error: build targets not under same build root\n"\
352                  "  %s\n  %s" % (build_root, this_build_root)
353            Exit(1)
354    variant_path = joinpath('/',*path_dirs[:build_top+2])
355    if variant_path not in variant_paths:
356        variant_paths.append(variant_path)
357
# Make sure build_root exists (might not if this is the first build there)
if not isdir(build_root):
    mkdir(build_root)
main['BUILDROOT'] = build_root

Export('main')

main.SConsignFile(joinpath(build_root, "sconsign"))

# The default duplicate option is to use hard links, but this messes up
# when you use emacs to edit a file in the target dir, as emacs moves the
# file to file~ and then copies to file, breaking the link.  Symbolic
# (soft) links work better.
main.SetOption('duplicate', 'soft-copy')

#
# Set up global sticky variables... these are common to an entire build
# tree (not specific to a particular build like ALPHA_SE)
#

global_vars_file = joinpath(build_root, 'variables.global')

global_vars = Variables(global_vars_file, args=ARGUMENTS)

global_vars.AddVariables(
    ('CC', 'C compiler', environ.get('CC', main['CC'])),
    ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
    ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
    ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
    ('BATCH', 'Use batch pool for build and tests', False),
    ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
    ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
    ('EXTRAS', 'Add extra directories to the compilation', '')
    )

# Update main environment with values from ARGUMENTS & global_vars_file
global_vars.Update(main)
help_texts["global_vars"] += global_vars.GenerateHelpText(main)

# Save sticky variable settings back to current variables file
global_vars.Save(global_vars_file, main)

# Parse the EXTRAS variable to build a list of all directories where we'll
# look for sources etc.  This list is exported as extras_dir_list.
base_dir = main.srcdir.abspath
if main['EXTRAS']:
    extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
else:
    extras_dir_list = []
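# An illustrative invocation (the extras path is hypothetical); multiple
# directories may be given as a colon-separated list:
#   % scons EXTRAS=/path/to/my_extra_src build/ALPHA/gem5.opt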

Export('base_dir')
Export('extras_dir_list')

# the ext directory should be on the #includes path
main.Append(CPPPATH=[Dir('ext')])

def strip_build_path(path, env):
    path = str(path)
    variant_base = env['BUILDROOT'] + os.path.sep
    if path.startswith(variant_base):
        path = path[len(variant_base):]
    elif path.startswith('build/'):
        path = path[6:]
    return path
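# For illustration, with BUILDROOT set to '/local/foo/build' (hypothetical):
#   strip_build_path('/local/foo/build/ALPHA/sim/main.do', env) -> 'ALPHA/sim/main.do'
#   strip_build_path('build/ALPHA/sim/main.do', env)            -> 'ALPHA/sim/main.do'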

# Generate a string of the form:
#   common/path/prefix/src1, src2 -> tgt1, tgt2
# to print while building.
class Transform(object):
    # all specific color settings should be here and nowhere else
    tool_color = termcap.Normal
    pfx_color = termcap.Yellow
    srcs_color = termcap.Yellow + termcap.Bold
    arrow_color = termcap.Blue + termcap.Bold
    tgts_color = termcap.Yellow + termcap.Bold

    def __init__(self, tool, max_sources=99):
        self.format = self.tool_color + (" [%8s] " % tool) \
                      + self.pfx_color + "%s" \
                      + self.srcs_color + "%s" \
                      + self.arrow_color + " -> " \
                      + self.tgts_color + "%s" \
                      + termcap.Normal
        self.max_sources = max_sources

    def __call__(self, target, source, env, for_signature=None):
        # truncate source list according to max_sources param
        source = source[0:self.max_sources]
        def strip(f):
            return strip_build_path(str(f), env)
        if len(source) > 0:
            srcs = map(strip, source)
        else:
            srcs = ['']
        tgts = map(strip, target)
        # surprisingly, os.path.commonprefix is a dumb char-by-char string
        # operation that has nothing to do with paths.
        com_pfx = os.path.commonprefix(srcs + tgts)
        com_pfx_len = len(com_pfx)
        if com_pfx:
            # do some cleanup and sanity checking on common prefix
            if com_pfx[-1] == ".":
                # prefix matches all but file extension: ok
                # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
                com_pfx = com_pfx[0:-1]
            elif com_pfx[-1] == "/":
                # common prefix is directory path: OK
                pass
            else:
                src0_len = len(srcs[0])
                tgt0_len = len(tgts[0])
                if src0_len == com_pfx_len:
                    # source is a substring of target, OK
                    pass
                elif tgt0_len == com_pfx_len:
                    # target is a substring of source, need to back up to
                    # avoid empty string on RHS of arrow
                    sep_idx = com_pfx.rfind(".")
                    if sep_idx != -1:
                        com_pfx = com_pfx[0:sep_idx]
                    else:
                        com_pfx = ''
                elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
                    # still splitting at file extension: ok
                    pass
                else:
                    # probably a fluke; ignore it
                    com_pfx = ''
        # recalculate length in case com_pfx was modified
        com_pfx_len = len(com_pfx)
        def fmt(files):
            f = map(lambda s: s[com_pfx_len:], files)
            return ', '.join(f)
        return self.format % (com_pfx, fmt(srcs), fmt(tgts))
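# For illustration, a compile step is rendered roughly as follows (colors
# omitted; the exact paths depend on the build):
#   [     CXX] ALPHA/sim/main.cc -> .o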

Export('Transform')

# enable the regression script to use the termcap
main['TERMCAP'] = termcap

if GetOption('verbose'):
    def MakeAction(action, string, *args, **kwargs):
        return Action(action, *args, **kwargs)
else:
    MakeAction = Action
    main['CCCOMSTR']        = Transform("CC")
    main['CXXCOMSTR']       = Transform("CXX")
    main['ASCOMSTR']        = Transform("AS")
    main['SWIGCOMSTR']      = Transform("SWIG")
    main['ARCOMSTR']        = Transform("AR", 0)
    main['LINKCOMSTR']      = Transform("LINK", 0)
    main['RANLIBCOMSTR']    = Transform("RANLIB", 0)
    main['M4COMSTR']        = Transform("M4")
    main['SHCCCOMSTR']      = Transform("SHCC")
    main['SHCXXCOMSTR']     = Transform("SHCXX")
Export('MakeAction')

# Initialize the Link-Time Optimization (LTO) flags
main['LTO_CCFLAGS'] = []
main['LTO_LDFLAGS'] = []

# According to the readme, tcmalloc works best if the compiler doesn't
# assume that we're using the builtin malloc and friends. These flags
# are compiler-specific, so we need to set them after we detect which
# compiler we're using.
main['TCMALLOC_CCFLAGS'] = []

CXX_version = readCommand([main['CXX'],'--version'], exception=False)
CXX_V = readCommand([main['CXX'],'-V'], exception=False)

main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
if main['GCC'] + main['CLANG'] > 1:
    print 'Error: How can we have two at the same time?'
    Exit(1)

# Set up default C++ compiler flags
if main['GCC'] or main['CLANG']:
    # As gcc and clang share many flags, do the common parts here
    main.Append(CCFLAGS=['-pipe'])
    main.Append(CCFLAGS=['-fno-strict-aliasing'])
    # Enable -Wall and then disable the few warnings that we
    # consistently violate
    main.Append(CCFLAGS=['-Wall', '-Wno-sign-compare', '-Wundef'])
    # We always compile using C++11, but only gcc >= 4.7 and clang >= 3.1
    # accept that standard name, so we stick with c++0x
    main.Append(CXXFLAGS=['-std=c++0x'])
    # Add selected sanity checks from -Wextra
    main.Append(CXXFLAGS=['-Wmissing-field-initializers',
                          '-Woverloaded-virtual'])
else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
               termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use a compiler other than GCC"
    print "       or clang, there appears to be something wrong with your"
    print "       environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to fix SConstruct and"
    print "       src/SConscript to support that compiler."
    Exit(1)

if main['GCC']:
    # Check for a supported version of gcc. >= 4.4 is needed for c++0x
    # support. See http://gcc.gnu.org/projects/cxx0x.html for details.
    gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
    if compareVersions(gcc_version, "4.4") < 0:
        print 'Error: gcc version 4.4 or newer required.'
        print '       Installed version:', gcc_version
        Exit(1)

    main['GCC_VERSION'] = gcc_version

    # Check for versions with bugs
    if not compareVersions(gcc_version, '4.4.1') or \
       not compareVersions(gcc_version, '4.4.2'):
        print 'Info: Tree vectorizer in GCC 4.4.1 & 4.4.2 is buggy, disabling.'
        main.Append(CCFLAGS=['-fno-tree-vectorize'])

    # LTO support is only really working properly from 4.6 and beyond
    if compareVersions(gcc_version, '4.6') >= 0:
        # Add the appropriate Link-Time Optimization (LTO) flags
        # unless LTO is explicitly turned off. Note that these flags
        # are only used by the fast target.
        if not GetOption('no_lto'):
            # Pass the LTO flag when compiling to produce GIMPLE
            # output; we merely create the flags here and only append
            # them later.
            main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

            # Use the same number of jobs for LTO as we are running
            # scons with; we hardcode the use of the linker plugin,
            # which requires either gold or GNU ld >= 2.21
            main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs'),
                                   '-fuse-linker-plugin']

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
                                  '-fno-builtin-realloc', '-fno-builtin-free'])

elif main['CLANG']:
    # Check for a supported version of clang, >= 2.9 is needed to
    # support similar features as gcc 4.4. See
    # http://clang.llvm.org/cxx_status.html for details
    clang_version_re = re.compile(".* version (\d+\.\d+)")
    clang_version_match = clang_version_re.match(CXX_version)
    if (clang_version_match):
        clang_version = clang_version_match.groups()[0]
        if compareVersions(clang_version, "2.9") < 0:
            print 'Error: clang version 2.9 or newer required.'
            print '       Installed version:', clang_version
            Exit(1)
    else:
        print 'Error: Unable to determine clang version.'
        Exit(1)

    # clang has a few additional warnings that we disable: tautological
    # comparisons are allowed due to unsigned integers being compared to
    # constants that happen to be 0, extraneous parentheses are allowed
    # due to Ruby's printing of the AST, and finally self-assignments are
    # allowed as the generated CPU code relies on them.
    main.Append(CCFLAGS=['-Wno-tautological-compare',
                         '-Wno-parentheses',
                         '-Wno-self-assign'])

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])

    # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
    # opposed to libstdc++, as the latter is dated.
    if sys.platform == "darwin":
        main.Append(CXXFLAGS=['-stdlib=libc++'])
        main.Append(LIBS=['c++'])

else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
               termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use a compiler other than GCC"
    print "       or clang, there appears to be something wrong with your"
    print "       environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to fix SConstruct and"
    print "       src/SConscript to support that compiler."
    Exit(1)

# Set up common yacc/bison flags (needed for Ruby)
main['YACCFLAGS'] = '-d'
main['YACCHXXFILESUFFIX'] = '.hh'

# Do this after we save the settings back, or else we'll tack on an
# extra 'qdo' every time we run scons.
if main['BATCH']:
    main['CC']     = main['BATCH_CMD'] + ' ' + main['CC']
    main['CXX']    = main['BATCH_CMD'] + ' ' + main['CXX']
    main['AS']     = main['BATCH_CMD'] + ' ' + main['AS']
    main['AR']     = main['BATCH_CMD'] + ' ' + main['AR']
    main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']

if sys.platform == 'cygwin':
    # cygwin has some header file issues...
    main.Append(CCFLAGS=["-Wno-uninitialized"])

# Check for the protobuf compiler
protoc_version = readCommand([main['PROTOC'], '--version'],
                             exception='').split()

# First two words should be "libprotoc x.y.z"
if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
    print termcap.Yellow + termcap.Bold + \
        'Warning: Protocol buffer compiler (protoc) not found.\n' + \
        '         Please install protobuf-compiler for tracing support.' + \
        termcap.Normal
    main['PROTOC'] = False
else:
    # Based on the availability of the compress stream wrappers,
    # require 2.1.0
    min_protoc_version = '2.1.0'
    if compareVersions(protoc_version[1], min_protoc_version) < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: protoc version', min_protoc_version, \
            'or newer required.\n' + \
            '         Installed version:', protoc_version[1], \
            termcap.Normal
        main['PROTOC'] = False
    else:
        # Attempt to determine the appropriate include path and
        # library path using pkg-config, which means we also need to
        # check for pkg-config. Note that it is possible to use
        # protobuf without the involvement of pkg-config. Later on we
        # do a library configuration check, and at that point the test
        # will fail if libprotobuf cannot be found.
        if readCommand(['pkg-config', '--version'], exception=''):
            try:
                # Attempt to establish what linking flags to add for protobuf
                # using pkg-config
                main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
            except:
                print termcap.Yellow + termcap.Bold + \
                    'Warning: pkg-config could not get protobuf flags.' + \
                    termcap.Normal

# Check for SWIG
if not main.has_key('SWIG'):
    print 'Error: SWIG utility not found.'
    print '       Please install (see http://www.swig.org) and retry.'
    Exit(1)

# Check for appropriate SWIG version
swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
# First 3 words should be "SWIG Version x.y.z"
if len(swig_version) < 3 or \
        swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
    print 'Error determining SWIG version.'
    Exit(1)

min_swig_version = '1.3.34'
if compareVersions(swig_version[2], min_swig_version) < 0:
    print 'Error: SWIG version', min_swig_version, 'or newer required.'
    print '       Installed version:', swig_version[2]
    Exit(1)

# Older versions of swig do not play well with more recent versions of
# gcc due to assumptions on implicit includes (cstddef) and use of
# namespaces
if main['GCC'] and compareVersions(gcc_version, '4.6') > 0 and \
        compareVersions(swig_version[2], '2') < 0:
    print '\n' + termcap.Yellow + termcap.Bold + \
        'Warning: SWIG 1.x causes issues with gcc 4.6 and later.\n' + \
        termcap.Normal + \
        'Use SWIG 2.x to avoid assumptions on implicit includes\n' + \
        'and use of namespaces\n'

# Set up SWIG flags & scanner
swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
main.Append(SWIGFLAGS=swig_flags)

# filter out all existing swig scanners, they mess up the dependency
# stuff for some reason
scanners = []
for scanner in main['SCANNERS']:
    skeys = scanner.skeys
    if skeys == '.i':
        continue

    if isinstance(skeys, (list, tuple)) and '.i' in skeys:
        continue

    scanners.append(scanner)

# add the new swig scanner that we like better
from SCons.Scanner import ClassicCPP as CPPScanner
swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))

# replace the scanners list that has what we want
main['SCANNERS'] = scanners

# Add a custom Check function to the Configure context so that we can
# figure out if the compiler adds leading underscores to global
# variables.  This is needed for the autogenerated asm files that we
# use for embedding the python code.
def CheckLeading(context):
    context.Message("Checking for leading underscore in global variables...")
    # 1) Define a global variable called x from asm so the C compiler
    #    won't change the symbol at all.
    # 2) Declare that variable.
    # 3) Use the variable
    #
    # If the compiler prepends an underscore, this will successfully
    # link because the external symbol 'x' will be called '_x' which
    # was defined by the asm statement.  If the compiler does not
    # prepend an underscore, this will not successfully link because
    # '_x' will have been defined by assembly, while the C portion of
    # the code will be trying to use 'x'
    ret = context.TryLink('''
        asm(".globl _x; _x: .byte 0");
        extern int x;
        int main() { return x; }
        ''', extension=".c")
    context.env.Append(LEADING_UNDERSCORE=ret)
    context.Result(ret)
    return ret

# Platform-specific configuration.  Note again that we assume that all
# builds under a given build root run on the same host platform.
conf = Configure(main,
                 conf_dir = joinpath(build_root, '.scons_config'),
                 log_file = joinpath(build_root, 'scons_config.log'),
                 custom_tests = { 'CheckLeading' : CheckLeading })

# Check for leading underscores.  Don't really need to worry either
# way so don't need to check the return code.
conf.CheckLeading()

# Check if we should compile a 64 bit binary on Mac OS X/Darwin
try:
    import platform
    uname = platform.uname()
    if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
        if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
            main.Append(CCFLAGS=['-arch', 'x86_64'])
            main.Append(CFLAGS=['-arch', 'x86_64'])
            main.Append(LINKFLAGS=['-arch', 'x86_64'])
            main.Append(ASFLAGS=['-arch', 'x86_64'])
except:
    pass

# Recent versions of scons substitute a "Null" object for Configure()
# when configuration isn't necessary, e.g., if the "--help" option is
# present.  Unfortunately this Null object always returns false,
# breaking all our configuration checks.  We replace it with our own
# more optimistic null object that returns True instead.
if not conf:
    def NullCheck(*args, **kwargs):
        return True

    class NullConf:
        def __init__(self, env):
            self.env = env
        def Finish(self):
            return self.env
        def __getattr__(self, mname):
            return NullCheck

    conf = NullConf(main)

# Cache build files in the supplied directory.
if main['M5_BUILD_CACHE']:
    print 'Using build cache located at', main['M5_BUILD_CACHE']
    CacheDir(main['M5_BUILD_CACHE'])

# Find Python include and library directories for embedding the
# interpreter. We rely on python-config to resolve the appropriate
# includes and linker flags. ParseConfig does not seem to understand
# the more exotic linker flags such as -Xlinker and -export-dynamic so
# we add them explicitly below. If you want to link in an alternate
# version of python, see above for instructions on how to invoke
# scons with the appropriate PATH set.
py_includes = readCommand(['python-config', '--includes'],
                          exception='').split()
# Strip the -I from the include folders before adding them to the
# CPPPATH
main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))
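# Purely as an illustration, on a typical Linux host python-config might
# report something like '-I/usr/include/python2.7', which ends up on CPPPATH
# as '/usr/include/python2.7' once the '-I' prefix is stripped.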

# Read the linker flags and split them into libraries and other link
# flags. The libraries are added later through the call to CheckLib.
py_ld_flags = readCommand(['python-config', '--ldflags'], exception='').split()
py_libs = []
for lib in py_ld_flags:
    if not lib.startswith('-l'):
        main.Append(LINKFLAGS=[lib])
    else:
        lib = lib[2:]
        if lib not in py_libs:
            py_libs.append(lib)

# verify that this stuff works
if not conf.CheckHeader('Python.h', '<>'):
    print "Error: can't find Python.h header in", py_includes
    print "Install Python headers (package python-dev on Ubuntu and RedHat)"
    Exit(1)

for lib in py_libs:
    if not conf.CheckLib(lib):
        print "Error: can't find library %s required by python" % lib
        Exit(1)

# On Solaris you need to use libsocket for socket ops
if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
    if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++',
                                   'accept(0,0,0);'):
        print "Can't find library with socket calls (e.g. accept())"
        Exit(1)

# Check for zlib.  If the check passes, libz will be automatically
# added to the LIBS environment variable.
if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
    print 'Error: did not find needed zlib compression library '\
          'and/or zlib.h header file.'
    print '       Please install zlib and try again.'
    Exit(1)

# If we have the protobuf compiler, also make sure we have the
# development libraries. If the check passes, libprotobuf will be
# automatically added to the LIBS environment variable. After
# this, we can use the HAVE_PROTOBUF flag to determine if we have
# both protoc and libprotobuf available.
main['HAVE_PROTOBUF'] = main['PROTOC'] and \
    conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
                            'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')

# If we have the compiler but not the library, print another warning.
if main['PROTOC'] and not main['HAVE_PROTOBUF']:
    print termcap.Yellow + termcap.Bold + \
        'Warning: did not find protocol buffer library and/or headers.\n' + \
        '       Please install libprotobuf-dev for tracing support.' + \
        termcap.Normal

# Check for librt.
have_posix_clock = \
    conf.CheckLibWithHeader(None, 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);') or \
    conf.CheckLibWithHeader('rt', 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);')

have_posix_timers = \
    conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
                            'timer_create(CLOCK_MONOTONIC, NULL, NULL);')

if conf.CheckLib('tcmalloc'):
    main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
elif conf.CheckLib('tcmalloc_minimal'):
    main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
else:
    print termcap.Yellow + termcap.Bold + \
          "You can get a 12% performance improvement by installing tcmalloc "\
          "(libgoogle-perftools-dev package on Ubuntu or RedHat)." + \
          termcap.Normal

if not have_posix_clock:
    print "Can't find library for POSIX clocks."

# Check for <fenv.h> (C99 FP environment control)
have_fenv = conf.CheckHeader('fenv.h', '<>')
if not have_fenv:
    print "Warning: Header file <fenv.h> not found."
    print "         This host has no IEEE FP rounding mode control."

# Check if we should enable KVM-based hardware virtualization. The API
# we rely on has existed since version 2.6.36 of the kernel, but somehow
# the KVM_API_VERSION does not reflect the change. We test for one of
# the types as a fall back.
have_kvm = conf.CheckHeader('linux/kvm.h', '<>') and \
    conf.CheckTypeSize('struct kvm_xsave', '#include <linux/kvm.h>') != 0
if not have_kvm:
    print "Info: Compatible header file <linux/kvm.h> not found, " \
        "disabling KVM support."

# Check if the requested target ISA is compatible with the host
def is_isa_kvm_compatible(isa):
    # Note: the values must be tuples; a bare string would make the 'in'
    # test below do substring matching instead of membership testing.
    isa_comp_table = {
        "arm" : ( "armv7l", ),
        "x86" : ( "x86_64", ),
        }
    try:
        import platform
        host_isa = platform.machine()
    except:
        print "Warning: Failed to determine host ISA."
        return False

    return host_isa in isa_comp_table.get(isa, [])



######################################################################
#
# Finish the configuration
#
main = conf.Finish()

######################################################################
#
# Collect all non-global variables
#

# Define the universe of supported ISAs
all_isa_list = [ ]
Export('all_isa_list')

class CpuModel(object):
    '''The CpuModel class encapsulates everything the ISA parser needs to
    know about a particular CPU model.'''

    # Dict of available CPU model objects.  Accessible as CpuModel.dict.
    dict = {}
    list = []
    defaults = []

    # Constructor.  Automatically adds models to CpuModel.dict.
    def __init__(self, name, filename, includes, strings, default=False):
        self.name = name           # name of model
        self.filename = filename   # filename for output exec code
        self.includes = includes   # include files needed in exec file
        # The 'strings' dict holds all the per-CPU symbols we can
        # substitute into templates etc.
        self.strings = strings

        # This cpu is enabled by default
        self.default = default

        # Add self to dict
        if name in CpuModel.dict:
            raise AttributeError, "CpuModel '%s' already registered" % name
        CpuModel.dict[name] = self
        CpuModel.list.append(name)

Export('CpuModel')
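# Purely illustrative registration (the real ones live in the CPU model
# SConscripts/SConsopts, and the exact arguments differ):
#   CpuModel('MyCPU', 'my_cpu_exec.cc', '#include "cpu/my_cpu.hh"',
#            { 'CPU_exec_context' : 'MyCPU' }, default=False)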

# Sticky variables get saved in the variables file so they persist from
# one invocation to the next (unless overridden, in which case the new
# value becomes sticky).
sticky_vars = Variables(args=ARGUMENTS)
Export('sticky_vars')

# Sticky variables that should be exported
export_vars = []
Export('export_vars')

# For Ruby
all_protocols = []
Export('all_protocols')
protocol_dirs = []
Export('protocol_dirs')
slicc_includes = []
Export('slicc_includes')

# Walk the tree and execute all SConsopts scripts that will add to the
# above variables
if not GetOption('verbose'):
    print "Reading SConsopts"
for bdir in [ base_dir ] + extras_dir_list:
    if not isdir(bdir):
        print "Error: directory '%s' does not exist" % bdir
        Exit(1)
    for root, dirs, files in os.walk(bdir):
        if 'SConsopts' in files:
            if GetOption('verbose'):
                print "Reading", joinpath(root, 'SConsopts')
            SConscript(joinpath(root, 'SConsopts'))

all_isa_list.sort()

sticky_vars.AddVariables(
    EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
    ListVariable('CPU_MODELS', 'CPU models',
                 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
                 sorted(CpuModel.list)),
    BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
                 False),
    BoolVariable('SS_COMPATIBLE_FP',
                 'Make floating-point results compatible with SimpleScalar',
                 False),
    BoolVariable('USE_SSE2',
                 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
                 False),
    BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
    BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
    BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability',
                 False),
    BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models',
                 have_kvm),
    EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
                  all_protocols),
    )
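# Build variables are given on the command line and then stick; an
# illustrative invocation (available ISAs, CPU models and protocols depend
# on the SConsopts read above):
#   % scons build/ARM/gem5.opt CPU_MODELS=AtomicSimpleCPU,O3CPU USE_KVM=False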

# These variables get exported to #defines in config/*.hh (see src/SConscript).
export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'CP_ANNOTATE',
                'USE_POSIX_CLOCK', 'PROTOCOL', 'HAVE_PROTOBUF']

###################################################
#
# Define a SCons builder for configuration flag headers.
#
###################################################

# This function generates a config header file that #defines the
# variable symbol to the current variable setting (0 or 1).  The source
# operands are the name of the variable and a Value node containing the
# value of the variable.
def build_config_file(target, source, env):
    (variable, value) = [s.get_contents() for s in source]
    f = open(str(target[0]), 'w')
    print >> f, '#define', variable, value
    f.close()
    return None

# Wrap the function in a scons Action object.
config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))

# The emitter munges the source & target node lists to reflect what
# we're really doing.
def config_emitter(target, source, env):
    # extract variable name from Builder arg
    variable = str(target[0])
    # True target is config header file
    target = joinpath('config', variable.lower() + '.hh')
    val = env[variable]
    if isinstance(val, bool):
        # Force value to 0/1
        val = int(val)
    elif isinstance(val, str):
        val = '"' + val + '"'

    # Sources are variable name & value (packaged in SCons Value nodes)
    return ([target], [Value(variable), Value(val)])

config_builder = Builder(emitter = config_emitter, action = config_action)

main.Append(BUILDERS = { 'ConfigFile' : config_builder })
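# For illustration, env.ConfigFile('USE_FENV') (as used from src/SConscript)
# produces a header 'config/use_fenv.hh' whose sole line is something like:
#   #define USE_FENV 1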

# libelf build is shared across all configs in the build root.
main.SConscript('ext/libelf/SConscript',
                variant_dir = joinpath(build_root, 'libelf'))

# gzstream build is shared across all configs in the build root.
main.SConscript('ext/gzstream/SConscript',
                variant_dir = joinpath(build_root, 'gzstream'))

# libfdt build is shared across all configs in the build root.
main.SConscript('ext/libfdt/SConscript',
                variant_dir = joinpath(build_root, 'libfdt'))

# fputils build is shared across all configs in the build root.
main.SConscript('ext/fputils/SConscript',
                variant_dir = joinpath(build_root, 'fputils'))

###################################################
#
# This function is used to set up a directory with switching headers
#
###################################################

main['ALL_ISA_LIST'] = all_isa_list
def make_switching_dir(dname, switch_headers, env):
    # Generate the header.  target[0] is the full path of the output
    # header to generate.  'source' is a dummy variable, since we get the
    # list of ISAs from env['ALL_ISA_LIST'].
    def gen_switch_hdr(target, source, env):
        fname = str(target[0])
        f = open(fname, 'w')
        isa = env['TARGET_ISA'].lower()
        print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
        f.close()

    # Build SCons Action object. 'varlist' specifies env vars that this
    # action depends on; when env['ALL_ISA_LIST'] changes these actions
    # should get re-executed.
    switch_hdr_action = MakeAction(gen_switch_hdr,
                          Transform("GENERATE"), varlist=['ALL_ISA_LIST'])

    # Instantiate actions for each header
    for hdr in switch_headers:
        env.Command(hdr, [], switch_hdr_action)
Export('make_switching_dir')
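# For illustration, a switching header generated in an ALPHA build would
# contain a single line along the lines of:
#   #include "arch/alpha/isa_traits.hh"
# where the directory name and header names come from the callers of
# make_switching_dir in the per-directory SConscripts.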

###################################################
#
# Define build environments for selected configurations.
#
###################################################

for variant_path in variant_paths:
    print "Building in", variant_path

    # Make a copy of the build-root environment to use for this config.
    env = main.Clone()
    env['BUILDDIR'] = variant_path

    # variant_dir is the tail component of build path, and is used to
    # determine the build parameters (e.g., 'ALPHA_SE')
    (build_root, variant_dir) = splitpath(variant_path)

    # Set env variables according to the build directory config.
    sticky_vars.files = []
    # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
    # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
    # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
    current_vars_file = joinpath(build_root, 'variables', variant_dir)
    if isfile(current_vars_file):
        sticky_vars.files.append(current_vars_file)
        print "Using saved variables file %s" % current_vars_file
    else:
        # Build dir-specific variables file doesn't exist.

        # Make sure the directory is there so we can create it later
        opt_dir = dirname(current_vars_file)
        if not isdir(opt_dir):
            mkdir(opt_dir)

        # Get default build variables from source tree.  Variables are
        # normally determined by name of $VARIANT_DIR, but can be
        # overridden by '--default=' arg on command line.
        default = GetOption('default')
        opts_dir = joinpath(main.root.abspath, 'build_opts')
        if default:
            default_vars_files = [joinpath(build_root, 'variables', default),
                                  joinpath(opts_dir, default)]
        else:
            default_vars_files = [joinpath(opts_dir, variant_dir)]
        existing_files = filter(isfile, default_vars_files)
        if existing_files:
            default_vars_file = existing_files[0]
            sticky_vars.files.append(default_vars_file)
            print "Variables file %s not found,\n  using defaults in %s" \
                  % (current_vars_file, default_vars_file)
        else:
            print "Error: cannot find variables file %s or " \
                  "default file(s) %s" \
                  % (current_vars_file, ' or '.join(default_vars_files))
            Exit(1)

    # Apply current variable settings to env
    sticky_vars.Update(env)

    help_texts["local_vars"] += \
        "Build variables for %s:\n" % variant_dir \
                 + sticky_vars.GenerateHelpText(env)

    # Process variable settings.

    if not have_fenv and env['USE_FENV']:
        print "Warning: <fenv.h> not available; " \
              "forcing USE_FENV to False in", variant_dir + "."
        env['USE_FENV'] = False

    if not env['USE_FENV']:
        print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
        print "         FP results may deviate slightly from other platforms."

    if env['EFENCE']:
        env.Append(LIBS=['efence'])

    if env['USE_KVM']:
        if not have_kvm:
            print "Warning: Cannot enable KVM, host seems to lack KVM support"
            env['USE_KVM'] = False
        elif not have_posix_timers:
            print "Warning: Cannot enable KVM, host seems to lack support " \
                "for POSIX timers"
            env['USE_KVM'] = False
        elif not is_isa_kvm_compatible(env['TARGET_ISA']):
            print "Info: KVM support disabled due to unsupported host and " \
                "target ISA combination"
            env['USE_KVM'] = False

    # Save sticky variable settings back to current variables file
    sticky_vars.Save(current_vars_file, env)

    if env['USE_SSE2']:
        env.Append(CCFLAGS=['-msse2'])

    # The src/SConscript file sets up the build rules in 'env' according
    # to the configured variables.  It returns a list of environments,
    # one for each variant build (debug, opt, etc.)
    envList = SConscript('src/SConscript', variant_dir = variant_path,
                         exports = 'env')

    # Set up the regression tests for each build.
    for e in envList:
        SConscript('tests/SConscript',
                   variant_dir = joinpath(variant_path, 'tests', e.Label),
                   exports = { 'env' : e }, duplicate = False)

# base help text
Help('''
Usage: scons [scons options] [build variables] [target(s)]

Extra scons options:
%(options)s

Global build variables:
%(global_vars)s

%(local_vars)s
''' % help_texts)