# -*- mode:python -*-
# SConstruct (gem5 revision 10453)

# Copyright (c) 2013 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder.  You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2011 Advanced Micro Devices, Inc.
# Copyright (c) 2009 The Hewlett-Packard Development Company
# Copyright (c) 2004-2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
#          Nathan Binkert

###################################################
#
# SCons top-level build description (SConstruct) file.
#
# While in this directory ('gem5'), just type 'scons' to build the default
# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
# the optimized full-system version).
#
# You can build gem5 in a different directory as long as there is a
# 'build/<CONFIG>' somewhere along the target path.  The build system
# expects that all configs under the same build directory are being
# built for the same host system.
#
# Examples:
#
#   The following two commands are equivalent.  The '-u' option tells
#   scons to search up the directory tree for this SConstruct file.
#   % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
#   % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
#
#   The following two commands are equivalent and demonstrate building
#   in a directory outside of the source tree.  The '-C' option tells
#   scons to chdir to the specified directory to find this SConstruct
#   file.
#   % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
#   % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
#
# You can use 'scons -H' to print scons options.  If you're in this
# 'gem5' directory (or use -u or -C to tell scons where to find this
# file), you can use 'scons -h' to print all the gem5-specific build
# options as well.
#
###################################################

# Check for recent-enough Python and SCons versions.
try:
    # Really old versions of scons only take two options for the
    # function, so check once without the revision and once with the
    # revision, the first instance will fail for stuff other than
    # 0.98, and the second will fail for 0.98.0
    EnsureSConsVersion(0, 98)
    EnsureSConsVersion(0, 98, 1)
except SystemExit, e:
    print """
For more details, see:
    http://gem5.org/Dependencies
"""
    raise

# We ensure the Python version early because python-config
# requires python 2.5
try:
    EnsurePythonVersion(2, 5)
except SystemExit, e:
    print """
You can use a non-default installation of the Python interpreter by
rearranging your PATH so that scons finds the non-default 'python' and
'python-config' first.

For more details, see:
    http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
"""
    raise

# Global Python includes
import itertools
import os
import re
import subprocess
import sys

from os import mkdir, environ
from os.path import abspath, basename, dirname, expanduser, normpath
from os.path import exists,  isdir, isfile
from os.path import join as joinpath, split as splitpath

# SCons includes
import SCons
import SCons.Node

extra_python_paths = [
    Dir('src/python').srcnode().abspath, # gem5 includes
    Dir('ext/ply').srcnode().abspath, # ply is used by several files
    ]

sys.path[1:1] = extra_python_paths

from m5.util import compareVersions, readCommand
from m5.util.terminal import get_termcap

help_texts = {
    "options" : "",
    "global_vars" : "",
    "local_vars" : ""
}

Export("help_texts")


# There's a bug in scons in that (1) by default, the help texts from
# AddOption() are supposed to be displayed when you type 'scons -h'
# and (2) you can override the help displayed by 'scons -h' using the
# Help() function, but these two features are incompatible: once
# you've overridden the help text using Help(), there's no way to get
# at the help texts from AddOptions.  See:
#     http://scons.tigris.org/issues/show_bug.cgi?id=2356
#     http://scons.tigris.org/issues/show_bug.cgi?id=2611
# This hack lets us extract the help text from AddOptions and
# re-inject it via Help().  Ideally someday this bug will be fixed and
# we can just use AddOption directly.
def AddLocalOption(*args, **kwargs):
    col_width = 30

    help = "  " + ", ".join(args)
    if "help" in kwargs:
        length = len(help)
        if length >= col_width:
            help += "\n" + " " * col_width
        else:
            help += " " * (col_width - length)
        help += kwargs["help"]
    help_texts["options"] += help + "\n"

    AddOption(*args, **kwargs)

AddLocalOption('--colors', dest='use_colors', action='store_true',
               help="Add color to abbreviated scons output")
AddLocalOption('--no-colors', dest='use_colors', action='store_false',
               help="Don't add color to abbreviated scons output")
AddLocalOption('--default', dest='default', type='string', action='store',
               help='Override which build_opts file to use for defaults')
AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
               help='Disable style checking hooks')
AddLocalOption('--no-lto', dest='no_lto', action='store_true',
               help='Disable Link-Time Optimization for fast')
AddLocalOption('--update-ref', dest='update_ref', action='store_true',
               help='Update test reference outputs')
AddLocalOption('--verbose', dest='verbose', action='store_true',
               help='Print full tool command lines')
AddLocalOption('--without-python', dest='without_python',
               action='store_true',
               help='Build without Python configuration support')

termcap = get_termcap(GetOption('use_colors'))

########################################################################
#
# Set up the main build environment.
#
########################################################################

# export TERM so that clang reports errors in color
use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
                 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
                 'PYTHONPATH', 'RANLIB', 'SWIG', 'TERM' ])

use_prefixes = [
    "M5",           # M5 configuration (e.g., path to kernels)
    "DISTCC_",      # distcc (distributed compiler wrapper) configuration
    "CCACHE_",      # ccache (caching compiler wrapper) configuration
    "CCC_",         # clang static analyzer configuration
    ]

use_env = {}
for key,val in os.environ.iteritems():
    if key in use_vars or \
            any([key.startswith(prefix) for prefix in use_prefixes]):
        use_env[key] = val

main = Environment(ENV=use_env)
main.Decider('MD5-timestamp')
main.root = Dir(".")         # The current directory (where this file lives).
main.srcdir = Dir("src")     # The source directory

main_dict_keys = main.Dictionary().keys()

# Check that we have a C/C++ compiler
if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
    print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
    Exit(1)

# Check that swig is present
if not 'SWIG' in main_dict_keys:
    print "swig is not installed (package swig on Ubuntu and RedHat)"
    Exit(1)

# add useful python code to PYTHONPATH so it can be used by subprocesses
# as well
main.AppendENVPath('PYTHONPATH', extra_python_paths)

########################################################################
#
# Mercurial Stuff.
#
# If the gem5 directory is a mercurial repository, we should do some
# extra things.
#
########################################################################

hgdir = main.root.Dir(".hg")

mercurial_style_message = """
You're missing the gem5 style hook, which automatically checks your code
against the gem5 style rules on hg commit and qrefresh commands.  This
script will now install the hook in your .hg/hgrc file.
Press enter to continue, or ctrl-c to abort: """

mercurial_style_hook = """
# The following lines were automatically added by gem5/SConstruct
# to provide the gem5 style-checking hooks
[extensions]
style = %s/util/style.py

[hooks]
pretxncommit.style = python:style.check_style
pre-qrefresh.style = python:style.check_style
# End of SConstruct additions

""" % (main.root.abspath)

mercurial_lib_not_found = """
Mercurial libraries cannot be found, ignoring style hook.  If
you are a gem5 developer, please fix this and run the style
hook. It is important.
"""

# Check for style hook and prompt for installation if it's not there.
# Skip this if --ignore-style was specified, there's no .hg dir to
# install a hook in, or there's no interactive terminal to prompt.
if not GetOption('ignore_style') and hgdir.exists() and sys.stdin.isatty():
    style_hook = True
    try:
        from mercurial import ui
        ui = ui.ui()
        ui.readconfig(hgdir.File('hgrc').abspath)
        style_hook = ui.config('hooks', 'pretxncommit.style', None) and \
                     ui.config('hooks', 'pre-qrefresh.style', None)
    except ImportError:
        print mercurial_lib_not_found

    if not style_hook:
        print mercurial_style_message,
        # continue unless user does ctrl-c/ctrl-d etc.
        try:
            raw_input()
        except:
            print "Input exception, exiting scons.\n"
            sys.exit(1)
        hgrc_path = '%s/.hg/hgrc' % main.root.abspath
        print "Adding style hook to", hgrc_path, "\n"
        try:
            hgrc = open(hgrc_path, 'a')
            hgrc.write(mercurial_style_hook)
            hgrc.close()
        except:
            print "Error updating", hgrc_path
            sys.exit(1)


###################################################
#
# Figure out which configurations to set up based on the path(s) of
# the target(s).
#
###################################################

# Find default configuration & binary.
Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))

# helper function: find last occurrence of element in list
def rfind(l, elt, offs = -1):
    for i in range(len(l)+offs, 0, -1):
        if l[i] == elt:
            return i
    raise ValueError, "element not found"
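
# Illustrative example (the path is hypothetical): for a target path split as
# ['', 'local', 'foo', 'build', 'ALPHA', 'gem5.debug'], rfind(l, 'build', -2)
# returns 3, the index of the last non-leaf 'build' component; the -2 offset
# keeps a leaf entry that happens to be named 'build' from matching.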

# Take a list of paths (or SCons Nodes) and return a list with all
# paths made absolute and ~-expanded.  Paths will be interpreted
# relative to the launch directory unless a different root is provided
def makePathListAbsolute(path_list, root=GetLaunchDir()):
    return [abspath(joinpath(root, expanduser(str(p))))
            for p in path_list]
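
# For example, if scons was launched from /home/user/gem5 (the path is
# hypothetical), both 'build/ARM/gem5.opt' and '~/gem5/build/ARM/gem5.opt'
# end up as '/home/user/gem5/build/ARM/gem5.opt'.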

# Each target must have 'build' in the interior of the path; the
# directory below this will determine the build parameters.  For
# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
# recognize that ALPHA_SE specifies the configuration because it
# follows 'build' in the build path.

# The funky assignment to "[:]" is needed to replace the list contents
# in place rather than reassign the symbol to a new list, which
# doesn't work (obviously!).
BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)

# Generate a list of the unique build roots and configs that the
# collected targets reference.
variant_paths = []
build_root = None
for t in BUILD_TARGETS:
    path_dirs = t.split('/')
    try:
        build_top = rfind(path_dirs, 'build', -2)
    except:
        print "Error: no non-leaf 'build' dir found on target path", t
        Exit(1)
    this_build_root = joinpath('/',*path_dirs[:build_top+1])
    if not build_root:
        build_root = this_build_root
    else:
        if this_build_root != build_root:
            print "Error: build targets not under same build root\n"\
                  "  %s\n  %s" % (build_root, this_build_root)
            Exit(1)
    variant_path = joinpath('/',*path_dirs[:build_top+2])
    if variant_path not in variant_paths:
        variant_paths.append(variant_path)

# Make sure build_root exists (might not if this is the first build there)
if not isdir(build_root):
    mkdir(build_root)
main['BUILDROOT'] = build_root

Export('main')

main.SConsignFile(joinpath(build_root, "sconsign"))

# Default duplicate option is to use hard links, but this messes up
# when you use emacs to edit a file in the target dir, as emacs moves
# file to file~ then copies to file, breaking the link.  Symbolic
# (soft) links work better.
main.SetOption('duplicate', 'soft-copy')

#
# Set up global sticky variables... these are common to an entire build
# tree (not specific to a particular build like ALPHA_SE)
#

global_vars_file = joinpath(build_root, 'variables.global')

global_vars = Variables(global_vars_file, args=ARGUMENTS)

global_vars.AddVariables(
    ('CC', 'C compiler', environ.get('CC', main['CC'])),
    ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
    ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
    ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
    ('BATCH', 'Use batch pool for build and tests', False),
    ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
    ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
    ('EXTRAS', 'Add extra directories to the compilation', '')
    )
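
# These variables are sticky: a value given once on the command line is saved
# in <build_root>/variables.global and reused by later invocations, e.g.
# (the compiler names are illustrative):
#   % scons CC=clang CXX=clang++ build/ARM/gem5.opt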

# Update main environment with values from ARGUMENTS & global_vars_file
global_vars.Update(main)
help_texts["global_vars"] += global_vars.GenerateHelpText(main)

# Save sticky variable settings back to current variables file
global_vars.Save(global_vars_file, main)

# Parse EXTRAS variable to build list of all directories where we will
# look for sources etc.  This list is exported as extras_dir_list.
base_dir = main.srcdir.abspath
if main['EXTRAS']:
    extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
else:
    extras_dir_list = []

Export('base_dir')
Export('extras_dir_list')
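
# EXTRAS is a colon-separated list of additional source directories, e.g.
# (the paths are illustrative):
#   % scons EXTRAS=/work/my_extra_models:/work/more_models build/ARM/gem5.opt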

# the ext directory should be on the #includes path
main.Append(CPPPATH=[Dir('ext')])

def strip_build_path(path, env):
    path = str(path)
    variant_base = env['BUILDROOT'] + os.path.sep
    if path.startswith(variant_base):
        path = path[len(variant_base):]
    elif path.startswith('build/'):
        path = path[6:]
    return path

# Generate a string of the form:
#   common/path/prefix/src1, src2 -> tgt1, tgt2
# to print while building.
class Transform(object):
    # all specific color settings should be here and nowhere else
    tool_color = termcap.Normal
    pfx_color = termcap.Yellow
    srcs_color = termcap.Yellow + termcap.Bold
    arrow_color = termcap.Blue + termcap.Bold
    tgts_color = termcap.Yellow + termcap.Bold

    def __init__(self, tool, max_sources=99):
        self.format = self.tool_color + (" [%8s] " % tool) \
                      + self.pfx_color + "%s" \
                      + self.srcs_color + "%s" \
                      + self.arrow_color + " -> " \
                      + self.tgts_color + "%s" \
                      + termcap.Normal
        self.max_sources = max_sources

    def __call__(self, target, source, env, for_signature=None):
        # truncate source list according to max_sources param
        source = source[0:self.max_sources]
        def strip(f):
            return strip_build_path(str(f), env)
        if len(source) > 0:
            srcs = map(strip, source)
        else:
            srcs = ['']
        tgts = map(strip, target)
        # surprisingly, os.path.commonprefix is a dumb char-by-char string
        # operation that has nothing to do with paths.
        com_pfx = os.path.commonprefix(srcs + tgts)
        com_pfx_len = len(com_pfx)
        if com_pfx:
            # do some cleanup and sanity checking on common prefix
            if com_pfx[-1] == ".":
                # prefix matches all but file extension: ok
                # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
                com_pfx = com_pfx[0:-1]
            elif com_pfx[-1] == "/":
                # common prefix is directory path: OK
                pass
            else:
                src0_len = len(srcs[0])
                tgt0_len = len(tgts[0])
                if src0_len == com_pfx_len:
                    # source is a substring of target, OK
                    pass
                elif tgt0_len == com_pfx_len:
                    # target is a substring of source, need to back up to
                    # avoid empty string on RHS of arrow
                    sep_idx = com_pfx.rfind(".")
                    if sep_idx != -1:
                        com_pfx = com_pfx[0:sep_idx]
                    else:
                        com_pfx = ''
                elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
                    # still splitting at file extension: ok
                    pass
                else:
                    # probably a fluke; ignore it
                    com_pfx = ''
        # recalculate length in case com_pfx was modified
        com_pfx_len = len(com_pfx)
        def fmt(files):
            f = map(lambda s: s[com_pfx_len:], files)
            return ', '.join(f)
        return self.format % (com_pfx, fmt(srcs), fmt(tgts))

Export('Transform')
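
# When attached to the *COMSTR variables below, each build step is echoed as
# an abbreviated line roughly of the form (the file name is illustrative):
#    [     CXX] src/base/bitmap.cc -> .o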

# enable the regression script to use the termcap
main['TERMCAP'] = termcap

if GetOption('verbose'):
    def MakeAction(action, string, *args, **kwargs):
        return Action(action, *args, **kwargs)
else:
    MakeAction = Action
    main['CCCOMSTR']        = Transform("CC")
    main['CXXCOMSTR']       = Transform("CXX")
    main['ASCOMSTR']        = Transform("AS")
    main['SWIGCOMSTR']      = Transform("SWIG")
    main['ARCOMSTR']        = Transform("AR", 0)
    main['LINKCOMSTR']      = Transform("LINK", 0)
    main['RANLIBCOMSTR']    = Transform("RANLIB", 0)
    main['M4COMSTR']        = Transform("M4")
    main['SHCCCOMSTR']      = Transform("SHCC")
    main['SHCXXCOMSTR']     = Transform("SHCXX")
Export('MakeAction')

# Initialize the Link-Time Optimization (LTO) flags
main['LTO_CCFLAGS'] = []
main['LTO_LDFLAGS'] = []

# According to the readme, tcmalloc works best if the compiler doesn't
# assume that we're using the builtin malloc and friends. These flags
# are compiler-specific, so we need to set them after we detect which
# compiler we're using.
main['TCMALLOC_CCFLAGS'] = []

CXX_version = readCommand([main['CXX'],'--version'], exception=False)
CXX_V = readCommand([main['CXX'],'-V'], exception=False)

main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
if main['GCC'] + main['CLANG'] > 1:
    print 'Error: How can we have two at the same time?'
    Exit(1)

# Set up default C++ compiler flags
if main['GCC'] or main['CLANG']:
    # As gcc and clang share many flags, do the common parts here
    main.Append(CCFLAGS=['-pipe'])
    main.Append(CCFLAGS=['-fno-strict-aliasing'])
    # Enable -Wall and then disable the few warnings that we
    # consistently violate
    main.Append(CCFLAGS=['-Wall', '-Wno-sign-compare', '-Wundef'])
    # We always compile using C++11, but only gcc >= 4.7 and clang 3.1
    # actually use that name, so we stick with c++0x
    main.Append(CXXFLAGS=['-std=c++0x'])
    # Add selected sanity checks from -Wextra
    main.Append(CXXFLAGS=['-Wmissing-field-initializers',
                          '-Woverloaded-virtual'])
else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
               termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use a compiler other than GCC"
    print "       or clang, there appears to be something wrong with your"
    print "       environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to fix SConstruct and"
    print "       src/SConscript to support that compiler."
    Exit(1)

if main['GCC']:
    # Check for a supported version of gcc. >= 4.6 is chosen for its
    # level of c++11 support. See
    # http://gcc.gnu.org/projects/cxx0x.html for details. 4.6 is also
    # the first version with proper LTO support.
    gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
    if compareVersions(gcc_version, "4.6") < 0:
        print 'Error: gcc version 4.6 or newer required.'
        print '       Installed version:', gcc_version
        Exit(1)

    main['GCC_VERSION'] = gcc_version

    # gcc from version 4.8 and above generates "rep; ret" instructions
    # to avoid performance penalties on certain AMD chips. Older
    # assemblers detect this as an error, "Error: expecting string
    # instruction after `rep'"
    if compareVersions(gcc_version, "4.8") > 0:
        as_version = readCommand([main['AS'], '-v', '/dev/null'],
                                 exception=False).split()

        if not as_version or compareVersions(as_version[-1], "2.23") < 0:
            print termcap.Yellow + termcap.Bold + \
                'Warning: This combination of gcc and binutils has' + \
                ' known incompatibilities.\n' + \
                '         If you encounter build problems, please update ' + \
                'binutils to 2.23.' + \
                termcap.Normal

    # Add the appropriate Link-Time Optimization (LTO) flags
    # unless LTO is explicitly turned off. Note that these flags
    # are only used by the fast target.
    if not GetOption('no_lto'):
        # Pass the LTO flag when compiling to produce GIMPLE
        # output; we merely create the flags here and only append
        # them later
        main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

        # Use the same number of jobs for LTO as we are running
        # scons with
        main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
                                  '-fno-builtin-realloc', '-fno-builtin-free'])

elif main['CLANG']:
    # Check for a supported version of clang, >= 3.0 is needed to
    # support similar features as gcc 4.6. See
    # http://clang.llvm.org/cxx_status.html for details
    clang_version_re = re.compile(r".* version (\d+\.\d+)")
    clang_version_match = clang_version_re.search(CXX_version)
    if (clang_version_match):
        clang_version = clang_version_match.groups()[0]
        if compareVersions(clang_version, "3.0") < 0:
            print 'Error: clang version 3.0 or newer required.'
            print '       Installed version:', clang_version
            Exit(1)
    else:
        print 'Error: Unable to determine clang version.'
        Exit(1)

    # clang has a few additional warnings that we disable:
    # tautological comparisons are allowed due to unsigned integers
    # being compared to constants that happen to be 0, extraneous
    # parentheses are allowed due to Ruby's printing of the AST, and
    # self-assignments are allowed as the generated CPU code relies
    # on this
    main.Append(CCFLAGS=['-Wno-tautological-compare',
                         '-Wno-parentheses',
                         '-Wno-self-assign',
                         # Some versions of libstdc++ (4.8?) seem to
                         # use struct hash and class hash
                         # interchangeably.
                         '-Wno-mismatched-tags',
                         ])

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])

    # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
    # opposed to libstdc++, as the latter is dated.
    if sys.platform == "darwin":
        main.Append(CXXFLAGS=['-stdlib=libc++'])
        main.Append(LIBS=['c++'])

else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
               termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use a compiler other than GCC"
    print "       or clang, there appears to be something wrong with your"
    print "       environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to fix SConstruct and"
    print "       src/SConscript to support that compiler."
    Exit(1)

# Set up common yacc/bison flags (needed for Ruby)
main['YACCFLAGS'] = '-d'
main['YACCHXXFILESUFFIX'] = '.hh'

# Do this after we save the settings back, or else we'll tack on an
# extra 'qdo' every time we run scons.
if main['BATCH']:
    main['CC']     = main['BATCH_CMD'] + ' ' + main['CC']
    main['CXX']    = main['BATCH_CMD'] + ' ' + main['CXX']
    main['AS']     = main['BATCH_CMD'] + ' ' + main['AS']
    main['AR']     = main['BATCH_CMD'] + ' ' + main['AR']
    main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']

if sys.platform == 'cygwin':
    # cygwin has some header file issues...
    main.Append(CCFLAGS=["-Wno-uninitialized"])

# Check for the protobuf compiler
protoc_version = readCommand([main['PROTOC'], '--version'],
                             exception='').split()

# First two words should be "libprotoc x.y.z"
if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
    print termcap.Yellow + termcap.Bold + \
        'Warning: Protocol buffer compiler (protoc) not found.\n' + \
        '         Please install protobuf-compiler for tracing support.' + \
        termcap.Normal
    main['PROTOC'] = False
else:
    # Based on the availability of the compress stream wrappers,
    # require 2.1.0
    min_protoc_version = '2.1.0'
    if compareVersions(protoc_version[1], min_protoc_version) < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: protoc version', min_protoc_version, \
            'or newer required.\n' + \
            '         Installed version:', protoc_version[1], \
            termcap.Normal
        main['PROTOC'] = False
    else:
        # Attempt to determine the appropriate include path and
        # library path using pkg-config, which means we also need to
        # check for pkg-config. Note that it is possible to use
        # protobuf without the involvement of pkg-config. Later on we
        # do a library config check, and at that point the test
        # will fail if libprotobuf cannot be found.
        if readCommand(['pkg-config', '--version'], exception=''):
            try:
                # Attempt to establish what linking flags to add for protobuf
                # using pkg-config
                main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
            except:
                print termcap.Yellow + termcap.Bold + \
                    'Warning: pkg-config could not get protobuf flags.' + \
                    termcap.Normal

# Check for SWIG
if not main.has_key('SWIG'):
    print 'Error: SWIG utility not found.'
    print '       Please install (see http://www.swig.org) and retry.'
    Exit(1)

# Check for appropriate SWIG version
swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
# First 3 words should be "SWIG Version x.y.z"
if len(swig_version) < 3 or \
        swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
    print 'Error determining SWIG version.'
    Exit(1)

min_swig_version = '2.0.4'
if compareVersions(swig_version[2], min_swig_version) < 0:
    print 'Error: SWIG version', min_swig_version, 'or newer required.'
    print '       Installed version:', swig_version[2]
    Exit(1)

# Check for known incompatibilities. The standard library shipped with
# gcc >= 4.9 does not play well with swig versions prior to 3.0
if main['GCC'] and compareVersions(gcc_version, '4.9') >= 0 and \
        compareVersions(swig_version[2], '3.0') < 0:
    print termcap.Yellow + termcap.Bold + \
        'Warning: This combination of gcc and swig has' + \
        ' known incompatibilities.\n' + \
        '         If you encounter build problems, please update ' + \
        'swig to 3.0 or later.' + \
        termcap.Normal

# Set up SWIG flags & scanner
swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
main.Append(SWIGFLAGS=swig_flags)

# Check for 'timeout' from GNU coreutils.  If present, regressions
# will be run with a time limit.
TIMEOUT_version = readCommand(['timeout', '--version'], exception=False)
main['TIMEOUT'] = TIMEOUT_version and TIMEOUT_version.find('timeout') == 0

# filter out all existing swig scanners; they mess up the dependency
# stuff for some reason
scanners = []
for scanner in main['SCANNERS']:
    skeys = scanner.skeys
    if skeys == '.i':
        continue

    if isinstance(skeys, (list, tuple)) and '.i' in skeys:
        continue

    scanners.append(scanner)

# add the new swig scanner that we like better
from SCons.Scanner import ClassicCPP as CPPScanner
swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))
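
# The pattern above picks up SWIG and preprocessor dependency lines such as
# (the file names are illustrative):
#   %include "m5_object.i"
#   #include <Python.h>
# so that interface files are rescanned when the files they pull in change.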

# replace the scanners list that has what we want
main['SCANNERS'] = scanners

# Add a custom Check function to the Configure context so that we can
# figure out if the compiler adds leading underscores to global
# variables.  This is needed for the autogenerated asm files that we
# use for embedding the python code.
def CheckLeading(context):
    context.Message("Checking for leading underscore in global variables...")
    # 1) Define a global variable called x from asm so the C compiler
    #    won't change the symbol at all.
    # 2) Declare that variable.
    # 3) Use the variable
    #
    # If the compiler prepends an underscore, this will successfully
    # link because the external symbol 'x' will be called '_x' which
    # was defined by the asm statement.  If the compiler does not
    # prepend an underscore, this will not successfully link because
    # '_x' will have been defined by assembly, while the C portion of
    # the code will be trying to use 'x'
    ret = context.TryLink('''
        asm(".globl _x; _x: .byte 0");
        extern int x;
        int main() { return x; }
        ''', extension=".c")
    context.env.Append(LEADING_UNDERSCORE=ret)
    context.Result(ret)
    return ret

# Add a custom Check function to test for structure members.
def CheckMember(context, include, decl, member, include_quotes="<>"):
    context.Message("Checking for member %s in %s..." %
                    (member, decl))
    text = """
#include %(header)s
int main(){
  %(decl)s test;
  (void)test.%(member)s;
  return 0;
};
""" % { "header" : include_quotes[0] + include + include_quotes[1],
        "decl" : decl,
        "member" : member,
        }

    ret = context.TryCompile(text, extension=".cc")
    context.Result(ret)
    return ret

# Platform-specific configuration.  Note again that we assume that all
# builds under a given build root run on the same host platform.
conf = Configure(main,
                 conf_dir = joinpath(build_root, '.scons_config'),
                 log_file = joinpath(build_root, 'scons_config.log'),
                 custom_tests = {
        'CheckLeading' : CheckLeading,
        'CheckMember' : CheckMember,
        })

# Check for leading underscores.  Don't really need to worry either
# way so don't need to check the return code.
conf.CheckLeading()

# Check if we should compile a 64 bit binary on Mac OS X/Darwin
try:
    import platform
    uname = platform.uname()
    if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
        if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
            main.Append(CCFLAGS=['-arch', 'x86_64'])
            main.Append(CFLAGS=['-arch', 'x86_64'])
            main.Append(LINKFLAGS=['-arch', 'x86_64'])
            main.Append(ASFLAGS=['-arch', 'x86_64'])
except:
    pass

# Recent versions of scons substitute a "Null" object for Configure()
# when configuration isn't necessary, e.g., if the "--help" option is
# present.  Unfortunately this Null object always returns false,
# breaking all our configuration checks.  We replace it with our own
# more optimistic null object that returns True instead.
if not conf:
    def NullCheck(*args, **kwargs):
        return True

    class NullConf:
        def __init__(self, env):
            self.env = env
        def Finish(self):
            return self.env
        def __getattr__(self, mname):
            return NullCheck

    conf = NullConf(main)

# Cache build files in the supplied directory.
if main['M5_BUILD_CACHE']:
    print 'Using build cache located at', main['M5_BUILD_CACHE']
    CacheDir(main['M5_BUILD_CACHE'])

if not GetOption('without_python'):
    # Find Python include and library directories for embedding the
    # interpreter. We rely on python-config to resolve the appropriate
    # includes and linker flags. ParseConfig does not seem to understand
    # the more exotic linker flags such as -Xlinker and -export-dynamic so
    # we add them explicitly below. If you want to link in an alternate
    # version of python, see above for instructions on how to invoke
    # scons with the appropriate PATH set.
    #
    # First we check if python2-config exists, else we use python-config
    python_config = readCommand(['which', 'python2-config'],
                                exception='').strip()
    if not os.path.exists(python_config):
        python_config = readCommand(['which', 'python-config'],
                                    exception='').strip()
    py_includes = readCommand([python_config, '--includes'],
                              exception='').split()
    # Strip the -I from the include folders before adding them to the
    # CPPPATH
    main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))

    # Read the linker flags and split them into libraries and other link
    # flags. The libraries are added later through the call to CheckLib.
    py_ld_flags = readCommand([python_config, '--ldflags'],
        exception='').split()
    py_libs = []
    for lib in py_ld_flags:
        if not lib.startswith('-l'):
            main.Append(LINKFLAGS=[lib])
        else:
            lib = lib[2:]
            if lib not in py_libs:
                py_libs.append(lib)
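
    # With a typical python2.7-config, the --ldflags output looks roughly like
    #   -lpython2.7 -lpthread -ldl -lutil -lm -Xlinker -export-dynamic
    # so py_libs becomes ['python2.7', 'pthread', 'dl', 'util', 'm'] while
    # -Xlinker and -export-dynamic are passed straight through to LINKFLAGS.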

    # verify that this stuff works
    if not conf.CheckHeader('Python.h', '<>'):
        print "Error: can't find Python.h header in", py_includes
        print "Install Python headers (package python-dev on Ubuntu and RedHat)"
        Exit(1)

    for lib in py_libs:
        if not conf.CheckLib(lib):
            print "Error: can't find library %s required by python" % lib
            Exit(1)

# On Solaris you need to use libsocket for socket ops
if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
    if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
        print "Can't find library with socket calls (e.g. accept())"
        Exit(1)

# Check for zlib.  If the check passes, libz will be automatically
# added to the LIBS environment variable.
if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
    print 'Error: did not find needed zlib compression library '\
          'and/or zlib.h header file.'
    print '       Please install zlib and try again.'
    Exit(1)

# If we have the protobuf compiler, also make sure we have the
# development libraries. If the check passes, libprotobuf will be
# automatically added to the LIBS environment variable. After
# this, we can use the HAVE_PROTOBUF flag to determine if we have
# got both protoc and libprotobuf available.
main['HAVE_PROTOBUF'] = main['PROTOC'] and \
    conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
                            'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')

# If we have the compiler but not the library, print another warning.
if main['PROTOC'] and not main['HAVE_PROTOBUF']:
    print termcap.Yellow + termcap.Bold + \
        'Warning: did not find protocol buffer library and/or headers.\n' + \
        '       Please install libprotobuf-dev for tracing support.' + \
        termcap.Normal

# Check for librt.
have_posix_clock = \
    conf.CheckLibWithHeader(None, 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);') or \
    conf.CheckLibWithHeader('rt', 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);')

have_posix_timers = \
    conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
                            'timer_create(CLOCK_MONOTONIC, NULL, NULL);')

if conf.CheckLib('tcmalloc'):
    main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
elif conf.CheckLib('tcmalloc_minimal'):
    main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
else:
    print termcap.Yellow + termcap.Bold + \
          "You can get a 12% performance improvement by installing tcmalloc "\
          "(libgoogle-perftools-dev package on Ubuntu or RedHat)." + \
          termcap.Normal

if not have_posix_clock:
    print "Can't find library for POSIX clocks."

# Check for <fenv.h> (C99 FP environment control)
have_fenv = conf.CheckHeader('fenv.h', '<>')
if not have_fenv:
    print "Warning: Header file <fenv.h> not found."
    print "         This host has no IEEE FP rounding mode control."

# Check if we should enable KVM-based hardware virtualization. The API
# we rely on exists since version 2.6.36 of the kernel, but somehow
# the KVM_API_VERSION does not reflect the change. We test for one of
# the types as a fall back.
have_kvm = conf.CheckHeader('linux/kvm.h', '<>') and \
    conf.CheckTypeSize('struct kvm_xsave', '#include <linux/kvm.h>') != 0
if not have_kvm:
    print "Info: Compatible header file <linux/kvm.h> not found, " \
        "disabling KVM support."

# Check if the requested target ISA is compatible with the host
def is_isa_kvm_compatible(isa):
    isa_comp_table = {
        "arm" : ( "armv7l", ),
        "x86" : ( "x86_64", ),
        }
    try:
        import platform
        host_isa = platform.machine()
    except:
        print "Warning: Failed to determine host ISA."
        return False

    return host_isa in isa_comp_table.get(isa, [])


# Check if the exclude_host attribute is available. We want this to
# get accurate instruction counts in KVM.
main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
    'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')


######################################################################
#
# Finish the configuration
#
main = conf.Finish()

######################################################################
#
# Collect all non-global variables
#

# Define the universe of supported ISAs
all_isa_list = [ ]
Export('all_isa_list')

class CpuModel(object):
    '''The CpuModel class encapsulates everything the ISA parser needs to
    know about a particular CPU model.'''

    # Dict of available CPU model objects.  Accessible as CpuModel.dict.
    dict = {}

    # Constructor.  Automatically adds models to CpuModel.dict.
    def __init__(self, name, default=False):
        self.name = name           # name of model

        # This cpu is enabled by default
        self.default = default

        # Add self to dict
        if name in CpuModel.dict:
            raise AttributeError, "CpuModel '%s' already registered" % name
        CpuModel.dict[name] = self

Export('CpuModel')

# Sticky variables get saved in the variables file so they persist from
# one invocation to the next (unless overridden, in which case the new
# value becomes sticky).
sticky_vars = Variables(args=ARGUMENTS)
Export('sticky_vars')

# Sticky variables that should be exported
export_vars = []
Export('export_vars')

# For Ruby
all_protocols = []
Export('all_protocols')
protocol_dirs = []
Export('protocol_dirs')
slicc_includes = []
Export('slicc_includes')

# Walk the tree and execute all SConsopts scripts that will add to the
# above variables
if GetOption('verbose'):
    print "Reading SConsopts"
for bdir in [ base_dir ] + extras_dir_list:
    if not isdir(bdir):
        print "Error: directory '%s' does not exist" % bdir
        Exit(1)
    for root, dirs, files in os.walk(bdir):
        if 'SConsopts' in files:
            if GetOption('verbose'):
                print "Reading", joinpath(root, 'SConsopts')
            SConscript(joinpath(root, 'SConsopts'))

all_isa_list.sort()

sticky_vars.AddVariables(
    EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
    ListVariable('CPU_MODELS', 'CPU models',
                 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
                 sorted(CpuModel.dict.keys())),
    BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
                 False),
    BoolVariable('SS_COMPATIBLE_FP',
                 'Make floating-point results compatible with SimpleScalar',
                 False),
    BoolVariable('USE_SSE2',
                 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
                 False),
    BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
    BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
    BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability', False),
    BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models', have_kvm),
    EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
                  all_protocols),
    )
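
# These are per-build-directory sticky variables: values given on the command
# line are remembered in $BUILD_ROOT/variables/$VARIANT_DIR, e.g. (the exact
# CPU model names depend on the SConsopts read above and are illustrative):
#   % scons build/ARM/gem5.opt USE_KVM=False CPU_MODELS=AtomicSimpleCPU,O3CPU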

# These variables get exported to #defines in config/*.hh (see src/SConscript).
export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'CP_ANNOTATE',
                'USE_POSIX_CLOCK', 'PROTOCOL', 'HAVE_PROTOBUF',
                'HAVE_PERF_ATTR_EXCLUDE_HOST']

###################################################
#
# Define a SCons builder for configuration flag headers.
#
###################################################

# This function generates a config header file that #defines the
# variable symbol to the current variable setting (0 or 1).  The source
# operands are the name of the variable and a Value node containing the
# value of the variable.
def build_config_file(target, source, env):
    (variable, value) = [s.get_contents() for s in source]
    f = open(str(target[0]), 'w')
    print >> f, '#define', variable, value
    f.close()
    return None

# Combine the two functions into a scons Action object.
config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))

# The emitter munges the source & target node lists to reflect what
# we're really doing.
def config_emitter(target, source, env):
    # extract variable name from Builder arg
    variable = str(target[0])
    # True target is config header file
    target = joinpath('config', variable.lower() + '.hh')
    val = env[variable]
    if isinstance(val, bool):
        # Force value to 0/1
        val = int(val)
    elif isinstance(val, str):
        val = '"' + val + '"'

    # Sources are variable name & value (packaged in SCons Value nodes)
    return ([target], [Value(variable), Value(val)])

config_builder = Builder(emitter = config_emitter, action = config_action)

main.Append(BUILDERS = { 'ConfigFile' : config_builder })
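
# For example, invoking env.ConfigFile('USE_FENV') (roughly what src/SConscript
# does for each name in export_vars) emits a header config/use_fenv.hh whose
# whole content is a single line such as:
#   #define USE_FENV 1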

# libelf build is shared across all configs in the build root.
main.SConscript('ext/libelf/SConscript',
                variant_dir = joinpath(build_root, 'libelf'))

# gzstream build is shared across all configs in the build root.
main.SConscript('ext/gzstream/SConscript',
                variant_dir = joinpath(build_root, 'gzstream'))

# libfdt build is shared across all configs in the build root.
main.SConscript('ext/libfdt/SConscript',
                variant_dir = joinpath(build_root, 'libfdt'))

# fputils build is shared across all configs in the build root.
main.SConscript('ext/fputils/SConscript',
                variant_dir = joinpath(build_root, 'fputils'))

# DRAMSim2 build is shared across all configs in the build root.
main.SConscript('ext/dramsim2/SConscript',
                variant_dir = joinpath(build_root, 'dramsim2'))

# DRAMPower build is shared across all configs in the build root.
main.SConscript('ext/drampower/SConscript',
                variant_dir = joinpath(build_root, 'drampower'))

###################################################
#
# This function is used to set up a directory with switching headers
#
###################################################

main['ALL_ISA_LIST'] = all_isa_list
all_isa_deps = {}
def make_switching_dir(dname, switch_headers, env):
    # Generate the header.  target[0] is the full path of the output
    # header to generate.  'source' is a dummy variable, since we get the
    # list of ISAs from env['ALL_ISA_LIST'].
    def gen_switch_hdr(target, source, env):
        fname = str(target[0])
        isa = env['TARGET_ISA'].lower()
        try:
            f = open(fname, 'w')
            print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
            f.close()
        except IOError:
            print "Failed to create %s" % fname
            raise

    # Build SCons Action object. 'varlist' specifies env vars that this
    # action depends on; when env['ALL_ISA_LIST'] changes these actions
    # should get re-executed.
    switch_hdr_action = MakeAction(gen_switch_hdr,
                          Transform("GENERATE"), varlist=['ALL_ISA_LIST'])

    # Instantiate actions for each header
    for hdr in switch_headers:
        env.Command(hdr, [], switch_hdr_action)

    isa_target = Dir('.').up().name.lower().replace('_', '-')
    env['PHONY_BASE'] = '#'+isa_target
    all_isa_deps[isa_target] = None

Export('make_switching_dir')
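
# For example, with TARGET_ISA=arm a switching header generated under
# build/ARM/arch/ would contain nothing but a line like
#   #include "arch/arm/decoder.hh"
# (the directory and header names here are illustrative; the per-arch
# SConscripts supply the real dname and switch_headers arguments).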

# all-isas -> all-deps -> all-environs -> all_targets
main.Alias('#all-isas', [])
main.Alias('#all-deps', '#all-isas')

# Dummy target to ensure all environments are created before telling
# SCons what to actually make (the command line arguments).  We attach
# them to the dependence graph after the environments are complete.
ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; gets closure to work.
def environsComplete(target, source, env):
    for t in ORIG_BUILD_TARGETS:
        main.Depends('#all-targets', t)

# Each build/* switching_dir attaches its *-environs target to #all-environs.
main.Append(BUILDERS = {'CompleteEnvirons' :
                        Builder(action=MakeAction(environsComplete, None))})
main.CompleteEnvirons('#all-environs', [])

def doNothing(**ignored): pass
main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})

# The final target to which all the original targets ultimately get attached.
main.Dummy('#all-targets', '#all-environs')
BUILD_TARGETS[:] = ['#all-targets']

###################################################
#
# Define build environments for selected configurations.
#
###################################################

for variant_path in variant_paths:
    if not GetOption('silent'):
        print "Building in", variant_path

    # Make a copy of the build-root environment to use for this config.
    env = main.Clone()
    env['BUILDDIR'] = variant_path

    # variant_dir is the tail component of build path, and is used to
    # determine the build parameters (e.g., 'ALPHA_SE')
    (build_root, variant_dir) = splitpath(variant_path)

    # Set env variables according to the build directory config.
    sticky_vars.files = []
    # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
    # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
    # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
    current_vars_file = joinpath(build_root, 'variables', variant_dir)
    if isfile(current_vars_file):
        sticky_vars.files.append(current_vars_file)
        if not GetOption('silent'):
            print "Using saved variables file %s" % current_vars_file
    else:
        # Build dir-specific variables file doesn't exist.

        # Make sure the directory is there so we can create it later
        opt_dir = dirname(current_vars_file)
        if not isdir(opt_dir):
            mkdir(opt_dir)

        # Get default build variables from source tree.  Variables are
        # normally determined by name of $VARIANT_DIR, but can be
        # overridden by '--default=' arg on command line.
        default = GetOption('default')
        opts_dir = joinpath(main.root.abspath, 'build_opts')
        if default:
            default_vars_files = [joinpath(build_root, 'variables', default),
                                  joinpath(opts_dir, default)]
        else:
            default_vars_files = [joinpath(opts_dir, variant_dir)]
        existing_files = filter(isfile, default_vars_files)
        if existing_files:
            default_vars_file = existing_files[0]
            sticky_vars.files.append(default_vars_file)
            print "Variables file %s not found,\n  using defaults in %s" \
                  % (current_vars_file, default_vars_file)
        else:
            print "Error: cannot find variables file %s or " \
                  "default file(s) %s" \
                  % (current_vars_file, ' or '.join(default_vars_files))
            Exit(1)

    # Apply current variable settings to env
    sticky_vars.Update(env)

    help_texts["local_vars"] += \
        "Build variables for %s:\n" % variant_dir \
                 + sticky_vars.GenerateHelpText(env)

    # Process variable settings.

    if not have_fenv and env['USE_FENV']:
        print "Warning: <fenv.h> not available; " \
              "forcing USE_FENV to False in", variant_dir + "."
        env['USE_FENV'] = False

    if not env['USE_FENV']:
        print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
        print "         FP results may deviate slightly from other platforms."

    if env['EFENCE']:
        env.Append(LIBS=['efence'])

    if env['USE_KVM']:
        if not have_kvm:
            print "Warning: Can not enable KVM, host seems to lack KVM support"
            env['USE_KVM'] = False
        elif not have_posix_timers:
            print "Warning: Can not enable KVM, host seems to lack support " \
                "for POSIX timers"
            env['USE_KVM'] = False
        elif not is_isa_kvm_compatible(env['TARGET_ISA']):
            print "Info: KVM support disabled due to unsupported host and " \
                "target ISA combination"
            env['USE_KVM'] = False

    # Warn about missing optional functionality
    if env['USE_KVM']:
        if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
            print "Warning: perf_event headers lack support for the " \
                "exclude_host attribute. KVM instruction counts will " \
                "be inaccurate."

    # Save sticky variable settings back to current variables file
    sticky_vars.Save(current_vars_file, env)

    if env['USE_SSE2']:
        env.Append(CCFLAGS=['-msse2'])

    # The src/SConscript file sets up the build rules in 'env' according
    # to the configured variables.  It returns a list of environments,
    # one for each variant build (debug, opt, etc.)
    SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')

def pairwise(iterable):
    "s -> (s0,s1), (s1,s2), (s2, s3), ..."
    a, b = itertools.tee(iterable)
    b.next()
    return itertools.izip(a, b)
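
# e.g. pairwise(['arm', 'mips', 'x86']) yields ('arm', 'mips'), ('mips', 'x86')
# (the ISA names are purely illustrative).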

# Create false dependencies so SCons will parse ISAs, establish
# dependencies, and setup the build Environments serially. Either
# SCons (likely) and/or our SConscripts (possibly) cannot cope with -j
# greater than 1. It appears to be standard race condition stuff; it
# doesn't always fail, but usually, and the behaviors are different.
# Every time I tried to remove this, builds would fail in some
# creative new way. So, don't do that. You'll want to, though, because
# tests/SConscript takes a long time to make its Environments.
for t1, t2 in pairwise(sorted(all_isa_deps.iterkeys())):
    main.Depends('#%s-deps'     % t2, '#%s-deps'     % t1)
    main.Depends('#%s-environs' % t2, '#%s-environs' % t1)

# base help text
Help('''
Usage: scons [scons options] [build variables] [target(s)]

Extra scons options:
%(options)s

Global build variables:
%(global_vars)s

%(local_vars)s
''' % help_texts)
