# -*- mode:python -*-

# Copyright (c) 2013, 2015, 2016 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder.  You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2011 Advanced Micro Devices, Inc.
# Copyright (c) 2009 The Hewlett-Packard Development Company
# Copyright (c) 2004-2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
#          Nathan Binkert

###################################################
#
# SCons top-level build description (SConstruct) file.
#
# While in this directory ('gem5'), just type 'scons' to build the default
# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
# the optimized full-system version).
#
# You can build gem5 in a different directory as long as there is a
# 'build/<CONFIG>' somewhere along the target path.  The build system
# expects that all configs under the same build directory are being
# built for the same host system.
#
# Examples:
#
#   The following two commands are equivalent.  The '-u' option tells
#   scons to search up the directory tree for this SConstruct file.
#   % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
#   % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
#
#   The following two commands are equivalent and demonstrate building
#   in a directory outside of the source tree.  The '-C' option tells
#   scons to chdir to the specified directory to find this SConstruct
#   file.
#   % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
#   % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
#
# You can use 'scons -H' to print scons options.  If you're in this
# 'gem5' directory (or use -u or -C to tell scons where to find this
# file), you can use 'scons -h' to print all the gem5-specific build
# options as well.
#
###################################################

# Check for recent-enough Python and SCons versions.
try:
    # Really old versions of scons only take two arguments for the
    # function, so check once without the revision and once with it.
    # The first check catches anything older than 0.98, and the second
    # catches 0.98.0.
    EnsureSConsVersion(0, 98)
    EnsureSConsVersion(0, 98, 1)
except SystemExit, e:
    print """
For more details, see:
    http://gem5.org/Dependencies
"""
    raise

# We check the Python version early because python-config
# requires python 2.5
try:
    EnsurePythonVersion(2, 5)
except SystemExit, e:
    print """
You can use a non-default installation of the Python interpreter by
rearranging your PATH so that scons finds the non-default 'python' and
'python-config' first.

For more details, see:
    http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
"""
    raise

# Global Python includes
import itertools
import os
import re
import shutil
import subprocess
import sys

from os import mkdir, environ
from os.path import abspath, basename, dirname, expanduser, normpath
from os.path import exists,  isdir, isfile
from os.path import join as joinpath, split as splitpath

# SCons includes
import SCons
import SCons.Node

extra_python_paths = [
    Dir('src/python').srcnode().abspath, # gem5 includes
    Dir('ext/ply').srcnode().abspath, # ply is used by several files
    ]

sys.path[1:1] = extra_python_paths

from m5.util import compareVersions, readCommand
from m5.util.terminal import get_termcap

help_texts = {
    "options" : "",
    "global_vars" : "",
    "local_vars" : ""
}

Export("help_texts")


# There's a bug in scons: (1) by default, the help texts from
# AddOption() are supposed to be displayed when you type 'scons -h',
# and (2) you can override the help displayed by 'scons -h' using the
# Help() function, but these two features are incompatible: once
# you've overridden the help text using Help(), there's no way to get
# at the help texts from AddOption().  See:
#     http://scons.tigris.org/issues/show_bug.cgi?id=2356
#     http://scons.tigris.org/issues/show_bug.cgi?id=2611
# This hack lets us extract the help text from AddOption() and
# re-inject it via Help().  Ideally someday this bug will be fixed and
# we can just use AddOption directly.
def AddLocalOption(*args, **kwargs):
    col_width = 30

    help = "  " + ", ".join(args)
    if "help" in kwargs:
        length = len(help)
        if length >= col_width:
            help += "\n" + " " * col_width
        else:
            help += " " * (col_width - length)
        help += kwargs["help"]
    help_texts["options"] += help + "\n"

    AddOption(*args, **kwargs)

AddLocalOption('--colors', dest='use_colors', action='store_true',
               help="Add color to abbreviated scons output")
AddLocalOption('--no-colors', dest='use_colors', action='store_false',
               help="Don't add color to abbreviated scons output")
AddLocalOption('--with-cxx-config', dest='with_cxx_config',
               action='store_true',
               help="Build with support for C++-based configuration")
AddLocalOption('--default', dest='default', type='string', action='store',
               help='Override which build_opts file to use for defaults')
AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
               help='Disable style checking hooks')
AddLocalOption('--no-lto', dest='no_lto', action='store_true',
               help='Disable Link-Time Optimization for fast')
AddLocalOption('--update-ref', dest='update_ref', action='store_true',
               help='Update test reference outputs')
AddLocalOption('--verbose', dest='verbose', action='store_true',
               help='Print full tool command lines')
AddLocalOption('--without-python', dest='without_python',
               action='store_true',
               help='Build without Python configuration support')
AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
               action='store_true',
               help='Disable linking against tcmalloc')
AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
               help='Build with Undefined Behavior Sanitizer if available')
AddLocalOption('--with-asan', dest='with_asan', action='store_true',
               help='Build with Address Sanitizer if available')

termcap = get_termcap(GetOption('use_colors'))

########################################################################
#
# Set up the main build environment.
#
########################################################################

# export TERM so that clang reports errors in color
use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
                 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
                 'PYTHONPATH', 'RANLIB', 'SWIG', 'TERM' ])

use_prefixes = [
    "ASAN_",           # address sanitizer symbolizer path and settings
    "CCACHE_",         # ccache (caching compiler wrapper) configuration
    "CCC_",            # clang static analyzer configuration
    "DISTCC_",         # distcc (distributed compiler wrapper) configuration
    "INCLUDE_SERVER_", # distcc pump server settings
    "M5",              # M5 configuration (e.g., path to kernels)
    ]

use_env = {}
for key,val in sorted(os.environ.iteritems()):
    if key in use_vars or \
            any([key.startswith(prefix) for prefix in use_prefixes]):
        use_env[key] = val

# Tell scons to avoid implicit command dependencies, which avoids issues
# with the param wrappers being compiled twice (see
# http://scons.tigris.org/issues/show_bug.cgi?id=2811)
main = Environment(ENV=use_env, IMPLICIT_COMMAND_DEPENDENCIES=0)
main.Decider('MD5-timestamp')
main.root = Dir(".")         # The current directory (where this file lives).
main.srcdir = Dir("src")     # The source directory

main_dict_keys = main.Dictionary().keys()

# Check that we have a C/C++ compiler
if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
    print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
    Exit(1)

# Check that swig is present
if not 'SWIG' in main_dict_keys:
    print "swig is not installed (package swig on Ubuntu and RedHat)"
    Exit(1)

# Add the extra python paths to PYTHONPATH so they can be used by
# subprocesses as well
main.AppendENVPath('PYTHONPATH', extra_python_paths)

########################################################################
#
# Mercurial Stuff.
#
# If the gem5 directory is a mercurial repository, we should do some
# extra things.
#
########################################################################

hgdir = main.root.Dir(".hg")


style_message = """
You're missing the gem5 style hook, which automatically checks your code
against the gem5 style rules on %s.
This script will now install the hook in your %s.
Press enter to continue, or ctrl-c to abort: """

mercurial_style_message = style_message % ("hg commit and qrefresh commands",
                                           ".hg/hgrc file")
git_style_message = style_message % ("'git commit'",
                                     ".git/hooks/ directory")

mercurial_style_upgrade_message = """
Your Mercurial style hooks are not up-to-date. This script will now
try to automatically update them. A backup of your hgrc will be saved
in .hg/hgrc.old.
Press enter to continue, or ctrl-c to abort: """

mercurial_style_hook = """
# The following lines were automatically added by gem5/SConstruct
# to provide the gem5 style-checking hooks
[extensions]
hgstyle = %s/util/hgstyle.py

[hooks]
pretxncommit.style = python:hgstyle.check_style
pre-qrefresh.style = python:hgstyle.check_style
# End of SConstruct additions

""" % (main.root.abspath)

mercurial_lib_not_found = """
Mercurial libraries cannot be found, ignoring style hook.  If
you are a gem5 developer, please fix this and run the style
hook. It is important.
"""

# Check for style hook and prompt for installation if it's not there.
# Skip this if --ignore-style was specified, there's no interactive
# terminal to prompt, or no recognized revision control system can be
# found.
ignore_style = GetOption('ignore_style') or not sys.stdin.isatty()

# Try to wire up Mercurial to the style hooks
if not ignore_style and hgdir.exists():
    style_hook = True
    style_hooks = tuple()
    hgrc = hgdir.File('hgrc')
    hgrc_old = hgdir.File('hgrc.old')
    try:
        from mercurial import ui
        ui = ui.ui()
        ui.readconfig(hgrc.abspath)
        style_hooks = (ui.config('hooks', 'pretxncommit.style', None),
                       ui.config('hooks', 'pre-qrefresh.style', None))
        style_hook = all(style_hooks)
        style_extension = ui.config('extensions', 'style', None)
    except ImportError:
        print mercurial_lib_not_found

    if "python:style.check_style" in style_hooks:
        # Try to upgrade the style hooks
        print mercurial_style_upgrade_message
        # continue unless user does ctrl-c/ctrl-d etc.
        try:
            raw_input()
        except:
            print "Input exception, exiting scons.\n"
            sys.exit(1)
        shutil.copyfile(hgrc.abspath, hgrc_old.abspath)
        re_style_hook = re.compile(r"^([^=#]+)\.style\s*=\s*([^#\s]+).*")
        re_style_extension = re.compile("style\s*=\s*([^#\s]+).*")
        old, new = open(hgrc_old.abspath, 'r'), open(hgrc.abspath, 'w')
        for l in old:
            m_hook = re_style_hook.match(l)
            m_ext = re_style_extension.match(l)
            if m_hook:
                hook, check = m_hook.groups()
                if check != "python:style.check_style":
                    print "Warning: %s.style is using a non-default " \
                        "checker: %s" % (hook, check)
                if hook not in ("pretxncommit", "pre-qrefresh"):
                    print "Warning: Updating unknown style hook: %s" % hook

                l = "%s.style = python:hgstyle.check_style\n" % hook
            elif m_ext and m_ext.group(1) == style_extension:
                l = "hgstyle = %s/util/hgstyle.py\n" % main.root.abspath

            new.write(l)
    elif not style_hook:
        print mercurial_style_message,
        # continue unless user does ctrl-c/ctrl-d etc.
        try:
            raw_input()
        except:
            print "Input exception, exiting scons.\n"
            sys.exit(1)
        hgrc_path = '%s/.hg/hgrc' % main.root.abspath
        print "Adding style hook to", hgrc_path, "\n"
        try:
            with open(hgrc_path, 'a') as f:
                f.write(mercurial_style_hook)
        except:
            print "Error updating", hgrc_path
            sys.exit(1)

def install_git_style_hooks():
    try:
        gitdir = Dir(readCommand(
            ["git", "rev-parse", "--git-dir"]).strip("\n"))
    except Exception, e:
        print "Warning: Failed to find git repo directory: %s" % e
        return

    git_hooks = gitdir.Dir("hooks")
    git_pre_commit_hook = git_hooks.File("pre-commit")
    git_style_script = File("util/git-pre-commit.py")

    if git_pre_commit_hook.exists():
        return

    print git_style_message,
    try:
        raw_input()
    except:
        print "Input exception, exiting scons.\n"
        sys.exit(1)

    if not git_hooks.exists():
        mkdir(git_hooks.get_abspath())

    # Use a relative symlink if the hooks live in the source directory
    if git_pre_commit_hook.is_under(main.root):
        script_path = os.path.relpath(
            git_style_script.get_abspath(),
            git_pre_commit_hook.Dir(".").get_abspath())
    else:
        script_path = git_style_script.get_abspath()

    try:
        os.symlink(script_path, git_pre_commit_hook.get_abspath())
    except:
        print "Error updating git pre-commit hook"
        raise

# Try to wire up git to the style hooks
if not ignore_style and main.root.Entry(".git").exists():
    install_git_style_hooks()

###################################################
#
# Figure out which configurations to set up based on the path(s) of
# the target(s).
#
###################################################

# Find default configuration & binary.
Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))
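# For example (illustrative only), the default target can be overridden
# either by setting the environment variable before invoking scons:
#   % M5_DEFAULT_BINARY=build/ARM/gem5.opt scons
# or simply by naming an explicit target on the scons command line.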

# helper function: find last occurrence of element in list
def rfind(l, elt, offs = -1):
    for i in range(len(l)+offs, 0, -1):
        if l[i] == elt:
            return i
    raise ValueError, "element not found"

# Take a list of paths (or SCons Nodes) and return a list with all
# paths made absolute and ~-expanded.  Paths will be interpreted
# relative to the launch directory unless a different root is provided.
def makePathListAbsolute(path_list, root=GetLaunchDir()):
    return [abspath(joinpath(root, expanduser(str(p))))
            for p in path_list]
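# For example (illustrative only), when scons is launched from /work/gem5,
# makePathListAbsolute(['~/disks', 'build/ARM']) would return something like
# ['/home/<user>/disks', '/work/gem5/build/ARM'].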

# Each target must have 'build' in the interior of the path; the
# directory below this will determine the build parameters.  For
# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
# recognize that ALPHA_SE specifies the configuration because it
# follows 'build' in the build path.

# The funky assignment to "[:]" is needed to replace the list contents
# in place rather than reassign the symbol to a new list, which
# doesn't work (obviously!).
BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)

# Generate a list of the unique build roots and configs that the
# collected targets reference.
variant_paths = []
build_root = None
for t in BUILD_TARGETS:
    path_dirs = t.split('/')
    try:
        build_top = rfind(path_dirs, 'build', -2)
    except:
        print "Error: no non-leaf 'build' dir found on target path", t
        Exit(1)
    this_build_root = joinpath('/',*path_dirs[:build_top+1])
    if not build_root:
        build_root = this_build_root
    else:
        if this_build_root != build_root:
            print "Error: build targets not under same build root\n"\
                  "  %s\n  %s" % (build_root, this_build_root)
            Exit(1)
    variant_path = joinpath('/',*path_dirs[:build_top+2])
    if variant_path not in variant_paths:
        variant_paths.append(variant_path)

# Make sure build_root exists (might not if this is the first build there)
if not isdir(build_root):
    mkdir(build_root)
main['BUILDROOT'] = build_root

Export('main')

main.SConsignFile(joinpath(build_root, "sconsign"))

# Default duplicate option is to use hard links, but this messes up
# when you use emacs to edit a file in the target dir, as emacs moves
# file to file~ then copies to file, breaking the link.  Symbolic
# (soft) links work better.
main.SetOption('duplicate', 'soft-copy')

#
# Set up global sticky variables... these are common to an entire build
# tree (not specific to a particular build like ALPHA_SE)
#

global_vars_file = joinpath(build_root, 'variables.global')

global_vars = Variables(global_vars_file, args=ARGUMENTS)

global_vars.AddVariables(
    ('CC', 'C compiler', environ.get('CC', main['CC'])),
    ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
    ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
    ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
    ('BATCH', 'Use batch pool for build and tests', False),
    ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
    ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
    ('EXTRAS', 'Add extra directories to the compilation', '')
    )

# Update main environment with values from ARGUMENTS & global_vars_file
global_vars.Update(main)
help_texts["global_vars"] += global_vars.GenerateHelpText(main)

# Save sticky variable settings back to current variables file
global_vars.Save(global_vars_file, main)
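# For example (illustrative only), compiler choices given on the command
# line become sticky and are remembered in the global variables file:
#   % scons CC=clang CXX=clang++ build/ARM/gem5.opt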

# Parse EXTRAS variable to build list of all directories where we'll
# look for sources etc.  This list is exported as extras_dir_list.
base_dir = main.srcdir.abspath
if main['EXTRAS']:
    extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
else:
    extras_dir_list = []

Export('base_dir')
Export('extras_dir_list')
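# For example (illustrative only), EXTRAS takes a colon-separated list of
# additional source directories to compile in:
#   % scons EXTRAS=/path/to/extra1:/path/to/extra2 build/ARM/gem5.opt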

# the ext directory should be on the #includes path
main.Append(CPPPATH=[Dir('ext')])

def strip_build_path(path, env):
    path = str(path)
    variant_base = env['BUILDROOT'] + os.path.sep
    if path.startswith(variant_base):
        path = path[len(variant_base):]
    elif path.startswith('build/'):
        path = path[6:]
    return path

# Generate a string of the form:
#   common/path/prefix/src1, src2 -> tgt1, tgt2
# to print while building.
class Transform(object):
    # all specific color settings should be here and nowhere else
    tool_color = termcap.Normal
    pfx_color = termcap.Yellow
    srcs_color = termcap.Yellow + termcap.Bold
    arrow_color = termcap.Blue + termcap.Bold
    tgts_color = termcap.Yellow + termcap.Bold

    def __init__(self, tool, max_sources=99):
        self.format = self.tool_color + (" [%8s] " % tool) \
                      + self.pfx_color + "%s" \
                      + self.srcs_color + "%s" \
                      + self.arrow_color + " -> " \
                      + self.tgts_color + "%s" \
                      + termcap.Normal
        self.max_sources = max_sources

    def __call__(self, target, source, env, for_signature=None):
        # truncate source list according to max_sources param
        source = source[0:self.max_sources]
        def strip(f):
            return strip_build_path(str(f), env)
        if len(source) > 0:
            srcs = map(strip, source)
        else:
            srcs = ['']
        tgts = map(strip, target)
        # surprisingly, os.path.commonprefix is a dumb char-by-char string
        # operation that has nothing to do with paths.
        com_pfx = os.path.commonprefix(srcs + tgts)
        com_pfx_len = len(com_pfx)
        if com_pfx:
            # do some cleanup and sanity checking on common prefix
            if com_pfx[-1] == ".":
                # prefix matches all but file extension: ok
                # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
                com_pfx = com_pfx[0:-1]
            elif com_pfx[-1] == "/":
                # common prefix is directory path: OK
                pass
            else:
                src0_len = len(srcs[0])
                tgt0_len = len(tgts[0])
                if src0_len == com_pfx_len:
                    # source is a substring of target, OK
                    pass
                elif tgt0_len == com_pfx_len:
                    # target is a substring of source, need to back up to
                    # avoid empty string on RHS of arrow
                    sep_idx = com_pfx.rfind(".")
                    if sep_idx != -1:
                        com_pfx = com_pfx[0:sep_idx]
                    else:
                        com_pfx = ''
                elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
                    # still splitting at file extension: ok
                    pass
                else:
                    # probably a fluke; ignore it
                    com_pfx = ''
        # recalculate length in case com_pfx was modified
        com_pfx_len = len(com_pfx)
        def fmt(files):
            f = map(lambda s: s[com_pfx_len:], files)
            return ', '.join(f)
        return self.format % (com_pfx, fmt(srcs), fmt(tgts))

Export('Transform')
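# For example (illustrative only), the abbreviated output produced by
# Transform("CXX") for a single source would look roughly like:
#   [     CXX] sim/main.cc -> .o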

# enable the regression script to use the termcap
main['TERMCAP'] = termcap

if GetOption('verbose'):
    def MakeAction(action, string, *args, **kwargs):
        return Action(action, *args, **kwargs)
else:
    MakeAction = Action
    main['CCCOMSTR']        = Transform("CC")
    main['CXXCOMSTR']       = Transform("CXX")
    main['ASCOMSTR']        = Transform("AS")
    main['SWIGCOMSTR']      = Transform("SWIG")
    main['ARCOMSTR']        = Transform("AR", 0)
    main['LINKCOMSTR']      = Transform("LINK", 0)
    main['RANLIBCOMSTR']    = Transform("RANLIB", 0)
    main['M4COMSTR']        = Transform("M4")
    main['SHCCCOMSTR']      = Transform("SHCC")
    main['SHCXXCOMSTR']     = Transform("SHCXX")
Export('MakeAction')

# Initialize the Link-Time Optimization (LTO) flags
main['LTO_CCFLAGS'] = []
main['LTO_LDFLAGS'] = []

# According to the readme, tcmalloc works best if the compiler doesn't
# assume that we're using the builtin malloc and friends. These flags
# are compiler-specific, so we need to set them after we detect which
# compiler we're using.
main['TCMALLOC_CCFLAGS'] = []

CXX_version = readCommand([main['CXX'],'--version'], exception=False)
CXX_V = readCommand([main['CXX'],'-V'], exception=False)

main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
if main['GCC'] + main['CLANG'] > 1:
    print 'Error: How can we have two at the same time?'
    Exit(1)

# Set up default C++ compiler flags
if main['GCC'] or main['CLANG']:
    # As gcc and clang share many flags, do the common parts here
    main.Append(CCFLAGS=['-pipe'])
    main.Append(CCFLAGS=['-fno-strict-aliasing'])
    # Enable -Wall and -Wextra and then disable the few warnings that
    # we consistently violate
    main.Append(CCFLAGS=['-Wall', '-Wundef', '-Wextra',
                         '-Wno-sign-compare', '-Wno-unused-parameter'])
    # We always compile using C++11
    main.Append(CXXFLAGS=['-std=c++11'])
    if sys.platform.startswith('freebsd'):
        main.Append(CCFLAGS=['-I/usr/local/include'])
        main.Append(CXXFLAGS=['-I/usr/local/include'])
else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
               termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use a compiler other than GCC"
    print "       or clang, there appears to be something wrong with your"
    print "       environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to fix SConstruct and"
    print "       src/SConscript to support that compiler."
    Exit(1)

if main['GCC']:
    # Check for a supported version of gcc. >= 4.8 is chosen for its
    # level of c++11 support. See
    # http://gcc.gnu.org/projects/cxx0x.html for details.
    gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
    if compareVersions(gcc_version, "4.8") < 0:
        print 'Error: gcc version 4.8 or newer required.'
        print '       Installed version:', gcc_version
        Exit(1)

    main['GCC_VERSION'] = gcc_version

    # gcc from version 4.8 and above generates "rep; ret" instructions
    # to avoid performance penalties on certain AMD chips. Older
    # assemblers detect this as an error, "Error: expecting string
    # instruction after `rep'"
    as_version_raw = readCommand([main['AS'], '-v', '/dev/null'],
                                 exception=False).split()

    # version strings may contain extra distro-specific
    # qualifiers, so play it safe and keep only what comes before
    # the first hyphen
    as_version = as_version_raw[-1].split('-')[0] if as_version_raw else None

    if not as_version or compareVersions(as_version, "2.23") < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: This combination of gcc and binutils has' + \
            ' known incompatibilities.\n' + \
            '         If you encounter build problems, please update ' + \
            'binutils to 2.23.' + \
            termcap.Normal

    # Make sure we warn if the user has requested to compile with the
    # Undefined Behavior Sanitizer and this version of gcc does not
    # support it.
    if GetOption('with_ubsan') and \
            compareVersions(gcc_version, '4.9') < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: UBSan is only supported using gcc 4.9 and later.' + \
            termcap.Normal

    # Add the appropriate Link-Time Optimization (LTO) flags
    # unless LTO is explicitly turned off. Note that these flags
    # are only used by the fast target.
    if not GetOption('no_lto'):
        # Pass the LTO flag when compiling to produce GIMPLE
        # output; we merely create the flags here and only append
        # them later.
        main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

        # Use the same number of jobs for LTO as we are running
        # scons with.
        main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
                                  '-fno-builtin-realloc', '-fno-builtin-free'])

    # Make sure the suggest-override check for undeclared overrides is
    # reported as a warning rather than an error.
    if compareVersions(gcc_version, "5.0") > 0:
        main.Append(CCFLAGS=['-Wno-error=suggest-override'])

elif main['CLANG']:
    # Check for a supported version of clang, >= 3.1 is needed to
    # support similar features as gcc 4.8. See
    # http://clang.llvm.org/cxx_status.html for details
    clang_version_re = re.compile(".* version (\d+\.\d+)")
    clang_version_match = clang_version_re.search(CXX_version)
    if (clang_version_match):
        clang_version = clang_version_match.groups()[0]
        if compareVersions(clang_version, "3.1") < 0:
            print 'Error: clang version 3.1 or newer required.'
            print '       Installed version:', clang_version
            Exit(1)
    else:
        print 'Error: Unable to determine clang version.'
        Exit(1)

    # clang has a few additional warnings that we disable: extraneous
    # parentheses are allowed due to Ruby's printing of the AST, and
    # self-assignments are allowed as the generated CPU code relies on
    # them.
    main.Append(CCFLAGS=['-Wno-parentheses',
                         '-Wno-self-assign',
                         # Some versions of libstdc++ (4.8?) seem to
                         # use struct hash and class hash
                         # interchangeably.
                         '-Wno-mismatched-tags',
                         ])

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])

    # On Mac OS X/Darwin we need to also use libc++ (part of Xcode) as
    # opposed to libstdc++, as the latter is dated.
    if sys.platform == "darwin":
        main.Append(CXXFLAGS=['-stdlib=libc++'])
        main.Append(LIBS=['c++'])

    # On FreeBSD we need libthr.
    if sys.platform.startswith('freebsd'):
        main.Append(LIBS=['thr'])

else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
               termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use a compiler other than GCC"
    print "       or clang, there appears to be something wrong with your"
    print "       environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to fix SConstruct and"
    print "       src/SConscript to support that compiler."
    Exit(1)

# Set up common yacc/bison flags (needed for Ruby)
main['YACCFLAGS'] = '-d'
main['YACCHXXFILESUFFIX'] = '.hh'

# Do this after we save settings back, or else we'll tack on an
# extra 'qdo' every time we run scons.
if main['BATCH']:
    main['CC']     = main['BATCH_CMD'] + ' ' + main['CC']
    main['CXX']    = main['BATCH_CMD'] + ' ' + main['CXX']
    main['AS']     = main['BATCH_CMD'] + ' ' + main['AS']
    main['AR']     = main['BATCH_CMD'] + ' ' + main['AR']
    main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']
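# For example (illustrative only), with a batch pool available the tool
# invocations can be routed through the submission command:
#   % scons BATCH=True BATCH_CMD=qdo build/ARM/gem5.opt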

if sys.platform == 'cygwin':
    # cygwin has some header file issues...
    main.Append(CCFLAGS=["-Wno-uninitialized"])

# Check for the protobuf compiler
protoc_version = readCommand([main['PROTOC'], '--version'],
                             exception='').split()

# First two words should be "libprotoc x.y.z"
if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
    print termcap.Yellow + termcap.Bold + \
        'Warning: Protocol buffer compiler (protoc) not found.\n' + \
        '         Please install protobuf-compiler for tracing support.' + \
        termcap.Normal
    main['PROTOC'] = False
else:
    # Based on the availability of the compress stream wrappers,
    # require 2.1.0
    min_protoc_version = '2.1.0'
    if compareVersions(protoc_version[1], min_protoc_version) < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: protoc version', min_protoc_version, \
            'or newer required.\n' + \
            '         Installed version:', protoc_version[1], \
            termcap.Normal
        main['PROTOC'] = False
    else:
        # Attempt to determine the appropriate include path and
        # library path using pkg-config, which means we also need to
        # check for pkg-config. Note that it is possible to use
        # protobuf without the involvement of pkg-config. Later on we
        # do a library configuration check, and at that point the test
        # will fail if libprotobuf cannot be found.
        if readCommand(['pkg-config', '--version'], exception=''):
            try:
                # Attempt to establish what linking flags to add for protobuf
                # using pkg-config
                main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
            except:
                print termcap.Yellow + termcap.Bold + \
                    'Warning: pkg-config could not get protobuf flags.' + \
                    termcap.Normal

# Check for SWIG
if not main.has_key('SWIG'):
    print 'Error: SWIG utility not found.'
    print '       Please install (see http://www.swig.org) and retry.'
    Exit(1)

# Check for appropriate SWIG version
swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
# First 3 words should be "SWIG Version x.y.z"
if len(swig_version) < 3 or \
        swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
    print 'Error determining SWIG version.'
    Exit(1)

min_swig_version = '2.0.4'
if compareVersions(swig_version[2], min_swig_version) < 0:
    print 'Error: SWIG version', min_swig_version, 'or newer required.'
    print '       Installed version:', swig_version[2]
    Exit(1)

# Check for known incompatibilities. The standard library shipped with
# gcc >= 4.9 does not play well with swig versions prior to 3.0
if main['GCC'] and compareVersions(gcc_version, '4.9') >= 0 and \
        compareVersions(swig_version[2], '3.0') < 0:
    print termcap.Yellow + termcap.Bold + \
        'Warning: This combination of gcc and swig has' + \
        ' known incompatibilities.\n' + \
        '         If you encounter build problems, please update ' + \
        'swig to 3.0 or later.' + \
        termcap.Normal

# Set up SWIG flags & scanner
swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
main.Append(SWIGFLAGS=swig_flags)

# Check for 'timeout' from GNU coreutils. If present, regressions will
# be run with a time limit. We require version 8.13 since we rely on
# support for the '--foreground' option.
if sys.platform.startswith('freebsd'):
    timeout_lines = readCommand(['gtimeout', '--version'],
                                exception='').splitlines()
else:
    timeout_lines = readCommand(['timeout', '--version'],
                                exception='').splitlines()
# Get the first line and tokenize it
timeout_version = timeout_lines[0].split() if timeout_lines else []
main['TIMEOUT'] =  timeout_version and \
    compareVersions(timeout_version[-1], '8.13') >= 0

# filter out all existing swig scanners; they mess up the dependency
# stuff for some reason
scanners = []
for scanner in main['SCANNERS']:
    skeys = scanner.skeys
    if skeys == '.i':
        continue

    if isinstance(skeys, (list, tuple)) and '.i' in skeys:
        continue

    scanners.append(scanner)

# add the new swig scanner that we like better
from SCons.Scanner import ClassicCPP as CPPScanner
swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))

# replace the scanners list that has what we want
main['SCANNERS'] = scanners

# Add a custom Check function to test for structure members.
def CheckMember(context, include, decl, member, include_quotes="<>"):
    context.Message("Checking for member %s in %s..." %
                    (member, decl))
    text = """
#include %(header)s
int main(){
  %(decl)s test;
  (void)test.%(member)s;
  return 0;
};
""" % { "header" : include_quotes[0] + include + include_quotes[1],
        "decl" : decl,
        "member" : member,
        }

    ret = context.TryCompile(text, extension=".cc")
    context.Result(ret)
    return ret

# Platform-specific configuration.  Note again that we assume that all
# builds under a given build root run on the same host platform.
conf = Configure(main,
                 conf_dir = joinpath(build_root, '.scons_config'),
                 log_file = joinpath(build_root, 'scons_config.log'),
                 custom_tests = {
        'CheckMember' : CheckMember,
        })

# Check if we should compile a 64-bit binary on Mac OS X/Darwin
try:
    import platform
    uname = platform.uname()
    if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
        if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
            main.Append(CCFLAGS=['-arch', 'x86_64'])
            main.Append(CFLAGS=['-arch', 'x86_64'])
            main.Append(LINKFLAGS=['-arch', 'x86_64'])
            main.Append(ASFLAGS=['-arch', 'x86_64'])
except:
    pass

# Recent versions of scons substitute a "Null" object for Configure()
# when configuration isn't necessary, e.g., if the "--help" option is
# present.  Unfortunately this Null object always returns false,
# breaking all our configuration checks.  We replace it with our own
# more optimistic null object that returns True instead.
if not conf:
    def NullCheck(*args, **kwargs):
        return True

    class NullConf:
        def __init__(self, env):
            self.env = env
        def Finish(self):
            return self.env
        def __getattr__(self, mname):
            return NullCheck

    conf = NullConf(main)

# Cache build files in the supplied directory.
if main['M5_BUILD_CACHE']:
    print 'Using build cache located at', main['M5_BUILD_CACHE']
    CacheDir(main['M5_BUILD_CACHE'])
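# For example (illustrative only), a shared object cache can be enabled with:
#   % scons M5_BUILD_CACHE=/path/to/build_cache build/ARM/gem5.opt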

if not GetOption('without_python'):
    # Find Python include and library directories for embedding the
    # interpreter. We rely on python-config to resolve the appropriate
    # includes and linker flags. ParseConfig does not seem to understand
    # the more exotic linker flags such as -Xlinker and -export-dynamic so
    # we add them explicitly below. If you want to link in an alternate
    # version of python, see above for instructions on how to invoke
    # scons with the appropriate PATH set.
    #
    # First we check if python2-config exists, else we use python-config
    python_config = readCommand(['which', 'python2-config'],
                                exception='').strip()
    if not os.path.exists(python_config):
        python_config = readCommand(['which', 'python-config'],
                                    exception='').strip()
    py_includes = readCommand([python_config, '--includes'],
                              exception='').split()
    # Strip the -I from the include folders before adding them to the
    # CPPPATH
    main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))

    # Read the linker flags and split them into libraries and other link
    # flags. The libraries are added later through the call to CheckLib.
    py_ld_flags = readCommand([python_config, '--ldflags'],
        exception='').split()
    py_libs = []
    for lib in py_ld_flags:
         if not lib.startswith('-l'):
             main.Append(LINKFLAGS=[lib])
         else:
             lib = lib[2:]
             if lib not in py_libs:
                 py_libs.append(lib)

    # verify that this stuff works
    if not conf.CheckHeader('Python.h', '<>'):
        print "Error: can't find Python.h header in", py_includes
        print "Install Python headers (package python-dev on Ubuntu and RedHat)"
        Exit(1)

    for lib in py_libs:
        if not conf.CheckLib(lib):
            print "Error: can't find library %s required by python" % lib
            Exit(1)

# On Solaris you need to use libsocket for socket ops
if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
   if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
       print "Can't find library with socket calls (e.g. accept())"
       Exit(1)

# Check for zlib.  If the check passes, libz will be automatically
# added to the LIBS environment variable.
if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
    print 'Error: did not find needed zlib compression library '\
          'and/or zlib.h header file.'
    print '       Please install zlib and try again.'
    Exit(1)

# If we have the protobuf compiler, also make sure we have the
# development libraries. If the check passes, libprotobuf will be
# automatically added to the LIBS environment variable. After
# this, we can use the HAVE_PROTOBUF flag to determine if we have
# got both protoc and libprotobuf available.
main['HAVE_PROTOBUF'] = main['PROTOC'] and \
    conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
                            'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')

# If we have the compiler but not the library, print another warning.
if main['PROTOC'] and not main['HAVE_PROTOBUF']:
    print termcap.Yellow + termcap.Bold + \
        'Warning: did not find protocol buffer library and/or headers.\n' + \
    '       Please install libprotobuf-dev for tracing support.' + \
    termcap.Normal

# Check for librt.
have_posix_clock = \
    conf.CheckLibWithHeader(None, 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);') or \
    conf.CheckLibWithHeader('rt', 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);')

have_posix_timers = \
    conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
                            'timer_create(CLOCK_MONOTONIC, NULL, NULL);')

if not GetOption('without_tcmalloc'):
    if conf.CheckLib('tcmalloc'):
        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
    elif conf.CheckLib('tcmalloc_minimal'):
        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
    else:
        print termcap.Yellow + termcap.Bold + \
              "You can get a 12% performance improvement by "\
              "installing tcmalloc (libgoogle-perftools-dev package "\
              "on Ubuntu or RedHat)." + termcap.Normal


# Detect back trace implementations. The last implementation in the
# list will be used by default.
backtrace_impls = [ "none" ]

if conf.CheckLibWithHeader(None, 'execinfo.h', 'C',
                           'backtrace_symbols_fd((void*)0, 0, 0);'):
    backtrace_impls.append("glibc")
elif conf.CheckLibWithHeader('execinfo', 'execinfo.h', 'C',
                           'backtrace_symbols_fd((void*)0, 0, 0);'):
    # NetBSD and FreeBSD need libexecinfo.
    backtrace_impls.append("glibc")
    main.Append(LIBS=['execinfo'])

if backtrace_impls[-1] == "none":
    default_backtrace_impl = "none"
    print termcap.Yellow + termcap.Bold + \
        "No suitable back trace implementation found." + \
        termcap.Normal

if not have_posix_clock:
    print "Can't find library for POSIX clocks."

# Check for <fenv.h> (C99 FP environment control)
have_fenv = conf.CheckHeader('fenv.h', '<>')
if not have_fenv:
    print "Warning: Header file <fenv.h> not found."
    print "         This host has no IEEE FP rounding mode control."

# Check if we should enable KVM-based hardware virtualization. The API
# we rely on has existed since version 2.6.36 of the kernel, but somehow
# the KVM_API_VERSION does not reflect the change. We test for one of
# the types as a fallback.
have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
if not have_kvm:
    print "Info: Compatible header file <linux/kvm.h> not found, " \
        "disabling KVM support."

# x86 needs support for xsave. We test for the structure here since we
# won't be able to run new tests by the time we know which ISA we're
# targeting.
have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
                                    '#include <linux/kvm.h>') != 0

# Check if the requested target ISA is compatible with the host
def is_isa_kvm_compatible(isa):
    try:
        import platform
        host_isa = platform.machine()
    except:
        print "Warning: Failed to determine host ISA."
        return False

    if not have_posix_timers:
        print "Warning: Cannot enable KVM, host seems to lack support " \
            "for POSIX timers"
        return False

    if isa == "arm":
        return host_isa in ( "armv7l", "aarch64" )
    elif isa == "x86":
        if host_isa != "x86_64":
            return False

        if not have_kvm_xsave:
            print "KVM on x86 requires xsave support in kernel headers."
            return False

        return True
    else:
        return False


# Check if the exclude_host attribute is available. We want this to
# get accurate instruction counts in KVM.
main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
    'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')


######################################################################
#
# Finish the configuration
#
main = conf.Finish()

######################################################################
#
# Collect all non-global variables
#

# Define the universe of supported ISAs
all_isa_list = [ ]
all_gpu_isa_list = [ ]
Export('all_isa_list')
Export('all_gpu_isa_list')

class CpuModel(object):
    '''The CpuModel class encapsulates everything the ISA parser needs to
    know about a particular CPU model.'''

    # Dict of available CPU model objects.  Accessible as CpuModel.dict.
    dict = {}

    # Constructor.  Automatically adds models to CpuModel.dict.
    def __init__(self, name, default=False):
        self.name = name           # name of model

        # This cpu is enabled by default
        self.default = default

        # Add self to dict
        if name in CpuModel.dict:
            raise AttributeError, "CpuModel '%s' already registered" % name
        CpuModel.dict[name] = self

Export('CpuModel')

# Sticky variables get saved in the variables file so they persist from
# one invocation to the next (unless overridden, in which case the new
# value becomes sticky).
sticky_vars = Variables(args=ARGUMENTS)
Export('sticky_vars')

# Sticky variables that should be exported
export_vars = []
Export('export_vars')

# For Ruby
all_protocols = []
Export('all_protocols')
protocol_dirs = []
Export('protocol_dirs')
slicc_includes = []
Export('slicc_includes')

# Walk the tree and execute all SConsopts scripts that will add to the
# above variables
if GetOption('verbose'):
    print "Reading SConsopts"
for bdir in [ base_dir ] + extras_dir_list:
    if not isdir(bdir):
        print "Error: directory '%s' does not exist" % bdir
        Exit(1)
    for root, dirs, files in os.walk(bdir):
        if 'SConsopts' in files:
            if GetOption('verbose'):
                print "Reading", joinpath(root, 'SConsopts')
            SConscript(joinpath(root, 'SConsopts'))

all_isa_list.sort()
all_gpu_isa_list.sort()

sticky_vars.AddVariables(
    EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
    EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list),
    ListVariable('CPU_MODELS', 'CPU models',
                 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
                 sorted(CpuModel.dict.keys())),
    BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
                 False),
    BoolVariable('SS_COMPATIBLE_FP',
                 'Make floating-point results compatible with SimpleScalar',
                 False),
    BoolVariable('USE_SSE2',
                 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
                 False),
    BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
    BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
    BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability', False),
    BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models', have_kvm),
    BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
    EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
                  all_protocols),
    EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
                 backtrace_impls[-1], backtrace_impls)
    )
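# For example (illustrative only), these per-build sticky variables can be
# set directly on the scons command line:
#   % scons build/ARM/gem5.opt CPU_MODELS=AtomicSimpleCPU,O3CPU USE_KVM=False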

# These variables get exported to #defines in config/*.hh (see src/SConscript).
export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
                'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'PROTOCOL',
                'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST']

###################################################
#
# Define a SCons builder for configuration flag headers.
#
###################################################

# This function generates a config header file that #defines the
# variable symbol to the current variable setting (0 or 1).  The source
# operands are the name of the variable and a Value node containing the
# value of the variable.
def build_config_file(target, source, env):
    (variable, value) = [s.get_contents() for s in source]
    f = file(str(target[0]), 'w')
    print >> f, '#define', variable, value
    f.close()
    return None

# Combine the two functions into a scons Action object.
config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))

# The emitter munges the source & target node lists to reflect what
# we're really doing.
def config_emitter(target, source, env):
    # extract variable name from Builder arg
    variable = str(target[0])
    # True target is config header file
    target = joinpath('config', variable.lower() + '.hh')
    val = env[variable]
    if isinstance(val, bool):
        # Force value to 0/1
        val = int(val)
    elif isinstance(val, str):
        val = '"' + val + '"'

    # Sources are variable name & value (packaged in SCons Value nodes)
    return ([target], [Value(variable), Value(val)])

config_builder = Builder(emitter = config_emitter, action = config_action)

main.Append(BUILDERS = { 'ConfigFile' : config_builder })
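# For example (illustrative only), applying the ConfigFile builder to the
# variable 'USE_KVM' when USE_KVM is True yields a config/use_kvm.hh in the
# variant directory containing the single line:
#   #define USE_KVM 1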

# libelf build is shared across all configs in the build root.
main.SConscript('ext/libelf/SConscript',
                variant_dir = joinpath(build_root, 'libelf'))

# iostream3 build is shared across all configs in the build root.
main.SConscript('ext/iostream3/SConscript',
                variant_dir = joinpath(build_root, 'iostream3'))

# libfdt build is shared across all configs in the build root.
main.SConscript('ext/libfdt/SConscript',
                variant_dir = joinpath(build_root, 'libfdt'))

# fputils build is shared across all configs in the build root.
main.SConscript('ext/fputils/SConscript',
                variant_dir = joinpath(build_root, 'fputils'))

# DRAMSim2 build is shared across all configs in the build root.
main.SConscript('ext/dramsim2/SConscript',
                variant_dir = joinpath(build_root, 'dramsim2'))

# DRAMPower build is shared across all configs in the build root.
main.SConscript('ext/drampower/SConscript',
                variant_dir = joinpath(build_root, 'drampower'))

# nomali build is shared across all configs in the build root.
main.SConscript('ext/nomali/SConscript',
                variant_dir = joinpath(build_root, 'nomali'))
1339
1340###################################################
1341#
1342# This function is used to set up a directory with switching headers
1343#
1344###################################################

main['ALL_ISA_LIST'] = all_isa_list
main['ALL_GPU_ISA_LIST'] = all_gpu_isa_list
all_isa_deps = {}
def make_switching_dir(dname, switch_headers, env):
    # Generate the header.  target[0] is the full path of the output
    # header to generate.  'source' is a dummy variable, since we get
    # the ISA from env['TARGET_ISA'].
    def gen_switch_hdr(target, source, env):
        fname = str(target[0])
        isa = env['TARGET_ISA'].lower()
        try:
            f = open(fname, 'w')
            print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
            f.close()
        except IOError:
            print "Failed to create %s" % fname
            raise

    # Build SCons Action object. 'varlist' specifies env vars that this
    # action depends on; when env['ALL_ISA_LIST'] changes these actions
    # should get re-executed.
    switch_hdr_action = MakeAction(gen_switch_hdr,
                          Transform("GENERATE"), varlist=['ALL_ISA_LIST'])

    # Instantiate actions for each header
    for hdr in switch_headers:
        env.Command(hdr, [], switch_hdr_action)

    isa_target = Dir('.').up().name.lower().replace('_', '-')
    env['PHONY_BASE'] = '#'+isa_target
    all_isa_deps[isa_target] = None

Export('make_switching_dir')
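
# Sketch of the generated stub (hypothetical names): with TARGET_ISA
# set to 'arm' and a switch header 'arch/decoder.hh', gen_switch_hdr
# writes a one-line forwarding header of the form
#
#     #include "arch/arm/decoder.hh"
#
# so ISA-agnostic code can include "arch/decoder.hh" and get the
# implementation for the configured ISA.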

def make_gpu_switching_dir(dname, switch_headers, env):
    # Generate the header.  target[0] is the full path of the output
    # header to generate.  'source' is a dummy variable, since we get
    # the GPU ISA from env['TARGET_GPU_ISA'].
    def gen_switch_hdr(target, source, env):
        fname = str(target[0])

        isa = env['TARGET_GPU_ISA'].lower()

        try:
            f = open(fname, 'w')
            print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
            f.close()
        except IOError:
            print "Failed to create %s" % fname
            raise

    # Build SCons Action object. 'varlist' specifies env vars that this
    # action depends on; when env['ALL_GPU_ISA_LIST'] changes these
    # actions should get re-executed.
    switch_hdr_action = MakeAction(gen_switch_hdr,
                          Transform("GENERATE"), varlist=['ALL_GPU_ISA_LIST'])

    # Instantiate actions for each header
    for hdr in switch_headers:
        env.Command(hdr, [], switch_hdr_action)

Export('make_gpu_switching_dir')

# all-isas -> all-deps -> all-environs -> all-targets
main.Alias('#all-isas', [])
main.Alias('#all-deps', '#all-isas')

# Dummy target to ensure all environments are created before telling
# SCons what to actually make (the command line arguments).  We attach
# them to the dependence graph after the environments are complete.
ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy for the closure below.
def environsComplete(target, source, env):
    for t in ORIG_BUILD_TARGETS:
        main.Depends('#all-targets', t)

# Each build/* switching_dir attaches its *-environs target to #all-environs.
main.Append(BUILDERS = {'CompleteEnvirons' :
                        Builder(action=MakeAction(environsComplete, None))})
main.CompleteEnvirons('#all-environs', [])

def doNothing(**ignored): pass
main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})

# The final target to which all the original targets ultimately get attached.
main.Dummy('#all-targets', '#all-environs')
BUILD_TARGETS[:] = ['#all-targets']
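
# Rough shape of the phony-target chain (assuming, say, the user asked
# for build/ARM/gem5.opt on the command line):
#
#     #all-isas -> #all-deps -> #all-environs -> #all-targets
#
# environsComplete() re-attaches the original request (here,
# build/ARM/gem5.opt) as a dependency of #all-targets, so real targets
# are only considered once every variant environment exists.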

###################################################
#
# Define build environments for selected configurations.
#
###################################################

for variant_path in variant_paths:
    if not GetOption('silent'):
        print "Building in", variant_path

    # Make a copy of the build-root environment to use for this config.
    env = main.Clone()
    env['BUILDDIR'] = variant_path

    # variant_dir is the tail component of the build path and is used
    # to determine the build parameters (e.g., 'ALPHA_SE')
    (build_root, variant_dir) = splitpath(variant_path)

    # Set env variables according to the build directory config.
    sticky_vars.files = []
    # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
    # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
    # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
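    # For example (hypothetical variant name): settings for build/ARM
    # end up in build/variables/ARM, so removing build/ARM entirely
    # leaves the saved variables intact.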
    current_vars_file = joinpath(build_root, 'variables', variant_dir)
    if isfile(current_vars_file):
        sticky_vars.files.append(current_vars_file)
        if not GetOption('silent'):
            print "Using saved variables file %s" % current_vars_file
    else:
        # Build dir-specific variables file doesn't exist.

        # Make sure the directory is there so we can create the
        # variables file later.
        opt_dir = dirname(current_vars_file)
        if not isdir(opt_dir):
            mkdir(opt_dir)

        # Get default build variables from source tree.  Variables are
        # normally determined by name of $VARIANT_DIR, but can be
        # overridden by '--default=' arg on command line.
        default = GetOption('default')
        opts_dir = joinpath(main.root.abspath, 'build_opts')
        if default:
            default_vars_files = [joinpath(build_root, 'variables', default),
                                  joinpath(opts_dir, default)]
        else:
            default_vars_files = [joinpath(opts_dir, variant_dir)]
        existing_files = filter(isfile, default_vars_files)
        if existing_files:
            default_vars_file = existing_files[0]
            sticky_vars.files.append(default_vars_file)
            print "Variables file %s not found,\n  using defaults in %s" \
                  % (current_vars_file, default_vars_file)
        else:
            print "Error: cannot find variables file %s or " \
                  "default file(s) %s" \
                  % (current_vars_file, ' or '.join(default_vars_files))
            Exit(1)

    # Apply current variable settings to env
    sticky_vars.Update(env)

    help_texts["local_vars"] += \
        "Build variables for %s:\n" % variant_dir \
                 + sticky_vars.GenerateHelpText(env)

    # Process variable settings.

    if not have_fenv and env['USE_FENV']:
        print "Warning: <fenv.h> not available; " \
              "forcing USE_FENV to False in", variant_dir + "."
        env['USE_FENV'] = False

    if not env['USE_FENV']:
        print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
        print "         FP results may deviate slightly from other platforms."

    if env['EFENCE']:
        env.Append(LIBS=['efence'])

    if env['USE_KVM']:
        if not have_kvm:
            print "Warning: Cannot enable KVM; host seems to lack KVM support"
            env['USE_KVM'] = False
        elif not is_isa_kvm_compatible(env['TARGET_ISA']):
            print "Info: KVM support disabled due to unsupported host and " \
                "target ISA combination"
            env['USE_KVM'] = False

    if env['BUILD_GPU']:
        env.Append(CPPDEFINES=['BUILD_GPU'])

    # Warn about missing optional functionality
    if env['USE_KVM']:
        if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
            print "Warning: perf_event headers lack support for the " \
                "exclude_host attribute. KVM instruction counts will " \
                "be inaccurate."

    # Save sticky variable settings back to current variables file
    sticky_vars.Save(current_vars_file, env)

    if env['USE_SSE2']:
        env.Append(CCFLAGS=['-msse2'])

    # The src/SConscript file sets up the build rules in 'env' according
    # to the configured variables.  It returns a list of environments,
    # one for each variant build (debug, opt, etc.)
    SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')

def pairwise(iterable):
    "s -> (s0,s1), (s1,s2), (s2,s3), ..."
    a, b = itertools.tee(iterable)
    b.next()
    return itertools.izip(a, b)

# Create false dependencies so SCons will parse ISAs, establish
# dependencies, and set up the build Environments serially. Either
# SCons (likely) and/or our SConscripts (possibly) cannot cope with -j
# greater than 1. It appears to be standard race condition stuff; it
# doesn't always fail, but it usually does, and the failure modes vary.
# Every time I tried to remove this, builds would fail in some creative
# new way. So, don't do that. You'll want to, though, because
# tests/SConscript takes a long time to make its Environments.
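# For example (hypothetical ISA names): if all_isa_deps has keys 'arm'
# and 'x86', the loop below adds Depends('#x86-deps', '#arm-deps') and
# Depends('#x86-environs', '#arm-environs'), serializing the per-ISA
# setup.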
for t1, t2 in pairwise(sorted(all_isa_deps.iterkeys())):
    main.Depends('#%s-deps'     % t2, '#%s-deps'     % t1)
    main.Depends('#%s-environs' % t2, '#%s-environs' % t1)

# base help text
Help('''
Usage: scons [scons options] [build variables] [target(s)]

Extra scons options:
%(options)s

Global build variables:
%(global_vars)s

%(local_vars)s
''' % help_texts)
