# -*- mode:python -*-

# Copyright (c) 2013, 2015, 2016 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder.  You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2011 Advanced Micro Devices, Inc.
# Copyright (c) 2009 The Hewlett-Packard Development Company
# Copyright (c) 2004-2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
#          Nathan Binkert

###################################################
#
# SCons top-level build description (SConstruct) file.
#
# While in this directory ('gem5'), just type 'scons' to build the default
# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
# the optimized full-system version).
#
# You can build gem5 in a different directory as long as there is a
# 'build/<CONFIG>' somewhere along the target path.  The build system
# expects that all configs under the same build directory are being
# built for the same host system.
#
# Examples:
#
#   The following two commands are equivalent.  The '-u' option tells
#   scons to search up the directory tree for this SConstruct file.
#   % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
#   % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
#
#   The following two commands are equivalent and demonstrate building
#   in a directory outside of the source tree.  The '-C' option tells
#   scons to chdir to the specified directory to find this SConstruct
#   file.
#   % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
#   % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
#
# You can use 'scons -H' to print scons options.  If you're in this
# 'gem5' directory (or use -u or -C to tell scons where to find this
# file), you can use 'scons -h' to print all the gem5-specific build
# options as well.
#
###################################################

# Check for recent-enough Python and SCons versions.
try:
    # Really old versions of scons only take two options for the
    # function, so check once without the revision and once with the
    # revision; the first check will fail for anything other than
    # 0.98, and the second will fail for 0.98.0.
    EnsureSConsVersion(0, 98)
    EnsureSConsVersion(0, 98, 1)
except SystemExit, e:
    print """
For more details, see:
    http://gem5.org/Dependencies
"""
    raise

# We ensure the python version early because python-config
# requires python 2.5
try:
    EnsurePythonVersion(2, 5)
except SystemExit, e:
    print """
You can use a non-default installation of the Python interpreter by
rearranging your PATH so that scons finds the non-default 'python' and
'python-config' first.

For more details, see:
    http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
"""
    raise

# Global Python includes
import itertools
import os
import re
import shutil
import subprocess
import sys

from os import mkdir, environ
from os.path import abspath, basename, dirname, expanduser, normpath
from os.path import exists, isdir, isfile
from os.path import join as joinpath, split as splitpath

# SCons includes
import SCons
import SCons.Node

extra_python_paths = [
    Dir('src/python').srcnode().abspath, # gem5 includes
    Dir('ext/ply').srcnode().abspath, # ply is used by several files
    ]

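# These paths are inserted near the front of sys.path so that the bundled
# m5 utility code and ply are found ahead of any system-wide copies.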
sys.path[1:1] = extra_python_paths

from m5.util import compareVersions, readCommand
from m5.util.terminal import get_termcap

help_texts = {
    "options" : "",
    "global_vars" : "",
    "local_vars" : ""
}

Export("help_texts")


# There's a bug in scons: (1) by default, the help texts from
# AddOption() are supposed to be displayed when you type 'scons -h',
# and (2) you can override the help displayed by 'scons -h' using the
# Help() function, but these two features are incompatible: once
# you've overridden the help text using Help(), there's no way to get
# at the help texts from AddOption().  See:
#     http://scons.tigris.org/issues/show_bug.cgi?id=2356
#     http://scons.tigris.org/issues/show_bug.cgi?id=2611
# This hack lets us extract the help text from AddOption() and
# re-inject it via Help().  Ideally someday this bug will be fixed and
# we can just use AddOption directly.
def AddLocalOption(*args, **kwargs):
    col_width = 30

    help = "  " + ", ".join(args)
    if "help" in kwargs:
        length = len(help)
        if length >= col_width:
            help += "\n" + " " * col_width
        else:
            help += " " * (col_width - length)
        help += kwargs["help"]
    help_texts["options"] += help + "\n"

    AddOption(*args, **kwargs)

AddLocalOption('--colors', dest='use_colors', action='store_true',
               help="Add color to abbreviated scons output")
AddLocalOption('--no-colors', dest='use_colors', action='store_false',
               help="Don't add color to abbreviated scons output")
AddLocalOption('--with-cxx-config', dest='with_cxx_config',
               action='store_true',
               help="Build with support for C++-based configuration")
AddLocalOption('--default', dest='default', type='string', action='store',
               help='Override which build_opts file to use for defaults')
AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
               help='Disable style checking hooks')
AddLocalOption('--no-lto', dest='no_lto', action='store_true',
               help='Disable Link-Time Optimization for fast')
AddLocalOption('--update-ref', dest='update_ref', action='store_true',
               help='Update test reference outputs')
AddLocalOption('--verbose', dest='verbose', action='store_true',
               help='Print full tool command lines')
AddLocalOption('--without-python', dest='without_python',
               action='store_true',
               help='Build without Python configuration support')
AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
               action='store_true',
               help='Disable linking against tcmalloc')
AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
               help='Build with Undefined Behavior Sanitizer if available')
AddLocalOption('--with-asan', dest='with_asan', action='store_true',
               help='Build with Address Sanitizer if available')
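# Example invocation using the options above (illustrative):
#   scons --colors --verbose build/ALPHA/gem5.opt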

termcap = get_termcap(GetOption('use_colors'))

########################################################################
#
# Set up the main build environment.
#
########################################################################

# export TERM so that clang reports errors in color
use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
                 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
                 'PYTHONPATH', 'RANLIB', 'SWIG', 'TERM' ])

use_prefixes = [
    "ASAN_",           # address sanitizer symbolizer path and settings
    "CCACHE_",         # ccache (caching compiler wrapper) configuration
    "CCC_",            # clang static analyzer configuration
    "DISTCC_",         # distcc (distributed compiler wrapper) configuration
    "INCLUDE_SERVER_", # distcc pump server settings
    "M5",              # M5 configuration (e.g., path to kernels)
    ]

use_env = {}
for key,val in sorted(os.environ.iteritems()):
    if key in use_vars or \
            any([key.startswith(prefix) for prefix in use_prefixes]):
        use_env[key] = val

# Tell scons to avoid implicit command dependencies to avoid issues
# with the param wrappers being compiled twice (see
# http://scons.tigris.org/issues/show_bug.cgi?id=2811)
main = Environment(ENV=use_env, IMPLICIT_COMMAND_DEPENDENCIES=0)
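# The 'MD5-timestamp' decider rebuilds based on content (MD5) signatures,
# but only rescans a dependency's contents when its timestamp has changed.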
main.Decider('MD5-timestamp')
main.root = Dir(".")         # The current directory (where this file lives).
main.srcdir = Dir("src")     # The source directory

main_dict_keys = main.Dictionary().keys()

# Check that we have a C/C++ compiler
if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
    print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
    Exit(1)

# Check that swig is present
if not 'SWIG' in main_dict_keys:
    print "swig is not installed (package swig on Ubuntu and RedHat)"
    Exit(1)

# add useful python code to PYTHONPATH so it can be used by subprocesses
# as well
main.AppendENVPath('PYTHONPATH', extra_python_paths)

########################################################################
#
# Mercurial Stuff.
#
# If the gem5 directory is a mercurial repository, we should do some
# extra things.
#
########################################################################

hgdir = main.root.Dir(".hg")


style_message = """
You're missing the gem5 style hook, which automatically checks your code
against the gem5 style rules on %s.
This script will now install the hook in your %s.
Press enter to continue, or ctrl-c to abort: """

mercurial_style_message = """
You're missing the gem5 style hook, which automatically checks your code
against the gem5 style rules on hg commit and qrefresh commands.
This script will now install the hook in your .hg/hgrc file.
Press enter to continue, or ctrl-c to abort: """

git_style_message = """
You're missing the gem5 style or commit message hook. These hooks help
to ensure that your code follows gem5's style rules on git commit.
This script will now install the hook in your .git/hooks/ directory.
Press enter to continue, or ctrl-c to abort: """

mercurial_style_upgrade_message = """
Your Mercurial style hooks are not up-to-date. This script will now
try to automatically update them. A backup of your hgrc will be saved
in .hg/hgrc.old.
Press enter to continue, or ctrl-c to abort: """

mercurial_style_hook = """
# The following lines were automatically added by gem5/SConstruct
# to provide the gem5 style-checking hooks
[extensions]
hgstyle = %s/util/hgstyle.py

[hooks]
pretxncommit.style = python:hgstyle.check_style
pre-qrefresh.style = python:hgstyle.check_style
# End of SConstruct additions

""" % (main.root.abspath)

mercurial_lib_not_found = """
Mercurial libraries cannot be found, ignoring style hook.  If
you are a gem5 developer, please fix this and run the style
hook. It is important.
"""

# Check for style hook and prompt for installation if it's not there.
# Skip this if --ignore-style was specified, there's no interactive
# terminal to prompt, or no recognized revision control system can be
# found.
ignore_style = GetOption('ignore_style') or not sys.stdin.isatty()

# Try to wire up Mercurial to the style hooks
if not ignore_style and hgdir.exists():
    style_hook = True
    style_hooks = tuple()
    hgrc = hgdir.File('hgrc')
    hgrc_old = hgdir.File('hgrc.old')
    try:
        from mercurial import ui
        ui = ui.ui()
        ui.readconfig(hgrc.abspath)
        style_hooks = (ui.config('hooks', 'pretxncommit.style', None),
                       ui.config('hooks', 'pre-qrefresh.style', None))
        style_hook = all(style_hooks)
        style_extension = ui.config('extensions', 'style', None)
    except ImportError:
        print mercurial_lib_not_found

    if "python:style.check_style" in style_hooks:
        # Try to upgrade the style hooks
        print mercurial_style_upgrade_message
        # continue unless user does ctrl-c/ctrl-d etc.
        try:
            raw_input()
        except:
            print "Input exception, exiting scons.\n"
            sys.exit(1)
        shutil.copyfile(hgrc.abspath, hgrc_old.abspath)
        re_style_hook = re.compile(r"^([^=#]+)\.style\s*=\s*([^#\s]+).*")
        re_style_extension = re.compile("style\s*=\s*([^#\s]+).*")
        old, new = open(hgrc_old.abspath, 'r'), open(hgrc.abspath, 'w')
        for l in old:
            m_hook = re_style_hook.match(l)
            m_ext = re_style_extension.match(l)
            if m_hook:
                hook, check = m_hook.groups()
                if check != "python:style.check_style":
                    print "Warning: %s.style is using a non-default " \
                        "checker: %s" % (hook, check)
                if hook not in ("pretxncommit", "pre-qrefresh"):
                    print "Warning: Updating unknown style hook: %s" % hook

                l = "%s.style = python:hgstyle.check_style\n" % hook
            elif m_ext and m_ext.group(1) == style_extension:
                l = "hgstyle = %s/util/hgstyle.py\n" % main.root.abspath

            new.write(l)
    elif not style_hook:
        print mercurial_style_message,
        # continue unless user does ctrl-c/ctrl-d etc.
        try:
            raw_input()
        except:
            print "Input exception, exiting scons.\n"
            sys.exit(1)
        hgrc_path = '%s/.hg/hgrc' % main.root.abspath
        print "Adding style hook to", hgrc_path, "\n"
        try:
            with open(hgrc_path, 'a') as f:
                f.write(mercurial_style_hook)
        except:
            print "Error updating", hgrc_path
            sys.exit(1)

def install_git_style_hooks():
    try:
        gitdir = Dir(readCommand(
            ["git", "rev-parse", "--git-dir"]).strip("\n"))
    except Exception, e:
        print "Warning: Failed to find git repo directory: %s" % e
        return

    git_hooks = gitdir.Dir("hooks")
    def hook_exists(hook_name):
        hook = git_hooks.File(hook_name)
        return hook.exists()

    def hook_install(hook_name, script):
        hook = git_hooks.File(hook_name)
        if hook.exists():
            print "Warning: Can't install %s, hook already exists." % hook_name
            return

        if hook.islink():
            print "Warning: Removing broken symlink for hook %s." % hook_name
            os.unlink(hook.get_abspath())

        if not git_hooks.exists():
            mkdir(git_hooks.get_abspath())
            git_hooks.clear()

        abs_symlink_hooks = git_hooks.islink() and \
            os.path.isabs(os.readlink(git_hooks.get_abspath()))

        # Use a relative symlink if the hooks live in the source directory,
        # and the hooks directory is not a symlink to an absolute path.
        if hook.is_under(main.root) and not abs_symlink_hooks:
            script_path = os.path.relpath(
                os.path.realpath(script.get_abspath()),
                os.path.realpath(hook.Dir(".").get_abspath()))
        else:
            script_path = script.get_abspath()

        try:
            os.symlink(script_path, hook.get_abspath())
        except:
            print "Error updating git %s hook" % hook_name
            raise

    if hook_exists("pre-commit") and hook_exists("commit-msg"):
        return

    print git_style_message,
    try:
        raw_input()
    except:
        print "Input exception, exiting scons.\n"
        sys.exit(1)

    git_style_script = File("util/git-pre-commit.py")
    git_msg_script = File("ext/git-commit-msg")

    hook_install("pre-commit", git_style_script)
    hook_install("commit-msg", git_msg_script)

# Try to wire up git to the style hooks
if not ignore_style and main.root.Entry(".git").exists():
    install_git_style_hooks()

###################################################
#
# Figure out which configurations to set up based on the path(s) of
# the target(s).
#
###################################################

# Find default configuration & binary.
Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))

# helper function: find last occurrence of element in list
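# For example, rfind(['x', 'build', 'ALPHA', 'gem5.opt'], 'build', -2) == 1
# (offs=-2 starts the backwards search at the second-to-last element).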
def rfind(l, elt, offs = -1):
    for i in range(len(l)+offs, 0, -1):
        if l[i] == elt:
            return i
    raise ValueError, "element not found"

# Take a list of paths (or SCons Nodes) and return a list with all
# paths made absolute and ~-expanded.  Paths will be interpreted
# relative to the launch directory unless a different root is provided
def makePathListAbsolute(path_list, root=GetLaunchDir()):
    return [abspath(joinpath(root, expanduser(str(p))))
            for p in path_list]

# Each target must have 'build' in the interior of the path; the
# directory below this will determine the build parameters.  For
# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
# recognize that ALPHA_SE specifies the configuration because it
# follows 'build' in the target path.

# The funky assignment to "[:]" is needed to replace the list contents
# in place rather than reassign the symbol to a new list, which
# doesn't work (obviously!).
BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)

# Generate a list of the unique build roots and configs that the
# collected targets reference.
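# For example, a target of '/local/foo/build/ALPHA/gem5.debug' yields
# build_root '/local/foo/build' and variant_path '/local/foo/build/ALPHA'.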
variant_paths = []
build_root = None
for t in BUILD_TARGETS:
    path_dirs = t.split('/')
    try:
        build_top = rfind(path_dirs, 'build', -2)
    except:
        print "Error: no non-leaf 'build' dir found on target path", t
        Exit(1)
    this_build_root = joinpath('/',*path_dirs[:build_top+1])
    if not build_root:
        build_root = this_build_root
    else:
        if this_build_root != build_root:
            print "Error: build targets not under same build root\n"\
                  "  %s\n  %s" % (build_root, this_build_root)
            Exit(1)
    variant_path = joinpath('/',*path_dirs[:build_top+2])
    if variant_path not in variant_paths:
        variant_paths.append(variant_path)

# Make sure build_root exists (might not if this is the first build there)
if not isdir(build_root):
    mkdir(build_root)
main['BUILDROOT'] = build_root

Export('main')

main.SConsignFile(joinpath(build_root, "sconsign"))

# Default duplicate option is to use hard links, but this messes up
# when you use emacs to edit a file in the target dir, as emacs moves
# file to file~ then copies to file, breaking the link.  Symbolic
# (soft) links work better.
main.SetOption('duplicate', 'soft-copy')

#
# Set up global sticky variables... these are common to an entire build
# tree (not specific to a particular build like ALPHA_SE)
#

global_vars_file = joinpath(build_root, 'variables.global')

global_vars = Variables(global_vars_file, args=ARGUMENTS)

global_vars.AddVariables(
    ('CC', 'C compiler', environ.get('CC', main['CC'])),
    ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
    ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
    ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
    ('BATCH', 'Use batch pool for build and tests', False),
    ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
    ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
    ('EXTRAS', 'Add extra directories to the compilation', '')
    )

# Update main environment with values from ARGUMENTS & global_vars_file
global_vars.Update(main)
help_texts["global_vars"] += global_vars.GenerateHelpText(main)

# Save sticky variable settings back to current variables file
global_vars.Save(global_vars_file, main)
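# For example (illustrative), running 'scons CC=clang CXX=clang++ <target>'
# once records those settings in build/variables.global, so later invocations
# pick them up without repeating the arguments.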

# Parse EXTRAS variable to build list of all directories where we'll
# look for sources etc.  This list is exported as extras_dir_list.
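# Example (illustrative): 'scons EXTRAS=/work/my_models:/work/more_models ...'
# also compiles the sources found under those directories into gem5.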
base_dir = main.srcdir.abspath
if main['EXTRAS']:
    extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
else:
    extras_dir_list = []

Export('base_dir')
Export('extras_dir_list')

# the ext directory should be on the #includes path
main.Append(CPPPATH=[Dir('ext')])

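# Strip the build root (or a leading 'build/') from a path so that build
# output stays short, e.g. 'build/ALPHA/base/foo.cc' -> 'ALPHA/base/foo.cc'.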
def strip_build_path(path, env):
    path = str(path)
    variant_base = env['BUILDROOT'] + os.path.sep
    if path.startswith(variant_base):
        path = path[len(variant_base):]
    elif path.startswith('build/'):
        path = path[6:]
    return path

# Generate a string of the form:
#   common/path/prefix/src1, src2 -> tgt1, tgt2
# to print while building.
class Transform(object):
    # all specific color settings should be here and nowhere else
    tool_color = termcap.Normal
    pfx_color = termcap.Yellow
    srcs_color = termcap.Yellow + termcap.Bold
    arrow_color = termcap.Blue + termcap.Bold
    tgts_color = termcap.Yellow + termcap.Bold

    def __init__(self, tool, max_sources=99):
        self.format = self.tool_color + (" [%8s] " % tool) \
                      + self.pfx_color + "%s" \
                      + self.srcs_color + "%s" \
                      + self.arrow_color + " -> " \
                      + self.tgts_color + "%s" \
                      + termcap.Normal
        self.max_sources = max_sources

    def __call__(self, target, source, env, for_signature=None):
        # truncate source list according to max_sources param
        source = source[0:self.max_sources]
        def strip(f):
            return strip_build_path(str(f), env)
        if len(source) > 0:
            srcs = map(strip, source)
        else:
            srcs = ['']
        tgts = map(strip, target)
        # surprisingly, os.path.commonprefix is a dumb char-by-char string
        # operation that has nothing to do with paths.
        com_pfx = os.path.commonprefix(srcs + tgts)
        com_pfx_len = len(com_pfx)
        if com_pfx:
            # do some cleanup and sanity checking on common prefix
            if com_pfx[-1] == ".":
                # prefix matches all but file extension: ok
                # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
                com_pfx = com_pfx[0:-1]
            elif com_pfx[-1] == "/":
                # common prefix is directory path: OK
                pass
            else:
                src0_len = len(srcs[0])
                tgt0_len = len(tgts[0])
                if src0_len == com_pfx_len:
                    # source is a substring of target, OK
                    pass
                elif tgt0_len == com_pfx_len:
                    # target is a substring of source, need to back up to
                    # avoid empty string on RHS of arrow
                    sep_idx = com_pfx.rfind(".")
                    if sep_idx != -1:
                        com_pfx = com_pfx[0:sep_idx]
                    else:
                        com_pfx = ''
                elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
                    # still splitting at file extension: ok
                    pass
                else:
                    # probably a fluke; ignore it
                    com_pfx = ''
        # recalculate length in case com_pfx was modified
        com_pfx_len = len(com_pfx)
        def fmt(files):
            f = map(lambda s: s[com_pfx_len:], files)
            return ', '.join(f)
        return self.format % (com_pfx, fmt(srcs), fmt(tgts))

Export('Transform')

# enable the regression script to use the termcap
main['TERMCAP'] = termcap

if GetOption('verbose'):
    def MakeAction(action, string, *args, **kwargs):
        return Action(action, *args, **kwargs)
else:
    MakeAction = Action
    main['CCCOMSTR']        = Transform("CC")
    main['CXXCOMSTR']       = Transform("CXX")
    main['ASCOMSTR']        = Transform("AS")
    main['SWIGCOMSTR']      = Transform("SWIG")
    main['ARCOMSTR']        = Transform("AR", 0)
    main['LINKCOMSTR']      = Transform("LINK", 0)
    main['SHLINKCOMSTR']    = Transform("SHLINK", 0)
    main['RANLIBCOMSTR']    = Transform("RANLIB", 0)
    main['M4COMSTR']        = Transform("M4")
    main['SHCCCOMSTR']      = Transform("SHCC")
    main['SHCXXCOMSTR']     = Transform("SHCXX")
Export('MakeAction')

# Initialize the Link-Time Optimization (LTO) flags
main['LTO_CCFLAGS'] = []
main['LTO_LDFLAGS'] = []

# According to the readme, tcmalloc works best if the compiler doesn't
# assume that we're using the builtin malloc and friends. These flags
# are compiler-specific, so we need to set them after we detect which
# compiler we're using.
main['TCMALLOC_CCFLAGS'] = []

CXX_version = readCommand([main['CXX'],'--version'], exception=False)
CXX_V = readCommand([main['CXX'],'-V'], exception=False)

main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
if main['GCC'] + main['CLANG'] > 1:
    print 'Error: How can we have two at the same time?'
    Exit(1)

# Set up default C++ compiler flags
if main['GCC'] or main['CLANG']:
    # As gcc and clang share many flags, do the common parts here
    main.Append(CCFLAGS=['-pipe'])
    main.Append(CCFLAGS=['-fno-strict-aliasing'])
    # Enable -Wall and -Wextra and then disable the few warnings that
    # we consistently violate
    main.Append(CCFLAGS=['-Wall', '-Wundef', '-Wextra',
                         '-Wno-sign-compare', '-Wno-unused-parameter'])
    # We always compile using C++11
    main.Append(CXXFLAGS=['-std=c++11'])
    if sys.platform.startswith('freebsd'):
        main.Append(CCFLAGS=['-I/usr/local/include'])
        main.Append(CXXFLAGS=['-I/usr/local/include'])

    main['FILTER_PSHLINKFLAGS'] = lambda x: str(x).replace(' -shared', '')
    main['PSHLINKFLAGS'] = main.subst('${FILTER_PSHLINKFLAGS(SHLINKFLAGS)}')
    main['PLINKFLAGS'] = main.subst('${LINKFLAGS}')
    shared_partial_flags = ['-Wl,--relocatable', '-nostdlib']
    main.Append(PSHLINKFLAGS=shared_partial_flags)
    main.Append(PLINKFLAGS=shared_partial_flags)
else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
               termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use a compiler other than GCC"
    print "       or clang, there appears to be something wrong with your"
    print "       environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to fix SConstruct and "
    print "       src/SConscript to support that compiler."
    Exit(1)

if main['GCC']:
    # Check for a supported version of gcc. >= 4.8 is chosen for its
    # level of c++11 support. See
    # http://gcc.gnu.org/projects/cxx0x.html for details.
    gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
    if compareVersions(gcc_version, "4.8") < 0:
        print 'Error: gcc version 4.8 or newer required.'
        print '       Installed version:', gcc_version
        Exit(1)

    main['GCC_VERSION'] = gcc_version

    # gcc from version 4.8 and above generates "rep; ret" instructions
    # to avoid performance penalties on certain AMD chips. Older
    # assemblers detect this as an error, "Error: expecting string
    # instruction after `rep'"
    as_version_raw = readCommand([main['AS'], '-v', '/dev/null',
                                  '-o', '/dev/null'],
                                 exception=False).split()

    # version strings may contain extra distro-specific
    # qualifiers, so play it safe and keep only what comes before
    # the first hyphen
    as_version = as_version_raw[-1].split('-')[0] if as_version_raw else None

    if not as_version or compareVersions(as_version, "2.23") < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: This combination of gcc and binutils has' + \
            ' known incompatibilities.\n' + \
            '         If you encounter build problems, please update ' + \
            'binutils to 2.23.' + \
            termcap.Normal

    # Make sure we warn if the user has requested to compile with the
    # Undefined Behavior Sanitizer and this version of gcc does not
    # support it.
    if GetOption('with_ubsan') and \
            compareVersions(gcc_version, '4.9') < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: UBSan is only supported using gcc 4.9 and later.' + \
            termcap.Normal

    # Add the appropriate Link-Time Optimization (LTO) flags
    # unless LTO is explicitly turned off. Note that these flags
    # are only used by the fast target.
    if not GetOption('no_lto'):
        # Pass the LTO flag when compiling to produce GIMPLE
        # output, we merely create the flags here and only append
        # them later
        main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

        # Use the same amount of jobs for LTO as we are running
        # scons with
        main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
                                  '-fno-builtin-realloc', '-fno-builtin-free'])

    # add option to check for undeclared overrides
    if compareVersions(gcc_version, "5.0") > 0:
        main.Append(CCFLAGS=['-Wno-error=suggest-override'])

elif main['CLANG']:
    # Check for a supported version of clang, >= 3.1 is needed to
    # support similar features as gcc 4.8. See
    # http://clang.llvm.org/cxx_status.html for details
    clang_version_re = re.compile(".* version (\d+\.\d+)")
    clang_version_match = clang_version_re.search(CXX_version)
    if (clang_version_match):
        clang_version = clang_version_match.groups()[0]
        if compareVersions(clang_version, "3.1") < 0:
            print 'Error: clang version 3.1 or newer required.'
            print '       Installed version:', clang_version
            Exit(1)
    else:
        print 'Error: Unable to determine clang version.'
        Exit(1)

    # clang has a few additional warnings that we disable: extraneous
    # parentheses are allowed due to Ruby's printing of the AST, and
    # self-assignments are allowed as the generated CPU code relies
    # on this
    main.Append(CCFLAGS=['-Wno-parentheses',
                         '-Wno-self-assign',
                         # Some versions of libstdc++ (4.8?) seem to
                         # use struct hash and class hash
                         # interchangeably.
                         '-Wno-mismatched-tags',
                         ])

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])

    # On Mac OS X/Darwin we need to also use libc++ (part of Xcode) as
    # opposed to libstdc++, as the latter is dated.
    if sys.platform == "darwin":
        main.Append(CXXFLAGS=['-stdlib=libc++'])
        main.Append(LIBS=['c++'])

    # On FreeBSD we need libthr.
    if sys.platform.startswith('freebsd'):
        main.Append(LIBS=['thr'])

else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
               termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use a compiler other than GCC"
    print "       or clang, there appears to be something wrong with your"
    print "       environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to fix SConstruct and "
    print "       src/SConscript to support that compiler."
    Exit(1)

# Set up common yacc/bison flags (needed for Ruby)
main['YACCFLAGS'] = '-d'
main['YACCHXXFILESUFFIX'] = '.hh'

# Do this after we save the settings back, or else we'll tack on an
# extra 'qdo' every time we run scons.
if main['BATCH']:
    main['CC']     = main['BATCH_CMD'] + ' ' + main['CC']
    main['CXX']    = main['BATCH_CMD'] + ' ' + main['CXX']
    main['AS']     = main['BATCH_CMD'] + ' ' + main['AS']
    main['AR']     = main['BATCH_CMD'] + ' ' + main['AR']
    main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']

if sys.platform == 'cygwin':
    # cygwin has some header file issues...
    main.Append(CCFLAGS=["-Wno-uninitialized"])

# Check for the protobuf compiler
protoc_version = readCommand([main['PROTOC'], '--version'],
                             exception='').split()

# First two words should be "libprotoc x.y.z"
if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
    print termcap.Yellow + termcap.Bold + \
        'Warning: Protocol buffer compiler (protoc) not found.\n' + \
        '         Please install protobuf-compiler for tracing support.' + \
        termcap.Normal
    main['PROTOC'] = False
else:
    # Based on the availability of the compress stream wrappers,
    # require 2.1.0
    min_protoc_version = '2.1.0'
    if compareVersions(protoc_version[1], min_protoc_version) < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: protoc version', min_protoc_version, \
            'or newer required.\n' + \
            '         Installed version:', protoc_version[1], \
            termcap.Normal
        main['PROTOC'] = False
    else:
        # Attempt to determine the appropriate include path and
        # library path using pkg-config, which means we also need to
        # check for pkg-config. Note that it is possible to use
        # protobuf without the involvement of pkg-config. Later on we
        # do a library config check, and at that point the test
        # will fail if libprotobuf cannot be found.
        if readCommand(['pkg-config', '--version'], exception=''):
            try:
                # Attempt to establish what linking flags to add for protobuf
                # using pkg-config
                main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
            except:
                print termcap.Yellow + termcap.Bold + \
                    'Warning: pkg-config could not get protobuf flags.' + \
                    termcap.Normal

# Check for SWIG
if not main.has_key('SWIG'):
    print 'Error: SWIG utility not found.'
    print '       Please install (see http://www.swig.org) and retry.'
    Exit(1)

# Check for appropriate SWIG version
swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
# First 3 words should be "SWIG Version x.y.z"
if len(swig_version) < 3 or \
        swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
    print 'Error determining SWIG version.'
    Exit(1)

min_swig_version = '2.0.4'
if compareVersions(swig_version[2], min_swig_version) < 0:
    print 'Error: SWIG version', min_swig_version, 'or newer required.'
    print '       Installed version:', swig_version[2]
    Exit(1)

# Check for known incompatibilities. The standard library shipped with
# gcc >= 4.9 does not play well with swig versions prior to 3.0
if main['GCC'] and compareVersions(gcc_version, '4.9') >= 0 and \
        compareVersions(swig_version[2], '3.0') < 0:
    print termcap.Yellow + termcap.Bold + \
        'Warning: This combination of gcc and swig has' + \
        ' known incompatibilities.\n' + \
        '         If you encounter build problems, please update ' + \
        'swig to 3.0 or later.' + \
        termcap.Normal

# Set up SWIG flags & scanner
swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
main.Append(SWIGFLAGS=swig_flags)

# Check for 'timeout' from GNU coreutils. If present, regressions will
# be run with a time limit. We require version 8.13 since we rely on
# support for the '--foreground' option.
if sys.platform.startswith('freebsd'):
    timeout_lines = readCommand(['gtimeout', '--version'],
                                exception='').splitlines()
else:
    timeout_lines = readCommand(['timeout', '--version'],
                                exception='').splitlines()
# Get the first line and tokenize it
timeout_version = timeout_lines[0].split() if timeout_lines else []
main['TIMEOUT'] = timeout_version and \
    compareVersions(timeout_version[-1], '8.13') >= 0

# filter out all existing swig scanners; they mess up the dependency
# tracking for some reason
scanners = []
for scanner in main['SCANNERS']:
    skeys = scanner.skeys
    if skeys == '.i':
        continue

    if isinstance(skeys, (list, tuple)) and '.i' in skeys:
        continue

    scanners.append(scanner)

# add the new swig scanner that we like better
from SCons.Scanner import ClassicCPP as CPPScanner
swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
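# The pattern matches directives such as '%include "file.i"' or
# '#include <header.h>' so that dependencies of .i files are tracked.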
scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))

# replace the scanners list that has what we want
main['SCANNERS'] = scanners

# Add a custom Check function to test for structure members.
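# For example (used further below):
#   conf.CheckMember('linux/perf_event.h', 'struct perf_event_attr',
#                    'exclude_host')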
def CheckMember(context, include, decl, member, include_quotes="<>"):
    context.Message("Checking for member %s in %s..." %
                    (member, decl))
    text = """
#include %(header)s
int main(){
  %(decl)s test;
  (void)test.%(member)s;
  return 0;
};
""" % { "header" : include_quotes[0] + include + include_quotes[1],
        "decl" : decl,
        "member" : member,
        }

    ret = context.TryCompile(text, extension=".cc")
    context.Result(ret)
    return ret

# Platform-specific configuration.  Note again that we assume that all
# builds under a given build root run on the same host platform.
conf = Configure(main,
                 conf_dir = joinpath(build_root, '.scons_config'),
                 log_file = joinpath(build_root, 'scons_config.log'),
                 custom_tests = {
        'CheckMember' : CheckMember,
        })

# Check if we should compile a 64 bit binary on Mac OS X/Darwin
try:
    import platform
    uname = platform.uname()
    if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
        if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
            main.Append(CCFLAGS=['-arch', 'x86_64'])
            main.Append(CFLAGS=['-arch', 'x86_64'])
            main.Append(LINKFLAGS=['-arch', 'x86_64'])
            main.Append(ASFLAGS=['-arch', 'x86_64'])
except:
    pass

# Recent versions of scons substitute a "Null" object for Configure()
# when configuration isn't necessary, e.g., if the "--help" option is
# present.  Unfortunately this Null object always returns false,
# breaking all our configuration checks.  We replace it with our own
# more optimistic null object that returns True instead.
if not conf:
    def NullCheck(*args, **kwargs):
        return True

    class NullConf:
        def __init__(self, env):
            self.env = env
        def Finish(self):
            return self.env
        def __getattr__(self, mname):
            return NullCheck

    conf = NullConf(main)

# Cache build files in the supplied directory.
if main['M5_BUILD_CACHE']:
    print 'Using build cache located at', main['M5_BUILD_CACHE']
    CacheDir(main['M5_BUILD_CACHE'])

if not GetOption('without_python'):
    # Find Python include and library directories for embedding the
    # interpreter. We rely on python-config to resolve the appropriate
    # includes and linker flags. ParseConfig does not seem to understand
    # the more exotic linker flags such as -Xlinker and -export-dynamic so
    # we add them explicitly below. If you want to link in an alternate
    # version of python, see above for instructions on how to invoke
    # scons with the appropriate PATH set.
    #
    # First we check if python2-config exists, else we use python-config
    python_config = readCommand(['which', 'python2-config'],
                                exception='').strip()
    if not os.path.exists(python_config):
        python_config = readCommand(['which', 'python-config'],
                                    exception='').strip()
    py_includes = readCommand([python_config, '--includes'],
                              exception='').split()
    # Strip the -I from the include folders before adding them to the
    # CPPPATH
    main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))

    # Read the linker flags and split them into libraries and other link
    # flags. The libraries are added later through the call to CheckLib.
    py_ld_flags = readCommand([python_config, '--ldflags'],
        exception='').split()
    py_libs = []
    for lib in py_ld_flags:
        if not lib.startswith('-l'):
            main.Append(LINKFLAGS=[lib])
        else:
            lib = lib[2:]
            if lib not in py_libs:
                py_libs.append(lib)

    # verify that this stuff works
    if not conf.CheckHeader('Python.h', '<>'):
        print "Error: can't find Python.h header in", py_includes
        print "Install Python headers (package python-dev on Ubuntu and RedHat)"
        Exit(1)

    for lib in py_libs:
        if not conf.CheckLib(lib):
            print "Error: can't find library %s required by python" % lib
            Exit(1)

# On Solaris you need to use libsocket for socket ops
if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
    if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
        print "Can't find library with socket calls (e.g. accept())"
        Exit(1)

# Check for zlib.  If the check passes, libz will be automatically
# added to the LIBS environment variable.
if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
    print 'Error: did not find needed zlib compression library '\
          'and/or zlib.h header file.'
    print '       Please install zlib and try again.'
    Exit(1)

# If we have the protobuf compiler, also make sure we have the
# development libraries. If the check passes, libprotobuf will be
# automatically added to the LIBS environment variable. After
# this, we can use the HAVE_PROTOBUF flag to determine if we have
# got both protoc and libprotobuf available.
main['HAVE_PROTOBUF'] = main['PROTOC'] and \
    conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
                            'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')

# If we have the compiler but not the library, print another warning.
if main['PROTOC'] and not main['HAVE_PROTOBUF']:
    print termcap.Yellow + termcap.Bold + \
        'Warning: did not find protocol buffer library and/or headers.\n' + \
        '       Please install libprotobuf-dev for tracing support.' + \
        termcap.Normal

# Check for librt.
have_posix_clock = \
    conf.CheckLibWithHeader(None, 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);') or \
    conf.CheckLibWithHeader('rt', 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);')

have_posix_timers = \
    conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
                            'timer_create(CLOCK_MONOTONIC, NULL, NULL);')

if not GetOption('without_tcmalloc'):
    if conf.CheckLib('tcmalloc'):
        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
    elif conf.CheckLib('tcmalloc_minimal'):
        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
    else:
        print termcap.Yellow + termcap.Bold + \
              "You can get a 12% performance improvement by "\
              "installing tcmalloc (libgoogle-perftools-dev package "\
              "on Ubuntu or RedHat)." + termcap.Normal


# Detect back trace implementations. The last implementation in the
# list will be used by default.
backtrace_impls = [ "none" ]

if conf.CheckLibWithHeader(None, 'execinfo.h', 'C',
                           'backtrace_symbols_fd((void*)0, 0, 0);'):
    backtrace_impls.append("glibc")
elif conf.CheckLibWithHeader('execinfo', 'execinfo.h', 'C',
                           'backtrace_symbols_fd((void*)0, 0, 0);'):
    # NetBSD and FreeBSD need libexecinfo.
    backtrace_impls.append("glibc")
    main.Append(LIBS=['execinfo'])

if backtrace_impls[-1] == "none":
    default_backtrace_impl = "none"
    print termcap.Yellow + termcap.Bold + \
        "No suitable back trace implementation found." + \
        termcap.Normal

if not have_posix_clock:
    print "Can't find library for POSIX clocks."

# Check for <fenv.h> (C99 FP environment control)
have_fenv = conf.CheckHeader('fenv.h', '<>')
if not have_fenv:
    print "Warning: Header file <fenv.h> not found."
    print "         This host has no IEEE FP rounding mode control."

# Check if we should enable KVM-based hardware virtualization. The API
# we rely on has been available since version 2.6.36 of the kernel, but
# somehow the KVM_API_VERSION does not reflect the change. We test for
# one of the types as a fallback.
have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
if not have_kvm:
    print "Info: Compatible header file <linux/kvm.h> not found, " \
        "disabling KVM support."

# x86 needs support for xsave. We test for the structure here since we
# won't be able to run new tests by the time we know which ISA we're
# targeting.
have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
                                    '#include <linux/kvm.h>') != 0

# Check if the requested target ISA is compatible with the host
def is_isa_kvm_compatible(isa):
    try:
        import platform
        host_isa = platform.machine()
    except:
        print "Warning: Failed to determine host ISA."
        return False

    if not have_posix_timers:
        print "Warning: Can not enable KVM, host seems to lack support " \
            "for POSIX timers"
        return False

    if isa == "arm":
        return host_isa in ( "armv7l", "aarch64" )
    elif isa == "x86":
        if host_isa != "x86_64":
            return False

        if not have_kvm_xsave:
            print "KVM on x86 requires xsave support in kernel headers."
            return False

        return True
    else:
        return False


# Check if the exclude_host attribute is available. We want this to
# get accurate instruction counts in KVM.
main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
    'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')


######################################################################
#
# Finish the configuration
#
main = conf.Finish()

######################################################################
#
# Collect all non-global variables
#

# Define the universe of supported ISAs
all_isa_list = [ ]
all_gpu_isa_list = [ ]
Export('all_isa_list')
Export('all_gpu_isa_list')

class CpuModel(object):
    '''The CpuModel class encapsulates everything the ISA parser needs to
    know about a particular CPU model.'''

    # Dict of available CPU model objects.  Accessible as CpuModel.dict.
    dict = {}

    # Constructor.  Automatically adds models to CpuModel.dict.
    def __init__(self, name, default=False):
        self.name = name           # name of model

        # This cpu is enabled by default
        self.default = default

        # Add self to dict
        if name in CpuModel.dict:
            raise AttributeError, "CpuModel '%s' already registered" % name
        CpuModel.dict[name] = self

Export('CpuModel')

# Sticky variables get saved in the variables file so they persist from
# one invocation to the next (unless overridden, in which case the new
# value becomes sticky).
sticky_vars = Variables(args=ARGUMENTS)
Export('sticky_vars')

# Sticky variables that should be exported
export_vars = []
Export('export_vars')

# For Ruby
all_protocols = []
Export('all_protocols')
protocol_dirs = []
Export('protocol_dirs')
slicc_includes = []
Export('slicc_includes')

# Walk the tree and execute all SConsopts scripts that will add to the
# above variables
if GetOption('verbose'):
    print "Reading SConsopts"
for bdir in [ base_dir ] + extras_dir_list:
    if not isdir(bdir):
        print "Error: directory '%s' does not exist" % bdir
        Exit(1)
    for root, dirs, files in os.walk(bdir):
        if 'SConsopts' in files:
            if GetOption('verbose'):
                print "Reading", joinpath(root, 'SConsopts')
            SConscript(joinpath(root, 'SConsopts'))

all_isa_list.sort()
all_gpu_isa_list.sort()

sticky_vars.AddVariables(
    EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
    EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list),
    ListVariable('CPU_MODELS', 'CPU models',
                 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
                 sorted(CpuModel.dict.keys())),
    BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
                 False),
    BoolVariable('SS_COMPATIBLE_FP',
                 'Make floating-point results compatible with SimpleScalar',
                 False),
    BoolVariable('USE_SSE2',
                 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
                 False),
    BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
    BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
    BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability', False),
    BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models', have_kvm),
    BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
    EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
                  all_protocols),
    EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
                 backtrace_impls[-1], backtrace_impls)
    )
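
# Example override (illustrative): 'scons build/ALPHA/gem5.opt USE_KVM=False'
# makes USE_KVM=False sticky for that build directory on subsequent builds.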
1303
1304# These variables get exported to #defines in config/*.hh (see src/SConscript).
1305export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
1306                'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'PROTOCOL',
1307                'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST']
1308
1309###################################################
1310#
1311# Define a SCons builder for configuration flag headers.
1312#
1313###################################################
1314
1315# This function generates a config header file that #defines the
1316# variable symbol to the current variable setting (0 or 1).  The source
1317# operands are the name of the variable and a Value node containing the
1318# value of the variable.
1319def build_config_file(target, source, env):
1320    (variable, value) = [s.get_contents() for s in source]
1321    f = file(str(target[0]), 'w')
1322    print >> f, '#define', variable, value
1323    f.close()
1324    return None
1325
1326# Combine the two functions into a scons Action object.
1327config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))
1328
1329# The emitter munges the source & target node lists to reflect what
1330# we're really doing.
def config_emitter(target, source, env):
    # extract variable name from Builder arg
    variable = str(target[0])
    # True target is config header file
    target = joinpath('config', variable.lower() + '.hh')
    val = env[variable]
    if isinstance(val, bool):
        # Force value to 0/1
        val = int(val)
    elif isinstance(val, str):
        val = '"' + val + '"'

    # Sources are variable name & value (packaged in SCons Value nodes)
    return ([target], [Value(variable), Value(val)])

config_builder = Builder(emitter = config_emitter, action = config_action)

main.Append(BUILDERS = { 'ConfigFile' : config_builder })
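
# Illustrative sketch of how the builder gets used (the real invocations
# live in src/SConscript, which requests one header per name in
# export_vars; the variable chosen here is just an example):
#
#   env.ConfigFile('USE_KVM')
#
# Per the emitter and action above, this writes config/use_kvm.hh
# containing a single line such as '#define USE_KVM 1' (booleans become
# 0/1, string values are quoted).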

###################################################
#
# Builders for static and shared partially linked object files.
#
###################################################

partial_static_builder = Builder(action=SCons.Defaults.LinkAction,
                                 src_suffix='$OBJSUFFIX',
                                 src_builder=['StaticObject', 'Object'],
                                 LINKFLAGS='$PLINKFLAGS',
                                 LIBS='')

def partial_shared_emitter(target, source, env):
    for tgt in target:
        tgt.attributes.shared = 1
    return (target, source)
partial_shared_builder = Builder(action=SCons.Defaults.ShLinkAction,
                                 emitter=partial_shared_emitter,
                                 src_suffix='$SHOBJSUFFIX',
                                 src_builder='SharedObject',
                                 SHLINKFLAGS='$PSHLINKFLAGS',
                                 LIBS='')

main.Append(BUILDERS = { 'PartialShared' : partial_shared_builder,
                         'PartialStatic' : partial_static_builder })
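
# Sketch of intended use (target and source names here are placeholders,
# not taken from this file): a SConscript can partially link a group of
# objects into one relocatable object before the final link, e.g.
#
#   env.PartialStatic('partial.o', list_of_static_objects)
#   env.PartialShared('partial.os', list_of_shared_objects)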

# builds in ext are shared across all configs in the build root.
ext_dir = abspath(joinpath(str(main.root), 'ext'))
for root, dirs, files in os.walk(ext_dir):
    if 'SConscript' in files:
        build_dir = os.path.relpath(root, ext_dir)
        main.SConscript(joinpath(root, 'SConscript'),
                        variant_dir=joinpath(build_root, build_dir))

###################################################
#
# This function is used to set up a directory with switching headers
#
###################################################

main['ALL_ISA_LIST'] = all_isa_list
main['ALL_GPU_ISA_LIST'] = all_gpu_isa_list
all_isa_deps = {}
def make_switching_dir(dname, switch_headers, env):
    # Generate the header.  target[0] is the full path of the output
    # header to generate.  'source' is a dummy variable, since we get the
    # list of ISAs from env['ALL_ISA_LIST'].
    def gen_switch_hdr(target, source, env):
        fname = str(target[0])
        isa = env['TARGET_ISA'].lower()
        try:
            f = open(fname, 'w')
            print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
            f.close()
        except IOError:
            print "Failed to create %s" % fname
            raise

    # Build SCons Action object. 'varlist' specifies env vars that this
    # action depends on; when env['ALL_ISA_LIST'] changes these actions
    # should get re-executed.
    switch_hdr_action = MakeAction(gen_switch_hdr,
                          Transform("GENERATE"), varlist=['ALL_ISA_LIST'])

    # Instantiate actions for each header
    for hdr in switch_headers:
        env.Command(hdr, [], switch_hdr_action)

    isa_target = Dir('.').up().name.lower().replace('_', '-')
    env['PHONY_BASE'] = '#'+isa_target
    all_isa_deps[isa_target] = None

Export('make_switching_dir')
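
# For illustration of what gen_switch_hdr emits (the directory and header
# names below are examples, not taken from this file): with dname='arch',
# TARGET_ISA='arm', and a switch header named 'decoder.hh', the generated
# file is a one-line forwarder:
#
#   #include "arch/arm/decoder.hh"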

def make_gpu_switching_dir(dname, switch_headers, env):
    # Generate the header.  target[0] is the full path of the output
    # header to generate.  'source' is a dummy variable, since we get the
    # list of GPU ISAs from env['ALL_GPU_ISA_LIST'].
    def gen_switch_hdr(target, source, env):
        fname = str(target[0])

        isa = env['TARGET_GPU_ISA'].lower()

        try:
            f = open(fname, 'w')
            print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
            f.close()
        except IOError:
            print "Failed to create %s" % fname
            raise

    # Build SCons Action object. 'varlist' specifies env vars that this
    # action depends on; when env['ALL_GPU_ISA_LIST'] changes these actions
    # should get re-executed.
    switch_hdr_action = MakeAction(gen_switch_hdr,
                          Transform("GENERATE"), varlist=['ALL_GPU_ISA_LIST'])

    # Instantiate actions for each header
    for hdr in switch_headers:
        env.Command(hdr, [], switch_hdr_action)

Export('make_gpu_switching_dir')
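
# The GPU variant works the same way but keys on TARGET_GPU_ISA, so with
# the default 'hsail' a switch header 'foo.hh' would forward to
# "<dname>/hsail/foo.hh" (names illustrative only).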

# all-isas -> all-deps -> all-environs -> all-targets
main.Alias('#all-isas', [])
main.Alias('#all-deps', '#all-isas')

# Dummy target to ensure all environments are created before telling
# SCons what to actually make (the command line arguments).  We attach
# them to the dependence graph after the environments are complete.
ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; the closure below needs the original list
def environsComplete(target, source, env):
    for t in ORIG_BUILD_TARGETS:
        main.Depends('#all-targets', t)

# Each build/* switching_dir attaches its *-environs target to #all-environs.
main.Append(BUILDERS = {'CompleteEnvirons' :
                        Builder(action=MakeAction(environsComplete, None))})
main.CompleteEnvirons('#all-environs', [])

def doNothing(**ignored): pass
main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})

# The final target to which all the original targets ultimately get attached.
main.Dummy('#all-targets', '#all-environs')
BUILD_TARGETS[:] = ['#all-targets']
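
# Net effect, sketched for a hypothetical 'scons build/ARM/gem5.opt' run:
# SCons is told to build #all-targets, which depends on #all-environs so
# that every build environment is set up first; once the environments are
# complete, environsComplete() attaches the originally requested
# build/ARM/gem5.opt to #all-targets, completing the
# all-isas -> all-deps -> all-environs -> all-targets chain noted above.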

###################################################
#
# Define build environments for selected configurations.
#
###################################################

for variant_path in variant_paths:
    if not GetOption('silent'):
        print "Building in", variant_path

    # Make a copy of the build-root environment to use for this config.
    env = main.Clone()
    env['BUILDDIR'] = variant_path

    # variant_dir is the tail component of the build path, and is used to
    # determine the build parameters (e.g., 'ALPHA_SE')
    (build_root, variant_dir) = splitpath(variant_path)

    # Set env variables according to the build directory config.
    sticky_vars.files = []
    # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
    # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
    # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
    current_vars_file = joinpath(build_root, 'variables', variant_dir)
    if isfile(current_vars_file):
        sticky_vars.files.append(current_vars_file)
        if not GetOption('silent'):
            print "Using saved variables file %s" % current_vars_file
    else:
        # Build dir-specific variables file doesn't exist.

        # Make sure the directory exists so we can create the variables file later
        opt_dir = dirname(current_vars_file)
        if not isdir(opt_dir):
            mkdir(opt_dir)

        # Get default build variables from source tree.  Variables are
        # normally determined by name of $VARIANT_DIR, but can be
        # overridden by '--default=' arg on command line.
        default = GetOption('default')
        opts_dir = joinpath(main.root.abspath, 'build_opts')
        if default:
            default_vars_files = [joinpath(build_root, 'variables', default),
                                  joinpath(opts_dir, default)]
        else:
            default_vars_files = [joinpath(opts_dir, variant_dir)]
        existing_files = filter(isfile, default_vars_files)
        if existing_files:
            default_vars_file = existing_files[0]
            sticky_vars.files.append(default_vars_file)
            print "Variables file %s not found,\n  using defaults in %s" \
                  % (current_vars_file, default_vars_file)
        else:
            print "Error: cannot find variables file %s or " \
                  "default file(s) %s" \
                  % (current_vars_file, ' or '.join(default_vars_files))
            Exit(1)

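    # For example (paths illustrative): building build/ARM/gem5.opt gives
    # build_root='build' and variant_dir='ARM', so settings live in
    # build/variables/ARM; if that file does not exist yet, defaults are
    # seeded from build_opts/ARM in the source tree (or from the file
    # named by --default=).
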
    # Apply current variable settings to env
    sticky_vars.Update(env)

    help_texts["local_vars"] += \
        "Build variables for %s:\n" % variant_dir \
                 + sticky_vars.GenerateHelpText(env)

    # Process variable settings.

    if not have_fenv and env['USE_FENV']:
        print "Warning: <fenv.h> not available; " \
              "forcing USE_FENV to False in", variant_dir + "."
        env['USE_FENV'] = False

    if not env['USE_FENV']:
        print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
        print "         FP results may deviate slightly from other platforms."

    if env['EFENCE']:
        env.Append(LIBS=['efence'])

    if env['USE_KVM']:
        if not have_kvm:
            print "Warning: Can not enable KVM, host seems to lack KVM support"
            env['USE_KVM'] = False
        elif not is_isa_kvm_compatible(env['TARGET_ISA']):
            print "Info: KVM support disabled due to unsupported host and " \
                "target ISA combination"
            env['USE_KVM'] = False

    if env['BUILD_GPU']:
        env.Append(CPPDEFINES=['BUILD_GPU'])

    # Warn about missing optional functionality
    if env['USE_KVM']:
        if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
            print "Warning: perf_event headers lack support for the " \
                "exclude_host attribute. KVM instruction counts will " \
                "be inaccurate."

    # Save sticky variable settings back to current variables file
    sticky_vars.Save(current_vars_file, env)

    if env['USE_SSE2']:
        env.Append(CCFLAGS=['-msse2'])

    # The src/SConscript file sets up the build rules in 'env' according
    # to the configured variables.  It returns a list of environments,
    # one for each variant build (debug, opt, etc.)
    SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')

def pairwise(iterable):
    "s -> (s0,s1), (s1,s2), (s2, s3), ..."
    a, b = itertools.tee(iterable)
    b.next()
    return itertools.izip(a, b)

# Create false dependencies so SCons will parse ISAs, establish
# dependencies, and set up the build Environments serially. SCons
# (likely) and/or our SConscripts (possibly) cannot cope with -j greater
# than 1 during this phase. It appears to be standard race condition
# stuff; it doesn't always fail, but it usually does, and the failure
# modes vary. Every time I tried to remove this, builds would fail in
# some creative new way. So, don't do that. You'll want to, though,
# because tests/SConscript takes a long time to make its Environments.
for t1, t2 in pairwise(sorted(all_isa_deps.iterkeys())):
    main.Depends('#%s-deps'     % t2, '#%s-deps'     % t1)
    main.Depends('#%s-environs' % t2, '#%s-environs' % t1)
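
# For example (ISA names illustrative), if all_isa_deps contains 'alpha',
# 'arm', and 'x86', the loop above makes #arm-deps depend on #alpha-deps
# and #x86-deps depend on #arm-deps (likewise for the -environs targets),
# so the per-ISA setup runs one ISA at a time even under -j.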

# base help text
Help('''
Usage: scons [scons options] [build variables] [target(s)]

Extra scons options:
%(options)s

Global build variables:
%(global_vars)s

%(local_vars)s
''' % help_texts)
