SConstruct revision 11401
1# -*- mode:python -*-
2
3# Copyright (c) 2013, 2015 ARM Limited
4# All rights reserved.
5#
6# The license below extends only to copyright in the software and shall
7# not be construed as granting a license to any other intellectual
8# property including but not limited to intellectual property relating
9# to a hardware implementation of the functionality of the software
10# licensed hereunder.  You may use the software subject to the license
11# terms below provided that you ensure that this notice is replicated
12# unmodified and in its entirety in all distributions of the software,
13# modified or unmodified, in source code or in binary form.
14#
15# Copyright (c) 2011 Advanced Micro Devices, Inc.
16# Copyright (c) 2009 The Hewlett-Packard Development Company
17# Copyright (c) 2004-2005 The Regents of The University of Michigan
18# All rights reserved.
19#
20# Redistribution and use in source and binary forms, with or without
21# modification, are permitted provided that the following conditions are
22# met: redistributions of source code must retain the above copyright
23# notice, this list of conditions and the following disclaimer;
24# redistributions in binary form must reproduce the above copyright
25# notice, this list of conditions and the following disclaimer in the
26# documentation and/or other materials provided with the distribution;
27# neither the name of the copyright holders nor the names of its
28# contributors may be used to endorse or promote products derived from
29# this software without specific prior written permission.
30#
31# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
32# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
33# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
34# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
35# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
37# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
38# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
39# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
40# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
41# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
42#
43# Authors: Steve Reinhardt
44#          Nathan Binkert
45
46###################################################
47#
48# SCons top-level build description (SConstruct) file.
49#
50# While in this directory ('gem5'), just type 'scons' to build the default
51# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
52# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
53# the optimized full-system version).
54#
55# You can build gem5 in a different directory as long as there is a
56# 'build/<CONFIG>' somewhere along the target path.  The build system
57# expects that all configs under the same build directory are being
58# built for the same host system.
59#
60# Examples:
61#
62#   The following two commands are equivalent.  The '-u' option tells
63#   scons to search up the directory tree for this SConstruct file.
64#   % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
65#   % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
66#
67#   The following two commands are equivalent and demonstrate building
68#   in a directory outside of the source tree.  The '-C' option tells
69#   scons to chdir to the specified directory to find this SConstruct
70#   file.
71#   % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
72#   % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
73#
74# You can use 'scons -H' to print scons options.  If you're in this
75# 'gem5' directory (or use -u or -C to tell scons where to find this
76# file), you can use 'scons -h' to print all the gem5-specific build
77# options as well.
78#
79###################################################
80
81# Check for recent-enough Python and SCons versions.
82try:
    # Really old versions of scons only accept two arguments to this
    # function, so check once without the micro version and once with
    # it: the first call rejects anything older than 0.98, and the
    # second rejects 0.98.0 specifically.
87    EnsureSConsVersion(0, 98)
88    EnsureSConsVersion(0, 98, 1)
89except SystemExit, e:
90    print """
91For more details, see:
92    http://gem5.org/Dependencies
93"""
94    raise
95
# We check the Python version early because python-config requires
# Python 2.5.
98try:
99    EnsurePythonVersion(2, 5)
100except SystemExit, e:
101    print """
102You can use a non-default installation of the Python interpreter by
103rearranging your PATH so that scons finds the non-default 'python' and
104'python-config' first.
105
106For more details, see:
107    http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
108"""
109    raise
110
111# Global Python includes
112import itertools
113import os
114import re
115import shutil
116import subprocess
117import sys
118
119from os import mkdir, environ
120from os.path import abspath, basename, dirname, expanduser, normpath
121from os.path import exists,  isdir, isfile
122from os.path import join as joinpath, split as splitpath
123
124# SCons includes
125import SCons
126import SCons.Node
127
128extra_python_paths = [
129    Dir('src/python').srcnode().abspath, # gem5 includes
130    Dir('ext/ply').srcnode().abspath, # ply is used by several files
131    ]
132
133sys.path[1:1] = extra_python_paths
134
135from m5.util import compareVersions, readCommand
136from m5.util.terminal import get_termcap
137
138help_texts = {
139    "options" : "",
140    "global_vars" : "",
141    "local_vars" : ""
142}
143
144Export("help_texts")
145
146
147# There's a bug in scons in that (1) by default, the help texts from
148# AddOption() are supposed to be displayed when you type 'scons -h'
149# and (2) you can override the help displayed by 'scons -h' using the
150# Help() function, but these two features are incompatible: once
151# you've overridden the help text using Help(), there's no way to get
# at the help texts from AddOption().  See:
153#     http://scons.tigris.org/issues/show_bug.cgi?id=2356
154#     http://scons.tigris.org/issues/show_bug.cgi?id=2611
# This hack lets us extract the help text from AddOption() and
156# re-inject it via Help().  Ideally someday this bug will be fixed and
157# we can just use AddOption directly.
158def AddLocalOption(*args, **kwargs):
159    col_width = 30
160
161    help = "  " + ", ".join(args)
162    if "help" in kwargs:
163        length = len(help)
164        if length >= col_width:
165            help += "\n" + " " * col_width
166        else:
167            help += " " * (col_width - length)
168        help += kwargs["help"]
169    help_texts["options"] += help + "\n"
170
171    AddOption(*args, **kwargs)
172
173AddLocalOption('--colors', dest='use_colors', action='store_true',
174               help="Add color to abbreviated scons output")
175AddLocalOption('--no-colors', dest='use_colors', action='store_false',
176               help="Don't add color to abbreviated scons output")
177AddLocalOption('--with-cxx-config', dest='with_cxx_config',
178               action='store_true',
179               help="Build with support for C++-based configuration")
180AddLocalOption('--default', dest='default', type='string', action='store',
181               help='Override which build_opts file to use for defaults')
182AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
183               help='Disable style checking hooks')
184AddLocalOption('--no-lto', dest='no_lto', action='store_true',
185               help='Disable Link-Time Optimization for fast')
186AddLocalOption('--update-ref', dest='update_ref', action='store_true',
187               help='Update test reference outputs')
188AddLocalOption('--verbose', dest='verbose', action='store_true',
189               help='Print full tool command lines')
190AddLocalOption('--without-python', dest='without_python',
191               action='store_true',
192               help='Build without Python configuration support')
193AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
194               action='store_true',
195               help='Disable linking against tcmalloc')
196AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
197               help='Build with Undefined Behavior Sanitizer if available')
198AddLocalOption('--with-asan', dest='with_asan', action='store_true',
199               help='Build with Address Sanitizer if available')
200
201termcap = get_termcap(GetOption('use_colors'))
202
203########################################################################
204#
205# Set up the main build environment.
206#
207########################################################################
208
209# export TERM so that clang reports errors in color
210use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
211                 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
212                 'PYTHONPATH', 'RANLIB', 'SWIG', 'TERM' ])
213
214use_prefixes = [
215    "ASAN_",           # address sanitizer symbolizer path and settings
216    "CCACHE_",         # ccache (caching compiler wrapper) configuration
217    "CCC_",            # clang static analyzer configuration
218    "DISTCC_",         # distcc (distributed compiler wrapper) configuration
219    "INCLUDE_SERVER_", # distcc pump server settings
220    "M5",              # M5 configuration (e.g., path to kernels)
221    ]
222
223use_env = {}
224for key,val in sorted(os.environ.iteritems()):
225    if key in use_vars or \
226            any([key.startswith(prefix) for prefix in use_prefixes]):
227        use_env[key] = val
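# For illustration only (hypothetical settings): with the lists above,
# variables such as CCACHE_DIR=/tmp/ccache or M5_PATH=/dist/m5/system
# would be passed through to the build environment, while unrelated
# variables like EDITOR would be dropped.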
228
# Tell scons to avoid implicit command dependencies; this prevents
# issues with the param wrappers being compiled twice (see
# http://scons.tigris.org/issues/show_bug.cgi?id=2811)
232main = Environment(ENV=use_env, IMPLICIT_COMMAND_DEPENDENCIES=0)
233main.Decider('MD5-timestamp')
234main.root = Dir(".")         # The current directory (where this file lives).
235main.srcdir = Dir("src")     # The source directory
236
237main_dict_keys = main.Dictionary().keys()
238
239# Check that we have a C/C++ compiler
240if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
241    print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
242    Exit(1)
243
244# Check that swig is present
245if not 'SWIG' in main_dict_keys:
246    print "swig is not installed (package swig on Ubuntu and RedHat)"
247    Exit(1)
248
# add the useful python code to PYTHONPATH so it can be used by
# subprocesses as well
251main.AppendENVPath('PYTHONPATH', extra_python_paths)
252
253########################################################################
254#
255# Mercurial Stuff.
256#
257# If the gem5 directory is a mercurial repository, we should do some
258# extra things.
259#
260########################################################################
261
262hgdir = main.root.Dir(".hg")
263
264mercurial_style_message = """
265You're missing the gem5 style hook, which automatically checks your code
266against the gem5 style rules on hg commit and qrefresh commands.  This
267script will now install the hook in your .hg/hgrc file.
268Press enter to continue, or ctrl-c to abort: """
269
270mercurial_style_upgrade_message = """
271Your Mercurial style hooks are not up-to-date. This script will now
272try to automatically update them. A backup of your hgrc will be saved
273in .hg/hgrc.old.
274Press enter to continue, or ctrl-c to abort: """
275
276mercurial_style_hook = """
277# The following lines were automatically added by gem5/SConstruct
278# to provide the gem5 style-checking hooks
279[extensions]
280hgstyle = %s/util/hgstyle.py
281
282[hooks]
283pretxncommit.style = python:hgstyle.check_style
284pre-qrefresh.style = python:hgstyle.check_style
285# End of SConstruct additions
286
287""" % (main.root.abspath)
288
289mercurial_lib_not_found = """
290Mercurial libraries cannot be found, ignoring style hook.  If
291you are a gem5 developer, please fix this and run the style
292hook. It is important.
293"""
294
295# Check for style hook and prompt for installation if it's not there.
296# Skip this if --ignore-style was specified, there's no .hg dir to
297# install a hook in, or there's no interactive terminal to prompt.
298if not GetOption('ignore_style') and hgdir.exists() and sys.stdin.isatty():
299    style_hook = True
300    style_hooks = tuple()
301    hgrc = hgdir.File('hgrc')
302    hgrc_old = hgdir.File('hgrc.old')
303    try:
304        from mercurial import ui
305        ui = ui.ui()
306        ui.readconfig(hgrc.abspath)
307        style_hooks = (ui.config('hooks', 'pretxncommit.style', None),
308                       ui.config('hooks', 'pre-qrefresh.style', None))
309        style_hook = all(style_hooks)
310        style_extension = ui.config('extensions', 'style', None)
311    except ImportError:
312        print mercurial_lib_not_found
313
314    if "python:style.check_style" in style_hooks:
315        # Try to upgrade the style hooks
316        print mercurial_style_upgrade_message
317        # continue unless user does ctrl-c/ctrl-d etc.
318        try:
319            raw_input()
320        except:
321            print "Input exception, exiting scons.\n"
322            sys.exit(1)
323        shutil.copyfile(hgrc.abspath, hgrc_old.abspath)
324        re_style_hook = re.compile(r"^([^=#]+)\.style\s*=\s*([^#\s]+).*")
325        re_style_extension = re.compile("style\s*=\s*([^#\s]+).*")
326        with open(hgrc_old.abspath, 'r') as old, \
327             open(hgrc.abspath, 'w') as new:
328
329            for l in old:
330                m_hook = re_style_hook.match(l)
331                m_ext = re_style_extension.match(l)
332                if m_hook:
333                    hook, check = m_hook.groups()
334                    if check != "python:style.check_style":
335                        print "Warning: %s.style is using a non-default " \
336                            "checker: %s" % (hook, check)
337                    if hook not in ("pretxncommit", "pre-qrefresh"):
338                        print "Warning: Updating unknown style hook: %s" % hook
339
340                    l = "%s.style = python:hgstyle.check_style\n" % hook
341                elif m_ext and m_ext.group(1) == style_extension:
342                    l = "hgstyle = %s/util/hgstyle.py\n" % main.root.abspath
343
344                new.write(l)
345    elif not style_hook:
346        print mercurial_style_message,
347        # continue unless user does ctrl-c/ctrl-d etc.
348        try:
349            raw_input()
350        except:
351            print "Input exception, exiting scons.\n"
352            sys.exit(1)
353        hgrc_path = '%s/.hg/hgrc' % main.root.abspath
354        print "Adding style hook to", hgrc_path, "\n"
355        try:
356            with open(hgrc_path, 'a') as f:
357                f.write(mercurial_style_hook)
358        except:
359            print "Error updating", hgrc_path
360            sys.exit(1)
361
362
363###################################################
364#
365# Figure out which configurations to set up based on the path(s) of
366# the target(s).
367#
368###################################################
369
370# Find default configuration & binary.
371Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))
372
373# helper function: find last occurrence of element in list
374def rfind(l, elt, offs = -1):
375    for i in range(len(l)+offs, 0, -1):
376        if l[i] == elt:
377            return i
378    raise ValueError, "element not found"
379
380# Take a list of paths (or SCons Nodes) and return a list with all
381# paths made absolute and ~-expanded.  Paths will be interpreted
382# relative to the launch directory unless a different root is provided
383def makePathListAbsolute(path_list, root=GetLaunchDir()):
384    return [abspath(joinpath(root, expanduser(str(p))))
385            for p in path_list]
386
# Each target must have 'build' in the interior of the path; the
# directory just below 'build' determines the build parameters.  For
# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
# recognize that ALPHA_SE specifies the configuration because it
# follows 'build' in the target path.
392
393# The funky assignment to "[:]" is needed to replace the list contents
394# in place rather than reassign the symbol to a new list, which
395# doesn't work (obviously!).
396BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)
397
398# Generate a list of the unique build roots and configs that the
399# collected targets reference.
400variant_paths = []
401build_root = None
402for t in BUILD_TARGETS:
403    path_dirs = t.split('/')
404    try:
405        build_top = rfind(path_dirs, 'build', -2)
406    except:
407        print "Error: no non-leaf 'build' dir found on target path", t
408        Exit(1)
409    this_build_root = joinpath('/',*path_dirs[:build_top+1])
410    if not build_root:
411        build_root = this_build_root
412    else:
413        if this_build_root != build_root:
414            print "Error: build targets not under same build root\n"\
415                  "  %s\n  %s" % (build_root, this_build_root)
416            Exit(1)
417    variant_path = joinpath('/',*path_dirs[:build_top+2])
418    if variant_path not in variant_paths:
419        variant_paths.append(variant_path)
420
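# Illustrative example (hypothetical paths): for BUILD_TARGETS of
# ['/work/gem5/build/ALPHA/gem5.opt', '/work/gem5/build/X86/gem5.debug'],
# build_root would be '/work/gem5/build' and variant_paths would be
# ['/work/gem5/build/ALPHA', '/work/gem5/build/X86'].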
421# Make sure build_root exists (might not if this is the first build there)
422if not isdir(build_root):
423    mkdir(build_root)
424main['BUILDROOT'] = build_root
425
426Export('main')
427
428main.SConsignFile(joinpath(build_root, "sconsign"))
429
430# Default duplicate option is to use hard links, but this messes up
431# when you use emacs to edit a file in the target dir, as emacs moves
432# file to file~ then copies to file, breaking the link.  Symbolic
433# (soft) links work better.
434main.SetOption('duplicate', 'soft-copy')
435
436#
437# Set up global sticky variables... these are common to an entire build
438# tree (not specific to a particular build like ALPHA_SE)
439#
440
441global_vars_file = joinpath(build_root, 'variables.global')
442
443global_vars = Variables(global_vars_file, args=ARGUMENTS)
444
445global_vars.AddVariables(
446    ('CC', 'C compiler', environ.get('CC', main['CC'])),
447    ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
448    ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
449    ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
450    ('BATCH', 'Use batch pool for build and tests', False),
451    ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
452    ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
453    ('EXTRAS', 'Add extra directories to the compilation', '')
454    )
455
456# Update main environment with values from ARGUMENTS & global_vars_file
457global_vars.Update(main)
458help_texts["global_vars"] += global_vars.GenerateHelpText(main)
459
460# Save sticky variable settings back to current variables file
461global_vars.Save(global_vars_file, main)
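# Illustrative usage (hypothetical values): running, e.g.,
#   scons CC=gcc-4.9 CXX=g++-4.9 EXTRAS=/work/my_extras build/ALPHA/gem5.opt
# records those settings in build/variables.global, so later plain
# 'scons build/ALPHA/gem5.opt' invocations reuse them until overridden.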
462
# Parse the EXTRAS variable to build a list of all the directories
# where we'll look for sources etc.  This list is exported as
# extras_dir_list.
465base_dir = main.srcdir.abspath
466if main['EXTRAS']:
467    extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
468else:
469    extras_dir_list = []
470
471Export('base_dir')
472Export('extras_dir_list')
473
474# the ext directory should be on the #includes path
475main.Append(CPPPATH=[Dir('ext')])
476
477def strip_build_path(path, env):
478    path = str(path)
479    variant_base = env['BUILDROOT'] + os.path.sep
480    if path.startswith(variant_base):
481        path = path[len(variant_base):]
482    elif path.startswith('build/'):
483        path = path[6:]
484    return path
485
486# Generate a string of the form:
487#   common/path/prefix/src1, src2 -> tgt1, tgt2
488# to print while building.
489class Transform(object):
490    # all specific color settings should be here and nowhere else
491    tool_color = termcap.Normal
492    pfx_color = termcap.Yellow
493    srcs_color = termcap.Yellow + termcap.Bold
494    arrow_color = termcap.Blue + termcap.Bold
495    tgts_color = termcap.Yellow + termcap.Bold
496
497    def __init__(self, tool, max_sources=99):
498        self.format = self.tool_color + (" [%8s] " % tool) \
499                      + self.pfx_color + "%s" \
500                      + self.srcs_color + "%s" \
501                      + self.arrow_color + " -> " \
502                      + self.tgts_color + "%s" \
503                      + termcap.Normal
504        self.max_sources = max_sources
505
506    def __call__(self, target, source, env, for_signature=None):
507        # truncate source list according to max_sources param
508        source = source[0:self.max_sources]
509        def strip(f):
510            return strip_build_path(str(f), env)
511        if len(source) > 0:
512            srcs = map(strip, source)
513        else:
514            srcs = ['']
515        tgts = map(strip, target)
516        # surprisingly, os.path.commonprefix is a dumb char-by-char string
517        # operation that has nothing to do with paths.
518        com_pfx = os.path.commonprefix(srcs + tgts)
519        com_pfx_len = len(com_pfx)
520        if com_pfx:
521            # do some cleanup and sanity checking on common prefix
522            if com_pfx[-1] == ".":
523                # prefix matches all but file extension: ok
524                # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
525                com_pfx = com_pfx[0:-1]
526            elif com_pfx[-1] == "/":
527                # common prefix is directory path: OK
528                pass
529            else:
530                src0_len = len(srcs[0])
531                tgt0_len = len(tgts[0])
532                if src0_len == com_pfx_len:
533                    # source is a substring of target, OK
534                    pass
535                elif tgt0_len == com_pfx_len:
536                    # target is a substring of source, need to back up to
537                    # avoid empty string on RHS of arrow
538                    sep_idx = com_pfx.rfind(".")
539                    if sep_idx != -1:
540                        com_pfx = com_pfx[0:sep_idx]
541                    else:
542                        com_pfx = ''
543                elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
544                    # still splitting at file extension: ok
545                    pass
546                else:
547                    # probably a fluke; ignore it
548                    com_pfx = ''
549        # recalculate length in case com_pfx was modified
550        com_pfx_len = len(com_pfx)
551        def fmt(files):
552            f = map(lambda s: s[com_pfx_len:], files)
553            return ', '.join(f)
554        return self.format % (com_pfx, fmt(srcs), fmt(tgts))
555
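# Rough illustration of the abbreviated output (paths are illustrative):
# a C++ compile of build/ALPHA/base/statistics.cc would be shown roughly as
#   [     CXX] ALPHA/base/statistics.cc -> .o
# where the common-prefix logic above collapses the shared part of the
# source and target names.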
556Export('Transform')
557
558# enable the regression script to use the termcap
559main['TERMCAP'] = termcap
560
561if GetOption('verbose'):
562    def MakeAction(action, string, *args, **kwargs):
563        return Action(action, *args, **kwargs)
564else:
565    MakeAction = Action
566    main['CCCOMSTR']        = Transform("CC")
567    main['CXXCOMSTR']       = Transform("CXX")
568    main['ASCOMSTR']        = Transform("AS")
569    main['SWIGCOMSTR']      = Transform("SWIG")
570    main['ARCOMSTR']        = Transform("AR", 0)
571    main['LINKCOMSTR']      = Transform("LINK", 0)
572    main['RANLIBCOMSTR']    = Transform("RANLIB", 0)
573    main['M4COMSTR']        = Transform("M4")
574    main['SHCCCOMSTR']      = Transform("SHCC")
575    main['SHCXXCOMSTR']     = Transform("SHCXX")
576Export('MakeAction')
577
578# Initialize the Link-Time Optimization (LTO) flags
579main['LTO_CCFLAGS'] = []
580main['LTO_LDFLAGS'] = []
581
582# According to the readme, tcmalloc works best if the compiler doesn't
583# assume that we're using the builtin malloc and friends. These flags
584# are compiler-specific, so we need to set them after we detect which
585# compiler we're using.
586main['TCMALLOC_CCFLAGS'] = []
587
588CXX_version = readCommand([main['CXX'],'--version'], exception=False)
589CXX_V = readCommand([main['CXX'],'-V'], exception=False)
590
591main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
592main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
593if main['GCC'] + main['CLANG'] > 1:
594    print 'Error: How can we have two at the same time?'
595    Exit(1)
596
597# Set up default C++ compiler flags
598if main['GCC'] or main['CLANG']:
599    # As gcc and clang share many flags, do the common parts here
600    main.Append(CCFLAGS=['-pipe'])
601    main.Append(CCFLAGS=['-fno-strict-aliasing'])
602    # Enable -Wall and -Wextra and then disable the few warnings that
603    # we consistently violate
604    main.Append(CCFLAGS=['-Wall', '-Wundef', '-Wextra',
605                         '-Wno-sign-compare', '-Wno-unused-parameter'])
606    # We always compile using C++11
607    main.Append(CXXFLAGS=['-std=c++11'])
608else:
609    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
610    print "Don't know what compiler options to use for your compiler."
611    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
612    print termcap.Yellow + '       version:' + termcap.Normal,
613    if not CXX_version:
614        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
615               termcap.Normal
616    else:
617        print CXX_version.replace('\n', '<nl>')
618    print "       If you're trying to use a compiler other than GCC"
619    print "       or clang, there appears to be something wrong with your"
620    print "       environment."
621    print "       "
622    print "       If you are trying to use a compiler other than those listed"
623    print "       above you will need to ease fix SConstruct and "
624    print "       src/SConscript to support that compiler."
625    Exit(1)
626
627if main['GCC']:
628    # Check for a supported version of gcc. >= 4.7 is chosen for its
629    # level of c++11 support. See
630    # http://gcc.gnu.org/projects/cxx0x.html for details.
631    gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
632    if compareVersions(gcc_version, "4.7") < 0:
633        print 'Error: gcc version 4.7 or newer required.'
634        print '       Installed version:', gcc_version
635        Exit(1)
636
637    main['GCC_VERSION'] = gcc_version
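    # compareVersions() behaves like a three-way comparison on dotted
    # version strings; e.g. (illustrative) compareVersions('4.6.3', '4.7')
    # is negative while compareVersions('4.9.2', '4.7') is positive, which
    # is what the '< 0' test above relies on.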
638
639    # gcc from version 4.8 and above generates "rep; ret" instructions
640    # to avoid performance penalties on certain AMD chips. Older
641    # assemblers detect this as an error, "Error: expecting string
642    # instruction after `rep'"
643    if compareVersions(gcc_version, "4.8") > 0:
644        as_version_raw = readCommand([main['AS'], '-v', '/dev/null'],
645                                     exception=False).split()
646
647        # version strings may contain extra distro-specific
648        # qualifiers, so play it safe and keep only what comes before
649        # the first hyphen
650        as_version = as_version_raw[-1].split('-')[0] if as_version_raw \
651            else None
652
653        if not as_version or compareVersions(as_version, "2.23") < 0:
654            print termcap.Yellow + termcap.Bold + \
                'Warning: This combination of gcc and binutils has' + \
                ' known incompatibilities.\n' + \
657                '         If you encounter build problems, please update ' + \
658                'binutils to 2.23.' + \
659                termcap.Normal
660
    # Make sure we warn if the user has requested to compile with the
    # Undefined Behavior Sanitizer and this version of gcc does not
    # support it.
664    if GetOption('with_ubsan') and \
665            compareVersions(gcc_version, '4.9') < 0:
666        print termcap.Yellow + termcap.Bold + \
667            'Warning: UBSan is only supported using gcc 4.9 and later.' + \
668            termcap.Normal
669
670    # Add the appropriate Link-Time Optimization (LTO) flags
671    # unless LTO is explicitly turned off. Note that these flags
672    # are only used by the fast target.
673    if not GetOption('no_lto'):
674        # Pass the LTO flag when compiling to produce GIMPLE
675        # output, we merely create the flags here and only append
676        # them later
677        main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
678
679        # Use the same amount of jobs for LTO as we are running
680        # scons with
681        main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
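        # For example (illustrative), 'scons -j 8 build/ARM/gem5.fast'
        # would end up passing '-flto=8' to both the compile and link
        # steps of the fast target.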
682
683    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
684                                  '-fno-builtin-realloc', '-fno-builtin-free'])
685
686elif main['CLANG']:
687    # Check for a supported version of clang, >= 3.1 is needed to
688    # support similar features as gcc 4.7. See
689    # http://clang.llvm.org/cxx_status.html for details
690    clang_version_re = re.compile(".* version (\d+\.\d+)")
691    clang_version_match = clang_version_re.search(CXX_version)
692    if (clang_version_match):
693        clang_version = clang_version_match.groups()[0]
694        if compareVersions(clang_version, "3.1") < 0:
695            print 'Error: clang version 3.1 or newer required.'
696            print '       Installed version:', clang_version
697            Exit(1)
698    else:
699        print 'Error: Unable to determine clang version.'
700        Exit(1)
701
    # clang has a few additional warnings that we disable: extraneous
    # parentheses are allowed because of Ruby's printing of the AST,
    # and self-assignments are allowed since the generated CPU code
    # relies on them.
706    main.Append(CCFLAGS=['-Wno-parentheses',
707                         '-Wno-self-assign',
708                         # Some versions of libstdc++ (4.8?) seem to
709                         # use struct hash and class hash
710                         # interchangeably.
711                         '-Wno-mismatched-tags',
712                         ])
713
714    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])
715
    # On Mac OS X/Darwin we also need to use libc++ (part of Xcode) as
    # opposed to libstdc++, as the latter is dated.
718    if sys.platform == "darwin":
719        main.Append(CXXFLAGS=['-stdlib=libc++'])
720        main.Append(LIBS=['c++'])
721
722else:
723    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
724    print "Don't know what compiler options to use for your compiler."
725    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
726    print termcap.Yellow + '       version:' + termcap.Normal,
727    if not CXX_version:
728        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
729               termcap.Normal
730    else:
731        print CXX_version.replace('\n', '<nl>')
732    print "       If you're trying to use a compiler other than GCC"
733    print "       or clang, there appears to be something wrong with your"
734    print "       environment."
735    print "       "
736    print "       If you are trying to use a compiler other than those listed"
737    print "       above you will need to ease fix SConstruct and "
738    print "       src/SConscript to support that compiler."
739    Exit(1)
740
741# Set up common yacc/bison flags (needed for Ruby)
742main['YACCFLAGS'] = '-d'
743main['YACCHXXFILESUFFIX'] = '.hh'
744
# Do this after we save the settings back, or else we'll tack on an
# extra 'qdo' every time we run scons.
747if main['BATCH']:
748    main['CC']     = main['BATCH_CMD'] + ' ' + main['CC']
749    main['CXX']    = main['BATCH_CMD'] + ' ' + main['CXX']
750    main['AS']     = main['BATCH_CMD'] + ' ' + main['AS']
751    main['AR']     = main['BATCH_CMD'] + ' ' + main['AR']
752    main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']
753
754if sys.platform == 'cygwin':
755    # cygwin has some header file issues...
756    main.Append(CCFLAGS=["-Wno-uninitialized"])
757
758# Check for the protobuf compiler
759protoc_version = readCommand([main['PROTOC'], '--version'],
760                             exception='').split()
761
762# First two words should be "libprotoc x.y.z"
763if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
764    print termcap.Yellow + termcap.Bold + \
765        'Warning: Protocol buffer compiler (protoc) not found.\n' + \
766        '         Please install protobuf-compiler for tracing support.' + \
767        termcap.Normal
768    main['PROTOC'] = False
769else:
770    # Based on the availability of the compress stream wrappers,
771    # require 2.1.0
772    min_protoc_version = '2.1.0'
773    if compareVersions(protoc_version[1], min_protoc_version) < 0:
774        print termcap.Yellow + termcap.Bold + \
775            'Warning: protoc version', min_protoc_version, \
776            'or newer required.\n' + \
777            '         Installed version:', protoc_version[1], \
778            termcap.Normal
779        main['PROTOC'] = False
780    else:
        # Attempt to determine the appropriate include path and
        # library path using pkg-config, which means we also need to
        # check for pkg-config itself. Note that it is possible to use
        # protobuf without the involvement of pkg-config. Later on we
        # run a library configuration check, and at that point the
        # test will fail if libprotobuf cannot be found.
787        if readCommand(['pkg-config', '--version'], exception=''):
788            try:
789                # Attempt to establish what linking flags to add for protobuf
790                # using pkg-config
791                main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
792            except:
793                print termcap.Yellow + termcap.Bold + \
794                    'Warning: pkg-config could not get protobuf flags.' + \
795                    termcap.Normal
796
797# Check for SWIG
798if not main.has_key('SWIG'):
799    print 'Error: SWIG utility not found.'
800    print '       Please install (see http://www.swig.org) and retry.'
801    Exit(1)
802
803# Check for appropriate SWIG version
804swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
805# First 3 words should be "SWIG Version x.y.z"
806if len(swig_version) < 3 or \
807        swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
808    print 'Error determining SWIG version.'
809    Exit(1)
810
811min_swig_version = '2.0.4'
812if compareVersions(swig_version[2], min_swig_version) < 0:
813    print 'Error: SWIG version', min_swig_version, 'or newer required.'
814    print '       Installed version:', swig_version[2]
815    Exit(1)
816
817# Check for known incompatibilities. The standard library shipped with
818# gcc >= 4.9 does not play well with swig versions prior to 3.0
819if main['GCC'] and compareVersions(gcc_version, '4.9') >= 0 and \
820        compareVersions(swig_version[2], '3.0') < 0:
821    print termcap.Yellow + termcap.Bold + \
        'Warning: This combination of gcc and swig has' + \
        ' known incompatibilities.\n' + \
824        '         If you encounter build problems, please update ' + \
825        'swig to 3.0 or later.' + \
826        termcap.Normal
827
828# Set up SWIG flags & scanner
829swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
830main.Append(SWIGFLAGS=swig_flags)
831
832# Check for 'timeout' from GNU coreutils. If present, regressions will
833# be run with a time limit. We require version 8.13 since we rely on
834# support for the '--foreground' option.
835timeout_lines = readCommand(['timeout', '--version'],
836                            exception='').splitlines()
837# Get the first line and tokenize it
838timeout_version = timeout_lines[0].split() if timeout_lines else []
839main['TIMEOUT'] =  timeout_version and \
840    compareVersions(timeout_version[-1], '8.13') >= 0
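# Illustrative example: a first line such as 'timeout (GNU coreutils) 8.25'
# would yield timeout_version[-1] == '8.25', so main['TIMEOUT'] would be
# True; if 'timeout' is missing entirely, timeout_lines is empty and the
# TIMEOUT setting ends up false.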
841
# filter out all existing swig scanners; they mess up the dependency
# tracking for some reason
844scanners = []
845for scanner in main['SCANNERS']:
846    skeys = scanner.skeys
847    if skeys == '.i':
848        continue
849
850    if isinstance(skeys, (list, tuple)) and '.i' in skeys:
851        continue
852
853    scanners.append(scanner)
854
855# add the new swig scanner that we like better
856from SCons.Scanner import ClassicCPP as CPPScanner
857swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
858scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))
859
860# replace the scanners list that has what we want
861main['SCANNERS'] = scanners
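# As a rough illustration, the pattern above makes the scanner pick up
# dependencies from lines in .i files such as:
#   %include "foo.i"
#   %import <bar.i>
#   #include "baz.hh"
# (file names here are hypothetical), so edits to those files trigger
# re-running SWIG.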
862
863# Add a custom Check function to test for structure members.
864def CheckMember(context, include, decl, member, include_quotes="<>"):
865    context.Message("Checking for member %s in %s..." %
866                    (member, decl))
867    text = """
868#include %(header)s
869int main(){
870  %(decl)s test;
871  (void)test.%(member)s;
872  return 0;
873};
874""" % { "header" : include_quotes[0] + include + include_quotes[1],
875        "decl" : decl,
876        "member" : member,
877        }
878
879    ret = context.TryCompile(text, extension=".cc")
880    context.Result(ret)
881    return ret
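# Illustrative use (this check actually appears further below):
#   conf.CheckMember('linux/perf_event.h', 'struct perf_event_attr',
#                    'exclude_host')
# compiles a tiny program that declares a 'struct perf_event_attr' and
# touches its 'exclude_host' field, and reports whether that compiled.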
882
883# Platform-specific configuration.  Note again that we assume that all
884# builds under a given build root run on the same host platform.
885conf = Configure(main,
886                 conf_dir = joinpath(build_root, '.scons_config'),
887                 log_file = joinpath(build_root, 'scons_config.log'),
888                 custom_tests = {
889        'CheckMember' : CheckMember,
890        })
891
892# Check if we should compile a 64 bit binary on Mac OS X/Darwin
893try:
894    import platform
895    uname = platform.uname()
896    if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
897        if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
898            main.Append(CCFLAGS=['-arch', 'x86_64'])
899            main.Append(CFLAGS=['-arch', 'x86_64'])
900            main.Append(LINKFLAGS=['-arch', 'x86_64'])
901            main.Append(ASFLAGS=['-arch', 'x86_64'])
902except:
903    pass
904
905# Recent versions of scons substitute a "Null" object for Configure()
906# when configuration isn't necessary, e.g., if the "--help" option is
# present.  Unfortunately this Null object always returns false,
908# breaking all our configuration checks.  We replace it with our own
909# more optimistic null object that returns True instead.
910if not conf:
911    def NullCheck(*args, **kwargs):
912        return True
913
914    class NullConf:
915        def __init__(self, env):
916            self.env = env
917        def Finish(self):
918            return self.env
919        def __getattr__(self, mname):
920            return NullCheck
921
922    conf = NullConf(main)
923
924# Cache build files in the supplied directory.
925if main['M5_BUILD_CACHE']:
926    print 'Using build cache located at', main['M5_BUILD_CACHE']
927    CacheDir(main['M5_BUILD_CACHE'])
928
929if not GetOption('without_python'):
930    # Find Python include and library directories for embedding the
931    # interpreter. We rely on python-config to resolve the appropriate
932    # includes and linker flags. ParseConfig does not seem to understand
933    # the more exotic linker flags such as -Xlinker and -export-dynamic so
934    # we add them explicitly below. If you want to link in an alternate
935    # version of python, see above for instructions on how to invoke
936    # scons with the appropriate PATH set.
937    #
938    # First we check if python2-config exists, else we use python-config
939    python_config = readCommand(['which', 'python2-config'],
940                                exception='').strip()
941    if not os.path.exists(python_config):
942        python_config = readCommand(['which', 'python-config'],
943                                    exception='').strip()
944    py_includes = readCommand([python_config, '--includes'],
945                              exception='').split()
946    # Strip the -I from the include folders before adding them to the
947    # CPPPATH
948    main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))
949
950    # Read the linker flags and split them into libraries and other link
951    # flags. The libraries are added later through the call the CheckLib.
952    py_ld_flags = readCommand([python_config, '--ldflags'],
953        exception='').split()
954    py_libs = []
955    for lib in py_ld_flags:
956         if not lib.startswith('-l'):
957             main.Append(LINKFLAGS=[lib])
958         else:
959             lib = lib[2:]
960             if lib not in py_libs:
961                 py_libs.append(lib)
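    # Rough illustration (actual output varies by installation):
    # 'python-config --includes' might print
    #   -I/usr/include/python2.7 -I/usr/include/python2.7
    # giving CPPPATH entries like '/usr/include/python2.7', while
    # '--ldflags' might print '-lpython2.7 -lpthread -ldl -lutil -lm',
    # of which the '-l' entries end up in py_libs and the rest in
    # LINKFLAGS.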
962
963    # verify that this stuff works
964    if not conf.CheckHeader('Python.h', '<>'):
965        print "Error: can't find Python.h header in", py_includes
966        print "Install Python headers (package python-dev on Ubuntu and RedHat)"
967        Exit(1)
968
969    for lib in py_libs:
970        if not conf.CheckLib(lib):
971            print "Error: can't find library %s required by python" % lib
972            Exit(1)
973
974# On Solaris you need to use libsocket for socket ops
975if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
976   if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
977       print "Can't find library with socket calls (e.g. accept())"
978       Exit(1)
979
980# Check for zlib.  If the check passes, libz will be automatically
981# added to the LIBS environment variable.
982if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
983    print 'Error: did not find needed zlib compression library '\
984          'and/or zlib.h header file.'
985    print '       Please install zlib and try again.'
986    Exit(1)
987
# If we have the protobuf compiler, also make sure we have the
# development libraries. If the check passes, libprotobuf will be
# automatically added to the LIBS environment variable. After
# this, we can use the HAVE_PROTOBUF flag to determine whether both
# protoc and libprotobuf are available.
993main['HAVE_PROTOBUF'] = main['PROTOC'] and \
994    conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
995                            'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')
996
997# If we have the compiler but not the library, print another warning.
998if main['PROTOC'] and not main['HAVE_PROTOBUF']:
    print termcap.Yellow + termcap.Bold + \
        'Warning: did not find protocol buffer library and/or headers.\n' + \
        '       Please install libprotobuf-dev for tracing support.' + \
        termcap.Normal
1003
1004# Check for librt.
1005have_posix_clock = \
1006    conf.CheckLibWithHeader(None, 'time.h', 'C',
1007                            'clock_nanosleep(0,0,NULL,NULL);') or \
1008    conf.CheckLibWithHeader('rt', 'time.h', 'C',
1009                            'clock_nanosleep(0,0,NULL,NULL);')
1010
1011have_posix_timers = \
1012    conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
1013                            'timer_create(CLOCK_MONOTONIC, NULL, NULL);')
1014
1015if not GetOption('without_tcmalloc'):
1016    if conf.CheckLib('tcmalloc'):
1017        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1018    elif conf.CheckLib('tcmalloc_minimal'):
1019        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1020    else:
1021        print termcap.Yellow + termcap.Bold + \
1022              "You can get a 12% performance improvement by "\
1023              "installing tcmalloc (libgoogle-perftools-dev package "\
1024              "on Ubuntu or RedHat)." + termcap.Normal
1025
1026
1027# Detect back trace implementations. The last implementation in the
1028# list will be used by default.
1029backtrace_impls = [ "none" ]
1030
1031if conf.CheckLibWithHeader(None, 'execinfo.h', 'C',
1032                           'backtrace_symbols_fd((void*)0, 0, 0);'):
1033    backtrace_impls.append("glibc")
1034
1035if backtrace_impls[-1] == "none":
1036    default_backtrace_impl = "none"
1037    print termcap.Yellow + termcap.Bold + \
1038        "No suitable back trace implementation found." + \
1039        termcap.Normal
1040
1041if not have_posix_clock:
1042    print "Can't find library for POSIX clocks."
1043
1044# Check for <fenv.h> (C99 FP environment control)
1045have_fenv = conf.CheckHeader('fenv.h', '<>')
1046if not have_fenv:
1047    print "Warning: Header file <fenv.h> not found."
1048    print "         This host has no IEEE FP rounding mode control."
1049
# Check if we should enable KVM-based hardware virtualization. The API
# we rely on has existed since version 2.6.36 of the kernel, but somehow
# the KVM_API_VERSION does not reflect the change. We test for one of
# the types as a fallback.
1054have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
1055if not have_kvm:
1056    print "Info: Compatible header file <linux/kvm.h> not found, " \
1057        "disabling KVM support."
1058
1059# x86 needs support for xsave. We test for the structure here since we
1060# won't be able to run new tests by the time we know which ISA we're
1061# targeting.
1062have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
1063                                    '#include <linux/kvm.h>') != 0
1064
1065# Check if the requested target ISA is compatible with the host
1066def is_isa_kvm_compatible(isa):
1067    try:
1068        import platform
1069        host_isa = platform.machine()
1070    except:
1071        print "Warning: Failed to determine host ISA."
1072        return False
1073
1074    if not have_posix_timers:
1075        print "Warning: Can not enable KVM, host seems to lack support " \
1076            "for POSIX timers"
1077        return False
1078
1079    if isa == "arm":
1080        return host_isa in ( "armv7l", "aarch64" )
1081    elif isa == "x86":
1082        if host_isa != "x86_64":
1083            return False
1084
1085        if not have_kvm_xsave:
1086            print "KVM on x86 requires xsave support in kernel headers."
1087            return False
1088
1089        return True
1090    else:
1091        return False
1092
1093
1094# Check if the exclude_host attribute is available. We want this to
1095# get accurate instruction counts in KVM.
1096main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
1097    'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')
1098
1099
1100######################################################################
1101#
1102# Finish the configuration
1103#
1104main = conf.Finish()
1105
1106######################################################################
1107#
1108# Collect all non-global variables
1109#
1110
1111# Define the universe of supported ISAs
1112all_isa_list = [ ]
1113all_gpu_isa_list = [ ]
1114Export('all_isa_list')
1115Export('all_gpu_isa_list')
1116
1117class CpuModel(object):
1118    '''The CpuModel class encapsulates everything the ISA parser needs to
1119    know about a particular CPU model.'''
1120
1121    # Dict of available CPU model objects.  Accessible as CpuModel.dict.
1122    dict = {}
1123
1124    # Constructor.  Automatically adds models to CpuModel.dict.
1125    def __init__(self, name, default=False):
1126        self.name = name           # name of model
1127
1128        # This cpu is enabled by default
1129        self.default = default
1130
1131        # Add self to dict
1132        if name in CpuModel.dict:
1133            raise AttributeError, "CpuModel '%s' already registered" % name
1134        CpuModel.dict[name] = self
1135
1136Export('CpuModel')
1137
1138# Sticky variables get saved in the variables file so they persist from
1139# one invocation to the next (unless overridden, in which case the new
1140# value becomes sticky).
1141sticky_vars = Variables(args=ARGUMENTS)
1142Export('sticky_vars')
1143
1144# Sticky variables that should be exported
1145export_vars = []
1146Export('export_vars')
1147
1148# For Ruby
1149all_protocols = []
1150Export('all_protocols')
1151protocol_dirs = []
1152Export('protocol_dirs')
1153slicc_includes = []
1154Export('slicc_includes')
1155
# Walk the tree and execute all SConsopts scripts that will add to the
# above variables.
1158if GetOption('verbose'):
1159    print "Reading SConsopts"
1160for bdir in [ base_dir ] + extras_dir_list:
1161    if not isdir(bdir):
1162        print "Error: directory '%s' does not exist" % bdir
1163        Exit(1)
1164    for root, dirs, files in os.walk(bdir):
1165        if 'SConsopts' in files:
1166            if GetOption('verbose'):
1167                print "Reading", joinpath(root, 'SConsopts')
1168            SConscript(joinpath(root, 'SConsopts'))
1169
1170all_isa_list.sort()
1171all_gpu_isa_list.sort()
1172
1173sticky_vars.AddVariables(
1174    EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
1175    EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list),
1176    ListVariable('CPU_MODELS', 'CPU models',
1177                 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
1178                 sorted(CpuModel.dict.keys())),
1179    BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
1180                 False),
1181    BoolVariable('SS_COMPATIBLE_FP',
1182                 'Make floating-point results compatible with SimpleScalar',
1183                 False),
1184    BoolVariable('USE_SSE2',
1185                 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
1186                 False),
1187    BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
1188    BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
1189    BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability', False),
1190    BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models', have_kvm),
1191    BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
1192    EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
1193                  all_protocols),
1194    EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
1195                 backtrace_impls[-1], backtrace_impls)
1196    )
1197
1198# These variables get exported to #defines in config/*.hh (see src/SConscript).
1199export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
1200                'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'PROTOCOL',
1201                'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST']
1202
1203###################################################
1204#
1205# Define a SCons builder for configuration flag headers.
1206#
1207###################################################
1208
# This function generates a config header file that #defines the
# variable symbol to the current variable setting (0 or 1 for booleans,
# or a quoted string).  The source operands are the name of the variable
# and a Value node containing the value of the variable.
1213def build_config_file(target, source, env):
1214    (variable, value) = [s.get_contents() for s in source]
1215    f = file(str(target[0]), 'w')
1216    print >> f, '#define', variable, value
1217    f.close()
1218    return None
1219
1220# Combine the two functions into a scons Action object.
1221config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))
1222
1223# The emitter munges the source & target node lists to reflect what
1224# we're really doing.
1225def config_emitter(target, source, env):
1226    # extract variable name from Builder arg
1227    variable = str(target[0])
1228    # True target is config header file
1229    target = joinpath('config', variable.lower() + '.hh')
1230    val = env[variable]
1231    if isinstance(val, bool):
1232        # Force value to 0/1
1233        val = int(val)
1234    elif isinstance(val, str):
1235        val = '"' + val + '"'
1236
1237    # Sources are variable name & value (packaged in SCons Value nodes)
1238    return ([target], [Value(variable), Value(val)])
1239
1240config_builder = Builder(emitter = config_emitter, action = config_action)
1241
1242main.Append(BUILDERS = { 'ConfigFile' : config_builder })
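# Illustrative example: with USE_KVM set to True, a call along the lines
# of env.ConfigFile('USE_KVM') (invocation shown for illustration) would
# produce a header config/use_kvm.hh containing the single line
#   #define USE_KVM 1
# while string-valued variables are emitted quoted.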
1243
1244# libelf build is shared across all configs in the build root.
1245main.SConscript('ext/libelf/SConscript',
1246                variant_dir = joinpath(build_root, 'libelf'))
1247
1248# iostream3 build is shared across all configs in the build root.
1249main.SConscript('ext/iostream3/SConscript',
1250                variant_dir = joinpath(build_root, 'iostream3'))
1251
1252# libfdt build is shared across all configs in the build root.
1253main.SConscript('ext/libfdt/SConscript',
1254                variant_dir = joinpath(build_root, 'libfdt'))
1255
1256# fputils build is shared across all configs in the build root.
1257main.SConscript('ext/fputils/SConscript',
1258                variant_dir = joinpath(build_root, 'fputils'))
1259
1260# DRAMSim2 build is shared across all configs in the build root.
1261main.SConscript('ext/dramsim2/SConscript',
1262                variant_dir = joinpath(build_root, 'dramsim2'))
1263
1264# DRAMPower build is shared across all configs in the build root.
1265main.SConscript('ext/drampower/SConscript',
1266                variant_dir = joinpath(build_root, 'drampower'))
1267
1268# nomali build is shared across all configs in the build root.
1269main.SConscript('ext/nomali/SConscript',
1270                variant_dir = joinpath(build_root, 'nomali'))
1271
1272###################################################
1273#
1274# This function is used to set up a directory with switching headers
1275#
1276###################################################
1277
1278main['ALL_ISA_LIST'] = all_isa_list
1279main['ALL_GPU_ISA_LIST'] = all_gpu_isa_list
1280all_isa_deps = {}
1281def make_switching_dir(dname, switch_headers, env):
1282    # Generate the header.  target[0] is the full path of the output
1283    # header to generate.  'source' is a dummy variable, since we get the
1284    # list of ISAs from env['ALL_ISA_LIST'].
1285    def gen_switch_hdr(target, source, env):
1286        fname = str(target[0])
1287        isa = env['TARGET_ISA'].lower()
1288        try:
1289            f = open(fname, 'w')
1290            print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
1291            f.close()
1292        except IOError:
1293            print "Failed to create %s" % fname
1294            raise
1295
1296    # Build SCons Action object. 'varlist' specifies env vars that this
1297    # action depends on; when env['ALL_ISA_LIST'] changes these actions
1298    # should get re-executed.
1299    switch_hdr_action = MakeAction(gen_switch_hdr,
1300                          Transform("GENERATE"), varlist=['ALL_ISA_LIST'])
1301
1302    # Instantiate actions for each header
1303    for hdr in switch_headers:
1304        env.Command(hdr, [], switch_hdr_action)
1305
1306    isa_target = Dir('.').up().name.lower().replace('_', '-')
1307    env['PHONY_BASE'] = '#'+isa_target
1308    all_isa_deps[isa_target] = None
1309
1310Export('make_switching_dir')
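# Illustrative example: in a build configured with TARGET_ISA='arm', a
# switching header such as arch/isa.hh generated by make_switching_dir
# would contain just:
#   #include "arch/arm/isa.hh"
# (header name chosen for illustration).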
1311
1312def make_gpu_switching_dir(dname, switch_headers, env):
1313    # Generate the header.  target[0] is the full path of the output
1314    # header to generate.  'source' is a dummy variable, since we get the
1315    # list of ISAs from env['ALL_ISA_LIST'].
1316    def gen_switch_hdr(target, source, env):
1317        fname = str(target[0])
1318
1319        isa = env['TARGET_GPU_ISA'].lower()
1320
1321        try:
1322            f = open(fname, 'w')
1323            print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
1324            f.close()
1325        except IOError:
1326            print "Failed to create %s" % fname
1327            raise
1328
1329    # Build SCons Action object. 'varlist' specifies env vars that this
    # action depends on; when env['ALL_GPU_ISA_LIST'] changes these actions
    # should get re-executed.
    switch_hdr_action = MakeAction(gen_switch_hdr,
                          Transform("GENERATE"), varlist=['ALL_GPU_ISA_LIST'])
1334
1335    # Instantiate actions for each header
1336    for hdr in switch_headers:
1337        env.Command(hdr, [], switch_hdr_action)
1338
1339Export('make_gpu_switching_dir')
1340
# all-isas -> all-deps -> all-environs -> all-targets
1342main.Alias('#all-isas', [])
1343main.Alias('#all-deps', '#all-isas')
1344
1345# Dummy target to ensure all environments are created before telling
1346# SCons what to actually make (the command line arguments).  We attach
1347# them to the dependence graph after the environments are complete.
1348ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; gets closure to work.
1349def environsComplete(target, source, env):
1350    for t in ORIG_BUILD_TARGETS:
1351        main.Depends('#all-targets', t)
1352
1353# Each build/* switching_dir attaches its *-environs target to #all-environs.
1354main.Append(BUILDERS = {'CompleteEnvirons' :
1355                        Builder(action=MakeAction(environsComplete, None))})
1356main.CompleteEnvirons('#all-environs', [])
1357
1358def doNothing(**ignored): pass
1359main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})
1360
1361# The final target to which all the original targets ultimately get attached.
1362main.Dummy('#all-targets', '#all-environs')
1363BUILD_TARGETS[:] = ['#all-targets']
1364
1365###################################################
1366#
1367# Define build environments for selected configurations.
1368#
1369###################################################
1370
1371for variant_path in variant_paths:
1372    if not GetOption('silent'):
1373        print "Building in", variant_path
1374
1375    # Make a copy of the build-root environment to use for this config.
1376    env = main.Clone()
1377    env['BUILDDIR'] = variant_path
1378
1379    # variant_dir is the tail component of build path, and is used to
1380    # determine the build parameters (e.g., 'ALPHA_SE')
1381    (build_root, variant_dir) = splitpath(variant_path)
1382
1383    # Set env variables according to the build directory config.
1384    sticky_vars.files = []
1385    # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
1386    # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
1387    # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
1388    current_vars_file = joinpath(build_root, 'variables', variant_dir)
1389    if isfile(current_vars_file):
1390        sticky_vars.files.append(current_vars_file)
1391        if not GetOption('silent'):
1392            print "Using saved variables file %s" % current_vars_file
1393    else:
1394        # Build dir-specific variables file doesn't exist.
1395
1396        # Make sure the directory is there so we can create it later
1397        opt_dir = dirname(current_vars_file)
1398        if not isdir(opt_dir):
1399            mkdir(opt_dir)
1400
1401        # Get default build variables from source tree.  Variables are
1402        # normally determined by name of $VARIANT_DIR, but can be
1403        # overridden by '--default=' arg on command line.
1404        default = GetOption('default')
1405        opts_dir = joinpath(main.root.abspath, 'build_opts')
1406        if default:
1407            default_vars_files = [joinpath(build_root, 'variables', default),
1408                                  joinpath(opts_dir, default)]
1409        else:
1410            default_vars_files = [joinpath(opts_dir, variant_dir)]
1411        existing_files = filter(isfile, default_vars_files)
1412        if existing_files:
1413            default_vars_file = existing_files[0]
1414            sticky_vars.files.append(default_vars_file)
1415            print "Variables file %s not found,\n  using defaults in %s" \
1416                  % (current_vars_file, default_vars_file)
1417        else:
1418            print "Error: cannot find variables file %s or " \
1419                  "default file(s) %s" \
1420                  % (current_vars_file, ' or '.join(default_vars_files))
1421            Exit(1)
1422
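    # Illustrative example: building build/ARM/gem5.opt for the first time
    # would read defaults from build_opts/ARM (or the file named by
    # --default) and then save the result to build/variables/ARM, which is
    # what later builds of that variant pick up.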
1423    # Apply current variable settings to env
1424    sticky_vars.Update(env)
1425
1426    help_texts["local_vars"] += \
1427        "Build variables for %s:\n" % variant_dir \
1428                 + sticky_vars.GenerateHelpText(env)
1429
1430    # Process variable settings.
1431
1432    if not have_fenv and env['USE_FENV']:
1433        print "Warning: <fenv.h> not available; " \
1434              "forcing USE_FENV to False in", variant_dir + "."
1435        env['USE_FENV'] = False
1436
1437    if not env['USE_FENV']:
1438        print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
1439        print "         FP results may deviate slightly from other platforms."
1440
1441    if env['EFENCE']:
1442        env.Append(LIBS=['efence'])
1443
1444    if env['USE_KVM']:
1445        if not have_kvm:
1446            print "Warning: Can not enable KVM, host seems to lack KVM support"
1447            env['USE_KVM'] = False
1448        elif not is_isa_kvm_compatible(env['TARGET_ISA']):
1449            print "Info: KVM support disabled due to unsupported host and " \
1450                "target ISA combination"
1451            env['USE_KVM'] = False
1452
1453    # Warn about missing optional functionality
1454    if env['USE_KVM']:
1455        if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
1456            print "Warning: perf_event headers lack support for the " \
1457                "exclude_host attribute. KVM instruction counts will " \
1458                "be inaccurate."
1459
1460    # Save sticky variable settings back to current variables file
1461    sticky_vars.Save(current_vars_file, env)
1462
1463    if env['USE_SSE2']:
1464        env.Append(CCFLAGS=['-msse2'])
1465
1466    # The src/SConscript file sets up the build rules in 'env' according
1467    # to the configured variables.  It returns a list of environments,
1468    # one for each variant build (debug, opt, etc.)
1469    SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')
1470
1471def pairwise(iterable):
1472    "s -> (s0,s1), (s1,s2), (s2, s3), ..."
1473    a, b = itertools.tee(iterable)
1474    b.next()
1475    return itertools.izip(a, b)
1476
1477# Create false dependencies so SCons will parse ISAs, establish
1478# dependencies, and setup the build Environments serially. Either
1479# SCons (likely) and/or our SConscripts (possibly) cannot cope with -j
1480# greater than 1. It appears to be standard race condition stuff; it
1481# doesn't always fail, but usually, and the behaviors are different.
1482# Every time I tried to remove this, builds would fail in some
1483# creative new way. So, don't do that. You'll want to, though, because
1484# tests/SConscript takes a long time to make its Environments.
1485for t1, t2 in pairwise(sorted(all_isa_deps.iterkeys())):
1486    main.Depends('#%s-deps'     % t2, '#%s-deps'     % t1)
1487    main.Depends('#%s-environs' % t2, '#%s-environs' % t1)
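# Illustrative example: if all_isa_deps ended up with 'arm', 'mips' and
# 'x86' (hypothetical set), the loop above would make #mips-deps depend
# on #arm-deps and #x86-deps on #mips-deps, so the per-ISA environments
# are set up one at a time.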
1488
1489# base help text
1490Help('''
1491Usage: scons [scons options] [build variables] [target(s)]
1492
1493Extra scons options:
1494%(options)s
1495
1496Global build variables:
1497%(global_vars)s
1498
1499%(local_vars)s
1500''' % help_texts)
1501