SConstruct revision 11999
# -*- mode:python -*-

# Copyright (c) 2013, 2015, 2016 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder.  You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2011 Advanced Micro Devices, Inc.
# Copyright (c) 2009 The Hewlett-Packard Development Company
# Copyright (c) 2004-2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
#          Nathan Binkert

###################################################
#
# SCons top-level build description (SConstruct) file.
#
# While in this directory ('gem5'), just type 'scons' to build the default
# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
# the optimized full-system version).
#
# You can build gem5 in a different directory as long as there is a
# 'build/<CONFIG>' somewhere along the target path.  The build system
# expects that all configs under the same build directory are being
# built for the same host system.
#
# Examples:
#
#   The following two commands are equivalent.  The '-u' option tells
#   scons to search up the directory tree for this SConstruct file.
#   % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
#   % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
#
#   The following two commands are equivalent and demonstrate building
#   in a directory outside of the source tree.  The '-C' option tells
#   scons to chdir to the specified directory to find this SConstruct
#   file.
#   % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
#   % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
#
# You can use 'scons -H' to print scons options.  If you're in this
# 'gem5' directory (or use -u or -C to tell scons where to find this
# file), you can use 'scons -h' to print all the gem5-specific build
# options as well.
#
###################################################

# Check for recent-enough Python and SCons versions.
try:
    # Really old versions of scons only take two options for the
    # function, so check once without the revision and once with the
    # revision, the first instance will fail for stuff other than
    # 0.98, and the second will fail for 0.98.0
    EnsureSConsVersion(0, 98)
    EnsureSConsVersion(0, 98, 1)
except SystemExit, e:
    print """
For more details, see:
    http://gem5.org/Dependencies
"""
    raise

# We ensure the python version early because python-config
# requires python 2.5
try:
    EnsurePythonVersion(2, 5)
except SystemExit, e:
    print """
You can use a non-default installation of the Python interpreter by
rearranging your PATH so that scons finds the non-default 'python' and
'python-config' first.

For more details, see:
    http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
"""
    raise

# Global Python includes
import itertools
import os
import re
import shutil
import subprocess
import sys

from os import mkdir, environ
from os.path import abspath, basename, dirname, expanduser, normpath
from os.path import exists, isdir, isfile
from os.path import join as joinpath, split as splitpath

# SCons includes
import SCons
import SCons.Node

extra_python_paths = [
    Dir('src/python').srcnode().abspath, # gem5 includes
    Dir('ext/ply').srcnode().abspath,    # ply is used by several files
    ]

sys.path[1:1] = extra_python_paths

from m5.util import compareVersions, readCommand
from m5.util.terminal import get_termcap

help_texts = {
    "options" : "",
    "global_vars" : "",
    "local_vars" : ""
}

Export("help_texts")


# There's a bug in scons in that (1) by default, the help texts from
# AddOption() are supposed to be displayed when you type 'scons -h'
# and (2) you can override the help displayed by 'scons -h' using the
# Help() function, but these two features are incompatible: once
# you've overridden the help text using Help(), there's no way to get
# at the help texts from AddOptions.  See:
#     http://scons.tigris.org/issues/show_bug.cgi?id=2356
#     http://scons.tigris.org/issues/show_bug.cgi?id=2611
# This hack lets us extract the help text from AddOptions and
# re-inject it via Help().  Ideally someday this bug will be fixed and
# we can just use AddOption directly.
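# As a rough illustration (hypothetical option name), each accumulated
# entry ends up formatted as a two-column help line such as:
#   --with-foo                    Enable the hypothetical foo feature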
def AddLocalOption(*args, **kwargs):
    col_width = 30

    help = "  " + ", ".join(args)
    if "help" in kwargs:
        length = len(help)
        if length >= col_width:
            help += "\n" + " " * col_width
        else:
            help += " " * (col_width - length)
        help += kwargs["help"]
    help_texts["options"] += help + "\n"

    AddOption(*args, **kwargs)

AddLocalOption('--colors', dest='use_colors', action='store_true',
               help="Add color to abbreviated scons output")
AddLocalOption('--no-colors', dest='use_colors', action='store_false',
               help="Don't add color to abbreviated scons output")
AddLocalOption('--with-cxx-config', dest='with_cxx_config',
               action='store_true',
               help="Build with support for C++-based configuration")
AddLocalOption('--default', dest='default', type='string', action='store',
               help='Override which build_opts file to use for defaults')
AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
               help='Disable style checking hooks')
AddLocalOption('--no-lto', dest='no_lto', action='store_true',
               help='Disable Link-Time Optimization for fast')
AddLocalOption('--update-ref', dest='update_ref', action='store_true',
               help='Update test reference outputs')
AddLocalOption('--verbose', dest='verbose', action='store_true',
               help='Print full tool command lines')
AddLocalOption('--without-python', dest='without_python',
               action='store_true',
               help='Build without Python configuration support')
AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
               action='store_true',
               help='Disable linking against tcmalloc')
AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
               help='Build with Undefined Behavior Sanitizer if available')
AddLocalOption('--with-asan', dest='with_asan', action='store_true',
               help='Build with Address Sanitizer if available')

termcap = get_termcap(GetOption('use_colors'))

########################################################################
#
# Set up the main build environment.
#
########################################################################

# export TERM so that clang reports errors in color
use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
                 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
                 'PYTHONPATH', 'RANLIB', 'TERM' ])

use_prefixes = [
    "ASAN_",           # address sanitizer symbolizer path and settings
    "CCACHE_",         # ccache (caching compiler wrapper) configuration
    "CCC_",            # clang static analyzer configuration
    "DISTCC_",         # distcc (distributed compiler wrapper) configuration
    "INCLUDE_SERVER_", # distcc pump server settings
    "M5",              # M5 configuration (e.g., path to kernels)
    ]

use_env = {}
for key,val in sorted(os.environ.iteritems()):
    if key in use_vars or \
            any([key.startswith(prefix) for prefix in use_prefixes]):
        use_env[key] = val

# Tell scons to avoid implicit command dependencies to avoid issues
# with the param wrappers being compiled twice (see
# http://scons.tigris.org/issues/show_bug.cgi?id=2811)
main = Environment(ENV=use_env, IMPLICIT_COMMAND_DEPENDENCIES=0)
main.Decider('MD5-timestamp')
main.root = Dir(".")         # The current directory (where this file lives).
main.srcdir = Dir("src")     # The source directory

main_dict_keys = main.Dictionary().keys()

# Check that we have a C/C++ compiler
if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
    print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
    Exit(1)

# add useful python code to PYTHONPATH so it can be used by subprocesses
# as well
main.AppendENVPath('PYTHONPATH', extra_python_paths)

########################################################################
#
# Mercurial Stuff.
#
# If the gem5 directory is a mercurial repository, we should do some
# extra things.
#
########################################################################

hgdir = main.root.Dir(".hg")


style_message = """
You're missing the gem5 style hook, which automatically checks your code
against the gem5 style rules on %s.
This script will now install the hook in your %s.
Press enter to continue, or ctrl-c to abort: """

mercurial_style_message = """
You're missing the gem5 style hook, which automatically checks your code
against the gem5 style rules on hg commit and qrefresh commands.
This script will now install the hook in your .hg/hgrc file.
Press enter to continue, or ctrl-c to abort: """

git_style_message = """
You're missing the gem5 style or commit message hook. These hooks help
to ensure that your code follows gem5's style rules on git commit.
This script will now install the hook in your .git/hooks/ directory.
Press enter to continue, or ctrl-c to abort: """

mercurial_style_upgrade_message = """
Your Mercurial style hooks are not up-to-date. This script will now
try to automatically update them. A backup of your hgrc will be saved
in .hg/hgrc.old.
Press enter to continue, or ctrl-c to abort: """

mercurial_style_hook = """
# The following lines were automatically added by gem5/SConstruct
# to provide the gem5 style-checking hooks
[extensions]
hgstyle = %s/util/hgstyle.py

[hooks]
pretxncommit.style = python:hgstyle.check_style
pre-qrefresh.style = python:hgstyle.check_style
# End of SConstruct additions

""" % (main.root.abspath)

mercurial_lib_not_found = """
Mercurial libraries cannot be found, ignoring style hook.  If
you are a gem5 developer, please fix this and run the style
hook. It is important.
"""

# Check for style hook and prompt for installation if it's not there.
# Skip this if --ignore-style was specified, there's no interactive
# terminal to prompt, or no recognized revision control system can be
# found.
ignore_style = GetOption('ignore_style') or not sys.stdin.isatty()

# Try to wire up Mercurial to the style hooks
if not ignore_style and hgdir.exists():
    style_hook = True
    style_hooks = tuple()
    hgrc = hgdir.File('hgrc')
    hgrc_old = hgdir.File('hgrc.old')
    try:
        from mercurial import ui
        ui = ui.ui()
        ui.readconfig(hgrc.abspath)
        style_hooks = (ui.config('hooks', 'pretxncommit.style', None),
                       ui.config('hooks', 'pre-qrefresh.style', None))
        style_hook = all(style_hooks)
        style_extension = ui.config('extensions', 'style', None)
    except ImportError:
        print mercurial_lib_not_found

    if "python:style.check_style" in style_hooks:
        # Try to upgrade the style hooks
        print mercurial_style_upgrade_message
        # continue unless user does ctrl-c/ctrl-d etc.
        try:
            raw_input()
        except:
            print "Input exception, exiting scons.\n"
            sys.exit(1)
        shutil.copyfile(hgrc.abspath, hgrc_old.abspath)
        re_style_hook = re.compile(r"^([^=#]+)\.style\s*=\s*([^#\s]+).*")
        re_style_extension = re.compile("style\s*=\s*([^#\s]+).*")
        old, new = open(hgrc_old.abspath, 'r'), open(hgrc.abspath, 'w')
        for l in old:
            m_hook = re_style_hook.match(l)
            m_ext = re_style_extension.match(l)
            if m_hook:
                hook, check = m_hook.groups()
                if check != "python:style.check_style":
                    print "Warning: %s.style is using a non-default " \
                        "checker: %s" % (hook, check)
                if hook not in ("pretxncommit", "pre-qrefresh"):
                    print "Warning: Updating unknown style hook: %s" % hook

                l = "%s.style = python:hgstyle.check_style\n" % hook
            elif m_ext and m_ext.group(1) == style_extension:
                l = "hgstyle = %s/util/hgstyle.py\n" % main.root.abspath

            new.write(l)
    elif not style_hook:
        print mercurial_style_message,
        # continue unless user does ctrl-c/ctrl-d etc.
        try:
            raw_input()
        except:
            print "Input exception, exiting scons.\n"
            sys.exit(1)
        hgrc_path = '%s/.hg/hgrc' % main.root.abspath
        print "Adding style hook to", hgrc_path, "\n"
        try:
            with open(hgrc_path, 'a') as f:
                f.write(mercurial_style_hook)
        except:
            print "Error updating", hgrc_path
            sys.exit(1)

def install_git_style_hooks():
    try:
        gitdir = Dir(readCommand(
            ["git", "rev-parse", "--git-dir"]).strip("\n"))
    except Exception, e:
        print "Warning: Failed to find git repo directory: %s" % e
        return

    git_hooks = gitdir.Dir("hooks")
    def hook_exists(hook_name):
        hook = git_hooks.File(hook_name)
        return hook.exists()

    def hook_install(hook_name, script):
        hook = git_hooks.File(hook_name)
        if hook.exists():
            print "Warning: Can't install %s, hook already exists." % hook_name
            return

        if hook.islink():
            print "Warning: Removing broken symlink for hook %s." % hook_name
            os.unlink(hook.get_abspath())

        if not git_hooks.exists():
            mkdir(git_hooks.get_abspath())
            git_hooks.clear()

        abs_symlink_hooks = git_hooks.islink() and \
            os.path.isabs(os.readlink(git_hooks.get_abspath()))

        # Use a relative symlink if the hooks live in the source directory,
        # and the hooks directory is not a symlink to an absolute path.
        if hook.is_under(main.root) and not abs_symlink_hooks:
            script_path = os.path.relpath(
                os.path.realpath(script.get_abspath()),
                os.path.realpath(hook.Dir(".").get_abspath()))
        else:
            script_path = script.get_abspath()

        try:
            os.symlink(script_path, hook.get_abspath())
        except:
            print "Error updating git %s hook" % hook_name
            raise

    if hook_exists("pre-commit") and hook_exists("commit-msg"):
        return

    print git_style_message,
    try:
        raw_input()
    except:
        print "Input exception, exiting scons.\n"
        sys.exit(1)

    git_style_script = File("util/git-pre-commit.py")
    git_msg_script = File("ext/git-commit-msg")

    hook_install("pre-commit", git_style_script)
    hook_install("commit-msg", git_msg_script)

# Try to wire up git to the style hooks
if not ignore_style and main.root.Entry(".git").exists():
    install_git_style_hooks()

###################################################
#
# Figure out which configurations to set up based on the path(s) of
# the target(s).
#
###################################################

# Find default configuration & binary.
Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))

# helper function: find last occurrence of element in list
def rfind(l, elt, offs = -1):
    for i in range(len(l)+offs, 0, -1):
        if l[i] == elt:
            return i
    raise ValueError, "element not found"

# Take a list of paths (or SCons Nodes) and return a list with all
# paths made absolute and ~-expanded.  Paths will be interpreted
# relative to the launch directory unless a different root is provided
def makePathListAbsolute(path_list, root=GetLaunchDir()):
    return [abspath(joinpath(root, expanduser(str(p))))
            for p in path_list]

# Each target must have 'build' in the interior of the path; the
# directory below this will determine the build parameters.  For
# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
# recognize that ALPHA_SE specifies the configuration because it
# follows 'build' in the build path.

# The funky assignment to "[:]" is needed to replace the list contents
# in place rather than reassign the symbol to a new list, which
# doesn't work (obviously!).
BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)

# Generate a list of the unique build roots and configs that the
# collected targets reference.
variant_paths = []
build_root = None
for t in BUILD_TARGETS:
    path_dirs = t.split('/')
    try:
        build_top = rfind(path_dirs, 'build', -2)
    except:
        print "Error: no non-leaf 'build' dir found on target path", t
        Exit(1)
    this_build_root = joinpath('/',*path_dirs[:build_top+1])
    if not build_root:
        build_root = this_build_root
    else:
        if this_build_root != build_root:
            print "Error: build targets not under same build root\n"\
                  "  %s\n  %s" % (build_root, this_build_root)
            Exit(1)
    variant_path = joinpath('/',*path_dirs[:build_top+2])
    if variant_path not in variant_paths:
        variant_paths.append(variant_path)

# Make sure build_root exists (might not if this is the first build there)
if not isdir(build_root):
    mkdir(build_root)
main['BUILDROOT'] = build_root

Export('main')

main.SConsignFile(joinpath(build_root, "sconsign"))

# Default duplicate option is to use hard links, but this messes up
# when you use emacs to edit a file in the target dir, as emacs moves
# file to file~ then copies to file, breaking the link.  Symbolic
# (soft) links work better.
main.SetOption('duplicate', 'soft-copy')

#
# Set up global sticky variables... these are common to an entire build
# tree (not specific to a particular build like ALPHA_SE)
#
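
# For example (hypothetical values), a command line like
#   scons CC=gcc-6 CXX=g++-6 build/ARM/gem5.opt
# records CC/CXX in <build_root>/variables.global and reuses them on later
# invocations until they are overridden again.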

global_vars_file = joinpath(build_root, 'variables.global')

global_vars = Variables(global_vars_file, args=ARGUMENTS)

global_vars.AddVariables(
    ('CC', 'C compiler', environ.get('CC', main['CC'])),
    ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
    ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
    ('BATCH', 'Use batch pool for build and tests', False),
    ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
    ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
    ('EXTRAS', 'Add extra directories to the compilation', '')
    )

# Update main environment with values from ARGUMENTS & global_vars_file
global_vars.Update(main)
help_texts["global_vars"] += global_vars.GenerateHelpText(main)

# Save sticky variable settings back to current variables file
global_vars.Save(global_vars_file, main)

# Parse EXTRAS variable to build list of all directories where we're
# looking for sources etc.  This list is exported as extras_dir_list.
base_dir = main.srcdir.abspath
if main['EXTRAS']:
    extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
else:
    extras_dir_list = []

Export('base_dir')
Export('extras_dir_list')

# the ext directory should be on the #includes path
main.Append(CPPPATH=[Dir('ext')])

def strip_build_path(path, env):
    path = str(path)
    variant_base = env['BUILDROOT'] + os.path.sep
    if path.startswith(variant_base):
        path = path[len(variant_base):]
    elif path.startswith('build/'):
        path = path[6:]
    return path

# Generate a string of the form:
#   common/path/prefix/src1, src2 -> tgt1, tgt2
# to print while building.
class Transform(object):
    # all specific color settings should be here and nowhere else
    tool_color = termcap.Normal
    pfx_color = termcap.Yellow
    srcs_color = termcap.Yellow + termcap.Bold
    arrow_color = termcap.Blue + termcap.Bold
    tgts_color = termcap.Yellow + termcap.Bold

    def __init__(self, tool, max_sources=99):
        self.format = self.tool_color + (" [%8s] " % tool) \
                      + self.pfx_color + "%s" \
                      + self.srcs_color + "%s" \
                      + self.arrow_color + " -> " \
                      + self.tgts_color + "%s" \
                      + termcap.Normal
        self.max_sources = max_sources

    def __call__(self, target, source, env, for_signature=None):
        # truncate source list according to max_sources param
        source = source[0:self.max_sources]
        def strip(f):
            return strip_build_path(str(f), env)
        if len(source) > 0:
            srcs = map(strip, source)
        else:
            srcs = ['']
        tgts = map(strip, target)
        # surprisingly, os.path.commonprefix is a dumb char-by-char string
        # operation that has nothing to do with paths.
        com_pfx = os.path.commonprefix(srcs + tgts)
        com_pfx_len = len(com_pfx)
        if com_pfx:
            # do some cleanup and sanity checking on common prefix
            if com_pfx[-1] == ".":
                # prefix matches all but file extension: ok
                # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
                com_pfx = com_pfx[0:-1]
            elif com_pfx[-1] == "/":
                # common prefix is directory path: OK
                pass
            else:
                src0_len = len(srcs[0])
                tgt0_len = len(tgts[0])
                if src0_len == com_pfx_len:
                    # source is a substring of target, OK
                    pass
                elif tgt0_len == com_pfx_len:
                    # target is a substring of source, need to back up to
                    # avoid empty string on RHS of arrow
                    sep_idx = com_pfx.rfind(".")
                    if sep_idx != -1:
                        com_pfx = com_pfx[0:sep_idx]
                    else:
                        com_pfx = ''
                elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
                    # still splitting at file extension: ok
                    pass
                else:
                    # probably a fluke; ignore it
                    com_pfx = ''
            # recalculate length in case com_pfx was modified
            com_pfx_len = len(com_pfx)
        def fmt(files):
            f = map(lambda s: s[com_pfx_len:], files)
            return ', '.join(f)
        return self.format % (com_pfx, fmt(srcs), fmt(tgts))

Export('Transform')

# enable the regression script to use the termcap
main['TERMCAP'] = termcap

if GetOption('verbose'):
    def MakeAction(action, string, *args, **kwargs):
        return Action(action, *args, **kwargs)
else:
    MakeAction = Action
    main['CCCOMSTR'] = Transform("CC")
    main['CXXCOMSTR'] = Transform("CXX")
    main['ASCOMSTR'] = Transform("AS")
    main['ARCOMSTR'] = Transform("AR", 0)
    main['LINKCOMSTR'] = Transform("LINK", 0)
    main['SHLINKCOMSTR'] = Transform("SHLINK", 0)
    main['RANLIBCOMSTR'] = Transform("RANLIB", 0)
    main['M4COMSTR'] = Transform("M4")
    main['SHCCCOMSTR'] = Transform("SHCC")
    main['SHCXXCOMSTR'] = Transform("SHCXX")
Export('MakeAction')

# Initialize the Link-Time Optimization (LTO) flags
main['LTO_CCFLAGS'] = []
main['LTO_LDFLAGS'] = []

# According to the readme, tcmalloc works best if the compiler doesn't
# assume that we're using the builtin malloc and friends. These flags
# are compiler-specific, so we need to set them after we detect which
# compiler we're using.
main['TCMALLOC_CCFLAGS'] = []

CXX_version = readCommand([main['CXX'],'--version'], exception=False)
CXX_V = readCommand([main['CXX'],'-V'], exception=False)

main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
if main['GCC'] + main['CLANG'] > 1:
    print 'Error: How can we have two at the same time?'
    Exit(1)

# Set up default C++ compiler flags
if main['GCC'] or main['CLANG']:
    # As gcc and clang share many flags, do the common parts here
    main.Append(CCFLAGS=['-pipe'])
    main.Append(CCFLAGS=['-fno-strict-aliasing'])
    # Enable -Wall and -Wextra and then disable the few warnings that
    # we consistently violate
    main.Append(CCFLAGS=['-Wall', '-Wundef', '-Wextra',
                         '-Wno-sign-compare', '-Wno-unused-parameter'])
    # We always compile using C++11
    main.Append(CXXFLAGS=['-std=c++11'])
    if sys.platform.startswith('freebsd'):
        main.Append(CCFLAGS=['-I/usr/local/include'])
        main.Append(CXXFLAGS=['-I/usr/local/include'])

    main['FILTER_PSHLINKFLAGS'] = lambda x: str(x).replace(' -shared', '')
    main['PSHLINKFLAGS'] = main.subst('${FILTER_PSHLINKFLAGS(SHLINKFLAGS)}')
    main['PLINKFLAGS'] = main.subst('${LINKFLAGS}')
    shared_partial_flags = ['-r', '-nostdlib']
    main.Append(PSHLINKFLAGS=shared_partial_flags)
    main.Append(PLINKFLAGS=shared_partial_flags)
else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
              termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use a compiler other than GCC"
    print "       or clang, there appears to be something wrong with your"
    print "       environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to fix SConstruct and"
    print "       src/SConscript to support that compiler."
    Exit(1)

if main['GCC']:
    # Check for a supported version of gcc. >= 4.8 is chosen for its
    # level of c++11 support. See
    # http://gcc.gnu.org/projects/cxx0x.html for details.
    gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
    if compareVersions(gcc_version, "4.8") < 0:
        print 'Error: gcc version 4.8 or newer required.'
        print '       Installed version:', gcc_version
        Exit(1)

    main['GCC_VERSION'] = gcc_version

    # gcc from version 4.8 and above generates "rep; ret" instructions
    # to avoid performance penalties on certain AMD chips. Older
    # assemblers detect this as an error, "Error: expecting string
    # instruction after `rep'"
    as_version_raw = readCommand([main['AS'], '-v', '/dev/null',
                                  '-o', '/dev/null'],
                                 exception=False).split()

    # version strings may contain extra distro-specific
    # qualifiers, so play it safe and keep only what comes before
    # the first hyphen
    as_version = as_version_raw[-1].split('-')[0] if as_version_raw else None

    if not as_version or compareVersions(as_version, "2.23") < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: This combination of gcc and binutils has' + \
            ' known incompatibilities.\n' + \
            '         If you encounter build problems, please update ' + \
            'binutils to 2.23.' + \
            termcap.Normal

    # Make sure we warn if the user has requested to compile with the
    # Undefined Behavior Sanitizer and this version of gcc does not
    # support it.
    if GetOption('with_ubsan') and \
            compareVersions(gcc_version, '4.9') < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: UBSan is only supported using gcc 4.9 and later.' + \
            termcap.Normal

    # Add the appropriate Link-Time Optimization (LTO) flags
    # unless LTO is explicitly turned off. Note that these flags
    # are only used by the fast target.
    if not GetOption('no_lto'):
        # Pass the LTO flag when compiling to produce GIMPLE
        # output, we merely create the flags here and only append
        # them later
        main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

        # Use the same amount of jobs for LTO as we are running
        # scons with
        main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
                                  '-fno-builtin-realloc', '-fno-builtin-free'])

    # add option to check for undeclared overrides
    if compareVersions(gcc_version, "5.0") > 0:
        main.Append(CCFLAGS=['-Wno-error=suggest-override'])

elif main['CLANG']:
    # Check for a supported version of clang, >= 3.1 is needed to
    # support similar features as gcc 4.8. See
    # http://clang.llvm.org/cxx_status.html for details
    clang_version_re = re.compile(".* version (\d+\.\d+)")
    clang_version_match = clang_version_re.search(CXX_version)
    if (clang_version_match):
        clang_version = clang_version_match.groups()[0]
        if compareVersions(clang_version, "3.1") < 0:
            print 'Error: clang version 3.1 or newer required.'
            print '       Installed version:', clang_version
            Exit(1)
    else:
        print 'Error: Unable to determine clang version.'
        Exit(1)

    # clang has a few additional warnings that we disable: extraneous
    # parentheses are allowed due to Ruby's printing of the AST, and
    # self assignments are allowed as the generated CPU code relies
    # on this.
    main.Append(CCFLAGS=['-Wno-parentheses',
                         '-Wno-self-assign',
                         # Some versions of libstdc++ (4.8?) seem to
                         # use struct hash and class hash
                         # interchangeably.
                         '-Wno-mismatched-tags',
                         ])

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])

    # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
    # opposed to libstdc++, as the latter is dated.
    if sys.platform == "darwin":
        main.Append(CXXFLAGS=['-stdlib=libc++'])
        main.Append(LIBS=['c++'])

    # On FreeBSD we need libthr.
    if sys.platform.startswith('freebsd'):
        main.Append(LIBS=['thr'])

else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
              termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use a compiler other than GCC"
    print "       or clang, there appears to be something wrong with your"
    print "       environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to fix SConstruct and"
    print "       src/SConscript to support that compiler."
    Exit(1)

# Set up common yacc/bison flags (needed for Ruby)
main['YACCFLAGS'] = '-d'
main['YACCHXXFILESUFFIX'] = '.hh'

# Do this after we save settings back, or else we'll tack on an
# extra 'qdo' every time we run scons.
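# (With BATCH enabled, each tool is simply prefixed with BATCH_CMD, so the
# effective compiler becomes something like 'qdo g++'.)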
if main['BATCH']:
    main['CC']     = main['BATCH_CMD'] + ' ' + main['CC']
    main['CXX']    = main['BATCH_CMD'] + ' ' + main['CXX']
    main['AS']     = main['BATCH_CMD'] + ' ' + main['AS']
    main['AR']     = main['BATCH_CMD'] + ' ' + main['AR']
    main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']

if sys.platform == 'cygwin':
    # cygwin has some header file issues...
    main.Append(CCFLAGS=["-Wno-uninitialized"])

# Check for the protobuf compiler
protoc_version = readCommand([main['PROTOC'], '--version'],
                             exception='').split()

# First two words should be "libprotoc x.y.z"
if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
    print termcap.Yellow + termcap.Bold + \
        'Warning: Protocol buffer compiler (protoc) not found.\n' + \
        '         Please install protobuf-compiler for tracing support.' + \
        termcap.Normal
    main['PROTOC'] = False
else:
    # Based on the availability of the compress stream wrappers,
    # require 2.1.0
    min_protoc_version = '2.1.0'
    if compareVersions(protoc_version[1], min_protoc_version) < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: protoc version', min_protoc_version, \
            'or newer required.\n' + \
            '         Installed version:', protoc_version[1], \
            termcap.Normal
        main['PROTOC'] = False
    else:
        # Attempt to determine the appropriate include path and
        # library path using pkg-config, that means we also need to
        # check for pkg-config. Note that it is possible to use
        # protobuf without the involvement of pkg-config. Later on we
        # do a library configuration check and at that point the test
        # will fail if libprotobuf cannot be found.
        if readCommand(['pkg-config', '--version'], exception=''):
            try:
                # Attempt to establish what linking flags to add for protobuf
                # using pkg-config
                main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
            except:
                print termcap.Yellow + termcap.Bold + \
                    'Warning: pkg-config could not get protobuf flags.' + \
                    termcap.Normal


# Check for 'timeout' from GNU coreutils. If present, regressions will
# be run with a time limit. We require version 8.13 since we rely on
# support for the '--foreground' option.
if sys.platform.startswith('freebsd'):
    timeout_lines = readCommand(['gtimeout', '--version'],
                                exception='').splitlines()
else:
    timeout_lines = readCommand(['timeout', '--version'],
                                exception='').splitlines()
# Get the first line and tokenize it
timeout_version = timeout_lines[0].split() if timeout_lines else []
main['TIMEOUT'] = timeout_version and \
    compareVersions(timeout_version[-1], '8.13') >= 0

# Add a custom Check function to test for structure members.
def CheckMember(context, include, decl, member, include_quotes="<>"):
    context.Message("Checking for member %s in %s..." %
                    (member, decl))
    text = """
#include %(header)s
int main(){
  %(decl)s test;
  (void)test.%(member)s;
  return 0;
};
""" % { "header" : include_quotes[0] + include + include_quotes[1],
        "decl" : decl,
        "member" : member,
        }

    ret = context.TryCompile(text, extension=".cc")
    context.Result(ret)
    return ret

# Platform-specific configuration.  Note again that we assume that all
# builds under a given build root run on the same host platform.
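# (Registering CheckMember as a custom test below is what allows the
# conf.CheckMember(...) probe of struct perf_event_attr further down.)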
conf = Configure(main,
                 conf_dir = joinpath(build_root, '.scons_config'),
                 log_file = joinpath(build_root, 'scons_config.log'),
                 custom_tests = {
                     'CheckMember' : CheckMember,
                 })

# Check if we should compile a 64 bit binary on Mac OS X/Darwin
try:
    import platform
    uname = platform.uname()
    if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
        if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
            main.Append(CCFLAGS=['-arch', 'x86_64'])
            main.Append(CFLAGS=['-arch', 'x86_64'])
            main.Append(LINKFLAGS=['-arch', 'x86_64'])
            main.Append(ASFLAGS=['-arch', 'x86_64'])
except:
    pass

# Recent versions of scons substitute a "Null" object for Configure()
# when configuration isn't necessary, e.g., if the "--help" option is
# present.  Unfortunately this Null object always returns false,
# breaking all our configuration checks.  We replace it with our own
# more optimistic null object that returns True instead.
if not conf:
    def NullCheck(*args, **kwargs):
        return True

    class NullConf:
        def __init__(self, env):
            self.env = env
        def Finish(self):
            return self.env
        def __getattr__(self, mname):
            return NullCheck

    conf = NullConf(main)

# Cache build files in the supplied directory.
if main['M5_BUILD_CACHE']:
    print 'Using build cache located at', main['M5_BUILD_CACHE']
    CacheDir(main['M5_BUILD_CACHE'])

main['USE_PYTHON'] = not GetOption('without_python')
if main['USE_PYTHON']:
    # Find Python include and library directories for embedding the
    # interpreter. We rely on python-config to resolve the appropriate
    # includes and linker flags. ParseConfig does not seem to understand
    # the more exotic linker flags such as -Xlinker and -export-dynamic so
    # we add them explicitly below. If you want to link in an alternate
    # version of python, see above for instructions on how to invoke
    # scons with the appropriate PATH set.
    #
    # First we check if python2-config exists, else we use python-config
    python_config = readCommand(['which', 'python2-config'],
                                exception='').strip()
    if not os.path.exists(python_config):
        python_config = readCommand(['which', 'python-config'],
                                    exception='').strip()
    py_includes = readCommand([python_config, '--includes'],
                              exception='').split()
    # Strip the -I from the include folders before adding them to the
    # CPPPATH
    main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))

    # Read the linker flags and split them into libraries and other link
    # flags. The libraries are added later through the call to CheckLib.
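    # (On a typical Linux host '--ldflags' reports something like
    #  '-L... -lpython2.7 -lpthread -ldl -lutil -lm -Xlinker -export-dynamic',
    #  so only the '-l' entries end up in py_libs below.)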
    py_ld_flags = readCommand([python_config, '--ldflags'],
                              exception='').split()
    py_libs = []
    for lib in py_ld_flags:
        if not lib.startswith('-l'):
            main.Append(LINKFLAGS=[lib])
        else:
            lib = lib[2:]
            if lib not in py_libs:
                py_libs.append(lib)

    # verify that this stuff works
    if not conf.CheckHeader('Python.h', '<>'):
        print "Error: can't find Python.h header in", py_includes
        print "Install Python headers (package python-dev on Ubuntu and RedHat)"
        Exit(1)

    for lib in py_libs:
        if not conf.CheckLib(lib):
            print "Error: can't find library %s required by python" % lib
            Exit(1)

# On Solaris you need to use libsocket for socket ops
if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
    if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++',
                                   'accept(0,0,0);'):
        print "Can't find library with socket calls (e.g. accept())"
        Exit(1)

# Check for zlib. If the check passes, libz will be automatically
# added to the LIBS environment variable.
if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
    print 'Error: did not find needed zlib compression library '\
          'and/or zlib.h header file.'
    print '       Please install zlib and try again.'
    Exit(1)

# If we have the protobuf compiler, also make sure we have the
# development libraries. If the check passes, libprotobuf will be
# automatically added to the LIBS environment variable. After
# this, we can use the HAVE_PROTOBUF flag to determine if we have
# got both protoc and libprotobuf available.
main['HAVE_PROTOBUF'] = main['PROTOC'] and \
    conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
                            'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')

# If we have the compiler but not the library, print another warning.
if main['PROTOC'] and not main['HAVE_PROTOBUF']:
    print termcap.Yellow + termcap.Bold + \
        'Warning: did not find protocol buffer library and/or headers.\n' + \
        '         Please install libprotobuf-dev for tracing support.' + \
        termcap.Normal

# Check for librt.
have_posix_clock = \
    conf.CheckLibWithHeader(None, 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);') or \
    conf.CheckLibWithHeader('rt', 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);')

have_posix_timers = \
    conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
                            'timer_create(CLOCK_MONOTONIC, NULL, NULL);')

if not GetOption('without_tcmalloc'):
    if conf.CheckLib('tcmalloc'):
        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
    elif conf.CheckLib('tcmalloc_minimal'):
        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
    else:
        print termcap.Yellow + termcap.Bold + \
              "You can get a 12% performance improvement by "\
              "installing tcmalloc (libgoogle-perftools-dev package "\
              "on Ubuntu or RedHat)." + termcap.Normal


# Detect back trace implementations. The last implementation in the
# list will be used by default.
backtrace_impls = [ "none" ]

if conf.CheckLibWithHeader(None, 'execinfo.h', 'C',
                           'backtrace_symbols_fd((void*)0, 0, 0);'):
    backtrace_impls.append("glibc")
elif conf.CheckLibWithHeader('execinfo', 'execinfo.h', 'C',
                             'backtrace_symbols_fd((void*)0, 0, 0);'):
    # NetBSD and FreeBSD need libexecinfo.
    backtrace_impls.append("glibc")
    main.Append(LIBS=['execinfo'])

if backtrace_impls[-1] == "none":
    default_backtrace_impl = "none"
    print termcap.Yellow + termcap.Bold + \
        "No suitable back trace implementation found." + \
        termcap.Normal

if not have_posix_clock:
    print "Can't find library for POSIX clocks."

# Check for <fenv.h> (C99 FP environment control)
have_fenv = conf.CheckHeader('fenv.h', '<>')
if not have_fenv:
    print "Warning: Header file <fenv.h> not found."
    print "         This host has no IEEE FP rounding mode control."

# Check if we should enable KVM-based hardware virtualization. The API
# we rely on exists since version 2.6.36 of the kernel, but somehow
# the KVM_API_VERSION does not reflect the change. We test for one of
# the types as a fall back.
have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
if not have_kvm:
    print "Info: Compatible header file <linux/kvm.h> not found, " \
        "disabling KVM support."

# x86 needs support for xsave. We test for the structure here since we
# won't be able to run new tests by the time we know which ISA we're
# targeting.
have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
                                    '#include <linux/kvm.h>') != 0

# Check if the requested target ISA is compatible with the host
def is_isa_kvm_compatible(isa):
    try:
        import platform
        host_isa = platform.machine()
    except:
        print "Warning: Failed to determine host ISA."
        return False

    if not have_posix_timers:
        print "Warning: Can not enable KVM, host seems to lack support " \
            "for POSIX timers"
        return False

    if isa == "arm":
        return host_isa in ( "armv7l", "aarch64" )
    elif isa == "x86":
        if host_isa != "x86_64":
            return False

        if not have_kvm_xsave:
            print "KVM on x86 requires xsave support in kernel headers."
            return False

        return True
    else:
        return False


# Check if the exclude_host attribute is available. We want this to
# get accurate instruction counts in KVM.
main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
    'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')


######################################################################
#
# Finish the configuration
#
main = conf.Finish()

######################################################################
#
# Collect all non-global variables
#

# Define the universe of supported ISAs
all_isa_list = [ ]
all_gpu_isa_list = [ ]
Export('all_isa_list')
Export('all_gpu_isa_list')

class CpuModel(object):
    '''The CpuModel class encapsulates everything the ISA parser needs to
    know about a particular CPU model.'''

    # Dict of available CPU model objects.  Accessible as CpuModel.dict.
    dict = {}
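
    # A SConsopts script registers a model with, e.g. (hypothetical name):
    #   CpuModel('MySimpleCPU', default=True)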

    # Constructor.  Automatically adds models to CpuModel.dict.
    def __init__(self, name, default=False):
        self.name = name           # name of model

        # This cpu is enabled by default
        self.default = default

        # Add self to dict
        if name in CpuModel.dict:
            raise AttributeError, "CpuModel '%s' already registered" % name
        CpuModel.dict[name] = self

Export('CpuModel')

# Sticky variables get saved in the variables file so they persist from
# one invocation to the next (unless overridden, in which case the new
# value becomes sticky).
sticky_vars = Variables(args=ARGUMENTS)
Export('sticky_vars')

# Sticky variables that should be exported
export_vars = []
Export('export_vars')

# For Ruby
all_protocols = []
Export('all_protocols')
protocol_dirs = []
Export('protocol_dirs')
slicc_includes = []
Export('slicc_includes')

# Walk the tree and execute all SConsopts scripts that will add to the
# above variables
if GetOption('verbose'):
    print "Reading SConsopts"
for bdir in [ base_dir ] + extras_dir_list:
    if not isdir(bdir):
        print "Error: directory '%s' does not exist" % bdir
        Exit(1)
    for root, dirs, files in os.walk(bdir):
        if 'SConsopts' in files:
            if GetOption('verbose'):
                print "Reading", joinpath(root, 'SConsopts')
            SConscript(joinpath(root, 'SConsopts'))

all_isa_list.sort()
all_gpu_isa_list.sort()

sticky_vars.AddVariables(
    EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
    EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list),
    ListVariable('CPU_MODELS', 'CPU models',
                 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
                 sorted(CpuModel.dict.keys())),
    BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
                 False),
    BoolVariable('SS_COMPATIBLE_FP',
                 'Make floating-point results compatible with SimpleScalar',
                 False),
    BoolVariable('USE_SSE2',
                 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
                 False),
    BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
    BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
    BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability',
                 False),
    BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models',
                 have_kvm),
    BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
    EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
                 all_protocols),
    EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
                 backtrace_impls[-1], backtrace_impls)
    )

# These variables get exported to #defines in config/*.hh (see src/SConscript).
export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
                'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'PROTOCOL',
                'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST']

###################################################
#
# Define a SCons builder for configuration flag headers.
#
###################################################

# This function generates a config header file that #defines the
# variable symbol to the current variable setting (0 or 1). The source
# operands are the name of the variable and a Value node containing the
# value of the variable.
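# For example, with USE_KVM=True the generated config/use_kvm.hh contains
# the single line:
#   #define USE_KVM 1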
def build_config_file(target, source, env):
    (variable, value) = [s.get_contents() for s in source]
    f = file(str(target[0]), 'w')
    print >> f, '#define', variable, value
    f.close()
    return None

# Combine the two functions into a scons Action object.
config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))

# The emitter munges the source & target node lists to reflect what
# we're really doing.
def config_emitter(target, source, env):
    # extract variable name from Builder arg
    variable = str(target[0])
    # True target is config header file
    target = joinpath('config', variable.lower() + '.hh')
    val = env[variable]
    if isinstance(val, bool):
        # Force value to 0/1
        val = int(val)
    elif isinstance(val, str):
        val = '"' + val + '"'

    # Sources are variable name & value (packaged in SCons Value nodes)
    return ([target], [Value(variable), Value(val)])

config_builder = Builder(emitter = config_emitter, action = config_action)

main.Append(BUILDERS = { 'ConfigFile' : config_builder })

###################################################
#
# Builders for static and shared partially linked object files.
#
###################################################

partial_static_builder = Builder(action=SCons.Defaults.LinkAction,
                                 src_suffix='$OBJSUFFIX',
                                 src_builder=['StaticObject', 'Object'],
                                 LINKFLAGS='$PLINKFLAGS',
                                 LIBS='')

def partial_shared_emitter(target, source, env):
    for tgt in target:
        tgt.attributes.shared = 1
    return (target, source)
partial_shared_builder = Builder(action=SCons.Defaults.ShLinkAction,
                                 emitter=partial_shared_emitter,
                                 src_suffix='$SHOBJSUFFIX',
                                 src_builder='SharedObject',
                                 SHLINKFLAGS='$PSHLINKFLAGS',
                                 LIBS='')

main.Append(BUILDERS = { 'PartialShared' : partial_shared_builder,
                         'PartialStatic' : partial_static_builder })

# builds in ext are shared across all configs in the build root.
ext_dir = abspath(joinpath(str(main.root), 'ext'))
for root, dirs, files in os.walk(ext_dir):
    if 'SConscript' in files:
        build_dir = os.path.relpath(root, ext_dir)
        main.SConscript(joinpath(root, 'SConscript'),
                        variant_dir=joinpath(build_root, build_dir))

main.Prepend(CPPPATH=Dir('ext/pybind11/include/'))

###################################################
#
# This function is used to set up a directory with switching headers
#
###################################################

main['ALL_ISA_LIST'] = all_isa_list
main['ALL_GPU_ISA_LIST'] = all_gpu_isa_list
all_isa_deps = {}
def make_switching_dir(dname, switch_headers, env):
    # Generate the header.  target[0] is the full path of the output
    # header to generate.  'source' is a dummy variable, since we get the
    # list of ISAs from env['ALL_ISA_LIST'].
    def gen_switch_hdr(target, source, env):
        fname = str(target[0])
        isa = env['TARGET_ISA'].lower()
        try:
            f = open(fname, 'w')
            print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
            f.close()
        except IOError:
            print "Failed to create %s" % fname
            raise
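
    # As an illustration (hypothetical arguments): for dname='arch',
    # TARGET_ISA='arm' and a switch header 'decoder.hh', the generated
    # file would contain just:
    #   #include "arch/arm/decoder.hh"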

    # Build SCons Action object. 'varlist' specifies env vars that this
    # action depends on; when env['ALL_ISA_LIST'] changes these actions
    # should get re-executed.
    switch_hdr_action = MakeAction(gen_switch_hdr,
                        Transform("GENERATE"), varlist=['ALL_ISA_LIST'])

    # Instantiate actions for each header
    for hdr in switch_headers:
        env.Command(hdr, [], switch_hdr_action)

    isa_target = Dir('.').up().name.lower().replace('_', '-')
    all_isa_deps[isa_target] = None

Export('make_switching_dir')

def make_gpu_switching_dir(dname, switch_headers, env):
    # Generate the header.  target[0] is the full path of the output
    # header to generate.  'source' is a dummy variable, since we get the
    # list of ISAs from env['ALL_ISA_LIST'].
    def gen_switch_hdr(target, source, env):
        fname = str(target[0])

        isa = env['TARGET_GPU_ISA'].lower()

        try:
            f = open(fname, 'w')
            print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
            f.close()
        except IOError:
            print "Failed to create %s" % fname
            raise

    # Build SCons Action object. 'varlist' specifies env vars that this
    # action depends on; when env['ALL_ISA_LIST'] changes these actions
    # should get re-executed.
    switch_hdr_action = MakeAction(gen_switch_hdr,
                        Transform("GENERATE"), varlist=['ALL_ISA_GPU_LIST'])

    # Instantiate actions for each header
    for hdr in switch_headers:
        env.Command(hdr, [], switch_hdr_action)

Export('make_gpu_switching_dir')

# all-isas -> all-deps -> all-environs -> all_targets
main.Alias('#all-isas', [])
main.Alias('#all-deps', '#all-isas')

# Dummy target to ensure all environments are created before telling
# SCons what to actually make (the command line arguments).  We attach
# them to the dependence graph after the environments are complete.
ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; gets closure to work.
def environsComplete(target, source, env):
    for t in ORIG_BUILD_TARGETS:
        main.Depends('#all-targets', t)

# Each build/* switching_dir attaches its *-environs target to #all-environs.
main.Append(BUILDERS = {'CompleteEnvirons' :
                        Builder(action=MakeAction(environsComplete, None))})
main.CompleteEnvirons('#all-environs', [])

def doNothing(**ignored): pass
main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})

# The final target to which all the original targets ultimately get attached.
main.Dummy('#all-targets', '#all-environs')
BUILD_TARGETS[:] = ['#all-targets']

###################################################
#
# Define build environments for selected configurations.
#
###################################################

def variant_name(path):
    return os.path.basename(path).lower().replace('_', '-')
main['variant_name'] = variant_name
main['VARIANT_NAME'] = '${variant_name(BUILDDIR)}'

for variant_path in variant_paths:
    if not GetOption('silent'):
        print "Building in", variant_path

    # Make a copy of the build-root environment to use for this config.
    env = main.Clone()
    env['BUILDDIR'] = variant_path

    # variant_dir is the tail component of build path, and is used to
    # determine the build parameters (e.g., 'ALPHA_SE')
    (build_root, variant_dir) = splitpath(variant_path)

    # Set env variables according to the build directory config.
    sticky_vars.files = []
    # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
    # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
    # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
    current_vars_file = joinpath(build_root, 'variables', variant_dir)
    if isfile(current_vars_file):
        sticky_vars.files.append(current_vars_file)
        if not GetOption('silent'):
            print "Using saved variables file %s" % current_vars_file
    else:
        # Build dir-specific variables file doesn't exist.

        # Make sure the directory is there so we can create it later
        opt_dir = dirname(current_vars_file)
        if not isdir(opt_dir):
            mkdir(opt_dir)

        # Get default build variables from source tree.  Variables are
        # normally determined by name of $VARIANT_DIR, but can be
        # overridden by '--default=' arg on command line.
        default = GetOption('default')
        opts_dir = joinpath(main.root.abspath, 'build_opts')
        if default:
            default_vars_files = [joinpath(build_root, 'variables', default),
                                  joinpath(opts_dir, default)]
        else:
            default_vars_files = [joinpath(opts_dir, variant_dir)]
        existing_files = filter(isfile, default_vars_files)
        if existing_files:
            default_vars_file = existing_files[0]
            sticky_vars.files.append(default_vars_file)
            print "Variables file %s not found,\n  using defaults in %s" \
                  % (current_vars_file, default_vars_file)
        else:
            print "Error: cannot find variables file %s or " \
                  "default file(s) %s" \
                  % (current_vars_file, ' or '.join(default_vars_files))
            Exit(1)

    # Apply current variable settings to env
    sticky_vars.Update(env)

    help_texts["local_vars"] += \
        "Build variables for %s:\n" % variant_dir \
        + sticky_vars.GenerateHelpText(env)

    # Process variable settings.

    if not have_fenv and env['USE_FENV']:
        print "Warning: <fenv.h> not available; " \
              "forcing USE_FENV to False in", variant_dir + "."
        env['USE_FENV'] = False

    if not env['USE_FENV']:
        print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
        print "         FP results may deviate slightly from other platforms."

    if env['EFENCE']:
        env.Append(LIBS=['efence'])

    if env['USE_KVM']:
        if not have_kvm:
            print "Warning: Can not enable KVM, host seems to lack KVM support"
            env['USE_KVM'] = False
        elif not is_isa_kvm_compatible(env['TARGET_ISA']):
            print "Info: KVM support disabled due to unsupported host and " \
                  "target ISA combination"
            env['USE_KVM'] = False

    if env['BUILD_GPU']:
        env.Append(CPPDEFINES=['BUILD_GPU'])

    # Warn about missing optional functionality
    if env['USE_KVM']:
        if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
            print "Warning: perf_event headers lack support for the " \
                  "exclude_host attribute. KVM instruction counts will " \
                  "be inaccurate."

    # Save sticky variable settings back to current variables file
    sticky_vars.Save(current_vars_file, env)

    if env['USE_SSE2']:
        env.Append(CCFLAGS=['-msse2'])

    # The src/SConscript file sets up the build rules in 'env' according
    # to the configured variables.  It returns a list of environments,
    # one for each variant build (debug, opt, etc.)
    SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')

def pairwise(iterable):
    "s -> (s0,s1), (s1,s2), (s2, s3), ..."
    a, b = itertools.tee(iterable)
    b.next()
    return itertools.izip(a, b)

# Create false dependencies so SCons will parse ISAs, establish
# dependencies, and set up the build Environments serially. Either
# SCons (likely) and/or our SConscripts (possibly) cannot cope with -j
# greater than 1. It appears to be standard race condition stuff; it
# doesn't always fail, but usually, and the behaviors are different.
# Every time I tried to remove this, builds would fail in some
# creative new way. So, don't do that. You'll want to, though, because
# tests/SConscript takes a long time to make its Environments.
for t1, t2 in pairwise(sorted(all_isa_deps.iterkeys())):
    main.Depends('#%s-deps'     % t2, '#%s-deps'     % t1)
    main.Depends('#%s-environs' % t2, '#%s-environs' % t1)

# base help text
Help('''
Usage: scons [scons options] [build variables] [target(s)]

Extra scons options:
%(options)s

Global build variables:
%(global_vars)s

%(local_vars)s
''' % help_texts)
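
# As a usage illustration (hypothetical values), a full invocation that
# exercises the options and sticky variables handled above might look like:
#   scons -j 4 --colors CPU_MODELS=TimingSimpleCPU,O3CPU build/X86/gem5.opt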