SConstruct revision 12063
# -*- mode:python -*-

# Copyright (c) 2013, 2015, 2016 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder.  You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2011 Advanced Micro Devices, Inc.
# Copyright (c) 2009 The Hewlett-Packard Development Company
# Copyright (c) 2004-2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
#          Nathan Binkert

###################################################
#
# SCons top-level build description (SConstruct) file.
#
# While in this directory ('gem5'), just type 'scons' to build the default
# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
# the optimized full-system version).
#
# You can build gem5 in a different directory as long as there is a
# 'build/<CONFIG>' somewhere along the target path.  The build system
# expects that all configs under the same build directory are being
# built for the same host system.
#
# Examples:
#
#   The following two commands are equivalent.  The '-u' option tells
#   scons to search up the directory tree for this SConstruct file.
#   % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
#   % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
#
#   The following two commands are equivalent and demonstrate building
#   in a directory outside of the source tree.  The '-C' option tells
#   scons to chdir to the specified directory to find this SConstruct
#   file.
#   % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
#   % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
#
# You can use 'scons -H' to print scons options.  If you're in this
# 'gem5' directory (or use -u or -C to tell scons where to find this
# file), you can use 'scons -h' to print all the gem5-specific build
# options as well.
#
###################################################

# Check for recent-enough Python and SCons versions.
try:
    # Really old versions of scons only take two options for the
    # function, so check once without the revision and once with the
    # revision, the first instance will fail for stuff other than
    # 0.98, and the second will fail for 0.98.0
    EnsureSConsVersion(0, 98)
    EnsureSConsVersion(0, 98, 1)
except SystemExit, e:
    print """
For more details, see:
    http://gem5.org/Dependencies
"""
    raise

# We ensure the python version early because python-config
# requires python 2.5
try:
    EnsurePythonVersion(2, 5)
except SystemExit, e:
    print """
You can use a non-default installation of the Python interpreter by
rearranging your PATH so that scons finds the non-default 'python' and
'python-config' first.

For more details, see:
    http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
"""
    raise

# Global Python includes
import itertools
import os
import re
import shutil
import subprocess
import sys

from os import mkdir, environ
from os.path import abspath, basename, dirname, expanduser, normpath
from os.path import exists, isdir, isfile
from os.path import join as joinpath, split as splitpath

# SCons includes
import SCons
import SCons.Node

extra_python_paths = [
    Dir('src/python').srcnode().abspath, # gem5 includes
    Dir('ext/ply').srcnode().abspath,    # ply is used by several files
    ]

sys.path[1:1] = extra_python_paths

from m5.util import compareVersions, readCommand
from m5.util.terminal import get_termcap

help_texts = {
    "options" : "",
    "global_vars" : "",
    "local_vars" : ""
}

Export("help_texts")


# There's a bug in scons in that (1) by default, the help texts from
# AddOption() are supposed to be displayed when you type 'scons -h'
# and (2) you can override the help displayed by 'scons -h' using the
# Help() function, but these two features are incompatible: once
# you've overridden the help text using Help(), there's no way to get
# at the help texts from AddOptions.  See:
#     http://scons.tigris.org/issues/show_bug.cgi?id=2356
#     http://scons.tigris.org/issues/show_bug.cgi?id=2611
# This hack lets us extract the help text from AddOptions and
# re-inject it via Help().  Ideally someday this bug will be fixed and
# we can just use AddOption directly.
def AddLocalOption(*args, **kwargs):
    col_width = 30

    help = " " + ", ".join(args)
    if "help" in kwargs:
        length = len(help)
        if length >= col_width:
            help += "\n" + " " * col_width
        else:
            help += " " * (col_width - length)
        help += kwargs["help"]
    help_texts["options"] += help + "\n"

    AddOption(*args, **kwargs)

AddLocalOption('--colors', dest='use_colors', action='store_true',
               help="Add color to abbreviated scons output")
AddLocalOption('--no-colors', dest='use_colors', action='store_false',
               help="Don't add color to abbreviated scons output")
AddLocalOption('--with-cxx-config', dest='with_cxx_config',
               action='store_true',
               help="Build with support for C++-based configuration")
AddLocalOption('--default', dest='default', type='string', action='store',
               help='Override which build_opts file to use for defaults')
AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
               help='Disable style checking hooks')
AddLocalOption('--no-lto', dest='no_lto', action='store_true',
               help='Disable Link-Time Optimization for fast')
AddLocalOption('--force-lto', dest='force_lto', action='store_true',
               help='Use Link-Time Optimization instead of partial linking' +
                    ' when the compiler doesn\'t support using them together.')
AddLocalOption('--update-ref', dest='update_ref', action='store_true',
               help='Update test reference outputs')
AddLocalOption('--verbose', dest='verbose', action='store_true',
               help='Print full tool command lines')
AddLocalOption('--without-python', dest='without_python',
               action='store_true',
               help='Build without Python configuration support')
AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
               action='store_true',
               help='Disable linking against tcmalloc')
AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
               help='Build with Undefined Behavior Sanitizer if available')
AddLocalOption('--with-asan', dest='with_asan', action='store_true',
               help='Build with Address Sanitizer if available')

if GetOption('no_lto') and GetOption('force_lto'):
    print '--no-lto and --force-lto are mutually exclusive'
    Exit(1)

termcap = get_termcap(GetOption('use_colors'))
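
# The options above are consumed by this SConstruct rather than by scons
# itself; an illustrative invocation combining a couple of them with a
# build target from the header comment:
#   scons --verbose --without-tcmalloc build/ALPHA/gem5.opt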

########################################################################
#
# Set up the main build environment.
#
########################################################################

# export TERM so that clang reports errors in color
use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
                 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
                 'PYTHONPATH', 'RANLIB', 'TERM' ])

use_prefixes = [
    "ASAN_",           # address sanitizer symbolizer path and settings
    "CCACHE_",         # ccache (caching compiler wrapper) configuration
    "CCC_",            # clang static analyzer configuration
    "DISTCC_",         # distcc (distributed compiler wrapper) configuration
    "INCLUDE_SERVER_", # distcc pump server settings
    "M5",              # M5 configuration (e.g., path to kernels)
    ]

use_env = {}
for key,val in sorted(os.environ.iteritems()):
    if key in use_vars or \
            any([key.startswith(prefix) for prefix in use_prefixes]):
        use_env[key] = val

# Tell scons to avoid implicit command dependencies to avoid issues
# with the param wrappers being compiled twice (see
# http://scons.tigris.org/issues/show_bug.cgi?id=2811)
main = Environment(ENV=use_env, IMPLICIT_COMMAND_DEPENDENCIES=0)
main.Decider('MD5-timestamp')
main.root = Dir(".")         # The current directory (where this file lives).
main.srcdir = Dir("src")     # The source directory

main_dict_keys = main.Dictionary().keys()

# Check that we have a C/C++ compiler
if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
    print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
    Exit(1)

# add useful python code to PYTHONPATH so it can be used by subprocesses
# as well
main.AppendENVPath('PYTHONPATH', extra_python_paths)

########################################################################
#
# Mercurial Stuff.
#
# If the gem5 directory is a mercurial repository, we should do some
# extra things.
#
########################################################################

hgdir = main.root.Dir(".hg")


style_message = """
You're missing the gem5 style hook, which automatically checks your code
against the gem5 style rules on %s.
This script will now install the hook in your %s.
Press enter to continue, or ctrl-c to abort: """

mercurial_style_message = """
You're missing the gem5 style hook, which automatically checks your code
against the gem5 style rules on hg commit and qrefresh commands.
This script will now install the hook in your .hg/hgrc file.
Press enter to continue, or ctrl-c to abort: """

git_style_message = """
You're missing the gem5 style or commit message hook. These hooks help
to ensure that your code follows gem5's style rules on git commit.
This script will now install the hook in your .git/hooks/ directory.
Press enter to continue, or ctrl-c to abort: """

mercurial_style_upgrade_message = """
Your Mercurial style hooks are not up-to-date. This script will now
try to automatically update them. A backup of your hgrc will be saved
in .hg/hgrc.old.
Press enter to continue, or ctrl-c to abort: """

mercurial_style_hook = """
# The following lines were automatically added by gem5/SConstruct
# to provide the gem5 style-checking hooks
[extensions]
hgstyle = %s/util/hgstyle.py

[hooks]
pretxncommit.style = python:hgstyle.check_style
pre-qrefresh.style = python:hgstyle.check_style
# End of SConstruct additions

""" % (main.root.abspath)

mercurial_lib_not_found = """
Mercurial libraries cannot be found, ignoring style hook. If
you are a gem5 developer, please fix this and run the style
hook. It is important.
"""

# Check for style hook and prompt for installation if it's not there.
# Skip this if --ignore-style was specified, there's no interactive
# terminal to prompt, or no recognized revision control system can be
# found.
ignore_style = GetOption('ignore_style') or not sys.stdin.isatty()

# Try to wire up Mercurial to the style hooks
if not ignore_style and hgdir.exists():
    style_hook = True
    style_hooks = tuple()
    hgrc = hgdir.File('hgrc')
    hgrc_old = hgdir.File('hgrc.old')
    try:
        from mercurial import ui
        ui = ui.ui()
        ui.readconfig(hgrc.abspath)
        style_hooks = (ui.config('hooks', 'pretxncommit.style', None),
                       ui.config('hooks', 'pre-qrefresh.style', None))
        style_hook = all(style_hooks)
        style_extension = ui.config('extensions', 'style', None)
    except ImportError:
        print mercurial_lib_not_found

    if "python:style.check_style" in style_hooks:
        # Try to upgrade the style hooks
        print mercurial_style_upgrade_message
        # continue unless user does ctrl-c/ctrl-d etc.
        try:
            raw_input()
        except:
            print "Input exception, exiting scons.\n"
            sys.exit(1)
        shutil.copyfile(hgrc.abspath, hgrc_old.abspath)
        re_style_hook = re.compile(r"^([^=#]+)\.style\s*=\s*([^#\s]+).*")
        re_style_extension = re.compile("style\s*=\s*([^#\s]+).*")
        old, new = open(hgrc_old.abspath, 'r'), open(hgrc.abspath, 'w')
        for l in old:
            m_hook = re_style_hook.match(l)
            m_ext = re_style_extension.match(l)
            if m_hook:
                hook, check = m_hook.groups()
                if check != "python:style.check_style":
                    print "Warning: %s.style is using a non-default " \
                        "checker: %s" % (hook, check)
                if hook not in ("pretxncommit", "pre-qrefresh"):
                    print "Warning: Updating unknown style hook: %s" % hook

                l = "%s.style = python:hgstyle.check_style\n" % hook
            elif m_ext and m_ext.group(1) == style_extension:
                l = "hgstyle = %s/util/hgstyle.py\n" % main.root.abspath

            new.write(l)
    elif not style_hook:
        print mercurial_style_message,
        # continue unless user does ctrl-c/ctrl-d etc.
        try:
            raw_input()
        except:
            print "Input exception, exiting scons.\n"
            sys.exit(1)
        hgrc_path = '%s/.hg/hgrc' % main.root.abspath
        print "Adding style hook to", hgrc_path, "\n"
        try:
            with open(hgrc_path, 'a') as f:
                f.write(mercurial_style_hook)
        except:
            print "Error updating", hgrc_path
            sys.exit(1)

def install_git_style_hooks():
    try:
        gitdir = Dir(readCommand(
            ["git", "rev-parse", "--git-dir"]).strip("\n"))
    except Exception, e:
        print "Warning: Failed to find git repo directory: %s" % e
        return

    git_hooks = gitdir.Dir("hooks")
    def hook_exists(hook_name):
        hook = git_hooks.File(hook_name)
        return hook.exists()

    def hook_install(hook_name, script):
        hook = git_hooks.File(hook_name)
        if hook.exists():
            print "Warning: Can't install %s, hook already exists." % hook_name
            return

        if hook.islink():
            print "Warning: Removing broken symlink for hook %s." % hook_name
            os.unlink(hook.get_abspath())

        if not git_hooks.exists():
            mkdir(git_hooks.get_abspath())
            git_hooks.clear()

        abs_symlink_hooks = git_hooks.islink() and \
            os.path.isabs(os.readlink(git_hooks.get_abspath()))

        # Use a relative symlink if the hooks live in the source directory,
        # and the hooks directory is not a symlink to an absolute path.
        if hook.is_under(main.root) and not abs_symlink_hooks:
            script_path = os.path.relpath(
                os.path.realpath(script.get_abspath()),
                os.path.realpath(hook.Dir(".").get_abspath()))
        else:
            script_path = script.get_abspath()

        try:
            os.symlink(script_path, hook.get_abspath())
        except:
            print "Error updating git %s hook" % hook_name
            raise

    if hook_exists("pre-commit") and hook_exists("commit-msg"):
        return

    print git_style_message,
    try:
        raw_input()
    except:
        print "Input exception, exiting scons.\n"
        sys.exit(1)

    git_style_script = File("util/git-pre-commit.py")
    git_msg_script = File("ext/git-commit-msg")

    hook_install("pre-commit", git_style_script)
    hook_install("commit-msg", git_msg_script)

# Try to wire up git to the style hooks
if not ignore_style and main.root.Entry(".git").exists():
    install_git_style_hooks()

###################################################
#
# Figure out which configurations to set up based on the path(s) of
# the target(s).
#
###################################################

# Find default configuration & binary.
Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))

# helper function: find last occurrence of element in list
def rfind(l, elt, offs = -1):
    for i in range(len(l)+offs, 0, -1):
        if l[i] == elt:
            return i
    raise ValueError, "element not found"

# Take a list of paths (or SCons Nodes) and return a list with all
# paths made absolute and ~-expanded.  Paths will be interpreted
# relative to the launch directory unless a different root is provided
def makePathListAbsolute(path_list, root=GetLaunchDir()):
    return [abspath(joinpath(root, expanduser(str(p))))
            for p in path_list]

# Each target must have 'build' in the interior of the path; the
# directory below this will determine the build parameters.  For
# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
# recognize that ALPHA_SE specifies the configuration because it
# follows 'build' in the build path.
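# For example, for the target '/local/foo/build/ALPHA/gem5.debug' used in
# the header comment, the loop below derives a build root of
# '/local/foo/build' and a variant path of '/local/foo/build/ALPHA'.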

# The funky assignment to "[:]" is needed to replace the list contents
# in place rather than reassign the symbol to a new list, which
# doesn't work (obviously!).
BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)

# Generate a list of the unique build roots and configs that the
# collected targets reference.
variant_paths = []
build_root = None
for t in BUILD_TARGETS:
    path_dirs = t.split('/')
    try:
        build_top = rfind(path_dirs, 'build', -2)
    except:
        print "Error: no non-leaf 'build' dir found on target path", t
        Exit(1)
    this_build_root = joinpath('/',*path_dirs[:build_top+1])
    if not build_root:
        build_root = this_build_root
    else:
        if this_build_root != build_root:
            print "Error: build targets not under same build root\n"\
                  "  %s\n  %s" % (build_root, this_build_root)
            Exit(1)
    variant_path = joinpath('/',*path_dirs[:build_top+2])
    if variant_path not in variant_paths:
        variant_paths.append(variant_path)

# Make sure build_root exists (might not if this is the first build there)
if not isdir(build_root):
    mkdir(build_root)
main['BUILDROOT'] = build_root

Export('main')

main.SConsignFile(joinpath(build_root, "sconsign"))

# Default duplicate option is to use hard links, but this messes up
# when you use emacs to edit a file in the target dir, as emacs moves
# file to file~ then copies to file, breaking the link.  Symbolic
# (soft) links work better.
main.SetOption('duplicate', 'soft-copy')

#
# Set up global sticky variables... these are common to an entire build
# tree (not specific to a particular build like ALPHA_SE)
#

global_vars_file = joinpath(build_root, 'variables.global')

global_vars = Variables(global_vars_file, args=ARGUMENTS)

global_vars.AddVariables(
    ('CC', 'C compiler', environ.get('CC', main['CC'])),
    ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
    ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
    ('BATCH', 'Use batch pool for build and tests', False),
    ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
    ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
    ('EXTRAS', 'Add extra directories to the compilation', '')
    )

# Update main environment with values from ARGUMENTS & global_vars_file
global_vars.Update(main)
help_texts["global_vars"] += global_vars.GenerateHelpText(main)

# Save sticky variable settings back to current variables file
global_vars.Save(global_vars_file, main)

# Parse EXTRAS variable to build list of all directories where we're
# looking for sources etc.  This list is exported as extras_dir_list.
base_dir = main.srcdir.abspath
if main['EXTRAS']:
    extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
else:
    extras_dir_list = []

Export('base_dir')
Export('extras_dir_list')

# the ext directory should be on the #includes path
main.Append(CPPPATH=[Dir('ext')])

def strip_build_path(path, env):
    path = str(path)
    variant_base = env['BUILDROOT'] + os.path.sep
    if path.startswith(variant_base):
        path = path[len(variant_base):]
    elif path.startswith('build/'):
        path = path[6:]
    return path
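
# For example (path purely illustrative),
# strip_build_path('build/ALPHA/sim/main.cc', env) returns 'ALPHA/sim/main.cc'.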

# Generate a string of the form:
#   common/path/prefix/src1, src2 -> tgt1, tgt2
# to print while building.
class Transform(object):
    # all specific color settings should be here and nowhere else
    tool_color = termcap.Normal
    pfx_color = termcap.Yellow
    srcs_color = termcap.Yellow + termcap.Bold
    arrow_color = termcap.Blue + termcap.Bold
    tgts_color = termcap.Yellow + termcap.Bold

    def __init__(self, tool, max_sources=99):
        self.format = self.tool_color + (" [%8s] " % tool) \
                      + self.pfx_color + "%s" \
                      + self.srcs_color + "%s" \
                      + self.arrow_color + " -> " \
                      + self.tgts_color + "%s" \
                      + termcap.Normal
        self.max_sources = max_sources

    def __call__(self, target, source, env, for_signature=None):
        # truncate source list according to max_sources param
        source = source[0:self.max_sources]
        def strip(f):
            return strip_build_path(str(f), env)
        if len(source) > 0:
            srcs = map(strip, source)
        else:
            srcs = ['']
        tgts = map(strip, target)
        # surprisingly, os.path.commonprefix is a dumb char-by-char string
        # operation that has nothing to do with paths.
        com_pfx = os.path.commonprefix(srcs + tgts)
        com_pfx_len = len(com_pfx)
        if com_pfx:
            # do some cleanup and sanity checking on common prefix
            if com_pfx[-1] == ".":
                # prefix matches all but file extension: ok
                # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
                com_pfx = com_pfx[0:-1]
            elif com_pfx[-1] == "/":
                # common prefix is directory path: OK
                pass
            else:
                src0_len = len(srcs[0])
                tgt0_len = len(tgts[0])
                if src0_len == com_pfx_len:
                    # source is a substring of target, OK
                    pass
                elif tgt0_len == com_pfx_len:
                    # target is a substring of source, need to back up to
                    # avoid empty string on RHS of arrow
                    sep_idx = com_pfx.rfind(".")
                    if sep_idx != -1:
                        com_pfx = com_pfx[0:sep_idx]
                    else:
                        com_pfx = ''
                elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
                    # still splitting at file extension: ok
                    pass
                else:
                    # probably a fluke; ignore it
                    com_pfx = ''
            # recalculate length in case com_pfx was modified
            com_pfx_len = len(com_pfx)
        def fmt(files):
            f = map(lambda s: s[com_pfx_len:], files)
            return ', '.join(f)
        return self.format % (com_pfx, fmt(srcs), fmt(tgts))

Export('Transform')

# enable the regression script to use the termcap
main['TERMCAP'] = termcap

if GetOption('verbose'):
    def MakeAction(action, string, *args, **kwargs):
        return Action(action, *args, **kwargs)
else:
    MakeAction = Action
    main['CCCOMSTR'] = Transform("CC")
    main['CXXCOMSTR'] = Transform("CXX")
    main['ASCOMSTR'] = Transform("AS")
    main['ARCOMSTR'] = Transform("AR", 0)
    main['LINKCOMSTR'] = Transform("LINK", 0)
    main['SHLINKCOMSTR'] = Transform("SHLINK", 0)
    main['RANLIBCOMSTR'] = Transform("RANLIB", 0)
    main['M4COMSTR'] = Transform("M4")
    main['SHCCCOMSTR'] = Transform("SHCC")
    main['SHCXXCOMSTR'] = Transform("SHCXX")
Export('MakeAction')
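
# With the Transform strings above, the abbreviated build output looks
# roughly like this (file names purely illustrative):
#    [     CXX] ALPHA/sim/main.cc -> .o
#    [    LINK]  -> ALPHA/gem5.opt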

# Initialize the Link-Time Optimization (LTO) flags
main['LTO_CCFLAGS'] = []
main['LTO_LDFLAGS'] = []

# According to the readme, tcmalloc works best if the compiler doesn't
# assume that we're using the builtin malloc and friends. These flags
# are compiler-specific, so we need to set them after we detect which
# compiler we're using.
main['TCMALLOC_CCFLAGS'] = []

CXX_version = readCommand([main['CXX'],'--version'], exception=False)
CXX_V = readCommand([main['CXX'],'-V'], exception=False)

main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
if main['GCC'] + main['CLANG'] > 1:
    print 'Error: How can we have two at the same time?'
    Exit(1)

# Set up default C++ compiler flags
if main['GCC'] or main['CLANG']:
    # As gcc and clang share many flags, do the common parts here
    main.Append(CCFLAGS=['-pipe'])
    main.Append(CCFLAGS=['-fno-strict-aliasing'])
    # Enable -Wall and -Wextra and then disable the few warnings that
    # we consistently violate
    main.Append(CCFLAGS=['-Wall', '-Wundef', '-Wextra',
                         '-Wno-sign-compare', '-Wno-unused-parameter'])
    # We always compile using C++11
    main.Append(CXXFLAGS=['-std=c++11'])
    if sys.platform.startswith('freebsd'):
        main.Append(CCFLAGS=['-I/usr/local/include'])
        main.Append(CXXFLAGS=['-I/usr/local/include'])

    main['FILTER_PSHLINKFLAGS'] = lambda x: str(x).replace(' -shared', '')
    main['PSHLINKFLAGS'] = main.subst('${FILTER_PSHLINKFLAGS(SHLINKFLAGS)}')
    main['PLINKFLAGS'] = main.subst('${LINKFLAGS}')
    shared_partial_flags = ['-r', '-nostdlib']
    main.Append(PSHLINKFLAGS=shared_partial_flags)
    main.Append(PLINKFLAGS=shared_partial_flags)
else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
              termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use a compiler other than GCC"
    print "       or clang, there appears to be something wrong with your"
    print "       environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to fix SConstruct and "
    print "       src/SConscript to support that compiler."
    Exit(1)

if main['GCC']:
    # Check for a supported version of gcc. >= 4.8 is chosen for its
    # level of c++11 support. See
    # http://gcc.gnu.org/projects/cxx0x.html for details.
    gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
    if compareVersions(gcc_version, "4.8") < 0:
        print 'Error: gcc version 4.8 or newer required.'
        print '       Installed version:', gcc_version
        Exit(1)

    main['GCC_VERSION'] = gcc_version

    if compareVersions(gcc_version, '4.9') >= 0:
        # Incremental linking with LTO is currently broken in gcc versions
        # 4.9 and above. A version where everything works completely hasn't
        # yet been identified.
        #
        # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=67548
        main['BROKEN_INCREMENTAL_LTO'] = True
    if compareVersions(gcc_version, '6.0') >= 0:
        # gcc versions 6.0 and greater accept an -flinker-output flag which
        # selects what type of output the linker should generate. This is
        # necessary for incremental lto to work, but is also broken in
        # current versions of gcc. It may not be necessary in future
        # versions. We add it here since it might be, and as a reminder that
        # it exists. It's excluded if lto is being forced.
        #
        # https://gcc.gnu.org/gcc-6/changes.html
        # https://gcc.gnu.org/ml/gcc-patches/2015-11/msg03161.html
        # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=69866
        if not GetOption('force_lto'):
            main.Append(PSHLINKFLAGS='-flinker-output=rel')
            main.Append(PLINKFLAGS='-flinker-output=rel')

    # gcc from version 4.8 and above generates "rep; ret" instructions
    # to avoid performance penalties on certain AMD chips. Older
    # assemblers detect this as an error, "Error: expecting string
    # instruction after `rep'"
    as_version_raw = readCommand([main['AS'], '-v', '/dev/null',
                                  '-o', '/dev/null'],
                                 exception=False).split()

    # version strings may contain extra distro-specific
    # qualifiers, so play it safe and keep only what comes before
    # the first hyphen
    as_version = as_version_raw[-1].split('-')[0] if as_version_raw else None

    if not as_version or compareVersions(as_version, "2.23") < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: This combination of gcc and binutils has' + \
            ' known incompatibilities.\n' + \
            '         If you encounter build problems, please update ' + \
            'binutils to 2.23.' + \
            termcap.Normal

    # Make sure we warn if the user has requested to compile with the
    # Undefined Behavior Sanitizer and this version of gcc does not
    # support it.
    if GetOption('with_ubsan') and \
            compareVersions(gcc_version, '4.9') < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: UBSan is only supported using gcc 4.9 and later.' + \
            termcap.Normal

    disable_lto = GetOption('no_lto')
    if not disable_lto and main.get('BROKEN_INCREMENTAL_LTO', False) and \
            not GetOption('force_lto'):
        print termcap.Yellow + termcap.Bold + \
            'Warning: Your compiler doesn\'t support incremental linking' + \
            ' and lto at the same time, so lto is being disabled. To force' + \
            ' lto on anyway, use the --force-lto option. That will disable' + \
            ' partial linking.' + \
            termcap.Normal
        disable_lto = True

    # Add the appropriate Link-Time Optimization (LTO) flags
    # unless LTO is explicitly turned off. Note that these flags
    # are only used by the fast target.
    if not disable_lto:
        # Pass the LTO flag when compiling to produce GIMPLE
        # output, we merely create the flags here and only append
        # them later
        main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

        # Use the same amount of jobs for LTO as we are running
        # scons with
        main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
                                  '-fno-builtin-realloc', '-fno-builtin-free'])

    # add option to check for undeclared overrides
    if compareVersions(gcc_version, "5.0") > 0:
        main.Append(CCFLAGS=['-Wno-error=suggest-override'])

elif main['CLANG']:
    # Check for a supported version of clang, >= 3.1 is needed to
    # support similar features as gcc 4.8. See
    # http://clang.llvm.org/cxx_status.html for details
    clang_version_re = re.compile(".* version (\d+\.\d+)")
    clang_version_match = clang_version_re.search(CXX_version)
    if (clang_version_match):
        clang_version = clang_version_match.groups()[0]
        if compareVersions(clang_version, "3.1") < 0:
            print 'Error: clang version 3.1 or newer required.'
            print '       Installed version:', clang_version
            Exit(1)
    else:
        print 'Error: Unable to determine clang version.'
        Exit(1)

    # clang has a few additional warnings that we disable, extraneous
    # parentheses are allowed due to Ruby's printing of the AST,
    # finally self assignments are allowed as the generated CPU code
    # is relying on this
    main.Append(CCFLAGS=['-Wno-parentheses',
                         '-Wno-self-assign',
                         # Some versions of libstdc++ (4.8?) seem to
                         # use struct hash and class hash
                         # interchangeably.
                         '-Wno-mismatched-tags',
                         ])

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])

    # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
    # opposed to libstdc++, as the latter is dated.
    if sys.platform == "darwin":
        main.Append(CXXFLAGS=['-stdlib=libc++'])
        main.Append(LIBS=['c++'])

    # On FreeBSD we need libthr.
    if sys.platform.startswith('freebsd'):
        main.Append(LIBS=['thr'])

else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
              termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use a compiler other than GCC"
    print "       or clang, there appears to be something wrong with your"
    print "       environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to fix SConstruct and "
    print "       src/SConscript to support that compiler."
    Exit(1)

# Set up common yacc/bison flags (needed for Ruby)
main['YACCFLAGS'] = '-d'
main['YACCHXXFILESUFFIX'] = '.hh'

# Do this after we save settings back, or else we'll tack on an
# extra 'qdo' every time we run scons.
if main['BATCH']:
    main['CC']     = main['BATCH_CMD'] + ' ' + main['CC']
    main['CXX']    = main['BATCH_CMD'] + ' ' + main['CXX']
    main['AS']     = main['BATCH_CMD'] + ' ' + main['AS']
    main['AR']     = main['BATCH_CMD'] + ' ' + main['AR']
    main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']

if sys.platform == 'cygwin':
    # cygwin has some header file issues...
    main.Append(CCFLAGS=["-Wno-uninitialized"])

# Check for the protobuf compiler
protoc_version = readCommand([main['PROTOC'], '--version'],
                             exception='').split()

# First two words should be "libprotoc x.y.z"
if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
    print termcap.Yellow + termcap.Bold + \
        'Warning: Protocol buffer compiler (protoc) not found.\n' + \
        '         Please install protobuf-compiler for tracing support.' + \
        termcap.Normal
    main['PROTOC'] = False
else:
    # Based on the availability of the compress stream wrappers,
    # require 2.1.0
    min_protoc_version = '2.1.0'
    if compareVersions(protoc_version[1], min_protoc_version) < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: protoc version', min_protoc_version, \
            'or newer required.\n' + \
            '         Installed version:', protoc_version[1], \
            termcap.Normal
        main['PROTOC'] = False
    else:
        # Attempt to determine the appropriate include path and
        # library path using pkg-config, that means we also need to
        # check for pkg-config. Note that it is possible to use
        # protobuf without the involvement of pkg-config. Later on we
        # do a library config check and at that point the test
        # will fail if libprotobuf cannot be found.
        if readCommand(['pkg-config', '--version'], exception=''):
            try:
                # Attempt to establish what linking flags to add for protobuf
                # using pkg-config
                main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
            except:
                print termcap.Yellow + termcap.Bold + \
                    'Warning: pkg-config could not get protobuf flags.' + \
                    termcap.Normal


# Check for 'timeout' from GNU coreutils. If present, regressions will
# be run with a time limit. We require version 8.13 since we rely on
# support for the '--foreground' option.
if sys.platform.startswith('freebsd'):
    timeout_lines = readCommand(['gtimeout', '--version'],
                                exception='').splitlines()
else:
    timeout_lines = readCommand(['timeout', '--version'],
                                exception='').splitlines()
# Get the first line and tokenize it
timeout_version = timeout_lines[0].split() if timeout_lines else []
main['TIMEOUT'] = timeout_version and \
    compareVersions(timeout_version[-1], '8.13') >= 0

# Add a custom Check function to test for structure members.
def CheckMember(context, include, decl, member, include_quotes="<>"):
    context.Message("Checking for member %s in %s..." %
                    (member, decl))
    text = """
#include %(header)s
int main(){
  %(decl)s test;
  (void)test.%(member)s;
  return 0;
};
""" % { "header" : include_quotes[0] + include + include_quotes[1],
        "decl" : decl,
        "member" : member,
        }

    ret = context.TryCompile(text, extension=".cc")
    context.Result(ret)
    return ret

# Platform-specific configuration.  Note again that we assume that all
# builds under a given build root run on the same host platform.
conf = Configure(main,
                 conf_dir = joinpath(build_root, '.scons_config'),
                 log_file = joinpath(build_root, 'scons_config.log'),
                 custom_tests = {
                     'CheckMember' : CheckMember,
                 })

# Check if we should compile a 64 bit binary on Mac OS X/Darwin
try:
    import platform
    uname = platform.uname()
    if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
        if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
            main.Append(CCFLAGS=['-arch', 'x86_64'])
            main.Append(CFLAGS=['-arch', 'x86_64'])
            main.Append(LINKFLAGS=['-arch', 'x86_64'])
            main.Append(ASFLAGS=['-arch', 'x86_64'])
except:
    pass

# Recent versions of scons substitute a "Null" object for Configure()
# when configuration isn't necessary, e.g., if the "--help" option is
# present.  Unfortunately this Null object always returns false,
# breaking all our configuration checks.  We replace it with our own
# more optimistic null object that returns True instead.
if not conf:
    def NullCheck(*args, **kwargs):
        return True

    class NullConf:
        def __init__(self, env):
            self.env = env
        def Finish(self):
            return self.env
        def __getattr__(self, mname):
            return NullCheck

    conf = NullConf(main)

# Cache build files in the supplied directory.
if main['M5_BUILD_CACHE']:
    print 'Using build cache located at', main['M5_BUILD_CACHE']
    CacheDir(main['M5_BUILD_CACHE'])
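
# An illustrative way to enable the cache from the command line (the cache
# path here is hypothetical); M5_BUILD_CACHE is one of the sticky global
# variables declared above:
#   scons M5_BUILD_CACHE=/tmp/gem5-cache build/ALPHA/gem5.opt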

main['USE_PYTHON'] = not GetOption('without_python')
if main['USE_PYTHON']:
    # Find Python include and library directories for embedding the
    # interpreter. We rely on python-config to resolve the appropriate
    # includes and linker flags. ParseConfig does not seem to understand
    # the more exotic linker flags such as -Xlinker and -export-dynamic so
    # we add them explicitly below. If you want to link in an alternate
    # version of python, see above for instructions on how to invoke
    # scons with the appropriate PATH set.
    #
    # First we check if python2-config exists, else we use python-config
    python_config = readCommand(['which', 'python2-config'],
                                exception='').strip()
    if not os.path.exists(python_config):
        python_config = readCommand(['which', 'python-config'],
                                    exception='').strip()
    py_includes = readCommand([python_config, '--includes'],
                              exception='').split()
    # Strip the -I from the include folders before adding them to the
    # CPPPATH
    main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))

    # Read the linker flags and split them into libraries and other link
    # flags. The libraries are added later through the call to CheckLib.
    py_ld_flags = readCommand([python_config, '--ldflags'],
                              exception='').split()
    py_libs = []
    for lib in py_ld_flags:
        if not lib.startswith('-l'):
            main.Append(LINKFLAGS=[lib])
        else:
            lib = lib[2:]
            if lib not in py_libs:
                py_libs.append(lib)

    # verify that this stuff works
    if not conf.CheckHeader('Python.h', '<>'):
        print "Error: can't find Python.h header in", py_includes
        print "Install Python headers (package python-dev on Ubuntu and RedHat)"
        Exit(1)

    for lib in py_libs:
        if not conf.CheckLib(lib):
            print "Error: can't find library %s required by python" % lib
            Exit(1)

# On Solaris you need to use libsocket for socket ops
if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
    if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++',
                                   'accept(0,0,0);'):
        print "Can't find library with socket calls (e.g. accept())"
        Exit(1)

# Check for zlib. If the check passes, libz will be automatically
# added to the LIBS environment variable.
if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
    print 'Error: did not find needed zlib compression library '\
          'and/or zlib.h header file.'
    print '       Please install zlib and try again.'
    Exit(1)

# If we have the protobuf compiler, also make sure we have the
# development libraries. If the check passes, libprotobuf will be
# automatically added to the LIBS environment variable. After
# this, we can use the HAVE_PROTOBUF flag to determine if we have
# got both protoc and libprotobuf available.
main['HAVE_PROTOBUF'] = main['PROTOC'] and \
    conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
                            'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')

# If we have the compiler but not the library, print another warning.
if main['PROTOC'] and not main['HAVE_PROTOBUF']:
    print termcap.Yellow + termcap.Bold + \
        'Warning: did not find protocol buffer library and/or headers.\n' + \
        '         Please install libprotobuf-dev for tracing support.' + \
        termcap.Normal

# Check for librt.
have_posix_clock = \
    conf.CheckLibWithHeader(None, 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);') or \
    conf.CheckLibWithHeader('rt', 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);')

have_posix_timers = \
    conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
                            'timer_create(CLOCK_MONOTONIC, NULL, NULL);')

if not GetOption('without_tcmalloc'):
    if conf.CheckLib('tcmalloc'):
        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
    elif conf.CheckLib('tcmalloc_minimal'):
        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
    else:
        print termcap.Yellow + termcap.Bold + \
              "You can get a 12% performance improvement by "\
              "installing tcmalloc (libgoogle-perftools-dev package "\
              "on Ubuntu or RedHat)." + termcap.Normal


# Detect back trace implementations. The last implementation in the
# list will be used by default.
backtrace_impls = [ "none" ]

if conf.CheckLibWithHeader(None, 'execinfo.h', 'C',
                           'backtrace_symbols_fd((void*)0, 0, 0);'):
    backtrace_impls.append("glibc")
elif conf.CheckLibWithHeader('execinfo', 'execinfo.h', 'C',
                             'backtrace_symbols_fd((void*)0, 0, 0);'):
    # NetBSD and FreeBSD need libexecinfo.
    backtrace_impls.append("glibc")
    main.Append(LIBS=['execinfo'])

if backtrace_impls[-1] == "none":
    default_backtrace_impl = "none"
    print termcap.Yellow + termcap.Bold + \
        "No suitable back trace implementation found." + \
        termcap.Normal

if not have_posix_clock:
    print "Can't find library for POSIX clocks."

# Check for <fenv.h> (C99 FP environment control)
have_fenv = conf.CheckHeader('fenv.h', '<>')
if not have_fenv:
    print "Warning: Header file <fenv.h> not found."
    print "         This host has no IEEE FP rounding mode control."

# Check if we should enable KVM-based hardware virtualization. The API
# we rely on exists since version 2.6.36 of the kernel, but somehow
# the KVM_API_VERSION does not reflect the change. We test for one of
# the types as a fall back.
have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
if not have_kvm:
    print "Info: Compatible header file <linux/kvm.h> not found, " \
        "disabling KVM support."

# Check if the TUN/TAP driver is available.
have_tuntap = conf.CheckHeader('linux/if_tun.h', '<>')
if not have_tuntap:
    print "Info: Compatible header file <linux/if_tun.h> not found."

# x86 needs support for xsave. We test for the structure here since we
# won't be able to run new tests by the time we know which ISA we're
# targeting.
have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
                                    '#include <linux/kvm.h>') != 0

# Check if the requested target ISA is compatible with the host
def is_isa_kvm_compatible(isa):
    try:
        import platform
        host_isa = platform.machine()
    except:
        print "Warning: Failed to determine host ISA."
        return False

    if not have_posix_timers:
        print "Warning: Can not enable KVM, host seems to lack support " \
            "for POSIX timers"
        return False

    if isa == "arm":
        return host_isa in ( "armv7l", "aarch64" )
    elif isa == "x86":
        if host_isa != "x86_64":
            return False

        if not have_kvm_xsave:
            print "KVM on x86 requires xsave support in kernel headers."
            return False

        return True
    else:
        return False


# Check if the exclude_host attribute is available. We want this to
# get accurate instruction counts in KVM.
main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
    'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')


######################################################################
#
# Finish the configuration
#
main = conf.Finish()

######################################################################
#
# Collect all non-global variables
#

# Define the universe of supported ISAs
all_isa_list = [ ]
all_gpu_isa_list = [ ]
Export('all_isa_list')
Export('all_gpu_isa_list')

class CpuModel(object):
    '''The CpuModel class encapsulates everything the ISA parser needs to
    know about a particular CPU model.'''

    # Dict of available CPU model objects.  Accessible as CpuModel.dict.
    dict = {}

    # Constructor.  Automatically adds models to CpuModel.dict.
    def __init__(self, name, default=False):
        self.name = name           # name of model

        # This cpu is enabled by default
        self.default = default

        # Add self to dict
        if name in CpuModel.dict:
            raise AttributeError, "CpuModel '%s' already registered" % name
        CpuModel.dict[name] = self

Export('CpuModel')

# Sticky variables get saved in the variables file so they persist from
# one invocation to the next (unless overridden, in which case the new
# value becomes sticky).
sticky_vars = Variables(args=ARGUMENTS)
Export('sticky_vars')

# Sticky variables that should be exported
export_vars = []
Export('export_vars')

# For Ruby
all_protocols = []
Export('all_protocols')
protocol_dirs = []
Export('protocol_dirs')
slicc_includes = []
Export('slicc_includes')

# Walk the tree and execute all SConsopts scripts that will add to the
# above variables
if GetOption('verbose'):
    print "Reading SConsopts"
for bdir in [ base_dir ] + extras_dir_list:
    if not isdir(bdir):
        print "Error: directory '%s' does not exist" % bdir
        Exit(1)
    for root, dirs, files in os.walk(bdir):
        if 'SConsopts' in files:
            if GetOption('verbose'):
                print "Reading", joinpath(root, 'SConsopts')
            SConscript(joinpath(root, 'SConsopts'))

all_isa_list.sort()
all_gpu_isa_list.sort()

sticky_vars.AddVariables(
    EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
    EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list),
    ListVariable('CPU_MODELS', 'CPU models',
                 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
                 sorted(CpuModel.dict.keys())),
    BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
                 False),
    BoolVariable('SS_COMPATIBLE_FP',
                 'Make floating-point results compatible with SimpleScalar',
                 False),
    BoolVariable('USE_SSE2',
                 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
                 False),
    BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
    BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
    BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability',
                 False),
    BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models',
                 have_kvm),
    BoolVariable('USE_TUNTAP',
                 'Enable using a tap device to bridge to the host network',
                 have_tuntap),
    BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
    EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
                 all_protocols),
    EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
                 backtrace_impls[-1], backtrace_impls)
    )

# These variables get exported to #defines in config/*.hh (see src/SConscript).
export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
                'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'USE_TUNTAP',
                'PROTOCOL', 'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST']

###################################################
#
# Define a SCons builder for configuration flag headers.
#
###################################################

# This function generates a config header file that #defines the
# variable symbol to the current variable setting (0 or 1).  The source
# operands are the name of the variable and a Value node containing the
# value of the variable.
def build_config_file(target, source, env):
    (variable, value) = [s.get_contents() for s in source]
    f = file(str(target[0]), 'w')
    print >> f, '#define', variable, value
    f.close()
    return None

# Combine the two functions into a scons Action object.
config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))

# The emitter munges the source & target node lists to reflect what
# we're really doing.
def config_emitter(target, source, env):
    # extract variable name from Builder arg
    variable = str(target[0])
    # True target is config header file
    target = joinpath('config', variable.lower() + '.hh')
    val = env[variable]
    if isinstance(val, bool):
        # Force value to 0/1
        val = int(val)
    elif isinstance(val, str):
        val = '"' + val + '"'

    # Sources are variable name & value (packaged in SCons Value nodes)
    return ([target], [Value(variable), Value(val)])

config_builder = Builder(emitter = config_emitter, action = config_action)

main.Append(BUILDERS = { 'ConfigFile' : config_builder })

###################################################
#
# Builders for static and shared partially linked object files.
#
###################################################

partial_static_builder = Builder(action=SCons.Defaults.LinkAction,
                                 src_suffix='$OBJSUFFIX',
                                 src_builder=['StaticObject', 'Object'],
                                 LINKFLAGS='$PLINKFLAGS',
                                 LIBS='')

def partial_shared_emitter(target, source, env):
    for tgt in target:
        tgt.attributes.shared = 1
    return (target, source)
partial_shared_builder = Builder(action=SCons.Defaults.ShLinkAction,
                                 emitter=partial_shared_emitter,
                                 src_suffix='$SHOBJSUFFIX',
                                 src_builder='SharedObject',
                                 SHLINKFLAGS='$PSHLINKFLAGS',
                                 LIBS='')

main.Append(BUILDERS = { 'PartialShared' : partial_shared_builder,
                         'PartialStatic' : partial_static_builder })
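
# The ConfigFile builder registered above is used by the per-variant build
# scripts (see src/SConscript) to turn exported variables into config
# headers. An illustrative sketch of its effect (variable chosen arbitrarily
# from export_vars):
#   env.ConfigFile('USE_KVM')
# writes config/use_kvm.hh containing a single line such as
#   #define USE_KVM 1
# with bools forced to 0/1 and strings quoted by config_emitter above.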

# builds in ext are shared across all configs in the build root.
ext_dir = abspath(joinpath(str(main.root), 'ext'))
ext_build_dirs = []
for root, dirs, files in os.walk(ext_dir):
    if 'SConscript' in files:
        build_dir = os.path.relpath(root, ext_dir)
        ext_build_dirs.append(build_dir)
        main.SConscript(joinpath(root, 'SConscript'),
                        variant_dir=joinpath(build_root, build_dir))

main.Prepend(CPPPATH=Dir('ext/pybind11/include/'))

###################################################
#
# This builder and wrapper method are used to set up a directory with
# switching headers. Those are headers which are in a generic location and
# that include more specific headers from a directory chosen at build time
# based on the current build settings.
#
###################################################

def build_switching_header(target, source, env):
    path = str(target[0])
    subdir = str(source[0])
    dp, fp = os.path.split(path)
    dp = os.path.relpath(os.path.realpath(dp),
                         os.path.realpath(env['BUILDDIR']))
    with open(path, 'w') as hdr:
        print >>hdr, '#include "%s/%s/%s"' % (dp, subdir, fp)

switching_header_action = MakeAction(build_switching_header,
                                     Transform('GENERATE'))

switching_header_builder = Builder(action=switching_header_action,
                                   source_factory=Value,
                                   single_source=True)

main.Append(BUILDERS = { 'SwitchingHeader': switching_header_builder })

def switching_headers(self, headers, source):
    for header in headers:
        self.SwitchingHeader(header, source)

main.AddMethod(switching_headers, 'SwitchingHeaders')

# all-isas -> all-deps -> all-environs -> all_targets
main.Alias('#all-isas', [])
main.Alias('#all-deps', '#all-isas')

# Dummy target to ensure all environments are created before telling
# SCons what to actually make (the command line arguments).  We attach
# them to the dependence graph after the environments are complete.
ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; gets closure to work.
def environsComplete(target, source, env):
    for t in ORIG_BUILD_TARGETS:
        main.Depends('#all-targets', t)

# Each build/* switching_dir attaches its *-environs target to #all-environs.
main.Append(BUILDERS = {'CompleteEnvirons' :
                        Builder(action=MakeAction(environsComplete, None))})
main.CompleteEnvirons('#all-environs', [])

def doNothing(**ignored): pass
main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})

# The final target to which all the original targets ultimately get attached.
main.Dummy('#all-targets', '#all-environs')
BUILD_TARGETS[:] = ['#all-targets']

###################################################
#
# Define build environments for selected configurations.
#
###################################################

def variant_name(path):
    return os.path.basename(path).lower().replace('_', '-')
main['variant_name'] = variant_name
main['VARIANT_NAME'] = '${variant_name(BUILDDIR)}'

for variant_path in variant_paths:
    if not GetOption('silent'):
        print "Building in", variant_path

    # Make a copy of the build-root environment to use for this config.
    env = main.Clone()
    env['BUILDDIR'] = variant_path

    # variant_dir is the tail component of build path, and is used to
    # determine the build parameters (e.g., 'ALPHA_SE')
    (build_root, variant_dir) = splitpath(variant_path)

    # Set env variables according to the build directory config.
    sticky_vars.files = []
    # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
    # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
    # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
    current_vars_file = joinpath(build_root, 'variables', variant_dir)
    if isfile(current_vars_file):
        sticky_vars.files.append(current_vars_file)
        if not GetOption('silent'):
            print "Using saved variables file %s" % current_vars_file
    elif variant_dir in ext_build_dirs:
        # Things in ext are built without a variant directory.
        continue
    else:
        # Build dir-specific variables file doesn't exist.

        # Make sure the directory is there so we can create it later
        opt_dir = dirname(current_vars_file)
        if not isdir(opt_dir):
            mkdir(opt_dir)

        # Get default build variables from source tree.  Variables are
        # normally determined by name of $VARIANT_DIR, but can be
        # overridden by '--default=' arg on command line.
        default = GetOption('default')
        opts_dir = joinpath(main.root.abspath, 'build_opts')
        if default:
            default_vars_files = [joinpath(build_root, 'variables', default),
                                  joinpath(opts_dir, default)]
        else:
            default_vars_files = [joinpath(opts_dir, variant_dir)]
        existing_files = filter(isfile, default_vars_files)
        if existing_files:
            default_vars_file = existing_files[0]
            sticky_vars.files.append(default_vars_file)
            print "Variables file %s not found,\n  using defaults in %s" \
                  % (current_vars_file, default_vars_file)
        else:
            print "Error: cannot find variables file %s or " \
                  "default file(s) %s" \
                  % (current_vars_file, ' or '.join(default_vars_files))
            Exit(1)

    # Apply current variable settings to env
    sticky_vars.Update(env)

    help_texts["local_vars"] += \
        "Build variables for %s:\n" % variant_dir \
        + sticky_vars.GenerateHelpText(env)

    # Process variable settings.

    if not have_fenv and env['USE_FENV']:
        print "Warning: <fenv.h> not available; " \
              "forcing USE_FENV to False in", variant_dir + "."
        env['USE_FENV'] = False

    if not env['USE_FENV']:
        print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
        print "         FP results may deviate slightly from other platforms."

    if env['EFENCE']:
        env.Append(LIBS=['efence'])

    if env['USE_KVM']:
        if not have_kvm:
            print "Warning: Can not enable KVM, host seems to lack KVM support"
            env['USE_KVM'] = False
        elif not is_isa_kvm_compatible(env['TARGET_ISA']):
            print "Info: KVM support disabled due to unsupported host and " \
                  "target ISA combination"
            env['USE_KVM'] = False

    if env['USE_TUNTAP']:
        if not have_tuntap:
            print "Warning: Can't connect EtherTap with a tap device."
            env['USE_TUNTAP'] = False

    if env['BUILD_GPU']:
        env.Append(CPPDEFINES=['BUILD_GPU'])

    # Warn about missing optional functionality
    if env['USE_KVM']:
        if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
            print "Warning: perf_event headers lack support for the " \
                  "exclude_host attribute. KVM instruction counts will " \
KVM instruction counts will " \ 1541 "be inaccurate." 1542 1543 # Save sticky variable settings back to current variables file 1544 sticky_vars.Save(current_vars_file, env) 1545 1546 if env['USE_SSE2']: 1547 env.Append(CCFLAGS=['-msse2']) 1548 1549 # The src/SConscript file sets up the build rules in 'env' according 1550 # to the configured variables. It returns a list of environments, 1551 # one for each variant build (debug, opt, etc.) 1552 SConscript('src/SConscript', variant_dir = variant_path, exports = 'env') 1553 1554def pairwise(iterable): 1555 "s -> (s0,s1), (s1,s2), (s2, s3), ..." 1556 a, b = itertools.tee(iterable) 1557 b.next() 1558 return itertools.izip(a, b) 1559 1560variant_names = [variant_name(path) for path in variant_paths] 1561 1562# Create false dependencies so SCons will parse ISAs, establish 1563# dependencies, and setup the build Environments serially. Either 1564# SCons (likely) and/or our SConscripts (possibly) cannot cope with -j 1565# greater than 1. It appears to be standard race condition stuff; it 1566# doesn't always fail, but usually, and the behaviors are different. 1567# Every time I tried to remove this, builds would fail in some 1568# creative new way. So, don't do that. You'll want to, though, because 1569# tests/SConscript takes a long time to make its Environments. 1570for t1, t2 in pairwise(sorted(variant_names)): 1571 main.Depends('#%s-deps' % t2, '#%s-deps' % t1) 1572 main.Depends('#%s-environs' % t2, '#%s-environs' % t1) 1573 1574# base help text 1575Help(''' 1576Usage: scons [scons options] [build variables] [target(s)] 1577 1578Extra scons options: 1579%(options)s 1580 1581Global build variables: 1582%(global_vars)s 1583 1584%(local_vars)s 1585''' % help_texts) 1586