# -*- mode:python -*-

# Copyright (c) 2013, 2015 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder.  You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2011 Advanced Micro Devices, Inc.
# Copyright (c) 2009 The Hewlett-Packard Development Company
# Copyright (c) 2004-2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
#          Nathan Binkert

###################################################
#
# SCons top-level build description (SConstruct) file.
#
# While in this directory ('gem5'), just type 'scons' to build the default
# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
# the optimized full-system version).
#
# You can build gem5 in a different directory as long as there is a
# 'build/<CONFIG>' somewhere along the target path.  The build system
# expects that all configs under the same build directory are being
# built for the same host system.
#
# Examples:
#
#   The following two commands are equivalent.  The '-u' option tells
#   scons to search up the directory tree for this SConstruct file.
#   % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
#   % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
#
#   The following two commands are equivalent and demonstrate building
#   in a directory outside of the source tree.
#   The '-C' option tells scons to chdir to the specified directory to
#   find this SConstruct file.
#   % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
#   % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
#
# You can use 'scons -H' to print scons options.  If you're in this
# 'gem5' directory (or use -u or -C to tell scons where to find this
# file), you can use 'scons -h' to print all the gem5-specific build
# options as well.
#
###################################################

# Check for recent-enough Python and SCons versions.
try:
    # Really old versions of scons only take two options for the
    # function, so check once without the revision and once with the
    # revision; the first check will fail for anything other than
    # 0.98, and the second will fail for 0.98.0.
    EnsureSConsVersion(0, 98)
    EnsureSConsVersion(0, 98, 1)
except SystemExit, e:
    print """
For more details, see:
    http://gem5.org/Dependencies
"""
    raise

# We ensure the python version early because python-config requires
# python 2.5.
try:
    EnsurePythonVersion(2, 5)
except SystemExit, e:
    print """
You can use a non-default installation of the Python interpreter by
rearranging your PATH so that scons finds the non-default 'python' and
'python-config' first.

For more details, see:
    http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
"""
    raise

# Global Python includes
import itertools
import os
import re
import subprocess
import sys

from os import mkdir, environ
from os.path import abspath, basename, dirname, expanduser, normpath
from os.path import exists, isdir, isfile
from os.path import join as joinpath, split as splitpath

# SCons includes
import SCons
import SCons.Node

extra_python_paths = [
    Dir('src/python').srcnode().abspath, # gem5 includes
    Dir('ext/ply').srcnode().abspath,    # ply is used by several files
    ]

sys.path[1:1] = extra_python_paths

from m5.util import compareVersions, readCommand
from m5.util.terminal import get_termcap

help_texts = {
    "options" : "",
    "global_vars" : "",
    "local_vars" : ""
}

Export("help_texts")


# There's a bug in scons in that (1) by default, the help texts from
# AddOption() are supposed to be displayed when you type 'scons -h'
# and (2) you can override the help displayed by 'scons -h' using the
# Help() function, but these two features are incompatible: once
# you've overridden the help text using Help(), there's no way to get
# at the help texts from AddOptions.  See:
#     http://scons.tigris.org/issues/show_bug.cgi?id=2356
#     http://scons.tigris.org/issues/show_bug.cgi?id=2611
# This hack lets us extract the help text from AddOptions and
# re-inject it via Help().  Ideally someday this bug will be fixed and
# we can just use AddOption directly.
def AddLocalOption(*args, **kwargs):
    col_width = 30

    help = "  " + ", ".join(args)
    if "help" in kwargs:
        length = len(help)
        if length >= col_width:
            help += "\n" + " " * col_width
        else:
            help += " " * (col_width - length)
        help += kwargs["help"]
    help_texts["options"] += help + "\n"

    AddOption(*args, **kwargs)

AddLocalOption('--colors', dest='use_colors', action='store_true',
               help="Add color to abbreviated scons output")
AddLocalOption('--no-colors', dest='use_colors', action='store_false',
               help="Don't add color to abbreviated scons output")
AddLocalOption('--with-cxx-config', dest='with_cxx_config',
               action='store_true',
               help="Build with support for C++-based configuration")
AddLocalOption('--default', dest='default', type='string', action='store',
               help='Override which build_opts file to use for defaults')
AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
               help='Disable style checking hooks')
AddLocalOption('--no-lto', dest='no_lto', action='store_true',
               help='Disable Link-Time Optimization for fast')
AddLocalOption('--update-ref', dest='update_ref', action='store_true',
               help='Update test reference outputs')
AddLocalOption('--verbose', dest='verbose', action='store_true',
               help='Print full tool command lines')
AddLocalOption('--without-python', dest='without_python',
               action='store_true',
               help='Build without Python configuration support')
AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
               action='store_true',
               help='Disable linking against tcmalloc')
AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
               help='Build with Undefined Behavior Sanitizer if available')

termcap = get_termcap(GetOption('use_colors'))

########################################################################
#
# Set up the main build environment.
#
########################################################################

# export TERM so that clang reports errors in color
use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
                 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
                 'PYTHONPATH', 'RANLIB', 'SWIG', 'TERM' ])

use_prefixes = [
    "CCACHE_",         # ccache (caching compiler wrapper) configuration
    "CCC_",            # clang static analyzer configuration
    "DISTCC_",         # distcc (distributed compiler wrapper) configuration
    "INCLUDE_SERVER_", # distcc pump server settings
    "M5",              # M5 configuration (e.g., path to kernels)
    ]

use_env = {}
for key,val in sorted(os.environ.iteritems()):
    if key in use_vars or \
            any([key.startswith(prefix) for prefix in use_prefixes]):
        use_env[key] = val

# Tell scons to avoid implicit command dependencies to avoid issues
# with the param wrappers being compiled twice (see
# http://scons.tigris.org/issues/show_bug.cgi?id=2811)
main = Environment(ENV=use_env, IMPLICIT_COMMAND_DEPENDENCIES=0)
main.Decider('MD5-timestamp')
main.root = Dir(".")         # The current directory (where this file lives).
main.srcdir = Dir("src")     # The source directory

main_dict_keys = main.Dictionary().keys()

# Check that we have a C/C++ compiler
if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
    print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
    Exit(1)

# Check that swig is present
if not 'SWIG' in main_dict_keys:
    print "swig is not installed (package swig on Ubuntu and RedHat)"
    Exit(1)

# add useful python code to PYTHONPATH so it can be used by subprocesses
# as well
main.AppendENVPath('PYTHONPATH', extra_python_paths)

########################################################################
#
# Mercurial Stuff.
#
# If the gem5 directory is a mercurial repository, we should do some
# extra things.
#
########################################################################

hgdir = main.root.Dir(".hg")

mercurial_style_message = """
You're missing the gem5 style hook, which automatically checks your code
against the gem5 style rules on hg commit and qrefresh commands.  This
script will now install the hook in your .hg/hgrc file.
Press enter to continue, or ctrl-c to abort: """

mercurial_style_hook = """
# The following lines were automatically added by gem5/SConstruct
# to provide the gem5 style-checking hooks
[extensions]
style = %s/util/style.py

[hooks]
pretxncommit.style = python:style.check_style
pre-qrefresh.style = python:style.check_style
# End of SConstruct additions

""" % (main.root.abspath)

mercurial_lib_not_found = """
Mercurial libraries cannot be found, ignoring style hook.  If
you are a gem5 developer, please fix this and run the style
hook.  It is important.
"""

# Check for style hook and prompt for installation if it's not there.
# Skip this if --ignore-style was specified, there's no .hg dir to
# install a hook in, or there's no interactive terminal to prompt.
if not GetOption('ignore_style') and hgdir.exists() and sys.stdin.isatty():
    style_hook = True
    try:
        from mercurial import ui
        ui = ui.ui()
        ui.readconfig(hgdir.File('hgrc').abspath)
        style_hook = ui.config('hooks', 'pretxncommit.style', None) and \
                     ui.config('hooks', 'pre-qrefresh.style', None)
    except ImportError:
        print mercurial_lib_not_found

    if not style_hook:
        print mercurial_style_message,
        # continue unless user does ctrl-c/ctrl-d etc.
        try:
            raw_input()
        except:
            print "Input exception, exiting scons.\n"
            sys.exit(1)
        hgrc_path = '%s/.hg/hgrc' % main.root.abspath
        print "Adding style hook to", hgrc_path, "\n"
        try:
            hgrc = open(hgrc_path, 'a')
            hgrc.write(mercurial_style_hook)
            hgrc.close()
        except:
            print "Error updating", hgrc_path
            sys.exit(1)


###################################################
#
# Figure out which configurations to set up based on the path(s) of
# the target(s).
#
###################################################

# Find default configuration & binary.
Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))

# helper function: find last occurrence of element in list
def rfind(l, elt, offs = -1):
    for i in range(len(l)+offs, 0, -1):
        if l[i] == elt:
            return i
    raise ValueError, "element not found"

# Take a list of paths (or SCons Nodes) and return a list with all
# paths made absolute and ~-expanded.
# Paths will be interpreted relative to the launch directory unless a
# different root is provided.
def makePathListAbsolute(path_list, root=GetLaunchDir()):
    return [abspath(joinpath(root, expanduser(str(p))))
            for p in path_list]

# Each target must have 'build' in the interior of the path; the
# directory below this will determine the build parameters.  For
# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
# recognize that ALPHA_SE specifies the configuration because it
# follows 'build' in the build path.

# The funky assignment to "[:]" is needed to replace the list contents
# in place rather than reassign the symbol to a new list, which
# doesn't work (obviously!).
BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)

# Generate a list of the unique build roots and configs that the
# collected targets reference.
variant_paths = []
build_root = None
for t in BUILD_TARGETS:
    path_dirs = t.split('/')
    try:
        build_top = rfind(path_dirs, 'build', -2)
    except:
        print "Error: no non-leaf 'build' dir found on target path", t
        Exit(1)
    this_build_root = joinpath('/',*path_dirs[:build_top+1])
    if not build_root:
        build_root = this_build_root
    else:
        if this_build_root != build_root:
            print "Error: build targets not under same build root\n"\
                  "  %s\n  %s" % (build_root, this_build_root)
            Exit(1)
    variant_path = joinpath('/',*path_dirs[:build_top+2])
    if variant_path not in variant_paths:
        variant_paths.append(variant_path)

# Make sure build_root exists (might not if this is the first build there)
if not isdir(build_root):
    mkdir(build_root)
main['BUILDROOT'] = build_root

Export('main')

main.SConsignFile(joinpath(build_root, "sconsign"))

# Default duplicate option is to use hard links, but this messes up
# when you use emacs to edit a file in the target dir, as emacs moves
# file to file~ then copies to file, breaking the link.  Symbolic
# (soft) links work better.
main.SetOption('duplicate', 'soft-copy')

#
# Set up global sticky variables... these are common to an entire build
# tree (not specific to a particular build like ALPHA_SE)
#

global_vars_file = joinpath(build_root, 'variables.global')

global_vars = Variables(global_vars_file, args=ARGUMENTS)

global_vars.AddVariables(
    ('CC', 'C compiler', environ.get('CC', main['CC'])),
    ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
    ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
    ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
    ('BATCH', 'Use batch pool for build and tests', False),
    ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
    ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
    ('EXTRAS', 'Add extra directories to the compilation', '')
    )

# Update main environment with values from ARGUMENTS & global_vars_file
global_vars.Update(main)
help_texts["global_vars"] += global_vars.GenerateHelpText(main)

# Save sticky variable settings back to current variables file
global_vars.Save(global_vars_file, main)

# Parse EXTRAS variable to build a list of all directories where we're
# looking for sources etc.  This list is exported as extras_dir_list.
base_dir = main.srcdir.abspath
if main['EXTRAS']:
    extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
else:
    extras_dir_list = []

Export('base_dir')
Export('extras_dir_list')

# the ext directory should be on the #includes path
main.Append(CPPPATH=[Dir('ext')])

def strip_build_path(path, env):
    path = str(path)
    variant_base = env['BUILDROOT'] + os.path.sep
    if path.startswith(variant_base):
        path = path[len(variant_base):]
    elif path.startswith('build/'):
        path = path[6:]
    return path

# Generate a string of the form:
#   common/path/prefix/src1, src2 -> tgt1, tgt2
# to print while building.
class Transform(object):
    # all specific color settings should be here and nowhere else
    tool_color = termcap.Normal
    pfx_color = termcap.Yellow
    srcs_color = termcap.Yellow + termcap.Bold
    arrow_color = termcap.Blue + termcap.Bold
    tgts_color = termcap.Yellow + termcap.Bold

    def __init__(self, tool, max_sources=99):
        self.format = self.tool_color + (" [%8s] " % tool) \
                      + self.pfx_color + "%s" \
                      + self.srcs_color + "%s" \
                      + self.arrow_color + " -> " \
                      + self.tgts_color + "%s" \
                      + termcap.Normal
        self.max_sources = max_sources

    def __call__(self, target, source, env, for_signature=None):
        # truncate source list according to max_sources param
        source = source[0:self.max_sources]
        def strip(f):
            return strip_build_path(str(f), env)
        if len(source) > 0:
            srcs = map(strip, source)
        else:
            srcs = ['']
        tgts = map(strip, target)
        # surprisingly, os.path.commonprefix is a dumb char-by-char string
        # operation that has nothing to do with paths.
        com_pfx = os.path.commonprefix(srcs + tgts)
        com_pfx_len = len(com_pfx)
        if com_pfx:
            # do some cleanup and sanity checking on common prefix
            if com_pfx[-1] == ".":
                # prefix matches all but file extension: ok
                # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
                com_pfx = com_pfx[0:-1]
            elif com_pfx[-1] == "/":
                # common prefix is directory path: OK
                pass
            else:
                src0_len = len(srcs[0])
                tgt0_len = len(tgts[0])
                if src0_len == com_pfx_len:
                    # source is a substring of target, OK
                    pass
                elif tgt0_len == com_pfx_len:
                    # target is a substring of source, need to back up to
                    # avoid empty string on RHS of arrow
                    sep_idx = com_pfx.rfind(".")
                    if sep_idx != -1:
                        com_pfx = com_pfx[0:sep_idx]
                    else:
                        com_pfx = ''
                elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
                    # still splitting at file extension: ok
                    pass
                else:
                    # probably a fluke; ignore it
                    com_pfx = ''
        # recalculate length in case com_pfx was modified
        com_pfx_len = len(com_pfx)
        def fmt(files):
            f = map(lambda s: s[com_pfx_len:], files)
            return ', '.join(f)
        return self.format % (com_pfx, fmt(srcs), fmt(tgts))

Export('Transform')

# enable the regression script to use the termcap
main['TERMCAP'] = termcap

if GetOption('verbose'):
    def MakeAction(action, string, *args, **kwargs):
        return Action(action, *args, **kwargs)
else:
    MakeAction = Action
    main['CCCOMSTR']     = Transform("CC")
    main['CXXCOMSTR']    = Transform("CXX")
    main['ASCOMSTR']     = Transform("AS")
    main['SWIGCOMSTR']   = Transform("SWIG")
    main['ARCOMSTR']     = Transform("AR", 0)
    main['LINKCOMSTR']   = Transform("LINK", 0)
    main['RANLIBCOMSTR'] = Transform("RANLIB", 0)
    main['M4COMSTR']     = Transform("M4")
    main['SHCCCOMSTR']   = Transform("SHCC")
    main['SHCXXCOMSTR']  = Transform("SHCXX")
Export('MakeAction')

# Initialize the Link-Time Optimization (LTO) flags
main['LTO_CCFLAGS'] = []
main['LTO_LDFLAGS'] = []

# According to the readme, tcmalloc works best if the compiler doesn't
# assume that we're using the builtin malloc and friends.  These flags
# are compiler-specific, so we need to set them after we detect which
# compiler we're using.
main['TCMALLOC_CCFLAGS'] = []

CXX_version = readCommand([main['CXX'],'--version'], exception=False)
CXX_V = readCommand([main['CXX'],'-V'], exception=False)

main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
if main['GCC'] + main['CLANG'] > 1:
    print 'Error: How can we have two at the same time?'
    Exit(1)

# Set up default C++ compiler flags
if main['GCC'] or main['CLANG']:
    # As gcc and clang share many flags, do the common parts here
    main.Append(CCFLAGS=['-pipe'])
    main.Append(CCFLAGS=['-fno-strict-aliasing'])
    # Enable -Wall and then disable the few warnings that we
    # consistently violate
    main.Append(CCFLAGS=['-Wall', '-Wno-sign-compare', '-Wundef'])
    # We always compile using C++11
    main.Append(CXXFLAGS=['-std=c++11'])
    # Add selected sanity checks from -Wextra
    main.Append(CXXFLAGS=['-Wmissing-field-initializers',
                          '-Woverloaded-virtual'])
else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
              termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use a compiler other than GCC"
    print "       or clang, there appears to be something wrong with your"
    print "       environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to fix SConstruct and"
    print "       src/SConscript to support that compiler."
    Exit(1)

if main['GCC']:
    # Check for a supported version of gcc. >= 4.7 is chosen for its
    # level of c++11 support. See
    # http://gcc.gnu.org/projects/cxx0x.html for details.
    gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
    if compareVersions(gcc_version, "4.7") < 0:
        print 'Error: gcc version 4.7 or newer required.'
        print '       Installed version:', gcc_version
        Exit(1)

    main['GCC_VERSION'] = gcc_version

    # gcc from version 4.8 and above generates "rep; ret" instructions
    # to avoid performance penalties on certain AMD chips.
    # Older assemblers detect this as an error,
    # "Error: expecting string instruction after `rep'"
    if compareVersions(gcc_version, "4.8") > 0:
        as_version_raw = readCommand([main['AS'], '-v', '/dev/null'],
                                     exception=False).split()

        # version strings may contain extra distro-specific
        # qualifiers, so play it safe and keep only what comes before
        # the first hyphen
        as_version = as_version_raw[-1].split('-')[0] if as_version_raw \
            else None

        if not as_version or compareVersions(as_version, "2.23") < 0:
            print termcap.Yellow + termcap.Bold + \
                'Warning: This combination of gcc and binutils has' + \
                ' known incompatibilities.\n' + \
                '         If you encounter build problems, please update ' + \
                'binutils to 2.23.' + \
                termcap.Normal

    # Make sure we warn if the user has requested to compile with the
    # Undefined Behavior Sanitizer and this version of gcc does not
    # support it.
    if GetOption('with_ubsan') and \
            compareVersions(gcc_version, '4.9') < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: UBSan is only supported using gcc 4.9 and later.' + \
            termcap.Normal

    # Add the appropriate Link-Time Optimization (LTO) flags
    # unless LTO is explicitly turned off. Note that these flags
    # are only used by the fast target.
    if not GetOption('no_lto'):
        # Pass the LTO flag when compiling to produce GIMPLE
        # output; we merely create the flags here and only append
        # them later
        main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

        # Use the same number of jobs for LTO as we are running
        # scons with
        main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
                                  '-fno-builtin-realloc', '-fno-builtin-free'])

elif main['CLANG']:
    # Check for a supported version of clang, >= 3.1 is needed to
    # support similar features as gcc 4.7. See
    # http://clang.llvm.org/cxx_status.html for details
    clang_version_re = re.compile(".* version (\d+\.\d+)")
    clang_version_match = clang_version_re.search(CXX_version)
    if (clang_version_match):
        clang_version = clang_version_match.groups()[0]
        if compareVersions(clang_version, "3.1") < 0:
            print 'Error: clang version 3.1 or newer required.'
            print '       Installed version:', clang_version
            Exit(1)
    else:
        print 'Error: Unable to determine clang version.'
        Exit(1)

    # clang has a few additional warnings that we disable: tautological
    # comparisons are allowed due to unsigned integers being compared
    # to constants that happen to be 0, extraneous parentheses are
    # allowed due to Ruby's printing of the AST, and finally
    # self-assignments are allowed as the generated CPU code relies on
    # them.
    main.Append(CCFLAGS=['-Wno-tautological-compare',
                         '-Wno-parentheses',
                         '-Wno-self-assign',
                         # Some versions of libstdc++ (4.8?) seem to
                         # use struct hash and class hash
                         # interchangeably.
                         '-Wno-mismatched-tags',
                         ])

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])

    # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
    # opposed to libstdc++, as the latter is dated.
    if sys.platform == "darwin":
        main.Append(CXXFLAGS=['-stdlib=libc++'])
        main.Append(LIBS=['c++'])

else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
              termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use a compiler other than GCC"
    print "       or clang, there appears to be something wrong with your"
    print "       environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to fix SConstruct and"
    print "       src/SConscript to support that compiler."
    Exit(1)

# Set up common yacc/bison flags (needed for Ruby)
main['YACCFLAGS'] = '-d'
main['YACCHXXFILESUFFIX'] = '.hh'

# Do this after we save settings back, or else we'll tack on an
# extra 'qdo' every time we run scons.
if main['BATCH']:
    main['CC']     = main['BATCH_CMD'] + ' ' + main['CC']
    main['CXX']    = main['BATCH_CMD'] + ' ' + main['CXX']
    main['AS']     = main['BATCH_CMD'] + ' ' + main['AS']
    main['AR']     = main['BATCH_CMD'] + ' ' + main['AR']
    main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']

if sys.platform == 'cygwin':
    # cygwin has some header file issues...
    main.Append(CCFLAGS=["-Wno-uninitialized"])

# Check for the protobuf compiler
protoc_version = readCommand([main['PROTOC'], '--version'],
                             exception='').split()

# First two words should be "libprotoc x.y.z"
if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
    print termcap.Yellow + termcap.Bold + \
        'Warning: Protocol buffer compiler (protoc) not found.\n' + \
        '         Please install protobuf-compiler for tracing support.' + \
        termcap.Normal
    main['PROTOC'] = False
else:
    # Based on the availability of the compress stream wrappers,
    # require 2.1.0
    min_protoc_version = '2.1.0'
    if compareVersions(protoc_version[1], min_protoc_version) < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: protoc version', min_protoc_version, \
            'or newer required.\n' + \
            '         Installed version:', protoc_version[1], \
            termcap.Normal
        main['PROTOC'] = False
    else:
        # Attempt to determine the appropriate include path and
        # library path using pkg-config, which means we also need to
        # check for pkg-config.  Note that it is possible to use
        # protobuf without the involvement of pkg-config.  Later on we
        # do a library configuration check, and at that point the test
        # will fail if libprotobuf cannot be found.
        if readCommand(['pkg-config', '--version'], exception=''):
            try:
                # Attempt to establish what linking flags to add for protobuf
                # using pkg-config
                main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
            except:
                print termcap.Yellow + termcap.Bold + \
                    'Warning: pkg-config could not get protobuf flags.' + \
                    termcap.Normal

# Check for SWIG
if not main.has_key('SWIG'):
    print 'Error: SWIG utility not found.'
    print '       Please install (see http://www.swig.org) and retry.'
    Exit(1)

# Check for appropriate SWIG version
swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
# First 3 words should be "SWIG Version x.y.z"
if len(swig_version) < 3 or \
        swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
    print 'Error determining SWIG version.'
    Exit(1)

min_swig_version = '2.0.4'
if compareVersions(swig_version[2], min_swig_version) < 0:
    print 'Error: SWIG version', min_swig_version, 'or newer required.'
    print '       Installed version:', swig_version[2]
    Exit(1)

# Check for known incompatibilities. The standard library shipped with
# gcc >= 4.9 does not play well with swig versions prior to 3.0.
if main['GCC'] and compareVersions(gcc_version, '4.9') >= 0 and \
        compareVersions(swig_version[2], '3.0') < 0:
    print termcap.Yellow + termcap.Bold + \
        'Warning: This combination of gcc and swig has' + \
        ' known incompatibilities.\n' + \
        '         If you encounter build problems, please update ' + \
        'swig to 3.0 or later.' + \
        termcap.Normal

# Set up SWIG flags & scanner
swig_flags = Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
main.Append(SWIGFLAGS=swig_flags)

# Check for 'timeout' from GNU coreutils. If present, regressions will
# be run with a time limit. We require version 8.13 since we rely on
# support for the '--foreground' option.
timeout_lines = readCommand(['timeout', '--version'],
                            exception='').splitlines()
# Get the first line and tokenize it
timeout_version = timeout_lines[0].split() if timeout_lines else []
main['TIMEOUT'] = timeout_version and \
    compareVersions(timeout_version[-1], '8.13') >= 0

# filter out all existing swig scanners; they mess up the dependency
# tracking for some reason
scanners = []
for scanner in main['SCANNERS']:
    skeys = scanner.skeys
    if skeys == '.i':
        continue

    if isinstance(skeys, (list, tuple)) and '.i' in skeys:
        continue

    scanners.append(scanner)

# add the new swig scanner that we like better
from SCons.Scanner import ClassicCPP as CPPScanner
swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))

# replace the scanners list with the one that has what we want
main['SCANNERS'] = scanners

# Add a custom Check function to test for structure members.
def CheckMember(context, include, decl, member, include_quotes="<>"):
    context.Message("Checking for member %s in %s..." %
                    (member, decl))
    text = """
#include %(header)s
int main(){
  %(decl)s test;
  (void)test.%(member)s;
  return 0;
};
""" % { "header" : include_quotes[0] + include + include_quotes[1],
        "decl" : decl,
        "member" : member,
        }

    ret = context.TryCompile(text, extension=".cc")
    context.Result(ret)
    return ret

# Platform-specific configuration.  Note again that we assume that all
# builds under a given build root run on the same host platform.
conf = Configure(main,
                 conf_dir = joinpath(build_root, '.scons_config'),
                 log_file = joinpath(build_root, 'scons_config.log'),
                 custom_tests = {
        'CheckMember' : CheckMember,
        })

# Check if we should compile a 64 bit binary on Mac OS X/Darwin
try:
    import platform
    uname = platform.uname()
    if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
        if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
            main.Append(CCFLAGS=['-arch', 'x86_64'])
            main.Append(CFLAGS=['-arch', 'x86_64'])
            main.Append(LINKFLAGS=['-arch', 'x86_64'])
            main.Append(ASFLAGS=['-arch', 'x86_64'])
except:
    pass

# Recent versions of scons substitute a "Null" object for Configure()
# when configuration isn't necessary, e.g., if the "--help" option is
# present.  Unfortunately this Null object always returns false,
# breaking all our configuration checks.  We replace it with our own
# more optimistic null object that returns True instead.
if not conf:
    def NullCheck(*args, **kwargs):
        return True

    class NullConf:
        def __init__(self, env):
            self.env = env
        def Finish(self):
            return self.env
        def __getattr__(self, mname):
            return NullCheck

    conf = NullConf(main)

# Cache build files in the supplied directory.
if main['M5_BUILD_CACHE']:
    print 'Using build cache located at', main['M5_BUILD_CACHE']
    CacheDir(main['M5_BUILD_CACHE'])

if not GetOption('without_python'):
    # Find Python include and library directories for embedding the
    # interpreter. We rely on python-config to resolve the appropriate
    # includes and linker flags. ParseConfig does not seem to understand
    # the more exotic linker flags such as -Xlinker and -export-dynamic so
    # we add them explicitly below. If you want to link in an alternate
    # version of python, see above for instructions on how to invoke
    # scons with the appropriate PATH set.
    #
    # First we check if python2-config exists, else we use python-config
    python_config = readCommand(['which', 'python2-config'],
                                exception='').strip()
    if not os.path.exists(python_config):
        python_config = readCommand(['which', 'python-config'],
                                    exception='').strip()
    py_includes = readCommand([python_config, '--includes'],
                              exception='').split()
    # Strip the -I from the include folders before adding them to the
    # CPPPATH
    main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))

    # Read the linker flags and split them into libraries and other link
    # flags.  The libraries are added later through the call to CheckLib.
    py_ld_flags = readCommand([python_config, '--ldflags'],
                              exception='').split()
    py_libs = []
    for lib in py_ld_flags:
        if not lib.startswith('-l'):
            main.Append(LINKFLAGS=[lib])
        else:
            lib = lib[2:]
            if lib not in py_libs:
                py_libs.append(lib)

    # verify that this stuff works
    if not conf.CheckHeader('Python.h', '<>'):
        print "Error: can't find Python.h header in", py_includes
        print "Install Python headers (package python-dev on Ubuntu and RedHat)"
        Exit(1)

    for lib in py_libs:
        if not conf.CheckLib(lib):
            print "Error: can't find library %s required by python" % lib
            Exit(1)

# On Solaris you need to use libsocket for socket ops
if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
    if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
        print "Can't find library with socket calls (e.g. accept())"
        Exit(1)

# Check for zlib. If the check passes, libz will be automatically
# added to the LIBS environment variable.
if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
    print 'Error: did not find needed zlib compression library '\
          'and/or zlib.h header file.'
    print '       Please install zlib and try again.'
    Exit(1)

# If we have the protobuf compiler, also make sure we have the
# development libraries. If the check passes, libprotobuf will be
# automatically added to the LIBS environment variable. After
# this, we can use the HAVE_PROTOBUF flag to determine if we have
# got both protoc and libprotobuf available.
main['HAVE_PROTOBUF'] = main['PROTOC'] and \
    conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
                            'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')

# If we have the compiler but not the library, print another warning.
if main['PROTOC'] and not main['HAVE_PROTOBUF']:
    print termcap.Yellow + termcap.Bold + \
        'Warning: did not find protocol buffer library and/or headers.\n' + \
        '         Please install libprotobuf-dev for tracing support.' + \
        termcap.Normal

# Check for librt.
have_posix_clock = \
    conf.CheckLibWithHeader(None, 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);') or \
    conf.CheckLibWithHeader('rt', 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);')

have_posix_timers = \
    conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
                            'timer_create(CLOCK_MONOTONIC, NULL, NULL);')

if not GetOption('without_tcmalloc'):
    if conf.CheckLib('tcmalloc'):
        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
    elif conf.CheckLib('tcmalloc_minimal'):
        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
    else:
        print termcap.Yellow + termcap.Bold + \
              "You can get a 12% performance improvement by "\
              "installing tcmalloc (libgoogle-perftools-dev package "\
              "on Ubuntu or RedHat)." + termcap.Normal


# Detect back trace implementations. The last implementation in the
# list will be used by default.
backtrace_impls = [ "none" ]

if conf.CheckLibWithHeader(None, 'execinfo.h', 'C',
                           'backtrace_symbols_fd((void*)0, 0, 0);'):
    backtrace_impls.append("glibc")

if backtrace_impls[-1] == "none":
    default_backtrace_impl = "none"
    print termcap.Yellow + termcap.Bold + \
        "No suitable back trace implementation found." + \
        termcap.Normal

if not have_posix_clock:
    print "Can't find library for POSIX clocks."

# Check for <fenv.h> (C99 FP environment control)
have_fenv = conf.CheckHeader('fenv.h', '<>')
if not have_fenv:
    print "Warning: Header file <fenv.h> not found."
    print "         This host has no IEEE FP rounding mode control."

# Check if we should enable KVM-based hardware virtualization. The API
# we rely on exists since version 2.6.36 of the kernel, but somehow
# the KVM_API_VERSION does not reflect the change. We test for one of
# the types as a fall back.
have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
if not have_kvm:
    print "Info: Compatible header file <linux/kvm.h> not found, " \
        "disabling KVM support."

# x86 needs support for xsave. We test for the structure here since we
# won't be able to run new tests by the time we know which ISA we're
# targeting.
have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
                                    '#include <linux/kvm.h>') != 0

# Check if the requested target ISA is compatible with the host
def is_isa_kvm_compatible(isa):
    try:
        import platform
        host_isa = platform.machine()
    except:
        print "Warning: Failed to determine host ISA."
        return False

    if not have_posix_timers:
        print "Warning: Can not enable KVM, host seems to lack support " \
            "for POSIX timers"
        return False

    if isa == "arm":
        return host_isa in ( "armv7l", "aarch64" )
    elif isa == "x86":
        if host_isa != "x86_64":
            return False

        if not have_kvm_xsave:
            print "KVM on x86 requires xsave support in kernel headers."
            return False

        return True
    else:
        return False


# Check if the exclude_host attribute is available. We want this to
# get accurate instruction counts in KVM.
main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
    'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')


######################################################################
#
# Finish the configuration
#
main = conf.Finish()

######################################################################
#
# Collect all non-global variables
#

# Define the universe of supported ISAs
all_isa_list = [ ]
Export('all_isa_list')

class CpuModel(object):
    '''The CpuModel class encapsulates everything the ISA parser needs to
    know about a particular CPU model.'''

    # Dict of available CPU model objects.  Accessible as CpuModel.dict.
    dict = {}

    # Constructor.  Automatically adds models to CpuModel.dict.
    def __init__(self, name, default=False):
        self.name = name           # name of model

        # This cpu is enabled by default
        self.default = default

        # Add self to dict
        if name in CpuModel.dict:
            raise AttributeError, "CpuModel '%s' already registered" % name
        CpuModel.dict[name] = self

Export('CpuModel')

# Sticky variables get saved in the variables file so they persist from
# one invocation to the next (unless overridden, in which case the new
# value becomes sticky).
sticky_vars = Variables(args=ARGUMENTS)
Export('sticky_vars')

# Sticky variables that should be exported
export_vars = []
Export('export_vars')

# For Ruby
all_protocols = []
Export('all_protocols')
protocol_dirs = []
Export('protocol_dirs')
slicc_includes = []
Export('slicc_includes')

# Walk the tree and execute all SConsopts scripts that will add to the
# above variables
if GetOption('verbose'):
    print "Reading SConsopts"
for bdir in [ base_dir ] + extras_dir_list:
    if not isdir(bdir):
        print "Error: directory '%s' does not exist" % bdir
        Exit(1)
    for root, dirs, files in os.walk(bdir):
        if 'SConsopts' in files:
            if GetOption('verbose'):
                print "Reading", joinpath(root, 'SConsopts')
            SConscript(joinpath(root, 'SConsopts'))

all_isa_list.sort()

sticky_vars.AddVariables(
    EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
    ListVariable('CPU_MODELS', 'CPU models',
                 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
                 sorted(CpuModel.dict.keys())),
    BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
                 False),
    BoolVariable('SS_COMPATIBLE_FP',
                 'Make floating-point results compatible with SimpleScalar',
                 False),
    BoolVariable('USE_SSE2',
                 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
                 False),
    BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
    BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
    BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability',
                 False),
    BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models',
                 have_kvm),
    EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
                 all_protocols),
    EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
                 backtrace_impls[-1], backtrace_impls)
    )

# These variables get exported to #defines in config/*.hh (see src/SConscript).
export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'CP_ANNOTATE',
                'USE_POSIX_CLOCK', 'USE_KVM', 'PROTOCOL', 'HAVE_PROTOBUF',
                'HAVE_PERF_ATTR_EXCLUDE_HOST']

###################################################
#
# Define a SCons builder for configuration flag headers.
#
###################################################

# This function generates a config header file that #defines the
# variable symbol to the current variable setting (0 or 1).  The source
# operands are the name of the variable and a Value node containing the
# value of the variable.
def build_config_file(target, source, env):
    (variable, value) = [s.get_contents() for s in source]
    f = file(str(target[0]), 'w')
    print >> f, '#define', variable, value
    f.close()
    return None

# Combine the two functions into a scons Action object.
config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))

# The emitter munges the source & target node lists to reflect what
# we're really doing.
def config_emitter(target, source, env):
    # extract variable name from Builder arg
    variable = str(target[0])
    # True target is config header file
    target = joinpath('config', variable.lower() + '.hh')
    val = env[variable]
    if isinstance(val, bool):
        # Force value to 0/1
        val = int(val)
    elif isinstance(val, str):
        val = '"' + val + '"'

    # Sources are variable name & value (packaged in SCons Value nodes)
    return ([target], [Value(variable), Value(val)])

config_builder = Builder(emitter = config_emitter, action = config_action)

main.Append(BUILDERS = { 'ConfigFile' : config_builder })

# libelf build is shared across all configs in the build root.
main.SConscript('ext/libelf/SConscript',
                variant_dir = joinpath(build_root, 'libelf'))

# iostream3 build is shared across all configs in the build root.
main.SConscript('ext/iostream3/SConscript',
                variant_dir = joinpath(build_root, 'iostream3'))

# libfdt build is shared across all configs in the build root.
main.SConscript('ext/libfdt/SConscript',
                variant_dir = joinpath(build_root, 'libfdt'))

# fputils build is shared across all configs in the build root.
main.SConscript('ext/fputils/SConscript',
                variant_dir = joinpath(build_root, 'fputils'))

# DRAMSim2 build is shared across all configs in the build root.
main.SConscript('ext/dramsim2/SConscript',
                variant_dir = joinpath(build_root, 'dramsim2'))

# DRAMPower build is shared across all configs in the build root.
main.SConscript('ext/drampower/SConscript',
                variant_dir = joinpath(build_root, 'drampower'))

# nomali build is shared across all configs in the build root.
main.SConscript('ext/nomali/SConscript',
                variant_dir = joinpath(build_root, 'nomali'))

###################################################
#
# This function is used to set up a directory with switching headers
#
###################################################

main['ALL_ISA_LIST'] = all_isa_list
all_isa_deps = {}
def make_switching_dir(dname, switch_headers, env):
    # Generate the header.  target[0] is the full path of the output
    # header to generate.  'source' is a dummy variable, since we get the
    # list of ISAs from env['ALL_ISA_LIST'].
    def gen_switch_hdr(target, source, env):
        fname = str(target[0])
        isa = env['TARGET_ISA'].lower()
        try:
            f = open(fname, 'w')
            print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
            f.close()
        except IOError:
            print "Failed to create %s" % fname
            raise

    # Build SCons Action object. 'varlist' specifies env vars that this
    # action depends on; when env['ALL_ISA_LIST'] changes these actions
    # should get re-executed.
    switch_hdr_action = MakeAction(gen_switch_hdr,
                        Transform("GENERATE"), varlist=['ALL_ISA_LIST'])

    # Instantiate actions for each header
    for hdr in switch_headers:
        env.Command(hdr, [], switch_hdr_action)

    isa_target = Dir('.').up().name.lower().replace('_', '-')
    env['PHONY_BASE'] = '#'+isa_target
    all_isa_deps[isa_target] = None

Export('make_switching_dir')

# all-isas -> all-deps -> all-environs -> all_targets
main.Alias('#all-isas', [])
main.Alias('#all-deps', '#all-isas')

# Dummy target to ensure all environments are created before telling
# SCons what to actually make (the command line arguments).  We attach
# them to the dependence graph after the environments are complete.
ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; gets closure to work.
def environsComplete(target, source, env):
    for t in ORIG_BUILD_TARGETS:
        main.Depends('#all-targets', t)

# Each build/* switching_dir attaches its *-environs target to #all-environs.
main.Append(BUILDERS = {'CompleteEnvirons' :
                        Builder(action=MakeAction(environsComplete, None))})
main.CompleteEnvirons('#all-environs', [])

def doNothing(**ignored): pass
main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})

# The final target to which all the original targets ultimately get attached.
main.Dummy('#all-targets', '#all-environs')
BUILD_TARGETS[:] = ['#all-targets']

###################################################
#
# Define build environments for selected configurations.
#
###################################################

for variant_path in variant_paths:
    if not GetOption('silent'):
        print "Building in", variant_path

    # Make a copy of the build-root environment to use for this config.
    env = main.Clone()
    env['BUILDDIR'] = variant_path

    # variant_dir is the tail component of build path, and is used to
    # determine the build parameters (e.g., 'ALPHA_SE')
    (build_root, variant_dir) = splitpath(variant_path)

    # Set env variables according to the build directory config.
    sticky_vars.files = []
    # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
    # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
    # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
    current_vars_file = joinpath(build_root, 'variables', variant_dir)
    if isfile(current_vars_file):
        sticky_vars.files.append(current_vars_file)
        if not GetOption('silent'):
            print "Using saved variables file %s" % current_vars_file
    else:
        # Build dir-specific variables file doesn't exist.

        # Make sure the directory is there so we can create it later
        opt_dir = dirname(current_vars_file)
        if not isdir(opt_dir):
            mkdir(opt_dir)

        # Get default build variables from source tree.  Variables are
        # normally determined by name of $VARIANT_DIR, but can be
        # overridden by '--default=' arg on command line.
        default = GetOption('default')
        opts_dir = joinpath(main.root.abspath, 'build_opts')
        if default:
            default_vars_files = [joinpath(build_root, 'variables', default),
                                  joinpath(opts_dir, default)]
        else:
            default_vars_files = [joinpath(opts_dir, variant_dir)]
        existing_files = filter(isfile, default_vars_files)
        if existing_files:
            default_vars_file = existing_files[0]
            sticky_vars.files.append(default_vars_file)
            print "Variables file %s not found,\n  using defaults in %s" \
                  % (current_vars_file, default_vars_file)
        else:
            print "Error: cannot find variables file %s or " \
                  "default file(s) %s" \
                  % (current_vars_file, ' or '.join(default_vars_files))
            Exit(1)

    # Apply current variable settings to env
    sticky_vars.Update(env)

    help_texts["local_vars"] += \
        "Build variables for %s:\n" % variant_dir \
        + sticky_vars.GenerateHelpText(env)

    # Process variable settings.

    if not have_fenv and env['USE_FENV']:
        print "Warning: <fenv.h> not available; " \
              "forcing USE_FENV to False in", variant_dir + "."
        env['USE_FENV'] = False

    if not env['USE_FENV']:
        print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
        print "         FP results may deviate slightly from other platforms."

    if env['EFENCE']:
        env.Append(LIBS=['efence'])

    if env['USE_KVM']:
        if not have_kvm:
            print "Warning: Can not enable KVM, host seems to lack KVM support"
            env['USE_KVM'] = False
        elif not is_isa_kvm_compatible(env['TARGET_ISA']):
            print "Info: KVM support disabled due to unsupported host and " \
                "target ISA combination"
            env['USE_KVM'] = False

    # Warn about missing optional functionality
    if env['USE_KVM']:
        if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
            print "Warning: perf_event headers lack support for the " \
                "exclude_host attribute. KVM instruction counts will " \
                "be inaccurate."

    # Save sticky variable settings back to current variables file
    sticky_vars.Save(current_vars_file, env)

    if env['USE_SSE2']:
        env.Append(CCFLAGS=['-msse2'])

    # The src/SConscript file sets up the build rules in 'env' according
    # to the configured variables.  It returns a list of environments,
    # one for each variant build (debug, opt, etc.)
    SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')

def pairwise(iterable):
    "s -> (s0,s1), (s1,s2), (s2, s3), ..."
    a, b = itertools.tee(iterable)
    b.next()
    return itertools.izip(a, b)

# Create false dependencies so SCons will parse ISAs, establish
# dependencies, and set up the build Environments serially.  Either
# SCons (likely) and/or our SConscripts (possibly) cannot cope with -j
# greater than 1.  It appears to be standard race condition stuff; it
# doesn't always fail, but usually, and the behaviors are different.
# Every time I tried to remove this, builds would fail in some
# creative new way.  So, don't do that.  You'll want to, though, because
# tests/SConscript takes a long time to make its Environments.
for t1, t2 in pairwise(sorted(all_isa_deps.iterkeys())):
    main.Depends('#%s-deps' % t2, '#%s-deps' % t1)
    main.Depends('#%s-environs' % t2, '#%s-environs' % t1)

# base help text
Help('''
Usage: scons [scons options] [build variables] [target(s)]

Extra scons options:
%(options)s

Global build variables:
%(global_vars)s

%(local_vars)s
''' % help_texts)