# SConstruct revision 10384:fa66d9c5e180
# -*- mode:python -*-

# Copyright (c) 2013 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder.  You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2011 Advanced Micro Devices, Inc.
# Copyright (c) 2009 The Hewlett-Packard Development Company
# Copyright (c) 2004-2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
#          Nathan Binkert

###################################################
#
# SCons top-level build description (SConstruct) file.
#
# While in this directory ('gem5'), just type 'scons' to build the default
# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
# the optimized full-system version).
#
# You can build gem5 in a different directory as long as there is a
# 'build/<CONFIG>' somewhere along the target path.  The build system
# expects that all configs under the same build directory are being
# built for the same host system.
#
# Examples:
#
#   The following two commands are equivalent.  The '-u' option tells
#   scons to search up the directory tree for this SConstruct file.
#   % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
#   % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
#
#   The following two commands are equivalent and demonstrate building
#   in a directory outside of the source tree.  The '-C' option tells
#   scons to chdir to the specified directory to find this SConstruct
#   file.
#   % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
#   % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
#
# You can use 'scons -H' to print scons options.  If you're in this
# 'gem5' directory (or use -u or -C to tell scons where to find this
# file), you can use 'scons -h' to print all the gem5-specific build
# options as well.
#
###################################################

# Check for recent-enough Python and SCons versions.
try:
    # Really old versions of scons only take two options for the
    # function, so check once without the revision and once with the
    # revision; the first instance will fail for anything other than
    # 0.98, and the second will fail for 0.98.0
    EnsureSConsVersion(0, 98)
    EnsureSConsVersion(0, 98, 1)
except SystemExit, e:
    print """
For more details, see:
    http://gem5.org/Dependencies
"""
    raise

# We ensure the python version early because python-config
# requires python 2.5
try:
    EnsurePythonVersion(2, 5)
except SystemExit, e:
    print """
You can use a non-default installation of the Python interpreter by
rearranging your PATH so that scons finds the non-default 'python' and
'python-config' first.

For more details, see:
    http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
"""
    raise

# Global Python includes
import itertools
import os
import re
import subprocess
import sys

from os import mkdir, environ
from os.path import abspath, basename, dirname, expanduser, normpath
from os.path import exists, isdir, isfile
from os.path import join as joinpath, split as splitpath

# SCons includes
import SCons
import SCons.Node

extra_python_paths = [
    Dir('src/python').srcnode().abspath, # gem5 includes
    Dir('ext/ply').srcnode().abspath,    # ply is used by several files
    ]

sys.path[1:1] = extra_python_paths

from m5.util import compareVersions, readCommand
from m5.util.terminal import get_termcap

help_texts = {
    "options" : "",
    "global_vars" : "",
    "local_vars" : ""
}

Export("help_texts")


# There's a bug in scons in that (1) by default, the help texts from
# AddOption() are supposed to be displayed when you type 'scons -h',
# and (2) you can override the help displayed by 'scons -h' using the
# Help() function, but these two features are incompatible: once
# you've overridden the help text using Help(), there's no way to get
# at the help texts from AddOption().  See:
#     http://scons.tigris.org/issues/show_bug.cgi?id=2356
#     http://scons.tigris.org/issues/show_bug.cgi?id=2611
# This hack lets us extract the help text from AddOption and
# re-inject it via Help().  Ideally someday this bug will be fixed and
# we can just use AddOption directly.
def AddLocalOption(*args, **kwargs):
    col_width = 30

    help = " " + ", ".join(args)
    if "help" in kwargs:
        length = len(help)
        if length >= col_width:
            help += "\n" + " " * col_width
        else:
            help += " " * (col_width - length)
        help += kwargs["help"]
    help_texts["options"] += help + "\n"

    AddOption(*args, **kwargs)

AddLocalOption('--colors', dest='use_colors', action='store_true',
               help="Add color to abbreviated scons output")
AddLocalOption('--no-colors', dest='use_colors', action='store_false',
               help="Don't add color to abbreviated scons output")
AddLocalOption('--default', dest='default', type='string', action='store',
               help='Override which build_opts file to use for defaults')
AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
               help='Disable style checking hooks')
AddLocalOption('--no-lto', dest='no_lto', action='store_true',
               help='Disable Link-Time Optimization for fast')
AddLocalOption('--update-ref', dest='update_ref', action='store_true',
               help='Update test reference outputs')
AddLocalOption('--verbose', dest='verbose', action='store_true',
               help='Print full tool command lines')

termcap = get_termcap(GetOption('use_colors'))

########################################################################
#
# Set up the main build environment.
#
########################################################################

# export TERM so that clang reports errors in color
use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
                 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
                 'PYTHONPATH', 'RANLIB', 'SWIG', 'TERM' ])

use_prefixes = [
    "M5",      # M5 configuration (e.g., path to kernels)
    "DISTCC_", # distcc (distributed compiler wrapper) configuration
    "CCACHE_", # ccache (caching compiler wrapper) configuration
    "CCC_",    # clang static analyzer configuration
    ]

use_env = {}
for key,val in os.environ.iteritems():
    if key in use_vars or \
            any([key.startswith(prefix) for prefix in use_prefixes]):
        use_env[key] = val

main = Environment(ENV=use_env)
main.Decider('MD5-timestamp')
main.root = Dir(".")     # The current directory (where this file lives).
main.srcdir = Dir("src") # The source directory

main_dict_keys = main.Dictionary().keys()

# Check that we have a C/C++ compiler
if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
    print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
    Exit(1)

# Check that swig is present
if not 'SWIG' in main_dict_keys:
    print "swig is not installed (package swig on Ubuntu and RedHat)"
    Exit(1)

# Add the useful python code to PYTHONPATH so it can be used by
# subprocesses as well
main.AppendENVPath('PYTHONPATH', extra_python_paths)

########################################################################
#
# Mercurial Stuff.
#
# If the gem5 directory is a mercurial repository, we should do some
# extra things.
#
########################################################################

hgdir = main.root.Dir(".hg")

mercurial_style_message = """
You're missing the gem5 style hook, which automatically checks your code
against the gem5 style rules on hg commit and qrefresh commands.  This
script will now install the hook in your .hg/hgrc file.
Press enter to continue, or ctrl-c to abort: """

mercurial_style_hook = """
# The following lines were automatically added by gem5/SConstruct
# to provide the gem5 style-checking hooks
[extensions]
style = %s/util/style.py

[hooks]
pretxncommit.style = python:style.check_style
pre-qrefresh.style = python:style.check_style
# End of SConstruct additions

""" % (main.root.abspath)

mercurial_lib_not_found = """
Mercurial libraries cannot be found, ignoring style hook.  If
you are a gem5 developer, please fix this and run the style
hook.  It is important.
"""

# Check for style hook and prompt for installation if it's not there.
# Skip this if --ignore-style was specified, there's no .hg dir to
# install a hook in, or there's no interactive terminal to prompt.
if not GetOption('ignore_style') and hgdir.exists() and sys.stdin.isatty():
    style_hook = True
    try:
        from mercurial import ui
        ui = ui.ui()
        ui.readconfig(hgdir.File('hgrc').abspath)
        style_hook = ui.config('hooks', 'pretxncommit.style', None) and \
                     ui.config('hooks', 'pre-qrefresh.style', None)
    except ImportError:
        print mercurial_lib_not_found

    if not style_hook:
        print mercurial_style_message,
        # continue unless user does ctrl-c/ctrl-d etc.
        try:
            raw_input()
        except:
            print "Input exception, exiting scons.\n"
            sys.exit(1)
        hgrc_path = '%s/.hg/hgrc' % main.root.abspath
        print "Adding style hook to", hgrc_path, "\n"
        try:
            hgrc = open(hgrc_path, 'a')
            hgrc.write(mercurial_style_hook)
            hgrc.close()
        except:
            print "Error updating", hgrc_path
            sys.exit(1)


###################################################
#
# Figure out which configurations to set up based on the path(s) of
# the target(s).
#
###################################################

# Find default configuration & binary.
Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))

# helper function: find last occurrence of element in list
def rfind(l, elt, offs = -1):
    for i in range(len(l)+offs, 0, -1):
        if l[i] == elt:
            return i
    raise ValueError, "element not found"

# Take a list of paths (or SCons Nodes) and return a list with all
# paths made absolute and ~-expanded.  Paths will be interpreted
# relative to the launch directory unless a different root is provided
def makePathListAbsolute(path_list, root=GetLaunchDir()):
    return [abspath(joinpath(root, expanduser(str(p))))
            for p in path_list]

# Each target must have 'build' in the interior of the path; the
# directory below this will determine the build parameters.  For
# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
# recognize that ALPHA_SE specifies the configuration because it
# follows 'build' in the build path.

# The funky assignment to "[:]" is needed to replace the list contents
# in place rather than reassign the symbol to a new list, which
# doesn't work (obviously!).
BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)

# Generate a list of the unique build roots and configs that the
# collected targets reference.
variant_paths = []
build_root = None
for t in BUILD_TARGETS:
    path_dirs = t.split('/')
    try:
        build_top = rfind(path_dirs, 'build', -2)
    except:
        print "Error: no non-leaf 'build' dir found on target path", t
        Exit(1)
    this_build_root = joinpath('/',*path_dirs[:build_top+1])
    if not build_root:
        build_root = this_build_root
    else:
        if this_build_root != build_root:
            print "Error: build targets not under same build root\n"\
                  "  %s\n  %s" % (build_root, this_build_root)
            Exit(1)
    variant_path = joinpath('/',*path_dirs[:build_top+2])
    if variant_path not in variant_paths:
        variant_paths.append(variant_path)

# Make sure build_root exists (might not if this is the first build there)
if not isdir(build_root):
    mkdir(build_root)
main['BUILDROOT'] = build_root

Export('main')

main.SConsignFile(joinpath(build_root, "sconsign"))

# Default duplicate option is to use hard links, but this messes up
# when you use emacs to edit a file in the target dir, as emacs moves
# file to file~ then copies to file, breaking the link.  Symbolic
# (soft) links work better.
main.SetOption('duplicate', 'soft-copy')

#
# Set up global sticky variables... these are common to an entire build
# tree (not specific to a particular build like ALPHA_SE)
#

global_vars_file = joinpath(build_root, 'variables.global')

global_vars = Variables(global_vars_file, args=ARGUMENTS)

global_vars.AddVariables(
    ('CC', 'C compiler', environ.get('CC', main['CC'])),
    ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
    ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
    ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
    ('BATCH', 'Use batch pool for build and tests', False),
    ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
    ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
    ('EXTRAS', 'Add extra directories to the compilation', '')
    )

# Update main environment with values from ARGUMENTS & global_vars_file
global_vars.Update(main)
help_texts["global_vars"] += global_vars.GenerateHelpText(main)

# Save sticky variable settings back to current variables file
global_vars.Save(global_vars_file, main)

# Parse EXTRAS variable to build list of all directories where we're
# looking for sources etc.  This list is exported as extras_dir_list.
base_dir = main.srcdir.abspath
if main['EXTRAS']:
    extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
else:
    extras_dir_list = []

Export('base_dir')
Export('extras_dir_list')

# the ext directory should be on the #includes path
main.Append(CPPPATH=[Dir('ext')])

def strip_build_path(path, env):
    path = str(path)
    variant_base = env['BUILDROOT'] + os.path.sep
    if path.startswith(variant_base):
        path = path[len(variant_base):]
    elif path.startswith('build/'):
        path = path[6:]
    return path

# Generate a string of the form:
#   common/path/prefix/src1, src2 -> tgt1, tgt2
# to print while building.
class Transform(object):
    # all specific color settings should be here and nowhere else
    tool_color = termcap.Normal
    pfx_color = termcap.Yellow
    srcs_color = termcap.Yellow + termcap.Bold
    arrow_color = termcap.Blue + termcap.Bold
    tgts_color = termcap.Yellow + termcap.Bold

    def __init__(self, tool, max_sources=99):
        self.format = self.tool_color + (" [%8s] " % tool) \
                      + self.pfx_color + "%s" \
                      + self.srcs_color + "%s" \
                      + self.arrow_color + " -> " \
                      + self.tgts_color + "%s" \
                      + termcap.Normal
        self.max_sources = max_sources

    def __call__(self, target, source, env, for_signature=None):
        # truncate source list according to max_sources param
        source = source[0:self.max_sources]
        def strip(f):
            return strip_build_path(str(f), env)
        if len(source) > 0:
            srcs = map(strip, source)
        else:
            srcs = ['']
        tgts = map(strip, target)
        # surprisingly, os.path.commonprefix is a dumb char-by-char string
        # operation that has nothing to do with paths.
        com_pfx = os.path.commonprefix(srcs + tgts)
        com_pfx_len = len(com_pfx)
        if com_pfx:
            # do some cleanup and sanity checking on common prefix
            if com_pfx[-1] == ".":
                # prefix matches all but file extension: ok
                # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
                com_pfx = com_pfx[0:-1]
            elif com_pfx[-1] == "/":
                # common prefix is directory path: OK
                pass
            else:
                src0_len = len(srcs[0])
                tgt0_len = len(tgts[0])
                if src0_len == com_pfx_len:
                    # source is a substring of target, OK
                    pass
                elif tgt0_len == com_pfx_len:
                    # target is a substring of source, need to back up to
                    # avoid empty string on RHS of arrow
                    sep_idx = com_pfx.rfind(".")
                    if sep_idx != -1:
                        com_pfx = com_pfx[0:sep_idx]
                    else:
                        com_pfx = ''
                elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
                    # still splitting at file extension: ok
                    pass
                else:
                    # probably a fluke; ignore it
                    com_pfx = ''
        # recalculate length in case com_pfx was modified
        com_pfx_len = len(com_pfx)
        def fmt(files):
            f = map(lambda s: s[com_pfx_len:], files)
            return ', '.join(f)
        return self.format % (com_pfx, fmt(srcs), fmt(tgts))

Export('Transform')

# enable the regression script to use the termcap
main['TERMCAP'] = termcap

if GetOption('verbose'):
    def MakeAction(action, string, *args, **kwargs):
        return Action(action, *args, **kwargs)
else:
    MakeAction = Action
    main['CCCOMSTR']     = Transform("CC")
    main['CXXCOMSTR']    = Transform("CXX")
    main['ASCOMSTR']     = Transform("AS")
    main['SWIGCOMSTR']   = Transform("SWIG")
    main['ARCOMSTR']     = Transform("AR", 0)
    main['LINKCOMSTR']   = Transform("LINK", 0)
    main['RANLIBCOMSTR'] = Transform("RANLIB", 0)
    main['M4COMSTR']     = Transform("M4")
    main['SHCCCOMSTR']   = Transform("SHCC")
    main['SHCXXCOMSTR']  = Transform("SHCXX")
Export('MakeAction')
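
# For illustration (hypothetical file names): with the abbreviated command
# strings set up above, a compile step prints roughly as
#     [      CC] src/base/foo.cc -> .o
# (the common path prefix shared by source and target, then the differing
# suffixes), while running scons with --verbose restores the full tool
# command lines instead.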

# Initialize the Link-Time Optimization (LTO) flags
main['LTO_CCFLAGS'] = []
main['LTO_LDFLAGS'] = []

# According to the readme, tcmalloc works best if the compiler doesn't
# assume that we're using the builtin malloc and friends.  These flags
# are compiler-specific, so we need to set them after we detect which
# compiler we're using.
main['TCMALLOC_CCFLAGS'] = []

CXX_version = readCommand([main['CXX'],'--version'], exception=False)
CXX_V = readCommand([main['CXX'],'-V'], exception=False)

main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
if main['GCC'] + main['CLANG'] > 1:
    print 'Error: How can we have two at the same time?'
    Exit(1)

# Set up default C++ compiler flags
if main['GCC'] or main['CLANG']:
    # As gcc and clang share many flags, do the common parts here
    main.Append(CCFLAGS=['-pipe'])
    main.Append(CCFLAGS=['-fno-strict-aliasing'])
    # Enable -Wall and then disable the few warnings that we
    # consistently violate
    main.Append(CCFLAGS=['-Wall', '-Wno-sign-compare', '-Wundef'])
    # We always compile using C++11, but only gcc >= 4.7 and clang 3.1
    # actually use that name, so we stick with c++0x
    main.Append(CXXFLAGS=['-std=c++0x'])
    # Add selected sanity checks from -Wextra
    main.Append(CXXFLAGS=['-Wmissing-field-initializers',
                          '-Woverloaded-virtual'])
else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + ' version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
              termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print " If you're trying to use a compiler other than GCC"
    print " or clang, there appears to be something wrong with your"
    print " environment."
    print " "
    print " If you are trying to use a compiler other than those listed"
    print " above you will need to fix SConstruct and"
    print " src/SConscript to support that compiler."
    Exit(1)

if main['GCC']:
    # Check for a supported version of gcc. >= 4.6 is chosen for its
    # level of c++11 support. See
    # http://gcc.gnu.org/projects/cxx0x.html for details. 4.6 is also
    # the first version with proper LTO support.
    gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
    if compareVersions(gcc_version, "4.6") < 0:
        print 'Error: gcc version 4.6 or newer required.'
        print '       Installed version:', gcc_version
        Exit(1)

    main['GCC_VERSION'] = gcc_version

    # gcc from version 4.8 and above generates "rep; ret" instructions
    # to avoid performance penalties on certain AMD chips. Older
    # assemblers detect this as an error, "Error: expecting string
    # instruction after `rep'"
    if compareVersions(gcc_version, "4.8") > 0:
        as_version = readCommand([main['AS'], '-v', '/dev/null'],
                                 exception=False).split()

        if not as_version or compareVersions(as_version[-1], "2.23") < 0:
            print termcap.Yellow + termcap.Bold + \
                'Warning: This combination of gcc and binutils has' + \
                ' known incompatibilities.\n' + \
                ' If you encounter build problems, please update ' + \
                'binutils to 2.23.' + \
                termcap.Normal

    # Add the appropriate Link-Time Optimization (LTO) flags
    # unless LTO is explicitly turned off. Note that these flags
    # are only used by the fast target.
    if not GetOption('no_lto'):
        # Pass the LTO flag when compiling to produce GIMPLE
        # output; we merely create the flags here and only append
        # them later.
        main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

        # Use the same number of jobs for LTO as we are running
        # scons with; we hardcode the use of the linker plugin,
        # which requires either gold or GNU ld >= 2.21
        main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs'),
                               '-fuse-linker-plugin']

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
                                  '-fno-builtin-realloc', '-fno-builtin-free'])

elif main['CLANG']:
    # Check for a supported version of clang, >= 3.0 is needed to
    # support similar features as gcc 4.6. See
    # http://clang.llvm.org/cxx_status.html for details
    clang_version_re = re.compile(".* version (\d+\.\d+)")
    clang_version_match = clang_version_re.search(CXX_version)
    if (clang_version_match):
        clang_version = clang_version_match.groups()[0]
        if compareVersions(clang_version, "3.0") < 0:
            print 'Error: clang version 3.0 or newer required.'
            print '       Installed version:', clang_version
            Exit(1)
    else:
        print 'Error: Unable to determine clang version.'
        Exit(1)

    # clang has a few additional warnings that we disable:
    # tautological comparisons are allowed due to unsigned integers
    # being compared to constants that happen to be 0, extraneous
    # parentheses are allowed due to Ruby's printing of the AST, and
    # self assignments are allowed as the generated CPU code relies
    # on this
    main.Append(CCFLAGS=['-Wno-tautological-compare',
                         '-Wno-parentheses',
                         '-Wno-self-assign',
                         # Some versions of libstdc++ (4.8?) seem to
                         # use struct hash and class hash
                         # interchangeably.
                         '-Wno-mismatched-tags',
                         ])

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])

    # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
    # opposed to libstdc++, as the latter is dated.
    if sys.platform == "darwin":
        main.Append(CXXFLAGS=['-stdlib=libc++'])
        main.Append(LIBS=['c++'])

else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + ' version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
              termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print " If you're trying to use a compiler other than GCC"
    print " or clang, there appears to be something wrong with your"
    print " environment."
    print " "
    print " If you are trying to use a compiler other than those listed"
    print " above you will need to fix SConstruct and"
    print " src/SConscript to support that compiler."
    Exit(1)

# Set up common yacc/bison flags (needed for Ruby)
main['YACCFLAGS'] = '-d'
main['YACCHXXFILESUFFIX'] = '.hh'
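
# For illustration (hypothetical values): with BATCH=True and the default
# BATCH_CMD of 'qdo', the tool settings below become e.g. 'qdo g++', so each
# compile, assemble, and archive step is submitted through the batch pool.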

# Do this after we save settings back, or else we'll tack on an
# extra 'qdo' every time we run scons.
if main['BATCH']:
    main['CC']     = main['BATCH_CMD'] + ' ' + main['CC']
    main['CXX']    = main['BATCH_CMD'] + ' ' + main['CXX']
    main['AS']     = main['BATCH_CMD'] + ' ' + main['AS']
    main['AR']     = main['BATCH_CMD'] + ' ' + main['AR']
    main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']

if sys.platform == 'cygwin':
    # cygwin has some header file issues...
    main.Append(CCFLAGS=["-Wno-uninitialized"])

# Check for the protobuf compiler
protoc_version = readCommand([main['PROTOC'], '--version'],
                             exception='').split()

# First two words should be "libprotoc x.y.z"
if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
    print termcap.Yellow + termcap.Bold + \
        'Warning: Protocol buffer compiler (protoc) not found.\n' + \
        ' Please install protobuf-compiler for tracing support.' + \
        termcap.Normal
    main['PROTOC'] = False
else:
    # Based on the availability of the compress stream wrappers,
    # require 2.1.0
    min_protoc_version = '2.1.0'
    if compareVersions(protoc_version[1], min_protoc_version) < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: protoc version', min_protoc_version, \
            'or newer required.\n' + \
            ' Installed version:', protoc_version[1], \
            termcap.Normal
        main['PROTOC'] = False
    else:
        # Attempt to determine the appropriate include path and
        # library path using pkg-config, which means we also need to
        # check for pkg-config.  Note that it is possible to use
        # protobuf without the involvement of pkg-config.  Later on we
        # do a library config check, and at that point the test will
        # fail if libprotobuf cannot be found.
        if readCommand(['pkg-config', '--version'], exception=''):
            try:
                # Attempt to establish what linking flags to add for protobuf
                # using pkg-config
                main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
            except:
                print termcap.Yellow + termcap.Bold + \
                    'Warning: pkg-config could not get protobuf flags.' + \
                    termcap.Normal

# Check for SWIG
if not main.has_key('SWIG'):
    print 'Error: SWIG utility not found.'
    print '       Please install (see http://www.swig.org) and retry.'
    Exit(1)

# Check for appropriate SWIG version
swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
# First 3 words should be "SWIG Version x.y.z"
if len(swig_version) < 3 or \
        swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
    print 'Error determining SWIG version.'
    Exit(1)

min_swig_version = '2.0.4'
if compareVersions(swig_version[2], min_swig_version) < 0:
    print 'Error: SWIG version', min_swig_version, 'or newer required.'
    print '       Installed version:', swig_version[2]
    Exit(1)

# Set up SWIG flags & scanner
swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
main.Append(SWIGFLAGS=swig_flags)

# Check for 'timeout' from GNU coreutils.  If present, regressions
# will be run with a time limit.
TIMEOUT_version = readCommand(['timeout', '--version'], exception=False)
main['TIMEOUT'] = TIMEOUT_version and TIMEOUT_version.find('timeout') == 0

# filter out all existing swig scanners; they mess up the dependency
# stuff for some reason
scanners = []
for scanner in main['SCANNERS']:
    skeys = scanner.skeys
    if skeys == '.i':
        continue

    if isinstance(skeys, (list, tuple)) and '.i' in skeys:
        continue

    scanners.append(scanner)

# add the new swig scanner that we like better
from SCons.Scanner import ClassicCPP as CPPScanner
swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))

# replace the scanners list that has what we want
main['SCANNERS'] = scanners

# Add a custom Check function to the Configure context so that we can
# figure out if the compiler adds leading underscores to global
# variables. This is needed for the autogenerated asm files that we
# use for embedding the python code.
def CheckLeading(context):
    context.Message("Checking for leading underscore in global variables...")
    # 1) Define a global variable called x from asm so the C compiler
    #    won't change the symbol at all.
    # 2) Declare that variable.
    # 3) Use the variable
    #
    # If the compiler prepends an underscore, this will successfully
    # link because the external symbol 'x' will be called '_x' which
    # was defined by the asm statement.  If the compiler does not
    # prepend an underscore, this will not successfully link because
    # '_x' will have been defined by assembly, while the C portion of
    # the code will be trying to use 'x'
    ret = context.TryLink('''
        asm(".globl _x; _x: .byte 0");
        extern int x;
        int main() { return x; }
        ''', extension=".c")
    context.env.Append(LEADING_UNDERSCORE=ret)
    context.Result(ret)
    return ret

# Add a custom Check function to test for structure members.
def CheckMember(context, include, decl, member, include_quotes="<>"):
    context.Message("Checking for member %s in %s..." %
                    (member, decl))
    text = """
#include %(header)s
int main(){
  %(decl)s test;
  (void)test.%(member)s;
  return 0;
};
""" % { "header" : include_quotes[0] + include + include_quotes[1],
        "decl" : decl,
        "member" : member,
        }

    ret = context.TryCompile(text, extension=".cc")
    context.Result(ret)
    return ret

# Platform-specific configuration.  Note again that we assume that all
# builds under a given build root run on the same host platform.
conf = Configure(main,
                 conf_dir = joinpath(build_root, '.scons_config'),
                 log_file = joinpath(build_root, 'scons_config.log'),
                 custom_tests = {
        'CheckLeading' : CheckLeading,
        'CheckMember' : CheckMember,
        })
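
# For illustration: the custom tests registered above are invoked through the
# Configure context, e.g. a hypothetical member probe
#     conf.CheckMember('sys/stat.h', 'struct stat', 'st_mtime')
# compiles a small test program and returns 1 (member found) or 0 (not found).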

# Check for leading underscores.  Don't really need to worry either
# way so don't need to check the return code.
conf.CheckLeading()

# Check if we should compile a 64 bit binary on Mac OS X/Darwin
try:
    import platform
    uname = platform.uname()
    if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
        if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
            main.Append(CCFLAGS=['-arch', 'x86_64'])
            main.Append(CFLAGS=['-arch', 'x86_64'])
            main.Append(LINKFLAGS=['-arch', 'x86_64'])
            main.Append(ASFLAGS=['-arch', 'x86_64'])
except:
    pass

# Recent versions of scons substitute a "Null" object for Configure()
# when configuration isn't necessary, e.g., if the "--help" option is
# present.  Unfortunately this Null object always returns false,
# breaking all our configuration checks.  We replace it with our own
# more optimistic null object that returns True instead.
if not conf:
    def NullCheck(*args, **kwargs):
        return True

    class NullConf:
        def __init__(self, env):
            self.env = env
        def Finish(self):
            return self.env
        def __getattr__(self, mname):
            return NullCheck

    conf = NullConf(main)

# Cache build files in the supplied directory.
if main['M5_BUILD_CACHE']:
    print 'Using build cache located at', main['M5_BUILD_CACHE']
    CacheDir(main['M5_BUILD_CACHE'])

# Find Python include and library directories for embedding the
# interpreter.  We rely on python-config to resolve the appropriate
# includes and linker flags.  ParseConfig does not seem to understand
# the more exotic linker flags such as -Xlinker and -export-dynamic so
# we add them explicitly below.  If you want to link in an alternate
# version of python, see above for instructions on how to invoke
# scons with the appropriate PATH set.
#
# First we check if python2-config exists, else we use python-config
python_config = readCommand(['which', 'python2-config'], exception='').strip()
if not os.path.exists(python_config):
    python_config = readCommand(['which', 'python-config'],
                                exception='').strip()
py_includes = readCommand([python_config, '--includes'],
                          exception='').split()
# Strip the -I from the include folders before adding them to the
# CPPPATH
main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))

# Read the linker flags and split them into libraries and other link
# flags.  The libraries are added later through the call to CheckLib.
py_ld_flags = readCommand([python_config, '--ldflags'], exception='').split()
py_libs = []
for lib in py_ld_flags:
    if not lib.startswith('-l'):
        main.Append(LINKFLAGS=[lib])
    else:
        lib = lib[2:]
        if lib not in py_libs:
            py_libs.append(lib)

# verify that this stuff works
if not conf.CheckHeader('Python.h', '<>'):
    print "Error: can't find Python.h header in", py_includes
    print "Install Python headers (package python-dev on Ubuntu and RedHat)"
    Exit(1)

for lib in py_libs:
    if not conf.CheckLib(lib):
        print "Error: can't find library %s required by python" % lib
        Exit(1)

# On Solaris you need to use libsocket for socket ops
if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
    if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++',
                                   'accept(0,0,0);'):
        print "Can't find library with socket calls (e.g. accept())"
        Exit(1)

# Check for zlib.  If the check passes, libz will be automatically
# added to the LIBS environment variable.
if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
    print 'Error: did not find needed zlib compression library '\
          'and/or zlib.h header file.'
    print '       Please install zlib and try again.'
    Exit(1)

# If we have the protobuf compiler, also make sure we have the
# development libraries.  If the check passes, libprotobuf will be
# automatically added to the LIBS environment variable.  After
# this, we can use the HAVE_PROTOBUF flag to determine if we have
# got both protoc and libprotobuf available.
main['HAVE_PROTOBUF'] = main['PROTOC'] and \
    conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
                            'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')

# If we have the compiler but not the library, print another warning.
if main['PROTOC'] and not main['HAVE_PROTOBUF']:
    print termcap.Yellow + termcap.Bold + \
        'Warning: did not find protocol buffer library and/or headers.\n' + \
        ' Please install libprotobuf-dev for tracing support.' + \
        termcap.Normal

# Check for librt.
have_posix_clock = \
    conf.CheckLibWithHeader(None, 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);') or \
    conf.CheckLibWithHeader('rt', 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);')

have_posix_timers = \
    conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
                            'timer_create(CLOCK_MONOTONIC, NULL, NULL);')

if conf.CheckLib('tcmalloc'):
    main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
elif conf.CheckLib('tcmalloc_minimal'):
    main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
else:
    print termcap.Yellow + termcap.Bold + \
          "You can get a 12% performance improvement by installing tcmalloc "\
          "(libgoogle-perftools-dev package on Ubuntu or RedHat)." + \
          termcap.Normal

if not have_posix_clock:
    print "Can't find library for POSIX clocks."

# Check for <fenv.h> (C99 FP environment control)
have_fenv = conf.CheckHeader('fenv.h', '<>')
if not have_fenv:
    print "Warning: Header file <fenv.h> not found."
    print "         This host has no IEEE FP rounding mode control."

# Check if we should enable KVM-based hardware virtualization.  The API
# we rely on exists since version 2.6.36 of the kernel, but somehow
# the KVM_API_VERSION does not reflect the change.  We test for one of
# the types as a fall back.
have_kvm = conf.CheckHeader('linux/kvm.h', '<>') and \
    conf.CheckTypeSize('struct kvm_xsave', '#include <linux/kvm.h>') != 0
if not have_kvm:
    print "Info: Compatible header file <linux/kvm.h> not found, " \
        "disabling KVM support."

# Check if the requested target ISA is compatible with the host
def is_isa_kvm_compatible(isa):
    isa_comp_table = {
        "arm" : ( "armv7l", ),
        "x86" : ( "x86_64", ),
    }
    try:
        import platform
        host_isa = platform.machine()
    except:
        print "Warning: Failed to determine host ISA."
        return False

    return host_isa in isa_comp_table.get(isa, [])
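
# For illustration (hypothetical host): on an x86_64 machine,
#     is_isa_kvm_compatible("x86") -> True
#     is_isa_kvm_compatible("arm") -> False
# and any ISA missing from isa_comp_table is treated as incompatible.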

# Check if the exclude_host attribute is available.  We want this to
# get accurate instruction counts in KVM.
main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
    'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')


######################################################################
#
# Finish the configuration
#
main = conf.Finish()

######################################################################
#
# Collect all non-global variables
#

# Define the universe of supported ISAs
all_isa_list = [ ]
Export('all_isa_list')

class CpuModel(object):
    '''The CpuModel class encapsulates everything the ISA parser needs to
    know about a particular CPU model.'''

    # Dict of available CPU model objects.  Accessible as CpuModel.dict.
    dict = {}

    # Constructor.  Automatically adds models to CpuModel.dict.
    def __init__(self, name, default=False):
        self.name = name # name of model

        # This cpu is enabled by default
        self.default = default

        # Add self to dict
        if name in CpuModel.dict:
            raise AttributeError, "CpuModel '%s' already registered" % name
        CpuModel.dict[name] = self

Export('CpuModel')

# Sticky variables get saved in the variables file so they persist from
# one invocation to the next (unless overridden, in which case the new
# value becomes sticky).
sticky_vars = Variables(args=ARGUMENTS)
Export('sticky_vars')

# Sticky variables that should be exported
export_vars = []
Export('export_vars')

# For Ruby
all_protocols = []
Export('all_protocols')
protocol_dirs = []
Export('protocol_dirs')
slicc_includes = []
Export('slicc_includes')

# Walk the tree and execute all SConsopts scripts that will add to the
# above variables
if GetOption('verbose'):
    print "Reading SConsopts"
for bdir in [ base_dir ] + extras_dir_list:
    if not isdir(bdir):
        print "Error: directory '%s' does not exist" % bdir
        Exit(1)
    for root, dirs, files in os.walk(bdir):
        if 'SConsopts' in files:
            if GetOption('verbose'):
                print "Reading", joinpath(root, 'SConsopts')
            SConscript(joinpath(root, 'SConsopts'))

all_isa_list.sort()

sticky_vars.AddVariables(
    EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
    ListVariable('CPU_MODELS', 'CPU models',
                 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
                 sorted(CpuModel.dict.keys())),
    BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
                 False),
    BoolVariable('SS_COMPATIBLE_FP',
                 'Make floating-point results compatible with SimpleScalar',
                 False),
    BoolVariable('USE_SSE2',
                 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
                 False),
    BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
    BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
    BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability',
                 False),
    BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models',
                 have_kvm),
    EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
                 all_protocols),
    )
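
# For illustration (hypothetical invocation): sticky variables can also be
# set on the scons command line, and the values are then saved in the
# per-config variables file, e.g.
#     scons USE_KVM=False PROTOCOL=MOESI_hammer build/X86/gem5.opt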

# These variables get exported to #defines in config/*.hh (see src/SConscript).
export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'CP_ANNOTATE',
                'USE_POSIX_CLOCK', 'PROTOCOL', 'HAVE_PROTOBUF',
                'HAVE_PERF_ATTR_EXCLUDE_HOST']

###################################################
#
# Define a SCons builder for configuration flag headers.
#
###################################################

# This function generates a config header file that #defines the
# variable symbol to the current variable setting (0 or 1).  The source
# operands are the name of the variable and a Value node containing the
# value of the variable.
def build_config_file(target, source, env):
    (variable, value) = [s.get_contents() for s in source]
    f = file(str(target[0]), 'w')
    print >> f, '#define', variable, value
    f.close()
    return None

# Combine the two functions into a scons Action object.
config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))

# The emitter munges the source & target node lists to reflect what
# we're really doing.
def config_emitter(target, source, env):
    # extract variable name from Builder arg
    variable = str(target[0])
    # True target is config header file
    target = joinpath('config', variable.lower() + '.hh')
    val = env[variable]
    if isinstance(val, bool):
        # Force value to 0/1
        val = int(val)
    elif isinstance(val, str):
        val = '"' + val + '"'

    # Sources are variable name & value (packaged in SCons Value nodes)
    return ([target], [Value(variable), Value(val)])

config_builder = Builder(emitter = config_emitter, action = config_action)

main.Append(BUILDERS = { 'ConfigFile' : config_builder })
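
# For illustration: src/SConscript invokes this builder roughly as
#     env.ConfigFile('USE_FENV')
# which emits build/<CONFIG>/config/use_fenv.hh containing a single line such
# as '#define USE_FENV 1', with the value taken from the build variable.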

# libelf build is shared across all configs in the build root.
main.SConscript('ext/libelf/SConscript',
                variant_dir = joinpath(build_root, 'libelf'))

# gzstream build is shared across all configs in the build root.
main.SConscript('ext/gzstream/SConscript',
                variant_dir = joinpath(build_root, 'gzstream'))

# libfdt build is shared across all configs in the build root.
main.SConscript('ext/libfdt/SConscript',
                variant_dir = joinpath(build_root, 'libfdt'))

# fputils build is shared across all configs in the build root.
main.SConscript('ext/fputils/SConscript',
                variant_dir = joinpath(build_root, 'fputils'))

# DRAMSim2 build is shared across all configs in the build root.
main.SConscript('ext/dramsim2/SConscript',
                variant_dir = joinpath(build_root, 'dramsim2'))

###################################################
#
# This function is used to set up a directory with switching headers
#
###################################################

main['ALL_ISA_LIST'] = all_isa_list
all_isa_deps = {}
def make_switching_dir(dname, switch_headers, env):
    # Generate the header.  target[0] is the full path of the output
    # header to generate.  'source' is a dummy variable, since we get the
    # list of ISAs from env['ALL_ISA_LIST'].
    def gen_switch_hdr(target, source, env):
        fname = str(target[0])
        isa = env['TARGET_ISA'].lower()
        try:
            f = open(fname, 'w')
            print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
            f.close()
        except IOError:
            print "Failed to create %s" % fname
            raise

    # Build SCons Action object.  'varlist' specifies env vars that this
    # action depends on; when env['ALL_ISA_LIST'] changes these actions
    # should get re-executed.
    switch_hdr_action = MakeAction(gen_switch_hdr,
                          Transform("GENERATE"), varlist=['ALL_ISA_LIST'])

    # Instantiate actions for each header
    for hdr in switch_headers:
        env.Command(hdr, [], switch_hdr_action)

    isa_target = Dir('.').up().name.lower().replace('_', '-')
    env['PHONY_BASE'] = '#'+isa_target
    all_isa_deps[isa_target] = None

Export('make_switching_dir')

# all-isas -> all-deps -> all-environs -> all_targets
main.Alias('#all-isas', [])
main.Alias('#all-deps', '#all-isas')

# Dummy target to ensure all environments are created before telling
# SCons what to actually make (the command line arguments).  We attach
# them to the dependence graph after the environments are complete.
ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; gets closure to work.
def environsComplete(target, source, env):
    for t in ORIG_BUILD_TARGETS:
        main.Depends('#all-targets', t)

# Each build/* switching_dir attaches its *-environs target to #all-environs.
main.Append(BUILDERS = {'CompleteEnvirons' :
                        Builder(action=MakeAction(environsComplete, None))})
main.CompleteEnvirons('#all-environs', [])

def doNothing(**ignored): pass
main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})

# The final target to which all the original targets ultimately get attached.
main.Dummy('#all-targets', '#all-environs')
BUILD_TARGETS[:] = ['#all-targets']

###################################################
#
# Define build environments for selected configurations.
#
###################################################

for variant_path in variant_paths:
    if not GetOption('silent'):
        print "Building in", variant_path

    # Make a copy of the build-root environment to use for this config.
    env = main.Clone()
    env['BUILDDIR'] = variant_path

    # variant_dir is the tail component of build path, and is used to
    # determine the build parameters (e.g., 'ALPHA_SE')
    (build_root, variant_dir) = splitpath(variant_path)

    # Set env variables according to the build directory config.
    sticky_vars.files = []
    # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
    # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
    # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
    current_vars_file = joinpath(build_root, 'variables', variant_dir)
    if isfile(current_vars_file):
        sticky_vars.files.append(current_vars_file)
        if not GetOption('silent'):
            print "Using saved variables file %s" % current_vars_file
    else:
        # Build dir-specific variables file doesn't exist.

        # Make sure the directory is there so we can create it later
        opt_dir = dirname(current_vars_file)
        if not isdir(opt_dir):
            mkdir(opt_dir)

        # Get default build variables from source tree.  Variables are
        # normally determined by name of $VARIANT_DIR, but can be
        # overridden by '--default=' arg on command line.
        default = GetOption('default')
        opts_dir = joinpath(main.root.abspath, 'build_opts')
        if default:
            default_vars_files = [joinpath(build_root, 'variables', default),
                                  joinpath(opts_dir, default)]
        else:
            default_vars_files = [joinpath(opts_dir, variant_dir)]
        existing_files = filter(isfile, default_vars_files)
        if existing_files:
            default_vars_file = existing_files[0]
            sticky_vars.files.append(default_vars_file)
            print "Variables file %s not found,\n  using defaults in %s" \
                  % (current_vars_file, default_vars_file)
        else:
            print "Error: cannot find variables file %s or " \
                  "default file(s) %s" \
                  % (current_vars_file, ' or '.join(default_vars_files))
            Exit(1)

    # Apply current variable settings to env
    sticky_vars.Update(env)

    help_texts["local_vars"] += \
        "Build variables for %s:\n" % variant_dir \
        + sticky_vars.GenerateHelpText(env)

    # Process variable settings.

    if not have_fenv and env['USE_FENV']:
        print "Warning: <fenv.h> not available; " \
              "forcing USE_FENV to False in", variant_dir + "."
        env['USE_FENV'] = False

    if not env['USE_FENV']:
        print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
        print "         FP results may deviate slightly from other platforms."

    if env['EFENCE']:
        env.Append(LIBS=['efence'])

    if env['USE_KVM']:
        if not have_kvm:
            print "Warning: Can not enable KVM, host seems to lack KVM support"
            env['USE_KVM'] = False
        elif not have_posix_timers:
            print "Warning: Can not enable KVM, host seems to lack support " \
                "for POSIX timers"
            env['USE_KVM'] = False
        elif not is_isa_kvm_compatible(env['TARGET_ISA']):
            print "Info: KVM support disabled due to unsupported host and " \
                "target ISA combination"
            env['USE_KVM'] = False

    # Warn about missing optional functionality
    if env['USE_KVM']:
        if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
            print "Warning: perf_event headers lack support for the " \
                "exclude_host attribute. KVM instruction counts will " \
                "be inaccurate."

    # Save sticky variable settings back to current variables file
    sticky_vars.Save(current_vars_file, env)

    if env['USE_SSE2']:
        env.Append(CCFLAGS=['-msse2'])

    # The src/SConscript file sets up the build rules in 'env' according
    # to the configured variables.  It returns a list of environments,
    # one for each variant build (debug, opt, etc.)
    SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')

def pairwise(iterable):
    "s -> (s0,s1), (s1,s2), (s2, s3), ..."
    a, b = itertools.tee(iterable)
    b.next()
    return itertools.izip(a, b)
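
# For illustration: pairwise(['alpha', 'arm', 'x86']) yields the pairs
# ('alpha', 'arm') and ('arm', 'x86'); it is used below to chain the per-ISA
# *-deps and *-environs targets so they are processed one at a time.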

# Create false dependencies so SCons will parse ISAs, establish
# dependencies, and set up the build Environments serially.  Either
# SCons (likely) and/or our SConscripts (possibly) cannot cope with -j
# greater than 1.  It appears to be standard race condition stuff; it
# doesn't always fail, but usually, and the behaviors are different.
# Every time I tried to remove this, builds would fail in some
# creative new way.  So, don't do that.  You'll want to, though, because
# tests/SConscript takes a long time to make its Environments.
for t1, t2 in pairwise(sorted(all_isa_deps.iterkeys())):
    main.Depends('#%s-deps' % t2, '#%s-deps' % t1)
    main.Depends('#%s-environs' % t2, '#%s-environs' % t1)

# base help text
Help('''
Usage: scons [scons options] [build variables] [target(s)]

Extra scons options:
%(options)s

Global build variables:
%(global_vars)s

%(local_vars)s
''' % help_texts)