SConstruct revision 11450
# -*- mode:python -*-

# Copyright (c) 2013, 2015, 2016 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder.  You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2011 Advanced Micro Devices, Inc.
# Copyright (c) 2009 The Hewlett-Packard Development Company
# Copyright (c) 2004-2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
#          Nathan Binkert

###################################################
#
# SCons top-level build description (SConstruct) file.
#
# While in this directory ('gem5'), just type 'scons' to build the default
# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
# the optimized full-system version).
#
# You can build gem5 in a different directory as long as there is a
# 'build/<CONFIG>' somewhere along the target path.  The build system
# expects that all configs under the same build directory are being
# built for the same host system.
#
# Examples:
#
#   The following two commands are equivalent.  The '-u' option tells
#   scons to search up the directory tree for this SConstruct file.
#   % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
#   % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
#
#   The following two commands are equivalent and demonstrate building
#   in a directory outside of the source tree.  The '-C' option tells
#   scons to chdir to the specified directory to find this SConstruct
#   file.
#   % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
#   % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
#
# You can use 'scons -H' to print scons options.  If you're in this
# 'gem5' directory (or use -u or -C to tell scons where to find this
# file), you can use 'scons -h' to print all the gem5-specific build
# options as well.
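#
#   Other targets follow the same pattern: the last path component picks
#   the compile flavor (gem5.debug, gem5.opt, gem5.fast, ...) and the
#   directory picks the configuration. Illustrative examples, assuming the
#   corresponding build_opts config exists:
#   % cd <path-to-src>/gem5 ; scons build/X86/gem5.opt
#   % cd <path-to-src>/gem5 ; scons -j 8 build/ARM/gem5.fast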
#
###################################################

# Check for recent-enough Python and SCons versions.
try:
    # Really old versions of scons only take two options for the
    # function, so check once without the revision and once with the
    # revision, the first instance will fail for stuff other than
    # 0.98, and the second will fail for 0.98.0
    EnsureSConsVersion(0, 98)
    EnsureSConsVersion(0, 98, 1)
except SystemExit, e:
    print """
For more details, see:
    http://gem5.org/Dependencies
"""
    raise

# We ensure the python version early because python-config
# requires python 2.5
try:
    EnsurePythonVersion(2, 5)
except SystemExit, e:
    print """
You can use a non-default installation of the Python interpreter by
rearranging your PATH so that scons finds the non-default 'python' and
'python-config' first.

For more details, see:
    http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
"""
    raise

# Global Python includes
import itertools
import os
import re
import shutil
import subprocess
import sys

from os import mkdir, environ
from os.path import abspath, basename, dirname, expanduser, normpath
from os.path import exists, isdir, isfile
from os.path import join as joinpath, split as splitpath

# SCons includes
import SCons
import SCons.Node

extra_python_paths = [
    Dir('src/python').srcnode().abspath, # gem5 includes
    Dir('ext/ply').srcnode().abspath,    # ply is used by several files
    ]

sys.path[1:1] = extra_python_paths

from m5.util import compareVersions, readCommand
from m5.util.terminal import get_termcap
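# compareVersions() and readCommand() (imported above) are the helpers this
# file leans on for tool detection. Roughly, with illustrative values:
#   readCommand(['g++', '--version'], exception=False) -> version banner,
#       or the exception= value if the command cannot be run
#   compareVersions('4.9.2', '4.7') -> positive (first version is newer)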
help_texts = {
    "options" : "",
    "global_vars" : "",
    "local_vars" : ""
}

Export("help_texts")


# There's a bug in scons in that (1) by default, the help texts from
# AddOption() are supposed to be displayed when you type 'scons -h'
# and (2) you can override the help displayed by 'scons -h' using the
# Help() function, but these two features are incompatible: once
# you've overridden the help text using Help(), there's no way to get
# at the help texts from AddOptions.  See:
#     http://scons.tigris.org/issues/show_bug.cgi?id=2356
#     http://scons.tigris.org/issues/show_bug.cgi?id=2611
# This hack lets us extract the help text from AddOptions and
# re-inject it via Help().  Ideally someday this bug will be fixed and
# we can just use AddOption directly.
def AddLocalOption(*args, **kwargs):
    col_width = 30

    help = "  " + ", ".join(args)
    if "help" in kwargs:
        length = len(help)
        if length >= col_width:
            help += "\n" + " " * col_width
        else:
            help += " " * (col_width - length)
        help += kwargs["help"]
    help_texts["options"] += help + "\n"

    AddOption(*args, **kwargs)

AddLocalOption('--colors', dest='use_colors', action='store_true',
               help="Add color to abbreviated scons output")
AddLocalOption('--no-colors', dest='use_colors', action='store_false',
               help="Don't add color to abbreviated scons output")
AddLocalOption('--with-cxx-config', dest='with_cxx_config',
               action='store_true',
               help="Build with support for C++-based configuration")
AddLocalOption('--default', dest='default', type='string', action='store',
               help='Override which build_opts file to use for defaults')
AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
               help='Disable style checking hooks')
AddLocalOption('--no-lto', dest='no_lto', action='store_true',
               help='Disable Link-Time Optimization for fast')
AddLocalOption('--update-ref', dest='update_ref', action='store_true',
               help='Update test reference outputs')
AddLocalOption('--verbose', dest='verbose', action='store_true',
               help='Print full tool command lines')
AddLocalOption('--without-python', dest='without_python',
               action='store_true',
               help='Build without Python configuration support')
AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
               action='store_true',
               help='Disable linking against tcmalloc')
AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
               help='Build with Undefined Behavior Sanitizer if available')
AddLocalOption('--with-asan', dest='with_asan', action='store_true',
               help='Build with Address Sanitizer if available')

termcap = get_termcap(GetOption('use_colors'))

########################################################################
#
# Set up the main build environment.
#
########################################################################

# export TERM so that clang reports errors in color
use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
                 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
                 'PYTHONPATH', 'RANLIB', 'SWIG', 'TERM' ])

use_prefixes = [
    "ASAN_",           # address sanitizer symbolizer path and settings
    "CCACHE_",         # ccache (caching compiler wrapper) configuration
    "CCC_",            # clang static analyzer configuration
    "DISTCC_",         # distcc (distributed compiler wrapper) configuration
    "INCLUDE_SERVER_", # distcc pump server settings
    "M5",              # M5 configuration (e.g., path to kernels)
    ]

use_env = {}
for key,val in sorted(os.environ.iteritems()):
    if key in use_vars or \
            any([key.startswith(prefix) for prefix in use_prefixes]):
        use_env[key] = val

# Tell scons to avoid implicit command dependencies to avoid issues
# with the param wrappers being compiled twice (see
# http://scons.tigris.org/issues/show_bug.cgi?id=2811)
main = Environment(ENV=use_env, IMPLICIT_COMMAND_DEPENDENCIES=0)
main.Decider('MD5-timestamp')
main.root = Dir(".")         # The current directory (where this file lives).
main.srcdir = Dir("src")     # The source directory
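# Only the variables and prefixes listed above are forwarded from the
# calling shell, so wrapper tools are configured through the environment,
# e.g. (illustrative):
#   % CCACHE_DIR=/tmp/ccache CC='ccache gcc' CXX='ccache g++' scons build/X86/gem5.opt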
main_dict_keys = main.Dictionary().keys()

# Check that we have a C/C++ compiler
if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
    print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
    Exit(1)

# Check that swig is present
if not 'SWIG' in main_dict_keys:
    print "swig is not installed (package swig on Ubuntu and RedHat)"
    Exit(1)

# add useful python code to PYTHONPATH so it can be used by subprocesses
# as well
main.AppendENVPath('PYTHONPATH', extra_python_paths)

########################################################################
#
# Mercurial Stuff.
#
# If the gem5 directory is a mercurial repository, we should do some
# extra things.
#
########################################################################

hgdir = main.root.Dir(".hg")
gitdir = main.root.Dir(".git")


style_message = """
You're missing the gem5 style hook, which automatically checks your code
against the gem5 style rules on hg commit and qrefresh commands.  This
script will now install the hook in your %s.
Press enter to continue, or ctrl-c to abort: """

mercurial_style_message = style_message % ".hg/hgrc file"
git_style_message = style_message % ".git/hooks/ directory"

mercurial_style_upgrade_message = """
Your Mercurial style hooks are not up-to-date. This script will now
try to automatically update them. A backup of your hgrc will be saved
in .hg/hgrc.old.
Press enter to continue, or ctrl-c to abort: """

mercurial_style_hook = """
# The following lines were automatically added by gem5/SConstruct
# to provide the gem5 style-checking hooks
[extensions]
hgstyle = %s/util/hgstyle.py

[hooks]
pretxncommit.style = python:hgstyle.check_style
pre-qrefresh.style = python:hgstyle.check_style
# End of SConstruct additions

""" % (main.root.abspath)

mercurial_lib_not_found = """
Mercurial libraries cannot be found, ignoring style hook.  If
you are a gem5 developer, please fix this and run the style
hook. It is important.
"""

# Check for style hook and prompt for installation if it's not there.
# Skip this if --ignore-style was specified, there's no interactive
# terminal to prompt, or no recognized revision control system can be
# found.
ignore_style = GetOption('ignore_style') or not sys.stdin.isatty()
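# If there is no interactive terminal (or --ignore-style is given), the
# hooks can still be installed by hand; roughly equivalent steps are
# (illustrative only):
#   % ln -s ../../util/git-pre-commit.py .git/hooks/pre-commit
#   append the [extensions]/[hooks] block defined above to .hg/hgrc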
# Try to wire up Mercurial to the style hooks
if not ignore_style and hgdir.exists():
    style_hook = True
    style_hooks = tuple()
    hgrc = hgdir.File('hgrc')
    hgrc_old = hgdir.File('hgrc.old')
    try:
        from mercurial import ui
        ui = ui.ui()
        ui.readconfig(hgrc.abspath)
        style_hooks = (ui.config('hooks', 'pretxncommit.style', None),
                       ui.config('hooks', 'pre-qrefresh.style', None))
        style_hook = all(style_hooks)
        style_extension = ui.config('extensions', 'style', None)
    except ImportError:
        print mercurial_lib_not_found

    if "python:style.check_style" in style_hooks:
        # Try to upgrade the style hooks
        print mercurial_style_upgrade_message
        # continue unless user does ctrl-c/ctrl-d etc.
        try:
            raw_input()
        except:
            print "Input exception, exiting scons.\n"
            sys.exit(1)
        shutil.copyfile(hgrc.abspath, hgrc_old.abspath)
        re_style_hook = re.compile(r"^([^=#]+)\.style\s*=\s*([^#\s]+).*")
        re_style_extension = re.compile("style\s*=\s*([^#\s]+).*")
        old, new = open(hgrc_old.abspath, 'r'), open(hgrc.abspath, 'w')
        for l in old:
            m_hook = re_style_hook.match(l)
            m_ext = re_style_extension.match(l)
            if m_hook:
                hook, check = m_hook.groups()
                if check != "python:style.check_style":
                    print "Warning: %s.style is using a non-default " \
                        "checker: %s" % (hook, check)
                if hook not in ("pretxncommit", "pre-qrefresh"):
                    print "Warning: Updating unknown style hook: %s" % hook

                l = "%s.style = python:hgstyle.check_style\n" % hook
            elif m_ext and m_ext.group(1) == style_extension:
                l = "hgstyle = %s/util/hgstyle.py\n" % main.root.abspath

            new.write(l)
    elif not style_hook:
        print mercurial_style_message,
        # continue unless user does ctrl-c/ctrl-d etc.
        try:
            raw_input()
        except:
            print "Input exception, exiting scons.\n"
            sys.exit(1)
        hgrc_path = '%s/.hg/hgrc' % main.root.abspath
        print "Adding style hook to", hgrc_path, "\n"
        try:
            with open(hgrc_path, 'a') as f:
                f.write(mercurial_style_hook)
        except:
            print "Error updating", hgrc_path
            sys.exit(1)

# Try to wire up git to the style hooks
git_pre_commit_hook = gitdir.File("hooks/pre-commit")
if not ignore_style and gitdir.exists() and not git_pre_commit_hook.exists():
    git_style_script = File("util/git-pre-commit.py")

    print git_style_message,
    try:
        raw_input()
    except:
        print "Input exception, exiting scons.\n"
        sys.exit(1)

    try:
        rel_style_script = os.path.relpath(
            git_style_script.get_abspath(),
            git_pre_commit_hook.Dir(".").get_abspath())
        os.symlink(rel_style_script, git_pre_commit_hook.get_abspath())
    except:
        print "Error updating git pre-commit hook"
        raise
        sys.exit(1)

###################################################
#
# Figure out which configurations to set up based on the path(s) of
# the target(s).
#
###################################################

# Find default configuration & binary.
Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))

# helper function: find last occurrence of element in list
def rfind(l, elt, offs = -1):
    for i in range(len(l)+offs, 0, -1):
        if l[i] == elt:
            return i
    raise ValueError, "element not found"

# Take a list of paths (or SCons Nodes) and return a list with all
# paths made absolute and ~-expanded.  Paths will be interpreted
# relative to the launch directory unless a different root is provided
def makePathListAbsolute(path_list, root=GetLaunchDir()):
    return [abspath(joinpath(root, expanduser(str(p))))
            for p in path_list]
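# Rough behaviour of the two helpers above (illustrative values only):
#   rfind(['a', 'build', 'X86', 'gem5.opt'], 'build') -> 1
#   makePathListAbsolute(['build/X86/gem5.opt'])
#       -> ['<launch-dir>/build/X86/gem5.opt']  (absolute, ~-expanded)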
# Each target must have 'build' in the interior of the path; the
# directory below this will determine the build parameters.  For
# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
# recognize that ALPHA_SE specifies the configuration because it
# follows 'build' in the build path.

# The funky assignment to "[:]" is needed to replace the list contents
# in place rather than reassign the symbol to a new list, which
# doesn't work (obviously!).
BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)

# Generate a list of the unique build roots and configs that the
# collected targets reference.
variant_paths = []
build_root = None
for t in BUILD_TARGETS:
    path_dirs = t.split('/')
    try:
        build_top = rfind(path_dirs, 'build', -2)
    except:
        print "Error: no non-leaf 'build' dir found on target path", t
        Exit(1)
    this_build_root = joinpath('/',*path_dirs[:build_top+1])
    if not build_root:
        build_root = this_build_root
    else:
        if this_build_root != build_root:
            print "Error: build targets not under same build root\n"\
                  "  %s\n  %s" % (build_root, this_build_root)
            Exit(1)
    variant_path = joinpath('/',*path_dirs[:build_top+2])
    if variant_path not in variant_paths:
        variant_paths.append(variant_path)

# Make sure build_root exists (might not if this is the first build there)
if not isdir(build_root):
    mkdir(build_root)
main['BUILDROOT'] = build_root

Export('main')

main.SConsignFile(joinpath(build_root, "sconsign"))

# Default duplicate option is to use hard links, but this messes up
# when you use emacs to edit a file in the target dir, as emacs moves
# file to file~ then copies to file, breaking the link.  Symbolic
# (soft) links work better.
main.SetOption('duplicate', 'soft-copy')

#
# Set up global sticky variables... these are common to an entire build
# tree (not specific to a particular build like ALPHA_SE)
#

global_vars_file = joinpath(build_root, 'variables.global')

global_vars = Variables(global_vars_file, args=ARGUMENTS)

global_vars.AddVariables(
    ('CC', 'C compiler', environ.get('CC', main['CC'])),
    ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
    ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
    ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
    ('BATCH', 'Use batch pool for build and tests', False),
    ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
    ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
    ('EXTRAS', 'Add extra directories to the compilation', '')
    )

# Update main environment with values from ARGUMENTS & global_vars_file
global_vars.Update(main)
help_texts["global_vars"] += global_vars.GenerateHelpText(main)

# Save sticky variable settings back to current variables file
global_vars.Save(global_vars_file, main)

# Parse EXTRAS variable to build list of all directories where we'll
# look for sources etc.  This list is exported as extras_dir_list.
base_dir = main.srcdir.abspath
if main['EXTRAS']:
    extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
else:
    extras_dir_list = []

Export('base_dir')
Export('extras_dir_list')

# the ext directory should be on the #includes path
main.Append(CPPPATH=[Dir('ext')])
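# The global sticky variables above are given as NAME=value arguments and
# are remembered in <build_root>/variables.global, e.g. (illustrative):
#   % scons CC=gcc-5 CXX=g++-5 EXTRAS=/path/to/extras build/X86/gem5.opt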
def strip_build_path(path, env):
    path = str(path)
    variant_base = env['BUILDROOT'] + os.path.sep
    if path.startswith(variant_base):
        path = path[len(variant_base):]
    elif path.startswith('build/'):
        path = path[6:]
    return path

# Generate a string of the form:
#   common/path/prefix/src1, src2 -> tgt1, tgt2
# to print while building.
class Transform(object):
    # all specific color settings should be here and nowhere else
    tool_color = termcap.Normal
    pfx_color = termcap.Yellow
    srcs_color = termcap.Yellow + termcap.Bold
    arrow_color = termcap.Blue + termcap.Bold
    tgts_color = termcap.Yellow + termcap.Bold

    def __init__(self, tool, max_sources=99):
        self.format = self.tool_color + (" [%8s] " % tool) \
                      + self.pfx_color + "%s" \
                      + self.srcs_color + "%s" \
                      + self.arrow_color + " -> " \
                      + self.tgts_color + "%s" \
                      + termcap.Normal
        self.max_sources = max_sources

    def __call__(self, target, source, env, for_signature=None):
        # truncate source list according to max_sources param
        source = source[0:self.max_sources]
        def strip(f):
            return strip_build_path(str(f), env)
        if len(source) > 0:
            srcs = map(strip, source)
        else:
            srcs = ['']
        tgts = map(strip, target)
        # surprisingly, os.path.commonprefix is a dumb char-by-char string
        # operation that has nothing to do with paths.
        com_pfx = os.path.commonprefix(srcs + tgts)
        com_pfx_len = len(com_pfx)
        if com_pfx:
            # do some cleanup and sanity checking on common prefix
            if com_pfx[-1] == ".":
                # prefix matches all but file extension: ok
                # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
                com_pfx = com_pfx[0:-1]
            elif com_pfx[-1] == "/":
                # common prefix is directory path: OK
                pass
            else:
                src0_len = len(srcs[0])
                tgt0_len = len(tgts[0])
                if src0_len == com_pfx_len:
                    # source is a substring of target, OK
                    pass
                elif tgt0_len == com_pfx_len:
                    # target is a substring of source, need to back up to
                    # avoid empty string on RHS of arrow
                    sep_idx = com_pfx.rfind(".")
                    if sep_idx != -1:
                        com_pfx = com_pfx[0:sep_idx]
                    else:
                        com_pfx = ''
                elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
                    # still splitting at file extension: ok
                    pass
                else:
                    # probably a fluke; ignore it
                    com_pfx = ''
        # recalculate length in case com_pfx was modified
        com_pfx_len = len(com_pfx)
        def fmt(files):
            f = map(lambda s: s[com_pfx_len:], files)
            return ', '.join(f)
        return self.format % (com_pfx, fmt(srcs), fmt(tgts))

Export('Transform')

# enable the regression script to use the termcap
main['TERMCAP'] = termcap

if GetOption('verbose'):
    def MakeAction(action, string, *args, **kwargs):
        return Action(action, *args, **kwargs)
else:
    MakeAction = Action
    main['CCCOMSTR']        = Transform("CC")
    main['CXXCOMSTR']       = Transform("CXX")
    main['ASCOMSTR']        = Transform("AS")
    main['SWIGCOMSTR']      = Transform("SWIG")
    main['ARCOMSTR']        = Transform("AR", 0)
    main['LINKCOMSTR']      = Transform("LINK", 0)
    main['RANLIBCOMSTR']    = Transform("RANLIB", 0)
    main['M4COMSTR']        = Transform("M4")
    main['SHCCCOMSTR']      = Transform("SHCC")
    main['SHCXXCOMSTR']     = Transform("SHCXX")
Export('MakeAction')

# Initialize the Link-Time Optimization (LTO) flags
main['LTO_CCFLAGS'] = []
main['LTO_LDFLAGS'] = []
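# With the COMSTR transforms above, abbreviated build output looks roughly
# like this (illustrative; pass --verbose for the full command lines):
#    [     CXX] X86/sim/main.cc -> .o
#    [    LINK]  -> X86/gem5.opt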
# According to the readme, tcmalloc works best if the compiler doesn't
# assume that we're using the builtin malloc and friends. These flags
# are compiler-specific, so we need to set them after we detect which
# compiler we're using.
main['TCMALLOC_CCFLAGS'] = []

CXX_version = readCommand([main['CXX'],'--version'], exception=False)
CXX_V = readCommand([main['CXX'],'-V'], exception=False)

main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
if main['GCC'] + main['CLANG'] > 1:
    print 'Error: How can we have two at the same time?'
    Exit(1)

# Set up default C++ compiler flags
if main['GCC'] or main['CLANG']:
    # As gcc and clang share many flags, do the common parts here
    main.Append(CCFLAGS=['-pipe'])
    main.Append(CCFLAGS=['-fno-strict-aliasing'])
    # Enable -Wall and -Wextra and then disable the few warnings that
    # we consistently violate
    main.Append(CCFLAGS=['-Wall', '-Wundef', '-Wextra',
                         '-Wno-sign-compare', '-Wno-unused-parameter'])
    # We always compile using C++11
    main.Append(CXXFLAGS=['-std=c++11'])
else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
              termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use a compiler other than GCC"
    print "       or clang, there appears to be something wrong with your"
    print "       environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to fix SConstruct and "
    print "       src/SConscript to support that compiler."
    Exit(1)

if main['GCC']:
    # Check for a supported version of gcc. >= 4.7 is chosen for its
    # level of c++11 support. See
    # http://gcc.gnu.org/projects/cxx0x.html for details.
    gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
    if compareVersions(gcc_version, "4.7") < 0:
        print 'Error: gcc version 4.7 or newer required.'
        print '       Installed version:', gcc_version
        Exit(1)

    main['GCC_VERSION'] = gcc_version

    # gcc from version 4.8 and above generates "rep; ret" instructions
    # to avoid performance penalties on certain AMD chips. Older
    # assemblers detect this as an error, "Error: expecting string
    # instruction after `rep'"
    if compareVersions(gcc_version, "4.8") > 0:
        as_version_raw = readCommand([main['AS'], '-v', '/dev/null'],
                                     exception=False).split()

        # version strings may contain extra distro-specific
        # qualifiers, so play it safe and keep only what comes before
        # the first hyphen
        as_version = as_version_raw[-1].split('-')[0] if as_version_raw \
            else None

        if not as_version or compareVersions(as_version, "2.23") < 0:
            print termcap.Yellow + termcap.Bold + \
                'Warning: This combination of gcc and binutils has' + \
                ' known incompatibilities.\n' + \
                '         If you encounter build problems, please update ' + \
                'binutils to 2.23.' + \
                termcap.Normal

    # Make sure we warn if the user has requested to compile with the
    # Undefined Behavior Sanitizer and this version of gcc does not
    # support it.
    if GetOption('with_ubsan') and \
            compareVersions(gcc_version, '4.9') < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: UBSan is only supported using gcc 4.9 and later.' + \
            termcap.Normal
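    # The LTO flags below inherit the scons job count, so (illustrative):
    #   % scons -j 8 build/X86/gem5.fast      # compiles/links with -flto=8
    #   % scons --no-lto build/X86/gem5.fast  # skip LTO entirely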
    # Add the appropriate Link-Time Optimization (LTO) flags
    # unless LTO is explicitly turned off. Note that these flags
    # are only used by the fast target.
    if not GetOption('no_lto'):
        # Pass the LTO flag when compiling to produce GIMPLE
        # output, we merely create the flags here and only append
        # them later
        main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

        # Use the same amount of jobs for LTO as we are running
        # scons with
        main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
                                  '-fno-builtin-realloc', '-fno-builtin-free'])

elif main['CLANG']:
    # Check for a supported version of clang, >= 3.1 is needed to
    # support similar features as gcc 4.7. See
    # http://clang.llvm.org/cxx_status.html for details
    clang_version_re = re.compile(".* version (\d+\.\d+)")
    clang_version_match = clang_version_re.search(CXX_version)
    if (clang_version_match):
        clang_version = clang_version_match.groups()[0]
        if compareVersions(clang_version, "3.1") < 0:
            print 'Error: clang version 3.1 or newer required.'
            print '       Installed version:', clang_version
            Exit(1)
    else:
        print 'Error: Unable to determine clang version.'
        Exit(1)

    # clang has a few additional warnings that we disable, extraneous
    # parentheses are allowed due to Ruby's printing of the AST,
    # finally self assignments are allowed as the generated CPU code
    # is relying on this
    main.Append(CCFLAGS=['-Wno-parentheses',
                         '-Wno-self-assign',
                         # Some versions of libstdc++ (4.8?) seem to
                         # use struct hash and class hash
                         # interchangeably.
                         '-Wno-mismatched-tags',
                         ])

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])

    # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
    # opposed to libstdc++, as the latter is dated.
    if sys.platform == "darwin":
        main.Append(CXXFLAGS=['-stdlib=libc++'])
        main.Append(LIBS=['c++'])

else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
              termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use a compiler other than GCC"
    print "       or clang, there appears to be something wrong with your"
    print "       environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to fix SConstruct and "
    print "       src/SConscript to support that compiler."
    Exit(1)

# Set up common yacc/bison flags (needed for Ruby)
main['YACCFLAGS'] = '-d'
main['YACCHXXFILESUFFIX'] = '.hh'

# Do this after we save setting back, or else we'll tack on an
# extra 'qdo' every time we run scons.
if main['BATCH']:
    main['CC']     = main['BATCH_CMD'] + ' ' + main['CC']
    main['CXX']    = main['BATCH_CMD'] + ' ' + main['CXX']
    main['AS']     = main['BATCH_CMD'] + ' ' + main['AS']
    main['AR']     = main['BATCH_CMD'] + ' ' + main['AR']
    main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']
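# Illustrative use of the batch-pool variables handled above:
#   % scons BATCH=True BATCH_CMD=qdo build/X86/gem5.opt
# which prefixes the compile, assemble, archive and link commands with 'qdo'.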
if sys.platform == 'cygwin':
    # cygwin has some header file issues...
    main.Append(CCFLAGS=["-Wno-uninitialized"])

# Check for the protobuf compiler
protoc_version = readCommand([main['PROTOC'], '--version'],
                             exception='').split()

# First two words should be "libprotoc x.y.z"
if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
    print termcap.Yellow + termcap.Bold + \
        'Warning: Protocol buffer compiler (protoc) not found.\n' + \
        '         Please install protobuf-compiler for tracing support.' + \
        termcap.Normal
    main['PROTOC'] = False
else:
    # Based on the availability of the compress stream wrappers,
    # require 2.1.0
    min_protoc_version = '2.1.0'
    if compareVersions(protoc_version[1], min_protoc_version) < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: protoc version', min_protoc_version, \
            'or newer required.\n' + \
            '         Installed version:', protoc_version[1], \
            termcap.Normal
        main['PROTOC'] = False
    else:
        # Attempt to determine the appropriate include path and
        # library path using pkg-config, that means we also need to
        # check for pkg-config. Note that it is possible to use
        # protobuf without the involvement of pkg-config. Later on we
        # do a library config check and at that point the test
        # will fail if libprotobuf cannot be found.
        if readCommand(['pkg-config', '--version'], exception=''):
            try:
                # Attempt to establish what linking flags to add for protobuf
                # using pkg-config
                main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
            except:
                print termcap.Yellow + termcap.Bold + \
                    'Warning: pkg-config could not get protobuf flags.' + \
                    termcap.Normal

# Check for SWIG
if not main.has_key('SWIG'):
    print 'Error: SWIG utility not found.'
    print '       Please install (see http://www.swig.org) and retry.'
    Exit(1)

# Check for appropriate SWIG version
swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
# First 3 words should be "SWIG Version x.y.z"
if len(swig_version) < 3 or \
        swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
    print 'Error determining SWIG version.'
    Exit(1)

min_swig_version = '2.0.4'
if compareVersions(swig_version[2], min_swig_version) < 0:
    print 'Error: SWIG version', min_swig_version, 'or newer required.'
    print '       Installed version:', swig_version[2]
    Exit(1)

# Check for known incompatibilities. The standard library shipped with
# gcc >= 4.9 does not play well with swig versions prior to 3.0
if main['GCC'] and compareVersions(gcc_version, '4.9') >= 0 and \
        compareVersions(swig_version[2], '3.0') < 0:
    print termcap.Yellow + termcap.Bold + \
        'Warning: This combination of gcc and swig has' + \
        ' known incompatibilities.\n' + \
        '         If you encounter build problems, please update ' + \
        'swig to 3.0 or later.' + \
        termcap.Normal

# Set up SWIG flags & scanner
swig_flags = Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
main.Append(SWIGFLAGS=swig_flags)
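# The version probes above roughly correspond to running (illustrative
# output shown):
#   % swig -version     -> "SWIG Version 2.0.11 ..."   (needs >= 2.0.4)
#   % protoc --version  -> "libprotoc 2.5.0"           (needs >= 2.1.0)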
# Check for 'timeout' from GNU coreutils. If present, regressions will
# be run with a time limit. We require version 8.13 since we rely on
# support for the '--foreground' option.
timeout_lines = readCommand(['timeout', '--version'],
                            exception='').splitlines()
# Get the first line and tokenize it
timeout_version = timeout_lines[0].split() if timeout_lines else []
main['TIMEOUT'] = timeout_version and \
    compareVersions(timeout_version[-1], '8.13') >= 0

# filter out all existing swig scanners, they mess up the dependency
# stuff for some reason
scanners = []
for scanner in main['SCANNERS']:
    skeys = scanner.skeys
    if skeys == '.i':
        continue

    if isinstance(skeys, (list, tuple)) and '.i' in skeys:
        continue

    scanners.append(scanner)

# add the new swig scanner that we like better
from SCons.Scanner import ClassicCPP as CPPScanner
swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))

# replace the scanners list that has what we want
main['SCANNERS'] = scanners

# Add a custom Check function to test for structure members.
def CheckMember(context, include, decl, member, include_quotes="<>"):
    context.Message("Checking for member %s in %s..." %
                    (member, decl))
    text = """
#include %(header)s
int main(){
  %(decl)s test;
  (void)test.%(member)s;
  return 0;
};
""" % { "header" : include_quotes[0] + include + include_quotes[1],
        "decl" : decl,
        "member" : member,
        }

    ret = context.TryCompile(text, extension=".cc")
    context.Result(ret)
    return ret

# Platform-specific configuration.  Note again that we assume that all
# builds under a given build root run on the same host platform.
conf = Configure(main,
                 conf_dir = joinpath(build_root, '.scons_config'),
                 log_file = joinpath(build_root, 'scons_config.log'),
                 custom_tests = {
                     'CheckMember' : CheckMember,
                 })

# Check if we should compile a 64 bit binary on Mac OS X/Darwin
try:
    import platform
    uname = platform.uname()
    if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
        if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
            main.Append(CCFLAGS=['-arch', 'x86_64'])
            main.Append(CFLAGS=['-arch', 'x86_64'])
            main.Append(LINKFLAGS=['-arch', 'x86_64'])
            main.Append(ASFLAGS=['-arch', 'x86_64'])
except:
    pass

# Recent versions of scons substitute a "Null" object for Configure()
# when configuration isn't necessary, e.g., if the "--help" option is
# present.  Unfortunately this Null object always returns false,
# breaking all our configuration checks.  We replace it with our own
# more optimistic null object that returns True instead.
if not conf:
    def NullCheck(*args, **kwargs):
        return True

    class NullConf:
        def __init__(self, env):
            self.env = env
        def Finish(self):
            return self.env
        def __getattr__(self, mname):
            return NullCheck

    conf = NullConf(main)

# Cache build files in the supplied directory.
if main['M5_BUILD_CACHE']:
    print 'Using build cache located at', main['M5_BUILD_CACHE']
    CacheDir(main['M5_BUILD_CACHE'])
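# The object cache is enabled per build root through the sticky variable
# above, e.g. (illustrative):
#   % scons M5_BUILD_CACHE=/path/to/shared/cache build/X86/gem5.opt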
if not GetOption('without_python'):
    # Find Python include and library directories for embedding the
    # interpreter. We rely on python-config to resolve the appropriate
    # includes and linker flags. ParseConfig does not seem to understand
    # the more exotic linker flags such as -Xlinker and -export-dynamic so
    # we add them explicitly below. If you want to link in an alternate
    # version of python, see above for instructions on how to invoke
    # scons with the appropriate PATH set.
    #
    # First we check if python2-config exists, else we use python-config
    python_config = readCommand(['which', 'python2-config'],
                                exception='').strip()
    if not os.path.exists(python_config):
        python_config = readCommand(['which', 'python-config'],
                                    exception='').strip()
    py_includes = readCommand([python_config, '--includes'],
                              exception='').split()
    # Strip the -I from the include folders before adding them to the
    # CPPPATH
    main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))

    # Read the linker flags and split them into libraries and other link
    # flags. The libraries are added later through the call to CheckLib.
    py_ld_flags = readCommand([python_config, '--ldflags'],
                              exception='').split()
    py_libs = []
    for lib in py_ld_flags:
        if not lib.startswith('-l'):
            main.Append(LINKFLAGS=[lib])
        else:
            lib = lib[2:]
            if lib not in py_libs:
                py_libs.append(lib)

    # verify that this stuff works
    if not conf.CheckHeader('Python.h', '<>'):
        print "Error: can't find Python.h header in", py_includes
        print "Install Python headers (package python-dev on Ubuntu and RedHat)"
        Exit(1)

    for lib in py_libs:
        if not conf.CheckLib(lib):
            print "Error: can't find library %s required by python" % lib
            Exit(1)
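# To embed a non-default Python, put its 'python' and 'python-config' first
# on PATH when invoking scons, e.g. (illustrative):
#   % PATH=/opt/python2.7/bin:$PATH scons build/X86/gem5.opt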
# On Solaris you need to use libsocket for socket ops
if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
    if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++',
                                   'accept(0,0,0);'):
        print "Can't find library with socket calls (e.g. accept())"
        Exit(1)

# Check for zlib.  If the check passes, libz will be automatically
# added to the LIBS environment variable.
if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++', 'zlibVersion();'):
    print 'Error: did not find needed zlib compression library '\
          'and/or zlib.h header file.'
    print '       Please install zlib and try again.'
    Exit(1)

# If we have the protobuf compiler, also make sure we have the
# development libraries. If the check passes, libprotobuf will be
# automatically added to the LIBS environment variable. After
# this, we can use the HAVE_PROTOBUF flag to determine if we have
# got both protoc and libprotobuf available.
main['HAVE_PROTOBUF'] = main['PROTOC'] and \
    conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
                            'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')

# If we have the compiler but not the library, print another warning.
if main['PROTOC'] and not main['HAVE_PROTOBUF']:
    print termcap.Yellow + termcap.Bold + \
        'Warning: did not find protocol buffer library and/or headers.\n' + \
        '         Please install libprotobuf-dev for tracing support.' + \
        termcap.Normal

# Check for librt.
have_posix_clock = \
    conf.CheckLibWithHeader(None, 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);') or \
    conf.CheckLibWithHeader('rt', 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);')

have_posix_timers = \
    conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
                            'timer_create(CLOCK_MONOTONIC, NULL, NULL);')

if not GetOption('without_tcmalloc'):
    if conf.CheckLib('tcmalloc'):
        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
    elif conf.CheckLib('tcmalloc_minimal'):
        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
    else:
        print termcap.Yellow + termcap.Bold + \
              "You can get a 12% performance improvement by " \
              "installing tcmalloc (libgoogle-perftools-dev package " \
              "on Ubuntu or RedHat)." + termcap.Normal


# Detect back trace implementations. The last implementation in the
# list will be used by default.
backtrace_impls = [ "none" ]

if conf.CheckLibWithHeader(None, 'execinfo.h', 'C',
                           'backtrace_symbols_fd((void*)0, 0, 0);'):
    backtrace_impls.append("glibc")

if backtrace_impls[-1] == "none":
    default_backtrace_impl = "none"
    print termcap.Yellow + termcap.Bold + \
        "No suitable back trace implementation found." + \
        termcap.Normal

if not have_posix_clock:
    print "Can't find library for POSIX clocks."

# Check for <fenv.h> (C99 FP environment control)
have_fenv = conf.CheckHeader('fenv.h', '<>')
if not have_fenv:
    print "Warning: Header file <fenv.h> not found."
    print "         This host has no IEEE FP rounding mode control."

# Check if we should enable KVM-based hardware virtualization. The API
# we rely on exists since version 2.6.36 of the kernel, but somehow
# the KVM_API_VERSION does not reflect the change. We test for one of
# the types as a fall back.
have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
if not have_kvm:
    print "Info: Compatible header file <linux/kvm.h> not found, " \
        "disabling KVM support."

# x86 needs support for xsave. We test for the structure here since we
# won't be able to run new tests by the time we know which ISA we're
# targeting.
have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
                                    '#include <linux/kvm.h>') != 0

# Check if the requested target ISA is compatible with the host
def is_isa_kvm_compatible(isa):
    try:
        import platform
        host_isa = platform.machine()
    except:
        print "Warning: Failed to determine host ISA."
        return False

    if not have_posix_timers:
        print "Warning: Can not enable KVM, host seems to lack support " \
            "for POSIX timers"
        return False

    if isa == "arm":
        return host_isa in ( "armv7l", "aarch64" )
    elif isa == "x86":
        if host_isa != "x86_64":
            return False

        if not have_kvm_xsave:
            print "KVM on x86 requires xsave support in kernel headers."
            return False

        return True
    else:
        return False
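# Illustrative outcomes of the check above on common hosts:
#   'x86' on an x86_64 host with xsave and POSIX timers -> True
#   'arm' on an armv7l or aarch64 host with POSIX timers -> True
#   any other ISA/host combination -> False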
# Check if the exclude_host attribute is available. We want this to
# get accurate instruction counts in KVM.
main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
    'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')


######################################################################
#
# Finish the configuration
#
main = conf.Finish()

######################################################################
#
# Collect all non-global variables
#

# Define the universe of supported ISAs
all_isa_list = [ ]
all_gpu_isa_list = [ ]
Export('all_isa_list')
Export('all_gpu_isa_list')

class CpuModel(object):
    '''The CpuModel class encapsulates everything the ISA parser needs to
    know about a particular CPU model.'''

    # Dict of available CPU model objects.  Accessible as CpuModel.dict.
    dict = {}

    # Constructor.  Automatically adds models to CpuModel.dict.
    def __init__(self, name, default=False):
        self.name = name           # name of model

        # This cpu is enabled by default
        self.default = default

        # Add self to dict
        if name in CpuModel.dict:
            raise AttributeError, "CpuModel '%s' already registered" % name
        CpuModel.dict[name] = self

Export('CpuModel')

# Sticky variables get saved in the variables file so they persist from
# one invocation to the next (unless overridden, in which case the new
# value becomes sticky).
sticky_vars = Variables(args=ARGUMENTS)
Export('sticky_vars')

# Sticky variables that should be exported
export_vars = []
Export('export_vars')

# For Ruby
all_protocols = []
Export('all_protocols')
protocol_dirs = []
Export('protocol_dirs')
slicc_includes = []
Export('slicc_includes')
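# A SConsopts script typically Imports one of the names exported above and
# appends to it; a rough sketch (illustrative, not a copy of any real file):
#   Import('all_isa_list')
#   all_isa_list.append('mips')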
# Walk the tree and execute all SConsopts scripts that will add to the
# above variables
if GetOption('verbose'):
    print "Reading SConsopts"
for bdir in [ base_dir ] + extras_dir_list:
    if not isdir(bdir):
        print "Error: directory '%s' does not exist" % bdir
        Exit(1)
    for root, dirs, files in os.walk(bdir):
        if 'SConsopts' in files:
            if GetOption('verbose'):
                print "Reading", joinpath(root, 'SConsopts')
            SConscript(joinpath(root, 'SConsopts'))

all_isa_list.sort()
all_gpu_isa_list.sort()

sticky_vars.AddVariables(
    EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
    EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail',
                 all_gpu_isa_list),
    ListVariable('CPU_MODELS', 'CPU models',
                 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
                 sorted(CpuModel.dict.keys())),
    BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
                 False),
    BoolVariable('SS_COMPATIBLE_FP',
                 'Make floating-point results compatible with SimpleScalar',
                 False),
    BoolVariable('USE_SSE2',
                 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
                 False),
    BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
    BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
    BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability',
                 False),
    BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models',
                 have_kvm),
    BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
    EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
                 all_protocols),
    EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
                 backtrace_impls[-1], backtrace_impls)
    )

# These variables get exported to #defines in config/*.hh (see src/SConscript).
export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
                'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'PROTOCOL',
                'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST']

###################################################
#
# Define a SCons builder for configuration flag headers.
#
###################################################

# This function generates a config header file that #defines the
# variable symbol to the current variable setting (0 or 1).  The source
# operands are the name of the variable and a Value node containing the
# value of the variable.
def build_config_file(target, source, env):
    (variable, value) = [s.get_contents() for s in source]
    f = file(str(target[0]), 'w')
    print >> f, '#define', variable, value
    f.close()
    return None

# Combine the two functions into a scons Action object.
config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))

# The emitter munges the source & target node lists to reflect what
# we're really doing.
def config_emitter(target, source, env):
    # extract variable name from Builder arg
    variable = str(target[0])
    # True target is config header file
    target = joinpath('config', variable.lower() + '.hh')
    val = env[variable]
    if isinstance(val, bool):
        # Force value to 0/1
        val = int(val)
    elif isinstance(val, str):
        val = '"' + val + '"'

    # Sources are variable name & value (packaged in SCons Value nodes)
    return ([target], [Value(variable), Value(val)])

config_builder = Builder(emitter = config_emitter, action = config_action)

main.Append(BUILDERS = { 'ConfigFile' : config_builder })
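# The ConfigFile builder above turns a variable such as USE_KVM=True into a
# one-line header, roughly (illustrative):
#   config/use_kvm.hh:  #define USE_KVM 1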
# libelf build is shared across all configs in the build root.
main.SConscript('ext/libelf/SConscript',
                variant_dir = joinpath(build_root, 'libelf'))

# iostream3 build is shared across all configs in the build root.
main.SConscript('ext/iostream3/SConscript',
                variant_dir = joinpath(build_root, 'iostream3'))

# libfdt build is shared across all configs in the build root.
main.SConscript('ext/libfdt/SConscript',
                variant_dir = joinpath(build_root, 'libfdt'))

# fputils build is shared across all configs in the build root.
main.SConscript('ext/fputils/SConscript',
                variant_dir = joinpath(build_root, 'fputils'))

# DRAMSim2 build is shared across all configs in the build root.
main.SConscript('ext/dramsim2/SConscript',
                variant_dir = joinpath(build_root, 'dramsim2'))

# DRAMPower build is shared across all configs in the build root.
main.SConscript('ext/drampower/SConscript',
                variant_dir = joinpath(build_root, 'drampower'))

# nomali build is shared across all configs in the build root.
main.SConscript('ext/nomali/SConscript',
                variant_dir = joinpath(build_root, 'nomali'))

###################################################
#
# This function is used to set up a directory with switching headers
#
###################################################

main['ALL_ISA_LIST'] = all_isa_list
main['ALL_GPU_ISA_LIST'] = all_gpu_isa_list
all_isa_deps = {}
def make_switching_dir(dname, switch_headers, env):
    # Generate the header.  target[0] is the full path of the output
    # header to generate.  'source' is a dummy variable, since we get the
    # list of ISAs from env['ALL_ISA_LIST'].
    def gen_switch_hdr(target, source, env):
        fname = str(target[0])
        isa = env['TARGET_ISA'].lower()
        try:
            f = open(fname, 'w')
            print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
            f.close()
        except IOError:
            print "Failed to create %s" % fname
            raise

    # Build SCons Action object. 'varlist' specifies env vars that this
    # action depends on; when env['ALL_ISA_LIST'] changes these actions
    # should get re-executed.
    switch_hdr_action = MakeAction(gen_switch_hdr,
                          Transform("GENERATE"), varlist=['ALL_ISA_LIST'])

    # Instantiate actions for each header
    for hdr in switch_headers:
        env.Command(hdr, [], switch_hdr_action)

    isa_target = Dir('.').up().name.lower().replace('_', '-')
    env['PHONY_BASE'] = '#' + isa_target
    all_isa_deps[isa_target] = None

Export('make_switching_dir')

def make_gpu_switching_dir(dname, switch_headers, env):
    # Generate the header.  target[0] is the full path of the output
    # header to generate.  'source' is a dummy variable, since we get the
    # list of GPU ISAs from env['ALL_GPU_ISA_LIST'].
    def gen_switch_hdr(target, source, env):
        fname = str(target[0])

        isa = env['TARGET_GPU_ISA'].lower()

        try:
            f = open(fname, 'w')
            print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
            f.close()
        except IOError:
            print "Failed to create %s" % fname
            raise

    # Build SCons Action object. 'varlist' specifies env vars that this
    # action depends on; when env['ALL_GPU_ISA_LIST'] changes these actions
    # should get re-executed.
    switch_hdr_action = MakeAction(gen_switch_hdr,
                          Transform("GENERATE"), varlist=['ALL_GPU_ISA_LIST'])

    # Instantiate actions for each header
    for hdr in switch_headers:
        env.Command(hdr, [], switch_hdr_action)

Export('make_gpu_switching_dir')
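# A generated switching header is a single include that redirects to the
# configured ISA; roughly, for an illustrative header name:
#   build/<CONFIG>/arch/isa_traits.hh:  #include "arch/x86/isa_traits.hh"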
# all-isas -> all-deps -> all-environs -> all_targets
main.Alias('#all-isas', [])
main.Alias('#all-deps', '#all-isas')

# Dummy target to ensure all environments are created before telling
# SCons what to actually make (the command line arguments).  We attach
# them to the dependence graph after the environments are complete.
ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; gets closure to work.
def environsComplete(target, source, env):
    for t in ORIG_BUILD_TARGETS:
        main.Depends('#all-targets', t)

# Each build/* switching_dir attaches its *-environs target to #all-environs.
main.Append(BUILDERS = {'CompleteEnvirons' :
                        Builder(action=MakeAction(environsComplete, None))})
main.CompleteEnvirons('#all-environs', [])

def doNothing(**ignored): pass
main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})

# The final target to which all the original targets ultimately get attached.
main.Dummy('#all-targets', '#all-environs')
BUILD_TARGETS[:] = ['#all-targets']

###################################################
#
# Define build environments for selected configurations.
#
###################################################

for variant_path in variant_paths:
    if not GetOption('silent'):
        print "Building in", variant_path

    # Make a copy of the build-root environment to use for this config.
    env = main.Clone()
    env['BUILDDIR'] = variant_path

    # variant_dir is the tail component of build path, and is used to
    # determine the build parameters (e.g., 'ALPHA_SE')
    (build_root, variant_dir) = splitpath(variant_path)

    # Set env variables according to the build directory config.
    sticky_vars.files = []
    # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
    # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
    # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
    current_vars_file = joinpath(build_root, 'variables', variant_dir)
    if isfile(current_vars_file):
        sticky_vars.files.append(current_vars_file)
        if not GetOption('silent'):
            print "Using saved variables file %s" % current_vars_file
    else:
        # Build dir-specific variables file doesn't exist.

        # Make sure the directory is there so we can create it later
        opt_dir = dirname(current_vars_file)
        if not isdir(opt_dir):
            mkdir(opt_dir)

        # Get default build variables from source tree.  Variables are
        # normally determined by name of $VARIANT_DIR, but can be
        # overridden by '--default=' arg on command line.
        default = GetOption('default')
        opts_dir = joinpath(main.root.abspath, 'build_opts')
        if default:
            default_vars_files = [joinpath(build_root, 'variables', default),
                                  joinpath(opts_dir, default)]
        else:
            default_vars_files = [joinpath(opts_dir, variant_dir)]
        existing_files = filter(isfile, default_vars_files)
        if existing_files:
            default_vars_file = existing_files[0]
            sticky_vars.files.append(default_vars_file)
            print "Variables file %s not found,\n  using defaults in %s" \
                  % (current_vars_file, default_vars_file)
        else:
            print "Error: cannot find variables file %s or " \
                  "default file(s) %s" \
                  % (current_vars_file, ' or '.join(default_vars_files))
            Exit(1)

    # Apply current variable settings to env
    sticky_vars.Update(env)

    help_texts["local_vars"] += \
        "Build variables for %s:\n" % variant_dir \
        + sticky_vars.GenerateHelpText(env)

    # Process variable settings.

    if not have_fenv and env['USE_FENV']:
        print "Warning: <fenv.h> not available; " \
              "forcing USE_FENV to False in", variant_dir + "."
        env['USE_FENV'] = False

    if not env['USE_FENV']:
        print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
        print "         FP results may deviate slightly from other platforms."

    if env['EFENCE']:
        env.Append(LIBS=['efence'])

    if env['USE_KVM']:
        if not have_kvm:
            print "Warning: Can not enable KVM, host seems to lack KVM support"
            env['USE_KVM'] = False
        elif not is_isa_kvm_compatible(env['TARGET_ISA']):
            print "Info: KVM support disabled due to unsupported host and " \
                  "target ISA combination"
            env['USE_KVM'] = False

    # Warn about missing optional functionality
    if env['USE_KVM']:
        if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
            print "Warning: perf_event headers lack support for the " \
                  "exclude_host attribute. KVM instruction counts will " \
                  "be inaccurate."

    # Save sticky variable settings back to current variables file
    sticky_vars.Save(current_vars_file, env)

    if env['USE_SSE2']:
        env.Append(CCFLAGS=['-msse2'])

    # The src/SConscript file sets up the build rules in 'env' according
    # to the configured variables.  It returns a list of environments,
    # one for each variant build (debug, opt, etc.)
    SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')
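# Per-config sticky variables live in <build_root>/variables/<CONFIG> and are
# set like the global ones, e.g. (illustrative values):
#   % scons build/X86/gem5.opt CPU_MODELS=AtomicSimpleCPU,O3CPU
#   % scons --default=X86 build/MYCONFIG/gem5.opt  # seed a config from build_opts/X86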
def pairwise(iterable):
    "s -> (s0,s1), (s1,s2), (s2, s3), ..."
    a, b = itertools.tee(iterable)
    b.next()
    return itertools.izip(a, b)

# Create false dependencies so SCons will parse ISAs, establish
# dependencies, and setup the build Environments serially. Either
# SCons (likely) and/or our SConscripts (possibly) cannot cope with -j
# greater than 1. It appears to be standard race condition stuff; it
# doesn't always fail, but usually, and the behaviors are different.
# Every time I tried to remove this, builds would fail in some
# creative new way. So, don't do that. You'll want to, though, because
# tests/SConscript takes a long time to make its Environments.
for t1, t2 in pairwise(sorted(all_isa_deps.iterkeys())):
    main.Depends('#%s-deps'     % t2, '#%s-deps'     % t1)
    main.Depends('#%s-environs' % t2, '#%s-environs' % t1)

# base help text
Help('''
Usage: scons [scons options] [build variables] [target(s)]

Extra scons options:
%(options)s

Global build variables:
%(global_vars)s

%(local_vars)s
''' % help_texts)