SConstruct revision 9589:d2fbb792a8a0
# -*- mode:python -*-

# Copyright (c) 2011 Advanced Micro Devices, Inc.
# Copyright (c) 2009 The Hewlett-Packard Development Company
# Copyright (c) 2004-2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
#          Nathan Binkert

###################################################
#
# SCons top-level build description (SConstruct) file.
#
# While in this directory ('gem5'), just type 'scons' to build the default
# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
# the optimized full-system version).
#
# You can build gem5 in a different directory as long as there is a
# 'build/<CONFIG>' somewhere along the target path.  The build system
# expects that all configs under the same build directory are being
# built for the same host system.
#
# Examples:
#
#   The following two commands are equivalent.  The '-u' option tells
#   scons to search up the directory tree for this SConstruct file.
#   % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
#   % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
#
#   The following two commands are equivalent and demonstrate building
#   in a directory outside of the source tree.  The '-C' option tells
#   scons to chdir to the specified directory to find this SConstruct
#   file.
#   % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
#   % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
#
# You can use 'scons -H' to print scons options.  If you're in this
# 'gem5' directory (or use -u or -C to tell scons where to find this
# file), you can use 'scons -h' to print all the gem5-specific build
# options as well.
#
###################################################

# Check for recent-enough Python and SCons versions.
try:
    # Really old versions of scons only take two options for the
    # function, so check once without the revision and once with the
    # revision, the first instance will fail for stuff other than
    # 0.98, and the second will fail for 0.98.0
    EnsureSConsVersion(0, 98)
    EnsureSConsVersion(0, 98, 1)
except SystemExit, e:
    print """
For more details, see:
    http://gem5.org/Dependencies
"""
    raise

# We ensure the python version early because we have stuff that
# requires python 2.4
try:
    EnsurePythonVersion(2, 4)
except SystemExit, e:
    print """
You can use a non-default installation of the Python interpreter by
either (1) rearranging your PATH so that scons finds the non-default
'python' first or (2) explicitly invoking an alternative interpreter
on the scons script.

For more details, see:
    http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
"""
    raise

# Global Python includes
import os
import re
import subprocess
import sys

from os import mkdir, environ
from os.path import abspath, basename, dirname, expanduser, normpath
from os.path import exists, isdir, isfile
from os.path import join as joinpath, split as splitpath

# SCons includes
import SCons
import SCons.Node

extra_python_paths = [
    Dir('src/python').srcnode().abspath, # gem5 includes
    Dir('ext/ply').srcnode().abspath,    # ply is used by several files
    ]

sys.path[1:1] = extra_python_paths

from m5.util import compareVersions, readCommand
from m5.util.terminal import get_termcap

help_texts = {
    "options" : "",
    "global_vars" : "",
    "local_vars" : ""
}

Export("help_texts")


# There's a bug in scons in that (1) by default, the help texts from
# AddOption() are supposed to be displayed when you type 'scons -h',
# and (2) you can override the help displayed by 'scons -h' using the
# Help() function, but these two features are incompatible: once
# you've overridden the help text using Help(), there's no way to get
# at the help texts from AddOption().  See:
#     http://scons.tigris.org/issues/show_bug.cgi?id=2356
#     http://scons.tigris.org/issues/show_bug.cgi?id=2611
# This hack lets us extract the help text from AddOption() and
# re-inject it via Help().  Ideally someday this bug will be fixed and
# we can just use AddOption directly.
def AddLocalOption(*args, **kwargs):
    col_width = 30

    help = "  " + ", ".join(args)
    if "help" in kwargs:
        length = len(help)
        if length >= col_width:
            help += "\n" + " " * col_width
        else:
            help += " " * (col_width - length)
        help += kwargs["help"]
    help_texts["options"] += help + "\n"

    AddOption(*args, **kwargs)

AddLocalOption('--colors', dest='use_colors', action='store_true',
               help="Add color to abbreviated scons output")
AddLocalOption('--no-colors', dest='use_colors', action='store_false',
               help="Don't add color to abbreviated scons output")
AddLocalOption('--default', dest='default', type='string', action='store',
               help='Override which build_opts file to use for defaults')
AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
               help='Disable style checking hooks')
AddLocalOption('--no-lto', dest='no_lto', action='store_true',
               help='Disable Link-Time Optimization for fast')
AddLocalOption('--update-ref', dest='update_ref', action='store_true',
               help='Update test reference outputs')
AddLocalOption('--verbose', dest='verbose', action='store_true',
               help='Print full tool command lines')

termcap = get_termcap(GetOption('use_colors'))

########################################################################
#
# Set up the main build environment.
#
########################################################################
use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
                 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PYTHONPATH',
                 'RANLIB', 'SWIG' ])

use_prefixes = [
    "M5",           # M5 configuration (e.g., path to kernels)
    "DISTCC_",      # distcc (distributed compiler wrapper) configuration
    "CCACHE_",      # ccache (caching compiler wrapper) configuration
    "CCC_",         # clang static analyzer configuration
    ]

use_env = {}
for key,val in os.environ.iteritems():
    if key in use_vars or \
            any([key.startswith(prefix) for prefix in use_prefixes]):
        use_env[key] = val

main = Environment(ENV=use_env)
main.Decider('MD5-timestamp')
main.root = Dir(".")         # The current directory (where this file lives).
main.srcdir = Dir("src")     # The source directory

main_dict_keys = main.Dictionary().keys()

# Check that we have a C/C++ compiler
if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
    print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
    Exit(1)

# Check that swig is present
if not 'SWIG' in main_dict_keys:
    print "swig is not installed (package swig on Ubuntu and RedHat)"
    Exit(1)

# Add useful python code to PYTHONPATH so it can be used by subprocesses
# as well
main.AppendENVPath('PYTHONPATH', extra_python_paths)

########################################################################
#
# Mercurial Stuff.
#
# If the gem5 directory is a mercurial repository, we should do some
# extra things.
#
########################################################################

hgdir = main.root.Dir(".hg")

mercurial_style_message = """
You're missing the gem5 style hook, which automatically checks your code
against the gem5 style rules on hg commit and qrefresh commands.  This
script will now install the hook in your .hg/hgrc file.
Press enter to continue, or ctrl-c to abort: """

mercurial_style_hook = """
# The following lines were automatically added by gem5/SConstruct
# to provide the gem5 style-checking hooks
[extensions]
style = %s/util/style.py

[hooks]
pretxncommit.style = python:style.check_style
pre-qrefresh.style = python:style.check_style
# End of SConstruct additions

""" % (main.root.abspath)

mercurial_lib_not_found = """
Mercurial libraries cannot be found, ignoring style hook.  If
you are a gem5 developer, please fix this and run the style
hook.  It is important.
"""

# Check for style hook and prompt for installation if it's not there.
# Skip this if --ignore-style was specified, there's no .hg dir to
# install a hook in, or there's no interactive terminal to prompt.
if not GetOption('ignore_style') and hgdir.exists() and sys.stdin.isatty():
    style_hook = True
    try:
        from mercurial import ui
        ui = ui.ui()
        ui.readconfig(hgdir.File('hgrc').abspath)
        style_hook = ui.config('hooks', 'pretxncommit.style', None) and \
                     ui.config('hooks', 'pre-qrefresh.style', None)
    except ImportError:
        print mercurial_lib_not_found

    if not style_hook:
        print mercurial_style_message,
        # continue unless user does ctrl-c/ctrl-d etc.
        try:
            raw_input()
        except:
            print "Input exception, exiting scons.\n"
            sys.exit(1)
        hgrc_path = '%s/.hg/hgrc' % main.root.abspath
        print "Adding style hook to", hgrc_path, "\n"
        try:
            hgrc = open(hgrc_path, 'a')
            hgrc.write(mercurial_style_hook)
            hgrc.close()
        except:
            print "Error updating", hgrc_path
            sys.exit(1)


###################################################
#
# Figure out which configurations to set up based on the path(s) of
# the target(s).
#
###################################################

# Find default configuration & binary.
Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))

# helper function: find last occurrence of element in list
def rfind(l, elt, offs = -1):
    for i in range(len(l)+offs, 0, -1):
        if l[i] == elt:
            return i
    raise ValueError, "element not found"

# Take a list of paths (or SCons Nodes) and return a list with all
# paths made absolute and ~-expanded.  Paths will be interpreted
# relative to the launch directory unless a different root is provided.
def makePathListAbsolute(path_list, root=GetLaunchDir()):
    return [abspath(joinpath(root, expanduser(str(p))))
            for p in path_list]

# Each target must have 'build' in the interior of the path; the
# directory below this will determine the build parameters.  For
# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
# recognize that ALPHA_SE specifies the configuration because it
# follows 'build' in the build path.

# The funky assignment to "[:]" is needed to replace the list contents
# in place rather than reassign the symbol to a new list, which
# doesn't work (obviously!).
BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)

# Generate a list of the unique build roots and configs that the
# collected targets reference.
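# For example (illustrative), an absolute target ending in
# .../build/ALPHA/gem5.debug yields the build root .../build and the
# variant path .../build/ALPHA.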
variant_paths = []
build_root = None
for t in BUILD_TARGETS:
    path_dirs = t.split('/')
    try:
        build_top = rfind(path_dirs, 'build', -2)
    except:
        print "Error: no non-leaf 'build' dir found on target path", t
        Exit(1)
    this_build_root = joinpath('/',*path_dirs[:build_top+1])
    if not build_root:
        build_root = this_build_root
    else:
        if this_build_root != build_root:
            print "Error: build targets not under same build root\n"\
                  "  %s\n  %s" % (build_root, this_build_root)
            Exit(1)
    variant_path = joinpath('/',*path_dirs[:build_top+2])
    if variant_path not in variant_paths:
        variant_paths.append(variant_path)

# Make sure build_root exists (might not if this is the first build there)
if not isdir(build_root):
    mkdir(build_root)
main['BUILDROOT'] = build_root

Export('main')

main.SConsignFile(joinpath(build_root, "sconsign"))

# Default duplicate option is to use hard links, but this messes up
# when you use emacs to edit a file in the target dir, as emacs moves
# file to file~ then copies to file, breaking the link.  Symbolic
# (soft) links work better.
main.SetOption('duplicate', 'soft-copy')

#
# Set up global sticky variables... these are common to an entire build
# tree (not specific to a particular build like ALPHA_SE)
#

global_vars_file = joinpath(build_root, 'variables.global')

global_vars = Variables(global_vars_file, args=ARGUMENTS)

global_vars.AddVariables(
    ('CC', 'C compiler', environ.get('CC', main['CC'])),
    ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
    ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
    ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
    ('BATCH', 'Use batch pool for build and tests', False),
    ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
    ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
    ('EXTRAS', 'Add extra directories to the compilation', '')
    )

# Update main environment with values from ARGUMENTS & global_vars_file
global_vars.Update(main)
help_texts["global_vars"] += global_vars.GenerateHelpText(main)

# Save sticky variable settings back to current variables file
global_vars.Save(global_vars_file, main)

# Parse EXTRAS variable to build list of all directories where we'll
# look for sources etc.  This list is exported as extras_dir_list.
base_dir = main.srcdir.abspath
if main['EXTRAS']:
    extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
else:
    extras_dir_list = []

Export('base_dir')
Export('extras_dir_list')

# the ext directory should be on the #includes path
main.Append(CPPPATH=[Dir('ext')])

def strip_build_path(path, env):
    path = str(path)
    variant_base = env['BUILDROOT'] + os.path.sep
    if path.startswith(variant_base):
        path = path[len(variant_base):]
    elif path.startswith('build/'):
        path = path[6:]
    return path

# Generate a string of the form:
#   common/path/prefix/src1, src2 -> tgt1, tgt2
# to print while building.
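# For example (illustrative), a compile step run through Transform("CXX")
# would be abbreviated to something like:
#   [     CXX] src/base/foo.cc -> .o
# with the tool name right-justified in an 8-character field and the common
# path prefix of the sources and targets factored out.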
class Transform(object):
    # all specific color settings should be here and nowhere else
    tool_color = termcap.Normal
    pfx_color = termcap.Yellow
    srcs_color = termcap.Yellow + termcap.Bold
    arrow_color = termcap.Blue + termcap.Bold
    tgts_color = termcap.Yellow + termcap.Bold

    def __init__(self, tool, max_sources=99):
        self.format = self.tool_color + (" [%8s] " % tool) \
                      + self.pfx_color + "%s" \
                      + self.srcs_color + "%s" \
                      + self.arrow_color + " -> " \
                      + self.tgts_color + "%s" \
                      + termcap.Normal
        self.max_sources = max_sources

    def __call__(self, target, source, env, for_signature=None):
        # truncate source list according to max_sources param
        source = source[0:self.max_sources]
        def strip(f):
            return strip_build_path(str(f), env)
        if len(source) > 0:
            srcs = map(strip, source)
        else:
            srcs = ['']
        tgts = map(strip, target)
        # surprisingly, os.path.commonprefix is a dumb char-by-char string
        # operation that has nothing to do with paths.
        com_pfx = os.path.commonprefix(srcs + tgts)
        com_pfx_len = len(com_pfx)
        if com_pfx:
            # do some cleanup and sanity checking on common prefix
            if com_pfx[-1] == ".":
                # prefix matches all but file extension: ok
                # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
                com_pfx = com_pfx[0:-1]
            elif com_pfx[-1] == "/":
                # common prefix is directory path: OK
                pass
            else:
                src0_len = len(srcs[0])
                tgt0_len = len(tgts[0])
                if src0_len == com_pfx_len:
                    # source is a substring of target, OK
                    pass
                elif tgt0_len == com_pfx_len:
                    # target is a substring of source, need to back up to
                    # avoid empty string on RHS of arrow
                    sep_idx = com_pfx.rfind(".")
                    if sep_idx != -1:
                        com_pfx = com_pfx[0:sep_idx]
                    else:
                        com_pfx = ''
                elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
                    # still splitting at file extension: ok
                    pass
                else:
                    # probably a fluke; ignore it
                    com_pfx = ''
        # recalculate length in case com_pfx was modified
        com_pfx_len = len(com_pfx)
        def fmt(files):
            f = map(lambda s: s[com_pfx_len:], files)
            return ', '.join(f)
        return self.format % (com_pfx, fmt(srcs), fmt(tgts))

Export('Transform')

# enable the regression script to use the termcap
main['TERMCAP'] = termcap

if GetOption('verbose'):
    def MakeAction(action, string, *args, **kwargs):
        return Action(action, *args, **kwargs)
else:
    MakeAction = Action
    main['CCCOMSTR']        = Transform("CC")
    main['CXXCOMSTR']       = Transform("CXX")
    main['ASCOMSTR']        = Transform("AS")
    main['SWIGCOMSTR']      = Transform("SWIG")
    main['ARCOMSTR']        = Transform("AR", 0)
    main['LINKCOMSTR']      = Transform("LINK", 0)
    main['RANLIBCOMSTR']    = Transform("RANLIB", 0)
    main['M4COMSTR']        = Transform("M4")
    main['SHCCCOMSTR']      = Transform("SHCC")
    main['SHCXXCOMSTR']     = Transform("SHCXX")
Export('MakeAction')

# Initialize the Link-Time Optimization (LTO) flags
main['LTO_CCFLAGS'] = []
main['LTO_LDFLAGS'] = []

CXX_version = readCommand([main['CXX'],'--version'], exception=False)
CXX_V = readCommand([main['CXX'],'-V'], exception=False)

main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
if main['GCC'] + main['CLANG'] > 1:
    print 'Error: How can we have two at the same time?'
    Exit(1)

# Set up default C++ compiler flags
if main['GCC'] or main['CLANG']:
    # As gcc and clang share many flags, do the common parts here
    main.Append(CCFLAGS=['-pipe'])
    main.Append(CCFLAGS=['-fno-strict-aliasing'])
    # Enable -Wall and then disable the few warnings that we
    # consistently violate
    main.Append(CCFLAGS=['-Wall', '-Wno-sign-compare', '-Wundef'])
    # We always compile using C++11, but only gcc >= 4.7 and clang 3.1
    # actually use that name, so we stick with c++0x
    main.Append(CXXFLAGS=['-std=c++0x'])
    # Add selected sanity checks from -Wextra
    main.Append(CXXFLAGS=['-Wmissing-field-initializers',
                          '-Woverloaded-virtual'])
else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
              termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use a compiler other than GCC"
    print "       or clang, there appears to be something wrong with your"
    print "       environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to fix SConstruct and"
    print "       src/SConscript to support that compiler."
    Exit(1)

if main['GCC']:
    # Check for a supported version of gcc; >= 4.4 is needed for c++0x
    # support.  See http://gcc.gnu.org/projects/cxx0x.html for details.
    gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
    if compareVersions(gcc_version, "4.4") < 0:
        print 'Error: gcc version 4.4 or newer required.'
        print '       Installed version:', gcc_version
        Exit(1)

    main['GCC_VERSION'] = gcc_version

    # Check for versions with bugs
    if not compareVersions(gcc_version, '4.4.1') or \
       not compareVersions(gcc_version, '4.4.2'):
        print 'Info: Tree vectorizer in GCC 4.4.1 & 4.4.2 is buggy, disabling.'
        main.Append(CCFLAGS=['-fno-tree-vectorize'])

    # LTO support is only really working properly from 4.6 and beyond
    if compareVersions(gcc_version, '4.6') >= 0:
        # Add the appropriate Link-Time Optimization (LTO) flags
        # unless LTO is explicitly turned off.  Note that these flags
        # are only used by the fast target.
        if not GetOption('no_lto'):
            # Pass the LTO flag when compiling to produce GIMPLE
            # output; we merely create the flags here and only append
            # them later.
            main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

            # Use the same number of jobs for LTO as we are running
            # scons with; we hardcode the use of the linker plugin,
            # which requires either gold or GNU ld >= 2.21
            main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs'),
                                   '-fuse-linker-plugin']

elif main['CLANG']:
    # Check for a supported version of clang; >= 2.9 is needed to
    # support similar features as gcc 4.4.  See
    # http://clang.llvm.org/cxx_status.html for details.
    clang_version_re = re.compile(".* version (\d+\.\d+)")
    clang_version_match = clang_version_re.match(CXX_version)
    if (clang_version_match):
        clang_version = clang_version_match.groups()[0]
        if compareVersions(clang_version, "2.9") < 0:
            print 'Error: clang version 2.9 or newer required.'
            print '       Installed version:', clang_version
            Exit(1)
    else:
        print 'Error: Unable to determine clang version.'
        Exit(1)

    # clang has a few additional warnings that we disable:
    # tautological comparisons are allowed due to unsigned integers
    # being compared to constants that happen to be 0, extraneous
    # parentheses are allowed due to Ruby's printing of the AST, and
    # finally self assignments are allowed because the generated CPU
    # code relies on them.
    main.Append(CCFLAGS=['-Wno-tautological-compare',
                         '-Wno-parentheses',
                         '-Wno-self-assign'])

    # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
    # opposed to libstdc++ to make the transition from TR1 to
    # C++11.  See http://libcxx.llvm.org.  However, clang has chosen a
    # strict implementation of the C++11 standard, and does not allow
    # incomplete types in template arguments (besides unique_ptr and
    # shared_ptr), and the libc++ STL containers create problems in
    # combination with the current gem5 code.  For now, we stick with
    # libstdc++ and use the TR1 namespace.
    # if sys.platform == "darwin":
    #     main.Append(CXXFLAGS=['-stdlib=libc++'])

else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
              termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use a compiler other than GCC"
    print "       or clang, there appears to be something wrong with your"
    print "       environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to fix SConstruct and"
    print "       src/SConscript to support that compiler."
    Exit(1)

# Set up common yacc/bison flags (needed for Ruby)
main['YACCFLAGS'] = '-d'
main['YACCHXXFILESUFFIX'] = '.hh'

# Do this after we save settings back, or else we'll tack on an
# extra 'qdo' every time we run scons.
if main['BATCH']:
    main['CC']     = main['BATCH_CMD'] + ' ' + main['CC']
    main['CXX']    = main['BATCH_CMD'] + ' ' + main['CXX']
    main['AS']     = main['BATCH_CMD'] + ' ' + main['AS']
    main['AR']     = main['BATCH_CMD'] + ' ' + main['AR']
    main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']

if sys.platform == 'cygwin':
    # cygwin has some header file issues...
    main.Append(CCFLAGS=["-Wno-uninitialized"])

# Check for the protobuf compiler
protoc_version = readCommand([main['PROTOC'], '--version'],
                             exception='').split()

# First two words should be "libprotoc x.y.z"
if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
    print termcap.Yellow + termcap.Bold + \
        'Warning: Protocol buffer compiler (protoc) not found.\n' + \
        '         Please install protobuf-compiler for tracing support.' + \
        termcap.Normal
    main['PROTOC'] = False
else:
    # Based on the availability of the compress stream wrappers,
    # require 2.1.0
    min_protoc_version = '2.1.0'
    if compareVersions(protoc_version[1], min_protoc_version) < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: protoc version', min_protoc_version, \
            'or newer required.\n' + \
            '         Installed version:', protoc_version[1], \
            termcap.Normal
        main['PROTOC'] = False
    else:
        # Attempt to determine the appropriate include path and
        # library path using pkg-config, which means we also need to
        # check for pkg-config.  Note that it is possible to use
        # protobuf without the involvement of pkg-config.  Later on we
        # do a library config check, and at that point the test will
        # fail if libprotobuf cannot be found.
        if readCommand(['pkg-config', '--version'], exception=''):
            try:
                # Attempt to establish what linking flags to add for
                # protobuf using pkg-config
                main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
            except:
                print termcap.Yellow + termcap.Bold + \
                    'Warning: pkg-config could not get protobuf flags.' + \
                    termcap.Normal

# Check for SWIG
if not main.has_key('SWIG'):
    print 'Error: SWIG utility not found.'
    print '       Please install (see http://www.swig.org) and retry.'
    Exit(1)

# Check for appropriate SWIG version
swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
# First 3 words should be "SWIG Version x.y.z"
if len(swig_version) < 3 or \
        swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
    print 'Error determining SWIG version.'
    Exit(1)

min_swig_version = '1.3.34'
if compareVersions(swig_version[2], min_swig_version) < 0:
    print 'Error: SWIG version', min_swig_version, 'or newer required.'
    print '       Installed version:', swig_version[2]
    Exit(1)

if swig_version[2] == "2.0.9":
    print '\n' + termcap.Yellow + termcap.Bold + \
        'Warning: SWIG version 2.0.9 sometimes generates broken code.\n' + \
        termcap.Normal + \
        'This problem only affects some platforms and some Python\n' + \
        'versions. See the following SWIG bug report for details:\n' + \
        'http://sourceforge.net/p/swig/bugs/1297/\n'


# Set up SWIG flags & scanner
swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
main.Append(SWIGFLAGS=swig_flags)

# filter out all existing swig scanners, they mess up the dependency
# stuff for some reason
scanners = []
for scanner in main['SCANNERS']:
    skeys = scanner.skeys
    if skeys == '.i':
        continue

    if isinstance(skeys, (list, tuple)) and '.i' in skeys:
        continue

    scanners.append(scanner)

# add the new swig scanner that we like better
from SCons.Scanner import ClassicCPP as CPPScanner
swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))

# replace the scanners list that has what we want
main['SCANNERS'] = scanners

# Add a custom Check function to the Configure context so that we can
# figure out if the compiler adds leading underscores to global
# variables.  This is needed for the autogenerated asm files that we
# use for embedding the python code.
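# (Whether a leading underscore is added depends on the target's symbol
# naming convention; for example, Mach-O platforms such as Mac OS X
# typically prepend '_', while most ELF/Linux targets do not.)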
def CheckLeading(context):
    context.Message("Checking for leading underscore in global variables...")
    # 1) Define a global variable called x from asm so the C compiler
    #    won't change the symbol at all.
    # 2) Declare that variable.
    # 3) Use the variable
    #
    # If the compiler prepends an underscore, this will successfully
    # link because the external symbol 'x' will be called '_x' which
    # was defined by the asm statement.  If the compiler does not
    # prepend an underscore, this will not successfully link because
    # '_x' will have been defined by assembly, while the C portion of
    # the code will be trying to use 'x'.
    ret = context.TryLink('''
        asm(".globl _x; _x: .byte 0");
        extern int x;
        int main() { return x; }
        ''', extension=".c")
    context.env.Append(LEADING_UNDERSCORE=ret)
    context.Result(ret)
    return ret

# Platform-specific configuration.  Note again that we assume that all
# builds under a given build root run on the same host platform.
conf = Configure(main,
                 conf_dir = joinpath(build_root, '.scons_config'),
                 log_file = joinpath(build_root, 'scons_config.log'),
                 custom_tests = { 'CheckLeading' : CheckLeading })

# Check for leading underscores.  Don't really need to worry either
# way so don't need to check the return code.
conf.CheckLeading()

# Check if we should compile a 64 bit binary on Mac OS X/Darwin
try:
    import platform
    uname = platform.uname()
    if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
        if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
            main.Append(CCFLAGS=['-arch', 'x86_64'])
            main.Append(CFLAGS=['-arch', 'x86_64'])
            main.Append(LINKFLAGS=['-arch', 'x86_64'])
            main.Append(ASFLAGS=['-arch', 'x86_64'])
except:
    pass

# Recent versions of scons substitute a "Null" object for Configure()
# when configuration isn't necessary, e.g., if the "--help" option is
# present.  Unfortunately this Null object always returns false,
# breaking all our configuration checks.  We replace it with our own
# more optimistic null object that returns True instead.
if not conf:
    def NullCheck(*args, **kwargs):
        return True

    class NullConf:
        def __init__(self, env):
            self.env = env
        def Finish(self):
            return self.env
        def __getattr__(self, mname):
            return NullCheck

    conf = NullConf(main)

# Find Python include and library directories for embedding the
# interpreter.  For consistency, we will use the same Python
# installation used to run scons (and thus this script).  If you want
# to link in an alternate version, see above for instructions on how
# to invoke scons with a different copy of the Python interpreter.
from distutils import sysconfig

py_getvar = sysconfig.get_config_var

py_debug = getattr(sys, 'pydebug', False)
py_version = 'python' + py_getvar('VERSION') + (py_debug and "_d" or "")

py_general_include = sysconfig.get_python_inc()
py_platform_include = sysconfig.get_python_inc(plat_specific=True)
py_includes = [ py_general_include ]
if py_platform_include != py_general_include:
    py_includes.append(py_platform_include)

py_lib_path = [ py_getvar('LIBDIR') ]
# add the prefix/lib/pythonX.Y/config dir, but only if there is no
# shared library in prefix/lib/.
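# (Illustrative example: for a statically built Python, LIBPL is typically
# something like <prefix>/lib/pythonX.Y/config; the actual value comes from
# distutils.sysconfig.)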
if not py_getvar('Py_ENABLE_SHARED'):
    py_lib_path.append(py_getvar('LIBPL'))
    # Python requires the flags in LINKFORSHARED to be added to the
    # linker flags when linking statically with Python.  Failing to do
    # so can lead to errors from Python's dynamic module loader at
    # start up.
    main.Append(LINKFLAGS=[py_getvar('LINKFORSHARED').split()])

py_libs = []
for lib in py_getvar('LIBS').split() + py_getvar('SYSLIBS').split():
    if not lib.startswith('-l'):
        # Python requires some special flags to link (e.g. -framework
        # common on OS X systems), assume appending preserves order
        main.Append(LINKFLAGS=[lib])
    else:
        lib = lib[2:]
        if lib not in py_libs:
            py_libs.append(lib)
py_libs.append(py_version)

main.Append(CPPPATH=py_includes)
main.Append(LIBPATH=py_lib_path)

# Cache build files in the supplied directory.
if main['M5_BUILD_CACHE']:
    print 'Using build cache located at', main['M5_BUILD_CACHE']
    CacheDir(main['M5_BUILD_CACHE'])


# verify that this stuff works
if not conf.CheckHeader('Python.h', '<>'):
    print "Error: can't find Python.h header in", py_includes
    print "Install Python headers (package python-dev on Ubuntu and RedHat)"
    Exit(1)

for lib in py_libs:
    if not conf.CheckLib(lib):
        print "Error: can't find library %s required by python" % lib
        Exit(1)

# On Solaris you need to use libsocket for socket ops
if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
    if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++',
                                   'accept(0,0,0);'):
        print "Can't find library with socket calls (e.g. accept())"
        Exit(1)

# Check for zlib.  If the check passes, libz will be automatically
# added to the LIBS environment variable.
if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
    print 'Error: did not find needed zlib compression library '\
          'and/or zlib.h header file.'
    print '       Please install zlib and try again.'
    Exit(1)

# If we have the protobuf compiler, also make sure we have the
# development libraries.  If the check passes, libprotobuf will be
# automatically added to the LIBS environment variable.  After
# this, we can use the HAVE_PROTOBUF flag to determine if we have
# got both protoc and libprotobuf available.
main['HAVE_PROTOBUF'] = main['PROTOC'] and \
    conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
                            'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')

# If we have the compiler but not the library, print another warning.
if main['PROTOC'] and not main['HAVE_PROTOBUF']:
    print termcap.Yellow + termcap.Bold + \
        'Warning: did not find protocol buffer library and/or headers.\n' + \
        '         Please install libprotobuf-dev for tracing support.' + \
        termcap.Normal

# Check for librt.
have_posix_clock = \
    conf.CheckLibWithHeader(None, 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);') or \
    conf.CheckLibWithHeader('rt', 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);')

if not conf.CheckLib('tcmalloc_minimal'):
    print termcap.Yellow + termcap.Bold + \
          "You can get a 12% performance improvement by installing tcmalloc "\
          "(libgoogle-perftools-dev package on Ubuntu or RedHat)." + \
          termcap.Normal

if not have_posix_clock:
    print "Can't find library for POSIX clocks."

# Check for <fenv.h> (C99 FP environment control)
have_fenv = conf.CheckHeader('fenv.h', '<>')
if not have_fenv:
    print "Warning: Header file <fenv.h> not found."
    print "         This host has no IEEE FP rounding mode control."

######################################################################
#
# Finish the configuration
#
main = conf.Finish()

######################################################################
#
# Collect all non-global variables
#

# Define the universe of supported ISAs
all_isa_list = [ ]
Export('all_isa_list')

class CpuModel(object):
    '''The CpuModel class encapsulates everything the ISA parser needs to
    know about a particular CPU model.'''

    # Dict of available CPU model objects.  Accessible as CpuModel.dict.
    dict = {}
    list = []
    defaults = []

    # Constructor.  Automatically adds models to CpuModel.dict.
    def __init__(self, name, filename, includes, strings, default=False):
        self.name = name           # name of model
        self.filename = filename   # filename for output exec code
        self.includes = includes   # include files needed in exec file
        # The 'strings' dict holds all the per-CPU symbols we can
        # substitute into templates etc.
        self.strings = strings

        # This cpu is enabled by default
        self.default = default

        # Add self to dict
        if name in CpuModel.dict:
            raise AttributeError, "CpuModel '%s' already registered" % name
        CpuModel.dict[name] = self
        CpuModel.list.append(name)

Export('CpuModel')

# Sticky variables get saved in the variables file so they persist from
# one invocation to the next (unless overridden, in which case the new
# value becomes sticky).
sticky_vars = Variables(args=ARGUMENTS)
Export('sticky_vars')

# Sticky variables that should be exported
export_vars = []
Export('export_vars')

# For Ruby
all_protocols = []
Export('all_protocols')
protocol_dirs = []
Export('protocol_dirs')
slicc_includes = []
Export('slicc_includes')

# Walk the tree and execute all SConsopts scripts that will add to the
# above variables
if not GetOption('verbose'):
    print "Reading SConsopts"
for bdir in [ base_dir ] + extras_dir_list:
    if not isdir(bdir):
        print "Error: directory '%s' does not exist" % bdir
        Exit(1)
    for root, dirs, files in os.walk(bdir):
        if 'SConsopts' in files:
            if GetOption('verbose'):
                print "Reading", joinpath(root, 'SConsopts')
            SConscript(joinpath(root, 'SConsopts'))

all_isa_list.sort()

sticky_vars.AddVariables(
    EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
    ListVariable('CPU_MODELS', 'CPU models',
                 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
                 sorted(CpuModel.list)),
    BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
                 False),
    BoolVariable('SS_COMPATIBLE_FP',
                 'Make floating-point results compatible with SimpleScalar',
                 False),
    BoolVariable('USE_SSE2',
                 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
                 False),
    BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
    BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
    BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability',
                 False),
    EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
                 all_protocols),
    )

# These variables get exported to
# #defines in config/*.hh (see src/SConscript).
export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'CP_ANNOTATE',
                'USE_POSIX_CLOCK', 'PROTOCOL', 'HAVE_PROTOBUF']

###################################################
#
# Define a SCons builder for configuration flag headers.
#
###################################################

# This function generates a config header file that #defines the
# variable symbol to the current variable setting (0 or 1).  The source
# operands are the name of the variable and a Value node containing the
# value of the variable.
def build_config_file(target, source, env):
    (variable, value) = [s.get_contents() for s in source]
    f = file(str(target[0]), 'w')
    print >> f, '#define', variable, value
    f.close()
    return None

# Combine the two functions into a scons Action object.
config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))

# The emitter munges the source & target node lists to reflect what
# we're really doing.
def config_emitter(target, source, env):
    # extract variable name from Builder arg
    variable = str(target[0])
    # True target is config header file
    target = joinpath('config', variable.lower() + '.hh')
    val = env[variable]
    if isinstance(val, bool):
        # Force value to 0/1
        val = int(val)
    elif isinstance(val, str):
        val = '"' + val + '"'

    # Sources are variable name & value (packaged in SCons Value nodes)
    return ([target], [Value(variable), Value(val)])

config_builder = Builder(emitter = config_emitter, action = config_action)

main.Append(BUILDERS = { 'ConfigFile' : config_builder })

# libelf build is shared across all configs in the build root.
main.SConscript('ext/libelf/SConscript',
                variant_dir = joinpath(build_root, 'libelf'))

# gzstream build is shared across all configs in the build root.
main.SConscript('ext/gzstream/SConscript',
                variant_dir = joinpath(build_root, 'gzstream'))

# libfdt build is shared across all configs in the build root.
main.SConscript('ext/libfdt/SConscript',
                variant_dir = joinpath(build_root, 'libfdt'))

###################################################
#
# This function is used to set up a directory with switching headers
#
###################################################

main['ALL_ISA_LIST'] = all_isa_list
def make_switching_dir(dname, switch_headers, env):
    # Generate the header.  target[0] is the full path of the output
    # header to generate.  'source' is a dummy variable, since we get the
    # list of ISAs from env['ALL_ISA_LIST'].
    def gen_switch_hdr(target, source, env):
        fname = str(target[0])
        f = open(fname, 'w')
        isa = env['TARGET_ISA'].lower()
        print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
        f.close()

    # Build SCons Action object. 'varlist' specifies env vars that this
    # action depends on; when env['ALL_ISA_LIST'] changes these actions
    # should get re-executed.
    switch_hdr_action = MakeAction(gen_switch_hdr,
                          Transform("GENERATE"), varlist=['ALL_ISA_LIST'])

    # Instantiate actions for each header
    for hdr in switch_headers:
        env.Command(hdr, [], switch_hdr_action)
Export('make_switching_dir')

###################################################
#
# Define build environments for selected configurations.
#
###################################################

for variant_path in variant_paths:
    print "Building in", variant_path

    # Make a copy of the build-root environment to use for this config.
    env = main.Clone()
    env['BUILDDIR'] = variant_path

    # variant_dir is the tail component of build path, and is used to
    # determine the build parameters (e.g., 'ALPHA_SE')
    (build_root, variant_dir) = splitpath(variant_path)

    # Set env variables according to the build directory config.
    sticky_vars.files = []
    # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
    # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
    # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
    current_vars_file = joinpath(build_root, 'variables', variant_dir)
    if isfile(current_vars_file):
        sticky_vars.files.append(current_vars_file)
        print "Using saved variables file %s" % current_vars_file
    else:
        # Build dir-specific variables file doesn't exist.

        # Make sure the directory is there so we can create it later
        opt_dir = dirname(current_vars_file)
        if not isdir(opt_dir):
            mkdir(opt_dir)

        # Get default build variables from source tree.  Variables are
        # normally determined by name of $VARIANT_DIR, but can be
        # overridden by '--default=' arg on command line.
        default = GetOption('default')
        opts_dir = joinpath(main.root.abspath, 'build_opts')
        if default:
            default_vars_files = [joinpath(build_root, 'variables', default),
                                  joinpath(opts_dir, default)]
        else:
            default_vars_files = [joinpath(opts_dir, variant_dir)]
        existing_files = filter(isfile, default_vars_files)
        if existing_files:
            default_vars_file = existing_files[0]
            sticky_vars.files.append(default_vars_file)
            print "Variables file %s not found,\n  using defaults in %s" \
                  % (current_vars_file, default_vars_file)
        else:
            print "Error: cannot find variables file %s or " \
                  "default file(s) %s" \
                  % (current_vars_file, ' or '.join(default_vars_files))
            Exit(1)

    # Apply current variable settings to env
    sticky_vars.Update(env)

    help_texts["local_vars"] += \
        "Build variables for %s:\n" % variant_dir \
        + sticky_vars.GenerateHelpText(env)

    # Process variable settings.

    if not have_fenv and env['USE_FENV']:
        print "Warning: <fenv.h> not available; " \
              "forcing USE_FENV to False in", variant_dir + "."
        env['USE_FENV'] = False

    if not env['USE_FENV']:
        print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
        print "         FP results may deviate slightly from other platforms."

    if env['EFENCE']:
        env.Append(LIBS=['efence'])

    # Save sticky variable settings back to current variables file
    sticky_vars.Save(current_vars_file, env)

    if env['USE_SSE2']:
        env.Append(CCFLAGS=['-msse2'])

    # The src/SConscript file sets up the build rules in 'env' according
    # to the configured variables.  It returns a list of environments,
    # one for each variant build (debug, opt, etc.)
    envList = SConscript('src/SConscript', variant_dir = variant_path,
                         exports = 'env')

    # Set up the regression tests for each build.
    for e in envList:
        SConscript('tests/SConscript',
                   variant_dir = joinpath(variant_path, 'tests', e.Label),
                   exports = { 'env' : e }, duplicate = False)

# base help text
Help('''
Usage: scons [scons options] [build variables] [target(s)]

Extra scons options:
%(options)s

Global build variables:
%(global_vars)s

%(local_vars)s
''' % help_texts)
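
# Example invocations (illustrative, based on the options and variables
# defined above):
#   scons build/ALPHA/gem5.debug                # build with the ALPHA defaults
#   scons build/ALPHA/gem5.opt USE_SSE2=True    # override a sticky variable
#   scons --default=ALPHA build/FOO/gem5.opt    # use ALPHA's build_opts file
#                                               # for the (hypothetical) FOO
#                                               # variant directory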