SConstruct (12243:c56b7387cddc) | SConstruct (12244:33af7397d081) |
---|---|
1# -*- mode:python -*- 2 3# Copyright (c) 2013, 2015-2017 ARM Limited 4# All rights reserved. 5# 6# The license below extends only to copyright in the software and shall 7# not be construed as granting a license to any other intellectual 8# property including but not limited to intellectual property relating 9# to a hardware implementation of the functionality of the software 10# licensed hereunder. You may use the software subject to the license 11# terms below provided that you ensure that this notice is replicated 12# unmodified and in its entirety in all distributions of the software, 13# modified or unmodified, in source code or in binary form. 14# 15# Copyright (c) 2011 Advanced Micro Devices, Inc. 16# Copyright (c) 2009 The Hewlett-Packard Development Company 17# Copyright (c) 2004-2005 The Regents of The University of Michigan 18# All rights reserved. 19# 20# Redistribution and use in source and binary forms, with or without 21# modification, are permitted provided that the following conditions are 22# met: redistributions of source code must retain the above copyright 23# notice, this list of conditions and the following disclaimer; 24# redistributions in binary form must reproduce the above copyright 25# notice, this list of conditions and the following disclaimer in the 26# documentation and/or other materials provided with the distribution; 27# neither the name of the copyright holders nor the names of its 28# contributors may be used to endorse or promote products derived from 29# this software without specific prior written permission. 30# 31# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 32# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 33# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 34# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT 35# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 36# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 37# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 38# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 39# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 40# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 41# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 42# 43# Authors: Steve Reinhardt 44# Nathan Binkert 45 46################################################### 47# 48# SCons top-level build description (SConstruct) file. 49# 50# While in this directory ('gem5'), just type 'scons' to build the default 51# configuration (see below), or type 'scons build/<CONFIG>/<binary>' 52# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for 53# the optimized full-system version). 54# 55# You can build gem5 in a different directory as long as there is a 56# 'build/<CONFIG>' somewhere along the target path. The build system 57# expects that all configs under the same build directory are being 58# built for the same host system. 59# 60# Examples: 61# 62# The following two commands are equivalent. The '-u' option tells 63# scons to search up the directory tree for this SConstruct file. 64# % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug 65# % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug 66# 67# The following two commands are equivalent and demonstrate building 68# in a directory outside of the source tree. The '-C' option tells 69# scons to chdir to the specified directory to find this SConstruct 70# file. 71# % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug 72# % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug 73# 74# You can use 'scons -H' to print scons options. 
# If you're in this
# 'gem5' directory (or use -u or -C to tell scons where to find this
# file), you can use 'scons -h' to print all the gem5-specific build
# options as well.
#
###################################################

# Global Python includes
import itertools
import os
import re
import shutil
import subprocess
import sys

from os import mkdir, environ
from os.path import abspath, basename, dirname, expanduser, normpath
from os.path import exists, isdir, isfile
from os.path import join as joinpath, split as splitpath

# SCons includes
import SCons
import SCons.Node

# Directories inserted near the front of sys.path so that this
# SConstruct (and subprocesses, via PYTHONPATH below) can import
# gem5's Python helpers (m5.util) and the bundled 'ply' package.
extra_python_paths = [
    Dir('src/python').srcnode().abspath, # gem5 includes
    Dir('ext/ply').srcnode().abspath, # ply is used by several files
    ]

sys.path[1:1] = extra_python_paths

from m5.util import compareVersions, readCommand
from m5.util.terminal import get_termcap

# Accumulators for the three sections of the 'scons -h' output; filled
# in by AddLocalOption() and the sticky-variable setup further down.
help_texts = {
    "options" : "",
    "global_vars" : "",
    "local_vars" : ""
}

Export("help_texts")


# There's a bug in scons in that (1) by default, the help texts from
# AddOption() are supposed to be displayed when you type 'scons -h'
# and (2) you can override the help displayed by 'scons -h' using the
# Help() function, but these two features are incompatible: once
# you've overridden the help text using Help(), there's no way to get
# at the help texts from AddOptions. See:
# http://scons.tigris.org/issues/show_bug.cgi?id=2356
# http://scons.tigris.org/issues/show_bug.cgi?id=2611
# This hack lets us extract the help text from AddOptions and
# re-inject it via Help(). Ideally someday this bug will be fixed and
# we can just use AddOption directly.
def AddLocalOption(*args, **kwargs):
    """Wrap SCons' AddOption(), capturing the option's help text.

    The formatted help line is appended to help_texts["options"] so it
    can be re-injected via Help() later (see the scons bug noted
    above).  All arguments are forwarded to AddOption() unchanged.
    """
    col_width = 30

    # Renamed from 'help' so the builtin is not shadowed.
    opt_help = " " + ", ".join(args)
    if "help" in kwargs:
        length = len(opt_help)
        if length >= col_width:
            # Flag names too wide for the column: put the description
            # on its own, indented line.
            opt_help += "\n" + " " * col_width
        else:
            opt_help += " " * (col_width - length)
        opt_help += kwargs["help"]
    help_texts["options"] += opt_help + "\n"

    AddOption(*args, **kwargs)

AddLocalOption('--colors', dest='use_colors', action='store_true',
               help="Add color to abbreviated scons output")
AddLocalOption('--no-colors', dest='use_colors', action='store_false',
               help="Don't add color to abbreviated scons output")
AddLocalOption('--with-cxx-config', dest='with_cxx_config',
               action='store_true',
               help="Build with support for C++-based configuration")
AddLocalOption('--default', dest='default', type='string', action='store',
               help='Override which build_opts file to use for defaults')
AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
               help='Disable style checking hooks')
AddLocalOption('--no-lto', dest='no_lto', action='store_true',
               help='Disable Link-Time Optimization for fast')
AddLocalOption('--force-lto', dest='force_lto', action='store_true',
               help='Use Link-Time Optimization instead of partial linking' +
                    ' when the compiler doesn\'t support using them together.')
AddLocalOption('--update-ref', dest='update_ref', action='store_true',
               help='Update test reference outputs')
AddLocalOption('--verbose', dest='verbose', action='store_true',
               help='Print full tool command lines')
AddLocalOption('--without-python', dest='without_python',
               action='store_true',
               help='Build without Python configuration support')
AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
               action='store_true',
               help='Disable linking against tcmalloc')
AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
               help='Build with Undefined Behavior Sanitizer if available')
AddLocalOption('--with-asan', dest='with_asan', action='store_true',
               help='Build with Address Sanitizer if available')

# --no-lto and --force-lto contradict each other; bail out early.
if GetOption('no_lto') and GetOption('force_lto'):
    print '--no-lto and --force-lto are mutually exclusive'
    Exit(1)

termcap = get_termcap(GetOption('use_colors'))

########################################################################
#
# Set up the main build environment.
#
########################################################################

main = Environment()

main_dict_keys = main.Dictionary().keys()

# Check that we have a C/C++ compiler
if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
    print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
    Exit(1)

# add useful python code PYTHONPATH so it can be used by subprocesses
# as well
main.AppendENVPath('PYTHONPATH', extra_python_paths)

# -*- mode:python -*-

# Copyright (c) 2013, 2015-2017 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder.  You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Copyright (c) 2011 Advanced Micro Devices, Inc.
# Copyright (c) 2009 The Hewlett-Packard Development Company
# Copyright (c) 2004-2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
#          Nathan Binkert

###################################################
#
# SCons top-level build description (SConstruct) file.
#
# While in this directory ('gem5'), just type 'scons' to build the default
# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
# the optimized full-system version).
19# 20# Redistribution and use in source and binary forms, with or without 21# modification, are permitted provided that the following conditions are 22# met: redistributions of source code must retain the above copyright 23# notice, this list of conditions and the following disclaimer; 24# redistributions in binary form must reproduce the above copyright 25# notice, this list of conditions and the following disclaimer in the 26# documentation and/or other materials provided with the distribution; 27# neither the name of the copyright holders nor the names of its 28# contributors may be used to endorse or promote products derived from 29# this software without specific prior written permission. 30# 31# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS 32# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT 33# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR 34# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT 35# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, 36# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT 37# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 38# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY 39# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 40# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 41# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 42# 43# Authors: Steve Reinhardt 44# Nathan Binkert 45 46################################################### 47# 48# SCons top-level build description (SConstruct) file. 49# 50# While in this directory ('gem5'), just type 'scons' to build the default 51# configuration (see below), or type 'scons build/<CONFIG>/<binary>' 52# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for 53# the optimized full-system version). 
54# 55# You can build gem5 in a different directory as long as there is a 56# 'build/<CONFIG>' somewhere along the target path. The build system 57# expects that all configs under the same build directory are being 58# built for the same host system. 59# 60# Examples: 61# 62# The following two commands are equivalent. The '-u' option tells 63# scons to search up the directory tree for this SConstruct file. 64# % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug 65# % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug 66# 67# The following two commands are equivalent and demonstrate building 68# in a directory outside of the source tree. The '-C' option tells 69# scons to chdir to the specified directory to find this SConstruct 70# file. 71# % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug 72# % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug 73# 74# You can use 'scons -H' to print scons options. If you're in this 75# 'gem5' directory (or use -u or -C to tell scons where to find this 76# file), you can use 'scons -h' to print all the gem5-specific build 77# options as well. 
78# 79################################################### 80 81# Global Python includes 82import itertools 83import os 84import re 85import shutil 86import subprocess 87import sys 88 89from os import mkdir, environ 90from os.path import abspath, basename, dirname, expanduser, normpath 91from os.path import exists, isdir, isfile 92from os.path import join as joinpath, split as splitpath 93 94# SCons includes 95import SCons 96import SCons.Node 97 98extra_python_paths = [ 99 Dir('src/python').srcnode().abspath, # gem5 includes 100 Dir('ext/ply').srcnode().abspath, # ply is used by several files 101 ] 102 103sys.path[1:1] = extra_python_paths 104 105from m5.util import compareVersions, readCommand 106from m5.util.terminal import get_termcap 107 108help_texts = { 109 "options" : "", 110 "global_vars" : "", 111 "local_vars" : "" 112} 113 114Export("help_texts") 115 116 117# There's a bug in scons in that (1) by default, the help texts from 118# AddOption() are supposed to be displayed when you type 'scons -h' 119# and (2) you can override the help displayed by 'scons -h' using the 120# Help() function, but these two features are incompatible: once 121# you've overridden the help text using Help(), there's no way to get 122# at the help texts from AddOptions. See: 123# http://scons.tigris.org/issues/show_bug.cgi?id=2356 124# http://scons.tigris.org/issues/show_bug.cgi?id=2611 125# This hack lets us extract the help text from AddOptions and 126# re-inject it via Help(). Ideally someday this bug will be fixed and 127# we can just use AddOption directly. 
128def AddLocalOption(*args, **kwargs): 129 col_width = 30 130 131 help = " " + ", ".join(args) 132 if "help" in kwargs: 133 length = len(help) 134 if length >= col_width: 135 help += "\n" + " " * col_width 136 else: 137 help += " " * (col_width - length) 138 help += kwargs["help"] 139 help_texts["options"] += help + "\n" 140 141 AddOption(*args, **kwargs) 142 143AddLocalOption('--colors', dest='use_colors', action='store_true', 144 help="Add color to abbreviated scons output") 145AddLocalOption('--no-colors', dest='use_colors', action='store_false', 146 help="Don't add color to abbreviated scons output") 147AddLocalOption('--with-cxx-config', dest='with_cxx_config', 148 action='store_true', 149 help="Build with support for C++-based configuration") 150AddLocalOption('--default', dest='default', type='string', action='store', 151 help='Override which build_opts file to use for defaults') 152AddLocalOption('--ignore-style', dest='ignore_style', action='store_true', 153 help='Disable style checking hooks') 154AddLocalOption('--no-lto', dest='no_lto', action='store_true', 155 help='Disable Link-Time Optimization for fast') 156AddLocalOption('--force-lto', dest='force_lto', action='store_true', 157 help='Use Link-Time Optimization instead of partial linking' + 158 ' when the compiler doesn\'t support using them together.') 159AddLocalOption('--update-ref', dest='update_ref', action='store_true', 160 help='Update test reference outputs') 161AddLocalOption('--verbose', dest='verbose', action='store_true', 162 help='Print full tool command lines') 163AddLocalOption('--without-python', dest='without_python', 164 action='store_true', 165 help='Build without Python configuration support') 166AddLocalOption('--without-tcmalloc', dest='without_tcmalloc', 167 action='store_true', 168 help='Disable linking against tcmalloc') 169AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true', 170 help='Build with Undefined Behavior Sanitizer if available') 
171AddLocalOption('--with-asan', dest='with_asan', action='store_true', 172 help='Build with Address Sanitizer if available') 173 174if GetOption('no_lto') and GetOption('force_lto'): 175 print '--no-lto and --force-lto are mutually exclusive' 176 Exit(1) 177 178termcap = get_termcap(GetOption('use_colors')) 179 180######################################################################## 181# 182# Set up the main build environment. 183# 184######################################################################## 185 186main = Environment() 187 188main_dict_keys = main.Dictionary().keys() 189 190# Check that we have a C/C++ compiler 191if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys): 192 print "No C++ compiler installed (package g++ on Ubuntu and RedHat)" 193 Exit(1) 194 195# add useful python code PYTHONPATH so it can be used by subprocesses 196# as well 197main.AppendENVPath('PYTHONPATH', extra_python_paths) 198 |
########################################################################
#
# Mercurial Stuff.
#
# If the gem5 directory is a mercurial repository, we should do some
# extra things.
#
########################################################################

hgdir = main.root.Dir(".hg")


style_message = """
You're missing the gem5 style hook, which automatically checks your code
against the gem5 style rules on %s.
This script will now install the hook in your %s.
Press enter to continue, or ctrl-c to abort: """

mercurial_style_message = """
You're missing the gem5 style hook, which automatically checks your code
against the gem5 style rules on hg commit and qrefresh commands.
This script will now install the hook in your .hg/hgrc file.
Press enter to continue, or ctrl-c to abort: """

git_style_message = """
You're missing the gem5 style or commit message hook. These hooks help
to ensure that your code follows gem5's style rules on git commit.
This script will now install the hook in your .git/hooks/ directory.
Press enter to continue, or ctrl-c to abort: """

mercurial_style_upgrade_message = """
Your Mercurial style hooks are not up-to-date. This script will now
try to automatically update them. A backup of your hgrc will be saved
in .hg/hgrc.old.
Press enter to continue, or ctrl-c to abort: """

# hgrc fragment appended when no style hook is configured at all.
mercurial_style_hook = """
# The following lines were automatically added by gem5/SConstruct
# to provide the gem5 style-checking hooks
[extensions]
hgstyle = %s/util/hgstyle.py

[hooks]
pretxncommit.style = python:hgstyle.check_style
pre-qrefresh.style = python:hgstyle.check_style
# End of SConstruct additions

""" % (main.root.abspath)

mercurial_lib_not_found = """
Mercurial libraries cannot be found, ignoring style hook. If
you are a gem5 developer, please fix this and run the style
hook. It is important.
"""

# Check for style hook and prompt for installation if it's not there.
# Skip this if --ignore-style was specified, there's no interactive
# terminal to prompt, or no recognized revision control system can be
# found.
ignore_style = GetOption('ignore_style') or not sys.stdin.isatty()

# Try wire up Mercurial to the style hooks
if not ignore_style and hgdir.exists():
    style_hook = True
    style_hooks = tuple()
    hgrc = hgdir.File('hgrc')
    hgrc_old = hgdir.File('hgrc.old')
    try:
        # Mercurial may not be importable; in that case we warn but
        # carry on (see mercurial_lib_not_found above).
        from mercurial import ui
        ui = ui.ui()
        ui.readconfig(hgrc.abspath)
        style_hooks = (ui.config('hooks', 'pretxncommit.style', None),
                       ui.config('hooks', 'pre-qrefresh.style', None))
        style_hook = all(style_hooks)
        style_extension = ui.config('extensions', 'style', None)
    except ImportError:
        print mercurial_lib_not_found

    if "python:style.check_style" in style_hooks:
        # Try to upgrade the style hooks
        print mercurial_style_upgrade_message
        # continue unless user does ctrl-c/ctrl-d etc.
        try:
            raw_input()
        except:
            print "Input exception, exiting scons.\n"
            sys.exit(1)
        # Back up the old hgrc, then rewrite it line by line, updating
        # the old-style hook/extension entries to the hgstyle ones.
        shutil.copyfile(hgrc.abspath, hgrc_old.abspath)
        re_style_hook = re.compile(r"^([^=#]+)\.style\s*=\s*([^#\s]+).*")
        re_style_extension = re.compile("style\s*=\s*([^#\s]+).*")
        old, new = open(hgrc_old.abspath, 'r'), open(hgrc.abspath, 'w')
        for l in old:
            m_hook = re_style_hook.match(l)
            m_ext = re_style_extension.match(l)
            if m_hook:
                hook, check = m_hook.groups()
                if check != "python:style.check_style":
                    print "Warning: %s.style is using a non-default " \
                        "checker: %s" % (hook, check)
                if hook not in ("pretxncommit", "pre-qrefresh"):
                    print "Warning: Updating unknown style hook: %s" % hook

                l = "%s.style = python:hgstyle.check_style\n" % hook
            elif m_ext and m_ext.group(1) == style_extension:
                l = "hgstyle = %s/util/hgstyle.py\n" % main.root.abspath

            new.write(l)
    elif not style_hook:
        # No style hook configured at all: offer to append one.
        print mercurial_style_message,
        # continue unless user does ctrl-c/ctrl-d etc.
        try:
            raw_input()
        except:
            print "Input exception, exiting scons.\n"
            sys.exit(1)
        hgrc_path = '%s/.hg/hgrc' % main.root.abspath
        print "Adding style hook to", hgrc_path, "\n"
        try:
            with open(hgrc_path, 'a') as f:
                f.write(mercurial_style_hook)
        except:
            print "Error updating", hgrc_path
            sys.exit(1)

def install_git_style_hooks():
    # Symlink gem5's pre-commit / commit-msg scripts into .git/hooks,
    # prompting the user first.  Best-effort: failure to locate the
    # repo only warns.
    try:
        gitdir = Dir(readCommand(
            ["git", "rev-parse", "--git-dir"]).strip("\n"))
    except Exception, e:
        print "Warning: Failed to find git repo directory: %s" % e
        return

    git_hooks = gitdir.Dir("hooks")
    def hook_exists(hook_name):
        hook = git_hooks.File(hook_name)
        return hook.exists()

    def hook_install(hook_name, script):
        hook = git_hooks.File(hook_name)
        if hook.exists():
            print "Warning: Can't install %s, hook already exists." % hook_name
            return

        if hook.islink():
            print "Warning: Removing broken symlink for hook %s." % hook_name
            os.unlink(hook.get_abspath())

        if not git_hooks.exists():
            mkdir(git_hooks.get_abspath())
            git_hooks.clear()

        abs_symlink_hooks = git_hooks.islink() and \
            os.path.isabs(os.readlink(git_hooks.get_abspath()))

        # Use a relative symlink if the hooks live in the source directory,
        # and the hooks directory is not a symlink to an absolute path.
        if hook.is_under(main.root) and not abs_symlink_hooks:
            script_path = os.path.relpath(
                os.path.realpath(script.get_abspath()),
                os.path.realpath(hook.Dir(".").get_abspath()))
        else:
            script_path = script.get_abspath()

        try:
            os.symlink(script_path, hook.get_abspath())
        except:
            print "Error updating git %s hook" % hook_name
            raise

    if hook_exists("pre-commit") and hook_exists("commit-msg"):
        return

    print git_style_message,
    try:
        raw_input()
    except:
        print "Input exception, exiting scons.\n"
        sys.exit(1)

    git_style_script = File("util/git-pre-commit.py")
    git_msg_script = File("ext/git-commit-msg")

    hook_install("pre-commit", git_style_script)
    hook_install("commit-msg", git_msg_script)

# Try to wire up git to the style hooks
if not ignore_style and main.root.Entry(".git").exists():
    install_git_style_hooks()
388################################################### 389# 390# Figure out which configurations to set up based on the path(s) of 391# the target(s). 392# 393################################################### 394 395# Find default configuration & binary. 396Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug')) 397 398# helper function: find last occurrence of element in list 399def rfind(l, elt, offs = -1): 400 for i in range(len(l)+offs, 0, -1): 401 if l[i] == elt: 402 return i 403 raise ValueError, "element not found" 404 405# Take a list of paths (or SCons Nodes) and return a list with all 406# paths made absolute and ~-expanded. Paths will be interpreted 407# relative to the launch directory unless a different root is provided 408def makePathListAbsolute(path_list, root=GetLaunchDir()): 409 return [abspath(joinpath(root, expanduser(str(p)))) 410 for p in path_list] 411 412# Each target must have 'build' in the interior of the path; the 413# directory below this will determine the build parameters. For 414# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we 415# recognize that ALPHA_SE specifies the configuration because it 416# follow 'build' in the build path. 417 418# The funky assignment to "[:]" is needed to replace the list contents 419# in place rather than reassign the symbol to a new list, which 420# doesn't work (obviously!). 421BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS) 422 423# Generate a list of the unique build roots and configs that the 424# collected targets reference. 
# Walk every requested target, derive its build root (the directory
# containing 'build') and its variant directory (build/<CONFIG>), and
# reject target sets that span more than one build root.
variant_paths = []
build_root = None
for t in BUILD_TARGETS:
    path_dirs = t.split('/')
    try:
        build_top = rfind(path_dirs, 'build', -2)
    except:
        print "Error: no non-leaf 'build' dir found on target path", t
        Exit(1)
    this_build_root = joinpath('/',*path_dirs[:build_top+1])
    if not build_root:
        build_root = this_build_root
    else:
        if this_build_root != build_root:
            print "Error: build targets not under same build root\n"\
                  "  %s\n  %s" % (build_root, this_build_root)
            Exit(1)
    variant_path = joinpath('/',*path_dirs[:build_top+2])
    if variant_path not in variant_paths:
        variant_paths.append(variant_path)

# Make sure build_root exists (might not if this is the first build there)
if not isdir(build_root):
    mkdir(build_root)
main['BUILDROOT'] = build_root

Export('main')

main.SConsignFile(joinpath(build_root, "sconsign"))

# Default duplicate option is to use hard links, but this messes up
# when you use emacs to edit a file in the target dir, as emacs moves
# file to file~ then copies to file, breaking the link.  Symbolic
# (soft) links work better.
main.SetOption('duplicate', 'soft-copy')

#
# Set up global sticky variables... these are common to an entire build
# tree (not specific to a particular build like ALPHA_SE)
#

global_vars_file = joinpath(build_root, 'variables.global')

global_vars = Variables(global_vars_file, args=ARGUMENTS)

global_vars.AddVariables(
    ('CC', 'C compiler', environ.get('CC', main['CC'])),
    ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
    ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
    ('BATCH', 'Use batch pool for build and tests', False),
    ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
    ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
    ('EXTRAS', 'Add extra directories to the compilation', '')
    )

# Update main environment with values from ARGUMENTS & global_vars_file
global_vars.Update(main)
help_texts["global_vars"] += global_vars.GenerateHelpText(main)

# Save sticky variable settings back to current variables file
global_vars.Save(global_vars_file, main)

# Parse EXTRAS variable to build list of all directories where we'll
# look for sources etc.  This list is exported as extras_dir_list.
base_dir = main.srcdir.abspath
if main['EXTRAS']:
    extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
else:
    extras_dir_list = []

Export('base_dir')
Export('extras_dir_list')

# the ext directory should be on the #includes path
main.Append(CPPPATH=[Dir('ext')])

# Add shared top-level headers
main.Prepend(CPPPATH=Dir('include'))

def strip_build_path(path, env):
    # Strip env['BUILDROOT'] (or a leading 'build/') from path so
    # build messages show paths relative to the variant directory.
    path = str(path)
    variant_base = env['BUILDROOT'] + os.path.sep
    if path.startswith(variant_base):
        path = path[len(variant_base):]
    elif path.startswith('build/'):
        path = path[6:]
    return path

# Generate a string of the form:
#   common/path/prefix/src1, src2 -> tgt1, tgt2
# to print while building.
class Transform(object):
    """Callable COMSTR formatter producing abbreviated build output.

    Renders ' [  TOOL  ] prefix/srcs -> tgts', factoring out the
    common path prefix shared by sources and targets.
    """
    # all specific color settings should be here and nowhere else
    tool_color = termcap.Normal
    pfx_color = termcap.Yellow
    srcs_color = termcap.Yellow + termcap.Bold
    arrow_color = termcap.Blue + termcap.Bold
    tgts_color = termcap.Yellow + termcap.Bold

    def __init__(self, tool, max_sources=99):
        # Pre-build the colorized format string; %s slots are filled
        # with (common prefix, sources, targets) in __call__.
        self.format = self.tool_color + (" [%8s] " % tool) \
                      + self.pfx_color + "%s" \
                      + self.srcs_color + "%s" \
                      + self.arrow_color + " -> " \
                      + self.tgts_color + "%s" \
                      + termcap.Normal
        self.max_sources = max_sources

    def __call__(self, target, source, env, for_signature=None):
        # truncate source list according to max_sources param
        source = source[0:self.max_sources]
        def strip(f):
            return strip_build_path(str(f), env)
        if len(source) > 0:
            srcs = map(strip, source)
        else:
            srcs = ['']
        tgts = map(strip, target)
        # surprisingly, os.path.commonprefix is a dumb char-by-char string
        # operation that has nothing to do with paths.
        com_pfx = os.path.commonprefix(srcs + tgts)
        com_pfx_len = len(com_pfx)
        if com_pfx:
            # do some cleanup and sanity checking on common prefix
            if com_pfx[-1] == ".":
                # prefix matches all but file extension: ok
                # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
                com_pfx = com_pfx[0:-1]
            elif com_pfx[-1] == "/":
                # common prefix is directory path: OK
                pass
            else:
                src0_len = len(srcs[0])
                tgt0_len = len(tgts[0])
                if src0_len == com_pfx_len:
                    # source is a substring of target, OK
                    pass
                elif tgt0_len == com_pfx_len:
                    # target is a substring of source, need to back up to
                    # avoid empty string on RHS of arrow
                    sep_idx = com_pfx.rfind(".")
                    if sep_idx != -1:
                        com_pfx = com_pfx[0:sep_idx]
                    else:
                        com_pfx = ''
                elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
                    # still splitting at file extension: ok
                    pass
                else:
                    # probably a fluke; ignore it
                    com_pfx = ''
        # recalculate length in case com_pfx was modified
        com_pfx_len = len(com_pfx)
        def fmt(files):
            f = map(lambda s: s[com_pfx_len:], files)
            return ', '.join(f)
        return self.format % (com_pfx, fmt(srcs), fmt(tgts))

Export('Transform')

# enable the regression script to use the termcap
main['TERMCAP'] = termcap

# With --verbose, show the full tool command lines; otherwise install
# Transform-based abbreviated COMSTR strings for the common builders.
if GetOption('verbose'):
    def MakeAction(action, string, *args, **kwargs):
        return Action(action, *args, **kwargs)
else:
    MakeAction = Action
    main['CCCOMSTR'] = Transform("CC")
    main['CXXCOMSTR'] = Transform("CXX")
    main['ASCOMSTR'] = Transform("AS")
    main['ARCOMSTR'] = Transform("AR", 0)
    main['LINKCOMSTR'] = Transform("LINK", 0)
    main['SHLINKCOMSTR'] = Transform("SHLINK", 0)
    main['RANLIBCOMSTR'] = Transform("RANLIB", 0)
    main['M4COMSTR'] = Transform("M4")
    main['SHCCCOMSTR'] = Transform("SHCC")
    main['SHCXXCOMSTR'] = Transform("SHCXX")
Export('MakeAction')

# Initialize the Link-Time Optimization (LTO) flags
main['LTO_CCFLAGS'] = []
main['LTO_LDFLAGS'] = []

# According to the readme, tcmalloc works best if the compiler doesn't
# assume that we're using the builtin malloc and friends. These flags
# are compiler-specific, so we need to set them after we detect which
# compiler we're using.
main['TCMALLOC_CCFLAGS'] = []

# Probe the C++ compiler's identity; --version works for gcc/clang,
# -V is kept for other (e.g. vendor) compilers.
CXX_version = readCommand([main['CXX'],'--version'], exception=False)
CXX_V = readCommand([main['CXX'],'-V'], exception=False)

main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
# Sanity check: the two detections must be mutually exclusive.
if main['GCC'] + main['CLANG'] > 1:
    print 'Error: How can we have two at the same time?'
    Exit(1)

# Set up default C++ compiler flags
if main['GCC'] or main['CLANG']:
    # As gcc and clang share many flags, do the common parts here
    main.Append(CCFLAGS=['-pipe'])
    main.Append(CCFLAGS=['-fno-strict-aliasing'])
    # Enable -Wall and -Wextra and then disable the few warnings that
    # we consistently violate
    main.Append(CCFLAGS=['-Wall', '-Wundef', '-Wextra',
                         '-Wno-sign-compare', '-Wno-unused-parameter'])
    # We always compile using C++11
    main.Append(CXXFLAGS=['-std=c++11'])
    if sys.platform.startswith('freebsd'):
        main.Append(CCFLAGS=['-I/usr/local/include'])
        main.Append(CXXFLAGS=['-I/usr/local/include'])

    # Partial (incremental) links must not pass -shared; build dedicated
    # flag sets for partial shared/static links from the normal ones.
    main['FILTER_PSHLINKFLAGS'] = lambda x: str(x).replace(' -shared', '')
    main['PSHLINKFLAGS'] = main.subst('${FILTER_PSHLINKFLAGS(SHLINKFLAGS)}')
    main['PLINKFLAGS'] = main.subst('${LINKFLAGS}')
    shared_partial_flags = ['-r', '-nostdlib']
    main.Append(PSHLINKFLAGS=shared_partial_flags)
    main.Append(PLINKFLAGS=shared_partial_flags)
else:
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
              termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use a compiler other than GCC"
    print "       or clang, there appears to be something wrong with your"
    print "       environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to ease fix SConstruct and "
    print "       src/SConscript to support that compiler."
    Exit(1)

if main['GCC']:
    # Check for a supported version of gcc. >= 4.8 is chosen for its
    # level of c++11 support. See
    # http://gcc.gnu.org/projects/cxx0x.html for details.
    gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
    if compareVersions(gcc_version, "4.8") < 0:
        print 'Error: gcc version 4.8 or newer required.'
        print '       Installed version:', gcc_version
        Exit(1)

    main['GCC_VERSION'] = gcc_version

    if compareVersions(gcc_version, '4.9') >= 0:
        # Incremental linking with LTO is currently broken in gcc versions
        # 4.9 and above. A version where everything works completely hasn't
        # yet been identified.
        #
        # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=67548
        main['BROKEN_INCREMENTAL_LTO'] = True
    if compareVersions(gcc_version, '6.0') >= 0:
        # gcc versions 6.0 and greater accept an -flinker-output flag which
        # selects what type of output the linker should generate. This is
        # necessary for incremental lto to work, but is also broken in
        # current versions of gcc. It may not be necessary in future
        # versions. We add it here since it might be, and as a reminder that
        # it exists. It's excluded if lto is being forced.
        #
        # https://gcc.gnu.org/gcc-6/changes.html
        # https://gcc.gnu.org/ml/gcc-patches/2015-11/msg03161.html
        # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=69866
        if not GetOption('force_lto'):
            main.Append(PSHLINKFLAGS='-flinker-output=rel')
            main.Append(PLINKFLAGS='-flinker-output=rel')

    # gcc from version 4.8 and above generates "rep; ret" instructions
    # to avoid performance penalties on certain AMD chips. Older
    # assemblers detect this as an error, "Error: expecting string
    # instruction after `rep'"
    as_version_raw = readCommand([main['AS'], '-v', '/dev/null',
                                  '-o', '/dev/null'],
                                 exception=False).split()

    # version strings may contain extra distro-specific
    # qualifiers, so play it safe and keep only what comes before
    # the first hyphen
    as_version = as_version_raw[-1].split('-')[0] if as_version_raw else None

    if not as_version or compareVersions(as_version, "2.23") < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: This combination of gcc and binutils have' + \
            ' known incompatibilities.\n' + \
            '         If you encounter build problems, please update ' + \
            'binutils to 2.23.' + \
            termcap.Normal

    # Make sure we warn if the user has requested to compile with the
    # Undefined Behavior Sanitizer and this version of gcc does not
    # support it.
    if GetOption('with_ubsan') and \
            compareVersions(gcc_version, '4.9') < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: UBSan is only supported using gcc 4.9 and later.' + \
            termcap.Normal

    disable_lto = GetOption('no_lto')
    if not disable_lto and main.get('BROKEN_INCREMENTAL_LTO', False) and \
            not GetOption('force_lto'):
        print termcap.Yellow + termcap.Bold + \
            'Warning: Your compiler doesn\'t support incremental linking' + \
            ' and lto at the same time, so lto is being disabled. To force' + \
            ' lto on anyway, use the --force-lto option. That will disable' + \
            ' partial linking.' + \
            termcap.Normal
        disable_lto = True

    # Add the appropriate Link-Time Optimization (LTO) flags
    # unless LTO is explicitly turned off. Note that these flags
    # are only used by the fast target.
    if not disable_lto:
        # Pass the LTO flag when compiling to produce GIMPLE
        # output, we merely create the flags here and only append
        # them later
        main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

        # Use the same amount of jobs for LTO as we are running
        # scons with
        main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
                                  '-fno-builtin-realloc', '-fno-builtin-free'])

    # add option to check for undeclared overrides
    if compareVersions(gcc_version, "5.0") > 0:
        main.Append(CCFLAGS=['-Wno-error=suggest-override'])

elif main['CLANG']:
    # Check for a supported version of clang, >= 3.1 is needed to
    # support similar features as gcc 4.8. See
    # http://clang.llvm.org/cxx_status.html for details
    clang_version_re = re.compile(".* version (\d+\.\d+)")
    clang_version_match = clang_version_re.search(CXX_version)
    if (clang_version_match):
        clang_version = clang_version_match.groups()[0]
        if compareVersions(clang_version, "3.1") < 0:
            print 'Error: clang version 3.1 or newer required.'
            print '       Installed version:', clang_version
            Exit(1)
    else:
        print 'Error: Unable to determine clang version.'
        Exit(1)

    # clang has a few additional warnings that we disable, extraneous
    # parentheses are allowed due to Ruby's printing of the AST,
    # finally self assignments are allowed as the generated CPU code
    # is relying on this
    main.Append(CCFLAGS=['-Wno-parentheses',
                         '-Wno-self-assign',
                         # Some versions of libstdc++ (4.8?) seem to
                         # use struct hash and class hash
                         # interchangeably.
                         '-Wno-mismatched-tags',
                         ])

    main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])

    # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
    # opposed to libstdc++, as the latter is dated.
    if sys.platform == "darwin":
        main.Append(CXXFLAGS=['-stdlib=libc++'])
        main.Append(LIBS=['c++'])

    # On FreeBSD we need libthr.
    if sys.platform.startswith('freebsd'):
        main.Append(LIBS=['thr'])

else:
    # Unreachable given the GCC/CLANG guard above, but kept so each
    # branch of the version check reports a consistent diagnostic.
    print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
    print "Don't know what compiler options to use for your compiler."
    print termcap.Yellow + '       compiler:' + termcap.Normal, main['CXX']
    print termcap.Yellow + '       version:' + termcap.Normal,
    if not CXX_version:
        print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
              termcap.Normal
    else:
        print CXX_version.replace('\n', '<nl>')
    print "       If you're trying to use a compiler other than GCC"
    print "       or clang, there appears to be something wrong with your"
    print "       environment."
    print "       "
    print "       If you are trying to use a compiler other than those listed"
    print "       above you will need to ease fix SConstruct and "
    print "       src/SConscript to support that compiler."
    Exit(1)

# Set up common yacc/bison flags (needed for Ruby)
main['YACCFLAGS'] = '-d'
main['YACCHXXFILESUFFIX'] = '.hh'

# Do this after we save setting back, or else we'll tack on an
# extra 'qdo' every time we run scons.
if main['BATCH']:
    main['CC']     = main['BATCH_CMD'] + ' ' + main['CC']
    main['CXX']    = main['BATCH_CMD'] + ' ' + main['CXX']
    main['AS']     = main['BATCH_CMD'] + ' ' + main['AS']
    main['AR']     = main['BATCH_CMD'] + ' ' + main['AR']
    main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']

if sys.platform == 'cygwin':
    # cygwin has some header file issues...
    main.Append(CCFLAGS=["-Wno-uninitialized"])

# Check for the protobuf compiler
protoc_version = readCommand([main['PROTOC'], '--version'],
                             exception='').split()

# First two words should be "libprotoc x.y.z"
if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
    print termcap.Yellow + termcap.Bold + \
        'Warning: Protocol buffer compiler (protoc) not found.\n' + \
        '         Please install protobuf-compiler for tracing support.' + \
        termcap.Normal
    main['PROTOC'] = False
else:
    # Based on the availability of the compress stream wrappers,
    # require 2.1.0
    min_protoc_version = '2.1.0'
    if compareVersions(protoc_version[1], min_protoc_version) < 0:
        print termcap.Yellow + termcap.Bold + \
            'Warning: protoc version', min_protoc_version, \
            'or newer required.\n' + \
            '         Installed version:', protoc_version[1], \
            termcap.Normal
        main['PROTOC'] = False
    else:
        # Attempt to determine the appropriate include path and
        # library path using pkg-config, that means we also need to
        # check for pkg-config. Note that it is possible to use
        # protobuf without the involvement of pkg-config. Later on we
        # do a library config check and at that point the test
        # will fail if libprotobuf cannot be found.
        if readCommand(['pkg-config', '--version'], exception=''):
            try:
                # Attempt to establish what linking flags to add for protobuf
                # using pkg-config
                main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
            except:
                print termcap.Yellow + termcap.Bold + \
                    'Warning: pkg-config could not get protobuf flags.' + \
                    termcap.Normal


# Check for 'timeout' from GNU coreutils. If present, regressions will
# be run with a time limit. We require version 8.13 since we rely on
# support for the '--foreground' option.
# FreeBSD ships GNU timeout as 'gtimeout'; everywhere else it's 'timeout'.
if sys.platform.startswith('freebsd'):
    timeout_lines = readCommand(['gtimeout', '--version'],
                                exception='').splitlines()
else:
    timeout_lines = readCommand(['timeout', '--version'],
                                exception='').splitlines()
# Get the first line and tokenize it
timeout_version = timeout_lines[0].split() if timeout_lines else []
main['TIMEOUT'] = timeout_version and \
    compareVersions(timeout_version[-1], '8.13') >= 0

# Add a custom Check function to test for structure members.
# Returns the (truthy) TryCompile result: nonzero iff 'decl' has the
# named member when 'include' is included with the given quote style.
def CheckMember(context, include, decl, member, include_quotes="<>"):
    context.Message("Checking for member %s in %s..." %
                    (member, decl))
    text = """
#include %(header)s
int main(){
  %(decl)s test;
  (void)test.%(member)s;
  return 0;
};
""" % { "header" : include_quotes[0] + include + include_quotes[1],
        "decl" : decl,
        "member" : member,
        }

    ret = context.TryCompile(text, extension=".cc")
    context.Result(ret)
    return ret

# Platform-specific configuration.  Note again that we assume that all
# builds under a given build root run on the same host platform.
conf = Configure(main,
                 conf_dir = joinpath(build_root, '.scons_config'),
                 log_file = joinpath(build_root, 'scons_config.log'),
                 custom_tests = {
        'CheckMember' : CheckMember,
        })

# Check if we should compile a 64 bit binary on Mac OS X/Darwin
try:
    import platform
    uname = platform.uname()
    if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
        if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
            main.Append(CCFLAGS=['-arch', 'x86_64'])
            main.Append(CFLAGS=['-arch', 'x86_64'])
            main.Append(LINKFLAGS=['-arch', 'x86_64'])
            main.Append(ASFLAGS=['-arch', 'x86_64'])
except:
    # Deliberately best-effort: any failure just means no -arch flags.
    pass

# Recent versions of scons substitute a "Null" object for Configure()
# when configuration isn't necessary, e.g., if the "--help" option is
# present.  Unfortunately this Null object always returns false,
# breaking all our configuration checks.  We replace it with our own
# more optimistic null object that returns True instead.
if not conf:
    def NullCheck(*args, **kwargs):
        return True

    class NullConf:
        def __init__(self, env):
            self.env = env
        def Finish(self):
            return self.env
        def __getattr__(self, mname):
            # Every Check* method succeeds on the null configuration.
            return NullCheck

    conf = NullConf(main)

# Cache build files in the supplied directory.
if main['M5_BUILD_CACHE']:
    print 'Using build cache located at', main['M5_BUILD_CACHE']
    CacheDir(main['M5_BUILD_CACHE'])

main['USE_PYTHON'] = not GetOption('without_python')
if main['USE_PYTHON']:
    # Find Python include and library directories for embedding the
    # interpreter. We rely on python-config to resolve the appropriate
    # includes and linker flags. ParseConfig does not seem to understand
    # the more exotic linker flags such as -Xlinker and -export-dynamic so
    # we add them explicitly below. If you want to link in an alternate
    # version of python, see above for instructions on how to invoke
    # scons with the appropriate PATH set.
    #
    # First we check if python2-config exists, else we use python-config
    python_config = readCommand(['which', 'python2-config'],
                                exception='').strip()
    if not os.path.exists(python_config):
        python_config = readCommand(['which', 'python-config'],
                                    exception='').strip()
    py_includes = readCommand([python_config, '--includes'],
                              exception='').split()
    # Strip the -I from the include folders before adding them to the
    # CPPPATH
    main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))

    # Read the linker flags and split them into libraries and other link
    # flags. The libraries are added later through the call the CheckLib.
    py_ld_flags = readCommand([python_config, '--ldflags'],
                              exception='').split()
    py_libs = []
    for lib in py_ld_flags:
        if not lib.startswith('-l'):
            main.Append(LINKFLAGS=[lib])
        else:
            # Collect unique -lfoo names; they are verified via CheckLib
            # below rather than passed straight to the linker.
            lib = lib[2:]
            if lib not in py_libs:
                py_libs.append(lib)

    # verify that this stuff works
    if not conf.CheckHeader('Python.h', '<>'):
        print "Error: can't find Python.h header in", py_includes
        print "Install Python headers (package python-dev on Ubuntu and RedHat)"
        Exit(1)

    for lib in py_libs:
        if not conf.CheckLib(lib):
            print "Error: can't find library %s required by python" % lib
            Exit(1)

# On Solaris you need to use libsocket for socket ops
if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
   if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
       print "Can't find library with socket calls (e.g. accept())"
       Exit(1)

# Check for zlib. If the check passes, libz will be automatically
# added to the LIBS environment variable.
if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
    print 'Error: did not find needed zlib compression library '\
          'and/or zlib.h header file.'
    print '       Please install zlib and try again.'
    Exit(1)

# If we have the protobuf compiler, also make sure we have the
# development libraries. If the check passes, libprotobuf will be
# automatically added to the LIBS environment variable. After
# this, we can use the HAVE_PROTOBUF flag to determine if we have
# got both protoc and libprotobuf available.
main['HAVE_PROTOBUF'] = main['PROTOC'] and \
    conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
                            'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')

# If we have the compiler but not the library, print another warning.
if main['PROTOC'] and not main['HAVE_PROTOBUF']:
    print termcap.Yellow + termcap.Bold + \
        'Warning: did not find protocol buffer library and/or headers.\n' + \
        '       Please install libprotobuf-dev for tracing support.' + \
        termcap.Normal

# Check for librt.  clock_nanosleep may live in libc or require -lrt,
# so try both; either success marks POSIX clocks as available.
have_posix_clock = \
    conf.CheckLibWithHeader(None, 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);') or \
    conf.CheckLibWithHeader('rt', 'time.h', 'C',
                            'clock_nanosleep(0,0,NULL,NULL);')

have_posix_timers = \
    conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
                            'timer_create(CLOCK_MONOTONIC, NULL, NULL);')

if not GetOption('without_tcmalloc'):
    if conf.CheckLib('tcmalloc'):
        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
    elif conf.CheckLib('tcmalloc_minimal'):
        main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
    else:
        print termcap.Yellow + termcap.Bold + \
              "You can get a 12% performance improvement by "\
              "installing tcmalloc (libgoogle-perftools-dev package "\
              "on Ubuntu or RedHat)." + termcap.Normal


# Detect back trace implementations. The last implementation in the
# list will be used by default.
backtrace_impls = [ "none" ]

if conf.CheckLibWithHeader(None, 'execinfo.h', 'C',
                           'backtrace_symbols_fd((void*)0, 0, 0);'):
    backtrace_impls.append("glibc")
elif conf.CheckLibWithHeader('execinfo', 'execinfo.h', 'C',
                           'backtrace_symbols_fd((void*)0, 0, 0);'):
    # NetBSD and FreeBSD need libexecinfo.
    backtrace_impls.append("glibc")
    main.Append(LIBS=['execinfo'])

if backtrace_impls[-1] == "none":
    default_backtrace_impl = "none"
    print termcap.Yellow + termcap.Bold + \
        "No suitable back trace implementation found." + \
        termcap.Normal

if not have_posix_clock:
    print "Can't find library for POSIX clocks."
1075 1076# Check for <fenv.h> (C99 FP environment control) 1077have_fenv = conf.CheckHeader('fenv.h', '<>') 1078if not have_fenv: 1079 print "Warning: Header file <fenv.h> not found." 1080 print " This host has no IEEE FP rounding mode control." 1081 1082# Check for <png.h> (libpng library needed if wanting to dump 1083# frame buffer image in png format) 1084have_png = conf.CheckHeader('png.h', '<>') 1085if not have_png: 1086 print "Warning: Header file <png.h> not found." 1087 print " This host has no libpng library." 1088 print " Disabling support for PNG framebuffers." 1089 1090# Check if we should enable KVM-based hardware virtualization. The API 1091# we rely on exists since version 2.6.36 of the kernel, but somehow 1092# the KVM_API_VERSION does not reflect the change. We test for one of 1093# the types as a fall back. 1094have_kvm = conf.CheckHeader('linux/kvm.h', '<>') 1095if not have_kvm: 1096 print "Info: Compatible header file <linux/kvm.h> not found, " \ 1097 "disabling KVM support." 1098 1099# Check if the TUN/TAP driver is available. 1100have_tuntap = conf.CheckHeader('linux/if_tun.h', '<>') 1101if not have_tuntap: 1102 print "Info: Compatible header file <linux/if_tun.h> not found." 1103 1104# x86 needs support for xsave. We test for the structure here since we 1105# won't be able to run new tests by the time we know which ISA we're 1106# targeting. 1107have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave', 1108 '#include <linux/kvm.h>') != 0 1109 1110# Check if the requested target ISA is compatible with the host 1111def is_isa_kvm_compatible(isa): 1112 try: 1113 import platform 1114 host_isa = platform.machine() 1115 except: 1116 print "Warning: Failed to determine host ISA." 
1117 return False 1118 1119 if not have_posix_timers: 1120 print "Warning: Can not enable KVM, host seems to lack support " \ 1121 "for POSIX timers" 1122 return False 1123 1124 if isa == "arm": 1125 return host_isa in ( "armv7l", "aarch64" ) 1126 elif isa == "x86": 1127 if host_isa != "x86_64": 1128 return False 1129 1130 if not have_kvm_xsave: 1131 print "KVM on x86 requires xsave support in kernel headers." 1132 return False 1133 1134 return True 1135 else: 1136 return False 1137 1138 1139# Check if the exclude_host attribute is available. We want this to 1140# get accurate instruction counts in KVM. 1141main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember( 1142 'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host') 1143 1144 1145###################################################################### 1146# 1147# Finish the configuration 1148# 1149main = conf.Finish() 1150 1151###################################################################### 1152# 1153# Collect all non-global variables 1154# 1155 1156# Define the universe of supported ISAs 1157all_isa_list = [ ] 1158all_gpu_isa_list = [ ] 1159Export('all_isa_list') 1160Export('all_gpu_isa_list') 1161 1162class CpuModel(object): 1163 '''The CpuModel class encapsulates everything the ISA parser needs to 1164 know about a particular CPU model.''' 1165 1166 # Dict of available CPU model objects. Accessible as CpuModel.dict. 1167 dict = {} 1168 1169 # Constructor. Automatically adds models to CpuModel.dict. 
1170 def __init__(self, name, default=False): 1171 self.name = name # name of model 1172 1173 # This cpu is enabled by default 1174 self.default = default 1175 1176 # Add self to dict 1177 if name in CpuModel.dict: 1178 raise AttributeError, "CpuModel '%s' already registered" % name 1179 CpuModel.dict[name] = self 1180 1181Export('CpuModel') 1182 1183# Sticky variables get saved in the variables file so they persist from 1184# one invocation to the next (unless overridden, in which case the new 1185# value becomes sticky). 1186sticky_vars = Variables(args=ARGUMENTS) 1187Export('sticky_vars') 1188 1189# Sticky variables that should be exported 1190export_vars = [] 1191Export('export_vars') 1192 1193# For Ruby 1194all_protocols = [] 1195Export('all_protocols') 1196protocol_dirs = [] 1197Export('protocol_dirs') 1198slicc_includes = [] 1199Export('slicc_includes') 1200 1201# Walk the tree and execute all SConsopts scripts that wil add to the 1202# above variables 1203if GetOption('verbose'): 1204 print "Reading SConsopts" 1205for bdir in [ base_dir ] + extras_dir_list: 1206 if not isdir(bdir): 1207 print "Error: directory '%s' does not exist" % bdir 1208 Exit(1) 1209 for root, dirs, files in os.walk(bdir): 1210 if 'SConsopts' in files: 1211 if GetOption('verbose'): 1212 print "Reading", joinpath(root, 'SConsopts') 1213 SConscript(joinpath(root, 'SConsopts')) 1214 1215all_isa_list.sort() 1216all_gpu_isa_list.sort() 1217 1218sticky_vars.AddVariables( 1219 EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list), 1220 EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list), 1221 ListVariable('CPU_MODELS', 'CPU models', 1222 sorted(n for n,m in CpuModel.dict.iteritems() if m.default), 1223 sorted(CpuModel.dict.keys())), 1224 BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger', 1225 False), 1226 BoolVariable('SS_COMPATIBLE_FP', 1227 'Make floating-point results compatible with SimpleScalar', 1228 False), 1229 BoolVariable('USE_SSE2', 
1230 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts', 1231 False), 1232 BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock), 1233 BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv), 1234 BoolVariable('USE_PNG', 'Enable support for PNG images', have_png), 1235 BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability', 1236 False), 1237 BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models', 1238 have_kvm), 1239 BoolVariable('USE_TUNTAP', 1240 'Enable using a tap device to bridge to the host network', 1241 have_tuntap), 1242 BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False), 1243 EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None', 1244 all_protocols), 1245 EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation', 1246 backtrace_impls[-1], backtrace_impls) 1247 ) 1248 1249# These variables get exported to #defines in config/*.hh (see src/SConscript). 1250export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA', 1251 'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'USE_TUNTAP', 1252 'PROTOCOL', 'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST', 1253 'USE_PNG'] 1254 1255################################################### 1256# 1257# Define a SCons builder for configuration flag headers. 1258# 1259################################################### 1260 1261# This function generates a config header file that #defines the 1262# variable symbol to the current variable setting (0 or 1). The source 1263# operands are the name of the variable and a Value node containing the 1264# value of the variable. 1265def build_config_file(target, source, env): 1266 (variable, value) = [s.get_contents() for s in source] 1267 f = file(str(target[0]), 'w') 1268 print >> f, '#define', variable, value 1269 f.close() 1270 return None 1271 1272# Combine the two functions into a scons Action object. 
config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))

# The emitter munges the source & target node lists to reflect what
# we're really doing.
def config_emitter(target, source, env):
    # extract variable name from Builder arg
    variable = str(target[0])
    # True target is config header file
    target = joinpath('config', variable.lower() + '.hh')
    val = env[variable]
    if isinstance(val, bool):
        # Force value to 0/1
        val = int(val)
    elif isinstance(val, str):
        val = '"' + val + '"'

    # Sources are variable name & value (packaged in SCons Value nodes)
    return ([target], [Value(variable), Value(val)])

config_builder = Builder(emitter = config_emitter, action = config_action)

main.Append(BUILDERS = { 'ConfigFile' : config_builder })

###################################################
#
# Builders for static and shared partially linked object files.
#
###################################################

partial_static_builder = Builder(action=SCons.Defaults.LinkAction,
                                 src_suffix='$OBJSUFFIX',
                                 src_builder=['StaticObject', 'Object'],
                                 LINKFLAGS='$PLINKFLAGS',
                                 LIBS='')

# Mark every target as shared so downstream link steps treat the
# partially-linked object correctly.
def partial_shared_emitter(target, source, env):
    for tgt in target:
        tgt.attributes.shared = 1
    return (target, source)
partial_shared_builder = Builder(action=SCons.Defaults.ShLinkAction,
                                 emitter=partial_shared_emitter,
                                 src_suffix='$SHOBJSUFFIX',
                                 src_builder='SharedObject',
                                 SHLINKFLAGS='$PSHLINKFLAGS',
                                 LIBS='')

main.Append(BUILDERS = { 'PartialShared' : partial_shared_builder,
                         'PartialStatic' : partial_static_builder })

# builds in ext are shared across all configs in the build root.
ext_dir = abspath(joinpath(str(main.root), 'ext'))
ext_build_dirs = []
for root, dirs, files in os.walk(ext_dir):
    if 'SConscript' in files:
        build_dir = os.path.relpath(root, ext_dir)
        ext_build_dirs.append(build_dir)
        main.SConscript(joinpath(root, 'SConscript'),
                        variant_dir=joinpath(build_root, build_dir))

main.Prepend(CPPPATH=Dir('ext/pybind11/include/'))

###################################################
#
# This builder and wrapper method are used to set up a directory with
# switching headers. Those are headers which are in a generic location and
# that include more specific headers from a directory chosen at build time
# based on the current build settings.
#
###################################################

# Write a one-line header that #includes the same file name from the
# build-time-selected subdirectory, relative to BUILDDIR.
def build_switching_header(target, source, env):
    path = str(target[0])
    subdir = str(source[0])
    dp, fp = os.path.split(path)
    dp = os.path.relpath(os.path.realpath(dp),
                         os.path.realpath(env['BUILDDIR']))
    with open(path, 'w') as hdr:
        # write() replaces the py2-only 'print >>hdr' statement with
        # byte-identical output.
        hdr.write('#include "%s/%s/%s"\n' % (dp, subdir, fp))

switching_header_action = MakeAction(build_switching_header,
                                     Transform('GENERATE'))

switching_header_builder = Builder(action=switching_header_action,
                                   source_factory=Value,
                                   single_source=True)

main.Append(BUILDERS = { 'SwitchingHeader': switching_header_builder })

# Convenience wrapper (installed as env.SwitchingHeaders) to generate
# several switching headers against the same source subdirectory.
def switching_headers(self, headers, source):
    for header in headers:
        self.SwitchingHeader(header, source)

main.AddMethod(switching_headers, 'SwitchingHeaders')

###################################################
#
# Define build environments for selected configurations.
#
###################################################

for variant_path in variant_paths:
    if not GetOption('silent'):
        print "Building in", variant_path

    # Make a copy of the build-root environment to use for this config.
    env = main.Clone()
    env['BUILDDIR'] = variant_path

    # variant_dir is the tail component of build path, and is used to
    # determine the build parameters (e.g., 'ALPHA_SE')
    (build_root, variant_dir) = splitpath(variant_path)

    # Set env variables according to the build directory config.
    sticky_vars.files = []
    # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
    # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
    # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
    current_vars_file = joinpath(build_root, 'variables', variant_dir)
    if isfile(current_vars_file):
        sticky_vars.files.append(current_vars_file)
        if not GetOption('silent'):
            print "Using saved variables file %s" % current_vars_file
    elif variant_dir in ext_build_dirs:
        # Things in ext are built without a variant directory.
        continue
    else:
        # Build dir-specific variables file doesn't exist.

        # Make sure the directory is there so we can create it later
        opt_dir = dirname(current_vars_file)
        if not isdir(opt_dir):
            mkdir(opt_dir)

        # Get default build variables from source tree.  Variables are
        # normally determined by name of $VARIANT_DIR, but can be
        # overridden by '--default=' arg on command line.
        default = GetOption('default')
        opts_dir = joinpath(main.root.abspath, 'build_opts')
        if default:
            default_vars_files = [joinpath(build_root, 'variables', default),
                                  joinpath(opts_dir, default)]
        else:
            default_vars_files = [joinpath(opts_dir, variant_dir)]
        # First existing candidate wins.
        existing_files = filter(isfile, default_vars_files)
        if existing_files:
            default_vars_file = existing_files[0]
            sticky_vars.files.append(default_vars_file)
            print "Variables file %s not found,\n  using defaults in %s" \
                  % (current_vars_file, default_vars_file)
        else:
            print "Error: cannot find variables file %s or " \
                  "default file(s) %s" \
                  % (current_vars_file, ' or '.join(default_vars_files))
            Exit(1)

    # Apply current variable settings to env
    sticky_vars.Update(env)

    help_texts["local_vars"] += \
        "Build variables for %s:\n" % variant_dir \
        + sticky_vars.GenerateHelpText(env)

    # Process variable settings.

    if not have_fenv and env['USE_FENV']:
        print "Warning: <fenv.h> not available; " \
              "forcing USE_FENV to False in", variant_dir + "."
        env['USE_FENV'] = False

    if not env['USE_FENV']:
        print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
        print "         FP results may deviate slightly from other platforms."

    if not have_png and env['USE_PNG']:
        print "Warning: <png.h> not available; " \
              "forcing USE_PNG to False in", variant_dir + "."
        env['USE_PNG'] = False

    if env['USE_PNG']:
        env.Append(LIBS=['png'])

    if env['EFENCE']:
        env.Append(LIBS=['efence'])

    # KVM is only kept enabled when both host support and a compatible
    # host/target ISA pair are present.
    if env['USE_KVM']:
        if not have_kvm:
            print "Warning: Can not enable KVM, host seems to lack KVM support"
            env['USE_KVM'] = False
        elif not is_isa_kvm_compatible(env['TARGET_ISA']):
            print "Info: KVM support disabled due to unsupported host and " \
                  "target ISA combination"
            env['USE_KVM'] = False

    if env['USE_TUNTAP']:
        if not have_tuntap:
            print "Warning: Can't connect EtherTap with a tap device."
            env['USE_TUNTAP'] = False

    if env['BUILD_GPU']:
        env.Append(CPPDEFINES=['BUILD_GPU'])

    # Warn about missing optional functionality
    if env['USE_KVM']:
        if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
            print "Warning: perf_event headers lack support for the " \
                  "exclude_host attribute. KVM instruction counts will " \
                  "be inaccurate."

    # Save sticky variable settings back to current variables file
    sticky_vars.Save(current_vars_file, env)

    if env['USE_SSE2']:
        env.Append(CCFLAGS=['-msse2'])

    # The src/SConscript file sets up the build rules in 'env' according
    # to the configured variables.  It returns a list of environments,
    # one for each variant build (debug, opt, etc.)
    SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')

# base help text
Help('''
Usage: scons [scons options] [build variables] [target(s)]

Extra scons options:
%(options)s

Global build variables:
%(global_vars)s

%(local_vars)s
''' % help_texts)

###################################################
#
# Figure out which configurations to set up based on the path(s) of
# the target(s).
#
###################################################

# Find default configuration & binary.
# Find default configuration & binary.
Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))

# helper function: find last occurrence of element in list
def rfind(l, elt, offs = -1):
    """Return the index of the last occurrence of elt in l.

    The scan starts at index len(l) + offs (so a negative offs lets the
    caller ignore a fixed number of trailing elements, e.g. offs=-2 skips
    the final path component) and runs backwards down to index 0.

    Raises ValueError if elt is not found in the scanned range.
    """
    # NOTE: the upper bound must be -1 (exclusive) so that index 0 is
    # inspected too; stopping at 0 would silently miss a match in the
    # first slot.
    for i in range(len(l) + offs, -1, -1):
        if l[i] == elt:
            return i
    # Use the call form of raise so the statement is valid in Python 3 as
    # well (the old "raise ValueError, msg" form is Python-2-only).
    raise ValueError("element not found")

# Take a list of paths (or SCons Nodes) and return a list with all
# paths made absolute and ~-expanded.  Paths will be interpreted
# relative to the launch directory unless a different root is provided
def makePathListAbsolute(path_list, root=GetLaunchDir()):
    # The default root is deliberately evaluated once at definition time:
    # scons' launch directory does not change during a run.
    return [abspath(joinpath(root, expanduser(str(p))))
            for p in path_list]

# Each target must have 'build' in the interior of the path; the
# directory below this will determine the build parameters.  For
# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
# recognize that ALPHA_SE specifies the configuration because it
# follow 'build' in the build path.

# The funky assignment to "[:]" is needed to replace the list contents
# in place rather than reassign the symbol to a new list, which
# doesn't work (obviously!).
BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)

# Generate a list of the unique build roots and configs that the
# collected targets reference.
236variant_paths = [] 237build_root = None 238for t in BUILD_TARGETS: 239 path_dirs = t.split('/') 240 try: 241 build_top = rfind(path_dirs, 'build', -2) 242 except: 243 print "Error: no non-leaf 'build' dir found on target path", t 244 Exit(1) 245 this_build_root = joinpath('/',*path_dirs[:build_top+1]) 246 if not build_root: 247 build_root = this_build_root 248 else: 249 if this_build_root != build_root: 250 print "Error: build targets not under same build root\n"\ 251 " %s\n %s" % (build_root, this_build_root) 252 Exit(1) 253 variant_path = joinpath('/',*path_dirs[:build_top+2]) 254 if variant_path not in variant_paths: 255 variant_paths.append(variant_path) 256 257# Make sure build_root exists (might not if this is the first build there) 258if not isdir(build_root): 259 mkdir(build_root) 260main['BUILDROOT'] = build_root 261 262Export('main') 263 264main.SConsignFile(joinpath(build_root, "sconsign")) 265 266# Default duplicate option is to use hard links, but this messes up 267# when you use emacs to edit a file in the target dir, as emacs moves 268# file to file~ then copies to file, breaking the link. Symbolic 269# (soft) links work better. 270main.SetOption('duplicate', 'soft-copy') 271 272# 273# Set up global sticky variables... 
these are common to an entire build 274# tree (not specific to a particular build like ALPHA_SE) 275# 276 277global_vars_file = joinpath(build_root, 'variables.global') 278 279global_vars = Variables(global_vars_file, args=ARGUMENTS) 280 281global_vars.AddVariables( 282 ('CC', 'C compiler', environ.get('CC', main['CC'])), 283 ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])), 284 ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')), 285 ('BATCH', 'Use batch pool for build and tests', False), 286 ('BATCH_CMD', 'Batch pool submission command name', 'qdo'), 287 ('M5_BUILD_CACHE', 'Cache built objects in this directory', False), 288 ('EXTRAS', 'Add extra directories to the compilation', '') 289 ) 290 291# Update main environment with values from ARGUMENTS & global_vars_file 292global_vars.Update(main) 293help_texts["global_vars"] += global_vars.GenerateHelpText(main) 294 295# Save sticky variable settings back to current variables file 296global_vars.Save(global_vars_file, main) 297 298# Parse EXTRAS variable to build list of all directories where we're 299# look for sources etc. This list is exported as extras_dir_list. 300base_dir = main.srcdir.abspath 301if main['EXTRAS']: 302 extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':')) 303else: 304 extras_dir_list = [] 305 306Export('base_dir') 307Export('extras_dir_list') 308 309# the ext directory should be on the #includes path 310main.Append(CPPPATH=[Dir('ext')]) 311 312# Add shared top-level headers 313main.Prepend(CPPPATH=Dir('include')) 314 315def strip_build_path(path, env): 316 path = str(path) 317 variant_base = env['BUILDROOT'] + os.path.sep 318 if path.startswith(variant_base): 319 path = path[len(variant_base):] 320 elif path.startswith('build/'): 321 path = path[6:] 322 return path 323 324# Generate a string of the form: 325# common/path/prefix/src1, src2 -> tgt1, tgt2 326# to print while building. 
class Transform(object):
    """Pretty-printer for build actions: renders
    '  [ TOOL ] common/prefix/src1, src2 -> tgt1, tgt2'."""

    # all specific color settings should be here and nowhere else
    tool_color = termcap.Normal
    pfx_color = termcap.Yellow
    srcs_color = termcap.Yellow + termcap.Bold
    arrow_color = termcap.Blue + termcap.Bold
    tgts_color = termcap.Yellow + termcap.Bold

    def __init__(self, tool, max_sources=99):
        # Pre-build the format template once; the three %s slots are the
        # common prefix, the source list, and the target list.
        self.format = self.tool_color + (" [%8s] " % tool) \
                      + self.pfx_color + "%s" \
                      + self.srcs_color + "%s" \
                      + self.arrow_color + " -> " \
                      + self.tgts_color + "%s" \
                      + termcap.Normal
        self.max_sources = max_sources

    def __call__(self, target, source, env, for_signature=None):
        # truncate source list according to max_sources param
        source = source[:self.max_sources]

        def strip(f):
            return strip_build_path(str(f), env)

        srcs = [strip(s) for s in source] if source else ['']
        tgts = [strip(t) for t in target]

        # surprisingly, os.path.commonprefix is a dumb char-by-char string
        # operation that has nothing to do with paths.
        com_pfx = os.path.commonprefix(srcs + tgts)
        com_pfx_len = len(com_pfx)
        if com_pfx:
            # do some cleanup and sanity checking on common prefix
            if com_pfx[-1] == ".":
                # prefix matches all but file extension: ok
                # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
                com_pfx = com_pfx[:-1]
            elif com_pfx[-1] == "/":
                # common prefix is directory path: OK
                pass
            else:
                src0_len = len(srcs[0])
                tgt0_len = len(tgts[0])
                if src0_len == com_pfx_len:
                    # source is a substring of target, OK
                    pass
                elif tgt0_len == com_pfx_len:
                    # target is a substring of source, need to back up to
                    # avoid empty string on RHS of arrow
                    sep_idx = com_pfx.rfind(".")
                    com_pfx = com_pfx[:sep_idx] if sep_idx != -1 else ''
                elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
                    # still splitting at file extension: ok
                    pass
                else:
                    # probably a fluke; ignore it
                    com_pfx = ''
        # recalculate length in case com_pfx was modified
        com_pfx_len = len(com_pfx)

        def fmt(files):
            # render the per-file remainders after the shared prefix
            return ', '.join(name[com_pfx_len:] for name in files)

        return self.format % (com_pfx, fmt(srcs), fmt(tgts))

Export('Transform')

# enable the regression script to use the termcap
main['TERMCAP'] = termcap

if GetOption('verbose'):
    # In verbose mode show the raw command lines: wrap Action so the
    # pretty-print string is dropped.
    def MakeAction(action, string, *args, **kwargs):
        return Action(action, *args, **kwargs)
else:
    MakeAction = Action
    main['CCCOMSTR'] = Transform("CC")
    main['CXXCOMSTR'] = Transform("CXX")
    main['ASCOMSTR'] = Transform("AS")
    main['ARCOMSTR'] = Transform("AR", 0)
    main['LINKCOMSTR'] = Transform("LINK", 0)
    main['SHLINKCOMSTR'] = Transform("SHLINK", 0)
    main['RANLIBCOMSTR'] = Transform("RANLIB", 0)
    main['M4COMSTR'] = Transform("M4")
    main['SHCCCOMSTR'] = Transform("SHCC")
    main['SHCXXCOMSTR'] = Transform("SHCXX")
Export('MakeAction')

# Initialize the Link-Time Optimization (LTO) flags
417main['LTO_CCFLAGS'] = [] 418main['LTO_LDFLAGS'] = [] 419 420# According to the readme, tcmalloc works best if the compiler doesn't 421# assume that we're using the builtin malloc and friends. These flags 422# are compiler-specific, so we need to set them after we detect which 423# compiler we're using. 424main['TCMALLOC_CCFLAGS'] = [] 425 426CXX_version = readCommand([main['CXX'],'--version'], exception=False) 427CXX_V = readCommand([main['CXX'],'-V'], exception=False) 428 429main['GCC'] = CXX_version and CXX_version.find('g++') >= 0 430main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0 431if main['GCC'] + main['CLANG'] > 1: 432 print 'Error: How can we have two at the same time?' 433 Exit(1) 434 435# Set up default C++ compiler flags 436if main['GCC'] or main['CLANG']: 437 # As gcc and clang share many flags, do the common parts here 438 main.Append(CCFLAGS=['-pipe']) 439 main.Append(CCFLAGS=['-fno-strict-aliasing']) 440 # Enable -Wall and -Wextra and then disable the few warnings that 441 # we consistently violate 442 main.Append(CCFLAGS=['-Wall', '-Wundef', '-Wextra', 443 '-Wno-sign-compare', '-Wno-unused-parameter']) 444 # We always compile using C++11 445 main.Append(CXXFLAGS=['-std=c++11']) 446 if sys.platform.startswith('freebsd'): 447 main.Append(CCFLAGS=['-I/usr/local/include']) 448 main.Append(CXXFLAGS=['-I/usr/local/include']) 449 450 main['FILTER_PSHLINKFLAGS'] = lambda x: str(x).replace(' -shared', '') 451 main['PSHLINKFLAGS'] = main.subst('${FILTER_PSHLINKFLAGS(SHLINKFLAGS)}') 452 main['PLINKFLAGS'] = main.subst('${LINKFLAGS}') 453 shared_partial_flags = ['-r', '-nostdlib'] 454 main.Append(PSHLINKFLAGS=shared_partial_flags) 455 main.Append(PLINKFLAGS=shared_partial_flags) 456else: 457 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal, 458 print "Don't know what compiler options to use for your compiler." 
459 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX'] 460 print termcap.Yellow + ' version:' + termcap.Normal, 461 if not CXX_version: 462 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\ 463 termcap.Normal 464 else: 465 print CXX_version.replace('\n', '<nl>') 466 print " If you're trying to use a compiler other than GCC" 467 print " or clang, there appears to be something wrong with your" 468 print " environment." 469 print " " 470 print " If you are trying to use a compiler other than those listed" 471 print " above you will need to ease fix SConstruct and " 472 print " src/SConscript to support that compiler." 473 Exit(1) 474 475if main['GCC']: 476 # Check for a supported version of gcc. >= 4.8 is chosen for its 477 # level of c++11 support. See 478 # http://gcc.gnu.org/projects/cxx0x.html for details. 479 gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False) 480 if compareVersions(gcc_version, "4.8") < 0: 481 print 'Error: gcc version 4.8 or newer required.' 482 print ' Installed version:', gcc_version 483 Exit(1) 484 485 main['GCC_VERSION'] = gcc_version 486 487 if compareVersions(gcc_version, '4.9') >= 0: 488 # Incremental linking with LTO is currently broken in gcc versions 489 # 4.9 and above. A version where everything works completely hasn't 490 # yet been identified. 491 # 492 # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=67548 493 main['BROKEN_INCREMENTAL_LTO'] = True 494 if compareVersions(gcc_version, '6.0') >= 0: 495 # gcc versions 6.0 and greater accept an -flinker-output flag which 496 # selects what type of output the linker should generate. This is 497 # necessary for incremental lto to work, but is also broken in 498 # current versions of gcc. It may not be necessary in future 499 # versions. We add it here since it might be, and as a reminder that 500 # it exists. It's excluded if lto is being forced. 
501 # 502 # https://gcc.gnu.org/gcc-6/changes.html 503 # https://gcc.gnu.org/ml/gcc-patches/2015-11/msg03161.html 504 # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=69866 505 if not GetOption('force_lto'): 506 main.Append(PSHLINKFLAGS='-flinker-output=rel') 507 main.Append(PLINKFLAGS='-flinker-output=rel') 508 509 # gcc from version 4.8 and above generates "rep; ret" instructions 510 # to avoid performance penalties on certain AMD chips. Older 511 # assemblers detect this as an error, "Error: expecting string 512 # instruction after `rep'" 513 as_version_raw = readCommand([main['AS'], '-v', '/dev/null', 514 '-o', '/dev/null'], 515 exception=False).split() 516 517 # version strings may contain extra distro-specific 518 # qualifiers, so play it safe and keep only what comes before 519 # the first hyphen 520 as_version = as_version_raw[-1].split('-')[0] if as_version_raw else None 521 522 if not as_version or compareVersions(as_version, "2.23") < 0: 523 print termcap.Yellow + termcap.Bold + \ 524 'Warning: This combination of gcc and binutils have' + \ 525 ' known incompatibilities.\n' + \ 526 ' If you encounter build problems, please update ' + \ 527 'binutils to 2.23.' + \ 528 termcap.Normal 529 530 # Make sure we warn if the user has requested to compile with the 531 # Undefined Benahvior Sanitizer and this version of gcc does not 532 # support it. 533 if GetOption('with_ubsan') and \ 534 compareVersions(gcc_version, '4.9') < 0: 535 print termcap.Yellow + termcap.Bold + \ 536 'Warning: UBSan is only supported using gcc 4.9 and later.' + \ 537 termcap.Normal 538 539 disable_lto = GetOption('no_lto') 540 if not disable_lto and main.get('BROKEN_INCREMENTAL_LTO', False) and \ 541 not GetOption('force_lto'): 542 print termcap.Yellow + termcap.Bold + \ 543 'Warning: Your compiler doesn\'t support incremental linking' + \ 544 ' and lto at the same time, so lto is being disabled. To force' + \ 545 ' lto on anyway, use the --force-lto option. 
That will disable' + \ 546 ' partial linking.' + \ 547 termcap.Normal 548 disable_lto = True 549 550 # Add the appropriate Link-Time Optimization (LTO) flags 551 # unless LTO is explicitly turned off. Note that these flags 552 # are only used by the fast target. 553 if not disable_lto: 554 # Pass the LTO flag when compiling to produce GIMPLE 555 # output, we merely create the flags here and only append 556 # them later 557 main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')] 558 559 # Use the same amount of jobs for LTO as we are running 560 # scons with 561 main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')] 562 563 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc', 564 '-fno-builtin-realloc', '-fno-builtin-free']) 565 566 # add option to check for undeclared overrides 567 if compareVersions(gcc_version, "5.0") > 0: 568 main.Append(CCFLAGS=['-Wno-error=suggest-override']) 569 570elif main['CLANG']: 571 # Check for a supported version of clang, >= 3.1 is needed to 572 # support similar features as gcc 4.8. See 573 # http://clang.llvm.org/cxx_status.html for details 574 clang_version_re = re.compile(".* version (\d+\.\d+)") 575 clang_version_match = clang_version_re.search(CXX_version) 576 if (clang_version_match): 577 clang_version = clang_version_match.groups()[0] 578 if compareVersions(clang_version, "3.1") < 0: 579 print 'Error: clang version 3.1 or newer required.' 580 print ' Installed version:', clang_version 581 Exit(1) 582 else: 583 print 'Error: Unable to determine clang version.' 584 Exit(1) 585 586 # clang has a few additional warnings that we disable, extraneous 587 # parantheses are allowed due to Ruby's printing of the AST, 588 # finally self assignments are allowed as the generated CPU code 589 # is relying on this 590 main.Append(CCFLAGS=['-Wno-parentheses', 591 '-Wno-self-assign', 592 # Some versions of libstdc++ (4.8?) seem to 593 # use struct hash and class hash 594 # interchangeably. 
595 '-Wno-mismatched-tags', 596 ]) 597 598 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin']) 599 600 # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as 601 # opposed to libstdc++, as the later is dated. 602 if sys.platform == "darwin": 603 main.Append(CXXFLAGS=['-stdlib=libc++']) 604 main.Append(LIBS=['c++']) 605 606 # On FreeBSD we need libthr. 607 if sys.platform.startswith('freebsd'): 608 main.Append(LIBS=['thr']) 609 610else: 611 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal, 612 print "Don't know what compiler options to use for your compiler." 613 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX'] 614 print termcap.Yellow + ' version:' + termcap.Normal, 615 if not CXX_version: 616 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\ 617 termcap.Normal 618 else: 619 print CXX_version.replace('\n', '<nl>') 620 print " If you're trying to use a compiler other than GCC" 621 print " or clang, there appears to be something wrong with your" 622 print " environment." 623 print " " 624 print " If you are trying to use a compiler other than those listed" 625 print " above you will need to ease fix SConstruct and " 626 print " src/SConscript to support that compiler." 627 Exit(1) 628 629# Set up common yacc/bison flags (needed for Ruby) 630main['YACCFLAGS'] = '-d' 631main['YACCHXXFILESUFFIX'] = '.hh' 632 633# Do this after we save setting back, or else we'll tack on an 634# extra 'qdo' every time we run scons. 635if main['BATCH']: 636 main['CC'] = main['BATCH_CMD'] + ' ' + main['CC'] 637 main['CXX'] = main['BATCH_CMD'] + ' ' + main['CXX'] 638 main['AS'] = main['BATCH_CMD'] + ' ' + main['AS'] 639 main['AR'] = main['BATCH_CMD'] + ' ' + main['AR'] 640 main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB'] 641 642if sys.platform == 'cygwin': 643 # cygwin has some header file issues... 
644 main.Append(CCFLAGS=["-Wno-uninitialized"]) 645 646# Check for the protobuf compiler 647protoc_version = readCommand([main['PROTOC'], '--version'], 648 exception='').split() 649 650# First two words should be "libprotoc x.y.z" 651if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc': 652 print termcap.Yellow + termcap.Bold + \ 653 'Warning: Protocol buffer compiler (protoc) not found.\n' + \ 654 ' Please install protobuf-compiler for tracing support.' + \ 655 termcap.Normal 656 main['PROTOC'] = False 657else: 658 # Based on the availability of the compress stream wrappers, 659 # require 2.1.0 660 min_protoc_version = '2.1.0' 661 if compareVersions(protoc_version[1], min_protoc_version) < 0: 662 print termcap.Yellow + termcap.Bold + \ 663 'Warning: protoc version', min_protoc_version, \ 664 'or newer required.\n' + \ 665 ' Installed version:', protoc_version[1], \ 666 termcap.Normal 667 main['PROTOC'] = False 668 else: 669 # Attempt to determine the appropriate include path and 670 # library path using pkg-config, that means we also need to 671 # check for pkg-config. Note that it is possible to use 672 # protobuf without the involvement of pkg-config. Later on we 673 # check go a library config check and at that point the test 674 # will fail if libprotobuf cannot be found. 675 if readCommand(['pkg-config', '--version'], exception=''): 676 try: 677 # Attempt to establish what linking flags to add for protobuf 678 # using pkg-config 679 main.ParseConfig('pkg-config --cflags --libs-only-L protobuf') 680 except: 681 print termcap.Yellow + termcap.Bold + \ 682 'Warning: pkg-config could not get protobuf flags.' + \ 683 termcap.Normal 684 685 686# Check for 'timeout' from GNU coreutils. If present, regressions will 687# be run with a time limit. We require version 8.13 since we rely on 688# support for the '--foreground' option. 
689if sys.platform.startswith('freebsd'): 690 timeout_lines = readCommand(['gtimeout', '--version'], 691 exception='').splitlines() 692else: 693 timeout_lines = readCommand(['timeout', '--version'], 694 exception='').splitlines() 695# Get the first line and tokenize it 696timeout_version = timeout_lines[0].split() if timeout_lines else [] 697main['TIMEOUT'] = timeout_version and \ 698 compareVersions(timeout_version[-1], '8.13') >= 0 699 700# Add a custom Check function to test for structure members. 701def CheckMember(context, include, decl, member, include_quotes="<>"): 702 context.Message("Checking for member %s in %s..." % 703 (member, decl)) 704 text = """ 705#include %(header)s 706int main(){ 707 %(decl)s test; 708 (void)test.%(member)s; 709 return 0; 710}; 711""" % { "header" : include_quotes[0] + include + include_quotes[1], 712 "decl" : decl, 713 "member" : member, 714 } 715 716 ret = context.TryCompile(text, extension=".cc") 717 context.Result(ret) 718 return ret 719 720# Platform-specific configuration. Note again that we assume that all 721# builds under a given build root run on the same host platform. 722conf = Configure(main, 723 conf_dir = joinpath(build_root, '.scons_config'), 724 log_file = joinpath(build_root, 'scons_config.log'), 725 custom_tests = { 726 'CheckMember' : CheckMember, 727 }) 728 729# Check if we should compile a 64 bit binary on Mac OS X/Darwin 730try: 731 import platform 732 uname = platform.uname() 733 if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0: 734 if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]): 735 main.Append(CCFLAGS=['-arch', 'x86_64']) 736 main.Append(CFLAGS=['-arch', 'x86_64']) 737 main.Append(LINKFLAGS=['-arch', 'x86_64']) 738 main.Append(ASFLAGS=['-arch', 'x86_64']) 739except: 740 pass 741 742# Recent versions of scons substitute a "Null" object for Configure() 743# when configuration isn't necessary, e.g., if the "--help" option is 744# present. 
Unfortuantely this Null object always returns false, 745# breaking all our configuration checks. We replace it with our own 746# more optimistic null object that returns True instead. 747if not conf: 748 def NullCheck(*args, **kwargs): 749 return True 750 751 class NullConf: 752 def __init__(self, env): 753 self.env = env 754 def Finish(self): 755 return self.env 756 def __getattr__(self, mname): 757 return NullCheck 758 759 conf = NullConf(main) 760 761# Cache build files in the supplied directory. 762if main['M5_BUILD_CACHE']: 763 print 'Using build cache located at', main['M5_BUILD_CACHE'] 764 CacheDir(main['M5_BUILD_CACHE']) 765 766main['USE_PYTHON'] = not GetOption('without_python') 767if main['USE_PYTHON']: 768 # Find Python include and library directories for embedding the 769 # interpreter. We rely on python-config to resolve the appropriate 770 # includes and linker flags. ParseConfig does not seem to understand 771 # the more exotic linker flags such as -Xlinker and -export-dynamic so 772 # we add them explicitly below. If you want to link in an alternate 773 # version of python, see above for instructions on how to invoke 774 # scons with the appropriate PATH set. 775 # 776 # First we check if python2-config exists, else we use python-config 777 python_config = readCommand(['which', 'python2-config'], 778 exception='').strip() 779 if not os.path.exists(python_config): 780 python_config = readCommand(['which', 'python-config'], 781 exception='').strip() 782 py_includes = readCommand([python_config, '--includes'], 783 exception='').split() 784 # Strip the -I from the include folders before adding them to the 785 # CPPPATH 786 main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes)) 787 788 # Read the linker flags and split them into libraries and other link 789 # flags. The libraries are added later through the call the CheckLib. 
790 py_ld_flags = readCommand([python_config, '--ldflags'], 791 exception='').split() 792 py_libs = [] 793 for lib in py_ld_flags: 794 if not lib.startswith('-l'): 795 main.Append(LINKFLAGS=[lib]) 796 else: 797 lib = lib[2:] 798 if lib not in py_libs: 799 py_libs.append(lib) 800 801 # verify that this stuff works 802 if not conf.CheckHeader('Python.h', '<>'): 803 print "Error: can't find Python.h header in", py_includes 804 print "Install Python headers (package python-dev on Ubuntu and RedHat)" 805 Exit(1) 806 807 for lib in py_libs: 808 if not conf.CheckLib(lib): 809 print "Error: can't find library %s required by python" % lib 810 Exit(1) 811 812# On Solaris you need to use libsocket for socket ops 813if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'): 814 if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'): 815 print "Can't find library with socket calls (e.g. accept())" 816 Exit(1) 817 818# Check for zlib. If the check passes, libz will be automatically 819# added to the LIBS environment variable. 820if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'): 821 print 'Error: did not find needed zlib compression library '\ 822 'and/or zlib.h header file.' 823 print ' Please install zlib and try again.' 824 Exit(1) 825 826# If we have the protobuf compiler, also make sure we have the 827# development libraries. If the check passes, libprotobuf will be 828# automatically added to the LIBS environment variable. After 829# this, we can use the HAVE_PROTOBUF flag to determine if we have 830# got both protoc and libprotobuf available. 831main['HAVE_PROTOBUF'] = main['PROTOC'] and \ 832 conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h', 833 'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;') 834 835# If we have the compiler but not the library, print another warning. 
836if main['PROTOC'] and not main['HAVE_PROTOBUF']: 837 print termcap.Yellow + termcap.Bold + \ 838 'Warning: did not find protocol buffer library and/or headers.\n' + \ 839 ' Please install libprotobuf-dev for tracing support.' + \ 840 termcap.Normal 841 842# Check for librt. 843have_posix_clock = \ 844 conf.CheckLibWithHeader(None, 'time.h', 'C', 845 'clock_nanosleep(0,0,NULL,NULL);') or \ 846 conf.CheckLibWithHeader('rt', 'time.h', 'C', 847 'clock_nanosleep(0,0,NULL,NULL);') 848 849have_posix_timers = \ 850 conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C', 851 'timer_create(CLOCK_MONOTONIC, NULL, NULL);') 852 853if not GetOption('without_tcmalloc'): 854 if conf.CheckLib('tcmalloc'): 855 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS']) 856 elif conf.CheckLib('tcmalloc_minimal'): 857 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS']) 858 else: 859 print termcap.Yellow + termcap.Bold + \ 860 "You can get a 12% performance improvement by "\ 861 "installing tcmalloc (libgoogle-perftools-dev package "\ 862 "on Ubuntu or RedHat)." + termcap.Normal 863 864 865# Detect back trace implementations. The last implementation in the 866# list will be used by default. 867backtrace_impls = [ "none" ] 868 869if conf.CheckLibWithHeader(None, 'execinfo.h', 'C', 870 'backtrace_symbols_fd((void*)0, 0, 0);'): 871 backtrace_impls.append("glibc") 872elif conf.CheckLibWithHeader('execinfo', 'execinfo.h', 'C', 873 'backtrace_symbols_fd((void*)0, 0, 0);'): 874 # NetBSD and FreeBSD need libexecinfo. 875 backtrace_impls.append("glibc") 876 main.Append(LIBS=['execinfo']) 877 878if backtrace_impls[-1] == "none": 879 default_backtrace_impl = "none" 880 print termcap.Yellow + termcap.Bold + \ 881 "No suitable back trace implementation found." + \ 882 termcap.Normal 883 884if not have_posix_clock: 885 print "Can't find library for POSIX clocks." 
886 887# Check for <fenv.h> (C99 FP environment control) 888have_fenv = conf.CheckHeader('fenv.h', '<>') 889if not have_fenv: 890 print "Warning: Header file <fenv.h> not found." 891 print " This host has no IEEE FP rounding mode control." 892 893# Check for <png.h> (libpng library needed if wanting to dump 894# frame buffer image in png format) 895have_png = conf.CheckHeader('png.h', '<>') 896if not have_png: 897 print "Warning: Header file <png.h> not found." 898 print " This host has no libpng library." 899 print " Disabling support for PNG framebuffers." 900 901# Check if we should enable KVM-based hardware virtualization. The API 902# we rely on exists since version 2.6.36 of the kernel, but somehow 903# the KVM_API_VERSION does not reflect the change. We test for one of 904# the types as a fall back. 905have_kvm = conf.CheckHeader('linux/kvm.h', '<>') 906if not have_kvm: 907 print "Info: Compatible header file <linux/kvm.h> not found, " \ 908 "disabling KVM support." 909 910# Check if the TUN/TAP driver is available. 911have_tuntap = conf.CheckHeader('linux/if_tun.h', '<>') 912if not have_tuntap: 913 print "Info: Compatible header file <linux/if_tun.h> not found." 914 915# x86 needs support for xsave. We test for the structure here since we 916# won't be able to run new tests by the time we know which ISA we're 917# targeting. 918have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave', 919 '#include <linux/kvm.h>') != 0 920 921# Check if the requested target ISA is compatible with the host 922def is_isa_kvm_compatible(isa): 923 try: 924 import platform 925 host_isa = platform.machine() 926 except: 927 print "Warning: Failed to determine host ISA." 
928 return False 929 930 if not have_posix_timers: 931 print "Warning: Can not enable KVM, host seems to lack support " \ 932 "for POSIX timers" 933 return False 934 935 if isa == "arm": 936 return host_isa in ( "armv7l", "aarch64" ) 937 elif isa == "x86": 938 if host_isa != "x86_64": 939 return False 940 941 if not have_kvm_xsave: 942 print "KVM on x86 requires xsave support in kernel headers." 943 return False 944 945 return True 946 else: 947 return False 948 949 950# Check if the exclude_host attribute is available. We want this to 951# get accurate instruction counts in KVM. 952main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember( 953 'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host') 954 955 956###################################################################### 957# 958# Finish the configuration 959# 960main = conf.Finish() 961 962###################################################################### 963# 964# Collect all non-global variables 965# 966 967# Define the universe of supported ISAs 968all_isa_list = [ ] 969all_gpu_isa_list = [ ] 970Export('all_isa_list') 971Export('all_gpu_isa_list') 972 973class CpuModel(object): 974 '''The CpuModel class encapsulates everything the ISA parser needs to 975 know about a particular CPU model.''' 976 977 # Dict of available CPU model objects. Accessible as CpuModel.dict. 978 dict = {} 979 980 # Constructor. Automatically adds models to CpuModel.dict. 981 def __init__(self, name, default=False): 982 self.name = name # name of model 983 984 # This cpu is enabled by default 985 self.default = default 986 987 # Add self to dict 988 if name in CpuModel.dict: 989 raise AttributeError, "CpuModel '%s' already registered" % name 990 CpuModel.dict[name] = self 991 992Export('CpuModel') 993 994# Sticky variables get saved in the variables file so they persist from 995# one invocation to the next (unless overridden, in which case the new 996# value becomes sticky). 
sticky_vars = Variables(args=ARGUMENTS)
Export('sticky_vars')

# Sticky variables that should be exported (turned into config/*.hh
# #defines by src/SConscript).
export_vars = []
Export('export_vars')

# For Ruby
all_protocols = []
Export('all_protocols')
protocol_dirs = []
Export('protocol_dirs')
slicc_includes = []
Export('slicc_includes')

# Walk the tree and execute all SConsopts scripts that will add to the
# above variables
if GetOption('verbose'):
    print "Reading SConsopts"
for bdir in [ base_dir ] + extras_dir_list:
    if not isdir(bdir):
        print "Error: directory '%s' does not exist" % bdir
        Exit(1)
    for root, dirs, files in os.walk(bdir):
        if 'SConsopts' in files:
            if GetOption('verbose'):
                print "Reading", joinpath(root, 'SConsopts')
            SConscript(joinpath(root, 'SConsopts'))

# Sort for stable, human-friendly choice lists in the help text.
all_isa_list.sort()
all_gpu_isa_list.sort()

# Declare the sticky (per-build-dir persistent) variables.  Defaults for
# the USE_* flags come from the feature probes run earlier in this file.
sticky_vars.AddVariables(
    EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
    EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list),
    ListVariable('CPU_MODELS', 'CPU models',
                 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
                 sorted(CpuModel.dict.keys())),
    BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
                 False),
    BoolVariable('SS_COMPATIBLE_FP',
                 'Make floating-point results compatible with SimpleScalar',
                 False),
    BoolVariable('USE_SSE2',
                 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
                 False),
    BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
    BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
    BoolVariable('USE_PNG', 'Enable support for PNG images', have_png),
    BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability',
                 False),
    BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models',
                 have_kvm),
    BoolVariable('USE_TUNTAP',
                 'Enable using a tap device to bridge to the host network',
                 have_tuntap),
    BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
    EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
                  all_protocols),
    EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
                 backtrace_impls[-1], backtrace_impls)
    )

# These variables get exported to #defines in config/*.hh (see src/SConscript).
export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
                'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'USE_TUNTAP',
                'PROTOCOL', 'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST',
                'USE_PNG']

###################################################
#
# Define a SCons builder for configuration flag headers.
#
###################################################

# This function generates a config header file that #defines the
# variable symbol to the current variable setting (0 or 1). The source
# operands are the name of the variable and a Value node containing the
# value of the variable.
def build_config_file(target, source, env):
    '''Builder action: write "#define <VAR> <value>" to the target header.

    target -- single-element list; the config header to create.
    source -- two SCons Value nodes: variable name and its value.
    Returns None so SCons treats the build as successful.
    '''
    (variable, value) = [s.get_contents() for s in source]
    f = file(str(target[0]), 'w')   # file() is the Python 2 open() builtin
    print >> f, '#define', variable, value
    f.close()
    return None

# Combine the two functions into a scons Action object.
config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))

# The emitter munges the source & target node lists to reflect what
# we're really doing.
def config_emitter(target, source, env):
    '''Builder emitter: map a variable name to its real target/sources.

    The caller names the target after the build variable (e.g. USE_KVM);
    the true target is config/<var>.hh and the sources are the variable
    name and its current value, wrapped in SCons Value nodes so a value
    change triggers a rebuild.
    '''
    # extract variable name from Builder arg
    variable = str(target[0])
    # True target is config header file
    target = joinpath('config', variable.lower() + '.hh')
    val = env[variable]
    if isinstance(val, bool):
        # Force value to 0/1
        val = int(val)
    elif isinstance(val, str):
        # Quote string values so they form a valid C string literal.
        val = '"' + val + '"'

    # Sources are variable name & value (packaged in SCons Value nodes)
    return ([target], [Value(variable), Value(val)])

config_builder = Builder(emitter = config_emitter, action = config_action)

main.Append(BUILDERS = { 'ConfigFile' : config_builder })

###################################################
#
# Builders for static and shared partially linked object files.
#
###################################################

partial_static_builder = Builder(action=SCons.Defaults.LinkAction,
                                 src_suffix='$OBJSUFFIX',
                                 src_builder=['StaticObject', 'Object'],
                                 LINKFLAGS='$PLINKFLAGS',
                                 LIBS='')

def partial_shared_emitter(target, source, env):
    '''Mark partially-linked shared targets with the 'shared' attribute
    so downstream linking treats them as shared objects.'''
    for tgt in target:
        tgt.attributes.shared = 1
    return (target, source)
partial_shared_builder = Builder(action=SCons.Defaults.ShLinkAction,
                                 emitter=partial_shared_emitter,
                                 src_suffix='$SHOBJSUFFIX',
                                 src_builder='SharedObject',
                                 SHLINKFLAGS='$PSHLINKFLAGS',
                                 LIBS='')

main.Append(BUILDERS = { 'PartialShared' : partial_shared_builder,
                         'PartialStatic' : partial_static_builder })

# builds in ext are shared across all configs in the build root.
# Run every ext/**/SConscript once, building into $BUILD_ROOT/<subdir>;
# remember those subdirs so the per-variant loop below can skip them.
ext_dir = abspath(joinpath(str(main.root), 'ext'))
ext_build_dirs = []
for root, dirs, files in os.walk(ext_dir):
    if 'SConscript' in files:
        build_dir = os.path.relpath(root, ext_dir)
        ext_build_dirs.append(build_dir)
        main.SConscript(joinpath(root, 'SConscript'),
                        variant_dir=joinpath(build_root, build_dir))

main.Prepend(CPPPATH=Dir('ext/pybind11/include/'))

###################################################
#
# This builder and wrapper method are used to set up a directory with
# switching headers. Those are headers which are in a generic location and
# that include more specific headers from a directory chosen at build time
# based on the current build settings.
#
###################################################

def build_switching_header(target, source, env):
    '''Builder action: write a one-line header that #includes the
    build-time-selected variant of itself.

    target -- the generic header to generate.
    source -- a Value node holding the subdirectory to redirect into.
    The #include path is expressed relative to $BUILDDIR so it resolves
    through the -I flags pointing at the build directory.
    '''
    path = str(target[0])
    subdir = str(source[0])
    dp, fp = os.path.split(path)
    dp = os.path.relpath(os.path.realpath(dp),
                         os.path.realpath(env['BUILDDIR']))
    with open(path, 'w') as hdr:
        print >>hdr, '#include "%s/%s/%s"' % (dp, subdir, fp)

switching_header_action = MakeAction(build_switching_header,
                                     Transform('GENERATE'))

switching_header_builder = Builder(action=switching_header_action,
                                   source_factory=Value,
                                   single_source=True)

main.Append(BUILDERS = { 'SwitchingHeader': switching_header_builder })

def switching_headers(self, headers, source):
    '''Environment method (SwitchingHeaders): generate a switching
    header for each name in 'headers', all redirecting into 'source'.'''
    for header in headers:
        self.SwitchingHeader(header, source)

main.AddMethod(switching_headers, 'SwitchingHeaders')

###################################################
#
# Define build environments for selected configurations.
1181# 1182################################################### 1183 1184for variant_path in variant_paths: 1185 if not GetOption('silent'): 1186 print "Building in", variant_path 1187 1188 # Make a copy of the build-root environment to use for this config. 1189 env = main.Clone() 1190 env['BUILDDIR'] = variant_path 1191 1192 # variant_dir is the tail component of build path, and is used to 1193 # determine the build parameters (e.g., 'ALPHA_SE') 1194 (build_root, variant_dir) = splitpath(variant_path) 1195 1196 # Set env variables according to the build directory config. 1197 sticky_vars.files = [] 1198 # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in 1199 # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke 1200 # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings. 1201 current_vars_file = joinpath(build_root, 'variables', variant_dir) 1202 if isfile(current_vars_file): 1203 sticky_vars.files.append(current_vars_file) 1204 if not GetOption('silent'): 1205 print "Using saved variables file %s" % current_vars_file 1206 elif variant_dir in ext_build_dirs: 1207 # Things in ext are built without a variant directory. 1208 continue 1209 else: 1210 # Build dir-specific variables file doesn't exist. 1211 1212 # Make sure the directory is there so we can create it later 1213 opt_dir = dirname(current_vars_file) 1214 if not isdir(opt_dir): 1215 mkdir(opt_dir) 1216 1217 # Get default build variables from source tree. Variables are 1218 # normally determined by name of $VARIANT_DIR, but can be 1219 # overridden by '--default=' arg on command line. 
1220 default = GetOption('default') 1221 opts_dir = joinpath(main.root.abspath, 'build_opts') 1222 if default: 1223 default_vars_files = [joinpath(build_root, 'variables', default), 1224 joinpath(opts_dir, default)] 1225 else: 1226 default_vars_files = [joinpath(opts_dir, variant_dir)] 1227 existing_files = filter(isfile, default_vars_files) 1228 if existing_files: 1229 default_vars_file = existing_files[0] 1230 sticky_vars.files.append(default_vars_file) 1231 print "Variables file %s not found,\n using defaults in %s" \ 1232 % (current_vars_file, default_vars_file) 1233 else: 1234 print "Error: cannot find variables file %s or " \ 1235 "default file(s) %s" \ 1236 % (current_vars_file, ' or '.join(default_vars_files)) 1237 Exit(1) 1238 1239 # Apply current variable settings to env 1240 sticky_vars.Update(env) 1241 1242 help_texts["local_vars"] += \ 1243 "Build variables for %s:\n" % variant_dir \ 1244 + sticky_vars.GenerateHelpText(env) 1245 1246 # Process variable settings. 1247 1248 if not have_fenv and env['USE_FENV']: 1249 print "Warning: <fenv.h> not available; " \ 1250 "forcing USE_FENV to False in", variant_dir + "." 1251 env['USE_FENV'] = False 1252 1253 if not env['USE_FENV']: 1254 print "Warning: No IEEE FP rounding mode control in", variant_dir + "." 1255 print " FP results may deviate slightly from other platforms." 1256 1257 if not have_png and env['USE_PNG']: 1258 print "Warning: <png.h> not available; " \ 1259 "forcing USE_PNG to False in", variant_dir + "." 
1260 env['USE_PNG'] = False 1261 1262 if env['USE_PNG']: 1263 env.Append(LIBS=['png']) 1264 1265 if env['EFENCE']: 1266 env.Append(LIBS=['efence']) 1267 1268 if env['USE_KVM']: 1269 if not have_kvm: 1270 print "Warning: Can not enable KVM, host seems to lack KVM support" 1271 env['USE_KVM'] = False 1272 elif not is_isa_kvm_compatible(env['TARGET_ISA']): 1273 print "Info: KVM support disabled due to unsupported host and " \ 1274 "target ISA combination" 1275 env['USE_KVM'] = False 1276 1277 if env['USE_TUNTAP']: 1278 if not have_tuntap: 1279 print "Warning: Can't connect EtherTap with a tap device." 1280 env['USE_TUNTAP'] = False 1281 1282 if env['BUILD_GPU']: 1283 env.Append(CPPDEFINES=['BUILD_GPU']) 1284 1285 # Warn about missing optional functionality 1286 if env['USE_KVM']: 1287 if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']: 1288 print "Warning: perf_event headers lack support for the " \ 1289 "exclude_host attribute. KVM instruction counts will " \ 1290 "be inaccurate." 1291 1292 # Save sticky variable settings back to current variables file 1293 sticky_vars.Save(current_vars_file, env) 1294 1295 if env['USE_SSE2']: 1296 env.Append(CCFLAGS=['-msse2']) 1297 1298 # The src/SConscript file sets up the build rules in 'env' according 1299 # to the configured variables. It returns a list of environments, 1300 # one for each variant build (debug, opt, etc.) 1301 SConscript('src/SConscript', variant_dir = variant_path, exports = 'env') 1302 1303# base help text 1304Help(''' 1305Usage: scons [scons options] [build variables] [target(s)] 1306 1307Extra scons options: 1308%(options)s 1309 1310Global build variables: 1311%(global_vars)s 1312 1313%(local_vars)s 1314''' % help_texts) |