SConstruct (12061:0225580779db to 12063:06cd2c297b04)
1# -*- mode:python -*-
2
3# Copyright (c) 2013, 2015, 2016 ARM Limited
4# All rights reserved.
5#
6# The license below extends only to copyright in the software and shall
7# not be construed as granting a license to any other intellectual
8# property including but not limited to intellectual property relating
9# to a hardware implementation of the functionality of the software
10# licensed hereunder. You may use the software subject to the license
11# terms below provided that you ensure that this notice is replicated
12# unmodified and in its entirety in all distributions of the software,
13# modified or unmodified, in source code or in binary form.
14#
15# Copyright (c) 2011 Advanced Micro Devices, Inc.
16# Copyright (c) 2009 The Hewlett-Packard Development Company
17# Copyright (c) 2004-2005 The Regents of The University of Michigan
18# All rights reserved.
19#
20# Redistribution and use in source and binary forms, with or without
21# modification, are permitted provided that the following conditions are
22# met: redistributions of source code must retain the above copyright
23# notice, this list of conditions and the following disclaimer;
24# redistributions in binary form must reproduce the above copyright
25# notice, this list of conditions and the following disclaimer in the
26# documentation and/or other materials provided with the distribution;
27# neither the name of the copyright holders nor the names of its
28# contributors may be used to endorse or promote products derived from
29# this software without specific prior written permission.
30#
31# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
32# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
33# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
34# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
35# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
37# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
38# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
39# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
40# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
41# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
42#
43# Authors: Steve Reinhardt
44# Nathan Binkert
45
46###################################################
47#
48# SCons top-level build description (SConstruct) file.
49#
50# While in this directory ('gem5'), just type 'scons' to build the default
51# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
52# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
53# the optimized full-system version).
54#
55# You can build gem5 in a different directory as long as there is a
56# 'build/<CONFIG>' somewhere along the target path. The build system
57# expects that all configs under the same build directory are being
58# built for the same host system.
59#
60# Examples:
61#
62# The following two commands are equivalent. The '-u' option tells
63# scons to search up the directory tree for this SConstruct file.
64# % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
65# % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
66#
67# The following two commands are equivalent and demonstrate building
68# in a directory outside of the source tree. The '-C' option tells
69# scons to chdir to the specified directory to find this SConstruct
70# file.
71# % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
72# % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
73#
74# You can use 'scons -H' to print scons options. If you're in this
75# 'gem5' directory (or use -u or -C to tell scons where to find this
76# file), you can use 'scons -h' to print all the gem5-specific build
77# options as well.
78#
79###################################################
80
81# Check for recent-enough Python and SCons versions.
82try:
83 # Really old versions of scons only take two options for the
84 # function, so check once without the revision and once with the
85 # revision, the first instance will fail for stuff other than
86 # 0.98, and the second will fail for 0.98.0
87 EnsureSConsVersion(0, 98)
88 EnsureSConsVersion(0, 98, 1)
89except SystemExit, e:
90 print """
91For more details, see:
92 http://gem5.org/Dependencies
93"""
94 raise
95
96# We ensure the python version early because python-config
97# requires python 2.5
98try:
99 EnsurePythonVersion(2, 5)
100except SystemExit, e:
101 print """
102You can use a non-default installation of the Python interpreter by
103rearranging your PATH so that scons finds the non-default 'python' and
104'python-config' first.
105
106For more details, see:
107 http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
108"""
109 raise
110
111# Global Python includes
112import itertools
113import os
114import re
115import shutil
116import subprocess
117import sys
118
119from os import mkdir, environ
120from os.path import abspath, basename, dirname, expanduser, normpath
121from os.path import exists, isdir, isfile
122from os.path import join as joinpath, split as splitpath
123
124# SCons includes
125import SCons
126import SCons.Node
127
128extra_python_paths = [
129 Dir('src/python').srcnode().abspath, # gem5 includes
130 Dir('ext/ply').srcnode().abspath, # ply is used by several files
131 ]
132
133sys.path[1:1] = extra_python_paths
134
135from m5.util import compareVersions, readCommand
136from m5.util.terminal import get_termcap
137
138help_texts = {
139 "options" : "",
140 "global_vars" : "",
141 "local_vars" : ""
142}
143
144Export("help_texts")
145
146
147# There's a bug in scons in that (1) by default, the help texts from
148# AddOption() are supposed to be displayed when you type 'scons -h'
149# and (2) you can override the help displayed by 'scons -h' using the
150# Help() function, but these two features are incompatible: once
151# you've overridden the help text using Help(), there's no way to get
152# at the help texts from AddOptions. See:
153# http://scons.tigris.org/issues/show_bug.cgi?id=2356
154# http://scons.tigris.org/issues/show_bug.cgi?id=2611
155# This hack lets us extract the help text from AddOptions and
156# re-inject it via Help(). Ideally someday this bug will be fixed and
157# we can just use AddOption directly.
158def AddLocalOption(*args, **kwargs):
159 col_width = 30
160
161 help = " " + ", ".join(args)
162 if "help" in kwargs:
163 length = len(help)
164 if length >= col_width:
165 help += "\n" + " " * col_width
166 else:
167 help += " " * (col_width - length)
168 help += kwargs["help"]
169 help_texts["options"] += help + "\n"
170
171 AddOption(*args, **kwargs)
172
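# As an illustration (the '--my-flag' option is hypothetical, not one of
# gem5's options): each call below both registers the option with SCons and
# appends its help line to help_texts["options"], which is re-injected via
# Help() as described above. A new option would follow the same pattern:
#
#   AddLocalOption('--my-flag', dest='my_flag', action='store_true',
#                  help='Describe the new flag here')
#
# and its value would later be read back with GetOption('my_flag').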
173AddLocalOption('--colors', dest='use_colors', action='store_true',
174 help="Add color to abbreviated scons output")
175AddLocalOption('--no-colors', dest='use_colors', action='store_false',
176 help="Don't add color to abbreviated scons output")
177AddLocalOption('--with-cxx-config', dest='with_cxx_config',
178 action='store_true',
179 help="Build with support for C++-based configuration")
180AddLocalOption('--default', dest='default', type='string', action='store',
181 help='Override which build_opts file to use for defaults')
182AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
183 help='Disable style checking hooks')
184AddLocalOption('--no-lto', dest='no_lto', action='store_true',
185 help='Disable Link-Time Optimization for the fast target')
186AddLocalOption('--force-lto', dest='force_lto', action='store_true',
187 help='Use Link-Time Optimization instead of partial linking' +
188 ' when the compiler doesn\'t support using them together.')
189AddLocalOption('--update-ref', dest='update_ref', action='store_true',
190 help='Update test reference outputs')
191AddLocalOption('--verbose', dest='verbose', action='store_true',
192 help='Print full tool command lines')
193AddLocalOption('--without-python', dest='without_python',
194 action='store_true',
195 help='Build without Python configuration support')
196AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
197 action='store_true',
198 help='Disable linking against tcmalloc')
199AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
200 help='Build with Undefined Behavior Sanitizer if available')
201AddLocalOption('--with-asan', dest='with_asan', action='store_true',
202 help='Build with Address Sanitizer if available')
203
204if GetOption('no_lto') and GetOption('force_lto'):
205 print '--no-lto and --force-lto are mutually exclusive'
206 Exit(1)
207
208termcap = get_termcap(GetOption('use_colors'))
209
210########################################################################
211#
212# Set up the main build environment.
213#
214########################################################################
215
216# export TERM so that clang reports errors in color
217use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
218 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
219 'PYTHONPATH', 'RANLIB', 'TERM' ])
220
221use_prefixes = [
222 "ASAN_", # address sanitizer symbolizer path and settings
223 "CCACHE_", # ccache (caching compiler wrapper) configuration
224 "CCC_", # clang static analyzer configuration
225 "DISTCC_", # distcc (distributed compiler wrapper) configuration
226 "INCLUDE_SERVER_", # distcc pump server settings
227 "M5", # M5 configuration (e.g., path to kernels)
228 ]
229
230use_env = {}
231for key,val in sorted(os.environ.iteritems()):
232 if key in use_vars or \
233 any([key.startswith(prefix) for prefix in use_prefixes]):
234 use_env[key] = val
235
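# For example (illustrative values): with the filter above, CCACHE_DIR and
# DISTCC_HOSTS are forwarded because of their prefixes, CC and TERM because
# they are listed in use_vars, while an unrelated variable such as EDITOR is
# left out of use_env entirely.
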
236# Tell scons to avoid implicit command dependencies to work around issues
237# with the param wrappers being compiled twice (see
238# http://scons.tigris.org/issues/show_bug.cgi?id=2811)
239main = Environment(ENV=use_env, IMPLICIT_COMMAND_DEPENDENCIES=0)
240main.Decider('MD5-timestamp')
241main.root = Dir(".") # The current directory (where this file lives).
242main.srcdir = Dir("src") # The source directory
243
244main_dict_keys = main.Dictionary().keys()
245
246# Check that we have a C/C++ compiler
247if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
248 print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
249 Exit(1)
250
251# add useful python code to PYTHONPATH so it can be used by subprocesses
252# as well
253main.AppendENVPath('PYTHONPATH', extra_python_paths)
254
255########################################################################
256#
257# Mercurial Stuff.
258#
259# If the gem5 directory is a mercurial repository, we should do some
260# extra things.
261#
262########################################################################
263
264hgdir = main.root.Dir(".hg")
265
266
267style_message = """
268You're missing the gem5 style hook, which automatically checks your code
269against the gem5 style rules on %s.
270This script will now install the hook in your %s.
271Press enter to continue, or ctrl-c to abort: """
272
273mercurial_style_message = """
274You're missing the gem5 style hook, which automatically checks your code
275against the gem5 style rules on hg commit and qrefresh commands.
276This script will now install the hook in your .hg/hgrc file.
277Press enter to continue, or ctrl-c to abort: """
278
279git_style_message = """
280You're missing the gem5 style or commit message hook. These hooks help
281to ensure that your code follows gem5's style rules on git commit.
282This script will now install the hook in your .git/hooks/ directory.
283Press enter to continue, or ctrl-c to abort: """
284
285mercurial_style_upgrade_message = """
286Your Mercurial style hooks are not up-to-date. This script will now
287try to automatically update them. A backup of your hgrc will be saved
288in .hg/hgrc.old.
289Press enter to continue, or ctrl-c to abort: """
290
291mercurial_style_hook = """
292# The following lines were automatically added by gem5/SConstruct
293# to provide the gem5 style-checking hooks
294[extensions]
295hgstyle = %s/util/hgstyle.py
296
297[hooks]
298pretxncommit.style = python:hgstyle.check_style
299pre-qrefresh.style = python:hgstyle.check_style
300# End of SConstruct additions
301
302""" % (main.root.abspath)
303
304mercurial_lib_not_found = """
305Mercurial libraries cannot be found, ignoring style hook. If
306you are a gem5 developer, please fix this and run the style
307hook. It is important.
308"""
309
310# Check for style hook and prompt for installation if it's not there.
311# Skip this if --ignore-style was specified, there's no interactive
312# terminal to prompt, or no recognized revision control system can be
313# found.
314ignore_style = GetOption('ignore_style') or not sys.stdin.isatty()
315
316# Try to wire up Mercurial to the style hooks
317if not ignore_style and hgdir.exists():
318 style_hook = True
319 style_hooks = tuple()
320 hgrc = hgdir.File('hgrc')
321 hgrc_old = hgdir.File('hgrc.old')
322 try:
323 from mercurial import ui
324 ui = ui.ui()
325 ui.readconfig(hgrc.abspath)
326 style_hooks = (ui.config('hooks', 'pretxncommit.style', None),
327 ui.config('hooks', 'pre-qrefresh.style', None))
328 style_hook = all(style_hooks)
329 style_extension = ui.config('extensions', 'style', None)
330 except ImportError:
331 print mercurial_lib_not_found
332
333 if "python:style.check_style" in style_hooks:
334 # Try to upgrade the style hooks
335 print mercurial_style_upgrade_message
336 # continue unless user does ctrl-c/ctrl-d etc.
337 try:
338 raw_input()
339 except:
340 print "Input exception, exiting scons.\n"
341 sys.exit(1)
342 shutil.copyfile(hgrc.abspath, hgrc_old.abspath)
343 re_style_hook = re.compile(r"^([^=#]+)\.style\s*=\s*([^#\s]+).*")
344 re_style_extension = re.compile("style\s*=\s*([^#\s]+).*")
345 old, new = open(hgrc_old.abspath, 'r'), open(hgrc.abspath, 'w')
346 for l in old:
347 m_hook = re_style_hook.match(l)
348 m_ext = re_style_extension.match(l)
349 if m_hook:
350 hook, check = m_hook.groups()
351 if check != "python:style.check_style":
352 print "Warning: %s.style is using a non-default " \
353 "checker: %s" % (hook, check)
354 if hook not in ("pretxncommit", "pre-qrefresh"):
355 print "Warning: Updating unknown style hook: %s" % hook
356
357 l = "%s.style = python:hgstyle.check_style\n" % hook
358 elif m_ext and m_ext.group(1) == style_extension:
359 l = "hgstyle = %s/util/hgstyle.py\n" % main.root.abspath
360
361 new.write(l)
362 elif not style_hook:
363 print mercurial_style_message,
364 # continue unless user does ctrl-c/ctrl-d etc.
365 try:
366 raw_input()
367 except:
368 print "Input exception, exiting scons.\n"
369 sys.exit(1)
370 hgrc_path = '%s/.hg/hgrc' % main.root.abspath
371 print "Adding style hook to", hgrc_path, "\n"
372 try:
373 with open(hgrc_path, 'a') as f:
374 f.write(mercurial_style_hook)
375 except:
376 print "Error updating", hgrc_path
377 sys.exit(1)
378
379def install_git_style_hooks():
380 try:
381 gitdir = Dir(readCommand(
382 ["git", "rev-parse", "--git-dir"]).strip("\n"))
383 except Exception, e:
384 print "Warning: Failed to find git repo directory: %s" % e
385 return
386
387 git_hooks = gitdir.Dir("hooks")
388 def hook_exists(hook_name):
389 hook = git_hooks.File(hook_name)
390 return hook.exists()
391
392 def hook_install(hook_name, script):
393 hook = git_hooks.File(hook_name)
394 if hook.exists():
395 print "Warning: Can't install %s, hook already exists." % hook_name
396 return
397
398 if hook.islink():
399 print "Warning: Removing broken symlink for hook %s." % hook_name
400 os.unlink(hook.get_abspath())
401
402 if not git_hooks.exists():
403 mkdir(git_hooks.get_abspath())
404 git_hooks.clear()
405
406 abs_symlink_hooks = git_hooks.islink() and \
407 os.path.isabs(os.readlink(git_hooks.get_abspath()))
408
409 # Use a relative symlink if the hooks live in the source directory,
410 # and the hooks directory is not a symlink to an absolute path.
411 if hook.is_under(main.root) and not abs_symlink_hooks:
412 script_path = os.path.relpath(
413 os.path.realpath(script.get_abspath()),
414 os.path.realpath(hook.Dir(".").get_abspath()))
415 else:
416 script_path = script.get_abspath()
417
418 try:
419 os.symlink(script_path, hook.get_abspath())
420 except:
421 print "Error updating git %s hook" % hook_name
422 raise
423
424 if hook_exists("pre-commit") and hook_exists("commit-msg"):
425 return
426
427 print git_style_message,
428 try:
429 raw_input()
430 except:
431 print "Input exception, exiting scons.\n"
432 sys.exit(1)
433
434 git_style_script = File("util/git-pre-commit.py")
435 git_msg_script = File("ext/git-commit-msg")
436
437 hook_install("pre-commit", git_style_script)
438 hook_install("commit-msg", git_msg_script)
439
440# Try to wire up git to the style hooks
441if not ignore_style and main.root.Entry(".git").exists():
442 install_git_style_hooks()
443
444###################################################
445#
446# Figure out which configurations to set up based on the path(s) of
447# the target(s).
448#
449###################################################
450
451# Find default configuration & binary.
452Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))
453
454# helper function: find last occurrence of element in list
455def rfind(l, elt, offs = -1):
456 for i in range(len(l)+offs, 0, -1):
457 if l[i] == elt:
458 return i
459 raise ValueError, "element not found"
460
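# Worked example (target taken from the header comment): splitting
# '/local/foo/build/ALPHA/gem5.debug' on '/' gives
# ['', 'local', 'foo', 'build', 'ALPHA', 'gem5.debug'], and calling
# rfind(<that list>, 'build', -2) returns 3, the index of the last
# 'build' component that is not the leaf of the path.
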
461# Take a list of paths (or SCons Nodes) and return a list with all
462# paths made absolute and ~-expanded. Paths will be interpreted
463# relative to the launch directory unless a different root is provided
464def makePathListAbsolute(path_list, root=GetLaunchDir()):
465 return [abspath(joinpath(root, expanduser(str(p))))
466 for p in path_list]
467
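# For instance (illustrative paths): if scons was launched from
# /local/foo/build/ALPHA, then makePathListAbsolute(['gem5.debug', '~/extras'])
# yields ['/local/foo/build/ALPHA/gem5.debug', '<home>/extras'], since '~' is
# expanded and relative paths are joined onto the launch directory.
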
468# Each target must have 'build' in the interior of the path; the
469# directory below this will determine the build parameters. For
470# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
471# recognize that ALPHA_SE specifies the configuration because it
472# follows 'build' in the build path.
473
474# The funky assignment to "[:]" is needed to replace the list contents
475# in place rather than reassign the symbol to a new list, which
476# doesn't work (obviously!).
477BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)
478
479# Generate a list of the unique build roots and configs that the
480# collected targets reference.
481variant_paths = []
482build_root = None
483for t in BUILD_TARGETS:
484 path_dirs = t.split('/')
485 try:
486 build_top = rfind(path_dirs, 'build', -2)
487 except:
488 print "Error: no non-leaf 'build' dir found on target path", t
489 Exit(1)
490 this_build_root = joinpath('/',*path_dirs[:build_top+1])
491 if not build_root:
492 build_root = this_build_root
493 else:
494 if this_build_root != build_root:
495 print "Error: build targets not under same build root\n"\
496 " %s\n %s" % (build_root, this_build_root)
497 Exit(1)
498 variant_path = joinpath('/',*path_dirs[:build_top+2])
499 if variant_path not in variant_paths:
500 variant_paths.append(variant_path)
501
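# Worked example (paths taken from the header comment): for the target
# /local/foo/build/ALPHA/gem5.debug, 'build' is found as described above, so
# build_root becomes '/local/foo/build' and variant_path becomes
# '/local/foo/build/ALPHA'; the directory name ALPHA then selects which
# configuration gets built.
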
502# Make sure build_root exists (might not if this is the first build there)
503if not isdir(build_root):
504 mkdir(build_root)
505main['BUILDROOT'] = build_root
506
507Export('main')
508
509main.SConsignFile(joinpath(build_root, "sconsign"))
510
511# Default duplicate option is to use hard links, but this messes up
512# when you use emacs to edit a file in the target dir, as emacs moves
513# file to file~ then copies to file, breaking the link. Symbolic
514# (soft) links work better.
515main.SetOption('duplicate', 'soft-copy')
516
517#
518# Set up global sticky variables... these are common to an entire build
519# tree (not specific to a particular build like ALPHA_SE)
520#
521
522global_vars_file = joinpath(build_root, 'variables.global')
523
524global_vars = Variables(global_vars_file, args=ARGUMENTS)
525
526global_vars.AddVariables(
527 ('CC', 'C compiler', environ.get('CC', main['CC'])),
528 ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
529 ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
530 ('BATCH', 'Use batch pool for build and tests', False),
531 ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
532 ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
533 ('EXTRAS', 'Add extra directories to the compilation', '')
534 )
535
536# Update main environment with values from ARGUMENTS & global_vars_file
537global_vars.Update(main)
538help_texts["global_vars"] += global_vars.GenerateHelpText(main)
539
540# Save sticky variable settings back to current variables file
541global_vars.Save(global_vars_file, main)
542
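# For example (compiler names are illustrative): a run such as
#   scons CC=gcc-6 CXX=g++-6 build/ALPHA/gem5.opt
# records CC and CXX in <build_root>/variables.global, so subsequent builds
# in the same build tree pick up the same compilers without repeating the
# settings on the command line.
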
543# Parse EXTRAS variable to build a list of all directories where we'll
544# look for sources etc. This list is exported as extras_dir_list.
545base_dir = main.srcdir.abspath
546if main['EXTRAS']:
547 extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
548else:
549 extras_dir_list = []
550
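# For example (illustrative paths): setting EXTRAS=/work/myext1:/work/myext2
# on the scons command line makes extras_dir_list equal to
# ['/work/myext1', '/work/myext2']; the value is colon-separated and each
# entry is made absolute as described above.
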
551Export('base_dir')
552Export('extras_dir_list')
553
554# the ext directory should be on the #includes path
555main.Append(CPPPATH=[Dir('ext')])
556
557def strip_build_path(path, env):
558 path = str(path)
559 variant_base = env['BUILDROOT'] + os.path.sep
560 if path.startswith(variant_base):
561 path = path[len(variant_base):]
562 elif path.startswith('build/'):
563 path = path[6:]
564 return path
565
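# For instance (illustrative object path): with BUILDROOT set to
# '/local/foo/build', strip_build_path('/local/foo/build/ALPHA/base/trace.o',
# env) returns 'ALPHA/base/trace.o', which keeps the build output below short.
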
566# Generate a string of the form:
567# common/path/prefix/src1, src2 -> tgt1, tgt2
568# to print while building.
569class Transform(object):
570 # all specific color settings should be here and nowhere else
571 tool_color = termcap.Normal
572 pfx_color = termcap.Yellow
573 srcs_color = termcap.Yellow + termcap.Bold
574 arrow_color = termcap.Blue + termcap.Bold
575 tgts_color = termcap.Yellow + termcap.Bold
576
577 def __init__(self, tool, max_sources=99):
578 self.format = self.tool_color + (" [%8s] " % tool) \
579 + self.pfx_color + "%s" \
580 + self.srcs_color + "%s" \
581 + self.arrow_color + " -> " \
582 + self.tgts_color + "%s" \
583 + termcap.Normal
584 self.max_sources = max_sources
585
586 def __call__(self, target, source, env, for_signature=None):
587 # truncate source list according to max_sources param
588 source = source[0:self.max_sources]
589 def strip(f):
590 return strip_build_path(str(f), env)
591 if len(source) > 0:
592 srcs = map(strip, source)
593 else:
594 srcs = ['']
595 tgts = map(strip, target)
596 # surprisingly, os.path.commonprefix is a dumb char-by-char string
597 # operation that has nothing to do with paths.
598 com_pfx = os.path.commonprefix(srcs + tgts)
599 com_pfx_len = len(com_pfx)
600 if com_pfx:
601 # do some cleanup and sanity checking on common prefix
602 if com_pfx[-1] == ".":
603 # prefix matches all but file extension: ok
604 # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
605 com_pfx = com_pfx[0:-1]
606 elif com_pfx[-1] == "/":
607 # common prefix is directory path: OK
608 pass
609 else:
610 src0_len = len(srcs[0])
611 tgt0_len = len(tgts[0])
612 if src0_len == com_pfx_len:
613 # source is a substring of target, OK
614 pass
615 elif tgt0_len == com_pfx_len:
616 # target is a substring of source, need to back up to
617 # avoid empty string on RHS of arrow
618 sep_idx = com_pfx.rfind(".")
619 if sep_idx != -1:
620 com_pfx = com_pfx[0:sep_idx]
621 else:
622 com_pfx = ''
623 elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
624 # still splitting at file extension: ok
625 pass
626 else:
627 # probably a fluke; ignore it
628 com_pfx = ''
629 # recalculate length in case com_pfx was modified
630 com_pfx_len = len(com_pfx)
631 def fmt(files):
632 f = map(lambda s: s[com_pfx_len:], files)
633 return ', '.join(f)
634 return self.format % (com_pfx, fmt(srcs), fmt(tgts))
635
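# Illustrative output (colors omitted, file names hypothetical): compiling
# ALPHA/base/trace.cc into ALPHA/base/trace.o through Transform("CXX") prints
# a line roughly like
#  [     CXX] ALPHA/base/trace.cc -> .o
# where the common prefix appears once and only the differing source and
# target suffixes are shown around the arrow.
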
636Export('Transform')
637
638# enable the regression script to use the termcap
639main['TERMCAP'] = termcap
640
641if GetOption('verbose'):
642 def MakeAction(action, string, *args, **kwargs):
643 return Action(action, *args, **kwargs)
644else:
645 MakeAction = Action
646 main['CCCOMSTR'] = Transform("CC")
647 main['CXXCOMSTR'] = Transform("CXX")
648 main['ASCOMSTR'] = Transform("AS")
649 main['ARCOMSTR'] = Transform("AR", 0)
650 main['LINKCOMSTR'] = Transform("LINK", 0)
651 main['SHLINKCOMSTR'] = Transform("SHLINK", 0)
652 main['RANLIBCOMSTR'] = Transform("RANLIB", 0)
653 main['M4COMSTR'] = Transform("M4")
654 main['SHCCCOMSTR'] = Transform("SHCC")
655 main['SHCXXCOMSTR'] = Transform("SHCXX")
656Export('MakeAction')
657
658# Initialize the Link-Time Optimization (LTO) flags
659main['LTO_CCFLAGS'] = []
660main['LTO_LDFLAGS'] = []
661
662# According to the readme, tcmalloc works best if the compiler doesn't
663# assume that we're using the builtin malloc and friends. These flags
664# are compiler-specific, so we need to set them after we detect which
665# compiler we're using.
666main['TCMALLOC_CCFLAGS'] = []
667
668CXX_version = readCommand([main['CXX'],'--version'], exception=False)
669CXX_V = readCommand([main['CXX'],'-V'], exception=False)
670
671main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
672main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
673if main['GCC'] + main['CLANG'] > 1:
674 print 'Error: How can we have two at the same time?'
675 Exit(1)
676
677# Set up default C++ compiler flags
678if main['GCC'] or main['CLANG']:
679 # As gcc and clang share many flags, do the common parts here
680 main.Append(CCFLAGS=['-pipe'])
681 main.Append(CCFLAGS=['-fno-strict-aliasing'])
682 # Enable -Wall and -Wextra and then disable the few warnings that
683 # we consistently violate
684 main.Append(CCFLAGS=['-Wall', '-Wundef', '-Wextra',
685 '-Wno-sign-compare', '-Wno-unused-parameter'])
686 # We always compile using C++11
687 main.Append(CXXFLAGS=['-std=c++11'])
688 if sys.platform.startswith('freebsd'):
689 main.Append(CCFLAGS=['-I/usr/local/include'])
690 main.Append(CXXFLAGS=['-I/usr/local/include'])
691
692 main['FILTER_PSHLINKFLAGS'] = lambda x: str(x).replace(' -shared', '')
693 main['PSHLINKFLAGS'] = main.subst('${FILTER_PSHLINKFLAGS(SHLINKFLAGS)}')
694 main['PLINKFLAGS'] = main.subst('${LINKFLAGS}')
695 shared_partial_flags = ['-r', '-nostdlib']
696 main.Append(PSHLINKFLAGS=shared_partial_flags)
697 main.Append(PLINKFLAGS=shared_partial_flags)
698else:
699 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
700 print "Don't know what compiler options to use for your compiler."
701 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
702 print termcap.Yellow + ' version:' + termcap.Normal,
703 if not CXX_version:
704 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
705 termcap.Normal
706 else:
707 print CXX_version.replace('\n', '<nl>')
708 print " If you're trying to use a compiler other than GCC"
709 print " or clang, there appears to be something wrong with your"
710 print " environment."
711 print " "
712 print " If you are trying to use a compiler other than those listed"
713 print " above you will need to ease fix SConstruct and "
714 print " src/SConscript to support that compiler."
715 Exit(1)
716
717if main['GCC']:
718 # Check for a supported version of gcc. >= 4.8 is chosen for its
719 # level of c++11 support. See
720 # http://gcc.gnu.org/projects/cxx0x.html for details.
721 gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
722 if compareVersions(gcc_version, "4.8") < 0:
723 print 'Error: gcc version 4.8 or newer required.'
724 print ' Installed version:', gcc_version
725 Exit(1)
726
727 main['GCC_VERSION'] = gcc_version
728
729 if compareVersions(gcc_version, '4.9') >= 0:
730 # Incremental linking with LTO is currently broken in gcc versions
731 # 4.9 and above. A version where everything works completely hasn't
732 # yet been identified.
733 #
734 # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=67548
735 main['BROKEN_INCREMENTAL_LTO'] = True
736 if compareVersions(gcc_version, '6.0') >= 0:
737 # gcc versions 6.0 and greater accept an -flinker-output flag which
738 # selects what type of output the linker should generate. This is
739 # necessary for incremental lto to work, but is also broken in
740 # current versions of gcc. It may not be necessary in future
741 # versions. We add it here since it might be needed, and as a reminder that
742 # it exists. It's excluded if lto is being forced.
743 #
744 # https://gcc.gnu.org/gcc-6/changes.html
745 # https://gcc.gnu.org/ml/gcc-patches/2015-11/msg03161.html
746 # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=69866
747 if not GetOption('force_lto'):
748 main.Append(PSHLINKFLAGS='-flinker-output=rel')
749 main.Append(PLINKFLAGS='-flinker-output=rel')
750
751 # gcc from version 4.8 and above generates "rep; ret" instructions
752 # to avoid performance penalties on certain AMD chips. Older
753 # assemblers detect this as an error, "Error: expecting string
754 # instruction after `rep'"
755 as_version_raw = readCommand([main['AS'], '-v', '/dev/null',
756 '-o', '/dev/null'],
757 exception=False).split()
758
759 # version strings may contain extra distro-specific
760 # qualifiers, so play it safe and keep only what comes before
761 # the first hyphen
762 as_version = as_version_raw[-1].split('-')[0] if as_version_raw else None
763
764 if not as_version or compareVersions(as_version, "2.23") < 0:
765 print termcap.Yellow + termcap.Bold + \
766             'Warning: This combination of gcc and binutils has' + \
767 ' known incompatibilities.\n' + \
768 ' If you encounter build problems, please update ' + \
769 'binutils to 2.23.' + \
770 termcap.Normal
771
772 # Make sure we warn if the user has requested to compile with the
773     # Undefined Behavior Sanitizer and this version of gcc does not
774 # support it.
775 if GetOption('with_ubsan') and \
776 compareVersions(gcc_version, '4.9') < 0:
777 print termcap.Yellow + termcap.Bold + \
778 'Warning: UBSan is only supported using gcc 4.9 and later.' + \
779 termcap.Normal
780
781 disable_lto = GetOption('no_lto')
782 if not disable_lto and main.get('BROKEN_INCREMENTAL_LTO', False) and \
783 not GetOption('force_lto'):
784 print termcap.Yellow + termcap.Bold + \
785 'Warning: Your compiler doesn\'t support incremental linking' + \
786 ' and lto at the same time, so lto is being disabled. To force' + \
787 ' lto on anyway, use the --force-lto option. That will disable' + \
788 ' partial linking.' + \
789 termcap.Normal
790 disable_lto = True
791
792 # Add the appropriate Link-Time Optimization (LTO) flags
793 # unless LTO is explicitly turned off. Note that these flags
794 # are only used by the fast target.
795 if not disable_lto:
756 # Pass the LTO flag when compiling to produce GIMPLE
757 # output, we merely create the flags here and only append
758 # them later
759 main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
760
761 # Use the same amount of jobs for LTO as we are running
762 # scons with
763 main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
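        # For example (the target name is only illustrative): running
        #   scons -j 8 build/ARM/gem5.fast
        # makes both of these ['-flto=8'], so the link-time optimizer uses
        # the same degree of parallelism as the rest of the build.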
764
765 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
766 '-fno-builtin-realloc', '-fno-builtin-free'])
767
768 # add option to check for undeclared overrides
769 if compareVersions(gcc_version, "5.0") > 0:
770 main.Append(CCFLAGS=['-Wno-error=suggest-override'])
771
772elif main['CLANG']:
773 # Check for a supported version of clang, >= 3.1 is needed to
774 # support similar features as gcc 4.8. See
775 # http://clang.llvm.org/cxx_status.html for details
776 clang_version_re = re.compile(".* version (\d+\.\d+)")
777 clang_version_match = clang_version_re.search(CXX_version)
778 if (clang_version_match):
779 clang_version = clang_version_match.groups()[0]
780 if compareVersions(clang_version, "3.1") < 0:
781 print 'Error: clang version 3.1 or newer required.'
782 print ' Installed version:', clang_version
783 Exit(1)
784 else:
785 print 'Error: Unable to determine clang version.'
786 Exit(1)
787
788     # clang has a few additional warnings that we disable: extraneous
789     # parentheses are allowed due to Ruby's printing of the AST, and
790     # self assignments are allowed as the generated CPU code relies
791     # on this.
792 main.Append(CCFLAGS=['-Wno-parentheses',
793 '-Wno-self-assign',
794 # Some versions of libstdc++ (4.8?) seem to
795 # use struct hash and class hash
796 # interchangeably.
797 '-Wno-mismatched-tags',
798 ])
799
800 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])
801
802     # On Mac OS X/Darwin we also need to use libc++ (part of Xcode)
803     # instead of libstdc++, as the latter is dated.
804 if sys.platform == "darwin":
805 main.Append(CXXFLAGS=['-stdlib=libc++'])
806 main.Append(LIBS=['c++'])
807
808 # On FreeBSD we need libthr.
809 if sys.platform.startswith('freebsd'):
810 main.Append(LIBS=['thr'])
811
812else:
813 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
814 print "Don't know what compiler options to use for your compiler."
815 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
816 print termcap.Yellow + ' version:' + termcap.Normal,
817 if not CXX_version:
818 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
819 termcap.Normal
820 else:
821 print CXX_version.replace('\n', '<nl>')
822 print " If you're trying to use a compiler other than GCC"
823 print " or clang, there appears to be something wrong with your"
824 print " environment."
825 print " "
826 print " If you are trying to use a compiler other than those listed"
827 print " above you will need to fix SConstruct and "
828 print " src/SConscript to support that compiler."
829 Exit(1)
830
831# Set up common yacc/bison flags (needed for Ruby)
832main['YACCFLAGS'] = '-d'
833main['YACCHXXFILESUFFIX'] = '.hh'
834
835# Do this after we save settings back, or else we'll tack on an
836# extra 'qdo' every time we run scons.
837if main['BATCH']:
838 main['CC'] = main['BATCH_CMD'] + ' ' + main['CC']
839 main['CXX'] = main['BATCH_CMD'] + ' ' + main['CXX']
840 main['AS'] = main['BATCH_CMD'] + ' ' + main['AS']
841 main['AR'] = main['BATCH_CMD'] + ' ' + main['AR']
842 main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']
843
844if sys.platform == 'cygwin':
845 # cygwin has some header file issues...
846 main.Append(CCFLAGS=["-Wno-uninitialized"])
847
848# Check for the protobuf compiler
849protoc_version = readCommand([main['PROTOC'], '--version'],
850 exception='').split()
851
852# First two words should be "libprotoc x.y.z"
853if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
854 print termcap.Yellow + termcap.Bold + \
855 'Warning: Protocol buffer compiler (protoc) not found.\n' + \
856 ' Please install protobuf-compiler for tracing support.' + \
857 termcap.Normal
858 main['PROTOC'] = False
859else:
860 # Based on the availability of the compress stream wrappers,
861 # require 2.1.0
862 min_protoc_version = '2.1.0'
863 if compareVersions(protoc_version[1], min_protoc_version) < 0:
864 print termcap.Yellow + termcap.Bold + \
865 'Warning: protoc version', min_protoc_version, \
866 'or newer required.\n' + \
867 ' Installed version:', protoc_version[1], \
868 termcap.Normal
869 main['PROTOC'] = False
870 else:
871 # Attempt to determine the appropriate include path and
872 # library path using pkg-config, that means we also need to
873 # check for pkg-config. Note that it is possible to use
874 # protobuf without the involvement of pkg-config. Later on we
875         # do a library config check and at that point the test
876 # will fail if libprotobuf cannot be found.
877 if readCommand(['pkg-config', '--version'], exception=''):
878 try:
879 # Attempt to establish what linking flags to add for protobuf
880 # using pkg-config
881 main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
882 except:
883 print termcap.Yellow + termcap.Bold + \
884 'Warning: pkg-config could not get protobuf flags.' + \
885 termcap.Normal
886
887
888# Check for 'timeout' from GNU coreutils. If present, regressions will
889# be run with a time limit. We require version 8.13 since we rely on
890# support for the '--foreground' option.
891if sys.platform.startswith('freebsd'):
892 timeout_lines = readCommand(['gtimeout', '--version'],
893 exception='').splitlines()
894else:
895 timeout_lines = readCommand(['timeout', '--version'],
896 exception='').splitlines()
897# Get the first line and tokenize it
898timeout_version = timeout_lines[0].split() if timeout_lines else []
899main['TIMEOUT'] = timeout_version and \
900 compareVersions(timeout_version[-1], '8.13') >= 0
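# Sketch of the parsing above: 'timeout --version' usually reports a first
# line such as "timeout (GNU coreutils) 8.28", so timeout_version[-1] would
# be '8.28' and TIMEOUT ends up True. The exact banner wording can differ
# between distributions, hence the defensive handling of an empty result.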
901
902# Add a custom Check function to test for structure members.
903def CheckMember(context, include, decl, member, include_quotes="<>"):
904 context.Message("Checking for member %s in %s..." %
905 (member, decl))
906 text = """
907#include %(header)s
908int main(){
909 %(decl)s test;
910 (void)test.%(member)s;
911 return 0;
912};
913""" % { "header" : include_quotes[0] + include + include_quotes[1],
914 "decl" : decl,
915 "member" : member,
916 }
917
918 ret = context.TryCompile(text, extension=".cc")
919 context.Result(ret)
920 return ret
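# Once registered as a custom test with Configure() below, this check can be
# invoked as, for example:
#   conf.CheckMember('linux/perf_event.h', 'struct perf_event_attr',
#                    'exclude_host')
# which is how HAVE_PERF_ATTR_EXCLUDE_HOST is determined further down.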
921
922# Platform-specific configuration. Note again that we assume that all
923# builds under a given build root run on the same host platform.
924conf = Configure(main,
925 conf_dir = joinpath(build_root, '.scons_config'),
926 log_file = joinpath(build_root, 'scons_config.log'),
927 custom_tests = {
928 'CheckMember' : CheckMember,
929 })
930
931# Check if we should compile a 64 bit binary on Mac OS X/Darwin
932try:
933 import platform
934 uname = platform.uname()
935 if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
936 if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
937 main.Append(CCFLAGS=['-arch', 'x86_64'])
938 main.Append(CFLAGS=['-arch', 'x86_64'])
939 main.Append(LINKFLAGS=['-arch', 'x86_64'])
940 main.Append(ASFLAGS=['-arch', 'x86_64'])
941except:
942 pass
943
944# Recent versions of scons substitute a "Null" object for Configure()
945# when configuration isn't necessary, e.g., if the "--help" option is
946 # present. Unfortunately this Null object always returns false,
947# breaking all our configuration checks. We replace it with our own
948# more optimistic null object that returns True instead.
949if not conf:
950 def NullCheck(*args, **kwargs):
951 return True
952
953 class NullConf:
954 def __init__(self, env):
955 self.env = env
956 def Finish(self):
957 return self.env
958 def __getattr__(self, mname):
959 return NullCheck
960
961 conf = NullConf(main)
962
963# Cache build files in the supplied directory.
964if main['M5_BUILD_CACHE']:
965 print 'Using build cache located at', main['M5_BUILD_CACHE']
966 CacheDir(main['M5_BUILD_CACHE'])
967
968main['USE_PYTHON'] = not GetOption('without_python')
969if main['USE_PYTHON']:
970 # Find Python include and library directories for embedding the
971 # interpreter. We rely on python-config to resolve the appropriate
972 # includes and linker flags. ParseConfig does not seem to understand
973 # the more exotic linker flags such as -Xlinker and -export-dynamic so
974 # we add them explicitly below. If you want to link in an alternate
975 # version of python, see above for instructions on how to invoke
976 # scons with the appropriate PATH set.
977 #
978 # First we check if python2-config exists, else we use python-config
979 python_config = readCommand(['which', 'python2-config'],
980 exception='').strip()
981 if not os.path.exists(python_config):
982 python_config = readCommand(['which', 'python-config'],
983 exception='').strip()
984 py_includes = readCommand([python_config, '--includes'],
985 exception='').split()
986 # Strip the -I from the include folders before adding them to the
987 # CPPPATH
988 main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))
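    # For example (paths depend on the host Python install): python-config
    # --includes typically prints flags such as '-I/usr/include/python2.7',
    # and the map above strips the leading '-I' so only the bare directory
    # is added to CPPPATH.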
989
990 # Read the linker flags and split them into libraries and other link
991     # flags. The libraries are added later through the call to CheckLib.
992 py_ld_flags = readCommand([python_config, '--ldflags'],
993 exception='').split()
994 py_libs = []
995 for lib in py_ld_flags:
996 if not lib.startswith('-l'):
997 main.Append(LINKFLAGS=[lib])
998 else:
999 lib = lib[2:]
1000 if lib not in py_libs:
1001 py_libs.append(lib)
1002
1003 # verify that this stuff works
1004 if not conf.CheckHeader('Python.h', '<>'):
1005 print "Error: can't find Python.h header in", py_includes
1006 print "Install Python headers (package python-dev on Ubuntu and RedHat)"
1007 Exit(1)
1008
1009 for lib in py_libs:
1010 if not conf.CheckLib(lib):
1011 print "Error: can't find library %s required by python" % lib
1012 Exit(1)
1013
1014# On Solaris you need to use libsocket for socket ops
1015if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
1016 if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
1017 print "Can't find library with socket calls (e.g. accept())"
1018 Exit(1)
1019
1020# Check for zlib. If the check passes, libz will be automatically
1021# added to the LIBS environment variable.
1022if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
1023 print 'Error: did not find needed zlib compression library '\
1024 'and/or zlib.h header file.'
1025 print ' Please install zlib and try again.'
1026 Exit(1)
1027
1028# If we have the protobuf compiler, also make sure we have the
1029# development libraries. If the check passes, libprotobuf will be
1030# automatically added to the LIBS environment variable. After
1031# this, we can use the HAVE_PROTOBUF flag to determine if we have
1032# got both protoc and libprotobuf available.
1033main['HAVE_PROTOBUF'] = main['PROTOC'] and \
1034 conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
1035 'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')
1036
1037# If we have the compiler but not the library, print another warning.
1038if main['PROTOC'] and not main['HAVE_PROTOBUF']:
1039 print termcap.Yellow + termcap.Bold + \
1040 'Warning: did not find protocol buffer library and/or headers.\n' + \
1041 ' Please install libprotobuf-dev for tracing support.' + \
1042 termcap.Normal
1043
1044# Check for librt.
1045have_posix_clock = \
1046 conf.CheckLibWithHeader(None, 'time.h', 'C',
1047 'clock_nanosleep(0,0,NULL,NULL);') or \
1048 conf.CheckLibWithHeader('rt', 'time.h', 'C',
1049 'clock_nanosleep(0,0,NULL,NULL);')
1050
1051have_posix_timers = \
1052 conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
1053 'timer_create(CLOCK_MONOTONIC, NULL, NULL);')
1054
1055if not GetOption('without_tcmalloc'):
1056 if conf.CheckLib('tcmalloc'):
1057 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1058 elif conf.CheckLib('tcmalloc_minimal'):
1059 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1060 else:
1061 print termcap.Yellow + termcap.Bold + \
1062 "You can get a 12% performance improvement by "\
1063 "installing tcmalloc (libgoogle-perftools-dev package "\
1064 "on Ubuntu or RedHat)." + termcap.Normal
1065
1066
1067# Detect back trace implementations. The last implementation in the
1068# list will be used by default.
1069backtrace_impls = [ "none" ]
1070
1071if conf.CheckLibWithHeader(None, 'execinfo.h', 'C',
1072 'backtrace_symbols_fd((void*)0, 0, 0);'):
1073 backtrace_impls.append("glibc")
1074elif conf.CheckLibWithHeader('execinfo', 'execinfo.h', 'C',
1075 'backtrace_symbols_fd((void*)0, 0, 0);'):
1076 # NetBSD and FreeBSD need libexecinfo.
1077 backtrace_impls.append("glibc")
1078 main.Append(LIBS=['execinfo'])
1079
1080if backtrace_impls[-1] == "none":
1081 default_backtrace_impl = "none"
1082 print termcap.Yellow + termcap.Bold + \
1083 "No suitable back trace implementation found." + \
1084 termcap.Normal
1085
1086if not have_posix_clock:
1087 print "Can't find library for POSIX clocks."
1088
1089# Check for <fenv.h> (C99 FP environment control)
1090have_fenv = conf.CheckHeader('fenv.h', '<>')
1091if not have_fenv:
1092 print "Warning: Header file <fenv.h> not found."
1093 print " This host has no IEEE FP rounding mode control."
1094
1095# Check if we should enable KVM-based hardware virtualization. The API
1096# we rely on exists since version 2.6.36 of the kernel, but somehow
1097# the KVM_API_VERSION does not reflect the change. We test for one of
1098# the types as a fall back.
1099have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
1100if not have_kvm:
1101 print "Info: Compatible header file <linux/kvm.h> not found, " \
1102 "disabling KVM support."
1103
1104# Check if the TUN/TAP driver is available.
1105have_tuntap = conf.CheckHeader('linux/if_tun.h', '<>')
1106if not have_tuntap:
1107 print "Info: Compatible header file <linux/if_tun.h> not found."
1108
1109# x86 needs support for xsave. We test for the structure here since we
1110# won't be able to run new tests by the time we know which ISA we're
1111# targeting.
1112have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
1113 '#include <linux/kvm.h>') != 0
1114
1115# Check if the requested target ISA is compatible with the host
1116def is_isa_kvm_compatible(isa):
1117 try:
1118 import platform
1119 host_isa = platform.machine()
1120 except:
1121 print "Warning: Failed to determine host ISA."
1122 return False
1123
1124 if not have_posix_timers:
1125         print "Warning: Cannot enable KVM, host seems to lack support " \
1126 "for POSIX timers"
1127 return False
1128
1129 if isa == "arm":
1130 return host_isa in ( "armv7l", "aarch64" )
1131 elif isa == "x86":
1132 if host_isa != "x86_64":
1133 return False
1134
1135 if not have_kvm_xsave:
1136 print "KVM on x86 requires xsave support in kernel headers."
1137 return False
1138
1139 return True
1140 else:
1141 return False
1142
1143
1144# Check if the exclude_host attribute is available. We want this to
1145# get accurate instruction counts in KVM.
1146main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
1147 'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')
1148
1149
1150######################################################################
1151#
1152# Finish the configuration
1153#
1154main = conf.Finish()
1155
1156######################################################################
1157#
1158# Collect all non-global variables
1159#
1160
1161# Define the universe of supported ISAs
1162all_isa_list = [ ]
1163all_gpu_isa_list = [ ]
1164Export('all_isa_list')
1165Export('all_gpu_isa_list')
1166
1167class CpuModel(object):
1168 '''The CpuModel class encapsulates everything the ISA parser needs to
1169 know about a particular CPU model.'''
1170
1171 # Dict of available CPU model objects. Accessible as CpuModel.dict.
1172 dict = {}
1173
1174 # Constructor. Automatically adds models to CpuModel.dict.
1175 def __init__(self, name, default=False):
1176 self.name = name # name of model
1177
1178 # This cpu is enabled by default
1179 self.default = default
1180
1181 # Add self to dict
1182 if name in CpuModel.dict:
1183 raise AttributeError, "CpuModel '%s' already registered" % name
1184 CpuModel.dict[name] = self
1185
1186Export('CpuModel')
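# Illustrative only: CPU models register themselves from SConsopts files in
# the source tree with calls along the lines of
#   CpuModel('AtomicSimpleCPU', default=True)
# so that the CPU_MODELS sticky variable below can offer them as choices.
# The specific model name here is just an example.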
1187
1188# Sticky variables get saved in the variables file so they persist from
1189# one invocation to the next (unless overridden, in which case the new
1190# value becomes sticky).
1191sticky_vars = Variables(args=ARGUMENTS)
1192Export('sticky_vars')
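# Hypothetical example of how a sticky variable gets set: running
#   scons build/ARM/gem5.opt USE_KVM=False
# picks USE_KVM=False up from ARGUMENTS and, once saved below via
# sticky_vars.Save(), later plain 'scons' runs for the same build directory
# reuse that value.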
1193
1194# Sticky variables that should be exported
1195export_vars = []
1196Export('export_vars')
1197
1198# For Ruby
1199all_protocols = []
1200Export('all_protocols')
1201protocol_dirs = []
1202Export('protocol_dirs')
1203slicc_includes = []
1204Export('slicc_includes')
1205
1206# Walk the tree and execute all SConsopts scripts that will add to the
1207# above variables
1208if GetOption('verbose'):
1209 print "Reading SConsopts"
1210for bdir in [ base_dir ] + extras_dir_list:
1211 if not isdir(bdir):
1212 print "Error: directory '%s' does not exist" % bdir
1213 Exit(1)
1214 for root, dirs, files in os.walk(bdir):
1215 if 'SConsopts' in files:
1216 if GetOption('verbose'):
1217 print "Reading", joinpath(root, 'SConsopts')
1218 SConscript(joinpath(root, 'SConsopts'))
1219
1220all_isa_list.sort()
1221all_gpu_isa_list.sort()
1222
1223sticky_vars.AddVariables(
1224 EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
1225 EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list),
1226 ListVariable('CPU_MODELS', 'CPU models',
1227 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
1228 sorted(CpuModel.dict.keys())),
1229 BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
1230 False),
1231 BoolVariable('SS_COMPATIBLE_FP',
1232 'Make floating-point results compatible with SimpleScalar',
1233 False),
1234 BoolVariable('USE_SSE2',
1235 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
1236 False),
1237 BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
1238 BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
1239 BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability', False),
1240 BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models', have_kvm),
1241 BoolVariable('USE_TUNTAP',
1242 'Enable using a tap device to bridge to the host network',
1243 have_tuntap),
1244 BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
1245 EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
1246 all_protocols),
1247 EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
1248 backtrace_impls[-1], backtrace_impls)
1249 )
1250
1251# These variables get exported to #defines in config/*.hh (see src/SConscript).
1252export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
1253 'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'USE_TUNTAP',
1254 'PROTOCOL', 'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST']
1255
1256###################################################
1257#
1258# Define a SCons builder for configuration flag headers.
1259#
1260###################################################
1261
1262# This function generates a config header file that #defines the
1263# variable symbol to the current variable setting (0 or 1). The source
1264# operands are the name of the variable and a Value node containing the
1265# value of the variable.
1266def build_config_file(target, source, env):
1267 (variable, value) = [s.get_contents() for s in source]
1268 f = file(str(target[0]), 'w')
1269 print >> f, '#define', variable, value
1270 f.close()
1271 return None
1272
1273# Combine the two functions into a scons Action object.
1274config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))
1275
1276# The emitter munges the source & target node lists to reflect what
1277# we're really doing.
1278def config_emitter(target, source, env):
1279 # extract variable name from Builder arg
1280 variable = str(target[0])
1281 # True target is config header file
1282 target = joinpath('config', variable.lower() + '.hh')
1283 val = env[variable]
1284 if isinstance(val, bool):
1285 # Force value to 0/1
1286 val = int(val)
1287 elif isinstance(val, str):
1288 val = '"' + val + '"'
1289
1290 # Sources are variable name & value (packaged in SCons Value nodes)
1291 return ([target], [Value(variable), Value(val)])
1292
1293config_builder = Builder(emitter = config_emitter, action = config_action)
1294
1295main.Append(BUILDERS = { 'ConfigFile' : config_builder })
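# Sketch of what the builder produces (the real invocations live in
# src/SConscript, per the export_vars note above): a call along the lines of
# env.ConfigFile('USE_KVM') would generate config/use_kvm.hh containing a
# single line such as
#   #define USE_KVM 1
# with booleans coerced to 0/1 and strings quoted, per config_emitter.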
1296
1297###################################################
1298#
1299# Builders for static and shared partially linked object files.
1300#
1301###################################################
1302
1303partial_static_builder = Builder(action=SCons.Defaults.LinkAction,
1304 src_suffix='$OBJSUFFIX',
1305 src_builder=['StaticObject', 'Object'],
1306 LINKFLAGS='$PLINKFLAGS',
1307 LIBS='')
1308
1309def partial_shared_emitter(target, source, env):
1310 for tgt in target:
1311 tgt.attributes.shared = 1
1312 return (target, source)
1313partial_shared_builder = Builder(action=SCons.Defaults.ShLinkAction,
1314 emitter=partial_shared_emitter,
1315 src_suffix='$SHOBJSUFFIX',
1316 src_builder='SharedObject',
1317 SHLINKFLAGS='$PSHLINKFLAGS',
1318 LIBS='')
1319
1320main.Append(BUILDERS = { 'PartialShared' : partial_shared_builder,
1321 'PartialStatic' : partial_static_builder })
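# Sketch of intended use (target and source names here are hypothetical):
#   env.PartialStatic('partial.o', static_objs)
#   env.PartialShared('partial.os', shared_objs)
# each performs an incremental, 'ld -r'-style link of many objects into a
# single relocatable object, using the PLINKFLAGS/PSHLINKFLAGS set up
# earlier for gcc/clang.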
1322
1323# builds in ext are shared across all configs in the build root.
1324ext_dir = abspath(joinpath(str(main.root), 'ext'))
1325ext_build_dirs = []
1326for root, dirs, files in os.walk(ext_dir):
1327 if 'SConscript' in files:
1328 build_dir = os.path.relpath(root, ext_dir)
1329 ext_build_dirs.append(build_dir)
1330 main.SConscript(joinpath(root, 'SConscript'),
1331 variant_dir=joinpath(build_root, build_dir))
1332
1333main.Prepend(CPPPATH=Dir('ext/pybind11/include/'))
1334
1335###################################################
1336#
1337# This builder and wrapper method are used to set up a directory with
1338# switching headers. Those are headers which are in a generic location and
1339# that include more specific headers from a directory chosen at build time
1340# based on the current build settings.
1341#
1342###################################################
1343
1344def build_switching_header(target, source, env):
1345 path = str(target[0])
1346 subdir = str(source[0])
1347 dp, fp = os.path.split(path)
1348 dp = os.path.relpath(os.path.realpath(dp),
1349 os.path.realpath(env['BUILDDIR']))
1350 with open(path, 'w') as hdr:
1351 print >>hdr, '#include "%s/%s/%s"' % (dp, subdir, fp)
1352
1353switching_header_action = MakeAction(build_switching_header,
1354 Transform('GENERATE'))
1355
1356switching_header_builder = Builder(action=switching_header_action,
1357 source_factory=Value,
1358 single_source=True)
1359
1360main.Append(BUILDERS = { 'SwitchingHeader': switching_header_builder })
1361
1362def switching_headers(self, headers, source):
1363 for header in headers:
1364 self.SwitchingHeader(header, source)
1365
1366main.AddMethod(switching_headers, 'SwitchingHeaders')
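# Illustrative only: a consuming SConscript might call something like
#   env.SwitchingHeaders(['decoder.hh'], env.subst('${TARGET_ISA}'))
# so that each generated generic header simply contains
#   #include "<dir-relative-to-BUILDDIR>/<target-isa>/decoder.hh"
# as emitted by build_switching_header above. The header name is only an
# example.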
1367
1368# all-isas -> all-deps -> all-environs -> all-targets
1369main.Alias('#all-isas', [])
1370main.Alias('#all-deps', '#all-isas')
1371
1372# Dummy target to ensure all environments are created before telling
1373# SCons what to actually make (the command line arguments). We attach
1374# them to the dependence graph after the environments are complete.
1375ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; gets closure to work.
1376def environsComplete(target, source, env):
1377 for t in ORIG_BUILD_TARGETS:
1378 main.Depends('#all-targets', t)
1379
1380# Each build/* switching_dir attaches its *-environs target to #all-environs.
1381main.Append(BUILDERS = {'CompleteEnvirons' :
1382 Builder(action=MakeAction(environsComplete, None))})
1383main.CompleteEnvirons('#all-environs', [])
1384
1385def doNothing(**ignored): pass
1386main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})
1387
1388# The final target to which all the original targets ultimately get attached.
1389main.Dummy('#all-targets', '#all-environs')
1390BUILD_TARGETS[:] = ['#all-targets']
1391
1392###################################################
1393#
1394# Define build environments for selected configurations.
1395#
1396###################################################
1397
1398def variant_name(path):
1399 return os.path.basename(path).lower().replace('_', '-')
1400main['variant_name'] = variant_name
1401main['VARIANT_NAME'] = '${variant_name(BUILDDIR)}'
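# For example (the path is hypothetical): variant_name('build/X86_MOESI_hammer')
# returns 'x86-moesi-hammer', i.e. the basename lower-cased with underscores
# replaced by dashes.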
1402
1403for variant_path in variant_paths:
1404 if not GetOption('silent'):
1405 print "Building in", variant_path
1406
1407 # Make a copy of the build-root environment to use for this config.
1408 env = main.Clone()
1409 env['BUILDDIR'] = variant_path
1410
1411 # variant_dir is the tail component of build path, and is used to
1412 # determine the build parameters (e.g., 'ALPHA_SE')
1413 (build_root, variant_dir) = splitpath(variant_path)
1414
1415 # Set env variables according to the build directory config.
1416 sticky_vars.files = []
1417 # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
1418 # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
1419 # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
1420 current_vars_file = joinpath(build_root, 'variables', variant_dir)
1421 if isfile(current_vars_file):
1422 sticky_vars.files.append(current_vars_file)
1423 if not GetOption('silent'):
1424 print "Using saved variables file %s" % current_vars_file
1425 elif variant_dir in ext_build_dirs:
1426 # Things in ext are built without a variant directory.
1427 continue
1428 else:
1429 # Build dir-specific variables file doesn't exist.
1430
1431 # Make sure the directory is there so we can create it later
1432 opt_dir = dirname(current_vars_file)
1433 if not isdir(opt_dir):
1434 mkdir(opt_dir)
1435
1436 # Get default build variables from source tree. Variables are
1437 # normally determined by name of $VARIANT_DIR, but can be
1438 # overridden by '--default=' arg on command line.
1439 default = GetOption('default')
1440 opts_dir = joinpath(main.root.abspath, 'build_opts')
1441 if default:
1442 default_vars_files = [joinpath(build_root, 'variables', default),
1443 joinpath(opts_dir, default)]
1444 else:
1445 default_vars_files = [joinpath(opts_dir, variant_dir)]
1446 existing_files = filter(isfile, default_vars_files)
1447 if existing_files:
1448 default_vars_file = existing_files[0]
1449 sticky_vars.files.append(default_vars_file)
1450 print "Variables file %s not found,\n using defaults in %s" \
1451 % (current_vars_file, default_vars_file)
1452 else:
1453 print "Error: cannot find variables file %s or " \
1454 "default file(s) %s" \
1455 % (current_vars_file, ' or '.join(default_vars_files))
1456 Exit(1)
1457
1458 # Apply current variable settings to env
1459 sticky_vars.Update(env)
1460
1461 help_texts["local_vars"] += \
1462 "Build variables for %s:\n" % variant_dir \
1463 + sticky_vars.GenerateHelpText(env)
1464
1465 # Process variable settings.
1466
1467 if not have_fenv and env['USE_FENV']:
1468 print "Warning: <fenv.h> not available; " \
1469 "forcing USE_FENV to False in", variant_dir + "."
1470 env['USE_FENV'] = False
1471
1472 if not env['USE_FENV']:
1473 print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
1474 print " FP results may deviate slightly from other platforms."
1475
1476 if env['EFENCE']:
1477 env.Append(LIBS=['efence'])
1478
1479 if env['USE_KVM']:
1480 if not have_kvm:
1481             print "Warning: Cannot enable KVM, host seems to lack KVM support"
1482 env['USE_KVM'] = False
1483 elif not is_isa_kvm_compatible(env['TARGET_ISA']):
1484 print "Info: KVM support disabled due to unsupported host and " \
1485 "target ISA combination"
1486 env['USE_KVM'] = False
1487
1488 if env['USE_TUNTAP']:
1489 if not have_tuntap:
1490 print "Warning: Can't connect EtherTap with a tap device."
1491 env['USE_TUNTAP'] = False
1492
1493 if env['BUILD_GPU']:
1494 env.Append(CPPDEFINES=['BUILD_GPU'])
1495
1496 # Warn about missing optional functionality
1497 if env['USE_KVM']:
1498 if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
1499 print "Warning: perf_event headers lack support for the " \
1500 "exclude_host attribute. KVM instruction counts will " \
1501 "be inaccurate."
1502
1503 # Save sticky variable settings back to current variables file
1504 sticky_vars.Save(current_vars_file, env)
1505
1506 if env['USE_SSE2']:
1507 env.Append(CCFLAGS=['-msse2'])
1508
1509 # The src/SConscript file sets up the build rules in 'env' according
1510 # to the configured variables. It returns a list of environments,
1511 # one for each variant build (debug, opt, etc.)
1512 SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')
1513
1514def pairwise(iterable):
1515 "s -> (s0,s1), (s1,s2), (s2, s3), ..."
1516 a, b = itertools.tee(iterable)
1517 b.next()
1518 return itertools.izip(a, b)
1519
1520variant_names = [variant_name(path) for path in variant_paths]
1521
1522# Create false dependencies so SCons will parse ISAs, establish
1523# dependencies, and setup the build Environments serially. Either
1524# SCons (likely) and/or our SConscripts (possibly) cannot cope with -j
1525# greater than 1. It appears to be standard race condition stuff; it
1526# doesn't always fail, but usually, and the behaviors are different.
1527# Every time I tried to remove this, builds would fail in some
1528# creative new way. So, don't do that. You'll want to, though, because
1529# tests/SConscript takes a long time to make its Environments.
1530for t1, t2 in pairwise(sorted(variant_names)):
1531 main.Depends('#%s-deps' % t2, '#%s-deps' % t1)
1532 main.Depends('#%s-environs' % t2, '#%s-environs' % t1)
1533
1534# base help text
1535Help('''
1536Usage: scons [scons options] [build variables] [target(s)]
1537
1538Extra scons options:
1539%(options)s
1540
1541Global build variables:
1542%(global_vars)s
1543
1544%(local_vars)s
1545''' % help_texts)
796 # Pass the LTO flag when compiling to produce GIMPLE
797 # output, we merely create the flags here and only append
798 # them later
799 main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
800
801 # Use the same amount of jobs for LTO as we are running
802 # scons with
803 main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
804
805 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
806 '-fno-builtin-realloc', '-fno-builtin-free'])
807
808 # add option to check for undeclared overrides
809 if compareVersions(gcc_version, "5.0") > 0:
810 main.Append(CCFLAGS=['-Wno-error=suggest-override'])
811
812elif main['CLANG']:
813 # Check for a supported version of clang, >= 3.1 is needed to
814 # support similar features as gcc 4.8. See
815 # http://clang.llvm.org/cxx_status.html for details
816 clang_version_re = re.compile(".* version (\d+\.\d+)")
817 clang_version_match = clang_version_re.search(CXX_version)
818 if (clang_version_match):
819 clang_version = clang_version_match.groups()[0]
820 if compareVersions(clang_version, "3.1") < 0:
821 print 'Error: clang version 3.1 or newer required.'
822 print ' Installed version:', clang_version
823 Exit(1)
824 else:
825 print 'Error: Unable to determine clang version.'
826 Exit(1)
827
828 # clang has a few additional warnings that we disable, extraneous
829 # parantheses are allowed due to Ruby's printing of the AST,
830 # finally self assignments are allowed as the generated CPU code
831 # is relying on this
832 main.Append(CCFLAGS=['-Wno-parentheses',
833 '-Wno-self-assign',
834 # Some versions of libstdc++ (4.8?) seem to
835 # use struct hash and class hash
836 # interchangeably.
837 '-Wno-mismatched-tags',
838 ])
839
840 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])
841
842 # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
843 # opposed to libstdc++, as the later is dated.
844 if sys.platform == "darwin":
845 main.Append(CXXFLAGS=['-stdlib=libc++'])
846 main.Append(LIBS=['c++'])
847
848 # On FreeBSD we need libthr.
849 if sys.platform.startswith('freebsd'):
850 main.Append(LIBS=['thr'])
851
852else:
853 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
854 print "Don't know what compiler options to use for your compiler."
855 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
856 print termcap.Yellow + ' version:' + termcap.Normal,
857 if not CXX_version:
858 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
859 termcap.Normal
860 else:
861 print CXX_version.replace('\n', '<nl>')
862 print " If you're trying to use a compiler other than GCC"
863 print " or clang, there appears to be something wrong with your"
864 print " environment."
865 print " "
866 print " If you are trying to use a compiler other than those listed"
867 print " above you will need to ease fix SConstruct and "
868 print " src/SConscript to support that compiler."
869 Exit(1)
870
871# Set up common yacc/bison flags (needed for Ruby)
872main['YACCFLAGS'] = '-d'
873main['YACCHXXFILESUFFIX'] = '.hh'
874
875# Do this after we save setting back, or else we'll tack on an
876# extra 'qdo' every time we run scons.
877if main['BATCH']:
878 main['CC'] = main['BATCH_CMD'] + ' ' + main['CC']
879 main['CXX'] = main['BATCH_CMD'] + ' ' + main['CXX']
880 main['AS'] = main['BATCH_CMD'] + ' ' + main['AS']
881 main['AR'] = main['BATCH_CMD'] + ' ' + main['AR']
882 main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']
883
884if sys.platform == 'cygwin':
885 # cygwin has some header file issues...
886 main.Append(CCFLAGS=["-Wno-uninitialized"])
887
888# Check for the protobuf compiler
889protoc_version = readCommand([main['PROTOC'], '--version'],
890 exception='').split()
891
892# First two words should be "libprotoc x.y.z"
893if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
894 print termcap.Yellow + termcap.Bold + \
895 'Warning: Protocol buffer compiler (protoc) not found.\n' + \
896 ' Please install protobuf-compiler for tracing support.' + \
897 termcap.Normal
898 main['PROTOC'] = False
899else:
900 # Based on the availability of the compress stream wrappers,
901 # require 2.1.0
902 min_protoc_version = '2.1.0'
903 if compareVersions(protoc_version[1], min_protoc_version) < 0:
904 print termcap.Yellow + termcap.Bold + \
905 'Warning: protoc version', min_protoc_version, \
906 'or newer required.\n' + \
907 ' Installed version:', protoc_version[1], \
908 termcap.Normal
909 main['PROTOC'] = False
910 else:
911 # Attempt to determine the appropriate include path and
912 # library path using pkg-config, that means we also need to
913 # check for pkg-config. Note that it is possible to use
914 # protobuf without the involvement of pkg-config. Later on we
915 # check go a library config check and at that point the test
916 # will fail if libprotobuf cannot be found.
917 if readCommand(['pkg-config', '--version'], exception=''):
918 try:
919 # Attempt to establish what linking flags to add for protobuf
920 # using pkg-config
921 main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
922 except:
923 print termcap.Yellow + termcap.Bold + \
924 'Warning: pkg-config could not get protobuf flags.' + \
925 termcap.Normal
926
927
928# Check for 'timeout' from GNU coreutils. If present, regressions will
929# be run with a time limit. We require version 8.13 since we rely on
930# support for the '--foreground' option.
931if sys.platform.startswith('freebsd'):
932 timeout_lines = readCommand(['gtimeout', '--version'],
933 exception='').splitlines()
934else:
935 timeout_lines = readCommand(['timeout', '--version'],
936 exception='').splitlines()
937# Get the first line and tokenize it
938timeout_version = timeout_lines[0].split() if timeout_lines else []
939main['TIMEOUT'] = timeout_version and \
940 compareVersions(timeout_version[-1], '8.13') >= 0
941
942# Add a custom Check function to test for structure members.
943def CheckMember(context, include, decl, member, include_quotes="<>"):
944 context.Message("Checking for member %s in %s..." %
945 (member, decl))
946 text = """
947#include %(header)s
948int main(){
949 %(decl)s test;
950 (void)test.%(member)s;
951 return 0;
952};
953""" % { "header" : include_quotes[0] + include + include_quotes[1],
954 "decl" : decl,
955 "member" : member,
956 }
957
958 ret = context.TryCompile(text, extension=".cc")
959 context.Result(ret)
960 return ret
961
962# Platform-specific configuration. Note again that we assume that all
963# builds under a given build root run on the same host platform.
964conf = Configure(main,
965 conf_dir = joinpath(build_root, '.scons_config'),
966 log_file = joinpath(build_root, 'scons_config.log'),
967 custom_tests = {
968 'CheckMember' : CheckMember,
969 })
970
971# Check if we should compile a 64 bit binary on Mac OS X/Darwin
972try:
973 import platform
974 uname = platform.uname()
975 if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
976 if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
977 main.Append(CCFLAGS=['-arch', 'x86_64'])
978 main.Append(CFLAGS=['-arch', 'x86_64'])
979 main.Append(LINKFLAGS=['-arch', 'x86_64'])
980 main.Append(ASFLAGS=['-arch', 'x86_64'])
981except:
982 pass
983
984# Recent versions of scons substitute a "Null" object for Configure()
985# when configuration isn't necessary, e.g., if the "--help" option is
986# present. Unfortuantely this Null object always returns false,
987# breaking all our configuration checks. We replace it with our own
988# more optimistic null object that returns True instead.
989if not conf:
990 def NullCheck(*args, **kwargs):
991 return True
992
993 class NullConf:
994 def __init__(self, env):
995 self.env = env
996 def Finish(self):
997 return self.env
998 def __getattr__(self, mname):
999 return NullCheck
1000
1001 conf = NullConf(main)
1002
1003# Cache build files in the supplied directory.
1004if main['M5_BUILD_CACHE']:
1005 print 'Using build cache located at', main['M5_BUILD_CACHE']
1006 CacheDir(main['M5_BUILD_CACHE'])
1007
1008main['USE_PYTHON'] = not GetOption('without_python')
1009if main['USE_PYTHON']:
1010 # Find Python include and library directories for embedding the
1011 # interpreter. We rely on python-config to resolve the appropriate
1012 # includes and linker flags. ParseConfig does not seem to understand
1013 # the more exotic linker flags such as -Xlinker and -export-dynamic so
1014 # we add them explicitly below. If you want to link in an alternate
1015 # version of python, see above for instructions on how to invoke
1016 # scons with the appropriate PATH set.
1017 #
1018 # First we check if python2-config exists, else we use python-config
1019 python_config = readCommand(['which', 'python2-config'],
1020 exception='').strip()
1021 if not os.path.exists(python_config):
1022 python_config = readCommand(['which', 'python-config'],
1023 exception='').strip()
1024 py_includes = readCommand([python_config, '--includes'],
1025 exception='').split()
1026 # Strip the -I from the include folders before adding them to the
1027 # CPPPATH
1028 main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))
1029
1030 # Read the linker flags and split them into libraries and other link
1031 # flags. The libraries are added later through the call the CheckLib.
1032 py_ld_flags = readCommand([python_config, '--ldflags'],
1033 exception='').split()
1034 py_libs = []
1035 for lib in py_ld_flags:
1036 if not lib.startswith('-l'):
1037 main.Append(LINKFLAGS=[lib])
1038 else:
1039 lib = lib[2:]
1040 if lib not in py_libs:
1041 py_libs.append(lib)
1042
1043 # verify that this stuff works
1044 if not conf.CheckHeader('Python.h', '<>'):
1045 print "Error: can't find Python.h header in", py_includes
1046 print "Install Python headers (package python-dev on Ubuntu and RedHat)"
1047 Exit(1)
1048
1049 for lib in py_libs:
1050 if not conf.CheckLib(lib):
1051 print "Error: can't find library %s required by python" % lib
1052 Exit(1)
1053
1054# On Solaris you need to use libsocket for socket ops
1055if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
1056 if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
1057 print "Can't find library with socket calls (e.g. accept())"
1058 Exit(1)
1059
1060# Check for zlib. If the check passes, libz will be automatically
1061# added to the LIBS environment variable.
1062if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
1063 print 'Error: did not find needed zlib compression library '\
1064 'and/or zlib.h header file.'
1065 print ' Please install zlib and try again.'
1066 Exit(1)
1067
1068# If we have the protobuf compiler, also make sure we have the
1069# development libraries. If the check passes, libprotobuf will be
1070# automatically added to the LIBS environment variable. After
1071# this, we can use the HAVE_PROTOBUF flag to determine if we have
1072# got both protoc and libprotobuf available.
1073main['HAVE_PROTOBUF'] = main['PROTOC'] and \
1074 conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
1075 'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')
1076
1077# If we have the compiler but not the library, print another warning.
1078if main['PROTOC'] and not main['HAVE_PROTOBUF']:
1079 print termcap.Yellow + termcap.Bold + \
1080 'Warning: did not find protocol buffer library and/or headers.\n' + \
1081 ' Please install libprotobuf-dev for tracing support.' + \
1082 termcap.Normal
1083
1084# Check for librt.
1085have_posix_clock = \
1086 conf.CheckLibWithHeader(None, 'time.h', 'C',
1087 'clock_nanosleep(0,0,NULL,NULL);') or \
1088 conf.CheckLibWithHeader('rt', 'time.h', 'C',
1089 'clock_nanosleep(0,0,NULL,NULL);')
1090
1091have_posix_timers = \
1092 conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
1093 'timer_create(CLOCK_MONOTONIC, NULL, NULL);')
1094
1095if not GetOption('without_tcmalloc'):
1096 if conf.CheckLib('tcmalloc'):
1097 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1098 elif conf.CheckLib('tcmalloc_minimal'):
1099 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1100 else:
1101 print termcap.Yellow + termcap.Bold + \
1102 "You can get a 12% performance improvement by "\
1103 "installing tcmalloc (libgoogle-perftools-dev package "\
1104 "on Ubuntu or RedHat)." + termcap.Normal
1105
1106
1107# Detect back trace implementations. The last implementation in the
1108# list will be used by default.
1109backtrace_impls = [ "none" ]
1110
1111if conf.CheckLibWithHeader(None, 'execinfo.h', 'C',
1112 'backtrace_symbols_fd((void*)0, 0, 0);'):
1113 backtrace_impls.append("glibc")
1114elif conf.CheckLibWithHeader('execinfo', 'execinfo.h', 'C',
1115 'backtrace_symbols_fd((void*)0, 0, 0);'):
1116 # NetBSD and FreeBSD need libexecinfo.
1117 backtrace_impls.append("glibc")
1118 main.Append(LIBS=['execinfo'])
1119
1120if backtrace_impls[-1] == "none":
1121 default_backtrace_impl = "none"
1122 print termcap.Yellow + termcap.Bold + \
1123 "No suitable back trace implementation found." + \
1124 termcap.Normal
1125
1126if not have_posix_clock:
1127 print "Can't find library for POSIX clocks."
1128
1129# Check for <fenv.h> (C99 FP environment control)
1130have_fenv = conf.CheckHeader('fenv.h', '<>')
1131if not have_fenv:
1132 print "Warning: Header file <fenv.h> not found."
1133 print " This host has no IEEE FP rounding mode control."
1134
1135# Check if we should enable KVM-based hardware virtualization. The API
1136# we rely on exists since version 2.6.36 of the kernel, but somehow
1137# the KVM_API_VERSION does not reflect the change. We test for one of
1138# the types as a fall back.
1139have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
1140if not have_kvm:
1141 print "Info: Compatible header file <linux/kvm.h> not found, " \
1142 "disabling KVM support."
1143
1144# Check if the TUN/TAP driver is available.
1145have_tuntap = conf.CheckHeader('linux/if_tun.h', '<>')
1146if not have_tuntap:
1147 print "Info: Compatible header file <linux/if_tun.h> not found."
1148
1149# x86 needs support for xsave. We test for the structure here since we
1150# won't be able to run new tests by the time we know which ISA we're
1151# targeting.
1152have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
1153 '#include <linux/kvm.h>') != 0
1154
1155# Check if the requested target ISA is compatible with the host
1156def is_isa_kvm_compatible(isa):
1157 try:
1158 import platform
1159 host_isa = platform.machine()
1160 except:
1161 print "Warning: Failed to determine host ISA."
1162 return False
1163
1164 if not have_posix_timers:
1165 print "Warning: Can not enable KVM, host seems to lack support " \
1166 "for POSIX timers"
1167 return False
1168
1169 if isa == "arm":
1170 return host_isa in ( "armv7l", "aarch64" )
1171 elif isa == "x86":
1172 if host_isa != "x86_64":
1173 return False
1174
1175 if not have_kvm_xsave:
1176 print "KVM on x86 requires xsave support in kernel headers."
1177 return False
1178
1179 return True
1180 else:
1181 return False
1182
1183
1184# Check if the exclude_host attribute is available. We want this to
1185# get accurate instruction counts in KVM.
1186main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
1187 'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')
1188
1189
1190######################################################################
1191#
1192# Finish the configuration
1193#
1194main = conf.Finish()
1195
1196######################################################################
1197#
1198# Collect all non-global variables
1199#
1200
1201# Define the universe of supported ISAs
1202all_isa_list = [ ]
1203all_gpu_isa_list = [ ]
1204Export('all_isa_list')
1205Export('all_gpu_isa_list')
1206
1207class CpuModel(object):
1208 '''The CpuModel class encapsulates everything the ISA parser needs to
1209 know about a particular CPU model.'''
1210
1211 # Dict of available CPU model objects. Accessible as CpuModel.dict.
1212 dict = {}
1213
1214 # Constructor. Automatically adds models to CpuModel.dict.
1215 def __init__(self, name, default=False):
1216 self.name = name # name of model
1217
1218 # This cpu is enabled by default
1219 self.default = default
1220
1221 # Add self to dict
1222 if name in CpuModel.dict:
1223 raise AttributeError, "CpuModel '%s' already registered" % name
1224 CpuModel.dict[name] = self
1225
1226Export('CpuModel')
1227
1228# Sticky variables get saved in the variables file so they persist from
1229# one invocation to the next (unless overridden, in which case the new
1230# value becomes sticky).
1231sticky_vars = Variables(args=ARGUMENTS)
1232Export('sticky_vars')
1233
1234# Sticky variables that should be exported
1235export_vars = []
1236Export('export_vars')
1237
1238# For Ruby
1239all_protocols = []
1240Export('all_protocols')
1241protocol_dirs = []
1242Export('protocol_dirs')
1243slicc_includes = []
1244Export('slicc_includes')
1245
1246# Walk the tree and execute all SConsopts scripts that wil add to the
1247# above variables
1248if GetOption('verbose'):
1249 print "Reading SConsopts"
1250for bdir in [ base_dir ] + extras_dir_list:
1251 if not isdir(bdir):
1252 print "Error: directory '%s' does not exist" % bdir
1253 Exit(1)
1254 for root, dirs, files in os.walk(bdir):
1255 if 'SConsopts' in files:
1256 if GetOption('verbose'):
1257 print "Reading", joinpath(root, 'SConsopts')
1258 SConscript(joinpath(root, 'SConsopts'))
1259
1260all_isa_list.sort()
1261all_gpu_isa_list.sort()
1262
1263sticky_vars.AddVariables(
1264 EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
1265 EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list),
1266 ListVariable('CPU_MODELS', 'CPU models',
1267 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
1268 sorted(CpuModel.dict.keys())),
1269 BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
1270 False),
1271 BoolVariable('SS_COMPATIBLE_FP',
1272 'Make floating-point results compatible with SimpleScalar',
1273 False),
1274 BoolVariable('USE_SSE2',
1275 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
1276 False),
1277 BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
1278 BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
1279 BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability', False),
1280 BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models', have_kvm),
1281 BoolVariable('USE_TUNTAP',
1282 'Enable using a tap device to bridge to the host network',
1283 have_tuntap),
1284 BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
1285 EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
1286 all_protocols),
1287 EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
1288 backtrace_impls[-1], backtrace_impls)
1289 )
1290
1291# These variables get exported to #defines in config/*.hh (see src/SConscript).
1292export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
1293 'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'USE_TUNTAP',
1294 'PROTOCOL', 'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST']
1295
1296###################################################
1297#
1298# Define a SCons builder for configuration flag headers.
1299#
1300###################################################
1301
1302# This function generates a config header file that #defines the
1303# variable symbol to the current variable setting (0 or 1). The source
1304# operands are the name of the variable and a Value node containing the
1305# value of the variable.
1306def build_config_file(target, source, env):
1307 (variable, value) = [s.get_contents() for s in source]
1308 f = file(str(target[0]), 'w')
1309 print >> f, '#define', variable, value
1310 f.close()
1311 return None
1312
1313# Combine the two functions into a scons Action object.
1314config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))
1315
1316# The emitter munges the source & target node lists to reflect what
1317# we're really doing.
1318def config_emitter(target, source, env):
1319 # extract variable name from Builder arg
1320 variable = str(target[0])
1321 # True target is config header file
1322 target = joinpath('config', variable.lower() + '.hh')
1323 val = env[variable]
1324 if isinstance(val, bool):
1325 # Force value to 0/1
1326 val = int(val)
1327 elif isinstance(val, str):
1328 val = '"' + val + '"'
1329
1330 # Sources are variable name & value (packaged in SCons Value nodes)
1331 return ([target], [Value(variable), Value(val)])
1332
1333config_builder = Builder(emitter = config_emitter, action = config_action)
1334
1335main.Append(BUILDERS = { 'ConfigFile' : config_builder })
1336
1337###################################################
1338#
1339# Builders for static and shared partially linked object files.
1340#
1341###################################################
1342
1343partial_static_builder = Builder(action=SCons.Defaults.LinkAction,
1344 src_suffix='$OBJSUFFIX',
1345 src_builder=['StaticObject', 'Object'],
1346 LINKFLAGS='$PLINKFLAGS',
1347 LIBS='')
1348
1349def partial_shared_emitter(target, source, env):
1350 for tgt in target:
1351 tgt.attributes.shared = 1
1352 return (target, source)
1353partial_shared_builder = Builder(action=SCons.Defaults.ShLinkAction,
1354 emitter=partial_shared_emitter,
1355 src_suffix='$SHOBJSUFFIX',
1356 src_builder='SharedObject',
1357 SHLINKFLAGS='$PSHLINKFLAGS',
1358 LIBS='')
1359
1360main.Append(BUILDERS = { 'PartialShared' : partial_shared_builder,
1361 'PartialStatic' : partial_static_builder })
1362
1363# builds in ext are shared across all configs in the build root.
1364ext_dir = abspath(joinpath(str(main.root), 'ext'))
1365ext_build_dirs = []
1366for root, dirs, files in os.walk(ext_dir):
1367 if 'SConscript' in files:
1368 build_dir = os.path.relpath(root, ext_dir)
1369 ext_build_dirs.append(build_dir)
1370 main.SConscript(joinpath(root, 'SConscript'),
1371 variant_dir=joinpath(build_root, build_dir))
1372
1373main.Prepend(CPPPATH=Dir('ext/pybind11/include/'))
1374
1375###################################################
1376#
1377# This builder and wrapper method are used to set up a directory with
1378# switching headers. Those are headers which are in a generic location and
1379# that include more specific headers from a directory chosen at build time
1380# based on the current build settings.
1381#
1382###################################################
1383
1384def build_switching_header(target, source, env):
1385 path = str(target[0])
1386 subdir = str(source[0])
1387 dp, fp = os.path.split(path)
1388 dp = os.path.relpath(os.path.realpath(dp),
1389 os.path.realpath(env['BUILDDIR']))
1390 with open(path, 'w') as hdr:
1391 print >>hdr, '#include "%s/%s/%s"' % (dp, subdir, fp)
1392
1393switching_header_action = MakeAction(build_switching_header,
1394 Transform('GENERATE'))
1395
1396switching_header_builder = Builder(action=switching_header_action,
1397 source_factory=Value,
1398 single_source=True)
1399
1400main.Append(BUILDERS = { 'SwitchingHeader': switching_header_builder })
1401
1402def switching_headers(self, headers, source):
1403 for header in headers:
1404 self.SwitchingHeader(header, source)
1405
1406main.AddMethod(switching_headers, 'SwitchingHeaders')
1407
1408# all-isas -> all-deps -> all-environs -> all_targets
1409main.Alias('#all-isas', [])
1410main.Alias('#all-deps', '#all-isas')
1411
1412# Dummy target to ensure all environments are created before telling
1413# SCons what to actually make (the command line arguments). We attach
1414# them to the dependence graph after the environments are complete.
1415ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; gets closure to work.
def environsComplete(target, source, env):
    for t in ORIG_BUILD_TARGETS:
        main.Depends('#all-targets', t)

# Each build/* switching_dir attaches its *-environs target to #all-environs.
main.Append(BUILDERS = {'CompleteEnvirons' :
                        Builder(action=MakeAction(environsComplete, None))})
main.CompleteEnvirons('#all-environs', [])

def doNothing(**ignored): pass
main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})

# The final target to which all the original targets ultimately get attached.
main.Dummy('#all-targets', '#all-environs')
BUILD_TARGETS[:] = ['#all-targets']
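# Net effect (roughly): '#all-targets' now depends on '#all-environs' and, via
# the CompleteEnvirons action above, on the original command-line targets, so
# every variant's environment is set up before any real target gets built.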

###################################################
#
# Define build environments for selected configurations.
#
###################################################

def variant_name(path):
    return os.path.basename(path).lower().replace('_', '-')
main['variant_name'] = variant_name
main['VARIANT_NAME'] = '${variant_name(BUILDDIR)}'
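# e.g., variant_name('build/ALPHA_SE') -> 'alpha-se'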

for variant_path in variant_paths:
    if not GetOption('silent'):
        print "Building in", variant_path

    # Make a copy of the build-root environment to use for this config.
    env = main.Clone()
    env['BUILDDIR'] = variant_path

    # variant_dir is the tail component of the build path, and is used to
    # determine the build parameters (e.g., 'ALPHA_SE')
    (build_root, variant_dir) = splitpath(variant_path)

    # Set env variables according to the build directory config.
    sticky_vars.files = []
    # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
    # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
    # $BUILD_ROOT/$VARIANT_DIR without losing your variable settings.
    current_vars_file = joinpath(build_root, 'variables', variant_dir)
    if isfile(current_vars_file):
        sticky_vars.files.append(current_vars_file)
        if not GetOption('silent'):
            print "Using saved variables file %s" % current_vars_file
    elif variant_dir in ext_build_dirs:
        # Things in ext are built without a variant directory.
        continue
    else:
        # The build-dir-specific variables file doesn't exist.

        # Make sure the directory is there so we can create the file later.
        opt_dir = dirname(current_vars_file)
        if not isdir(opt_dir):
            mkdir(opt_dir)

        # Get default build variables from the source tree. Variables are
        # normally determined by the name of $VARIANT_DIR, but can be
        # overridden by the '--default=' arg on the command line.
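        # For example (the config name is hypothetical): 'scons --default=ARM
        # build/ARM_FOO/gem5.opt' seeds build/ARM_FOO from a previously saved
        # build/variables/ARM if present, otherwise from build_opts/ARM.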
        default = GetOption('default')
        opts_dir = joinpath(main.root.abspath, 'build_opts')
        if default:
            default_vars_files = [joinpath(build_root, 'variables', default),
                                  joinpath(opts_dir, default)]
        else:
            default_vars_files = [joinpath(opts_dir, variant_dir)]
        existing_files = filter(isfile, default_vars_files)
        if existing_files:
            default_vars_file = existing_files[0]
            sticky_vars.files.append(default_vars_file)
            print "Variables file %s not found,\n using defaults in %s" \
                  % (current_vars_file, default_vars_file)
        else:
            print "Error: cannot find variables file %s or " \
                  "default file(s) %s" \
                  % (current_vars_file, ' or '.join(default_vars_files))
            Exit(1)

    # Apply current variable settings to env
    sticky_vars.Update(env)

    help_texts["local_vars"] += \
        "Build variables for %s:\n" % variant_dir \
        + sticky_vars.GenerateHelpText(env)

    # Process variable settings.

    if not have_fenv and env['USE_FENV']:
        print "Warning: <fenv.h> not available; " \
              "forcing USE_FENV to False in", variant_dir + "."
        env['USE_FENV'] = False

    if not env['USE_FENV']:
        print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
        print "         FP results may deviate slightly from other platforms."

    if env['EFENCE']:
        env.Append(LIBS=['efence'])

    if env['USE_KVM']:
        if not have_kvm:
            print "Warning: Cannot enable KVM; host seems to lack KVM support"
            env['USE_KVM'] = False
        elif not is_isa_kvm_compatible(env['TARGET_ISA']):
            print "Info: KVM support disabled due to unsupported host and " \
                  "target ISA combination"
            env['USE_KVM'] = False

    if env['USE_TUNTAP']:
        if not have_tuntap:
            print "Warning: Can't connect EtherTap to a tap device."
            env['USE_TUNTAP'] = False

    if env['BUILD_GPU']:
        env.Append(CPPDEFINES=['BUILD_GPU'])

    # Warn about missing optional functionality
    if env['USE_KVM']:
        if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
            print "Warning: perf_event headers lack support for the " \
                  "exclude_host attribute. KVM instruction counts will " \
                  "be inaccurate."

    # Save sticky variable settings back to current variables file
    sticky_vars.Save(current_vars_file, env)

    if env['USE_SSE2']:
        env.Append(CCFLAGS=['-msse2'])

    # The src/SConscript file sets up the build rules in 'env' according
    # to the configured variables. It returns a list of environments,
    # one for each variant build (debug, opt, etc.)
    SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')

def pairwise(iterable):
    "s -> (s0,s1), (s1,s2), (s2,s3), ..."
    a, b = itertools.tee(iterable)
    b.next()
    return itertools.izip(a, b)

variant_names = [variant_name(path) for path in variant_paths]

# Create false dependencies so SCons will parse ISAs, establish
# dependencies, and set up the build Environments serially. Either
# SCons (likely) or our SConscripts (possibly) cannot cope with -j
# greater than 1. It appears to be standard race-condition stuff: it
# doesn't always fail, but it usually does, and the failure modes vary.
# Every time I tried to remove this, builds failed in some creative new
# way, so don't do that. You'll want to, though, because
# tests/SConscript takes a long time to make its Environments.
for t1, t2 in pairwise(sorted(variant_names)):
    main.Depends('#%s-deps' % t2, '#%s-deps' % t1)
    main.Depends('#%s-environs' % t2, '#%s-environs' % t1)
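# For example: with variants ['arm', 'x86'], the loop above makes '#x86-deps'
# depend on '#arm-deps' and '#x86-environs' on '#arm-environs'.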

# base help text
Help('''
Usage: scons [scons options] [build variables] [target(s)]

Extra scons options:
%(options)s

Global build variables:
%(global_vars)s

%(local_vars)s
''' % help_texts)