SConstruct (12243:c56b7387cddc)
1# -*- mode:python -*-
2
3# Copyright (c) 2013, 2015-2017 ARM Limited
4# All rights reserved.
5#
6# The license below extends only to copyright in the software and shall
7# not be construed as granting a license to any other intellectual
8# property including but not limited to intellectual property relating
9# to a hardware implementation of the functionality of the software
10# licensed hereunder. You may use the software subject to the license
11# terms below provided that you ensure that this notice is replicated
12# unmodified and in its entirety in all distributions of the software,
13# modified or unmodified, in source code or in binary form.
14#
15# Copyright (c) 2011 Advanced Micro Devices, Inc.
16# Copyright (c) 2009 The Hewlett-Packard Development Company
17# Copyright (c) 2004-2005 The Regents of The University of Michigan
18# All rights reserved.
19#
20# Redistribution and use in source and binary forms, with or without
21# modification, are permitted provided that the following conditions are
22# met: redistributions of source code must retain the above copyright
23# notice, this list of conditions and the following disclaimer;
24# redistributions in binary form must reproduce the above copyright
25# notice, this list of conditions and the following disclaimer in the
26# documentation and/or other materials provided with the distribution;
27# neither the name of the copyright holders nor the names of its
28# contributors may be used to endorse or promote products derived from
29# this software without specific prior written permission.
30#
31# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
32# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
33# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
34# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
35# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
37# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
38# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
39# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
40# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
41# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
42#
43# Authors: Steve Reinhardt
44# Nathan Binkert
45
46###################################################
47#
48# SCons top-level build description (SConstruct) file.
49#
50# While in this directory ('gem5'), just type 'scons' to build the default
51# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
52# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
53# the optimized full-system version).
54#
55# You can build gem5 in a different directory as long as there is a
56# 'build/<CONFIG>' somewhere along the target path. The build system
57# expects that all configs under the same build directory are being
58# built for the same host system.
59#
60# Examples:
61#
62# The following two commands are equivalent. The '-u' option tells
63# scons to search up the directory tree for this SConstruct file.
64# % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
65# % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
66#
67# The following two commands are equivalent and demonstrate building
68# in a directory outside of the source tree. The '-C' option tells
69# scons to chdir to the specified directory to find this SConstruct
70# file.
71# % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
72# % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
73#
74# You can use 'scons -H' to print scons options. If you're in this
75# 'gem5' directory (or use -u or -C to tell scons where to find this
76# file), you can use 'scons -h' to print all the gem5-specific build
77# options as well.
78#
79###################################################
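#
# Build variables (an illustrative sketch; values here are examples
# only): the gem5-specific sticky variables reported by 'scons -h' can
# be given on the command line alongside the targets, e.g.
#   % scons build/X86/gem5.opt CPU_MODELS=AtomicSimpleCPU,TimingSimpleCPU
# The authoritative list of variables is assembled by the sticky/global
# variable setup further down in this file.
#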
80
81# Global Python includes
82import itertools
83import os
84import re
85import shutil
86import subprocess
87import sys
88
89from os import mkdir, environ
90from os.path import abspath, basename, dirname, expanduser, normpath
91from os.path import exists, isdir, isfile
92from os.path import join as joinpath, split as splitpath
93
94# SCons includes
95import SCons
96import SCons.Node
97
98extra_python_paths = [
99 Dir('src/python').srcnode().abspath, # gem5 includes
100 Dir('ext/ply').srcnode().abspath, # ply is used by several files
101 ]
102
103sys.path[1:1] = extra_python_paths
104
105from m5.util import compareVersions, readCommand
106from m5.util.terminal import get_termcap
107
108help_texts = {
109 "options" : "",
110 "global_vars" : "",
111 "local_vars" : ""
112}
113
114Export("help_texts")
115
116
117# There's a bug in scons in that (1) by default, the help texts from
118# AddOption() are supposed to be displayed when you type 'scons -h'
119# and (2) you can override the help displayed by 'scons -h' using the
120# Help() function, but these two features are incompatible: once
121# you've overridden the help text using Help(), there's no way to get
122# at the help texts from AddOptions. See:
123# http://scons.tigris.org/issues/show_bug.cgi?id=2356
124# http://scons.tigris.org/issues/show_bug.cgi?id=2611
125# This hack lets us extract the help text from AddOptions and
126# re-inject it via Help(). Ideally someday this bug will be fixed and
127# we can just use AddOption directly.
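# Hedged sketch of how the collected text is meant to be consumed (the
# actual Help() call lives elsewhere in the build scripts): the pieces
# gathered in help_texts are eventually re-injected with something like
#   Help(help_texts["options"] + help_texts["global_vars"] +
#        help_texts["local_vars"])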
128def AddLocalOption(*args, **kwargs):
129 col_width = 30
130
131 help = " " + ", ".join(args)
132 if "help" in kwargs:
133 length = len(help)
134 if length >= col_width:
135 help += "\n" + " " * col_width
136 else:
137 help += " " * (col_width - length)
138 help += kwargs["help"]
139 help_texts["options"] += help + "\n"
140
141 AddOption(*args, **kwargs)
142
143AddLocalOption('--colors', dest='use_colors', action='store_true',
144 help="Add color to abbreviated scons output")
145AddLocalOption('--no-colors', dest='use_colors', action='store_false',
146 help="Don't add color to abbreviated scons output")
147AddLocalOption('--with-cxx-config', dest='with_cxx_config',
148 action='store_true',
149 help="Build with support for C++-based configuration")
150AddLocalOption('--default', dest='default', type='string', action='store',
151 help='Override which build_opts file to use for defaults')
152AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
153 help='Disable style checking hooks')
154AddLocalOption('--no-lto', dest='no_lto', action='store_true',
155 help='Disable Link-Time Optimization for fast')
156AddLocalOption('--force-lto', dest='force_lto', action='store_true',
157 help='Use Link-Time Optimization instead of partial linking' +
158 ' when the compiler doesn\'t support using them together.')
159AddLocalOption('--update-ref', dest='update_ref', action='store_true',
160 help='Update test reference outputs')
161AddLocalOption('--verbose', dest='verbose', action='store_true',
162 help='Print full tool command lines')
163AddLocalOption('--without-python', dest='without_python',
164 action='store_true',
165 help='Build without Python configuration support')
166AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
167 action='store_true',
168 help='Disable linking against tcmalloc')
169AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
170 help='Build with Undefined Behavior Sanitizer if available')
171AddLocalOption('--with-asan', dest='with_asan', action='store_true',
172 help='Build with Address Sanitizer if available')
173
174if GetOption('no_lto') and GetOption('force_lto'):
175 print '--no-lto and --force-lto are mutually exclusive'
176 Exit(1)
177
178termcap = get_termcap(GetOption('use_colors'))
179
180########################################################################
181#
182# Set up the main build environment.
183#
184########################################################################
185
186# export TERM so that clang reports errors in color
187use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
188 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
189 'PYTHONPATH', 'RANLIB', 'TERM' ])
190
191use_prefixes = [
192 "ASAN_", # address sanitizer symbolizer path and settings
193 "CCACHE_", # ccache (caching compiler wrapper) configuration
194 "CCC_", # clang static analyzer configuration
195 "DISTCC_", # distcc (distributed compiler wrapper) configuration
196 "INCLUDE_SERVER_", # distcc pump server settings
197 "M5", # M5 configuration (e.g., path to kernels)
198 ]
199
200use_env = {}
201for key,val in sorted(os.environ.iteritems()):
202 if key in use_vars or \
203 any([key.startswith(prefix) for prefix in use_prefixes]):
204 use_env[key] = val
205
206# Tell scons to avoid implicit command dependencies to prevent issues
207# with the param wrappers being compiled twice (see
208# http://scons.tigris.org/issues/show_bug.cgi?id=2811)
209main = Environment(ENV=use_env, IMPLICIT_COMMAND_DEPENDENCIES=0)
210main.Decider('MD5-timestamp')
211main.root = Dir(".") # The current directory (where this file lives).
212main.srcdir = Dir("src") # The source directory
213
214main_dict_keys = main.Dictionary().keys()
215
216# Check that we have a C/C++ compiler
217if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
218 print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
219 Exit(1)
220
221# add useful python code to PYTHONPATH so it can be used by subprocesses
222# as well
223main.AppendENVPath('PYTHONPATH', extra_python_paths)
224
225########################################################################
226#
227# Mercurial Stuff.
228#
229# If the gem5 directory is a mercurial repository, we should do some
230# extra things.
231#
232########################################################################
233
234hgdir = main.root.Dir(".hg")
235
236
237style_message = """
238You're missing the gem5 style hook, which automatically checks your code
239against the gem5 style rules on %s.
240This script will now install the hook in your %s.
241Press enter to continue, or ctrl-c to abort: """
242
243mercurial_style_message = """
244You're missing the gem5 style hook, which automatically checks your code
245against the gem5 style rules on hg commit and qrefresh commands.
246This script will now install the hook in your .hg/hgrc file.
247Press enter to continue, or ctrl-c to abort: """
248
249git_style_message = """
250You're missing the gem5 style or commit message hook. These hooks help
251to ensure that your code follows gem5's style rules on git commit.
252This script will now install the hook in your .git/hooks/ directory.
253Press enter to continue, or ctrl-c to abort: """
254
255mercurial_style_upgrade_message = """
256Your Mercurial style hooks are not up-to-date. This script will now
257try to automatically update them. A backup of your hgrc will be saved
258in .hg/hgrc.old.
259Press enter to continue, or ctrl-c to abort: """
260
261mercurial_style_hook = """
262# The following lines were automatically added by gem5/SConstruct
263# to provide the gem5 style-checking hooks
264[extensions]
265hgstyle = %s/util/hgstyle.py
266
267[hooks]
268pretxncommit.style = python:hgstyle.check_style
269pre-qrefresh.style = python:hgstyle.check_style
270# End of SConstruct additions
271
272""" % (main.root.abspath)
273
274mercurial_lib_not_found = """
275Mercurial libraries cannot be found, ignoring style hook. If
276you are a gem5 developer, please fix this and run the style
277hook. It is important.
278"""
279
280# Check for style hook and prompt for installation if it's not there.
281# Skip this if --ignore-style was specified, there's no interactive
282# terminal to prompt, or no recognized revision control system can be
283# found.
284ignore_style = GetOption('ignore_style') or not sys.stdin.isatty()
285
286# Try to wire up Mercurial to the style hooks
287if not ignore_style and hgdir.exists():
288 style_hook = True
289 style_hooks = tuple()
290 hgrc = hgdir.File('hgrc')
291 hgrc_old = hgdir.File('hgrc.old')
292 try:
293 from mercurial import ui
294 ui = ui.ui()
295 ui.readconfig(hgrc.abspath)
296 style_hooks = (ui.config('hooks', 'pretxncommit.style', None),
297 ui.config('hooks', 'pre-qrefresh.style', None))
298 style_hook = all(style_hooks)
299 style_extension = ui.config('extensions', 'style', None)
300 except ImportError:
301 print mercurial_lib_not_found
302
303 if "python:style.check_style" in style_hooks:
304 # Try to upgrade the style hooks
305 print mercurial_style_upgrade_message
306 # continue unless user does ctrl-c/ctrl-d etc.
307 try:
308 raw_input()
309 except:
310 print "Input exception, exiting scons.\n"
311 sys.exit(1)
312 shutil.copyfile(hgrc.abspath, hgrc_old.abspath)
313 re_style_hook = re.compile(r"^([^=#]+)\.style\s*=\s*([^#\s]+).*")
314 re_style_extension = re.compile("style\s*=\s*([^#\s]+).*")
315 old, new = open(hgrc_old.abspath, 'r'), open(hgrc.abspath, 'w')
316 for l in old:
317 m_hook = re_style_hook.match(l)
318 m_ext = re_style_extension.match(l)
319 if m_hook:
320 hook, check = m_hook.groups()
321 if check != "python:style.check_style":
322 print "Warning: %s.style is using a non-default " \
323 "checker: %s" % (hook, check)
324 if hook not in ("pretxncommit", "pre-qrefresh"):
325 print "Warning: Updating unknown style hook: %s" % hook
326
327 l = "%s.style = python:hgstyle.check_style\n" % hook
328 elif m_ext and m_ext.group(1) == style_extension:
329 l = "hgstyle = %s/util/hgstyle.py\n" % main.root.abspath
330
331 new.write(l)
332 elif not style_hook:
333 print mercurial_style_message,
334 # continue unless user does ctrl-c/ctrl-d etc.
335 try:
336 raw_input()
337 except:
338 print "Input exception, exiting scons.\n"
339 sys.exit(1)
340 hgrc_path = '%s/.hg/hgrc' % main.root.abspath
341 print "Adding style hook to", hgrc_path, "\n"
342 try:
343 with open(hgrc_path, 'a') as f:
344 f.write(mercurial_style_hook)
345 except:
346 print "Error updating", hgrc_path
347 sys.exit(1)
348
349def install_git_style_hooks():
350 try:
351 gitdir = Dir(readCommand(
352 ["git", "rev-parse", "--git-dir"]).strip("\n"))
353 except Exception, e:
354 print "Warning: Failed to find git repo directory: %s" % e
355 return
356
357 git_hooks = gitdir.Dir("hooks")
358 def hook_exists(hook_name):
359 hook = git_hooks.File(hook_name)
360 return hook.exists()
361
362 def hook_install(hook_name, script):
363 hook = git_hooks.File(hook_name)
364 if hook.exists():
365 print "Warning: Can't install %s, hook already exists." % hook_name
366 return
367
368 if hook.islink():
369 print "Warning: Removing broken symlink for hook %s." % hook_name
370 os.unlink(hook.get_abspath())
371
372 if not git_hooks.exists():
373 mkdir(git_hooks.get_abspath())
374 git_hooks.clear()
375
376 abs_symlink_hooks = git_hooks.islink() and \
377 os.path.isabs(os.readlink(git_hooks.get_abspath()))
378
379 # Use a relative symlink if the hooks live in the source directory,
380 # and the hooks directory is not a symlink to an absolute path.
381 if hook.is_under(main.root) and not abs_symlink_hooks:
382 script_path = os.path.relpath(
383 os.path.realpath(script.get_abspath()),
384 os.path.realpath(hook.Dir(".").get_abspath()))
385 else:
386 script_path = script.get_abspath()
387
388 try:
389 os.symlink(script_path, hook.get_abspath())
390 except:
391 print "Error updating git %s hook" % hook_name
392 raise
393
394 if hook_exists("pre-commit") and hook_exists("commit-msg"):
395 return
396
397 print git_style_message,
398 try:
399 raw_input()
400 except:
401 print "Input exception, exiting scons.\n"
402 sys.exit(1)
403
404 git_style_script = File("util/git-pre-commit.py")
405 git_msg_script = File("ext/git-commit-msg")
406
407 hook_install("pre-commit", git_style_script)
408 hook_install("commit-msg", git_msg_script)
409
410# Try to wire up git to the style hooks
411if not ignore_style and main.root.Entry(".git").exists():
412 install_git_style_hooks()
413
414###################################################
415#
416# Figure out which configurations to set up based on the path(s) of
417# the target(s).
418#
419###################################################
420
421# Find default configuration & binary.
422Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))
423
424# helper function: find last occurrence of element in list
425def rfind(l, elt, offs = -1):
426 for i in range(len(l)+offs, 0, -1):
427 if l[i] == elt:
428 return i
429 raise ValueError, "element not found"
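# Illustrative example (hypothetical path components): with offs=-2,
#   rfind(['x', 'build', 'ARM', 'gem5.opt'], 'build', -2) == 1
# The final component is never examined, so a leaf entry literally
# named 'build' does not count as the build directory.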
430
431# Take a list of paths (or SCons Nodes) and return a list with all
432# paths made absolute and ~-expanded. Paths will be interpreted
433# relative to the launch directory unless a different root is provided
434def makePathListAbsolute(path_list, root=GetLaunchDir()):
435 return [abspath(joinpath(root, expanduser(str(p))))
436 for p in path_list]
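# For example (paths are illustrative only): if scons was launched from
# /home/user/gem5, then
#   makePathListAbsolute(['build/ARM/gem5.opt', '~/kernels'])
# yields ['/home/user/gem5/build/ARM/gem5.opt', '/home/user/kernels'].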
437
438# Each target must have 'build' in the interior of the path; the
439# directory below this will determine the build parameters. For
440# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
441# recognize that ALPHA_SE specifies the configuration because it
442# follows 'build' in the build path.
443
444# The funky assignment to "[:]" is needed to replace the list contents
445# in place rather than reassign the symbol to a new list, which
446# doesn't work (obviously!).
447BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)
448
449# Generate a list of the unique build roots and configs that the
450# collected targets reference.
451variant_paths = []
452build_root = None
453for t in BUILD_TARGETS:
454 path_dirs = t.split('/')
455 try:
456 build_top = rfind(path_dirs, 'build', -2)
457 except:
458 print "Error: no non-leaf 'build' dir found on target path", t
459 Exit(1)
460 this_build_root = joinpath('/',*path_dirs[:build_top+1])
461 if not build_root:
462 build_root = this_build_root
463 else:
464 if this_build_root != build_root:
465 print "Error: build targets not under same build root\n"\
466 " %s\n %s" % (build_root, this_build_root)
467 Exit(1)
468 variant_path = joinpath('/',*path_dirs[:build_top+2])
469 if variant_path not in variant_paths:
470 variant_paths.append(variant_path)
471
472# Make sure build_root exists (might not if this is the first build there)
473if not isdir(build_root):
474 mkdir(build_root)
475main['BUILDROOT'] = build_root
476
477Export('main')
478
479main.SConsignFile(joinpath(build_root, "sconsign"))
480
481# Default duplicate option is to use hard links, but this messes up
482# when you use emacs to edit a file in the target dir, as emacs moves
483# file to file~ then copies to file, breaking the link. Symbolic
484# (soft) links work better.
485main.SetOption('duplicate', 'soft-copy')
486
487#
488# Set up global sticky variables... these are common to an entire build
489# tree (not specific to a particular build like ALPHA_SE)
490#
491
492global_vars_file = joinpath(build_root, 'variables.global')
493
494global_vars = Variables(global_vars_file, args=ARGUMENTS)
495
496global_vars.AddVariables(
497 ('CC', 'C compiler', environ.get('CC', main['CC'])),
498 ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
499 ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
500 ('BATCH', 'Use batch pool for build and tests', False),
501 ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
502 ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
503 ('EXTRAS', 'Add extra directories to the compilation', '')
504 )
505
506# Update main environment with values from ARGUMENTS & global_vars_file
507global_vars.Update(main)
508help_texts["global_vars"] += global_vars.GenerateHelpText(main)
509
510# Save sticky variable settings back to current variables file
511global_vars.Save(global_vars_file, main)
512
513# Parse EXTRAS variable to build a list of all directories where we'll
514# look for sources etc. This list is exported as extras_dir_list.
515base_dir = main.srcdir.abspath
516if main['EXTRAS']:
517 extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
518else:
519 extras_dir_list = []
520
521Export('base_dir')
522Export('extras_dir_list')
523
524# the ext directory should be on the #include path
525main.Append(CPPPATH=[Dir('ext')])
526
527# Add shared top-level headers
528main.Prepend(CPPPATH=Dir('include'))
529
530def strip_build_path(path, env):
531 path = str(path)
532 variant_base = env['BUILDROOT'] + os.path.sep
533 if path.startswith(variant_base):
534 path = path[len(variant_base):]
535 elif path.startswith('build/'):
536 path = path[6:]
537 return path
538
539# Generate a string of the form:
540# common/path/prefix/src1, src2 -> tgt1, tgt2
541# to print while building.
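# As a hedged illustration of the resulting output: compiling
# base/foo.cc into base/foo.o under build/ARM would be summarized
# roughly as
#   [     CXX] ARM/base/foo.cc -> .o
# i.e. the common prefix is printed once and only the differing
# suffixes appear on either side of the arrow.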
542class Transform(object):
543 # all specific color settings should be here and nowhere else
544 tool_color = termcap.Normal
545 pfx_color = termcap.Yellow
546 srcs_color = termcap.Yellow + termcap.Bold
547 arrow_color = termcap.Blue + termcap.Bold
548 tgts_color = termcap.Yellow + termcap.Bold
549
550 def __init__(self, tool, max_sources=99):
551 self.format = self.tool_color + (" [%8s] " % tool) \
552 + self.pfx_color + "%s" \
553 + self.srcs_color + "%s" \
554 + self.arrow_color + " -> " \
555 + self.tgts_color + "%s" \
556 + termcap.Normal
557 self.max_sources = max_sources
558
559 def __call__(self, target, source, env, for_signature=None):
560 # truncate source list according to max_sources param
561 source = source[0:self.max_sources]
562 def strip(f):
563 return strip_build_path(str(f), env)
564 if len(source) > 0:
565 srcs = map(strip, source)
566 else:
567 srcs = ['']
568 tgts = map(strip, target)
569 # surprisingly, os.path.commonprefix is a dumb char-by-char string
570 # operation that has nothing to do with paths.
571 com_pfx = os.path.commonprefix(srcs + tgts)
572 com_pfx_len = len(com_pfx)
573 if com_pfx:
574 # do some cleanup and sanity checking on common prefix
575 if com_pfx[-1] == ".":
576 # prefix matches all but file extension: ok
577 # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
578 com_pfx = com_pfx[0:-1]
579 elif com_pfx[-1] == "/":
580 # common prefix is directory path: OK
581 pass
582 else:
583 src0_len = len(srcs[0])
584 tgt0_len = len(tgts[0])
585 if src0_len == com_pfx_len:
586 # source is a substring of target, OK
587 pass
588 elif tgt0_len == com_pfx_len:
589 # target is a substring of source, need to back up to
590 # avoid empty string on RHS of arrow
591 sep_idx = com_pfx.rfind(".")
592 if sep_idx != -1:
593 com_pfx = com_pfx[0:sep_idx]
594 else:
595 com_pfx = ''
596 elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
597 # still splitting at file extension: ok
598 pass
599 else:
600 # probably a fluke; ignore it
601 com_pfx = ''
602 # recalculate length in case com_pfx was modified
603 com_pfx_len = len(com_pfx)
604 def fmt(files):
605 f = map(lambda s: s[com_pfx_len:], files)
606 return ', '.join(f)
607 return self.format % (com_pfx, fmt(srcs), fmt(tgts))
608
609Export('Transform')
610
611# enable the regression script to use the termcap
612main['TERMCAP'] = termcap
613
614if GetOption('verbose'):
615 def MakeAction(action, string, *args, **kwargs):
616 return Action(action, *args, **kwargs)
617else:
618 MakeAction = Action
619 main['CCCOMSTR'] = Transform("CC")
620 main['CXXCOMSTR'] = Transform("CXX")
621 main['ASCOMSTR'] = Transform("AS")
622 main['ARCOMSTR'] = Transform("AR", 0)
623 main['LINKCOMSTR'] = Transform("LINK", 0)
624 main['SHLINKCOMSTR'] = Transform("SHLINK", 0)
625 main['RANLIBCOMSTR'] = Transform("RANLIB", 0)
626 main['M4COMSTR'] = Transform("M4")
627 main['SHCCCOMSTR'] = Transform("SHCC")
628 main['SHCXXCOMSTR'] = Transform("SHCXX")
629Export('MakeAction')
630
631# Initialize the Link-Time Optimization (LTO) flags
632main['LTO_CCFLAGS'] = []
633main['LTO_LDFLAGS'] = []
634
635# According to the readme, tcmalloc works best if the compiler doesn't
636# assume that we're using the builtin malloc and friends. These flags
637# are compiler-specific, so we need to set them after we detect which
638# compiler we're using.
639main['TCMALLOC_CCFLAGS'] = []
640
641CXX_version = readCommand([main['CXX'],'--version'], exception=False)
642CXX_V = readCommand([main['CXX'],'-V'], exception=False)
643
644main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
645main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
646if main['GCC'] + main['CLANG'] > 1:
647 print 'Error: How can we have two at the same time?'
648 Exit(1)
649
650# Set up default C++ compiler flags
651if main['GCC'] or main['CLANG']:
652 # As gcc and clang share many flags, do the common parts here
653 main.Append(CCFLAGS=['-pipe'])
654 main.Append(CCFLAGS=['-fno-strict-aliasing'])
655 # Enable -Wall and -Wextra and then disable the few warnings that
656 # we consistently violate
657 main.Append(CCFLAGS=['-Wall', '-Wundef', '-Wextra',
658 '-Wno-sign-compare', '-Wno-unused-parameter'])
659 # We always compile using C++11
660 main.Append(CXXFLAGS=['-std=c++11'])
661 if sys.platform.startswith('freebsd'):
662 main.Append(CCFLAGS=['-I/usr/local/include'])
663 main.Append(CXXFLAGS=['-I/usr/local/include'])
664
665 main['FILTER_PSHLINKFLAGS'] = lambda x: str(x).replace(' -shared', '')
666 main['PSHLINKFLAGS'] = main.subst('${FILTER_PSHLINKFLAGS(SHLINKFLAGS)}')
667 main['PLINKFLAGS'] = main.subst('${LINKFLAGS}')
668 shared_partial_flags = ['-r', '-nostdlib']
669 main.Append(PSHLINKFLAGS=shared_partial_flags)
670 main.Append(PLINKFLAGS=shared_partial_flags)
671else:
672 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
673 print "Don't know what compiler options to use for your compiler."
674 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
675 print termcap.Yellow + ' version:' + termcap.Normal,
676 if not CXX_version:
677 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
678 termcap.Normal
679 else:
680 print CXX_version.replace('\n', '<nl>')
681 print " If you're trying to use a compiler other than GCC"
682 print " or clang, there appears to be something wrong with your"
683 print " environment."
684 print " "
685 print " If you are trying to use a compiler other than those listed"
686 print " above you will need to fix SConstruct and "
687 print " src/SConscript to support that compiler."
688 Exit(1)
689
690if main['GCC']:
691 # Check for a supported version of gcc. >= 4.8 is chosen for its
692 # level of c++11 support. See
693 # http://gcc.gnu.org/projects/cxx0x.html for details.
694 gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
695 if compareVersions(gcc_version, "4.8") < 0:
696 print 'Error: gcc version 4.8 or newer required.'
697 print ' Installed version:', gcc_version
698 Exit(1)
699
700 main['GCC_VERSION'] = gcc_version
701
702 if compareVersions(gcc_version, '4.9') >= 0:
703 # Incremental linking with LTO is currently broken in gcc versions
704 # 4.9 and above. A version where everything works completely hasn't
705 # yet been identified.
706 #
707 # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=67548
708 main['BROKEN_INCREMENTAL_LTO'] = True
709 if compareVersions(gcc_version, '6.0') >= 0:
710 # gcc versions 6.0 and greater accept an -flinker-output flag which
711 # selects what type of output the linker should generate. This is
712 # necessary for incremental lto to work, but is also broken in
713 # current versions of gcc. It may not be necessary in future
714 # versions. We add it here since it might be, and as a reminder that
715 # it exists. It's excluded if lto is being forced.
716 #
717 # https://gcc.gnu.org/gcc-6/changes.html
718 # https://gcc.gnu.org/ml/gcc-patches/2015-11/msg03161.html
719 # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=69866
720 if not GetOption('force_lto'):
721 main.Append(PSHLINKFLAGS='-flinker-output=rel')
722 main.Append(PLINKFLAGS='-flinker-output=rel')
723
724 # gcc from version 4.8 and above generates "rep; ret" instructions
725 # to avoid performance penalties on certain AMD chips. Older
726 # assemblers detect this as an error, "Error: expecting string
727 # instruction after `rep'"
728 as_version_raw = readCommand([main['AS'], '-v', '/dev/null',
729 '-o', '/dev/null'],
730 exception=False).split()
731
732 # version strings may contain extra distro-specific
733 # qualifiers, so play it safe and keep only what comes before
734 # the first hyphen
735 as_version = as_version_raw[-1].split('-')[0] if as_version_raw else None
736
737 if not as_version or compareVersions(as_version, "2.23") < 0:
738 print termcap.Yellow + termcap.Bold + \
739 'Warning: This combination of gcc and binutils has' + \
740 ' known incompatibilities.\n' + \
741 ' If you encounter build problems, please update ' + \
742 'binutils to 2.23.' + \
743 termcap.Normal
744
745 # Make sure we warn if the user has requested to compile with the
746 # Undefined Behavior Sanitizer and this version of gcc does not
747 # support it.
748 if GetOption('with_ubsan') and \
749 compareVersions(gcc_version, '4.9') < 0:
750 print termcap.Yellow + termcap.Bold + \
751 'Warning: UBSan is only supported using gcc 4.9 and later.' + \
752 termcap.Normal
753
754 disable_lto = GetOption('no_lto')
755 if not disable_lto and main.get('BROKEN_INCREMENTAL_LTO', False) and \
756 not GetOption('force_lto'):
757 print termcap.Yellow + termcap.Bold + \
758 'Warning: Your compiler doesn\'t support incremental linking' + \
759 ' and lto at the same time, so lto is being disabled. To force' + \
760 ' lto on anyway, use the --force-lto option. That will disable' + \
761 ' partial linking.' + \
762 termcap.Normal
763 disable_lto = True
764
765 # Add the appropriate Link-Time Optimization (LTO) flags
766 # unless LTO is explicitly turned off. Note that these flags
767 # are only used by the fast target.
768 if not disable_lto:
769 # Pass the LTO flag when compiling to produce GIMPLE
770 # output, we merely create the flags here and only append
771 # them later
772 main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
773
774 # Use the same amount of jobs for LTO as we are running
775 # scons with
776 main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
777
778 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
779 '-fno-builtin-realloc', '-fno-builtin-free'])
780
781 # add option to check for undeclared overrides
782 if compareVersions(gcc_version, "5.0") > 0:
783 main.Append(CCFLAGS=['-Wno-error=suggest-override'])
784
785elif main['CLANG']:
786 # Check for a supported version of clang, >= 3.1 is needed to
787 # support similar features as gcc 4.8. See
788 # http://clang.llvm.org/cxx_status.html for details
789 clang_version_re = re.compile(".* version (\d+\.\d+)")
790 clang_version_match = clang_version_re.search(CXX_version)
791 if (clang_version_match):
792 clang_version = clang_version_match.groups()[0]
793 if compareVersions(clang_version, "3.1") < 0:
794 print 'Error: clang version 3.1 or newer required.'
795 print ' Installed version:', clang_version
796 Exit(1)
797 else:
798 print 'Error: Unable to determine clang version.'
799 Exit(1)
800
801 # clang has a few additional warnings that we disable: extraneous
802 # parentheses are allowed due to Ruby's printing of the AST, and
803 # self-assignments are allowed as the generated CPU code
804 # relies on this
805 main.Append(CCFLAGS=['-Wno-parentheses',
806 '-Wno-self-assign',
807 # Some versions of libstdc++ (4.8?) seem to
808 # use struct hash and class hash
809 # interchangeably.
810 '-Wno-mismatched-tags',
811 ])
812
813 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])
814
815 # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
816 # opposed to libstdc++, as the latter is dated.
817 if sys.platform == "darwin":
818 main.Append(CXXFLAGS=['-stdlib=libc++'])
819 main.Append(LIBS=['c++'])
820
821 # On FreeBSD we need libthr.
822 if sys.platform.startswith('freebsd'):
823 main.Append(LIBS=['thr'])
824
825else:
826 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
827 print "Don't know what compiler options to use for your compiler."
828 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
829 print termcap.Yellow + ' version:' + termcap.Normal,
830 if not CXX_version:
831 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
832 termcap.Normal
833 else:
834 print CXX_version.replace('\n', '<nl>')
835 print " If you're trying to use a compiler other than GCC"
836 print " or clang, there appears to be something wrong with your"
837 print " environment."
838 print " "
839 print " If you are trying to use a compiler other than those listed"
840 print " above you will need to fix SConstruct and "
841 print " src/SConscript to support that compiler."
842 Exit(1)
843
844# Set up common yacc/bison flags (needed for Ruby)
845main['YACCFLAGS'] = '-d'
846main['YACCHXXFILESUFFIX'] = '.hh'
847
848# Do this after we save the settings back, or else we'll tack on an
849# extra 'qdo' every time we run scons.
850if main['BATCH']:
851 main['CC'] = main['BATCH_CMD'] + ' ' + main['CC']
852 main['CXX'] = main['BATCH_CMD'] + ' ' + main['CXX']
853 main['AS'] = main['BATCH_CMD'] + ' ' + main['AS']
854 main['AR'] = main['BATCH_CMD'] + ' ' + main['AR']
855 main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']
856
857if sys.platform == 'cygwin':
858 # cygwin has some header file issues...
859 main.Append(CCFLAGS=["-Wno-uninitialized"])
860
861# Check for the protobuf compiler
862protoc_version = readCommand([main['PROTOC'], '--version'],
863 exception='').split()
864
865# First two words should be "libprotoc x.y.z"
866if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
867 print termcap.Yellow + termcap.Bold + \
868 'Warning: Protocol buffer compiler (protoc) not found.\n' + \
869 ' Please install protobuf-compiler for tracing support.' + \
870 termcap.Normal
871 main['PROTOC'] = False
872else:
873 # Based on the availability of the compress stream wrappers,
874 # require 2.1.0
875 min_protoc_version = '2.1.0'
876 if compareVersions(protoc_version[1], min_protoc_version) < 0:
877 print termcap.Yellow + termcap.Bold + \
878 'Warning: protoc version', min_protoc_version, \
879 'or newer required.\n' + \
880 ' Installed version:', protoc_version[1], \
881 termcap.Normal
882 main['PROTOC'] = False
883 else:
884 # Attempt to determine the appropriate include path and
885 # library path using pkg-config, that means we also need to
886 # check for pkg-config. Note that it is possible to use
887 # protobuf without the involvement of pkg-config. Later on we
888 # do a library config check and at that point the test
889 # will fail if libprotobuf cannot be found.
890 if readCommand(['pkg-config', '--version'], exception=''):
891 try:
892 # Attempt to establish what linking flags to add for protobuf
893 # using pkg-config
894 main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
895 except:
896 print termcap.Yellow + termcap.Bold + \
897 'Warning: pkg-config could not get protobuf flags.' + \
898 termcap.Normal
899
900
901# Check for 'timeout' from GNU coreutils. If present, regressions will
902# be run with a time limit. We require version 8.13 since we rely on
903# support for the '--foreground' option.
904if sys.platform.startswith('freebsd'):
905 timeout_lines = readCommand(['gtimeout', '--version'],
906 exception='').splitlines()
907else:
908 timeout_lines = readCommand(['timeout', '--version'],
909 exception='').splitlines()
910# Get the first line and tokenize it
911timeout_version = timeout_lines[0].split() if timeout_lines else []
912main['TIMEOUT'] = timeout_version and \
913 compareVersions(timeout_version[-1], '8.13') >= 0
914
915# Add a custom Check function to test for structure members.
916def CheckMember(context, include, decl, member, include_quotes="<>"):
917 context.Message("Checking for member %s in %s..." %
918 (member, decl))
919 text = """
920#include %(header)s
921int main(){
922 %(decl)s test;
923 (void)test.%(member)s;
924 return 0;
925};
926""" % { "header" : include_quotes[0] + include + include_quotes[1],
927 "decl" : decl,
928 "member" : member,
929 }
930
931 ret = context.TryCompile(text, extension=".cc")
932 context.Result(ret)
933 return ret
934
935# Platform-specific configuration. Note again that we assume that all
936# builds under a given build root run on the same host platform.
937conf = Configure(main,
938 conf_dir = joinpath(build_root, '.scons_config'),
939 log_file = joinpath(build_root, 'scons_config.log'),
940 custom_tests = {
941 'CheckMember' : CheckMember,
942 })
943
944# Check if we should compile a 64 bit binary on Mac OS X/Darwin
945try:
946 import platform
947 uname = platform.uname()
948 if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
949 if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
950 main.Append(CCFLAGS=['-arch', 'x86_64'])
951 main.Append(CFLAGS=['-arch', 'x86_64'])
952 main.Append(LINKFLAGS=['-arch', 'x86_64'])
953 main.Append(ASFLAGS=['-arch', 'x86_64'])
954except:
955 pass
956
957# Recent versions of scons substitute a "Null" object for Configure()
958# when configuration isn't necessary, e.g., if the "--help" option is
959# present. Unfortunately this Null object always returns false,
960# breaking all our configuration checks. We replace it with our own
961# more optimistic null object that returns True instead.
962if not conf:
963 def NullCheck(*args, **kwargs):
964 return True
965
966 class NullConf:
967 def __init__(self, env):
968 self.env = env
969 def Finish(self):
970 return self.env
971 def __getattr__(self, mname):
972 return NullCheck
973
974 conf = NullConf(main)
975
976# Cache build files in the supplied directory.
977if main['M5_BUILD_CACHE']:
978 print 'Using build cache located at', main['M5_BUILD_CACHE']
979 CacheDir(main['M5_BUILD_CACHE'])
980
981main['USE_PYTHON'] = not GetOption('without_python')
982if main['USE_PYTHON']:
983 # Find Python include and library directories for embedding the
984 # interpreter. We rely on python-config to resolve the appropriate
985 # includes and linker flags. ParseConfig does not seem to understand
986 # the more exotic linker flags such as -Xlinker and -export-dynamic so
987 # we add them explicitly below. If you want to link in an alternate
988 # version of python, see above for instructions on how to invoke
989 # scons with the appropriate PATH set.
990 #
991 # First we check if python2-config exists, else we use python-config
992 python_config = readCommand(['which', 'python2-config'],
993 exception='').strip()
994 if not os.path.exists(python_config):
995 python_config = readCommand(['which', 'python-config'],
996 exception='').strip()
997 py_includes = readCommand([python_config, '--includes'],
998 exception='').split()
999 # Strip the -I from the include folders before adding them to the
1000 # CPPPATH
1001 main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))
1002
1003 # Read the linker flags and split them into libraries and other link
1004 # flags. The libraries are added later through the call to CheckLib.
1005 py_ld_flags = readCommand([python_config, '--ldflags'],
1006 exception='').split()
1007 py_libs = []
1008 for lib in py_ld_flags:
1009 if not lib.startswith('-l'):
1010 main.Append(LINKFLAGS=[lib])
1011 else:
1012 lib = lib[2:]
1013 if lib not in py_libs:
1014 py_libs.append(lib)
1015
1016 # verify that this stuff works
1017 if not conf.CheckHeader('Python.h', '<>'):
1018 print "Error: can't find Python.h header in", py_includes
1019 print "Install Python headers (package python-dev on Ubuntu and RedHat)"
1020 Exit(1)
1021
1022 for lib in py_libs:
1023 if not conf.CheckLib(lib):
1024 print "Error: can't find library %s required by python" % lib
1025 Exit(1)
1026
1027# On Solaris you need to use libsocket for socket ops
1028if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
1029 if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
1030 print "Can't find library with socket calls (e.g. accept())"
1031 Exit(1)
1032
1033# Check for zlib. If the check passes, libz will be automatically
1034# added to the LIBS environment variable.
1035if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
1036 print 'Error: did not find needed zlib compression library '\
1037 'and/or zlib.h header file.'
1038 print ' Please install zlib and try again.'
1039 Exit(1)
1040
1041# If we have the protobuf compiler, also make sure we have the
1042# development libraries. If the check passes, libprotobuf will be
1043# automatically added to the LIBS environment variable. After
1044# this, we can use the HAVE_PROTOBUF flag to determine if we have
1045# got both protoc and libprotobuf available.
1046main['HAVE_PROTOBUF'] = main['PROTOC'] and \
1047 conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
1048 'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')
1049
1050# If we have the compiler but not the library, print another warning.
1051if main['PROTOC'] and not main['HAVE_PROTOBUF']:
1052 print termcap.Yellow + termcap.Bold + \
1053 'Warning: did not find protocol buffer library and/or headers.\n' + \
1054 ' Please install libprotobuf-dev for tracing support.' + \
1055 termcap.Normal
1056
1057# Check for librt.
1058have_posix_clock = \
1059 conf.CheckLibWithHeader(None, 'time.h', 'C',
1060 'clock_nanosleep(0,0,NULL,NULL);') or \
1061 conf.CheckLibWithHeader('rt', 'time.h', 'C',
1062 'clock_nanosleep(0,0,NULL,NULL);')
1063
1064have_posix_timers = \
1065 conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
1066 'timer_create(CLOCK_MONOTONIC, NULL, NULL);')
1067
1068if not GetOption('without_tcmalloc'):
1069 if conf.CheckLib('tcmalloc'):
1070 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1071 elif conf.CheckLib('tcmalloc_minimal'):
1072 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1073 else:
1074 print termcap.Yellow + termcap.Bold + \
1075 "You can get a 12% performance improvement by "\
1076 "installing tcmalloc (libgoogle-perftools-dev package "\
1077 "on Ubuntu or RedHat)." + termcap.Normal
1078
1079
1080# Detect back trace implementations. The last implementation in the
1081# list will be used by default.
1082backtrace_impls = [ "none" ]
1083
1084if conf.CheckLibWithHeader(None, 'execinfo.h', 'C',
1085 'backtrace_symbols_fd((void*)0, 0, 0);'):
1086 backtrace_impls.append("glibc")
1087elif conf.CheckLibWithHeader('execinfo', 'execinfo.h', 'C',
1088 'backtrace_symbols_fd((void*)0, 0, 0);'):
1089 # NetBSD and FreeBSD need libexecinfo.
1090 backtrace_impls.append("glibc")
1091 main.Append(LIBS=['execinfo'])
1092
1093if backtrace_impls[-1] == "none":
1094 default_backtrace_impl = "none"
1095 print termcap.Yellow + termcap.Bold + \
1096 "No suitable back trace implementation found." + \
1097 termcap.Normal
1098
1099if not have_posix_clock:
1100 print "Can't find library for POSIX clocks."
1101
1102# Check for <fenv.h> (C99 FP environment control)
1103have_fenv = conf.CheckHeader('fenv.h', '<>')
1104if not have_fenv:
1105 print "Warning: Header file <fenv.h> not found."
1106 print " This host has no IEEE FP rounding mode control."
1107
1108# Check for <png.h> (the libpng library is needed to dump the
1109# frame buffer image in png format)
1110have_png = conf.CheckHeader('png.h', '<>')
1111if not have_png:
1112 print "Warning: Header file <png.h> not found."
1113 print " This host has no libpng library."
1114 print " Disabling support for PNG framebuffers."
1115
1116# Check if we should enable KVM-based hardware virtualization. The API
1117# we rely on has existed since version 2.6.36 of the kernel, but somehow
1118# the KVM_API_VERSION does not reflect the change. We test for one of
1119# the types as a fall back.
1120have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
1121if not have_kvm:
1122 print "Info: Compatible header file <linux/kvm.h> not found, " \
1123 "disabling KVM support."
1124
1125# Check if the TUN/TAP driver is available.
1126have_tuntap = conf.CheckHeader('linux/if_tun.h', '<>')
1127if not have_tuntap:
1128 print "Info: Compatible header file <linux/if_tun.h> not found."
1129
1130# x86 needs support for xsave. We test for the structure here since we
1131# won't be able to run new tests by the time we know which ISA we're
1132# targeting.
1133have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
1134 '#include <linux/kvm.h>') != 0
1135
1136# Check if the requested target ISA is compatible with the host
1137def is_isa_kvm_compatible(isa):
1138 try:
1139 import platform
1140 host_isa = platform.machine()
1141 except:
1142 print "Warning: Failed to determine host ISA."
1143 return False
1144
1145 if not have_posix_timers:
1146 print "Warning: Can not enable KVM, host seems to lack support " \
1147 "for POSIX timers"
1148 return False
1149
1150 if isa == "arm":
1151 return host_isa in ( "armv7l", "aarch64" )
1152 elif isa == "x86":
1153 if host_isa != "x86_64":
1154 return False
1155
1156 if not have_kvm_xsave:
1157 print "KVM on x86 requires xsave support in kernel headers."
1158 return False
1159
1160 return True
1161 else:
1162 return False
1163
1164
1165# Check if the exclude_host attribute is available. We want this to
1166# get accurate instruction counts in KVM.
1167main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
1168 'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')
1169
1170
1171######################################################################
1172#
1173# Finish the configuration
1174#
1175main = conf.Finish()
1176
1177######################################################################
1178#
1179# Collect all non-global variables
1180#
1181
1182# Define the universe of supported ISAs
1183all_isa_list = [ ]
1184all_gpu_isa_list = [ ]
1185Export('all_isa_list')
1186Export('all_gpu_isa_list')
1187
1188class CpuModel(object):
1189 '''The CpuModel class encapsulates everything the ISA parser needs to
1190 know about a particular CPU model.'''
1191
1192 # Dict of available CPU model objects. Accessible as CpuModel.dict.
1193 dict = {}
1194
1195 # Constructor. Automatically adds models to CpuModel.dict.
1196 def __init__(self, name, default=False):
1197 self.name = name # name of model
1198
1199 # This cpu is enabled by default
1200 self.default = default
1201
1202 # Add self to dict
1203 if name in CpuModel.dict:
1204 raise AttributeError, "CpuModel '%s' already registered" % name
1205 CpuModel.dict[name] = self
1206
1207Export('CpuModel')
1208
1209# Sticky variables get saved in the variables file so they persist from
1210# one invocation to the next (unless overridden, in which case the new
1211# value becomes sticky).
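# For example (values are illustrative): after
#   % scons build/X86/gem5.opt USE_KVM=False
# a later plain 'scons build/X86/gem5.opt' keeps USE_KVM=False until the
# variable is explicitly set again on the command line.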
1212sticky_vars = Variables(args=ARGUMENTS)
1213Export('sticky_vars')
1214
1215# Sticky variables that should be exported
1216export_vars = []
1217Export('export_vars')
1218
1219# For Ruby
1220all_protocols = []
1221Export('all_protocols')
1222protocol_dirs = []
1223Export('protocol_dirs')
1224slicc_includes = []
1225Export('slicc_includes')
1226
1227# Walk the tree and execute all SConsopts scripts that will add to the
1228# above variables
1229if GetOption('verbose'):
1230 print "Reading SConsopts"
1231for bdir in [ base_dir ] + extras_dir_list:
1232 if not isdir(bdir):
1233 print "Error: directory '%s' does not exist" % bdir
1234 Exit(1)
1235 for root, dirs, files in os.walk(bdir):
1236 if 'SConsopts' in files:
1237 if GetOption('verbose'):
1238 print "Reading", joinpath(root, 'SConsopts')
1239 SConscript(joinpath(root, 'SConsopts'))
1240
1241all_isa_list.sort()
1242all_gpu_isa_list.sort()
1243
1244sticky_vars.AddVariables(
1245 EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
1246 EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list),
1247 ListVariable('CPU_MODELS', 'CPU models',
1248 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
1249 sorted(CpuModel.dict.keys())),
1250 BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
1251 False),
1252 BoolVariable('SS_COMPATIBLE_FP',
1253 'Make floating-point results compatible with SimpleScalar',
1254 False),
1255 BoolVariable('USE_SSE2',
1256 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
1257 False),
1258 BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
1259 BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
1260 BoolVariable('USE_PNG', 'Enable support for PNG images', have_png),
1261 BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability',
1262 False),
1263 BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models',
1264 have_kvm),
1265 BoolVariable('USE_TUNTAP',
1266 'Enable using a tap device to bridge to the host network',
1267 have_tuntap),
1268 BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
1269 EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
1270 all_protocols),
1271 EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
1272 backtrace_impls[-1], backtrace_impls)
1273 )
1274
1275# These variables get exported to #defines in config/*.hh (see src/SConscript).
1276export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
1277 'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'USE_TUNTAP',
1278 'PROTOCOL', 'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST',
1279 'USE_PNG']
1280
1281###################################################
1282#
1283# Define a SCons builder for configuration flag headers.
1284#
1285###################################################
1286
1287# This function generates a config header file that #defines the
1288# variable symbol to the current variable setting (0 or 1). The source
1289# operands are the name of the variable and a Value node containing the
1290# value of the variable.
1291def build_config_file(target, source, env):
1292 (variable, value) = [s.get_contents() for s in source]
1293 f = file(str(target[0]), 'w')
1294 print >> f, '#define', variable, value
1295 f.close()
1296 return None
1297
1298# Combine the two functions into a scons Action object.
1299config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))
1300
1301# The emitter munges the source & target node lists to reflect what
1302# we're really doing.
1303def config_emitter(target, source, env):
1304 # extract variable name from Builder arg
1305 variable = str(target[0])
1306 # True target is config header file
1307 target = joinpath('config', variable.lower() + '.hh')
1308 val = env[variable]
1309 if isinstance(val, bool):
1310 # Force value to 0/1
1311 val = int(val)
1312 elif isinstance(val, str):
1313 val = '"' + val + '"'
1314
1315 # Sources are variable name & value (packaged in SCons Value nodes)
1316 return ([target], [Value(variable), Value(val)])
1317
1318config_builder = Builder(emitter = config_emitter, action = config_action)
1319
1320main.Append(BUILDERS = { 'ConfigFile' : config_builder })
1321
1322###################################################
1323#
1324# Builders for static and shared partially linked object files.
1325#
1326###################################################
1327
1328partial_static_builder = Builder(action=SCons.Defaults.LinkAction,
1329 src_suffix='$OBJSUFFIX',
1330 src_builder=['StaticObject', 'Object'],
1331 LINKFLAGS='$PLINKFLAGS',
1332 LIBS='')
1333
1334def partial_shared_emitter(target, source, env):
1335 for tgt in target:
1336 tgt.attributes.shared = 1
1337 return (target, source)
1338partial_shared_builder = Builder(action=SCons.Defaults.ShLinkAction,
1339 emitter=partial_shared_emitter,
1340 src_suffix='$SHOBJSUFFIX',
1341 src_builder='SharedObject',
1342 SHLINKFLAGS='$PSHLINKFLAGS',
1343 LIBS='')
1344
1345main.Append(BUILDERS = { 'PartialShared' : partial_shared_builder,
1346 'PartialStatic' : partial_static_builder })
1347
1348# builds in ext are shared across all configs in the build root.
1349ext_dir = abspath(joinpath(str(main.root), 'ext'))
1350ext_build_dirs = []
1351for root, dirs, files in os.walk(ext_dir):
1352 if 'SConscript' in files:
1353 build_dir = os.path.relpath(root, ext_dir)
1354 ext_build_dirs.append(build_dir)
1355 main.SConscript(joinpath(root, 'SConscript'),
1356 variant_dir=joinpath(build_root, build_dir))
1357
1358main.Prepend(CPPPATH=Dir('ext/pybind11/include/'))
1359
1360###################################################
1361#
1362# This builder and wrapper method are used to set up a directory with
1363# switching headers. Those are headers which are in a generic location and
1364# that include more specific headers from a directory chosen at build time
1365# based on the current build settings.
1366#
1367###################################################
1368
1369def build_switching_header(target, source, env):
1370 path = str(target[0])
1371 subdir = str(source[0])
1372 dp, fp = os.path.split(path)
1373 dp = os.path.relpath(os.path.realpath(dp),
1374 os.path.realpath(env['BUILDDIR']))
1375 with open(path, 'w') as hdr:
1376 print >>hdr, '#include "%s/%s/%s"' % (dp, subdir, fp)
1377
1378switching_header_action = MakeAction(build_switching_header,
1379 Transform('GENERATE'))
1380
1381switching_header_builder = Builder(action=switching_header_action,
1382 source_factory=Value,
1383 single_source=True)
1384
1385main.Append(BUILDERS = { 'SwitchingHeader': switching_header_builder })
1386
1387def switching_headers(self, headers, source):
1388 for header in headers:
1389 self.SwitchingHeader(header, source)
1390
1391main.AddMethod(switching_headers, 'SwitchingHeaders')
1392
1393###################################################
1394#
1395# Define build environments for selected configurations.
1396#
1397###################################################
1398
1399for variant_path in variant_paths:
1400 if not GetOption('silent'):
1401 print "Building in", variant_path
1402
1403 # Make a copy of the build-root environment to use for this config.
1404 env = main.Clone()
1405 env['BUILDDIR'] = variant_path
1406
1407 # variant_dir is the tail component of build path, and is used to
1408 # determine the build parameters (e.g., 'ALPHA_SE')
1409 (build_root, variant_dir) = splitpath(variant_path)
1410
1411 # Set env variables according to the build directory config.
1412 sticky_vars.files = []
1413 # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
1414 # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
1415 # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
1416 current_vars_file = joinpath(build_root, 'variables', variant_dir)
1417 if isfile(current_vars_file):
1418 sticky_vars.files.append(current_vars_file)
1419 if not GetOption('silent'):
1420 print "Using saved variables file %s" % current_vars_file
1421 elif variant_dir in ext_build_dirs:
1422 # Things in ext are built without a variant directory.
1423 continue
1424 else:
1425 # Build dir-specific variables file doesn't exist.
1426
1427 # Make sure the directory is there so we can create it later
1428 opt_dir = dirname(current_vars_file)
1429 if not isdir(opt_dir):
1430 mkdir(opt_dir)
1431
1432 # Get default build variables from source tree. Variables are
1433 # normally determined by name of $VARIANT_DIR, but can be
1434 # overridden by '--default=' arg on command line.
1435 default = GetOption('default')
1436 opts_dir = joinpath(main.root.abspath, 'build_opts')
1437 if default:
1438 default_vars_files = [joinpath(build_root, 'variables', default),
1439 joinpath(opts_dir, default)]
1440 else:
1441 default_vars_files = [joinpath(opts_dir, variant_dir)]
1442 existing_files = filter(isfile, default_vars_files)
1443 if existing_files:
1444 default_vars_file = existing_files[0]
1445 sticky_vars.files.append(default_vars_file)
1446 print "Variables file %s not found,\n using defaults in %s" \
1447 % (current_vars_file, default_vars_file)
1448 else:
1449 print "Error: cannot find variables file %s or " \
1450 "default file(s) %s" \
1451 % (current_vars_file, ' or '.join(default_vars_files))
1452 Exit(1)
1453
1454 # Apply current variable settings to env
1455 sticky_vars.Update(env)
1456
1457 help_texts["local_vars"] += \
1458 "Build variables for %s:\n" % variant_dir \
1459 + sticky_vars.GenerateHelpText(env)
1460
1461 # Process variable settings.
1462
1463 if not have_fenv and env['USE_FENV']:
1464 print "Warning: <fenv.h> not available; " \
1465 "forcing USE_FENV to False in", variant_dir + "."
1466 env['USE_FENV'] = False
1467
1468 if not env['USE_FENV']:
1469 print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
1470 print " FP results may deviate slightly from other platforms."
1471
1472 if not have_png and env['USE_PNG']:
1473 print "Warning: <png.h> not available; " \
1474 "forcing USE_PNG to False in", variant_dir + "."
1475 env['USE_PNG'] = False
1476
1477 if env['USE_PNG']:
1478 env.Append(LIBS=['png'])
1479
1480 if env['EFENCE']:
1481 env.Append(LIBS=['efence'])
1482
1483 if env['USE_KVM']:
1484 if not have_kvm:
1485            print "Warning: Cannot enable KVM, host seems to lack KVM support"
1486 env['USE_KVM'] = False
1487 elif not is_isa_kvm_compatible(env['TARGET_ISA']):
1488 print "Info: KVM support disabled due to unsupported host and " \
1489 "target ISA combination"
1490 env['USE_KVM'] = False
1491
1492 if env['USE_TUNTAP']:
1493 if not have_tuntap:
1494 print "Warning: Can't connect EtherTap with a tap device."
1495 env['USE_TUNTAP'] = False
1496
1497 if env['BUILD_GPU']:
1498 env.Append(CPPDEFINES=['BUILD_GPU'])
1499
1500 # Warn about missing optional functionality
1501 if env['USE_KVM']:
1502 if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
1503 print "Warning: perf_event headers lack support for the " \
1504 "exclude_host attribute. KVM instruction counts will " \
1505 "be inaccurate."
1506
1507 # Save sticky variable settings back to current variables file
1508 sticky_vars.Save(current_vars_file, env)
1509
1510 if env['USE_SSE2']:
1511 env.Append(CCFLAGS=['-msse2'])
1512
1513 # The src/SConscript file sets up the build rules in 'env' according
1514 # to the configured variables. It returns a list of environments,
1515 # one for each variant build (debug, opt, etc.)
1516 SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')
1517
1518# base help text
1519Help('''
1520Usage: scons [scons options] [build variables] [target(s)]
1521
1522Extra scons options:
1523%(options)s
1524
1525Global build variables:
1526%(global_vars)s
1527
1528%(local_vars)s
1529''' % help_texts)
188main_dict_keys = main.Dictionary().keys()
189
190# Check that we have a C/C++ compiler
191if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
192 print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
193 Exit(1)
194
195# add useful python code to PYTHONPATH so it can be used by subprocesses
196# as well
197main.AppendENVPath('PYTHONPATH', extra_python_paths)
198
199########################################################################
200#
201# Mercurial Stuff.
202#
203# If the gem5 directory is a mercurial repository, we should do some
204# extra things.
205#
206########################################################################
207
208hgdir = main.root.Dir(".hg")
209
210
211style_message = """
212You're missing the gem5 style hook, which automatically checks your code
213against the gem5 style rules on %s.
214This script will now install the hook in your %s.
215Press enter to continue, or ctrl-c to abort: """
216
217mercurial_style_message = """
218You're missing the gem5 style hook, which automatically checks your code
219against the gem5 style rules on hg commit and qrefresh commands.
220This script will now install the hook in your .hg/hgrc file.
221Press enter to continue, or ctrl-c to abort: """
222
223git_style_message = """
224You're missing the gem5 style or commit message hook. These hooks help
225to ensure that your code follows gem5's style rules on git commit.
226This script will now install the hook in your .git/hooks/ directory.
227Press enter to continue, or ctrl-c to abort: """
228
229mercurial_style_upgrade_message = """
230Your Mercurial style hooks are not up-to-date. This script will now
231try to automatically update them. A backup of your hgrc will be saved
232in .hg/hgrc.old.
233Press enter to continue, or ctrl-c to abort: """
234
235mercurial_style_hook = """
236# The following lines were automatically added by gem5/SConstruct
237# to provide the gem5 style-checking hooks
238[extensions]
239hgstyle = %s/util/hgstyle.py
240
241[hooks]
242pretxncommit.style = python:hgstyle.check_style
243pre-qrefresh.style = python:hgstyle.check_style
244# End of SConstruct additions
245
246""" % (main.root.abspath)
247
248mercurial_lib_not_found = """
249Mercurial libraries cannot be found, ignoring style hook. If
250you are a gem5 developer, please fix this and run the style
251hook. It is important.
252"""
253
254# Check for style hook and prompt for installation if it's not there.
255# Skip this if --ignore-style was specified, there's no interactive
256# terminal to prompt, or no recognized revision control system can be
257# found.
258ignore_style = GetOption('ignore_style') or not sys.stdin.isatty()
259
260# Try to wire up Mercurial to the style hooks
261if not ignore_style and hgdir.exists():
262 style_hook = True
263 style_hooks = tuple()
264 hgrc = hgdir.File('hgrc')
265 hgrc_old = hgdir.File('hgrc.old')
266 try:
267 from mercurial import ui
268 ui = ui.ui()
269 ui.readconfig(hgrc.abspath)
270 style_hooks = (ui.config('hooks', 'pretxncommit.style', None),
271 ui.config('hooks', 'pre-qrefresh.style', None))
272 style_hook = all(style_hooks)
273 style_extension = ui.config('extensions', 'style', None)
274 except ImportError:
275 print mercurial_lib_not_found
276
277 if "python:style.check_style" in style_hooks:
278 # Try to upgrade the style hooks
279 print mercurial_style_upgrade_message
280 # continue unless user does ctrl-c/ctrl-d etc.
281 try:
282 raw_input()
283 except:
284 print "Input exception, exiting scons.\n"
285 sys.exit(1)
286 shutil.copyfile(hgrc.abspath, hgrc_old.abspath)
287 re_style_hook = re.compile(r"^([^=#]+)\.style\s*=\s*([^#\s]+).*")
288 re_style_extension = re.compile("style\s*=\s*([^#\s]+).*")
289 old, new = open(hgrc_old.abspath, 'r'), open(hgrc.abspath, 'w')
290 for l in old:
291 m_hook = re_style_hook.match(l)
292 m_ext = re_style_extension.match(l)
293 if m_hook:
294 hook, check = m_hook.groups()
295 if check != "python:style.check_style":
296 print "Warning: %s.style is using a non-default " \
297 "checker: %s" % (hook, check)
298 if hook not in ("pretxncommit", "pre-qrefresh"):
299 print "Warning: Updating unknown style hook: %s" % hook
300
301 l = "%s.style = python:hgstyle.check_style\n" % hook
302 elif m_ext and m_ext.group(1) == style_extension:
303 l = "hgstyle = %s/util/hgstyle.py\n" % main.root.abspath
304
305 new.write(l)
306 elif not style_hook:
307 print mercurial_style_message,
308 # continue unless user does ctrl-c/ctrl-d etc.
309 try:
310 raw_input()
311 except:
312 print "Input exception, exiting scons.\n"
313 sys.exit(1)
314 hgrc_path = '%s/.hg/hgrc' % main.root.abspath
315 print "Adding style hook to", hgrc_path, "\n"
316 try:
317 with open(hgrc_path, 'a') as f:
318 f.write(mercurial_style_hook)
319 except:
320 print "Error updating", hgrc_path
321 sys.exit(1)
322
323def install_git_style_hooks():
324 try:
325 gitdir = Dir(readCommand(
326 ["git", "rev-parse", "--git-dir"]).strip("\n"))
327 except Exception, e:
328 print "Warning: Failed to find git repo directory: %s" % e
329 return
330
331 git_hooks = gitdir.Dir("hooks")
332 def hook_exists(hook_name):
333 hook = git_hooks.File(hook_name)
334 return hook.exists()
335
336 def hook_install(hook_name, script):
337 hook = git_hooks.File(hook_name)
338 if hook.exists():
339 print "Warning: Can't install %s, hook already exists." % hook_name
340 return
341
342 if hook.islink():
343 print "Warning: Removing broken symlink for hook %s." % hook_name
344 os.unlink(hook.get_abspath())
345
346 if not git_hooks.exists():
347 mkdir(git_hooks.get_abspath())
348 git_hooks.clear()
349
350 abs_symlink_hooks = git_hooks.islink() and \
351 os.path.isabs(os.readlink(git_hooks.get_abspath()))
352
353 # Use a relative symlink if the hooks live in the source directory,
354 # and the hooks directory is not a symlink to an absolute path.
355 if hook.is_under(main.root) and not abs_symlink_hooks:
356 script_path = os.path.relpath(
357 os.path.realpath(script.get_abspath()),
358 os.path.realpath(hook.Dir(".").get_abspath()))
359 else:
360 script_path = script.get_abspath()
361
362 try:
363 os.symlink(script_path, hook.get_abspath())
364 except:
365 print "Error updating git %s hook" % hook_name
366 raise
367
368 if hook_exists("pre-commit") and hook_exists("commit-msg"):
369 return
370
371 print git_style_message,
372 try:
373 raw_input()
374 except:
375 print "Input exception, exiting scons.\n"
376 sys.exit(1)
377
378 git_style_script = File("util/git-pre-commit.py")
379 git_msg_script = File("ext/git-commit-msg")
380
381 hook_install("pre-commit", git_style_script)
382 hook_install("commit-msg", git_msg_script)
383
384# Try to wire up git to the style hooks
385if not ignore_style and main.root.Entry(".git").exists():
386 install_git_style_hooks()
387
388###################################################
389#
390# Figure out which configurations to set up based on the path(s) of
391# the target(s).
392#
393###################################################
394
395# Find default configuration & binary.
396Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))
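# Illustrative note (comment only): the default target above can be
# overridden through the M5_DEFAULT_BINARY environment variable before
# invoking scons; the path below is just an example configuration, not a
# recommendation.
#
#   % M5_DEFAULT_BINARY=build/ARM/gem5.opt scons
#
# With no explicit target and no environment override, a plain 'scons'
# builds the hard-coded default shown above.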
397
398# helper function: find last occurrence of element in list
399def rfind(l, elt, offs = -1):
400 for i in range(len(l)+offs, 0, -1):
401 if l[i] == elt:
402 return i
403 raise ValueError, "element not found"
404
405# Take a list of paths (or SCons Nodes) and return a list with all
406# paths made absolute and ~-expanded. Paths will be interpreted
407# relative to the launch directory unless a different root is provided
408def makePathListAbsolute(path_list, root=GetLaunchDir()):
409 return [abspath(joinpath(root, expanduser(str(p))))
410 for p in path_list]
411
412# Each target must have 'build' in the interior of the path; the
413# directory below this will determine the build parameters. For
414# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
415# recognize that ALPHA_SE specifies the configuration because it
416# follows 'build' in the build path.
417
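# Illustrative example (comment only), using a hypothetical target:
#
#   % scons build/ARM/gem5.opt
#
# The target is first made absolute, rfind() then locates the last non-leaf
# 'build' component, everything up to and including 'build' becomes the
# build root, and the next component ('ARM' here) is the variant directory
# whose name selects the build parameters.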
418# The funky assignment to "[:]" is needed to replace the list contents
419# in place rather than reassign the symbol to a new list, which
420# doesn't work (obviously!).
421BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)
422
423# Generate a list of the unique build roots and configs that the
424# collected targets reference.
425variant_paths = []
426build_root = None
427for t in BUILD_TARGETS:
428 path_dirs = t.split('/')
429 try:
430 build_top = rfind(path_dirs, 'build', -2)
431 except:
432 print "Error: no non-leaf 'build' dir found on target path", t
433 Exit(1)
434 this_build_root = joinpath('/',*path_dirs[:build_top+1])
435 if not build_root:
436 build_root = this_build_root
437 else:
438 if this_build_root != build_root:
439 print "Error: build targets not under same build root\n"\
440 " %s\n %s" % (build_root, this_build_root)
441 Exit(1)
442 variant_path = joinpath('/',*path_dirs[:build_top+2])
443 if variant_path not in variant_paths:
444 variant_paths.append(variant_path)
445
446# Make sure build_root exists (might not if this is the first build there)
447if not isdir(build_root):
448 mkdir(build_root)
449main['BUILDROOT'] = build_root
450
451Export('main')
452
453main.SConsignFile(joinpath(build_root, "sconsign"))
454
455# Default duplicate option is to use hard links, but this messes up
456# when you use emacs to edit a file in the target dir, as emacs moves
457# file to file~ then copies to file, breaking the link. Symbolic
458# (soft) links work better.
459main.SetOption('duplicate', 'soft-copy')
460
461#
462# Set up global sticky variables... these are common to an entire build
463# tree (not specific to a particular build like ALPHA_SE)
464#
465
466global_vars_file = joinpath(build_root, 'variables.global')
467
468global_vars = Variables(global_vars_file, args=ARGUMENTS)
469
470global_vars.AddVariables(
471 ('CC', 'C compiler', environ.get('CC', main['CC'])),
472 ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
473 ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
474 ('BATCH', 'Use batch pool for build and tests', False),
475 ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
476 ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
477 ('EXTRAS', 'Add extra directories to the compilation', '')
478 )
479
480# Update main environment with values from ARGUMENTS & global_vars_file
481global_vars.Update(main)
482help_texts["global_vars"] += global_vars.GenerateHelpText(main)
483
484# Save sticky variable settings back to current variables file
485global_vars.Save(global_vars_file, main)
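# Illustrative sketch (comment only): global sticky variables can be given
# on the command line and are then remembered in variables.global under the
# build root for later invocations; the values below are examples, not
# defaults.
#
#   % scons CC=clang CXX=clang++ build/ARM/gem5.opt
#
# A later plain 'scons build/ARM/gem5.opt' reuses the saved settings unless
# they are overridden again.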
486
487# Parse EXTRAS variable to build a list of all directories where we'll
488# look for sources etc. This list is exported as extras_dir_list.
489base_dir = main.srcdir.abspath
490if main['EXTRAS']:
491 extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
492else:
493 extras_dir_list = []
494
495Export('base_dir')
496Export('extras_dir_list')
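# Illustrative sketch (comment only): EXTRAS takes a colon-separated list of
# directories to add to the source search path; the paths below are
# hypothetical.
#
#   % scons EXTRAS=/work/gem5-extras:/work/private-models build/ARM/gem5.opt
#
# Relative paths are interpreted from the launch directory and ~ is
# expanded, per makePathListAbsolute() above.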
497
498# the ext directory should be on the #includes path
499main.Append(CPPPATH=[Dir('ext')])
500
501# Add shared top-level headers
502main.Prepend(CPPPATH=Dir('include'))
503
504def strip_build_path(path, env):
505 path = str(path)
506 variant_base = env['BUILDROOT'] + os.path.sep
507 if path.startswith(variant_base):
508 path = path[len(variant_base):]
509 elif path.startswith('build/'):
510 path = path[6:]
511 return path
512
513# Generate a string of the form:
514# common/path/prefix/src1, src2 -> tgt1, tgt2
515# to print while building.
516class Transform(object):
517 # all specific color settings should be here and nowhere else
518 tool_color = termcap.Normal
519 pfx_color = termcap.Yellow
520 srcs_color = termcap.Yellow + termcap.Bold
521 arrow_color = termcap.Blue + termcap.Bold
522 tgts_color = termcap.Yellow + termcap.Bold
523
524 def __init__(self, tool, max_sources=99):
525 self.format = self.tool_color + (" [%8s] " % tool) \
526 + self.pfx_color + "%s" \
527 + self.srcs_color + "%s" \
528 + self.arrow_color + " -> " \
529 + self.tgts_color + "%s" \
530 + termcap.Normal
531 self.max_sources = max_sources
532
533 def __call__(self, target, source, env, for_signature=None):
534 # truncate source list according to max_sources param
535 source = source[0:self.max_sources]
536 def strip(f):
537 return strip_build_path(str(f), env)
538 if len(source) > 0:
539 srcs = map(strip, source)
540 else:
541 srcs = ['']
542 tgts = map(strip, target)
543 # surprisingly, os.path.commonprefix is a dumb char-by-char string
544 # operation that has nothing to do with paths.
545 com_pfx = os.path.commonprefix(srcs + tgts)
546 com_pfx_len = len(com_pfx)
547 if com_pfx:
548 # do some cleanup and sanity checking on common prefix
549 if com_pfx[-1] == ".":
550 # prefix matches all but file extension: ok
551 # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
552 com_pfx = com_pfx[0:-1]
553 elif com_pfx[-1] == "/":
554 # common prefix is directory path: OK
555 pass
556 else:
557 src0_len = len(srcs[0])
558 tgt0_len = len(tgts[0])
559 if src0_len == com_pfx_len:
560 # source is a substring of target, OK
561 pass
562 elif tgt0_len == com_pfx_len:
563 # target is a substring of source, need to back up to
564 # avoid empty string on RHS of arrow
565 sep_idx = com_pfx.rfind(".")
566 if sep_idx != -1:
567 com_pfx = com_pfx[0:sep_idx]
568 else:
569 com_pfx = ''
570 elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
571 # still splitting at file extension: ok
572 pass
573 else:
574 # probably a fluke; ignore it
575 com_pfx = ''
576 # recalculate length in case com_pfx was modified
577 com_pfx_len = len(com_pfx)
578 def fmt(files):
579 f = map(lambda s: s[com_pfx_len:], files)
580 return ', '.join(f)
581 return self.format % (com_pfx, fmt(srcs), fmt(tgts))
582
583Export('Transform')
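# Illustrative example (comment only): for a hypothetical source file
# src/sim/main.cc compiled to an object file, the line rendered through
# Transform would look roughly like
#
#   [     CXX] src/sim/main.cc -> .o
#
# i.e. the common path prefix is printed once and only the differing
# suffixes of sources and targets appear around the arrow.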
584
585# enable the regression script to use the termcap
586main['TERMCAP'] = termcap
587
588if GetOption('verbose'):
589 def MakeAction(action, string, *args, **kwargs):
590 return Action(action, *args, **kwargs)
591else:
592 MakeAction = Action
593 main['CCCOMSTR'] = Transform("CC")
594 main['CXXCOMSTR'] = Transform("CXX")
595 main['ASCOMSTR'] = Transform("AS")
596 main['ARCOMSTR'] = Transform("AR", 0)
597 main['LINKCOMSTR'] = Transform("LINK", 0)
598 main['SHLINKCOMSTR'] = Transform("SHLINK", 0)
599 main['RANLIBCOMSTR'] = Transform("RANLIB", 0)
600 main['M4COMSTR'] = Transform("M4")
601 main['SHCCCOMSTR'] = Transform("SHCC")
602 main['SHCXXCOMSTR'] = Transform("SHCXX")
603Export('MakeAction')
604
605# Initialize the Link-Time Optimization (LTO) flags
606main['LTO_CCFLAGS'] = []
607main['LTO_LDFLAGS'] = []
608
609# According to the readme, tcmalloc works best if the compiler doesn't
610# assume that we're using the builtin malloc and friends. These flags
611# are compiler-specific, so we need to set them after we detect which
612# compiler we're using.
613main['TCMALLOC_CCFLAGS'] = []
614
615CXX_version = readCommand([main['CXX'],'--version'], exception=False)
616CXX_V = readCommand([main['CXX'],'-V'], exception=False)
617
618main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
619main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
620if main['GCC'] + main['CLANG'] > 1:
621 print 'Error: How can we have two at the same time?'
622 Exit(1)
623
624# Set up default C++ compiler flags
625if main['GCC'] or main['CLANG']:
626 # As gcc and clang share many flags, do the common parts here
627 main.Append(CCFLAGS=['-pipe'])
628 main.Append(CCFLAGS=['-fno-strict-aliasing'])
629 # Enable -Wall and -Wextra and then disable the few warnings that
630 # we consistently violate
631 main.Append(CCFLAGS=['-Wall', '-Wundef', '-Wextra',
632 '-Wno-sign-compare', '-Wno-unused-parameter'])
633 # We always compile using C++11
634 main.Append(CXXFLAGS=['-std=c++11'])
635 if sys.platform.startswith('freebsd'):
636 main.Append(CCFLAGS=['-I/usr/local/include'])
637 main.Append(CXXFLAGS=['-I/usr/local/include'])
638
639 main['FILTER_PSHLINKFLAGS'] = lambda x: str(x).replace(' -shared', '')
640 main['PSHLINKFLAGS'] = main.subst('${FILTER_PSHLINKFLAGS(SHLINKFLAGS)}')
641 main['PLINKFLAGS'] = main.subst('${LINKFLAGS}')
642 shared_partial_flags = ['-r', '-nostdlib']
643 main.Append(PSHLINKFLAGS=shared_partial_flags)
644 main.Append(PLINKFLAGS=shared_partial_flags)
645else:
646 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
647 print "Don't know what compiler options to use for your compiler."
648 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
649 print termcap.Yellow + ' version:' + termcap.Normal,
650 if not CXX_version:
651 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
652 termcap.Normal
653 else:
654 print CXX_version.replace('\n', '<nl>')
655 print " If you're trying to use a compiler other than GCC"
656 print " or clang, there appears to be something wrong with your"
657 print " environment."
658 print " "
659 print " If you are trying to use a compiler other than those listed"
660    print " above you will need to fix SConstruct and "
661 print " src/SConscript to support that compiler."
662 Exit(1)
663
664if main['GCC']:
665 # Check for a supported version of gcc. >= 4.8 is chosen for its
666 # level of c++11 support. See
667 # http://gcc.gnu.org/projects/cxx0x.html for details.
668 gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
669 if compareVersions(gcc_version, "4.8") < 0:
670 print 'Error: gcc version 4.8 or newer required.'
671 print ' Installed version:', gcc_version
672 Exit(1)
673
674 main['GCC_VERSION'] = gcc_version
675
676 if compareVersions(gcc_version, '4.9') >= 0:
677 # Incremental linking with LTO is currently broken in gcc versions
678 # 4.9 and above. A version where everything works completely hasn't
679 # yet been identified.
680 #
681 # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=67548
682 main['BROKEN_INCREMENTAL_LTO'] = True
683 if compareVersions(gcc_version, '6.0') >= 0:
684 # gcc versions 6.0 and greater accept an -flinker-output flag which
685 # selects what type of output the linker should generate. This is
686 # necessary for incremental lto to work, but is also broken in
687 # current versions of gcc. It may not be necessary in future
688 # versions. We add it here since it might be, and as a reminder that
689 # it exists. It's excluded if lto is being forced.
690 #
691 # https://gcc.gnu.org/gcc-6/changes.html
692 # https://gcc.gnu.org/ml/gcc-patches/2015-11/msg03161.html
693 # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=69866
694 if not GetOption('force_lto'):
695 main.Append(PSHLINKFLAGS='-flinker-output=rel')
696 main.Append(PLINKFLAGS='-flinker-output=rel')
697
698 # gcc from version 4.8 and above generates "rep; ret" instructions
699 # to avoid performance penalties on certain AMD chips. Older
700 # assemblers detect this as an error, "Error: expecting string
701 # instruction after `rep'"
702 as_version_raw = readCommand([main['AS'], '-v', '/dev/null',
703 '-o', '/dev/null'],
704 exception=False).split()
705
706 # version strings may contain extra distro-specific
707 # qualifiers, so play it safe and keep only what comes before
708 # the first hyphen
709 as_version = as_version_raw[-1].split('-')[0] if as_version_raw else None
710
711 if not as_version or compareVersions(as_version, "2.23") < 0:
712 print termcap.Yellow + termcap.Bold + \
713            'Warning: This combination of gcc and binutils has' + \
714 ' known incompatibilities.\n' + \
715 ' If you encounter build problems, please update ' + \
716 'binutils to 2.23.' + \
717 termcap.Normal
718
719 # Make sure we warn if the user has requested to compile with the
720    # Undefined Behavior Sanitizer and this version of gcc does not
721 # support it.
722 if GetOption('with_ubsan') and \
723 compareVersions(gcc_version, '4.9') < 0:
724 print termcap.Yellow + termcap.Bold + \
725 'Warning: UBSan is only supported using gcc 4.9 and later.' + \
726 termcap.Normal
727
728 disable_lto = GetOption('no_lto')
729 if not disable_lto and main.get('BROKEN_INCREMENTAL_LTO', False) and \
730 not GetOption('force_lto'):
731 print termcap.Yellow + termcap.Bold + \
732 'Warning: Your compiler doesn\'t support incremental linking' + \
733 ' and lto at the same time, so lto is being disabled. To force' + \
734 ' lto on anyway, use the --force-lto option. That will disable' + \
735 ' partial linking.' + \
736 termcap.Normal
737 disable_lto = True
738
739 # Add the appropriate Link-Time Optimization (LTO) flags
740 # unless LTO is explicitly turned off. Note that these flags
741 # are only used by the fast target.
742 if not disable_lto:
743 # Pass the LTO flag when compiling to produce GIMPLE
744 # output, we merely create the flags here and only append
745 # them later
746 main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
747
748 # Use the same amount of jobs for LTO as we are running
749 # scons with
750 main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
751
752 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
753 '-fno-builtin-realloc', '-fno-builtin-free'])
754
755 # add option to check for undeclared overrides
756 if compareVersions(gcc_version, "5.0") > 0:
757 main.Append(CCFLAGS=['-Wno-error=suggest-override'])
758
759elif main['CLANG']:
760    # Check for a supported version of clang; >= 3.1 is needed to
761 # support similar features as gcc 4.8. See
762 # http://clang.llvm.org/cxx_status.html for details
763 clang_version_re = re.compile(".* version (\d+\.\d+)")
764 clang_version_match = clang_version_re.search(CXX_version)
765 if (clang_version_match):
766 clang_version = clang_version_match.groups()[0]
767 if compareVersions(clang_version, "3.1") < 0:
768 print 'Error: clang version 3.1 or newer required.'
769 print ' Installed version:', clang_version
770 Exit(1)
771 else:
772 print 'Error: Unable to determine clang version.'
773 Exit(1)
774
775 # clang has a few additional warnings that we disable, extraneous
776    # parentheses are allowed due to Ruby's printing of the AST,
777 # finally self assignments are allowed as the generated CPU code
778 # is relying on this
779 main.Append(CCFLAGS=['-Wno-parentheses',
780 '-Wno-self-assign',
781 # Some versions of libstdc++ (4.8?) seem to
782 # use struct hash and class hash
783 # interchangeably.
784 '-Wno-mismatched-tags',
785 ])
786
787 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])
788
789 # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
790    # opposed to libstdc++, as the latter is dated.
791 if sys.platform == "darwin":
792 main.Append(CXXFLAGS=['-stdlib=libc++'])
793 main.Append(LIBS=['c++'])
794
795 # On FreeBSD we need libthr.
796 if sys.platform.startswith('freebsd'):
797 main.Append(LIBS=['thr'])
798
799else:
800 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
801 print "Don't know what compiler options to use for your compiler."
802 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
803 print termcap.Yellow + ' version:' + termcap.Normal,
804 if not CXX_version:
805 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
806 termcap.Normal
807 else:
808 print CXX_version.replace('\n', '<nl>')
809 print " If you're trying to use a compiler other than GCC"
810 print " or clang, there appears to be something wrong with your"
811 print " environment."
812 print " "
813 print " If you are trying to use a compiler other than those listed"
814    print " above you will need to fix SConstruct and "
815 print " src/SConscript to support that compiler."
816 Exit(1)
817
818# Set up common yacc/bison flags (needed for Ruby)
819main['YACCFLAGS'] = '-d'
820main['YACCHXXFILESUFFIX'] = '.hh'
821
822# Do this after we save settings back, or else we'll tack on an
823# extra 'qdo' every time we run scons.
824if main['BATCH']:
825 main['CC'] = main['BATCH_CMD'] + ' ' + main['CC']
826 main['CXX'] = main['BATCH_CMD'] + ' ' + main['CXX']
827 main['AS'] = main['BATCH_CMD'] + ' ' + main['AS']
828 main['AR'] = main['BATCH_CMD'] + ' ' + main['AR']
829 main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']
830
831if sys.platform == 'cygwin':
832 # cygwin has some header file issues...
833 main.Append(CCFLAGS=["-Wno-uninitialized"])
834
835# Check for the protobuf compiler
836protoc_version = readCommand([main['PROTOC'], '--version'],
837 exception='').split()
838
839# First two words should be "libprotoc x.y.z"
840if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
841 print termcap.Yellow + termcap.Bold + \
842 'Warning: Protocol buffer compiler (protoc) not found.\n' + \
843 ' Please install protobuf-compiler for tracing support.' + \
844 termcap.Normal
845 main['PROTOC'] = False
846else:
847 # Based on the availability of the compress stream wrappers,
848 # require 2.1.0
849 min_protoc_version = '2.1.0'
850 if compareVersions(protoc_version[1], min_protoc_version) < 0:
851 print termcap.Yellow + termcap.Bold + \
852 'Warning: protoc version', min_protoc_version, \
853 'or newer required.\n' + \
854 ' Installed version:', protoc_version[1], \
855 termcap.Normal
856 main['PROTOC'] = False
857 else:
858 # Attempt to determine the appropriate include path and
859        # library path using pkg-config, which means we also need to
860 # check for pkg-config. Note that it is possible to use
861 # protobuf without the involvement of pkg-config. Later on we
862        # do a library config check and at that point the test
863 # will fail if libprotobuf cannot be found.
864 if readCommand(['pkg-config', '--version'], exception=''):
865 try:
866 # Attempt to establish what linking flags to add for protobuf
867 # using pkg-config
868 main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
869 except:
870 print termcap.Yellow + termcap.Bold + \
871 'Warning: pkg-config could not get protobuf flags.' + \
872 termcap.Normal
873
874
875# Check for 'timeout' from GNU coreutils. If present, regressions will
876# be run with a time limit. We require version 8.13 since we rely on
877# support for the '--foreground' option.
878if sys.platform.startswith('freebsd'):
879 timeout_lines = readCommand(['gtimeout', '--version'],
880 exception='').splitlines()
881else:
882 timeout_lines = readCommand(['timeout', '--version'],
883 exception='').splitlines()
884# Get the first line and tokenize it
885timeout_version = timeout_lines[0].split() if timeout_lines else []
886main['TIMEOUT'] = timeout_version and \
887 compareVersions(timeout_version[-1], '8.13') >= 0
888
889# Add a custom Check function to test for structure members.
890def CheckMember(context, include, decl, member, include_quotes="<>"):
891 context.Message("Checking for member %s in %s..." %
892 (member, decl))
893 text = """
894#include %(header)s
895int main(){
896 %(decl)s test;
897 (void)test.%(member)s;
898 return 0;
899};
900""" % { "header" : include_quotes[0] + include + include_quotes[1],
901 "decl" : decl,
902 "member" : member,
903 }
904
905 ret = context.TryCompile(text, extension=".cc")
906 context.Result(ret)
907 return ret
908
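# Illustrative sketch (comment only): once registered as a custom test on
# the Configure context below, the check is called with the header, the
# declaration and the member name; the perf_event_attr check further down
# is the real user. A purely hypothetical extra check could look like
#
#   have_st_mtim = conf.CheckMember('sys/stat.h', 'struct stat', 'st_mtim')
#
# which only succeeds if the generated test program compiles.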
909# Platform-specific configuration. Note again that we assume that all
910# builds under a given build root run on the same host platform.
911conf = Configure(main,
912 conf_dir = joinpath(build_root, '.scons_config'),
913 log_file = joinpath(build_root, 'scons_config.log'),
914 custom_tests = {
915 'CheckMember' : CheckMember,
916 })
917
918# Check if we should compile a 64 bit binary on Mac OS X/Darwin
919try:
920 import platform
921 uname = platform.uname()
922 if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
923 if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
924 main.Append(CCFLAGS=['-arch', 'x86_64'])
925 main.Append(CFLAGS=['-arch', 'x86_64'])
926 main.Append(LINKFLAGS=['-arch', 'x86_64'])
927 main.Append(ASFLAGS=['-arch', 'x86_64'])
928except:
929 pass
930
931# Recent versions of scons substitute a "Null" object for Configure()
932# when configuration isn't necessary, e.g., if the "--help" option is
933# present. Unfortunately this Null object always returns false,
934# breaking all our configuration checks. We replace it with our own
935# more optimistic null object that returns True instead.
936if not conf:
937 def NullCheck(*args, **kwargs):
938 return True
939
940 class NullConf:
941 def __init__(self, env):
942 self.env = env
943 def Finish(self):
944 return self.env
945 def __getattr__(self, mname):
946 return NullCheck
947
948 conf = NullConf(main)
949
950# Cache build files in the supplied directory.
951if main['M5_BUILD_CACHE']:
952 print 'Using build cache located at', main['M5_BUILD_CACHE']
953 CacheDir(main['M5_BUILD_CACHE'])
954
955main['USE_PYTHON'] = not GetOption('without_python')
956if main['USE_PYTHON']:
957 # Find Python include and library directories for embedding the
958 # interpreter. We rely on python-config to resolve the appropriate
959 # includes and linker flags. ParseConfig does not seem to understand
960 # the more exotic linker flags such as -Xlinker and -export-dynamic so
961 # we add them explicitly below. If you want to link in an alternate
962 # version of python, see above for instructions on how to invoke
963 # scons with the appropriate PATH set.
964 #
965 # First we check if python2-config exists, else we use python-config
966 python_config = readCommand(['which', 'python2-config'],
967 exception='').strip()
968 if not os.path.exists(python_config):
969 python_config = readCommand(['which', 'python-config'],
970 exception='').strip()
971 py_includes = readCommand([python_config, '--includes'],
972 exception='').split()
973 # Strip the -I from the include folders before adding them to the
974 # CPPPATH
975 main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))
976
977 # Read the linker flags and split them into libraries and other link
978    # flags. The libraries are added later through the call to CheckLib.
979 py_ld_flags = readCommand([python_config, '--ldflags'],
980 exception='').split()
981 py_libs = []
982 for lib in py_ld_flags:
983 if not lib.startswith('-l'):
984 main.Append(LINKFLAGS=[lib])
985 else:
986 lib = lib[2:]
987 if lib not in py_libs:
988 py_libs.append(lib)
989
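    # Illustrative example (comment only): a typical python-config --ldflags
    # line might look like
    #
    #   -L/usr/lib/python2.7/config -lpython2.7 -lpthread -ldl -lutil -lm
    #
    # in which case the -L flag is appended to LINKFLAGS directly, while
    # py_libs ends up as ['python2.7', 'pthread', 'dl', 'util', 'm'] and is
    # verified through conf.CheckLib() below. The exact flags depend on the
    # host's Python installation.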
990 # verify that this stuff works
991 if not conf.CheckHeader('Python.h', '<>'):
992 print "Error: can't find Python.h header in", py_includes
993 print "Install Python headers (package python-dev on Ubuntu and RedHat)"
994 Exit(1)
995
996 for lib in py_libs:
997 if not conf.CheckLib(lib):
998 print "Error: can't find library %s required by python" % lib
999 Exit(1)
1000
1001# On Solaris you need to use libsocket for socket ops
1002if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
1003 if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
1004 print "Can't find library with socket calls (e.g. accept())"
1005 Exit(1)
1006
1007# Check for zlib. If the check passes, libz will be automatically
1008# added to the LIBS environment variable.
1009if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
1010 print 'Error: did not find needed zlib compression library '\
1011 'and/or zlib.h header file.'
1012 print ' Please install zlib and try again.'
1013 Exit(1)
1014
1015# If we have the protobuf compiler, also make sure we have the
1016# development libraries. If the check passes, libprotobuf will be
1017# automatically added to the LIBS environment variable. After
1018# this, we can use the HAVE_PROTOBUF flag to determine if we have
1019# got both protoc and libprotobuf available.
1020main['HAVE_PROTOBUF'] = main['PROTOC'] and \
1021 conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
1022 'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')
1023
1024# If we have the compiler but not the library, print another warning.
1025if main['PROTOC'] and not main['HAVE_PROTOBUF']:
1026 print termcap.Yellow + termcap.Bold + \
1027 'Warning: did not find protocol buffer library and/or headers.\n' + \
1028 ' Please install libprotobuf-dev for tracing support.' + \
1029 termcap.Normal
1030
1031# Check for librt.
1032have_posix_clock = \
1033 conf.CheckLibWithHeader(None, 'time.h', 'C',
1034 'clock_nanosleep(0,0,NULL,NULL);') or \
1035 conf.CheckLibWithHeader('rt', 'time.h', 'C',
1036 'clock_nanosleep(0,0,NULL,NULL);')
1037
1038have_posix_timers = \
1039 conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
1040 'timer_create(CLOCK_MONOTONIC, NULL, NULL);')
1041
1042if not GetOption('without_tcmalloc'):
1043 if conf.CheckLib('tcmalloc'):
1044 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1045 elif conf.CheckLib('tcmalloc_minimal'):
1046 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1047 else:
1048 print termcap.Yellow + termcap.Bold + \
1049 "You can get a 12% performance improvement by "\
1050 "installing tcmalloc (libgoogle-perftools-dev package "\
1051 "on Ubuntu or RedHat)." + termcap.Normal
1052
1053
1054# Detect back trace implementations. The last implementation in the
1055# list will be used by default.
1056backtrace_impls = [ "none" ]
1057
1058if conf.CheckLibWithHeader(None, 'execinfo.h', 'C',
1059 'backtrace_symbols_fd((void*)0, 0, 0);'):
1060 backtrace_impls.append("glibc")
1061elif conf.CheckLibWithHeader('execinfo', 'execinfo.h', 'C',
1062 'backtrace_symbols_fd((void*)0, 0, 0);'):
1063 # NetBSD and FreeBSD need libexecinfo.
1064 backtrace_impls.append("glibc")
1065 main.Append(LIBS=['execinfo'])
1066
1067if backtrace_impls[-1] == "none":
1068 default_backtrace_impl = "none"
1069 print termcap.Yellow + termcap.Bold + \
1070 "No suitable back trace implementation found." + \
1071 termcap.Normal
1072
1073if not have_posix_clock:
1074 print "Can't find library for POSIX clocks."
1075
1076# Check for <fenv.h> (C99 FP environment control)
1077have_fenv = conf.CheckHeader('fenv.h', '<>')
1078if not have_fenv:
1079 print "Warning: Header file <fenv.h> not found."
1080 print " This host has no IEEE FP rounding mode control."
1081
1082# Check for <png.h> (the libpng library is needed if we want to dump
1083# frame buffer images in png format)
1084have_png = conf.CheckHeader('png.h', '<>')
1085if not have_png:
1086 print "Warning: Header file <png.h> not found."
1087 print " This host has no libpng library."
1088 print " Disabling support for PNG framebuffers."
1089
1090# Check if we should enable KVM-based hardware virtualization. The API
1091# we rely on has existed since version 2.6.36 of the kernel, but somehow
1092# the KVM_API_VERSION does not reflect the change. We test for one of
1093# the types as a fallback.
1094have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
1095if not have_kvm:
1096 print "Info: Compatible header file <linux/kvm.h> not found, " \
1097 "disabling KVM support."
1098
1099# Check if the TUN/TAP driver is available.
1100have_tuntap = conf.CheckHeader('linux/if_tun.h', '<>')
1101if not have_tuntap:
1102 print "Info: Compatible header file <linux/if_tun.h> not found."
1103
1104# x86 needs support for xsave. We test for the structure here since we
1105# won't be able to run new tests by the time we know which ISA we're
1106# targeting.
1107have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
1108 '#include <linux/kvm.h>') != 0
1109
1110# Check if the requested target ISA is compatible with the host
1111def is_isa_kvm_compatible(isa):
1112 try:
1113 import platform
1114 host_isa = platform.machine()
1115 except:
1116 print "Warning: Failed to determine host ISA."
1117 return False
1118
1119 if not have_posix_timers:
1120        print "Warning: Cannot enable KVM, host seems to lack support " \
1121 "for POSIX timers"
1122 return False
1123
1124 if isa == "arm":
1125 return host_isa in ( "armv7l", "aarch64" )
1126 elif isa == "x86":
1127 if host_isa != "x86_64":
1128 return False
1129
1130 if not have_kvm_xsave:
1131 print "KVM on x86 requires xsave support in kernel headers."
1132 return False
1133
1134 return True
1135 else:
1136 return False
1137
1138
1139# Check if the exclude_host attribute is available. We want this to
1140# get accurate instruction counts in KVM.
1141main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
1142 'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')
1143
1144
1145######################################################################
1146#
1147# Finish the configuration
1148#
1149main = conf.Finish()
1150
1151######################################################################
1152#
1153# Collect all non-global variables
1154#
1155
1156# Define the universe of supported ISAs
1157all_isa_list = [ ]
1158all_gpu_isa_list = [ ]
1159Export('all_isa_list')
1160Export('all_gpu_isa_list')
1161
1162class CpuModel(object):
1163 '''The CpuModel class encapsulates everything the ISA parser needs to
1164 know about a particular CPU model.'''
1165
1166 # Dict of available CPU model objects. Accessible as CpuModel.dict.
1167 dict = {}
1168
1169 # Constructor. Automatically adds models to CpuModel.dict.
1170 def __init__(self, name, default=False):
1171 self.name = name # name of model
1172
1173 # This cpu is enabled by default
1174 self.default = default
1175
1176 # Add self to dict
1177 if name in CpuModel.dict:
1178 raise AttributeError, "CpuModel '%s' already registered" % name
1179 CpuModel.dict[name] = self
1180
1181Export('CpuModel')
1182
1183# Sticky variables get saved in the variables file so they persist from
1184# one invocation to the next (unless overridden, in which case the new
1185# value becomes sticky).
1186sticky_vars = Variables(args=ARGUMENTS)
1187Export('sticky_vars')
1188
1189# Sticky variables that should be exported
1190export_vars = []
1191Export('export_vars')
1192
1193# For Ruby
1194all_protocols = []
1195Export('all_protocols')
1196protocol_dirs = []
1197Export('protocol_dirs')
1198slicc_includes = []
1199Export('slicc_includes')
1200
1201# Walk the tree and execute all SConsopts scripts that will add to the
1202# above variables
1203if GetOption('verbose'):
1204 print "Reading SConsopts"
1205for bdir in [ base_dir ] + extras_dir_list:
1206 if not isdir(bdir):
1207 print "Error: directory '%s' does not exist" % bdir
1208 Exit(1)
1209 for root, dirs, files in os.walk(bdir):
1210 if 'SConsopts' in files:
1211 if GetOption('verbose'):
1212 print "Reading", joinpath(root, 'SConsopts')
1213 SConscript(joinpath(root, 'SConsopts'))
1214
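# Illustrative sketch (comment only): an SConsopts script found by the walk
# above typically just imports the exported lists and registers itself,
# along the lines of
#
#   Import('*')
#   all_isa_list.append('arm')
#
# and it may also add its own sticky variables via
# sticky_vars.AddVariables(). The exact contents vary per directory.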
1215all_isa_list.sort()
1216all_gpu_isa_list.sort()
1217
1218sticky_vars.AddVariables(
1219 EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
1220 EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list),
1221 ListVariable('CPU_MODELS', 'CPU models',
1222 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
1223 sorted(CpuModel.dict.keys())),
1224 BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
1225 False),
1226 BoolVariable('SS_COMPATIBLE_FP',
1227 'Make floating-point results compatible with SimpleScalar',
1228 False),
1229 BoolVariable('USE_SSE2',
1230 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
1231 False),
1232 BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
1233 BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
1234 BoolVariable('USE_PNG', 'Enable support for PNG images', have_png),
1235 BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability',
1236 False),
1237 BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models',
1238 have_kvm),
1239 BoolVariable('USE_TUNTAP',
1240 'Enable using a tap device to bridge to the host network',
1241 have_tuntap),
1242 BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
1243 EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
1244 all_protocols),
1245 EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
1246 backtrace_impls[-1], backtrace_impls)
1247 )
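# Illustrative sketch (comment only): any of the sticky variables above can
# be set on the command line for a particular variant; the values below are
# examples only.
#
#   % scons build/X86/gem5.opt USE_KVM=True PROTOCOL=MOESI_hammer
#
# The chosen values are then remembered in that variant's variables file
# (see the per-variant loop further down).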
1248
1249# These variables get exported to #defines in config/*.hh (see src/SConscript).
1250export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
1251 'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'USE_TUNTAP',
1252 'PROTOCOL', 'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST',
1253 'USE_PNG']
1254
1255###################################################
1256#
1257# Define a SCons builder for configuration flag headers.
1258#
1259###################################################
1260
1261# This function generates a config header file that #defines the
1262# variable symbol to the current variable setting (0 or 1). The source
1263# operands are the name of the variable and a Value node containing the
1264# value of the variable.
1265def build_config_file(target, source, env):
1266 (variable, value) = [s.get_contents() for s in source]
1267 f = file(str(target[0]), 'w')
1268 print >> f, '#define', variable, value
1269 f.close()
1270 return None
1271
1272# Combine the two functions into a scons Action object.
1273config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))
1274
1275# The emitter munges the source & target node lists to reflect what
1276# we're really doing.
1277def config_emitter(target, source, env):
1278 # extract variable name from Builder arg
1279 variable = str(target[0])
1280 # True target is config header file
1281 target = joinpath('config', variable.lower() + '.hh')
1282 val = env[variable]
1283 if isinstance(val, bool):
1284 # Force value to 0/1
1285 val = int(val)
1286 elif isinstance(val, str):
1287 val = '"' + val + '"'
1288
1289 # Sources are variable name & value (packaged in SCons Value nodes)
1290 return ([target], [Value(variable), Value(val)])
1291
1292config_builder = Builder(emitter = config_emitter, action = config_action)
1293
1294main.Append(BUILDERS = { 'ConfigFile' : config_builder })
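# Illustrative sketch (comment only): the builder is meant to be invoked
# with just the variable name, e.g. (hypothetically)
#
#   env.ConfigFile('USE_KVM')
#
# which, per the emitter above, writes config/use_kvm.hh containing a
# single line such as
#
#   #define USE_KVM 1
#
# with the value taken from the current environment setting.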
1295
1296###################################################
1297#
1298# Builders for static and shared partially linked object files.
1299#
1300###################################################
1301
1302partial_static_builder = Builder(action=SCons.Defaults.LinkAction,
1303 src_suffix='$OBJSUFFIX',
1304 src_builder=['StaticObject', 'Object'],
1305 LINKFLAGS='$PLINKFLAGS',
1306 LIBS='')
1307
1308def partial_shared_emitter(target, source, env):
1309 for tgt in target:
1310 tgt.attributes.shared = 1
1311 return (target, source)
1312partial_shared_builder = Builder(action=SCons.Defaults.ShLinkAction,
1313 emitter=partial_shared_emitter,
1314 src_suffix='$SHOBJSUFFIX',
1315 src_builder='SharedObject',
1316 SHLINKFLAGS='$PSHLINKFLAGS',
1317 LIBS='')
1318
1319main.Append(BUILDERS = { 'PartialShared' : partial_shared_builder,
1320 'PartialStatic' : partial_static_builder })
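# Illustrative sketch (comment only): these builders take already-compiled
# object files and relink them into a single relocatable object; the names
# below are hypothetical.
#
#   merged = env.PartialStatic(target='group.o', source=static_objs)
#   shared = env.PartialShared(target='group.os', source=shared_objs)
#
# The PLINKFLAGS/PSHLINKFLAGS set up in the compiler section (-r, -nostdlib
# and, where applicable, -flinker-output=rel) drive the partial link step.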
1321
1322# builds in ext are shared across all configs in the build root.
1323ext_dir = abspath(joinpath(str(main.root), 'ext'))
1324ext_build_dirs = []
1325for root, dirs, files in os.walk(ext_dir):
1326 if 'SConscript' in files:
1327 build_dir = os.path.relpath(root, ext_dir)
1328 ext_build_dirs.append(build_dir)
1329 main.SConscript(joinpath(root, 'SConscript'),
1330 variant_dir=joinpath(build_root, build_dir))
1331
1332main.Prepend(CPPPATH=Dir('ext/pybind11/include/'))
1333
1334###################################################
1335#
1336# This builder and wrapper method are used to set up a directory with
1337# switching headers. Those are headers which are in a generic location and
1338# that include more specific headers from a directory chosen at build time
1339# based on the current build settings.
1340#
1341###################################################
1342
1343def build_switching_header(target, source, env):
1344 path = str(target[0])
1345 subdir = str(source[0])
1346 dp, fp = os.path.split(path)
1347 dp = os.path.relpath(os.path.realpath(dp),
1348 os.path.realpath(env['BUILDDIR']))
1349 with open(path, 'w') as hdr:
1350 print >>hdr, '#include "%s/%s/%s"' % (dp, subdir, fp)
1351
1352switching_header_action = MakeAction(build_switching_header,
1353 Transform('GENERATE'))
1354
1355switching_header_builder = Builder(action=switching_header_action,
1356 source_factory=Value,
1357 single_source=True)
1358
1359main.Append(BUILDERS = { 'SwitchingHeader': switching_header_builder })
1360
1361def switching_headers(self, headers, source):
1362 for header in headers:
1363 self.SwitchingHeader(header, source)
1364
1365main.AddMethod(switching_headers, 'SwitchingHeaders')
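# Illustrative sketch (comment only): a SConscript can generate a set of
# switching headers with something like (paths are hypothetical)
#
#   env.SwitchingHeaders(['arch/decoder.hh'], env['TARGET_ISA'])
#
# Each generated header then contains a single include of the form
#
#   #include "arch/arm/decoder.hh"
#
# so generic code includes the generic path and picks up the directory
# selected at build time.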
1366
1367###################################################
1368#
1369# Define build environments for selected configurations.
1370#
1371###################################################
1372
1373for variant_path in variant_paths:
1374 if not GetOption('silent'):
1375 print "Building in", variant_path
1376
1377 # Make a copy of the build-root environment to use for this config.
1378 env = main.Clone()
1379 env['BUILDDIR'] = variant_path
1380
1381 # variant_dir is the tail component of build path, and is used to
1382 # determine the build parameters (e.g., 'ALPHA_SE')
1383 (build_root, variant_dir) = splitpath(variant_path)
1384
1385 # Set env variables according to the build directory config.
1386 sticky_vars.files = []
1387 # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
1388 # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
1389 # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
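    # Illustrative example (comment only): for a hypothetical build/ARM
    # variant the settings live in build/variables/ARM, so deleting
    # build/ARM itself leaves the saved configuration intact.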
1390 current_vars_file = joinpath(build_root, 'variables', variant_dir)
1391 if isfile(current_vars_file):
1392 sticky_vars.files.append(current_vars_file)
1393 if not GetOption('silent'):
1394 print "Using saved variables file %s" % current_vars_file
1395 elif variant_dir in ext_build_dirs:
1396 # Things in ext are built without a variant directory.
1397 continue
1398 else:
1399 # Build dir-specific variables file doesn't exist.
1400
1401 # Make sure the directory is there so we can create it later
1402 opt_dir = dirname(current_vars_file)
1403 if not isdir(opt_dir):
1404 mkdir(opt_dir)
1405
1406 # Get default build variables from source tree. Variables are
1407 # normally determined by name of $VARIANT_DIR, but can be
1408 # overridden by '--default=' arg on command line.
1409 default = GetOption('default')
1410 opts_dir = joinpath(main.root.abspath, 'build_opts')
1411 if default:
1412 default_vars_files = [joinpath(build_root, 'variables', default),
1413 joinpath(opts_dir, default)]
1414 else:
1415 default_vars_files = [joinpath(opts_dir, variant_dir)]
1416 existing_files = filter(isfile, default_vars_files)
1417 if existing_files:
1418 default_vars_file = existing_files[0]
1419 sticky_vars.files.append(default_vars_file)
1420 print "Variables file %s not found,\n using defaults in %s" \
1421 % (current_vars_file, default_vars_file)
1422 else:
1423 print "Error: cannot find variables file %s or " \
1424 "default file(s) %s" \
1425 % (current_vars_file, ' or '.join(default_vars_files))
1426 Exit(1)
1427
1428 # Apply current variable settings to env
1429 sticky_vars.Update(env)
1430
1431 help_texts["local_vars"] += \
1432 "Build variables for %s:\n" % variant_dir \
1433 + sticky_vars.GenerateHelpText(env)
1434
1435 # Process variable settings.
1436
1437 if not have_fenv and env['USE_FENV']:
1438 print "Warning: <fenv.h> not available; " \
1439 "forcing USE_FENV to False in", variant_dir + "."
1440 env['USE_FENV'] = False
1441
1442 if not env['USE_FENV']:
1443 print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
1444 print " FP results may deviate slightly from other platforms."
1445
1446 if not have_png and env['USE_PNG']:
1447 print "Warning: <png.h> not available; " \
1448 "forcing USE_PNG to False in", variant_dir + "."
1449 env['USE_PNG'] = False
1450
1451 if env['USE_PNG']:
1452 env.Append(LIBS=['png'])
1453
1454 if env['EFENCE']:
1455 env.Append(LIBS=['efence'])
1456
1457 if env['USE_KVM']:
1458 if not have_kvm:
1459            print "Warning: Cannot enable KVM, host seems to lack KVM support"
1460 env['USE_KVM'] = False
1461 elif not is_isa_kvm_compatible(env['TARGET_ISA']):
1462 print "Info: KVM support disabled due to unsupported host and " \
1463 "target ISA combination"
1464 env['USE_KVM'] = False
1465
1466 if env['USE_TUNTAP']:
1467 if not have_tuntap:
1468 print "Warning: Can't connect EtherTap with a tap device."
1469 env['USE_TUNTAP'] = False
1470
1471 if env['BUILD_GPU']:
1472 env.Append(CPPDEFINES=['BUILD_GPU'])
1473
1474 # Warn about missing optional functionality
1475 if env['USE_KVM']:
1476 if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
1477 print "Warning: perf_event headers lack support for the " \
1478 "exclude_host attribute. KVM instruction counts will " \
1479 "be inaccurate."
1480
1481 # Save sticky variable settings back to current variables file
1482 sticky_vars.Save(current_vars_file, env)
1483
1484 if env['USE_SSE2']:
1485 env.Append(CCFLAGS=['-msse2'])
1486
1487 # The src/SConscript file sets up the build rules in 'env' according
1488 # to the configured variables. It returns a list of environments,
1489 # one for each variant build (debug, opt, etc.)
1490 SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')
1491
1492# base help text
1493Help('''
1494Usage: scons [scons options] [build variables] [target(s)]
1495
1496Extra scons options:
1497%(options)s
1498
1499Global build variables:
1500%(global_vars)s
1501
1502%(local_vars)s
1503''' % help_texts)