1# -*- mode:python -*-
2
3# Copyright (c) 2013, 2015, 2016 ARM Limited
4# All rights reserved.
5#
6# The license below extends only to copyright in the software and shall
7# not be construed as granting a license to any other intellectual
8# property including but not limited to intellectual property relating
9# to a hardware implementation of the functionality of the software
10# licensed hereunder. You may use the software subject to the license
11# terms below provided that you ensure that this notice is replicated
12# unmodified and in its entirety in all distributions of the software,
13# modified or unmodified, in source code or in binary form.
14#
15# Copyright (c) 2011 Advanced Micro Devices, Inc.
16# Copyright (c) 2009 The Hewlett-Packard Development Company
17# Copyright (c) 2004-2005 The Regents of The University of Michigan
18# All rights reserved.
19#
20# Redistribution and use in source and binary forms, with or without
21# modification, are permitted provided that the following conditions are
22# met: redistributions of source code must retain the above copyright
23# notice, this list of conditions and the following disclaimer;
24# redistributions in binary form must reproduce the above copyright
25# notice, this list of conditions and the following disclaimer in the
26# documentation and/or other materials provided with the distribution;
27# neither the name of the copyright holders nor the names of its
28# contributors may be used to endorse or promote products derived from
29# this software without specific prior written permission.
30#
31# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
32# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
33# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
34# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
35# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
36# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
37# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
38# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
39# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
40# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
41# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
42#
43# Authors: Steve Reinhardt
44# Nathan Binkert
45
46###################################################
47#
48# SCons top-level build description (SConstruct) file.
49#
50# While in this directory ('gem5'), just type 'scons' to build the default
51# configuration (see below), or type 'scons build/<CONFIG>/<binary>'
52# to build some other configuration (e.g., 'build/ALPHA/gem5.opt' for
53# the optimized full-system version).
54#
55# You can build gem5 in a different directory as long as there is a
56# 'build/<CONFIG>' somewhere along the target path. The build system
57# expects that all configs under the same build directory are being
58# built for the same host system.
59#
60# Examples:
61#
62# The following two commands are equivalent. The '-u' option tells
63# scons to search up the directory tree for this SConstruct file.
64# % cd <path-to-src>/gem5 ; scons build/ALPHA/gem5.debug
65# % cd <path-to-src>/gem5/build/ALPHA; scons -u gem5.debug
66#
67# The following two commands are equivalent and demonstrate building
68# in a directory outside of the source tree. The '-C' option tells
69# scons to chdir to the specified directory to find this SConstruct
70# file.
71# % cd <path-to-src>/gem5 ; scons /local/foo/build/ALPHA/gem5.debug
72# % cd /local/foo/build/ALPHA; scons -C <path-to-src>/gem5 gem5.debug
73#
74# You can use 'scons -H' to print scons options. If you're in this
75# 'gem5' directory (or use -u or -C to tell scons where to find this
76# file), you can use 'scons -h' to print all the gem5-specific build
77# options as well.
78#
79###################################################
80
81# Check for recent-enough Python and SCons versions.
82try:
83 # Really old versions of scons only take two options for the
84 # function, so check once without the revision and once with the
85 # revision. The first instance will fail for stuff other than
86 # 0.98, and the second will fail for 0.98.0.
87 EnsureSConsVersion(0, 98)
88 EnsureSConsVersion(0, 98, 1)
89except SystemExit, e:
90 print """
91For more details, see:
92 http://gem5.org/Dependencies
93"""
94 raise
95
96# We ensure the python version early because python-config
97# requires python 2.5
98try:
99 EnsurePythonVersion(2, 5)
100except SystemExit, e:
101 print """
102You can use a non-default installation of the Python interpreter by
103rearranging your PATH so that scons finds the non-default 'python' and
104'python-config' first.
105
106For more details, see:
107 http://gem5.org/wiki/index.php/Using_a_non-default_Python_installation
108"""
109 raise
110
111# Global Python includes
112import itertools
113import os
114import re
115import shutil
116import subprocess
117import sys
118
119from os import mkdir, environ
120from os.path import abspath, basename, dirname, expanduser, normpath
121from os.path import exists, isdir, isfile
122from os.path import join as joinpath, split as splitpath
123
124# SCons includes
125import SCons
126import SCons.Node
127
128extra_python_paths = [
129 Dir('src/python').srcnode().abspath, # gem5 includes
130 Dir('ext/ply').srcnode().abspath, # ply is used by several files
131 ]
132
133sys.path[1:1] = extra_python_paths
134
135from m5.util import compareVersions, readCommand
136from m5.util.terminal import get_termcap
137
138help_texts = {
139 "options" : "",
140 "global_vars" : "",
141 "local_vars" : ""
142}
143
144Export("help_texts")
145
146
147# There's a bug in scons in that (1) by default, the help texts from
148# AddOption() are supposed to be displayed when you type 'scons -h'
149# and (2) you can override the help displayed by 'scons -h' using the
150# Help() function, but these two features are incompatible: once
151# you've overridden the help text using Help(), there's no way to get
152# at the help texts from AddOptions. See:
153# http://scons.tigris.org/issues/show_bug.cgi?id=2356
154# http://scons.tigris.org/issues/show_bug.cgi?id=2611
155# This hack lets us extract the help text from AddOptions and
156# re-inject it via Help(). Ideally someday this bug will be fixed and
157# we can just use AddOption directly.
158def AddLocalOption(*args, **kwargs):
159 col_width = 30
160
161 help = " " + ", ".join(args)
162 if "help" in kwargs:
163 length = len(help)
164 if length >= col_width:
165 help += "\n" + " " * col_width
166 else:
167 help += " " * (col_width - length)
168 help += kwargs["help"]
169 help_texts["options"] += help + "\n"
170
171 AddOption(*args, **kwargs)
172
173AddLocalOption('--colors', dest='use_colors', action='store_true',
174 help="Add color to abbreviated scons output")
175AddLocalOption('--no-colors', dest='use_colors', action='store_false',
176 help="Don't add color to abbreviated scons output")
177AddLocalOption('--with-cxx-config', dest='with_cxx_config',
178 action='store_true',
179 help="Build with support for C++-based configuration")
180AddLocalOption('--default', dest='default', type='string', action='store',
181 help='Override which build_opts file to use for defaults')
182AddLocalOption('--ignore-style', dest='ignore_style', action='store_true',
183 help='Disable style checking hooks')
184AddLocalOption('--no-lto', dest='no_lto', action='store_true',
185 help='Disable Link-Time Optimization for fast')
186AddLocalOption('--update-ref', dest='update_ref', action='store_true',
187 help='Update test reference outputs')
188AddLocalOption('--verbose', dest='verbose', action='store_true',
189 help='Print full tool command lines')
190AddLocalOption('--without-python', dest='without_python',
191 action='store_true',
192 help='Build without Python configuration support')
193AddLocalOption('--without-tcmalloc', dest='without_tcmalloc',
194 action='store_true',
195 help='Disable linking against tcmalloc')
196AddLocalOption('--with-ubsan', dest='with_ubsan', action='store_true',
197 help='Build with Undefined Behavior Sanitizer if available')
198AddLocalOption('--with-asan', dest='with_asan', action='store_true',
199 help='Build with Address Sanitizer if available')
200
201termcap = get_termcap(GetOption('use_colors'))
202
203########################################################################
204#
205# Set up the main build environment.
206#
207########################################################################
208
209# export TERM so that clang reports errors in color
210use_vars = set([ 'AS', 'AR', 'CC', 'CXX', 'HOME', 'LD_LIBRARY_PATH',
211 'LIBRARY_PATH', 'PATH', 'PKG_CONFIG_PATH', 'PROTOC',
212 'PYTHONPATH', 'RANLIB', 'SWIG', 'TERM' ])
213
214use_prefixes = [
215 "ASAN_", # address sanitizer symbolizer path and settings
216 "CCACHE_", # ccache (caching compiler wrapper) configuration
217 "CCC_", # clang static analyzer configuration
218 "DISTCC_", # distcc (distributed compiler wrapper) configuration
219 "INCLUDE_SERVER_", # distcc pump server settings
220 "M5", # M5 configuration (e.g., path to kernels)
221 ]
222
223use_env = {}
224for key,val in sorted(os.environ.iteritems()):
225 if key in use_vars or \
226 any([key.startswith(prefix) for prefix in use_prefixes]):
227 use_env[key] = val
228
229# Tell scons to avoid implicit command dependencies to prevent issues
230# with the param wrappers being compiled twice (see
231# http://scons.tigris.org/issues/show_bug.cgi?id=2811)
232main = Environment(ENV=use_env, IMPLICIT_COMMAND_DEPENDENCIES=0)
233main.Decider('MD5-timestamp')
234main.root = Dir(".") # The current directory (where this file lives).
235main.srcdir = Dir("src") # The source directory
236
237main_dict_keys = main.Dictionary().keys()
238
239# Check that we have a C/C++ compiler
240if not ('CC' in main_dict_keys and 'CXX' in main_dict_keys):
241 print "No C++ compiler installed (package g++ on Ubuntu and RedHat)"
242 Exit(1)
243
244# Check that swig is present
245if not 'SWIG' in main_dict_keys:
246 print "swig is not installed (package swig on Ubuntu and RedHat)"
247 Exit(1)
248
249# add useful python code to PYTHONPATH so it can be used by subprocesses
250# as well
251main.AppendENVPath('PYTHONPATH', extra_python_paths)
252
253########################################################################
254#
255# Mercurial Stuff.
256#
257# If the gem5 directory is a mercurial repository, we should do some
258# extra things.
259#
260########################################################################
261
262hgdir = main.root.Dir(".hg")
263
264
265style_message = """
266You're missing the gem5 style hook, which automatically checks your code
267against the gem5 style rules on %s.
268This script will now install the hook in your %s.
269Press enter to continue, or ctrl-c to abort: """
270
271mercurial_style_message = """
272You're missing the gem5 style hook, which automatically checks your code
273against the gem5 style rules on hg commit and qrefresh commands.
274This script will now install the hook in your .hg/hgrc file.
275Press enter to continue, or ctrl-c to abort: """
276
277git_style_message = """
278You're missing the gem5 style or commit message hook. These hooks help
279to ensure that your code follows gem5's style rules on git commit.
280This script will now install the hook in your .git/hooks/ directory.
281Press enter to continue, or ctrl-c to abort: """
282
283mercurial_style_upgrade_message = """
284Your Mercurial style hooks are not up-to-date. This script will now
285try to automatically update them. A backup of your hgrc will be saved
286in .hg/hgrc.old.
287Press enter to continue, or ctrl-c to abort: """
288
289mercurial_style_hook = """
290# The following lines were automatically added by gem5/SConstruct
291# to provide the gem5 style-checking hooks
292[extensions]
293hgstyle = %s/util/hgstyle.py
294
295[hooks]
296pretxncommit.style = python:hgstyle.check_style
297pre-qrefresh.style = python:hgstyle.check_style
298# End of SConstruct additions
299
300""" % (main.root.abspath)
301
302mercurial_lib_not_found = """
303Mercurial libraries cannot be found, ignoring style hook. If
304you are a gem5 developer, please fix this and run the style
305hook. It is important.
306"""
307
308# Check for style hook and prompt for installation if it's not there.
309# Skip this if --ignore-style was specified, there's no interactive
310# terminal to prompt, or no recognized revision control system can be
311# found.
312ignore_style = GetOption('ignore_style') or not sys.stdin.isatty()
313
314# Try to wire up Mercurial to the style hooks
315if not ignore_style and hgdir.exists():
316 style_hook = True
317 style_hooks = tuple()
318 hgrc = hgdir.File('hgrc')
319 hgrc_old = hgdir.File('hgrc.old')
320 try:
321 from mercurial import ui
322 ui = ui.ui()
323 ui.readconfig(hgrc.abspath)
324 style_hooks = (ui.config('hooks', 'pretxncommit.style', None),
325 ui.config('hooks', 'pre-qrefresh.style', None))
326 style_hook = all(style_hooks)
327 style_extension = ui.config('extensions', 'style', None)
328 except ImportError:
329 print mercurial_lib_not_found
330
331 if "python:style.check_style" in style_hooks:
332 # Try to upgrade the style hooks
333 print mercurial_style_upgrade_message
334 # continue unless user does ctrl-c/ctrl-d etc.
335 try:
336 raw_input()
337 except:
338 print "Input exception, exiting scons.\n"
339 sys.exit(1)
340 shutil.copyfile(hgrc.abspath, hgrc_old.abspath)
341 re_style_hook = re.compile(r"^([^=#]+)\.style\s*=\s*([^#\s]+).*")
342 re_style_extension = re.compile("style\s*=\s*([^#\s]+).*")
343 old, new = open(hgrc_old.abspath, 'r'), open(hgrc.abspath, 'w')
344 for l in old:
345 m_hook = re_style_hook.match(l)
346 m_ext = re_style_extension.match(l)
347 if m_hook:
348 hook, check = m_hook.groups()
349 if check != "python:style.check_style":
350 print "Warning: %s.style is using a non-default " \
351 "checker: %s" % (hook, check)
352 if hook not in ("pretxncommit", "pre-qrefresh"):
353 print "Warning: Updating unknown style hook: %s" % hook
354
355 l = "%s.style = python:hgstyle.check_style\n" % hook
356 elif m_ext and m_ext.group(1) == style_extension:
357 l = "hgstyle = %s/util/hgstyle.py\n" % main.root.abspath
358
359 new.write(l)
360 elif not style_hook:
361 print mercurial_style_message,
362 # continue unless user does ctrl-c/ctrl-d etc.
363 try:
364 raw_input()
365 except:
366 print "Input exception, exiting scons.\n"
367 sys.exit(1)
368 hgrc_path = '%s/.hg/hgrc' % main.root.abspath
369 print "Adding style hook to", hgrc_path, "\n"
370 try:
371 with open(hgrc_path, 'a') as f:
372 f.write(mercurial_style_hook)
373 except:
374 print "Error updating", hgrc_path
375 sys.exit(1)
376
377def install_git_style_hooks():
378 try:
379 gitdir = Dir(readCommand(
380 ["git", "rev-parse", "--git-dir"]).strip("\n"))
381 except Exception, e:
382 print "Warning: Failed to find git repo directory: %s" % e
383 return
384
385 git_hooks = gitdir.Dir("hooks")
386 def hook_exists(hook_name):
387 hook = git_hooks.File(hook_name)
388 return hook.exists()
389
390 def hook_install(hook_name, script):
391 hook = git_hooks.File(hook_name)
392 if hook.exists():
393 print "Warning: Can't install %s, hook already exists." % hook_name
394 return
395
396 if hook.islink():
397 print "Warning: Removing broken symlink for hook %s." % hook_name
398 os.unlink(hook.get_abspath())
399
396 if not git_hooks.exists():
397 mkdir(git_hooks.get_abspath())
398
399 # Use a relative symlink if the hooks live in the source directory
400 if hook.is_under(main.root):
401 script_path = os.path.relpath(
402 script.get_abspath(),
403 hook.Dir(".").get_abspath())
404 else:
405 script_path = script.get_abspath()
406
407 try:
408 os.symlink(script_path, hook.get_abspath())
409 except:
410 print "Error updating git %s hook" % hook_name
411 raise
412
413 if hook_exists("pre-commit") and hook_exists("commit-msg"):
414 return
415
416 print git_style_message,
417 try:
418 raw_input()
419 except:
420 print "Input exception, exiting scons.\n"
421 sys.exit(1)
422
423 git_style_script = File("util/git-pre-commit.py")
424 git_msg_script = File("ext/git-commit-msg")
425
426 hook_install("pre-commit", git_style_script)
427 hook_install("commit-msg", git_msg_script)
428
429# Try to wire up git to the style hooks
430if not ignore_style and main.root.Entry(".git").exists():
431 install_git_style_hooks()
432
433###################################################
434#
435# Figure out which configurations to set up based on the path(s) of
436# the target(s).
437#
438###################################################
439
440# Find default configuration & binary.
441Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))
442
443# helper function: find last occurrence of element in list
444def rfind(l, elt, offs = -1):
445 for i in range(len(l)+offs, 0, -1):
446 if l[i] == elt:
447 return i
448 raise ValueError, "element not found"
449
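# Worked example (illustrative): for the target '/local/foo/build/ALPHA/gem5.debug'
# split on '/',
#   rfind(['', 'local', 'foo', 'build', 'ALPHA', 'gem5.debug'], 'build', -2) == 3
# i.e. the index of the last non-leaf 'build' component.
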
450# Take a list of paths (or SCons Nodes) and return a list with all
451# paths made absolute and ~-expanded. Paths will be interpreted
452# relative to the launch directory unless a different root is provided
453def makePathListAbsolute(path_list, root=GetLaunchDir()):
454 return [abspath(joinpath(root, expanduser(str(p))))
455 for p in path_list]
456
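# Worked example (illustrative): when scons is launched from '/local/foo/build/ALPHA',
#   makePathListAbsolute(['gem5.debug']) -> ['/local/foo/build/ALPHA/gem5.debug']
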
457# Each target must have 'build' in the interior of the path; the
458# directory below this will determine the build parameters. For
459# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
460# recognize that ALPHA_SE specifies the configuration because it
461# follows 'build' in the build path.
462
463# The funky assignment to "[:]" is needed to replace the list contents
464# in place rather than reassign the symbol to a new list, which
465# doesn't work (obviously!).
466BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)
467
468# Generate a list of the unique build roots and configs that the
469# collected targets reference.
470variant_paths = []
471build_root = None
472for t in BUILD_TARGETS:
473 path_dirs = t.split('/')
474 try:
475 build_top = rfind(path_dirs, 'build', -2)
476 except:
477 print "Error: no non-leaf 'build' dir found on target path", t
478 Exit(1)
479 this_build_root = joinpath('/',*path_dirs[:build_top+1])
480 if not build_root:
481 build_root = this_build_root
482 else:
483 if this_build_root != build_root:
484 print "Error: build targets not under same build root\n"\
485 " %s\n %s" % (build_root, this_build_root)
486 Exit(1)
487 variant_path = joinpath('/',*path_dirs[:build_top+2])
488 if variant_path not in variant_paths:
489 variant_paths.append(variant_path)
490
491# Make sure build_root exists (might not if this is the first build there)
492if not isdir(build_root):
493 mkdir(build_root)
494main['BUILDROOT'] = build_root
495
496Export('main')
497
498main.SConsignFile(joinpath(build_root, "sconsign"))
499
500# Default duplicate option is to use hard links, but this messes up
501# when you use emacs to edit a file in the target dir, as emacs moves
502# file to file~ then copies to file, breaking the link. Symbolic
503# (soft) links work better.
504main.SetOption('duplicate', 'soft-copy')
505
506#
507# Set up global sticky variables... these are common to an entire build
508# tree (not specific to a particular build like ALPHA_SE)
509#
510
511global_vars_file = joinpath(build_root, 'variables.global')
512
513global_vars = Variables(global_vars_file, args=ARGUMENTS)
514
515global_vars.AddVariables(
516 ('CC', 'C compiler', environ.get('CC', main['CC'])),
517 ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
518 ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
519 ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
520 ('BATCH', 'Use batch pool for build and tests', False),
521 ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
522 ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
523 ('EXTRAS', 'Add extra directories to the compilation', '')
524 )
525
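# Example invocation (illustrative): sticky variables can be given on the scons
# command line and are then remembered in <build_root>/variables.global, e.g.
#   scons CC=gcc-5 EXTRAS=/path/to/extras build/ALPHA/gem5.opt
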
526# Update main environment with values from ARGUMENTS & global_vars_file
527global_vars.Update(main)
528help_texts["global_vars"] += global_vars.GenerateHelpText(main)
529
530# Save sticky variable settings back to current variables file
531global_vars.Save(global_vars_file, main)
532
533# Parse EXTRAS variable to build a list of all directories where we're
534# looking for sources etc. This list is exported as extras_dir_list.
535base_dir = main.srcdir.abspath
536if main['EXTRAS']:
537 extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
538else:
539 extras_dir_list = []
540
541Export('base_dir')
542Export('extras_dir_list')
543
544# the ext directory should be on the #includes path
545main.Append(CPPPATH=[Dir('ext')])
546
547def strip_build_path(path, env):
548 path = str(path)
549 variant_base = env['BUILDROOT'] + os.path.sep
550 if path.startswith(variant_base):
551 path = path[len(variant_base):]
552 elif path.startswith('build/'):
553 path = path[6:]
554 return path
555
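# Worked example (illustrative):
#   strip_build_path('build/ALPHA/base/misc.cc', env) -> 'ALPHA/base/misc.cc'
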
556# Generate a string of the form:
557# common/path/prefix/src1, src2 -> tgt1, tgt2
558# to print while building.
559class Transform(object):
560 # all specific color settings should be here and nowhere else
561 tool_color = termcap.Normal
562 pfx_color = termcap.Yellow
563 srcs_color = termcap.Yellow + termcap.Bold
564 arrow_color = termcap.Blue + termcap.Bold
565 tgts_color = termcap.Yellow + termcap.Bold
566
567 def __init__(self, tool, max_sources=99):
568 self.format = self.tool_color + (" [%8s] " % tool) \
569 + self.pfx_color + "%s" \
570 + self.srcs_color + "%s" \
571 + self.arrow_color + " -> " \
572 + self.tgts_color + "%s" \
573 + termcap.Normal
574 self.max_sources = max_sources
575
576 def __call__(self, target, source, env, for_signature=None):
577 # truncate source list according to max_sources param
578 source = source[0:self.max_sources]
579 def strip(f):
580 return strip_build_path(str(f), env)
581 if len(source) > 0:
582 srcs = map(strip, source)
583 else:
584 srcs = ['']
585 tgts = map(strip, target)
586 # surprisingly, os.path.commonprefix is a dumb char-by-char string
587 # operation that has nothing to do with paths.
588 com_pfx = os.path.commonprefix(srcs + tgts)
589 com_pfx_len = len(com_pfx)
590 if com_pfx:
591 # do some cleanup and sanity checking on common prefix
592 if com_pfx[-1] == ".":
593 # prefix matches all but file extension: ok
594 # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
595 com_pfx = com_pfx[0:-1]
596 elif com_pfx[-1] == "/":
597 # common prefix is directory path: OK
598 pass
599 else:
600 src0_len = len(srcs[0])
601 tgt0_len = len(tgts[0])
602 if src0_len == com_pfx_len:
603 # source is a substring of target, OK
604 pass
605 elif tgt0_len == com_pfx_len:
606 # target is a substring of source, need to back up to
607 # avoid empty string on RHS of arrow
608 sep_idx = com_pfx.rfind(".")
609 if sep_idx != -1:
610 com_pfx = com_pfx[0:sep_idx]
611 else:
612 com_pfx = ''
613 elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
614 # still splitting at file extension: ok
615 pass
616 else:
617 # probably a fluke; ignore it
618 com_pfx = ''
619 # recalculate length in case com_pfx was modified
620 com_pfx_len = len(com_pfx)
621 def fmt(files):
622 f = map(lambda s: s[com_pfx_len:], files)
623 return ', '.join(f)
624 return self.format % (com_pfx, fmt(srcs), fmt(tgts))
625
626Export('Transform')
627
628# enable the regression script to use the termcap
629main['TERMCAP'] = termcap
630
631if GetOption('verbose'):
632 def MakeAction(action, string, *args, **kwargs):
633 return Action(action, *args, **kwargs)
634else:
635 MakeAction = Action
636 main['CCCOMSTR'] = Transform("CC")
637 main['CXXCOMSTR'] = Transform("CXX")
638 main['ASCOMSTR'] = Transform("AS")
639 main['SWIGCOMSTR'] = Transform("SWIG")
640 main['ARCOMSTR'] = Transform("AR", 0)
641 main['LINKCOMSTR'] = Transform("LINK", 0)
642 main['RANLIBCOMSTR'] = Transform("RANLIB", 0)
643 main['M4COMSTR'] = Transform("M4")
644 main['SHCCCOMSTR'] = Transform("SHCC")
645 main['SHCXXCOMSTR'] = Transform("SHCXX")
646Export('MakeAction')
647
648# Initialize the Link-Time Optimization (LTO) flags
649main['LTO_CCFLAGS'] = []
650main['LTO_LDFLAGS'] = []
651
652# According to the readme, tcmalloc works best if the compiler doesn't
653# assume that we're using the builtin malloc and friends. These flags
654# are compiler-specific, so we need to set them after we detect which
655# compiler we're using.
656main['TCMALLOC_CCFLAGS'] = []
657
658CXX_version = readCommand([main['CXX'],'--version'], exception=False)
659CXX_V = readCommand([main['CXX'],'-V'], exception=False)
660
661main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
662main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
663if main['GCC'] + main['CLANG'] > 1:
664 print 'Error: How can we have two at the same time?'
665 Exit(1)
666
667# Set up default C++ compiler flags
668if main['GCC'] or main['CLANG']:
669 # As gcc and clang share many flags, do the common parts here
670 main.Append(CCFLAGS=['-pipe'])
671 main.Append(CCFLAGS=['-fno-strict-aliasing'])
672 # Enable -Wall and -Wextra and then disable the few warnings that
673 # we consistently violate
674 main.Append(CCFLAGS=['-Wall', '-Wundef', '-Wextra',
675 '-Wno-sign-compare', '-Wno-unused-parameter'])
676 # We always compile using C++11
677 main.Append(CXXFLAGS=['-std=c++11'])
678 if sys.platform.startswith('freebsd'):
679 main.Append(CCFLAGS=['-I/usr/local/include'])
680 main.Append(CXXFLAGS=['-I/usr/local/include'])
681else:
682 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
683 print "Don't know what compiler options to use for your compiler."
684 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
685 print termcap.Yellow + ' version:' + termcap.Normal,
686 if not CXX_version:
687 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
688 termcap.Normal
689 else:
690 print CXX_version.replace('\n', '<nl>')
691 print " If you're trying to use a compiler other than GCC"
692 print " or clang, there appears to be something wrong with your"
693 print " environment."
694 print " "
695 print " If you are trying to use a compiler other than those listed"
696 print " above you will need to ease fix SConstruct and "
697 print " above you will need to fix SConstruct and "
698 Exit(1)
699
700if main['GCC']:
701 # Check for a supported version of gcc. >= 4.8 is chosen for its
702 # level of c++11 support. See
703 # http://gcc.gnu.org/projects/cxx0x.html for details.
704 gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
705 if compareVersions(gcc_version, "4.8") < 0:
706 print 'Error: gcc version 4.8 or newer required.'
707 print ' Installed version:', gcc_version
708 Exit(1)
709
710 main['GCC_VERSION'] = gcc_version
711
712 # gcc from version 4.8 and above generates "rep; ret" instructions
713 # to avoid performance penalties on certain AMD chips. Older
714 # assemblers detect this as an error, "Error: expecting string
715 # instruction after `rep'"
716 as_version_raw = readCommand([main['AS'], '-v', '/dev/null',
717 '-o', '/dev/null'],
718 exception=False).split()
719
720 # version strings may contain extra distro-specific
721 # qualifiers, so play it safe and keep only what comes before
722 # the first hyphen
723 as_version = as_version_raw[-1].split('-')[0] if as_version_raw else None
724
725 if not as_version or compareVersions(as_version, "2.23") < 0:
726 print termcap.Yellow + termcap.Bold + \
727 'Warning: This combination of gcc and binutils have' + \
728 ' known incompatibilities.\n' + \
729 ' If you encounter build problems, please update ' + \
730 'binutils to 2.23.' + \
731 termcap.Normal
732
733 # Make sure we warn if the user has requested to compile with the
734 # Undefined Behavior Sanitizer and this version of gcc does not
735 # support it.
736 if GetOption('with_ubsan') and \
737 compareVersions(gcc_version, '4.9') < 0:
738 print termcap.Yellow + termcap.Bold + \
739 'Warning: UBSan is only supported using gcc 4.9 and later.' + \
740 termcap.Normal
741
742 # Add the appropriate Link-Time Optimization (LTO) flags
743 # unless LTO is explicitly turned off. Note that these flags
744 # are only used by the fast target.
745 if not GetOption('no_lto'):
746 # Pass the LTO flag when compiling to produce GIMPLE
747 # output, we merely create the flags here and only append
748 # them later
749 main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
750
751 # Use the same number of jobs for LTO as we are running
752 # scons with
753 main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
754
755 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
756 '-fno-builtin-realloc', '-fno-builtin-free'])
757
758 # add option to check for undeclared overrides
759 if compareVersions(gcc_version, "5.0") > 0:
760 main.Append(CCFLAGS=['-Wno-error=suggest-override'])
761
762elif main['CLANG']:
763 # Check for a supported version of clang, >= 3.1 is needed to
764 # support similar features as gcc 4.8. See
765 # http://clang.llvm.org/cxx_status.html for details
766 clang_version_re = re.compile(".* version (\d+\.\d+)")
767 clang_version_match = clang_version_re.search(CXX_version)
768 if (clang_version_match):
769 clang_version = clang_version_match.groups()[0]
770 if compareVersions(clang_version, "3.1") < 0:
771 print 'Error: clang version 3.1 or newer required.'
772 print ' Installed version:', clang_version
773 Exit(1)
774 else:
775 print 'Error: Unable to determine clang version.'
776 Exit(1)
777
778 # clang has a few additional warnings that we disable: extraneous
779 # parentheses are allowed due to Ruby's printing of the AST, and
780 # self assignments are allowed as the generated CPU code relies
781 # on this
782 main.Append(CCFLAGS=['-Wno-parentheses',
783 '-Wno-self-assign',
784 # Some versions of libstdc++ (4.8?) seem to
785 # use struct hash and class hash
786 # interchangeably.
787 '-Wno-mismatched-tags',
788 ])
789
790 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])
791
792 # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
793 # opposed to libstdc++, as the latter is dated.
794 if sys.platform == "darwin":
795 main.Append(CXXFLAGS=['-stdlib=libc++'])
796 main.Append(LIBS=['c++'])
797
798 # On FreeBSD we need libthr.
799 if sys.platform.startswith('freebsd'):
800 main.Append(LIBS=['thr'])
801
802else:
803 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
804 print "Don't know what compiler options to use for your compiler."
805 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
806 print termcap.Yellow + ' version:' + termcap.Normal,
807 if not CXX_version:
808 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
809 termcap.Normal
810 else:
811 print CXX_version.replace('\n', '<nl>')
812 print " If you're trying to use a compiler other than GCC"
813 print " or clang, there appears to be something wrong with your"
814 print " environment."
815 print " "
816 print " If you are trying to use a compiler other than those listed"
817 print " above you will need to fix SConstruct and "
818 print " src/SConscript to support that compiler."
819 Exit(1)
820
821# Set up common yacc/bison flags (needed for Ruby)
822main['YACCFLAGS'] = '-d'
823main['YACCHXXFILESUFFIX'] = '.hh'
824
825# Do this after we save settings back, or else we'll tack on an
826# extra 'qdo' every time we run scons.
827if main['BATCH']:
828 main['CC'] = main['BATCH_CMD'] + ' ' + main['CC']
829 main['CXX'] = main['BATCH_CMD'] + ' ' + main['CXX']
830 main['AS'] = main['BATCH_CMD'] + ' ' + main['AS']
831 main['AR'] = main['BATCH_CMD'] + ' ' + main['AR']
832 main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']
833
834if sys.platform == 'cygwin':
835 # cygwin has some header file issues...
836 main.Append(CCFLAGS=["-Wno-uninitialized"])
837
838# Check for the protobuf compiler
839protoc_version = readCommand([main['PROTOC'], '--version'],
840 exception='').split()
841
842# First two words should be "libprotoc x.y.z"
843if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
844 print termcap.Yellow + termcap.Bold + \
845 'Warning: Protocol buffer compiler (protoc) not found.\n' + \
846 ' Please install protobuf-compiler for tracing support.' + \
847 termcap.Normal
848 main['PROTOC'] = False
849else:
850 # Based on the availability of the compress stream wrappers,
851 # require 2.1.0
852 min_protoc_version = '2.1.0'
853 if compareVersions(protoc_version[1], min_protoc_version) < 0:
854 print termcap.Yellow + termcap.Bold + \
855 'Warning: protoc version', min_protoc_version, \
856 'or newer required.\n' + \
857 ' Installed version:', protoc_version[1], \
858 termcap.Normal
859 main['PROTOC'] = False
860 else:
861 # Attempt to determine the appropriate include path and
862 # library path using pkg-config, which means we also need to
863 # check for pkg-config. Note that it is possible to use
864 # protobuf without the involvement of pkg-config. Later on we
865 # do a library config check and at that point the test
866 # will fail if libprotobuf cannot be found.
867 if readCommand(['pkg-config', '--version'], exception=''):
868 try:
869 # Attempt to establish what linking flags to add for protobuf
870 # using pkg-config
871 main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
872 except:
873 print termcap.Yellow + termcap.Bold + \
874 'Warning: pkg-config could not get protobuf flags.' + \
875 termcap.Normal
876
877# Check for SWIG
878if not main.has_key('SWIG'):
879 print 'Error: SWIG utility not found.'
880 print ' Please install (see http://www.swig.org) and retry.'
881 Exit(1)
882
883# Check for appropriate SWIG version
884swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
885# First 3 words should be "SWIG Version x.y.z"
886if len(swig_version) < 3 or \
887 swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
888 print 'Error determining SWIG version.'
889 Exit(1)
890
891min_swig_version = '2.0.4'
892if compareVersions(swig_version[2], min_swig_version) < 0:
893 print 'Error: SWIG version', min_swig_version, 'or newer required.'
894 print ' Installed version:', swig_version[2]
895 Exit(1)
896
897# Check for known incompatibilities. The standard library shipped with
898# gcc >= 4.9 does not play well with swig versions prior to 3.0
899if main['GCC'] and compareVersions(gcc_version, '4.9') >= 0 and \
900 compareVersions(swig_version[2], '3.0') < 0:
901 print termcap.Yellow + termcap.Bold + \
902 'Warning: This combination of gcc and swig have' + \
903 ' known incompatibilities.\n' + \
904 ' If you encounter build problems, please update ' + \
905 'swig to 3.0 or later.' + \
906 termcap.Normal
907
908# Set up SWIG flags & scanner
909swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
910main.Append(SWIGFLAGS=swig_flags)
911
912# Check for 'timeout' from GNU coreutils. If present, regressions will
913# be run with a time limit. We require version 8.13 since we rely on
914# support for the '--foreground' option.
915if sys.platform.startswith('freebsd'):
916 timeout_lines = readCommand(['gtimeout', '--version'],
917 exception='').splitlines()
918else:
919 timeout_lines = readCommand(['timeout', '--version'],
920 exception='').splitlines()
921# Get the first line and tokenize it
922timeout_version = timeout_lines[0].split() if timeout_lines else []
923main['TIMEOUT'] = timeout_version and \
924 compareVersions(timeout_version[-1], '8.13') >= 0
925
926# filter out all existing swig scanners, they mess up the dependency
927# stuff for some reason
928scanners = []
929for scanner in main['SCANNERS']:
930 skeys = scanner.skeys
931 if skeys == '.i':
932 continue
933
934 if isinstance(skeys, (list, tuple)) and '.i' in skeys:
935 continue
936
937 scanners.append(scanner)
938
939# add the new swig scanner that we like better
940from SCons.Scanner import ClassicCPP as CPPScanner
941swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
942scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))
943
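# The pattern above picks up SWIG dependencies from lines such as
# '%include "file.i"' or '#include <header.h>' inside .i files
# (illustrative examples).
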
944# replace the scanners list that has what we want
945main['SCANNERS'] = scanners
946
947# Add a custom Check function to test for structure members.
948def CheckMember(context, include, decl, member, include_quotes="<>"):
949 context.Message("Checking for member %s in %s..." %
950 (member, decl))
951 text = """
952#include %(header)s
953int main(){
954 %(decl)s test;
955 (void)test.%(member)s;
956 return 0;
957};
958""" % { "header" : include_quotes[0] + include + include_quotes[1],
959 "decl" : decl,
960 "member" : member,
961 }
962
963 ret = context.TryCompile(text, extension=".cc")
964 context.Result(ret)
965 return ret
966
967# Platform-specific configuration. Note again that we assume that all
968# builds under a given build root run on the same host platform.
969conf = Configure(main,
970 conf_dir = joinpath(build_root, '.scons_config'),
971 log_file = joinpath(build_root, 'scons_config.log'),
972 custom_tests = {
973 'CheckMember' : CheckMember,
974 })
975
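# With the custom test registered above, the check can be run on the Configure
# context, e.g. (hypothetical member check):
#   conf.CheckMember('sys/stat.h', 'struct stat', 'st_mtim')
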
976# Check if we should compile a 64 bit binary on Mac OS X/Darwin
977try:
978 import platform
979 uname = platform.uname()
980 if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
981 if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
982 main.Append(CCFLAGS=['-arch', 'x86_64'])
983 main.Append(CFLAGS=['-arch', 'x86_64'])
984 main.Append(LINKFLAGS=['-arch', 'x86_64'])
985 main.Append(ASFLAGS=['-arch', 'x86_64'])
986except:
987 pass
988
989# Recent versions of scons substitute a "Null" object for Configure()
990# when configuration isn't necessary, e.g., if the "--help" option is
991# present. Unfortunately this Null object always returns false,
992# breaking all our configuration checks. We replace it with our own
993# more optimistic null object that returns True instead.
994if not conf:
995 def NullCheck(*args, **kwargs):
996 return True
997
998 class NullConf:
999 def __init__(self, env):
1000 self.env = env
1001 def Finish(self):
1002 return self.env
1003 def __getattr__(self, mname):
1004 return NullCheck
1005
1006 conf = NullConf(main)
1007
1008# Cache build files in the supplied directory.
1009if main['M5_BUILD_CACHE']:
1010 print 'Using build cache located at', main['M5_BUILD_CACHE']
1011 CacheDir(main['M5_BUILD_CACHE'])
1012
1013if not GetOption('without_python'):
1014 # Find Python include and library directories for embedding the
1015 # interpreter. We rely on python-config to resolve the appropriate
1016 # includes and linker flags. ParseConfig does not seem to understand
1017 # the more exotic linker flags such as -Xlinker and -export-dynamic so
1018 # we add them explicitly below. If you want to link in an alternate
1019 # version of python, see above for instructions on how to invoke
1020 # scons with the appropriate PATH set.
1021 #
1022 # First we check if python2-config exists, else we use python-config
1023 python_config = readCommand(['which', 'python2-config'],
1024 exception='').strip()
1025 if not os.path.exists(python_config):
1026 python_config = readCommand(['which', 'python-config'],
1027 exception='').strip()
1028 py_includes = readCommand([python_config, '--includes'],
1029 exception='').split()
1030 # Strip the -I from the include folders before adding them to the
1031 # CPPPATH
1032 main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))
1033
1034 # Read the linker flags and split them into libraries and other link
1035 # flags. The libraries are added later through the call to CheckLib.
1036 py_ld_flags = readCommand([python_config, '--ldflags'],
1037 exception='').split()
1038 py_libs = []
1039 for lib in py_ld_flags:
1040 if not lib.startswith('-l'):
1041 main.Append(LINKFLAGS=[lib])
1042 else:
1043 lib = lib[2:]
1044 if lib not in py_libs:
1045 py_libs.append(lib)
1046
1047 # verify that this stuff works
1048 if not conf.CheckHeader('Python.h', '<>'):
1049 print "Error: can't find Python.h header in", py_includes
1050 print "Install Python headers (package python-dev on Ubuntu and RedHat)"
1051 Exit(1)
1052
1053 for lib in py_libs:
1054 if not conf.CheckLib(lib):
1055 print "Error: can't find library %s required by python" % lib
1056 Exit(1)
1057
1058# On Solaris you need to use libsocket for socket ops
1059if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
1060 if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
1061 print "Can't find library with socket calls (e.g. accept())"
1062 Exit(1)
1063
1064# Check for zlib. If the check passes, libz will be automatically
1065# added to the LIBS environment variable.
1066if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
1067 print 'Error: did not find needed zlib compression library '\
1068 'and/or zlib.h header file.'
1069 print ' Please install zlib and try again.'
1070 Exit(1)
1071
1072# If we have the protobuf compiler, also make sure we have the
1073# development libraries. If the check passes, libprotobuf will be
1074# automatically added to the LIBS environment variable. After
1075# this, we can use the HAVE_PROTOBUF flag to determine if we have
1076# got both protoc and libprotobuf available.
1077main['HAVE_PROTOBUF'] = main['PROTOC'] and \
1078 conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
1079 'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')
1080
1081# If we have the compiler but not the library, print another warning.
1082if main['PROTOC'] and not main['HAVE_PROTOBUF']:
1083 print termcap.Yellow + termcap.Bold + \
1084 'Warning: did not find protocol buffer library and/or headers.\n' + \
1085 ' Please install libprotobuf-dev for tracing support.' + \
1086 termcap.Normal
1087
1088# Check for librt.
1089have_posix_clock = \
1090 conf.CheckLibWithHeader(None, 'time.h', 'C',
1091 'clock_nanosleep(0,0,NULL,NULL);') or \
1092 conf.CheckLibWithHeader('rt', 'time.h', 'C',
1093 'clock_nanosleep(0,0,NULL,NULL);')
1094
1095have_posix_timers = \
1096 conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
1097 'timer_create(CLOCK_MONOTONIC, NULL, NULL);')
1098
1099if not GetOption('without_tcmalloc'):
1100 if conf.CheckLib('tcmalloc'):
1101 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1102 elif conf.CheckLib('tcmalloc_minimal'):
1103 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1104 else:
1105 print termcap.Yellow + termcap.Bold + \
1106 "You can get a 12% performance improvement by "\
1107 "installing tcmalloc (libgoogle-perftools-dev package "\
1108 "on Ubuntu or RedHat)." + termcap.Normal
1109
1110
1111# Detect back trace implementations. The last implementation in the
1112# list will be used by default.
1113backtrace_impls = [ "none" ]
1114
1115if conf.CheckLibWithHeader(None, 'execinfo.h', 'C',
1116 'backtrace_symbols_fd((void*)0, 0, 0);'):
1117 backtrace_impls.append("glibc")
1118elif conf.CheckLibWithHeader('execinfo', 'execinfo.h', 'C',
1119 'backtrace_symbols_fd((void*)0, 0, 0);'):
1120 # NetBSD and FreeBSD need libexecinfo.
1121 backtrace_impls.append("glibc")
1122 main.Append(LIBS=['execinfo'])
1123
1124if backtrace_impls[-1] == "none":
1125 default_backtrace_impl = "none"
1126 print termcap.Yellow + termcap.Bold + \
1127 "No suitable back trace implementation found." + \
1128 termcap.Normal
1129
1130if not have_posix_clock:
1131 print "Can't find library for POSIX clocks."
1132
1133# Check for <fenv.h> (C99 FP environment control)
1134have_fenv = conf.CheckHeader('fenv.h', '<>')
1135if not have_fenv:
1136 print "Warning: Header file <fenv.h> not found."
1137 print " This host has no IEEE FP rounding mode control."
1138
1139# Check if we should enable KVM-based hardware virtualization. The API
1140# we rely on exists since version 2.6.36 of the kernel, but somehow
1141# the KVM_API_VERSION does not reflect the change. We test for one of
1142# the types as a fall back.
1143have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
1144if not have_kvm:
1145 print "Info: Compatible header file <linux/kvm.h> not found, " \
1146 "disabling KVM support."
1147
1148# x86 needs support for xsave. We test for the structure here since we
1149# won't be able to run new tests by the time we know which ISA we're
1150# targeting.
1151have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
1152 '#include <linux/kvm.h>') != 0
1153
1154# Check if the requested target ISA is compatible with the host
1155def is_isa_kvm_compatible(isa):
1156 try:
1157 import platform
1158 host_isa = platform.machine()
1159 except:
1160 print "Warning: Failed to determine host ISA."
1161 return False
1162
1163 if not have_posix_timers:
1164 print "Warning: Can not enable KVM, host seems to lack support " \
1165 "for POSIX timers"
1166 return False
1167
1168 if isa == "arm":
1169 return host_isa in ( "armv7l", "aarch64" )
1170 elif isa == "x86":
1171 if host_isa != "x86_64":
1172 return False
1173
1174 if not have_kvm_xsave:
1175 print "KVM on x86 requires xsave support in kernel headers."
1176 return False
1177
1178 return True
1179 else:
1180 return False
1181
1182
1183# Check if the exclude_host attribute is available. We want this to
1184# get accurate instruction counts in KVM.
1185main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
1186 'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')
1187
1188
1189######################################################################
1190#
1191# Finish the configuration
1192#
1193main = conf.Finish()
1194
1195######################################################################
1196#
1197# Collect all non-global variables
1198#
1199
1200# Define the universe of supported ISAs
1201all_isa_list = [ ]
1202all_gpu_isa_list = [ ]
1203Export('all_isa_list')
1204Export('all_gpu_isa_list')
1205
1206class CpuModel(object):
1207 '''The CpuModel class encapsulates everything the ISA parser needs to
1208 know about a particular CPU model.'''
1209
1210 # Dict of available CPU model objects. Accessible as CpuModel.dict.
1211 dict = {}
1212
1213 # Constructor. Automatically adds models to CpuModel.dict.
1214 def __init__(self, name, default=False):
1215 self.name = name # name of model
1216
1217 # This cpu is enabled by default
1218 self.default = default
1219
1220 # Add self to dict
1221 if name in CpuModel.dict:
1222 raise AttributeError, "CpuModel '%s' already registered" % name
1223 CpuModel.dict[name] = self
1224
1225Export('CpuModel')
1226
1227# Sticky variables get saved in the variables file so they persist from
1228# one invocation to the next (unless overridden, in which case the new
1229# value becomes sticky).
1230sticky_vars = Variables(args=ARGUMENTS)
1231Export('sticky_vars')
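# For example (illustrative): 'scons build/ARM/gem5.opt USE_KVM=False'
# overrides the sticky USE_KVM setting for that variant, and the new value is
# reused on later invocations until it is overridden again.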
1232
1233# Sticky variables that should be exported
1234export_vars = []
1235Export('export_vars')
1236
1237# For Ruby
1238all_protocols = []
1239Export('all_protocols')
1240protocol_dirs = []
1241Export('protocol_dirs')
1242slicc_includes = []
1243Export('slicc_includes')
1244
1245# Walk the tree and execute all SConsopts scripts that will add to the
1246# above variables
1247if GetOption('verbose'):
1248 print "Reading SConsopts"
1249for bdir in [ base_dir ] + extras_dir_list:
1250 if not isdir(bdir):
1251 print "Error: directory '%s' does not exist" % bdir
1252 Exit(1)
1253 for root, dirs, files in os.walk(bdir):
1254 if 'SConsopts' in files:
1255 if GetOption('verbose'):
1256 print "Reading", joinpath(root, 'SConsopts')
1257 SConscript(joinpath(root, 'SConsopts'))
1258
1259all_isa_list.sort()
1260all_gpu_isa_list.sort()
1261
1262sticky_vars.AddVariables(
1263 EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
1264 EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list),
1265 ListVariable('CPU_MODELS', 'CPU models',
1266 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
1267 sorted(CpuModel.dict.keys())),
1268 BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
1269 False),
1270 BoolVariable('SS_COMPATIBLE_FP',
1271 'Make floating-point results compatible with SimpleScalar',
1272 False),
1273 BoolVariable('USE_SSE2',
1274 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
1275 False),
1276 BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
1277 BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
1278 BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability', False),
1279 BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models', have_kvm),
1280 BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
1281 EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
1282 all_protocols),
1283 EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
1284 backtrace_impls[-1], backtrace_impls)
1285 )
1286
1287# These variables get exported to #defines in config/*.hh (see src/SConscript).
1288export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
1289 'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'PROTOCOL',
1290 'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST']
1291
1292###################################################
1293#
1294# Define a SCons builder for configuration flag headers.
1295#
1296###################################################
1297
1298# This function generates a config header file that #defines the
1299# variable symbol to the current variable setting (0 or 1). The source
1300# operands are the name of the variable and a Value node containing the
1301# value of the variable.
1302def build_config_file(target, source, env):
1303 (variable, value) = [s.get_contents() for s in source]
1304 f = file(str(target[0]), 'w')
1305 print >> f, '#define', variable, value
1306 f.close()
1307 return None
1308
1309# Combine the two functions into a scons Action object.
1310config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))
1311
1312# The emitter munges the source & target node lists to reflect what
1313# we're really doing.
1314def config_emitter(target, source, env):
1315 # extract variable name from Builder arg
1316 variable = str(target[0])
1317 # True target is config header file
1318 target = joinpath('config', variable.lower() + '.hh')
1319 val = env[variable]
1320 if isinstance(val, bool):
1321 # Force value to 0/1
1322 val = int(val)
1323 elif isinstance(val, str):
1324 val = '"' + val + '"'
1325
1326 # Sources are variable name & value (packaged in SCons Value nodes)
1327 return ([target], [Value(variable), Value(val)])
1328
1329config_builder = Builder(emitter = config_emitter, action = config_action)
1330
1331main.Append(BUILDERS = { 'ConfigFile' : config_builder })
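# As an illustration (assuming USE_KVM is True for the chosen variant), the
# ConfigFile builder writes a header such as build/<variant>/config/use_kvm.hh
# containing the single line:
#   #define USE_KVM 1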
1332
1333# libelf build is shared across all configs in the build root.
1334main.SConscript('ext/libelf/SConscript',
1335 variant_dir = joinpath(build_root, 'libelf'))
1336
1337# iostream3 build is shared across all configs in the build root.
1338main.SConscript('ext/iostream3/SConscript',
1339 variant_dir = joinpath(build_root, 'iostream3'))
1340
1341# libfdt build is shared across all configs in the build root.
1342main.SConscript('ext/libfdt/SConscript',
1343 variant_dir = joinpath(build_root, 'libfdt'))
1344
1345# fputils build is shared across all configs in the build root.
1346main.SConscript('ext/fputils/SConscript',
1347 variant_dir = joinpath(build_root, 'fputils'))
1348
1349# DRAMSim2 build is shared across all configs in the build root.
1350main.SConscript('ext/dramsim2/SConscript',
1351 variant_dir = joinpath(build_root, 'dramsim2'))
1352
1353# DRAMPower build is shared across all configs in the build root.
1354main.SConscript('ext/drampower/SConscript',
1355 variant_dir = joinpath(build_root, 'drampower'))
1356
1357# nomali build is shared across all configs in the build root.
1358main.SConscript('ext/nomali/SConscript',
1359 variant_dir = joinpath(build_root, 'nomali'))
1360
1361###################################################
1362#
1363# This function is used to set up a directory with switching headers
1364#
1365###################################################
1366
1367main['ALL_ISA_LIST'] = all_isa_list
1368main['ALL_GPU_ISA_LIST'] = all_gpu_isa_list
1369all_isa_deps = {}
1370def make_switching_dir(dname, switch_headers, env):
1371 # Generate the header. target[0] is the full path of the output
1372 # header to generate. 'source' is a dummy variable, since we get the
1373 # list of ISAs from env['ALL_ISA_LIST'].
1374 def gen_switch_hdr(target, source, env):
1375 fname = str(target[0])
1376 isa = env['TARGET_ISA'].lower()
1377 try:
1378 f = open(fname, 'w')
1379 print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
1380 f.close()
1381 except IOError:
1382 print "Failed to create %s" % fname
1383 raise
1384
1385 # Build SCons Action object. 'varlist' specifies env vars that this
1386 # action depends on; when env['ALL_ISA_LIST'] changes these actions
1387 # should get re-executed.
1388 switch_hdr_action = MakeAction(gen_switch_hdr,
1389 Transform("GENERATE"), varlist=['ALL_ISA_LIST'])
1390
1391 # Instantiate actions for each header
1392 for hdr in switch_headers:
1393 env.Command(hdr, [], switch_hdr_action)
1394
1395 isa_target = Dir('.').up().name.lower().replace('_', '-')
1396 env['PHONY_BASE'] = '#'+isa_target
1397 all_isa_deps[isa_target] = None
1398
1399Export('make_switching_dir')
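# For example (illustrative): with TARGET_ISA set to 'arm', a switching header
# such as 'arch/isa_traits.hh' generated by make_switching_dir contains only:
#   #include "arch/arm/isa_traits.hh"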
1400
1401def make_gpu_switching_dir(dname, switch_headers, env):
1402 # Generate the header. target[0] is the full path of the output
1403 # header to generate. 'source' is a dummy variable, since we get the
1404    # list of GPU ISAs from env['ALL_GPU_ISA_LIST'].
1405 def gen_switch_hdr(target, source, env):
1406 fname = str(target[0])
1407
1408 isa = env['TARGET_GPU_ISA'].lower()
1409
1410 try:
1411 f = open(fname, 'w')
1412 print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
1413 f.close()
1414 except IOError:
1415 print "Failed to create %s" % fname
1416 raise
1417
1418 # Build SCons Action object. 'varlist' specifies env vars that this
1419    # action depends on; when env['ALL_GPU_ISA_LIST'] changes these actions
1420 # should get re-executed.
1421 switch_hdr_action = MakeAction(gen_switch_hdr,
1422                          Transform("GENERATE"), varlist=['ALL_GPU_ISA_LIST'])
1423
1424 # Instantiate actions for each header
1425 for hdr in switch_headers:
1426 env.Command(hdr, [], switch_hdr_action)
1427
1428Export('make_gpu_switching_dir')
1429
1430# all-isas -> all-deps -> all-environs -> all-targets
1431main.Alias('#all-isas', [])
1432main.Alias('#all-deps', '#all-isas')
1433
1434# Dummy target to ensure all environments are created before telling
1435# SCons what to actually make (the command line arguments). We attach
1436# them to the dependence graph after the environments are complete.
1437ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; gets closure to work.
1438def environsComplete(target, source, env):
1439 for t in ORIG_BUILD_TARGETS:
1440 main.Depends('#all-targets', t)
1441
1442# Each build/* switching_dir attaches its *-environs target to #all-environs.
1443main.Append(BUILDERS = {'CompleteEnvirons' :
1444 Builder(action=MakeAction(environsComplete, None))})
1445main.CompleteEnvirons('#all-environs', [])
1446
1447def doNothing(**ignored): pass
1448main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})
1449
1450# The final target to which all the original targets ultimately get attached.
1451main.Dummy('#all-targets', '#all-environs')
1452BUILD_TARGETS[:] = ['#all-targets']
1453
1454###################################################
1455#
1456# Define build environments for selected configurations.
1457#
1458###################################################
1459
1460for variant_path in variant_paths:
1461 if not GetOption('silent'):
1462 print "Building in", variant_path
1463
1464 # Make a copy of the build-root environment to use for this config.
1465 env = main.Clone()
1466 env['BUILDDIR'] = variant_path
1467
1468 # variant_dir is the tail component of build path, and is used to
1469 # determine the build parameters (e.g., 'ALPHA_SE')
1470 (build_root, variant_dir) = splitpath(variant_path)
1471
1472 # Set env variables according to the build directory config.
1473 sticky_vars.files = []
1474 # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
1475 # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
1476 # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
1477 current_vars_file = joinpath(build_root, 'variables', variant_dir)
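    # e.g. (illustrative): the settings for build/ARM are kept in
    # build/variables/ARM, so 'rm -rf build/ARM' leaves them intact.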
1478 if isfile(current_vars_file):
1479 sticky_vars.files.append(current_vars_file)
1480 if not GetOption('silent'):
1481 print "Using saved variables file %s" % current_vars_file
1482 else:
1483 # Build dir-specific variables file doesn't exist.
1484
1485 # Make sure the directory is there so we can create it later
1486 opt_dir = dirname(current_vars_file)
1487 if not isdir(opt_dir):
1488 mkdir(opt_dir)
1489
1490 # Get default build variables from source tree. Variables are
1491 # normally determined by name of $VARIANT_DIR, but can be
1492 # overridden by '--default=' arg on command line.
1493 default = GetOption('default')
1494 opts_dir = joinpath(main.root.abspath, 'build_opts')
1495 if default:
1496 default_vars_files = [joinpath(build_root, 'variables', default),
1497 joinpath(opts_dir, default)]
1498 else:
1499 default_vars_files = [joinpath(opts_dir, variant_dir)]
1500 existing_files = filter(isfile, default_vars_files)
1501 if existing_files:
1502 default_vars_file = existing_files[0]
1503 sticky_vars.files.append(default_vars_file)
1504 print "Variables file %s not found,\n using defaults in %s" \
1505 % (current_vars_file, default_vars_file)
1506 else:
1507 print "Error: cannot find variables file %s or " \
1508 "default file(s) %s" \
1509 % (current_vars_file, ' or '.join(default_vars_files))
1510 Exit(1)
1511
1512 # Apply current variable settings to env
1513 sticky_vars.Update(env)
1514
1515 help_texts["local_vars"] += \
1516 "Build variables for %s:\n" % variant_dir \
1517 + sticky_vars.GenerateHelpText(env)
1518
1519 # Process variable settings.
1520
1521 if not have_fenv and env['USE_FENV']:
1522 print "Warning: <fenv.h> not available; " \
1523 "forcing USE_FENV to False in", variant_dir + "."
1524 env['USE_FENV'] = False
1525
1526 if not env['USE_FENV']:
1527 print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
1528 print " FP results may deviate slightly from other platforms."
1529
1530 if env['EFENCE']:
1531 env.Append(LIBS=['efence'])
1532
1533 if env['USE_KVM']:
1534 if not have_kvm:
1535 print "Warning: Can not enable KVM, host seems to lack KVM support"
1536 env['USE_KVM'] = False
1537 elif not is_isa_kvm_compatible(env['TARGET_ISA']):
1538 print "Info: KVM support disabled due to unsupported host and " \
1539 "target ISA combination"
1540 env['USE_KVM'] = False
1541
1542 if env['BUILD_GPU']:
1543 env.Append(CPPDEFINES=['BUILD_GPU'])
1544
1545 # Warn about missing optional functionality
1546 if env['USE_KVM']:
1547 if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
1548 print "Warning: perf_event headers lack support for the " \
1549 "exclude_host attribute. KVM instruction counts will " \
1550 "be inaccurate."
1551
1552 # Save sticky variable settings back to current variables file
1553 sticky_vars.Save(current_vars_file, env)
1554
1555 if env['USE_SSE2']:
1556 env.Append(CCFLAGS=['-msse2'])
1557
1558 # The src/SConscript file sets up the build rules in 'env' according
1559 # to the configured variables. It returns a list of environments,
1560 # one for each variant build (debug, opt, etc.)
1561 SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')
1562
1563def pairwise(iterable):
1564 "s -> (s0,s1), (s1,s2), (s2, s3), ..."
1565 a, b = itertools.tee(iterable)
1566 b.next()
1567 return itertools.izip(a, b)
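# For example (illustrative):
#   list(pairwise(['arm', 'mips', 'x86'])) == [('arm', 'mips'), ('mips', 'x86')]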
1568
1569# Create false dependencies so SCons will parse ISAs, establish
1570# dependencies, and setup the build Environments serially. Either
1571# SCons (likely) and/or our SConscripts (possibly) cannot cope with -j
1572# greater than 1. It appears to be standard race condition stuff; it
1573# doesn't always fail, but usually, and the behaviors are different.
1574# Every time I tried to remove this, builds would fail in some
1575# creative new way. So, don't do that. You'll want to, though, because
1576# tests/SConscript takes a long time to make its Environments.
1577for t1, t2 in pairwise(sorted(all_isa_deps.iterkeys())):
1578 main.Depends('#%s-deps' % t2, '#%s-deps' % t1)
1579 main.Depends('#%s-environs' % t2, '#%s-environs' % t1)
1580
1581# base help text
1582Help('''
1583Usage: scons [scons options] [build variables] [target(s)]
1584
1585Extra scons options:
1586%(options)s
1587
1588Global build variables:
1589%(global_vars)s
1590
1591%(local_vars)s
1592''' % help_texts)
400 if not git_hooks.exists():
401 mkdir(git_hooks.get_abspath())
402
403 # Use a relative symlink if the hooks live in the source directory
404 if hook.is_under(main.root):
405 script_path = os.path.relpath(
406 script.get_abspath(),
407 hook.Dir(".").get_abspath())
408 else:
409 script_path = script.get_abspath()
410
411 try:
412 os.symlink(script_path, hook.get_abspath())
413 except:
414 print "Error updating git %s hook" % hook_name
415 raise
416
417 if hook_exists("pre-commit") and hook_exists("commit-msg"):
418 return
419
420 print git_style_message,
421 try:
422 raw_input()
423 except:
424 print "Input exception, exiting scons.\n"
425 sys.exit(1)
426
427 git_style_script = File("util/git-pre-commit.py")
428 git_msg_script = File("ext/git-commit-msg")
429
430 hook_install("pre-commit", git_style_script)
431 hook_install("commit-msg", git_msg_script)
432
433# Try to wire up git to the style hooks
434if not ignore_style and main.root.Entry(".git").exists():
435 install_git_style_hooks()
436
437###################################################
438#
439# Figure out which configurations to set up based on the path(s) of
440# the target(s).
441#
442###################################################
443
444# Find default configuration & binary.
445Default(environ.get('M5_DEFAULT_BINARY', 'build/ALPHA/gem5.debug'))
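# For example (illustrative): setting M5_DEFAULT_BINARY=build/ARM/gem5.opt in
# the environment makes a bare 'scons' build that target instead.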
446
447# helper function: find last occurrence of element in list
448def rfind(l, elt, offs = -1):
449 for i in range(len(l)+offs, 0, -1):
450 if l[i] == elt:
451 return i
452 raise ValueError, "element not found"
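# e.g. (illustrative):
#   rfind(['', 'work', 'gem5', 'build', 'ARM', 'gem5.opt'], 'build', -2) == 3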
453
454# Take a list of paths (or SCons Nodes) and return a list with all
455# paths made absolute and ~-expanded. Paths will be interpreted
456# relative to the launch directory unless a different root is provided
457def makePathListAbsolute(path_list, root=GetLaunchDir()):
458 return [abspath(joinpath(root, expanduser(str(p))))
459 for p in path_list]
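# For example (illustrative): when scons is launched from /work/gem5, the
# target 'build/ARM/gem5.opt' becomes '/work/gem5/build/ARM/gem5.opt'.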
460
461# Each target must have 'build' in the interior of the path; the
462# directory below this will determine the build parameters. For
463# example, for target 'foo/bar/build/ALPHA_SE/arch/alpha/blah.do' we
464# recognize that ALPHA_SE specifies the configuration because it
465# follows 'build' in the build path.
466
467# The funky assignment to "[:]" is needed to replace the list contents
468# in place rather than reassign the symbol to a new list, which
469# doesn't work (obviously!).
470BUILD_TARGETS[:] = makePathListAbsolute(BUILD_TARGETS)
471
472# Generate a list of the unique build roots and configs that the
473# collected targets reference.
474variant_paths = []
475build_root = None
476for t in BUILD_TARGETS:
477 path_dirs = t.split('/')
478 try:
479 build_top = rfind(path_dirs, 'build', -2)
480 except:
481 print "Error: no non-leaf 'build' dir found on target path", t
482 Exit(1)
483 this_build_root = joinpath('/',*path_dirs[:build_top+1])
484 if not build_root:
485 build_root = this_build_root
486 else:
487 if this_build_root != build_root:
488 print "Error: build targets not under same build root\n"\
489 " %s\n %s" % (build_root, this_build_root)
490 Exit(1)
491 variant_path = joinpath('/',*path_dirs[:build_top+2])
492 if variant_path not in variant_paths:
493 variant_paths.append(variant_path)
494
495# Make sure build_root exists (might not if this is the first build there)
496if not isdir(build_root):
497 mkdir(build_root)
498main['BUILDROOT'] = build_root
499
500Export('main')
501
502main.SConsignFile(joinpath(build_root, "sconsign"))
503
504# Default duplicate option is to use hard links, but this messes up
505# when you use emacs to edit a file in the target dir, as emacs moves the
506# file to file~ and then copies it back, breaking the link. Symbolic
507# (soft) links work better.
508main.SetOption('duplicate', 'soft-copy')
509
510#
511# Set up global sticky variables... these are common to an entire build
512# tree (not specific to a particular build like ALPHA_SE)
513#
514
515global_vars_file = joinpath(build_root, 'variables.global')
516
517global_vars = Variables(global_vars_file, args=ARGUMENTS)
518
519global_vars.AddVariables(
520 ('CC', 'C compiler', environ.get('CC', main['CC'])),
521 ('CXX', 'C++ compiler', environ.get('CXX', main['CXX'])),
522 ('SWIG', 'SWIG tool', environ.get('SWIG', main['SWIG'])),
523 ('PROTOC', 'protoc tool', environ.get('PROTOC', 'protoc')),
524 ('BATCH', 'Use batch pool for build and tests', False),
525 ('BATCH_CMD', 'Batch pool submission command name', 'qdo'),
526 ('M5_BUILD_CACHE', 'Cache built objects in this directory', False),
527 ('EXTRAS', 'Add extra directories to the compilation', '')
528 )
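# For example (illustrative): 'scons EXTRAS=/work/my-extras build/ARM/gem5.opt'
# adds an out-of-tree source directory, and the value is saved in
# build/variables.global for later runs.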
529
530# Update main environment with values from ARGUMENTS & global_vars_file
531global_vars.Update(main)
532help_texts["global_vars"] += global_vars.GenerateHelpText(main)
533
534# Save sticky variable settings back to current variables file
535global_vars.Save(global_vars_file, main)
536
537# Parse EXTRAS variable to build the list of all directories where we'll
538# look for sources, etc. This list is exported as extras_dir_list.
539base_dir = main.srcdir.abspath
540if main['EXTRAS']:
541 extras_dir_list = makePathListAbsolute(main['EXTRAS'].split(':'))
542else:
543 extras_dir_list = []
544
545Export('base_dir')
546Export('extras_dir_list')
547
548# the ext directory should be on the #includes path
549main.Append(CPPPATH=[Dir('ext')])
550
551def strip_build_path(path, env):
552 path = str(path)
553 variant_base = env['BUILDROOT'] + os.path.sep
554 if path.startswith(variant_base):
555 path = path[len(variant_base):]
556 elif path.startswith('build/'):
557 path = path[6:]
558 return path
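# e.g. (illustrative): strip_build_path('build/ARM/sim/main.cc', env) returns
# 'ARM/sim/main.cc'.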
559
560# Generate a string of the form:
561# common/path/prefix/src1, src2 -> tgt1, tgt2
562# to print while building.
563class Transform(object):
564 # all specific color settings should be here and nowhere else
565 tool_color = termcap.Normal
566 pfx_color = termcap.Yellow
567 srcs_color = termcap.Yellow + termcap.Bold
568 arrow_color = termcap.Blue + termcap.Bold
569 tgts_color = termcap.Yellow + termcap.Bold
570
571 def __init__(self, tool, max_sources=99):
572 self.format = self.tool_color + (" [%8s] " % tool) \
573 + self.pfx_color + "%s" \
574 + self.srcs_color + "%s" \
575 + self.arrow_color + " -> " \
576 + self.tgts_color + "%s" \
577 + termcap.Normal
578 self.max_sources = max_sources
579
580 def __call__(self, target, source, env, for_signature=None):
581 # truncate source list according to max_sources param
582 source = source[0:self.max_sources]
583 def strip(f):
584 return strip_build_path(str(f), env)
585 if len(source) > 0:
586 srcs = map(strip, source)
587 else:
588 srcs = ['']
589 tgts = map(strip, target)
590 # surprisingly, os.path.commonprefix is a dumb char-by-char string
591 # operation that has nothing to do with paths.
592 com_pfx = os.path.commonprefix(srcs + tgts)
593 com_pfx_len = len(com_pfx)
594 if com_pfx:
595 # do some cleanup and sanity checking on common prefix
596 if com_pfx[-1] == ".":
597 # prefix matches all but file extension: ok
598 # back up one to change 'foo.cc -> o' to 'foo.cc -> .o'
599 com_pfx = com_pfx[0:-1]
600 elif com_pfx[-1] == "/":
601 # common prefix is directory path: OK
602 pass
603 else:
604 src0_len = len(srcs[0])
605 tgt0_len = len(tgts[0])
606 if src0_len == com_pfx_len:
607 # source is a substring of target, OK
608 pass
609 elif tgt0_len == com_pfx_len:
610 # target is a substring of source, need to back up to
611 # avoid empty string on RHS of arrow
612 sep_idx = com_pfx.rfind(".")
613 if sep_idx != -1:
614 com_pfx = com_pfx[0:sep_idx]
615 else:
616 com_pfx = ''
617 elif src0_len > com_pfx_len and srcs[0][com_pfx_len] == ".":
618 # still splitting at file extension: ok
619 pass
620 else:
621 # probably a fluke; ignore it
622 com_pfx = ''
623 # recalculate length in case com_pfx was modified
624 com_pfx_len = len(com_pfx)
625 def fmt(files):
626 f = map(lambda s: s[com_pfx_len:], files)
627 return ', '.join(f)
628 return self.format % (com_pfx, fmt(srcs), fmt(tgts))
629
630Export('Transform')
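# As an illustration, a compile step formatted by Transform("CXX") prints a
# line roughly like:
#   [     CXX] ARM/sim/main.cc -> .o
# where the common prefix 'ARM/sim/main' is printed once and shared between
# the source and target names.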
631
632# enable the regression script to use the termcap
633main['TERMCAP'] = termcap
634
635if GetOption('verbose'):
636 def MakeAction(action, string, *args, **kwargs):
637 return Action(action, *args, **kwargs)
638else:
639 MakeAction = Action
640 main['CCCOMSTR'] = Transform("CC")
641 main['CXXCOMSTR'] = Transform("CXX")
642 main['ASCOMSTR'] = Transform("AS")
643 main['SWIGCOMSTR'] = Transform("SWIG")
644 main['ARCOMSTR'] = Transform("AR", 0)
645 main['LINKCOMSTR'] = Transform("LINK", 0)
646 main['RANLIBCOMSTR'] = Transform("RANLIB", 0)
647 main['M4COMSTR'] = Transform("M4")
648 main['SHCCCOMSTR'] = Transform("SHCC")
649 main['SHCXXCOMSTR'] = Transform("SHCXX")
650Export('MakeAction')
651
652# Initialize the Link-Time Optimization (LTO) flags
653main['LTO_CCFLAGS'] = []
654main['LTO_LDFLAGS'] = []
655
656# According to the readme, tcmalloc works best if the compiler doesn't
657# assume that we're using the builtin malloc and friends. These flags
658# are compiler-specific, so we need to set them after we detect which
659# compiler we're using.
660main['TCMALLOC_CCFLAGS'] = []
661
662CXX_version = readCommand([main['CXX'],'--version'], exception=False)
663CXX_V = readCommand([main['CXX'],'-V'], exception=False)
664
665main['GCC'] = CXX_version and CXX_version.find('g++') >= 0
666main['CLANG'] = CXX_version and CXX_version.find('clang') >= 0
667if main['GCC'] + main['CLANG'] > 1:
668 print 'Error: How can we have two at the same time?'
669 Exit(1)
670
671# Set up default C++ compiler flags
672if main['GCC'] or main['CLANG']:
673 # As gcc and clang share many flags, do the common parts here
674 main.Append(CCFLAGS=['-pipe'])
675 main.Append(CCFLAGS=['-fno-strict-aliasing'])
676 # Enable -Wall and -Wextra and then disable the few warnings that
677 # we consistently violate
678 main.Append(CCFLAGS=['-Wall', '-Wundef', '-Wextra',
679 '-Wno-sign-compare', '-Wno-unused-parameter'])
680 # We always compile using C++11
681 main.Append(CXXFLAGS=['-std=c++11'])
682 if sys.platform.startswith('freebsd'):
683 main.Append(CCFLAGS=['-I/usr/local/include'])
684 main.Append(CXXFLAGS=['-I/usr/local/include'])
685else:
686 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
687 print "Don't know what compiler options to use for your compiler."
688 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
689 print termcap.Yellow + ' version:' + termcap.Normal,
690 if not CXX_version:
691 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
692 termcap.Normal
693 else:
694 print CXX_version.replace('\n', '<nl>')
695 print " If you're trying to use a compiler other than GCC"
696 print " or clang, there appears to be something wrong with your"
697 print " environment."
698 print " "
699 print " If you are trying to use a compiler other than those listed"
700    print "       above you will need to fix SConstruct and "
701 print " src/SConscript to support that compiler."
702 Exit(1)
703
704if main['GCC']:
705 # Check for a supported version of gcc. >= 4.8 is chosen for its
706 # level of c++11 support. See
707 # http://gcc.gnu.org/projects/cxx0x.html for details.
708 gcc_version = readCommand([main['CXX'], '-dumpversion'], exception=False)
709 if compareVersions(gcc_version, "4.8") < 0:
710 print 'Error: gcc version 4.8 or newer required.'
711 print ' Installed version:', gcc_version
712 Exit(1)
713
714 main['GCC_VERSION'] = gcc_version
715
716 # gcc from version 4.8 and above generates "rep; ret" instructions
717 # to avoid performance penalties on certain AMD chips. Older
718 # assemblers detect this as an error, "Error: expecting string
719 # instruction after `rep'"
720 as_version_raw = readCommand([main['AS'], '-v', '/dev/null',
721 '-o', '/dev/null'],
722 exception=False).split()
723
724 # version strings may contain extra distro-specific
725 # qualifiers, so play it safe and keep only what comes before
726 # the first hyphen
727 as_version = as_version_raw[-1].split('-')[0] if as_version_raw else None
728
729 if not as_version or compareVersions(as_version, "2.23") < 0:
730 print termcap.Yellow + termcap.Bold + \
731            'Warning: This combination of gcc and binutils has' + \
732 ' known incompatibilities.\n' + \
733 ' If you encounter build problems, please update ' + \
734 'binutils to 2.23.' + \
735 termcap.Normal
736
737 # Make sure we warn if the user has requested to compile with the
738    # Undefined Behavior Sanitizer and this version of gcc does not
739 # support it.
740 if GetOption('with_ubsan') and \
741 compareVersions(gcc_version, '4.9') < 0:
742 print termcap.Yellow + termcap.Bold + \
743 'Warning: UBSan is only supported using gcc 4.9 and later.' + \
744 termcap.Normal
745
746 # Add the appropriate Link-Time Optimization (LTO) flags
747 # unless LTO is explicitly turned off. Note that these flags
748 # are only used by the fast target.
749 if not GetOption('no_lto'):
750 # Pass the LTO flag when compiling to produce GIMPLE
751        # output; we merely create the flags here and only append
752 # them later
753 main['LTO_CCFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
754
755 # Use the same amount of jobs for LTO as we are running
756 # scons with
757 main['LTO_LDFLAGS'] = ['-flto=%d' % GetOption('num_jobs')]
758
759 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin-malloc', '-fno-builtin-calloc',
760 '-fno-builtin-realloc', '-fno-builtin-free'])
761
762 # add option to check for undeclared overrides
763 if compareVersions(gcc_version, "5.0") > 0:
764 main.Append(CCFLAGS=['-Wno-error=suggest-override'])
765
766elif main['CLANG']:
767 # Check for a supported version of clang, >= 3.1 is needed to
768 # support similar features as gcc 4.8. See
769 # http://clang.llvm.org/cxx_status.html for details
770 clang_version_re = re.compile(".* version (\d+\.\d+)")
771 clang_version_match = clang_version_re.search(CXX_version)
772 if (clang_version_match):
773 clang_version = clang_version_match.groups()[0]
774 if compareVersions(clang_version, "3.1") < 0:
775 print 'Error: clang version 3.1 or newer required.'
776 print ' Installed version:', clang_version
777 Exit(1)
778 else:
779 print 'Error: Unable to determine clang version.'
780 Exit(1)
781
782    # clang has a few additional warnings that we disable: extraneous
783    # parentheses are allowed due to Ruby's printing of the AST, and
784    # self assignments are allowed as the generated CPU code
785    # relies on this
786 main.Append(CCFLAGS=['-Wno-parentheses',
787 '-Wno-self-assign',
788 # Some versions of libstdc++ (4.8?) seem to
789 # use struct hash and class hash
790 # interchangeably.
791 '-Wno-mismatched-tags',
792 ])
793
794 main.Append(TCMALLOC_CCFLAGS=['-fno-builtin'])
795
796 # On Mac OS X/Darwin we need to also use libc++ (part of XCode) as
797    # opposed to libstdc++, as the latter is dated.
798 if sys.platform == "darwin":
799 main.Append(CXXFLAGS=['-stdlib=libc++'])
800 main.Append(LIBS=['c++'])
801
802 # On FreeBSD we need libthr.
803 if sys.platform.startswith('freebsd'):
804 main.Append(LIBS=['thr'])
805
806else:
807 print termcap.Yellow + termcap.Bold + 'Error' + termcap.Normal,
808 print "Don't know what compiler options to use for your compiler."
809 print termcap.Yellow + ' compiler:' + termcap.Normal, main['CXX']
810 print termcap.Yellow + ' version:' + termcap.Normal,
811 if not CXX_version:
812 print termcap.Yellow + termcap.Bold + "COMMAND NOT FOUND!" +\
813 termcap.Normal
814 else:
815 print CXX_version.replace('\n', '<nl>')
816 print " If you're trying to use a compiler other than GCC"
817 print " or clang, there appears to be something wrong with your"
818 print " environment."
819 print " "
820 print " If you are trying to use a compiler other than those listed"
821    print "       above you will need to fix SConstruct and "
822 print " src/SConscript to support that compiler."
823 Exit(1)
824
825# Set up common yacc/bison flags (needed for Ruby)
826main['YACCFLAGS'] = '-d'
827main['YACCHXXFILESUFFIX'] = '.hh'
828
829# Do this after we save the settings back, or else we'll tack on an
830# extra 'qdo' every time we run scons.
831if main['BATCH']:
832 main['CC'] = main['BATCH_CMD'] + ' ' + main['CC']
833 main['CXX'] = main['BATCH_CMD'] + ' ' + main['CXX']
834 main['AS'] = main['BATCH_CMD'] + ' ' + main['AS']
835 main['AR'] = main['BATCH_CMD'] + ' ' + main['AR']
836 main['RANLIB'] = main['BATCH_CMD'] + ' ' + main['RANLIB']
837
838if sys.platform == 'cygwin':
839 # cygwin has some header file issues...
840 main.Append(CCFLAGS=["-Wno-uninitialized"])
841
842# Check for the protobuf compiler
843protoc_version = readCommand([main['PROTOC'], '--version'],
844 exception='').split()
845
846# First two words should be "libprotoc x.y.z"
847if len(protoc_version) < 2 or protoc_version[0] != 'libprotoc':
848 print termcap.Yellow + termcap.Bold + \
849 'Warning: Protocol buffer compiler (protoc) not found.\n' + \
850 ' Please install protobuf-compiler for tracing support.' + \
851 termcap.Normal
852 main['PROTOC'] = False
853else:
854 # Based on the availability of the compress stream wrappers,
855 # require 2.1.0
856 min_protoc_version = '2.1.0'
857 if compareVersions(protoc_version[1], min_protoc_version) < 0:
858 print termcap.Yellow + termcap.Bold + \
859 'Warning: protoc version', min_protoc_version, \
860 'or newer required.\n' + \
861 ' Installed version:', protoc_version[1], \
862 termcap.Normal
863 main['PROTOC'] = False
864 else:
865 # Attempt to determine the appropriate include path and
866 # library path using pkg-config, that means we also need to
867 # check for pkg-config. Note that it is possible to use
868 # protobuf without the involvement of pkg-config. Later on we
869 # check go a library config check and at that point the test
870        # do a library config check, and at that point the test
871 if readCommand(['pkg-config', '--version'], exception=''):
872 try:
873 # Attempt to establish what linking flags to add for protobuf
874 # using pkg-config
875 main.ParseConfig('pkg-config --cflags --libs-only-L protobuf')
876 except:
877 print termcap.Yellow + termcap.Bold + \
878 'Warning: pkg-config could not get protobuf flags.' + \
879 termcap.Normal
880
881# Check for SWIG
882if not main.has_key('SWIG'):
883 print 'Error: SWIG utility not found.'
884 print ' Please install (see http://www.swig.org) and retry.'
885 Exit(1)
886
887# Check for appropriate SWIG version
888swig_version = readCommand([main['SWIG'], '-version'], exception='').split()
889# First 3 words should be "SWIG Version x.y.z"
890if len(swig_version) < 3 or \
891 swig_version[0] != 'SWIG' or swig_version[1] != 'Version':
892 print 'Error determining SWIG version.'
893 Exit(1)
894
895min_swig_version = '2.0.4'
896if compareVersions(swig_version[2], min_swig_version) < 0:
897 print 'Error: SWIG version', min_swig_version, 'or newer required.'
898 print ' Installed version:', swig_version[2]
899 Exit(1)
900
901# Check for known incompatibilities. The standard library shipped with
902# gcc >= 4.9 does not play well with swig versions prior to 3.0
903if main['GCC'] and compareVersions(gcc_version, '4.9') >= 0 and \
904 compareVersions(swig_version[2], '3.0') < 0:
905 print termcap.Yellow + termcap.Bold + \
906        'Warning: This combination of gcc and swig has' + \
907 ' known incompatibilities.\n' + \
908 ' If you encounter build problems, please update ' + \
909 'swig to 3.0 or later.' + \
910 termcap.Normal
911
912# Set up SWIG flags & scanner
913swig_flags=Split('-c++ -python -modern -templatereduce $_CPPINCFLAGS')
914main.Append(SWIGFLAGS=swig_flags)
915
916# Check for 'timeout' from GNU coreutils. If present, regressions will
917# be run with a time limit. We require version 8.13 since we rely on
918# support for the '--foreground' option.
919if sys.platform.startswith('freebsd'):
920 timeout_lines = readCommand(['gtimeout', '--version'],
921 exception='').splitlines()
922else:
923 timeout_lines = readCommand(['timeout', '--version'],
924 exception='').splitlines()
925# Get the first line and tokenize it
926timeout_version = timeout_lines[0].split() if timeout_lines else []
927main['TIMEOUT'] = timeout_version and \
928 compareVersions(timeout_version[-1], '8.13') >= 0
929
930# filter out all existing swig scanners; they mess up the dependency
931# tracking for some reason
932scanners = []
933for scanner in main['SCANNERS']:
934 skeys = scanner.skeys
935 if skeys == '.i':
936 continue
937
938 if isinstance(skeys, (list, tuple)) and '.i' in skeys:
939 continue
940
941 scanners.append(scanner)
942
943# add the new swig scanner that we like better
944from SCons.Scanner import ClassicCPP as CPPScanner
945swig_inc_re = '^[ \t]*[%,#][ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")'
946scanners.append(CPPScanner("SwigScan", [ ".i" ], "CPPPATH", swig_inc_re))
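# For example (illustrative), the scanner above recognizes lines such as
#   %include "foo/bar.i"
# or '#include <foo/bar.i>' in a .i file and resolves them via CPPPATH.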
947
948# replace the scanners list that has what we want
949main['SCANNERS'] = scanners
950
951# Add a custom Check function to test for structure members.
952def CheckMember(context, include, decl, member, include_quotes="<>"):
953 context.Message("Checking for member %s in %s..." %
954 (member, decl))
955 text = """
956#include %(header)s
957int main(){
958 %(decl)s test;
959 (void)test.%(member)s;
960 return 0;
961};
962""" % { "header" : include_quotes[0] + include + include_quotes[1],
963 "decl" : decl,
964 "member" : member,
965 }
966
967 ret = context.TryCompile(text, extension=".cc")
968 context.Result(ret)
969 return ret
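# For example (illustrative), the exclude_host probe performed via
#   conf.CheckMember('linux/perf_event.h', 'struct perf_event_attr',
#                    'exclude_host')
# tries to compile roughly this program:
#   #include <linux/perf_event.h>
#   int main(){ struct perf_event_attr test; (void)test.exclude_host; return 0; };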
970
971# Platform-specific configuration. Note again that we assume that all
972# builds under a given build root run on the same host platform.
973conf = Configure(main,
974 conf_dir = joinpath(build_root, '.scons_config'),
975 log_file = joinpath(build_root, 'scons_config.log'),
976 custom_tests = {
977 'CheckMember' : CheckMember,
978 })
979
980# Check if we should compile a 64 bit binary on Mac OS X/Darwin
981try:
982 import platform
983 uname = platform.uname()
984 if uname[0] == 'Darwin' and compareVersions(uname[2], '9.0.0') >= 0:
985 if int(readCommand('sysctl -n hw.cpu64bit_capable')[0]):
986 main.Append(CCFLAGS=['-arch', 'x86_64'])
987 main.Append(CFLAGS=['-arch', 'x86_64'])
988 main.Append(LINKFLAGS=['-arch', 'x86_64'])
989 main.Append(ASFLAGS=['-arch', 'x86_64'])
990except:
991 pass
992
993# Recent versions of scons substitute a "Null" object for Configure()
994# when configuration isn't necessary, e.g., if the "--help" option is
995# present. Unfortuantely this Null object always returns false,
996# breaking all our configuration checks. We replace it with our own
997# more optimistic null object that returns True instead.
998if not conf:
999 def NullCheck(*args, **kwargs):
1000 return True
1001
1002 class NullConf:
1003 def __init__(self, env):
1004 self.env = env
1005 def Finish(self):
1006 return self.env
1007 def __getattr__(self, mname):
1008 return NullCheck
1009
1010 conf = NullConf(main)
1011
1012# Cache build files in the supplied directory.
1013if main['M5_BUILD_CACHE']:
1014 print 'Using build cache located at', main['M5_BUILD_CACHE']
1015 CacheDir(main['M5_BUILD_CACHE'])
1016
1017if not GetOption('without_python'):
1018 # Find Python include and library directories for embedding the
1019 # interpreter. We rely on python-config to resolve the appropriate
1020 # includes and linker flags. ParseConfig does not seem to understand
1021 # the more exotic linker flags such as -Xlinker and -export-dynamic so
1022 # we add them explicitly below. If you want to link in an alternate
1023 # version of python, see above for instructions on how to invoke
1024 # scons with the appropriate PATH set.
1025 #
1026 # First we check if python2-config exists, else we use python-config
1027 python_config = readCommand(['which', 'python2-config'],
1028 exception='').strip()
1029 if not os.path.exists(python_config):
1030 python_config = readCommand(['which', 'python-config'],
1031 exception='').strip()
1032 py_includes = readCommand([python_config, '--includes'],
1033 exception='').split()
1034 # Strip the -I from the include folders before adding them to the
1035 # CPPPATH
1036 main.Append(CPPPATH=map(lambda inc: inc[2:], py_includes))
1037
1038 # Read the linker flags and split them into libraries and other link
1039 # flags. The libraries are added later through the call the CheckLib.
1040 py_ld_flags = readCommand([python_config, '--ldflags'],
1041 exception='').split()
1042 py_libs = []
1043 for lib in py_ld_flags:
1044 if not lib.startswith('-l'):
1045 main.Append(LINKFLAGS=[lib])
1046 else:
1047 lib = lib[2:]
1048 if lib not in py_libs:
1049 py_libs.append(lib)
1050
1051 # verify that this stuff works
1052 if not conf.CheckHeader('Python.h', '<>'):
1053 print "Error: can't find Python.h header in", py_includes
1054 print "Install Python headers (package python-dev on Ubuntu and RedHat)"
1055 Exit(1)
1056
1057 for lib in py_libs:
1058 if not conf.CheckLib(lib):
1059 print "Error: can't find library %s required by python" % lib
1060 Exit(1)
1061
1062# On Solaris you need to use libsocket for socket ops
1063if not conf.CheckLibWithHeader(None, 'sys/socket.h', 'C++', 'accept(0,0,0);'):
1064 if not conf.CheckLibWithHeader('socket', 'sys/socket.h', 'C++', 'accept(0,0,0);'):
1065 print "Can't find library with socket calls (e.g. accept())"
1066 Exit(1)
1067
1068# Check for zlib. If the check passes, libz will be automatically
1069# added to the LIBS environment variable.
1070if not conf.CheckLibWithHeader('z', 'zlib.h', 'C++','zlibVersion();'):
1071 print 'Error: did not find needed zlib compression library '\
1072 'and/or zlib.h header file.'
1073 print ' Please install zlib and try again.'
1074 Exit(1)
1075
1076# If we have the protobuf compiler, also make sure we have the
1077# development libraries. If the check passes, libprotobuf will be
1078# automatically added to the LIBS environment variable. After
1079# this, we can use the HAVE_PROTOBUF flag to determine if we have
1080# got both protoc and libprotobuf available.
1081main['HAVE_PROTOBUF'] = main['PROTOC'] and \
1082 conf.CheckLibWithHeader('protobuf', 'google/protobuf/message.h',
1083 'C++', 'GOOGLE_PROTOBUF_VERIFY_VERSION;')
1084
1085# If we have the compiler but not the library, print another warning.
1086if main['PROTOC'] and not main['HAVE_PROTOBUF']:
1087 print termcap.Yellow + termcap.Bold + \
1088 'Warning: did not find protocol buffer library and/or headers.\n' + \
1089 ' Please install libprotobuf-dev for tracing support.' + \
1090 termcap.Normal
1091
1092# Check for librt.
1093have_posix_clock = \
1094 conf.CheckLibWithHeader(None, 'time.h', 'C',
1095 'clock_nanosleep(0,0,NULL,NULL);') or \
1096 conf.CheckLibWithHeader('rt', 'time.h', 'C',
1097 'clock_nanosleep(0,0,NULL,NULL);')
1098
1099have_posix_timers = \
1100 conf.CheckLibWithHeader([None, 'rt'], [ 'time.h', 'signal.h' ], 'C',
1101 'timer_create(CLOCK_MONOTONIC, NULL, NULL);')
1102
1103if not GetOption('without_tcmalloc'):
1104 if conf.CheckLib('tcmalloc'):
1105 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1106 elif conf.CheckLib('tcmalloc_minimal'):
1107 main.Append(CCFLAGS=main['TCMALLOC_CCFLAGS'])
1108 else:
1109 print termcap.Yellow + termcap.Bold + \
1110 "You can get a 12% performance improvement by "\
1111 "installing tcmalloc (libgoogle-perftools-dev package "\
1112 "on Ubuntu or RedHat)." + termcap.Normal
1113
1114
1115# Detect back trace implementations. The last implementation in the
1116# list will be used by default.
1117backtrace_impls = [ "none" ]
1118
1119if conf.CheckLibWithHeader(None, 'execinfo.h', 'C',
1120 'backtrace_symbols_fd((void*)0, 0, 0);'):
1121 backtrace_impls.append("glibc")
1122elif conf.CheckLibWithHeader('execinfo', 'execinfo.h', 'C',
1123 'backtrace_symbols_fd((void*)0, 0, 0);'):
1124 # NetBSD and FreeBSD need libexecinfo.
1125 backtrace_impls.append("glibc")
1126 main.Append(LIBS=['execinfo'])
1127
1128if backtrace_impls[-1] == "none":
1129 default_backtrace_impl = "none"
1130 print termcap.Yellow + termcap.Bold + \
1131 "No suitable back trace implementation found." + \
1132 termcap.Normal
1133
1134if not have_posix_clock:
1135 print "Can't find library for POSIX clocks."
1136
1137# Check for <fenv.h> (C99 FP environment control)
1138have_fenv = conf.CheckHeader('fenv.h', '<>')
1139if not have_fenv:
1140 print "Warning: Header file <fenv.h> not found."
1141 print " This host has no IEEE FP rounding mode control."
1142
1143# Check if we should enable KVM-based hardware virtualization. The API
1144# we rely on exists since version 2.6.36 of the kernel, but somehow
1145# the KVM_API_VERSION does not reflect the change. We test for one of
1146# the types as a fall back.
1147have_kvm = conf.CheckHeader('linux/kvm.h', '<>')
1148if not have_kvm:
1149 print "Info: Compatible header file <linux/kvm.h> not found, " \
1150 "disabling KVM support."
1151
1152# x86 needs support for xsave. We test for the structure here since we
1153# won't be able to run new tests by the time we know which ISA we're
1154# targeting.
1155have_kvm_xsave = conf.CheckTypeSize('struct kvm_xsave',
1156 '#include <linux/kvm.h>') != 0
1157
1158# Check if the requested target ISA is compatible with the host
1159def is_isa_kvm_compatible(isa):
1160 try:
1161 import platform
1162 host_isa = platform.machine()
1163 except:
1164 print "Warning: Failed to determine host ISA."
1165 return False
1166
1167 if not have_posix_timers:
1168 print "Warning: Can not enable KVM, host seems to lack support " \
1169 "for POSIX timers"
1170 return False
1171
1172 if isa == "arm":
1173 return host_isa in ( "armv7l", "aarch64" )
1174 elif isa == "x86":
1175 if host_isa != "x86_64":
1176 return False
1177
1178 if not have_kvm_xsave:
1179 print "KVM on x86 requires xsave support in kernel headers."
1180 return False
1181
1182 return True
1183 else:
1184 return False
1185
1186
1187# Check if the exclude_host attribute is available. We want this to
1188# get accurate instruction counts in KVM.
1189main['HAVE_PERF_ATTR_EXCLUDE_HOST'] = conf.CheckMember(
1190 'linux/perf_event.h', 'struct perf_event_attr', 'exclude_host')
1191
1192
1193######################################################################
1194#
1195# Finish the configuration
1196#
1197main = conf.Finish()
1198
1199######################################################################
1200#
1201# Collect all non-global variables
1202#
1203
1204# Define the universe of supported ISAs
1205all_isa_list = [ ]
1206all_gpu_isa_list = [ ]
1207Export('all_isa_list')
1208Export('all_gpu_isa_list')
1209
1210class CpuModel(object):
1211 '''The CpuModel class encapsulates everything the ISA parser needs to
1212 know about a particular CPU model.'''
1213
1214 # Dict of available CPU model objects. Accessible as CpuModel.dict.
1215 dict = {}
1216
1217 # Constructor. Automatically adds models to CpuModel.dict.
1218 def __init__(self, name, default=False):
1219 self.name = name # name of model
1220
1221 # This cpu is enabled by default
1222 self.default = default
1223
1224 # Add self to dict
1225 if name in CpuModel.dict:
1226 raise AttributeError, "CpuModel '%s' already registered" % name
1227 CpuModel.dict[name] = self
1228
1229Export('CpuModel')
1230
1231# Sticky variables get saved in the variables file so they persist from
1232# one invocation to the next (unless overridden, in which case the new
1233# value becomes sticky).
1234sticky_vars = Variables(args=ARGUMENTS)
1235Export('sticky_vars')
1236
1237# Sticky variables that should be exported
1238export_vars = []
1239Export('export_vars')
1240
1241# For Ruby
1242all_protocols = []
1243Export('all_protocols')
1244protocol_dirs = []
1245Export('protocol_dirs')
1246slicc_includes = []
1247Export('slicc_includes')
1248
1249# Walk the tree and execute all SConsopts scripts that wil add to the
1250# above variables
1251if GetOption('verbose'):
1252 print "Reading SConsopts"
1253for bdir in [ base_dir ] + extras_dir_list:
1254 if not isdir(bdir):
1255 print "Error: directory '%s' does not exist" % bdir
1256 Exit(1)
1257 for root, dirs, files in os.walk(bdir):
1258 if 'SConsopts' in files:
1259 if GetOption('verbose'):
1260 print "Reading", joinpath(root, 'SConsopts')
1261 SConscript(joinpath(root, 'SConsopts'))
1262
1263all_isa_list.sort()
1264all_gpu_isa_list.sort()
1265
1266sticky_vars.AddVariables(
1267 EnumVariable('TARGET_ISA', 'Target ISA', 'alpha', all_isa_list),
1268 EnumVariable('TARGET_GPU_ISA', 'Target GPU ISA', 'hsail', all_gpu_isa_list),
1269 ListVariable('CPU_MODELS', 'CPU models',
1270 sorted(n for n,m in CpuModel.dict.iteritems() if m.default),
1271 sorted(CpuModel.dict.keys())),
1272 BoolVariable('EFENCE', 'Link with Electric Fence malloc debugger',
1273 False),
1274 BoolVariable('SS_COMPATIBLE_FP',
1275 'Make floating-point results compatible with SimpleScalar',
1276 False),
1277 BoolVariable('USE_SSE2',
1278 'Compile for SSE2 (-msse2) to get IEEE FP on x86 hosts',
1279 False),
1280 BoolVariable('USE_POSIX_CLOCK', 'Use POSIX Clocks', have_posix_clock),
1281 BoolVariable('USE_FENV', 'Use <fenv.h> IEEE mode control', have_fenv),
1282 BoolVariable('CP_ANNOTATE', 'Enable critical path annotation capability', False),
1283 BoolVariable('USE_KVM', 'Enable hardware virtualized (KVM) CPU models', have_kvm),
1284 BoolVariable('BUILD_GPU', 'Build the compute-GPU model', False),
1285 EnumVariable('PROTOCOL', 'Coherence protocol for Ruby', 'None',
1286 all_protocols),
1287 EnumVariable('BACKTRACE_IMPL', 'Post-mortem dump implementation',
1288 backtrace_impls[-1], backtrace_impls)
1289 )
1290
1291# These variables get exported to #defines in config/*.hh (see src/SConscript).
1292export_vars += ['USE_FENV', 'SS_COMPATIBLE_FP', 'TARGET_ISA', 'TARGET_GPU_ISA',
1293 'CP_ANNOTATE', 'USE_POSIX_CLOCK', 'USE_KVM', 'PROTOCOL',
1294 'HAVE_PROTOBUF', 'HAVE_PERF_ATTR_EXCLUDE_HOST']
1295
1296###################################################
1297#
1298# Define a SCons builder for configuration flag headers.
1299#
1300###################################################
1301
1302# This function generates a config header file that #defines the
1303# variable symbol to the current variable setting (0 or 1). The source
1304# operands are the name of the variable and a Value node containing the
1305# value of the variable.
1306def build_config_file(target, source, env):
1307 (variable, value) = [s.get_contents() for s in source]
1308 f = file(str(target[0]), 'w')
1309 print >> f, '#define', variable, value
1310 f.close()
1311 return None
1312
1313# Combine the two functions into a scons Action object.
1314config_action = MakeAction(build_config_file, Transform("CONFIG H", 2))
1315
1316# The emitter munges the source & target node lists to reflect what
1317# we're really doing.
1318def config_emitter(target, source, env):
1319 # extract variable name from Builder arg
1320 variable = str(target[0])
1321 # True target is config header file
1322 target = joinpath('config', variable.lower() + '.hh')
1323 val = env[variable]
1324 if isinstance(val, bool):
1325 # Force value to 0/1
1326 val = int(val)
1327 elif isinstance(val, str):
1328 val = '"' + val + '"'
1329
1330 # Sources are variable name & value (packaged in SCons Value nodes)
1331 return ([target], [Value(variable), Value(val)])
1332
1333config_builder = Builder(emitter = config_emitter, action = config_action)
1334
1335main.Append(BUILDERS = { 'ConfigFile' : config_builder })
1336
1337# libelf build is shared across all configs in the build root.
1338main.SConscript('ext/libelf/SConscript',
1339 variant_dir = joinpath(build_root, 'libelf'))
1340
1341# iostream3 build is shared across all configs in the build root.
1342main.SConscript('ext/iostream3/SConscript',
1343 variant_dir = joinpath(build_root, 'iostream3'))
1344
1345# libfdt build is shared across all configs in the build root.
1346main.SConscript('ext/libfdt/SConscript',
1347 variant_dir = joinpath(build_root, 'libfdt'))
1348
1349# fputils build is shared across all configs in the build root.
1350main.SConscript('ext/fputils/SConscript',
1351 variant_dir = joinpath(build_root, 'fputils'))
1352
1353# DRAMSim2 build is shared across all configs in the build root.
1354main.SConscript('ext/dramsim2/SConscript',
1355 variant_dir = joinpath(build_root, 'dramsim2'))
1356
1357# DRAMPower build is shared across all configs in the build root.
1358main.SConscript('ext/drampower/SConscript',
1359 variant_dir = joinpath(build_root, 'drampower'))
1360
1361# nomali build is shared across all configs in the build root.
1362main.SConscript('ext/nomali/SConscript',
1363 variant_dir = joinpath(build_root, 'nomali'))

###################################################
#
# This function is used to set up a directory with switching headers
#
###################################################

main['ALL_ISA_LIST'] = all_isa_list
main['ALL_GPU_ISA_LIST'] = all_gpu_isa_list
all_isa_deps = {}
def make_switching_dir(dname, switch_headers, env):
    # Generate the header. target[0] is the full path of the output
    # header to generate. 'source' is a dummy variable, since we get the
    # list of ISAs from env['ALL_ISA_LIST'].
    def gen_switch_hdr(target, source, env):
        fname = str(target[0])
        isa = env['TARGET_ISA'].lower()
        try:
            f = open(fname, 'w')
            print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
            f.close()
        except IOError:
            print "Failed to create %s" % fname
            raise

    # Build SCons Action object. 'varlist' specifies env vars that this
    # action depends on; when env['ALL_ISA_LIST'] changes these actions
    # should get re-executed.
    switch_hdr_action = MakeAction(gen_switch_hdr,
                          Transform("GENERATE"), varlist=['ALL_ISA_LIST'])

    # Instantiate actions for each header
    for hdr in switch_headers:
        env.Command(hdr, [], switch_hdr_action)

    isa_target = Dir('.').up().name.lower().replace('_', '-')
    env['PHONY_BASE'] = '#'+isa_target
    all_isa_deps[isa_target] = None

Export('make_switching_dir')
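
# A sketch of the effect (the header name below is only an example; the
# real lists come from the per-ISA SConscripts): if a switch header such
# as 'arch/decoder.hh' is requested and this variant was configured with
# TARGET_ISA=arm, gen_switch_hdr writes a one-line forwarding header:
#
#     #include "arch/arm/decoder.hh"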

def make_gpu_switching_dir(dname, switch_headers, env):
    # Generate the header. target[0] is the full path of the output
    # header to generate. 'source' is a dummy variable, since we get the
    # list of GPU ISAs from env['ALL_GPU_ISA_LIST'].
    def gen_switch_hdr(target, source, env):
        fname = str(target[0])

        isa = env['TARGET_GPU_ISA'].lower()

        try:
            f = open(fname, 'w')
            print >>f, '#include "%s/%s/%s"' % (dname, isa, basename(fname))
            f.close()
        except IOError:
            print "Failed to create %s" % fname
            raise

    # Build SCons Action object. 'varlist' specifies env vars that this
    # action depends on; when env['ALL_GPU_ISA_LIST'] changes these actions
    # should get re-executed.
    switch_hdr_action = MakeAction(gen_switch_hdr,
                          Transform("GENERATE"), varlist=['ALL_GPU_ISA_LIST'])

    # Instantiate actions for each header
    for hdr in switch_headers:
        env.Command(hdr, [], switch_hdr_action)

Export('make_gpu_switching_dir')
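
# The GPU flavor works the same way, but the forwarded path is chosen by
# TARGET_GPU_ISA; e.g. with TARGET_GPU_ISA=hsail a switch header would
# forward to "<dname>/hsail/<header>". (hsail is just one example value.)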

# all-isas -> all-deps -> all-environs -> all-targets
main.Alias('#all-isas', [])
main.Alias('#all-deps', '#all-isas')

# Dummy target to ensure all environments are created before telling
# SCons what to actually make (the command line arguments). We attach
# them to the dependence graph after the environments are complete.
ORIG_BUILD_TARGETS = list(BUILD_TARGETS) # force a copy; gets closure to work.
def environsComplete(target, source, env):
    for t in ORIG_BUILD_TARGETS:
        main.Depends('#all-targets', t)

# Each build/* switching_dir attaches its *-environs target to #all-environs.
main.Append(BUILDERS = {'CompleteEnvirons' :
                        Builder(action=MakeAction(environsComplete, None))})
main.CompleteEnvirons('#all-environs', [])

def doNothing(**ignored): pass
main.Append(BUILDERS = {'Dummy': Builder(action=MakeAction(doNothing, None))})

# The final target to which all the original targets ultimately get attached.
main.Dummy('#all-targets', '#all-environs')
BUILD_TARGETS[:] = ['#all-targets']
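
# To make the ordering concrete (hypothetical command line): a request for
# build/ARM/gem5.opt is temporarily replaced by '#all-targets' here;
# environsComplete() later re-attaches gem5.opt as a dependency of
# '#all-targets', so the requested binary is still built, but only after
# every variant directory has finished creating its environments.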

###################################################
#
# Define build environments for selected configurations.
#
###################################################

for variant_path in variant_paths:
    if not GetOption('silent'):
        print "Building in", variant_path

    # Make a copy of the build-root environment to use for this config.
    env = main.Clone()
    env['BUILDDIR'] = variant_path

    # variant_dir is the tail component of build path, and is used to
    # determine the build parameters (e.g., 'ALPHA_SE')
    (build_root, variant_dir) = splitpath(variant_path)

    # Set env variables according to the build directory config.
    sticky_vars.files = []
    # Variables for $BUILD_ROOT/$VARIANT_DIR are stored in
    # $BUILD_ROOT/variables/$VARIANT_DIR so you can nuke
    # $BUILD_ROOT/$VARIANT_DIR without losing your variables settings.
    current_vars_file = joinpath(build_root, 'variables', variant_dir)
    if isfile(current_vars_file):
        sticky_vars.files.append(current_vars_file)
        if not GetOption('silent'):
            print "Using saved variables file %s" % current_vars_file
    else:
        # Build dir-specific variables file doesn't exist.

        # Make sure the directory is there so we can create it later
        opt_dir = dirname(current_vars_file)
        if not isdir(opt_dir):
            mkdir(opt_dir)

        # Get default build variables from source tree. Variables are
        # normally determined by name of $VARIANT_DIR, but can be
        # overridden by '--default=' arg on command line.
        default = GetOption('default')
        opts_dir = joinpath(main.root.abspath, 'build_opts')
        if default:
            default_vars_files = [joinpath(build_root, 'variables', default),
                                  joinpath(opts_dir, default)]
        else:
            default_vars_files = [joinpath(opts_dir, variant_dir)]
        existing_files = filter(isfile, default_vars_files)
        if existing_files:
            default_vars_file = existing_files[0]
            sticky_vars.files.append(default_vars_file)
            print "Variables file %s not found,\n  using defaults in %s" \
                  % (current_vars_file, default_vars_file)
        else:
            print "Error: cannot find variables file %s or " \
                  "default file(s) %s" \
                  % (current_vars_file, ' or '.join(default_vars_files))
            Exit(1)
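
    # Worked example (hypothetical paths): when building build/ARM/gem5.opt
    # for the first time, build/variables/ARM does not exist yet, so the
    # defaults are read from build_opts/ARM; on later builds the saved
    # build/variables/ARM file takes precedence. With --default=X86 the
    # defaults would come from build/variables/X86 or build_opts/X86 instead.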

    # Apply current variable settings to env
    sticky_vars.Update(env)

    help_texts["local_vars"] += \
        "Build variables for %s:\n" % variant_dir \
        + sticky_vars.GenerateHelpText(env)

    # Process variable settings.

    if not have_fenv and env['USE_FENV']:
        print "Warning: <fenv.h> not available; " \
              "forcing USE_FENV to False in", variant_dir + "."
        env['USE_FENV'] = False

    if not env['USE_FENV']:
        print "Warning: No IEEE FP rounding mode control in", variant_dir + "."
        print "         FP results may deviate slightly from other platforms."

    if env['EFENCE']:
        env.Append(LIBS=['efence'])

    if env['USE_KVM']:
        if not have_kvm:
            print "Warning: Cannot enable KVM, host seems to lack KVM support"
            env['USE_KVM'] = False
        elif not is_isa_kvm_compatible(env['TARGET_ISA']):
            print "Info: KVM support disabled due to unsupported host and " \
                  "target ISA combination"
            env['USE_KVM'] = False

    if env['BUILD_GPU']:
        env.Append(CPPDEFINES=['BUILD_GPU'])

    # Warn about missing optional functionality
    if env['USE_KVM']:
        if not main['HAVE_PERF_ATTR_EXCLUDE_HOST']:
            print "Warning: perf_event headers lack support for the " \
                  "exclude_host attribute. KVM instruction counts will " \
                  "be inaccurate."

    # Save sticky variable settings back to current variables file
    sticky_vars.Save(current_vars_file, env)

    if env['USE_SSE2']:
        env.Append(CCFLAGS=['-msse2'])

    # The src/SConscript file sets up the build rules in 'env' according
    # to the configured variables. It returns a list of environments,
    # one for each variant build (debug, opt, etc.)
    SConscript('src/SConscript', variant_dir = variant_path, exports = 'env')

def pairwise(iterable):
    "s -> (s0,s1), (s1,s2), (s2, s3), ..."
    a, b = itertools.tee(iterable)
    b.next()
    return itertools.izip(a, b)

# Create false dependencies so SCons will parse ISAs, establish
# dependencies, and set up the build environments serially. Either
# SCons (likely) and/or our SConscripts (possibly) cannot cope with -j
# greater than 1. It appears to be standard race-condition behavior: the
# build doesn't always fail, but it usually does, and the failure mode
# varies. Every attempt to remove this serialization has broken the build
# in some creative new way, so resist the temptation, even though
# tests/SConscript takes a long time to make its Environments.
for t1, t2 in pairwise(sorted(all_isa_deps.iterkeys())):
    main.Depends('#%s-deps' % t2, '#%s-deps' % t1)
    main.Depends('#%s-environs' % t2, '#%s-environs' % t1)
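
# For example (hypothetical set of configured ISAs): if all_isa_deps ends
# up with the keys 'arm', 'mips' and 'x86', the loop above adds
#
#     main.Depends('#mips-deps', '#arm-deps')
#     main.Depends('#x86-deps', '#mips-deps')
#
# (and likewise for the -environs targets), so the per-ISA setup runs one
# ISA at a time even under 'scons -j N'.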

# base help text
Help('''
Usage: scons [scons options] [build variables] [target(s)]

Extra scons options:
%(options)s

Global build variables:
%(global_vars)s

%(local_vars)s
''' % help_texts)